<?xml version="1.0" encoding="utf-8"?>
<raweb xmlns:xlink="http://www.w3.org/1999/xlink" xml:lang="en" year="2013">
  <identification id="potioc" isproject="false">
    <shortname>POTIOC</shortname>
    <projectName>Popular interaction with 3d content</projectName>
    <theme-de-recherche>Interaction and visualization</theme-de-recherche>
    <domaine-de-recherche>Perception, Cognition and Interaction</domaine-de-recherche>
    <urlTeam>http://team.inria.fr/potioc</urlTeam>
    <datecreation type="Team">2012 January 01</datecreation>
    <dateupdate type="Project-Team">2014 January 01</dateupdate>
    <UR name="Bordeaux"/>
    <keywords>
      <term>Interaction</term>
      <term>User Interface</term>
      <term>Virtual Reality</term>
      <term>Brain Computer Interface</term>
      <term>Augmented Reality</term>
    </keywords>
    <moreinfo/>
  </identification>
  <team id="uid1">
    <person key="iparla-2005-id18195">
      <firstname>Martin</firstname>
      <lastname>Hachet</lastname>
      <categoryPro>Chercheur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Team leader, Researcher, Inria</moreinfo>
      <hdr>oui</hdr>
    </person>
    <person key="siames-2005-id18481">
      <firstname>Fabien</firstname>
      <lastname>Lotte</lastname>
      <categoryPro>Chercheur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Researcher, Inria</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983231680">
      <firstname>Florent</firstname>
      <lastname>Berthaut</lastname>
      <categoryPro>Technique</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux I, Engineer, until Aug 2013</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983242432">
      <firstname>Alison</firstname>
      <lastname>Cellard</lastname>
      <categoryPro>Technique</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria, Engineer</moreinfo>
    </person>
    <person key="iparla-2011-idp140711554775856">
      <firstname>Pauline</firstname>
      <lastname>Davignon</lastname>
      <categoryPro>Technique</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux I, Engineer, granted by FUI SIMCA, until Feb 2013</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983237056">
      <firstname>Thomas</firstname>
      <lastname>Hulin</lastname>
      <categoryPro>Technique</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria, Engineer, granted by Caisse des Dépôts et Consignations</moreinfo>
    </person>
    <person key="iparla-2010-id59958">
      <firstname>Jérémy</firstname>
      <lastname>Laviole</lastname>
      <categoryPro>PhD</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux I, PhD Student</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782579760">
      <firstname>Jérémy</firstname>
      <lastname>Frey</lastname>
      <categoryPro>PhD</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux I, PhD Student</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983226304">
      <firstname>Renaud</firstname>
      <lastname>Gervais</lastname>
      <categoryPro>PhD</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria, PhD Student, CORDI-S</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782584368">
      <firstname>Camille</firstname>
      <lastname>Jeunet</lastname>
      <categoryPro>PhD</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux Segalen, PhD Student</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983218240">
      <firstname>Jacek</firstname>
      <lastname>Jankowski</lastname>
      <categoryPro>PostDoc</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria, Postdoc, granted by Caisse des Dépôts et Consignations</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983215552">
      <firstname>Florian</firstname>
      <lastname>Larrue</lastname>
      <categoryPro>PostDoc</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Univ Bordeaux 1, Postdoc, granted by FUI SIMCA, until Apr 2013</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782591472">
      <firstname>Christian</firstname>
      <lastname>Mühl</lastname>
      <categoryPro>PostDoc</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria, Post-doc</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782593776">
      <firstname>Flavio</firstname>
      <lastname>Bertini</lastname>
      <categoryPro>Visiteur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>University of Bologna, from Dec 2013</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782596080">
      <firstname>Nicoletta</firstname>
      <lastname>Caramia</lastname>
      <categoryPro>Visiteur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>University of Pavia, Italy, from Apr 2013 until Jul 2013</moreinfo>
    </person>
    <person key="potioc-2012-idp140283983245120">
      <firstname>Pierre-Alexandre</firstname>
      <lastname>Favier</lastname>
      <categoryPro>Visiteur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Associate Professor, IPB</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782600752">
      <firstname>Asier</firstname>
      <lastname>Marzo</lastname>
      <categoryPro>Visiteur</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Universidad Pública de Navarra, from Sep 2013 until Nov 2013</moreinfo>
    </person>
    <person key="potioc-2013-idp140285782603184">
      <firstname>Catherine</firstname>
      <lastname>Cattaert Megrat</lastname>
      <categoryPro>Assistant</categoryPro>
      <research-centre>Bordeaux</research-centre>
      <moreinfo>Inria</moreinfo>
    </person>
  </team>
  <presentation id="uid2">
    <bodyTitle>Overall Objectives</bodyTitle>
    <subsection id="uid3" level="1">
      <bodyTitle>Introduction</bodyTitle>
      <p>The overall objective of Potioc is to <b>open 3D digital worlds<formula type="inline"><math xmlns="http://www.w3.org/1998/Math/MathML" overflow="scroll"><msup><mrow/><mn>1</mn></msup></math></formula> to everyone</b> with the ultimate goal of stimulating creation, communication, understanding, or entertainment. To this end, we propose to focus on the <b>design</b>, <b>development</b> and <b>evaluation</b> of <b>3D User Interfaces</b> (UI), mostly targeted at the general public. In other words, we aim at designing <b>popular</b> 3D interfaces.</p>
      <object id="uid4">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/PapARt.png" type="float" width="298.8987pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Example of a 3D user interface where a child manipulates a 3D scene projected on a sheet of paper to prepare a drawing. <i>Palais de la découverte, Paris, Nov. 2011</i>.</caption>
      </object>
      <p>Printing, photography, Internet, are examples of technologies that have changed the way people communicate together, understand, create, or get entertainment. At the beginning, these technologies were dedicated to expert users, then they have become available to all, contributing to the fulfillment of everyone. Today, 3D digital worlds could play a similar role, but they tend to be fully exploited only by a limited number of highly trained people. Our motivation is to democratize such 3D technologies for favoring communication, understanding, creation, or entertainment for everyone.</p>
      <p>For example, in the scope of learning, it can be difficult for a child to understand how molecules can be assembled together from a drawing on a piece of paper, as classically done in school. We can presume that, with a 3D visualization of the molecules, the understanding of the data can be improved. Beyond a 3D visualization, it will be particularly important for the child to manipulate the molecules to understand how they fit together. Indeed, perception and action are extremely linked, as it has been shown by Berthoz. By being active in the process of interactive visualization, the implication of the child will be increased and, consequently, the learning process will be favored. Such an example requires the completion of complex interaction tasks for which there is no standard solution. Numerous research questions are thus linked to such a challenge, and this is what we want to address in Potioc.</p>
      <p>To reach this goal, we focus on the interaction aspect, which we consider as the main barrier for increasing the adoption of 3D digital worlds by all. Indeed, without a proper interface, users would not be able or would not want to use 3D digital worlds. We address 3D interaction tasks such as navigation in 3D environments, manipulation of 3D objects, exploration of volumes, etc. The conception of relevant user interfaces for completing such tasks implies tackling at the same time motor, perceptive, and cognitive aspects, at different levels: hardware, software, and at the user level. In Potioc, we follow a multi-disciplinary approach where our research and development works are arranged according to the following three main axes:</p>
      <simplelist>
        <li id="uid5">
          <p noindent="true">Exploring and enhancing input/output interaction space</p>
        </li>
        <li id="uid6">
          <p noindent="true">Designing targeted interaction techniques</p>
        </li>
        <li id="uid7">
          <p noindent="true">Understanding and assessing user interaction</p>
        </li>
      </simplelist>
      <p>We do not focus on one specific technology. Instead, we take advantage of the wide input and output space that is available today, with a special interest for emerging technologies. In particular, we consider both technologies that are able to detect motor activities, i.e., physical actions of the user (e.g., joysticks, multitouch surfaces and 3D spatial technologies) and interfaces measuring and interpreting the physiological signals of the user, i.e., non-motor activities of the user. This notably includes Brain-Computer Interfaces (BCI)<formula type="inline"><math xmlns="http://www.w3.org/1998/Math/MathML" overflow="scroll"><msup><mrow/><mn>2</mn></msup></math></formula>, which provide a unique way to access the users' mental states, thus opening the door to many promising applications and tools that tap into this new kind of input, unavailable to other interfaces. We also explore various output spaces, including large-scale visualization, stereoscopic visualization, or tangible approaches.</p>
      <p>Based on this rich interaction space, we design interaction techniques dedicated to the completion of 3D interaction tasks. In this project part, we put aside hardware considerations to concentrate on the software part of the UIs. Such interaction techniques target users who are not expert in 3D interaction. In particular, our target users are the general public, specialists with no expertise in 3D, and people with impairments.</p>
      <p>Finally, a large part of the project is dedicated to human factors for understanding and for evaluating user interaction. In particular, we conduct experiments <i>a-priori</i> to guide our developments and <i>a-posteriori</i> to assess them. We also explore passive BCI to better evaluate 3D interaction.</p>
      <p>Previous research works on 3D UI tended to be driven by criteria of speed, efficiency or precision for industrial purposes. These criteria, which are very important in domains where profitability is crucial, may not be the most fundamental ones in our project where the approach is to increase the intrinsic motivation of non-expert users. Therefore, in Potioc, we will emphasize criteria such as appeal and enjoyment, which have been scarcely taken into account in the past. Indeed, we believe that non-expert users need pleasant and enjoyable interfaces to adopt 3D digital worlds, in the scope of education, art, assistive technologies, or entertainment.</p>
      <p> </p>
      <p noindent="true"> </p>
      <p noindent="true"><formula type="inline"><math xmlns="http://www.w3.org/1998/Math/MathML" overflow="scroll"><msup><mrow/><mn>1</mn></msup></math></formula><i>A 3D digital world can be defined as a 3D data space with which one can interact in real-time, the display of such data being either monoscopic or stereoscopic. For instance, a city that has been modeled in 3D, in which a user can navigate using a joystick is a 3D digital world. On the contrary, a 3D movie for which the user remains a passive viewer does not belong to this category.</i>
 </p>
      <p noindent="true">
        <formula type="inline">
          <math xmlns="http://www.w3.org/1998/Math/MathML" overflow="scroll">
            <msup>
              <mrow/>
              <mn>2</mn>
            </msup>
          </math>
        </formula>
        <i>Brain-Computer Interfaces are devices that enable their users to interact with computers by means of brain activity only, this brain activity being measured (generally by ElectroEncephaloGraphy (EEG)) and processed by the system. A typical example of a BCI would be a system in which a user could move an object on screen towards the left or right, by imagining movement of his left or right hand, respectively.</i>
      </p>
    </subsection>
    <subsection id="uid8" level="1">
      <bodyTitle>Highlights of the Year</bodyTitle>
      <simplelist>
        <li id="uid9">
          <p noindent="true">Potioc has organized <ref xlink:href="http://ihm13.bordeaux.inria.fr" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">IHM 2013</ref>, the French conference on Human-Computer Interaction, in cooperation with the AFIHM association. This conference gathered about 125 participants. A <ref xlink:href="http://hal.inria.fr/IHM-2013" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">dedicated collection</ref> has been created on HAL.</p>
        </li>
        <li id="uid10">
          <p noindent="true">A tutorial about interaction techniques for 3D environments was presented at Eurographics 2013 <ref xlink:href="#potioc-2013-bid0" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> and Web3D 2013 <ref xlink:href="#potioc-2013-bid1" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
        </li>
      </simplelist>
    </subsection>
  </presentation>
  <fondements id="uid11">
    <bodyTitle>Research Program</bodyTitle>
    <subsection id="uid12" level="1">
      <bodyTitle>Introduction</bodyTitle>
      <p>The design of new user interfaces is a complex process that requires tackling research challenges at different levels. First, at a technological level, the input and output interaction space is becoming richer and richer. We will explore the new input/output modalities offered by such a technological evolution, and we will contribute to extend these modalities for the purpose of our main objective, which is to make 3D digital worlds available to all. Then, we will concentrate on the design of good interaction techniques that rely on such input/output modalities, and that are dedicated to the population targeted by this project, i.e. general public, specialists who are not 3D experts, and people with impairments. Finally, a large part of our work will be dedicated to the understanding and the assessment of user interaction. In particular, we will conduct user studies to guide the design of hardware and software UI, to evaluate them, and to better understand how a user interacts with 3D environments.</p>
      <p>These three levels, input/output modalities, interaction techniques, and human factors will be the three main research directions of Potioc. Of course, they are extremely linked, and they cannot be studied independently, one after the other. In particular, user studies will follow the design process of hardware/software user interfaces from the beginning to the end, and both hardware and software exploration will be interdependent. The design of a new 3D user interface will thus require some work at different levels, as illustrated in Figure <ref xlink:href="#uid13" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>. All members of Potioc will contribute in each of these research directions.</p>
      <object id="uid13">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/loop2.png" type="float" width="298.8987pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Diagram of an interactive system and the three main research axes of the Potioc project (blue boxes).</caption>
      </object>
    </subsection>
    <subsection id="uid14" level="1">
      <bodyTitle>Exploring and enhancing input/output interaction space</bodyTitle>
      <p>The Potioc project-team will be widely oriented towards new innovative input and output modalities, even though standard approaches based on keyboard/mouse and standard screens will not be excluded. This includes motor-based interfaces, and physiological interfaces like BCI, as well as stereoscopic display and augmented reality setups. These technologies may have a great potential for opening 3D digital worlds to everyone, if they are correctly exploited.</p>
      <p>We will explore various input/output modalities. Of course, we will not explore all of them at the same time, but we do not want to set an agenda either, for focusing on one of them. For a given need fed by end-users, we will choose among the various input/output modalities the ones that have the biggest potential. In the following paragraphs, we explain in more details the research challenges we will focus on to benefit from the existing and upcoming technologies.</p>
      <subsection id="uid15" level="2">
        <bodyTitle>Real-time acquisition and signal processing</bodyTitle>
        <p>There is a wide range of sensors that can detect users' activity. Beyond the mouse that detects <i>x</i> and <i>y</i> movements in the plane, various sensors are dedicated to the detection of 3D movements, pressure, brain and physiological activity, and so on. These sensors provide information that may be very rich, either to detect command intent from the user, or to estimate and understand the user's state in real-time, but that is difficult to exploit as is. Hence, a major challenge here is to extract the relevant information from the noisy raw data provided by the sensor.</p>
        <p>An example, and important research topic in Potioc, is the analysis of brain signals for the design of BCI. Indeed, brain signals are usually measured by EEG, such EEG signals being very noisy, complex and non-stationary. Moreover, for BCI-based applications, they need to be processed and analyzed in real-time. Finally, EEG signals exhibit large inter-user differences and there are usually few examples of EEG signals available to tune the BCI to a given user (we cannot ask the user to perform the same mental task thousands of times just to collect examples). As such, appropriate signal processing algorithms must be designed in order to robustly identify EEG patterns reflecting the user's intention. The research challenges are thus to design algorithms with high performances (in terms of rate of correctly identified user's state) anytime, anywhere, that are fully automatic and with minimal or no calibration time. In other words, we must design BCI that are convenient, comfortable and efficient enough so that they can be accepted and used by the end-user. Indeed, most users, in particular healthy users in the general public are used to highly convenient and efficient input devices (e.g., a simple mouse) and would not easily tolerate systems with a lower performance. Achieving this would make BCI good enough to be usable outside laboratories, e.g., for video gamers or patients. This will also make BCI valuable and reliable evaluation tools, e.g., to understand users' state during a given task. To address these challenges, pattern recognition and machine learning techniques are often used in order to find the optimal signal processing parameters. Similar approaches may contribute to the analysis of signals coming from other input devices than BCI. An example is the exploitation of depth cameras, where we need to find relevant information from noisy signals. 
Other emerging technologies will require similar attention, where the goal will be to transform an unstructured raw signal into a set of higher level descriptors that can be used as input parameters for controlling interaction techniques.</p>
      </subsection>
      <subsection id="uid16" level="2">
        <bodyTitle>Restitution and perceptive feedback</bodyTitle>
        <p>Similarly to the input side, the feedback provided to the user through various output modalities will be explored in Potioc. Beyond the standard screens that are commonly used, we will explore various displays. In particular, in the scope of visual restitution, we will notably focus on large screens and tables, mobile setups and projection on real objects, and stereoscopic visualization. The challenge here will be to conceive good visual metaphors dedicated to these unconventional output devices in order to maximize the attractiveness and the pleasure linked to the use of these technologies.</p>
        <p>For example, we will investigate the use of stereoscopic displays for extending the current visualization approaches. Indeed, stereoscopic visualization has been little explored outside complex VR setups dedicated to professional users and 3DTV. We believe that this modality may be very interesting for non-expert users, in wider contexts. To reach this goal, we will thus concentrate on new visual metaphors that benefit from stereoscopic visualization, and we will explore how, when, and where stereoscopy may be used.</p>
        <p>Depending on the targeted interaction tasks, we may also investigate various additional output modalities such as tangible interaction, audio displays, and so on. In any case, our approach will be the same: understanding how new perceptive modalities may push the frontier of our current interactive systems.</p>
      </subsection>
      <subsection id="uid17" level="2">
        <bodyTitle>Creation of new systems</bodyTitle>
        <p>In addition to the exploration and the exploitation of existing input and output modalities for enhancing interaction with 3D content, we may also contribute to extend the current input/output interaction space by building new interactive systems. This will be done by combining hardware components, or by collaborating with mechanics/electronics specialists.</p>
      </subsection>
    </subsection>
    <subsection id="uid18" level="1">
      <bodyTitle>Designing targeted interaction techniques</bodyTitle>
      <p>In the previous section, we focused on the input/output interaction space, which is closely related to hardware components. In this part, we focus on the design of interaction techniques, which we define here as the means through which a user will complete an interaction task in a given interaction space. Even if this is naturally also linked to the underlying hardware components, the research conducted in this axis of the project will mainly concern software developments.</p>
      <p>Similar to the input/output interaction space, the design of interaction techniques requires focusing on both the motor and the sensory components. Thus, in our 3D spatial context, the challenges will be to find good mappings between the available input and the DOF that need to be controlled in the 3D environment, and to provide relevant feedback to users so that they can understand well what they are doing.</p>
      <p>The design of interaction techniques should be strongly guided by the targeted end-users. For example, a 3D UI dedicated to an expert user will not suit a novice user, and the converse is also true. In Potioc, where the final goal is to open 3D digital worlds to anyone, we will concentrate on the general public, specialists that are not 3D experts, and people with impairments.</p>
      <subsection id="uid19" level="2">
        <bodyTitle>General public</bodyTitle>
        <p>3D UIs have mainly been designed for professional use. For example, modeling tools require expertise to be used correctly and, consequently, they exclude the general public from the process of creating 3D content. Similarly, immersive technologies have been dedicated to professional users for a long time. Therefore, immersive 3D interaction techniques have generally been designed for trained users, and they may not fit well with a general public context. In Potioc, an important motivation will be to re-invent 3D UIs to adapt them to the general public. This motivation will guide us towards new approaches that have been little explored until now. In particular, to reach our objective, we will give strong importance to the following criteria:</p>
        <simplelist>
          <li id="uid20">
            <p noindent="true">Intuitiveness: a very short learning curve is required.</p>
          </li>
          <li id="uid21">
            <p noindent="true">Enjoyability: this is needed to motivate novice users in the complex process of interaction with 3D content.</p>
          </li>
          <li id="uid22">
            <p noindent="true">Robustness: the UIs should support untrained users that may potentially interact with unpredictable actions.</p>
          </li>
        </simplelist>
        <p>In addition, we will keep connected with societal and technological factors surrounding the general public. For example, [multi]touch-screens have become very popular these past few years, and everyone tends to be familiar with a standard gesture vocabulary (e.g. pinch gestures and flicking gestures). We will rely on these commonly acquired <i>ways-of-interacting</i> to optimize the acceptability of the 3D UIs we will design. In this part of the project the challenge will be to conceive 3D UIs that offer a high degree of interactivity, while ensuring an easy access to technology, as well as wide adoption.</p>
      </subsection>
      <subsection id="uid23" level="2">
        <bodyTitle>Specialists</bodyTitle>
        <p>The general public will be one of the main targets of Potioc for the design of 3D UIs. However, we do not exclude specialists, who have little experience with 3D interaction. These specialists can be for example artists, archaeologists, or architects. In any case, we are convinced that 3D digital worlds could benefit such categories of users if we propose dedicated 3D UIs that allow them to better understand, communicate, or create, with their respective skills.
Because such specialists will gain expertise while interacting with 3D content, it will be necessary to design 3D UIs that can adapt to their evolving level of expertise. In particular, the UIs should be easy to use and attractive enough to encourage new users. At the same time, they should provide advanced features that the specialist can discover while gaining expertise.</p>
      </subsection>
      <subsection id="uid24" level="2">
        <bodyTitle>People with impairments</bodyTitle>
        <p>While the general public has been only scarcely considered as a potential target audience for 3D digital worlds, another category of users is even more neglected: people with impairments. Indeed, such people, in particular those with motor impairments, are unable to use classical input devices, since they have been designed for healthy users. People with motor impairment have to use dedicated input devices, adapted to their disabilities, such as a single switch. Since such input devices usually have much fewer degrees of freedom than classical devices, it is necessary to come up with appropriate interaction techniques in order to efficiently use this limited number of DOF to still enable the user to perform complex tasks in the 3D environment. In Potioc, our focus will be on the use of BCI to enable motor impaired users to interact with 3D environment for learning, creation and entertainment. Indeed, BCI enable a user to interact without any motor movement.</p>
      </subsection>
    </subsection>
    <subsection id="uid25" level="1">
      <bodyTitle>Understanding and assessing user interaction</bodyTitle>
      <p>The exploration of the input/output interaction space, and the design of new interaction techniques, are strongly linked with human factors, which will be the third research axis of the Potioc project. Indeed, to guide the developments described in the previous sections, we first need to well understand users' motor and cognitive skills for the completion of 3D interaction tasks. This will be explored thanks to <i>a-priori</i> experiments. In order to evaluate our hardware and software interfaces, we will conduct <i>a-posteriori</i> user studies. Finally, we will explore new approaches for a real-time cognitive analysis of the performance and the experience of a user interacting with a 3D environment.</p>
      <p>The main challenge in this part of the project will be to design good experimental protocols that will allow us to finely analyze various parameters for improving our interfaces. In 2D, there exist many standard protocols and prediction laws for evaluating UIs (e.g. Fitts law and ISO 9241). This is not the case in 3D. Consequently, a special care must be taken when evaluating interaction in 3D spatial contexts.</p>
      <p>In addition to the standard experiments we will conduct in our lab, we will conduct large scale experiments thanks to the strong collaboration we have with the center for the widespread diffusion of scientific culture, Cap Sciences (see Collaboration section). With such kinds of experiments, we will be able to test hundreds of participants of various ages, gender, or level of expertise that we will be able to track thanks to the Navinum system <footnote id="uid26" id-text="1">Navinum is a system based on RFID technology that is used to collect information about the activity of the visitors in Cap Sciences. <ref xlink:href="http://www.scribd.com/doc/55178878/Dossier-de-Presse-Numerique-100511" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>www.<allowbreak/>scribd.<allowbreak/>com/<allowbreak/>doc/<allowbreak/>55178878/<allowbreak/>Dossier-de-Presse-Numerique-100511</ref></footnote>, and this during a long period of time. A challenge for us will be to benefit from this wealth of information for the development of our 3D UIs.</p>
      <subsection id="uid27" level="2">
        <bodyTitle>A-priori user studies</bodyTitle>
        <p>Before designing 3D UIs, it is important to understand what a user is good at, and what may cause difficulties. This is true at a motor level, as well as a cognitive level. For example, are users able to coordinate the movements of several fingers on a touchscreen at the same time, or are they able to finely control the quantity of force applied on it while moving their hand? Similarly, are the users able to mentally predict a 3D rotation, and how many levels of depth are they able to distinguish when visualizing stereoscopic images? To answer these questions, we will conduct preliminary studies.</p>
        <p>Our research in that direction will guide our developments for the other research axes described above. For example, it will be interesting to explore touch-based 3D UIs that take into account several levels of force if we see that this parameter can be easily handled by users. On the other hand, if the results of a-priori tests show that this input cannot be easily controlled, then we will not push forward that direction.</p>
        <p>The members of Potioc have already conducted such kinds of experiments, and we will continue our work in that direction. For some investigations, we will collaborate with psychologists and experts in cognitive science (see Collaborations section) to explore in more depth motor and cognitive human skills.</p>
        <p><i>A-priori</i> studies will allow us to understand how users tend to “<i>naturally</i>” interact to complete 3D interaction tasks, and to understand which types of feedback are best suited. This will be a first answer to our global quest of providing pleasant interfaces. Indeed, this will allow us to adapt the UIs to the users, and not the opposite. This should enhance the global acceptability and motivation of users facing a new interactive system.</p>
      </subsection>
      <subsection id="uid28" level="2">
        <bodyTitle>A-posteriori user studies</bodyTitle>
        <p>In Potioc, we will conceive new hardware and software interfaces. To validate these UIs, and to improve them, we will conduct user experiments, as classically done in the field of HCI. This is a standard methodology that we currently follow (see Bibliography). We will do this in our lab, and in Cap Sciences.</p>
        <p>Beyond the standard evaluation criteria that are based on performance for speed, accuracy, coordination, and so on, we will also consider other criteria that are more relevant for the Potioc project. Indeed, we will give great importance to enjoyability, pleasure of use, accessibility, and so on. Consequently, we will need to redefine the standard way to evaluate UIs. Once again, our relationship with Cap Sciences will help us in such investigations. The use of questionnaires will be a way to better understand how an interface should be designed to reach successful use. In addition, we will observe and analyze how visitors tend to interact with various interfaces we will propose. For example, we will collect information like the time spent on a given interactive system or the number of smiles recorded during an interaction process. The identification of good criteria to use for the evaluation of a popular 3D UI will be one of the research directions of our team.</p>
        <p>Conducting such <i>a-posteriori</i> studies, in particular with experts of mediation, with new criteria of success, will be a second answer to our goal of evaluating the pleasure linked to the use of 3D UIs.</p>
      </subsection>
      <subsection id="uid29" level="2">
        <bodyTitle>Real-time cognitive analysis</bodyTitle>
        <p>Classically, the user's subjective preferences for a given 3D UI are assessed using questionnaires. While these questionnaires provide important information, this is only a partial, biased, a-posteriori/a-priori measure, since they are collected before or after the 3D interaction process. When questionnaires are administered during 3D interaction, this interrupts and disturbs the user, hence biasing the evaluation. Moreover, while evaluating performance and usefulness is now well described and understood, evaluating the user's experience and thus the system usability appears as much more difficult, with a lack of systematic and standard approaches. Ideally, we would like to measure the user response and subjective experience while he/she is using the 3D UI, i.e., in real-time and without interrupting him/her, in order to precisely identify the UI pros and cons. Questionnaires cannot provide such a measure.</p>
        <p>Fortunately, it has been recently shown that BCI could be used in a passive way, to monitor the user's mental state. More precisely, recent results suggested that appropriately processed EEG signals could provide information about mental states such as error perception, attention or mental workload. As such, BCI are emerging as a new tool to monitor a user's mental state and brain responses to various stimuli, in real-time. In the Potioc project, we propose a completely new way to evaluate 3DUI: rather than relying only on questionnaires to estimate the user's subjective experience, we propose to exploit passive BCI to estimate the user's mental state in real-time, without interrupting or disturbing him or her, while he/she is using the 3DUI. In particular, we aim at measuring and processing EEG and other biosignals (e.g., pulse, galvanic skin response, electromyogram) in real-time in order to estimate mental states such as interaction error potentials or workload/attention levels, among others. This will be used to finely identify how intuitive, easy-to-use and (ideally) enjoyable any given 3D UI is. More specifically, it will allow us to identify how, when and where the UI has flaws. Because the analysis will occur in real-time, we will potentially be able to modify the interface while the user is interacting. This should lead to a better understanding of 3D interaction. The work that will be achieved in this area could potentially also be useful for 2D interface design. However, since Potioc's main target is 3DUI, we will naturally focus the real-time cognitive evaluations on 3D contexts, with specific targets such as depth perception, or perception of 3D rotations.</p>
        <p>This real-time cognitive analysis will be a third answer to reach the objectives of Potioc, which are to open 3D digital worlds to everyone by increasing the pleasure of use.</p>
      </subsection>
    </subsection>
  </fondements>
  <domaine id="uid30">
    <bodyTitle>Application Domains</bodyTitle>
    <subsection id="uid31" level="1">
      <bodyTitle>Application domains</bodyTitle>
      <p>Since our project aims at providing 3D digital worlds to all, including the general public, to stimulate understanding, learning, communication and creation, our scope of applications will naturally be the following one:</p>
      <simplelist>
        <li id="uid32">
          <p noindent="true"><b>Culture and education:</b>
We are convinced that a 3D digital world is a powerful media that may contribute to enhance understanding processes. For example, a museum would benefit from new 3D user interfaces allowing visitors to better understand complex content. Similarly, at school, this media has an extraordinary potential for enhancing learning. For example, a child being able to navigate in archaeological sites, or to manipulate by himself 3D molecules as described previously, will probably understand and learn things while having pleasure in interacting with the content.</p>
        </li>
        <li id="uid33">
          <p noindent="true"><b>Art:</b>
We believe that 3D digital worlds may stimulate creativity, too. Our first investigations with music and drawings have shown that this media opens new possibilities for creation. The challenge here will be to design good interfaces that will allow artists to explore new dimensions. The user may be an experienced artist, or a three-year-old child who would express his creativity through tools that go beyond papers and pens.
        </li>
        <li id="uid34">
          <p noindent="true"><b>Assistive technologies:</b>
People suffering from motor or cognitive impairments are one of the target populations of the Potioc project. Indeed, we believe that new interfaces that exploit 3D digital worlds may help people to overcome their disabilities. For example, someone with very reduced motor capabilities could benefit from BCI to explore a virtual museum, or a child having difficulties concentrating may benefit from new 3D interactive systems.
        </li>
        <li id="uid35">
          <p noindent="true"><b>Entertainment:</b>
The objective of Potioc is to open 3D digital worlds to everyone by designing innovative interfaces driven by enjoyment. Consequently, the entertainment industry will be an obvious application domain where Potioc can contribute. This can be in the scope of video games, entertainment parks, Web and TV of the future, applications for mobile devices, and so on.</p>
        </li>
      </simplelist>
      <p>Naturally, we will not necessarily address all these applications, and certainly not all at the same time. These areas just define the applicative scope of our work. As an example, much of our current work is targeted at artistic and entertainment applications, with VR-based musical performances, augmented paper-based drawing or BCI-based video games. We are also currently starting to conduct research on digital cities, in order to provide ordinary citizens suitable tools and UI to explore 3D content related to their city, such as 3D maps, information about population density or sound nuisance, among others.
It should also be noted that our work might find applicative connections outside these main application domains and benefit a large range of academic and industrial areas, with which we could build relationships. For example, in the scope of medicine, new and easy-to-adopt user interfaces designed in Potioc could prove valuable for medical professionals as well, to better access and interact with biological 3D content (e.g., X-rays or MRI scans).</p>
    </subsection>
  </domaine>
  <logiciels id="uid36">
    <bodyTitle>Software and Platforms</bodyTitle>
    <subsection id="uid37" level="1">
      <bodyTitle>OpenViBE</bodyTitle>
      <participants>
        <person key="siames-2005-id18481">
          <firstname>Fabien</firstname>
          <lastname>Lotte</lastname>
          <moreinfo>local correspondant</moreinfo>
        </person>
        <person key="potioc-2012-idp140283983242432">
          <firstname>Alison</firstname>
          <lastname>Cellard</lastname>
          <moreinfo>engineer</moreinfo>
        </person>
      </participants>
      <p>As part of our research work on BCI, we contribute to the development of the OpenViBE <footnote id="uid38" id-text="2"><ref xlink:href="http://openvibe.inria.fr" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>openvibe.<allowbreak/>inria.<allowbreak/>fr</ref></footnote> software, which is an open source platform dedicated to the design, evaluation and use of BCI for real and virtual applications. OpenViBE development is led by Inria, and Potioc is one of the Inria teams contributing to its evolution. Moreover, Potioc is involved in the Inria ADT (Technological Development Action) OpenViBE-NT that is dedicated to the development of OpenViBE together with 3 other Inria teams (Hybrid, Athena, Neurosys).</p>
    </subsection>
    <subsection id="uid39" level="1">
      <bodyTitle>Drile</bodyTitle>
      <participants>
        <person key="potioc-2012-idp140283983231680">
          <firstname>Florent</firstname>
          <lastname>Berthaut</lastname>
        </person>
      </participants>
      <p>As part of the research on Virtual Reality for Musical Performance, notably the Drile system, various software pieces are being developed and made available to the community.
These software pieces are the following:</p>
      <simplelist>
        <li id="uid40">
          <p noindent="true">Pure-Data external to access data from the Virtual Reality Peripheral Network: <ref xlink:href="https://github.com/scrime/vrpd" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">https://<allowbreak/>github.<allowbreak/>com/<allowbreak/>scrime/<allowbreak/>vrpd</ref></p>
        </li>
        <li id="uid41">
          <p noindent="true">Drile: <ref xlink:href="http://hitmuri.net/index.php/Research/Drile" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hitmuri.<allowbreak/>net/<allowbreak/>index.<allowbreak/>php/<allowbreak/>Research/<allowbreak/>Drile</ref></p>
        </li>
      </simplelist>
    </subsection>
    <subsection id="uid42" level="1">
      <bodyTitle>3DCityTestbed</bodyTitle>
      <participants>
        <person key="potioc-2012-idp140283983218240">
          <firstname>Jacek</firstname>
          <lastname>Jankowski</lastname>
        </person>
        <person key="potioc-2012-idp140283983237056">
          <firstname>Thomas</firstname>
          <lastname>Hulin</lastname>
        </person>
      </participants>
      <p>As part of the research on the "Villes transparentes" project in collaboration with Mappy (Solocal group) and Vectuel - VirtuelCity, we develop a software platform dedicated to the design and evaluation of innovative interaction techniques for the navigation in urban 3D environments. This code is not publicly available yet.</p>
    </subsection>
    <subsection id="uid43" level="1">
      <bodyTitle>PapARt</bodyTitle>
      <participants>
        <person key="iparla-2010-id59958">
          <firstname>Jérémy</firstname>
          <lastname>Laviole</lastname>
          <moreinfo>Main developer</moreinfo>
        </person>
      </participants>
      <p>As part of his thesis work, Jérémy Laviole has developed a software suite for PapARt: Paper Augmented Reality Toolkit.
This suite enables the calibration of depth cameras such as the Kinect with a planar surface and with a videoprojection. It also enables the detection of finger touch on the planar surface.
This system can be extended to 3D objects. Many external devices are compatible, such as pen tablets (Wacom) and LEAP Motion. The code runs on Linux, Mac OS and Windows, and is made for Processing <footnote id="uid44" id-text="3"><ref xlink:href="http://www.processing.org" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>www.<allowbreak/>processing.<allowbreak/>org</ref></footnote>. This code is not publicly available yet.</p>
    </subsection>
  </logiciels>
  <resultats id="uid45">
    <bodyTitle>New Results</bodyTitle>
    <subsection id="uid46" level="1">
      <bodyTitle>Navigation techniques in 3D digital cities on mobile touch devices</bodyTitle>
      <participants>
        <person key="potioc-2012-idp140283983218240">
          <firstname>Jacek</firstname>
          <lastname>Jankowski</lastname>
        </person>
        <person key="potioc-2012-idp140283983237056">
          <firstname>Thomas</firstname>
          <lastname>Hulin</lastname>
        </person>
        <person key="iparla-2005-id18195">
          <firstname>Martin</firstname>
          <lastname>Hachet</lastname>
        </person>
      </participants>
      <p>This study aimed at characterizing today's most common interaction techniques for street-level navigation in 3D digital cities, for mobile touch devices, in terms of their efficiency and usability. To do so, we conducted a user study, where we compared target selection (Go-To), rate control (Joystick), position control, and stroke-based control navigation metaphors (see Figure <ref xlink:href="#uid47" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>). The results suggest that users performed best with the Go-To interaction technique. The subjective comments showed a preference of novices towards Go-To and expert users towards the Joystick technique. This work has been accepted for publication at the 3DUI 2014 conference <ref xlink:href="#potioc-2013-bid2" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
      <object id="uid47">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/mobileTouchNavigation.png" type="float" width="341.6013pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Four techniques for navigating in a 3D city on a mobile touch device.</caption>
      </object>
      <p>As part of this project on Navigation in 3D digital cities, the Potioc group also built a tutorial about interaction techniques for 3D environments. It was presented at Eurographics 2013 <ref xlink:href="#potioc-2013-bid0" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> and Web3D 2013 <ref xlink:href="#potioc-2013-bid1" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>. The goal of this work is to provide an up-to-date state-of-the-art of this topic to the community.</p>
    </subsection>
    <subsection id="uid48" level="1">
      <bodyTitle>Interaction with spatial augmented reality for physical drawing</bodyTitle>
      <participants>
        <person key="iparla-2010-id59958">
          <firstname>Jérémy</firstname>
          <lastname>Laviole</lastname>
        </person>
        <person key="iparla-2005-id18195">
          <firstname>Martin</firstname>
          <lastname>Hachet</lastname>
        </person>
      </participants>
      <p>We developed tools that enable precise interactive projection on pieces of
paper. The sheets of paper are tracked by a camera while the user's inputs
(e.g., touch and hovering events) are detected by a Kinect. The paper
acts as a screen, its image coming from an overhead projector.
The focus of this work is to use such tools to assist the creation of physical drawings
and painting. In this context we propose Digital Construction Lines
(DCL), in opposition with physical construction lines. Traditionally,
the structure of a physical drawing can be created with construction lines
which are light pencil strokes. These strokes are then erased during
the drawing process. With DCL, it is not required to erase the construction lines
anymore. Furthermore, it is possible to create construction lines on
fragile material like a canvas for waterpainting or on fresh paint. It
also enables construction lines on a dark canvas. In addition to these
projection advantages, it is possible to create these DCL interactively and
directly onto the support. Consequently, the DCL complement
the physical ones during the creation process.</p>
      <object id="uid49">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/touch.png" type="float" width="341.6013pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Using Digital Construction Lines for spatial augmented reality-based physical drawing.</caption>
      </object>
      <p>We investigated in a user study if the DCL could effectively replace
the physical construction lines, and compared the performance (speed,
cleanliness) between the two kinds of construction lines. In this user
study we also evaluated the quality and usability of projection of
thin lines in a fully controlled environment with a low-cost setup.
The study showed that DCL could effectively replace physical
construction lines, even though it might not be desirable. The study also showed that the
drawing experience was as pleasant with projection as with the
usual tools. The feedback about the quality of tracking and projection
was also positive. The only negative evaluation concerned the size of the
projection area, which was limited by the resolution of the projector.
This work was published as part of Jérémy Laviole's PhD thesis <ref xlink:href="#potioc-2013-bid3" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
      <p>PapARt was also used as part of a museum exhibition on the Lascaux caves, together with other 3D UI from Potioc.
This exhibition has provided us with the opportunity to experiment with touch-based interfaces for manipulating 3D virtual objects. We targeted three tasks: observing rare objects with Cubtile, reassembling object fragments with Toucheo, and reproducing artwork with PapARt <ref xlink:href="#potioc-2013-bid4" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> (see Figure <ref xlink:href="#uid50" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>). These exhibitions allowed us to experiment with our systems in real conditions. It led to a Living Lab, where the visitors can test our devices.</p>
      <object id="uid50">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/BoisDeRenne.png" type="float" width="298.8987pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Manipulation of a 3D model and lighting conditions for drawing on a prehistoric object in a museum.</caption>
      </object>
    </subsection>
    <subsection id="uid51" level="1">
      <bodyTitle>Rouages: Revealing the Mechanisms of Digital Musical Instruments to the Audience</bodyTitle>
      <participants>
        <person key="potioc-2012-idp140283983231680">
          <firstname>Florent</firstname>
          <lastname>Berthaut</lastname>
        </person>
        <person key="iparla-2005-id18195">
          <firstname>Martin</firstname>
          <lastname>Hachet</lastname>
        </person>
        <person key="PASUSERID">
          <firstname>Pierre-Marie</firstname>
          <lastname>Plans</lastname>
        </person>
      </participants>
      <object id="uid52">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/collaborationMusic.jpg" type="float" width="192.1487pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Collaborative setup for enhancing interaction between immersed musicians.</caption>
      </object>
      <p>We have developed Rouages <ref xlink:href="#potioc-2013-bid5" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>, a mixed-reality display system associated with a 3D visualization application. Rouages reveals the mechanisms of digital musical instruments in two ways. First, by amplifying musicians’ gestures with virtual extensions of the sensors. Second, by representing the sound components with 3D shapes and specific behaviors and by showing the impact of musicians’ gestures on these components. In addition, we have explored new setups to enhance collaboration between musicians using our VR-based instruments. This is illustrated in Figure <ref xlink:href="#uid52" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
    </subsection>
    <subsection id="uid53" level="1">
      <bodyTitle>Gateway driving simulator</bodyTitle>
      <participants>
        <person key="potioc-2012-idp140283983215552">
          <firstname>Florian</firstname>
          <lastname>Larrue</lastname>
        </person>
        <person key="iparla-2011-idp140711554775856">
          <firstname>Pauline</firstname>
          <lastname>Davignon</lastname>
        </person>
        <person key="iparla-2005-id18195">
          <firstname>Martin</firstname>
          <lastname>Hachet</lastname>
        </person>
      </participants>
      <p>As part of the SIMCA FUI project, the POTIOC team focuses on the design and evaluation of a gateway driving simulator (see Figure <ref xlink:href="#uid54" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>), to teach drivers how to drive an airport gateway in virtual reality, i.e., in a safe and cost-effective environment. We conducted a comprehensive user study to assess the impact of various parameters on user performance. This study allowed us to provide a set of recommendations for the design of an actual simulator.</p>
      <object id="uid54">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/simca.jpg" type="float" width="234.8513pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Gateway simulator.</caption>
      </object>
    </subsection>
    <subsection id="uid55" level="1">
      <bodyTitle>Training Approaches for Brain-Computer Interfaces</bodyTitle>
      <participants>
        <person key="siames-2005-id18481">
          <firstname>Fabien</firstname>
          <lastname>Lotte</lastname>
        </person>
        <person key="potioc-2012-idp140283983215552">
          <firstname>Florian</firstname>
          <lastname>Larrue</lastname>
        </person>
        <person key="potioc-2013-idp140285782591472">
          <firstname>Christian</firstname>
          <lastname>Mühl</lastname>
        </person>
      </participants>
      <p>While recent research on Brain-Computer Interfaces (BCI) has highlighted their potential for many applications, they remain barely used outside laboratories due to a lack of robustness. Spontaneous BCI (i.e., mental imagery-based BCI) often rely on mutual learning efforts by the user and the machine, with BCI users learning to produce stable EEG patterns (spontaneous BCI control being widely acknowledged as a skill) while the computer learns to automatically recognize these EEG patterns, using signal processing. Most research so far was focused on signal processing, mostly neglecting the human in the loop. However, how well the user masters the BCI skill is also a key element explaining BCI robustness. Unfortunately, despite the importance of BCI training protocols, they have been scarcely studied so far, and used mostly unchanged for years. In our work, we advocate that current human training approaches for spontaneous BCI are most likely inappropriate. We notably studied instructional design literature in order to identify the key requirements and guidelines for a successful training procedure that promotes a good and efficient skill learning. This literature study highlighted that current spontaneous BCI user training procedures satisfy very few of these requirements and hence are likely to be suboptimal. We therefore identified the flaws in BCI training protocols according to instructional design principles. We also proposed new research directions that are theoretically expected to address some of these flaws and to help users learn the BCI skill more efficiently. This work has been published in the Frontiers in Neuroscience journal <ref xlink:href="#potioc-2013-bid6" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
      <object id="uid56">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/brainArena.jpg" type="float" width="234.8513pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Multiplayer BCI-based gaming and feedback <ref xlink:href="#potioc-2013-bid7" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</caption>
      </object>
      <p>On a related topic, together with colleagues from Inria Rennes (A. Lécuyer and L. Bonnet, Hybrid team) we explored the design and evaluation of multiuser BCI applications, notably to see their impact on user training and performance. We created a multiuser videogame called BrainArena in which two users can play a simple football game by means of two BCIs. They can score goals on the left or right side of the screen by simply imagining left or right hand movements (see Figure <ref xlink:href="#uid56" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>). To add another interesting element, the gamers can play in a collaborative manner (their two mental activities are combined to score in the same goal), or in a competitive manner (the gamers must push the ball in opposite directions). Interestingly enough, our results showed that compared to a single player version of the same BCI-game, collaborative multiplayer BCI-gaming increased the motivation and performance of the most skilled player of each pair, while leaving the performance of the other players unchanged, hence proving to be a useful tool to improve BCI training. This work has been published in the IEEE Transactions on Computational Intelligence and AI in Games journal <ref xlink:href="#potioc-2013-bid7" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
    </subsection>
    <subsection id="uid57" level="1">
      <bodyTitle>Inducing, measuring and estimating mental and psychosocial stress from physiological signals</bodyTitle>
      <participants>
        <person key="potioc-2013-idp140285782591472">
          <firstname>Christian</firstname>
          <lastname>Mühl</lastname>
        </person>
        <person key="potioc-2013-idp140285782584368">
          <firstname>Camille</firstname>
          <lastname>Jeunet</lastname>
        </person>
        <person key="siames-2005-id18481">
          <firstname>Fabien</firstname>
          <lastname>Lotte</lastname>
        </person>
      </participants>
      <p>Stress is a major societal issue with negative impacts on health and economy. Physiological computing offers a continuous, direct, and unobtrusive method for stress level assessment and computer-assisted stress management. However, stress is a complex construct and its physiology can vary depending on its source, for example cognitive load or social evaluation. To study the feasibility of physiology-based load-invariant psychosocial stress-detection, we designed a stress-induction protocol able to independently vary the relevant types of psychophysiological activity: mental and psychosocial stress. In <ref xlink:href="#potioc-2013-bid8" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>, <ref xlink:href="#potioc-2013-bid9" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>, we validate the efficacy of our protocol to induce psychosocial and mental stress. Our participants (N=24) had to perform a cognitive task associated with two workload conditions (low/high mental stress), in two contexts (low/high psychosocial stress), during which we recorded subjects’ self-reports, behavior, physiology and neurophysiology. Questionnaires showed that the subjectively perceived level of anxiety varied with the psychosocial stress induction, while perceived arousal and mental effort levels varied with mental stress induction. Behavior and physiology corroborated the validity of our protocol further. Heart rate and skin conductance globally increased after psychosocial stress induction relative to the non-stressful condition. Moreover, we demonstrated that higher workload tasks (mental stress) led to a decrease in performance and a marked increase of heart rate.</p>
      <object id="uid58">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/stressSetup.png" type="float" width="234.8513pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>The setup of the experiment to induce and measure mental and psychosocial stress.</caption>
      </object>
      <p>Based on this protocol, we also explored the effect of stress on workload estimation. Workload estimation from electroencephalographic signals (EEG) offers a highly sensitive tool to adapt the human-computer interaction to the user state. To create systems that reliably work in the complexity of the real world, robustness against contextual changes (e.g., mood) has to be achieved. To study the resilience of state-of-the-art EEG-based workload classification against stress, we test the capability of the workload classifier to generalize across affective contexts (stress/non-stress). We show that the classifier is able to transfer between affective contexts, though performance suffers. However, cross-context training is a simple and powerful remedy allowing the extraction of features more resilient to task-unrelated variations in signal characteristics, leading to a performance comparable to within-context training and testing.</p>
    </subsection>
    <subsection id="uid59" level="1">
      <bodyTitle>Exploring electroencephalography as an evaluation method for human-computer interaction</bodyTitle>
      <participants>
        <person key="potioc-2013-idp140285782579760">
          <firstname>Jérémy</firstname>
          <lastname>Frey</lastname>
        </person>
        <person key="PASUSERID">
          <firstname>Léonard</firstname>
          <lastname>Pommereau</lastname>
        </person>
        <person key="siames-2005-id18481">
          <firstname>Fabien</firstname>
          <lastname>Lotte</lastname>
        </person>
        <person key="potioc-2013-idp140285782591472">
          <firstname>Christian</firstname>
          <lastname>Mühl</lastname>
        </person>
        <person key="iparla-2005-id18195">
          <firstname>Martin</firstname>
          <lastname>Hachet</lastname>
        </person>
      </participants>
      <p>Evaluating human-computer interaction is essential as a broadening population uses machines, sometimes in sensitive contexts. However, traditional evaluation methods may fail to combine real-time measures, an objective approach and data contextualization. We presented a review seeking how neuroimaging techniques can respond to such needs. We focused on electroencephalography (EEG), as it could be handled effectively during a dedicated evaluation phase. We identified workload, attention, vigilance, fatigue, error recognition, emotions, engagement, flow and immersion as being recognizable by EEG. We found that workload, attention and emotions assessments would benefit the most from EEG. Moreover, we advocate further study of error recognition through neuroimaging to enhance usability and increase user experience. This review paper was published in the proceedings of the Physiological Computing Systems (PhyCS) conference <ref xlink:href="#potioc-2013-bid10" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>.</p>
      <object id="uid60">
        <table>
          <tr>
            <td>
              <ressource xlink:href="IMG/photo2_xp_stereo.jpg" type="float" width="234.8513pt" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest" media="WEB"/>
            </td>
          </tr>
        </table>
        <caption>Stereoscopic display studied through EEG by varying apparent depth.</caption>
      </object>
      <p>Along this line of research, we also explored whether it was possible to assess the zone of comfort in stereoscopic displays using electroencephalography.
Indeed, the conflict between vergence (eye movement) and accommodation (crystalline lens deformation) occurs with every stereoscopic display. It could cause important stress outside the "zone of comfort", when stereoscopic effect is too strong. This conflict has already been studied using questionnaires, during viewing sessions of several minutes. We built an experimental protocol (see Figure <ref xlink:href="#uid60" location="intern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/>) which compares two different comfort conditions using electroencephalography over short viewing sequences. Analyses showed significant differences both in event-related potentials and in frequency bands power. By extending our protocol it should be possible to study at the same time comfort and depth perception, having a better understanding of stereoscopy.</p>
    </subsection>
  </resultats>
  <partenariat id="uid61">
    <bodyTitle>Partnerships and Cooperations</bodyTitle>
    <subsection id="uid62" level="1">
      <bodyTitle>Regional Initiatives</bodyTitle>
      <p>Potioc has strong relationships with <ref xlink:href="http://www.cap-sciences.net" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">Cap Sciences</ref>.
</p>
    </subsection>
    <subsection id="uid63" level="1">
      <bodyTitle>National Initiatives</bodyTitle>
      <p><b>FUI SIMCA 2000</b>:</p>
      <simplelist>
        <li id="uid64">
          <p noindent="true">duration: 2011-2013</p>
        </li>
        <li id="uid65">
          <p noindent="true">partners: Oktal, ENAC (Ecole Nationale de l'Aviation Civile), Toulouse-Blagnac airport, Air France, CGx AERO in SYS</p>
        </li>
        <li id="uid66">
          <p noindent="true">website: <ref xlink:href="https://team.inria.fr/potioc/fr/collaborative-projects/simca/" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">https://<allowbreak/>team.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>potioc/<allowbreak/>fr/<allowbreak/>collaborative-projects/<allowbreak/>simca/</ref></p>
        </li>
      </simplelist>
      <p> </p>
      <p noindent="true"><b>PIA ville numérique "Villes transparentes"</b>:</p>
      <simplelist>
        <li id="uid67">
          <p noindent="true">duration: 2012-2014</p>
        </li>
        <li id="uid68">
          <p noindent="true">partners: Pages Jaunes/Mappy, Vectuel/Virtuelcity</p>
        </li>
      </simplelist>
      <p> </p>
      <p noindent="true"><b>Inria ADT OpenViBE-NT</b>:</p>
      <simplelist>
        <li id="uid69">
          <p noindent="true">duration: 2012-2014</p>
        </li>
        <li id="uid70">
          <p noindent="true">partners: Inria teams Hybrid, Neurosys and Athena</p>
        </li>
        <li id="uid71">
          <p noindent="true">website: <ref xlink:href="http://openvibe.inria.fr" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>openvibe.<allowbreak/>inria.<allowbreak/>fr</ref></p>
        </li>
      </simplelist>
      <p><b>Inria Project Lab BCI-LIFT</b>:</p>
      <simplelist>
        <li id="uid72">
          <p noindent="true">partners: Inria team Athena (Inria Sophia-Antipolis), Inria team Hybrid (Inria Rennes), Inria team Neurosys (Inria Nancy), LITIS (Université de Rouen), Inria team DEMAR (Inria Sophia-Antipolis), Inria team MINT (Inria Lille), DyCOG (INSERM Lyon)</p>
        </li>
        <li id="uid73">
          <p noindent="true">Project around BCI in the evaluation process, with collaboration just starting (first meeting with all the partners in October 2013)</p>
        </li>
      </simplelist>
    </subsection>
    <subsection id="uid74" level="1">
      <bodyTitle>European Initiatives</bodyTitle>
      <subsection id="uid75" level="2">
        <bodyTitle>Collaborations with Major European Organizations</bodyTitle>
        <p>
          <b>Collaboration with the University of Bristol, BIG (UK):</b>
        </p>
        <simplelist>
          <li id="uid76">
            <p noindent="true">University of Bristol, Bristol Interaction and Graphics (BIG) group, UK (Head: Pr. Sriram Subramanian)</p>
          </li>
          <li id="uid77">
            <p noindent="true">We have strong relationships with Sriram Subramanian. This has led to joint paper publications, numerous visits and a co-supervision of a PhD thesis (Camille Jeunet)</p>
          </li>
        </simplelist>
        <p>
          <b>Bordeaux Idex project "Conception de Système d'interfaces cerveau-ordinateur prenant en compte les facteurs humains afin d'optimiser l'apprentissage de l'utilisateur":</b>
        </p>
        <simplelist>
          <li id="uid78">
            <p noindent="true">Bordeaux Idex funding for international PhD project</p>
          </li>
          <li id="uid79">
            <p noindent="true">partners: Bordeaux Segalen University (Handicap &amp; Système nerveux team), Bristol University (BIG team)</p>
          </li>
          <li id="uid80">
            <p noindent="true">duration: October 2013 - September 2016</p>
          </li>
        </simplelist>
        <p>
          <b>LIRA Stress and Relaxation project:</b>
        </p>
        <simplelist>
          <li id="uid81">
            <p noindent="true">Program: Inria - Philips - Fraunhofer partnership</p>
          </li>
          <li id="uid82">
            <p noindent="true">Project title: LIfe-style Research Association, Lifestyle Management: Stress and Relaxation</p>
          </li>
          <li id="uid83">
            <p noindent="true">Coordinator: Frederic Alexandre</p>
          </li>
          <li id="uid84">
            <p noindent="true">Other partners: Philips (Netherlands), Fraunhofer (Germany), Inria teams Hybrid and Mimetic</p>
          </li>
          <li id="uid85">
            <p noindent="true">Abstract: The Stress and Relaxation project aims at offering services to a user, at home or at work, to help this user evaluate and control his level of stress</p>
          </li>
        </simplelist>
      </subsection>
    </subsection>
    <subsection id="uid86" level="1">
      <bodyTitle>International Initiatives</bodyTitle>
      <subsection id="uid87" level="2">
        <bodyTitle>Inria International Partners</bodyTitle>
        <subsection id="uid88" level="3">
          <bodyTitle>Informal International Partners</bodyTitle>
          <simplelist>
            <li id="uid89">
              <p noindent="true">Pr. Gerwin Schalk (Schalk Lab, Wadsworth center, NY, USA), Pr. Jonathan Brumberg (Kansas University, USA), Dr. Cuntai Guan (I2R, Singapore).</p>
            </li>
            <li id="uid90">
              <p noindent="true">Collaboration in the context of a project around the analysis of ElectroCorticoGraphic (ECoG) brain signals in order to decode speech related information from them.</p>
            </li>
            <li id="uid91">
              <p noindent="true">Multidisciplinary project involving experts from ECoG signals (Gerwin Schalk), speech neuroscience (Jonathan Brumberg) and speech recognition (Cuntai Guan)</p>
            </li>
          </simplelist>
        </subsection>
      </subsection>
    </subsection>
    <subsection id="uid92" level="1">
      <bodyTitle>International Research Visitors</bodyTitle>
      <subsection id="uid93" level="2">
        <bodyTitle>Internships</bodyTitle>
        <p>This year, the Potioc team has hosted three PhD students:</p>
        <simplelist>
          <li id="uid94">
            <p noindent="true">Nicoletta Caramia, University of Pavia, Italy (April-July 2013)</p>
          </li>
          <li id="uid95">
            <p noindent="true">Asier Marzo, Universidad Pública de Navarra, Spain (August-November 2013)</p>
          </li>
          <li id="uid96">
            <p noindent="true">Flavio Bertini, University of Bologna, Italy (December 2013-February 2014)</p>
          </li>
        </simplelist>
      </subsection>
    </subsection>
  </partenariat>
  <diffusion id="uid97">
    <bodyTitle>Dissemination</bodyTitle>
    <subsection id="uid98" level="1">
      <bodyTitle>Scientific Animation</bodyTitle>
      <simplelist>
        <li id="uid99">
          <p noindent="true">Editing : ACM Journal on Computing and Cultural Heritage - Special issue on "Interacting with the Past" (M. Hachet)</p>
        </li>
        <li id="uid100">
          <p noindent="true">Conference organization (all Potioc team) and general chairing (M. Hachet): IHM 2013</p>
        </li>
        <li id="uid101">
          <p noindent="true">Workshop organization: "affective Brain-Computer Interface (aBCI) workshop" at ACII, 2013 (C. Mühl)</p>
        </li>
        <li id="uid102">
          <p noindent="true">Workshop organization: "Brain-Computer Interface workshop in Bordeaux", co-organized with the g.tec R&amp;D company, March 2013 (F. Lotte)</p>
        </li>
        <li id="uid103">
          <p noindent="true">Workshop organization: "Teaching the BCI skill: feedback and human training approaches", International BCI meeting 2013, Asilomar, CA, USA, co-organized with R. Scherer (TU Graz) and A. Lécuyer (Inria Rennes), June 2013 (F. Lotte)</p>
        </li>
        <li id="uid104">
          <p noindent="true">Steering committee : IEEE 3DUI (M. Hachet)</p>
        </li>
        <li id="uid105">
          <p noindent="true">Program committees : IEEE VR 2013 (M. Hachet), ISIS3D (M. Hachet), Web3D 2013 (J. Jankowski), aBCI workshop @ ACII 2013 (C. Mühl, F. Lotte), ACII 2013 (F. Lotte), PRNI 2013 (F. Lotte), IEEE SSCI-CCMB 2013 (F. Lotte)</p>
        </li>
        <li id="uid106">
          <p noindent="true">Reviewing for conferences: ACM CHI 2013 (M. Hachet, F. Lotte), ACM CHI 2014 (M. Hachet, F. Lotte, J. Jankowski, C. Mühl, J. Laviole), ACM UIST 2013 (M. Hachet, J. Laviole), INTERACT 2013 (J. Jankowski, F. Lotte), Web3D 2013 (J. Jankowski), SUI 2013 (J. Jankowski), MobileHCI 2013 (J. Jankowski), CSCW 2014 (J. Jankowski), 3DUI 2014 (J. Jankowski, J. Laviole), ISMAR 2013 (J. Laviole), ITS 2013 (R. Gervais), PG 2013 (R. Gervais), ACII 2013 (F. Lotte), aBCI Workshop @ ACII 2013 (J. Frey, C. Mühl, F. Lotte), IHM 2013 (F. Lotte), PRNI 2013 (F. Lotte), IEEE SSCI-CCMB 2013 (F. Lotte)</p>
        </li>
        <li id="uid107">
          <p noindent="true">Reviewing for journals: ACM Journal on Computing and Cultural Heritage (J. Jankowski, R. Gervais, J. Frey), GeoInformatica (J. Jankowski), Technique et Science Informatique (TSI) (R. Gervais), IEEE Transactions on Affective Computing (C. Mühl), Brain-Computer Interfaces (F. Lotte), Frontiers in Neuroprosthetics (F. Lotte), IEEE Transactions on Biomedical Engineering (F. Lotte), IEEE Transactions on Human Machine Systems (F. Lotte), International Journal of Neural Systems (F. Lotte), Journal of Neural Engineering (F. Lotte), NeuroImage (F. Lotte), Presence (F. Lotte), IEEE Reviews in Biomedical Engineering (F. Lotte)</p>
        </li>
        <li id="uid108">
          <p noindent="true">Reviewing for book chapters: Handbook of Affective Computing @Oxford (C. Mühl)</p>
        </li>
        <li id="uid109">
          <p noindent="true">ANR : Revue de programme CONTINT 2013 (M. Hachet)</p>
        </li>
      </simplelist>
    </subsection>
    <subsection id="uid110" level="1">
      <bodyTitle>Teaching - Supervision - Juries</bodyTitle>
      <subsection id="uid111" level="2">
        <bodyTitle>Teaching</bodyTitle>
        <p> </p>
        <p noindent="true">Teaching at University of Bordeaux I:</p>
        <simplelist>
          <li id="uid112">
            <p noindent="true">Master: Jérémy Laviole, Réalité Virtuelle immersive, Projet Étude et Développement, Master 2, University of Bordeaux I, France</p>
          </li>
          <li id="uid113">
            <p noindent="true">Bachelor: Jérémy Frey, supervision of 3 programming projects during "semestre rebondir", 30h, 1st year, University of Bordeaux 1, France</p>
          </li>
          <li id="uid114">
            <p noindent="true">Bachelor: Jérémy Frey, réseau et projet de programmation 2, 35h, 3rd year, University of Bordeaux 1, France</p>
          </li>
        </simplelist>
        <p> </p>
        <p noindent="true">Teaching at University of Bordeaux II:</p>
        <simplelist>
          <li id="uid115">
            <p noindent="true">Master: Martin Hachet, Réalité Virtuelle, 24h eqtd, M2 Sciences Cognitives, Université Bordeaux Segalen, France</p>
          </li>
          <li id="uid116">
            <p noindent="true">Master: Fabien Lotte, Réalité Virtuelle, 12h eqtd, M2 Sciences Cognitives, Université Bordeaux Segalen, France</p>
          </li>
          <li id="uid117">
            <p noindent="true">Bachelor: Camille Jeunet, Knowledge and Representations, 18h, 3rd Year, Cognitive Sciences, Université Bordeaux Segalen, France</p>
          </li>
        </simplelist>
        <p> </p>
        <p noindent="true">Teaching at Enseirb-Matmeca:</p>
        <simplelist>
          <li id="uid118">
            <p noindent="true">Master: Martin Hachet, Réalité Virtuelle, 12h eqtd, 3rd Year, Enseirb-Matmeca, IPB, France</p>
          </li>
          <li id="uid119">
            <p noindent="true">Master: Fabien Lotte, Réalité Virtuelle, 2h eqtd, 3rd Year, Enseirb-Matmeca, IPB, France</p>
          </li>
          <li id="uid120">
            <p noindent="true">Master: Jérémy Laviole, Virtual Reality, 9h, 3rd Year, Enseirb-Matmeca, France</p>
          </li>
          <li id="uid121">
            <p noindent="true">Master: Jérémy Laviole, Object Tracking Project, 8h, 3rd Year, Enseirb-Matmeca, France</p>
          </li>
          <li id="uid122">
            <p noindent="true">Bachelor: Renaud Gervais, Medias numeriques, 4h, Enseirb-Matmeca, France</p>
          </li>
        </simplelist>
        <p> </p>
        <p noindent="true">Other teaching activities:</p>
        <simplelist>
          <li id="uid123">
            <p noindent="true">Master: Jérémy Laviole, Augmented Reality Course + TP, 16h, 3rd Year, ENJMIN, France</p>
          </li>
        </simplelist>
      </subsection>
      <subsection id="uid124" level="2">
        <bodyTitle>Supervision</bodyTitle>
        <simplelist>
          <li id="uid125">
            <p noindent="true">PhD: Jérémy Laviole, "Interaction en Réalité Augmentée Spatiale pour le Dessin Physique", Université Bordeaux 1, Defended December 5th, 2013, Martin Hachet</p>
          </li>
          <li id="uid126">
            <p noindent="true">PhD in progress: Renaud Gervais, "Organic User Interfaces", started December 1st, 2012, Martin Hachet</p>
          </li>
          <li id="uid127">
            <p noindent="true">PhD in progress: Jérémy Frey, "Assessing 3DUI with passive Brain-Computer Interfaces", started October 1st, 2012, Fabien Lotte and Martin Hachet</p>
          </li>
          <li id="uid128">
            <p noindent="true">PhD in progress: Camille Jeunet, "Human Learning for Brain-Computer Interfaces", started October 1st, 2013, Fabien Lotte, Martin Hachet, Bernard N'kaoua and Sriram Subramanian.</p>
          </li>
          <li id="uid129">
            <p noindent="true">Master 2: Camille Jeunet, Master 2 from University of Bordeaux 2, affective BCI, February-June 2013 (C. Mühl, F. Lotte)</p>
          </li>
          <li id="uid130">
            <p noindent="true">Engineer (Master 2): Pierre-Marie Plans, Enseirb-Matmeca 3rd year, Collaborative virtual reality, February-June 2013 (M. Hachet)</p>
          </li>
          <li id="uid131">
            <p noindent="true">Master 1: Leonard Pommereau, Master 1 from University of Bordeaux 2, EEG and stereoscopy, April-June 2013 (F. Lotte, M. Hachet)</p>
          </li>
        </simplelist>
      </subsection>
      <subsection id="uid132" level="2">
        <bodyTitle>Juries</bodyTitle>
        <simplelist>
          <li id="uid133">
            <p noindent="true">PhD Jury: Prashant Arvind-Pala, Université Bordeaux Segalen, 18 Dec. 2013 (M. Hachet)</p>
          </li>
          <li id="uid134">
            <p noindent="true">PhD Jury: Javier Mauricio Antelis, University of Zaragoza, Spain, June 2013 (F. Lotte)</p>
          </li>
          <li id="uid135">
            <p noindent="true">PhD Jury: Mohit Kumar Goel, Ecole Polytechnique Fédérale de Lausanne (EPFL), Switzerland, June 2013 (F. Lotte)</p>
          </li>
          <li id="uid136">
            <p noindent="true">PhD Jury for half-PhD (after 1.5 year of PhD work): Raphaelle N. Roy, CEA-Leti/Gipsa-Lab, Grenoble, France (F. Lotte)</p>
          </li>
          <li id="uid137">
            <p noindent="true">External PhD Review: Adam Westerski - Universidad Politecnica de Madrid (J. Jankowski)</p>
          </li>
        </simplelist>
      </subsection>
    </subsection>
    <subsection id="uid138" level="1">
      <bodyTitle>Popularization</bodyTitle>
      <p> </p>
      <p noindent="true">The members of Potioc have published several popularization articles targeted to the general public:</p>
      <simplelist>
        <li id="uid139">
          <p noindent="true">"Quand la réalité virtuelle rencontre les surfaces tactiles" (<ref xlink:href="http://interstices.info/toucheo" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>interstices.<allowbreak/>info/<allowbreak/>toucheo</ref>), Interstices <ref xlink:href="#potioc-2013-bid11" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> (M. Hachet)</p>
        </li>
        <li id="uid140">
          <p noindent="true">"Arrêtez de regarder la 3D, prenez-en le contrôle !" (<ref xlink:href="http://www.inriality.fr/informatique/interaction-homme-machine/3d/arretez-de-regarder-la/" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>www.<allowbreak/>inriality.<allowbreak/>fr/<allowbreak/>informatique/<allowbreak/>interaction-homme-machine/<allowbreak/>3d/<allowbreak/>arretez-de-regarder-la/</ref>), Inriality <ref xlink:href="#potioc-2013-bid12" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> (M. Hachet)</p>
        </li>
        <li id="uid141">
          <p noindent="true">"Grâce au numérique, on peut lire dans les pensées" (<ref xlink:href="https://interstices.info/jcms/nn_73094/grace-au-numerique-on-peut-lire-dans-les-pensees" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">https://<allowbreak/>interstices.<allowbreak/>info/<allowbreak/>jcms/<allowbreak/>nn_73094/<allowbreak/>grace-au-numerique-on-peut-lire-dans-les-pensees</ref>), Interstices : Idée reçue <ref xlink:href="#potioc-2013-bid13" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> (F. Lotte)</p>
        </li>
        <li id="uid142">
          <p noindent="true">" Interaction Hommes-Machines : une histoire de 30 ans" (<ref xlink:href="http://www.inriality.fr/informatique/interface-homme-machine/inria/interaction-hommes-machines-une/" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>www.<allowbreak/>inriality.<allowbreak/>fr/<allowbreak/>informatique/<allowbreak/>interface-homme-machine/<allowbreak/>inria/<allowbreak/>interaction-hommes-machines-une/</ref>), Inriality <ref xlink:href="#potioc-2013-bid14" location="biblio" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest"/> (M. Hachet)</p>
        </li>
      </simplelist>
      <p> </p>
      <p noindent="true">They gave the following popularization talks:</p>
      <simplelist>
        <li id="uid143">
          <p noindent="true">"L'activité cérébrale pilote directement l'ordinateur : présentation de l'interface cerveau-ordinateur (Brain-Computer interface)", conference "Demain les objets sont connectés !", event "Semaine Digitale 2013" in Bordeaux (J. Frey)</p>
        </li>
        <li id="uid144">
          <p noindent="true">Talk introducing BCI and follow-up debate around augmented human, conference "L'homme « augmenté » : notre avenir est-il « cyborg » ?", event "Nancy Renaissance 2013" in Nancy (J. Frey)</p>
        </li>
        <li id="uid145">
          <p noindent="true">"Utiliser son cerveau pour contrôler une machine", Cité des sciences, Paris (F. Lotte)</p>
        </li>
        <li id="uid146">
            <p noindent="true">"Le cerveau aux commandes", conference as part of the "Cervorama" exhibition, Cap-Sciences, Bordeaux (F. Lotte)</p>
        </li>
        <li id="uid147">
          <p noindent="true">"Interfaces Cerveau-Ordinateur et Homme Augmenté", conférence "Neurotechnologies: vers un homme augmenté?", Bordeaux (F. Lotte)</p>
        </li>
      </simplelist>
      <p> </p>
      <p noindent="true">They also demonstrated their work to the general public during various events:</p>
      <simplelist>
        <li id="uid148">
          <p noindent="true">Portes ouvertes Université Bordeaux1, demonstrations de PapARt (J. Laviole)</p>
        </li>
        <li id="uid149">
          <p noindent="true">Cap Sciences - expo permanente, PapARt - development (J. Laviole)</p>
        </li>
        <li id="uid150">
          <p noindent="true">Cap Sciences - Évènement IHM 2013, demonstration de PapARt (J. Laviole)</p>
        </li>
        <li id="uid151">
            <p noindent="true">BCI demo for high school students following "informatique et sciences du numérique" courses (J. Frey, A. Cellard, F. Lotte)</p>
        </li>
        <li id="uid152">
          <p noindent="true">BCI demo for the general public during "La nuit des chercheurs" in Talence (F. Lotte, A. Cellard)</p>
        </li>
      </simplelist>
      <p> </p>
      <p noindent="true">Finally, they also participated in the following events:</p>
      <simplelist>
        <li id="uid153">
          <p noindent="true">Aquitec 2013, for promoting research jobs (J. Frey)</p>
        </li>
        <li id="uid154">
          <p noindent="true">Film-debate, "ExistenZ : faut-il avoir peur de la réalité virtuelle ?", University of Bordeaux 1 (J. Frey)</p>
        </li>
        <li id="uid155">
            <p noindent="true">Interview for a high school class for their video project on science (A. Cellard)</p>
        </li>
        <li id="uid156">
          <p noindent="true">Participation to “visages des sciences 2013” (postcard and video interview) (A. Cellard)</p>
        </li>
        <li id="uid157">
          <p noindent="true">Interview (video and paper) for the SoNews online journal (local Inria Bordeaux newspaper) (A. Cellard)</p>
        </li>
      </simplelist>
    </subsection>
  </diffusion>
  <biblio id="bibliography" html="bibliography" numero="10" titre="Bibliography">
    
    <biblStruct id="potioc-2013-bid28" type="article" rend="refer" n="refercite:cohe:hal-00737900">
      <identifiant type="doi" value="10.1016/j.cag.2012.09.004"/>
      <identifiant type="hal" value="hal-00737900"/>
      <analytic>
        <title level="a">Beyond the mouse: Understanding user gestures for manipulating 3D objects from touchscreen inputs</title>
        <author>
          <persName key="iparla-2009-id59898">
            <foreName>Aurélie</foreName>
            <surname>Cohé</surname>
            <initial>A.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes">
        <title level="j">Computers and Graphics</title>
        <imprint>
          <dateStruct>
            <month>December</month>
            <year>2012</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00737900" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00737900</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid26" type="article" rend="refer" n="refercite:hachet:hal-00789500">
      <identifiant type="doi" value="10.1109/MCG.2013.34"/>
      <identifiant type="hal" value="hal-00789500"/>
      <analytic>
        <title level="a">Touch-Based Interfaces for Interacting with 3D Content in Public Exhibitions</title>
        <author>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
          <persName>
            <foreName>Jean-Baptiste</foreName>
            <surname>De La Rivière</surname>
            <initial>J.-B.</initial>
          </persName>
          <persName key="iparla-2010-id59958">
            <foreName>Jérémy</foreName>
            <surname>Laviole</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2009-id59898">
            <foreName>Aurélie</foreName>
            <surname>Cohé</surname>
            <initial>A.</initial>
          </persName>
          <persName>
            <foreName>Sebastien</foreName>
            <surname>Cursan</surname>
            <initial>S.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes">
        <title level="j">IEEE Computer Graphics and Applications</title>
        <imprint>
          <biblScope type="volume">33</biblScope>
          <biblScope type="number">2</biblScope>
          <dateStruct>
            <month>March</month>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">80-85</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00789500" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00789500</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid27" type="article" rend="refer" n="refercite:lotte:hal-00862716">
      <identifiant type="doi" value="10.3389/fnhum.2013.00568"/>
      <identifiant type="hal" value="hal-00862716"/>
      <analytic>
        <title level="a">Flaws in current human training protocols for spontaneous Brain-Computer Interfaces: lessons learned from instructional design</title>
        <author>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName key="potioc-2012-idp140283983215552">
            <foreName>Florian</foreName>
            <surname>Larrue</surname>
            <initial>F.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes">
        <title level="j">Frontiers in Human Neurosciences</title>
        <imprint>
          <biblScope type="volume">7</biblScope>
          <biblScope type="number">568</biblScope>
          <dateStruct>
            <month>September</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00862716" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00862716</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid23" type="book" rend="year" n="cite:coyle:hal-00843602">
      <identifiant type="hal" value="hal-00843602"/>
      <monogr x-international-audience="yes">
        <title level="m">Guest Editorial: Brain/Neuronal-Computer Game Interfaces and Interaction</title>
        <editor role="editor">
          <persName>
            <foreName>Damien</foreName>
            <surname>Coyle</surname>
            <initial>D.</initial>
          </persName>
          <persName>
            <foreName>Jose</foreName>
            <surname>Principe</surname>
            <initial>J.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName>
            <foreName>Anton</foreName>
            <surname>Nijholt</surname>
            <initial>A.</initial>
          </persName>
        </editor>
        <imprint>
          <publisher>
            <orgName>IEEE</orgName>
          </publisher>
          <dateStruct>
            <month>June</month>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">77-81</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00843602" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00843602</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid3" type="phdthesis" rend="year" n="cite:laviole:tel-00935602">
      <identifiant type="hal" value="tel-00935602"/>
      <monogr>
        <title level="m">Interaction en Réalité Augmentée Spatiale pour le Dessin Physique</title>
        <author>
          <persName key="iparla-2010-id59958">
            <foreName>Jérémy</foreName>
            <surname>Laviole</surname>
            <initial>J.</initial>
          </persName>
        </author>
        <imprint>
          <publisher>
            <orgName type="school">Université Sciences et Technologies - Bordeaux I</orgName>
          </publisher>
          <dateStruct>
            <month>December</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/tel-00935602" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>tel-00935602</ref>
        </imprint>
      </monogr>
      <note type="typdoc">Ph. D. Thesis</note>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid7" type="article" rend="year" n="cite:bonnet:hal-00784886">
      <identifiant type="hal" value="hal-00784886"/>
      <analytic>
        <title level="a">Two Brains, One Game: Design and Evaluation of a Multi-User BCI Video Game Based on Motor Imagery</title>
        <author>
          <persName key="bunraku-2009-id59899">
            <foreName>Laurent</foreName>
            <surname>Bonnet</surname>
            <initial>L.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName key="siames-2005-id18146">
            <foreName>Anatole</foreName>
            <surname>Lécuyer</surname>
            <initial>A.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes" id="rid00802">
        <idno type="issn">1943-068X</idno>
        <title level="j">IEEE Transactions on Computational Intelligence and AI in games</title>
        <imprint>
          <biblScope type="volume">5</biblScope>
          <biblScope type="number">2</biblScope>
          <dateStruct>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">185-198</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00784886" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00784886</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid15" type="article" rend="year" n="cite:dearaujo:hal-00795343">
      <identifiant type="doi" value="10.1016/j.cag.2012.12.005"/>
      <identifiant type="hal" value="hal-00795343"/>
      <analytic>
        <title level="a">Mockup Builder: 3D Modeling On and Above the Surface</title>
        <author>
          <persName key="mint-2012-idp140410726763152">
            <foreName>Bruno</foreName>
            <surname>De Araujo</surname>
            <initial>B.</initial>
          </persName>
          <persName key="alcove-2005-id18121">
            <foreName>Géry</foreName>
            <surname>Casiez</surname>
            <initial>G.</initial>
          </persName>
          <persName>
            <foreName>Joaquim</foreName>
            <surname>Jorge</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes" id="rid00455">
        <idno type="issn">0097-8493</idno>
        <title level="j">Computers &amp; Graphics</title>
        <imprint>
          <biblScope type="volume">37</biblScope>
          <biblScope type="number">3</biblScope>
          <dateStruct>
            <month>January</month>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">165-178</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00795343" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00795343</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid12" type="article" rend="year" n="cite:hachet:hal-00824873">
      <identifiant type="hal" value="hal-00824873"/>
      <analytic>
        <title level="a">Arrêtez de regarder la 3D, prenez-en le contrôle !</title>
        <author>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
          <persName>
            <foreName>Christophe</foreName>
            <surname>Castro</surname>
            <initial>C.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-scientific-popularization="yes" x-editorial-board="no" x-international-audience="no">
        <title level="j">Inriality</title>
        <imprint>
          <dateStruct>
            <month>May</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00824873" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00824873</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid4" type="article" rend="year" n="cite:hachet:hal-00789500">
      <identifiant type="doi" value="10.1109/MCG.2013.34"/>
      <identifiant type="hal" value="hal-00789500"/>
      <analytic>
        <title level="a">Touch-Based Interfaces for Interacting with 3D Content in Public Exhibitions</title>
        <author>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
          <persName>
            <foreName>Jean-Baptiste</foreName>
            <surname>De La Rivière</surname>
            <initial>J.-B.</initial>
          </persName>
          <persName key="iparla-2010-id59958">
            <foreName>Jérémy</foreName>
            <surname>Laviole</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2009-id59898">
            <foreName>Aurélie</foreName>
            <surname>Cohé</surname>
            <initial>A.</initial>
          </persName>
          <persName>
            <foreName>Sebastien</foreName>
            <surname>Cursan</surname>
            <initial>S.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes" id="rid00761">
        <idno type="issn">0272-1716</idno>
        <title level="j">IEEE Computer Graphics and Applications</title>
        <imprint>
          <biblScope type="volume">33</biblScope>
          <biblScope type="number">2</biblScope>
          <dateStruct>
            <month>March</month>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">80-85</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00789500" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00789500</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid11" type="article" rend="year" n="cite:hachet:hal-00804154">
      <identifiant type="hal" value="hal-00804154"/>
      <analytic>
        <title level="a">Quand la réalité virtuelle rencontre les surfaces tactiles</title>
        <author>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-scientific-popularization="yes" x-editorial-board="no" x-international-audience="no" id="rid01098">
        <title level="j">Interstices</title>
        <imprint>
          <dateStruct>
            <month>March</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00804154" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00804154</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid14" type="article" rend="year" n="cite:hachet:hal-00905520">
      <identifiant type="hal" value="hal-00905520"/>
      <analytic>
        <title level="a">Interaction Hommes-Machines : une histoire de 30 ans</title>
        <author>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
          <persName>
            <foreName>Martin</foreName>
            <surname>Bellet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-scientific-popularization="yes" x-editorial-board="no" x-international-audience="no">
        <title level="j">Inriality</title>
        <imprint>
          <dateStruct>
            <month>November</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00905520" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00905520</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid16" type="article" rend="year" n="cite:jankowski:hal-00822676">
      <identifiant type="doi" value="10.1016/j.ijhcs.2013.04.006"/>
      <identifiant type="hal" value="hal-00822676"/>
      <analytic>
        <title level="a">On the Design of a Dual-Mode User Interface for Accessing 3D Content on the World Wide Web</title>
        <author>
          <persName key="potioc-2012-idp140283983218240">
            <foreName>Jacek</foreName>
            <surname>Jankowski</surname>
            <initial>J.</initial>
          </persName>
          <persName>
            <foreName>Stefan</foreName>
            <surname>Decker</surname>
            <initial>S.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes" id="rid01004">
        <idno type="issn">1071-5819</idno>
        <title level="j">International Journal of Human-Computer Studies</title>
        <imprint>
          <dateStruct>
            <month>May</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00822676" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00822676</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid22" type="incollection" rend="year" n="cite:lotte:hal-00735932">
      <identifiant type="hal" value="hal-00735932"/>
      <analytic>
        <title level="a">Combining BCI with Virtual Reality: Towards New Applications and Improved BCI</title>
        <author>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName>
            <foreName>Josef</foreName>
            <surname>Faller</surname>
            <initial>J.</initial>
          </persName>
          <persName>
            <foreName>Christoph</foreName>
            <surname>Guger</surname>
            <initial>C.</initial>
          </persName>
          <persName key="bunraku-2006-id18591">
            <foreName>Yann</foreName>
            <surname>Renard</surname>
            <initial>Y.</initial>
          </persName>
          <persName>
            <foreName>Gert</foreName>
            <surname>Pfurtscheller</surname>
            <initial>G.</initial>
          </persName>
          <persName key="siames-2005-id18146">
            <foreName>Anatole</foreName>
            <surname>Lécuyer</surname>
            <initial>A.</initial>
          </persName>
          <persName>
            <foreName>Robert</foreName>
            <surname>Leeb</surname>
            <initial>R.</initial>
          </persName>
        </author>
      </analytic>
      <monogr>
        <editor role="editor">
          <persName>
            <foreName>Brendan Z.</foreName>
            <surname>Allison</surname>
            <initial>B. Z.</initial>
          </persName>
          <persName>
            <foreName>Stephen</foreName>
            <surname>Dunne</surname>
            <initial>S.</initial>
          </persName>
          <persName>
            <foreName>Robert</foreName>
            <surname>Leeb</surname>
            <initial>R.</initial>
          </persName>
          <persName>
            <foreName>José Del R.</foreName>
            <surname>Millán</surname>
            <initial>J. D. R.</initial>
          </persName>
          <persName>
            <foreName>Anton</foreName>
            <surname>Nijholt</surname>
            <initial>A.</initial>
          </persName>
        </editor>
        <title level="m">Towards Practical Brain-Computer Interfaces</title>
        <imprint>
          <publisher>
            <orgName>Springer</orgName>
          </publisher>
          <dateStruct>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00735932" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00735932</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid6" type="article" rend="year" n="cite:lotte:hal-00862716">
      <identifiant type="doi" value="10.3389/fnhum.2013.00568"/>
      <identifiant type="hal" value="hal-00862716"/>
      <analytic>
        <title level="a">Flaws in current human training protocols for spontaneous Brain-Computer Interfaces: lessons learned from instructional design</title>
        <author>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName key="potioc-2012-idp140283983215552">
            <foreName>Florian</foreName>
            <surname>Larrue</surname>
            <initial>F.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-editorial-board="yes" x-international-audience="yes" id="rid0246211111112">
        <idno type="issn">1662-5161</idno>
        <title level="j">Frontiers in Human Neuroscience</title>
        <imprint>
          <biblScope type="volume">7</biblScope>
          <biblScope type="number">568</biblScope>
          <dateStruct>
            <month>September</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00862716" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00862716</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid13" type="article" rend="year" n="cite:lotte:hal-00863574">
      <identifiant type="hal" value="hal-00863574"/>
      <analytic>
        <title level="a">Grâce au numérique, on peut lire dans les pensées</title>
        <author>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-scientific-popularization="yes" x-editorial-board="yes" x-international-audience="no" id="rid01098">
        <title level="j">Interstices : Idée reçue</title>
        <imprint>
          <dateStruct>
            <month>September</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00863574" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00863574</ref>
        </imprint>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid5" type="inproceedings" rend="year" n="cite:berthaut:hal-00807049">
      <identifiant type="hal" value="hal-00807049"/>
      <analytic>
        <title level="a">Rouages: Revealing the Mechanisms of Digital Musical Instruments to the Audience</title>
        <author>
          <persName key="potioc-2012-idp140283983231680">
            <foreName>Florent</foreName>
            <surname>Berthaut</surname>
            <initial>F.</initial>
          </persName>
          <persName>
            <foreName>Mark</foreName>
            <surname>Marshall</surname>
            <initial>M.</initial>
          </persName>
          <persName>
            <foreName>Sriram</foreName>
            <surname>Subramanian</surname>
            <initial>S.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">New Interfaces for Musical Expression</title>
        <loc>Daejeon, Korea, Republic Of</loc>
        <imprint>
          <dateStruct>
            <month>May</month>
            <year>2013</year>
          </dateStruct>
          <biblScope type="pages">6</biblScope>
          <ref xlink:href="http://hal.inria.fr/hal-00807049" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00807049</ref>
        </imprint>
        <meeting id="cid293584">
          <title>International Conference on New Interfaces for Musical Expression</title>
          <num>2013</num>
          <abbr type="sigle">NIME</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid20" type="inproceedings" rend="year" n="cite:cutellic:hal-00843050">
      <identifiant type="hal" value="hal-00843050"/>
      <analytic>
        <title level="a">Augmented Iterations: Integrating Neural Activity in Evolutionary Computation for Design</title>
        <author>
          <persName>
            <foreName>Pierre</foreName>
            <surname>Cutellic</surname>
            <initial>P.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">eCAADe 2013</title>
        <loc>Delft, Netherlands</loc>
        <imprint>
          <dateStruct>
            <month>September</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00843050" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00843050</ref>
        </imprint>
        <meeting id="cid624240">
          <title>Education and Research in Computer Aided Architectural Design in Europe Conference</title>
          <num>2013</num>
          <abbr type="sigle">eCAADe</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid10" type="inproceedings" rend="year" n="cite:frey:hal-00881756">
      <identifiant type="hal" value="hal-00881756"/>
      <analytic>
        <title level="a">Review of the Use of Electroencephalography as an Evaluation Method for Human-Computer Interaction</title>
        <author>
          <persName>
            <foreName>Jérémy</foreName>
            <surname>Frey</surname>
            <initial>J.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">PhyCS 2014 - International Conference on Physiological Computing Systems</title>
        <loc>Lisbon, Portugal</loc>
        <imprint>
          <publisher>
            <orgName>SCITEPRESS</orgName>
          </publisher>
          <dateStruct>
            <month>January</month>
            <year>2014</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00881756" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00881756</ref>
        </imprint>
        <meeting id="cid624239">
          <title>International Conference on Physiological Computing Systems</title>
          <num>2014</num>
          <abbr type="sigle">PhyCS</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid0" type="inproceedings" rend="year" n="cite:jankowski:hal-00789413">
      <identifiant type="hal" value="hal-00789413"/>
      <analytic>
        <title level="a">A Survey of Interaction Techniques for Interactive 3D Environments</title>
        <author>
          <persName key="potioc-2012-idp140283983218240">
            <foreName>Jacek</foreName>
            <surname>Jankowski</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">Eurographics 2013 - STAR</title>
        <loc>Girona, Spain</loc>
        <imprint>
          <dateStruct>
            <month>May</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00789413" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00789413</ref>
        </imprint>
        <meeting id="cid63778">
          <title>Eurographics State-of-the-Art</title>
          <num>2013</num>
          <abbr type="sigle"/>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid1" type="inproceedings" rend="year" n="cite:jankowski:hal-00821895">
      <identifiant type="hal" value="hal-00821895"/>
      <analytic>
        <title level="a">Interaction Techniques for Interactive 3D Web Environments</title>
        <author>
          <persName key="potioc-2012-idp140283983218240">
            <foreName>Jacek</foreName>
            <surname>Jankowski</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">Web3D</title>
        <loc>San Sebastian, Spain</loc>
        <imprint>
          <dateStruct>
            <month>June</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00821895" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00821895</ref>
        </imprint>
        <meeting id="cid108735">
          <title>International Conference on 3D Web Technology</title>
          <num>18</num>
          <abbr type="sigle">Web3D</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid2" type="inproceedings" rend="year" n="cite:jankowski:hal-00931012">
      <identifiant type="hal" value="hal-00931012"/>
      <analytic>
        <title level="a">A Study of Street-level Navigation Techniques in 3D Digital Cities on Mobile Touch Devices</title>
        <author>
          <persName key="potioc-2012-idp140283983218240">
            <foreName>Jacek</foreName>
            <surname>Jankowski</surname>
            <initial>J.</initial>
          </persName>
          <persName key="potioc-2012-idp140283983237056">
            <foreName>Thomas</foreName>
            <surname>Hulin</surname>
            <initial>T.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">IEEE 3DUI 2014 - Symposium on 3D User Interfaces</title>
        <loc>Minneapolis, United States</loc>
        <imprint>
          <dateStruct>
            <month>March</month>
            <year>2014</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00931012" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00931012</ref>
        </imprint>
        <meeting id="cid93832">
          <title>IEEE Symposium on 3D User Interfaces</title>
          <num>9</num>
          <abbr type="sigle">3DUI</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid19" type="inproceedings" rend="year" n="cite:jankowski:hal-00822667">
      <identifiant type="doi" value="10.1145/2466533.2466547"/>
      <identifiant type="hal" value="hal-00822667"/>
      <analytic>
        <title level="a">Declarative Integration of Interactive 3D Graphics into the World-Wide Web: Principles, Current Approaches, and Research Agenda</title>
        <author>
          <persName key="potioc-2012-idp140283983218240">
            <foreName>Jacek</foreName>
            <surname>Jankowski</surname>
            <initial>J.</initial>
          </persName>
          <persName>
            <foreName>Sandy</foreName>
            <surname>Ressler</surname>
            <initial>S.</initial>
          </persName>
          <persName>
            <foreName>Kristian</foreName>
            <surname>Sons</surname>
            <initial>K.</initial>
          </persName>
          <persName>
            <foreName>Yvonne</foreName>
            <surname>Jung</surname>
            <initial>Y.</initial>
          </persName>
          <persName>
            <foreName>Johannes</foreName>
            <surname>Behr</surname>
            <initial>J.</initial>
          </persName>
          <persName>
            <foreName>Philipp</foreName>
            <surname>Slusallek</surname>
            <initial>P.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">18th International Conference on 3D Web Technology (Web3D'13)</title>
        <loc>San Sebastian, Spain</loc>
        <imprint>
          <dateStruct>
            <month>June</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00822667" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00822667</ref>
        </imprint>
        <meeting id="cid554022">
          <title>ACM International Conference on 3D Web Technology</title>
          <num>18</num>
          <abbr type="sigle">Web3D</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid9" type="inproceedings" rend="year" n="cite:jeunet:hal-00879966">
      <identifiant type="hal" value="hal-00879966"/>
      <analytic>
        <title level="a">Design and Validation of a Mental and Social Stress Induction Protocol Towards Load-Invariant Physiology-Based Detection</title>
        <author>
          <persName>
            <foreName>Camille</foreName>
            <surname>Jeunet</surname>
            <initial>C.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">International Conference on Physiological Computing Systems</title>
        <loc>Lisbon, Portugal</loc>
        <imprint>
          <dateStruct>
            <month>January</month>
            <year>2014</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00879966" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00879966</ref>
        </imprint>
        <meeting id="cid624239">
          <title>International Conference on Physiological Computing Systems</title>
          <num>2014</num>
          <abbr type="sigle">PhyCS</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid18" type="inproceedings" rend="year" n="cite:larrue:hal-00804266">
      <identifiant type="hal" value="hal-00804266"/>
      <analytic>
        <title level="a">Assessing the impact of automatic vs. controlled rotations on spatial transfer with a joystick and a walking interface in VR</title>
        <author>
          <persName key="potioc-2012-idp140283983215552">
            <foreName>Florian</foreName>
            <surname>Larrue</surname>
            <initial>F.</initial>
          </persName>
          <persName key="phoenix-2011-idp140690212037536">
            <foreName>Hélène</foreName>
            <surname>Sauzeon</surname>
            <initial>H.</initial>
          </persName>
          <persName>
            <foreName>Déborah</foreName>
            <surname>Foloppe</surname>
            <initial>D.</initial>
          </persName>
          <persName>
            <foreName>Gregory</foreName>
            <surname>Wallet</surname>
            <initial>G.</initial>
          </persName>
          <persName>
            <foreName>Jean-René</foreName>
            <surname>Cazalets</surname>
            <initial>J.-R.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Gross</surname>
            <initial>C.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
          <persName key="phoenix-2011-idp140690212034784">
            <foreName>Bernard</foreName>
            <surname>N'Kaoua</surname>
            <initial>B.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">Interact 2013</title>
        <loc>Cape Town, South Africa</loc>
        <imprint>
          <dateStruct>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00804266" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00804266</ref>
        </imprint>
        <meeting id="cid101395">
          <title>IFIP TC13 International Conference on Human-Computer Interaction</title>
          <num>15</num>
          <abbr type="sigle">INTERACT</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid17" type="inproceedings" rend="year" n="cite:lotte:hal-00802403">
      <identifiant type="hal" value="hal-00802403"/>
      <analytic>
        <title level="a">BCI Research at Inria Bordeaux: making BCI designs usable outside the lab</title>
        <author>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
          <persName key="potioc-2012-idp140283983215552">
            <foreName>Florian</foreName>
            <surname>Larrue</surname>
            <initial>F.</initial>
          </persName>
          <persName key="iparla-2005-id18195">
            <foreName>Martin</foreName>
            <surname>Hachet</surname>
            <initial>M.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">International BCI meeting</title>
        <loc>Asilomar, United States</loc>
        <imprint>
          <dateStruct>
            <month>June</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00802403" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00802403</ref>
        </imprint>
        <meeting id="cid105860">
          <title>International Brain-Computer Interface Workshop</title>
          <num>2013</num>
          <abbr type="sigle">BCI</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid21" type="inproceedings" rend="year" n="cite:ridel:hal-00852053">
      <identifiant type="doi" value="10.1145/2534903.2534906"/>
      <identifiant type="hal" value="hal-00852053"/>
      <analytic>
        <title level="a">La Lampe torche magique : Une interface tangible pour l'inspection géométrique d'objets en réalité augmentée spatiale</title>
        <author>
          <persName>
            <foreName>Brett</foreName>
            <surname>Ridel</surname>
            <initial>B.</initial>
          </persName>
          <persName key="iparla-2005-id18209">
            <foreName>Patrick</foreName>
            <surname>Reuter</surname>
            <initial>P.</initial>
          </persName>
          <persName key="iparla-2010-id59958">
            <foreName>Jeremy</foreName>
            <surname>Laviole</surname>
            <initial>J.</initial>
          </persName>
          <persName key="iparla-2009-id60046">
            <foreName>Nicolas</foreName>
            <surname>Mellado</surname>
            <initial>N.</initial>
          </persName>
          <persName key="iparla-2005-id18167">
            <foreName>Xavier</foreName>
            <surname>Granier</surname>
            <initial>X.</initial>
          </persName>
          <persName>
            <foreName>Nadine</foreName>
            <surname>Couture</surname>
            <initial>N.</initial>
          </persName>
        </author>
      </analytic>
      <monogr x-international-audience="yes" x-proceedings="yes">
        <title level="m">25ème conférence francophone sur l'Interaction Homme-Machine, IHM'13</title>
        <loc>Bordeaux, France</loc>
        <imprint>
          <publisher>
            <orgName>ACM</orgName>
          </publisher>
          <publisher>
            <orgName type="organisation">AFIHM</orgName>
          </publisher>
          <dateStruct>
            <month>July</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00852053" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00852053</ref>
        </imprint>
        <meeting id="cid50669">
          <title>Conférence Francophone sur l'Interaction Homme-Machine</title>
          <num>25</num>
          <abbr type="sigle">IHM</abbr>
        </meeting>
      </monogr>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid8" type="misc" rend="year" n="cite:jeunet:hal-00879588">
      <identifiant type="hal" value="hal-00879588"/>
      <analytic>
        <title level="a">Conception et validation d'un protocole pour induire du stress et le mesurer dans des signaux physiologiques</title>
        <author>
          <persName>
            <foreName>Camille</foreName>
            <surname>Jeunet</surname>
            <initial>C.</initial>
          </persName>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
          <persName key="siames-2005-id18481">
            <foreName>Fabien</foreName>
            <surname>Lotte</surname>
            <initial>F.</initial>
          </persName>
        </author>
      </analytic>
      <monogr>
        <title level="m">25ème conférence francophone sur l'Interaction Homme-Machine, IHM'13</title>
        <loc>Bordeaux, France</loc>
        <imprint>
          <dateStruct>
            <month>November</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00879588" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00879588</ref>
        </imprint>
      </monogr>
      <note type="howpublished">25ème conférence francophone sur l'Interaction Homme-Machine, IHM'13</note>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid25" type="misc" rend="year" n="cite:muhl:hal-00907118">
      <identifiant type="hal" value="hal-00907118"/>
      <analytic>
        <title level="a">Third Workshop on Affective Brain-Computer Interfaces: Introduction</title>
        <author>
          <persName>
            <foreName>Christian</foreName>
            <surname>Mühl</surname>
            <initial>C.</initial>
          </persName>
          <persName>
            <foreName>Guillaume</foreName>
            <surname>Chanel</surname>
            <initial>G.</initial>
          </persName>
          <persName>
            <foreName>Brendan Z.</foreName>
            <surname>Allison</surname>
            <initial>B. Z.</initial>
          </persName>
          <persName>
            <foreName>Anton</foreName>
            <surname>Nijholt</surname>
            <initial>A.</initial>
          </persName>
        </author>
      </analytic>
      <monogr>
        <title level="m">Affective Computing and Intelligent Interaction</title>
        <loc>Geneva, Switzerland</loc>
        <imprint>
          <dateStruct>
            <month>September</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00907118" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00907118</ref>
        </imprint>
      </monogr>
      <note type="howpublished">Affective Computing and Intelligent Interaction</note>
    </biblStruct>
    
    <biblStruct id="potioc-2013-bid24" type="misc" rend="year" n="cite:plans:hal-00879603">
      <identifiant type="hal" value="hal-00879603"/>
      <analytic>
        <title level="a">Les Passages : Une métaphore pour la Collaboration Musicale en Réalité Virtuelle</title>
        <author>
          <persName>
            <foreName>Pierre-Marie</foreName>
            <surname>Plans</surname>
            <initial>P.-M.</initial>
          </persName>
          <persName key="potioc-2012-idp140283983231680">
            <foreName>Florent</foreName>
            <surname>Berthaut</surname>
            <initial>F.</initial>
          </persName>
        </author>
      </analytic>
      <monogr>
        <title level="m">25ème conférence francophone sur l'Interaction Homme-Machine, IHM'13</title>
        <loc>Bordeaux, France</loc>
        <imprint>
          <dateStruct>
            <month>November</month>
            <year>2013</year>
          </dateStruct>
          <ref xlink:href="http://hal.inria.fr/hal-00879603" location="extern" xlink:type="simple" xlink:show="replace" xlink:actuate="onRequest">http://<allowbreak/>hal.<allowbreak/>inria.<allowbreak/>fr/<allowbreak/>hal-00879603</ref>
        </imprint>
      </monogr>
    </biblStruct>
  </biblio>
</raweb>
