Rename 1-introduction/ => 1-background/

2024-09-26 20:41:14 +02:00
parent 45ab5eb19f
commit f1712b7c94
182 changed files with 21 additions and 22 deletions

View File

@@ -0,0 +1,2 @@
\part{Background}
\label{part:background}

View File

@@ -0,0 +1,20 @@
- <https://thenounproject.com/icon/finger-pointing-4230431/>
- <https://thenounproject.com/icon/finger-pointing-4230346/>
- <https://thenounproject.com/icon/vibration-6478365/> from <https://thenounproject.com/creator/iconbunny/>
- <https://thenounproject.com/icon/hololens-1499195/> from <https://thenounproject.com/creator/daniel2021/>
- <https://thenounproject.com/icon/hololens-sideview-966758/> from <https://thenounproject.com/creator/henningg/>
- <https://thenounproject.com/icon/hololens-973887/> from <https://thenounproject.com/creator/henningg/>
## by [Gan Khoon Lay](https://thenounproject.com/creator/leremy/) / [CC BY](https://creativecommons.org/licenses/by/3.0/)
- grab
- accuse
- turning knob
- pressing button
- <https://thenounproject.com/icon/hand-using-mouse-4230206/>
- <https://thenounproject.com/icon/hand-using-phone-4230292/>
- <https://thenounproject.com/icon/hand-getting-cream-4230201/>
- <https://thenounproject.com/icon/pressing-button-4230381/>
- <https://thenounproject.com/icon/hand-holding-a-cube-4230175/>
- <https://thenounproject.com/icon/take-a-break-4230351/>

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" x="0px" y="0px" viewBox="0 0 492 615" style="enable-background:new 0 0 492 492;" xml:space="preserve"><g><g><path d="M151.4,131.2h189.8c5.6,0,10.7,2.3,14.4,6c3.7,3.7,6,8.8,6,14.4v190.4c0,5.6-2.3,10.7-6,14.4c-3.7,3.7-8.8,6-14.4,6H151.4 c-5.6,0-10.7-2.3-14.4-6c-3.7-3.7-6-8.8-6-14.4V151.5c0-5.6,2.3-10.7,6-14.4C140.7,133.5,145.8,131.2,151.4,131.2L151.4,131.2z M341.2,146.6H151.4c-1.4,0-2.6,0.6-3.5,1.5c-0.9,0.9-1.5,2.1-1.5,3.5v190.4c0,1.4,0.6,2.6,1.5,3.5c0.9,0.9,2.1,1.5,3.5,1.5h189.8 c1.4,0,2.6-0.6,3.5-1.5c0.9-0.9,1.5-2.1,1.5-3.5V151.5c0-1.4-0.6-2.6-1.5-3.5C343.8,147.1,342.5,146.6,341.2,146.6z"/><path d="M357,220.6h5.4c4.2,0,7.7,3.4,7.7,7.7v36.9c0,4.2-3.4,7.7-7.7,7.7H357c-4.2,0-7.7-3.4-7.7-7.7v-36.9 C349.3,224.1,352.8,220.6,357,220.6z"/><path d="M130.1,220.6h5.4c4.2,0,7.7,3.4,7.7,7.7v36.9c0,4.2-3.4,7.7-7.7,7.7h-5.4c-4.2,0-7.7-3.4-7.7-7.7v-36.9 C122.4,224.1,125.9,220.6,130.1,220.6z"/><path d="M154.3,131.4c-4.1,1-6.6,5.2-5.6,9.3c1,4.1,5.2,6.6,9.3,5.6c20.5-5.1,22.8-22.1,25.1-39.4c2-14.9,4.1-30.2,26.5-29.1l0,0 c0.1,0,0.2,0,0.4,0l0,0h0h36.3h36.3c0.3,0,0.6,0,0.9,0c21.9-0.8,23.9,14.3,25.9,29.2c2.3,17.3,4.6,34.2,25.1,39.4 c4.1,1,8.3-1.5,9.3-5.6c1-4.1-1.5-8.3-5.6-9.3c-10.4-2.6-12-14.5-13.7-26.5c-2.9-21.7-5.9-43.8-41.7-42.4c-0.1,0-0.2,0-0.3,0 h-36.3h-36.3v0c-36-1.5-39,20.6-42,42.4C166.3,117,164.7,128.8,154.3,131.4z"/><path d="M158,347.2c-4.1-1-8.3,1.5-9.3,5.6c-1,4.1,1.5,8.3,5.6,9.3c10.4,2.6,12,14.5,13.7,26.5c2.9,21.8,6,44,42,42.4v0h36.3h36.3 c0.1,0,0.2,0,0.3,0c35.7,1.4,38.7-20.7,41.7-42.4c1.6-12.1,3.2-23.9,13.7-26.5c4.1-1,6.6-5.2,5.6-9.3c-1-4.1-5.2-6.6-9.3-5.6 c-20.5,5.1-22.8,22.1-25.1,39.4c-2,14.8-4.1,29.9-25.9,29.2c-0.3,0-0.6,0-0.9,0h-36.3H210h0l0,0c-0.1,0-0.2,0-0.4,0l0,0 c-22.4,1-24.4-14.2-26.5-29.1C180.8,369.3,178.5,352.3,158,347.2z"/><path d="M98.8,195.8c3.4-2.5,4.2-7.3,1.7-10.7c-2.5-3.4-7.3-4.2-10.7-1.7c-4.1,2.9-7.7,6.6-10.8,10.9 c-10.1,14-15.2,34.6-15.2,55.1s5.1,41,15.2,55.1c3.1,4.3,6.7,8,10.8,10.9c3.4,2.5,8.2,1.7,10.7-1.7s1.7-8.2-1.7-10.7 c-2.7-2-5.2-4.5-7.3-7.5c-8.2-11.3-12.3-28.6-12.3-46.1s4.1-34.7,12.3-46.1C93.7,200.3,96.1,197.8,98.8,195.8z"/><path d="M402.7,178.1c-3.4-2.5-8.2-1.7-10.7,1.7s-1.7,8.2,1.7,10.7c2.7,2,5.2,4.5,7.3,7.5c8.2,11.3,12.3,28.6,12.3,46.1h0 c0,17.5-4.1,34.7-12.3,46.1c-2.2,3-4.6,5.5-7.3,7.5c-3.4,2.5-4.2,7.3-1.7,10.7c2.5,3.4,7.3,4.2,10.7,1.7 c4.1-2.9,7.7-6.6,10.8-10.9c10.1-14,15.2-34.6,15.2-55.1h0c0-20.4-5.1-41.1-15.2-55.1C410.4,184.8,406.8,181.1,402.7,178.1z"/><path d="M123.9,205.2c3.4-2.5,4.2-7.3,1.7-10.7c-2.5-3.4-7.3-4.2-10.7-1.7c-3.5,2.5-6.6,5.7-9.3,9.4c-8.6,11.9-13,29.4-13,46.7 c0,17.3,4.3,34.8,13,46.7c2.7,3.7,5.8,6.9,9.3,9.4c3.4,2.5,8.2,1.7,10.7-1.7c2.5-3.4,1.7-8.2-1.7-10.7c-2.2-1.6-4.1-3.6-5.8-6 c-6.7-9.2-10-23.4-10-37.7c0-14.3,3.3-28.5,10-37.7C119.8,208.8,121.7,206.8,123.9,205.2z"/><path d="M377.6,188.4c-3.4-2.5-8.2-1.7-10.7,1.7c-2.5,3.4-1.7,8.2,1.7,10.7c2.2,1.6,4.1,3.6,5.8,6c6.7,9.2,10,23.4,10,37.7 s-3.3,28.5-10,37.7c-1.7,2.4-3.7,4.4-5.8,6c-3.4,2.5-4.2,7.3-1.7,10.7c2.5,3.4,7.3,4.2,10.7,1.7c3.5-2.5,6.6-5.7,9.3-9.4 c8.6-11.9,13-29.4,13-46.7s-4.3-34.8-13-46.7C384.2,194.1,381.2,191,377.6,188.4z"/></g></g><text x="0" y="507" fill="#000000" font-size="5px" font-weight="bold" font-family="'Helvetica Neue', Helvetica, Arial-Unicode, Arial, Sans-serif">Created by Iconbunny</text><text x="0" y="512" fill="#000000" font-size="5px" font-weight="bold" font-family="'Helvetica Neue', Helvetica, Arial-Unicode, Arial, Sans-serif">from the Noun Project</text></svg>

View File

@@ -0,0 +1,271 @@
\chapter{Introduction}
\mainlabel{introduction}
This thesis presents research on direct hand interaction with real and virtual everyday objects, visually and haptically augmented using immersive \AR and wearable haptic devices.
\section{Visual and Tactile Object Augmentations}
\label{visuo_haptic_augmentations}
\subsectionstarbookmark{Hand Interaction with Everyday Objects}
In daily life, we simultaneously look at and touch the everyday objects around us without even thinking about it.
Many of their properties can be perceived in a complementary way through both vision and touch, such as their shape, size or texture \cite{baumgartner2013visual}.
But vision often precedes touch, enabling us to expect the tactile sensations we will feel when touching the object, \eg stiffness or texture, and even to predict properties that we cannot see, \eg weight or temperature.
Information from different sensory sources may be complementary, redundant or contradictory \cite{ernst2004merging}.
This is why we sometimes want to touch an object to check one of its properties that we have seen and to compare or confront our visual and tactile sensations.
We then instinctively construct a unified perception of the object we are exploring and manipulating from these visual and tactile sensory modalities, as well as from the movement of our hand and fingers on the object \cite{ernst2002humans}.
The sense of touch allows us not only to perceive our environment, but also to represent ourselves in space and to interact with the surrounding objects.
This is due to the many sensory receptors distributed throughout our hands and body, which can be divided into two modalities: kinesthetic (or proprioceptive), the forces felt by muscles and tendons, and cutaneous (or tactile), the pressures, stretches, vibrations and temperatures felt by the skin.
This rich and complex variety of actions and sensations makes it particularly difficult to artificially recreate the capabilities of touch, for example in virtual or remote operating environments \cite{culbertson2018haptics}.
\subsectionstarbookmark{Wearable Haptics Promise Everyday Use}
\emph{Haptics} is the study of the sense of touch and user interfaces that involve touch.
Haptic devices can be categorized according to how they interface with a user: graspable, touchable and wearable, as illustrated in \figref{haptic-categories}.
Graspable interfaces are the traditional haptic devices that are held in the hand.
They are either grounded devices that provide kinesthetic (force) feedback, \eg a robotic arm or a joystick, or ungrounded tactile devices, \eg a game controller or a smartphone.
Touchable interfaces are actuated devices that are directly touched and that can dynamically change their shape or surface property, such as stiffness or friction, providing simultaneous kinesthetic and cutaneous feedback.
However, graspable interfaces occupy the hand, preventing interaction with other objects, and touchable interfaces often involve cumbersome mechanisms and are by definition limited to their own working surface.
Instead, wearable interfaces are directly mounted on the body to provide cutaneous sensations on the skin in a portable way and without restricting the user's movements \cite{pacchierotti2017wearable}.
\begin{subfigs}{haptic-categories}{
Haptic devices can be classified into three categories according to their interface with the user:
}[][
\item graspable,
\item touchable, and
\item wearable. Adapted from \textcite{culbertson2018haptics}.
]
\subfig[0.25]{culbertson2018haptics-graspable}
\subfig[0.25]{culbertson2018haptics-touchable}
\subfig[0.25]{culbertson2018haptics-wearable}
\end{subfigs}
A wide range of wearable haptic devices have been developed to provide the user with rich virtual haptic sensations, including normal force, skin stretch, vibration and thermal feedback.
\figref{wearable-haptics} shows some examples of different wearable haptic devices with different form factors and rendering capabilities.
Their portability, \ie their small form factor, light weight and unobtrusiveness, makes them particularly promising for everyday use in a variety of applications such as robotics, teleoperation, \VR, and social interactions \cite{pacchierotti2017wearable,culbertson2018haptics}.
But their use in combination with \AR has been little explored so far.
\begin{subfigs}{wearable-haptics}{
Wearable haptic devices can render sensations on the skin as feedback to real or virtual objects being touched.
}[][
\item Wolverine, a wearable exoskeleton that simulates contact with and grasping of virtual objects through force feedback on the fingers \cite{choi2016wolverine}.
\item Touch\&Fold, a wearable haptic device mounted on the nail that folds on demand to render contact, normal force and vibrations to the fingertip \cite{teng2021touch}.
\item The hRing, a wearable haptic ring mounted on the proximal phalanx able to render normal and shear forces to the finger \cite{pacchierotti2016hring}.
\item Tasbi, a haptic bracelet capable of rendering squeeze and vibrotactile feedback to the wrist \cite{pezent2022design}.
]
\subfigsheight{28mm}
\subfig{choi2016wolverine}
\subfig{teng2021touch}
\subfig{pacchierotti2016hring}
\subfig{pezent2019tasbi}
\end{subfigs}
\subsectionstarbookmark{Augmented Reality Is Not Only Visual}
\AR integrates virtual content into the perception of the real world, creating the illusion of a unique \AE.
It thus promises natural and seamless interaction with the physical and digital objects (and their combination) directly with our hands.
It is technically and conceptually closely related to \VR, which replaces the perception of the \RE with a \VE.
\AR and \VR can be placed on a reality-virtuality continuum, as proposed by \textcite{milgram1994taxonomy} and illustrated in \figref{rv-continuum}\footnote{On the original reality-virtuality continuum of \textcite{milgram1994taxonomy}, augmented virtuality is also considered, as the incorporation of real objects to a \VE, and is placed between \AR and \VR. For simplicity, we only consider \AR and \VR in this thesis.}.
It describes the degree of virtuality of the environment along an axis, with one end being the \RE and the other end being a pure \VE, \ie indistinguishable from the real world (such as \emph{The Matrix} movies).
Between these two extremes lies \MR, which comprises \AR and \VR as different levels of mixing real and virtual environments \cite{skarbez2021revisiting}.\footnote{This is the original and classic definition of \MR, but there is still a debate on how to define and characterize \AR and \MR experiences \cite{speicher2019what,skarbez2021revisiting}.}
\AR/\VR is most often understood as addressing only the visual sense, and, like haptics, it can take many forms as a user interface.
The most promising devices are \AR headsets, which are portable displays worn directly on the head, providing the user with an immersive \AE/\VE.
\begin{subfigs}{rv-continuums}{Reality-virtuality continuums. }[][
\item For the visual sense, as originally proposed by and adapted from \textcite{milgram1994taxonomy}.
\item Extension to include the haptic sense on a second, orthogonal axis, proposed by and adapted from \textcite{jeon2009haptic}.
]
\subfig[0.44]{rv-continuum}
\subfig[0.54]{visuo-haptic-rv-continuum3}
\end{subfigs}
\AR/\VR can also be extended to render sensory modalities other than vision.
\textcite{jeon2009haptic} proposed extending the reality-virtuality continuum to include haptic feedback by decoupling it into two orthogonal haptic and visual axes (\figref{visuo-haptic-rv-continuum3}).
The combination of the two axes defines 9 types of visuo-haptic environments, with 3 possible levels of virtuality for each visual or haptic axis: real, augmented and virtual.
For example, a visual \AE that uses a tangible (touchable) object as a proxy to manipulate virtual content is considered a haptic \RE (\eg \figref{kahl2023using}; bottom middle cell in \figref{visuo-haptic-rv-continuum3}), whereas a device that provides synthetic haptic feedback when touching a \VO is considered a haptic \VE (\eg \figref{meli2018combining}; top middle cell in \figref{visuo-haptic-rv-continuum3}).
Haptic \AR is then the combination of real and virtual haptic stimuli \cite{bhatia2024augmenting} (middle row in \figref{visuo-haptic-rv-continuum3}).
In particular, it has been implemented by enhancing the haptic perception of tangible objects by providing timely tactile stimuli using wearable haptics.
\figref{salazar2020altering} shows an example of modifying the perceived stiffness of a tangible object in \VR using simultaneous pressure feedback on the finger (left middle cell in \figref{visuo-haptic-rv-continuum3}).
\figref{bau2012revel} shows another example of visuo-haptic \AR rendering of virtual texture when running the finger on a tangible surface (middle cell in the two axes in \figref{visuo-haptic-rv-continuum3}).
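For illustration, the following short Python sketch (our own recap, not from \textcite{jeon2009haptic}) places the examples discussed above in the decoupled continuum by crossing the virtuality level of each axis.
\begin{verbatim}
# Virtuality levels of the decoupled visual and haptic axes.
LEVELS = ("real", "augmented", "virtual")

# (visual level, haptic level) cells of the examples discussed in the text.
examples = {
    "tangible proxy in visual AR (kahl2023using)": ("augmented", "real"),
    "wearable haptics for a virtual object (meli2018combining)": ("augmented", "virtual"),
    "augmented stiffness of a tangible in VR (salazar2020altering)": ("virtual", "augmented"),
    "virtual texture on a tangible surface (bau2012revel)": ("augmented", "augmented"),
}

for name, (visual, haptic) in examples.items():
    assert visual in LEVELS and haptic in LEVELS
    print(f"{name}: visual={visual}, haptic={haptic}")
\end{verbatim}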
Current visual \AR systems often lack haptic feedback, creating a deceptive and incomplete user experience when reaching the \VE with the hand.
All visual \VOs are inherently intangible and cannot physically constrain a user's hand, making it difficult to perceive their properties congruently and interact with them with confidence and efficiency.
It is therefore necessary to provide haptic feedback that is consistent with the visual \AE and ensures the best possible user experience.
The integration of wearable haptics with \AR seems to be one of the most promising solutions, but it remains challenging due to their many respective characteristics and the additional constraints of combining them.
\begin{subfigs}{visuo-haptic-environments}{Visuo-haptic environments with different degrees of reality-virtuality. }[][
\item Visual \AR environment with a real, tangible haptic object used as a proxy to manipulate a \VO \cite{kahl2023using}.
\item Visual \AR environment with a wearable haptic device that provides virtual, synthetic feedback from contact with a \VO \cite{meli2018combining}.
\item A tangible object seen in a visual \VR environment whose haptic perception of stiffness is augmented with the hRing haptic device \cite{salazar2020altering}.
\item Visuo-haptic rendering of texture on a touched tangible object with a visual \AR display and haptic electrovibration feedback \cite{bau2012revel}.
]
\subfigsheight{31mm}
\subfig{kahl2023using}
\subfig{meli2018combining}
\subfig{salazar2020altering}
\subfig{bau2012revel}
\end{subfigs}
\section{Research Challenges of Wearable Visuo-Haptic Augmented Reality}
\label{research_challenges}
The integration of wearable haptics with \AR to create a visuo-haptic \AE is complex and presents many perceptual and interaction challenges, \ie sensing the \AE and acting effectively upon it.
We are particularly interested in enabling direct contact of virtual and augmented objects with the bare hand.
Our goal is to enable congruent, intuitive and seamless perception and manipulation of the visuo-haptic \AE.
The experience of such a visuo-haptic \AE relies on an interaction loop with the user, as illustrated in \figref{interaction-loop}.
The \RE and the user's hand are tracked in real time by sensors and reconstructed in visual and haptic \VEs.
The interactions between the virtual hand and objects are then simulated and rendered as visual and haptic feedback to the user using an \AR headset and a wearable haptic device.
Because the visuo-haptic \VE is displayed in real time, co-localized and aligned with the real one, the user is given the illusion of directly perceiving and interacting with the virtual content as if it were part of the \RE.
\fig{interaction-loop}{The interaction loop between a user and a visuo-haptic augmented environment.}[
One interacts with the visual (in blue) and haptic (in red) virtual environment through a virtual hand (in purple) interaction technique that tracks real hand movements and simulates contact with \VOs.
The virtual environment is rendered back to the user co-localized with the real one (in gray) using a visual \AR headset and a wearable haptic device.
]
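The following Python sketch gives a minimal, illustrative structure of this loop; the objects and method names (tracker, simulator, etc.) are hypothetical placeholders and not the actual implementation used in this thesis.
\begin{verbatim}
import time

def visuo_haptic_ar_loop(tracker, simulator, ar_headset, haptic_device, rate_hz=90):
    """Sketch of the interaction loop: track, simulate, then render feedback."""
    period = 1.0 / rate_hz
    while True:
        start = time.perf_counter()
        # 1. Track the real hand and environment.
        hand_pose = tracker.get_hand_pose()
        environment = tracker.get_environment()
        # 2. Update the virtual hand and simulate contacts with virtual objects.
        contacts = simulator.step(hand_pose, environment)
        # 3. Render the feedback co-localized with the real environment.
        ar_headset.render(simulator.visual_scene())   # visual augmentation
        haptic_device.render(contacts)                # wearable haptic feedback
        # Run at a fixed rate to limit latency and jitter.
        time.sleep(max(0.0, period - (time.perf_counter() - start)))
\end{verbatim}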
In this context, we identify two main research challenges that we address in this thesis:
\begin{enumerate*}[label=(\Roman*)]
\item providing plausible and coherent visuo-haptic augmentations, and
\item enabling effective manipulation of the augmented environment.
\end{enumerate*}
Each of these challenges also raises numerous design, technical and human issues specific to each of the two types of feedback, wearable haptics and immersive \AR, as well as multimodal rendering and user experience issues in integrating these two sensorimotor feedbacks into a coherent and seamless visuo-haptic \AE.
\subsectionstarbookmark{Provide Plausible and Coherent Visuo-Haptic Augmentations}
Many haptic devices have been designed and evaluated specifically for use in \VR, providing realistic and varied kinesthetic and tactile feedback to \VOs.
Although closely related, (visual) \AR and \VR have key differences in their respective renderings that can affect user perception.
First, the user's hand and \RE are visible in \AR, unlike \VR where there is total control over the visual rendering of the hand and \VE.
As such, in \VR, visual sensations are particularly dominant in perception, and conflicts with haptic sensations are also specifically created to influence the user's perception, for example to create pseudo-haptic \cite{ujitoko2021survey} or haptic retargeting \cite{azmandian2016haptic} effects.
Moreover, many wearable haptic devices take the form of controllers, gloves or exoskeletons, all of which cover the fingertips and are therefore not suitable for \AR.
The user's hand must indeed remain free to touch and interact with the \RE while wearing a wearable haptic device.
It is possible instead to place the haptic actuator close to the point of contact with the \RE, as described above to implement haptic \AR, \eg providing haptic feedback on the nail \cite{ando2007fingernailmounted,teng2021touch}, another phalanx \cite{asano2015vibrotactile,salazar2020altering} or the wrist \cite{pezent2022design,sarac2022perceived} for rendering fingertip contacts with virtual content.
Therefore, when touching a virtual or augmented object, the real and virtual visual sensations are seen as co-localized, but the virtual haptic feedback is not.
It remains to be investigated how such potential discrepancies affect the overall perception to design visuo-haptic renderings adapted to \AR.
So far, \AR can only add visual and haptic sensations to the user's overall perception of the environment, but conversely it is very difficult to remove sensations.
These added virtual sensations can therefore be perceived as out of sync or even inconsistent with the sensations of the \RE, for example with a lower rendering quality, a temporal latency, a spatial shift, or a combination of these.
It is therefore unclear to what extent the real and virtual visuo-haptic sensations will be perceived as realistic or plausible, and to what extent they will conflict or complement each other in the perception of the \AE.
\subsectionstarbookmark{Enable Effective Manipulation of the Augmented Environment}
Touching, grasping and manipulating \VOs are fundamental interactions for \AR \cite{kim2018revisiting}, \VR \cite{bergstrom2021how} and \VEs in general \cite{laviolajr20173d}.
Since the hand is neither occupied nor covered by a haptic device, so as not to impair interaction with the \RE (as described in the previous section), one can expect seamless and direct manipulation of the virtual content with the hand, as if it were real.
Thus, augmenting a tangible object has the advantage of physically constraining the hand, allowing for easy and natural interaction, but manipulating a purely \VO with the bare hand can be challenging without good haptic feedback \cite{maisto2017evaluation,meli2018combining}. %, and one will rely on visual and haptic feedback to guide the interaction.
In addition, current \AR systems have visual rendering limitations that also affect interaction with \VOs. %, due to depth underestimation, a lack of mutual occlusions, and hand tracking latency.
Visual \AR is the display of superimposed images of the virtual world, synchronized with the user's current view of the real world.
But the depth perception of the \VOs is often underestimated \cite{peillard2019studying,adams2022depth}, and there is often a lack of mutual occlusion between the hand and a \VO, \ie that the hand can hide the object or be hidden by the object \cite{macedo2023occlusion}.
Finally, as illustrated in \figref{interaction-loop}, interacting with a \VO is an illusion, because in fact the real hand is controlling in real time a virtual hand, like an avatar, whose contacts with \VOs are then simulated in the \VE.
Therefore, there is inevitably some latency between the real hand's movements and the resulting movements of the \VO, as well as a spatial shift between the real hand and the virtual hand, whose movements are constrained by the touched \VO \cite{prachyabrued2014visual}.
This makes it difficult to perceive the position of the fingers relative to the object before touching or grasping it, and also to estimate the force required to grasp and move the object to a desired location.
Hence, it is necessary to provide visual and haptic feedback that allows the user to efficiently contact, grasp and manipulate a \VO with the hand.
Yet, it is unclear which type of visual and haptic feedback is best suited to guide the \VO manipulation, and whether one, the other, or a combination of the two is most beneficial for users.
\section{Approach and Contributions}
\label{contributions}
The aim of this thesis is to understand how immersive visual and wearable haptic augmentations complement each other in the context of direct hand perception and manipulation with virtual and augmented objects.
As described in the Research Challenges section above, providing a convincing, consistent and effective visuo-haptic \AE to a user is complex and raises many issues.
Our approach is to
\begin{enumerate*}[label=(\arabic*)]
\item design immersive and wearable multimodal visuo-haptic renderings that augment both the objects being interacted with and the hand interacting with them, and
\item evaluate in user studies how these renderings affect the perception of and interaction with these objects using psychophysical, performance, and user experience methods.
\end{enumerate*}
We consider two main axes of research, each addressing one of the research challenges identified above:
\begin{enumerate*}[label=(\Roman*)]
\item modifying the perception of tangible surfaces using visuo-haptic texture augmentations, and
\item improving the manipulation of virtual objects using visuo-haptic augmentations of the hand-object interaction.
\end{enumerate*}
Our contributions in these two axes are summarized in \figref{contributions}.
\fig[0.95]{contributions}{Summary of our contributions through the simplified interaction loop.}[
The contributions are represented in dark gray boxes, and the research axes in light green circles.
The first axis is \textbf{(I)} the design and evaluation of the perception of visuo-haptic texture augmentations of tangible surfaces, directly touched by the hand.
The second axis focuses on \textbf{(II)} improving the manipulation of \VOs with the bare hand using visuo-haptic augmentations of the hand as interaction feedback.
]
\subsectionstarbookmark{Axis I: Modifying the Perception of Tangible Surfaces with Visuo-Haptic Texture Augmentations}
Wearable haptic devices have proven to be effective in modifying the perception of a touched tangible surface, without modifying the tangible, nor covering the fingertip, forming a haptic \AE \cite{bau2012revel,detinguy2018enhancing,salazar2020altering}.
%It is achieved by placing the haptic actuator close to the fingertip, to let it free to touch the surface, and rendering tactile stimuli timely synchronised with the finger movement.
%It enables rich haptic feedback as the combination of kinesthetic sensation from the tangible and cutaneous sensation from the actuator.
However, wearable haptic \AR has been little explored in combination with visual \AR, and neither has the visuo-haptic augmentation of textures.
Texture is indeed one of the main tactile sensations of a surface material \cite{hollins1993perceptual,okamoto2013psychophysical}, perceived equally well by both sight and touch \cite{bergmanntiest2007haptic,baumgartner2013visual}, and one of the most studied haptic renderings (alone, without a visual counterpart) \cite{unger2011roughness,culbertson2014modeling,strohmeier2017generating}.
For this first axis of research, we propose to design and evaluate the perception of virtual visuo-haptic textures augmenting tangible surfaces. %, using an immersive \AR headset and a wearable vibrotactile device.
To this end, we (1) design a system for rendering virtual visuo-haptic texture augmentations, to (2) evaluate how the perception of these textures is affected by the visual virtuality of the hand and the environment (\AR \vs \VR), and (3) investigate the perception of co-localized visuo-haptic texture augmentations in \AR.
First, an effective approach to rendering haptic textures is to generate a vibrotactile signal that represents the finger-texture interaction \cite{culbertson2014modeling,asano2015vibrotactile}.
Yet, achieving natural interaction with the hand and coherent visuo-haptic feedback requires real-time rendering of the textures, no constraints on the hand movements, and good synchronization between the visual and haptic feedback.
Thus, our first objective is to design an immersive, real-time system that allows free exploration of visuo-haptic texture augmentations on tangible surfaces with the bare hand.
Second, many works have investigated the haptic rendering of virtual textures, but few have integrated them with immersive \VEs or have considered the influence of the visual rendering on their perception.
Still, it is known that the visual feedback can alter the perception of real and virtual haptic sensations \cite{schwind2018touch,choi2021augmenting} but also that the force feedback perception of grounded haptic devices is not the same in \AR and \VR \cite{diluca2011effects,gaffary2017ar}.
Hence, our second objective is to understand how the perception of haptic texture augmentation differs depending on the degree of visual virtuality of the hand and the environment.
Finally, some visuo-haptic texture databases have been modeled from real texture captures \cite{culbertson2014penn,balasubramanian2024sens3}, to be rendered as virtual textures with graspable haptics that are perceived as similar to real textures \cite{culbertson2015should,friesen2024perceived}.
However, the rendering of these textures in an immersive and natural visuo-haptic \AR using wearable haptics remains to be investigated.
Our third objective is to evaluate the perception of simultaneous and co-localized visuo-haptic texture augmentation of tangible surfaces in \AR, directly touched by the hand, and to understand to what extent each sensory modality contributes to the overall perception of the augmented texture.
\subsectionstarbookmark{Axis II: Improving Virtual Object Manipulation with Visuo-Haptic Augmentations of the Hand}
In immersive and wearable visuo-haptic \AR, the hand is free to touch and interact seamlessly with real, augmented, and virtual objects, and one can expect natural and direct contact and manipulation of \VOs with the bare hand.
However, the intangibility of the visual \VE, the display limitations of current visual \OST-\AR systems and the inherent spatial and temporal discrepancies between the user's hand actions and the visual feedback in the \VE can make the interaction with \VOs particularly challenging.
%However, the intangibility of the virtual visual environment, the lack of kinesthetic feedback of wearable haptics, the visual rendering limitations of current \AR systems, as well as the spatial and temporal discrepancies between the real environment, the visual feedback, and the haptic feedback, can make the interaction with \VOs with bare hands particularly challenging.
Two particular sensory feedbacks are known to improve such direct \VO manipulation, but they have not been properly investigated in immersive \AR: visual rendering of the hand \cite{piumsomboon2014graspshell,prachyabrued2014visual} and delocalized haptic rendering \cite{lopes2018adding,teng2021touch}.
For this second axis of research, we propose to design and evaluate \textbf{the role of visuo-haptic augmentations of the hand as interaction feedback with \VOs in immersive \OST-\AR}.
We consider the effect on user performance and experience of (1) the visual rendering as a hand augmentation and (2) the combination of different visuo-haptic renderings of the hand manipulation with \VOs.
First, the visual rendering of the virtual hand is a key element for interacting and manipulating \VOs in \VR \cite{prachyabrued2014visual,grubert2018effects}.
A few works have also investigated the visual rendering of the virtual hand in \AR, from simulating mutual occlusions between the hand and \VOs \cite{piumsomboon2014graspshell,al-kalbani2016analysis} to displaying the virtual hand as an avatar overlay \cite{blaga2017usability,yoon2020evaluating}, augmenting the real hand.
But \OST-\AR has significant perceptual differences from \VR due to the visibility of the real hand and environment, and these visual hand augmentations have not been evaluated in the context of \VO manipulation with the bare hand.
Thus, our fourth objective is to \textbf{investigate the visual rendering as hand augmentation} for direct manipulation of \VOs in \OST-\AR.
Second, as described above, wearable haptics for visual \AR rely on moving the haptic actuator away from the fingertips to not impair the hand movements, sensations, and interactions with the \RE.
Previous works have shown that wearable haptics that provide feedback on the hand manipulation with \VOs in \AR can significantly improve the user performance and experience \cite{maisto2017evaluation,meli2018combining}.
However, it is unclear which positioning of the actuator is the most beneficial nor how a haptic augmentation of the hand compares or complements with a visual augmentation of the hand.
Our last objective is to \textbf{investigate the visuo-haptic rendering of the hand manipulation} with \VOs in \OST-\AR using wearable vibrotactile haptics.
\section{Thesis Overview}
\label{thesis_overview}
This thesis is divided into four parts.
In \textbf{\partref{background}}, we describe the context and background of our research; within it, this first \textit{Introduction} chapter presents the research challenges as well as the objectives, approach, and contributions of this thesis.
In \textbf{\chapref{related_work}}, we then review previous work on the perception and manipulation of virtual and augmented objects, directly with the hand, using either wearable haptics, \AR, or their combination.
First, we give an overview of how the hand perceives and manipulates real everyday objects.
Second, we present wearable haptics and haptic augmentations of roughness and hardness of real objects.
Third, we introduce \AR, and how \VOs can be manipulated directly with the hand.
Finally, we describe how multimodal visual and haptic feedback have been combined in \AR to enhance perception and interaction with the hand.
We then address each of our two research axes in a dedicated part.
\noindentskip
In \textbf{\partref{perception}}, we describe our contributions to the first axis of research, augmenting the visuo-haptic texture perception of tangible surfaces.
We evaluate how the visual rendering of the hand (real or virtual), the environment (\AR or \VR) and the textures (displayed or hidden) affect the roughness perception of virtual vibrotactile textures rendered on real surfaces and touched directly with the index finger.
In \textbf{\chapref{vhar_system}}, we detail a system for rendering visuo-haptic virtual textures that augment tangible surfaces using an immersive \AR/\VR headset and a wearable vibrotactile device.
The haptic textures are rendered as a real-time vibrotactile signal representing a grating texture, which is provided to the middle phalanx of the index finger touching the texture using a voice-coil actuator.
The real hand and environment are tracked using a marker-based technique, and their virtual counterparts are visually rendered using the immersive \OST \AR headset Microsoft HoloLens~2.
In \textbf{\chapref{xr_perception}}, we investigate, in a user study, how different the perception of virtual haptic textures is in \AR \vs \VR and when touched by a virtual hand \vs one's own hand.
We use psychophysical methods to measure the users' roughness perception of the virtual textures, and extensive questionnaires to understand how this perception is affected by the visual rendering of the hand and the environment.
In \textbf{\chapref{ar_textures}}, we evaluate the perception of visuo-haptic texture augmentations, touched directly with one's own hand in \AR.
The virtual textures are paired visual and tactile models of real surfaces \cite{culbertson2014one} that we render as visual and haptic overlays on the touched augmented surfaces.
Our objective is to assess the perceived realism, coherence and roughness of the combination of nine representative visuo-haptic texture pairs.
\noindentskip
In \textbf{\partref{manipulation}}, we describe our contributions to the second axis of research: improving the manipulation of \VOs using visuo-haptic augmentations of the hand as interaction feedback with \VOs in immersive \OST-\AR.
In \textbf{\chapref{visual_hand}}, we investigate in a user study the effect of six visual renderings as hand augmentations for the direct manipulation of \VOs, selected from the most popular hand renderings in the \AR literature.
Using the \OST-\AR headset Microsoft HoloLens~2, we evaluate the user performance and experience in two representative manipulation tasks: push-and-slide and grasp-and-place a \VO directly with the hand.
In \textbf{\chapref{visuo_haptic_hand}}, we evaluate in a user study two vibrotactile contact techniques, provided at four different positionings on the user's hand, as haptic rendering of the hand manipulation with \VOs.
They are compared to the two most representative visual hand renderings from the previous chapter, and the user performance and experience are evaluated within the same \OST-\AR setup and manipulation tasks.
\noindentskip
In \textbf{\partref{conclusion}}, we conclude this thesis and discuss short-term future work and long-term perspectives for each of our contributions and research axes.

View File

@@ -0,0 +1,370 @@
\section{Perception and Interaction with the Hand}
\label{haptic_hand}
% describe how the hand senses and acts on its environment to perceive the haptic properties of real everyday objects.
The haptic sense has specific characteristics that make it unique compared to the other senses.
It enables us to perceive a large diversity of properties of everyday objects, through a complex combination of sensations produced by numerous sensory receptors distributed throughout the body, and particularly in the hand.
It also allows us to act on these objects with the hand, to come into contact with them, to grasp them, to actively explore them, and to manipulate them.
This implies that the haptic perception is localized at the points of contact between the hand and the environment, \ie we cannot haptically perceive an object without actively touching it.
These two mechanisms, \emph{action} and \emph{perception}, are therefore closely associated and both are essential to form the haptic experience of interacting with the environment using the hand \cite{lederman2009haptic}.
\subsection{The Haptic Sense}
\label{haptic_sense}
Perceiving the properties of an object involves numerous sensory receptors embedded in the skin, but also in the muscles and joints of the hand, and distributed across the body. They can be divided into two main modalities: \emph{cutaneous} and \emph{kinesthetic}.
\subsubsection{Cutaneous Sensitivity}
\label{cutaneous_sensitivity}
Cutaneous haptic receptors are specialized nerve endings implanted in the skin that respond differently to the various stimuli applied to the skin. \figref{blausen2014medical_skin} shows the location of the four main cutaneous receptors that respond to mechanical deformation of the skin.
\fig[0.6]{blausen2014medical_skin}{Schema of cutaneous mechanoreceptors in a section of the skin \cite{blausen2014medical}.}
Adaptation rate and receptor size are the two key characteristics that respectively determine the temporal and spatial resolution of these \emph{mechanoreceptors}, as summarized in \tabref{cutaneous_receptors}.
The \emph{adaptation rate} is the speed and duration of the response to a stimulus.
Meissner and Pacinian receptors, known as fast-adapting (FA), respond rapidly to a stimulus but stop quickly even though the stimulus is still present, allowing the detection of high-frequency changes.
In contrast, Merkel and Ruffini receptors, known as slow-adapting (SA), have a slower but continuous response to a static, prolonged stimulus.
The \emph{size of the receptor} determines the area of skin that can be sensed by a single nerve ending.
Meissner and Merkel receptors have a small detection area (named Type I) and are sensitive to fine skin deformations, while Ruffini and Pacinian receptors have a larger detection area (named Type II).
The density of mechanoreceptors varies according to skin type and body region.
\emph{Glabrous skin}, especially on the face, feet, hands, and more importantly, the fingers, is particularly rich in cutaneous receptors, giving these regions great tactile sensitivity.
The density of the Meissner and Merkel receptors, which are the most sensitive, is notably high in the fingertips \cite{johansson2009coding}.
Conversely, \emph{hairy skin} is less sensitive and does not contain Meissner receptors, but has additional receptors at the base of the hairs, as well as receptors known as C-tactile, which are involved in pleasantness and affective touch \cite{ackerley2014touch}.
There are also two types of thermal receptors implanted in the skin, which respond to increases or decreases in skin temperature, respectively, providing sensations of warmth or cold \cite{lederman2009haptic}.
Finally, free nerve endings (without specialized receptors) provide information about pain \cite{mcglone2007discriminative}.
\begin{tab}{cutaneous_receptors}{Characteristics of the cutaneous mechanoreceptors.}[
Adaptation rate is the speed and duration of the receptor's response to a stimulus. Receptive size is the area of skin detectable by a single receptor. Sensitivities are the stimuli detected by the receptor. Adapted from \textcite{mcglone2007discriminative} and \textcite{johansson2009coding}.
]
\begin{tabularx}{\linewidth}{p{1.7cm} p{2cm} p{2cm} X}
\toprule
\textbf{Receptor} & \textbf{Adaptation Rate} & \textbf{Receptive Size} & \textbf{Sensitivities} \\
\midrule
Meissner & Fast & Small & Discontinuities (\eg edges), medium-frequency vibration (\qtyrange{5}{50}{\Hz}) \\
Merkel & Slow & Small & Pressure, low-frequency vibration (\qtyrange{0}{5}{\Hz}) \\
Pacinian & Fast & Large & High-frequency vibration (\qtyrange{40}{400}{\Hz}) \\
Ruffini & Slow & Large & Skin stretch \\
\bottomrule
\end{tabularx}
\end{tab}
\subsubsection{Kinesthetic Sensitivity}
\label{kinesthetic_sensitivity}
Kinesthetic receptors are also mechanoreceptors but are located in the muscles, tendons and joints \cite{jones2006human}.
The muscle spindles respond to the length and the rate of stretch/contraction of the muscles.
Golgi tendon organs, located at the junction of muscles and tendons, respond to the force developed by the muscles.
Ruffini and Pacini receptors are found in the joints and respond to joint movement.
Together, these receptors provide sensory feedback about the movement, speed and strength of the muscles and the rotation of the joints during a movement.
They can also sense external forces and torques applied to the body.
Kinesthetic receptors are therefore closely linked to the motor control of the body.
By providing sensory feedback in response to the position and movement of our limbs, they enable us to perceive our body in space, a perception called \emph{proprioception}.
This allows us to plan and execute precise movements to touch or grasp a target, even with our eyes closed.
Cutaneous mechanoreceptors are essential for this perception because any movement of the body or contact with the environment necessarily deforms the skin \cite{johansson2009coding}.
\subsection{Hand-Object Interactions}
\label{hand_object_interactions}
The sense of touch is thus composed of a rich and complex set of various cutaneous and kinesthetic receptors under the skin, in the muscles and in the joints.
These receptors give the hand its great tactile sensitivity and great dexterity in its movements.
\subsubsection{Sensorimotor Continuum of the Hand}
\label{sensorimotor_continuum}
\textcite{jones2006human} have proposed a sensorimotor continuum of hand functions, from mainly sensory activities to activities with a more important motor component.
As illustrated in the \figref{sensorimotor_continuum}, \Citeauthor{jones2006human} propose to delineate four categories of hand function on this continuum:
\begin{itemize}
\item \emph{Passive touch}, or tactile sensing, is the ability to perceive an object through cutaneous sensations with a static hand contact. The object may be moving, but the hand remains static. It allows for relatively good surface perception, \eg in \textcite{gunther2022smooth}.
\item \emph{Exploration}, or active haptic sensing, is the manual and voluntary exploration of an object with the hand, involving all cutaneous and kinesthetic sensations. It enables a more precise perception than passive touch \cite{lederman2009haptic}.
\item \emph{Prehension} is the action of grasping and holding an object with the hand. It involves fine coordination between hand and finger movements and the haptic sensations produced.
\item \emph{Gestures}, or non-prehensile skilled movements, are motor activities without constant contact with an object. Examples include pointing at a target, typing on a keyboard, accompanying speech with gestures, or signing in sign language, \eg in \textcite{yoon2020evaluating}.
\end{itemize}
\fig[0.65]{sensorimotor_continuum}{
The sensorimotor continuum of the hand function proposed by and adapted from \textcite{jones2006human}.
}[
Functions of the hand are classified into four categories based on the relative importance of sensory and motor components.
Icons are from \href{https://thenounproject.com/creator/leremy/}{Gan Khoon Lay} / \href{https://creativecommons.org/licenses/by/3.0/}{CC BY}.
]
This classification has been further refined by \textcite{bullock2013handcentric} into 15 categories of possible hand interactions with an object.
In this thesis, we are interested in exploring visuo-haptic augmentations (\partref{perception}) and grasping of \VOs (\partref{manipulation}) in the context of \AR and wearable haptics.
\subsubsection{Hand Anatomy and Motion}
\label{hand_anatomy}
Before we describe how the hand is used to explore and grasp objects, we need to look at its anatomy.
Underneath the skin, the muscles and tendons move the hand by pulling on the bones to which they are anchored.
As shown in the \figref{blausen2014medical_hand}, the skeleton of the hand is formed of 27 articulated bones.
The wrist, comprising 8 carpal bones, connects the hand to the arm and is the base for the 5 metacarpal bones of the palm, one for each finger.
Each finger is formed by a chain of 3 phalanges, proximal, middle and distal, except for the thumb which has only two phalanges, proximal and distal.
The joints at the base of each phalanx allow flexion and extension, \ie folding and unfolding movements relative to the preceding bone.
The proximal phalanges can also adduct and abduct, \ie move the fingers towards and away from each other.
Finally, the metacarpal of the thumb is capable of flexion/extension and adduction/abduction, which allows the thumb to oppose the other fingers.
These axes of movement are called \DoFs and can be represented by a \emph{kinematic model} of the hand with 27 \DoFs, as shown in the \figref{kinematic_hand_model}.
Thus, the thumb has 5 \DoFs, each of the other four fingers has 4 \DoFs, and the wrist has 6 \DoFs, as it can take any position (3 \DoFs) and orientation (3 \DoFs) in space \cite{erol2007visionbased}.
This complex structure enables the hand to perform a wide range of movements and gestures. However, the way we explore and grasp objects follows simpler patterns, depending on the object being touched and the aim of the interaction.
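These counts can be recapped in a short Python sketch (ours, illustrative only, not a full kinematic model):
\begin{verbatim}
# Degrees of freedom (DoFs) of the kinematic hand model described above.
dofs = {
    "wrist": 6,    # any position (3 DoFs) and orientation (3 DoFs) in space
    "thumb": 5,    # including the 2 DoFs of its metacarpal
    "index": 4,    # flexion/extension of 3 phalanges + adduction/abduction
    "middle": 4,
    "ring": 4,
    "little": 4,
}
assert sum(dofs.values()) == 27
\end{verbatim}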
\begin{subfigs}{hand}{Anatomy and motion of the hand. }[][
\item Schema of the hand skeleton. Adapted from \textcite{blausen2014medical}.
\item Kinematic model of the hand with 27 \DoFs \cite{erol2007visionbased}.
]
\subfigsheight{58mm}
\subfig{blausen2014medical_hand}
\subfig{kinematic_hand_model}
\end{subfigs}
\subsubsection{Exploratory Procedures}
\label{exploratory_procedures}
The exploration of an object by the hand follows patterns of movement, called exploratory procedures \cite{lederman1987hand}.
As illustrated in the \figref{exploratory_procedures}, a specific and optimal movement of the hand is performed for a given property of the object being explored to acquire the most relevant sensory information for that property.
For example, a \emph{lateral movement} of the fingers on the surface to identify its texture, a \emph{pressure} with the finger to perceive its hardness, or a \emph{contour following} of the object to infer its shape.
These three procedures involve only the fingertips and in particular the index finger \cite{gonzalez2014analysis}.
For the other procedures, the whole hand is used: for example, approaching or resting the palm on the object to feel its temperature (\emph{static contact}), or holding the object in the hand to estimate its weight (\emph{unsupported holding}).
The \emph{enclosure} with the hand makes it possible to judge the general shape and size of the object.
It takes only \qtyrange{2}{3}{\s} to perform these procedures, except for contour following, which can take about ten seconds \cite{jones2006human}.
\fig{exploratory_procedures}{Exploratory procedures and their associated object properties (in parentheses). Adapted from \textcite{lederman2009haptic}.}
%The haptic sense alone (without vision) thus allows us to recognize objects and materials with great accuracy.
%The recognition of material properties, \ie the surface and its texture, stiffness and temperature, is better than with the visual sense alone.
%But the recognition of spatial properties, the shape and size of the object, is poorer with haptics than with vision \cite{lederman2009haptic}.
%A few seconds (\qtyrange{2}{3}{\s}) are enough to perform these procedures, except for contour following, which can take about ten seconds \cite{jones2006human}.
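For reference, the mapping described above between exploratory procedures and the object property each is best suited to perceive can be summarized as follows (our own simplified recap of \textcite{lederman1987hand}, in Python):
\begin{verbatim}
# Exploratory procedures and their associated object property.
exploratory_procedures = {
    "lateral motion":      "texture",
    "pressure":            "hardness",
    "contour following":   "shape",
    "static contact":      "temperature",
    "unsupported holding": "weight",
    "enclosure":           "general shape and size",
}
\end{verbatim}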
\subsubsection{Grasp Types}
\label{grasp_types}
Thanks to the degrees of freedom of its skeleton, the hand can take many postures to grasp an object (\secref{hand_anatomy}).
By placing the thumb or palm against the other fingers (pad or palm opposition respectively), or by placing the fingers against each other as if holding a cigarette (side opposition), the hand can hold the object securely.
Grasping adapts to the shape of the object and the task to be performed, \eg grasping a pen with the fingertips and then holding it to write, or taking a mug by the body to fill it and by the handle to drink from it \cite{cutkosky1986modeling}.
Three types of grasp are differentiated according to their degree of strength and precision.
In \emph{power grasps}, the object is held firmly and follows the movements of the hand rigidly.
In \emph{precision grasps}, the fingers can move the object within the hand but without moving the arm.
\emph{Intermediate grasps} combine strength and precision in equal proportions \cite{feix2016grasp}.
For all possible objects and tasks, the number of grasp types can be reduced to 34, classified in the taxonomy shown in \figref{gonzalez2014analysis} \cite{gonzalez2014analysis}.\footnote{An updated taxonomy was later proposed by \textcite{feix2016grasp}: it is more complete but harder to present concisely.}
For everyday objects, this number is even smaller, with between 5 and 10 grasp types depending on the activity \cite{bullock2013grasp}.
Furthermore, the fingertips are the most involved areas of the hand, both in terms of frequency of use and time spent in contact: In particular, the thumb is almost always used, as well as the index and middle fingers, but the other fingers are used less frequently \cite{gonzalez2014analysis}.
This can be explained by the sensitivity of the fingertips (\secref{haptic_sense}) and the ease with which the thumb can be opposed to the index and middle fingers compared to the other fingers.
\fig{gonzalez2014analysis}{Taxonomy of grasp types of \textcite{gonzalez2014analysis}}[, classified according to their type (power, precision or intermediate) and the shape of the grasped object. Each grasp shows the areas of the palm and fingers in contact with the object, together with an example object.]
\subsection{Haptic Perception of Roughness and Hardness}
\label{object_properties}
The active exploration of an object with the hand is performed as a sensorimotor loop: The exploratory movements (\secref{exploratory_procedures}) guide the search for and adapt to sensory information (\secref{haptic_sense}), allowing to construct a haptic perception of the object's properties.
There are two main types of \emph{perceptual properties}.
The \emph{material properties} are the perception of the roughness, hardness, temperature and friction of the surface of the object \cite{bergmanntiest2010tactual}.
The \emph{spatial properties} are the perception of the weight, shape and size of the object \cite{lederman2009haptic}.
Each of these properties is closely related to a physical property of the object, which is defined and measurable, but perception is a subjective experience and often differs from this physical measurement.
Perception also depends on many other factors, such as the movements made and the exploration time, but also on the person, their sensitivity \cite{hollins2000individual} or age \cite{jones2006human}, and the context of the interaction \cite{kahrimanovic2009context,kappers2013haptic}.
These properties are described and rated\footnotemark using scales opposing two adjectives such as \enquote{rough/smooth} or \enquote{hot/cold} \cite{okamoto2013psychophysical}.
\footnotetext{All the haptic perception measurements described in this chapter were performed by blindfolded participants, to control for the influence of vision.}
The most salient and fundamental perceived material properties are the roughness and hardness of the object \cite{hollins1993perceptual,baumgartner2013visual}, which are also the most studied and best understood \cite{bergmanntiest2010tactual}.
\subsubsection{Roughness}
\label{roughness}
Roughness (or smoothness) is the perception of the \emph{micro-geometry} of a surface, \ie asperities with differences in height on the order of millimeters to micrometers \cite{bergmanntiest2010tactual}.
It is, for example, the perception of the fibers of fabric or wood and the texture of sandpaper or paint.
Roughness is what essentially characterizes the perception of the \emph{texture} of the surface \cite{hollins1993perceptual,baumgartner2013visual}.
When touching a surface in static touch, the asperities deform the skin and cause pressure sensations that allow a good perception of coarse roughness.
But when running the finger over the surface with a lateral movement (\secref{exploratory_procedures}), vibrations are also produced, which provide a wider discrimination range and a finer precision of roughness perception \cite{bensmaia2005pacinian}.
In particular, when the asperities are smaller than \qty{0.1}{mm}, such as paper fibers, the pressure cues are no longer captured and only the movement, \ie the vibrations, can be used to detect the roughness \cite{hollins2000evidence}.
This limit distinguishes \emph{macro-roughness} from \emph{micro-roughness}.
The physical properties of the surface determine the haptic perception of roughness.
The most important characteristic is the density of the surface elements, \ie the spacing between them: The perceived (subjective) intensity of roughness increases with spacing, for macro-roughness \cite{klatzky2003feeling,lawrence2007haptic} and micro-roughness \cite{bensmaia2003vibrations}.
For macro-textures, the size of the elements, the applied force and the exploration speed have limited effects on the perceived intensity \cite{klatzky2010multisensory}: macro-roughness is a \emph{spatial perception}.
This allows us to read Braille \cite{lederman2009haptic}.
However, the speed of exploration affects the perceived intensity of micro-roughness \cite{bensmaia2003vibrations}.
To establish the relationship between spacing and intensity for macro-roughness, patterned textured surfaces were manufactured: as a linear grating (on one axis) composed of ridges and grooves, \eg in \figref{lawrence2007haptic_1} \cite{lederman1972fingertip,lawrence2007haptic}, or as a surface composed of conical micro-elements on two axes, \eg in \figref{klatzky2003feeling_1} \cite{klatzky2003feeling}.
As shown in \figref{lawrence2007haptic_2}, there is a quadratic relationship between the logarithm of the perceived roughness intensity $r$ and the logarithm of the space between the elements $s$ ($a$, $b$ and $c$ are empirical parameters to be estimated) \cite{klatzky2003feeling}:
\begin{equation}{roughness_intensity}
	\log(r) \sim a \, \log(s)^2 + b \, \log(s) + c
\end{equation}
A larger spacing between elements increases the perceived roughness, but the perceived intensity reaches a plateau from \qty{\sim 5}{\mm} for the linear grating \cite{lawrence2007haptic}, whereas it starts to decrease from \qty{\sim 2.5}{\mm} for the conical elements \cite{klatzky2003feeling}.
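As an illustration, the empirical parameters of \eqref{roughness_intensity} can be estimated by a simple least-squares fit in log-log space; the following Python sketch uses hypothetical magnitude-estimation data (illustrative values, not from the cited studies):
\begin{verbatim}
import numpy as np

# Hypothetical magnitude-estimation data (illustrative, not from the cited
# studies): element spacings in mm and mean perceived roughness ratings.
spacing_mm = np.array([0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5])
roughness = np.array([2.1, 3.4, 4.6, 5.3, 5.6, 5.5, 5.1])

# Fit log(r) = a*log(s)^2 + b*log(s) + c (quadratic fit in log-log space).
a, b, c = np.polyfit(np.log(spacing_mm), np.log(roughness), deg=2)

# The spacing of maximum perceived roughness is where 2*a*log(s) + b = 0.
s_peak = np.exp(-b / (2 * a))
print(f"a={a:.2f}, b={b:.2f}, c={c:.2f}, peak spacing ~ {s_peak:.2f} mm")
\end{verbatim}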
\begin{subfigs}{lawrence2007haptic}{Estimation of haptic roughness of a linear grating surface by active exploration \cite{lawrence2007haptic}. }[][
\item Schema of a linear grating surface, composed of ridges and grooves.
\item Perceived intensity of roughness (vertical axis) of the surface as a function of the size of the grooves (horizontal axis, interval of \qtyrange{0.125}{4.5}{mm}), the size of the ridges (RW, circles and squares) and the mode of exploration (with the finger in white and via a rigid probe held in hand in black).
]
\subfigsheight{56mm}
\subfig{lawrence2007haptic_1}
\subfig{lawrence2007haptic_2}
\end{subfigs}
It is also possible to perceive the roughness of a surface by \emph{indirect touch}, with a tool held in the hand, for example by writing with a pen on paper \cite{klatzky2003feeling}.
The skin is no longer deformed and only the vibrations of the tool are transmitted.
But this information is sufficient to feel the roughness, whose perceived intensity follows the same quadratic law.
The position of the intensity peak varies with the size of the contact surface of the tool: \eg a small tool allows finer spacings between the elements to be perceived than the finger does (\figref{klatzky2003feeling_2}).
However, as the speed of exploration changes the transmitted vibrations, a faster speed shifts the perceived intensity peak slightly to the right, \ie decreasing perceived roughness for fine spacings and increasing it for large spacings \cite{klatzky2003feeling}.
\begin{subfigs}{klatzky2003feeling}{Estimation of haptic roughness of a surface of conical micro-elements by active exploration \cite{klatzky2003feeling}. }[][
\item Electron micrograph of conical micro-elements on the surface.
\item Perceived intensity of roughness (vertical axis) of the surface as a function of the average spacing of the elements (horizontal axis, interval of \qtyrange{0.8}{4.5}{mm}) and the mode of exploration (with the finger in black and via a rigid probe held in hand in white).
]
\subfig[.25]{klatzky2003feeling_1}
\subfig[.5]{klatzky2003feeling_2}
\end{subfigs}
Even when the fingertips are deafferented (absence of cutaneous sensations), the perception of roughness is maintained \cite{libouton2012tactile}, thanks to the propagation of vibrations in the finger, hand and wrist, for both patterned and \enquote{natural} everyday textures \cite{delhaye2012textureinduced}.
The spectrum of vibrations shifts to higher frequencies as the exploration speed increases, but the brain integrates this change with proprioception to keep the \emph{perception} of the texture \emph{constant}.
For patterned textures, as illustrated in \figref{delhaye2012textureinduced}, the ratio of the finger speed $v$ to the frequency $f_p$ of the vibration intensity peak is measured, most of the time, to be equal to the spatial period $\lambda$ of the element spacing:
\begin{equation}{grating_vibrations}
\lambda \sim \frac{v}{f_p}
\end{equation}
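As a minimal illustration of \eqref{grating_vibrations}, the following Python sketch estimates the spatial period of a grating from the finger speed and the spectral peak of the vibration signal; the signal is synthetic and the sampling rate and speed are assumed values:
\begin{verbatim}
import numpy as np

fs = 2000.0      # sampling rate of the vibration measurement (Hz), assumed
v = 40.0         # finger scanning speed (mm/s), assumed constant
lam_true = 1.0   # spatial period of the grating (mm), to be recovered

# Synthetic vibration signal: a sinusoid at f = v / lambda plus noise.
t = np.arange(0, 2.0, 1 / fs)
signal = np.sin(2 * np.pi * (v / lam_true) * t) + 0.1 * np.random.randn(t.size)

# Frequency of the vibration intensity peak f_p (skipping the DC bin).
spectrum = np.abs(np.fft.rfft(signal))
freqs = np.fft.rfftfreq(t.size, 1 / fs)
f_p = freqs[np.argmax(spectrum[1:]) + 1]

lam_est = v / f_p    # Eq. (grating_vibrations): lambda ~ v / f_p
print(f"estimated spatial period: {lam_est:.2f} mm")
\end{verbatim}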
The vibrations generated by exploring everyday textures are also very specific to each texture and similar between individuals, making them identifiable by vibration alone \cite{manfredi2014natural,greenspon2020effect}.
This shows the importance of vibration cues even for macro textures and the possibility of generating virtual texture sensations with vibrotactile rendering.
\fig[0.55]{delhaye2012textureinduced}{Speed of finger exploration (horizontal axis) on grating textures with different periods $\lambda$ of spacing (in color) and frequency of the vibration intensity peak $f_p$ propagated in the wrist (vertical axis) \cite{delhaye2012textureinduced}.}
Everyday textures are more complex to study because they are composed of multiple elements of different sizes and spacings.
In addition, the perceptions of micro and macro roughness overlap and are difficult to distinguish \cite{okamoto2013psychophysical}.
Thus, individuals have a subjective definition of roughness, with some paying more attention to larger elements and others to smaller ones \cite{bergmanntiest2007haptic}, or even including other perceptual properties such as hardness or friction \cite{bergmanntiest2010tactual}.
\subsubsection{Hardness}
\label{hardness}
Hardness (or softness) is the perception of the \emph{resistance to deformation} of an object when pressed or tapped \cite{bergmanntiest2010tactual}.
The perceived softness of a fruit allows us to judge its ripeness, while ceramic is perceived as hard.
By tapping on a surface, metal will be perceived as harder than wood.
If the surface returns to its original shape after being deformed, the object is elastic (like a spring), otherwise it is plastic (like clay).
When the finger presses on an object (\figref{exploratory_procedures}), the object's surface moves and deforms with some resistance, while the contact area of the skin expands, changing the pressure distribution.
When the surface is touched or tapped, vibrations are also transmitted to the skin \cite{higashi2019hardness}.
Passive touch (without voluntary hand movements) and tapping allow a perception of hardness as good as active touch \cite{friedman2008magnitude}.
Two physical properties determine the haptic perception of hardness: the object's stiffness and elasticity, as shown in \figref{hardness} \cite{bergmanntiest2010tactual}.
The \emph{stiffness} $k$ of an object is the ratio between the applied force $F$ and the resulting \emph{displacement} $D$ of the surface:
\begin{equation}{stiffness}
k = \frac{F}{D}
\end{equation}
The \emph{elasticity} of an object is expressed by its Young's modulus $Y$, which is the ratio between the applied pressure (the force $F$ per unit area $A$) and the resulting deformation $D / l$ (the relative displacement) of the object:
\begin{equation}{young_modulus}
Y = \frac{F / A}{D / l}
\end{equation}
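For concreteness, a short Python sketch computing both quantities from a single hypothetical indentation measurement, following \eqref{stiffness} and \eqref{young_modulus} (the numerical values are illustrative):
\begin{verbatim}
# Hypothetical indentation of a foam block (illustrative values).
F = 2.0       # applied force (N)
A = 1.0e-4    # contact area (m^2), about 1 cm^2
l = 0.05      # length of the object along the compression axis (m)
D = 0.004     # resulting displacement of the surface (m)

k = F / D              # stiffness (N/m), Eq. (stiffness)
Y = (F / A) / (D / l)  # Young's modulus (Pa), Eq. (young_modulus)

print(f"k = {k:.0f} N/m, Y = {Y / 1000:.0f} kPa")
\end{verbatim}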
\begin{subfigs}{stiffness_young}{Perceived hardness of an object by finger pressure. }[][
\item Diagram of an object with a stiffness coefficient $k$ and a length $l$ compressed by a force $F$ on an area $A$ by a distance $D$.
\item Curves of equal perceived hardness intensity as a function of Young's modulus (horizontal axis) and stiffness (vertical axis). The dashed and dotted lines indicate the objects tested, the arrows the correspondences made between these objects, and the grey lines the predictions of the quadratic relationship \cite{bergmanntiest2009cues}.
]
\subfig[.3]{hardness}
\subfig[.45]{bergmanntiest2009cues}
\end{subfigs}
\textcite{bergmanntiest2009cues} showed the role of these two physical properties in the perception of hardness.
With finger pressure, a relative difference (the \emph{Weber fraction}) of \percent{\sim 15} is required to discriminate between two objects of different stiffness or elasticity.
However, in the absence of pressure sensations (by placing a thin disc between the finger and the object), the necessary relative difference becomes much larger (Weber fraction of \percent{\sim 50}).
Thus, the perception of hardness relies for \percent{90} on surface deformation cues and for \percent{10} on displacement cues.
In addition, an object with low stiffness but high Young's modulus can be perceived as hard, and vice versa, as shown in \figref{bergmanntiest2009cues}.
%Finally, when pressing with the finger, the perceived hardness intensity $h$ follows a power law with the stiffness $k$ \cite{harper1964subjective}:
%\begin{equation}{hardness_intensity}
% h = k^{0.8}
%\end{equation}
%En pressant du doigt, l'intensité perçue (subjective) de dureté suit avec la raideur une relation selon une loi de puissance avec un exposant de \num{0.8} \cite{harper1964subjective}, \ie quand la raideur double, la dureté perçue augmente de \num{1.7}.
%\textcite{bergmanntiest2009cues} ont ainsi observé une relation quadratique d'égale intensité perçue de dureté, comme illustré sur la \figref{bergmanntiest2009cues}.
%\subsubsection{Friction}
%\label{friction}
%
%Friction (or slipperiness) is the perception of \emph{resistance to movement} on a surface \cite{bergmanntiest2010tactual}.
%Sandpaper is typically perceived as sticky because it has a strong resistance to sliding on its surface, while glass is perceived as more slippery.
%This perceptual property is closely related to the perception of roughness \cite{hollins1993perceptual,baumgartner2013visual}.
%
%When running the finger on a surface with a lateral movement (\secref{exploratory_procedures}), the skin-surface contacts generate frictional forces in the opposite direction to the finger movement, giving kinesthetic cues, and also stretch the skin, giving cutaneous cues.
%As illustrated in \figref{smith1996subjective_1}, a stick-slip phenomenon can also occur, where the finger is intermittently slowed by friction before continuing to move, on both rough and smooth surfaces \cite{derler2013stick}.
%The amplitude of the frictional force $F_s$ is proportional to the normal force of the finger $F_n$, \ie the force perpendicular to the surface, according to a coefficient of friction $\mu$:
%\begin{equation}{friction}
% F_s = \mu \, F_n
%\end{equation}
%The perceived intensity of friction is thus roughly related to the friction coefficient $\mu$ \cite{smith1996subjective}.
%However, it is a complex perception because it is more determined by the micro-scale interactions between the surface and the skin: It depends on many factors such as the normal force applied, the speed of movement, the contact area and the moisture of the skin and the surface \cite{adams2013finger,messaoud2016relation}.
%In this sense, the perception of friction is still poorly understood \cite{okamoto2013psychophysical}.
%
%\begin{subfigs}{smith1996subjective}{Perceived intensity of friction of different materials by active exploration with the finger \cite{smith1996subjective}. }[
% \item Measurements of normal $F_n$ and tangential $F_t$ forces when exploring two surfaces: one smooth (glass) and one rough (nyloprint). The fluctuations in the tangential force are due to the stick-slip phenomenon. The coefficient of friction $\mu$ can be estimated as the slope of the relationship between the normal and tangential forces.
% \item Perceived friction intensity (vertical axis) as a function of the estimated friction coefficient $\mu$ of the exploration (horizontal axis) for four materials (shapes and colors).
% ]
% \subfigsheight{55mm}
% \subfig{smith1996subjective_1}
% \subfig{smith1996subjective_2}
%\end{subfigs}
%
%Yet, it is a fundamental perception for grasping and manipulating objects.
%The forces of friction make it indeed possible to hold the object firmly in the hand and prevent it from slipping
%The perception of friction also allows us to automatically and very quickly adjust the force we apply to the object in order to grasp it \cite{johansson1984roles}.
%If the finger is anaesthetized, the lack of cutaneous sensation prevents effective adjustment of the gripping force: the forces of the object on the finger are no longer correctly perceived, and the fingers then press harder on the object in compensation, but without achieving good opposition of the fingers \cite{witney2004cutaneous}.
%\subsubsection{Temperature}
%\label{temperature}
%
%Temperature (or coldness/warmness) is the perception of the \emph{transfer of heat} between the touched surface and the skin \cite{bergmanntiest2010tactual}:
%When heat is removed from (added to) the skin, the surface is perceived as cold (hot).
%Metal will be perceived as colder than wood at the same room temperature: This perception is different from the physical temperature of the material and is therefore an important property for distinguishing between materials \cite{ho2006contribution}.
%This perception depends on the thermal conductivity and heat capacity of the material, the volume of the object, the initial temperature difference and the area of contact between the surface and the skin \cite{kappers2013haptic}.
%For example, a larger object or a smoother surface, which increases the contact area, causes more heat circulation and a more intense temperature sensation (hot or cold) \cite{bergmanntiest2008thermosensory}.
%Parce qu'elle est basée sur la circulation de la chaleur, la perception de la température est plus lente que les autres propriétés matérielles et demande un toucher statique (voir \figref{exploratory_procedures}) de plusieurs secondes pour que la température de la peau s'équilibre avec celle de l'objet.
%La température $T(t)$ du doigt à l'instant $t$ et au contact avec une surface suit une loi décroissante exponentielle, où $T_s$ est la température initiale de la peau, $T_e$ est la température de la surface, $t$ est le temps et $\tau$ est la constante de temps:
%\begin{equation}{temperature}
% T(t) = (T_s - T_e) \, e^{-t / \tau} + T_e
%\end{equation}
%Le taux de transfert de chaleur, décrit par $\tau$, et l'écart de température $T_s - T_e$, sont les deux indices essentiels pour la perception de la température.
%Dans des conditions de la vie de tous les jours, avec une température de la pièce de \qty{20}{\celsius}, une différence relative du taux de transfert de chaleur de \percent{43} ou un écart de \qty{2}{\celsius} est nécessaire pour percevoir une différence de température \cite{bergmanntiest2009tactile}.
%\subsubsection{Spatial Properties}
%\label{spatial_properties}
%Weight, size and shape are haptic spatial properties that are independent of the material properties described above.
%Weight (or heaviness/lightness) is the perceived \emph{mass} of the object \cite{bergmanntiest2010haptic}.
%It is typically estimated by holding the object statically in the palm of the hand to feel the gravitational force (\secref{exploratory_procedures}).
%A relative weight difference of \percent{8} is then required to be perceptible \cite{brodie1985jiggling}.
%By lifting the object, it is also possible to feel the object's force of inertia, \ie its resistance to velocity.
%This provides an additional perceptual cue to its mass and slightly improves weight discrimination.
%For both gravity and inertia, kinesthetic cues to force are much more important than cutaneous cues to pressure \cite{bergmanntiest2012investigating}.
%Le lien entre le poids physique et l'intensité perçue est variable selon les individus \cite{kappers2013haptic}.
%Size can be perceived as the object's \emph{length} (in one dimension) or its \emph{volume} (in three dimensions) \cite{kappers2013haptic}.
%In both cases, and if the object is small enough, a precision grip (\figref{gonzalez2014analysis}) between the thumb and index finger can discriminate between sizes with an accuracy of \qty{1}{\mm}, but with an overestimation of length (power law with exponent \qty{1.3}).
%Alternatively, it is necessary to follow the contours of the object with the fingers to estimate its length (\secref{exploratory_procedures}), but with ten times less accuracy and an underestimation of length (power law with an exponent of \qty{0.9}) \cite{bergmanntiest2011cutaneous}.
%The perception of the volume of an object that is not small is typically done by hand enclosure, but the estimate is strongly influenced by the size, shape and mass of the object, for an identical volume \cite{kahrimanovic2010haptic}.
%The shape of an object can be defined as the perception of its \emph{global geometry}, \ie its shape and contours.
%This is the case, for example, when looking for a key in a pocket.
%The exploration of contours and enclosure are then employed, as for the estimation of length and volume.
%If the object is not known in advance, object identification is rather slow, taking several seconds \cite{norman2004visual}.
%Therefore, the exploration of other properties is favoured to recognize the object more quickly, in particular marked edges \cite{klatzky1987there}, \eg a screw among nails (\figref{plaisier2009salient_2}), or certain material properties \cite{lakatos1999haptic,plaisier2009salient}, \eg a metal object among plastic objects.
%\begin{subfigs}{plaisier2009salient}{Identifcation of a sphere among cubes \cite{plaisier2009salient}. }[
% \item The shape has a significant effect on the perception of the volume of an object, \eg a sphere is perceived smaller than a cube of the same volume.
% \item The absence of a marked edge on the sphere makes it easy to identify among cubes.
% ]
% \subfigsheight{40mm}
% \subfig{plaisier2009salient_1}
% \subfig{plaisier2009salient_2}
%\end{subfigs}
\subsection{Conclusion}
\label{haptic_sense_conclusion}
Haptic perception and manipulation of objects with the hand involve several simultaneous mechanisms with complex interactions.
Exploratory movements of the hand are performed on contact with the object to obtain rich sensory information from several cutaneous and kinesthetic receptors.
These sensations express physical parameters in the form of perceptual cues, which are then integrated to form a perception of the property being explored.
It is often the case that one perceptual cue is particularly important in the perception of a property, but perceptual constancy is possible by compensating for its absence with others.
In turn, these perceptions help to guide the grasping and manipulation of the object by adapting the grasp type and the forces applied to the shape of the object and the task to be performed.
\section{Augmenting Object Perception with Wearable Haptics}
\label{wearable_haptics}
One of the roles of haptic systems is to render virtual interactions and sensations that are \emph{similar and comparable} to those experienced by the haptic sense with real objects, particularly in a visual \VE \cite{maclean2008it,culbertson2018haptics}.
Due to the high complexity of the haptic sense and the variety of sensations it can feel, haptic actuators and renderings are designed to only address a subset of these sensations.
While it is challenging to create a realistic haptic experience, it is more important to provide the right sensory stimulus \enquote{at the right moment and at the right place} \cite{hayward2007it}.
Moreover, a haptic augmentation system should allow \enquote{modulating the feel of a real object by virtual [haptic] feedback} \cite{jeon2009haptic}, \ie a touch interaction with a real object whose perception is modified by the addition of virtual haptic feedback.
The haptic system should be hand-held or worn, \eg on the hand, and \enquote{not permanently attached to or integrated in the object} \cite{bhatia2024augmenting}.
\subsection{Level of Wearability}
\label{wearability_level}
Different types of haptic devices can be worn on the hand, but only some of them can be considered wearable.
\textcite{pacchierotti2017wearable} classify them into three levels of wearability, as illustrated in \figref{pacchierotti2017wearable}.
Increasing wearability comes at the cost of losing the system's capability to provide kinesthetic feedback.
\begin{subfigs}{pacchierotti2017wearable}{
Schematic wearability level of haptic devices for the hand \cite{pacchierotti2017wearable}.
}[][
\item World-grounded haptic devices are fixed on the environment to provide kinesthetic feedback to the user.
\item Exoskeletons are body-grounded kinesthetic devices.
\item Wearable haptic devices are grounded on the point of application of the tactile stimulus.
]
\subfigsheight{34mm}
\subfig{pacchierotti2017wearable_1}
\subfig{pacchierotti2017wearable_2}
\subfig{pacchierotti2017wearable_3}
\end{subfigs}
Haptic research originates from robotics and teleoperation, which historically led to the design of haptic systems that are \emph{world-grounded} to an external support in the environment, such as a table (\figref{pacchierotti2017wearable_1}).
These are robotic arms whose end-effector is either held in the hand or worn on a finger and which simulate interactions with a \VE by providing kinesthetic force and torque feedback (\figref{pacchierotti2015cutaneous}).
They provide high fidelity haptic feedback but are heavy, bulky and limited to small workspaces \cite{culbertson2018haptics}.
More portable designs have been developed by moving the grounded part to the user's body.
The entire robotic system is thus mounted on the user, forming an exoskeleton capable of providing kinesthetic feedback to the finger, \eg in \figref{achibet2017flexifingers}.
However, it cannot constrain the movements of the wrist and the reaction force is transmitted to the user where the device is grounded (\figref{pacchierotti2017wearable_2}).
Such \emph{body-grounded} devices are often heavy and bulky and cannot be considered wearable.
\textcite{pacchierotti2017wearable} stated that \enquote{A wearable haptic interface should also be small, easy to carry, comfortable, and it should not impair the motion of the wearer}.
One approach is then to move the grounding point very close to the end-effector (\figref{pacchierotti2017wearable_3}): the interface is limited to cutaneous haptic feedback, but its design is more compact, lightweight, comfortable and portable, \eg in \figref{grounded_to_wearable}.
Moreover, as detailed in \secref{object_properties}, cutaneous sensations are necessary and often sufficient for the perception of the haptic properties of an object explored with the hand, as also argued by \textcite{pacchierotti2017wearable}.
\begin{subfigs}{grounded_to_wearable}{Haptic devices for the hand with different wearability levels. }[][
\item Teleoperation of a virtual cube grasped with the thumb and index fingers each attached to a grounded haptic device \cite{pacchierotti2015cutaneous}.
\item A passive exoskeleton for the fingers simulating the stiffness of a trumpet's pistons \cite{achibet2017flexifingers}.
\item Manipulation of a virtual cube with the thumb and index fingers, each equipped with the 3-RSR wearable haptic device \cite{leonardis20173rsr}.
]
\subfigsheight{38mm}
\subfig{pacchierotti2015cutaneous}
\subfig{achibet2017flexifingers}
\subfig{leonardis20173rsr}
\end{subfigs}
% Tradeoff realistic and cost + analogy with sound, Hi-Fi costs a lot and is realistic, but 40$ BT headphone is more practical and enough, as cutaneous feedback without kinesthesic could be enough for wearable haptics and far more affordable and comfortable than world- or body-grounded haptics + cutaneous even better than kine for rendering surface curvature and fine manipulation
\subsection{Wearable Haptic Devices for the Hand}
\label{wearable_haptic_devices}
We present an overview of wearable haptic devices for the hand, following the categories of \textcite{pacchierotti2017wearable}.
The rendering of a haptic device is indeed determined by the nature of the actuators employed, which form the interface between the haptic system and the user's skin, and therefore by the types of mechanical stimuli they can supply.
Several actuators are often combined in a haptic device to obtain richer haptic feedback.
\subsubsection{Moving Platforms}
\label{moving_platforms}
Moving platforms translate perpendicularly to the skin to create sensations of contact, pressure and edges \cite{pacchierotti2017wearable}.
Placed under the fingertips, they can come into contact with the skin with different forces, speeds and orientations.
The platform is moved by means of cables, \eg in \figref{gabardi2016new}, or articulated arms, \eg in \figref{perez2017optimizationbased}, activated by motors grounded to the nail \cite{gabardi2016new,perez2017optimizationbased}.
The motors lengthen and shorten the cables or orient the arms to move the platform over 3 \DoFs: two for orientation and one for normal force relative to the finger.
However, these platforms are specifically designed to provide haptic feedback to the fingertip in \VEs, preventing interaction with a \RE.
\subsubsection{Pin and Pneumatic Arrays}
\label{array_actuators}
A pin-array is a surface made up of small, rigid pins arranged very close together in a grid, each of which can be moved individually.
When placed in contact with the fingertip, it can create sensations of edge, pressure and texture.
\figref{sarakoglou2012high} shows an example of a pin-array consisting of \numproduct{4 x 4} pins of \qty{1.5}{\mm} diameter and \qty{2}{\mm} height, spaced at \qty{2}{\mm} \cite{sarakoglou2012high}.
Pneumatic systems use a fluid such as air or water to inflate membranes placed against the skin, creating sensations of contact and pressure \cite{raza2024pneumatically}.
Multiple membranes are often used in a grid to simulate edges and textures, as in the \figref{ujitoko2020development} \cite{ujitoko2020development}.
Although these two types of effector can be considered wearable, their actuation requires a high level of mechanical and electronic complexity that makes the system as a whole not portable.
\begin{subfigs}{normal_actuators}{
Normal indentation actuators for the fingertip.
}[][
\item A moving platform actuated with cables \cite{gabardi2016new}.
\item A moving platform actuated by articulated limbs \cite{perez2017optimizationbased}.
\item Diagram of a pin-array of tactors \cite{sarakoglou2012high}.
\item A pneumatic system composed of a \numproduct{12 x 10} array of air cylinders \cite{ujitoko2020development}.
]
\subfigsheight{37mm}
\subfig{gabardi2016new}
\subfig{perez2017optimizationbased}
\subfig{sarakoglou2012high}
\subfig{ujitoko2020development}
\end{subfigs}
\subsubsection{Tangential Motion Actuators}
\label{tangential_actuators}
Similar in design to the moving platforms, tangential motion actuators drive a rigid pin or surface in contact with the fingerpad to create shearing sensations on the skin.
An articulated and motorized arm structure moves the effector in multiple directions over 2 \DoFs parallel to the skin, \eg in \figref{leonardis2015wearable} \cite{leonardis2015wearable}.
Some actuators are capable of both normal and tangential motion over 3 \DoFs on the skin and can also make or break contact with the finger, \eg in \figref{schorr2017fingertip} \cite{schorr2017fingertip}.
\subsubsection{Compression Belts}
\label{belt_actuators}
A simpler alternative approach is to place a belt under the finger, and to actuate it over 2 \DoFs by two motors placed on top of the finger \cite{minamizawa2007gravity}.
By turning in opposite directions, the motors shorten the belt and create a sensation of pressure.
Conversely, by turning simultaneously in the same direction, the belt pulls on the skin, creating a shearing sensation.
The simplicity of this approach allows the belt to be placed anywhere on the hand, leaving the fingertip free to interact with the \RE, \eg the hRing on the proximal phalanx in \figref{pacchierotti2016hring} \cite{pacchierotti2016hring} or Tasbi on the wrist in \figref{pezent2022design} \cite{pezent2022design}.
\begin{subfigs}{tangential_belts}{Tangential motion actuators and compression belts. }[][
\item A skin stretch actuator for the fingertip \cite{leonardis2015wearable}.
\item A 3 \DoF actuator capable of normal and tangential motion on the fingertip \cite{schorr2017fingertip}.
%\item A shearing belt actuator for the fingertip \cite{minamizawa2007gravity}.
\item The hRing, a shearing belt actuator for the proximal phalanx of the finger \cite{pacchierotti2016hring}.
\item Tasbi, a wristband capable of pressure and vibrotactile feedback \cite{pezent2022design}.
]
\subfigsheight{33.5mm}
\subfig{leonardis2015wearable}
\subfig{schorr2017fingertip}
\subfig{pacchierotti2016hring}
\subfig{pezent2022design}
\end{subfigs}
\subsubsection{Vibrotactile Actuators}
\label{vibrotactile_actuators}
Vibrotactile actuators are the most common and simplest wearable haptic interfaces, and are available as consumer products.
They are small, lightweight and can be placed directly on any part of the hand.
\textcite{choi2013vibrotactile} provided a detailed review.
All vibrotactile actuators are based on the same principle: generating an oscillating motion from an electric current with a frequency and amplitude high enough to be perceived by cutaneous mechanoreceptors.
Several types of vibrotactile actuators are used in haptics, with different trade-offs between size, proposed \DoFs and application constraints.
An \ERM is a direct current (DC) motor that rotates an off-center mass when a voltage or current is applied (\figref{precisionmicrodrives_erm}). \ERMs are easy to control, inexpensive and can be encapsulated in a cylinder or coin form factor of a few millimeters. However, they have only one \DoF because both the frequency and amplitude of the vibration are coupled to the rotation speed, \eg low (high) frequencies are output at low (high) amplitudes, as shown in \figref{precisionmicrodrives_erm_performances}.
\begin{subfigs}{erm}{Diagram and performance of \ERMs. }[][
\item Diagram of a cylindrical encapsulated \ERM. From Precision Microdrives~\footnotemark.
\item Amplitude and frequency output of an \ERM as a function of the input voltage.
]
\subfigsheight{45mm}
\subfig{precisionmicrodrives_erm}
\subfig{precisionmicrodrives_erm_performances}
\end{subfigs}
\footnotetext{\url{https://www.precisionmicrodrives.com/}}
A \LRA consists of a coil that creates a magnetic field from an alternating current (AC) to oscillate a magnet attached to a spring, like an audio loudspeaker (\figref{precisionmicrodrives_lra}).
They are more complex to control and a bit larger than \ERMs.
Each \LRA is designed to vibrate with maximum amplitude at a given resonant frequency, but will not vibrate efficiently at other frequencies, \ie their bandwidth is narrow, as shown in \figref{azadi2014vibrotactile}.
A voice-coil actuator is similar to a \LRA but capable of generating vibrations with two \DoFs, \ie independent control of the frequency and amplitude of the vibration over a wide bandwidth.
They are larger in size than \ERMs and \LRAs, but can generate more complex renderings.
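To illustrate this difference in control, the following Python sketch synthesizes a drive signal for a generic voice-coil actuator with independently chosen amplitude and frequency, something an \ERM cannot do since both are coupled to its rotation speed; the sample rate is an assumed, audio-like value:
\begin{verbatim}
import numpy as np

fs = 8000.0   # drive signal sample rate (Hz), assumed audio-like output

def voice_coil_drive(amplitude, frequency, duration):
    """Sinusoidal drive with independently chosen amplitude and frequency."""
    t = np.arange(0, duration, 1 / fs)
    return amplitude * np.sin(2 * np.pi * frequency * t)

# Same frequency at two amplitudes, then same amplitude at another frequency.
sig = np.concatenate([voice_coil_drive(0.2, 150.0, 0.1),
                      voice_coil_drive(0.8, 150.0, 0.1),
                      voice_coil_drive(0.8, 250.0, 0.1)])
\end{verbatim}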
Piezoelectric actuators deform a solid material when a voltage is applied.
They are very small and thin and provide two \DoFs of amplitude and frequency control.
However, they require high voltages to operate, limiting their use in wearable devices.
\begin{subfigs}{lra}{Diagram and performance of \LRAs. }[][
\item Diagram. From Precision Microdrives~\footnotemarkrepeat.
\item Force generated by two \LRAs as a function of sinusoidal wave input with different frequencies: both their maximum force and resonant frequency are different \cite{azadi2014vibrotactile}.
]
\subfigsheight{50mm}
\subfig{precisionmicrodrives_lra}
\subfig{azadi2014vibrotactile}
\end{subfigs}
\subsection{Modifying Perceived Haptic Roughness and Hardness}
\label{tactile_rendering}
Rendering a haptic property consists in modeling and reproducing virtual sensations comparable to those perceived when interacting with real objects \cite{klatzky2013haptic}.
By adding such a rendering as feedback synchronized in time with the touch actions of the hand on a real object \cite{bhatia2024augmenting}, the perception of the object's haptic property can be modified.
The integration of the real and virtual sensations into a single property perception is discussed in more detail in \secref{sensations_perception}.
%, both the real and virtual haptic sensations are integrated into a single property perception, as presented in \secref{sensations_perception}, \ie the perceived haptic property is modulated by the added virtual feedback.
In particular, the visual rendering of a touched object can also influence the perception of its haptic properties, \eg by modifying its visual texture in \AR or \VR, as discussed in the \secref{visuo_haptic}.
\textcite{bhatia2024augmenting} categorize the haptic augmentations into three types: direct touch, touch-through, and tool-mediated.
In \emph{direct touch}, the haptic device does not cover the inside of the hand so as not to impair the user's interaction with the \RE; this is typically achieved with wearable haptics.
In touch-through and tool-mediated, or \emph{indirect feel-through} \cite{jeon2015haptic}, the haptic device is placed between the hand and the \RE.
%We are interested in direct touch augmentations with wearable haptics (\secref{wearable_haptic_devices}), as their integration with \AR is particularly promising for free hand interaction with visuo-haptic augmentations.
Many haptic augmentations were first developed with touch-through devices, and some (but not all) were later transposed to direct touch augmentation with wearable haptic devices.
%We also focus on tactile augmentations stimulating the mechanoreceptors of the skin (\secref{haptic_sense}), thus excluding temperature perception, as they are the most common existing haptic interfaces.
Since we have chosen to focus in \secref{object_properties} on the haptic perception of roughness and hardness of objects, we review below the methods to modify the perception of these properties.
Of course, wearable haptics can also be used in a direct touch context to modify the perceived friction \cite{konyo2008alternative,salazar2020altering}, weight \cite{minamizawa2007gravity}, or local deformation \cite{salazar2020altering} of real objects, but they are rare \cite{bhatia2024augmenting} and will not be detailed here.
% \cite{klatzky2003feeling} : rendering roughness, friction, deformation, temperatures
% \cite{girard2016haptip} : renderings with a tangential motion actuator
\subsubsection{Roughness}
\label{texture_rendering}
To modify the perception of the haptic roughness (or texture, see \secref{roughness}) of a real object, vibrations are typically applied to the skin by the haptic device as the user's finger moves over the surface.
%This is because running the finger or a tool on a textured surface generates pressures and vibrations (\secref{roughness}) at frequencies that are too high for rendering capabilities of most haptic devices \cite{campion2005fundamental,culbertson2018haptics}.
There are two main approaches to rendering virtual texture perceptions: \emph{simulation models} and \emph{data-driven models} \cite{klatzky2013haptic,culbertson2018haptics}.
\paragraph{Simulation Models}
%Simulations of virtual textures are based on the physics of the interaction between the finger and the surface, and are used to generate the vibrations that the user feels when running the finger over the surface.
The simplest texture simulation model renders a 1D sinusoidal grating of spatial period $\lambda$ and amplitude $A$ as a vibration $v(t)$, when the user scans the surface at velocity $\dot{x}(t)$:
\begin{equation}{grating_rendering}
	v(t) = A \sin\left(2 \pi \int_0^t \frac{\dot{x}(\tau)}{\lambda} \, \mathrm{d}\tau\right)
\end{equation}
That is, this model generates a periodic signal whose frequency is modulated proportionally to the user's velocity, implementing the speed-frequency ratio observed with real patterned textures (\eqref{grating_vibrations}).
It gives the user the illusion of a texture with a \emph{fixed spatial period} that approximates real manufactured grating textures (\secref{roughness}).
The user's position could have been used instead of the velocity, but this requires measuring the position and generating the signal at rates too high (\qty{10}{\kHz}) for most sensors and haptic actuators \cite{campion2005fundamental}.
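The following Python sketch is a minimal implementation of this velocity-driven rendering, accumulating the phase of the sinusoid block by block so that the instantaneous frequency tracks $\dot{x}(t)/\lambda$ without requiring a high-rate position sensor; the output rate and parameter values are assumptions for illustration:
\begin{verbatim}
import numpy as np

fs = 2000.0   # output sample rate of the vibrotactile signal (Hz), assumed
lam = 1.0     # virtual spatial period (mm)
A = 1.0       # vibration amplitude (normalized)
phase = 0.0   # accumulated phase, kept continuous across blocks

def render_block(finger_speed_mm_s, n_samples):
    """One block of the virtual grating signal (Eq. grating_rendering),
    assuming the finger speed is constant over the block."""
    global phase
    f_inst = finger_speed_mm_s / lam          # instantaneous frequency (Hz)
    t = np.arange(n_samples) / fs
    block = A * np.sin(2 * np.pi * f_inst * t + phase)
    phase += 2 * np.pi * f_inst * n_samples / fs
    return block

# Example: the tracker reports 30 mm/s then 60 mm/s; the frequency doubles.
out = np.concatenate([render_block(30.0, 200), render_block(60.0, 200)])
\end{verbatim}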
With a voice-coil actuator attached to the middle phalanx of the finger, \textcite{asano2015vibrotactile} used this model to increase the perceived roughness (\figref{asano2015vibrotactile_2}).
Participants moved their finger over real grating textures (\qtyrange{0.15}{0.29}{\mm} groove and ridge widths) with a superimposed virtual sine grating (\qty{1}{\mm} spatial period) rendered after \eqref{grating_rendering}.
The perceived roughness increased with the amplitude of the virtual texture, but too high an amplitude decreased it instead.
\textcite{ujitoko2019modulating} instead used a square wave signal and a hand-held stylus with an embedded voice-coil.
\textcite{friesen2024perceived} compared the frequency modulation of \eqref{grating_rendering} with amplitude modulation (\figref{friesen2024perceived}), and found that the frequency modulation was perceived as more similar to real sinusoidal gratings for lower spatial periods (\qty{0.5}{\mm}) but both modulations were effective for higher spatial periods (\qty{1.5}{\mm}).
%\textcite{friesen2024perceived} proposed
The model in \eqref{grating_rendering} can be extended to 2D textures by adding a second sinusoidal grating with an orthogonal orientation, as done by \textcite{girard2016haptip}.
More complex models have also been developed to be physically accurate and reproduce with high fidelity the roughness perception of real patterned surfaces \cite{unger2011roughness}, but they require high-fidelity force feedback devices that are expensive and have a limited workspace.
\paragraph{Data-driven Models}
Because simulations of realistic virtual textures can be very complex to design and to render in real-time, direct capture and models of real textures have been developed \cite{culbertson2018haptics}.
\textcite{okamura1998vibration} were the first to measure the vibrations produced by the interaction of a stylus dragged over sandpaper and patterned surfaces.
They found that the contact vibrations with patterns can be modeled as exponentially decaying sinusoids (\eqref{contact_transient}) that depend on the normal force and the scanning velocity of the stylus on the surface.
This technique was used by \textcite{ando2007fingernailmounted} to augment a smooth sheet of paper with a virtual patterned texture: with a \LRA mounted on the nail, they rendered the finger's contacts with the virtual pattern as \qty{20}{\ms} vibration impulses at \qty{130}{\Hz} (\figref{ando2007fingernailmounted}).
Participants matched the virtual textures to real ones of \qty{0.25}{\mm} height and \qtyrange{1}{10}{\mm} width, but systematically overestimated the virtual width by about \qty{4}{\mm}.
Other models have then been developed to capture everyday textures (such as sandpaper) \cite{guruswamy2011iir} with many force and velocity measurements \cite{romano2012creating,culbertson2014modeling}.
Such data-driven models interpolate from the user's velocity and force measurements to generate a virtual texture in real time (\secref{vibrotactile_actuators}).
This led to the release of the Penn Haptic Texture Toolkit (HaTT) database, a public set of stylus recordings and models of 100 haptic textures \cite{culbertson2014one}.
A similar database, but captured from a direct touch context with the fingertip, has recently been released \cite{balasubramanian2024sens3}.
A limitation of these data-driven models is that they can only render \emph{isotropic} textures: the recording does not depend on the position of the measurement, and the rendering is the same regardless of the direction of movement.
Alternative models have been proposed to render both isotropic and patterned textures \cite{chan2021hasti}.
When comparing real textures felt through a stylus with their virtual models rendered with a voice-coil actuator attached to the stylus (\figref{culbertson2012refined}), the virtual textures were found to accurately reproduce the perception of roughness, but hardness and friction were not rendered properly \cite{culbertson2014modeling}.
\textcite{culbertson2015should} further showed that the perceived realism of the virtual textures, and similarity to the real textures, depended mostly on the user's speed but not on the user's force as inputs to the model, \ie responding to speed is sufficient to render isotropic virtual textures.
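As a simplified illustration of such speed-driven interpolation of recorded textures (not the actual models of the cited toolkits, which fit autoregressive filters to the recordings and also use force), the following Python sketch blends vibration loops recorded at a few scanning speeds according to the user's current speed:
\begin{verbatim}
import numpy as np

# Hypothetical recordings: vibration loops captured at fixed scanning speeds
# (mm/s) on the same texture; replaced here by placeholder noise.
recorded_speeds = np.array([20.0, 60.0, 120.0])
loops = np.random.randn(len(recorded_speeds), 400)   # one loop per speed

def texture_sample(user_speed, i):
    """i-th output sample, linearly interpolated between the two recordings
    whose scanning speeds bracket the user's current speed."""
    s = np.clip(user_speed, recorded_speeds[0], recorded_speeds[-1])
    hi = int(np.clip(np.searchsorted(recorded_speeds, s), 1,
                     len(recorded_speeds) - 1))
    lo = hi - 1
    w = (s - recorded_speeds[lo]) / (recorded_speeds[hi] - recorded_speeds[lo])
    return (1 - w) * loops[lo, i % 400] + w * loops[hi, i % 400]
\end{verbatim}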
\begin{subfigs}{textures_rendering_data}{Augmenting haptic texture perception with voice-coil actuators. }[][
\item Increasing and decreasing the perceived roughness of a real patterned texture in direct touch \cite{asano2015vibrotactile}.
\item Comparing real patterned texture with virtual texture augmentation in direct touch \cite{friesen2024perceived}.
\item Rendering virtual contacts in direct touch with the virtual texture \cite{ando2007fingernailmounted}.
\item Rendering an isotropic virtual texture over a real surface while sliding a hand-held stylus across it \cite{culbertson2012refined}.
]
\subfigsheight{36mm}
\subfig{asano2015vibrotactile_2}
\subfig{friesen2024perceived}
\subfig{ando2007fingernailmounted}
\subfig{culbertson2012refined}
\end{subfigs}
\subsubsection{Hardness}
\label{hardness_rendering}
The perceived hardness (\secref{hardness}) of a real surface can be modified by rendering forces or vibrations.
\paragraph{Modulating Forces}
When tapping or pressing a real object, the perceived stiffness $\tilde{k}$ of its surface can be modulated with force feedback \cite{jeon2015haptic}.
This was first proposed by \textcite{jeon2008modulating} who augmented a real surface tapped in 1 \DoF with a grounded force-feedback device held in the hand (\figref{jeon2009haptic_1}).
When the haptic end-effector contacts the object at time $t$, the object's surface deforms by displacement $x_r(t)$ and opposes a real reaction force $f_r(t)$.
The virtual force $\tilde{f_r}(t)$ of the device is then controlled such that:
\begin{equation}{stiffness_augmentation}
	\tilde{f_r}(t) = \tilde{k} \, x_r(t) - f_r(t)
\end{equation}
A force sensor embedded in the device measures the reaction force $f_r(t)$.
The displacement $x_r(t)$ is estimated with the reaction force and the tapping velocity using a predefined model of different materials as described in \textcite{jeon2011extensions}.
As shown in \figref{jeon2009haptic_2}, the total force perceived by the user, $f_r(t) + \tilde{f_r}(t) = \tilde{k} \, x_r(t)$, is modulated, but not the displacement $x_r(t)$, hence the perceived stiffness is $\tilde{k}$.
This stiffness augmentation technique was then extended to allow tapping and pressing with 3 \DoFs \cite{jeon2010stiffness}, to render friction and weight augmentations \cite{jeon2011extensions}, and to grasp and squeeze the real object with two contact points \cite{jeon2012extending}.
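A minimal control-loop sketch of \eqref{stiffness_augmentation} in Python, assuming an idealized device in which the force sensor reading, the displacement estimation from a material model, and the force command are available as placeholder functions:
\begin{verbatim}
def stiffness_augmentation_step(k_target, read_reaction_force,
                                estimate_displacement, command_force):
    """One control step of Eq. (stiffness_augmentation): the device force is
    the difference between the desired force k_target * x_r and the measured
    real reaction force f_r, so the total felt force matches stiffness
    k_target."""
    f_r = read_reaction_force()        # measured real reaction force (N)
    x_r = estimate_displacement(f_r)   # displacement from a material model (m)
    f_device = k_target * x_r - f_r    # virtual force commanded to the device (N)
    command_force(f_device)
\end{verbatim}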
\begin{subfigs}{stiffness_rendering_grounded}{
Augmenting the perceived stiffness of a real surface with a hand-held force-feedback device.
}[][
\item Diagram of a user tapping the surface \cite{jeon2009haptic}.
\item Displacement-force curves of a real rubber ball (dashed line) and when its perceived stiffness $\tilde{k}$ is modulated \cite{jeon2009haptic}.
]
\subfig[0.38]{jeon2009haptic_1}
\subfig[0.42]{jeon2009haptic_2}
\end{subfigs}
\textcite{detinguy2018enhancing} transposed this stiffness augmentation technique to the hRing device (\secref{belt_actuators}): while pressing a real piston with the fingertip by a displacement $x_r(t)$, the belt compressed the finger with a virtual force $\tilde{k}\,x_r(t)$, following the principle of \eqref{stiffness_augmentation} with $\tilde{k}$ the added stiffness, which increased the perceived stiffness of the piston (\figref{detinguy2018enhancing}).
More importantly, the augmentation proved to be robust to the placement of the device, as the increased stiffness was perceived the same on the fingertip, middle phalanx, and proximal phalanx.
Conversely, the technique also allows the perceived stiffness to be \emph{decreased}, by compressing the phalanx before the contact and then releasing the pressure as the user presses the piston \cite{salazar2020altering}.
\textcite{tao2021altering} proposed instead to restrict the deformation of the fingerpad by pulling a hollow frame around it (\figref{tao2021altering}): it augments the finger contact area, a cue associated with a lower Young's modulus (\secref{hardness}), and thus makes the object feel softer.
\begin{subfigs}{stiffness_rendering_wearable}{
Modifying the perceived stiffness with wearable pressure devices.
}[][
\item Modifying the perceived stiffness of a piston by pressing the finger during or prior to the contact \cite{detinguy2018enhancing,salazar2020altering}.
\item Decreasing the perceived stiffness of a hard object by restricting the fingerpad deformation \cite{tao2021altering}.
]
\subfigsheight{35mm}
\subfig{detinguy2018enhancing}
\subfig{tao2021altering}
\end{subfigs}
\paragraph{Vibrations Augmentations}
\textcite{okamura2001realitybased} measured impact vibrations $v(t)$ when tapping on real objects and found that they can be modeled as an exponentially decaying sinusoid:
\begin{equation}{contact_transient}
	v(t) = A \, |v_{in}| \, e^{- \tau t} \sin(2 \pi f t)
\end{equation}
where $A$ is the amplitude slope, $\tau$ the decay rate and $f$ the frequency, which are measured material properties, and $v_{in}$ is the impact velocity.
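A short Python sketch generating such a contact transient for one tap, with illustrative parameter values in place of measured material properties:
\begin{verbatim}
import numpy as np

fs = 4000.0   # actuator output sample rate (Hz), assumed
A = 0.8       # amplitude slope (illustrative, normally measured per material)
tau = 60.0    # decay rate (1/s), illustrative
f = 300.0     # transient frequency (Hz), illustrative
v_in = 0.2    # impact velocity of the finger (m/s)

# Eq. (contact_transient): exponentially decaying sinusoid scaled by |v_in|.
t = np.arange(0, 0.05, 1 / fs)   # 50 ms transient
v = A * abs(v_in) * np.exp(-tau * t) * np.sin(2 * np.pi * f * t)
\end{verbatim}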
It has been shown that these material properties perceptually express the stiffness (\secref{hardness}) of real \cite{higashi2019hardness} and virtual surfaces \cite{choi2021perceived}.
Therefore, when contacting or tapping a real object through an indirect feel-through interface that provides such vibrations (\figref{choi2021augmenting_control}) using a voice-coil (\secref{vibrotactile_actuators}), the perceived stiffness can be increased or decreased \cite{kuchenbecker2006improving,hachisu2012augmentation,choi2021augmenting}, \eg a sponge feels stiffer or wood feels softer (\figref{choi2021augmenting_results}).
A challenge with this technique is to provide the vibration feedback at the right time to be felt simultaneously with the real contact \cite{park2023perceptual}.
\begin{subfigs}{contact_vibrations}{
Augmenting perceived stiffness using vibrations when touching a real surface \cite{choi2021augmenting}.
}[][
\item Voltage inputs (top) to the voice-coil for soft, medium, and hard vibrations, with the corresponding displacement (middle) and force (bottom) outputs of the actuator.
\item Perceived stiffness intensity of real sponge (\enquote{Sp}) and wood (\enquote{Wd}) surfaces without added vibrations (\enquote{N}) and modified by soft (\enquote{S}), medium (\enquote{M}) and hard (\enquote{H}) vibrations.
]
\subfigsheight{50mm}
\subfig{choi2021augmenting_control}
\subfig{choi2021augmenting_results}
\end{subfigs}
Vibrations on contact have been employed with wearable haptics, but to the best of our knowledge only to render \VOs \cite{pezent2019tasbi,teng2021touch,sabnis2023haptic}.
We describe them in the \secref{vhar_haptics}.
%A promising alternative approach
%\cite{kildal20103dpress}
%\begin{subfigs}{vibtration_grains}{Augmenting perceived stiffness of a real surface with vibrations. }
% \subfigsheight{35mm}
% \subfig{sabnis2023haptic_device}
% \subfig{sabnis2023haptic_control}
%\end{subfigs}
%\textcite{choi2021perceived} combined and compared these two rendering approaches (spring-damper and exponential decaying sinusoids) but to render purely virtual surfaces.
%They found that the perceived intensity of the virtual hardness $\tilde{h}$ followed a power law, similarly to \eqref{hardness_intensity}, with the amplitude $a$, the %frequency $f$ and the damping $b$ of the vibration, but not the decay time $\tau$.
%\cite{park2023perceptual}
%\subsubsection{Friction}
%\label{friction_rendering}
%\cite{konyo2008alternative}
%\cite{provancher2009fingerpad}
%\cite{smith2010roughness}
%\cite{jeon2011extensions}
%\cite{salazar2020altering}
%\cite{yim2021multicontact}
%\subsubsection{Weight}
%\label{weight_rendering}
%\cite{minamizawa2007gravity}
%\cite{minamizawa2008interactive}
%\cite{jeon2011extensions}
%\cite{choi2017grabity}
%\cite{culbertson2017waves}
\subsection{Conclusion}
\label{wearable_haptics_conclusion}
Haptic systems aim to provide virtual interactions and sensations similar to those with real objects.
The complexity of the haptic sense has led to the design of numerous haptic devices and renderings.
While many haptic devices can be worn on the hand, only a few are compact and portable enough to be considered wearable, and these are limited to cutaneous feedback.
If the haptic rendering is synchronized in time with the user's touch actions on a real object, the perceived haptic properties of the object can be modified.
Several rendering methods have been developed to modify the perceived roughness and hardness, mostly using vibrotactile feedback and, to a lesser extent, pressure feedback.
However, not all of these haptic augmentations have yet been transposed to wearable haptics.
%, unlike most previous actuators that are designed specifically for fingertips and would require mechanical adaptation to be placed on other parts of the hand.
% thanks to the vibration propagation and the sensory capabilities distributed throughout the skin, they can be placed without adaption and on any part of the hand
\section{Manipulating Objects with the Hands in AR}
\label{augmented_reality}
As with haptic systems (\secref{wearable_haptics}), visual \AR devices generate and integrate virtual content into the user's perception of the \RE, creating the illusion of the presence of the virtual.
Immersive systems such as headsets leave the hands free to interact with \VOs, promising natural and intuitive interactions similar to those with everyday real objects.
%\begin{subfigs}{sutherland1968headmounted}{Photos of the first \AR system \cite{sutherland1968headmounted}. }[
% \item The \AR headset.
% \item Wireframe \ThreeD \VOs were displayed registered in the real environment (as if there were part of it).
% ]
% \subfigsheight{45mm}
% \subfig{sutherland1970computer3}
% \subfig{sutherland1970computer2}
%\end{subfigs}
\subsection{What is Augmented Reality?}
\label{what_is_ar}
The first \AR headset was invented by \textcite{sutherland1968headmounted}: With the technology available at the time, it was already capable of displaying \VOs at a fixed point in space in real time, giving the user the illusion that the content was present in the room.
Fixed to the ceiling, the headset displayed a stereoscopic (one image per eye) perspective projection of the virtual content on a transparent screen, taking into account the user's position, and thus already following the interaction loop presented in \figref[introduction]{interaction-loop}.
\subsubsection{A Definition of AR}
\label{ar_definition}
%\footnotetext{There quite confusion in the literature and in (because of) the industry about the terms \AR and \MR. The term \MR is very often used as a synonym of \AR, or a version of \AR that enables an interaction with the virtual content. The title of this section refers to the title of the highly cited paper by \textcite{speicher2019what} that examines this debate.}
The first formal definition of \AR was proposed by \textcite{azuma1997survey}: an \AR system must (1) combine real and virtual, (2) be interactive in real time, and (3) register real and virtual\footnotemark.
Each of these characteristics is essential: the real-virtual combination distinguishes \AR from \VR, a movie with integrated digital content is not interactive and a \TwoD overlay like an image filter is not registered.
There are also two key aspects to this definition: it does not focus on technology or method, but on the user's perspective of the system experience, and it does not specify a particular human sense, \ie it can be auditory \cite{yang2022audio}, haptic \cite{bhatia2024augmenting}, or even olfactory \cite{brooks2021stereosmell} or gustatory \cite{brooks2023taste}.
Yet, most of the research has focused on visual augmentations, and the term \AR (without a prefix) is almost always understood as visual \AR.
\footnotetext{This third characteristic has been slightly adapted to use the version of \textcite{marchand2016pose}, the original definition was: \enquote{registered in \ThreeD}.}
%For example, \textcite{milgram1994taxonomy} proposed a taxonomy of \MR experiences based on the degree of mixing real and virtual environments, and \textcite{skarbez2021revisiting} revisited this taxonomy to include the user's perception of the experience.
\subsubsection{Applications of AR}
\label{ar_applications}
Advances in technology, research and development have enabled many uses of \AR, including medical, educational, industrial, navigation, collaboration and entertainment applications \cite{dey2018systematic}.
For example, \AR can provide surgery training simulations in safe conditions \cite{harders2009calibration} (\figref{harders2009calibration}), or help students learn complex concepts and phenomena such as optics or chemistry \cite{bousquet2024reconfigurable}.
It can also guide workers in complex tasks, such as assembly, maintenance or verification \cite{hartl2013mobile} (\figref{hartl2013mobile}), reinvent the way we interact with desktop computers \cite{lee2013spacetop} (\figref{lee2013spacetop}), or create completely new forms of gaming or tourism experiences \cite{roo2017inner} (\figref{roo2017inner}).
Most (visual) \AR/\VR experiences can now be implemented with commercially available hardware and software solutions, in particular for tracking, rendering and display.
Yet, the user experience in \AR is still highly dependent on the display used.
\begin{subfigs}{ar_applications}{Examples of \AR applications. }[][
\item Visuo-haptic surgery training with cutting into virtual soft tissues \cite{harders2009calibration}.
\item \AR can interactively guide users in document verification tasks by recognizing documents and comparing them with virtual references \cite{hartl2013mobile}.
\item SpaceTop is a transparent \AR desktop computer featuring direct hand manipulation of \ThreeD content \cite{lee2013spacetop}.
\item Inner Garden is a spatial \AR zen garden made of real sand visually augmented to create a mini world that can be reshaped by hand \cite{roo2017inner}.
]
\subfigsheight{41mm}
\subfig{harders2009calibration}
\subfig{hartl2013mobile}
\subfig{lee2013spacetop}
\subfig{roo2017inner}
\end{subfigs}
\subsubsection{AR Displays}
\label{ar_displays}
To experience virtual content combined and registered with the \RE, an output \UI that displays the \VE to the user is necessary.
There is a large variety of \AR displays with different methods of combining the real and virtual content, and different locations in the \RE or on the user \cite{billinghurst2015survey}.
In \emph{\VST-\AR}, the virtual images are superimposed to images of the \RE captured by a camera \cite{marchand2016pose}, and the combined real-virtual image is displayed on a screen to the user, as illustrated in \figref{itoh2022indistinguishable_vst}, \eg \figref{hartl2013mobile}.
This augmented view through the camera has the advantage of complete control over the real-virtual combination, such as mutual occlusion between real and virtual objects \cite{macedo2023occlusion}, coherent lighting and no delay between the real and virtual images \cite{kruijff2010perceptual}.
But, due to the camera and the screen, the user's view is degraded with a lower resolution, frame rate, field of view, and an overall visual latency compared to proprioception \cite{kruijff2010perceptual}.
An \emph{\OST-\AR} display directly combines the virtual images with the real-world view using a transparent optical system \cite{itoh2022indistinguishable}, like augmented glasses, as illustrated in \figref{itoh2022indistinguishable_ost}, \eg \figref{lee2013spacetop}.
These displays feature a direct, preserved view of the \RE at the cost of more difficult registration (spatial misalignment or temporal latency between the real and virtual content) \cite{grubert2018survey} and mutual real-virtual occlusion \cite{macedo2023occlusion}.
Finally, \emph{projection-based \AR} overlays the virtual images on the real world using a projector, as illustrated in \figref{roo2017one_2}, \eg \figref{roo2017inner}.
It does not require the user to wear the display, but requires a real surface to project the virtual content on, and is vulnerable to shadows created by the user or the real objects \cite{billinghurst2015survey}.
\begin{subfigs}{ar_displays}{Simplified operating diagram of \AR display methods. }[][
\item \VST-\AR \cite{itoh2022indistinguishable}.
\item \OST-\AR \cite{itoh2022indistinguishable}.
\item Spatial \AR \cite{roo2017one}.
]
\subfigsheight{44mm}
\subfig{itoh2022indistinguishable_vst}
\subfig{itoh2022indistinguishable_ost}
\subfig{roo2017one_2}
\end{subfigs}
Regardless of the \AR display, it can be placed at different locations \cite{bimber2005spatial}, as shown in \figref{roo2017one_1}.
\emph{Spatial \AR} usually refers to projection-based displays placed at a fixed location (\figref{roo2017inner}), but it can also rely on \OST or \VST \emph{fixed windows} (\figref{lee2013spacetop}).
Alternatively, \AR displays can be \emph{hand-held}, like a \VST smartphone (\figref{hartl2013mobile}), or body-attached, like a micro-projector used as a flashlight \cite{billinghurst2015survey}.
Finally, \AR displays can be head-worn like \VR \emph{headsets} or glasses, providing a highly immersive and portable experience.
%Smartphones, shipped with sensors, computing ressources and algorithms, are the most common \AR today's displays, but research and development promise more immersive and interactive \AR with headset displays \cite{billinghurst2021grand}.
\fig[0.75]{roo2017one_1}{Locations of \AR displays from eye-worn to spatially projected. Adapted by \textcite{roo2017one} from \textcite{bimber2005spatial}.}
\subsubsection{Presence and Embodiment in AR}
\label{ar_presence_embodiment}
%Despite the clear and acknowledged definition presented in \secref{ar_definition} and the viewpoint of this thesis that \AR and \VR are two type of \MR experience with different levels of mixing real and virtual environments, as presented in \secref[introduction]{visuo_haptic_augmentations}, there is still a debate on defining \AR and \MR as well as how to characterize and categorized such experiences \cite{speicher2019what,skarbez2021revisiting}.
Presence and embodiment are two key concepts that characterize the user experience in \AR and \VR.
While there is a large literature on these topics in \VR, they are less defined and studied for \AR \cite{genay2022being,tran2024survey}.
Still, these concepts are useful to design, evaluate and discuss our contributions in the next chapters.
\paragraph{Presence}
\label{ar_presence}
\AR and \VR are both essentially illusions as the virtual content does not physically exist but is just digitally simulated and rendered to the user's senses through display \UIs.
Such an experience of suspension of disbelief in \VR is what is called \emph{presence}, and it can be decomposed into two dimensions: place illusion and plausibility \cite{slater2009place}.
Place illusion is the user's sense of \enquote{being there} in the \VE (\figref{presence-vr}).
It emerges from the real-time rendering of the \VE from the user's perspective: being able to move around inside the \VE and look at it from different points of view.
Plausibility is the illusion that the virtual events are really happening, even if the user knows that they are not real.
This does not mean that the virtual events are realistic, but that they are plausible and coherent with the user's expectations.
%The \AR presence is far less defined and studied than for \VR \cite{tran2024survey}
For \AR, \textcite{slater2022separate} proposed to invert place illusion into what we can call \enquote{object illusion}, \ie the sense that the \VO \enquote{feels here} in the \RE (\figref{presence-ar}).
As in \VR, \VOs must be visible from different angles as the head moves, but they must also, more challengingly, be consistent with the \RE, \eg occlude or be occluded by real objects \cite{macedo2023occlusion}, cast shadows, or reflect lights.
Plausibility can be applied to \AR as is, but the \VOs must additionally have knowledge of the \RE and react to it accordingly.
%\textcite{skarbez2021revisiting} also named place illusion for \AR as \enquote{immersion} and plausibility as \enquote{coherence}, and these terms will be used in the remainder of this thesis.
%One main issue with presence is how to measure it both in \VR \cite{slater2022separate} and \AR \cite{tran2024survey}.
\begin{subfigs}{presence}{
The sense of immersion in virtual and augmented environments. Adapted from \textcite{stevens2002putting}.
}[][
\item Place illusion is the user's sense of \enquote{being there} in the \VE.
\item Object illusion is the sense that the \VO \enquote{feels here} in the \RE.
]
\subfigsheight{35mm}
\subfig{presence-vr}
\subfig{presence-ar}
\end{subfigs}
\paragraph{Embodiment}
\label{ar_embodiment}
The sense of embodiment is the \enquote{subjective experience of using and having a body} \cite{blanke2009fullbody}, \ie the feeling that a body is our own.
In everyday life, we are used to being, seeing and controlling our own body, but it is possible to embody a virtual body as an avatar while in \AR \cite{genay2022being} or \VR \cite{guy2023sense}.
This illusion arises when the visual, proprioceptive and (if any) haptic sensations of the virtual body are coherent \cite{kilteni2012sense}.
It can be decomposed into three subcomponents: \emph{Agency}, which is the feeling of controlling the body; \emph{Ownership}, which is the feeling that \enquote{the body is the source of the experienced sensations}; and \emph{Self-Location}, which is the \enquote{spatial experience of being inside [the] body} \cite{kilteni2012sense}.
In \AR, it could take the form of a body accessorization, \eg wearing virtual clothes or make-up in overlay, of a partial avatarization, \eg using a virtual prosthesis, or of a full avatarization \cite{genay2022being}.
\subsection{Direct Hand Manipulation in AR}
\label{ar_interaction}
A user in \AR must be able to interact with the virtual content to fulfil the second point of \textcite{azuma1997survey}'s definition (\secref{ar_definition}) and complete the interaction loop (\figref[introduction]{interaction-loop}).%, \eg through a hand-held controller, a tangible object, or even directly with the hands.
In all examples of \AR applications shown in \secref{ar_applications}, the user interacts with the \VE using their hands, either directly or through a physical interface.
\subsubsection{User Interfaces and Interaction Techniques}
\label{interaction_techniques}
For a user to interact with a computer system (desktop, mobile, \AR, etc.), they first perceive the state of the system and then act upon it through an input \UI.
Input \UIs can rely either on \emph{active sensing}, \ie a held or worn device such as a mouse, a touch screen, or a hand-held controller, or on \emph{passive sensing}, which does not require contact, such as eye trackers, voice recognition, or hand tracking \cite{laviolajr20173d}.
The information gathered from the sensors by the \UI is then translated into actions within the computer system by an \emph{interaction technique} (\figref{interaction-technique}).
For example, a cursor on a screen can be moved either with a mouse or with the arrow keys of a keyboard, and a two-finger swipe on a touchscreen can be used either to scroll or to zoom an image.
Choosing useful and efficient \UIs and interaction techniques is crucial for the user experience and the tasks that can be performed within the system.
\fig[0.5]{interaction-technique}{An interaction technique maps user inputs to actions within a computer system. Adapted from \textcite{billinghurst2005designing}.}
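As an illustration of such a mapping, the following minimal sketch translates a sensed pinch gesture into a selection action on the nearest \VO; all names, types, and thresholds are illustrative assumptions rather than the API of any particular framework:
\begin{verbatim}
import numpy as np

# Interaction technique sketch: map a sensed input (a pinch gesture)
# to an action within the system (selecting the nearest virtual object).
def pinch_select(thumb_tip, index_tip, objects, threshold=0.02):
    gap = np.linalg.norm(np.asarray(thumb_tip) - np.asarray(index_tip))
    if gap > threshold:       # fingers too far apart: no pinch, no action
        return None
    pinch_point = (np.asarray(thumb_tip) + np.asarray(index_tip)) / 2
    # The action: select the object whose center is closest to the pinch.
    return min(objects, default=None,
               key=lambda o: np.linalg.norm(o.center - pinch_point))
\end{verbatim}
The same pinch input could just as well be mapped to a different action (\eg scaling), which is precisely what distinguishes the interaction technique from the input \UI itself.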
\subsubsection{Tasks with Virtual Environments}
\label{ve_tasks}
\textcite{laviolajr20173d} classify interaction techniques into three categories based on the tasks they enable users to perform: manipulation, navigation, and system control.
\textcite{hertel2021taxonomy} proposed a taxonomy of interaction techniques specifically for immersive \AR.
The \emph{manipulation tasks} are the most fundamental tasks in \AR and \VR systems, and the building blocks for more complex interactions.
\emph{Selection} is the identification or acquisition of a specific \VO, \eg pointing at a target as in \figref{grubert2015multifi}, touching a button with a finger, or grasping an object with a hand.
\emph{Positioning} and \emph{rotation} of a selected object are the change of its position and orientation in \ThreeD space respectively.
It is also common to \emph{resize} a \VO to change its size.
These three operations are geometric manipulations of the object: they change its pose or size but not its shape.
The \emph{navigation tasks} are the movements of the user within the \VE.
Travel is the control of the position and orientation of the viewpoint in the \VE, \eg physical walking, velocity control, or teleportation.
Wayfinding is the cognitive planning of the movement, such as path finding or route following (\figref{grubert2017pervasive}).
The \emph{system control tasks} are changes to the system state through commands or menus, such as creating, deleting, or modifying \VOs (\eg as in \figref{roo2017onea}), as well as the input of text, numbers, or symbols.
\begin{subfigs}{interaction-techniques}{Interaction techniques in \AR. }[][
\item Spatial selection of a virtual item of an extended display using a hand-held smartphone \cite{grubert2015multifi}.
\item Displaying the route to follow as an overlay registered on the \RE \cite{grubert2017pervasive}.
\item Virtual drawing on a tangible object with a hand-held pen \cite{roo2017onea}.
\item Simultaneous Localization and Mapping (SLAM) algorithms such as KinectFusion \cite{newcombe2011kinectfusion} reconstruct the \RE in real time and enable the \VE to be registered in it.
]
\subfigsheight{36mm}
\subfig{grubert2015multifi}
\subfig{grubert2017pervasive}
\subfig{roo2017onea}
\subfig{newcombe2011kinectfusion}
\end{subfigs}
\subsubsection{Reducing the Real-Virtual Gap}
\label{real_virtual_gap}
In \AR and \VR, the state of the system is displayed to the user as a \ThreeD spatial \VE.
In an immersive and portable \AR system, this \VE is experienced at a 1:1 scale and as an integral part of the \RE.
The rendering gap between the real and virtual elements, as described on the interaction loop in \figref[introduction]{interaction-loop}, is thus experienced as very narrow or even not consciously perceived by the user.
This manifests as a sense of presence of the virtual, as described in \secref{ar_presence}.
As the gap between real and virtual rendering is reduced, one could expect an interaction with the \VE as seamless as with the \RE, which \textcite{jacob2008realitybased} called \emph{reality based interactions}.
As of today, an immersive \AR system tracks itself and the user in \ThreeD, using tracking sensors and pose estimation algorithms \cite{marchand2016pose}, \eg as in \figref{newcombe2011kinectfusion}.
This enables the \VE to be registered with the \RE, so that the user simply moves to navigate within the virtual content.
%This tracking and mapping of the user and \RE into the \VE is named the \enquote{extent of world knowledge} by \textcite{skarbez2021revisiting}, \ie to what extent the \AR system knows about the \RE and is able to respond to changes in it.
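A minimal sketch of this registration step, assuming both the tracked headset pose and the pose of a world-anchored \VO are available as 4-by-4 homogeneous transforms (the function and variable names are illustrative, not those of an actual \AR framework):
\begin{verbatim}
import numpy as np

def to_view_frame(T_world_object, T_world_head):
    # Registration: express a virtual object anchored in the real world
    # in the headset's view frame, given the tracked headset pose.
    T_head_world = np.linalg.inv(T_world_head)   # world -> head frame
    return T_head_world @ T_world_object         # object pose seen from the head
\end{verbatim}
Re-evaluating this transform every frame with the latest tracked pose is what keeps the \VO visually anchored in the \RE while the user moves.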
However, direct hand manipulation of virtual content is a challenge that requires specific interaction techniques \cite{billinghurst2021grand}.
It is often achieved using two interaction techniques: \emph{tangible objects} and \emph{virtual hands} \cite{billinghurst2015survey,hertel2021taxonomy}.
\subsubsection{Manipulating with Tangibles}
\label{ar_tangibles}
As \AR integrates visual virtual content into the perception of the \RE, it can involve real surrounding objects as \UIs: to visually augment them, \eg by superimposing visual textures \cite{roo2017inner} (\figref{roo2017inner}), and to use them as physical proxies to support interaction with \VOs \cite{ishii1997tangible}.
According to \textcite{billinghurst2005designing}, each \VO is coupled to a tangible object, and the \VO is physically manipulated through the tangible object, providing a direct, efficient and seamless interaction with both the real and virtual content.
This technique is similar to mapping the movements of a mouse to a virtual cursor on a screen.
Methods have been developed to automatically pair and adapt the \VOs for rendering with available tangibles of similar shape and size \cite{hettiarachchi2016annexing,jain2023ubitouch} (\figref{jain2023ubitouch}).
The issue with these \emph{space-multiplexed} interfaces is the large number and variety of tangibles required.
An alternative is to use a single \emph{universal} tangible object like a hand-held controller, such as a cube \cite{issartel2016tangible} or a sphere \cite{englmeier2020tangible}.
These \emph{time-multiplexed} interfaces require interaction techniques that allow the user to pair the tangible with any \VO, \eg by placing the tangible into the \VO and pressing the fingers \cite{issartel2016tangible} (\figref{issartel2016tangible}), similar to a real grasp (\secref{grasp_types}).
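A minimal sketch of such a time-multiplexed pairing, assuming each \VO exposes a bounding-volume test (\texttt{contains}) and that a grasp detector is available; these names are illustrative assumptions, not the implementation of \textcite{issartel2016tangible}:
\begin{verbatim}
def pair_tangible(tangible_position, grasp_detected, virtual_objects):
    # Pair the tangible with the virtual object it is currently placed in,
    # but only at the moment the user performs a grasp on the tangible.
    if not grasp_detected:
        return None
    for vo in virtual_objects:
        if vo.contains(tangible_position):   # assumed bounding-volume test
            return vo                        # this object is now manipulated
    return None
\end{verbatim}
Once paired, the \VO simply follows the tracked pose of the tangible until it is released.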
Still, the virtual visual rendering and the tangible haptic sensations can be inconsistent, especially in \OST-\AR, since the \VOs are inherently slightly transparent, allowing the paired tangibles to be seen through them.
In a pick-and-place task with tangibles of different shapes, a difference in size \cite{kahl2021investigation} (\figref{kahl2021investigation}) or shape \cite{kahl2023using} (\figref{kahl2023using}) between the tangible and the \VO did not affect user performance or presence, and small variations (\percent{\sim 10} for size) were not even noticed by the users.
This suggests the feasibility of using simplified tangibles in \AR whose spatial properties (\secref{object_properties}) abstract those of the \VOs.
Similarly, \secref{tactile_rendering} described how a material property (\secref{object_properties}) of a touched tangible can be modified using wearable haptic devices \cite{detinguy2018enhancing,salazar2020altering}: this could be used to render coherent visuo-haptic material perceptions of tangibles touched directly with the hand in \AR.
\begin{subfigs}{ar_tangibles}{Manipulating \VOs with tangibles. }[][
\item Ubi-Touch paired the movements and screw interaction of a virtual drill with a real vaporizer held by the user \cite{jain2023ubitouch}.
\item A tangible cube that can be moved into the \VE and used to grasp and manipulate \VOs \cite{issartel2016tangible}.
\item Size and
\item shape difference between a tangible and a \VO is acceptable for manipulation in \AR \cite{kahl2021investigation,kahl2023using}.
]
\subfigsheight{37.5mm}
\subfig{jain2023ubitouch}
\subfig{issartel2016tangible}
\subfig{kahl2021investigation}
\subfig{kahl2023using}
\end{subfigs}
\subsubsection{Manipulating with Virtual Hands}
\label{ar_virtual_hands}
Natural \UIs allow the user to use their body movements directly as inputs to the \VE \cite{billinghurst2015survey}.
Our hands allow us to manipulate real everyday objects with both strength and precision (\secref{grasp_types}), so virtual hand interaction techniques seem to be the most natural way to manipulate virtual objects \cite{laviolajr20173d}.
Initially tracked by active sensing devices such as gloves or controllers, hands can now be tracked in real time using cameras and computer vision algorithms natively integrated into \AR/\VR headsets \cite{tong2023survey}.
The user's hand is therefore tracked and reconstructed as a \emph{virtual hand} model in the \VE \cite{billinghurst2015survey,laviolajr20173d}.
The simplest models represent the hand as a rigid \ThreeD object that follows the movements of the real hand with \qty{6}{\DoF} (position and orientation in space) \cite{talvas2012novel}.
An alternative is to model only the fingertips (\figref{lee2007handy}) or the whole hand (\figref{hilliges2012holodesk_1}) as points.
The most common technique is to reconstruct all the phalanges of the hand in an articulated kinematic model (\secref{hand_anatomy}) \cite{borst2006spring}.
The contacts between the virtual hand model and the \VOs are then simulated using heuristic or physics-based techniques \cite{laviolajr20173d}.
Heuristic techniques use rules to determine the selection, manipulation and release of a \VO (\figref{piumsomboon2013userdefined_1}).
However, they produce unrealistic behaviour and are limited to the cases predicted by the rules.
Physics-based techniques simulate forces at the points of contact between the virtual hand and the \VO.
In particular, \textcite{borst2006spring} have proposed an articulated kinematic model in which each phalanx is a rigid body simulated with the god-object method \cite{zilles1995constraintbased}:
The virtual phalanx follows the movements of the real phalanx, but remains constrained to the surface of the virtual objects during contact.
The forces acting on the object are calculated as a function of the distance between the real and virtual hands (\figref{borst2006spring}).
More advanced techniques simulate friction phenomena \cite{talvas2013godfinger} and finger deformations \cite{talvas2015aggregate}, allowing highly accurate and realistic interactions, but they can be difficult to compute in real time.
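A minimal sketch of the spring-damper coupling underlying such physics-based approaches, in the spirit of \textcite{borst2006spring} but with purely illustrative gains and without the torque terms:
\begin{verbatim}
import numpy as np

def coupling_force(tracked_pos, virtual_pos, tracked_vel, virtual_vel,
                   k=500.0, b=5.0):
    # The force applied to the virtual object grows with the separation
    # between the tracked (real) phalanx and its virtual counterpart,
    # which remains constrained to the object's surface during contact.
    spring = k * (np.asarray(tracked_pos) - np.asarray(virtual_pos))
    damper = b * (np.asarray(tracked_vel) - np.asarray(virtual_vel))
    return spring + damper
\end{verbatim}
Stiffer gains make the virtual hand track the real hand more closely, but they can also destabilize the physics simulation.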
\begin{subfigs}{virtual-hand}{Manipulating \VOs with virtual hands. }[][
\item Fingertip tracking that allows a \VO to be selected by opening the hand \cite{lee2007handy}.
\item Physics-based hand-object manipulation with a virtual hand made of many small rigid-body spheres \cite{hilliges2012holodesk}.
\item Grasping a \VO through gestures when the fingers are detected as opposing on it \cite{piumsomboon2013userdefined}.
\item A kinematic hand model with rigid-body phalanges (in beige) that follows the real tracked hand (in green) but is kept physically constrained to the \VO. Applied forces are shown as red arrows \cite{borst2006spring}.
]
\subfigsheight{37mm}
\subfig{lee2007handy}
\subfig{hilliges2012holodesk_1}
\subfig{piumsomboon2013userdefined_1}
\subfig{borst2006spring}
\end{subfigs}
However, the lack of physical constraints on the user's hand movements makes manipulation actions tiring \cite{hincapie-ramos2014consumed}.
While the user's fingers traverse the virtual object, a physics-based virtual hand remains in contact with the object, a discrepancy that may degrade the user's performance in \VR \cite{prachyabrued2012virtual}.
Finally, in the absence of haptic feedback on each finger, it is difficult to estimate the contact and forces exerted by the fingers on the object during grasping and manipulation \cite{maisto2017evaluation,meli2018combining}.
While a visual rendering of the virtual hand in \VR can compensate for these issues \cite{prachyabrued2014visual}, the visual and haptic rendering of the virtual hand, or their combination, in \AR is under-researched.
\subsection{Visual Rendering of Hands in AR}
\label{ar_visual_hands}
In \VR, since the user is fully immersed in the \VE and cannot see their real hands, it is necessary to represent them virtually (\secref{ar_embodiment}).
When interacting with a physics-based virtual hand method (\secref{ar_virtual_hands}), the visual rendering of the virtual hand has an influence on perception, interaction performance, and preference of users \cite{prachyabrued2014visual,argelaguet2016role,grubert2018effects,schwind2018touch}.
In a pick-and-place manipulation task in \VR, \textcite{prachyabrued2014visual} and \textcite{canales2019virtual} found that the visual hand rendering whose motion was constrained to the surface of the \VOs, similar to \textcite{borst2006spring} (\enquote{Outer Hand} in \figref{prachyabrued2014visual}), performed the worst, while the visual hand rendering following the tracked human hand (thus penetrating the \VOs, \enquote{Inner Hand} in \figref{prachyabrued2014visual}) performed the best, though it was rather disliked.
\textcite{prachyabrued2014visual} also found that the best compromise was a double rendering, showing both the virtual hand and the tracked hand (\enquote{2-Hand} in \figref{prachyabrued2014visual}).
While a realistic rendering of the human hand increased the sense of ownership \cite{lin2016need}, a skeleton-like rendering provided a stronger sense of agency \cite{argelaguet2016role} (\secref{ar_embodiment}), and a minimalist fingertip rendering reduced typing errors \cite{grubert2018effects}.
A visual hand rendering in a \VE also seems to affect how one grasps an object \cite{blaga2020too} and how real bumps and holes are perceived \cite{schwind2018touch}.
\fig{prachyabrued2014visual}{Visual hand renderings affect user experience in \VR \cite{prachyabrued2014visual}.}
Conversely, a user sees their own hands in \AR, and the mutual occlusion between the hands and the \VOs is a common issue (\secref{ar_displays}), \ie hiding the \VO when the real hand is in front of it, and hiding the real hand when it is behind the \VO (\figref{hilliges2012holodesk_2}).
%For example, in \figref{hilliges2012holodesk_2}, the user is pinching a virtual cube in \OST-\AR with their thumb and index fingers, but while the index is behind the cube, it is seen as in front of it.
While in \VST-\AR, this could be solved as a masking problem by combining the real and virtual images \cite{battisti2018seamless}, \eg in \figref{suzuki2014grasping}, in \OST-\AR, this is much more difficult because the \VE is displayed as a transparent \TwoD image on top of the \ThreeD \RE, which cannot be easily masked \cite{macedo2023occlusion}.
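A minimal sketch of this masking in \VST-\AR, assuming per-pixel depth maps of the real hand and of the rendered virtual content are available in the same camera frame (names and array layouts are illustrative assumptions):
\begin{verbatim}
import numpy as np

def composite(camera_rgb, augmented_rgb, hand_depth, virtual_depth):
    # augmented_rgb: camera image with the virtual content already overlaid.
    # Wherever the real hand is closer than the virtual surface, restore
    # the raw camera pixel so the hand correctly occludes the virtual object.
    hand_in_front = hand_depth < virtual_depth            # per-pixel mask
    return np.where(hand_in_front[..., None], camera_rgb, augmented_rgb)
\end{verbatim}
No such per-pixel restoration is possible in \OST-\AR, where the real hand is seen directly through the optics and the virtual image can only add light on top of it.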
%Yet, even in \VST-\AR,
%An alternative is to render the \VOs and the virtual hand semi-transparents, so that they are partially visible even when one is occluding the other (\figref{buchmann2005interaction}).
%Although perceived as less natural, this seems to be preferred to a mutual visual occlusion in \VST-\AR \cite{buchmann2005interaction,ha2014wearhand,piumsomboon2014graspshell} and \VR \cite{vanveldhuizen2021effect}, but has not yet been evaluated in \OST-\AR.
%However, this effect still causes depth conflicts that make it difficult to determine if one's hand is behind or in front of a \VO, \eg the thumb is in front of the virtual cube, but could be perceived to be behind it.
Since the \VE is intangible, adding a visual rendering of the virtual hand in \AR that is physically constrained to the \VOs would achieve a similar result to the promising double-hand rendering of \textcite{prachyabrued2014visual}.
A \VO overlaying a tangible object in \OST-\AR can vary in size and shape without degrading user experience or manipulation performance \cite{kahl2021investigation,kahl2023using}.
This suggests that a visual hand rendering superimposed on the real hand as a partial avatarization (\secref{ar_embodiment}) might be helpful without impairing the user.
Few works have compared different visual hand renderings in \AR or with wearable haptic feedback.
Rendering the real hand as a semi-transparent hand in \VST-\AR is perceived as less natural, but it seems to be preferred to a mutual visual occlusion for interaction with real and virtual objects \cite{buchmann2005interaction,piumsomboon2014graspshell}.
%Although perceived as less natural, this seems to be preferred to a mutual visual occlusion in \VST-\AR \cite{buchmann2005interaction,ha2014wearhand,piumsomboon2014graspshell} and \VR \cite{vanveldhuizen2021effect}, but has not yet been evaluated in \OST-\AR.
Similarly, \textcite{blaga2017usability} evaluated direct hand manipulation in non-immersive \VST-\AR with a skeleton-like rendering \vs no visual hand rendering: while user performance did not improve, participants felt more confident with the virtual hand (\figref{blaga2017usability}).
%\textcite{krichenbauer2018augmented} found that participants were \percent{22} faster in immersive \VST-\AR than in \VR in the same pick-and-place manipulation task, but no visual hand rendering was used in \VR while the real hand was visible in \AR.
In a collaborative task in immersive \OST-\AR \vs \VR, \textcite{yoon2020evaluating} showed that a realistic human hand rendering was the most preferred over a low-polygon hand and a skeleton-like hand for the remote partner.
\textcite{genay2021virtual} found that the sense of embodiment with robotic hand overlays in \OST-\AR was stronger when the environment contained both real and virtual objects (\figref{genay2021virtual}).
Finally, \textcite{maisto2017evaluation} and \textcite{meli2018combining} compared the visual and haptic rendering of the hand in \VST-\AR, as detailed in the next section (\secref{vhar_rings}).
Taken together, these results suggest that a visual rendering of the hand in \AR could improve usability and performance in direct hand manipulation tasks, but the best rendering has yet to be determined.
%\cite{chan2010touching} : cues for touching (selection) \VOs.
%\textcite{saito2021contact} found that masking the real hand with a textured 3D opaque virtual hand did not improve performance in a reach-to-grasp task but displaying the points of contact on the \VO did.
%To the best of our knowledge, evaluating the role of a visual rendering of the hand displayed \enquote{and seen} directly above real tracked hands in immersive OST-AR has not been explored, particularly in the context of \VO manipulation.
\begin{subfigs}{visual-hands}{Visual hand renderings in \AR. }[][
\item Grasping a \VO in \OST-\AR with no visual hand rendering \cite{hilliges2012holodesk}.
\item Simulated mutual occlusion between the grasping hand and the \VO in \VST-\AR \cite{suzuki2014grasping}.
\item Grasping a real object with a semi-transparent hand in \VST-\AR \cite{buchmann2005interaction}.
\item Skeleton rendering overlaying the real hand in \VST-\AR \cite{blaga2017usability}.
\item Robotic rendering overlaying the real hands in \OST-\AR \cite{genay2021virtual}.
]
\subfigsheight{29.5mm}
\subfig{hilliges2012holodesk_2}
\subfig{suzuki2014grasping}
\subfig{buchmann2005interaction}
\subfig{blaga2017usability}
\subfig{genay2021virtual}
%\subfig{yoon2020evaluating}
\end{subfigs}
\subsection{Conclusion}
\label{ar_conclusion}
\AR systems integrate virtual content into the user's perception as if it were part of the \RE.
\AR headsets now enable real-time tracking of the head and hands, and high-quality display of virtual content, while being portable and mobile.
They enable highly immersive \AEs that users can explore with a strong sense of the presence of the virtual content.
However, without a direct and seamless interaction with the \VOs using the hands, the coherence of the \AE experience is compromised.
In particular, there is a lack of mutual occlusion and interaction cues between the hands and virtual content when manipulating \VOs in \OST-\AR that could be mitigated by a visual rendering of the hand.
A common alternative approach is to use tangible objects as proxies for interaction with \VOs, but this raises concerns about their consistency with the visual rendering.
In this context, the use of wearable haptic systems worn on the hand seems to be a promising solution both for improving direct hand manipulation of \VOs and for coherent visuo-haptic augmentation of touched tangible objects.

\section{Visuo-Haptic Augmentations of Hand-Object Interactions}
\label{visuo_haptic}
Everyday perception and manipulation of objects with the hand typically involves both the visual and haptic senses.
Each sense has unique capabilities for perceiving certain object properties, such as color for vision or temperature for touch, but they are equally capable for many properties, such as roughness, hardness, or geometry \cite{baumgartner2013visual}.
Both \AR and wearable haptic systems integrate virtual content into the user's perception as sensory illusions.
It is essential to understand how a multimodal visuo-haptic rendering of a \VO is perceived as a coherent object property, and how wearable haptics have been integrated with immersive \AR.%, especially in immersive \AR where the haptic actuator is moved away so as not to cover the inside of the hand.
% spatial and temporal integration of visuo-haptic feedback as perceptual cues vs proprioception and real touch sensations
% delocalized : not at the point of contact = difficult to integrate with other perceptual cues ?
\subsection{Visuo-Haptic Perception of Virtual and Augmented Objects}
\label{visuo_haptic_perception}
\subsubsection{Merging the Sensations into a Perception}
\label{sensations_perception}
A \emph{perception} is the merging of multiple sensations from different sensory modalities (visual, cutaneous, proprioceptive, etc.) about the same event or object property \cite{ernst2004merging}.
For example, it is the haptic hardness perceived through skin pressure and force sensations (\secref{hardness}), the hand movement from proprioception and a visual hand avatar (\secref{ar_displays}), or the perceived size of a tangible with a co-localized \VO (\secref{ar_tangibles}).
If the sensations are redundant, \ie if only one sensation could suffice to estimate the property, they are integrated to form a single perception \cite{ernst2004merging}.
No sensory information is completely reliable, and it may give different estimates of the same property when measured multiple times, \eg the weight of an object.
Therefore, each sensation $i$ is said to be an estimate $\tilde{s}_i$ with variance $\sigma_i^2$ of the property $s$.
The \MLE model then predicts that the integrated estimated property $\tilde{s}$ is the weighted sum of the individual sensory estimates:
\begin{equation}{MLE}
\tilde{s} = \sum_i w_i \tilde{s}_i \quad \text{with} \quad \sum_i w_i = 1
\end{equation}
where each weight $w_i$ is proportional to the inverse variance (\ie the reliability) of its estimate:
\begin{equation}{MLE_weights}
w_i = \frac{1/\sigma_i^2}{\sum_j 1/\sigma_j^2} = \frac{\sigma^2}{\sigma_i^2}
\end{equation}
and the integrated variance $\sigma^2$ is the inverse of the sum of the inverse individual variances:
\begin{equation}{MLE_variance}
\sigma^2 = \left( \sum_i \frac{1}{\sigma_i^2} \right)^{-1}
\end{equation}
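As a numerical illustration (with values chosen for clarity rather than taken from \textcite{ernst2002humans}), if the haptic estimate is twice as noisy as the visual one, $\sigma_h = 2\sigma_v$, then $w_v = \frac{1/\sigma_v^2}{1/\sigma_v^2 + 1/(4\sigma_v^2)} = 0.8$ and $w_h = 0.2$: the integrated percept is dominated by vision, and its variance $\sigma^2 = 0.8\,\sigma_v^2$ is lower than that of either sense alone.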
This was demonstrated by \textcite{ernst2002humans} in a user study where participants estimated the height of a virtual bar using a fixed-window \OST-\AR display (\secref{ar_displays}) and force-feedback devices worn on the thumb and index finger (\secref{wearability_level}), as shown in \figref{ernst2002humans_setup}.
%They first measured the individual variances of the visual and haptic estimates (\figref{ernst2002humans_within}) and then the combined variance of the visuo-haptic estimates (\figref{ernst2002humans_visuo-haptic}).
On each trial, participants compared the visuo-haptic reference bar (of a fixed height) to a visuo-haptic comparison bar (of a variable height) in a \TIFC task (one bar is tested first, a pause, then the other) and indicated which was taller.
The reference bar had different conflicting visual $s_v$ and haptic $s_h$ heights, and different noise levels were added to the visual feedback to increase its variance.
The objective was to determine a \PSE between the comparison and reference bars, where the participant was equally likely to choose one or the other (\percent{50} of the trials).
%\figref{ernst2002humans_within} shows the discrimination of participants with only the haptic or visual feedback, and how much the estimation becomes difficult (thus higher variance) when noise is added to the visual feedback.
\figref{ernst2004merging_results} shows that when the visual noise was low, the visual feedback had more weight, but as visual noise increased, haptic feedback gained more weight, as predicted by the \MLE model.
\begin{subfigs}{ernst2002humans}{
Visuo-haptic perception of height of a virtual bar \cite{ernst2002humans}.
}[][
\item Experimental setup.%: Participants estimated height visually with an \OST-\AR display and haptically with force-feedback devices worn on the thumb and index fingers.
%\item with only haptic feedback (red) or only visual feedback (blue, with different added noise),
%\item combined visuo-haptic feedback (purple, with different visual noises).
\item Proportion of trials (vertical axis) in which the comparison bar (horizontal axis) was perceived as taller than the reference bar, as a function of the increasing variance (inverse reliability) of the visual feedback (colors).
The reference had different conflicting visual $s_v$ and haptic $s_h$ heights.
]
\subfig[.34]{ernst2002humans_setup}
\subfig[.64]{ernst2004merging_results}
%\subfig{ernst2002humans_within}
%\subfig{ernst2002humans_visuo-haptic}
\end{subfigs}
%Hence, the \MLE model explains how a (visual) \VO in \AR can be perceived as coherent when combined with real haptic sensations of a tangible or a wearable haptic feedback.
The \MLE model implies that, when seeing and touching a \VO in \AR, the combination of visual and haptic stimuli presented to the user, whether real or virtual, can be perceived as a single coherent object property.
%As long as the user is able to associate the sensations as the same object property, and even if there are discrepancies between the sensations, the overall perception can be influenced by changing one of the stimuli, as discussed in the next sections.
%for example by including tangible objects, wearable haptic feedback, or even by altering the visual rendering of the \VO, as discussed in the next sections.
\subsubsection{Influence of Visual Rendering on Tangible Perception}
\label{visual_haptic_influence}
Thus, a visuo-haptic perception of an object's property is robust to some difference between the two sensory modalities, as long as one can match their respective sensations to the same property.
In particular, the texture perception of everyday objects is known to be constructed from both vision and touch \cite{klatzky2010multisensory}.
More precisely, when surfaces are evaluated by vision or touch alone, both senses discriminate their materials mainly by the same properties of roughness, hardness, and friction, and with similar performance \cite{bergmanntiest2007haptic,baumgartner2013visual,vardar2019fingertip}.
The overall perception can then be modified by changing one of the sensory modalities.
\textcite{yanagisawa2015effects} altered the perceived roughness, stiffness, and friction of real tactile materials touched by the finger by superimposing different real visual textures using a half-mirror.
In a similar setup, but in immersive \VST-\AR, \textcite{kitahara2010sensory} overlaid visual textures on real textured surfaces touched through a glove: many visual textures were found to match the real haptic textures.
\textcite{degraen2019enhancing} and \textcite{gunther2022smooth} also combined multiple \VOs in \VR with \ThreeD-printed hair structures or with everyday real surfaces, respectively.
They found that the visual perception of roughness and hardness influenced the haptic perception, and that only a few tangibles seemed to be sufficient to match all the visual \VOs (\figref{gunther2022smooth}).
%Taken together, these studies suggest that a set of haptic textures, real or virtual, can be perceived as coherent with a larger set of visual virtual textures.
\fig{gunther2022smooth}{In a passive touch context in \VR, only a smooth and a rough real surface were found to be enough to match all the visual \VOs \cite{gunther2022smooth}.}
Visual feedback can even be intentionally designed to influence haptic perception, usually by deforming the visual representation of a user input, creating a \enquote{pseudo-haptic feedback} \cite{ujitoko2021survey}.
For example, in a fixed \VST-\AR screen (\secref{ar_displays}), by visually deforming the geometry of a tangible object touched by the hand, as well as the touching hand, the visuo-haptic perception of the size, shape, or curvature can be altered \cite{ban2013modifying,ban2014displaying}.
\textcite{punpongsanon2015softar} used this technique in spatial \AR (\secref{ar_displays}) to induce a softness illusion of a hard tangible object by superimposing a virtual texture that deforms when pressed by the hand (\figref{punpongsanon2015softar}).
\textcite{ujitoko2019modulating} increased the perceived roughness of a virtual patterned texture rendered as vibrations through a hand-held stylus (\secref{texture_rendering}) by adding small oscillations to the visual feedback of the stylus on a screen.
\begin{subfigs}{pseudo_haptic}{Pseudo-haptic feedback in \AR. }[][
\item A virtual soft texture projected on a table that deforms when pressed by the hand \cite{punpongsanon2015softar}.
\item Visually deforming a tangible object and the hand touching it in \VST-\AR to modify its perceived shape \cite{ban2014displaying}.
]
\subfigsheight{42mm}
\subfig{punpongsanon2015softar}
\subfig{ban2014displaying}
\end{subfigs}
%In all of these studies, the visual expectations of participants influenced their haptic perception.
%In particular, in \AR and \VR, the perception of a haptic rendering or augmentation can be influenced by the visual rendering of the \VO.
\subsubsection{Perception of Visuo-Haptic Rendering in AR and \VR}
\label{AR_vs_VR}
Some studies have investigated the visuo-haptic perception of \VOs rendered with force-feedback and vibrotactile feedback in \AR and \VR.
In an immersive \VST-\AR setup, \textcite{knorlein2009influence} rendered a virtual piston using force-feedback haptics that participants pressed directly with their hand (\figref{visuo-haptic-stiffness}).
In a \TIFC task (\secref{sensations_perception}), participants pressed two pistons and indicated which was stiffer.
One had a reference stiffness but an additional visual or haptic delay, while the other varied with a comparison stiffness but had no delay.\footnote{Participants were not told about the delays and stiffness tested, nor which piston was the reference or the comparison. The order of the pistons (which one was pressed first) was also randomized.}
Adding a visual delay increased the perceived stiffness of the reference piston, while adding a haptic delay decreased it, and adding both delays cancelled each other out (\figref{knorlein2009influence_2}).
\begin{subfigs}{visuo-haptic-stiffness}{
Perception of haptic stiffness in \VST-\AR \cite{knorlein2009influence}.
}[][
\item Participant pressing a virtual piston rendered by a force-feedback device with their hand.
\item Proportion of comparison piston perceived as stiffer than reference piston (vertical axis) as a function of the comparison stiffness (horizontal axis) and visual and haptic delays of the reference (colors).
]
\subfig[.44]{knorlein2009influence_1}
\subfig[.55]{knorlein2009influence_2}
\end{subfigs}
%explained how these delays affected the integration of the visual and haptic perceptual cues of stiffness.
The stiffness $\tilde{k}(t)$ of the piston is indeed estimated at time $t$ by both sight and proprioception as the ratio of the exerted force $F(t)$ to the displacement $D(t)$ of the piston, following \eqref{stiffness}, but with potential visual $\Delta t_v$ or haptic $\Delta t_h$ delays:
\begin{equation}{stiffness_delay}
\tilde{k}(t) = \frac{F(t - \Delta t_h)}{D(t - \Delta t_v)}
\end{equation}
Therefore, the perceived stiffness $\tilde{k}(t)$ decreases with a haptic delay on the force and increases with a visual delay on the displacement \cite{diluca2011effects}.
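As a simple illustration (assuming an idealized constant-velocity press, which is not how \textcite{knorlein2009influence} modeled their data), with $D(t) = v\,t$ and $F(t) = k\,v\,t$, the estimate becomes $\tilde{k}(t) = k\,\frac{t - \Delta t_h}{t - \Delta t_v}$, which is greater than $k$ with a visual delay only ($\Delta t_v > 0$, $\Delta t_h = 0$) and smaller than $k$ with a haptic delay only, consistent with the results above.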
In a similar \TIFC user study, participants compared perceived stiffness of virtual pistons in \OST-\AR and \VR \cite{gaffary2017ar}.
However, the force-feedback device and the participant's hand were not visible (\figref{gaffary2017ar}).
The reference piston was judged to be stiffer when seen in \VR than in \AR, without participants noticing this difference, and more force was exerted on the piston overall in \VR.
This suggests that the haptic stiffness of \VOs feels \enquote{softer} in an \AE than in a full \VE.
%Two differences that could be worth investigating with the two previous studies are the type of \AR (visuo or optical) and to see the hand touching the \VO.
\begin{subfigs}{gaffary2017ar}{Perception of haptic stiffness in \OST-\AR \vs \VR \cite{gaffary2017ar}. }[][
\item Experimental setup: a virtual piston was pressed through a force-feedback device placed to the side of the participant.
\item View of the virtual piston seen in front of the participant in \OST-\AR and
\item in \VR.
]
\subfig[0.35]{gaffary2017ar_1}
\subfig[0.3]{gaffary2017ar_3}
\subfig[0.3]{gaffary2017ar_4}
\end{subfigs}
Finally, \textcite{diluca2019perceptual} investigated the perceived simultaneity of visuo-haptic contact with a \VO in \VR.
The contact was rendered both by a vibrotactile piezoelectric device on the fingertip and by a visual change in the color of the \VO.
The visuo-haptic simultaneity was varied by adding a visual delay or by triggering the haptic feedback earlier.
No participant (out of 19) was able to detect a \qty{50}{\ms} visual lag and a \qty{15}{\ms} haptic lead, and only half of them detected a \qty{100}{\ms} visual lag and a \qty{70}{\ms} haptic lead.
These studies have shown how the latency of the visual rendering of a \VO or the type of environment (\VE or \RE) can affect the perceived haptic stiffness of the object, rendered with a grounded force-feedback device.
We describe in the next section how wearable haptics have been integrated with immersive \AR.
\subsection{Wearable Haptics for Direct Hand Interaction in AR}
\label{vhar_haptics}
A few wearable haptic devices have been specifically designed or experimentally tested for direct hand interaction in immersive \AR.
Since virtual or augmented objects are naturally touched, grasped, and manipulated directly with the fingertips (\secref{exploratory_procedures} and \secref{grasp_types}), the main challenge of wearable haptics for \AR is to provide haptic sensations of these interactions while keeping the fingertips free to interact with the \RE.
Several approaches have been proposed to move the haptic actuator to a different location on the hand.
Yet, they differ greatly in the actuators used (\secref{wearable_haptic_devices}), thus the haptic feedback (\secref{tactile_rendering}), and the placement of the haptic rendering.
Other wearable haptic actuators have been proposed for \AR, but are not discussed here.
A first reason is that they permanently cover the fingertip and affect the interaction with the \RE, such as thin-skin tactile interfaces \cite{withana2018tacttoo,teng2024haptic} or fluid-based interfaces \cite{han2018hydroring}.
Another category of actuators relies on systems that cannot be considered portable, such as REVEL \cite{bau2012revel}, which provides friction sensations through reverse electrovibration but requires modifying the real objects to be augmented, or Electrical Muscle Stimulation (EMS) devices \cite{lopes2018adding}, which provide kinesthetic feedback by contracting the muscles.
\subsubsection{Nail-Mounted Devices}
\label{vhar_nails}
\textcite{ando2007fingernailmounted} were the first to propose moving the actuator from the fingertip to the nail, as described in \secref{texture_rendering}.
This approach was later extended by \textcite{teng2021touch} with Touch\&Fold, a haptic device able to unfold its end-effector on demand to make contact with the fingertip when touching \VOs (\figref{teng2021touch}).
This moving platform also contains a \LRA (\secref{moving_platforms}) and provides contact pressure and texture sensations.
%The whole system is very compact (\qtyproduct{24 x 24 x 41}{\mm}), lightweight (\qty{9.5}{\g}), and fully portable by including a battery and Bluetooth wireless communication. \qty{20}{\ms} for the Bluetooth
When touching \VOs in \OST-\AR with the index finger, this device was found to be more realistic overall (5/7) than vibrations with a \LRA at \qty{170}{\Hz} on the nail (3/7).
Still, there is a high (\qty{92}{\ms}) latency for the folding mechanism and this design is not suitable for augmenting real tangible objects.
% teng2021touch: (5.27+3.03+5.23+5.5+5.47)/5 = 4.9
% ando2007fingernailmounted: (2.4+2.63+3.63+2.57+3.2)/5 = 2.9
With Fingeret, \textcite{maeda2022fingeret} adapted the belt actuators (\secref{belt_actuators}) as a \enquote{finger-side actuator} that leaves the fingertip free (\figref{maeda2022fingeret}).
Two rollers, one on each side, can deform the skin: When rotated inward, they pull the skin, simulating a contact sensation, and when rotated outward, they push the skin, simulating a release sensation.
They can also simulate a texture sensation by rapidly rotating in and out.
%The device is also very compact (\qty{60 x 25 x 36}{\mm}), lightweight (\qty{18}{\g}), and portable with a battery and Bluetooth wireless communication with \qty{83}{\ms} latency.
In a user study not in \AR, but directly touching images on a tablet, Fingeret was found to be more realistic (4/7) than a \LRA at \qty{100}{\Hz} on the nail (3/7) for rendering buttons and a patterned texture (\secref{texture_rendering}), but not different from vibrations for rendering high-frequency textures (3.5/7 for both).
However, as with \textcite{teng2021touch}, finger speed was not taken into account when rendering vibrations, which may have been detrimental to texture perception, as described in \secref{texture_rendering}.
Finally, \textcite{preechayasomboon2021haplets} (\figref{preechayasomboon2021haplets}) and \textcite{sabnis2023haptic} designed Haplets and Haptic Servo, respectively: very compact and lightweight vibrotactile \LRA devices designed to provide both integrated finger motion sensing and very low latency haptic feedback (\qty{<5}{\ms}).
However, no proper user study has been conducted to evaluate these devices in \AR.
\begin{subfigs}{ar_wearable}{Nail-mounted wearable haptic devices designed for \AR. }[][
%\item A voice-coil rendering a virtual haptic texture on a real sheet of paper \cite{ando2007fingernailmounted}.
\item Touch\&Fold provides contact pressure and vibrations on demand to the fingertip \cite{teng2021touch}.
\item Fingeret is a finger-side wearable haptic device that pulls and pushes the fingertip skin \cite{maeda2022fingeret}.
\item Haplets is a very compact nail device with integrated sensing and vibrotactile feedback \cite{preechayasomboon2021haplets}.
]
\subfigsheight{33mm}
%\subfig{ando2007fingernailmounted}
\subfig{teng2021touch}
\subfig{maeda2022fingeret}
\subfig{preechayasomboon2021haplets}
\end{subfigs}
\subsubsection{Belt Devices}
\label{vhar_rings}
The haptic ring belt devices of \textcite{minamizawa2007gravity} and \textcite{pacchierotti2016hring}, presented in \secref{belt_actuators}, have been used to improve the manipulation of \VOs in \AR (\secref{ar_interaction}).
Recall that these devices have also been used to modify the perceived stiffness, softness, friction and localized bumps and holes on smooth real surfaces (\secref{hardness_rendering}) \cite{detinguy2018enhancing,salazar2020altering}, but have not been tested in \AR.
In a \VST-\AR setup, \textcite{scheggi2010shape} explored the effect of rendering the weight of a virtual cube, placed on a real surface and held with the thumb, index, and middle fingers (\figref{scheggi2010shape}).
The middle phalanx of each of these fingers was equipped with a haptic ring of \textcite{minamizawa2007gravity}.
%However, no proper user study was conducted to evaluate this feedback.% on the manipulation of the cube.
%that simulated the weight of the cube.
%A virtual cube that could push on the cube was manipulated with the other hand through a force-feedback device.
\textcite{scheggi2010shape} report that 12 out of 15 participants found the weight haptic feedback essential to feeling the presence of the virtual cube.
In a pick-and-place task in non-immersive \VST-\AR involving direct hand manipulation of both virtual and real objects (\figref{maisto2017evaluation}), \textcite{maisto2017evaluation} and \textcite{meli2018combining} compared the effects of providing haptic or visual feedback about fingertip-object contacts.
They compared the haptic ring of \textcite{pacchierotti2016hring} on the proximal phalanx, the moving platform of \textcite{chinello2020modular} on the fingertip, and a visual rendering of the tracked fingertips as virtual points.
They showed that the haptic feedback improved the completion time and reduced the force exerted on the cubes compared to the visual feedback (\figref{maisto2017evaluation}).
The haptic ring was also perceived as more effective than the moving platform.
However, the measured difference in performance could be due to the device, the device position (proximal \vs fingertip), or both.
These two studies were also conducted in non-immersive setups, where users viewed a screen displaying the visual interactions, and only compared the haptic and visual rendering of the hand-object contacts, but did not examine them together.
\begin{subfigs}{ar_rings}{Wearable haptic ring devices for \AR. }[][
\item Rendering weight of a virtual cube placed on a real surface \cite{scheggi2010shape}.
\item Rendering the contact force exerted by the fingers on a virtual cube \cite{maisto2017evaluation,meli2018combining}.
]
\subfigsheight{57mm}
\subfig{scheggi2010shape}
\subfig{maisto2017evaluation}
\end{subfigs}
%\subsubsection{Wrist Bracelet Devices}
%\label{vhar_bracelets}
With their \enquote{Tactile And Squeeze Bracelet Interface} (Tasbi), already mentioned in \secref{belt_actuators}, \textcite{pezent2019tasbi} and \textcite{pezent2022design} explored the use of a wrist-worn bracelet actuator.
It is capable of providing a uniform pressure sensation (up to \qty{15}{\N} and \qty{10}{\Hz}) and vibration with six \LRAs (\qtyrange{150}{200}{\Hz} bandwidth).
A user study was conducted in \VR to compare the perception of visuo-haptic stiffness rendering \cite{pezent2019tasbi}, and showed that the haptic pressure feedback was more important than the visual displacement.
%In a \TIFC task (\secref{sensations_perception}), participants pressed a virtual button with different levels of stiffness via a virtual hand constrained by the \VE (\figref{pezent2019tasbi_2}).
%A higher visual stiffness required a larger physical displacement to press the button (C/D ratio, see \secref{pseudo_haptic}), while the haptic stiffness control the rate of the pressure feedback when pressing.
%When the visual and haptic stiffness were coherent or when only the haptic stiffness changed, participants easily discriminated two buttons with different stiffness levels (\figref{pezent2019tasbi_3}).
%However, if only the visual stiffness changed, participants were not able to discriminate the different stiffness levels (\figref{pezent2019tasbi_4}).
%This suggests that in \VR, the haptic pressure is more important perceptual cue than the visual displacement to render stiffness.
%A short vibration (\qty{25}{\ms} \qty{175}{\Hz} square-wave) was also rendered when contacting the button, but kept constant across all conditions: It may have affected the overall perception when only the visual stiffness changed.
%\begin{subfigs}{pezent2019tasbi}{Visuo-haptic stiffness rendering of a virtual button in \VR with the Tasbi bracelet. }[][
% \item The \VE seen by the user: the virtual hand (in beige) is constrained by the virtual button. The displacement is proportional to the visual stiffness. The real hand (in green) is hidden by the \VE.
% \item When the rendered visuo-haptic stiffness are coherents (in purple) or only the haptic stiffness change (in blue), participants easily discrimated the different levels.
% \item When varying only the visual stiffness (in red) but keeping the haptic stiffness constant, participants were not able to discriminate the different stiffness levels.
% ]
% \subfigsheight{45mm}
% \subfig{pezent2019tasbi_2}
% \subfig{pezent2019tasbi_3}
% \subfig{pezent2019tasbi_4}
%\end{subfigs}
% \cite{sarac2022perceived,palmer2022haptic} not in \AR but studies on relocating to the wrist the haptic feedback of the fingertip-object contacts.
%\subsection{Conclusion}
%\label{visuo_haptic_conclusion}

\section{Conclusion}
\label{conclusion}
Haptic perception and manipulation of objects with the hand involves exploratory movements or grasp types, respectively, with simultaneous sensory feedback from multiple cutaneous and kinaesthetic receptors embedded beneath the skin.
These receptors provide sensory cues about the physical properties of objects, such as roughness and hardness, which are then integrated to form a perception of the property being explored.
Perceptual constancy is possible in the absence of one cue by compensating with others.
Haptic systems aim to provide virtual interactions and sensations similar to those with real objects.
Only a few can be considered wearable due to their compactness and portability, but they are limited to cutaneous feedback.
If their haptic rendering is associated in a timely manner with the user's touch actions on a real object, the perceived haptic properties of the object, such as its roughness and hardness, can be modified.
Wearable haptic augmentation is mostly achieved with vibrotactile feedback.
\AR headsets integrate virtual content immersively into the user's perception as if it were part of the \RE, with real-time tracking of the head and hands.
However, direct hand interaction and manipulation of \VOs is difficult due to the lack of haptic feedback and of mutual occlusion rendering between the hand and the \VO, which could be improved by a visual rendering of the hand.
Tangibles are also used as proxies for manipulating \VOs, but, being haptically passive, they can be inconsistent with the visual rendering.
Wearable haptics on the hand is a promising solution for improving direct hand manipulation of \VOs and for coherent visuo-haptic augmentation of tangibles.
Providing coherent multimodal visuo-haptic feedback to enhance direct hand perception and manipulation with \VOs in immersive \AR is challenging.
While many wearable haptic devices have been developed and are capable of providing varied tactile feedback, few have been integrated or experimentally evaluated for direct hand interaction in \AR.
Their haptic end-effector must be moved away from the inside of the hand so as not to interfere with the user interaction with the \RE.
Different relocation strategies have been proposed for different parts of the hand, such as the nail, the index phalanges, or the wrist, but it remains unclear whether any of them are best suited for direct hand interaction in \AR.
In all cases, the real and virtual visual sensations are considered co-localized, but the virtual haptic feedback is not.
Such a discrepancy may affect the user's perception and experience and should be further investigated.
When integrating different sensory feedback, haptic and visual, real and virtual, into a single object property, perception is robust to variations in reliability and to spatial and temporal differences.
However, the same haptic rendering or augmentation can be influenced by the user's visual expectation or the visual rendering of the \VO.
