@inproceedings{iber_mind_2021, title = {Mind the {Steps}: {Towards} {Auditory} {Feedback} in {Tele}-{Rehabilitation} {Based} on {Automated} {Gait} {Classification}}, doi = {10/gnt2tc}, abstract = {We describe a proof-of-concept for the implementation of a mobile auditory biofeedback system based on automated classification of functional gait disorders. The classification is embedded in a sensor-instrumented insole and is based on ground reaction forces (GRFs). GRF data have been successfully used for the classification of gait patterns into clinically relevant classes and are frequently used in clinical practice to quantitatively describe human motion. A feed-forward neural network that was implemented on the firmware of the insole is used to estimate the GRFs using pressure and acceleration data. Compared to GRF measurements obtained from force plates, the estimated GRFs were highly accurate. To distinguish between physiological gait and gait disorders, we trained and evaluated a support vector machine with labeled data from a publicly accessible database. The automated gait classification was sonified for auditory feedback. The high potential of the implemented auditory feedback for preventive and supportive applications in physical therapy, such as supervised therapy settings and tele-rehabilitation, was highlighted by a semi-structured interview with two experts.}, booktitle = {{Proceedings} of the 16th {International} {Audio} {Mostly} {Conference} ({AM}’21)}, publisher = {ACM}, author = {Iber, Michael and Dumphart, Bernhard and Oliveira, Victor A. de J. and Ferstl, Stefan and Reis, Joschua and Slijepcevic, Djordje and Heller, Mario and Raberger, Anna-Maria and Horsak, Brian}, year = {2021}, note = {Projekt: Sonigait II}, keywords = {Artificial Intelligence, Biofeedback, Biomechanics, CDHI, Digital Health, Forschungsgruppe Media Computing, Gait Analysis, Human-computer interaction, Institut für Creative Media Technologies, Institut für Gesundheitswissenschaften, Motor rehabilitation, Vortrag, Wiss. Beitrag, best, best-lbiber, peer-reviewed}, } @inproceedings{de_jesus_oliveira_requirements_2022, address = {Aarhus, Denmark}, title = {Requirements and {Concepts} for {Interactive} {Media} {Retrieval} {User} {Interfaces}}, copyright = {CC BY}, url = {https://dl.acm.org/doi/10.1145/3546155.3546701}, doi = {10.1145/3546155.3546701}, abstract = {Large amounts of multimedia data are constantly digitized and stored in archives. Accurate search and annotation tools are essential for the fast retrieval of archival records by archivists, scientists, and the general public. The complexity of processing and navigating large collections evidences the demand for solutions that are tailored to the needs of diverse target groups. In this paper, we investigate the requirements for multimedia search and annotation tools. After identifying examples of graphical user interfaces and visualization techniques to support navigating and annotating audiovisual content in archives, we performed iterative user research. Based on expert interviews, focus groups, and surveys, we propose a series of requirements and concept ideas for user interfaces aimed at quality control and AI-assisted search of multimedia data.
Results also show that open challenges and needs include the definition of tailored ontologies to describe archival multimedia data.}, language = {en}, booktitle = {Nordic {Human}-{Computer} {Interaction} {Conference} ({NordiCHI} '22)}, publisher = {ACM}, author = {de Jesus Oliveira, Victor Adriel and Rottermanner, Gernot and Größbacher, Stefanie and Boucher, Magdalena and Judmaier, Peter}, year = {2022}, note = {Projekt: TailoredMedia}, keywords = {Forschungsgruppe Media Computing, Human-Computer Interaction, Institut für Creative Media Technologies, Vortrag, peer-reviewed}, pages = {1--10}, } @misc{rind_daten_2022, address = {St. Pölten}, type = {Demo \& {Poster}}, title = {Daten erlebbar machen: {Wie} {Daten} sichtbar, hörbar und greifbar werden [{Making} data experienceable: {How} data becomes visible, audible, and tangible]}, abstract = {In which media outlets does the Federal Chancellery advertise? Which countries export the most weapons systems? What is the state of mental health in EU countries? With increasing digitization, the amount of available data is growing rapidly. But what can it reveal about our world? Clear representations are needed to make large amounts of data easier to grasp. Here you can find out which visual, sonic, and physical methods can be used to represent data. The program "netflower", which allows money flows between public institutions and media to be clearly displayed and compared, is one of the examples of interactive visualizations at our station. These are pictorial representations of data in which viewers can decide for themselves which parts they want to look at or explore in more detail. Visitors at this station can also experience data through other senses, hearing it as sonification and grasping it as physicalization. In addition, we show how data comics ease the first steps into data analysis and how sensor data are visualized in Extended Reality.}, author = {Rind, Alexander and Aigner, Wolfgang and Böck, Julia and Grassinger, Florian and Oliveira, Victor A. de J. and Wu, Hsiang-Yun and Zauchinger, Michael}, month = sep, year = {2022}, note = {Projekt: SoniVis Projekt: SEVA Projekt: VALID Projekt: TransSoDia Projekt: Dataskop}, keywords = {Demo, Departement Medien und Digitale Technologien, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Poster, Visualization}, } @inproceedings{oliveira_tactile_2014, title = {Tactile interface for navigation in underground mines}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84910092878&doi=10.1109%2fSVR.2014.16&partnerID=40&md5=464d82be6a09a1053f73b72b971ec032}, doi = {10/gh376t}, abstract = {This paper presents the design and evaluation of a tactile vocabulary to aid navigation in an underground mine. We studied different ways to construct tactile vocabularies and assessed several tactile icons to aid navigation. After trying a dozen stimuli families, we selected tactons based on the users’ ability to perceive and process them during navigation in virtual environments to design a more usable tactile interface. Then, we performed a user experiment in a virtual simulation of an emergency situation in an underground mine. The user study shows that the tactile feedback facilitated the execution of the task. Also, the perceptual adjustment of the tactile vocabulary increased its usability as well as the memorization of its signals.
© 2014 IEEE.}, booktitle = {Proceedings - 2014 16th {Symposium} on {Virtual} and {Augmented} {Reality}, {SVR} 2014}, author = {Oliveira, Victor A. de J. and Marques, E and De Lemos Peroni, R and Maciel, A}, year = {2014}, keywords = {Extern, Navigation, User experiments, Vibrotactile, Virtual reality, user study}, pages = {230--237}, } @inproceedings{oliveira_applying_2014, title = {Applying tactile languages for {3D} navigation}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84900304917&doi=10.1109%2f3DUI.2014.6798878&partnerID=40&md5=020778fff12f3dc4bd96ff8c523343b3}, doi = {10/gh3755}, abstract = {In this paper we present the design and evaluation of alternative tactile vocabularies to support navigation in 3D environments. We focused on the expressiveness of tactile communication by applying a prefixation approach in the construction of the tactile icons. We conducted user experiments to analyze the effects of both prefixation and the use of tactile sequences on the user’s performance in a navigation task. Results show that the group that used the prefixation-based vocabulary performed better. © 2014 IEEE.}, booktitle = {{IEEE} {Symposium} on {3D} {User} {Interfaces} 2014, {3DUI} 2014 - {Proceedings}}, author = {Oliveira, Victor A. de J. and Maciel, A}, year = {2014}, keywords = {Extern, Navigation, Navigation aids, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {177--178}, } @article{oliveira_assessment_2014, title = {Assessment of tactile languages as navigation aid in {3D} environments}, volume = {8619}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84910114998&doi=10.1007%2f978-3-662-44196-1_14&partnerID=40&md5=398f305bb4a777ffeb9231ac14eef383}, doi = {10/gh3756}, abstract = {In this paper we present the design and evaluation of alternative tactile vocabularies to support navigation in 3D environments. We focused on the expressiveness of tactile communication by applying a prefixation approach in the construction of the tactile icons. We conducted user experiments to analyze the effects of both prefixation and the use of tactile sequences on the user’s performance in a navigation task. Results show that, even if tactile sequences are more difficult to process during the navigation task, the prefixed patterns were easier to learn in all assessed vocabularies. © Springer-Verlag Berlin Heidelberg 2014.}, journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, author = {Oliveira, Victor A. de J. and Maciel, A}, year = {2014}, keywords = {Extern, Navigation, Navigation aids, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {104--111}, } @article{oliveira_assessing_2018, title = {Assessing {Articulatory} {Modalities} for {Intercommunication} {Using} {Vibrotactile} {HMDs}}, volume = {10894 LNCS}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85048674028&doi=10.1007%2f978-3-319-93399-3_45&partnerID=40&md5=466d8d1de8a0a276b70457cd15f36c55}, doi = {10/gh3757}, abstract = {In computer-mediated tactile intercommunication, users not only have to perceive tactile cues but also have to articulate them to carry out a two-way interaction. By pressing buttons or performing specific gestures, interlocutors can exchange tactile signals but are not able to extrapolate the given vocabulary. When more access to hardware parameters is provided instead, interlocutors can have more autonomy.
Yet, changes in articulation might produce tactile signals that are not perceptually suitable, hindering mutual understanding during intercommunication. In this paper, we explore the trade-off between freedom of articulation and mutual understanding by comparing three articulatory approaches. Dyads performed a collaborative task using their vibrotactile HMDs to communicate. Their performance during the task, as well as mutual understanding, workload and easiness, were assessed as a function of each articulatory condition. Results show that static and mediating conditions support higher performance and mutual understanding compared to a dynamic articulation. © Springer International Publishing AG, part of Springer Nature 2018.}, journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, author = {Oliveira, Victor A. de J. and Nedel, L and Maciel, A}, year = {2018}, keywords = {Collaborative virtual environment, Extern, Head mounted displays, Multisensory interaction, Navigation, Navigation aids, Sensory perception, Spatial awareness, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {526--538}, } @article{oliveira_anti-veering_2018, title = {Anti-{Veering} {Vibrotactile} {HMD} for {Assistance} of {Blind} {Pedestrians}}, volume = {10894 LNCS}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85048682856&doi=10.1007%2f978-3-319-93399-3_43&partnerID=40&md5=5c81b796a71328017905d6d323ce5f47}, doi = {10/gh3759}, abstract = {Veering is a common experience for blind pedestrians and for individuals walking in unfamiliar spaces. In this paper, we assess a vibrotactile Head-Mounted Display to assist blind individuals to walk straight from one point to another. Our goal was to assess such a device for both assistance and self-Orientation and Mobility (O\&M) training to provide more autonomy to blind pedestrians. Blind and blindfolded subjects performed a series of assisted and non-assisted sessions to verify how deviation errors are modulated according to the use of the device. Moreover, the vibrotactile feedback was compared to audible walking signals commonly present in many road-crossing scenarios, as well as in traditional O\&M sessions. Performance and subjective measures were assessed as a function of stimulus modality and group profile. Results show that the vibrotactile feedback significantly reduces the veering for both sighted and blind subjects. © Springer International Publishing AG, part of Springer Nature 2018.}, journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, author = {Oliveira, Victor A. de J. and Nedel, L and Maciel, A and Brayda, L}, year = {2018}, keywords = {Anti-veering, Assistive technology, Blind individuals, Extern, Haptics, Head mounted displays, Spatial coding, Tactile guidances}, pages = {500--512}, } @inproceedings{oliveira_design_2017, title = {Design and assessment of haptic interfaces: {An} essay on proactive haptic articulation}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85018416745&doi=10.1109%2fVR.2017.7892350&partnerID=40&md5=514f7056d6dee184b57ecbdbcf706df1}, doi = {10/gh3753}, abstract = {We looked to elements present in speech articulation to introduce proactive haptic articulation as a novel approach for intercommunication.
The ability to use a haptic interface as a tool for implicit communication can supplement communication and support near and remote collaborative tasks in virtual and physical environments. In addition, the proactive articulation can be applied during the design process, involving the user in the construction of more dynamic and optimized vibrotactile vocabularies. In this proposal, we discuss the thesis of proactive haptic communication and our method to assess and implement it. Our goal is to understand the phenomena related to the proactive articulation of haptic signals and its use for communication and for the design of optimized tactile vocabularies. © 2017 IEEE.}, booktitle = {Proceedings - {IEEE} {Virtual} {Reality}}, author = {Oliveira, Victor A. de J.}, year = {2017}, keywords = {Extern, Head mounted displays, Multisensory interaction, Navigation, Navigation aids, Sensory perception, Spatial awareness, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {409--410}, } @article{oliveira_designing_2017, title = {Designing a {Vibrotactile} {Head}-{Mounted} {Display} for {Spatial} {Awareness} in {3D} {Spaces}}, volume = {23}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85017621840&doi=10.1109%2fTVCG.2017.2657238&partnerID=40&md5=a0617c234c770fb99f6c7d0b72f685b1}, doi = {10/f9wkqh}, abstract = {Due to the perceptual characteristics of the head, vibrotactile Head-mounted Displays are built with low actuator density. Therefore, vibrotactile guidance is mostly assessed by pointing towards objects in the azimuthal plane. When it comes to multisensory interaction in 3D environments, it is also important to convey information about objects in the elevation plane. In this paper, we design and assess a haptic guidance technique for 3D environments. First, we explore the modulation of vibration frequency to indicate the position of objects in the elevation plane. Then, we assessed a vibrotactile HMD made to render the position of objects in a 3D space around the subject by varying both stimulus loci and vibration frequency. Results have shown that frequencies modulated with a quadratic growth function allowed a more accurate, precise, and faster target localization in an active head pointing task. The technique presented high usability and a strong learning effect for a haptic search across different scenarios in an immersive VR setup. © 1995-2012 IEEE.}, journal = {IEEE Transactions on Visualization and Computer Graphics}, author = {Oliveira, Victor A. de J. and Brayda, L and Nedel, L and Maciel, A}, year = {2017}, keywords = {Extern, Head mounted displays, Multisensory interaction, Navigation, Navigation aids, Sensory perception, Spatial awareness, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {1340--1348}, } @article{da_silva_construcao_2017, title = {Construção e estudo de evidências de validade da {Escala} de {Avaliação} {Docente} [{Construction} and study of validity evidence of the {Teaching} {Assessment} {Scale}]}, volume = {22}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85025666213&doi=10.1590%2fS1413-24782017227035&partnerID=40&md5=47c6d2c94d5ee2f1ea10858d9abf8248}, doi = {10/gh3754}, abstract = {The objective of this study was to present the construction of the Teaching Assessment Scale (TAS) and the validity evidence for it. This scale was developed through a literature review and interviews with graduate students.
The evidence for content validity of the TAS was evaluated by ten referees, and this was followed by the analysis of the evidence for construct validity and reliability. The tetrachoric correlation matrix was submitted to exploratory factor analysis, and the Hull method was used to decide the number of dimensions to be retained. Item response theory (IRT) analysis was performed using the rating scale model with the result that seven items needed to be excluded. The Kaiser-Meyer-Olkin (KMO) index and Bartlett’s Test of Sphericity indicated that the polychoric correlation matrix was factorable. The Hull method suggested the retention of one factor, with an eigenvalue of 15.49. The factor’s reliability measures were α = 0.96 and ω = 0.95. As a result, the TAS is considered helpful in evaluating higher education teaching methods in Brazil.}, journal = {Revista Brasileira de Educação}, author = {Da Silva, M.A and Machado, W.L and Pilotto, L.M and Backes, B and Zanon, R.B and Machado, P.V and Zoltowski, A.P.C and Vieira, R.V.A and Endres, R.G and Francalanci, M and Oliveira, Victor A. de J. and Krug, J.S and Bandeira, D.R}, year = {2017}, keywords = {Extern}, pages = {690--707}, } @inproceedings{oliveira_does_2015, title = {Does vibrotactile intercommunication increase collaboration?}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84954501893&doi=10.1109%2fVR.2015.7223391&partnerID=40&md5=da35eb165587a8fc47c7d814cbc27611}, doi = {10/gh375z}, abstract = {Communication is a fundamental process in collaborative work. In natural conditions, communication between team members is multimodal. This allows for redundancy, adaptation to different contexts, and different levels of focus. In collaborative virtual environments, however, hardware limitations and lack of appropriate interaction metaphors reduce the amount of collaboration. In this poster, we propose the design and use of a vibrotactile language to improve user intercommunication in CVE and, consequently, to increase the amount of effective collaboration. © 2015 IEEE.}, booktitle = {2015 {IEEE} {Virtual} {Reality} {Conference}, {VR} 2015 - {Proceedings}}, author = {Oliveira, Victor A. de J. and Sarmiento, W.J and Maciel, A and Nedel, L and Collazos, C.A}, year = {2015}, keywords = {Collaborative virtual environment, Extern, Navigation, Navigation aids, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {253--254}, } @inproceedings{oliveira_experiencing_2017, title = {Experiencing guidance in {3D} spaces with a vibrotactile head-mounted display}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85018368790&doi=10.1109%2fVR.2017.7892375&partnerID=40&md5=e0588383630f5330cf3c308799144a17}, doi = {10/gh375x}, abstract = {Vibrotactile feedback is broadly used to support different tasks in virtual and augmented reality applications, such as navigation, communication, attentional redirection, or to enhance the sense of presence in virtual environments. Thus, we aim to add the haptic component to the most popular wearable used in VR applications: the VR headset. After studying the acuity around the head for vibrating stimuli, and trying different parameters, actuators, and configurations, we developed a haptic guidance technique to be used in a vibrotactile Head-mounted Display (HMD). Our vibrotactile HMD was made to render the position of objects in a 3D space around the subject by varying both stimulus loci and vibration frequency.
In this demonstration, the participants will interact with different scenarios where the mission is to select a number of predefined objects. However, instead of displaying occlusive graphical information to point to these objects, vibrotactile cues will provide guidance in the VR setup. Participants will see that our haptic guidance technique can be both easy to use and entertaining. (See Video: https://youtu.be/-H0MQy6QD7M). © 2017 IEEE.}, booktitle = {Proceedings - {IEEE} {Virtual} {Reality}}, author = {Oliveira, Victor A. de J. and Brayda, L and Nedel, L and Maciel, A}, year = {2017}, keywords = {Extern, Head mounted displays, Multisensory interaction, Navigation, Navigation aids, Sensory perception, Spatial awareness, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {453--454}, } @article{oliveira_introducing_2014, title = {Introducing the modifier tactile pattern for vibrotactile communication}, volume = {8618}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84910149501&doi=10.1007%2f978-3-662-44193-0_39&partnerID=40&md5=20b977f28fc4802838fe0bc77e32d4a2}, doi = {10/gh375w}, abstract = {We introduce the concept of “Modifier Tactile Pattern” as a pattern that modifies the interpretation of other elements that compose a Tacton or an entire tactile message. This concept was inspired by the prefixation strategies of the Braille system. We also show how to design tactile languages applying the concept of Modifier by following methodologies and approaches of Tacton design that already exist in the literature. Then a modifier-based tactile language is designed and assessed in a user study. © Springer-Verlag Berlin Heidelberg 2014.}, journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, author = {Oliveira, Victor A. de J. and Maciel, A.}, year = {2014}, keywords = {Extern, Modifier, Navigation, Navigation aids, Tactile communications, Tactile prefixation, User experiments, Vibrotactile, Virtual reality, user study}, pages = {309--316}, } @inproceedings{oliveira_proactive_2016, title = {Proactive haptic articulation for intercommunication in collaborative virtual environments}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84974623160&doi=10.1109%2f3DUI.2016.7460036&partnerID=40&md5=17811772762c2dd2913dfda0e9d96183}, doi = {10/gh375t}, abstract = {In this paper, we look to elements present in speech articulation to introduce proactive haptic articulation as a novel approach for communication in Collaborative Virtual Environments. We defend the hypothesis that elements present in natural language, when added to the design of the vibrotactile vocabulary, should provide an expressive medium for intercommunication. Moreover, the ability to render tactile cues to a teammate should encourage users to extrapolate a given vocabulary while using it. We implemented a collaborative puzzle task to observe the use of such vocabulary. Results show that participants autonomously adapted it to meet their communication needs during the assembly. © 2016 IEEE.}, booktitle = {2016 {IEEE} {Symposium} on {3D} {User} {Interfaces}, {3DUI} 2016 - {Proceedings}}, author = {Oliveira, Victor A. de J.
and Nedel, L and Maciel, A}, year = {2016}, keywords = {Collaborative virtual environment, Extern, Navigation, Navigation aids, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {91--94}, } @article{oliveira_localized_2016, title = {Localized magnification in vibrotactile {HMDs} for accurate spatial awareness}, volume = {9775}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84978221098&doi=10.1007%2f978-3-319-42324-1_6&partnerID=40&md5=570531a79b6bc1fbc11b96b686fafbeb}, doi = {10/gh375v}, abstract = {Actuator density is an important parameter in the design of vibrotactile displays. When it comes to obstacle detection or navigation tasks, a high number of tactors may provide more information, but not necessarily better performance. Depending on the body site and vibration parameters adopted, high density can make it harder to detect tactors in an array. In this paper, we explore the trade-off between actuator density and precision by comparing three kinds of directional cues. After performing a within-subject naive search task using a head-mounted vibrotactile display, we found that increasing the density of the array locally provides higher performance in detecting directional cues. © Springer International Publishing Switzerland 2016.}, journal = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)}, author = {Oliveira, Victor A. de J. and Nedel, L and Maciel, A and Brayda, L}, year = {2016}, keywords = {Actuators, Extern, Head stimulation, Navigation, Spatial awareness, Tactile display, Vibration parameters, Vibrotactile displays}, pages = {55--64}, } @inproceedings{oliveira_speaking_2016, title = {Speaking haptics: {Proactive} haptic articulation for intercommunication in virtual environments}, volume = {2016-July}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84979763427&doi=10.1109%2fVR.2016.7504748&partnerID=40&md5=8e27e29b81e6fb5cae6709b7d3d3e83f}, doi = {10/gh375r}, abstract = {Communication is crucial in collaborative tasks. Multimodal strategies are commonly applied to complement, reinforce and disambiguate information exchange. However, although multimodal communication is commonplace in Collaborative Virtual Environments, the proactive use of touch for intercommunication is surprisingly neglected regardless of its importance for communication. In this paper, we look to elements present in speech articulation to introduce proactive haptic articulation as a novel approach for communication in CVEs. We defend the hypothesis that elements present in natural language, when added to the design of the vibrotactile vocabulary, should provide an expressive medium for intercommunication. Moreover, we hypothesize that the ability to render tactile cues to a teammate will encourage users to adapt a given vocabulary spontaneously during its use. We implemented a case study around a collaborative puzzle task to demonstrate the use of such vocabulary. Results show that the proactive haptic articulation provided a way for participants to autonomously and dynamically adapt the provided tactile vocabulary to meet their communication needs during the task. © 2016 IEEE.}, booktitle = {Proceedings - {IEEE} {Virtual} {Reality}}, author = {Oliveira, Victor A. de J. and Nedel, L.
and Maciel, A.}, year = {2016}, keywords = {Collaborative virtual environment, Extern, Navigation, Navigation aids, Tactile communications, User experiments, Vibrotactile, Virtual reality, user study}, pages = {251--252}, } @inproceedings{oliveira_spatial_2016, title = {Spatial discrimination of vibrotactile stimuli around the head}, volume = {2016-April}, url = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-84967317336&doi=10.1109%2fHAPTICS.2016.7463147&partnerID=40&md5=2e963539c4dad787219299b2ff943a2e}, doi = {10/gh375s}, abstract = {Several studies evaluated vibrotactile stimuli on the head to aid orientation and communication. However, the acuity of the head’s skin for vibration still needs to be explored. In this paper, we report the assessment of the spatial resolution on the head. We performed a 2AFC psychophysical experiment systematically varying the distance between pairs of stimuli in a standard-comparison approach. We took into consideration not only the perceptual thresholds but also the reaction times and subjective factors, like workload and vibration pleasantness. Results show that the region around the forehead is not only the most sensitive, with thresholds under 5 mm, but it is also the region wherein the spatial discrimination was felt to be easier to perform. We also found that it is possible to describe acuity on the head for vibrating stimuli as a function of skin type (hairy or glabrous) and of the distance of the stimulated loci from the head midline. © 2016 IEEE.}, booktitle = {{IEEE} {Haptics} {Symposium}, {HAPTICS}}, author = {Oliveira, Victor A. de J. and Nedel, L and Maciel, A and Brayda, L}, year = {2016}, keywords = {Extern, Head stimulation, Perceptual threshold, Psychophysical experiments, Spatial discrimination, Spatial resolution, Vibration parameters, Vibrotactile displays}, pages = {1--6}, } @article{de_jesus_oliveira_assessment_2018, title = {Assessment of an articulatory interface for tactile intercommunication in immersive virtual environments}, volume = {76}, issn = {0097-8493}, url = {http://www.sciencedirect.com/science/article/pii/S0097849318301067}, doi = {10/gfnmx5}, journal = {Computers \& Graphics}, author = {de Jesus Oliveira, Victor Adriel and Nedel, L and Maciel, A}, year = {2018}, keywords = {Articulatory interface, Extern, Tactile intercommunication, Vibrotactile HMD}, pages = {18--28}, }