@inproceedings{fischer_auditory_2016, address = {Segovia, Spain}, series = {Biosystems \& {Biorobotics}}, title = {An {Auditory} {Feedback} {System} in {Use} with {People} {Aged} +50 {Years}: {Compliance} and {Modifications} in {Gait} {Pattern}}, copyright = {©2017 Springer International Publishing AG}, isbn = {978-3-319-46668-2 978-3-319-46669-9}, shorttitle = {An {Auditory} {Feedback} {System} in {Use} with {People} {Aged} +50 {Years}}, url = {http://link.springer.com/chapter/10.1007/978-3-319-46669-9_143}, doi = {10/gnt2tg}, abstract = {Aging leads to gait impairments, which increases the risk for falls. In this study the impact of the auditory feedback system SONIGait on gait parameters in elderly persons was investigated. Twenty-one participants walked at self-selected speed with four variations of real-time auditory feedback of their plantar pressure. Repeated measures ANOVA was utilized to determine changes in time-distance parameters between walking without feedback and four feedback variations. After walking, they completed a questionnaire about their appraisal of the SONIGait system and the four different feedback modalities. There was a significant reduction in gait velocity (0.142 ± 0.04 m/s; p {\textless} 0.001) and prolongation of step time (0.02 ± 0.005 s; p {\textless} 0.001) during walking with SONIGait. No significant preference for any of the feedback variations was observed. Most participants evaluated the system SONIGait positively. Thus, real-time auditory feedback may be used in gait rehabilitation and may support an older person’s gait stability.}, language = {en}, urldate = {2016-10-19}, booktitle = {Converging {Clinical} and {Engineering} {Research} on {Neurorehabilitation} {II}}, publisher = {Springer International Publishing}, author = {Fischer, Theresa and Kiselka, Anita and Dlapka, Ronald and Doppler, Jakob and Iber, Michael and Gradl, Christian and Gorgas, Anna-Maria and Siragy, Tarique and Horsak, Brian}, editor = {Ibáñez, Jaime and González-Vargas, José and Azorín, José María and Akay, Metin and Pons, José Luis}, year = {2016}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {Biofeedback, Biomechanics, Center for Digital Health Innovation, Center for Digital Health and Social Innovation, DHLab, Departement Soziales, Department Gesundheit, Department Technologie, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, Publikationstyp Schriftpublikation, Publikationstyp Vortrag, SP CDHSI Motor Rehabilitation, SP IGW Clinical \& Healthcare Research, SP IGW Health Promotion \& Healthy Ageing, Studiengang Physiotherapie, Wiss. Beitrag, best, best-bhorsak, interdisziplinär, peer-reviewed, project\_carma, project\_sonigait}, pages = {881--885}, } @book{seidl_fmt_2018, title = {{FMT} 2018 - {Proceedings} of the 11th {Forum} {Media} {Technology} and 4th {All} {Around} {Audio} {Symposium}}, url = {http://ceur-ws.org/Vol-2299/}, publisher = {CEUR-WS.org}, editor = {Seidl, Markus and Moser, Thomas and Blumenstein, Kerstin and Zeppelzauer, Matthias and Iber, Michael}, year = {2018}, keywords = {Center for Artificial Intelligence, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Digital Technologies, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Media Computing Group, Wiss. 
Beitrag, peer-reviewed}, } @book{aigner_fmt_2017, title = {{FMT} 2017 - {Proceedings} of the 10th {Forum} {Media} {Technology} and 3rd {All} {Around} {Audio} {Symposium}}, url = {http://ceur-ws.org/Vol-2009/}, publisher = {CEUR-WS.org}, editor = {Aigner, Wolfgang and Moser, Thomas and Blumenstein, Kerstin and Zeppelzauer, Matthias and Iber, Michael and Schmiedl, Grischa}, year = {2017}, keywords = {2017, Audio Design, Center for Artificial Intelligence, Center for Digital Health Innovation, Computer Science, Data Modeling, Digital Media Experience, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Digital Technologies, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Media Computing Group, Media Technology, Mobile Applications, Publikationstyp Schriftpublikation, Wiss. Beitrag, data analysis, information visualization, peer-reviewed, usability}, } @book{aigner_fmt_2016, title = {{FMT} 2016 - {Proceedings} of the 9th {Forum} {Media} {Technology} and 2nd {All} {Around} {Audio} {Symposium}}, isbn = {978-1-326-88118-4}, url = {http://ceur-ws.org/Vol-1734/}, publisher = {CEUR-WS.org}, editor = {Aigner, Wolfgang and Schmiedl, Grischa and Blumenstein, Kerstin and Zeppelzauer, Matthias and Iber, Michael}, month = nov, year = {2016}, keywords = {2016, Center for Artificial Intelligence, Center for Digital Health Innovation, Creative Industries, Department Medien und Digitale Technologien, Department Technologie, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Media Computing Group, Publikationstyp Schriftpublikation, Wiss. Beitrag, peer-reviewed}, } @misc{fischer_auditory_2016-1, address = {Segovia, Spain}, title = {An {Auditory} {Feedback} {System} in {Use} with {People} {Aged} +50 {Years}: {Compliance} and {Modifications} in {Gait} {Pattern}}, abstract = {Aging leads to gait impairments, which increases the risk for falls. In this study the impact of the auditory feedback system SONIGait on gait parameters in elderly persons was investigated. Twenty-one participants walked at self-selected speed with four variations of real-time auditory feedback of their plantar pressure. Repeated measures ANOVA was utilized to determine changes in time-distance parameters between walking without feedback and four feedback variations. After walking, they completed a questionnaire about their appraisal of the SONIGait system and the four different feedback modalities. There was a significant reduction in gait velocity (0.142 ± 0.04 m/s; p {\textless} 0.001) and prolongation of step time (0.02 ± 0.005 s; p {\textless} 0.001) during walking with SONIGait. No significant preference for any of the feedback variations was observed. Most participants evaluated the system SONIGait positively. Thus, real-time auditory feedback may be used in gait rehabilitation and may support an older person’s gait stability.}, author = {Fischer, Theresa and Kiselka, Anita}, collaborator = {Dlapka, Ronald and Doppler, Jakob and Iber, Michael and Gradl, Christian and Gorgas, Anna-Maria and Siragy, Tarique and Horsak, Brian}, year = {2016}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {Biofeedback, Center for Digital Health Innovation, Center for Digital Health and Social Innovation, DHLab, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, SP CDHSI Motor Rehabilitation, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. 
Beitrag, best, best-bhorsak, peer-reviewed}, } @inproceedings{gorgas_short-term_2016, address = {Segovia, Spain}, series = {Biosystems \& {Biorobotics}}, title = {Short-{Term} {Effects} of {Real}-{Time} {Auditory} {Display} ({Sonification}) on {Gait} {Parameters} in {People} with {Parkinsons}’ {Disease}—{A} {Pilot} {Study}}, copyright = {©2017 Springer International Publishing AG}, isbn = {978-3-319-46668-2 978-3-319-46669-9}, url = {http://link.springer.com/chapter/10.1007/978-3-319-46669-9_139}, doi = {10/gnt2th}, abstract = {Parkinson’s disease PD patients frequently experience gait impairments. Auditory input has been shown to be an effective measure to benefit critical gait aspects related to the timing and initiation of movement. An instrumented shoe insole device for real-time sonification of gait has been developed for rehabilitation purposes (SONIGait). The objective of the present pilot study was to gain insight about possible effects of SONIGait on gait parameters in PD patients. Five PD patients participated in this pilot study and completed three series of trials with and without sonification. Spatio-temporal gait parameters were recorded during these trials. The outcomes revealed an increase in walking velocity and cadence along with other gait parameters between pre- and posttest. These data indicate that sonification affects gait parameters and fosters (short-term) learning effects in PD patients. Thus, SONIGait may be a suitable measure to promote gait rehabilitation in PD in the future.}, language = {en}, urldate = {2016-10-19}, booktitle = {Converging {Clinical} and {Engineering} {Research} on {Neurorehabilitation} {II}}, publisher = {Springer International Publishing}, author = {Gorgas, Anna-Maria and Schön, Lena and Dlapka, Ronald and Doppler, Jakob and Iber, Michael and Gradl, Christian and Kiselka, Anita and Siragy, Tarique and Horsak, Brian}, editor = {Ibáñez, Jaime and González-Vargas, José and Azorín, José María and Akay, Metin and Pons, José Luis}, year = {2016}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {Biofeedback, Biomechanics, Center for Digital Health Innovation, Center for Digital Health and Social Innovation, DHLab, Department Gesundheit, Department Technologie, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, Publikationstyp Schriftpublikation, Publikationstyp Vortrag, SP CDHSI Motor Rehabilitation, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. 
Beitrag, best, best-bhorsak, interdisziplinär, peer-reviewed, project\_carma, project\_sonigait}, pages = {855--859}, } @article{horsak_sonigait_2016, title = {{SONIGait}: a wireless instrumented insole device for real-time sonification of gait}, volume = {10}, issn = {1783-7677, 1783-8738}, shorttitle = {{SONIGait}}, url = {http://link.springer.com/10.1007/s12193-016-0216-9}, doi = {10/gh38bg}, language = {en}, number = {3}, urldate = {2016-04-26}, journal = {Journal on Multimodal User Interfaces}, author = {Horsak, Brian and Dlapka, Ronald and Iber, Michael and Gorgas, Anna-Maria and Kiselka, Anita and Gradl, Christian and Siragy, Tarique and Doppler, Jakob}, year = {2016}, note = {Projekt: CARMA Projekt: SoniGait Projekt: DHLab}, keywords = {Biofeedback, Biomechanics, Center for Digital Health Innovation, Center for Digital Health and Social Innovation, DHLab, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Creative Media Technologies, Institut für Gesundheitswissenschaften, SP CDHSI Motor Rehabilitation, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. Beitrag, best, best-bhorsak, peer-reviewed}, pages = {195--206}, } @inproceedings{siragy_framework_2016, address = {Wien, Österreich}, title = {Framework for {Real}-time {Auditory} {Display} of {Plantar} {Pressure} during {Walking}}, booktitle = {Tagungsband des 10. {Forschungsforum} der Österreichischen {Fachhochschulen}}, author = {Siragy, Tarique and Doppler, Jakob and Gorgas, Anna-Maria and Dlapka, Ronald and Iber, Michael and Kiselka, Anita and Gradl, Christian and Horsak, Brian}, year = {2016}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {Biofeedback, Biomechanics, Center for Digital Health Innovation, Center for Digital Health and Social Innovation, DHLab, Department Gesundheit und Soziales, Department Medien und Digitale Technologien, Department Technologie, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Creative Media Technologies, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, Publikationstyp Schriftpublikation, Publikationstyp Vortrag, SP CDHSI Motor Rehabilitation, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. Beitrag, best, best-bhorsak, interdisziplinär, peer-reviewed, project\_carma, project\_sonigait, ⛔ No DOI found}, } @inproceedings{iber_pilotstudie_2015, address = {FH St. Pölten}, title = {Pilotstudie zur sonifikationsgestützten {Ganganalyse}}, isbn = {978-3-86488-090-2}, abstract = {Verletzungs- oder krankheitsbedingte Beeinträchtigungen des Ganges stellen die physiotherapeutische Behandlung vor große Herausforderungen. Aktuelle Technologien erlauben heute die Entwicklung preiswerter tragbarer Ganganalysesysteme, die den gewohnten Bewegungsablauf nicht einschränken und auch außerhalb eines Labors verwendet werden können. Über eine diagnostische Anwendung hinaus können sie auch den motorischen Lernprozess in der physiotherapeutischen Behandlung unterstützen. Eine akustische Darstellung des Abrollverhaltens erlaubt PatientInnen, mögliche Abweichungen wahrzunehmen, und ermöglicht folglich Eigenkontrolle und Eigenständigkeit beim Üben. Auf Grundlage dieser Rahmenbedingungen wurde ein Hardware-Prototyp bestehend aus einem Paar mit Sensoren ausgestatteter Schuhsohlen und einem Mikroprozessor mit BluetoothLE entwickelt, der Bewegungsdaten in Echtzeit an ein handelsübliches mobiles Endgerät schickt. Auf diesem werden die parametrisierten Daten in Echtzeit sonifiziert, d.h.
als Klänge synthetisiert, und über Kopfhörer der PatientIn zugespielt. Dadurch erhält die PatientIn eine zusätzliche Rückmeldung zu ihrem Gangmuster. In einer Pilotstudie wurden Sonifikationsvarianten entwickelt und nach einer Vorauswahl durch PhysiotherapeutInnen durch eine Gruppe gesunder ProbandInnen evaluiert. Darüber hinaus wurde der objektive Einfluss der Sonifikationen auf das Gangmuster anhand von Bewegungsdaten, die mit Druckmessplatten erhoben wurden, verglichen.}, booktitle = {Forum {Medientechnik} - {Next} {Generation}, {New} {Ideas}}, publisher = {Verlag Werner Hülsbusch, Fachverlag für Medientechnik und -wirtschaft}, author = {Iber, Michael and Horsak, Brian and Bauer, Karin and Kiselka, Anita and Gorgas, Anna-Maria and Dlapka, Ronald and Doppler, Jakob}, year = {2015}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {2015, Biomechanics, Center for Digital Health Innovation, DHLab, Department Gesundheit und Soziales, Department Medien und Digitale Technologien, Forschungsgruppe Digital Technologies, Forschungsgruppe Media Computing, Gait Analysis, Institut für Creative Media Technologies, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, Publikationstyp Schriftpublikation, Publikationstyp Vortrag, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. Beitrag, best, best-bhorsak, peer-reviewed, project\_carma, project\_sonigait}, pages = {51--68}, } @inproceedings{horsak_wireless_2015, address = {Graz, Austria}, title = {A wireless instrumented insole device for real-time sonification of gait}, isbn = {978-3-902949-01-1}, booktitle = {Proceedings of the 21st {International} {Conference} on {Auditory} {Display}}, author = {Horsak, Brian and Iber, Michael and Bauer, Karin and Kiselka, Anita and Gorgas, Anna-Maria and Dlapka, Ronald and Doppler, Jakob}, year = {2015}, note = {Projekt: CARMA Projekt: DHLab}, keywords = {Biomechanics, Center for Digital Health Innovation, DHLab, Department Gesundheit und Soziales, Department Medien und Digitale Technologien, Department Technologie, Forschungsgruppe Digital Technologies, Gait Analysis, Institut für Creative Media Technologies, Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, Publikationstyp Schriftpublikation, Publikationstyp Vortrag, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie, Wiss. Beitrag, best, best-bhorsak, interdisziplinär, peer-reviewed, project\_carma, project\_sonigait}, pages = {94--101}, } @inproceedings{rind_bridging_2018, title = {Bridging the {Gap} {Between} {Sonification} and {Visualization}}, url = {https://doi.org/10.5281/zenodo.6510341}, doi = {10.5281/zenodo.6510341}, abstract = {Extensive research has been carried out both on auditory and visual representation of data. Still, there is huge potential for complementary audio-visual analytics environments. This position paper works towards a research agenda for interdisciplinary work.}, booktitle = {Proc. {AVI} {Workshop} on {Multimodal} {Interaction} for {Data} {Visualization} ({MultimodalVis})}, author = {Rind, Alexander and Iber, Michael and Aigner, Wolfgang}, year = {2018}, note = {Projekt: KAVA-Time Projekt: VAST}, keywords = {FH SP Data Analytics \& Visual Computing, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Wiss.
Beitrag, peer-reviewed}, } @article{felberbauer_datavison_2016, title = {{DataViSon}: ein {Schritt} hin zur dezentralen {Produktionssteuerung}}, volume = {32}, number = {6}, journal = {Industrie 4.0 Management}, author = {Felberbauer, Thomas and Moser, Thomas and Iber, Michael and Fidler, Franz}, year = {2016}, keywords = {2016, Department Medien und Digitale Technologien, Department Technologie, Forschungsgruppe Digital Technologies, Institut für Creative Media Technologies, best-lbfelberbauer, peer-reviewed, ⛔ No DOI found}, pages = {22--26}, } @inproceedings{iber_auditory_2019, address = {Nottingham, United Kingdom}, title = {Auditory {Augmented} {Reality} for {Cyber} {Physical} {Production} {Systems}}, isbn = {978-1-4503-7297-8}, doi = {10.1145/3356590.3356600}, abstract = {We describe a proof-of-concept approach on the sonification of estimated operation states of 3D printing processes. The results of this study form the basis for the development of an “intelligent” noise protection headphone as part of Cyber Physical Production Systems, which provides auditorily augmented information to machine operators and enables radio communication between them. Further application areas are implementations in control rooms (equipped with multichannel loudspeaker systems) and utilization for training purposes. The focus of our research lies on situation-specific acoustic processing of conditioned machine sounds and operation related data with high information content, considering the often highly auditorily influenced working knowledge of skilled workers. As a proof-of-concept the data stream of error probability estimations regarding partly manipulated 3D printing processes was mapped to three sonification models, giving evidence about momentary operation states. The neural network applied indicates a high accuracy ({\textgreater}93\%) concerning error estimation distinguishing between normal and manipulated operation states. None of the manipulated states could be identified by listening. An auditory augmentation, respectively sonification of these error estimations provides a considerable benefit to process monitoring.}, booktitle = {{AudioMostly} ({AM}'19)}, publisher = {ACM New York, NY, USA}, author = {Iber, Michael and Lechner, Patrik and Jandl, Christian and Mader, Manuel and Reichmann, Michael}, year = {2019}, note = {Projekt: IML}, keywords = {Auditory Display, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Media Computing, Forschungsgruppe Media Creation, Immersive Media (AR, VR, 360°), Institut für Creative Media Technologies, Smart Manufacturing, Vortrag, best, best-lbiber, peer-reviewed}, } @inproceedings{rottermanner_low-fidelity_2018, address = {Bonn}, title = {Low-{Fidelity} {Prototyping} for the {Air} {Traffic} {Control} {Domain}}, url = {https://dl.gi.de/handle/20.500.12116/16931}, abstract = {In the next 20 years, significant changes in air traffic control are planned (SESAR, 2015). Next to an increase in air traffic, reduction in delays and improvement of safety, 4D trajectories will ensure flights on the most direct route to the destination airport. Within the research project VAST (Virtual Airspace and Tower), the team wants to explore the design space of future air traffic control interfaces. Three low-fidelity prototypes were developed to evaluate them as early as possible with the target group, namely Air Traffic Control Officers (ATCOs).
They will be described in this paper in more detail.}, booktitle = {Mensch und {Computer} 2018 - {Workshopband}}, publisher = {Gesellschaft für Informatik e.V.}, author = {Rottermanner, Gernot and Wagner, Markus and Kalteis, Martin and Iber, Michael and Judmaier, Peter and Aigner, Wolfgang and Settgast, Volker and Eggeling, Eva}, year = {2018}, note = {Projekt: VAST}, keywords = {FH SP Data Analytics \& Visual Computing, Forschungsgruppe Digital Technologies, Forschungsgruppe Media Computing, Human-Computer Interaction, Institut für Creative Media Technologies, Visual Computing, Wiss. Beitrag, peer-reviewed, ⛔ No DOI found}, pages = {605--614}, } @inproceedings{rottermanner_design_2020, address = {Atlanta, USA}, title = {Design and {Evaluation} of a {Tool} to {Support} {Air} {Traffic} {Control} with {2D} and {3D} {Visualizations}}, isbn = {978-1-72815-608-8}, url = {https://ieeexplore.ieee.org/document/9089606}, doi = {10/ghppx4}, abstract = {Air traffic control officers (ATCOs) are specialized workers responsible to monitor and guide airplanes in their assigned airspace. Such a task is highly visual and mainly supported by 2D visualizations. In this paper, we designed and assessed an application for visualizing air traffic in both orthographic (2D) and perspective (3D) views. A user study was then performed to compare these two types of representations in terms of situation awareness, workload, performance, and user acceptance. Results show that the 3D view yielded both higher situation awareness and less workload than the 2D view condition. However, such a performance does not match the opinion of the ATCOs about the 3D representation.}, publisher = {IEEE Computer Society}, author = {Rottermanner, Gernot and Oliveira, Victor A. De J. and Kreiger, Mylene and Graf, Philipp and Rokitansky, Carl-Herbert and Eschbacher, Kurt and Settgast, Volker and Grantz, Volker and Iber, Michael and Lechner, Patrik and Judmaier, Peter and Wagner, Markus}, year = {2020}, note = {Projekt: VAST}, keywords = {Digital Media, Eintrag überprüfen, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Media Computing, Green OA, Human-Computer Interaction, Institut für Creative Media Technologies, Institutional Access, Visual Computing, Vortrag, Wiss. Beitrag, best-grottermanner, peer-reviewed}, } @article{iber_auditory_2020, title = {Auditory augmented process monitoring for cyber physical production systems}, issn = {1617-4909, 1617-4917}, url = {http://link.springer.com/10.1007/s00779-020-01394-3}, doi = {10/ghz24q}, language = {en}, urldate = {2020-03-30}, journal = {Personal and Ubiquitous Computing}, author = {Iber, Michael and Lechner, Patrik and Jandl, Christian and Mader, Manuel and Reichmann, Michael}, month = mar, year = {2020}, note = {Projekt: IML}, keywords = {Auditory Display, Eintrag überprüfen, FH SP Data Analytics \& Visual Computing, Forschungsgruppe Media Computing, Forschungsgruppe Media Creation, Green OA, Immersive Media (AR, VR, 360°), Institut für Creative Media Technologies, Open Access, Smart Manufacturing, Wiss. 
Beitrag, best, best-lbiber, peer-reviewed}, } @article{iber_order-related_2012, title = {Order-{Related} {Acoustic} {Characterization} of {Production} {Data}}, volume = {5}, url = {https://link.springer.com/article/10.1007/s12159-012-0084-y}, doi = {10/ghppzm}, number = {3-4}, journal = {Logistics Research}, author = {Iber, Michael and Windt, K.}, month = sep, year = {2012}, keywords = {Extern, Vortrag, best-lbiber, peer-reviewed}, pages = {89--98}, } @phdthesis{iber_auditory_2014, title = {Auditory {Logistic} {Analysis}}, abstract = {The accuracy of logistic performance becomes more and more a crucial factor for the competitiveness of enterprises. Even small advantages decide on economic success or failure. Therefore companies collect data at an increasing level of detail in order to gain more knowledge about in-depth manufacturing processes and their dynamics, serving as a basis for the adjustment of Production Planning and Control. In traditional logistics, the application of conventional statistical methods for the analysis of manufacturing data has led to formidable improvements of the achievements of logistic objectives. In order to reduce complexity, these traditional approaches rely on greatly simplified models, mostly based on averaging. In order to evaluate data on a higher level of detail for further logistic improvements, actual analyses of manufacturing data more and more include advanced statistical methods, such as Knowledge Discovery in Databases (KDD) and Explorative Data Analysis (EDA). The scientific discipline of Auditory Display, i.e., the acoustic representation of information, has been successfully established in EDA as counterpart to graphical statistics. More specifically known as data sonification, an auditory display is particularly suitable to analyze chronological events, such as the workflow of a production. Furthermore, human auditory perception is not only capable to highly differentiate between pitches and colors of sound in its temporal evolution, but also to localize the position of a sound. The spatial distribution of the acoustic information, therefore, is also an important feature of an auditory display. Besides this interplay between tonal and spatial attributes for the conditioning of information, sound has a further quality that predestines it for an application in logistic analysis: Models in manufacturing distinguish between dynamic continuous flow and discrete event perspectives. Sound is able to represent both perspectives simultaneously: a periodic sound signal can be considered as sum of sinusoids representing the continuous flow perspective, whereas, from a musical perspective, the same sound may be regarded as a sequence of discrete events. In this fundamental research, Auditory Display has been applied to the analysis of manufacturing processes. A methodical transfer from logistic to sonic parameters has been developed in order to investigate the potential of sonifications for the identification of bottlenecks in a production workflow, also in regard to the impact of cross-system dynamics. The developed sonification model is based on the throughput element of the Bottleneck Oriented Logistic Analysis. Additionally to auditory representations of established logistic perspectives, with “synchronous view”, a novel perspective is introduced, displaying the complete workflow of an order as characteristic “fingerprints”.
The developed method enables trained analysts to identify systematic order- and process-related bottlenecks, their temporal dynamic evolutions, and their overall impact on schedule adherence of a production workflow.}, author = {Iber, Michael}, year = {2014}, keywords = {Extern}, } @article{iber_soundalike_2005, title = {soundalike: sounds like sounds we like}, volume = {166}, number = {6}, journal = {Neue Zeitschrift für Musik}, author = {Iber, Michael}, year = {2005}, keywords = {Extern}, pages = {16--17}, } @incollection{iber_grooving_2011, address = {Boizenburg}, title = {Grooving {Factory}: {Engpassanalyse} in der {Produktionslogistik}}, isbn = {978-3-86488-005-6}, booktitle = {Forum {Medientechnik} - {Next} {Generation}, {New} {Ideas}}, publisher = {Verlag Werner Hülsbusch}, author = {Iber, Michael and Windt, Katja}, editor = {Frotschnig, Alois and Raffaseder, Hannes}, year = {2011}, keywords = {Extern}, pages = {83--91}, } @incollection{iber_grooving_2012, address = {Bielefeld}, edition = {1}, title = {Die {Grooving} {Factory} - {Logistische} {Datenanalyse} im {Klanglabor}}, isbn = {3-8376-2049-2}, url = {http://www.amazon.de/Das-geschulte-Ohr-Kulturgeschichte-Sonifikation/dp/3837620492/ref=sr_1_1?ie=UTF8&qid=1330036444&sr=8-1}, booktitle = {Das geschulte {Ohr}: {Eine} {Kulturgeschichte} der {Sonifikation}}, publisher = {Transcript}, author = {Iber, Michael and Klein, Julian and Windt, Katja}, editor = {Schoon, Andi and Volmar, Axel}, year = {2012}, keywords = {Extern}, pages = {147--162}, } @misc{jandl_aarip-virtual_2019, title = {{AARiP}-{Virtual} {Reality} {App} for {Oculus} {Go}}, abstract = {VR-App zum Evaluieren der Sonifikation im Usecase AARiP (IML Projekt)}, author = {Jandl, Christian and Mader, Christian and Iber, Michael}, month = sep, year = {2019}, note = {Projekt: IML}, keywords = {Forschungsgruppe Digital Technologies, Forschungsgruppe Media Creation, Immersive Media (AR, VR, 360°), Institut für Creative Media Technologies}, } @incollection{filimowicz_auditory_2020, address = {New York}, title = {Auditory {Display} in {Workspace} {Environments}}, isbn = {978-1-138-09389-8}, booktitle = {Foundations in {Sound} {Design} for {Embedded} {Media}: {A} {Multidisciplinary} {Approach}}, publisher = {Routledge}, author = {Iber, Michael}, editor = {Filimowicz, Michael}, year = {2020}, keywords = {Auditory Display, Forschungsgruppe Media Computing, Human-Computer Interaction, Institut für Creative Media Technologies, Wiss. Beitrag, best-lbiber}, pages = {131--154}, } @misc{iber_breeze_2016, address = {Hochschule Darmstadt}, title = {A {Breeze} of {Familiarity}}, abstract = {Apart from storing audio recordings as time-based series of discrete sample values in wave files, they can also be conserved as multidimensional datasets containing retrievable information e.g. about frequency and phase distributions at discrete points of time. In order to use these data sets to generate musical scores, the enormous amount of information needs to be reduced to the main frequency components and a limited set of dynamic levels. Also a time grid needs to be imposed in order for the score to be playable by musicians. What do we perceive? Does our Proustian Memory take over and set us onto the trails of our emotional past or do we perceive a music completely new to us?
And: What, if we get to know about the underlying original music?}, author = {Iber, Michael}, month = sep, year = {2016}, keywords = {Forschungsgruppe Media Computing, Institut für Creative Media Technologies}, } @misc{iber_sonification_2015, address = {Aveiro, Portugal}, type = {Keynote}, title = {Sonification and {Music} - the thin line between art and information}, url = {http://eaw.web.ua.pt}, language = {english}, urldate = {2016-12-01}, author = {Iber, Michael}, month = sep, year = {2015}, keywords = {Forschungsgruppe Media Computing, Institut für Creative Media Technologies}, } @misc{endemann_musizierende_2016, title = {Die musizierende {Schuhsohle} - {So} klingt {Ihre} persönliche {Gangstörung}}, url = {http://oe1.orf.at/programm/454122}, journal = {radiodoktor – das ö1 gesundheitsmagazin}, publisher = {Ö1}, author = {Endemann, Michaela}, collaborator = {Iber, Michael and Kiselka, Anita}, year = {2016}, keywords = {Institut für Gesundheitswissenschaften, Publikationstyp Präsentation, SP IGW Clinical \& Healthcare Research, Studiengang Physiotherapie}, } @inproceedings{rottermanner_requirements_2017, address = {Phoenix, Arizona USA}, title = {Requirements {Analysis} \& {Concepts} for {Future} {European} {Air} {Traffic} {Control} {Systems}}, url = {http://mc.fhstp.ac.at/sites/default/files/publications/Rottermanner_2017_Requirements.pdf}, abstract = {Since decades, Air Traffic Control Officers (ATCOs) are working with 2D representations of the airspace (RADAR). Based on the Single European Sky Air traffic management Research (SESAR), some planned innovations will change the way, air traffic will be handled in the future. Therefore, the paper first presents a requirements analysis in order to understand the current workflow as well as the necessities and concerns of ATCOs for future developments. Second, the paper summarizes the conception \& evaluation phase for representing air traffic not only in 2D. The results show that a user-centered design approach is essential to involve end users as much as possible to avoid undesirable development. In end user interviews, ATCOs were very open to presented hardware and interaction techniques. The focus group with more concrete concepts then resulted in uncertainties especially regarding 3D representations of complex air traffic.}, booktitle = {Workshop {Vis} in {Practice} - {Visualization} {Solutions} in the {Wild}, {IEEE} {VIS} 2017}, publisher = {IEEE}, author = {Rottermanner, Gernot and Wagner, Markus and Settgast, Volker and Grantz, Volker and Iber, Michael and Kriegshaber, Ursula and Aigner, Wolfgang and Judmaier, Peter and Eggeling, Eva}, month = oct, year = {2017}, note = {Projekt: VAST}, keywords = {2017, Forschungsgruppe Digital Technologies, Forschungsgruppe Media Computing, Institut für Creative Media Technologies, Publikationstyp Schriftpublikation, User-Centered Design, air traffic control, peer-reviewed, safety critical environment}, }