Pub Date : 2021-11-19 DOI: 10.1007/s12193-021-00385-9
F. Martins, E. Naves, Y. Morère, A. D. de Sá
{"title":"Preliminary assessment of a multimodal electric-powered wheelchair simulator for training of activities of daily living","authors":"F. Martins, E. Naves, Y. Morère, A. D. de Sá","doi":"10.1007/s12193-021-00385-9","DOIUrl":"https://doi.org/10.1007/s12193-021-00385-9","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"193 - 205"},"PeriodicalIF":2.9,"publicationDate":"2021-11-19","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"48453499","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-10-30 DOI: 10.1007/s12193-021-00384-w
F. Fontana, Francesco Muzzolini, D. Rocchesso
{"title":"Importance of force feedback for following uneven virtual paths with a stylus","authors":"F. Fontana, Francesco Muzzolini, D. Rocchesso","doi":"10.1007/s12193-021-00384-w","DOIUrl":"https://doi.org/10.1007/s12193-021-00384-w","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"183 - 191"},"PeriodicalIF":2.9,"publicationDate":"2021-10-30","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"41613732","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-10-18 DOI: 10.1007/s12193-021-00381-z
M. Khalaji, M. Aghdaei, A. Farsi, A. Piras
{"title":"The effect of eye movement sonification on visual search patterns and anticipation in novices","authors":"M. Khalaji, M. Aghdaei, A. Farsi, A. Piras","doi":"10.1007/s12193-021-00381-z","DOIUrl":"https://doi.org/10.1007/s12193-021-00381-z","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"173 - 182"},"PeriodicalIF":2.9,"publicationDate":"2021-10-18","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"44376858","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-10-11 DOI: 10.1007/s12193-021-00382-y
L. Cuturi, G. Cappagli, N. Yiannoutsou, S. Price, M. Gori
{"title":"Informing the design of a multisensory learning environment for elementary mathematics learning","authors":"L. Cuturi, G. Cappagli, N. Yiannoutsou, S. Price, M. Gori","doi":"10.1007/s12193-021-00382-y","DOIUrl":"https://doi.org/10.1007/s12193-021-00382-y","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"155 - 171"},"PeriodicalIF":2.9,"publicationDate":"2021-10-11","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"48945586","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-08-10 DOI: 10.1007/s12193-021-00380-0
F. Liarokapis, S. Mammen, A. Vourvopoulos
{"title":"Advanced multimodal interaction techniques and user interfaces for serious games and virtual environments","authors":"F. Liarokapis, S. Mammen, A. Vourvopoulos","doi":"10.1007/s12193-021-00380-0","DOIUrl":"https://doi.org/10.1007/s12193-021-00380-0","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"15 1","pages":"255 - 256"},"PeriodicalIF":2.9,"publicationDate":"2021-08-10","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"https://sci-hub-pdf.com/10.1007/s12193-021-00380-0","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"45515001","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-08-09 DOI: 10.1007/s12193-021-00379-7
Mrim M. Alnfiai
{"title":"TapCalculator: nonvisual touchscreen calculator for visually impaired people preliminary user study","authors":"Mrim M. Alnfiai","doi":"10.1007/s12193-021-00379-7","DOIUrl":"https://doi.org/10.1007/s12193-021-00379-7","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"143 - 154"},"PeriodicalIF":2.9,"publicationDate":"2021-08-09","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"https://sci-hub-pdf.com/10.1007/s12193-021-00379-7","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"45984970","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-07-27 DOI: 10.1007/s12193-021-00378-8
Arthur Paté, Gaspard Farge, Benjamin K. Holtzman, Anna C. Barth, Piero Poli, Lapo Boschi, Leif Karlstrom
Data visualization, and to a lesser extent data sonification, are classic tools to the scientific community. However, these two approaches are very rarely combined, although they are highly complementary: our visual system is good at recognizing spatial patterns, whereas our auditory system is better tuned for temporal patterns. In this article, data representation methods are proposed that combine visualization, sonification, and spatial audio techniques, in order to optimize the user’s perception of spatial and temporal patterns in a single display, to increase the feeling of immersion, and to take advantage of multimodal integration mechanisms. Three seismic data sets are used to illustrate the methods, covering different physical phenomena, time scales, spatial distributions, and spatio-temporal dynamics. The methods are adapted to the specificities of each data set, and to the amount of information that the designer wants to display. This leads to further developments, namely the use of audification with two time scales, the switch from pure audification to time-modulated noise, and the switch from pure audification to sonic icons. First user feedback from live demonstrations indicates that the methods presented in this article seem to enhance the perception of spatio-temporal patterns, which is a key parameter to the understanding of seismically active systems, and a step towards apprehending the processes that drive this activity.
{"title":"Combining audio and visual displays to highlight temporal and spatial seismic patterns","authors":"Arthur Paté, Gaspard Farge, Benjamin K. Holtzman, Anna C. Barth, Piero Poli, Lapo Boschi, Leif Karlstrom","doi":"10.1007/s12193-021-00378-8","DOIUrl":"https://doi.org/10.1007/s12193-021-00378-8","url":null,"abstract":"<p>Data visualization, and to a lesser extent data sonification, are classic tools to the scientific community. However, these two approaches are very rarely combined, although they are highly complementary: our visual system is good at recognizing spatial patterns, whereas our auditory system is better tuned for temporal patterns. In this article, data representation methods are proposed that combine visualization, sonification, and spatial audio techniques, in order to optimize the user’s perception of spatial and temporal patterns in a single display, to increase the feeling of immersion, and to take advantage of multimodal integration mechanisms. Three seismic data sets are used to illustrate the methods, covering different physical phenomena, time scales, spatial distributions, and spatio-temporal dynamics. The methods are adapted to the specificities of each data set, and to the amount of information that the designer wants to display. This leads to further developments, namely the use of audification with two time scales, the switch from pure audification to time-modulated noise, and the switch from pure audification to sonic icons. 
First user feedback from live demonstrations indicates that the methods presented in this article seem to enhance the perception of spatio-temporal patterns, which is a key parameter to the understanding of seismically active systems, and a step towards apprehending the processes that drive this activity.</p>","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"45 1","pages":""},"PeriodicalIF":2.9,"publicationDate":"2021-07-27","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"138508815","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-07-02 DOI: 10.1007/s12193-021-00376-w
Giles Hamilton-Fletcher, James Alvarez, Marianna Obrist, Jamie Ward
{"title":"SoundSight: a mobile sensory substitution device that sonifies colour, distance, and temperature","authors":"Giles Hamilton-Fletcher, James Alvarez, Marianna Obrist, Jamie Ward","doi":"10.1007/s12193-021-00376-w","DOIUrl":"https://doi.org/10.1007/s12193-021-00376-w","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"107 - 123"},"PeriodicalIF":2.9,"publicationDate":"2021-07-02","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"https://sci-hub-pdf.com/10.1007/s12193-021-00376-w","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"42978218","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
In automotive domain, operation of secondary tasks like accessing infotainment system, adjusting air conditioning vents, and side mirrors distract drivers from driving. Though existing modalities like gesture and speech recognition systems facilitate undertaking secondary tasks by reducing duration of eyes off the road, those often require remembering a set of gestures or screen sequences. In this paper, we have proposed two different modalities for drivers to virtually touch the dashboard display using a laser tracker with a mechanical switch and an eye gaze switch. We compared performances of our proposed modalities against conventional touch modality in automotive environment by comparing pointing and selection times of representative secondary task and also analysed effect on driving performance in terms of deviation from lane, average speed, variation in perceived workload and system usability. We did not find significant difference in driving and pointing performance between laser tracking system and existing touchscreen system. Our result also showed that the driving and pointing performance of the virtual touch system with eye gaze switch was significantly better than the same with mechanical switch. We evaluated the efficacy of the proposed virtual touch system with eye gaze switch inside a real car and investigated acceptance of the system by professional drivers using qualitative research. The quantitative and qualitative studies indicated importance of using multimodal system inside car and highlighted several criteria for acceptance of new automotive user interface.
{"title":"A wearable virtual touch system for IVIS in cars","authors":"Gowdham Prabhakar, Priyam Rajkhowa, Dharmesh Harsha, Pradipta Biswas","doi":"10.1007/s12193-021-00377-9","DOIUrl":"https://doi.org/10.1007/s12193-021-00377-9","url":null,"abstract":"<p>In automotive domain, operation of secondary tasks like accessing infotainment system, adjusting air conditioning vents, and side mirrors distract drivers from driving. Though existing modalities like gesture and speech recognition systems facilitate undertaking secondary tasks by reducing duration of eyes off the road, those often require remembering a set of gestures or screen sequences. In this paper, we have proposed two different modalities for drivers to virtually touch the dashboard display using a laser tracker with a mechanical switch and an eye gaze switch. We compared performances of our proposed modalities against conventional touch modality in automotive environment by comparing pointing and selection times of representative secondary task and also analysed effect on driving performance in terms of deviation from lane, average speed, variation in perceived workload and system usability. We did not find significant difference in driving and pointing performance between laser tracking system and existing touchscreen system. Our result also showed that the driving and pointing performance of the virtual touch system with eye gaze switch was significantly better than the same with mechanical switch. We evaluated the efficacy of the proposed virtual touch system with eye gaze switch inside a real car and investigated acceptance of the system by professional drivers using qualitative research. 
The quantitative and qualitative studies indicated importance of using multimodal system inside car and highlighted several criteria for acceptance of new automotive user interface.</p>","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"46 3","pages":""},"PeriodicalIF":2.9,"publicationDate":"2021-06-22","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"138508813","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 2021-06-21 DOI: 10.1007/s12193-021-00375-x
Isabelle Su, Ian Hattwick, Christine Southworth, Evan Ziporyn, Ally Bisshop, R. Mühlethaler, Tomás Saraceno, M. Buehler
{"title":"Interactive exploration of a hierarchical spider web structure with sound","authors":"Isabelle Su, Ian Hattwick, Christine Southworth, Evan Ziporyn, Ally Bisshop, R. Mühlethaler, Tomás Saraceno, M. Buehler","doi":"10.1007/s12193-021-00375-x","DOIUrl":"https://doi.org/10.1007/s12193-021-00375-x","url":null,"abstract":"","PeriodicalId":17529,"journal":{"name":"Journal on Multimodal User Interfaces","volume":"16 1","pages":"71 - 85"},"PeriodicalIF":2.9,"publicationDate":"2021-06-21","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"https://sci-hub-pdf.com/10.1007/s12193-021-00375-x","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"52688825","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":3,"RegionCategory":"计算机科学","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}