{"title":"Task Classification using Eye Movements and Graph Neural Networks","authors":"Jarod P. Hartley","doi":"10.1145/3649902.3655097","DOIUrl":"https://doi.org/10.1145/3649902.3655097","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"7 8","pages":"28:1-28:3"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141267441","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Characterizing Learners' Complex Attentional States During Online Multimedia Learning Using Eye-tracking, Egocentric Camera, Webcam, and Retrospective recalls","authors":"Prasanth Chandran, Yifeng Huang, J. Munsell, Brian Howatt, Brayden Wallace, Lindsey Wilson, Sidney K. D’Mello, Minh Hoai, N. S. Rebello, Lester C. Loschky","doi":"10.1145/3649902.3653939","DOIUrl":"https://doi.org/10.1145/3649902.3653939","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"4 12","pages":"68:1-68:7"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141267606","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Joint Attention on the Future: Pro-Ecological Attitudes Change In Collaboration","authors":"Iga Szwoch","doi":"10.1145/3649902.3655100","DOIUrl":"https://doi.org/10.1145/3649902.3655100","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"2 4","pages":"26:1-26:3"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141268007","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Impact of reward expectation on pupillary change during an adaptive two player card game","authors":"Ryo Yasuda, Minoru Nakayama","doi":"10.1145/3649902.3655649","DOIUrl":"https://doi.org/10.1145/3649902.3655649","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"10 4","pages":"43:1-43:2"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141268099","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Analyzing and Interpreting Eye Movements in C++: Using Holistic Models of Image Perception","authors":"Florian Hauser, Lisa Grabinger, Timur Ezer, J. Mottok, Hans Gruber","doi":"10.1145/3649902.3655093","DOIUrl":"https://doi.org/10.1145/3649902.3655093","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"6 6","pages":"72:1-72:7"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141266582","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"The role of stress in silent reading","authors":"Kristina Cergol, M. Palmović","doi":"10.1145/3649902.3656492","DOIUrl":"https://doi.org/10.1145/3649902.3656492","url":null,"abstract":"","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"3 12","pages":"83:1-83:5"},"PeriodicalIF":0.0,"publicationDate":"2024-06-04","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"141266654","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Visual feature extraction via eye tracking for saliency driven 2D/3D registration","authors":"A. Chung, F. Deligianni, Xiao-Peng Hu, Guang-Zhong Yang","doi":"10.1145/968363.968371","DOIUrl":"https://doi.org/10.1145/968363.968371","url":null,"abstract":"This paper presents a new technique for extracting visual saliency from experimental eye tracking data. An eye-tracking system is employed to determine which features that a group of human observers considered to be salient when viewing a set of video images. With this information, a biologically inspired saliency map is derived by transforming each observed video image into a feature space representation. By using a feature normalisation process based on the relative abundance of visual features within the background image and those dwelled on eye tracking scan paths, features related to visual attention are determined. These features are then back projected to the image domain to determine spatial areas of interest for unseen video images. The strengths and weaknesses of the method are demonstrated with feature correspondence for 2D to 3D image registration of endoscopy videos with computed tomography data. The biologically derived saliency map is employed to provide an image similarity measure that forms the heart of the 2D/3D registration method. It is shown that by only processing selective regions of interest as determined by the saliency map, rendering overhead can be greatly reduced. Significant improvements in pose estimation efficiency can be achieved without apparent reduction in registration accuracy when compared to that of using a non-saliency based similarity measure.","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"27 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2004-03-22","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"123587953","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Mental imagery in problem solving: an eye tracking study","authors":"Daesub Yoon, N. Hari Narayanan","doi":"10.1145/968363.968382","DOIUrl":"https://doi.org/10.1145/968363.968382","url":null,"abstract":"Cognitive models and empirical studies of problem solving in visuo-spatial and causal domains suggest that problem solving tasks in such domains invoke cognitive processes involving mental animation and imagery. If these internal processes are externally manifested in the form of eye movements, such tasks present situations in which the trajectory of a user's visual attention can provide clues regarding his or her information needs to an Attentive User Interface [Vertegaal 2002]. In this paper, we briefly review research related to problem solving that involves mental imagery, and describe an experiment that looked for evidence and effects of an imagery strategy in problem solving. We eye-tracked 90 subjects solving two causal reasoning problems, one in which a diagram of the problem appeared on the stimulus display, and a second related problem that was posed on a blank display. Results indicated that 42% of the subjects employed mental imagery and visually scanned the display in a correspondingly systematic fashion. This suggests that information displays that respond to a user's visual attention trajectory, a kind of Attentive User Interface, are more likely to benefit this class of users.","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"42 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2004-03-22","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"122648496","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Effects of feedback on eye typing with a short dwell time","authors":"P. Majaranta, A. Aula, Kari-Jouko Räihä","doi":"10.1145/968363.968390","DOIUrl":"https://doi.org/10.1145/968363.968390","url":null,"abstract":"Eye typing provides means of communication especially for people with severe disabilities. Recent research indicates that the type of feedback impacts typing speed, error rate, and the user's need to switch her gaze between the on-screen keyboard and the typed text field. The current study focuses on the issues of feedback when a short dwell time (450 ms vs. 900 ms in a previous study) is used. Results show that the findings obtained using longer dwell times only partly apply for shorter dwell times. For example, with a short dwell time, spoken feedback results in slower text entry speed and double entry errors. A short dwell time requires sharp and clear feedback that supports the typing rhythm.","PeriodicalId":127538,"journal":{"name":"Eye Tracking Research & Application","volume":"62 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2004-03-22","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"128297745","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}