{"title":"Copyright Page","authors":"","doi":"10.1109/etcea57049.2022.10009740","DOIUrl":"https://doi.org/10.1109/etcea57049.2022.10009740","url":null,"abstract":"","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"321 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-29","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"124549846","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Dynamic Facial Recognition in Autism: The Case of Happy Face Expression","authors":"M. B. Almourad, Emad Bataineh, Zelal Wattar","doi":"10.1109/ICTACSE50438.2022.10009884","DOIUrl":"https://doi.org/10.1109/ICTACSE50438.2022.10009884","url":null,"abstract":"Through the use of eye tracking equipment, this study compares the behavior and gaze patterns of persons who have autism (AP) and participants who are typically developing (ND). Participants in the experiment are given access to a video with a happy face expression. The participants' gaze patterns are recorded and tracked using eye tracking technology. We discovered a substantial difference in both participant groups' visual behavior through heat map analysis, primarily in the stimulus's area of interest. The eyes and mouth area of a happy face expression received minimal attention from AP, who were primarily interested in non-facial regions. On the other hand, the happy face stimuli's mouth and eyes catch the attention of ND individuals. These findings can be used to create new techniques for early ASD detection and for enhancing the skills and abilities of autistic children.","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"1 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-29","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"129593550","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"An Automatic Epileptic Seizure Recognition Using Two-Dimensional Convolutional Neural Network and Scalp EEG Signals","authors":"Niloufar Asghari, S. A. Hosseini","doi":"10.1109/ICTACSE50438.2022.10009651","DOIUrl":"https://doi.org/10.1109/ICTACSE50438.2022.10009651","url":null,"abstract":"Epilepsy affects many people around the world. Experts usually detect epileptic seizures manually, but an intelligent system is required as it is a tedious and time-consuming process and may cause human errors. In recent years, deep learning has been used in various medical applications, but still, it has not reached its maximum development potential. This paper presents a simple deep learning-based model. ElectroEncephaloGraphy (EEG) signals are plotted and directly fed into a convolutional neural network (CNN) model as input data. Through a CNN in a binary classification problem, the model learns to distinguish seizures from non-seizures. The proposed method is superior and achieved 100% accuracy on the small sample of the Bonn University scalp EEG dataset.","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"7 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-29","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"132167213","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"SRA System Design: Using Deep Learning to Analyse Experimental data for Speech Researchers","authors":"Tianshi Xie, Dallin J Bailey, Cheryl D. Seals","doi":"10.1109/ICTACSE50438.2022.10009746","DOIUrl":"https://doi.org/10.1109/ICTACSE50438.2022.10009746","url":null,"abstract":"Speech researchers expend tremendous effort when measuring and analyzing audio data from experimental participants and evaluating large amounts of audio data to aid speech research. In traditional assessment methods, researchers listen verbatim to long audio files from participants to find the keywords needed for the assessment. This approach is very tedious and time intensive for speech researchers. To improve research efficiency and efficacy, we designed a system called SRA (Speech Recognition Assistant) based on the DeepSpeech model. SRA effectively supports speech researchers in the evaluation of participant experimental audio data. The purpose is to achieve the following: (i) simplify the workflow for analyzing data and (ii) reduce the time cost for researchers to extract data; (iii) provide a framework with potential for adaptation for use in other fields, such as speech science, audiology, and hearing science.","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"68 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-29","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"124030422","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"A Reverse Engineering Tool that Directly Injects Shellcodes to the Code Caves in Portable Executable Files","authors":"K. Açıcı, Güney Uğurlu","doi":"10.1109/ICTACSE50438.2022.10009732","DOIUrl":"https://doi.org/10.1109/ICTACSE50438.2022.10009732","url":null,"abstract":"Code caves are used in cybersecurity and reverse engineering and describe the space in a PE file that consists of sequential and random unused or empty bytes. Malware writers and hackers design malware to inject shellcode into these code caves and can create backdoors on computers through the shellcodes they inject. Apart from malicious use, the benefits of injecting code into code caves should also be considered. When software developers develop new software, they can use code caves and code injection to make minor changes to the compiled software. With the reverse engineering tool we developed named CodeCaveInjection, we demonstrated how to inject shell codes with 2 different methods and made this process easier.","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"15 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-29","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"125073713","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Committees/Boards","authors":"","doi":"10.1109/icrera55966.2022.9922795","DOIUrl":"https://doi.org/10.1109/icrera55966.2022.9922795","url":null,"abstract":"","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"73 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2022-09-18","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"121300835","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Organizers","authors":"Antonios Liapis","doi":"10.1609/aiide.v9i2.12588","DOIUrl":"https://doi.org/10.1609/aiide.v9i2.12588","url":null,"abstract":"\u0000 \u0000 List of organizers of the Artificial Intelligence and Game Aesthetics 2013 workshop held at AIIDE-13.\u0000 \u0000","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"9 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2021-06-30","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"125054618","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
{"title":"Conference Program Summary","authors":"H. Schmidt","doi":"10.1109/icrera.2017.8191075","DOIUrl":"https://doi.org/10.1109/icrera.2017.8191075","url":null,"abstract":"","PeriodicalId":301767,"journal":{"name":"2022 International Conference on Theoretical and Applied Computer Science and Engineering (ICTASCE)","volume":"380 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"2018-12-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"123345277","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}