Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1237-0
Hansol X. Ryu, M. Srinivasan
{"title":"Deriving Loss Functions for Regression and Classification from Humans","authors":"Hansol X. Ryu, M. Srinivasan","doi":"10.32470/ccn.2022.1237-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1237-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"154 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"134206068","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1173-0
Subhrasankar Chatterjee, Debasis Samanta
{"title":"VOneCAE: Interpreting through the eyes of V1","authors":"Subhrasankar Chatterjee, Debasis Samanta","doi":"10.32470/ccn.2022.1173-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1173-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"93 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"133837483","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1217-0
Hui-Yuan Miao, Hojin Jang, F. Tong
{"title":"How many non-linear computations are required for CNNs to account for the response properties of V1?","authors":"Hui-Yuan Miao, Hojin Jang, F. Tong","doi":"10.32470/ccn.2022.1217-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1217-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"30 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"133886988","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1270-0
Yaoguang Jiang, M. Platt
{"title":"The neurobiology of strategic competition","authors":"Yaoguang Jiang, M. Platt","doi":"10.32470/ccn.2022.1270-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1270-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"27 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"134618448","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1153-0
R. Ramezan, Mei-Ching Chen, Martin Lysy, P. Marriott
{"title":"A Multivariate Point Process Model for Neural Spike Trains","authors":"R. Ramezan, Mei-Ching Chen, Martin Lysy, P. Marriott","doi":"10.32470/ccn.2022.1153-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1153-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"52 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"121665736","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1150-0
Vanessa Ceja, Yussuf Ezzeldine, Megan A. K. Peters
{"title":"Models of confidence to facilitate engaging task designs","authors":"Vanessa Ceja, Yussuf Ezzeldine, Megan A. K. Peters","doi":"10.32470/ccn.2022.1150-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1150-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"10 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"114848237","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1064-0
Caroline Bévalot, Florent Meyniel
{"title":"Dissociation Between The Use of Implicit and Explicit Priors in Bayesian Perceptual Inference","authors":"Caroline Bévalot, Florent Meyniel","doi":"10.32470/ccn.2022.1064-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1064-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"72 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"116086188","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1156-0
R. Murray, Devin Kehoe
Deep neural networks have made rapid advances in object recognition, but progress has mostly been made through experimentation, with little guidance from normative theories. Here we use ideal observer theory and associated methods to compare current network performance to theoretical limits on performance. We measure network performance and ideal observer performance on a modified ImageNet task, where model observers view samples from a limited number of object categories, in several levels of external white Gaussian noise. We find that although current networks achieve 90% performance or better on the standard ImageNet task, the ideal observer performs vastly better on the more limited task we consider here. The networks' "calculation efficiency", a measure of the extent to which they use all available information to perform a task, is on the order of 10^-5, an exceedingly small value. We consider reasons why efficiency may be so low, and outline further uses of ideal observers and noise methods to understand network performance.
{"title":"Efficiency of object recognition networks on an absolute scale","authors":"R. Murray, Devin Kehoe","doi":"10.32470/ccn.2022.1156-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1156-0","url":null,"abstract":"Deep neural networks have made rapid advances in object recognition, but progress has mostly been made through experimentation, with little guidance from normative theories. Here we use ideal observer theory and associated methods to compare current network performance to theoretical limits on performance. We measure network performance and ideal observer performance on a modified ImageNet task, where model observers view samples from a limited number of object categories, in several levels of external white Gaussian noise. We find that although current networks achieve 90% performance or better on the standard ImageNet task, the ideal observer performs vastly better on the more limited task we consider here. The networks' \"calculation efficiency\", a measure of the extent to which they use all available information to perform a task, is on the order of 10^-5, an exceedingly small value. We consider reasons why efficiency may be so low, and outline further uses of ideal observers and noise methods to understand network performance.","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"31 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"121171190","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1079-0
Sebastian Hellmann, Michael Zehetleitner, Manuel Rausch
{"title":"Dynamical Models of Decision Confidence in Visual Perception: Implementation and Comparison","authors":"Sebastian Hellmann, Michael Zehetleitner, Manuel Rausch","doi":"10.32470/ccn.2022.1079-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1079-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"92 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"121311058","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}
Pub Date : 1900-01-01 DOI: 10.32470/ccn.2022.1114-0
C. Liao, Masataka Sawayama, Bei Xiao
{"title":"Unsupervised learning of translucent material appearance using StyleGAN","authors":"C. Liao, Masataka Sawayama, Bei Xiao","doi":"10.32470/ccn.2022.1114-0","DOIUrl":"https://doi.org/10.32470/ccn.2022.1114-0","url":null,"abstract":"","PeriodicalId":341186,"journal":{"name":"2022 Conference on Cognitive Computational Neuroscience","volume":"16 1","pages":"0"},"PeriodicalIF":0.0,"publicationDate":"1900-01-01","publicationTypes":"Journal Article","fieldsOfStudy":null,"isOpenAccess":false,"openAccessPdf":"","citationCount":null,"resultStr":null,"platform":"Semanticscholar","paperid":"114734427","PeriodicalName":null,"FirstCategoryId":null,"ListUrlMain":null,"RegionNum":0,"RegionCategory":"","ArticlePicture":[],"TitleCN":null,"AbstractTextCN":null,"PMCID":"","EPubDate":null,"PubModel":null,"JCR":null,"JCRName":null,"Score":null,"Total":0}