@comment{========================== 2024 ==========================}

@inproceedings{pote2024classification,
  abbr        = {CPAL},
  author      = {Pote, Tejas and Adnan, Mohammed and Yargic, Yigit and Ioannou, Yani},
  title       = {Classification Bias on a Data Diet},
  booktitle   = {Conference on Parsimony and Learning (Recent Spotlight Track)},
  venue       = {Hong Kong},
  eventdate   = {2024-01-03/2024-01-06},
  year        = {2024},
  url         = {https://openreview.net/pdf?id=7n6CQrcJI9},
  poster      = {https://www.calgaryml.com/assets/pdf/Classification_Bias_on_a_Data_Diet_Poster.pdf},
  bibtex_show = {true},
  selected    = {true},
}

@inproceedings{lasby2024srigl,
  abbr        = {ICLR},
  author      = {Lasby, Mike and Golubeva, Anna and Evci, Utku and Nica, Mihai and Ioannou, Yani},
  title       = {Dynamic Sparse Training with Structured Sparsity},
  booktitle   = {International Conference on Learning Representations ({ICLR})},
  venue       = {Vienna, Austria},
  eventdate   = {2024-05-07/2024-05-11},
  year        = {2024},
  arxivid     = {2305.02299},
  eprint      = {2305.02299},
  eprinttype  = {arXiv},
  bibtex_show = {true},
  selected    = {true},
}

@comment{========================== 2023 ==========================}

@article{galloway2023bounding,
  abbr        = {TMLR},
  author      = {Galloway, Angus and Golubeva, Anna and Salem, Mahmoud and Nica, Mihai and Ioannou, Yani A. and Taylor, Graham W.},
  title       = {Bounding generalization error with input compression: An empirical study with infinite-width networks},
  journal     = {Transactions on Machine Learning Research (TMLR)},
  volume      = {2023},
  year        = {2023},
  url         = {https://openreview.net/forum?id=jbZEUtULft},
  arxivid     = {2207.09408},
  eprint      = {2207.09408},
  eprinttype  = {arXiv},
  bibtex_show = {true},
}

@inproceedings{DBLP:conf/ai/MohammadizadehM23,
  author      = {Mohammadizadeh, Mahdi and Mozhdehi, Arash and Ioannou, Yani and Wang, Xin},
  editor      = {Soares, Am{\'{\i}}lcar and Zulkernine, Farhana H. and Dividino, Renata and Rabbany, Reihaneh and Ye, Qiang and Beach, David and Ali, Karim},
  title       = {Meta-GCN: {A} Dynamically Weighted Loss Minimization Method for Dealing with the Data Imbalance in Graph Neural Networks},
  booktitle   = {36th Canadian Conference on Artificial Intelligence, Canadian AI, {CANAI} 2023, Montreal, Canada, June 5-9, 2023, Proceedings},
  publisher   = {Canadian Artificial Intelligence Association},
  year        = {2023},
  doi         = {10.21428/594757db.0041f830},
  bibtex_show = {true},
}

@comment{========================== 2022 ==========================}

@inproceedings{evci2022gradientflowsparse,
  abbr        = {AAAI},
  author      = {Evci, Utku and Ioannou, Yani A. and Keskin, Cem and Dauphin, Yann},
  title       = {Gradient Flow in Sparse Neural Networks and How Lottery Tickets Win},
  booktitle   = {Proceedings of the 36th AAAI Conference on Artificial Intelligence},
  venue       = {Vancouver, BC, Canada},
  eventdate   = {2022-02-22/2022-03-01},
  month       = feb,
  year        = {2022},
  arxivid     = {2010.03533},
  eprint      = {2010.03533},
  eprinttype  = {arXiv},
  poster      = {https://doi.org/10.5281/zenodo.6047581},
  bibtex_show = {true},
  selected    = {true},
}

@inproceedings{golubeva2022condensed,
  abbr        = {SNN},
  author      = {Golubeva, Anna and Lasby, Mike and Ioannou, Yani and Nica, Mihai},
  title       = {Condensing Sparse Layers},
  booktitle   = {2nd Workshop on Sparsity in Neural Networks},
  venue       = {Virtual},
  eventdate   = {2022-07-13},
  month       = jul,
  day         = {13},
  year        = {2022},
  poster      = {https://drive.google.com/open?id=1QQRPaFDhyEZEVKEZ73AJdP2ISDgYdu5F},
  bibtex_show = {true},
}

@inproceedings{mohammed2022monitoringshortcuts,
  abbr        = {ICML},
  author      = {Adnan, Mohammed and Ioannou, Yani and Tsai, Chuan-Yung and Galloway, Angus and Tizhoosh, H. R. and Taylor, Graham W.},
  title       = {Monitoring Shortcut Learning using Mutual Information},
  booktitle   = {ICML 2022 Workshop on Spurious Correlations, Invariance, and Stability},
  venue       = {Baltimore, MD, USA},
  eventdate   = {2022-07-22},
  month       = jul,
  day         = {22},
  year        = {2022},
  arxivid     = {2206.13034},
  eprint      = {2206.13034},
  eprinttype  = {arXiv},
  bibtex_show = {true},
}

@comment{========================== 2021 ==========================}

@inproceedings{mohammed2021clusteringselfdistillation,
  abbr        = {NeurIPS},
  author      = {Adnan, Mohammed and Ioannou, Yani and Tsai, Chuan-Yung and Taylor, Graham},
  title       = {Domain-Agnostic Clustering with Self-Distillation},
  booktitle   = {2nd NeurIPS Workshop on Self-Supervised Learning: Theory and Practice},
  venue       = {Virtual Conference},
  eventdate   = {2021-11-23},
  month       = nov,
  day         = {23},
  year        = {2021},
  arxivid     = {2111.12170},
  eprint      = {2111.12170},
  eprinttype  = {arXiv},
  bibtex_show = {true},
}