I'm eager to build sustainable learning algorithms for real-world problems across various research fields,
such as continual learning, online learning, federated learning, self-supervised learning, video understanding,
multimodal learning, large language models, open-world problems, and reinforcement learning.
Please feel free to reach out.
Efficient Deep Learning: Continual Learning, Federated Learning, and Neural Network Compression
Egocentric Vision: Video Understanding and Multimodal Learning with video, audio, and language information
Learning with Real-world Data: Un-/Self-/Semi-supervised Learning and Input Selective Training
My research mainly focuses on developing lifelong-evolving and efficient deep learning algorithms
for deploying sustainable on-device artificial general intelligence systems.
In particular, I have been tackling practical, real-world challenges
in application domains such as online/streaming learning, egocentric videos, and audio-video-text multimodal problems.
New Preprints
[P6] Text-Guided Token Selection for Text-to-Image Synthesis with Token-based Diffusion Models
Jaewoong Lee*, Sangwon Jang*, Jaehyeong Jo, Jaehong Yoon, Yunji Kim, Jin-Hwa Kim, Jung-Woo Ha, Sung Ju Hwang
Preprint, 2023
[P5] Continual Learners are Incremental Model Generalizers
Jaehong Yoon, Sung Ju Hwang, and Yue Cao
Preprint, 2023
[P4] Efficient Video Representation Learning via Masked Video Modeling with Motion-centric Token Selection
Sunil Hwang*, Jaehong Yoon*, Youngwan Lee, and Sung Ju Hwang
Preprint, 2022
@article{hwang2022efficient,
title={Efficient Video Representation Learning via Masked Video Modeling with Motion-centric Token Selection},
author={Hwang, Sunil and Yoon, Jaehong and Lee, Youngwan and Hwang, Sung Ju},
journal={arXiv preprint arXiv:2211.10636},
year={2022},
}
[P3] Personalized Subgraph Federated Learning
Jinheon Baek*, Wonyong Jeong*, Jiongdao Jin, Jaehong Yoon, and Sung Ju Hwang
Preprint, 2022
Publications
On the Soft-Subnetwork for Few-shot Class Incremental Learning
Haeyong Kang, Jaehong Yoon, Sultan R. H. Madjid, Sung Ju Hwang, and Chang D. Yoo
ICLR 2023
@inproceedings{kang2023on,
title={On the Soft-Subnetwork for Few-shot Class Incremental Learning},
author={Kang, Haeyong and Yoon, Jaehong and Madjid, Sultan Rizky Hikmawan and Hwang, Sung Ju and Yoo, Chang D},
booktitle={International Conference on Learning Representations},
year={2023},
url={https://openreview.net/forum?id=z57WK5lGeHd}
}
[W1] BiTAT: Neural Network Binarization with Task-dependent Aggregated Transformation
Geon Park*, Jaehong Yoon*, Haiyang Zhang, Xing Zhang, Sung Ju Hwang, and Yonina Eldar
ECCV 2022 Workshop on Computational Aspects of Deep Learning (CADL)
@article{park2022bitat,
title={BiTAT: Neural Network Binarization with Task-dependent Aggregated Transformation},
author={Park, Geon and Yoon, Jaehong and Zhang, Haiyang and Zhang, Xing and Hwang, Sung Ju and Eldar, Yonina C},
journal={arXiv preprint arXiv:2207.01394},
year={2022},
}
[C9] Bitwidth Heterogeneous Federated Learning with Progressive Weight Dequantization
Jaehong Yoon*, Geon Park*, Wonyong Jeong, and Sung Ju Hwang
ICML 2022
@inproceedings{yoon2022bitwidth,
title={Bitwidth Heterogeneous Federated Learning with Progressive Weight Dequantization},
author={Yoon, Jaehong and Park, Geon and Jeong, Wonyong and Hwang, Sung Ju},
booktitle={International Conference on Machine Learning},
pages={25552--25565},
year={2022},
organization={PMLR}
}
[C8] Forget-free Continual Learning with Winning Subnetworks
Haeyong Kang*, Rusty J. L. Mina*, Sultan R. H. Madjid, Jaehong Yoon, Mark Hasegawa-Johnson, Sung Ju Hwang, and Chang D. Yoo
ICML 2022
@inproceedings{kang2022forget,
title={Forget-free Continual Learning with Winning Subnetworks},
author={Kang, Haeyong and Mina, Rusty John Lloyd and Madjid, Sultan Rizky Hikmawan and Yoon, Jaehong and Hasegawa-Johnson, Mark and Hwang, Sung Ju and Yoo, Chang D},
booktitle={International Conference on Machine Learning},
pages={10734--10750},
year={2022},
organization={PMLR}
}
[C7] Representational Continuity for Unsupervised Continual Learning
Divyam Madaan, Jaehong Yoon, Yuanchun Li, Yunxin Liu, and Sung Ju Hwang
ICLR 2022
@inproceedings{madaan2022rethinking,
title={Representational Continuity for Unsupervised Continual Learning},
author={Madaan, Divyam and Yoon, Jaehong and Li, Yuanchun and Liu, Yunxin and Hwang, Sung Ju},
booktitle={International Conference on Learning Representations},
year={2022},
url={https://openreview.net/forum?id=9Hrka5PA7LW}
}
[C6] Online Coreset Selection for Rehearsal-based Continual Learning
Jaehong Yoon, Divyam Madaan, Eunho Yang, and Sung Ju Hwang
ICLR 2022
Scalable and Order-robust Continual Learning with Additive Parameter Decomposition
Jaehong Yoon, Saehoon Kim, Eunho Yang, and Sung Ju Hwang
ICLR 2020
@inproceedings{Yoon2020Scalable,
title={Scalable and Order-robust Continual Learning with Additive Parameter Decomposition},
author={Yoon, Jaehong and Kim, Saehoon and Yang, Eunho and Hwang, Sung Ju},
booktitle={International Conference on Learning Representations},
year={2020},
url={https://openreview.net/forum?id=r1gdj2EKPB}
}
[C2] Lifelong Learning with Dynamically Expandable Networks
Jaehong Yoon, Eunho Yang, Jeongtae Lee, and Sung Ju Hwang
ICLR 2018
[C1] Combined Group and Exclusive Sparsity for Deep Neural Networks
Jaehong Yoon and Sung Ju Hwang
ICML 2017
@inproceedings{yoon2017combined,
title={Combined group and exclusive sparsity for deep neural networks},
author={Yoon, Jaehong and Hwang, Sung Ju},
booktitle={International Conference on Machine Learning},
pages={3958--3966},
year={2017},
organization={PMLR}
}
[P2] Rapid Structural Pruning of Neural Networks with Set-based Task-Adaptive Meta-Pruning
Minyoung Song, Jaehong Yoon, Eunho Yang, and Sung Ju Hwang
Preprint, 2020
[P1] Adaptive Network Sparsification with Dependent Variational Beta-Bernoulli Dropout
Juho Lee, Saehoon Kim, Jaehong Yoon, Hae Beom Lee, Eunho Yang, and Sung Ju Hwang
Preprint, 2018
@article{lee2019adaptive,
title={Adaptive Network Sparsification with Dependent Variational Beta-Bernoulli Dropout},
author={Lee, Juho and Kim, Saehoon and Yoon, Jaehong and Lee, Hae Beom and Yang, Eunho and Hwang, Sung Ju},
journal={arXiv preprint arXiv:1805.10896},
year={2018},
}