From d978d35e57f3bee861849839d2fc84cb4accd080 Mon Sep 17 00:00:00 2001
From: Hwijoon Lim
Date: Thu, 2 May 2024 10:50:47 +0900
Subject: [PATCH] ES-MoE Accept

---
 _bibliography/papers.bib | 12 +++++++++++-
 _news/announcement_20.md |  7 +++++++
 2 files changed, 18 insertions(+), 1 deletion(-)
 create mode 100644 _news/announcement_20.md

diff --git a/_bibliography/papers.bib b/_bibliography/papers.bib
index 3aac29e0..4a7cb408 100644
--- a/_bibliography/papers.bib
+++ b/_bibliography/papers.bib
@@ -863,4 +863,14 @@ @article{park2024graph
   month={Aug},
   publisher={IEEE},
   abbr={ToN}
-}
\ No newline at end of file
+}
+@inproceedings{kim2024scaling,
+  title = {Scaling Beyond the {GPU} Memory Limit for Large {M}ixture-of-{E}xperts Model Training},
+  author = {Kim, Yechan and Lim, Hwijoon and Han, Dongsu},
+  booktitle = {Proceedings of the 41st International Conference on Machine Learning (To Appear)},
+  year = {2024},
+  volume = {162},
+  series = {Proceedings of Machine Learning Research},
+  month = {Jul},
+  abbr={ICML},
+}

diff --git a/_news/announcement_20.md b/_news/announcement_20.md
new file mode 100644
index 00000000..062eb6fc
--- /dev/null
+++ b/_news/announcement_20.md
@@ -0,0 +1,7 @@
+---
+layout: post
+date: 2024-05-02 10:00:00
+inline: true
+---
+
+Our paper titled **"Scaling Beyond the GPU Memory Limit for Large Mixture-of-Experts Model Training"** was accepted to ICML 2024. Congratulations Yechan, Hwijoon!
\ No newline at end of file