Update README.md
README.md
@@ -175,6 +175,6 @@ Our paper, detailing the efficient training methods for MoE models using Scale-U
 @article{AquilaMoE2024,
   title={AquilaMoE: Efficient Training for MoE Models with Scale-Up and Scale-Out Strategies},
   author={{Language Foundation Model \& Software Team, Beijing Academy of Artificial Intelligence (BAAI)}},
-  journal={arXiv preprint arXiv:
+  journal={arXiv preprint arXiv:arxiv.org/abs/2408.06567},
   year={2024}
 }