Bo1015 committed on
Commit
51ded4e
·
verified ·
1 Parent(s): d227e14

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +8 -7
README.md CHANGED
@@ -64,19 +64,20 @@ The code in this repository is open source under the [Apache-2.0 license](./LICE
64
 
65
  If you find our work useful, please consider citing the following paper:
66
  ```
67
- @article{chen2024xtrimopglm,
68
  title={xTrimoPGLM: unified 100B-scale pre-trained transformer for deciphering the language of protein},
69
  author={Chen, Bo and Cheng, Xingyi and Li, Pan and Geng, Yangli-ao and Gong, Jing and Li, Shen and Bei, Zhilei and Tan, Xu and Wang, Boyan and Zeng, Xin and others},
70
- journal={arXiv preprint arXiv:2401.06199},
71
- year={2024}
 
 
 
72
  }
73
 
74
- @article{cheng2024training,
75
  title={Training Compute-Optimal Protein Language Models},
76
  author={Cheng, Xingyi and Chen, Bo and Li, Pan and Gong, Jing and Tang, Jie and Song, Le},
77
- journal={bioRxiv},
78
- pages={2024--06},
79
  year={2024},
80
- publisher={Cold Spring Harbor Laboratory}
81
  }
82
  ```
 
64
 
65
  If you find our work useful, please consider citing the following paper:
66
  ```
67
+ @misc{chen2024xtrimopglm,
68
  title={xTrimoPGLM: unified 100B-scale pre-trained transformer for deciphering the language of protein},
69
  author={Chen, Bo and Cheng, Xingyi and Li, Pan and Geng, Yangli-ao and Gong, Jing and Li, Shen and Bei, Zhilei and Tan, Xu and Wang, Boyan and Zeng, Xin and others},
70
+ year={2024},
71
+ eprint={2401.06199},
72
+ archivePrefix={arXiv},
73
+ primaryClass={cs.CL},
74
+ note={arXiv preprint arXiv:2401.06199}
75
  }
76
 
77
+ @misc{cheng2024training,
78
  title={Training Compute-Optimal Protein Language Models},
79
  author={Cheng, Xingyi and Chen, Bo and Li, Pan and Gong, Jing and Tang, Jie and Song, Le},
 
 
80
  year={2024},
81
+ note={bioRxiv, Cold Spring Harbor Laboratory, pages 2024--06}
82
  }
83
  ```