mihaimasala commited on
Commit
971fb7d
1 Parent(s): 9f51c9a

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +12 -8
README.md CHANGED
@@ -473,6 +473,8 @@ OpenLLM represents the first open-source effort to build an LLM specialized for R
473
  - **License:** Llama2 Community License Agreement
474
  - **Continual pretrained from model:** [Llama-2-7b](https://huggingface.co/meta-llama/Llama-2-7b-hf)
475
  - **Trained using:** [CulturaX](https://huggingface.co/datasets/uonlp/CulturaX)
 
 
476
  ### Model Sources
477
 
478
  <!-- Provide the basic links for the model. -->
@@ -526,10 +528,10 @@ print(tokenizer.decode(outputs[0]))
526
  <td><strong><center>TruthfulQA</center></strong></td>
527
  </tr>
528
  <tr>
529
- <td>Llama-2-7b</td><td><center>37.04</center></td><td><center>36.05</center></td><td><center><strong>33.66</strong></center></td><td><center>57.56</center></td><td><center>48.01</center></td><td><center><strong>4.75</strong></center></td><td><center>42.22</center></td>
530
  </tr>
531
  <tr>
532
- <td><em>RoLlama2-7b-Base</em></td><td><center><em><strong>38.03</strong></em></center></td><td><center><em><strong>37.95</strong></em></center></td><td><center><em>27.22</em></center></td><td><center><em><strong>59.29</strong></em></center></td><td><center><em><strong>57.22</strong></em></center></td><td><center><em>2.53</em></center></td><td><center><em><strong>44.00</strong></em></center></td>
533
  </tr>
534
  </tbody>
535
  </table>
@@ -566,11 +568,12 @@ print(tokenizer.decode(outputs[0]))
566
  <td>Llama-2-7b</td><td><center><strong>93.19</strong></center></td><td><center>54.11</center></td><td><center>98.43</center></td><td><center>87.22</center></td><td><center><strong>14.90</strong></center></td><td><center><strong>26.61</strong></center></td><td><center>24.95</center></td><td><center>39.09</center></td>
567
  </tr>
568
  <tr>
569
- <td><em>RoLlama2-7b-Base</em></td><td><center><em>83.25</em></center></td><td><center><em><strong>61.04</strong></em></center></td><td><center><em><strong>98.97</strong></em></center></td><td><center><em><strong>87.72</strong></em></center></td><td><center><em>10.01</em></center></td><td><center><em>13.03</em></center></td><td><center><em><strong>27.85</strong></em></center></td><td><center><em><strong>39.30</strong></em></center></td>
570
  </tr>
571
  </tbody>
572
  </table>
573
 
 
574
  <table>
575
  <tbody>
576
  <tr>
@@ -597,10 +600,10 @@ print(tokenizer.decode(outputs[0]))
597
  <td><center><strong>(Pearson)</strong></center></td>
598
  </tr>
599
  <tr>
600
- <td>Llama-2-7b</td><td><center><strong>38.91</strong></center></td><td><center><strong>56.82</strong></center></td><td><center>65.46</center></td><td><center>79.43</center></td><td><center><strong>9.08</strong></center></td><td><center><strong>9.07</strong></center></td><td><center><strong>79.93</strong></center></td><td><center><strong>81.08</strong></center></td>
601
  </tr>
602
  <tr>
603
- <td><em>RoLlama2-7b-Base</em></td><td><center><em>30.15</em></center></td><td><center><em>47.03</em></center></td><td><center><em><strong>67.06</strong></em></center></td><td><center><em><strong>79.96</strong></em></center></td><td><center><em>7.89</em></center></td><td><center><em>7.98</em></center></td><td><center><em>71.75</em></center></td><td><center><em>71.99</em></center></td>
604
  </tr>
605
  </tbody>
606
  </table>
@@ -610,9 +613,10 @@ print(tokenizer.decode(outputs[0]))
610
 
611
  | Model | Link |
612
  |--------------------|:--------:|
613
- |*RoLlama2-7b-Base* | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base) |
614
- |RoLlama2-7b-Instruct| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct) |
615
-
 
616
 
617
  ## Citation
618
 
 
473
  - **License:** Llama2 Community License Agreement
474
  - **Continual pretrained from model:** [Llama-2-7b](https://huggingface.co/meta-llama/Llama-2-7b-hf)
475
  - **Trained using:** [CulturaX](https://huggingface.co/datasets/uonlp/CulturaX)
476
+
477
+
478
  ### Model Sources
479
 
480
  <!-- Provide the basic links for the model. -->
 
528
  <td><strong><center>TruthfulQA</center></strong></td>
529
  </tr>
530
  <tr>
531
+ <td>Llama-2-7b</td><td><center>37.04</center></td><td><center>36.05</center></td><td><center><strong>33.66</strong></center></td><td><center>57.56</center></td><td><center>48.00</center></td><td><center><strong>4.75</strong></center></td><td><center>42.22</center></td>
532
  </tr>
533
  <tr>
534
+ <td><em>RoLlama2-7b-Base-2024-05-14</em></td><td><center><em><strong>38.03</strong></em></center></td><td><center><em><strong>37.95</strong></em></center></td><td><center><em>27.22</em></center></td><td><center><em><strong>59.29</strong></em></center></td><td><center><em><strong>57.22</strong></em></center></td><td><center><em>2.53</em></center></td><td><center><em><strong>44.00</strong></em></center></td>
535
  </tr>
536
  </tbody>
537
  </table>
 
568
  <td>Llama-2-7b</td><td><center><strong>93.19</strong></center></td><td><center>54.11</center></td><td><center>98.43</center></td><td><center>87.22</center></td><td><center><strong>14.90</strong></center></td><td><center><strong>26.61</strong></center></td><td><center>24.95</center></td><td><center>39.09</center></td>
569
  </tr>
570
  <tr>
571
+ <td><em>RoLlama2-7b-Base-2024-05-14</em></td><td><center><em>83.25</em></center></td><td><center><em><strong>61.04</strong></em></center></td><td><center><em><strong>98.97</strong></em></center></td><td><center><em><strong>87.72</strong></em></center></td><td><center><em>10.01</em></center></td><td><center><em>13.03</em></center></td><td><center><em><strong>27.85</strong></em></center></td><td><center><em><strong>39.30</strong></em></center></td>
572
  </tr>
573
  </tbody>
574
  </table>
575
 
576
+
577
  <table>
578
  <tbody>
579
  <tr>
 
600
  <td><center><strong>(Pearson)</strong></center></td>
601
  </tr>
602
  <tr>
603
+ <td>Llama-2-7b</td><td><center><strong>38.91</strong></center></td><td><center><strong>56.82</strong></center></td><td><center>65.46</center></td><td><center>79.42</center></td><td><center><strong>9.08</strong></center></td><td><center><strong>9.07</strong></center></td><td><center><strong>79.93</strong></center></td><td><center><strong>81.08</strong></center></td>
604
  </tr>
605
  <tr>
606
+ <td><em>RoLlama2-7b-Base-2024-05-14</em></td><td><center><em>30.15</em></center></td><td><center><em>47.03</em></center></td><td><center><em><strong>67.06</strong></em></center></td><td><center><em><strong>79.96</strong></em></center></td><td><center><em>7.89</em></center></td><td><center><em>7.98</em></center></td><td><center><em>71.75</em></center></td><td><center><em>71.99</em></center></td>
607
  </tr>
608
  </tbody>
609
  </table>
 
613
 
614
  | Model | Link |
615
  |--------------------|:--------:|
616
+ |RoLlama2-7b-Base-2024-05-14 | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base-2024-05-14) |
617
+ |RoLlama2-7b-Instruct-2024-05-14 | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-2024-05-14) |
618
+ |*RoLlama2-7b-Instruct-2024-10-09*| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-2024-10-09) |
619
+ |RoLlama2-7b-Instruct-DPO-2024-10-09| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-DPO-2024-10-09) |
620
 
621
  ## Citation
622