mihaimasala committed
Commit 9f51c9a
1 Parent(s): 63cc847

Update README.md

Files changed (1)
  1. README.md +4 -6
README.md CHANGED
@@ -526,7 +526,7 @@ print(tokenizer.decode(outputs[0]))
  <td><strong><center>TruthfulQA</center></strong></td>
  </tr>
  <tr>
- <td>Llama-2-7b</td><td><center>37.04</center></td><td><center>36.05</center></td><td><center><strong>33.66</strong></center></td><td><center>57.56</center></td><td><center>48.00</center></td><td><center><strong>4.75</strong></center></td><td><center>42.22</center></td>
+ <td>Llama-2-7b</td><td><center>37.04</center></td><td><center>36.05</center></td><td><center><strong>33.66</strong></center></td><td><center>57.56</center></td><td><center>48.01</center></td><td><center><strong>4.75</strong></center></td><td><center>42.22</center></td>
  </tr>
  <tr>
  <td><em>RoLlama2-7b-Base</em></td><td><center><em><strong>38.03</strong></em></center></td><td><center><em><strong>37.95</strong></em></center></td><td><center><em>27.22</em></center></td><td><center><em><strong>59.29</strong></em></center></td><td><center><em><strong>57.22</strong></em></center></td><td><center><em>2.53</em></center></td><td><center><em><strong>44.00</strong></em></center></td>
@@ -534,7 +534,6 @@ print(tokenizer.decode(outputs[0]))
  </tbody>
  </table>
 
-
  ## Downstream Tasks
 
 
@@ -553,7 +552,7 @@ print(tokenizer.decode(outputs[0]))
  <td colspan="2"><center><strong>Finetuned</strong></center></td>
  </tr>
  <tr>
- <td></td>
+ <td><strong>Model</strong></td>
  <td><center><strong>Binary<br>(Macro F1)</strong></center></td>
  <td><center><strong>Multiclass<br>(Macro F1)</strong></center></td>
  <td><center><strong>Binary<br>(Macro F1)</strong></center></td>
@@ -587,7 +586,7 @@ print(tokenizer.decode(outputs[0]))
  <td colspan="2"><center><strong>Finetuned</strong></center></td>
  </tr>
  <tr>
- <td></td>
+ <td><strong>Model</strong></td>
  <td><center><strong>(EM)</strong></center></td>
  <td><center><strong>(F1)</strong></center></td>
  <td><center><strong>(EM)</strong></center></td>
@@ -598,7 +597,7 @@ print(tokenizer.decode(outputs[0]))
  <td><center><strong>(Pearson)</strong></center></td>
  </tr>
  <tr>
- <td>Llama-2-7b</td><td><center><strong>38.91</strong></center></td><td><center><strong>56.82</strong></center></td><td><center>65.46</center></td><td><center>79.42</center></td><td><center><strong>9.08</strong></center></td><td><center><strong>9.07</strong></center></td><td><center><strong>79.93</strong></center></td><td><center><strong>81.08</strong></center></td>
+ <td>Llama-2-7b</td><td><center><strong>38.91</strong></center></td><td><center><strong>56.82</strong></center></td><td><center>65.46</center></td><td><center>79.43</center></td><td><center><strong>9.08</strong></center></td><td><center><strong>9.07</strong></center></td><td><center><strong>79.93</strong></center></td><td><center><strong>81.08</strong></center></td>
  </tr>
  <tr>
  <td><em>RoLlama2-7b-Base</em></td><td><center><em>30.15</em></center></td><td><center><em>47.03</em></center></td><td><center><em><strong>67.06</strong></em></center></td><td><center><em><strong>79.96</strong></em></center></td><td><center><em>7.89</em></center></td><td><center><em>7.98</em></center></td><td><center><em>71.75</em></center></td><td><center><em>71.99</em></center></td>
@@ -613,7 +612,6 @@ print(tokenizer.decode(outputs[0]))
  |--------------------|:--------:|
  |*RoLlama2-7b-Base* | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base) |
  |RoLlama2-7b-Instruct| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct) |
- |RoLlama2-7b-Chat | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Chat) |
 
 
  ## Citation