Add library name tag

#1 opened by nielsr (HF staff)
Files changed (1)
  1. README.md +9 -5
README.md CHANGED
````diff
@@ -1,13 +1,14 @@
 ---
-license: apache-2.0
-language:
-- en
 base_model:
 - meta-llama/Llama-3.2-3B-Instruct
+language:
+- en
+license: apache-2.0
 pipeline_tag: text-generation
+library_name: transformers
 ---
 
-
+```markdown
 <div align="center">
 <b style="font-size: 40px;">Gen-8B-R2</b>
 </div>
@@ -41,7 +42,9 @@ def prepare_sample_text(prompt):
 def format_prompt_template(query, chunk_list):
 
 chunk_list = ['[Chunk ID: '+ str(idx+1) + '] ' + chunk_text for idx, chunk_text in enumerate(chunk_list)]
-chunk_list = '\n\n'.join(chunk_list)
+chunk_list = '
+
+'.join(chunk_list)
 
 prompt = '''
 You are an expert assistant trained to generate answers based on document chunks.
@@ -86,3 +89,4 @@ do_sample=True,
 temperature=0.8,
 top_p=0.9,
 ```
+```
````
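
For reference, the model card front matter after this change, reconstructed from the first hunk, reads:

```yaml
base_model:
- meta-llama/Llama-3.2-3B-Instruct
language:
- en
license: apache-2.0
pipeline_tag: text-generation
library_name: transformers
```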
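
The second hunk touches only the chunk-joining line of `format_prompt_template`. A minimal runnable sketch, assuming the helper numbers the retrieved chunks, joins them with blank lines (the `'\n\n'.join` of the original line), and interpolates them into a prompt whose first sentence is the one visible in the hunk, might look like:

```python
def format_prompt_template(query, chunk_list):
    # Prefix each chunk with a 1-based ID, as in the README's list comprehension.
    chunk_list = ['[Chunk ID: ' + str(idx + 1) + '] ' + chunk_text
                  for idx, chunk_text in enumerate(chunk_list)]
    # The original README line joins the numbered chunks with a blank line.
    chunks = '\n\n'.join(chunk_list)

    # Only the first line of the prompt template appears in the diff; the rest of
    # this template is a placeholder, not the README's actual wording.
    prompt = (
        'You are an expert assistant trained to generate answers based on document chunks.\n\n'
        f'{chunks}\n\n'
        f'Question: {query}\nAnswer:'
    )
    return prompt
```

Calling `format_prompt_template("...", ["chunk one", "chunk two"])` would then produce a prompt containing `[Chunk ID: 1]` and `[Chunk ID: 2]` headers separated by blank lines.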
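
The last hunk closes a fenced block that sets `do_sample=True`, `temperature=0.8`, `top_p=0.9`. Assuming these are arguments to `generate()` from `transformers` (consistent with the new `library_name: transformers` tag), a hedged end-to-end sketch with a placeholder repository id could be:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/Gen-8B-R2"  # placeholder; substitute the actual repository id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

# Build the prompt with the helper sketched above (or the README's own version).
prompt = format_prompt_template("What is Gen-8B-R2?", ["First chunk.", "Second chunk."])
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

outputs = model.generate(
    **inputs,
    max_new_tokens=256,   # assumption; not shown in the diff
    do_sample=True,       # sampling settings from the README snippet
    temperature=0.8,
    top_p=0.9,
)
# Decode only the newly generated tokens.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```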