Ali-C137 committed
Commit e255df2 · 1 Parent(s): 3bf44ed

Update app.py

Files changed (1)
  1. app.py +0 -26
app.py CHANGED
@@ -8,15 +8,9 @@ assert (
 ), "LLaMA is now in HuggingFace's main branch.\nPlease reinstall it: pip uninstall transformers && pip install git+https://github.com/huggingface/transformers.git"
 from transformers import LlamaTokenizer, LlamaForCausalLM, GenerationConfig
 
-'''
-BASE_MODEL = "decapoda-research/llama-7b-hf"
-LORA_WEIGHTS = "tloen/alpaca-lora-7b"
-'''
-
 BASE_MODEL = "decapoda-research/llama-7b-hf"
 LORA_WEIGHTS = "Yasbok/Alpaca_instruction_fine_tune_Arabic"
 
-
 tokenizer = LlamaTokenizer.from_pretrained(BASE_MODEL)
 
 if torch.cuda.is_available():
@@ -62,26 +56,6 @@ else:
         device_map={"": device},
     )
 
-'''
-def generate_prompt(instruction, input=None):
-    if input:
-        return f"""Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
-
-### Instruction:
-{instruction}
-
-### Input:
-{input}
-
-### Response:"""
-    else:
-        return f"""Below is an instruction that describes a task. Write a response that appropriately completes the request.
-
-### Instruction:
-{instruction}
-
-### Response:"""
-'''
 
 def generate_prompt(instruction, input=None):
     if input:
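
For reference, the deleted triple-quoted block appears to be a copy of the prompt builder that the file keeps; the live definition is cut off at `if input:` in the diff context above. Assuming the retained function matches the removed copy, it reads:

def generate_prompt(instruction, input=None):
    # Build an Alpaca-style prompt; include the optional input field when given.
    if input:
        return f"""Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.

### Instruction:
{instruction}

### Input:
{input}

### Response:"""
    else:
        return f"""Below is an instruction that describes a task. Write a response that appropriately completes the request.

### Instruction:
{instruction}

### Response:"""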
 
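The model and adapter loading itself sits outside the visible hunks. A minimal sketch of the usual pattern for a Space like this, assuming peft's PeftModel is used to attach LORA_WEIGHTS; the dtype, device handling, and eval call below are assumptions, not part of this commit:

import torch
from peft import PeftModel
from transformers import LlamaForCausalLM, LlamaTokenizer

BASE_MODEL = "decapoda-research/llama-7b-hf"
LORA_WEIGHTS = "Yasbok/Alpaca_instruction_fine_tune_Arabic"

# Pick a device; the hidden context presumably builds `device` the same way (assumption).
device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = LlamaTokenizer.from_pretrained(BASE_MODEL)

# Load the base LLaMA weights, then attach the Arabic instruction-tuned LoRA adapter.
model = LlamaForCausalLM.from_pretrained(
    BASE_MODEL,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
    device_map={"": device},
)
model = PeftModel.from_pretrained(model, LORA_WEIGHTS)
model.eval()

Generation would then presumably go through the imported GenerationConfig and model.generate, fed with prompts built by the generate_prompt helper shown above.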