CreitinGameplays committed on
Commit bc4308f
1 parent: a487b7f

Update app.py

Files changed (1)
app.py  +2 −2
app.py CHANGED
@@ -12,7 +12,7 @@ model = AutoModelForCausalLM.from_pretrained(model_name)
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model.to(device)
 
-@spaces.GPU(duration=60)
+@spaces.GPU(duration=90)
 def generate_text(user_prompt):
     """Generates text using the BLOOM model from Hugging Face Transformers and removes the user prompt."""
     # Construct the full prompt with system introduction, user prompt, and assistant role
@@ -24,7 +24,7 @@ def generate_text(user_prompt):
     # Generate text with the complete prompt and limit the maximum length to 256 tokens
     output = model.generate(
         input_ids=prompt_encoded,
-        max_length=256,
+        max_length=1900,
         num_beams=1,
         num_return_sequences=1,
         do_sample=True,
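
For context, a minimal sketch of the code path this commit touches, assuming the rest of app.py matches the context lines above. The model name, tokenizer setup, prompt template, and post-processing are illustrative placeholders, not part of the diff; only the decorator duration (90 s) and max_length (1900) reflect the values introduced by this commit.

# Sketch only; model_name, prompt template, and output trimming are assumptions.
import torch
import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "bigscience/bloom-560m"  # placeholder; the Space's actual model is not shown in this diff
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

@spaces.GPU(duration=90)  # ZeroGPU allocation window, raised from 60 s to 90 s in this commit
def generate_text(user_prompt):
    """Generates text with the BLOOM model and strips the user prompt from the output."""
    prompt = f"User: {user_prompt}\nAssistant:"  # assumed prompt template
    prompt_encoded = tokenizer.encode(prompt, return_tensors="pt").to(device)
    output = model.generate(
        input_ids=prompt_encoded,
        max_length=1900,  # raised from 256 in this commit
        num_beams=1,
        num_return_sequences=1,
        do_sample=True,
    )
    text = tokenizer.decode(output[0], skip_special_tokens=True)
    return text[len(prompt):].strip()

Raising the decorator duration gives the longer generation run enough time to finish before the ZeroGPU allocation expires, which is consistent with the larger max_length set in the same commit.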