unclemusclez committed
Commit a189388 · verified · 1 Parent(s): 62f8c49

Update app.py

Files changed (1): app.py (+4 -9)
app.py CHANGED
@@ -67,14 +67,12 @@ def regenerate_pubkey(pubkey, oauth_token: gr.OAuthToken | None):
  raise Exception(f"Error generating Ollama Host File {result.stderr}")
  print("Ollama Host File Created!")

-
  except Exception as e:
  return (f"Error: {e}", "error.png")
  finally:
  # shutil.rmtree(model_name, ignore_errors=True)
  print("Ollama Pubkey Generated! Copy to your user profile in the Ollama Library.")

-
  def ollamafy_model(login, model_id, ollama_library_username , ollama_q_method, latest, download_gguf_link, maintainer, oauth_token: gr.OAuthToken | None, ollama_model_name):
  ollama_library_username: library_username | None

@@ -178,7 +176,6 @@ def ollamafy_model(login, model_id, ollama_library_username , ollama_q_method, l
  raise Exception(f"Error removing to Ollama: {ollama_rm_result.stderr}")
  else:
  print("Model pushed to Ollama library successfully!")
-

  if latest:
  ollama_copy = f"ollama cp {library_username}/{model_id.lower()}:{q_method.lower()} {library_username}/{model_id.lower()}:latest"
@@ -204,12 +201,11 @@ def ollamafy_model(login, model_id, ollama_library_username , ollama_q_method, l

  ollama_rm_latest_result = subprocess.run(ollama_rm_latest, shell=True, capture_output=True)
  print(ollama_rm_latest_result)
- if ollama_rm_latest_result.returncode != 0:
- raise Exception(f"Error pushing to Ollama: {ollama_rm_latest.stderr}")
- else:
- print("Model pushed to Ollama library successfully!")
+ if ollama_rm_latest_result.returncode != 0:
+ raise Exception(f"Error pushing to Ollama: {ollama_rm_latest.stderr}")
+ else:
+ print("Model pushed to Ollama library successfully!")

-
  except Exception as e:
  return (f"Error: {e}", "error.png")
  finally:
@@ -227,7 +223,6 @@ def ollamafy_model(login, model_id, ollama_library_username , ollama_q_method, l
  if result.returncode != 0:
  raise Exception(f"Error removing Ollama HOME folder {result.stderr}")
  print("Ollama HOME fodler removed successfully!")
-

  css="""/* Custom CSS to allow scrolling */
  .gradio-container {overflow-y: auto;}
 
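Note: the block removed and re-added in the -204,12 +201,11 hunk follows the pattern used throughout this part of app.py: format an ollama CLI command as a string, run it with subprocess.run(shell=True, capture_output=True), and raise with the captured stderr when the return code is non-zero. The sketch below is a minimal, self-contained illustration of that pattern, not code taken from app.py; the run_ollama helper name and the example username/model/tag values are hypothetical.

import subprocess

def run_ollama(command: str) -> None:
    # Hypothetical helper illustrating the pattern in the diff:
    # shell=True passes the formatted command string to the shell,
    # capture_output=True keeps stdout/stderr for the error message.
    result = subprocess.run(command, shell=True, capture_output=True, text=True)
    print(result)
    if result.returncode != 0:
        # Surface the CLI's stderr in the raised exception, as the app does.
        raise Exception(f"Error running '{command}': {result.stderr}")
    print(f"'{command}' completed successfully!")

if __name__ == "__main__":
    # Illustrative values only; mirrors the cp/push/rm flow around the :latest tag.
    library_username = "example-user"
    model_id = "example-model"
    q_method = "q4_k_m"
    run_ollama(f"ollama cp {library_username}/{model_id}:{q_method} {library_username}/{model_id}:latest")
    run_ollama(f"ollama push {library_username}/{model_id}:latest")
    run_ollama(f"ollama rm {library_username}/{model_id}:latest")

Unlike the calls shown in the diff, the sketch adds text=True so stderr comes back as a plain string rather than bytes, which keeps the raised error message readable.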