Gregor Betz committed
Commit
09f46eb
1 Parent(s): bcfbb35

inference_server_url

Files changed (1): app.py (+3 -3)
app.py CHANGED
@@ -212,17 +212,17 @@ with gr.Blocks() as demo:
     tos_approved = gr.State(False)
 
     # set up client and guide
-    if not client_kwargs["url"]:
+    if not client_kwargs["inference_server_url"]:
         gr.Error(
             "Please set the client model inference endpoint in the config.yaml file.",
             duration=-1
         )
-    if not guide_kwargs["url"]:
+    if not guide_kwargs["inference_server_url"]:
         gr.Error(
             "Please set the expert model inference endpoint in the config.yaml file.",
             duration=-1
         )
-    if not guide_kwargs["classifier_kwargs"]["url"]:
+    if not guide_kwargs["classifier_kwargs"]["inference_server_url"]:
         gr.Error(
             "Please set the classifier model inference endpoint in the config.yaml file.",
             duration=-1
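
For context, a minimal sketch of how the renamed key might line up with config.yaml. The loader, the section names (client, guide, classifier_kwargs) and the example URLs are assumptions for illustration only; the commit itself only shows the lookups in app.py changing from "url" to "inference_server_url". In app.py the failed check surfaces a gr.Error inside the Blocks context rather than raising a plain exception as the sketch does.

# Minimal sketch (not the repository's actual loader): assumes config.yaml holds
# one section per model whose dict is passed around as *_kwargs in app.py.
import yaml

with open("config.yaml") as f:              # filename taken from the error messages
    config = yaml.safe_load(f)

client_kwargs = config.get("client", {})    # section names are hypothetical
guide_kwargs = config.get("guide", {})

# After this commit the checks read "inference_server_url" rather than "url",
# so the YAML would need keys such as:
#   client:
#     inference_server_url: http://localhost:8080/v1
#   guide:
#     inference_server_url: http://localhost:8081/v1
#     classifier_kwargs:
#       inference_server_url: http://localhost:8082/v1
for name, kwargs in [
    ("client", client_kwargs),
    ("guide", guide_kwargs),
    ("classifier", guide_kwargs.get("classifier_kwargs", {})),
]:
    if not kwargs.get("inference_server_url"):
        raise ValueError(f"{name} model endpoint missing: set inference_server_url in config.yaml")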