crimeacs committed
Commit 2598b7d · 1 Parent(s): 476b80e

Fixed dumb input

Files changed (2)
  1. Gradio_app.ipynb +5 -6
  2. app.py +1 -2
Gradio_app.ipynb CHANGED
@@ -2,14 +2,14 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Running on local URL: http://127.0.0.1:7860\n",
+      "Running on local URL: http://127.0.0.1:7861\n",
       "\n",
       "To create a public link, set `share=True` in `launch()`.\n"
      ]
@@ -17,7 +17,7 @@
     {
      "data": {
       "text/html": [
-       "<div><iframe src=\"http://127.0.0.1:7860/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+       "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
@@ -30,7 +30,7 @@
      "data": {
       "text/plain": []
      },
-     "execution_count": 1,
+     "execution_count": 2,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -574,7 +574,7 @@
     "    return image\n",
     "\n",
     "# model = torch.jit.load(\"model.pt\")\n",
-    "model = torch.jit.load(\"quantized_model.pt\")\n",
+    "model = torch.jit.load(\"model.pt\")\n",
     "\n",
     "model.eval()\n",
     "\n",
@@ -769,7 +769,6 @@
     "    with gr.Row():\n",
     "        with gr.Column(scale=2):\n",
     "            inputs_vel_model = [\n",
-    "                ## FIX FILE NAME ISSUE\n",
     "                gr.Slider(minimum=-180, maximum=180, value=0, step=5, label=\"Azimuth\", interactive=True),\n",
     "                gr.Slider(minimum=-90, maximum=90, value=30, step=5, label=\"Elevation\", interactive=True)\n",
     "            ]\n",
app.py CHANGED
@@ -536,7 +536,7 @@ def compute_velocity_model(azimuth, elevation):
     return image

 # model = torch.jit.load("model.pt")
-model = torch.jit.load("quantized_model.pt")
+model = torch.jit.load("model.pt")

 model.eval()

@@ -731,7 +731,6 @@ with gr.Blocks() as demo:
     with gr.Row():
         with gr.Column(scale=2):
             inputs_vel_model = [
-                ## FIX FILE NAME ISSUE
                 gr.Slider(minimum=-180, maximum=180, value=0, step=5, label="Azimuth", interactive=True),
                 gr.Slider(minimum=-90, maximum=90, value=30, step=5, label="Elevation", interactive=True)
             ]
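
Both files make the same substantive change: the demo now loads the full TorchScript checkpoint instead of the quantized one. A minimal sketch of that loading step (not part of the commit), assuming "model.pt" is a torch.jit archive present in the working directory:

import torch

# The commit reverts from the quantized checkpoint to the full-precision one.
# model = torch.jit.load("quantized_model.pt")   # loaded before this commit
model = torch.jit.load("model.pt")               # loaded after this commit

# Inference mode: disables dropout and uses stored batch-norm statistics.
model.eval()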
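
The diff also shows the two sliders collected into inputs_vel_model and the signature compute_velocity_model(azimuth, elevation), but not the output component or the event that triggers the call. A rough, hypothetical wiring under gr.Blocks (the gr.Image output, the button, and the stand-in function body are assumptions, not part of app.py):

import numpy as np
import gradio as gr

def compute_velocity_model(azimuth, elevation):
    # Stand-in for the real app.py function, which renders the velocity model
    # for the given viewing angles and returns an image.
    return np.zeros((256, 256, 3), dtype=np.uint8)

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=2):
            inputs_vel_model = [
                gr.Slider(minimum=-180, maximum=180, value=0, step=5, label="Azimuth", interactive=True),
                gr.Slider(minimum=-90, maximum=90, value=30, step=5, label="Elevation", interactive=True)
            ]
        with gr.Column(scale=1):
            velocity_plot = gr.Image(label="Velocity model")  # assumed output component

    # Assumed trigger: re-render the plot when the button is pressed.
    render_button = gr.Button("Render velocity model")
    render_button.click(fn=compute_velocity_model, inputs=inputs_vel_model, outputs=velocity_plot)

demo.launch()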