update

- app.py +2 -3
- examples.py +5 -2
app.py

```diff
@@ -38,7 +38,7 @@ table
 import gradio as gr
 from vocab import all_tokenizers
 from util import *
-from examples import example_fn
+from examples import example_fn, example_types
 
 get_window_url_params = """
 function(url_params) {
@@ -58,8 +58,7 @@ with gr.Blocks(css="css/style.css", title="Tokenizer Arena") as demo:
     with gr.Row():
         gr.Markdown("## Input Text")
         dropdown_examples = gr.Dropdown(
-
-            ["space", "punctuation", "symbol", "number"],
+            example_types,
             value="Examples",
             type="index",
             show_label=False,
```
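The second hunk swaps the hard-coded category list for `example_types`, now imported from examples.py. The wiring between the dropdown and `example_fn` is not part of this diff; the sketch below uses stand-in data and an assumed `.change` binding purely to illustrate how `type="index"` pairs with an index-taking callback.

```python
import gradio as gr

# Stand-ins for the values imported from examples.py (assumed shapes, for illustration only).
example_types = ["space", "punctuation", "symbol", "number"]
example_texts = ["2 spaces:  end", "Hello, world!", "a + b = c", "123 456.789"]

def example_fn(example_idx):
    # With type="index", Gradio passes the position of the selected choice.
    return example_texts[example_idx]

with gr.Blocks(title="Tokenizer Arena") as demo:
    with gr.Row():
        gr.Markdown("## Input Text")
        dropdown_examples = gr.Dropdown(
            example_types,   # choices now come from examples.py instead of a hard-coded list
            value="Examples",
            type="index",    # the handler receives the index, matching example_fn(example_idx)
            show_label=False,
        )
    user_input = gr.Textbox(show_label=False)
    # Assumed wiring; the .change binding is not shown in this commit.
    dropdown_examples.change(example_fn, [dropdown_examples], [user_input])

demo.launch()
```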
examples.py

```diff
@@ -56,12 +56,15 @@ more_examples = [
     # OpenAI series (tiktoken)
     ("qwen", "gpt_35_turbo", ""),
 
-
 ]
 
+lang = "en"
+
+example_types = [t[0].split(":")[0] for t in examples[lang]]
+
 
 def example_fn(example_idx):
-    return examples[
+    return examples[lang][example_idx]
 
 
 def get_more_example():
```
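With this change, the dropdown labels are derived from the example texts themselves, so app.py no longer has to keep a separate list in sync. A minimal sketch of the assumed data shape follows; the actual `examples` dict is defined above this hunk and is not shown, so the entries below are hypothetical and only the `"<type>: ..."` prefix convention is implied by the diff.

```python
# Hypothetical examples data: keyed by language, each entry's first element
# carries a "<type>: " prefix that doubles as its dropdown label.
examples = {
    "en": [
        ("space: 2 spaces:  2\ttabs\t\tend", "gpt_35_turbo"),
        ("punctuation: Hello, world! Done?", "gpt_35_turbo"),
        ("symbol: 1 + 2 = 3%", "gpt_35_turbo"),
        ("number: 123 456.789", "gpt_35_turbo"),
    ],
}

lang = "en"

# Take the text before the first colon of each example as its label.
example_types = [t[0].split(":")[0] for t in examples[lang]]
print(example_types)  # ['space', 'punctuation', 'symbol', 'number']


def example_fn(example_idx):
    # The dropdown uses type="index", so example_idx is the selected position.
    return examples[lang][example_idx]
```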