tricktreat committed
Commit • 56994ad
1 Parent(s): c080dc3

update
Files changed:
- config.gradio.yaml +2 -2
- get_token_ids.py +2 -0
- requirements.txt +2 -1
config.gradio.yaml CHANGED
@@ -5,8 +5,8 @@ huggingface:
 dev: false
 debug: true
 log_file: logs/debug_TIMESTAMP.log
-model:
-use_completion:
+model: gpt-4 # text-davinci-003
+use_completion: false
 inference_mode: hybrid # local, huggingface or hybrid
 local_deployment: standard # minimal, standard or full
 num_candidate_models: 5
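For context, these are the keys a HuggingGPT-style Gradio app would read at startup to pick the LLM and the API style. A minimal sketch of how such a YAML config could be consumed with PyYAML; the loading code and variable names below are illustrative assumptions, not the Space's actual code:

import yaml  # PyYAML

# Illustrative loader; the real application may read the config differently.
with open("config.gradio.yaml") as f:
    config = yaml.safe_load(f)

model = config["model"]                    # "gpt-4" after this change
use_completion = config["use_completion"]  # False -> chat endpoint rather than completions

print(f"model={model}, use_completion={use_completion}")

Setting use_completion to false fits the new default: gpt-4 is only served through the chat endpoint, whereas the previous text-davinci-003 default is a completion-style model.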
get_token_ids.py CHANGED
@@ -1,6 +1,7 @@
 import tiktoken
 
 encodings = {
+    "gpt-4": tiktoken.get_encoding("cl100k_base"),
     "gpt-3.5-turbo": tiktoken.get_encoding("cl100k_base"),
     "gpt-3.5-turbo-0301": tiktoken.get_encoding("cl100k_base"),
     "text-davinci-003": tiktoken.get_encoding("p50k_base"),
@@ -16,6 +17,7 @@ encodings = {
 }
 
 max_length = {
+    "gpt-4": 4096,
     "gpt-3.5-turbo": 4096,
     "gpt-3.5-turbo-0301": 4096,
     "text-davinci-003": 4096,
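The two dictionaries map a model name to its tiktoken encoding and to a context-length budget, and this change simply registers gpt-4 next to the existing entries (cl100k_base is the encoding gpt-4 uses). A small usage sketch; count_tokens is an illustrative helper, not part of the file:

import tiktoken

encodings = {"gpt-4": tiktoken.get_encoding("cl100k_base")}
max_length = {"gpt-4": 4096}

def count_tokens(model, text):
    # Encode with the model's tokenizer and count the resulting token ids.
    return len(encodings[model].encode(text))

prompt = "Plan the sub-tasks needed to answer the user's request."
used = count_tokens("gpt-4", prompt)
print(f"{used} tokens used, {max_length['gpt-4'] - used} left in the budget")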
requirements.txt CHANGED
@@ -15,4 +15,5 @@ timm==0.6.13
 typeguard==2.13.3
 accelerate==0.18.0
 pytesseract==0.3.10
-basicsr==1.4.2
+basicsr==1.4.2
+torch==2.0.0
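The net effect of the dependency change is that basicsr==1.4.2 stays and an explicit torch==2.0.0 pin is added. A quick way to confirm the pin after running pip install -r requirements.txt; this check is a generic sketch, not something from the repository:

import torch

# Confirms the pinned PyTorch build that basicsr and the other vision deps will run against.
print(torch.__version__)  # expected: 2.0.0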