Spaces:
Sleeping
Sleeping
Translate llama
Browse files
app.py
CHANGED
@@ -15,6 +15,11 @@ LIBRARY = None
|
|
15 |
USER_TOKEN = None
|
16 |
TOKEN = os.environ.get("HUGGINGFACE_API_LOGIN", None)
|
17 |
|
|
|
|
|
|
|
|
|
|
|
18 |
def check_for_discussion(model_name:str):
|
19 |
"Checks if an automated discussion has been opened on the model by `model-sizer-bot`"
|
20 |
global TOKEN
|
@@ -57,6 +62,8 @@ def convert_url_to_name(url:str):
|
|
57 |
|
58 |
def calculate_memory(model_name:str, library:str, options:list, access_token:str, raw=False):
|
59 |
"Calculates the memory usage for a model"
|
|
|
|
|
60 |
if library == "auto":
|
61 |
library = None
|
62 |
if "http" in model_name and "//" in model_name:
|
|
|
15 |
USER_TOKEN = None
|
16 |
TOKEN = os.environ.get("HUGGINGFACE_API_LOGIN", None)
|
17 |
|
18 |
+
def translate_llama2(text):
    """Translate a `meta-llama` Llama 2 model name to its `-hf` counterpart.

    Args:
        text: A model name, e.g. "meta-llama/Llama-2-7b".

    Returns:
        The name with "-hf" appended when the suffix is missing, otherwise
        the name unchanged.
    """
    if not text.endswith("-hf"):
        return text + "-hf"
    # Bug fix: the original fell through here and implicitly returned None,
    # so a name already ending in "-hf" was clobbered by the caller's
    # `model_name = translate_llama2(model_name)` assignment.
    return text
|
22 |
+
|
23 |
def check_for_discussion(model_name:str):
|
24 |
"Checks if an automated discussion has been opened on the model by `model-sizer-bot`"
|
25 |
global TOKEN
|
|
|
62 |
|
63 |
def calculate_memory(model_name:str, library:str, options:list, access_token:str, raw=False):
|
64 |
"Calculates the memory usage for a model"
|
65 |
+
if model_name.startswith("meta-llama"):
|
66 |
+
model_name = translate_llama2(model_name)
|
67 |
if library == "auto":
|
68 |
library = None
|
69 |
if "http" in model_name and "//" in model_name:
|