# from dataclasses import dataclass
# These classes hold the user-facing column names, to avoid having to change them
# all around the code when a modification is needed
# @dataclass
# class ColumnContent:
#     name: str
#     type: str
#     displayed_by_default: bool
#     hidden: bool = False
#     never_hidden: bool = False
#     dummy: bool = False
# def fields(raw_class):
#     return [
#         v for k, v in raw_class.__dict__.items() if k[:2] != "__" and k[-2:] != "__"
#     ]
# @dataclass(frozen=True)
# class AutoEvalColumn: # Auto evals column
#     model_type_symbol = ColumnContent("T", "str", True)
#     model = ColumnContent("Model", "markdown", True, never_hidden=True)
#     average = ColumnContent("Average ⬆️", "number", True)
#     arc = ColumnContent("ARC", "number", True)
#     hellaswag = ColumnContent("HellaSwag", "number", True)
#     mmlu = ColumnContent("MMLU", "number", True)
#     truthfulqa = ColumnContent("TruthfulQA", "number", True)
#     model_type = ColumnContent("Type", "str", False)
#     precision = ColumnContent("Precision", "str", False, True)
#     license = ColumnContent("Hub License", "str", False)
#     params = ColumnContent("#Params (B)", "number", False)
#     likes = ColumnContent("Hub ❤️", "number", False)
#     revision = ColumnContent("Model sha", "str", False, False)
#     dummy = ColumnContent(
#         "model_name_for_query", "str", True
#     )  # dummy col to implement search bar (hidden by custom CSS)
# @dataclass(frozen=True)
# class EloEvalColumn: # Elo evals column
# model = ColumnContent("Model", "markdown", True)
# gpt4 = ColumnContent("GPT-4 (all)", "number", True)
# human_all = ColumnContent("Human (all)", "number", True)
# human_instruct = ColumnContent("Human (instruct)", "number", True)
# human_code_instruct = ColumnContent("Human (code-instruct)", "number", True)
# @dataclass(frozen=True)
# class EvalQueueColumn: # Queue column
# model = ColumnContent("model", "markdown", True)
# revision = ColumnContent("revision", "str", True)
# private = ColumnContent("private", "bool", True)
# precision = ColumnContent("precision", "bool", True)
# weight_type = ColumnContent("weight_type", "str", "Original")
# status = ColumnContent("status", "str", True)
# LLAMAS = [
# "huggingface/llama-7b",
# "huggingface/llama-13b",
# "huggingface/llama-30b",
# "huggingface/llama-65b",
# ]
# KOALA_LINK = "https://huggingface.co/TheBloke/koala-13B-HF"
# VICUNA_LINK = "https://huggingface.co/lmsys/vicuna-13b-delta-v1.1"
# OASST_LINK = "https://huggingface.co/OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5"
# DOLLY_LINK = "https://huggingface.co/databricks/dolly-v2-12b"
# MODEL_PAGE = "https://huggingface.co/models"
# LLAMA_LINK = "https://ai.facebook.com/blog/large-language-model-llama-meta-ai/"
# VICUNA_LINK = "https://huggingface.co/CarperAI/stable-vicuna-13b-delta"
# ALPACA_LINK = "https://crfm.stanford.edu/2023/03/13/alpaca.html"
# def model_hyperlink(link, model_name):
#     return f'<a target="_blank" href="{link}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">{model_name}</a>'
# def make_clickable_model(model_name):
# link = f"https://huggingface.co/{model_name}"
# if model_name in LLAMAS:
# link = LLAMA_LINK
# model_name = model_name.split("/")[1]
# elif model_name == "HuggingFaceH4/stable-vicuna-13b-2904":
# link = VICUNA_LINK
# model_name = "stable-vicuna-13b"
# elif model_name == "HuggingFaceH4/llama-7b-ift-alpaca":
# link = ALPACA_LINK
# model_name = "alpaca-13b"
# if model_name == "dolly-12b":
# link = DOLLY_LINK
# elif model_name == "vicuna-13b":
# link = VICUNA_LINK
# elif model_name == "koala-13b":
# link = KOALA_LINK
# elif model_name == "oasst-12b":
# link = OASST_LINK
# else:
# link = MODEL_PAGE
# return model_hyperlink(link, model_name)
# def styled_error(error):
# return f"<p style='color: red; font-size: 20px; text-align: center;'>{error}</p>"
# def styled_warning(warn):
# return f"<p style='color: orange; font-size: 20px; text-align: center;'>{warn}</p>"
# def styled_message(message):
# return (
# f"<p style='color: green; font-size: 20px; text-align: center;'>{message}</p>"
# )
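
# Model page links for chat/instruct models.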
Qwen_1_8B_Chat_Link = "https://huggingface.co/Qwen/Qwen-1_8B-Chat"
Qwen_7B_Chat_Link = "https://huggingface.co/Qwen/Qwen-7B-Chat"
Qwen_14B_Chat_Link = "https://huggingface.co/Qwen/Qwen-14B-Chat"
Qwen_72B_Chat_Link = "https://huggingface.co/Qwen/Qwen-72B-Chat"
Gemma_2B_it_Link = "https://huggingface.co/google/gemma-2b-it"
Gemma_7B_it_Link = "https://huggingface.co/google/gemma-7b-it"
ChatGLM3_6B_Link = "https://huggingface.co/THUDM/chatglm3-6b"
Mistral_7B_Instruct_v0_2_Link = "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2"
LLaMA_2_7B_Chat_Link = "https://huggingface.co/meta-llama/Llama-2-7b-chat-hf"
LLaMA_2_13B_Chat_Link = "https://huggingface.co/meta-llama/Llama-2-13b-chat-hf"
LLaMA_2_70B_Chat_Link = "https://huggingface.co/meta-llama/Llama-2-70b-chat-hf"
LLaMA_3_8B_Instruct_Link = "https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct"
LLaMA_3_70B_Instruct_Link = "https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct"
Vicuna_7B_v1_3_Link = "https://huggingface.co/lmsys/vicuna-7b-v1.3"
Vicuna_13B_v1_3_Link = "https://huggingface.co/lmsys/vicuna-13b-v1.3"
Vicuna_33B_v1_3_Link = "https://huggingface.co/lmsys/vicuna-33b-v1.3"
Baichuan2_13B_Chat_Link = "https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat"
Yi_34B_Chat_Link = "https://huggingface.co/01-ai/Yi-34B-Chat"
GPT_4_Turbo_Link = "https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4"
ErnieBot_4_0_Link = "https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t"
Gemini_1_0_Pro_Link = "https://ai.google.dev/gemini-api/docs/models/gemini"
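
# A minimal sketch (not part of the original file) of how these link constants
# could be gathered into a lookup table and rendered as clickable model names,
# mirroring the commented-out model_hyperlink/make_clickable_model helpers above.
# The MODEL_LINKS dict, the display-name keys, and make_clickable_chat_model are
# illustrative assumptions, not identifiers that exist in this repository.
MODEL_LINKS = {
    "Qwen-1.8B-Chat": Qwen_1_8B_Chat_Link,
    "Qwen-7B-Chat": Qwen_7B_Chat_Link,
    "Qwen-14B-Chat": Qwen_14B_Chat_Link,
    "Qwen-72B-Chat": Qwen_72B_Chat_Link,
    "Gemma-2B-it": Gemma_2B_it_Link,
    "Gemma-7B-it": Gemma_7B_it_Link,
    "ChatGLM3-6B": ChatGLM3_6B_Link,
    "Mistral-7B-Instruct-v0.2": Mistral_7B_Instruct_v0_2_Link,
    "LLaMA-2-7B-Chat": LLaMA_2_7B_Chat_Link,
    "LLaMA-2-13B-Chat": LLaMA_2_13B_Chat_Link,
    "LLaMA-2-70B-Chat": LLaMA_2_70B_Chat_Link,
    "LLaMA-3-8B-Instruct": LLaMA_3_8B_Instruct_Link,
    "LLaMA-3-70B-Instruct": LLaMA_3_70B_Instruct_Link,
    "Vicuna-7B-v1.3": Vicuna_7B_v1_3_Link,
    "Vicuna-13B-v1.3": Vicuna_13B_v1_3_Link,
    "Vicuna-33B-v1.3": Vicuna_33B_v1_3_Link,
    "Baichuan2-13B-Chat": Baichuan2_13B_Chat_Link,
    "Yi-34B-Chat": Yi_34B_Chat_Link,
    "GPT-4-Turbo": GPT_4_Turbo_Link,
    "ErnieBot-4.0": ErnieBot_4_0_Link,
    "Gemini-1.0-Pro": Gemini_1_0_Pro_Link,
}


def make_clickable_chat_model(model_name):
    # Fall back to the Hugging Face model page when no explicit link is known.
    link = MODEL_LINKS.get(model_name, f"https://huggingface.co/{model_name}")
    return (
        f'<a target="_blank" href="{link}" '
        f'style="color: var(--link-text-color); text-decoration: underline;'
        f'text-decoration-style: dotted;">{model_name}</a>'
    )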