Samuel Thomas committed · Commit 92fb29b · 1 Parent(s): b6feb1b
update llm
app.py CHANGED
@@ -1,12 +1,19 @@
 import gradio as gr
+import langgraph as lg
 from typing import TypedDict, Annotated
 from huggingface_hub import InferenceClient, login, list_models
+from langgraph.prebuilt import ToolNode, tools_condition
+from langgraph.graph.message import add_messages
+from langgraph.tools import Tool
+from langgraph.retrievers import BM25Retriever
 from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFacePipeline
 from langgraph.graph.message import add_messages
-from langchain.docstore.document import Document
 from langgraph.prebuilt import ToolNode, tools_condition
-from
-from
+from langgraph.graph import START, StateGraph
+from langgraph.tools.dsl import TextDocument
+from langgraph.tools.dsl.query import Query
+from langgraph.tools.dsl.answer import Answer
+from langgraph.tools.dsl.answer_format import TextAnswer
 import os
 from langgraph.graph import START, StateGraph
 from langchain.tools import Tool
@@ -19,7 +26,8 @@ login(token=HUGGINGFACEHUB_API_TOKEN, add_to_git_credential=True)
 llm = HuggingFaceEndpoint(
     #repo_id="HuggingFaceH4/zephyr-7b-beta",
     #repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
-    repo_id="deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct",
+    #repo_id="deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct",
+    repo_id="deepseek-ai/DeepSeek-Coder-V2-Instruct",
     task="text-generation",
     max_new_tokens=512,
     do_sample=False,