Damien Benveniste committed "modified"
Commit 36118aa · 1 Parent(s): edc489a
Files changed:
- Dockerfile +3 -0
- app/chains.py +3 -3
- app/crud.py +0 -6
- app/main.py +2 -2
Dockerfile
CHANGED
@@ -17,5 +17,8 @@ RUN --mount=type=secret,id=PINECONE_API_KEY,mode=0444,required=true \
 RUN --mount=type=secret,id=OPENAI_API_KEY,mode=0444,required=true \
     cat /run/secrets/OPENAI_API_KEY > /app/test
 
+RUN --mount=type=secret,id=HF_TOKEN,mode=0444,required=true \
+    cat /run/secrets/HF_TOKEN > /app/test
+
 COPY --chown=user ./app /app
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
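Note: the added lines mount the HF_TOKEN Space secret into the image build with BuildKit's --mount=type=secret, mirroring the existing PINECONE_API_KEY and OPENAI_API_KEY steps, and cat the secret to /app/test as a build-time check. At runtime the application reads os.environ['HF_TOKEN'] (see app/chains.py below); on Spaces the secret is normally injected as an environment variable. A minimal sketch of resolving the token from either source follows; the fallback logic is an illustrative assumption, not code from this repo.

import os
from pathlib import Path

def get_hf_token(secret_path: str = "/run/secrets/HF_TOKEN") -> str:
    # On Hugging Face Spaces the secret is normally available as an env var.
    token = os.environ.get("HF_TOKEN")
    if token:
        return token
    # Fallback: read the BuildKit-mounted secret file if it is present.
    secret_file = Path(secret_path)
    if secret_file.exists():
        return secret_file.read_text().strip()
    raise RuntimeError("HF_TOKEN is not configured")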
app/chains.py
CHANGED
@@ -1,6 +1,6 @@
+import os
 from langchain_huggingface import HuggingFaceEndpoint
-from langchain_core.runnables import
-from langchain_core.output_parsers import CommaSeparatedListOutputParser
+from langchain_core.runnables import RunnablePassthrough
 import schemas
 from prompts import (
     raw_prompt_formatted,
@@ -11,13 +11,13 @@ from prompts import (
     tokenizer
 )
 from data_indexing import DataIndexer
-from operator import itemgetter
 
 
 data_indexer = DataIndexer()
 
 llm = HuggingFaceEndpoint(
     repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
+    huggingfacehub_api_token=os.environ['HF_TOKEN'],
     max_new_tokens=512,
     stop_sequences=[tokenizer.eos_token]
 )
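Note: this change replaces the broken and unused imports (the incomplete langchain_core.runnables import, CommaSeparatedListOutputParser, itemgetter) with import os and RunnablePassthrough, and passes the token to HuggingFaceEndpoint explicitly via huggingfacehub_api_token=os.environ['HF_TOKEN'] instead of relying on an implicit login. A minimal sketch of how such an endpoint is typically wired into an LCEL chain with RunnablePassthrough is shown below; the prompt and chain names are illustrative assumptions, not the repo's prompts.py or chain definitions.

import os
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.runnables import RunnablePassthrough
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

# Endpoint configured as in the diff: explicit token instead of implicit login.
llm = HuggingFaceEndpoint(
    repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
    huggingfacehub_api_token=os.environ["HF_TOKEN"],
    max_new_tokens=512,
)

# Hypothetical prompt standing in for prompts.raw_prompt_formatted.
prompt = PromptTemplate.from_template("Answer the question:\n{question}")

# RunnablePassthrough forwards the raw input into the prompt's {question} slot.
chain = {"question": RunnablePassthrough()} | prompt | llm | StrOutputParser()
# Example call: chain.invoke("What does DataIndexer do?")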
app/crud.py
CHANGED
@@ -2,12 +2,6 @@ from sqlalchemy.orm import Session
 import models, schemas
 from fastapi import HTTPException
 
-# def create_user(db: Session, user: schemas.UserCreate):
-#     db_user = models.User(username=user.username)
-#     db.add(db_user)
-#     db.commit()
-#     db.refresh(db_user)
-#     return db_user
 
 def get_or_create_user(db: Session, username: str):
     user = db.query(models.User).filter(models.User.username == username).first()
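Note: the dead, commented-out create_user helper is removed; user creation is handled by get_or_create_user, whose first line appears in the diff context above. A sketch of the usual get-or-create pattern that name implies follows; everything past the initial query is an assumption about the rest of the file, not the committed code.

from sqlalchemy.orm import Session
import models

def get_or_create_user(db: Session, username: str):
    # Look the user up first (this line matches the diff context).
    user = db.query(models.User).filter(models.User.username == username).first()
    if user is None:
        # Assumed creation path, mirroring the deleted create_user comments.
        user = models.User(username=username)
        db.add(user)
        db.commit()
        db.refresh(user)
    return user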
app/main.py
CHANGED
@@ -8,9 +8,9 @@ from typing import Any, List
 import crud, models, schemas
 from database import SessionLocal, engine
 from chains import llm, formatted_chain, history_chain, rag_chain
-from prompts import format_chat_history
+from prompts import format_chat_history
 from callbacks import LogResponseCallback
-
+
 
 models.Base.metadata.create_all(bind=engine)
 
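Note: the main.py hunk only rewrites the import lines around from prompts import format_chat_history and the blank line after from callbacks import LogResponseCallback (a whitespace-level change in the diff view); the module still creates the tables with models.Base.metadata.create_all(bind=engine) and serves the chains over FastAPI via uvicorn, per the Dockerfile CMD. A hypothetical sketch of how such an app typically wires a per-request DB session and one chain endpoint is below; the route, request shape, and dependency are assumptions, not this Space's actual endpoints.

from fastapi import Depends, FastAPI
from sqlalchemy.orm import Session

import models
from database import SessionLocal, engine
from chains import rag_chain

models.Base.metadata.create_all(bind=engine)
app = FastAPI()

def get_db():
    # Standard session-per-request dependency for SQLAlchemy + FastAPI.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

@app.post("/rag")  # hypothetical route name
async def answer(question: str, db: Session = Depends(get_db)):
    # rag_chain comes from app/chains.py; the input key is an assumption.
    return await rag_chain.ainvoke({"question": question})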