# Spaces: Sleeping — HuggingFace Spaces status banner captured along with the
# source; kept here as a comment so the file parses as Python.
"""Minimal LangChain demo: answer one question with a remote HuggingFace Hub LLM.

Builds a chain-of-thought style prompt, wires it to the
``BramVanroy/Llama-2-13b-chat-dutch`` model hosted on the HuggingFace Hub,
and prints the chain's result. Requires ``HUGGINGFACEHUB_API_TOKEN`` to be
set in the environment for the Hub call to succeed.
"""
from langchain_community.llms import HuggingFaceHub
from langchain.callbacks.manager import CallbackManager  # NOTE(review): unused below — confirm before removing
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler  # NOTE(review): unused below
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_community.llms import LlamaCpp  # NOTE(review): unused — likely an alternative local backend; confirm

# The question fed to the chain (trailing space kept from the original).
question = "Who won the FIFA World Cup in the year 1994? "

# Prompt with a {question} placeholder; the "think step by step" suffix nudges
# the model toward a reasoned answer.
template = """Question: {question}
Answer: Let's think step by step."""

prompt = PromptTemplate.from_template(template)

# Remote inference endpoint on the HuggingFace Hub. model_kwargs are passed
# through to the endpoint: temperature 0.5, response capped at 64 tokens.
llm = HuggingFaceHub(
    repo_id="BramVanroy/Llama-2-13b-chat-dutch",
    model_kwargs={"temperature": 0.5, "max_length": 64},
)

llm_chain = LLMChain(prompt=prompt, llm=llm)

# invoke() returns a dict (e.g. {"question": ..., "text": ...}); print it whole.
print(llm_chain.invoke(question))