# devmodetest2/perm/HuggingFaceAI.py
from langchain_huggingface import HuggingFacePipeline, ChatHuggingFace
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from typing import List


class HuggingFaceAI(ChatHuggingFace):
    def _to_chat_prompt(
        self,
        messages: List[BaseMessage],
    ) -> str:
        """Convert a list of messages into the prompt format expected by the wrapped LLM."""
        if not messages:
            raise ValueError("At least one HumanMessage must be provided!")

        # Unlike the base ChatHuggingFace, also accept a SystemMessage as the final message.
        if not isinstance(messages[-1], (HumanMessage, SystemMessage)):
            raise ValueError("Last message must be a HumanMessage or SystemMessage!")

        # Convert each message to the ChatML-style dict format, then let the
        # tokenizer's chat template render the final prompt string.
        messages_dicts = [self._to_chatml_format(m) for m in messages]

        return self.tokenizer.apply_chat_template(
            messages_dicts, tokenize=False, add_generation_prompt=True
        )
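

# A minimal usage sketch (not part of the original file): it shows how this class
# could be wired to a local pipeline. The model id "HuggingFaceH4/zephyr-7b-beta"
# and the generation settings are illustrative assumptions, not values from this
# repo; any chat model with a chat template should work the same way.
if __name__ == "__main__":
    llm = HuggingFacePipeline.from_model_id(
        model_id="HuggingFaceH4/zephyr-7b-beta",  # assumed example model
        task="text-generation",
        pipeline_kwargs={"max_new_tokens": 128},
    )
    chat = HuggingFaceAI(llm=llm)

    messages = [
        SystemMessage(content="You are a helpful assistant."),
        HumanMessage(content="What does _to_chat_prompt do?"),
    ]

    # Render the prompt via the overridden _to_chat_prompt, then generate a reply.
    print(chat._to_chat_prompt(messages))
    print(chat.invoke(messages).content)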