KvrParaskevi committed
Commit 5b78f45 · verified · 1 Parent(s): 41b2db3

Update chatbot.py

Files changed (1)
  1. chatbot.py +12 -13
chatbot.py CHANGED
@@ -10,8 +10,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 my_model_id = os.getenv('MODEL_REPO_ID', 'Default Value')
 token = os.getenv('HUGGINGFACEHUB_API_TOKEN')
 
-template = """<s>[INST]<<SYS>>
-You are an AI having conversation with a human. Below is an instruction that describes a task.
+template = """You are an AI having conversation with a human. Below is an instruction that describes a task.
 Write a response that appropriately completes the request.
 Reply with the most helpful and logic answer. During the conversation you need to ask the user
 the following questions to complete the hotel booking task.
@@ -21,13 +20,16 @@ the following questions to complete the hotel booking task.
 4) What is your name, your email address and phone number?
 Make sure you receive a logical answer from the user from every question to complete the hotel
 booking process.
-<</SYS>>
 
-Current conversation:
+Relevant Information:
+
+
 {history}
 
+Current Conversation:
+
 Human: {input}
-AI: [/INST]"""
+AI:"""
 
 #@st.cache_resource
 def load_model():
@@ -64,22 +66,19 @@ llm = load_pipeline()
 
 def demo_miny_memory():
     #prompt = ChatPromptTemplate.from_template(template)
-    memory = ConversationBufferMemory(memory_key="history", llm = llm)
+    memory = ConversationBufferMemory(llm = llm)
     return memory
 
 def demo_chain(input_text,history):
-    PROMPT = ChatPromptTemplate.from_template(template)
-    #PROMPT = PromptTemplate(template=template, input_variables=["history", "input"])
+    #PROMPT = ChatPromptTemplate.from_template(template)
+    PROMPT = PromptTemplate(template=template, input_variables=["history", "input"])
     conversation = ConversationChain(
-        prompt=PROMPT,
         llm=llm,
+        prompt=PROMPT,
        #verbose=langchain.globals.get_verbose(),
        verbose=True,
        memory=demo_miny_memory()
    )
 
-    chat_reply = conversation.invoke({
-        "input" : input_text,
-        "history" : history
-    }, return_only_outputs=True)
+    chat_reply = conversation.predict(input=input_text, return_only_outputs=True)
    return chat_reply['response'] #.split('AI:')[-1]
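
For reference, a minimal, self-contained sketch of how the updated prompt/chain wiring fits together under the classic LangChain API. The `FakeListLLM` stand-in, the shortened template, and the sample strings are placeholders for the quantized Hugging Face pipeline that `load_pipeline()` returns in the real app; note that `ConversationChain.predict` returns the generated text as a plain string.

```python
# Sketch only: FakeListLLM replaces the real Hugging Face pipeline so this runs
# without model weights or an HF token.
from langchain.chains import ConversationChain
from langchain.llms.fake import FakeListLLM
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate

# Abbreviated version of the committed template; the real one carries the full
# hotel-booking instructions.
template = """You are an AI having conversation with a human.

Relevant Information:

{history}

Current Conversation:

Human: {input}
AI:"""

llm = FakeListLLM(responses=["Sure - for which dates would you like to book?"])

prompt = PromptTemplate(template=template, input_variables=["history", "input"])
memory = ConversationBufferMemory()  # default memory_key "history" matches {history}

conversation = ConversationChain(llm=llm, prompt=prompt, memory=memory, verbose=True)

# predict() returns the model reply as a string and appends the turn to memory.
reply = conversation.predict(input="I want to book a room.")
print(reply)
```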