changed commit methods
HumanStandardInputFlow.py  +5 -2
run.py  +2 -2
HumanStandardInputFlow.py  CHANGED
@@ -162,6 +162,9 @@ class HumanStandardInputFlow(AtomicFlow):
         log.info(query_message)
         human_input = self._read_input()
 
-        reply_message = self.
+        reply_message = self.package_output_message(
+            input_message = input_message,
+            response = {"human_input": human_input}
+        )
 
-        self.
+        self.send_message(reply_message, is_reply = True)
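For context, a minimal sketch of the reply pattern this commit introduces, written as the body of the flow's run method (the method name and signature are assumptions for illustration; _read_input, package_output_message, and send_message are the calls shown in the diff above):

# Illustrative sketch only, not part of the commit; method signature is assumed.
def run(self, input_message):
    human_input = self._read_input()                   # read the human's console input
    reply_message = self.package_output_message(       # wrap the response in an output message
        input_message = input_message,
        response = {"human_input": human_input},
    )
    self.send_message(reply_message, is_reply = True)  # send the packaged message back as a reply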
run.py  CHANGED
@@ -73,10 +73,10 @@ if __name__ == "__main__":
     )
 
     #option2: use the proxy_flow
-    #input_message = proxy_flow.
+    #input_message = proxy_flow.package_input_message(data = data)
 
     #7. ~~~ Run inference ~~~
-    future = proxy_flow.
+    future = proxy_flow.get_reply_future(input_message)
 
     #uncomment this line if you would like to get the full message back
     #reply_message = future.get_message()
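Taken together, the updated proxy_flow path in run.py amounts to the short sequence sketched below (illustrative only: proxy_flow and data are assumed to be set up earlier in the script, and the three calls are exactly those appearing in the diff):

# Illustrative sketch of the proxy_flow call sequence after this commit;
# proxy_flow and data are assumed to exist from earlier in run.py.
input_message = proxy_flow.package_input_message(data = data)  # package the raw data into an input message
future = proxy_flow.get_reply_future(input_message)            # run inference and get a future for the reply
reply_message = future.get_message()                           # optionally fetch the full reply message back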