input_interface: # Connector between the "input data" and the Flow
  _target_: flows.interfaces.KeyInterface
  additional_transformations:
    - _target_: flows.data_transformations.KeyMatchInput # Pass the input parameters specified by the flow

output_interface: # Connector between the Flow's output and the caller
  _target_: flows.interfaces.KeyInterface
  keys_to_rename:
    api_output: answer # Rename the api_output to answer

flow: # Overrides the OpenAIChatAtomicFlow config
  _target_: aiflows.OpenAIChatAtomicFlowModule.OpenAIChatAtomicFlow.instantiate_from_default_config
  name: "SimpleQA_Flow"
  description: "A flow that answers questions."

  # ~~~ Input interface specification ~~~
  input_interface_non_initialized:
    - "question"

  # ~~~ OpenAI model parameters ~~~
  model: "gpt-3.5-turbo"
  generation_parameters:
    n: 1
    max_tokens: 3000
    temperature: 0.3

    model_kwargs:
      top_p: 0.2
      frequency_penalty: 0
      presence_penalty: 0

  n_api_retries: 6
  wait_time_between_retries: 20

  # ~~~ Prompt specification ~~~
  system_message_prompt_template:
    _target_: langchain.PromptTemplate
    template: |2-
      You are a helpful chatbot that truthfully answers questions.
    input_variables: []
    partial_variables: {}
    template_format: jinja2

  init_human_message_prompt_template:
    _target_: langchain.PromptTemplate
    template: |2-
      Answer the following question: {{question}}
    input_variables: ["question"]
    partial_variables: {}
    template_format: jinja2
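
For context, here is a minimal sketch of how a config like this can be consumed. It assumes the YAML is saved as `simpleQA.yaml` (a hypothetical file name) and that the `_target_` entries are resolved with Hydra's `instantiate`, as the `_target_` convention suggests; it is illustrative, not the library's official runner script.

```python
# Minimal sketch (assumed usage): load the YAML above and resolve each
# `_target_` entry into an object. "simpleQA.yaml" is a hypothetical path.
from hydra.utils import instantiate
from omegaconf import OmegaConf

cfg = OmegaConf.load("simpleQA.yaml")

# `_recursive_=False` leaves nested dicts (e.g. the prompt templates)
# for the flow class to interpret itself.
flow = instantiate(cfg.flow, _recursive_=False)
input_interface = instantiate(cfg.input_interface, _recursive_=False)
output_interface = instantiate(cfg.output_interface, _recursive_=False)
```

From here, an input payload containing a `question` key is passed through `input_interface` (which matches the keys declared in `input_interface_non_initialized`), the flow produces `api_output`, and `output_interface` renames it to `answer`, as specified by `keys_to_rename`.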