# VoxPoserExamples / cfg_voxposer.yaml
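# Language Model Program (LMP) settings for the real-robot VoxPoser examples.
# Field meanings below follow the Code as Policies / VoxPoser LMP convention and are
# inferred rather than documented in this repo: prompt_path points to the few-shot
# prompt file, query_prefix/query_suffix wrap the natural-language query, stop lists
# the stop sequences passed to the model, maintain_session keeps previously generated
# code in context across calls, and has_return/return_val_name name the variable read
# back after the generated code is executed.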
lmps:
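  # planner: top-level LMP that breaks a free-form instruction into a sequence of
  # sub-tasks, each handled in turn by the composer (role inferred from the VoxPoser pipeline).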
  planner:
    prompt_path: VoxPoserApiExamples/Real/planner/real_planner_prompt.py
    model: gpt-4
    max_tokens: 1024
    temperature: 0
    query_prefix: '# Query: '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: True
    debug_mode: False
    include_context: True
    has_return: False
    return_val_name: ret_val
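  # composer: per-sub-task LMP that calls the value-map LMPs below and hands the
  # resulting voxel maps to the motion planner (role inferred from the VoxPoser pipeline).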
  composer:
    prompt_path: VoxPoserApiExamples/Real/composer/real_composer_prompt.py
    model: gpt-4
    max_tokens: 1024
    temperature: 0
    query_prefix: '# Query: '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: False
    include_context: True
    has_return: False
    return_val_name: ret_val
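  # Value-map LMPs: each generates code that fills a 3D voxel map consumed by the
  # motion planner: affordance (where to move toward), avoidance (what to keep away
  # from), gripper (open/close), rotation (end-effector orientation), and velocity
  # (speed scaling). Roles inferred from the VoxPoser paper.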
  get_affordance_map:
    prompt_path: VoxPoserApiExamples/Real/value_maps/real_get_affordance_map_prompt.py
    model: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# Query: '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: True
    include_context: True
    has_return: True
    return_val_name: ret_val
  get_avoidance_map:
    prompt_path: VoxPoserApiExamples/Real/value_maps/real_get_avoidance_map_prompt.py
    model: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# Query: '
    query_suffix: '.'
    stop: ['#', 'objects = [']
    maintain_session: False
    debug_mode: True
    include_context: True
    has_return: True
    return_val_name: ret_val
  get_gripper_map:
    prompt_path: VoxPoserApiExamples/Real/value_maps/real_get_gripper_map_prompt.py
    model: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# Query: '
    query_suffix: '.'
    stop: ['#']
    maintain_session: False
    debug_mode: True
    include_context: True
    has_return: True
    return_val_name: new_shape_pts
  get_rotation_map:
    prompt_path: VoxPoserApiExamples/Real/value_maps/real_get_rotation_map_prompt.py
    model: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# define function: '
    query_suffix: '.'
    stop: ['# define', '# example']
    maintain_session: False
    debug_mode: True
    include_context: True
    has_return: True
  get_velocity_map:
    prompt_path: VoxPoserApiExamples/Real/value_maps/real_get_velocity_map_prompt.py
    model: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# define function: '
    query_suffix: '.'
    stop: ['# define', '# example']
    maintain_session: False
    debug_mode: True
    include_context: True
    has_return: True
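  # fgen: function-generation LMP (from Code as Policies) that writes Python helper
  # functions on the fly when generated code calls something undefined. Unlike the
  # LMPs above, it reads its prompt from a local prompts/fgen.py and keys the model
  # under `engine` rather than `model`.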
  fgen:
    prompt_path: prompts/fgen.py
    engine: gpt-4
    max_tokens: 512
    temperature: 0
    query_prefix: '# define function: '
    query_suffix: '.'
    stop: ['# define', '# example']
    maintain_session: False
    debug_mode: False
    include_context: True
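# Named workspace locations used to resolve spatial references such as "top left";
# values are presumably (x, y) in meters in the robot base frame, with table_z the
# height of the table surface.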
tabletop_coords:
  top_left: [-0.25, -0.25]
  top_side: [0, -0.25]
  top_right: [0.25, -0.25]
  left_side: [-0.25, -0.5]
  middle: [0, -0.5]
  right_side: [0.25, -0.5]
  bottom_left: [-0.25, -0.75]
  bottom_side: [0, -0.75]
  bottom_right: [0.25, -0.75]
  table_z: 0
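# Illustrative usage, kept as comments so this file stays valid YAML. A minimal sketch
# of how one LMP entry above could drive a chat-completion call; this is not the
# VoxPoser implementation, and query_llm plus the example instruction are hypothetical
# names introduced only for illustration.
#
#   import yaml
#   from openai import OpenAI
#
#   def query_llm(lmp_cfg, instruction):
#       # Few-shot prompt file followed by the wrapped query.
#       with open(lmp_cfg['prompt_path']) as f:
#           prompt = f.read()
#       user_query = lmp_cfg['query_prefix'] + instruction + lmp_cfg['query_suffix']
#       client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment
#       resp = client.chat.completions.create(
#           model=lmp_cfg['model'],
#           messages=[{'role': 'user', 'content': prompt + '\n' + user_query}],
#           max_tokens=lmp_cfg['max_tokens'],
#           temperature=lmp_cfg['temperature'],
#           stop=lmp_cfg['stop'],
#       )
#       return resp.choices[0].message.content
#
#   with open('cfg_voxposer.yaml') as f:
#       cfg = yaml.safe_load(f)
#   code = query_llm(cfg['lmps']['planner'], 'put the apple in the drawer')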