dromerosm committed
Commit e82df87
1 Parent(s): 38a7950

Update app.py

Files changed (1)
  1. app.py +9 -13
app.py CHANGED
@@ -45,7 +45,8 @@ models = {
 }
 
 # Layout for model selection and max_tokens slider
-col1, col2 = st.columns(2)
+col1, col2 = st.columns([1, 3])  # Adjust the ratio to make the first column smaller
+
 
 with col1:
     model_option = st.selectbox(
@@ -54,26 +55,21 @@ with col1:
         format_func=lambda x: models[x]["name"],
         index=0,  # Default to the first model in the list
     )
-
-    # Detect model change and clear chat history if model has changed
-    if st.session_state.selected_model != model_option:
-        st.session_state.messages = []
-        st.session_state.selected_model = model_option
-
-    max_tokens_range = models[model_option]["tokens"]
-
-with col2:
-    # Adjust max_tokens slider dynamically based on the selected model
+    max_tokens_range = models[model_option]["tokens"]
     max_tokens = st.slider(
         "Max Tokens:",
-        min_value=512,  # Minimum value to allow some flexibility
+        min_value=512,
         max_value=max_tokens_range,
-        # Default value or max allowed if less
         value=min(32768, max_tokens_range),
         step=512,
         help=f"Adjust the maximum number of tokens (words) for the model's response. Max for selected model: {max_tokens_range}",
     )
 
+# Detect model change and clear chat history if model has changed
+if st.session_state.selected_model != model_option:
+    st.session_state.messages = []
+    st.session_state.selected_model = model_option
+
 # Display chat messages from history on app rerun
 for message in st.session_state.messages:
     avatar = "🤖" if message["role"] == "assistant" else "🕺"
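For context, here is a minimal sketch of how this section of app.py reads after the commit. Only the lines visible in the diff above come from the actual file; the models entries, the selectbox label and options, and the session-state initialization are assumptions added so the snippet runs standalone.

import streamlit as st

# Hypothetical subset of the models dict referenced by the hunk header above;
# the real app.py defines its own entries.
models = {
    "llama3-70b-8192": {"name": "LLaMA3-70b", "tokens": 8192},
    "mixtral-8x7b-32768": {"name": "Mixtral-8x7b", "tokens": 32768},
}

# Session-state defaults so the model-change check below has something to
# compare against on the first run (assumed; the real app sets these earlier).
if "messages" not in st.session_state:
    st.session_state.messages = []
if "selected_model" not in st.session_state:
    st.session_state.selected_model = None

# Layout for model selection and max_tokens slider
col1, col2 = st.columns([1, 3])  # first column narrower than the second

with col1:
    # The label and options list are placeholders; the diff only shows the
    # trailing arguments of this selectbox call.
    model_option = st.selectbox(
        "Choose a model:",
        options=list(models.keys()),
        format_func=lambda x: models[x]["name"],
        index=0,  # Default to the first model in the list
    )
    # Look up the selected model's token limit, then size the slider to it.
    max_tokens_range = models[model_option]["tokens"]
    max_tokens = st.slider(
        "Max Tokens:",
        min_value=512,
        max_value=max_tokens_range,
        value=min(32768, max_tokens_range),
        step=512,
        help=f"Adjust the maximum number of tokens (words) for the model's response. Max for selected model: {max_tokens_range}",
    )

# Detect model change and clear chat history if model has changed
if st.session_state.selected_model != model_option:
    st.session_state.messages = []
    st.session_state.selected_model = model_option

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    avatar = "🤖" if message["role"] == "assistant" else "🕺"
    with st.chat_message(message["role"], avatar=avatar):
        st.markdown(message["content"])

Because Streamlit reruns the whole script on every widget interaction, reading the token limit immediately after the selectbox keeps the slider's max_value in step with the currently selected model, while the history reset only fires on runs where selected_model actually changes.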