Joshua Sundance Bailey committed on
Commit
3868a47
1 Parent(s): 827bf89

enable .env with defaults

Browse files
.env-example ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ APP_PORT=7860
2
+
3
+ LANGCHAIN_ENDPOINT="https://api.smith.langchain.com"
4
+ LANGCHAIN_API_KEY="ls__..."
5
+ LANGCHAIN_TRACING_V2="true"
6
+ LANGCHAIN_PROJECT="streamlit_test"
7
+
8
+ ANYSCALE_API_KEY="secret_..."
9
+ OPENAI_API_KEY="sk-..."
10
+ ANTHROPIC_API_KEY="sk-ant-..."
11
+
12
+ DEFAULT_SYSTEM_PROMPT="You are a helpful chatbot."
13
+
14
+ DEFAULT_MODEL="gpt-3.5-turbo"
15
+
16
+ DEFAULT_TEMPERATURE=0.7
17
+ MIN_TEMPERATURE=0.0
18
+ MAX_TEMPERATURE=1.0
19
+
20
+ DEFAULT_MAX_TOKENS=1000
21
+ MIN_MAX_TOKENS=1
22
+ MAX_MAX_TOKENS=100000
docker-compose.yml CHANGED
@@ -4,6 +4,8 @@ services:
4
  langchain-streamlit-demo:
5
  image: langchain-streamlit-demo:latest
6
  build: .
 
 
7
  ports:
8
  - "${APP_PORT:-7860}:${APP_PORT:-7860}"
9
  command: [
 
4
  langchain-streamlit-demo:
5
  image: langchain-streamlit-demo:latest
6
  build: .
7
+ env_file:
8
+ - .env
9
  ports:
10
  - "${APP_PORT:-7860}:${APP_PORT:-7860}"
11
  command: [
langchain-streamlit-demo/app.py CHANGED
@@ -1,4 +1,5 @@
1
  import os
 
2
 
3
  import anthropic
4
  import openai
@@ -53,8 +54,25 @@ model = st.sidebar.selectbox(
53
  )
54
  provider = _MODEL_DICT[model]
55
 
56
- provider_api_key = st.sidebar.text_input(f"{provider} API key", type="password")
57
- langsmith_api_key = st.sidebar.text_input(
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  "LangSmith API Key (optional)",
59
  type="password",
60
  )
 
1
  import os
2
+ from typing import Union
3
 
4
  import anthropic
5
  import openai
 
54
  )
55
  provider = _MODEL_DICT[model]
56
 
57
+
58
# Maps each provider display name to the environment variable holding its key.
_PROVIDER_KEY_ENV_VARS = {
    "OpenAI": "OPENAI_API_KEY",
    "Anthropic": "ANTHROPIC_API_KEY",
    "Anyscale Endpoints": "ANYSCALE_API_KEY",
    "LANGSMITH": "LANGCHAIN_API_KEY",
}


def api_key_from_env(_provider: str) -> Union[str, None]:
    """Return the API key for ``_provider`` from the environment, or None.

    Args:
        _provider: Provider display name ("OpenAI", "Anthropic",
            "Anyscale Endpoints") or the sentinel "LANGSMITH".

    Returns:
        The value of the matching environment variable, or None when the
        provider is unknown or the variable is unset.
    """
    env_var = _PROVIDER_KEY_ENV_VARS.get(_provider)
    # Unknown provider -> no lookup; unset variable -> .get returns None.
    if env_var is None:
        return None
    return os.environ.get(env_var)
69
+
70
+
71
# Resolve credentials: prefer keys already present in the environment and
# only fall back to prompting the user in the sidebar when none is set.
_env_provider_key = api_key_from_env(provider)
provider_api_key = _env_provider_key or st.sidebar.text_input(
    f"{provider} API key",
    type="password",
)

_env_langsmith_key = api_key_from_env("LANGSMITH")
langsmith_api_key = _env_langsmith_key or st.sidebar.text_input(
    "LangSmith API Key (optional)",
    type="password",
)
langchain-streamlit-demo/llm_stuff.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from datetime import datetime
2
 
3
  import streamlit as st
@@ -16,7 +17,10 @@ _MEMORY = ConversationBufferMemory(
16
  memory_key="chat_history",
17
  )
18
 
19
- _DEFAULT_SYSTEM_PROMPT = "You are a helpful chatbot."
 
 
 
20
 
21
  _MODEL_DICT = {
22
  "gpt-3.5-turbo": "OpenAI",
@@ -28,15 +32,15 @@ _MODEL_DICT = {
28
  "meta-llama/Llama-2-70b-chat-hf": "Anyscale Endpoints",
29
  }
30
  _SUPPORTED_MODELS = list(_MODEL_DICT.keys())
31
- _DEFAULT_MODEL = "gpt-3.5-turbo"
32
 
33
- _DEFAULT_TEMPERATURE = 0.7
34
- _MIN_TEMPERATURE = 0.0
35
- _MAX_TEMPERATURE = 1.0
36
 
37
- _DEFAULT_MAX_TOKENS = 1000
38
- _MIN_TOKENS = 1
39
- _MAX_TOKENS = 100000
40
 
41
 
42
  def get_llm(
 
1
+ import os
2
  from datetime import datetime
3
 
4
  import streamlit as st
 
17
  memory_key="chat_history",
18
  )
19
 
20
+ _DEFAULT_SYSTEM_PROMPT = os.environ.get(
21
+ "DEFAULT_SYSTEM_PROMPT",
22
+ "You are a helpful chatbot.",
23
+ )
24
 
25
  _MODEL_DICT = {
26
  "gpt-3.5-turbo": "OpenAI",
 
32
  "meta-llama/Llama-2-70b-chat-hf": "Anyscale Endpoints",
33
  }
34
  _SUPPORTED_MODELS = list(_MODEL_DICT.keys())
35
+ _DEFAULT_MODEL = os.environ.get("DEFAULT_MODEL", "gpt-3.5-turbo")
36
 
37
+ _DEFAULT_TEMPERATURE = float(os.environ.get("DEFAULT_TEMPERATURE", 0.7))
38
+ _MIN_TEMPERATURE = float(os.environ.get("MIN_TEMPERATURE", 0.0))
39
+ _MAX_TEMPERATURE = float(os.environ.get("MAX_TEMPERATURE", 1.0))
40
 
41
+ _DEFAULT_MAX_TOKENS = int(os.environ.get("DEFAULT_MAX_TOKENS", 1000))
42
+ _MIN_TOKENS = int(os.environ.get("MIN_MAX_TOKENS", 1))
43
+ _MAX_TOKENS = int(os.environ.get("MAX_MAX_TOKENS", 100000))
44
 
45
 
46
  def get_llm(