Commit bbb084b · Parent: 6339aa0
reset theme

Files changed:
- __pycache__/app.cpython-310.pyc +0 -0
- __pycache__/app_2.cpython-310.pyc +0 -0
- __pycache__/config.cpython-311.pyc +0 -0
- app.py +2 -2
- app_2.py +206 -0
__pycache__/app.cpython-310.pyc (CHANGED)
Binary files a/__pycache__/app.cpython-310.pyc and b/__pycache__/app.cpython-310.pyc differ

__pycache__/app_2.cpython-310.pyc (ADDED)
Binary file (5.29 kB)

__pycache__/config.cpython-311.pyc (ADDED)
Binary file (1.3 kB)
app.py (CHANGED)

@@ -167,8 +167,8 @@ footer{
 
 """
 css_string2 = ""
-
-with gr.Blocks(
+# theme=theme,
+with gr.Blocks() as demo:
     with gr.Row(elem_id="force_black_bg"):
         with gr.Column(elem_id="force_black_bg"):
             seed = gr.Text( label="AI Safety Skepticism: What's Your Take?", placeholder="Enter an argument or something you'd like to say!")
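In short, this hunk rebuilds the Blocks context without the custom theme and CSS arguments, so the Space falls back to Gradio's default theme, matching the commit message "reset theme". A minimal sketch of the resulting construction, reusing the names from app.py; the commented-out "before" line mirrors the call preserved in app_2.py below:

import gradio as gr

# before: with gr.Blocks(theme=theme, css=css_string2) as demo:
# after: the default Gradio theme is used
with gr.Blocks() as demo:
    with gr.Row(elem_id="force_black_bg"):
        with gr.Column(elem_id="force_black_bg"):
            seed = gr.Text(label="AI Safety Skepticism: What's Your Take?",
                           placeholder="Enter an argument or something you'd like to say!")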
app_2.py (ADDED)

@@ -0,0 +1,206 @@
'''

CONFIG AND IMPORTS

'''
from config import default_config

from types import SimpleNamespace
import gradio as gr
import os, random
from pathlib import Path
import tiktoken
from getpass import getpass

import openai
from langchain.text_splitter import MarkdownHeaderTextSplitter
import numpy as np

from langchain.embeddings import OpenAIEmbeddings
# from langchain.vectorstores import Chroma
from typing import Iterable
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes
import time


if os.getenv("OPENAI_API_KEY") is None:
    if any(['VSCODE' in x for x in os.environ.keys()]):
        print('Please enter password in the VS Code prompt at the top of your VS Code window!')
    os.environ["OPENAI_API_KEY"] = getpass("Paste your OpenAI key from: https://platform.openai.com/account/api-keys\n")
openai.api_key = os.getenv("OPENAI_API_KEY", "")

assert os.getenv("OPENAI_API_KEY", "").startswith("sk-"), "This doesn't look like a valid OpenAI API key"
print("OpenAI API key configured")

embeddings_model = OpenAIEmbeddings()

md = ""
directory_path = "safety_docs"

for filename in os.listdir(directory_path):
    if filename.endswith(".md"):
        with open(os.path.join(directory_path, filename), 'r') as file:
            content = file.read()
            md = md + content

markdown_document = md

headers_to_split_on = [
    ("#", "Header 1"),
    ("##", "Header 2"),
    ("###", "Header 3"),
]

markdown_splitter = MarkdownHeaderTextSplitter(headers_to_split_on=headers_to_split_on)
md_header_splits = markdown_splitter.split_text(markdown_document)
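For reference, a small sketch (made-up input, not part of the commit) of what md_header_splits contains: MarkdownHeaderTextSplitter returns Document objects whose .page_content is a section body and whose .metadata maps the configured header names to section titles, which is what find_nearest_neighbor reads back below.

sample = "# AGI is far away\nSome text.\n## A sub-point\nMore text."
for doc in MarkdownHeaderTextSplitter(headers_to_split_on=headers_to_split_on).split_text(sample):
    # doc.metadata looks like {'Header 1': 'AGI is far away', ...}
    print(doc.metadata, "->", doc.page_content)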
def find_nearest_neighbor(argument="", max_args_in_output=2):
    '''
    INPUT: argument (string)

    RETURNS: the "Header 1" titles of the max_args_in_output document sections
    closest to the argument (by cosine similarity of their embeddings), as a
    newline-separated string.
    '''

    embeddings = embeddings_model
    # embed every markdown section and the incoming argument
    embedding_matrix = np.array([embeddings.embed_query(text.page_content) for text in md_header_splits])
    argument_embedding = embeddings.embed_query(argument)

    # cosine similarity between the argument and every section
    dot_products = np.dot(embedding_matrix, argument_embedding)
    norms = np.linalg.norm(embedding_matrix, axis=1) * np.linalg.norm(argument_embedding)
    cosine_similarities = dot_products / norms

    # indices of the top matches, best first
    nearest_indices = np.argsort(cosine_similarities)[-max_args_in_output:][::-1]

    arr = [md_header_splits[index].metadata for index in nearest_indices]
    output = ""
    for thing in arr:
        output = output + thing['Header 1'] + "\n"

    return output
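Note that find_nearest_neighbor re-embeds every document section on each call. A minimal sketch of the same cosine-similarity lookup against a matrix computed once at startup; the names doc_matrix, headers and nearest_headers are hypothetical, not from the commit:

doc_matrix = np.array([embeddings_model.embed_query(d.page_content) for d in md_header_splits])  # computed once
headers = [d.metadata.get('Header 1', '') for d in md_header_splits]

def nearest_headers(argument, k=2):
    q = np.array(embeddings_model.embed_query(argument))
    sims = doc_matrix @ q / (np.linalg.norm(doc_matrix, axis=1) * np.linalg.norm(q))
    return [headers[i] for i in np.argsort(sims)[-k:][::-1]]  # best match first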
def get_gpt_response(user_prompt, system_prompt=default_config.system_prompt, model=default_config.model_name, n=1, max_tokens=200):
    '''
    INPUT: user_prompt, system_prompt, model, n, max_tokens

    RETURNS: the content of the (last) completion choice as a string.
    '''

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        n=n,
        max_tokens=max_tokens
    )

    # with n=1 this simply returns the single generated message
    for choice in response.choices:
        generation = choice.message.content
    return generation


# return the gpt generated response
def greet1(argument):
    user_prompt = default_config.user_prompt_1 + argument + default_config.user_prompt_2
    response = get_gpt_response(user_prompt=user_prompt)
    return response

# return the nearest neighbor arguments
def greet2(argument):
    nearest_neighbor = find_nearest_neighbor(argument)
    return "Your argument may fall under the common arguments against AI safety. \nIs it one of these? \n" + nearest_neighbor + "\nSee the taxonomy of arguments below"
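A hedged usage sketch (assumes the safety_docs folder, a valid API key, and the default_config prompts are in place; the example argument is taken from the gr.Examples list further down):

example = "AGI is far away, I'm not worried"
print(greet2(example))  # nearest common-argument headers from the taxonomy
print(greet1(example))  # GPT-generated reply built from the default_config prompts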
# theme = gr.themes.Monochrome()
theme = gr.themes.Monochrome(
    # neutral_hue=gr.themes.colors.red,

    # n, boxes, text, nothing bottom text most text
    neutral_hue=gr.themes.Color("red", "#636363", "#636363", "lightgrey", "lightgrey", "lightgrey", "lightgrey", "grey", "red", "black", "red"),
    primary_hue=gr.themes.Color("#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010", "#8c0010"),
    secondary_hue=gr.themes.Color("white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white"),
)


theme = theme.set(
    body_background_fill="black",
    block_title_background_fill="black",
    block_background_fill="black",
    body_text_color="white",
    link_text_color='*primary_50',
    link_text_color_dark='*primary_50',
    link_text_color_active='*primary_50',
    link_text_color_active_dark='*primary_50',
    link_text_color_hover='*primary_50',
    link_text_color_hover_dark='*primary_50',
    link_text_color_visited='*primary_50',
    link_text_color_visited_dark='*primary_50'
)
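As far as I can tell from Gradio's palette API, the eleven positional values passed to gr.themes.Color above fill the shade slots c50 through c950 in order; a keyword form (illustrative only, not part of the commit) makes that mapping explicit:

primary = gr.themes.Color(
    c50="#8c0010", c100="#8c0010", c200="#8c0010", c300="#8c0010",
    c400="#8c0010", c500="#8c0010", c600="#8c0010", c700="#8c0010",
    c800="#8c0010", c900="#8c0010", c950="#8c0010",
)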
css_string = """
@import url('https://fonts.googleapis.com/css2?family=Gabarito&family=Gothic+A1:wght@100;200;300;400;500;600;700;800;900&display=swap');
force_black_bg {
    background-color: blue !important;
    color: white !important;
    font-family: 'Gabarito', cursive !important;
}
force_black_bg *{
    background-color: blue !important;
    color: white !important;
    font-family: 'Gabarito', cursive !important;
}

footer{
    display:none !important
}

"""
css_string2 = ""

# with gr.Blocks(theme=theme, css=css_string2) as demo:
#     with gr.Row(elem_id="force_black_bg"):
#         with gr.Column(elem_id="force_black_bg"):
#             seed = gr.Text( label="AI Safety Skepticism: What's Your Take?", placeholder="Enter an argument or something you'd like to say!")
#             btn = gr.Button("Generate >")

#             english = gr.Text(elem_id="themed_question_box", label="Common Argument Classifier")


#         with gr.Column():
#             german = gr.Text(label="Safetybot Response")
#     btn.click(greet2, inputs=[seed],outputs=english)
#     btn.click(greet1, inputs=[seed],outputs=german)


#     gr.Examples(["AGI is far away, I'm not worried", "AI is confined to a computer and cannot interact with the physical world", "AI isn't concious", "If we don't develop AGI, China will!", "If we don't develop AGI, the Americans will!"], inputs=[seed])

with gr.Blocks(css=css_string) as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        chat_history.append((message, bot_message))
        time.sleep(2)
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

demo.queue()
demo.launch()
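The respond handler above returns canned placeholder replies; a sketch (not part of the commit) of how it could instead call the greet1/greet2 helpers defined earlier, by swapping the function passed to msg.submit inside the Blocks context:

def respond_with_model(message, chat_history):
    reply = greet2(message) + "\n\n" + greet1(message)  # taxonomy match + GPT answer
    chat_history.append((message, reply))
    return "", chat_history

# msg.submit(respond_with_model, [msg, chatbot], [msg, chatbot])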