from langchain import OpenAI, Wikipedia
from langchain.agents import initialize_agent, Tool
import os
os.environ["OPENAI_API_KEY"] = os.environ.get("open_ai_key")  # OpenAI key
import pickle

"""# Model Implementation"""
import_folder_name = "./embedded_kubernetes_docs"

with open(import_folder_name + '.pkl', 'rb') as f:
    store = pickle.load(f)

from typing import Union
from langchain.docstore.base import Docstore
from langchain.docstore.document import Document

class CustomData(Docstore):
    """Wrapper around embedded custom data."""

    datastore = None

    def __init__(self, store) -> None:
        """Check that embedded custom data is available."""
        print(store)
        self.datastore = store
        print("initialized")

    def search(self, search: str) -> Union[str, Document]:
        """Search the embedded docs for the query.

        Runs a similarity search against the embedded Kubernetes docs and
        returns the content of the closest-matching chunk.
        """
        docs = self.datastore.similarity_search(search)
        # print(docs[0].page_content)
        return docs[0].page_content

from typing import Any, List, Optional, Tuple
from langchain.docstore.base import Docstore
from langchain.docstore.document import Document

class DocstoreExplorer:
    """Class to assist with exploration of a document store."""

    def __init__(self, docstore: Docstore):
        """Initialize with a docstore, and set initial document to None."""
        self.docstore = docstore
        self.document: Optional[Document] = None
        self.llm = OpenAI(temperature=0.7)
        self.prompt = "You are an expert at Kubernetes. Summarize the following input: "

    def summarize(self, result: str) -> str:
        """Summarize a retrieved chunk with the LLM, prompted as a Kubernetes expert."""
        text = self.prompt + result
        return self.llm(text)

    def search(self, term: str) -> str:
        """Search for a term in the docstore, summarize the result, and save the document if one was returned."""
        result = self.docstore.search(term)
        summary = self.summarize(result)
        print("summary: ", summary)
        if isinstance(result, Document):
            self.document = result
        else:
            self.document = None
        return summary

    def lookup(self, term: str) -> str:
        """Lookup a term in document (if saved)."""
        if self.document is None:
            raise ValueError("Cannot lookup without a successful search first")
        return self.document.lookup(term)

docstore = DocstoreExplorer(CustomData(store))
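
# The react-docstore agent expects exactly two tools, named "Search" and "Lookup".
# Both are backed by the custom datastore's search here, since it returns plain text
# rather than Document objects that could be looked up.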
tools = [
    Tool(
        name="Search",
        func=docstore.search,
        # `description` is not part of the original code; newer LangChain releases require it.
        description="Semantic search over the embedded Kubernetes documentation.",
    ),
    Tool(
        name="Lookup",
        func=docstore.search,
        description="Look up a term in the embedded Kubernetes documentation.",
    ),
]

llm = OpenAI(temperature=0, model_name="text-davinci-003")
react = initialize_agent(tools, llm, agent="react-docstore", verbose=True, return_intermediate_steps=True)
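
# One-off test question run at startup; the Gradio app below reuses the same agent.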
question = "What kubernetes command can i run to see what's happening in my pod?"
response = react({"input": question})
"""# Gradio Implementation""" | |
clerkieExamples=["What kubernetes command can i run to see what's happening in my pod", "How can I create a Secret object in Kubernetes?"] | |

import random
import gradio as gr
import openai
import re
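
# Placeholder slots for per-conversation context; not referenced elsewhere in this script.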
chat_variables = {
    "Context": "",
    "StackTrace": "",
    "isLanguage": "",
}

def chat(message, history):
    print(message)
    history = history or []
    print("len(history): ", len(history))
    response = react({"input": message})
    history.append((message, response['output']))
    return history, history

def set_text(inp):
    return inp

def clear(arg):
    return ""

with gr.Blocks() as demo:
    user_state = gr.State([])
    gr.Markdown("""# Welcome to Kuber-Clerkie 🤖""")
    gr.Markdown("""Kuber-Clerkie is finetuned on Kubernetes documentation to help you debug your complex Kubernetes errors / answer questions. Please feel free to give it a try and let us know what you think!""")
gr.Markdown("""### π P.S. [Check out our GPT-3 based Chrome Extension that debugs your code](https://chrome.google.com/webstore/detail/clerkie-ai/oenpmifpfnikheaolfpabffojfjakfnn) π₯π₯π₯""") | |
    with gr.Row():
        with gr.Column():
            output = gr.Chatbot().style(color_map=("green", "pink"))
            # allow_flagging="never"
            inp = gr.Textbox(placeholder="enter your question here")
            print(type(inp))
            btn = gr.Button("Enter message")
            inp.submit(chat, [inp, user_state], [output, user_state])
            inp.submit(clear, inp, inp)
            btn.click(chat, [inp, user_state], [output, user_state])
            btn.click(clear, inp, inp)
gr.Markdown("""### need help? got feedback? have thoughts? etc. β Join the [Discord](https://discord.gg/KvG3azf39U)""") | |
    gr.Examples(clerkieExamples,
                inputs=inp,
                cache_examples=False,
                )
if __name__ == "__main__": | |
demo.launch(debug=True) |