Joshua Sundance Bailey committed
Commit
94f9b82
1 Parent(s): ea7edfe

python coding agent

langchain-streamlit-demo/app.py CHANGED
@@ -28,6 +28,8 @@ from llm_resources import (
     get_texts_and_multiretriever,
 )
 from research_assistant.chain import get_chain as get_research_assistant_chain
+from python_coder import get_agent as get_python_agent
+
 
 __version__ = "2.0.1"
 
@@ -469,7 +471,19 @@ if st.session_state.llm:
         "for quick facts, use duckduckgo instead.",
     )
 
-    TOOLS = [research_assistant_tool] + default_tools
+    python_coder_agent = get_python_agent(st.session_state.llm)
+
+    python_coder_tool = Tool.from_function(
+        func=lambda s: python_coder_agent.invoke(
+            {"input": s},
+            # config=get_config(callbacks),
+        ),
+        name="python-coder-assistant",
+        description="this assistant writes Python code. give it clear instructions and requirements.",
+    )
+
+    TOOLS = [research_assistant_tool, python_coder_tool] + default_tools
+
     if use_document_chat:
         st.session_state.doc_chain = get_runnable(
            use_document_chat,
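For reference, the hunk above wraps the new coder agent in an ordinary LangChain Tool so the main agent can call it like any other tool. A minimal standalone sketch of the same wiring outside Streamlit follows; the model name and environment setup are illustrative assumptions, and `get_config(callbacks)` stays commented out as in the diff:

# standalone sketch; assumes OPENAI_API_KEY is set in the environment
from langchain.agents.tools import Tool
from langchain.chat_models import ChatOpenAI
from python_coder import get_agent as get_python_agent

llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.0)  # illustrative model choice
python_coder_agent = get_python_agent(llm)

python_coder_tool = Tool.from_function(
    func=lambda s: python_coder_agent.invoke({"input": s}),
    name="python-coder-assistant",
    description="this assistant writes Python code. give it clear instructions and requirements.",
)

# a caller (or an outer agent) can exercise the tool directly
print(python_coder_tool.run("write a typed function that reverses a string"))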
langchain-streamlit-demo/python_coder.py ADDED
@@ -0,0 +1,208 @@
+"""langchain python coder-- requires black, ruff, and mypy."""
+
+import argparse
+import os
+import re
+import subprocess  # nosec
+import tempfile
+from importlib.util import find_spec
+
+from langchain.agents import initialize_agent, AgentType
+from langchain.agents.tools import Tool
+from langchain.chat_models import ChatOpenAI
+from langchain.llms.base import BaseLLM
+from langchain.prompts import ChatPromptTemplate
+from langchain.pydantic_v1 import BaseModel, validator, Field, ValidationError
+
+
+def strip_python_markdown_tags(text: str) -> str:
+    pat = re.compile(r"```python\n(.*)```", re.DOTALL)
+    code = pat.match(text)
+    if code:
+        return code.group(1)
+    else:
+        return text
+
+
+def format_black(filepath: str):
+    """Format a file with black."""
+    subprocess.run(  # nosec
+        f"black {filepath}",
+        stderr=subprocess.STDOUT,
+        text=True,
+        shell=True,
+        timeout=3,
+        check=False,
+    )
+
+
+def format_ruff(filepath: str):
+    """Run ruff format on a file."""
+    subprocess.run(  # nosec
+        f"ruff check --fix {filepath}",
+        shell=True,
+        text=True,
+        timeout=3,
+        universal_newlines=True,
+        check=False,
+    )
+
+    subprocess.run(  # nosec
+        f"ruff format {filepath}",
+        stderr=subprocess.STDOUT,
+        shell=True,
+        timeout=3,
+        text=True,
+        check=False,
+    )
+
+
+def check_ruff(filepath: str):
+    """Run ruff check on a file."""
+    subprocess.check_output(  # nosec
+        f"ruff check {filepath}",
+        stderr=subprocess.STDOUT,
+        shell=True,
+        timeout=3,
+        text=True,
+    )
+
+
+def check_mypy(filepath: str, strict: bool = True, follow_imports: str = "skip"):
+    """Run mypy on a file."""
+    cmd = f"mypy {'--strict' if strict else ''} --follow-imports={follow_imports} {filepath}"
+
+    subprocess.check_output(  # nosec
+        cmd,
+        stderr=subprocess.STDOUT,
+        shell=True,
+        text=True,
+        timeout=3,
+    )
+
+
+class PythonCode(BaseModel):
+    code: str = Field(
+        description="Python code conforming to ruff, black, and *strict* mypy standards.",
+    )
+
+    @validator("code")
+    @classmethod
+    def check_code(cls, v: str) -> str:
+        v = strip_python_markdown_tags(v).strip()
+        try:
+            with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file:
+                temp_file.write(v)
+                temp_file_path = temp_file.name
+
+            try:
+                # format with black and ruff
+                format_black(temp_file_path)
+                format_ruff(temp_file_path)
+            except subprocess.CalledProcessError:
+                pass
+
+            # update `v` with formatted code
+            with open(temp_file_path, "r") as temp_file:
+                v = temp_file.read()
+
+            # check
+            complaints = dict(ruff=None, mypy=None)
+
+            try:
+                check_ruff(temp_file_path)
+            except subprocess.CalledProcessError as e:
+                complaints["ruff"] = e.output
+
+            try:
+                check_mypy(temp_file_path)
+            except subprocess.CalledProcessError as e:
+                complaints["mypy"] = e.output
+
+            # raise ValueError if ruff or mypy had complaints
+            if any(complaints.values()):
+                code_str = f"```{temp_file_path}\n{v}```"
+                error_messages = [
+                    f"```{key}\n{value}```"
+                    for key, value in complaints.items()
+                    if value
+                ]
+                raise ValueError("\n\n".join([code_str] + error_messages))
+
+        finally:
+            os.remove(temp_file_path)
+        return v
+
+
+def check_code(code: str) -> str:
+    try:
+        code_obj = PythonCode(code=code)
+        return f"# LGTM\n# use the `submit` tool to submit this code:\n\n```python\n{code_obj.code}\n```"
+    except ValidationError as e:
+        return e.errors()[0]["msg"]
+
+
+prompt = ChatPromptTemplate.from_messages(
+    [
+        (
+            "system",
+            "You are a world class Python coder who uses black, ruff, and *strict* mypy for all of your code. "
+            "Provide complete, end-to-end Python code to meet the user's description/requirements. "
+            "Always `check` your code. When you're done, you must ALWAYS use the `submit` tool.",
+        ),
+        (
+            "human",
+            ": {input}",
+        ),
+    ],
+)
+
+check_code_tool = Tool.from_function(
+    check_code,
+    name="check-code",
+    description="Always check your code before submitting it!",
+)
+
+submit_code_tool = Tool.from_function(
+    lambda s: strip_python_markdown_tags(s),
+    name="submit-code",
+    description="THIS TOOL is the most important. use it to submit your code to the user who requested it... but be sure to `check` it first!",
+    return_direct=True,
+)
+
+tools = [check_code_tool, submit_code_tool]
+
+
+def get_agent(llm: BaseLLM, agent_type: AgentType = AgentType.OPENAI_FUNCTIONS):
+    return initialize_agent(
+        tools,
+        llm,
+        agent=agent_type,
+        verbose=True,
+        handle_parsing_errors=True,
+        prompt=prompt,
+        # return_intermediate_steps=True,
+    ) | (lambda output: output["output"])
+
+
+if __name__ == "__main__":
+    for lib in ("black", "ruff", "mypy"):
+        if find_spec(lib) is None:
+            raise ImportError(
+                "You must install black, ruff, and mypy to use this tool. "
+                "You can install them with `pip install black ruff mypy`.",
+            )
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--model", "-m", default="gpt-4-1106-preview")
+    parser.add_argument("instruction")
+    args = parser.parse_args()
+
+    instruction = args.instruction
+    model = args.model
+
+    llm = ChatOpenAI(model_name=model, temperature=0.0)
+    agent = get_agent(llm)
+
+    output = agent.invoke({"input": instruction}, config=dict(verbose=True))
+    print(output)
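For orientation, the new module is both importable and directly runnable. A minimal usage sketch based on its `get_agent` helper and `__main__` block, assuming black, ruff, mypy, and an OpenAI API key are available; the instruction text and model name are only examples:

# CLI use, per the module's __main__ block:
#   python langchain-streamlit-demo/python_coder.py -m gpt-4-1106-preview "write a typed fizzbuzz function"

# programmatic use, mirroring how app.py wires it in:
from langchain.chat_models import ChatOpenAI
from python_coder import get_agent

llm = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0.0)
agent = get_agent(llm)
print(agent.invoke({"input": "write a typed fizzbuzz function"}))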
requirements.txt CHANGED
@@ -1,20 +1,23 @@
 anthropic==0.7.7
 beautifulsoup4==4.12.2
+black==23.12.0
+certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerability
 duckduckgo-search==4.0.0
 faiss-cpu==1.7.4
 langchain==0.0.348
 langsmith==0.0.69
+mypy==1.7.1
 numexpr==2.8.8
 numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
 openai==1.3.8
 pillow>=10.0.1 # not directly required, pinned by Snyk to avoid a vulnerability
 pyarrow>=14.0.1 # not directly required, pinned by Snyk to avoid a vulnerability
 pypdf==3.17.2
+requests>=2.31.0 # not directly required, pinned by Snyk to avoid a vulnerability
+ruff==0.1.8
 streamlit==1.29.0
 streamlit-feedback==0.1.3
 tiktoken==0.5.2
 tornado>=6.3.3 # not directly required, pinned by Snyk to avoid a vulnerability
 validators>=0.21.0 # not directly required, pinned by Snyk to avoid a vulnerability
 wikipedia==1.4.0
-certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerability
-requests>=2.31.0 # not directly required, pinned by Snyk to avoid a vulnerability