crystalai.llms / debug_log_file.txt
crystalai's picture
Upload 3 files
478823c verified
raw
history blame
162 kB
2024-07-25T11:56:26.492034
UPLOADED FILES:
File: config.py
import yaml
from log_writer import logger
def load_config():
"""
Loads the configuration from the 'config.yaml' file and sets the global variables accordingly.
If the 'GENERATE_MODEL' key in the configuration is set to 'gpt-4', it forces the use of 'gpt-4-turbo-preview'
as the value for the 'GENERATE_MODEL' key, since 'gpt-4' no longer supports json modes.
Returns:
None
"""
with open("config.yaml", "r") as conf:
config_content = yaml.safe_load(conf)
for key, value in config_content.items():
if key == "GENERATE_MODEL" and value == "gpt-4":
globals()[
key
] = "gpt-4-turbo-preview" # Force using gpt-4-turbo-preview if the user set the GENERATE_MODEL to gpt-4. Because gpt-4 is not longer supports json modes.
globals()[key] = value
logger(f"config: {key} -> {value}")
def edit_config(key, value):
"""
Edits the config file.
Args:
key (str): The key to edit.
value (str): The value to set.
Returns:
bool: True
"""
with open("config.yaml", "r") as conf:
config_content = conf.readlines()
with open("config.yaml", "w") as conf:
for line in config_content:
if line.startswith(key):
if value == True:
write_value = "True"
elif value == False:
write_value = "False"
else:
write_value = f'"{value}"'
if "#" in line:
conf.write(f"{key}: {write_value} # {line.split('#')[1]}\n")
else:
conf.write(f"{key}: {write_value}\n")
else:
conf.write(line)
return True
load_config()
File: config.yaml
########## EDIT REQUIRED ##########
# GPT SETTINGS #
# Get your api key from openai. Remember google/bing is always your best friend.
# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
# Recommend -> gpt-4-turbo (Better performance, more expensive), gpt-4-o (Good performance, cheaper)
API_KEY: "" # Free API Key with GPT-4 access: https://github.com/CubeGPT/.github/discussions/1
BASE_URL: "https://api.openai.com/v1/chat/completions"
GENERATION_MODEL: "gpt-4-turbo-2024-04-09"
FIXING_MODEL: "gpt-4-turbo-2024-04-09"
# DEVELOPER SETTINGS #
VERSION_NUMBER: "0.1.1"
# PROMPT SETTINGS #
# If you don't know what it is, please don't touch it. Be sure to backup before editing.
## Code Generation ##
SYS_GEN: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Write the code & choose a artifact name for the following files with the infomation which is also provided by the user:
codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java
codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml
codes/%ARTIFACT_NAME%/src/main/resources/config.yml
codes/%ARTIFACT_NAME%/pom.xml
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Uncompeleted".
USR_GEN: |
%DESCRIPTION%
SYS_FIX: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Fix the error in the code provided by user. The error message is also provided by the user.
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Original code" or "// Uncompeleted".
USR_FIX: |
Main.java:
%MAIN_JAVA%
plugin.yml:
%PLUGIN_YML%
config.yml:
%CONFIG_YML%
pom.xml:
%POM_XML%
error message:
%P_ERROR_MSG%
File: console.py
import sys
import uuid
import shutil
from log_writer import logger
import core
import config
import build
if __name__ == "__main__":
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
core.initialize()
print("BukkitGPT v3 beta console running")
# Get user inputs
name = input("Enter the plugin name: ")
description = input("Enter the plugin description: ")
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
print("Build failed. Passing the error to ChatGPT and let it to fix it?")
fix = input("Y/n: ")
if fix == "n":
print("Exiting...")
sys.exit(0)
else:
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", artifact_name),
config.USR_FIX.replace("%MAIN_JAVA%", main_java)
.replace("%PLUGIN_YML%", plugin_yml)
.replace("%CONFIG_YML%", config_yml)
.replace("%POM_XML%", pom_xml)
.replace("%P_ERROR_MSG%", result),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Error: Please run console.py as the main program instead of importing it from another program."
)
File: core.py
from openai import OpenAI
import chardet
import sys
import json
import locale
import os
from log_writer import logger
import config
def initialize():
"""
Initializes the software.
This function logs the software launch, including the version number and platform.
Args:
None
Returns:
None
"""
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
if (
"gpt-3.5" in config.GENERATION_MODEL
and config.BYPASS_NO_GPT35_FOR_GENERATION_LIMIT is False
):
print(
"gpt-3.5 writes bugs *all the time* and is not recommended for code generation. Switching to gpt-4."
)
config.edit_config(
"GENERATION_MODEL", config.GENERATION_MODEL.replace("gpt-3.5", "gpt-4")
)
def askgpt(
system_prompt: str,
user_prompt: str,
model_name: str,
disable_json_mode: bool = False,
image_url: str = None,
):
"""
Interacts with ChatGPT using the specified prompts.
Args:
system_prompt (str): The system prompt.
user_prompt (str): The user prompt.
model_name (str): The model name to use.
disable_json_mode (bool): Whether to disable JSON mode.
Returns:
str: The response from ChatGPT.
"""
if image_url is not None and config.USE_DIFFERENT_APIKEY_FOR_VISION_MODEL:
logger("Using different API key for vision model.")
client = OpenAI(api_key=config.VISION_API_KEY, base_url=config.VISION_BASE_URL)
else:
client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
logger("Initialized the OpenAI client.")
# Define the messages for the conversation
if image_url is not None:
messages = [
{"role": "system", "content": system_prompt},
{
"role": "user",
"content": [
{"type": "text", "text": user_prompt},
{"type": "image_url", "image_url": {"url": image_url}},
],
},
]
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
]
logger(f"askgpt: system {system_prompt}")
logger(f"askgpt: user {user_prompt}")
# Create a chat completion
if disable_json_mode:
response = client.chat.completions.create(model=model_name, messages=messages)
else:
response = client.chat.completions.create(
model=model_name, response_format={"type": "json_object"}, messages=messages
)
logger(f"askgpt: response {response}")
# Extract the assistant's reply
assistant_reply = response.choices[0].message.content
logger(f"askgpt: extracted reply {assistant_reply}")
return assistant_reply
def response_to_action(msg):
"""
Converts a response from ChatGPT to an action.
Args:
msg (str): The response from ChatGPT.
Returns:
str: The action to take.
"""
text = json.loads(msg)
codes = text["codes"]
for section in codes:
file = section["file"]
code = section["code"]
paths = file.split("/")
# Join the list elements to form a path
path = os.path.join(*paths)
# Get the directory path and the file name
dir_path, file_name = os.path.split(path)
# Create directories, if they don't exist
try:
os.makedirs(dir_path, exist_ok=True)
except FileNotFoundError:
pass
# Create the file
with open(path, "w") as f:
f.write(code) # Write an empty string to the file
def mixed_decode(text: str):
"""
Decode a mixed text containing both normal text and a byte sequence.
Args:
text (str): The mixed text to be decoded.
Returns:
str: The decoded text, where the byte sequence has been converted to its corresponding characters.
"""
# Split the normal text and the byte sequence
# Assuming the byte sequence is everything after the last colon and space ": "
try:
normal_text, byte_text = text.rsplit(": ", 1)
except (TypeError, ValueError):
# The text only contains normal text
return text
# Convert the byte sequence to actual bytes
byte_sequence = byte_text.encode(
"latin1"
) # latin1 encoding maps byte values directly to unicode code points
# Detect the encoding of the byte sequence
detected_encoding = chardet.detect(byte_sequence)
encoding = detected_encoding["encoding"]
# Decode the byte sequence
decoded_text = byte_sequence.decode(encoding)
# Combine the normal text with the decoded byte sequence
final_text = normal_text + ": " + decoded_text
return final_text
if __name__ == "__main__":
print("This script is not meant to be run directly. Please run console.py instead.")
File: log_writer.py
import os
from datetime import datetime
first_call_time = None
def get_log_filename():
global first_call_time
if first_call_time is None:
first_call_time = datetime.now()
log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
return log_filename
def logger(text: str):
log_filename = get_log_filename()
timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
log_line = f"{timestamp_prefix} {text}\n"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
with open(log_filename + ".log", "a", encoding="utf-8") as log_file:
log_file.write(log_line)
File: README.md
<div align="center">
<img src="https://github.com/CubeGPT/CubeAgents/blob/master/banner.jpeg?raw=true"/>
<img src="https://img.shields.io/badge/Cube-Agents-blue">
<a href="https://github.com/CubeGPT/BuilderGPT/pulls"><img src="https://img.shields.io/badge/PRs-welcome-20BF20"></a>
<img src="https://img.shields.io/badge/License-Apache-red">
<a href="https://discord.gg/kTZtXw8s7r"><img src="https://img.shields.io/discord/1212765516532289587
"></a>
<!-- <p>English | <a href="https://github.com/CubeGPT/CubeAgents/blob/master/README-zh_cn.md">简体中文</a></p> -->
<br>
<a href="https://discord.gg/kTZtXw8s7r">Join our discord</a>
<br/>
</div>
> [!NOTE]
> Developers and translators are welcome to join the CubeGPT Team!
## Introduction
> A simple template for CubeGPT projects.
CubeAgents is a template for CubeGPT's projects like [BuilderGPT](https://github.com/CubeGPT/BuilderGPT). It provides a simple and clean interface for users to interact with the program.
# Showcase
...
## Partner
[![](https://www.bisecthosting.com/partners/custom-banners/c37f58c7-c49b-414d-b53c-1a6e1b1cff71.webp)](https://bisecthosting.com/cubegpt)
## Features
- [x] Feature 1
- [x] Feature 2
- [x] Feature 3
- [ ] Feature 4
### Other projects of CubeGPT Team
- [x] Bukkit plugin generator. {*.jar} ([BukkitGPT](https://github.com/CubeGPT/BukkitGPT))
- [x] Structure generator. {*.schem} ([BuilderGPT](https://github.com/CubeGPT/BuilderGPT))
- [ ] Serverpack generator. {*.zip} (ServerpackGPT or ServerGPT, or..?)
- [ ] Have ideas or want to join our team? Send [us](mailto:[email protected]) an email!
## How it works
...
## Requirements
### Plan A. Windows/Linux (executable edition)
Nothing. Just download the executable file and run it.
### Plan B. Python (Any operating systems; Recommend if possible)
You can use BukkitGPT on any device with [Python 3+](https://www.python.org/).
And you need to install the depencies with this command:
```
pip install -r requirements.txt
```
## Quick Start
*(Make sure you have the [Python](https://www.python.org) environment installed on your computer)*
...
## Contributing
If you like the project, you can give the project a star, or [submit an issue](https://github.com/CubeGPT/CubeAgents/issues) or [pull request](https://github.com/CubeGPT/CubeAgents/pulls) to help make it better.
## License
```
Copyright [2024] [CubeGPT Team]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
File: requirements.txt
openai>=1.13.3
pyyaml
cx_Freeze
ttkbootstrap
playwright
chardet
File: ui.py
from cube_qgui.__init__ import CreateQGUI
from cube_qgui.banner_tools import *
from cube_qgui.notebook_tools import *
from playwright.sync_api import Playwright, sync_playwright
import os
import shutil
import uuid
from log_writer import logger
import config
import core
import build
# ---------- Functions ----------#
def open_config(args: dict):
"""
Opens the config file.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
os.system("notepad config.yaml")
return True
def save_apply_config(args: dict):
"""
Saves and applies the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
keys = ["API_KEY", "BASE_URL"]
for key in keys:
value = args[key].get()
if key == "ADVANCED_MODE":
value = True if value == 1 else False
else:
pass
config.edit_config(key, value)
config.load_config()
args["DevTool_CONFIG_API_KEY_DISPLAY"].set(f"CONFIG.API_KEY = {config.API_KEY}")
args["DevTools_CONFIG_BASE_URL_DISPLAY"].set(f"CONFIG.BASE_URL = {config.BASE_URL}")
return True
def load_config(args: dict):
"""
Loads the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
config.load_config()
args["API_KEY"].set(config.API_KEY)
args["BASE_URL"].set(config.BASE_URL)
return True
def print_args(args: dict):
"""
Prints the arguments.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
for arg, v_fun in args.items():
print(f"Name: {arg}, Value: {v_fun.get()}")
return True
def raise_error(args: dict):
"""
Raises an error.
Args:
args (dict): A dictionary containing the arguments.
"""
raise Exception("This is a test error.")
# ---------- Generate Function ----------#
def generate(args: dict):
"""
Generates the plugin.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
global error_msg, pkg_id_path
# Get user inputs
name = args["PluginName"].get()
description = args["PluginDescription"].get()
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
error_msg = result
print(
"Build failed. To pass the error to ChatGPT && let it fix, jump to the Fixing page and click the Fix button."
)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
def fix(args: dict):
"""
Fixes the error.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
artifact_name = args["PluginName"].get()
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", str(artifact_name)),
config.USR_FIX.replace("%MAIN_JAVA%", str(main_java))
.replace("%PLUGIN_YML%", str(plugin_yml))
.replace("%CONFIG_YML%", str(config_yml))
.replace("%POM_XML%", str(pom_xml))
.replave("%PKG_ID_LST%", pkg_id_path)
.replace("%P_ERROR_MSG%", str(error_msg)),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed again. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
# ---------- Main Program ----------#
root = CreateQGUI(title="BukkitGPT-v3", tab_names=["Generate", "Settings", "DevTools"])
error_msg = None
logger("Starting program.")
# Initialize Core
core.initialize()
print("BukkitGPT v3 beta console running")
# Banner
root.add_banner_tool(GitHub("https://github.com/CubeGPT/BukkitGPT-v3"))
# Generate Page
root.add_notebook_tool(
InputBox(name="PluginName", default="ExamplePlugin", label_info="Plugin Name")
)
root.add_notebook_tool(
InputBox(
name="PluginDescription",
default="Send msg 'hello' to every joined player.",
label_info="Plugin Description",
)
)
root.add_notebook_tool(
RunButton(
bind_func=generate,
name="Generate",
text="Generate Plugin",
checked_text="Generating...",
tab_index=0,
)
)
# Fixing Page #
# root.add_notebook_tool(Label(name="Fixing_DESCRIPTION", text="This is a fixing page. If the build fails, click the Fix button to fix the error in the LATEST build.", tab_index=1))
# root.add_notebook_tool(RunButton(bind_func=fix, name="Fix", text="Fix", checked_text="Fixing...", tab_index=1))
# Settings Page
root.add_notebook_tool(
InputBox(name="API_KEY", default=config.API_KEY, label_info="API Key", tab_index=1)
)
root.add_notebook_tool(
InputBox(
name="BASE_URL", default=config.BASE_URL, label_info="BASE URL", tab_index=1
)
)
config_buttons = HorizontalToolsCombine(
[
BaseButton(
bind_func=save_apply_config,
name="Save & Apply Config",
text="Save & Apply",
tab_index=1,
),
BaseButton(
bind_func=load_config, name="Load Config", text="Load Config", tab_index=1
),
BaseButton(
bind_func=open_config,
name="Open Config",
text="Open Full Config",
tab_index=1,
),
]
)
root.add_notebook_tool(config_buttons)
# DevTools Page
root.add_notebook_tool(
Label(
name="DevTool_DESCRIPTION",
text="This is a testing page for developers. Ignore it if you are a normal user.",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTool_CONFIG_API_KEY_DISPLAY",
text=f"CONFIG.API_KEY = {config.API_KEY}",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTools_CONFIG_BASE_URL_DISPLAY",
text=f"CONFIG.BASE_URL = {config.BASE_URL}",
tab_index=2,
)
)
root.add_notebook_tool(
RunButton(bind_func=print_args, name="Print Args", text="Print Args", tab_index=2)
)
root.add_notebook_tool(
RunButton(
bind_func=raise_error, name="Raise Error", text="Raise Error", tab_index=2
)
)
# Sidebar
root.set_navigation_about(
author="CubeGPT Team",
version=config.VERSION_NUMBER,
github_url="https://github.com/CubeGPT/BukkitGPT-v3",
)
# Run
root.run()
PROMPT:
添加生成DataPack的选项,使得用户可以选择生成Bukkit插件或者DataPack。
在`ui.py`中可以在Generate Page里添加这行代码 `root.add_notebook_tool(RadioObviousToolButton(options=["BukkitPlugin", "Datapack(Experimental)"], name="GenerationType", title="Type", default="BukkitPlugin",tab_index=0))`
你还需要自行完成DataPack的生成逻辑。你可能需要更改的文件有`ui.py`, `core.py`, `config.yaml`, `console.py`。你还可能需要创建一些新的文件或文件夹,也有可能不需要。祝你好运。
2024-07-25T11:56:26.944792
CONSOLE OUTPUT:
Error while improving the project: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
Could you please upload the debug_log_file.txt in D:\zhousl\BukkitGPT\BukkitGPT-v3\.gpteng\memory/logs folder to github?
FULL STACK TRACE:
Traceback (most recent call last):
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 379, in handle_improve_mode
files_dict = agent.improve(files_dict, prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\applications\cli\cli_agent.py", line 208, in improve
files_dict = self.improve_fn(
^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 311, in improve_fn
return _improve_loop(ai, files_dict, memory, messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 317, in _improve_loop
messages = ai.next(messages, step_name=curr_fn())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 243, in next
response = self.backoff_inference(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\backoff\_sync.py", line 105, in retry
ret = target(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 287, in backoff_inference
return self.llm.invoke(messages) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 270, in invoke
self.generate_prompt(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 703, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 560, in generate
raise e
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 550, in generate
self._generate_with_cache(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 775, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 755, in _generate
return generate_from_stream(stream_iter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 102, in generate_from_stream
generation = next(stream, None)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 676, in _stream
stream = self._client.messages.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_utils\_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\resources\messages.py", line 902, in create
return self._post(
^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 1266, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 942, in request
return self._request(
^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 968, in _request
request = self._build_request(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 461, in _build_request
headers = self._build_headers(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 416, in _build_headers
self._validate_headers(headers_dict, custom_headers)
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_client.py", line 192, in _validate_headers
raise TypeError(
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
2024-07-25T11:59:37.704389
UPLOADED FILES:
File: config.py
import yaml
from log_writer import logger
def load_config():
"""
Loads the configuration from the 'config.yaml' file and sets the global variables accordingly.
If the 'GENERATE_MODEL' key in the configuration is set to 'gpt-4', it forces the use of 'gpt-4-turbo-preview'
as the value for the 'GENERATE_MODEL' key, since 'gpt-4' no longer supports json modes.
Returns:
None
"""
with open("config.yaml", "r") as conf:
config_content = yaml.safe_load(conf)
for key, value in config_content.items():
if key == "GENERATE_MODEL" and value == "gpt-4":
globals()[
key
] = "gpt-4-turbo-preview" # Force using gpt-4-turbo-preview if the user set the GENERATE_MODEL to gpt-4. Because gpt-4 is not longer supports json modes.
globals()[key] = value
logger(f"config: {key} -> {value}")
def edit_config(key, value):
"""
Edits the config file.
Args:
key (str): The key to edit.
value (str): The value to set.
Returns:
bool: True
"""
with open("config.yaml", "r") as conf:
config_content = conf.readlines()
with open("config.yaml", "w") as conf:
for line in config_content:
if line.startswith(key):
if value == True:
write_value = "True"
elif value == False:
write_value = "False"
else:
write_value = f'"{value}"'
if "#" in line:
conf.write(f"{key}: {write_value} # {line.split('#')[1]}\n")
else:
conf.write(f"{key}: {write_value}\n")
else:
conf.write(line)
return True
load_config()
File: config.yaml
########## EDIT REQUIRED ##########
# GPT SETTINGS #
# Get your api key from openai. Remember google/bing is always your best friend.
# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
# Recommend -> gpt-4-turbo (Better performance, more expensive), gpt-4-o (Good performance, cheaper)
API_KEY: "" # Free API Key with GPT-4 access: https://github.com/CubeGPT/.github/discussions/1
BASE_URL: "https://api.openai.com/v1/chat/completions"
GENERATION_MODEL: "gpt-4-turbo-2024-04-09"
FIXING_MODEL: "gpt-4-turbo-2024-04-09"
# DEVELOPER SETTINGS #
VERSION_NUMBER: "0.1.1"
# PROMPT SETTINGS #
# If you don't know what it is, please don't touch it. Be sure to backup before editing.
## Code Generation ##
SYS_GEN: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Write the code & choose a artifact name for the following files with the infomation which is also provided by the user:
codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java
codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml
codes/%ARTIFACT_NAME%/src/main/resources/config.yml
codes/%ARTIFACT_NAME%/pom.xml
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Uncompeleted".
USR_GEN: |
%DESCRIPTION%
SYS_FIX: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Fix the error in the code provided by user. The error message is also provided by the user.
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Original code" or "// Uncompeleted".
USR_FIX: |
Main.java:
%MAIN_JAVA%
plugin.yml:
%PLUGIN_YML%
config.yml:
%CONFIG_YML%
pom.xml:
%POM_XML%
error message:
%P_ERROR_MSG%
File: console.py
import sys
import uuid
import shutil
from log_writer import logger
import core
import config
import build
if __name__ == "__main__":
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
core.initialize()
print("BukkitGPT v3 beta console running")
# Get user inputs
name = input("Enter the plugin name: ")
description = input("Enter the plugin description: ")
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
print("Build failed. Passing the error to ChatGPT and let it to fix it?")
fix = input("Y/n: ")
if fix == "n":
print("Exiting...")
sys.exit(0)
else:
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", artifact_name),
config.USR_FIX.replace("%MAIN_JAVA%", main_java)
.replace("%PLUGIN_YML%", plugin_yml)
.replace("%CONFIG_YML%", config_yml)
.replace("%POM_XML%", pom_xml)
.replace("%P_ERROR_MSG%", result),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Error: Please run console.py as the main program instead of importing it from another program."
)
File: core.py
from openai import OpenAI
import chardet
import sys
import json
import locale
import os
from log_writer import logger
import config
def initialize():
"""
Initializes the software.
This function logs the software launch, including the version number and platform.
Args:
None
Returns:
None
"""
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
if (
"gpt-3.5" in config.GENERATION_MODEL
and config.BYPASS_NO_GPT35_FOR_GENERATION_LIMIT is False
):
print(
"gpt-3.5 writes bugs *all the time* and is not recommended for code generation. Switching to gpt-4."
)
config.edit_config(
"GENERATION_MODEL", config.GENERATION_MODEL.replace("gpt-3.5", "gpt-4")
)
def askgpt(
system_prompt: str,
user_prompt: str,
model_name: str,
disable_json_mode: bool = False,
image_url: str = None,
):
"""
Interacts with ChatGPT using the specified prompts.
Args:
system_prompt (str): The system prompt.
user_prompt (str): The user prompt.
model_name (str): The model name to use.
disable_json_mode (bool): Whether to disable JSON mode.
Returns:
str: The response from ChatGPT.
"""
if image_url is not None and config.USE_DIFFERENT_APIKEY_FOR_VISION_MODEL:
logger("Using different API key for vision model.")
client = OpenAI(api_key=config.VISION_API_KEY, base_url=config.VISION_BASE_URL)
else:
client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
logger("Initialized the OpenAI client.")
# Define the messages for the conversation
if image_url is not None:
messages = [
{"role": "system", "content": system_prompt},
{
"role": "user",
"content": [
{"type": "text", "text": user_prompt},
{"type": "image_url", "image_url": {"url": image_url}},
],
},
]
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
]
logger(f"askgpt: system {system_prompt}")
logger(f"askgpt: user {user_prompt}")
# Create a chat completion
if disable_json_mode:
response = client.chat.completions.create(model=model_name, messages=messages)
else:
response = client.chat.completions.create(
model=model_name, response_format={"type": "json_object"}, messages=messages
)
logger(f"askgpt: response {response}")
# Extract the assistant's reply
assistant_reply = response.choices[0].message.content
logger(f"askgpt: extracted reply {assistant_reply}")
return assistant_reply
def response_to_action(msg):
"""
Converts a response from ChatGPT to an action.
Args:
msg (str): The response from ChatGPT.
Returns:
str: The action to take.
"""
text = json.loads(msg)
codes = text["codes"]
for section in codes:
file = section["file"]
code = section["code"]
paths = file.split("/")
# Join the list elements to form a path
path = os.path.join(*paths)
# Get the directory path and the file name
dir_path, file_name = os.path.split(path)
# Create directories, if they don't exist
try:
os.makedirs(dir_path, exist_ok=True)
except FileNotFoundError:
pass
# Create the file
with open(path, "w") as f:
f.write(code) # Write an empty string to the file
def mixed_decode(text: str):
"""
Decode a mixed text containing both normal text and a byte sequence.
Args:
text (str): The mixed text to be decoded.
Returns:
str: The decoded text, where the byte sequence has been converted to its corresponding characters.
"""
# Split the normal text and the byte sequence
# Assuming the byte sequence is everything after the last colon and space ": "
try:
normal_text, byte_text = text.rsplit(": ", 1)
except (TypeError, ValueError):
# The text only contains normal text
return text
# Convert the byte sequence to actual bytes
byte_sequence = byte_text.encode(
"latin1"
) # latin1 encoding maps byte values directly to unicode code points
# Detect the encoding of the byte sequence
detected_encoding = chardet.detect(byte_sequence)
encoding = detected_encoding["encoding"]
# Decode the byte sequence
decoded_text = byte_sequence.decode(encoding)
# Combine the normal text with the decoded byte sequence
final_text = normal_text + ": " + decoded_text
return final_text
if __name__ == "__main__":
print("This script is not meant to be run directly. Please run console.py instead.")
File: log_writer.py
import os
from datetime import datetime
first_call_time = None
def get_log_filename():
global first_call_time
if first_call_time is None:
first_call_time = datetime.now()
log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
return log_filename
def logger(text: str):
log_filename = get_log_filename()
timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
log_line = f"{timestamp_prefix} {text}\n"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
with open(log_filename + ".log", "a", encoding="utf-8") as log_file:
log_file.write(log_line)
File: requirements.txt
openai>=1.13.3
pyyaml
cx_Freeze
ttkbootstrap
playwright
chardet
File: ui.py
from cube_qgui.__init__ import CreateQGUI
from cube_qgui.banner_tools import *
from cube_qgui.notebook_tools import *
from playwright.sync_api import Playwright, sync_playwright
import os
import shutil
import uuid
from log_writer import logger
import config
import core
import build
# ---------- Functions ----------#
def open_config(args: dict):
"""
Opens the config file.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
os.system("notepad config.yaml")
return True
def save_apply_config(args: dict):
"""
Saves and applies the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
keys = ["API_KEY", "BASE_URL"]
for key in keys:
value = args[key].get()
if key == "ADVANCED_MODE":
value = True if value == 1 else False
else:
pass
config.edit_config(key, value)
config.load_config()
args["DevTool_CONFIG_API_KEY_DISPLAY"].set(f"CONFIG.API_KEY = {config.API_KEY}")
args["DevTools_CONFIG_BASE_URL_DISPLAY"].set(f"CONFIG.BASE_URL = {config.BASE_URL}")
return True
def load_config(args: dict):
"""
Loads the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
config.load_config()
args["API_KEY"].set(config.API_KEY)
args["BASE_URL"].set(config.BASE_URL)
return True
def print_args(args: dict):
"""
Prints the arguments.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
for arg, v_fun in args.items():
print(f"Name: {arg}, Value: {v_fun.get()}")
return True
def raise_error(args: dict):
"""
Raises an error.
Args:
args (dict): A dictionary containing the arguments.
"""
raise Exception("This is a test error.")
# ---------- Generate Function ----------#
def generate(args: dict):
"""
Generates the plugin.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
global error_msg, pkg_id_path
# Get user inputs
name = args["PluginName"].get()
description = args["PluginDescription"].get()
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
error_msg = result
print(
"Build failed. To pass the error to ChatGPT && let it fix, jump to the Fixing page and click the Fix button."
)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
def fix(args: dict):
"""
Fixes the error.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
artifact_name = args["PluginName"].get()
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", str(artifact_name)),
config.USR_FIX.replace("%MAIN_JAVA%", str(main_java))
.replace("%PLUGIN_YML%", str(plugin_yml))
.replace("%CONFIG_YML%", str(config_yml))
.replace("%POM_XML%", str(pom_xml))
.replave("%PKG_ID_LST%", pkg_id_path)
.replace("%P_ERROR_MSG%", str(error_msg)),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed again. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
# ---------- Main Program ----------#
root = CreateQGUI(title="BukkitGPT-v3", tab_names=["Generate", "Settings", "DevTools"])
error_msg = None
logger("Starting program.")
# Initialize Core
core.initialize()
print("BukkitGPT v3 beta console running")
# Banner
root.add_banner_tool(GitHub("https://github.com/CubeGPT/BukkitGPT-v3"))
# Generate Page
root.add_notebook_tool(
InputBox(name="PluginName", default="ExamplePlugin", label_info="Plugin Name")
)
root.add_notebook_tool(
InputBox(
name="PluginDescription",
default="Send msg 'hello' to every joined player.",
label_info="Plugin Description",
)
)
root.add_notebook_tool(
RunButton(
bind_func=generate,
name="Generate",
text="Generate Plugin",
checked_text="Generating...",
tab_index=0,
)
)
# Fixing Page #
# root.add_notebook_tool(Label(name="Fixing_DESCRIPTION", text="This is a fixing page. If the build fails, click the Fix button to fix the error in the LATEST build.", tab_index=1))
# root.add_notebook_tool(RunButton(bind_func=fix, name="Fix", text="Fix", checked_text="Fixing...", tab_index=1))
# Settings Page
root.add_notebook_tool(
InputBox(name="API_KEY", default=config.API_KEY, label_info="API Key", tab_index=1)
)
root.add_notebook_tool(
InputBox(
name="BASE_URL", default=config.BASE_URL, label_info="BASE URL", tab_index=1
)
)
config_buttons = HorizontalToolsCombine(
[
BaseButton(
bind_func=save_apply_config,
name="Save & Apply Config",
text="Save & Apply",
tab_index=1,
),
BaseButton(
bind_func=load_config, name="Load Config", text="Load Config", tab_index=1
),
BaseButton(
bind_func=open_config,
name="Open Config",
text="Open Full Config",
tab_index=1,
),
]
)
root.add_notebook_tool(config_buttons)
# DevTools Page
root.add_notebook_tool(
Label(
name="DevTool_DESCRIPTION",
text="This is a testing page for developers. Ignore it if you are a normal user.",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTool_CONFIG_API_KEY_DISPLAY",
text=f"CONFIG.API_KEY = {config.API_KEY}",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTools_CONFIG_BASE_URL_DISPLAY",
text=f"CONFIG.BASE_URL = {config.BASE_URL}",
tab_index=2,
)
)
root.add_notebook_tool(
RunButton(bind_func=print_args, name="Print Args", text="Print Args", tab_index=2)
)
root.add_notebook_tool(
RunButton(
bind_func=raise_error, name="Raise Error", text="Raise Error", tab_index=2
)
)
# Sidebar
root.set_navigation_about(
author="CubeGPT Team",
version=config.VERSION_NUMBER,
github_url="https://github.com/CubeGPT/BukkitGPT-v3",
)
# Run
root.run()
File: cube_qgui\\__main__.py
# 这是对于cube_qgui的一个简单示例,展示了如何使用QGUI来创建一个简单的GUI应用。
import time
# 导入CreateQGUI模块
from qgui import CreateQGUI, MessageBox
# 【可选】导入自定义导航栏按钮模块、GitHub导航栏模块
from qgui.banner_tools import BaseBarTool, GitHub, AIStudio
# 【可选】一次性导入所有的主界面工具模块
from qgui.notebook_tools import *
# 【可选】导入占位符
from qgui.manager import QStyle, HORIZONTAL
def click(args: dict):
MessageBox.info("要开始啦~")
# 证明一下自己被点到了
print("你点到我啦~")
# 通过ChooseFileTextButton(name="文件选择")中预先设置的name参数,使用get方法即可获取对应的输入框信息
print("你选择的文件是:", args["文件选择"].get())
# 当然也可以通过name参数来设置对应的内容,使用set方法即可完成设置
print("保存位置修改为“快看,我被修改啦”", args["保存位置"].set("快看,我被修改啦"))
# 即使没有指定name,我们照样可以拿到所有的小工具情况
for arg, v_fun in args.items():
print("自定义组件Name:", arg, "状态:", v_fun.get())
# 若我们绑定了进度条,那么每当需要设置进度的时候,通过args["进度条"].set(当前进度)来进行设置吧,倒吸进度条也是可以哒
for i in range(1, 101):
time.sleep(0.01)
args["进度条"].set(i)
# 增加打印间隔
if i % 20 == 0:
print("当前进度", i)
MessageBox.warning(text="给个评价吧亲~")
# 也可以在终端中打印组件,顺便绑定用户调研函数
q_gui.print_tool(
RadioButton(
["满意", "一般", "你好垃圾啊"], title="体验如何?", name="feedback", bind_func=feedback
)
)
# 甚至打印图片
from qgui import RESOURCES_PATH
q_gui.print_image(os.path.join(RESOURCES_PATH, "demo/panda.jpg"))
def feedback(args: dict):
# 用户调研Callback
info = args["feedback"].get()
if info == "满意":
print("么么哒")
elif info == "一般":
print("啊啊啊,告诉GT哪里没做好吧")
else:
print("以后漂流瓶见吧,拜拜!")
def bind_dir(args: dict):
# 获取所选择文件所在的文件夹路径
path = os.path.dirname(args["文件选择"].get())
# 可以通过name参数来设置对应的内容,使用set方法即可完成设置
args["保存位置"].set(path)
print("保存位置已自动修改为:", path)
def go_to_first_page(args: dict):
args["QGUI-BaseNoteBook"].set(0)
# 创建主界面
q_gui = CreateQGUI(
title="一个新应用", # 界面标题
tab_names=["主控制台", "选择按钮", "其他小工具"], # 界面中心部分的分页标题 - 可不填
style=QStyle.default,
) # 皮肤
# 在界面最上方添加一个按钮,链接到GitHub主页
q_gui.add_banner_tool(GitHub(url="https://github.com/QPT-Family/QGUI"))
# 也可以是AI Studio
q_gui.add_banner_tool(
AIStudio(url="https://aistudio.baidu.com/aistudio/personalcenter/thirdview/29724")
)
# 要不试试自定义Banner按钮,在大家点击它时触发刚刚定义的click函数,并向它传递其他组件的情况
q_gui.add_banner_tool(BaseBarTool(bind_func=click, name="一个新组件"))
# 在主界面部分添加一个文件选择工具吧,并在选择文件后自动变为文件所在的路径
q_gui.add_notebook_tool(ChooseFileTextButton(name="文件选择", bind_func=bind_dir))
# 再加个文件夹选择工具
q_gui.add_notebook_tool(ChooseDirTextButton(name="保存位置"))
# 当然也可以来个输入框
q_gui.add_notebook_tool(InputBox(name="我是个木有感情的输入框"))
# 想要加一个 进度条 和 运行按钮 而且俩要水平方向排列该如何做?
# 试试HorizontalToolsCombine,它可以接受一组工具并将其进行水平排列
# 这里我们也为RunButton绑定click函数
run_menu = HorizontalToolsCombine(
[Progressbar(name="进度条"), RunButton(bind_func=click)],
text="试试HorizontalToolsCombine,它可以接受一组工具并将其进行水平排列",
)
q_gui.add_notebook_tool(run_menu)
# 第二页 - 复选框和单选框
# 使用VerticalFrameCombine可以将他们在垂直方向快速组合,它们会从左到右按顺序排列
combine_left = VerticalFrameCombine(
[
CheckButton(options=[("选择1", 0), ("选择2", 1), ("选择3", 0)]),
CheckToolButton(options=[("选择1", 0), ("选择2", 1), ("选择3", 0)]),
CheckObviousToolButton(options=[("选择1", 0), ("选择2", 1), ("选择3", 0)]),
ToggleButton(options=("开", 1)),
],
tab_index=1,
text="使用VerticalFrameCombine可以将他们在垂直方向快速组合,它们会从左到右按顺序排列",
)
q_gui.add_notebook_tool(combine_left)
# 设置title参数后会为其增加标题
combine_right = VerticalFrameCombine(
[
RadioButton(["选择1", "选择2", "选择3"], tab_index=1),
RadioToolButton(["选择1", "选择2", "选择3"], tab_index=1),
RadioObviousToolButton(["选择1", "选择2", "选择3"], tab_index=1),
],
title="右侧的复选框",
)
q_gui.add_notebook_tool(combine_right)
# 第三页
q_gui.add_notebook_tool(
Label(text="这只是个简单的Label组件", alignment=RIGHT + TOP, tab_index=2)
)
q_gui.add_notebook_tool(Slider(default=4, tab_index=2))
q_gui.add_notebook_tool(Combobox(options=["选择1", "选择2", "选择3"], tab_index=2))
q_gui.add_notebook_tool(
BaseButton(bind_func=go_to_first_page, text="回到首页", tab_index=2)
)
# 左侧信息栏
# 简单加个简介
q_gui.set_navigation_about(
author="GT",
version="0.0.1",
github_url="https://github.com/QPT-Family/QGUI",
other_info=["欢迎加入QPT!"],
)
# 也可以加一下其他信息
q_gui.set_navigation_info(title="随便写段话", info="除了QGUI,你还可以试试例如AgentQGUI这样同样简单的GUI框架")
print("小Tips:占位符可以被Print,不信你看HORIZONTAL的描述被打印了出来->", HORIZONTAL)
# 跑起来~切记!一定要放在程序末尾
q_gui.run()
PROMPT:
添加生成DataPack的选项,使得用户可以选择生成Bukkit插件或者DataPack。
在`ui.py`中可以在Generate Page里添加这行代码 `root.add_notebook_tool(RadioObviousToolButton(options=["BukkitPlugin", "Datapack(Experimental)"], name="GenerationType", title="Type", default="BukkitPlugin",tab_index=0))`
你还需要自行完成DataPack的生成逻辑。你可能需要更改的文件有`ui.py`, `core.py`, `config.yaml`, `console.py`。你还可能需要创建一些新的文件或文件夹,也有可能不需要。祝你好运。
2024-07-25T11:59:38.142362
CONSOLE OUTPUT:
Error while improving the project: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
Could you please upload the debug_log_file.txt in D:\zhousl\BukkitGPT\BukkitGPT-v3\.gpteng\memory/logs folder to github?
FULL STACK TRACE:
Traceback (most recent call last):
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 379, in handle_improve_mode
files_dict = agent.improve(files_dict, prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\applications\cli\cli_agent.py", line 208, in improve
files_dict = self.improve_fn(
^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 311, in improve_fn
return _improve_loop(ai, files_dict, memory, messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 317, in _improve_loop
messages = ai.next(messages, step_name=curr_fn())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 243, in next
response = self.backoff_inference(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\backoff\_sync.py", line 105, in retry
ret = target(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 287, in backoff_inference
return self.llm.invoke(messages) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 270, in invoke
self.generate_prompt(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 703, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 560, in generate
raise e
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 550, in generate
self._generate_with_cache(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 775, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 755, in _generate
return generate_from_stream(stream_iter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 102, in generate_from_stream
generation = next(stream, None)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 676, in _stream
stream = self._client.messages.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_utils\_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\resources\messages.py", line 902, in create
return self._post(
^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 1266, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 942, in request
return self._request(
^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 968, in _request
request = self._build_request(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 461, in _build_request
headers = self._build_headers(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 416, in _build_headers
self._validate_headers(headers_dict, custom_headers)
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_client.py", line 192, in _validate_headers
raise TypeError(
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
2024-07-25T12:05:50.603498
UPLOADED FILES:
File: config.py
import yaml
from log_writer import logger
def load_config():
"""
Loads the configuration from the 'config.yaml' file and sets the global variables accordingly.
If the 'GENERATE_MODEL' key in the configuration is set to 'gpt-4', it forces the use of 'gpt-4-turbo-preview'
as the value for the 'GENERATE_MODEL' key, since 'gpt-4' no longer supports json modes.
Returns:
None
"""
with open("config.yaml", "r") as conf:
config_content = yaml.safe_load(conf)
for key, value in config_content.items():
if key == "GENERATE_MODEL" and value == "gpt-4":
globals()[
key
] = "gpt-4-turbo-preview" # Force using gpt-4-turbo-preview if the user set the GENERATE_MODEL to gpt-4. Because gpt-4 is not longer supports json modes.
globals()[key] = value
logger(f"config: {key} -> {value}")
def edit_config(key, value):
"""
Edits the config file.
Args:
key (str): The key to edit.
value (str): The value to set.
Returns:
bool: True
"""
with open("config.yaml", "r") as conf:
config_content = conf.readlines()
with open("config.yaml", "w") as conf:
for line in config_content:
if line.startswith(key):
if value == True:
write_value = "True"
elif value == False:
write_value = "False"
else:
write_value = f'"{value}"'
if "#" in line:
conf.write(f"{key}: {write_value} # {line.split('#')[1]}\n")
else:
conf.write(f"{key}: {write_value}\n")
else:
conf.write(line)
return True
load_config()
File: config.yaml
########## EDIT REQUIRED ##########
# GPT SETTINGS #
# Get your api key from openai. Remember google/bing is always your best friend.
# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
# Recommend -> gpt-4-turbo (Better performance, more expensive), gpt-4-o (Good performance, cheaper)
API_KEY: "" # Free API Key with GPT-4 access: https://github.com/CubeGPT/.github/discussions/1
BASE_URL: "https://api.openai.com/v1/chat/completions"
GENERATION_MODEL: "gpt-4-turbo-2024-04-09"
FIXING_MODEL: "gpt-4-turbo-2024-04-09"
# DEVELOPER SETTINGS #
VERSION_NUMBER: "0.1.1"
# PROMPT SETTINGS #
# If you don't know what it is, please don't touch it. Be sure to backup before editing.
## Code Generation ##
SYS_GEN: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Write the code & choose a artifact name for the following files with the infomation which is also provided by the user:
codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java
codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml
codes/%ARTIFACT_NAME%/src/main/resources/config.yml
codes/%ARTIFACT_NAME%/pom.xml
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Uncompeleted".
USR_GEN: |
%DESCRIPTION%
SYS_FIX: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Fix the error in the code provided by user. The error message is also provided by the user.
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Original code" or "// Uncompeleted".
USR_FIX: |
Main.java:
%MAIN_JAVA%
plugin.yml:
%PLUGIN_YML%
config.yml:
%CONFIG_YML%
pom.xml:
%POM_XML%
error message:
%P_ERROR_MSG%
File: console.py
import sys
import uuid
import shutil
from log_writer import logger
import core
import config
import build
if __name__ == "__main__":
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
core.initialize()
print("BukkitGPT v3 beta console running")
# Get user inputs
name = input("Enter the plugin name: ")
description = input("Enter the plugin description: ")
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
print("Build failed. Passing the error to ChatGPT and let it to fix it?")
fix = input("Y/n: ")
if fix == "n":
print("Exiting...")
sys.exit(0)
else:
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", artifact_name),
config.USR_FIX.replace("%MAIN_JAVA%", main_java)
.replace("%PLUGIN_YML%", plugin_yml)
.replace("%CONFIG_YML%", config_yml)
.replace("%POM_XML%", pom_xml)
.replace("%P_ERROR_MSG%", result),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Error: Please run console.py as the main program instead of importing it from another program."
)
File: core.py
from openai import OpenAI
import chardet
import sys
import json
import locale
import os
from log_writer import logger
import config
def initialize():
"""
Initializes the software.
This function logs the software launch, including the version number and platform.
Args:
None
Returns:
None
"""
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
if (
"gpt-3.5" in config.GENERATION_MODEL
and config.BYPASS_NO_GPT35_FOR_GENERATION_LIMIT is False
):
print(
"gpt-3.5 writes bugs *all the time* and is not recommended for code generation. Switching to gpt-4."
)
config.edit_config(
"GENERATION_MODEL", config.GENERATION_MODEL.replace("gpt-3.5", "gpt-4")
)
def askgpt(
system_prompt: str,
user_prompt: str,
model_name: str,
disable_json_mode: bool = False,
image_url: str = None,
):
"""
Interacts with ChatGPT using the specified prompts.
Args:
system_prompt (str): The system prompt.
user_prompt (str): The user prompt.
model_name (str): The model name to use.
disable_json_mode (bool): Whether to disable JSON mode.
Returns:
str: The response from ChatGPT.
"""
if image_url is not None and config.USE_DIFFERENT_APIKEY_FOR_VISION_MODEL:
logger("Using different API key for vision model.")
client = OpenAI(api_key=config.VISION_API_KEY, base_url=config.VISION_BASE_URL)
else:
client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
logger("Initialized the OpenAI client.")
# Define the messages for the conversation
if image_url is not None:
messages = [
{"role": "system", "content": system_prompt},
{
"role": "user",
"content": [
{"type": "text", "text": user_prompt},
{"type": "image_url", "image_url": {"url": image_url}},
],
},
]
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
]
logger(f"askgpt: system {system_prompt}")
logger(f"askgpt: user {user_prompt}")
# Create a chat completion
if disable_json_mode:
response = client.chat.completions.create(model=model_name, messages=messages)
else:
response = client.chat.completions.create(
model=model_name, response_format={"type": "json_object"}, messages=messages
)
logger(f"askgpt: response {response}")
# Extract the assistant's reply
assistant_reply = response.choices[0].message.content
logger(f"askgpt: extracted reply {assistant_reply}")
return assistant_reply
def response_to_action(msg):
"""
Converts a response from ChatGPT to an action.
Args:
msg (str): The response from ChatGPT.
Returns:
str: The action to take.
"""
text = json.loads(msg)
codes = text["codes"]
for section in codes:
file = section["file"]
code = section["code"]
paths = file.split("/")
# Join the list elements to form a path
path = os.path.join(*paths)
# Get the directory path and the file name
dir_path, file_name = os.path.split(path)
# Create directories, if they don't exist
try:
os.makedirs(dir_path, exist_ok=True)
except FileNotFoundError:
pass
# Create the file
with open(path, "w") as f:
f.write(code) # Write an empty string to the file
def mixed_decode(text: str):
"""
Decode a mixed text containing both normal text and a byte sequence.
Args:
text (str): The mixed text to be decoded.
Returns:
str: The decoded text, where the byte sequence has been converted to its corresponding characters.
"""
# Split the normal text and the byte sequence
# Assuming the byte sequence is everything after the last colon and space ": "
try:
normal_text, byte_text = text.rsplit(": ", 1)
except (TypeError, ValueError):
# The text only contains normal text
return text
# Convert the byte sequence to actual bytes
byte_sequence = byte_text.encode(
"latin1"
) # latin1 encoding maps byte values directly to unicode code points
# Detect the encoding of the byte sequence
detected_encoding = chardet.detect(byte_sequence)
encoding = detected_encoding["encoding"]
# Decode the byte sequence
decoded_text = byte_sequence.decode(encoding)
# Combine the normal text with the decoded byte sequence
final_text = normal_text + ": " + decoded_text
return final_text
if __name__ == "__main__":
print("This script is not meant to be run directly. Please run console.py instead.")
File: log_writer.py
import os
from datetime import datetime
first_call_time = None
def get_log_filename():
global first_call_time
if first_call_time is None:
first_call_time = datetime.now()
log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
return log_filename
def logger(text: str):
log_filename = get_log_filename()
timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
log_line = f"{timestamp_prefix} {text}\n"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
with open(log_filename + ".log", "a", encoding="utf-8") as log_file:
log_file.write(log_line)
File: requirements.txt
openai>=1.13.3
pyyaml
cx_Freeze
ttkbootstrap
playwright
chardet
File: ui.py
from cube_qgui.__init__ import CreateQGUI
from cube_qgui.banner_tools import *
from cube_qgui.notebook_tools import *
from playwright.sync_api import Playwright, sync_playwright
import os
import shutil
import uuid
from log_writer import logger
import config
import core
import build
# ---------- Functions ----------#
def open_config(args: dict):
"""
Opens the config file.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
os.system("notepad config.yaml")
return True
def save_apply_config(args: dict):
"""
Saves and applies the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
keys = ["API_KEY", "BASE_URL"]
for key in keys:
value = args[key].get()
if key == "ADVANCED_MODE":
value = True if value == 1 else False
else:
pass
config.edit_config(key, value)
config.load_config()
args["DevTool_CONFIG_API_KEY_DISPLAY"].set(f"CONFIG.API_KEY = {config.API_KEY}")
args["DevTools_CONFIG_BASE_URL_DISPLAY"].set(f"CONFIG.BASE_URL = {config.BASE_URL}")
return True
def load_config(args: dict):
"""
Loads the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
config.load_config()
args["API_KEY"].set(config.API_KEY)
args["BASE_URL"].set(config.BASE_URL)
return True
def print_args(args: dict):
"""
Prints the arguments.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
for arg, v_fun in args.items():
print(f"Name: {arg}, Value: {v_fun.get()}")
return True
def raise_error(args: dict):
"""
Raises an error.
Args:
args (dict): A dictionary containing the arguments.
"""
raise Exception("This is a test error.")
# ---------- Generate Function ----------#
def generate(args: dict):
"""
Generates the plugin.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
global error_msg, pkg_id_path
# Get user inputs
name = args["PluginName"].get()
description = args["PluginDescription"].get()
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
error_msg = result
print(
"Build failed. To pass the error to ChatGPT && let it fix, jump to the Fixing page and click the Fix button."
)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
def fix(args: dict):
"""
Fixes the error.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
artifact_name = args["PluginName"].get()
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", str(artifact_name)),
config.USR_FIX.replace("%MAIN_JAVA%", str(main_java))
.replace("%PLUGIN_YML%", str(plugin_yml))
.replace("%CONFIG_YML%", str(config_yml))
.replace("%POM_XML%", str(pom_xml))
.replave("%PKG_ID_LST%", pkg_id_path)
.replace("%P_ERROR_MSG%", str(error_msg)),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed again. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
# ---------- Main Program ----------#
root = CreateQGUI(title="BukkitGPT-v3", tab_names=["Generate", "Settings", "DevTools"])
error_msg = None
logger("Starting program.")
# Initialize Core
core.initialize()
print("BukkitGPT v3 beta console running")
# Banner
root.add_banner_tool(GitHub("https://github.com/CubeGPT/BukkitGPT-v3"))
# Generate Page
root.add_notebook_tool(
InputBox(name="PluginName", default="ExamplePlugin", label_info="Plugin Name")
)
root.add_notebook_tool(
InputBox(
name="PluginDescription",
default="Send msg 'hello' to every joined player.",
label_info="Plugin Description",
)
)
root.add_notebook_tool(
RunButton(
bind_func=generate,
name="Generate",
text="Generate Plugin",
checked_text="Generating...",
tab_index=0,
)
)
# Fixing Page #
# root.add_notebook_tool(Label(name="Fixing_DESCRIPTION", text="This is a fixing page. If the build fails, click the Fix button to fix the error in the LATEST build.", tab_index=1))
# root.add_notebook_tool(RunButton(bind_func=fix, name="Fix", text="Fix", checked_text="Fixing...", tab_index=1))
# Settings Page
root.add_notebook_tool(
InputBox(name="API_KEY", default=config.API_KEY, label_info="API Key", tab_index=1)
)
root.add_notebook_tool(
InputBox(
name="BASE_URL", default=config.BASE_URL, label_info="BASE URL", tab_index=1
)
)
config_buttons = HorizontalToolsCombine(
[
BaseButton(
bind_func=save_apply_config,
name="Save & Apply Config",
text="Save & Apply",
tab_index=1,
),
BaseButton(
bind_func=load_config, name="Load Config", text="Load Config", tab_index=1
),
BaseButton(
bind_func=open_config,
name="Open Config",
text="Open Full Config",
tab_index=1,
),
]
)
root.add_notebook_tool(config_buttons)
# DevTools Page
root.add_notebook_tool(
Label(
name="DevTool_DESCRIPTION",
text="This is a testing page for developers. Ignore it if you are a normal user.",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTool_CONFIG_API_KEY_DISPLAY",
text=f"CONFIG.API_KEY = {config.API_KEY}",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTools_CONFIG_BASE_URL_DISPLAY",
text=f"CONFIG.BASE_URL = {config.BASE_URL}",
tab_index=2,
)
)
root.add_notebook_tool(
RunButton(bind_func=print_args, name="Print Args", text="Print Args", tab_index=2)
)
root.add_notebook_tool(
RunButton(
bind_func=raise_error, name="Raise Error", text="Raise Error", tab_index=2
)
)
# Sidebar
root.set_navigation_about(
author="CubeGPT Team",
version=config.VERSION_NUMBER,
github_url="https://github.com/CubeGPT/BukkitGPT-v3",
)
# Run
root.run()
PROMPT:
添加生成DataPack的选项,使得用户可以选择生成Bukkit插件或者DataPack。
在`ui.py`中可以在Generate Page里添加这行代码 `root.add_notebook_tool(RadioObviousToolButton(options=["BukkitPlugin", "Datapack(Experimental)"], name="GenerationType", title="Type", default="BukkitPlugin",tab_index=0))`
你还需要自行完成DataPack的生成逻辑。你可能需要更改的文件有`ui.py`, `core.py`, `config.yaml`, `console.py`。你还可能需要创建一些新的文件或文件夹,也有可能不需要。祝你好运。
2024-07-25T12:05:51.036844
CONSOLE OUTPUT:
Error while improving the project: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
Could you please upload the debug_log_file.txt in D:\zhousl\BukkitGPT\BukkitGPT-v3\.gpteng\memory/logs folder to github?
FULL STACK TRACE:
Traceback (most recent call last):
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 379, in handle_improve_mode
files_dict = agent.improve(files_dict, prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\applications\cli\cli_agent.py", line 208, in improve
files_dict = self.improve_fn(
^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 311, in improve_fn
return _improve_loop(ai, files_dict, memory, messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 317, in _improve_loop
messages = ai.next(messages, step_name=curr_fn())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 243, in next
response = self.backoff_inference(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\backoff\_sync.py", line 105, in retry
ret = target(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 287, in backoff_inference
return self.llm.invoke(messages) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 270, in invoke
self.generate_prompt(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 703, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 560, in generate
raise e
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 550, in generate
self._generate_with_cache(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 775, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 755, in _generate
return generate_from_stream(stream_iter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 102, in generate_from_stream
generation = next(stream, None)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 676, in _stream
stream = self._client.messages.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_utils\_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\resources\messages.py", line 902, in create
return self._post(
^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 1266, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 942, in request
return self._request(
^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 968, in _request
request = self._build_request(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 461, in _build_request
headers = self._build_headers(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 416, in _build_headers
self._validate_headers(headers_dict, custom_headers)
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_client.py", line 192, in _validate_headers
raise TypeError(
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
2024-07-25T12:10:01.414816
UPLOADED FILES:
File: config.py
import yaml
from log_writer import logger
def load_config():
"""
Loads the configuration from the 'config.yaml' file and sets the global variables accordingly.
If the 'GENERATE_MODEL' key in the configuration is set to 'gpt-4', it forces the use of 'gpt-4-turbo-preview'
as the value for the 'GENERATE_MODEL' key, since 'gpt-4' no longer supports json modes.
Returns:
None
"""
with open("config.yaml", "r") as conf:
config_content = yaml.safe_load(conf)
for key, value in config_content.items():
if key == "GENERATE_MODEL" and value == "gpt-4":
globals()[
key
] = "gpt-4-turbo-preview" # Force using gpt-4-turbo-preview if the user set the GENERATE_MODEL to gpt-4. Because gpt-4 is not longer supports json modes.
globals()[key] = value
logger(f"config: {key} -> {value}")
def edit_config(key, value):
"""
Edits the config file.
Args:
key (str): The key to edit.
value (str): The value to set.
Returns:
bool: True
"""
with open("config.yaml", "r") as conf:
config_content = conf.readlines()
with open("config.yaml", "w") as conf:
for line in config_content:
if line.startswith(key):
if value == True:
write_value = "True"
elif value == False:
write_value = "False"
else:
write_value = f'"{value}"'
if "#" in line:
conf.write(f"{key}: {write_value} # {line.split('#')[1]}\n")
else:
conf.write(f"{key}: {write_value}\n")
else:
conf.write(line)
return True
load_config()
File: config.yaml
########## EDIT REQUIRED ##########
# GPT SETTINGS #
# Get your api key from openai. Remember google/bing is always your best friend.
# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
# Recommend -> gpt-4-turbo (Better performance, more expensive), gpt-4-o (Good performance, cheaper)
API_KEY: "" # Free API Key with GPT-4 access: https://github.com/CubeGPT/.github/discussions/1
BASE_URL: "https://api.openai.com/v1/chat/completions"
GENERATION_MODEL: "gpt-4-turbo-2024-04-09"
FIXING_MODEL: "gpt-4-turbo-2024-04-09"
# DEVELOPER SETTINGS #
VERSION_NUMBER: "0.1.1"
# PROMPT SETTINGS #
# If you don't know what it is, please don't touch it. Be sure to backup before editing.
## Code Generation ##
SYS_GEN: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Write the code & choose a artifact name for the following files with the infomation which is also provided by the user:
codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java
codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml
codes/%ARTIFACT_NAME%/src/main/resources/config.yml
codes/%ARTIFACT_NAME%/pom.xml
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Uncompeleted".
USR_GEN: |
%DESCRIPTION%
SYS_FIX: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Fix the error in the code provided by user. The error message is also provided by the user.
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Original code" or "// Uncompeleted".
USR_FIX: |
Main.java:
%MAIN_JAVA%
plugin.yml:
%PLUGIN_YML%
config.yml:
%CONFIG_YML%
pom.xml:
%POM_XML%
error message:
%P_ERROR_MSG%
File: console.py
import sys
import uuid
import shutil
from log_writer import logger
import core
import config
import build
if __name__ == "__main__":
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
core.initialize()
print("BukkitGPT v3 beta console running")
# Get user inputs
name = input("Enter the plugin name: ")
description = input("Enter the plugin description: ")
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
print("Build failed. Passing the error to ChatGPT and let it to fix it?")
fix = input("Y/n: ")
if fix == "n":
print("Exiting...")
sys.exit(0)
else:
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", artifact_name),
config.USR_FIX.replace("%MAIN_JAVA%", main_java)
.replace("%PLUGIN_YML%", plugin_yml)
.replace("%CONFIG_YML%", config_yml)
.replace("%POM_XML%", pom_xml)
.replace("%P_ERROR_MSG%", result),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Error: Please run console.py as the main program instead of importing it from another program."
)
File: core.py
from openai import OpenAI
import chardet
import sys
import json
import locale
import os
from log_writer import logger
import config
def initialize():
"""
Initializes the software.
This function logs the software launch, including the version number and platform.
Args:
None
Returns:
None
"""
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
if (
"gpt-3.5" in config.GENERATION_MODEL
and config.BYPASS_NO_GPT35_FOR_GENERATION_LIMIT is False
):
print(
"gpt-3.5 writes bugs *all the time* and is not recommended for code generation. Switching to gpt-4."
)
config.edit_config(
"GENERATION_MODEL", config.GENERATION_MODEL.replace("gpt-3.5", "gpt-4")
)
def askgpt(
system_prompt: str,
user_prompt: str,
model_name: str,
disable_json_mode: bool = False,
image_url: str = None,
):
"""
Interacts with ChatGPT using the specified prompts.
Args:
system_prompt (str): The system prompt.
user_prompt (str): The user prompt.
model_name (str): The model name to use.
disable_json_mode (bool): Whether to disable JSON mode.
Returns:
str: The response from ChatGPT.
"""
if image_url is not None and config.USE_DIFFERENT_APIKEY_FOR_VISION_MODEL:
logger("Using different API key for vision model.")
client = OpenAI(api_key=config.VISION_API_KEY, base_url=config.VISION_BASE_URL)
else:
client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
logger("Initialized the OpenAI client.")
# Define the messages for the conversation
if image_url is not None:
messages = [
{"role": "system", "content": system_prompt},
{
"role": "user",
"content": [
{"type": "text", "text": user_prompt},
{"type": "image_url", "image_url": {"url": image_url}},
],
},
]
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
]
logger(f"askgpt: system {system_prompt}")
logger(f"askgpt: user {user_prompt}")
# Create a chat completion
if disable_json_mode:
response = client.chat.completions.create(model=model_name, messages=messages)
else:
response = client.chat.completions.create(
model=model_name, response_format={"type": "json_object"}, messages=messages
)
logger(f"askgpt: response {response}")
# Extract the assistant's reply
assistant_reply = response.choices[0].message.content
logger(f"askgpt: extracted reply {assistant_reply}")
return assistant_reply
def response_to_action(msg):
"""
Converts a response from ChatGPT to an action.
Args:
msg (str): The response from ChatGPT.
Returns:
str: The action to take.
"""
text = json.loads(msg)
codes = text["codes"]
for section in codes:
file = section["file"]
code = section["code"]
paths = file.split("/")
# Join the list elements to form a path
path = os.path.join(*paths)
# Get the directory path and the file name
dir_path, file_name = os.path.split(path)
# Create directories, if they don't exist
try:
os.makedirs(dir_path, exist_ok=True)
except FileNotFoundError:
pass
# Create the file
with open(path, "w") as f:
f.write(code) # Write an empty string to the file
def mixed_decode(text: str):
"""
Decode a mixed text containing both normal text and a byte sequence.
Args:
text (str): The mixed text to be decoded.
Returns:
str: The decoded text, where the byte sequence has been converted to its corresponding characters.
"""
# Split the normal text and the byte sequence
# Assuming the byte sequence is everything after the last colon and space ": "
try:
normal_text, byte_text = text.rsplit(": ", 1)
except (TypeError, ValueError):
# The text only contains normal text
return text
# Convert the byte sequence to actual bytes
byte_sequence = byte_text.encode(
"latin1"
) # latin1 encoding maps byte values directly to unicode code points
# Detect the encoding of the byte sequence
detected_encoding = chardet.detect(byte_sequence)
encoding = detected_encoding["encoding"]
# Decode the byte sequence
decoded_text = byte_sequence.decode(encoding)
# Combine the normal text with the decoded byte sequence
final_text = normal_text + ": " + decoded_text
return final_text
if __name__ == "__main__":
print("This script is not meant to be run directly. Please run console.py instead.")
File: log_writer.py
import os
from datetime import datetime
first_call_time = None
def get_log_filename():
global first_call_time
if first_call_time is None:
first_call_time = datetime.now()
log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
return log_filename
def logger(text: str):
log_filename = get_log_filename()
timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
log_line = f"{timestamp_prefix} {text}\n"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
with open(log_filename + ".log", "a", encoding="utf-8") as log_file:
log_file.write(log_line)
File: requirements.txt
openai>=1.13.3
pyyaml
cx_Freeze
ttkbootstrap
playwright
chardet
File: ui.py
from cube_qgui.__init__ import CreateQGUI
from cube_qgui.banner_tools import *
from cube_qgui.notebook_tools import *
from playwright.sync_api import Playwright, sync_playwright
import os
import shutil
import uuid
from log_writer import logger
import config
import core
import build
# ---------- Functions ----------#
def open_config(args: dict):
"""
Opens the config file.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
os.system("notepad config.yaml")
return True
def save_apply_config(args: dict):
"""
Saves and applies the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
keys = ["API_KEY", "BASE_URL"]
for key in keys:
value = args[key].get()
if key == "ADVANCED_MODE":
value = True if value == 1 else False
else:
pass
config.edit_config(key, value)
config.load_config()
args["DevTool_CONFIG_API_KEY_DISPLAY"].set(f"CONFIG.API_KEY = {config.API_KEY}")
args["DevTools_CONFIG_BASE_URL_DISPLAY"].set(f"CONFIG.BASE_URL = {config.BASE_URL}")
return True
def load_config(args: dict):
"""
Loads the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
config.load_config()
args["API_KEY"].set(config.API_KEY)
args["BASE_URL"].set(config.BASE_URL)
return True
def print_args(args: dict):
"""
Prints the arguments.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
for arg, v_fun in args.items():
print(f"Name: {arg}, Value: {v_fun.get()}")
return True
def raise_error(args: dict):
"""
Raises an error.
Args:
args (dict): A dictionary containing the arguments.
"""
raise Exception("This is a test error.")
# ---------- Generate Function ----------#
def generate(args: dict):
"""
Generates the plugin.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
global error_msg, pkg_id_path
# Get user inputs
name = args["PluginName"].get()
description = args["PluginDescription"].get()
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
error_msg = result
print(
"Build failed. To pass the error to ChatGPT && let it fix, jump to the Fixing page and click the Fix button."
)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
def fix(args: dict):
"""
Fixes the error.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
artifact_name = args["PluginName"].get()
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", str(artifact_name)),
config.USR_FIX.replace("%MAIN_JAVA%", str(main_java))
.replace("%PLUGIN_YML%", str(plugin_yml))
.replace("%CONFIG_YML%", str(config_yml))
.replace("%POM_XML%", str(pom_xml))
.replave("%PKG_ID_LST%", pkg_id_path)
.replace("%P_ERROR_MSG%", str(error_msg)),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed again. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
# ---------- Main Program ----------#
root = CreateQGUI(title="BukkitGPT-v3", tab_names=["Generate", "Settings", "DevTools"])
error_msg = None
logger("Starting program.")
# Initialize Core
core.initialize()
print("BukkitGPT v3 beta console running")
# Banner
root.add_banner_tool(GitHub("https://github.com/CubeGPT/BukkitGPT-v3"))
# Generate Page
root.add_notebook_tool(
InputBox(name="PluginName", default="ExamplePlugin", label_info="Plugin Name")
)
root.add_notebook_tool(
InputBox(
name="PluginDescription",
default="Send msg 'hello' to every joined player.",
label_info="Plugin Description",
)
)
root.add_notebook_tool(
RunButton(
bind_func=generate,
name="Generate",
text="Generate Plugin",
checked_text="Generating...",
tab_index=0,
)
)
# Fixing Page #
# root.add_notebook_tool(Label(name="Fixing_DESCRIPTION", text="This is a fixing page. If the build fails, click the Fix button to fix the error in the LATEST build.", tab_index=1))
# root.add_notebook_tool(RunButton(bind_func=fix, name="Fix", text="Fix", checked_text="Fixing...", tab_index=1))
# Settings Page
root.add_notebook_tool(
InputBox(name="API_KEY", default=config.API_KEY, label_info="API Key", tab_index=1)
)
root.add_notebook_tool(
InputBox(
name="BASE_URL", default=config.BASE_URL, label_info="BASE URL", tab_index=1
)
)
config_buttons = HorizontalToolsCombine(
[
BaseButton(
bind_func=save_apply_config,
name="Save & Apply Config",
text="Save & Apply",
tab_index=1,
),
BaseButton(
bind_func=load_config, name="Load Config", text="Load Config", tab_index=1
),
BaseButton(
bind_func=open_config,
name="Open Config",
text="Open Full Config",
tab_index=1,
),
]
)
root.add_notebook_tool(config_buttons)
# DevTools Page
root.add_notebook_tool(
Label(
name="DevTool_DESCRIPTION",
text="This is a testing page for developers. Ignore it if you are a normal user.",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTool_CONFIG_API_KEY_DISPLAY",
text=f"CONFIG.API_KEY = {config.API_KEY}",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTools_CONFIG_BASE_URL_DISPLAY",
text=f"CONFIG.BASE_URL = {config.BASE_URL}",
tab_index=2,
)
)
root.add_notebook_tool(
RunButton(bind_func=print_args, name="Print Args", text="Print Args", tab_index=2)
)
root.add_notebook_tool(
RunButton(
bind_func=raise_error, name="Raise Error", text="Raise Error", tab_index=2
)
)
# Sidebar
root.set_navigation_about(
author="CubeGPT Team",
version=config.VERSION_NUMBER,
github_url="https://github.com/CubeGPT/BukkitGPT-v3",
)
# Run
root.run()
PROMPT:
添加生成DataPack的选项,使得用户可以选择生成Bukkit插件或者DataPack。
在`ui.py`中可以在Generate Page里添加这行代码 `root.add_notebook_tool(RadioObviousToolButton(options=["BukkitPlugin", "Datapack(Experimental)"], name="GenerationType", title="Type", default="BukkitPlugin",tab_index=0))`
你还需要自行完成DataPack的生成逻辑。你可能需要更改的文件有`ui.py`, `core.py`, `config.yaml`, `console.py`。你还可能需要创建一些新的文件或文件夹,也有可能不需要。祝你好运。
2024-07-25T12:10:01.841094
CONSOLE OUTPUT:
Error while improving the project: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
Could you please upload the debug_log_file.txt in D:\zhousl\BukkitGPT\BukkitGPT-v3\.gpteng\memory/logs folder to github?
FULL STACK TRACE:
Traceback (most recent call last):
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 379, in handle_improve_mode
files_dict = agent.improve(files_dict, prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\applications\cli\cli_agent.py", line 208, in improve
files_dict = self.improve_fn(
^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 311, in improve_fn
return _improve_loop(ai, files_dict, memory, messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 317, in _improve_loop
messages = ai.next(messages, step_name=curr_fn())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 243, in next
response = self.backoff_inference(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\backoff\_sync.py", line 105, in retry
ret = target(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 287, in backoff_inference
return self.llm.invoke(messages) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 270, in invoke
self.generate_prompt(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 703, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 560, in generate
raise e
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 550, in generate
self._generate_with_cache(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 775, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 755, in _generate
return generate_from_stream(stream_iter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 102, in generate_from_stream
generation = next(stream, None)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 676, in _stream
stream = self._client.messages.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_utils\_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\resources\messages.py", line 902, in create
return self._post(
^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 1266, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 942, in request
return self._request(
^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 968, in _request
request = self._build_request(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 461, in _build_request
headers = self._build_headers(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 416, in _build_headers
self._validate_headers(headers_dict, custom_headers)
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_client.py", line 192, in _validate_headers
raise TypeError(
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
2024-07-25T12:12:17.814305
UPLOADED FILES:
File: config.py
import yaml
from log_writer import logger
def load_config():
"""
Loads the configuration from the 'config.yaml' file and sets the global variables accordingly.
If the 'GENERATE_MODEL' key in the configuration is set to 'gpt-4', it forces the use of 'gpt-4-turbo-preview'
as the value for the 'GENERATE_MODEL' key, since 'gpt-4' no longer supports json modes.
Returns:
None
"""
with open("config.yaml", "r") as conf:
config_content = yaml.safe_load(conf)
for key, value in config_content.items():
if key == "GENERATE_MODEL" and value == "gpt-4":
globals()[
key
] = "gpt-4-turbo-preview" # Force using gpt-4-turbo-preview if the user set the GENERATE_MODEL to gpt-4. Because gpt-4 is not longer supports json modes.
globals()[key] = value
logger(f"config: {key} -> {value}")
def edit_config(key, value):
"""
Edits the config file.
Args:
key (str): The key to edit.
value (str): The value to set.
Returns:
bool: True
"""
with open("config.yaml", "r") as conf:
config_content = conf.readlines()
with open("config.yaml", "w") as conf:
for line in config_content:
if line.startswith(key):
if value == True:
write_value = "True"
elif value == False:
write_value = "False"
else:
write_value = f'"{value}"'
if "#" in line:
conf.write(f"{key}: {write_value} # {line.split('#')[1]}\n")
else:
conf.write(f"{key}: {write_value}\n")
else:
conf.write(line)
return True
load_config()
File: config.yaml
########## EDIT REQUIRED ##########
# GPT SETTINGS #
# Get your api key from openai. Remember google/bing is always your best friend.
# Model names: gpt-4-turbo-preview, gpt-3.5-turbo, etc.
# Recommend -> gpt-4-turbo (Better performance, more expensive), gpt-4-o (Good performance, cheaper)
API_KEY: "" # Free API Key with GPT-4 access: https://github.com/CubeGPT/.github/discussions/1
BASE_URL: "https://api.openai.com/v1/chat/completions"
GENERATION_MODEL: "gpt-4-turbo-2024-04-09"
FIXING_MODEL: "gpt-4-turbo-2024-04-09"
# DEVELOPER SETTINGS #
VERSION_NUMBER: "0.1.1"
# PROMPT SETTINGS #
# If you don't know what it is, please don't touch it. Be sure to backup before editing.
## Code Generation ##
SYS_GEN: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Write the code & choose a artifact name for the following files with the infomation which is also provided by the user:
codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java
codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml
codes/%ARTIFACT_NAME%/src/main/resources/config.yml
codes/%ARTIFACT_NAME%/pom.xml
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Uncompeleted".
USR_GEN: |
%DESCRIPTION%
SYS_FIX: |
You're a minecraft bukkit plugin coder AI. Game Version: 1.13.2 (1.13.2-R0.1-SNAPSHOT)
Fix the error in the code provided by user. The error message is also provided by the user.
Response in json format:
{
\"codes\": [
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/java/%PKG_ID_LST%Main.java\",
\"code\": \"package ...;\\nimport org.bukkit.Bukkit;\\npublic class Main extends JavaPlugin implements CommandExecutor {\\n... (The code you need to write)\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/plugin.yml\",
\"code\": \"name: ...\\nversion: ...\\n...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/src/main/resources/config.yml\",
\"code\": \"...\"
},
{
\"file\": \"codes/%ARTIFACT_NAME%/pom.xml\",
\"code\": \"...\"
}
]
}
You should never response anything else. Never use Markdown format. Use \n for line feed, and never forget to use \ before ". Never write uncompeleted codes, such as leave a comment that says "// Your codes here" or "// Original code" or "// Uncompeleted".
USR_FIX: |
Main.java:
%MAIN_JAVA%
plugin.yml:
%PLUGIN_YML%
config.yml:
%CONFIG_YML%
pom.xml:
%POM_XML%
error message:
%P_ERROR_MSG%
File: console.py
import sys
import uuid
import shutil
from log_writer import logger
import core
import config
import build
if __name__ == "__main__":
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
core.initialize()
print("BukkitGPT v3 beta console running")
# Get user inputs
name = input("Enter the plugin name: ")
description = input("Enter the plugin description: ")
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
print("Build failed. Passing the error to ChatGPT and let it to fix it?")
fix = input("Y/n: ")
if fix == "n":
print("Exiting...")
sys.exit(0)
else:
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", artifact_name),
config.USR_FIX.replace("%MAIN_JAVA%", main_java)
.replace("%PLUGIN_YML%", plugin_yml)
.replace("%CONFIG_YML%", config_yml)
.replace("%POM_XML%", pom_xml)
.replace("%P_ERROR_MSG%", result),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
print("Exiting...")
sys.exit(0)
else:
print(
"Error: Please run console.py as the main program instead of importing it from another program."
)
File: core.py
from openai import OpenAI
import chardet
import sys
import json
import locale
import os
from log_writer import logger
import config
def initialize():
"""
Initializes the software.
This function logs the software launch, including the version number and platform.
Args:
None
Returns:
None
"""
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
logger(f"Launch. Software version {config.VERSION_NUMBER}, platform {sys.platform}")
if (
"gpt-3.5" in config.GENERATION_MODEL
and config.BYPASS_NO_GPT35_FOR_GENERATION_LIMIT is False
):
print(
"gpt-3.5 writes bugs *all the time* and is not recommended for code generation. Switching to gpt-4."
)
config.edit_config(
"GENERATION_MODEL", config.GENERATION_MODEL.replace("gpt-3.5", "gpt-4")
)
def askgpt(
system_prompt: str,
user_prompt: str,
model_name: str,
disable_json_mode: bool = False,
image_url: str = None,
):
"""
Interacts with ChatGPT using the specified prompts.
Args:
system_prompt (str): The system prompt.
user_prompt (str): The user prompt.
model_name (str): The model name to use.
disable_json_mode (bool): Whether to disable JSON mode.
Returns:
str: The response from ChatGPT.
"""
if image_url is not None and config.USE_DIFFERENT_APIKEY_FOR_VISION_MODEL:
logger("Using different API key for vision model.")
client = OpenAI(api_key=config.VISION_API_KEY, base_url=config.VISION_BASE_URL)
else:
client = OpenAI(api_key=config.API_KEY, base_url=config.BASE_URL)
logger("Initialized the OpenAI client.")
# Define the messages for the conversation
if image_url is not None:
messages = [
{"role": "system", "content": system_prompt},
{
"role": "user",
"content": [
{"type": "text", "text": user_prompt},
{"type": "image_url", "image_url": {"url": image_url}},
],
},
]
else:
messages = [
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
]
logger(f"askgpt: system {system_prompt}")
logger(f"askgpt: user {user_prompt}")
# Create a chat completion
if disable_json_mode:
response = client.chat.completions.create(model=model_name, messages=messages)
else:
response = client.chat.completions.create(
model=model_name, response_format={"type": "json_object"}, messages=messages
)
logger(f"askgpt: response {response}")
# Extract the assistant's reply
assistant_reply = response.choices[0].message.content
logger(f"askgpt: extracted reply {assistant_reply}")
return assistant_reply
def response_to_action(msg):
"""
Converts a response from ChatGPT to an action.
Args:
msg (str): The response from ChatGPT.
Returns:
str: The action to take.
"""
text = json.loads(msg)
codes = text["codes"]
for section in codes:
file = section["file"]
code = section["code"]
paths = file.split("/")
# Join the list elements to form a path
path = os.path.join(*paths)
# Get the directory path and the file name
dir_path, file_name = os.path.split(path)
# Create directories, if they don't exist
try:
os.makedirs(dir_path, exist_ok=True)
except FileNotFoundError:
pass
# Create the file
with open(path, "w") as f:
f.write(code) # Write an empty string to the file
def mixed_decode(text: str):
"""
Decode a mixed text containing both normal text and a byte sequence.
Args:
text (str): The mixed text to be decoded.
Returns:
str: The decoded text, where the byte sequence has been converted to its corresponding characters.
"""
# Split the normal text and the byte sequence
# Assuming the byte sequence is everything after the last colon and space ": "
try:
normal_text, byte_text = text.rsplit(": ", 1)
except (TypeError, ValueError):
# The text only contains normal text
return text
# Convert the byte sequence to actual bytes
byte_sequence = byte_text.encode(
"latin1"
) # latin1 encoding maps byte values directly to unicode code points
# Detect the encoding of the byte sequence
detected_encoding = chardet.detect(byte_sequence)
encoding = detected_encoding["encoding"]
# Decode the byte sequence
decoded_text = byte_sequence.decode(encoding)
# Combine the normal text with the decoded byte sequence
final_text = normal_text + ": " + decoded_text
return final_text
if __name__ == "__main__":
print("This script is not meant to be run directly. Please run console.py instead.")
File: log_writer.py
import os
from datetime import datetime
first_call_time = None
def get_log_filename():
global first_call_time
if first_call_time is None:
first_call_time = datetime.now()
log_filename = first_call_time.strftime("logs/%b-%d-%H-%M-%S-%Y")
return log_filename
def logger(text: str):
log_filename = get_log_filename()
timestamp_prefix = datetime.now().strftime("[%H:%M:%S]")
log_line = f"{timestamp_prefix} {text}\n"
os.makedirs(os.path.dirname(log_filename), exist_ok=True)
with open(log_filename + ".log", "a", encoding="utf-8") as log_file:
log_file.write(log_line)
File: requirements.txt
openai>=1.13.3
pyyaml
cx_Freeze
ttkbootstrap
playwright
chardet
File: ui.py
from cube_qgui.__init__ import CreateQGUI
from cube_qgui.banner_tools import *
from cube_qgui.notebook_tools import *
from playwright.sync_api import Playwright, sync_playwright
import os
import shutil
import uuid
from log_writer import logger
import config
import core
import build
# ---------- Functions ----------#
def open_config(args: dict):
"""
Opens the config file.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
os.system("notepad config.yaml")
return True
def save_apply_config(args: dict):
"""
Saves and applies the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
keys = ["API_KEY", "BASE_URL"]
for key in keys:
value = args[key].get()
if key == "ADVANCED_MODE":
value = True if value == 1 else False
else:
pass
config.edit_config(key, value)
config.load_config()
args["DevTool_CONFIG_API_KEY_DISPLAY"].set(f"CONFIG.API_KEY = {config.API_KEY}")
args["DevTools_CONFIG_BASE_URL_DISPLAY"].set(f"CONFIG.BASE_URL = {config.BASE_URL}")
return True
def load_config(args: dict):
"""
Loads the configuration.
Args:
args (dict): A dictionary containing the necessary arguments.
Returns:
bool: Always True.
"""
config.load_config()
args["API_KEY"].set(config.API_KEY)
args["BASE_URL"].set(config.BASE_URL)
return True
def print_args(args: dict):
"""
Prints the arguments.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
for arg, v_fun in args.items():
print(f"Name: {arg}, Value: {v_fun.get()}")
return True
def raise_error(args: dict):
"""
Raises an error.
Args:
args (dict): A dictionary containing the arguments.
"""
raise Exception("This is a test error.")
# ---------- Generate Function ----------#
def generate(args: dict):
"""
Generates the plugin.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
global error_msg, pkg_id_path
# Get user inputs
name = args["PluginName"].get()
description = args["PluginDescription"].get()
artifact_name = name.replace(" ", "")
package_id = f"org.cubegpt.{uuid.uuid4().hex[:8]}"
pkg_id_path = ""
for id in package_id.split("."):
pkg_id_path += id + "/"
logger(f"user_input -> name: {name}")
logger(f"user_input -> description: {description}")
logger(f"random_generate -> package_id: {package_id}")
logger(f"str_path -> pkg_id_path: {pkg_id_path}")
print("Generating plugin...")
codes = core.askgpt(
config.SYS_GEN.replace("%ARTIFACT_NAME%", artifact_name).replace(
"%PKG_ID_LST%", pkg_id_path
),
config.USR_GEN.replace("%DESCRIPTION", description),
config.GENERATION_MODEL,
)
logger(f"codes: {codes}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
elif "Compilation failure":
error_msg = result
print(
"Build failed. To pass the error to ChatGPT && let it fix, jump to the Fixing page and click the Fix button."
)
else:
print(
"Unknown error. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
def fix(args: dict):
"""
Fixes the error.
Args:
args (dict): A dictionary containing the arguments.
Returns:
bool: Always True.
"""
artifact_name = args["PluginName"].get()
print("Passing the error to ChatGPT...")
files = [
f"codes/{artifact_name}/src/main/java/{pkg_id_path}Main.java",
f"codes/{artifact_name}/src/main/resources/plugin.yml",
f"codes/{artifact_name}/src/main/resources/config.yml",
f"codes/{artifact_name}/pom.xml",
]
ids = ["main_java", "plugin_yml", "config_yml", "pom_xml"]
main_java = None
plugin_yml = None
config_yml = None
pom_xml = None
for file in files:
with open(file, "r") as f:
code = f.read()
id = ids[files.index(file)]
globals()[id] = code
print("Generating...")
codes = core.askgpt(
config.SYS_FIX.replace("%ARTIFACT_NAME%", str(artifact_name)),
config.USR_FIX.replace("%MAIN_JAVA%", str(main_java))
.replace("%PLUGIN_YML%", str(plugin_yml))
.replace("%CONFIG_YML%", str(config_yml))
.replace("%POM_XML%", str(pom_xml))
.replave("%PKG_ID_LST%", pkg_id_path)
.replace("%P_ERROR_MSG%", str(error_msg)),
config.FIXING_MODEL,
)
shutil.rmtree(f"codes/{artifact_name}")
core.response_to_action(codes)
print("Code generated. Building now...")
result = build.build_plugin(artifact_name)
if "BUILD SUCCESS" in result:
print(
f"Build complete. Find your plugin at 'codes/{artifact_name}/target/{artifact_name}.jar'"
)
else:
print(
"Build failed again. Please check the logs && send the log to @BaimoQilin on discord."
)
return True
# ---------- Main Program ----------#
root = CreateQGUI(title="BukkitGPT-v3", tab_names=["Generate", "Settings", "DevTools"])
error_msg = None
logger("Starting program.")
# Initialize Core
core.initialize()
print("BukkitGPT v3 beta console running")
# Banner
root.add_banner_tool(GitHub("https://github.com/CubeGPT/BukkitGPT-v3"))
# Generate Page
root.add_notebook_tool(
InputBox(name="PluginName", default="ExamplePlugin", label_info="Plugin Name")
)
root.add_notebook_tool(
InputBox(
name="PluginDescription",
default="Send msg 'hello' to every joined player.",
label_info="Plugin Description",
)
)
root.add_notebook_tool(
RunButton(
bind_func=generate,
name="Generate",
text="Generate Plugin",
checked_text="Generating...",
tab_index=0,
)
)
# Fixing Page #
# root.add_notebook_tool(Label(name="Fixing_DESCRIPTION", text="This is a fixing page. If the build fails, click the Fix button to fix the error in the LATEST build.", tab_index=1))
# root.add_notebook_tool(RunButton(bind_func=fix, name="Fix", text="Fix", checked_text="Fixing...", tab_index=1))
# Settings Page
root.add_notebook_tool(
InputBox(name="API_KEY", default=config.API_KEY, label_info="API Key", tab_index=1)
)
root.add_notebook_tool(
InputBox(
name="BASE_URL", default=config.BASE_URL, label_info="BASE URL", tab_index=1
)
)
config_buttons = HorizontalToolsCombine(
[
BaseButton(
bind_func=save_apply_config,
name="Save & Apply Config",
text="Save & Apply",
tab_index=1,
),
BaseButton(
bind_func=load_config, name="Load Config", text="Load Config", tab_index=1
),
BaseButton(
bind_func=open_config,
name="Open Config",
text="Open Full Config",
tab_index=1,
),
]
)
root.add_notebook_tool(config_buttons)
# DevTools Page
root.add_notebook_tool(
Label(
name="DevTool_DESCRIPTION",
text="This is a testing page for developers. Ignore it if you are a normal user.",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTool_CONFIG_API_KEY_DISPLAY",
text=f"CONFIG.API_KEY = {config.API_KEY}",
tab_index=2,
)
)
root.add_notebook_tool(
Label(
name="DevTools_CONFIG_BASE_URL_DISPLAY",
text=f"CONFIG.BASE_URL = {config.BASE_URL}",
tab_index=2,
)
)
root.add_notebook_tool(
RunButton(bind_func=print_args, name="Print Args", text="Print Args", tab_index=2)
)
root.add_notebook_tool(
RunButton(
bind_func=raise_error, name="Raise Error", text="Raise Error", tab_index=2
)
)
# Sidebar
root.set_navigation_about(
author="CubeGPT Team",
version=config.VERSION_NUMBER,
github_url="https://github.com/CubeGPT/BukkitGPT-v3",
)
# Run
root.run()
PROMPT:
添加生成DataPack的选项,使得用户可以选择生成Bukkit插件或者DataPack。
在`ui.py`中可以在Generate Page里添加这行代码 `root.add_notebook_tool(RadioObviousToolButton(options=["BukkitPlugin", "Datapack(Experimental)"], name="GenerationType", title="Type", default="BukkitPlugin",tab_index=0))`
你还需要自行完成DataPack的生成逻辑。你可能需要更改的文件有`ui.py`, `core.py`, `config.yaml`, `console.py`。你还可能需要创建一些新的文件或文件夹,也有可能不需要。祝你好运。
2024-07-25T12:12:18.250819
CONSOLE OUTPUT:
Error while improving the project: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
Could you please upload the debug_log_file.txt in D:\zhousl\BukkitGPT\BukkitGPT-v3\.gpteng\memory/logs folder to github?
FULL STACK TRACE:
Traceback (most recent call last):
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 379, in handle_improve_mode
files_dict = agent.improve(files_dict, prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\applications\cli\cli_agent.py", line 208, in improve
files_dict = self.improve_fn(
^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 311, in improve_fn
return _improve_loop(ai, files_dict, memory, messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\default\steps.py", line 317, in _improve_loop
messages = ai.next(messages, step_name=curr_fn())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 243, in next
response = self.backoff_inference(messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\backoff\_sync.py", line 105, in retry
ret = target(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\gpt_engineer\core\ai.py", line 287, in backoff_inference
return self.llm.invoke(messages) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 270, in invoke
self.generate_prompt(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 703, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 560, in generate
raise e
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 550, in generate
self._generate_with_cache(
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 775, in _generate_with_cache
result = self._generate(
^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 755, in _generate
return generate_from_stream(stream_iter)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_core\language_models\chat_models.py", line 102, in generate_from_stream
generation = next(stream, None)
^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\langchain_anthropic\chat_models.py", line 676, in _stream
stream = self._client.messages.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_utils\_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\resources\messages.py", line 902, in create
return self._post(
^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 1266, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 942, in request
return self._request(
^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 968, in _request
request = self._build_request(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 461, in _build_request
headers = self._build_headers(options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_base_client.py", line 416, in _build_headers
self._validate_headers(headers_dict, custom_headers)
File "C:\Users\BaimoQilin\AppData\Local\Programs\Python\Python312\Lib\site-packages\anthropic\_client.py", line 192, in _validate_headers
raise TypeError(
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"