morganacryl committed
Commit b3eca82 · verified · 1 Parent(s): 00e7089

Upload run.py

Files changed (1)
  1. run.py +44 -0
run.py ADDED
@@ -0,0 +1,44 @@
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

model_id = "Acryl-Jonathan/coder-0.1"

# Load the tokenizer and model; bfloat16 keeps GPU memory usage modest.
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id, trust_remote_code=True, torch_dtype=torch.bfloat16
).cuda()

# Alpaca-style template: system message first, then the user instruction.
prompt = """{}
### Instruction:
{}
### Response:"""

system_message = """You are an expert in C/C++ debugging. Please detect the erroneous code and propose guidelines for fixing it.
#### IMPORTANT RULES
1. Only use Korean.
2. Organize the detected errors clearly, in order of their code lines.
3. Describe how you detected each error and the measures taken to correct it.
4. Comment the line number of each detected error.
#### Final answer
detected error code line : line number
corrected error
correcting guidelines
"""

# Intentionally buggy C++ snippet for the model to analyze: undeclared variable,
# wrong logical operator, stray semicolon after the for loop, and a non-integer return value.
user_message = """```cpp
#include <iostream>
using namespace std;
int main() {
    int a, b;
    cout << "Enter two numbers: ";
    cin >> a >> c;
    if (a > 0 || b > 0) {
        cout << "Both numbers are positive." << endl;
    } else {
        cout << "At least one number is not positive." << endl;
    }
    for (int i = 0; i < 5; i++); {
        cout << "i: " << i << endl;
    }
    return "Done";
}```
"""

input_prompt = prompt.format(system_message, user_message)
inputs = tokenizer(input_prompt, return_tensors="pt").to(model.device)
# max_new_tokens bounds only the generated answer; max_length would also count the prompt tokens.
outputs = model.generate(**inputs, max_new_tokens=512)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))