abrakjamson committed
Commit f655011 · 1 Parent(s): 172c019

Initial commit

Files changed (4)
  1. anger.gguf +0 -0
  2. app.py +163 -0
  3. requirements.txt +0 -0
  4. truthful.gguf +0 -0
anger.gguf ADDED
Binary file (509 kB).
 
app.py ADDED
@@ -0,0 +1,163 @@
+ import os
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ from repeng import ControlVector, ControlModel
+ import gradio as gr
+
+ # Initialize model and tokenizer
+ mistral_path = "./models/mistral"  # Update this path as needed
+
+ tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
+ #tokenizer = AutoTokenizer.from_pretrained("E:/language_models/models/mistral")
+ tokenizer.pad_token_id = 0
+
+ model = AutoModelForCausalLM.from_pretrained(
+     mistral_path,
+     torch_dtype=torch.float16,
+     trust_remote_code=True,
+     use_safetensors=True
+ )
+ model = model.to("cuda:0" if torch.cuda.is_available() else "cpu")
+ model = ControlModel(model, list(range(-5, -18, -1)))
+
+ # Generation settings
+ generation_settings = {
+     "pad_token_id": tokenizer.eos_token_id,  # Silence warning
+     "do_sample": False,  # Deterministic output
+     "max_new_tokens": 256,
+     "repetition_penalty": 1.1,  # Reduce repetition
+ }
+
+ # Tags for prompt formatting
+ user_tag, asst_tag = "[INST]", "[/INST]"
+
+ # List available control vectors
+ control_vector_files = [f for f in os.listdir('.') if f.endswith('.gguf')]
+
+ if not control_vector_files:
+     raise FileNotFoundError("No .gguf control vector files found in the current directory.")
+
+ # Function to toggle slider visibility based on checkbox state
+ def toggle_slider(checked):
+     return gr.update(visible=checked)
+
+ # Function to generate the model's response
+ def generate_response(system_prompt, user_message, *args):
+     # args contains all checkbox values, then all slider values, then the history state
+     num_controls = len(control_vector_files)
+     checkboxes = args[:num_controls]               # Checkbox values, in file order
+     sliders = args[num_controls:2 * num_controls]  # Slider weights, in the same order
+     history = args[-1] or []                       # Conversation history from gr.State
+
+     # Reset any previous control vectors
+     model.reset()
+
+     # Apply selected control vectors with their corresponding weights
+     for i in range(num_controls):
+         if checkboxes[i]:
+             cv_file = control_vector_files[i]
+             weight = sliders[i]
+             try:
+                 control_vector = ControlVector.import_gguf(cv_file)
+                 model.set_control(control_vector, weight)
+             except Exception as e:
+                 print(f"Failed to set control vector {cv_file}: {e}")
+
+     # Format the prompt
+     if system_prompt.strip():
+         formatted_prompt = f"{system_prompt}\n{user_tag}{user_message}{asst_tag}"
+     else:
+         formatted_prompt = f"{user_tag}{user_message}{asst_tag}"
+
+     # Tokenize the input
+     input_ids = tokenizer(formatted_prompt, return_tensors="pt").to(model.device)
+
+     # Generate the response and decode only the newly generated tokens
+     output_ids = model.generate(**input_ids, **generation_settings)
+     prompt_length = input_ids["input_ids"].shape[1]
+     response = tokenizer.decode(output_ids[0][prompt_length:], skip_special_tokens=True)
+
+     # Update conversation history (returned to both the chatbot and the state)
+     history.append((user_message, response))
+     return history, history
+
+ # Function to reset the conversation history
+ def reset_chat():
+     return [], []
+
+ # Build the Gradio interface
+ with gr.Blocks() as demo:
+     gr.Markdown("# 🧠 Language Model Interface")
+
+     with gr.Row():
+         with gr.Column(scale=1):
+             # System Prompt Input
+             system_prompt = gr.Textbox(
+                 label="System Prompt",
+                 lines=2,
+                 placeholder="Enter system-level instructions here..."
+             )
+
+             # User Message Input
+             user_input = gr.Textbox(
+                 label="User Message",
+                 lines=2,
+                 placeholder="Type your message here..."
+             )
+
+             gr.Markdown("### 📊 Control Vectors")
+
+             # Create checkboxes and sliders for each control vector
+             control_checks = []
+             control_sliders = []
+             for cv_file in control_vector_files:
+                 with gr.Row():
+                     # Checkbox to select the control vector
+                     checkbox = gr.Checkbox(label=cv_file, value=False)
+                     control_checks.append(checkbox)
+
+                     # Slider to adjust the control vector's weight
+                     slider = gr.Slider(
+                         minimum=-2.5,
+                         maximum=2.5,
+                         value=0.0,
+                         step=0.1,
+                         label=f"{cv_file} Weight",
+                         visible=False
+                     )
+                     control_sliders.append(slider)
+
+                     # Link the checkbox to toggle slider visibility
+                     checkbox.change(
+                         toggle_slider,
+                         inputs=checkbox,
+                         outputs=slider
+                     )
+
+             with gr.Row():
+                 # Submit and New Chat buttons
+                 submit_button = gr.Button("💬 Submit")
+                 new_chat_button = gr.Button("🆕 New Chat")
+
+         with gr.Column(scale=2):
+             # Chatbot to display conversation
+             chatbot = gr.Chatbot(label="🗨️ Conversation")
+
+     # State to keep track of conversation history
+     state = gr.State([])
+
+     # Define button actions
+     submit_button.click(
+         generate_response,
+         inputs=[system_prompt, user_input] + control_checks + control_sliders + [state],
+         outputs=[chatbot, state]
+     )
+
+     new_chat_button.click(
+         reset_chat,
+         inputs=[],
+         outputs=[chatbot, state]
+     )
+
+ # Launch the Gradio app
+ if __name__ == "__main__":
+     demo.launch()
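
For reference, a minimal sketch of how one of the committed vectors can be applied outside the Gradio UI, reusing the model, tokenizer, tags, and generation settings defined in app.py above. The example prompt and the 1.5 weight are illustrative assumptions, not values from this commit; only repeng calls already used in app.py appear here.

# Sketch: apply a single control vector directly (assumes app.py's globals are loaded).
anger = ControlVector.import_gguf("anger.gguf")  # One of the vectors added in this commit
model.reset()                                    # Clear any previously applied control
model.set_control(anger, 1.5)                    # Illustrative weight; the UI slider spans -2.5 to 2.5

prompt = f"{user_tag} How was your weekend? {asst_tag}"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, **generation_settings)
print(tokenizer.decode(output.squeeze(), skip_special_tokens=True))

model.reset()                                    # Remove the control when done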
requirements.txt ADDED
Binary file (3.54 kB).
 
truthful.gguf ADDED
Binary file (509 kB).