Update app.py
app.py CHANGED
@@ -5,6 +5,8 @@ import os
 import google.generativeai as genai
 import zipfile
 from git import Repo
+from transformers import AutoModelForCausalLM, AutoTokenizer
+import torch


 # Database setup
@@ -267,6 +269,63 @@ def identify_required_functions(project_path, functionality_description):
     # Process and return the response
     return response.text

+
+# Load Hugging Face model and tokenizer
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
+hf_model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct", device_map="auto")
+
+def validate_and_generate_documentation(hf_model, tokenizer, gemini_output, file_contents, functionality_description):
+    """Uses Hugging Face model to validate functions and generate full documentation."""
+    # Generate the prompt for the Hugging Face model
+    prompt = f"""
+    The user-provided functionality: '{functionality_description}'
+    The functions identified by Gemini:
+    {gemini_output}
+
+    Project files:
+    """
+    for file_path, content in file_contents.items():
+        prompt += f"File: {os.path.basename(file_path)}\n{content[:1000]}...\n\n"  # Truncate large files for the model
+
+    prompt += """
+    Task:
+    1. Validate if the functions identified by Gemini are sufficient for implementing the functionality.
+    2. If not, identify all additional functions required.
+    3. For all relevant functions, generate detailed documentation in the following format:
+    """
+    prompt += """
+    Project Summary:
+    Summary of the entire project, making sure to mention the language it's programmed in and any libraries or other dependencies it has
+
+    Functionality Summary:
+    Summary of the user-specified functionality
+
+    Functionality Flow:
+    How the programmer goes from inputting information into the first function to the last function and its output to complete
+    the functionality that is described by the user. Make sure to mention each function that is used, and how inputs and outputs flow between each other.
+
+    Function Documentation:
+    For each file that contains the relevant functions:
+        For each function determined as relevant within the current file:
+            Summary:
+            summarize what the function does
+            Inputs:
+            the inputs and their data types, and their relevance in the scope of the specified functionality
+            Outputs:
+            the output, its data type, and its relevance in the scope of the specified functionality
+            Dependencies:
+            the dependencies of the function and where they come from
+            Data structures:
+            the data structures that the function relies on
+    """
+    # Encode and call the Hugging Face model
+    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=2048).to(hf_model.device)
+    outputs = hf_model.generate(inputs["input_ids"], max_length=4096, num_return_sequences=1)
+
+    # Decode the response
+    decoded_output = tokenizer.decode(outputs[0], skip_special_tokens=True)
+    return decoded_output
+
 def generate_documentation_page():
     st.subheader(f"Generate Documentation for {st.session_state.current_project}")
     st.write("Enter the functionality or parts of the project for which you'd like to identify relevant functions.")
@@ -283,18 +342,23 @@ def generate_documentation_page():
         user_folder = os.path.join("user_projects", st.session_state.username)
         project_folder = os.path.join(user_folder, st.session_state.current_project)

-        # Ensure compatibility with GitHub repositories
         if os.path.exists(project_folder):
             try:
-                # Call
-
-
-                #
-
-
-
-
-
+                # Call Gemini to identify required functions
+                gemini_result = identify_required_functions(project_folder, functionality)
+
+                # Read project files
+                file_paths = read_project_files(project_folder)
+                file_contents = read_files(file_paths)
+
+                # Call Hugging Face model for validation and documentation
+                final_documentation = validate_and_generate_documentation(
+                    hf_model, tokenizer, gemini_result, file_contents, functionality
+                )
+
+                # Display the final documentation
+                st.success("Documentation generated successfully!")
+                st.text_area("Generated Documentation", final_documentation, height=600)
             except Exception as e:
                 st.error(f"An error occurred: {e}")
         else:
@@ -309,6 +373,7 @@ def generate_documentation_page():



+
 #------------------------------------------------------------------------------------------------------------------------------------------------------------------------


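Note: the new try block calls read_project_files and read_files, which are not shown in this diff and presumably already exist elsewhere in app.py. As a reference point, a minimal sketch of the shape those helpers would need so that validate_and_generate_documentation receives a path-to-content mapping; the extension filter and error handling below are assumptions, not the project's actual implementation.

import os

# Hypothetical helpers matching the call sites in the diff; the real versions
# live elsewhere in app.py and may differ.
def read_project_files(project_path):
    """Collect paths of source files under the project folder."""
    exts = (".py", ".js", ".ts", ".java", ".c", ".cpp", ".h")  # assumed filter
    file_paths = []
    for root, _, files in os.walk(project_path):
        for name in files:
            if name.endswith(exts):
                file_paths.append(os.path.join(root, name))
    return file_paths

def read_files(file_paths):
    """Map each file path to its text content, skipping undecodable bytes."""
    contents = {}
    for path in file_paths:
        with open(path, "r", encoding="utf-8", errors="ignore") as f:
            contents[path] = f.read()
    return contents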
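One detail worth flagging in the added validate_and_generate_documentation: the prompt is truncated to 2048 tokens, while generate() is called with max_length=4096. In transformers, max_length counts the prompt tokens as well, so at most about 2048 new tokens can be produced, and decoding outputs[0] echoes the prompt back into the returned documentation. A minimal sketch of an alternative generation step, assuming the same tokenizer and hf_model objects as in the diff (the placeholder prompt is only for illustration):

# Assumes tokenizer and hf_model are already loaded as in the diff.
prompt = "Validate the identified functions and write the documentation."  # placeholder

inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=2048).to(hf_model.device)
outputs = hf_model.generate(
    input_ids=inputs["input_ids"],
    attention_mask=inputs["attention_mask"],  # avoids the missing attention-mask warning
    max_new_tokens=2048,                      # budget applies to generated tokens only
    do_sample=False,                          # deterministic output suits documentation
)
# Strip the echoed prompt so only the newly generated text is returned.
decoded_output = tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)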
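Loading Qwen/Qwen2.5-Coder-32B-Instruct at import time is also a heavy operation: in 16-bit precision the weights alone are on the order of 60 GB or more, so device_map="auto" will offload to CPU or run out of memory on smaller GPUs. If the deployment target is a single GPU, one possible workaround (an assumption about the environment, not something this commit does) is a 4-bit quantized load via bitsandbytes:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Sketch: 4-bit quantized load of the same checkpoint (requires the bitsandbytes package).
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-Coder-32B-Instruct")
hf_model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    quantization_config=bnb_config,
    device_map="auto",
)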