import gradio as gr
from llama_cpp import Llama

# Load the SecurityLLM model
try:
    llm = Llama.from_pretrained(
        repo_id="QuantFactory/SecurityLLM-GGUF",
        filename="SecurityLLM.Q5_K_M.gguf",  # Ensure the file path is correct
    )
except Exception as e:
    raise RuntimeError(f"Failed to load model: {e}")

# Function to match a CV against one or more job descriptions
def match_cv_to_jobs(cv_text, job_descriptions):
    # Split job descriptions by line, skipping blank lines
    descriptions = [d for d in job_descriptions.strip().split("\n") if d.strip()]
    results = []

    for description in descriptions:
        # Create a prompt to compare the CV with each job description
        prompt = (
            f"Compare the following job description with this resume. Job Description: {description}. "
            f"Resume: {cv_text}. Provide a match score and a brief analysis."
        )

        # Generate a response from the model
        response = llm.create_chat_completion(
            messages=[
                {
                    "role": "user",
                    "content": prompt
                }
            ]
        )

        # Extract and store the analysis text
        analysis_text = response["choices"][0]["message"]["content"]
        results.append({
            "Job Description": description,
            "Analysis": analysis_text
        })

    return results

# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# CV to Job Description Matcher")

    cv_text = gr.Textbox(label="CV Text", placeholder="Enter the CV text here", lines=10)
    job_descriptions = gr.Textbox(
        label="Job Descriptions (one per line)",
        placeholder="Enter each job description on a new line",
        lines=5,
    )
    match_button = gr.Button("Match CV to Job Descriptions")
    output = gr.JSON(label="Match Results")

    match_button.click(fn=match_cv_to_jobs, inputs=[cv_text, job_descriptions], outputs=output)

demo.launch()
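
The matcher can also be sanity-checked directly from a Python session before (or instead of) launching the Gradio interface. A minimal sketch, assuming the model above loaded successfully; the sample CV and job description strings are hypothetical placeholders, and this snippet is meant as a separate quick test rather than part of app.py:

# Quick local test of match_cv_to_jobs (sample inputs are hypothetical).
sample_cv = "Security analyst with five years of SIEM and incident response experience."
sample_jobs = "SOC analyst with SIEM experience\nPenetration tester with web application focus"

for result in match_cv_to_jobs(sample_cv, sample_jobs):
    print(result["Job Description"])
    print(result["Analysis"])
    print("-" * 40)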