import gradio as gr
from llama_cpp import Llama
from transformers import pipeline
import json

# Load the Llama model
try:
    llm = Llama.from_pretrained(
        repo_id="HuggingFaceTB/SmolLM2-360M-Instruct-GGUF",
        filename="smollm2-360m-instruct-q8_0.gguf"  # Replace with the correct path to your GGUF file
    )
except Exception as e:
    raise RuntimeError(f"Failed to load model: {e}")

# Load summarization model
summarizer = pipeline("summarization")
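# Note: no checkpoint is pinned here, so transformers falls back to its default summarization
# model (and prints a warning); pass model=... to make the choice explicit and reproducible.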

# Summarize text to fit within token limits
def summarize_text(text, max_length=100):
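    """Condense `text` with the summarization pipeline so prompts stay within the token budget."""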
    # Return short inputs unchanged; summarizing them adds no value and can trigger warnings
    if len(text.split()) <= max_length:
        return text
    # Truncate over-long inputs so they fit the summarizer's maximum input length
    summary = summarizer(text, max_length=max_length, min_length=25, do_sample=False, truncation=True)
    return summary[0]["summary_text"]

# Function to match CV to job descriptions with debug information
def match_cv_to_jobs(cv_text, job_descriptions):
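    """Summarize the CV and each job description, then ask the LLM to rate each match.

    Returns a list of per-description results and a debug string containing the
    intermediate summaries, prompts, and raw model responses.
    """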
    debug_info = "Debug Info:\n"
    results = []
    
    # Summarize `cv_text` and `job_descriptions` to manage token limits
    summarized_cv = summarize_text(cv_text, max_length=400)
    debug_info += f"Summarized CV Text: {summarized_cv}\n"
    
    # Split on newlines and drop blank lines so empty descriptions are not sent to the model
    descriptions = [d.strip() for d in job_descriptions.strip().split("\n") if d.strip()]
    for description in descriptions:
        summarized_description = summarize_text(description, max_length=100)
        debug_info += f"\nSummarized Job Description: {summarized_description}\n"
        
        # Create a prompt to compare the summarized CV with each summarized job description
        prompt = (
            f"Compare the following job description with this resume. Job Description: {summarized_description}. "
            f"Resume: {summarized_cv}. Provide a match score and a brief analysis."
        )
        debug_info += f"\nGenerated Prompt: {prompt}\n"
        
        # Generate response from the model
        try:
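            # llama-cpp-python returns an OpenAI-style chat completion dict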
            response = llm.create_chat_completion(
                messages=[
                    {
                        "role": "user",
                        "content": prompt
                    }
                ]
            )
            
            # Extract the analysis text
            response_content = response["choices"][0]["message"]["content"]
            debug_info += f"Model Response: {response_content}\n"
            
            # Attempt to parse as JSON; if not JSON, use the raw text
            try:
                response_data = json.loads(response_content)
                results.append(response_data)
            except json.JSONDecodeError:
                results.append({
                    "Job Description": description,
                    "Analysis": response_content
                })
        except Exception as e:
            debug_info += f"Error: {str(e)}\n"
            results.append({"Job Description": description, "Error": str(e)})
    
    return results, debug_info

# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("# CV and Job Description Matcher with Summarization and Debugging")
    
    # Input fields for CV and job descriptions
    cv_text = gr.Textbox(label="CV Text", placeholder="Enter the CV text here", lines=10)
    job_descriptions = gr.Textbox(label="Job Descriptions (one per line)", placeholder="Enter each job description on a new line", lines=5)
    
    # Button and output area
    match_button = gr.Button("Match CV to Job Descriptions")
    output = gr.JSON(label="Match Results")
    debug_output = gr.Textbox(label="Debug Info", lines=10)  # Shows summaries, prompts, and raw model responses for troubleshooting
    
    # Set button click to run the function
    match_button.click(fn=match_cv_to_jobs, inputs=[cv_text, job_descriptions], outputs=[output, debug_output])

demo.launch()