import os

import gradio as gr
from paperqa import Docs

css_style = """

.gradio-container {
    font-family: "IBM Plex Mono";
}

.answerText p {
    font-size: 24px !important;
    color: #8dbcfe !important;
}
"""


def run(uploaded_files):
    # Keep only the uploaded files that are PDFs; anything else is dropped.
    all_files = []
    if uploaded_files is None:
        return all_files
    for file in uploaded_files:
        if file.name.endswith('.pdf'):
            all_files.append(file.name)
    print(all_files)
    return all_files


def createAnswer(files, designation, openaikey):
    # paperqa reads the key from the environment.
    os.environ['OPENAI_API_KEY'] = openaikey.strip()
    docs = Docs(llm='gpt-3.5-turbo')
    for d in files:
        docs.add(d.name)  # index each uploaded resume
    answer = docs.query(
        f"Who is the best candidate to hire for {designation}? Provide a list with the "
        "candidate name. If you don't know, simply say none of the candidates are "
        "suited for the job role.")
    print(answer.formatted_answer)
    return answer.answer
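

# A minimal sketch (not wired into the UI), assuming the same paperqa Docs API
# used above: embed the resumes once and query them for several designations,
# instead of rebuilding the index per question. The helper name
# `rank_for_positions` is hypothetical.
def rank_for_positions(files, designations, openaikey):
    os.environ['OPENAI_API_KEY'] = openaikey.strip()
    docs = Docs(llm='gpt-3.5-turbo')
    for d in files:
        docs.add(d.name)  # index each resume a single time
    results = {}
    for role in designations:
        answer = docs.query(
            f"Who is the best candidate to hire for {role}? Provide a list with the "
            "candidate name. If you don't know, simply say none of the candidates "
            "are suited for the job role.")
        results[role] = answer.answer
    return results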


with gr.Blocks(css=css_style) as demo:
    gr.Markdown("""
    # HR-GPT - Filter & Find The Best Candidate for the Job using AI

    *By Amin Memon ([@AminMemon](https://twitter.com/AminMemon))*

    This tool lets you ask questions of your uploaded PDF documents (resumes).
    It uses OpenAI's ChatGPT model and OpenAI Embeddings, so you must enter your API key below.

    This tool is under active development and currently uses many tokens (up to 10,000
    for a single query). That is $0.10-0.20 per query, so please be careful!
    Porting to Llama.cpp is planned to cut costs.

    1. Enter your API key ([What is that?](https://platform.openai.com/account/api-keys))
    2. Upload your resumes (try with a few, fewer than 5, to start)
    3. Provide the designation you are hiring for
    """)

    openaikey = gr.Text(
        label='Your OpenAI API key', value="")
    position = gr.Text(
        label='Position/designation you are hiring for', value="")

    with gr.Tab('File Upload'):
        uploaded_files = gr.File(
            label="Resume Upload - ONLY PDF. (Doc File Support Coming Soon)", file_count="multiple", show_progress=True)

    uploaded_files.change(
        fn=run, inputs=[uploaded_files], outputs=[uploaded_files])
    ask = gr.Button("Find Top Candidate")
    answer = gr.Markdown(label="Result", elem_classes='answerText')
    ask.click(fn=createAnswer, inputs=[
              uploaded_files, position, openaikey], outputs=[answer])

demo.queue(concurrency_count=20)
demo.launch(show_error=True)
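
# Optional deployment note (an assumption, not part of the original app):
# Gradio's launch() also accepts share=True to expose a temporary public URL,
# e.g. demo.launch(show_error=True, share=True), handy for quick demos.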