import os

import gradio as gr
from gradio_client import Client


# Hugging Face access token, read from the HF_TOKEN environment variable
HF_TOKEN = os.getenv("HF_TOKEN")
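# NOTE: set the token before launching, e.g. `export HF_TOKEN=hf_xxx` in the shell,
# or add it as a repository secret named HF_TOKEN in the Space settings (assumed
# to be needed in case the remote Space is private or rate-limited).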

# Initialize the Gradio Client for the specified API
client = Client("mangoesai/Elections_Comparison_Agent_V4", hf_token=HF_TOKEN)
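# client.predict() forwards each call to a named endpoint ("/process_query",
# "/heatmap") exposed by that Space and blocks until the remote app returns.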

# Valid election_year values accepted by the remote API:
# "2016 Election", "2024 Election", "Comparison two years"


def stream_chat_with_rag(message: str, client_name: str):
    """Send a question to the remote RAG agent and return its answer and topic figure."""
    answer, fig = client.predict(
        query=message,
        election_year=client_name,
        api_name="/process_query"
    )

    # Debugging: print the raw response from the API
    print("Raw answer from API:")
    print(answer)
    print("Topic figure from API:")
    print(fig)

    return answer, fig
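# The (answer, fig) pair returned above is wired to the (output_text, output_plot)
# components in the Blocks layout below.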


    

def heatmap(top_n):
    """Fetch the sentiment/topic heatmap for the top N topics from the remote API."""
    fig = client.predict(
        top_n=top_n,
        api_name="/heatmap"
    )
    print(type(fig))  # Debugging: confirm what kind of plot object the API returns
    return fig
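# Hypothetical quick check, e.g. from a Python shell with HF_TOKEN set:
#   fig = heatmap(5)   # returns the heatmap figure for the top 5 topics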

# Create Gradio interface
with gr.Blocks(title="Reddit Election Analysis") as demo:
    gr.Markdown("# Reddit Public Sentiment & Social Topic Distribution")
    with gr.Row():
        with gr.Column():
            with gr.Row():
                top_n = gr.Dropdown(
                    choices=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
                    label="Number of top topics"
                )
            with gr.Row():
                fresh_btn = gr.Button("Refresh Heatmap")
        with gr.Column():
            output_heatmap = gr.Plot(
                label="Top Public Sentiment & Social Topic Heatmap",
                container=True,  # Ensures the plot is contained within its area
                elem_classes="heatmap-plot"  # Add a custom class for styling
            )

    gr.Markdown("# Reddit Election Posts/Comments Analysis")
    gr.Markdown("Ask questions about election-related comments and posts")

    with gr.Row():
        with gr.Column():
            year_selector = gr.Radio(
                choices=["2016 Election", "2024 Election", "Comparison two years"],
                label="Select Election Year",
                value="2016 Election"
            )

            query_input = gr.Textbox(
                label="Your Question",
                placeholder="Ask about election comments or posts..."
            )

            submit_btn = gr.Button("Submit")

            gr.Markdown("""
            ## Example Questions:
            - Are there any comments that dislike the election results?
            - Summarize the main discussions about the voting process
            - What are the common opinions about the candidates?
            """)
        with gr.Column():
            output_text = gr.Textbox(
                label="Response",
                lines=20
            )

    with gr.Row():
        output_plot = gr.Plot(
            label="Topic Distribution",
            container=True,  # Ensures the plot is contained within its area
            elem_classes="topic-plot"  # Add a custom class for styling
        )

    # Add custom CSS to ensure proper plot sizing
    gr.HTML("""
        <style>
            .topic-plot {
                min-height: 600px;
                width: 100%;
                margin: auto;
            }
            .heatmap-plot {
                min-height: 400px;
                width: 100%;
                margin: auto;
            }
        </style>
    """)
    # Refresh the heatmap with the selected top-N value when the button is clicked
    fresh_btn.click(
        fn=heatmap,
        inputs=top_n,
        outputs=output_heatmap
    )

    # Update both outputs when submit is clicked
    submit_btn.click(
        fn=stream_chat_with_rag,
        inputs=[query_input, year_selector],
        outputs=[output_text, output_plot]
    )


if __name__ == "__main__":
    demo.launch(share=True)