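"""Log Summarizer: a small Streamlit front end for a RunPod serverless endpoint.

Logs are entered as a comma-separated list, sent to the endpoint's /runsync
route, and the job is polled until it reaches a terminal state.

Configuration (file location is standard Streamlit; the key name matches the
st.secrets lookup below): .streamlit/secrets.toml must define
    api_key = "<your RunPod API key>"
Run with: streamlit run <this_file>.py
"""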
import streamlit as st
import requests
import json
import time

st.title("Log Summarizer")


# Input for submitting new logs
new_transactions_input = st.text_area("Enter your logs (comma-separated)", key="input_area")
submit_button = st.button("Submit New Logs", type="primary")

# Session state to keep track of job ID and status
if "job_id" not in st.session_state:
    st.session_state.job_id = None
if "job_status" not in st.session_state:
    st.session_state.job_status = None

if submit_button:
    # Split the comma-separated logs and strip whitespace
    new_transactions = [i.strip() for i in new_transactions_input.split(',') if i.strip()]
else:
    new_transactions = []

# RunPod serverless endpoint base URL
base_url = "https://api.runpod.ai/v2/v1a28yp6h5vurs/"

# Retrieve the API key from Streamlit secrets and build the request headers.
# Defined at module level so the cancel handler below can also use them on reruns.
api_key = st.secrets["api_key"]
headers = {
    'Content-Type': 'application/json',
    'Authorization': api_key
}

# When the submit button is clicked and there are logs to process
if submit_button and new_transactions:
    url = base_url + "runsync"
    data = {
        'input': {
            'logs': new_transactions
        }
    }
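    # The serverless handler is assumed to read the submitted list from
    # event["input"]["logs"], i.e. the body sent is {"input": {"logs": [...]}}.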
    
    json_data = json.dumps(data)

    # Show a spinner while waiting for the response
    with st.spinner("Processing..."):
        try:
            # Send POST request to start processing
            response = requests.post(url, headers=headers, data=json_data)
            response.raise_for_status()  # Raise an error for bad status codes

            # Parse response to get job ID
            result = response.json()
            st.session_state.job_id = result['id']
            st.write(f"New Job ID: {st.session_state.job_id}")

            # Poll the job status until it reaches a terminal state
            status_url = f"{base_url}status/{st.session_state.job_id}"
            st.session_state.job_status = "IN_QUEUE"
            terminal_states = {"COMPLETED", "FAILED", "CANCELLED", "TIMED_OUT"}
            while True:
                status_response = requests.get(status_url, headers=headers)
                status_response.raise_for_status()
                status_data = status_response.json()
                st.session_state.job_status = status_data.get('status', '')
                if st.session_state.job_status in terminal_states:
                    break
                time.sleep(2)  # Polling interval; adjust as needed

            # Display the final status payload
            st.write("Final status:", status_data)
            
        except requests.exceptions.RequestException as e:
            st.error(f"An error occurred: {e}")

# Cancel button
if st.session_state.job_id and st.session_state.job_status == "IN_QUEUE":
    cancel_button = st.button("Cancel Request")
    if cancel_button:
        cancel_url = f"{base_url}cancel/{st.session_state.job_id}"
        try:
            cancel_response = requests.post(cancel_url, headers=headers)
            cancel_response.raise_for_status()
            cancel_result = cancel_response.json()
            st.session_state.job_status = "CANCELLED"
            st.write(f"Job {st.session_state.job_id} has been cancelled.")
        except requests.exceptions.RequestException as e:
            st.error(f"An error occurred while cancelling: {e}")

# Reset button: clear the job state and the input box. Updating the text area's
# value must happen in an on_click callback, before the widget is re-instantiated.
def reset_app():
    st.session_state.job_id = None
    st.session_state.job_status = None
    st.session_state.input_area = ""

st.button("Reset", key="reset_button", on_click=reset_app)