# streamlit / data.py
# Author: SUNGJIN LEE — "Initial commit" (312b9eb), 1.48 kB
# (header reconstructed from file-viewer page chrome: raw / history / blame)
import pandas as pd
import streamlit as st
from google.oauth2.service_account import Credentials
from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseDownload
import io
# Google Drive API authentication.
# NOTE(review): only the Drive read-only scope is requested here — the original
# comment also mentioned the Sheets API, but no Sheets scope/client is set up.
scope = [
    "https://www.googleapis.com/auth/drive.readonly"
]
# Service-account credentials come from Streamlit secrets under the "google" key;
# the Drive file to download is identified by secrets["drive"]["file_id"].
credentials = Credentials.from_service_account_info(st.secrets["google"], scopes=scope)
file_id = st.secrets["drive"]["file_id"]
# Drive v3 client used by load_data() to stream the CSV file.
drive_service = build('drive', 'v3', credentials=credentials)
@st.cache_data(show_spinner=False)
def load_data():
    """Stream the CSV from Google Drive and return ``(df, df_map)``.

    Returns:
        df: the full dataset, with ``ru_svc_lat_val`` / ``ru_svc_lng_val``
            coerced to float.
        df_map: ``df`` reduced to one row per unique (lat, lng) pair,
            suitable for map plotting.

    A download progress bar and status text are rendered in the sidebar
    while chunks arrive, then cleared and replaced with a success message.
    NOTE(review): these sidebar widgets live inside a cached function, so
    they only appear on a cache miss — confirm that is the intended UX.
    """
    buf = io.BytesIO()
    request = drive_service.files().get_media(fileId=file_id)
    downloader = MediaIoBaseDownload(buf, request)

    # Sidebar progress UI, updated once per downloaded chunk.
    bar = st.sidebar.progress(0)
    label = st.sidebar.empty()
    label.text("๋ฐ์ดํ„ฐ ๋ถˆ๋Ÿฌ์˜ค๋Š” ์ค‘...")

    finished = False
    while not finished:
        status, finished = downloader.next_chunk()
        pct = int(status.progress() * 100)
        bar.progress(pct)
        label.text(f"๋ฐ์ดํ„ฐ ๋ถˆ๋Ÿฌ์˜ค๋Š” ์ค‘...({pct}%)")

    # Parse the downloaded bytes as CSV and normalise the coordinate columns.
    buf.seek(0)
    df = pd.read_csv(buf)
    for col in ('ru_svc_lat_val', 'ru_svc_lng_val'):
        df[col] = df[col].astype(float)
    df_map = df.drop_duplicates(subset=['ru_svc_lat_val', 'ru_svc_lng_val'])

    # Tear down the progress UI and confirm completion.
    bar.empty()
    label.empty()
    st.sidebar.success("๋ฐ์ดํ„ฐ ๋กœ๋“œ ์™„๋ฃŒ!")
    return df, df_map