BadAtc0ding committed on
Commit
98f580f
·
1 Parent(s): 5dc0abf

added form submission handling

Browse files
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .env
__pycache__/config.cpython-310.pyc ADDED
Binary file (1.54 kB). View file
 
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import gradio as gr
 
2
  from services.huggingface import init_huggingface, update_dataset
3
  from services.json_generator import generate_json
4
  from ui.form_components import (
@@ -18,9 +19,13 @@ init_huggingface()
18
 
19
  # Create Gradio interface
20
  with gr.Blocks() as demo:
21
- gr.Markdown("## Data Collection Form")
22
  gr.Markdown("Welcome to this Huggingface space that helps you fill in a form for monitoring the energy consumption of an AI model.")
23
 
 
 
 
 
24
  # Create form tabs
25
  header_components = create_header_tab()
26
  task_components = create_task_tab()
@@ -31,6 +36,116 @@ with gr.Blocks() as demo:
31
  environment_components = create_environment_tab()
32
  quality_components = create_quality_tab()
33
  hash_components = create_hash_tab()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
 
35
  # Submit and Download Buttons
36
  submit_button = gr.Button("Submit")
@@ -59,5 +174,9 @@ with gr.Blocks() as demo:
59
  outputs=output
60
  )
61
 
 
 
62
  if __name__ == "__main__":
63
- demo.launch()
 
 
 
1
  import gradio as gr
2
+ import pandas as pd
3
  from services.huggingface import init_huggingface, update_dataset
4
  from services.json_generator import generate_json
5
  from ui.form_components import (
 
19
 
20
  # Create Gradio interface
21
  with gr.Blocks() as demo:
22
+ gr.Markdown("## ML-related Data Collection Form")
23
  gr.Markdown("Welcome to this Huggingface space that helps you fill in a form for monitoring the energy consumption of an AI model.")
24
 
25
+
26
+ csv_upload = gr.File(label="Upload CSV", file_types=[".csv"])
27
+ gr.Label("Please upload a CSV file with the data you want to analyze.")
28
+
29
  # Create form tabs
30
  header_components = create_header_tab()
31
  task_components = create_task_tab()
 
36
  environment_components = create_environment_tab()
37
  quality_components = create_quality_tab()
38
  hash_components = create_hash_tab()
39
+
40
+ # Gather all form components in the order they appear in the inputs
41
+
42
+ all_form_components = (
43
+ header_components # 11 items (indices 0-10)
44
+ + task_components # 28 items (indices 11-38)
45
+ + measures_components # 14 items (indices 39-52)
46
+ + system_components # 3 items (indices 53-55)
47
+ + software_components # 2 items (indices 56-57)
48
+ + infrastructure_components # 10 items (indices 58-67)
49
+ + environment_components # 7 items (indices 68-74)
50
+ + quality_components # 1 item (index 75)
51
+ + hash_components # 3 items (indices 76-78)
52
+ )
53
+
54
+ # Parse CSV and update form values
55
def parse_csv_and_update_form(csv_file, *current_values):
    """Pre-fill the form from the first data row of an uploaded emissions CSV.

    Args:
        csv_file: Gradio file object exposing a ``.name`` path, or ``None``
            when the upload is cleared.
        *current_values: Current values of all form components, in the same
            order as ``all_form_components`` (index map in section comments).

    Returns:
        list: Updated values for every form component. Parsing is
        best-effort: on failure, fields keep their current values.
    """
    updated_values = list(current_values)
    if csv_file is None:
        return updated_values

    def _to_int(value, default=0):
        # CSV cells may be NaN, empty, or "2.0"; one bad cell must not
        # abort the whole fill (the outer handler would otherwise stop
        # every field after the failing one).
        try:
            return int(float(value))
        except (TypeError, ValueError):
            return default

    try:
        df = pd.read_csv(csv_file.name)
        csv_data = df.iloc[0].to_dict()

        # ========== HEADER ==========
        updated_values[3] = csv_data.get('run_id', '')        # reportId (3)
        updated_values[4] = csv_data.get('timestamp', '')     # reportDatetime (4)
        updated_values[8] = csv_data.get('project_name', '')  # publisher_projectName (8)

        # ========== SYSTEM ==========
        updated_values[53] = csv_data.get('os', '')  # os (53)
        updated_values[54] = ""                      # distribution (54) — not in CSV
        updated_values[55] = ""                      # distributionVersion (55)

        # ========== MEASURES ==========
        updated_values[39] = csv_data.get('tracking_mode', '')    # measurementMethod (39)
        updated_values[47] = "kWh"                                # unit (47)
        updated_values[50] = csv_data.get('energy_consumed', '')  # powerConsumption (50)

        # Duration conversion (hours -> seconds).
        if 'duration' in csv_data:
            try:
                hours = float(csv_data['duration'])
                updated_values[51] = str(round(hours * 3600, 2))  # measurementDuration (51)
            except (TypeError, ValueError):
                updated_values[51] = ""

        updated_values[52] = ""  # measurementDateTime (52) — not in CSV

        # ========== SOFTWARE ==========
        updated_values[56] = "Python"                              # language (56)
        updated_values[57] = csv_data.get('python_version', '')    # version_software (57)

        # ========== INFRASTRUCTURE ==========
        on_cloud = str(csv_data.get('on_cloud', 'No')).lower().strip()
        is_cloud = on_cloud == "yes"
        updated_values[58] = "publicCloud" if is_cloud else "onPremise"        # infraType (58)
        updated_values[59] = csv_data.get('cloud_provider', '') if is_cloud else ""  # (59)
        updated_values[60] = csv_data.get('cloud_region', '') if is_cloud else ""    # (60)

        # Component logic (61-67): prefer GPU, fall back to CPU.
        gpu_count = _to_int(csv_data.get('gpu_count', 0))
        cpu_count = _to_int(csv_data.get('cpu_count', 0))
        if gpu_count > 0:
            updated_values[61] = "GPU"            # componentName (61)
            updated_values[62] = str(gpu_count)   # nbComponent (62)
            model = csv_data.get('gpu_model', '')
        elif cpu_count > 0:
            updated_values[61] = "CPU"            # componentName (61)
            updated_values[62] = str(cpu_count)   # nbComponent (62)
            model = csv_data.get('cpu_model', '')
        else:
            model = ""

        # Memory size (63) — tolerate missing / non-numeric values instead of
        # letting float() raise and abort the remaining fields.
        ram_size = csv_data.get('ram_total_size', '')
        try:
            updated_values[63] = f"{ram_size} GB" if float(ram_size) > 0 else ""
        except (TypeError, ValueError):
            updated_values[63] = ""

        # Split model into manufacturer/family/series (64-66).
        if model:
            parts = model.replace("(R)", "").replace("(TM)", "").split()
            updated_values[64] = parts[0] if parts else ""  # manufacturer_infra (64)
            # Fix: a two-token model (e.g. "Intel Xeon") previously lost its
            # family because of a `len(parts) >= 3` guard; any token after the
            # manufacturer belongs to the family.
            updated_values[65] = " ".join(parts[1:3]) if len(parts) >= 2 else ""  # family (65)
            updated_values[66] = " ".join(parts[3:]) if len(parts) > 3 else ""    # series (66)
        else:
            updated_values[64] = updated_values[65] = updated_values[66] = ""

        updated_values[67] = ""  # share (67) — not in CSV

        # ========== ENVIRONMENT ==========
        updated_values[68] = csv_data.get('country_name', '')  # country (68)
        updated_values[69] = csv_data.get('latitude', '')      # latitude (69)
        updated_values[70] = csv_data.get('longitude', '')     # longitude (70)
        updated_values[71] = csv_data.get('region', '')        # location (71)

    except Exception as e:
        # Best-effort: keep whatever was filled so far rather than crash the UI.
        print(f"CSV Processing Error: {str(e)}")

    return updated_values
142
+
143
+ # Wire the CSV upload event so the parsed values populate the form components
144
+ csv_upload.change(
145
+ fn=parse_csv_and_update_form,
146
+ inputs=[csv_upload] + all_form_components,
147
+ outputs=all_form_components
148
+ )
149
 
150
  # Submit and Download Buttons
151
  submit_button = gr.Button("Submit")
 
174
  outputs=output
175
  )
176
 
177
+
178
+
179
  if __name__ == "__main__":
180
+ demo.launch()
181
+
182
+
services/__pycache__/huggingface.cpython-310.pyc ADDED
Binary file (6.44 kB). View file
 
services/__pycache__/json_generator.cpython-310.pyc ADDED
Binary file (3.23 kB). View file
 
services/huggingface.py CHANGED
@@ -1,3 +1,6 @@
 
 
 
1
  from huggingface_hub import login
2
  from datasets import load_dataset, Dataset, concatenate_datasets
3
  import json
 
1
from dotenv import load_dotenv

# Load environment variables before config.py is imported (it reads them at
# import time). With no argument, load_dotenv() searches upward from the
# current working directory for a .env file, so this no longer depends on a
# hardcoded path on one developer's machine
# (was: '/home/hlymly/Documents/rse/group1/EcoMindAI/.env').
load_dotenv()
3
+
4
  from huggingface_hub import login
5
  from datasets import load_dataset, Dataset, concatenate_datasets
6
  import json
ui/__pycache__/form_components.cpython-310.pyc ADDED
Binary file (13.7 kB). View file
 
utils/__pycache__/validation.cpython-310.pyc ADDED
Binary file (966 Bytes). View file