muellerzr (HF staff) committed
Commit 08f37e8
Parent: 6b9c5c7

Right column name

Files changed (2):
  1. src/app.py (+3, -3)
  2. src/model_utils.py (+1, -1)
src/app.py CHANGED
@@ -17,13 +17,13 @@ def get_results(model_name: str, library: str, options: list, access_token: str)
     stages = {"model": [], "gradients": [], "optimizer": [], "step": []}
     for i, option in enumerate(data):
         for stage in stages:
-            stages[stage].append(option["Training using Adam"][stage])
-        value = max(data[i]["Training using Adam"].values())
+            stages[stage].append(option["Training using Adam (Peek vRAM)"][stage])
+        value = max(data[i]["Training using Adam (Peek vRAM)"].values())
         if value == -1:
             value = "N/A"
         else:
             value = convert_bytes(value)
-        data[i]["Training using Adam"] = value
+        data[i]["Training using Adam (Peek vRAM)"] = value

     if any(value != -1 for value in stages["model"]):
         out_explain = "## Training using Adam explained:\n"
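
Note: the value stored under the renamed "Training using Adam (Peek vRAM)" key is a per-stage mapping that get_results reduces to a single peak figure. The sketch below illustrates that reduction under assumed inputs; the sample byte counts and the simplified convert_bytes stand-in are illustrative assumptions, not taken from the app itself.

# Illustrative only: assumed shape of one per-dtype entry, where each training
# stage maps to a byte count and -1 marks "not measurable".
entry = {
    "Training using Adam (Peek vRAM)": {
        "model": 500_000_000,
        "gradients": 500_000_000,
        "optimizer": 2_000_000_000,
        "step": 3_000_000_000,
    }
}

def convert_bytes(num_bytes: float) -> str:
    # Simplified stand-in for the convert_bytes helper referenced in the diff.
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if num_bytes < 1024:
            return f"{num_bytes:.2f} {unit}"
        num_bytes /= 1024
    return f"{num_bytes:.2f} PB"

peak = max(entry["Training using Adam (Peek vRAM)"].values())
print("N/A" if peak == -1 else convert_bytes(peak))  # -> 2.79 GB
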
src/model_utils.py CHANGED
@@ -97,7 +97,7 @@ def calculate_memory(model: torch.nn.Module, options: list):
                 "dtype": dtype,
                 "Largest Layer or Residual Group": dtype_largest_layer,
                 "Total Size": dtype_total_size,
-                "Training using Adam": dtype_training_size,
+                "Training using Adam (Peek vRAM)": dtype_training_size,
             }
         )
     return data
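
Note: the row appended in calculate_memory now carries the same key that src/app.py indexes on. A sketch of the resulting row shape follows; the concrete values are placeholders invented for illustration, and only the key names and the per-stage layout of the training entry follow from the diff.

# Placeholder row shaped like the dict appended above; all values are invented.
row = {
    "dtype": "float32",
    "Largest Layer or Residual Group": "308.69 MB",
    "Total Size": "3.29 GB",
    "Training using Adam (Peek vRAM)": {  # renamed key from this commit
        "model": 3_530_000_000,
        "gradients": 3_530_000_000,
        "optimizer": 7_060_000_000,
        "step": 14_120_000_000,
    },
}

# src/app.py looks the value up by this same string, which is why the rename
# is applied in both files in the same commit.
assert "Training using Adam (Peek vRAM)" in row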