Commit f99db35

wip creates utils method to save outputs
Signed-off-by: Flavia Beo <[email protected]>
1 parent e94c886 commit f99db35

File tree

2 files changed (+13, -5 lines)


aiu_fms_testing_utils/utils/metrics_utils.py

Lines changed: 10 additions & 0 deletions
@@ -1,7 +1,17 @@
 import numpy as np
 import torch
 import torch.nn as nn
+import json
 
+def save_layers_output(out_dict, file_path):
+    serializable_data = {}
+    for key, value in out_dict.items():
+        if isinstance(value, torch.Tensor):
+            serializable_data[key] = value.tolist()
+        else:
+            serializable_data[key] = value
+    with open(file_path, 'w') as f:
+        json.dump(serializable_data, f, indent=4)
 
 def abs_diff_linalg_norm(res_vector):
     """

scripts/generate_layers_metrics.py

Lines changed: 3 additions & 5 deletions
@@ -17,7 +17,7 @@
 from aiu_fms_testing_utils.testing.validation import get_default_validation_prefix
 
 from aiu_fms_testing_utils.utils import prepare_inputs
-from aiu_fms_testing_utils.utils.metrics_utils import tensor_abs_diff, tensor_cos_sim
+from aiu_fms_testing_utils.utils.metrics_utils import tensor_abs_diff, tensor_cos_sim, save_layers_output
 
 
 logger = logging.getLogger(__name__)
@@ -390,8 +390,7 @@ def generate_layers_metrics(model_path, batch_size, seq_length, max_new_tokens):
         seq_length=seq_length, max_new_tokens=max_new_tokens,
         tokenizer=tokenizer)
 
-    with open(os.path.join(output_path, f"{model_path}-layer-output-stack-cpu.json"), 'w') as f:
-        json.dump(layer_stack_cpu, f)
+    save_layers_output(layer_stack_cpu, os.path.join(output_path, f"{model_path}-layer-output-stack-cpu.json"))
 
     global generate_iters
     generate_iters = 0
@@ -403,8 +402,7 @@ def generate_layers_metrics(model_path, batch_size, seq_length, max_new_tokens):
         seq_length=seq_length, max_new_tokens=max_new_tokens,
         tokenizer=tokenizer)
 
-    with open(os.path.join(output_path, f"{model_path}-layer-output-stack-gpu.json"), 'w') as f:
-        json.dump(layer_stack_cuda, f)
+    save_layers_output(layer_stack_cuda, os.path.join(output_path, f"{model_path}-layer-output-stack-gpu.json"))
 
     assert len(layer_stack_cuda.keys()) == len(layer_stack_cpu.keys())
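
Since save_layers_output stores tensors as nested lists, anything that reads these files back for comparison has to rebuild tensors first. A minimal sketch of that reload step, assuming the two JSON file names produced above (the reload itself is not part of this commit):

import json
import torch

# Assumed file names; adjust to the actual output_path/model_path prefix.
with open("layer-output-stack-cpu.json") as f:
    cpu_stack = {k: torch.tensor(v) for k, v in json.load(f).items()}
with open("layer-output-stack-gpu.json") as f:
    gpu_stack = {k: torch.tensor(v) for k, v in json.load(f).items()}

# Both runs should report the same layer names, mirroring the assert in the script.
assert cpu_stack.keys() == gpu_stack.keys()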
