
Commit 695da06

[QoL] Add generated text summary to top of output file (#116)
* add generated text summary to output file

  Signed-off-by: Benjamin Chislett <[email protected]>

* lint

  Signed-off-by: Benjamin Chislett <[email protected]>

---------

Signed-off-by: Benjamin Chislett <[email protected]>
1 parent 570da12 commit 695da06

File tree

2 files changed: 6 additions & 1 deletion
src/flexible_inference_benchmark/engine/data.py

Lines changed: 1 addition & 1 deletion
@@ -308,7 +308,7 @@ def __init__(
        filtered_dataset = [
            (prompt_str, prompt_len, output_len)
            for prompt_str, prompt_len, output_len in tokenized_dataset
-           if (prompt_len > 4 and output_len > 4)
+           if (prompt_len > 4 and output_len > 0)
        ]

        self.data = filtered_dataset
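
A minimal, self-contained sketch (not part of the diff; the sample tuples below are hypothetical) of what the relaxed condition admits: samples with short reference outputs of 1-4 tokens are now kept, while the prompt-length bound is unchanged.

    # Hypothetical samples, for illustration only.
    tokenized_dataset = [
        ("short answer", 12, 2),   # kept after this commit; the old `output_len > 4` bound dropped it
        ("long answer", 30, 17),   # kept before and after
        ("tiny prompt", 3, 50),    # still dropped: prompt_len must exceed 4
    ]

    filtered_dataset = [
        (prompt_str, prompt_len, output_len)
        for prompt_str, prompt_len, output_len in tokenized_dataset
        if (prompt_len > 4 and output_len > 0)
    ]

    print(filtered_dataset)  # [('short answer', 12, 2), ('long answer', 30, 17)]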

src/flexible_inference_benchmark/main.py

Lines changed: 5 additions & 0 deletions
@@ -927,9 +927,14 @@ def run_main(args: argparse.Namespace) -> None:
    output_list: List[Any] = send_requests(client, requests_prompts, requests_times, arr_dims)
    benchmark_time = time.perf_counter() - t
    # pylint: disable=line-too-long
+
+   text_summaries: list[str] = []
+   if any(hasattr(o, "generated_text") for o in output_list):
+       text_summaries = [o.generated_text for o in output_list if hasattr(o, "generated_text")]  # type: ignore
    output = {
        "backend": args.backend,
        "time": benchmark_time,
+       "summary": text_summaries,
        "outputs": [request_func_output.model_dump() for request_func_output in output_list],  # type: ignore
        "inputs": requests_prompts,
        "tokenizer": args.tokenizer if args.tokenizer else args.model,
