
Commit 8fd4ca4

Merge pull request #148 from github/split-summary-table
Split out the summary table into 2 tables
2 parents c2b59a5 + 3da633e commit 8fd4ca4
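With this change, the summary that write_to_markdown emits into issue_metrics.md is split into two tables: one for the timing statistics (average / median / 90th percentile) and a separate one for the counts. Per the updated test expectations further down, the output now looks roughly like this (abridged to a single timing row; the sample values are the ones used in the tests):

| Metric | Average | Median | 90th percentile |
| --- | --- | --- | ---: |
| Time to first response | 2 days, 0:00:00 | 2 days, 0:00:00 | 2 days, 0:00:00 |

| Metric | Count |
| --- | ---: |
| Number of items that remain open | 2 |
| Number of items closed | 1 |
| Total number of items created | 2 |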

File tree

3 files changed: +106 -69 lines changed

.vscode/settings.json (+5 -1)
markdown_writer.py (+59 -42)
test_markdown_writer.py (+42 -26)

.vscode/settings.json

+5 -1

@@ -3,5 +3,9 @@
         "."
     ],
     "python.testing.unittestEnabled": false,
-    "python.testing.pytestEnabled": true
+    "python.testing.pytestEnabled": true,
+    "[python]": {
+        "editor.defaultFormatter": "ms-python.black-formatter"
+    },
+    "python.formatting.provider": "none"
 }
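For context: these settings route Python formatting in VS Code to the ms-python.black-formatter extension and set the legacy "python.formatting.provider" to "none", so the deprecated built-in formatter setting does not compete with the extension. This lines up with the Black-style reformatting (double quotes, trailing commas) visible in the test file below.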

markdown_writer.py

+59 -42

@@ -78,6 +78,7 @@ def write_to_markdown(
     num_issues_closed: Union[int, None],
     labels=None,
     search_query=None,
+    hide_label_metrics=False,
 ) -> None:
     """Write the issues with metrics to a markdown file.

@@ -94,6 +95,7 @@ def write_to_markdown(
         num_issues_closed (int): The number of issues that were closed.
         labels (List[str]): A list of the labels that are used in the issues.
         search_query (str): The search query used to find the issues.
+        hide_label_metrics (bool): Represents whether the user has chosen to hide label metrics in the output

     Returns:
         None.
@@ -112,7 +114,7 @@ def write_to_markdown(
     file.write("# Issue Metrics\n\n")

     # Write first table with overall metrics
-    write_overall_metrics_table(
+    write_overall_metrics_tables(
         issues_with_metrics,
         average_time_to_first_response,
         average_time_to_close,
@@ -123,6 +125,7 @@ def write_to_markdown(
         labels,
         columns,
         file,
+        hide_label_metrics,
     )

     # Write second table with individual issue/pr/discussion metrics
@@ -168,7 +171,7 @@ def write_to_markdown(
     print("Wrote issue metrics to issue_metrics.md")


-def write_overall_metrics_table(
+def write_overall_metrics_tables(
     issues_with_metrics,
     stats_time_to_first_response,
     stats_time_to_close,
@@ -179,49 +182,63 @@ def write_overall_metrics_table(
     labels,
     columns,
     file,
+    hide_label_metrics,
 ):
-    """Write the overall metrics table to the markdown file."""
-    file.write("| Metric | Average | Median | 90th percentile |\n")
-    file.write("| --- | --- | --- | ---: |\n")
-    if "Time to first response" in columns:
-        if stats_time_to_first_response is not None:
-            file.write(
-                f"| Time to first response "
-                f"| {stats_time_to_first_response['avg']} "
-                f"| {stats_time_to_first_response['med']} "
-                f"| {stats_time_to_first_response['90p']} |\n"
-            )
-        else:
-            file.write("| Time to first response | None | None | None |\n")
-    if "Time to close" in columns:
-        if stats_time_to_close is not None:
-            file.write(
-                f"| Time to close "
-                f"| {stats_time_to_close['avg']} "
-                f"| {stats_time_to_close['med']} "
-                f"| {stats_time_to_close['90p']} |\n"
-            )
-        else:
-            file.write("| Time to close | None | None | None |\n")
-    if "Time to answer" in columns:
-        if stats_time_to_answer is not None:
-            file.write(
-                f"| Time to answer "
-                f"| {stats_time_to_answer['avg']} "
-                f"| {stats_time_to_answer['med']} "
-                f"| {stats_time_to_answer['90p']} |\n"
-            )
-        else:
-            file.write("| Time to answer | None | None | None |\n")
-    if labels and stats_time_in_labels:
-        for label in labels:
-            if f"Time spent in {label}" in columns and label in stats_time_in_labels['avg']:
+    """Write the overall metrics tables to the markdown file."""
+    if (
+        "Time to first response" in columns
+        or "Time to close" in columns
+        or "Time to answer" in columns
+        or (hide_label_metrics is False and len(labels) > 0)
+    ):
+        file.write("| Metric | Average | Median | 90th percentile |\n")
+        file.write("| --- | --- | --- | ---: |\n")
+        if "Time to first response" in columns:
+            if stats_time_to_first_response is not None:
+                file.write(
+                    f"| Time to first response "
+                    f"| {stats_time_to_first_response['avg']} "
+                    f"| {stats_time_to_first_response['med']} "
+                    f"| {stats_time_to_first_response['90p']} |\n"
+                )
+            else:
+                file.write("| Time to first response | None | None | None |\n")
+        if "Time to close" in columns:
+            if stats_time_to_close is not None:
                 file.write(
-                    f"| Time spent in {label} "
-                    f"| {stats_time_in_labels['avg'][label]} "
-                    f"| {stats_time_in_labels['med'][label]} "
-                    f"| {stats_time_in_labels['90p'][label]} |\n"
+                    f"| Time to close "
+                    f"| {stats_time_to_close['avg']} "
+                    f"| {stats_time_to_close['med']} "
+                    f"| {stats_time_to_close['90p']} |\n"
                 )
+            else:
+                file.write("| Time to close | None | None | None |\n")
+        if "Time to answer" in columns:
+            if stats_time_to_answer is not None:
+                file.write(
+                    f"| Time to answer "
+                    f"| {stats_time_to_answer['avg']} "
+                    f"| {stats_time_to_answer['med']} "
+                    f"| {stats_time_to_answer['90p']} |\n"
+                )
+            else:
+                file.write("| Time to answer | None | None | None |\n")
+        if labels and stats_time_in_labels:
+            for label in labels:
+                if (
+                    f"Time spent in {label}" in columns
+                    and label in stats_time_in_labels["avg"]
+                ):
+                    file.write(
+                        f"| Time spent in {label} "
+                        f"| {stats_time_in_labels['avg'][label]} "
+                        f"| {stats_time_in_labels['med'][label]} "
+                        f"| {stats_time_in_labels['90p'][label]} |\n"
+                    )
+        file.write("\n")
+    # Write count stats to a separate table
+    file.write("| Metric | Count |\n")
+    file.write("| --- | ---: |\n")
     file.write(f"| Number of items that remain open | {num_issues_opened} |\n")
     file.write(f"| Number of items closed | {num_issues_closed} |\n")
     file.write(f"| Total number of items created | {len(issues_with_metrics)} |\n\n")

test_markdown_writer.py

+42 -26

@@ -17,6 +17,7 @@

 class TestWriteToMarkdown(unittest.TestCase):
     """Test the write_to_markdown function."""
+
     maxDiff = None

     def test_write_to_markdown(self):
@@ -50,21 +51,25 @@ def test_write_to_markdown(self):
             ),
         ]
         time_to_first_response = {
-            'avg': timedelta(days=2),
-            'med': timedelta(days=2),
-            '90p': timedelta(days=2)}
+            "avg": timedelta(days=2),
+            "med": timedelta(days=2),
+            "90p": timedelta(days=2),
+        }
         time_to_close = {
-            'avg': timedelta(days=3),
-            'med': timedelta(days=3),
-            '90p': timedelta(days=3)}
+            "avg": timedelta(days=3),
+            "med": timedelta(days=3),
+            "90p": timedelta(days=3),
+        }
         time_to_answer = {
-            'avg': timedelta(days=4),
-            'med': timedelta(days=4),
-            '90p': timedelta(days=4)}
+            "avg": timedelta(days=4),
+            "med": timedelta(days=4),
+            "90p": timedelta(days=4),
+        }
         time_in_labels = {
-            'avg': {"bug": "1 day, 12:00:00"},
-            'med': {"bug": "1 day, 12:00:00"},
-            '90p': {"bug": "1 day, 12:00:00"}}
+            "avg": {"bug": "1 day, 12:00:00"},
+            "med": {"bug": "1 day, 12:00:00"},
+            "90p": {"bug": "1 day, 12:00:00"},
+        }

         num_issues_opened = 2
         num_issues_closed = 1
@@ -93,6 +98,9 @@ def test_write_to_markdown(self):
             "| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
             "| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
             "| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
+            "\n"
+            "| Metric | Count |\n"
+            "| --- | ---: |\n"
             "| Number of items that remain open | 2 |\n"
             "| Number of items closed | 1 |\n"
             "| Total number of items created | 2 |\n\n"
@@ -140,21 +148,25 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
             ),
         ]
         average_time_to_first_response = {
-            'avg': timedelta(days=2),
-            'med': timedelta(days=2),
-            '90p': timedelta(days=2)}
+            "avg": timedelta(days=2),
+            "med": timedelta(days=2),
+            "90p": timedelta(days=2),
+        }
         average_time_to_close = {
-            'avg': timedelta(days=3),
-            'med': timedelta(days=3),
-            '90p': timedelta(days=3)}
+            "avg": timedelta(days=3),
+            "med": timedelta(days=3),
+            "90p": timedelta(days=3),
+        }
         average_time_to_answer = {
-            'avg': timedelta(days=4),
-            'med': timedelta(days=4),
-            '90p': timedelta(days=4)}
+            "avg": timedelta(days=4),
+            "med": timedelta(days=4),
+            "90p": timedelta(days=4),
+        }
         average_time_in_labels = {
-            'avg': {"bug": "1 day, 12:00:00"},
-            'med': {"bug": "1 day, 12:00:00"},
-            '90p': {"bug": "1 day, 12:00:00"}}
+            "avg": {"bug": "1 day, 12:00:00"},
+            "med": {"bug": "1 day, 12:00:00"},
+            "90p": {"bug": "1 day, 12:00:00"},
+        }

         num_issues_opened = 2
         num_issues_closed = 1
@@ -182,6 +194,9 @@ def test_write_to_markdown_with_vertical_bar_in_title(self):
             "| Time to close | 3 days, 0:00:00 | 3 days, 0:00:00 | 3 days, 0:00:00 |\n"
             "| Time to answer | 4 days, 0:00:00 | 4 days, 0:00:00 | 4 days, 0:00:00 |\n"
             "| Time spent in bug | 1 day, 12:00:00 | 1 day, 12:00:00 | 1 day, 12:00:00 |\n"
+            "\n"
+            "| Metric | Count |\n"
+            "| --- | ---: |\n"
             "| Number of items that remain open | 2 |\n"
             "| Number of items closed | 1 |\n"
             "| Total number of items created | 2 |\n\n"
@@ -279,15 +294,16 @@ def test_writes_markdown_file_with_non_hidden_columns_only(self):
             num_issues_closed=num_issues_closed,
             labels=["label1"],
             search_query="repo:user/repo is:issue",
+            hide_label_metrics=True,
         )

         # Check that the function writes the correct markdown file
         with open("issue_metrics.md", "r", encoding="utf-8") as file:
             content = file.read()
         expected_content = (
             "# Issue Metrics\n\n"
-            "| Metric | Average | Median | 90th percentile |\n"
-            "| --- | --- | --- | ---: |\n"
+            "| Metric | Count |\n"
+            "| --- | ---: |\n"
             "| Number of items that remain open | 2 |\n"
             "| Number of items closed | 1 |\n"
             "| Total number of items created | 2 |\n\n"
