Skip to content

Commit a97d8c0

Browse files
committed
Add github code to list workflows and associated changes.
Enable downloading artifacts to a variable. Enable summarizing logs to a string only.
1 parent b5a23a0 commit a97d8c0

File tree

3 files changed

+44
-8
lines changed

3 files changed

+44
-8
lines changed

.github/workflows/integration_tests.yml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -190,12 +190,15 @@ jobs:
190190
apis="analytics,app_check,auth,database,dynamic_links,functions,gma,installations,messaging,remote_config,storage"
191191
elif [[ "${{ github.event.schedule }}" == "0 10 * * *" || "${{ github.event.schedule }}" == "0 11 * * *" ]]; then
192192
# at 2am PST/3am PDT and 3am PST/4am PDT. Running integration tests for firestore and generating a test report.
193+
echo "::warning ::Running Firestore nightly tests"
193194
apis="firestore"
194195
else
196+
echo "::warning ::Running main nightly tests"
195197
apis=$( python scripts/gha/print_matrix_configuration.py -c -w integration_tests -k apis -o "${{github.event.inputs.apis}}" ${TEST_MATRIX_PARAM} )
196198
fi
197199
if [[ "${{ github.event.schedule }}" == "0 11 * * *" ]]; then
198-
# at 3am PST/4am PDT. Running firestore desktop integration test against tip-of-tree ios repo
200+
# at 3am PST/4am PDT. Running firestore desktop integration test against tip-of-tree ios repo
201+
echo "::warning ::Running against Firestore tip-of-tree"
199202
matrix_platform="Desktop"
200203
matrix_os=$( python scripts/gha/print_matrix_configuration.py -w integration_tests ${TEST_MATRIX_PARAM} -k os -o "ubuntu-20.04,macos-12")
201204
else

scripts/gha/github.py

Lines changed: 38 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -179,14 +179,18 @@ def list_artifacts(token, run_id):
179179
return response.json()["artifacts"]
180180

181181

182-
def download_artifact(token, artifact_id, output_path):
182+
def download_artifact(token, artifact_id, output_path=None):
183183
"""https://docs.github.com/en/rest/reference/actions#download-an-artifact"""
184184
url = f'{GITHUB_API_URL}/actions/artifacts/{artifact_id}/zip'
185185
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
186-
with requests.get(url, headers=headers, stream=True, timeout=TIMEOUT) as response:
186+
with requests_retry_session().get(url, headers=headers, stream=True, timeout=TIMEOUT_LONG) as response:
187187
logging.info("download_artifact: %s response: %s", url, response)
188-
with open(output_path, 'wb') as file:
189-
shutil.copyfileobj(response.raw, file)
188+
if output_path:
189+
with open(output_path, 'wb') as file:
190+
shutil.copyfileobj(response.raw, file)
191+
elif response.status_code == 200:
192+
return response.content
193+
return None
190194

191195

192196
def dismiss_review(token, pull_number, review_id, message):
@@ -273,9 +277,38 @@ def list_pull_requests(token, state, head, base):
273277
keep_going = False
274278
with requests_retry_session().get(url, headers=headers, params=params,
275279
stream=True, timeout=TIMEOUT) as response:
276-
logging.info("get_reviews: %s response: %s", url, response)
280+
logging.info("list_pull_requests: %s response: %s", url, response)
277281
results = results + response.json()
278282
# If exactly per_page results were retrieved, read the next page.
279283
keep_going = (len(response.json()) == per_page)
280284
return results
281285

286+
287+
def list_workflow_runs(token, workflow_id, branch=None, event=None, limit=200):
288+
"""https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow"""
289+
url = f'{GITHUB_API_URL}/actions/workflows/{workflow_id}/runs'
290+
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
291+
page = 1
292+
per_page = 100
293+
results = []
294+
keep_going = True
295+
while keep_going:
296+
params = {'per_page': per_page, 'page': page}
297+
if branch: params.update({'branch': branch})
298+
if event: params.update({'event': event})
299+
page = page + 1
300+
keep_going = False
301+
with requests_retry_session().get(url, headers=headers, params=params,
302+
stream=True, timeout=TIMEOUT) as response:
303+
logging.info("list_workflow_runs: %s page %d, response: %s", url, params['page'], response)
304+
if 'workflow_runs' not in response.json():
305+
break
306+
run_list_results = response.json()['workflow_runs']
307+
results = results + run_list_results
308+
# If exactly per_page results were retrieved, read the next page.
309+
keep_going = (len(run_list_results) == per_page)
310+
if limit > 0 and len(results) >= limit:
311+
keep_going = False
312+
results = results[:limit]
313+
return results
314+

scripts/gha/summarize_test_results.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,7 @@ def main(argv):
191191
summarize_logs(FLAGS.dir, FLAGS.markdown, FLAGS.github_log)
192192

193193

194-
def summarize_logs(dir, markdown=False, github_log=False):
194+
def summarize_logs(dir, markdown=False, github_log=False, quiet=False):
195195
build_log_files = glob.glob(os.path.join(dir, BUILD_FILE_PATTERN))
196196
test_log_files = glob.glob(os.path.join(dir, TEST_FILE_PATTERN))
197197
# Replace the "*" in the file glob with a regex capture group,
@@ -267,7 +267,7 @@ def summarize_logs(dir, markdown=False, github_log=False):
267267
log_lines = print_log(log_results)
268268

269269
log_summary = "\n".join(log_lines)
270-
print(log_summary)
270+
if not quiet: print(log_summary)
271271
return (success_or_only_flakiness, log_summary)
272272

273273

0 commit comments

Comments
 (0)