Skip to content

Restore adding binary size metrics into DB #1804

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jan 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 13 additions & 11 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -360,22 +360,24 @@ jobs:

- name: Gather data
run: |
cargo run --bin collector bench_local `which rustc` --include syn --id version1 --db postgresql://postgres:[email protected]:5432/postgres
cargo run --bin collector bench_local `rustup +nightly which rustc` --include syn --id version2 --db postgresql://postgres:[email protected]:5432/postgres
cargo run --bin collector bench_local `which rustc` --include syn --id version1 \
--self-profile \
--db postgresql://postgres:[email protected]:5432/postgres
cargo run --bin collector bench_local `rustup +nightly which rustc` --include syn --id version2 \
--self-profile \
--db postgresql://postgres:[email protected]:5432/postgres

- name: Build site
run: cargo build --bin site

- name: Setup Python
uses: actions/setup-python@v5

- name: Install Python dependencies
run: python3 -m pip install msgpack requests

# Check that data from the /get endpoint can be successfully queried.
- name: Query compare page data
run: |
DATABASE_URL=postgresql://postgres:[email protected]:5432/postgres cargo run --bin site &
curl http://localhost:2346/perf/get \
-H 'Content-Type:application/json' \
-d '{"start": "version1", "end": "version2", "stat": "instructions:u" }' \
--output out.msgpack \
--retry-connrefused \
--connect-timeout 5 \
--max-time 10 \
--retry 3 \
--retry-delay 5
python3 ci/check-site.py version1 version2
50 changes: 50 additions & 0 deletions ci/check-site.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
"""
Checks that the perf site running locally returns non-empty data for a set of artifacts.

Usage: python3 check-site.py <version1> <version2>

Exits non-zero (or raises) if the site never comes online or returns
empty/failed data for the checked statistics.
"""
import sys
import time

import msgpack
import requests

# Maximum number of one-second polls to wait for the site to come online.
# Caps the wait so a site that never starts fails the CI job instead of
# hanging it until the job-level timeout.
MAX_WAIT_ATTEMPTS = 120

if __name__ == "__main__":
    if len(sys.argv) < 3:
        print("Usage: python3 check-site.py <version1> <version2>")
        sys.exit(1)
    version1 = sys.argv[1]
    version2 = sys.argv[2]

    # Wait for the site to start. While the site is still loading its data it
    # answers with the literal placeholder body checked below.
    for _attempt in range(MAX_WAIT_ATTEMPTS):
        try:
            response = requests.post("http://localhost:2346/perf/get", json={
                "start": version1,
                "end": version2,
                "stat": "instructions:u"
            })
            if response.content != b"no data yet, please wait":
                break
        # Catch only network-level request errors; the previous bare
        # `BaseException` also swallowed KeyboardInterrupt/SystemExit, making
        # the loop impossible to interrupt.
        except requests.exceptions.RequestException as e:
            print(e)

        print("Site not online yet, waiting")
        time.sleep(1)
    else:
        print(f"Site did not come online within {MAX_WAIT_ATTEMPTS} attempts")
        sys.exit(1)

    # instructions:u is not available on CI, so check at least wall time and binary size
    stats = ("wall-time", "size:linked_artifact")
    for stat in stats:
        print(f"Checking {stat}")
        response = requests.post("http://localhost:2346/perf/get", json={
            "start": version1,
            "end": version2,
            "stat": stat
        })
        if response.status_code != 200:
            raise Exception(f"Failure {response.status_code}: {response.content}")
        # Responses from /perf/get are msgpack-encoded.
        payload = msgpack.unpackb(response.content)
        print(payload)
        # Both artifacts ("a" and "b" in the payload) must report a non-zero
        # linked size for librustc_driver, otherwise the size metrics were not
        # stored in the DB.
        for artifact_id in ("a", "b"):
            artifact = payload[artifact_id]
            assert artifact["component_sizes"].get("librustc_driver", 0) > 0
        comparisons = payload["compile_comparisons"]
        assert len(comparisons) > 0
15 changes: 9 additions & 6 deletions collector/src/compile/execute/bencher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,21 +184,24 @@ impl<'a> Processor for BenchProcessor<'a> {
let version = get_rustc_perf_commit();
let collection = self.conn.collection_id(&version).await;

// If the gathered metrics were produced with self profile enabled, then they
// are not realistic. Do not store the metrics into the DB for self-profile
// runs to avoid unnecessary DB storage.
if let Some(files) = res.2 {
self.self_profiles.push(RecordedSelfProfile {
collection,
scenario,
profile,
files,
});
} else {
self.insert_stats(collection, scenario, profile, data.backend, res.0)
.await;

// If the gathered metrics were produced with self profile enabled, then they
// are not realistic. Do not store the metrics that are affected by
// self-profiling into the DB for self-profile runs to avoid unnecessary
// DB storage.
res.0.stats.retain(|key, _| key.starts_with("size:"));
}

self.insert_stats(collection, scenario, profile, data.backend, res.0)
.await;

Ok(Retry::No)
}
Err(DeserializeStatError::NoOutput(output)) => {
Expand Down