
Commit afd4861

Merge branch 'main' into main
2 parents 090c062 + add6acc commit afd4861


1,695 files changed (+53,485 / -29,187 lines)


.ci/metrics/metrics.py

Lines changed: 2 additions & 201 deletions
@@ -1,12 +1,9 @@
 import collections
 import datetime
-import dateutil
 import github
-import json
 import logging
 import os
 import requests
-import sys
 import time
 
 from dataclasses import dataclass
@@ -55,18 +52,6 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120
 
-# Lists the BuildKite jobs we want to track. Maps the BuildKite job name to
-# the metric name in Grafana. This is important not to lose metrics history
-# if the workflow name changes.
-BUILDKITE_WORKFLOW_TO_TRACK = {
-    ":linux: Linux x64": "buildkite_linux",
-    ":windows: Windows x64": "buildkite_windows",
-}
-
-# Number of builds to fetch per page. Since we scrape regularly, this can
-# remain small.
-BUILDKITE_GRAPHQL_BUILDS_PER_PAGE = 50
-
 
 @dataclass
 class JobMetrics:
@@ -86,181 +71,6 @@ class GaugeMetric:
     time_ns: int
 
 
-def buildkite_fetch_page_build_list(
-    buildkite_token: str, after_cursor: str = None
-) -> list[dict[str, str]]:
-    """Fetches a page of the build list using the GraphQL BuildKite API.
-
-    Returns the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE last running/queued builds,
-    or the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE running/queued builds
-    older than the one pointer by |after_cursor| if provided.
-    The |after_cursor| value is taken from the previous page returned by the
-    API.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      after_cursor: cursor after which to start the page fetch.
-
-    Returns:
-      The most recent builds after cursor (if set) with the following format:
-        [
-          {
-            "cursor": <value>,
-            "number": <build-number>,
-          }
-        ]
-    """
-
-    BUILDKITE_GRAPHQL_QUERY = """
-    query OrganizationShowQuery {{
-      organization(slug: "llvm-project") {{
-        pipelines(search: "Github pull requests", first: 1) {{
-          edges {{
-            node {{
-              builds (state: [CANCELING, CREATING, FAILING, RUNNING], first: {PAGE_SIZE}, after: {AFTER}) {{
-                edges {{
-                  cursor
-                  node {{
-                    number
-                  }}
-                }}
-              }}
-            }}
-          }}
-        }}
-      }}
-    }}
-    """
-    query = BUILDKITE_GRAPHQL_QUERY.format(
-        PAGE_SIZE=BUILDKITE_GRAPHQL_BUILDS_PER_PAGE,
-        AFTER="null" if after_cursor is None else '"{}"'.format(after_cursor),
-    )
-    query = json.dumps({"query": query})
-    url = "https://graphql.buildkite.com/v1"
-    headers = {
-        "Authorization": "Bearer " + buildkite_token,
-        "Content-Type": "application/json",
-    }
-    data = requests.post(url, data=query, headers=headers).json()
-    # De-nest the build list.
-    if "errors" in data:
-        logging.info("Failed to fetch BuildKite jobs: {}".format(data["errors"]))
-        return []
-    builds = data["data"]["organization"]["pipelines"]["edges"][0]["node"]["builds"][
-        "edges"
-    ]
-    # Fold cursor info into the node dictionnary.
-    return [{**x["node"], "cursor": x["cursor"]} for x in builds]
-
-
-def buildkite_get_build_info(build_number: str) -> dict:
-    """Returns all the info associated with the provided build number.
-
-    Note: for unknown reasons, graphql returns no jobs for a given build,
-    while this endpoint does, hence why this uses this API instead of graphql.
-
-    Args:
-      build_number: which build number to fetch info for.
-
-    Returns:
-      The info for the target build, a JSON dictionnary.
-    """
-
-    URL = "https://buildkite.com/llvm-project/github-pull-requests/builds/{}.json"
-    return requests.get(URL.format(build_number)).json()
-
-
-def buildkite_get_incomplete_tasks(buildkite_token: str) -> list:
-    """Returns all the running/pending BuildKite builds.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      last_cursor: the cursor to stop at if set. If None, a full page is fetched.
-    """
-    output = []
-    cursor = None
-    while True:
-        page = buildkite_fetch_page_build_list(buildkite_token, cursor)
-        if len(page) == 0:
-            break
-        cursor = page[-1]["cursor"]
-        output += page
-    return output
-
-
-def buildkite_get_metrics(
-    buildkite_token: str, previously_incomplete: set[int]
-) -> (list[JobMetrics], set[int]):
-    """Returns a tuple with:
-
-    - the metrics recorded for newly completed workflow jobs.
-    - the set of workflow still running now.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      previously_incomplete: the set of running workflows the last time this
-        function was called.
-    """
-
-    running_builds = buildkite_get_incomplete_tasks(buildkite_token)
-    incomplete_now = set([x["number"] for x in running_builds])
-    output = []
-
-    for build_id in previously_incomplete:
-        if build_id in incomplete_now:
-            continue
-
-        info = buildkite_get_build_info(build_id)
-        metric_timestamp = dateutil.parser.isoparse(info["finished_at"])
-        for job in info["jobs"]:
-            # This workflow is not interesting to us.
-            if job["name"] not in BUILDKITE_WORKFLOW_TO_TRACK:
-                continue
-
-            # Don't count canceled jobs.
-            if job["canceled_at"]:
-                continue
-
-            created_at = dateutil.parser.isoparse(job["created_at"])
-            scheduled_at = dateutil.parser.isoparse(job["scheduled_at"])
-            started_at = dateutil.parser.isoparse(job["started_at"])
-            finished_at = dateutil.parser.isoparse(job["finished_at"])
-
-            job_name = BUILDKITE_WORKFLOW_TO_TRACK[job["name"]]
-            queue_time = (started_at - scheduled_at).seconds
-            run_time = (finished_at - started_at).seconds
-            status = bool(job["passed"])
-
-            # Grafana will refuse to ingest metrics older than ~2 hours, so we
-            # should avoid sending historical data.
-            metric_age_mn = (
-                datetime.datetime.now(datetime.timezone.utc) - metric_timestamp
-            ).total_seconds() / 60
-            if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN:
-                logging.warning(
-                    f"Job {job['name']} from workflow {build_id} dropped due"
-                    + f" to staleness: {metric_age_mn}mn old."
-                )
-                continue
-
-            metric_timestamp_ns = int(metric_timestamp.timestamp()) * 10**9
-            workflow_id = build_id
-            workflow_name = "Github pull requests"
-            output.append(
-                JobMetrics(
-                    job_name,
-                    queue_time,
-                    run_time,
-                    status,
-                    metric_timestamp_ns,
-                    workflow_id,
-                    workflow_name,
-                )
-            )
-
-    return output, incomplete_now
-
-
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:
@@ -478,17 +288,13 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
 def main():
     # Authenticate with Github
     github_auth = Auth.Token(os.environ["GITHUB_TOKEN"])
-    buildkite_token = os.environ["BUILDKITE_TOKEN"]
     grafana_api_key = os.environ["GRAFANA_API_KEY"]
     grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"]
 
     # The last workflow this script processed.
     # Because the Github queries are broken, we'll simply log a 'processed'
     # bit for the last COUNT_TO_PROCESS workflows.
     gh_last_workflows_seen_as_completed = set()
-    # Stores the list of pending/running builds in BuildKite we need to check
-    # at the next iteration.
-    bk_incomplete = set()
 
     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
@@ -500,13 +306,8 @@ def main():
             github_repo, gh_last_workflows_seen_as_completed
         )
 
-        bk_metrics, bk_incomplete = buildkite_get_metrics(
-            buildkite_token, bk_incomplete
-        )
-
-        metrics = gh_metrics + bk_metrics
-        upload_metrics(metrics, grafana_metrics_userid, grafana_api_key)
-        logging.info(f"Uploaded {len(metrics)} metrics")
+        upload_metrics(gh_metrics, grafana_metrics_userid, grafana_api_key)
+        logging.info(f"Uploaded {len(gh_metrics)} metrics")
 
         time.sleep(SCRAPE_INTERVAL_SECONDS)
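
Note (not part of the diff): after this change, main() reads only the three remaining environment variables. A minimal invocation sketch, with placeholder values and an assumed python3 entry point:

  # BUILDKITE_TOKEN is no longer read; only GitHub + Grafana credentials remain.
  export GITHUB_TOKEN="ghp_placeholder"
  export GRAFANA_API_KEY="grafana-placeholder"
  export GRAFANA_METRICS_USERID="000000"
  python3 .ci/metrics/metrics.py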

.ci/monolithic-linux.sh

Lines changed: 10 additions & 36 deletions
@@ -102,51 +102,25 @@ if [[ "${runtimes}" != "" ]]; then
     exit 1
   fi
 
-  echo "--- ninja install-clang"
-
-  ninja -C ${BUILD_DIR} install-clang install-clang-resource-headers
-
-  RUNTIMES_BUILD_DIR="${MONOREPO_ROOT}/build-runtimes"
-  INSTALL_DIR="${BUILD_DIR}/install"
-  mkdir -p ${RUNTIMES_BUILD_DIR}
-
   echo "--- cmake runtimes C++26"
 
-  rm -rf "${RUNTIMES_BUILD_DIR}"
-  cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
-      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
-      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
-      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
-      -D LIBCXX_CXX_ABI=libcxxabi \
-      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
-      -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \
-      -D LIBCXX_TEST_PARAMS="std=c++26" \
-      -D LIBCXXABI_TEST_PARAMS="std=c++26" \
-      -D LLVM_LIT_ARGS="${lit_args}"
+  cmake \
+      -D LIBCXX_TEST_PARAMS="std=c++26" \
+      -D LIBCXXABI_TEST_PARAMS="std=c++26" \
+      "${BUILD_DIR}"
 
   echo "--- ninja runtimes C++26"
 
-  ninja -vC "${RUNTIMES_BUILD_DIR}" ${runtime_targets}
+  ninja -C "${BUILD_DIR}" ${runtime_targets}
 
   echo "--- cmake runtimes clang modules"
 
-  # We don't need to do a clean build of runtimes, because LIBCXX_TEST_PARAMS
-  # and LIBCXXABI_TEST_PARAMS only affect lit configuration, which successfully
-  # propagates without a clean build. Other that those two variables, builds
-  # are supposed to be the same.
-
-  cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
-      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
-      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
-      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
-      -D LIBCXX_CXX_ABI=libcxxabi \
-      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
-      -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \
-      -D LIBCXX_TEST_PARAMS="enable_modules=clang" \
-      -D LIBCXXABI_TEST_PARAMS="enable_modules=clang" \
-      -D LLVM_LIT_ARGS="${lit_args}"
+  cmake \
+      -D LIBCXX_TEST_PARAMS="enable_modules=clang" \
+      -D LIBCXXABI_TEST_PARAMS="enable_modules=clang" \
+      "${BUILD_DIR}"
 
   echo "--- ninja runtimes clang modules"
 
-  ninja -vC "${RUNTIMES_BUILD_DIR}" ${runtime_targets}
+  ninja -C "${BUILD_DIR}" ${runtime_targets}
 fi
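
Note on the reconfigure idiom used above (not part of the diff): re-running cmake on an already-configured build directory updates only the cache entries named with -D and reuses the rest from CMakeCache.txt, so the tree can switch lit test parameters without a clean build. A standalone sketch, assuming an already-configured "${BUILD_DIR}" (the check-cxx target here is illustrative; the script itself rebuilds ${runtime_targets}):

  # Update two cache entries in place, then rebuild.
  cmake -D LIBCXX_TEST_PARAMS="std=c++26" -D LIBCXXABI_TEST_PARAMS="std=c++26" "${BUILD_DIR}"
  ninja -C "${BUILD_DIR}" check-cxx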

.git-blame-ignore-revs

Lines changed: 6 additions & 0 deletions
@@ -100,3 +100,9 @@ d33bf2e9df578ff7e44fd22504d6ad5a122b7ee6
 
 # [lldb][NFC] clang-format MainLoopPosix.cpp
 66bdbfbaa08fa3d8e64a7fe136a8fb717f5cdbb7
+
+# [clang-tidy][NFC] Run clang-format on "clang-tools-extra/clang-tidy"
+65d66625b3e2b8322ed99d82edabecbafcd0885b
+ce46adb8b7ce645353eccaedf31ed9765dab77bb
+68070f908bb7ac5f0b5fa9722caa504ecf723f6b
+5213c57cb1f0d78aad9a253b7f6a2b62ff4c7859
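
Note (not part of the diff): these revisions are only skipped by git blame for users who opt in, e.g.:

  git config blame.ignoreRevsFile .git-blame-ignore-revs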

bolt/include/bolt/Core/MCPlusBuilder.h

Lines changed: 1 addition & 1 deletion
@@ -405,7 +405,7 @@ class MCPlusBuilder {
 
   bool equals(const MCExpr &A, const MCExpr &B, CompFuncTy Comp) const;
 
-  virtual bool equals(const MCTargetExpr &A, const MCTargetExpr &B,
+  virtual bool equals(const MCSpecifierExpr &A, const MCSpecifierExpr &B,
                       CompFuncTy Comp) const;
 
   virtual bool isBranch(const MCInst &Inst) const {

bolt/include/bolt/Profile/DataAggregator.h

Lines changed: 13 additions & 1 deletion
@@ -78,6 +78,13 @@ class DataAggregator : public DataReader {
   static bool checkPerfDataMagic(StringRef FileName);
 
 private:
+  struct LBREntry {
+    uint64_t From;
+    uint64_t To;
+    bool Mispred;
+  };
+  friend raw_ostream &operator<<(raw_ostream &OS, const LBREntry &);
+
   struct PerfBranchSample {
     SmallVector<LBREntry, 32> LBR;
   };
@@ -476,7 +483,6 @@ class DataAggregator : public DataReader {
 
   /// Debugging dump methods
   void dump() const;
-  void dump(const LBREntry &LBR) const;
   void dump(const PerfBranchSample &Sample) const;
   void dump(const PerfMemSample &Sample) const;
 
@@ -504,6 +510,12 @@ class DataAggregator : public DataReader {
 
   friend class YAMLProfileWriter;
 };
+
+inline raw_ostream &operator<<(raw_ostream &OS,
+                               const DataAggregator::LBREntry &L) {
+  OS << formatv("{0:x} -> {1:x}/{2}", L.From, L.To, L.Mispred ? 'M' : 'P');
+  return OS;
+}
 } // namespace bolt
 } // namespace llvm

bolt/include/bolt/Profile/DataReader.h

Lines changed: 0 additions & 12 deletions
@@ -32,18 +32,6 @@ namespace bolt {
 
 class BinaryFunction;
 
-struct LBREntry {
-  uint64_t From;
-  uint64_t To;
-  bool Mispred;
-};
-
-inline raw_ostream &operator<<(raw_ostream &OS, const LBREntry &LBR) {
-  OS << "0x" << Twine::utohexstr(LBR.From) << " -> 0x"
-     << Twine::utohexstr(LBR.To);
-  return OS;
-}
-
 struct Location {
   bool IsSymbol;
   StringRef Name;

bolt/lib/Core/HashUtilities.cpp

Lines changed: 1 addition & 0 deletions
@@ -67,6 +67,7 @@ std::string hashExpr(BinaryContext &BC, const MCExpr &Expr) {
         .append(hashInteger(BinaryExpr.getOpcode()))
         .append(hashExpr(BC, *BinaryExpr.getRHS()));
   }
+  case MCExpr::Specifier:
   case MCExpr::Target:
     return std::string();
   }
