Commit ddfe6e4

Merge branch 'master' of github.com:ClickHouse/ClickHouse into limit-tests-more
2 parents 68be4bf + 3fb7c4d commit ddfe6e4

207 files changed: +2829 -1050 lines


.github/workflows/master.yml

Lines changed: 1 addition & 1 deletion
@@ -776,7 +776,7 @@ jobs:
           fi
 
   build_amd_compat:
-    runs-on: [self-hosted, builder-aarch64]
+    runs-on: [self-hosted, builder]
     needs: [config_workflow, dockers_build_amd_and_merge, build_amd_tidy, build_amd_debug, build_amd_release, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_release, build_arm_asan]
     if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9jb21wYXQp') }}
     name: "Build (amd_compat)"

.github/workflows/pull_request.yml

Lines changed: 1 addition & 1 deletion
@@ -930,7 +930,7 @@ jobs:
           fi
 
   build_amd_compat:
-    runs-on: [self-hosted, builder-aarch64]
+    runs-on: [self-hosted, builder]
     needs: [config_workflow, dockers_build_amd_and_merge, build_amd_debug, build_amd_release, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_release, build_arm_asan]
     if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9jb21wYXQp') }}
     name: "Build (amd_compat)"

README.md

Lines changed: 1 addition & 0 deletions
@@ -53,6 +53,7 @@ Upcoming meetups
 * [Tokyo Meetup with Confluent](https://www.meetup.com/clickhouse-tokyo-user-group/events/306832118/) - April 15, 2025
 * [Kuala Lumper Meetup with CNCF](https://www.meetup.com/clickhouse-malaysia-meetup-group/events/306697678/) - April 16, 2025
 * [Jakarta Meetup with AWS](https://www.meetup.com/clickhouse-indonesia-user-group/events/306973747/) - April 22, 2025
+* [Denver Meetup](https://www.meetup.com/clickhouse-denver-user-group/events/306934991/) - April 23, 2025
 * [London Meetup](https://www.meetup.com/clickhouse-london-user-group/events/306047172/) - May 14, 2025
 * [Istanbul Meetup](https://www.meetup.com/clickhouse-turkiye-meetup-group/events/306978337/) - May 15, 2025

base/poco/NetSSL_OpenSSL/src/Context.cpp

Lines changed: 4 additions & 4 deletions
@@ -504,18 +504,18 @@ void Context::createSSLContext()
      * if TLS1.x was removed at OpenSSL library build time via Configure options.
      */
         case TLSV1_1_CLIENT_USE:
-            _pSSLContext = SSL_CTX_new(TLSv1_1_client_method());
+            _pSSLContext = SSL_CTX_new(TLS_client_method());
             break;
         case TLSV1_1_SERVER_USE:
-            _pSSLContext = SSL_CTX_new(TLSv1_1_server_method());
+            _pSSLContext = SSL_CTX_new(TLS_server_method());
             break;
 #endif
 #if defined(SSL_OP_NO_TLSv1_2) && !defined(OPENSSL_NO_TLS1)
         case TLSV1_2_CLIENT_USE:
-            _pSSLContext = SSL_CTX_new(TLSv1_2_client_method());
+            _pSSLContext = SSL_CTX_new(TLS_client_method());
             break;
         case TLSV1_2_SERVER_USE:
-            _pSSLContext = SSL_CTX_new(TLSv1_2_server_method());
+            _pSSLContext = SSL_CTX_new(TLS_server_method());
             break;
 #endif
         default:

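For background: OpenSSL 1.1.0 deprecated the per-version TLSv1_x_client_method()/TLSv1_x_server_method() constructors in favor of the generic TLS_client_method()/TLS_server_method(), which negotiate the highest mutually supported version; when a fixed version is required, it is pinned with SSL_CTX_set_min_proto_version()/SSL_CTX_set_max_proto_version(). The hunk above only swaps in the generic constructors; any version pinning would happen elsewhere in Context. A minimal sketch of the same pattern using Python's ssl module, as an analogy rather than the Poco code:

    import ssl

    # Generic client context: negotiates the best mutually supported TLS
    # version, analogous to SSL_CTX_new(TLS_client_method()).
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)

    # Pin the connection to exactly TLS 1.2, analogous to calling
    # SSL_CTX_set_min_proto_version()/SSL_CTX_set_max_proto_version().
    ctx.minimum_version = ssl.TLSVersion.TLSv1_2
    ctx.maximum_version = ssl.TLSVersion.TLSv1_2
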
ci/defs/job_configs.py

Lines changed: 1 addition & 1 deletion
@@ -273,7 +273,7 @@ class JobConfigs:
         RunnerLabels.BUILDER_ARM,  # BuildTypes.ARM_V80COMPAT,
         RunnerLabels.BUILDER_AMD,  # BuildTypes.AMD_FREEBSD,
         RunnerLabels.BUILDER_ARM,  # BuildTypes.PPC64LE,
-        RunnerLabels.BUILDER_ARM,  # BuildTypes.AMD_COMPAT,
+        RunnerLabels.BUILDER_AMD,  # BuildTypes.AMD_COMPAT,
         RunnerLabels.BUILDER_AMD,  # BuildTypes.AMD_MUSL,
         RunnerLabels.BUILDER_ARM,  # BuildTypes.RISCV64,
         RunnerLabels.BUILDER_AMD,  # BuildTypes.S390X,

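For context, this labels list in JobConfigs is positional: the Nth label is the runner for the Nth build type, which is why each entry carries a trailing "# BuildTypes...." comment; the fix points AMD_COMPAT at an amd64 builder, matching the workflow changes above. A minimal sketch of that pairing convention (standalone toy lists, not the real praktika definitions):

    # Toy parallel lists mirroring the positional convention in job_configs.py.
    BUILD_TYPES = ["PPC64LE", "AMD_COMPAT", "AMD_MUSL", "RISCV64"]
    RUNNER_LABELS = ["BUILDER_ARM", "BUILDER_AMD", "BUILDER_AMD", "BUILDER_ARM"]

    # Each label's trailing comment in the real file documents this pairing:
    for build_type, label in zip(BUILD_TYPES, RUNNER_LABELS):
        print(f"{build_type} builds run on {label}")
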
ci/praktika/_environment.py

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ def from_env(cls) -> "_Environment":
         RUN_URL = f"https://github.com/{REPOSITORY}/actions/runs/{RUN_ID}"
         BASE_BRANCH = os.getenv("GITHUB_BASE_REF", "")
         USER_LOGIN = ""
-        FORK_NAME = ""
+        FORK_NAME = REPOSITORY
         PR_BODY = ""
         PR_TITLE = ""
         PR_LABELS = []

ci/praktika/cidb.py

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ def json_data_generator(cls, result: Result, result_name_for_cidb):
             base_ref=env.BASE_BRANCH,
             base_repo=env.REPOSITORY,
             head_ref=env.BRANCH,
-            head_repo=env.REPOSITORY,  # TODO: remove from table?
+            head_repo=env.FORK_NAME,
             task_url="",
             instance_type=",".join(
                 filter(None, [env.INSTANCE_TYPE, env.INSTANCE_LIFE_CYCLE])

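Taken together with the _environment.py change above, this populates head_repo with the head branch's repository: FORK_NAME now defaults to the base repository rather than an empty string, so the CI DB column is filled even for same-repo branches. A minimal sketch of the fallback (standalone; the real values come from the GitHub event payload):

    import os

    # Mirrors _Environment.from_env(): the repository this workflow runs in.
    REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse")

    # Before: FORK_NAME = "" left head_repo empty for non-fork runs.
    # After: default to the base repository; for fork PRs the value is
    # presumably overwritten with the actual fork name elsewhere.
    FORK_NAME = REPOSITORY

    head_repo = FORK_NAME  # what cidb.py now writes instead of env.REPOSITORY
    print(head_repo)       # "ClickHouse/ClickHouse" for same-repo branches
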
ci/praktika/hook_html.py

Lines changed: 19 additions & 9 deletions
@@ -142,9 +142,11 @@ def push_pending_ci_report(cls, _workflow):
                 # fetch running status with start_time for current job
                 result = Result.from_fs(job.name)
             else:
-                result = Result.generate_pending(job.name)
+                result = Result.create_new(job.name, Result.Status.PENDING)
             results.append(result)
-        summary_result = Result.generate_pending(_workflow.name, results=results)
+        summary_result = Result.create_new(
+            _workflow.name, Result.Status.RUNNING, results=results
+        )
         summary_result.start_time = Utils.timestamp()
         summary_result.links.append(env.CHANGE_URL)
         summary_result.links.append(env.RUN_URL)
@@ -200,18 +202,26 @@ def configure(cls, _workflow):
                     sha=cache_record.sha,
                     job_name=skipped_job,
                 )
-                result = Result.generate_skipped(
-                    skipped_job, [report_link], "reused from cache"
+                result = Result.create_new(
+                    skipped_job,
+                    Result.Status.SKIPPED,
+                    [report_link],
+                    "reused from cache",
                 )
             else:
-                result = Result.generate_skipped(
-                    skipped_job, info=filtered_job_and_reason[skipped_job]
+                result = Result.create_new(
+                    skipped_job,
+                    Result.Status.SKIPPED,
+                    info=filtered_job_and_reason[skipped_job],
                 )
             results.append(result)
         if results:
-            assert _ResultS3.update_workflow_results(
-                _workflow.name, new_sub_results=results
-            )
+            assert (
+                _ResultS3.update_workflow_results(
+                    _workflow.name, new_sub_results=results
+                )
+                is None
+            ), "Workflow status supposed to remain 'running'"
 
     @classmethod
     def pre_run(cls, _workflow, _job):

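The reworked assertion encodes a return-value contract (visible in the result.py hunks below): _ResultS3.update_workflow_results returns the new overall status when merging sub-results changed it, and None otherwise. Appending skipped jobs must leave a running workflow running, hence the "is None" check. A minimal sketch of that contract (simplified stand-in, not praktika's implementation):

    def update_workflow_results(prev_status, new_sub_result_statuses):
        # Stand-in reduction: any failed sub-result fails the workflow;
        # otherwise the overall status is unchanged.
        new_status = (
            "failure" if "failure" in new_sub_result_statuses else prev_status
        )
        # Mirrors result.py: report the new status only when it changed.
        return new_status if new_status != prev_status else None

    # Skipped/pending sub-results leave a running workflow running -> None.
    assert update_workflow_results("running", ["skipped", "pending"]) is None
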
ci/praktika/parser.py

Lines changed: 0 additions & 4 deletions
@@ -236,10 +236,6 @@ def parse(self):
                 assert (
                     False
                 ), f"Artifact [{artifact_name}] has unsupported type [{artifact.type}]"
-            if not artifact.required_by and artifact.type != Artifact.Type.PHONY:
-                print(
-                    f"WARNING: Artifact [{artifact_name}] provided by job [{artifact.provided_by}] in workflow [{self.workflow_name}] has no job that requires it"
-                )
             if artifact.type == Artifact.Type.GH:
                 self.workflow_yaml_config.job_to_config[
                     artifact.provided_by

ci/praktika/result.py

Lines changed: 19 additions & 30 deletions
@@ -344,23 +344,10 @@ def add_ext_key_value(self, key, value):
         return self
 
     @classmethod
-    def generate_pending(cls, name, results=None):
+    def create_new(cls, name, status, links=None, info="", results=None):
         return Result(
             name=name,
-            status=Result.Status.PENDING,
-            start_time=None,
-            duration=None,
-            results=results or [],
-            files=[],
-            links=[],
-            info="",
-        )
-
-    @classmethod
-    def generate_skipped(cls, name, links=None, info="", results=None):
-        return Result(
-            name=name,
-            status=Result.Status.SKIPPED,
+            status=status,
             start_time=None,
             duration=None,
             results=results or [],
@@ -511,10 +498,10 @@ def to_stdout_formatted(self, indent="", res=""):
 
 class ResultInfo:
     SETUP_ENV_JOB_FAILED = (
-        "Failed to set up job env, it's praktika bug or misconfiguration"
+        "Failed to set up job env, it is praktika bug or misconfiguration"
    )
     PRE_JOB_FAILED = (
-        "Failed to do a job pre-run step, it's praktika bug or misconfiguration"
+        "Failed to do a job pre-run step, it is praktika bug or misconfiguration"
     )
     KILLED = "Job killed or terminated, no Result provided"
     NOT_FOUND_IMPOSSIBLE = (
@@ -556,20 +543,21 @@ def copy_result_from_s3_with_version(cls, local_path):
         env = _Environment.get()
         file_name = Path(local_path).name
         local_dir = Path(local_path).parent
-        file_name_pattern = f"{file_name}_*"
-        for file_path in local_dir.glob(file_name_pattern):
-            file_path.unlink()
-        s3_path = f"{Settings.HTML_S3_PATH}/{env.get_s3_prefix()}/"
-        S3.copy_file_from_s3_matching_pattern(
-            s3_path=s3_path, local_path=local_dir, include=file_name_pattern
+        s3_path = f"{Settings.HTML_S3_PATH}/{env.get_s3_prefix()}"
+        latest_result_file = Shell.get_output(
+            f"aws s3 ls {s3_path}/{file_name}_ | awk '{{print $4}}' | sort -r | head -n 1",
+            strict=True,
+            verbose=True,
+        )
+        version = int(latest_result_file.split("_")[-1])
+        S3.copy_file_from_s3(
+            s3_path=f"{s3_path}/{latest_result_file}", local_path=local_dir
+        )
+        Shell.check(
+            f"cp {local_dir}/{latest_result_file} {local_path}",
+            strict=True,
+            verbose=True,
         )
-        result_files = []
-        for file_path in local_dir.glob(file_name_pattern):
-            result_files.append(file_path)
-        assert result_files, "No result files found"
-        result_files.sort()
-        version = int(result_files[-1].name.split("_")[-1])
-        Shell.check(f"cp {result_files[-1]} {local_path}", strict=True, verbose=True)
         return version
 
     @classmethod
@@ -698,6 +686,7 @@ def update_workflow_results(
             # when multiple concurrent jobs attempt to update the workflow report
             time.sleep(random.uniform(0, 2))
 
+        print(f"Workflow status changed: [{prev_status}] -> [{new_status}]")
         if prev_status != new_status:
             return new_status
         else:

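Two notes on this file. First, the two single-purpose factories collapse into one parameterized create_new, so call sites state the status explicitly. Second, copy_result_from_s3_with_version stops globbing stale local copies: result files live in S3 with a numeric _<version> suffix, and the latest is now found by listing the bucket directly. A minimal usage sketch of the new classmethod (import path assumed; the report link is hypothetical):

    from praktika.result import Result

    # Old: Result.generate_pending(job_name)
    pending = Result.create_new("Build (amd_compat)", Result.Status.PENDING)

    # Old: Result.generate_skipped(job_name, [link], "reused from cache")
    skipped = Result.create_new(
        "Build (amd_compat)",
        Result.Status.SKIPPED,
        ["https://example.com/report.html"],  # hypothetical report link
        "reused from cache",
    )

    # Versioned result files look like "result.json_7"; the S3 helper now
    # takes the last listing entry and parses the numeric suffix:
    version = int("result.json_7".split("_")[-1])  # -> 7
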
ci/praktika/runner.py

Lines changed: 2 additions & 2 deletions
@@ -73,7 +73,7 @@ def generate_local_run_environment(workflow, job, pr=None, sha=None):
 
         workflow_config.dump()
 
-        Result.generate_pending(job.name).dump()
+        Result.create_from(name=job.name, status=Result.Status.PENDING).dump()
 
     def _setup_env(self, _workflow, job):
         # source env file to write data into fs (workflow config json, workflow status json)
@@ -323,7 +323,7 @@ def _post_run(
                 info=info,
             ).dump()
         elif prerun_exit_code != 0:
-            info = f"ERROR: {ResultInfo.PRE_JOB_FAILED}"
+            info = ResultInfo.PRE_JOB_FAILED
             print(info)
             # set Result with error and logs
             Result(

ci/praktika/s3.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,8 @@ def add_uploaded(cls, file_path):
7272
file_zize = cls.get_size_bytes(file_path)
7373
usage.uploaded += file_zize
7474
if file_name in usage.uploaded_details:
75-
print(f"WARNING: Duplicated upload for filename [{file_name}]")
75+
if not file_name.startswith("result_"):
76+
print(f"WARNING: Duplicated upload for filename [{file_name}]")
7677
usage.uploaded_details[file_name] += file_zize
7778
else:
7879
usage.uploaded_details[file_name] = file_zize
@@ -109,7 +110,7 @@ def clean_s3_directory(cls, s3_path, include=""):
109110
cmd = f"aws s3 rm s3://{s3_path} --recursive"
110111
if include:
111112
cmd += f' --exclude "*" --include "{include}"'
112-
cls.run_command_with_retries(cmd, retries=1)
113+
cls.run_command_with_retries(cmd, retries=1, with_stderr=True)
113114
return
114115

115116
@classmethod
@@ -196,7 +197,11 @@ def put(
196197

197198
@classmethod
198199
def run_command_with_retries(
199-
cls, command, retries=Settings.MAX_RETRIES_S3, no_strict=False
200+
cls,
201+
command,
202+
retries=Settings.MAX_RETRIES_S3,
203+
no_strict=False,
204+
with_stderr=False,
200205
):
201206
i = 0
202207
res = False
@@ -224,6 +229,8 @@ def run_command_with_retries(
224229
print(
225230
f"ERROR: aws s3 cp failed, stdout/stderr err: [{stderr}], out [{stdout}]"
226231
)
232+
elif with_stderr and (stdout or stderr):
233+
print(f"stdout: {stdout}\nstderr: {stderr}")
227234
res = ret_code == 0
228235
if not res and not no_strict:
229236
raise RuntimeError(f"s3 command failed: [{stderr}]")
@@ -275,7 +282,7 @@ def copy_file_from_s3_matching_pattern(
275282
).is_dir(), f"Path [{local_path}] does not exist or not a directory"
276283
assert s3_path.endswith("/"), f"s3 path is invalid [{s3_path}]"
277284
cmd = f'aws s3 cp s3://{s3_path} {local_path} --exclude "{exclude}" --include "{include}" --recursive'
278-
res = cls.run_command_with_retries(cmd, no_strict=no_strict)
285+
res = cls.run_command_with_retries(cmd, no_strict=no_strict, with_stderr=True)
279286
if res:
280287
print(
281288
"TODO: support StorageUsage.add_downloaded with matching pattern download"

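The new with_stderr flag opts a call into echoing command output even when the command succeeds, used here to audit the destructive "aws s3 rm" in clean_s3_directory and the pattern-matching copy. A minimal sketch of the flag's effect, with a generic shell runner standing in for praktika's internals (names assumed):

    import subprocess

    def run_with_retries(command, retries=3, with_stderr=False):
        # Simplified stand-in for S3.run_command_with_retries.
        for attempt in range(1, retries + 1):
            proc = subprocess.run(command, shell=True, capture_output=True, text=True)
            stdout, stderr = proc.stdout.strip(), proc.stderr.strip()
            if proc.returncode == 0:
                # New behavior: surface output on success when requested.
                if with_stderr and (stdout or stderr):
                    print(f"stdout: {stdout}\nstderr: {stderr}")
                return True
            print(f"ERROR (attempt {attempt}): [{stderr}]")
        return False

    run_with_retries("echo done >&2", with_stderr=True)  # surfaces "stderr: done"
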
contrib/curl-cmake/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
@@ -168,6 +168,7 @@ target_compile_definitions (_curl PRIVATE
     BUILDING_LIBCURL
     CURL_HIDDEN_SYMBOLS
     libcurl_EXPORTS
+    OPENSSL_NO_ENGINE
     CURL_OS="${CMAKE_SYSTEM_NAME}"
 )
 

contrib/grpc-cmake/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@ set(_gRPC_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/grpc")
1313

1414
if(TARGET OpenSSL::SSL)
1515
set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
16+
17+
add_definitions(-DOPENSSL_NO_ENGINE)
1618
else()
1719
set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
1820
endif()

contrib/librdkafka-cmake/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -117,6 +117,7 @@ if(WITH_SASL_CYRUS)
117117
set(WITH_SASL_SCRAM 1)
118118
set(WITH_SASL_OAUTHBEARER 1)
119119
endif()
120+
add_definitions(-DOPENSSL_NO_ENGINE)
120121
list(APPEND SRCS "${RDKAFKA_SOURCE_DIR}/rdkafka_ssl.c")
121122

122123
if(WITH_SSL AND WITH_CURL)
