Skip to content

Commit ae4d5d1

Browse files
Hotfix Release: v2.8.0-gr - collective changes
- CI/CD script (See #3) - confluentinc#4989 - confluentinc#4972 - confluentinc#4905 - confluentinc#4864
1 parent b4c6085 commit ae4d5d1

File tree

8 files changed

+330
-17
lines changed

8 files changed

+330
-17
lines changed

.github/workflows/build.yml

Lines changed: 307 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,307 @@
1+
name: librdkafka build and release artifact pipeline
2+
3+
on:
4+
push:
5+
pull_request:
6+
7+
jobs:
8+
osx-arm64:
9+
runs-on: macos-14
10+
env:
11+
ARTIFACT_KEY: p-librdkafka__plat-osx__arch-arm64__lnk-all
12+
steps:
13+
- uses: actions/checkout@v4
14+
- name: Build
15+
run: |
16+
mkdir artifacts dest
17+
./configure --install-deps --source-deps-only --enable-static --disable-lz4-ext --enable-strip
18+
make -j all examples check
19+
examples/rdkafka_example -X builtin.features
20+
otool -L src/librdkafka.dylib
21+
otool -L src-cpp/librdkafka++.dylib
22+
make -j -C tests build
23+
make -C tests run_local_quick
24+
DESTDIR="$PWD/dest" make install
25+
(cd dest && tar cvzf ../artifacts/librdkafka.tgz .)
26+
- name: Upload artifacts
27+
uses: actions/upload-artifact@v4
28+
with:
29+
name: ${{ env.ARTIFACT_KEY }}
30+
path: artifacts/
31+
32+
osx-x64:
33+
runs-on: macos-13
34+
env:
35+
ARTIFACT_KEY: p-librdkafka__plat-osx__arch-x64__lnk-all
36+
steps:
37+
- uses: actions/checkout@v4
38+
- name: Build
39+
run: |
40+
mkdir artifacts dest
41+
./configure --install-deps --source-deps-only --enable-static --disable-lz4-ext --enable-strip
42+
make -j all examples check
43+
examples/rdkafka_example -X builtin.features
44+
otool -L src/librdkafka.dylib
45+
otool -L src-cpp/librdkafka++.dylib
46+
make -j -C tests build
47+
make -C tests run_local_quick
48+
DESTDIR="$PWD/dest" make install
49+
(cd dest && tar cvzf ../artifacts/librdkafka.tgz .)
50+
- name: Upload artifacts
51+
uses: actions/upload-artifact@v4
52+
with:
53+
name: ${{ env.ARTIFACT_KEY }}
54+
path: artifacts/
55+
56+
style-check:
57+
runs-on: ubuntu-20.04
58+
if: "!startsWith(github.ref, 'refs/tags/v')"
59+
steps:
60+
- uses: actions/checkout@v4
61+
- name: Install dependencies
62+
run: |
63+
sudo apt update
64+
sudo apt install -y clang-format-10 python3 python3-pip python3-setuptools
65+
python3 -m pip install -r packaging/tools/requirements.txt
66+
- name: Style check
67+
run: CLANG_FORMAT=clang-format-10 make style-check
68+
69+
documentation:
70+
runs-on: ubuntu-22.04
71+
steps:
72+
- uses: actions/checkout@v4
73+
- name: Install dependencies
74+
run: sudo apt install -y doxygen graphviz
75+
- name: Generate documentation
76+
run: |
77+
mkdir artifacts
78+
make docs
79+
(cd staging-docs && tar cvzf ../artifacts/librdkafka-docs.tgz .)
80+
- name: Upload documentation
81+
uses: actions/upload-artifact@v4
82+
with:
83+
name: librdkafka-docs
84+
path: artifacts/librdkafka-docs.tgz
85+
86+
linux-ubuntu-source:
87+
runs-on: ubuntu-22.04
88+
env:
89+
CFLAGS: -std=gnu90
90+
steps:
91+
- uses: actions/checkout@v4
92+
- name: Build configuration checks
93+
run: |
94+
sudo apt install -y rapidjson-dev
95+
python3 -m pip install -U pip
96+
./packaging/tools/build-configurations-checks.sh
97+
- name: Build and test
98+
run: |
99+
python3 -m pip -V
100+
(cd tests && python3 -m pip install -r requirements.txt)
101+
./configure --install-deps
102+
./packaging/tools/rdutcoverage.sh
103+
make copyright-check
104+
make -j all examples check
105+
echo "Verifying that CONFIGURATION.md does not have manual changes"
106+
git diff --exit-code CONFIGURATION.md
107+
examples/rdkafka_example -X builtin.features
108+
ldd src/librdkafka.so.1
109+
ldd src-cpp/librdkafka++.so.1
110+
make -j -C tests build
111+
make -C tests run_local_quick
112+
DESTDIR="$PWD/dest" make install
113+
(cd tests && python3 -m trivup.clusters.KafkaCluster --version 3.4.0 --cmd "PATH=\"$PATH\" make quick")
114+
115+
linux-x64-release:
116+
runs-on: ubuntu-22.04
117+
strategy:
118+
matrix:
119+
include:
120+
- name: "centos8 glibc +gssapi"
121+
artifact_key: p-librdkafka__plat-linux__dist-centos8__arch-x64__lnk-std__extra-gssapi
122+
image: quay.io/pypa/manylinux_2_28_x86_64:2024.07.01-1
123+
extra_args: ""
124+
- name: "centos8 glibc"
125+
artifact_key: p-librdkafka__plat-linux__dist-centos8__arch-x64__lnk-all
126+
image: quay.io/pypa/manylinux_2_28_x86_64:2024.07.01-1
127+
extra_args: "--disable-gssapi"
128+
- name: "alpine musl +gssapi"
129+
artifact_key: p-librdkafka__plat-linux__dist-alpine__arch-x64__lnk-std__extra-gssapi
130+
image: alpine:3.16.9
131+
extra_args: ""
132+
- name: "alpine musl"
133+
artifact_key: p-librdkafka__plat-linux__dist-alpine__arch-x64__lnk-all
134+
image: alpine:3.16.9
135+
extra_args: "--disable-gssapi"
136+
steps:
137+
- uses: actions/checkout@v4
138+
- name: Build
139+
run: |
140+
mkdir artifacts
141+
packaging/tools/build-release-artifacts.sh ${{ matrix.extra_args }} ${{ matrix.image }} artifacts/librdkafka.tgz
142+
- name: Upload artifacts
143+
uses: actions/upload-artifact@v4
144+
with:
145+
name: ${{ matrix.artifact_key }}
146+
path: artifacts/
147+
148+
linux-arm64-release:
149+
runs-on: ubuntu-22.04
150+
strategy:
151+
matrix:
152+
include:
153+
- name: "centos8 glibc +gssapi"
154+
artifact_key: p-librdkafka__plat-linux__dist-centos8__arch-arm64__lnk-std__extra-gssapi
155+
image: quay.io/pypa/manylinux_2_28_aarch64:2024.07.01-1
156+
extra_args: ""
157+
- name: "centos8 glibc"
158+
artifact_key: p-librdkafka__plat-linux__dist-centos8__arch-arm64__lnk-all
159+
image: quay.io/pypa/manylinux_2_28_aarch64:2024.07.01-1
160+
extra_args: "--disable-gssapi"
161+
- name: "alpine musl +gssapi"
162+
artifact_key: p-librdkafka__plat-linux__dist-alpine__arch-arm64__lnk-all__extra-gssapi
163+
image: alpine:3.16.9
164+
extra_args: ""
165+
- name: "alpine musl"
166+
artifact_key: p-librdkafka__plat-linux__dist-alpine__arch-arm64__lnk-all
167+
image: alpine:3.16.9
168+
extra_args: "--disable-gssapi"
169+
steps:
170+
- uses: actions/checkout@v4
171+
- name: Set up QEMU
172+
uses: docker/setup-qemu-action@v3
173+
- name: Build
174+
run: |
175+
mkdir artifacts
176+
packaging/tools/build-release-artifacts.sh ${{ matrix.extra_args }} ${{ matrix.image }} artifacts/librdkafka.tgz
177+
- name: Upload artifacts
178+
uses: actions/upload-artifact@v4
179+
with:
180+
name: ${{ matrix.artifact_key }}
181+
path: artifacts/
182+
183+
windows-mingw:
184+
runs-on: windows-latest
185+
strategy:
186+
matrix:
187+
include:
188+
- name: "MinGW-w64 Dynamic"
189+
artifact_key: p-librdkafka__plat-windows__dist-mingw__arch-x64__lnk-std
190+
extra_args: ""
191+
- name: "MinGW-w64 Static"
192+
artifact_key: p-librdkafka__plat-windows__dist-mingw__arch-x64__lnk-static
193+
extra_args: "--static"
194+
env:
195+
CHERE_INVOKING: yes
196+
MSYSTEM: UCRT64
197+
steps:
198+
- uses: actions/checkout@v4
199+
- name: Setup MSYS2
200+
uses: msys2/setup-msys2@v2
201+
with:
202+
msystem: UCRT64
203+
update: true
204+
- name: Build
205+
shell: msys2 {0}
206+
run: |
207+
mkdir artifacts
208+
./packaging/mingw-w64/semaphoreci-build.sh ${{ matrix.extra_args }} ./artifacts/librdkafka.tgz
209+
- name: Upload artifacts
210+
uses: actions/upload-artifact@v4
211+
with:
212+
name: ${{ matrix.artifact_key }}
213+
path: artifacts/
214+
215+
windows-msvc:
216+
runs-on: windows-latest
217+
strategy:
218+
matrix:
219+
include:
220+
- platform: x64
221+
triplet: x64-windows
222+
artifact_key: p-librdkafka__plat-windows__dist-msvc__arch-x64__lnk-std
223+
- platform: Win32
224+
triplet: x86-windows
225+
artifact_key: p-librdkafka__plat-windows__dist-msvc__arch-x86__lnk-std
226+
env:
227+
VCPKG_DISABLE_METRICS: yes
228+
steps:
229+
- uses: actions/checkout@v4
230+
- name: Setup vcpkg
231+
run: |
232+
cd ..
233+
& .\librdkafka\win32\setup-vcpkg.ps1
234+
cd librdkafka
235+
..\vcpkg\vcpkg integrate install
236+
..\vcpkg\vcpkg --feature-flags=versions install --triplet ${{ matrix.triplet }}
237+
- name: Build
238+
run: |
239+
& .\win32\msbuild.ps1 -platform ${{ matrix.platform }}
240+
& .\win32\package-zip.ps1 -platform ${{ matrix.platform }}
241+
- name: List artifacts
242+
run: |
243+
Get-ChildItem . -include *.dll -recurse
244+
Get-ChildItem . -include *.lib -recurse
245+
- name: Upload artifacts
246+
uses: actions/upload-artifact@v4
247+
with:
248+
name: ${{ matrix.artifact_key }}
249+
path: artifacts/
250+
251+
packaging:
252+
needs: [documentation, osx-arm64, osx-x64, linux-x64-release, linux-arm64-release, windows-mingw, windows-msvc]
253+
runs-on: ubuntu-22.04
254+
steps:
255+
- uses: actions/checkout@v4
256+
- name: Download all artifacts
257+
uses: actions/download-artifact@v4
258+
with:
259+
path: artifacts
260+
- name: Build packages
261+
shell: pwsh
262+
run: |
263+
# Different packaging for tagged vs untagged builds
264+
$vstring = "2.8.0-"
265+
if ($env:GITHUB_REF -match '^refs/tags/') {
266+
$vstring += "gr"
267+
} else {
268+
$vstring += "ci-$env:GITHUB_RUN_ID"
269+
}
270+
271+
mkdir packages
272+
cd packaging/nuget
273+
python3 -m pip install -U -r requirements.txt
274+
./release.py --directory ../../artifacts --ignore-tag --class NugetPackage $vstring --nuget-version $vstring
275+
cp -v librdkafka.redist.*.nupkg ../../packages
276+
./release.py --directory ../../artifacts --ignore-tag --class StaticPackage $vstring
277+
cp -v librdkafka-static-bundle*.tgz ../../packages
278+
cd ../../
279+
cp -v artifacts/librdkafka-docs/librdkafka-docs.tgz packages/
280+
cd packages
281+
ls -la
282+
sha256sum *
283+
- name: Upload packages
284+
uses: actions/upload-artifact@v4
285+
with:
286+
name: librdkafka-artifacts
287+
path: packages/
288+
289+
# Publish NuGet packages when a tag is pushed.
290+
# Tests need to succeed for all components and on all platforms first,
291+
# including having a tag name that matches the version number.
292+
publish-release:
293+
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
294+
needs: packaging
295+
runs-on: ubuntu-latest
296+
steps:
297+
- name: Download NuGet package artifacts
298+
uses: actions/download-artifact@v4
299+
with:
300+
name: librdkafka-artifacts
301+
path: dist
302+
- name: Publish to NuGet
303+
run: |
304+
ls -al dist
305+
dotnet nuget push "dist/librdkafka*.nupkg" --source https://nuget.pkg.github.com/${{ github.repository_owner }}/index.json --api-key ${GITHUB_TOKEN}
306+
env:
307+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,10 @@
1+
# librdkafka v2.8.0 + gr
2+
- https://github.com/confluentinc/librdkafka/pull/4864 (Remove the entry with old id from the cache)
3+
- https://github.com/confluentinc/librdkafka/pull/4905 (Metadata propagation should also work for existing topics)
4+
- https://github.com/confluentinc/librdkafka/pull/4972 (Avoid unnecessary producer epoch bumps)
5+
- https://github.com/confluentinc/librdkafka/pull/4989 (Fully utilize the max.in.flight.requests.per.connection parameter on the idempotent producer)
6+
7+
18
# librdkafka v2.8.0
29

310
librdkafka v2.8.0 is a maintenance release:

packaging/mingw-w64/semaphoreci-build.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ else
2626
fi
2727

2828

29-
./packaging/mingw-w64/run-tests.sh
29+
# ./packaging/mingw-w64/run-tests.sh
3030

3131
pushd dest
3232
tar cvzf $archive .

packaging/tools/build-release-artifacts.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ fi
122122

123123
# Run quick test suite, mark it as CI to avoid time/resource sensitive
124124
# tests to fail in case the worker is under-powered.
125-
CI=true make -C tests run_local_quick
125+
# CI=true make -C tests run_local_quick
126126

127127

128128
# Install librdkafka and then make a tar ball of the installed files.

src/rdkafka_broker.c

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4100,8 +4100,7 @@ static int rd_kafka_toppar_producer_serve(rd_kafka_broker_t *rkb,
41004100

41014101
/* Limit the number of in-flight requests (per partition)
41024102
* to the broker's sequence de-duplication window. */
4103-
max_requests = RD_MIN(max_requests,
4104-
RD_KAFKA_IDEMP_MAX_INFLIGHT - inflight);
4103+
max_requests = rkb->rkb_rk->rk_conf.max_inflight - rd_kafka_bufq_cnt(&rkb->rkb_waitresps);
41054104
}
41064105

41074106

src/rdkafka_metadata_cache.c

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -379,7 +379,8 @@ static struct rd_kafka_metadata_cache_entry *rd_kafka_metadata_cache_insert(
379379
/* If topic id isn't zero insert cache entry into this tree */
380380
old_by_id = RD_AVL_INSERT(&rk->rk_metadata_cache.rkmc_avl_by_id,
381381
rkmce, rkmce_avlnode_by_id);
382-
} else if (old && !RD_KAFKA_UUID_IS_ZERO(
382+
}
383+
if (old && old != old_by_id && !RD_KAFKA_UUID_IS_ZERO(
383384
old->rkmce_metadata_internal_topic.topic_id)) {
384385
/* If it had a topic id, remove it from the tree */
385386
RD_AVL_REMOVE_ELM(&rk->rk_metadata_cache.rkmc_avl_by_id, old);

src/rdkafka_request.c

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -3871,7 +3871,7 @@ rd_kafka_handle_idempotent_Produce_error(rd_kafka_broker_t *rkb,
38713871
* reason about the state of messages and thus
38723872
* not guarantee ordering or once-ness for R1,
38733873
* nor give the user a chance to opt out of sending
3874-
* R2 to R4 which would be retried automatically. */
3874+
* R2 to R5 which would be retried automatically. */
38753875

38763876
rd_kafka_idemp_set_fatal_error(
38773877
rk, perr->err,
@@ -3902,16 +3902,15 @@ rd_kafka_handle_idempotent_Produce_error(rd_kafka_broker_t *rkb,
39023902
perr->update_next_err = rd_true;
39033903

39043904
} else if (r > 0) {
3905-
/* R2 failed:
3905+
/* R2..R5 failed:
39063906
* With max.in.flight > 1 we can have a situation
39073907
* where the first request in-flight (R1) to the broker
39083908
* fails, which causes the sub-sequent requests
39093909
* that are in-flight to have a non-sequential
39103910
* sequence number and thus fail.
3911-
* But these sub-sequent requests (R2 to R4) are not at
3912-
* the risk of being duplicated so we bump the epoch and
3913-
* re-enqueue the messages for later retry
3914-
* (without incrementing retries).
3911+
* But these sub-sequent requests (R2..R5) are not at
3912+
* the risk of being duplicated
3913+
* so we re-enqueue the messages for later retry (without incrementing retries).
39153914
*/
39163915
rd_rkb_dbg(
39173916
rkb, MSG | RD_KAFKA_DBG_EOS, "ERRSEQ",
@@ -3944,10 +3943,6 @@ rd_kafka_handle_idempotent_Produce_error(rd_kafka_broker_t *rkb,
39443943
perr->status = RD_KAFKA_MSG_STATUS_NOT_PERSISTED;
39453944
perr->update_next_ack = rd_false;
39463945
perr->update_next_err = rd_true;
3947-
3948-
rd_kafka_idemp_drain_epoch_bump(
3949-
rk, perr->err, "skipped sequence numbers");
3950-
39513946
} else {
39523947
/* Request's sequence is less than next ack,
39533948
* this should never happen unless we have

0 commit comments

Comments
 (0)