@@ -34,7 +34,7 @@ private struct ConsumerMessagesAsyncSequenceDelegate: NIOAsyncSequenceProducerDe
34
34
35
35
/// `AsyncSequence` implementation for handling messages received from the Kafka cluster (``KafkaConsumerMessage``).
36
36
public struct ConsumerMessagesAsyncSequence : AsyncSequence {
37
- public typealias Element = Result < KafkaConsumerMessage , Error > // TODO: replace with something like KafkaConsumerError
37
+ public typealias Element = Result < KafkaConsumerMessage , KafkaError >
38
38
typealias HighLowWatermark = NIOAsyncSequenceProducerBackPressureStrategies . HighLowWatermark
39
39
fileprivate let wrappedSequence : NIOAsyncSequenceProducer < Element , HighLowWatermark , ConsumerMessagesAsyncSequenceDelegate >
40
40
@@ -74,7 +74,7 @@ public final class KafkaConsumer {
74
74
75
75
// We use implicitly unwrapped optionals here as these properties need to access self upon initialization
76
76
/// Type of the values returned by the ``messages`` sequence.
77
- private typealias Element = Result < KafkaConsumerMessage , Error > // TODO: replace with a more specific Error type
77
+ private typealias Element = Result < KafkaConsumerMessage , KafkaError >
78
78
private var messagesSource : NIOAsyncSequenceProducer <
79
79
Element ,
80
80
ConsumerMessagesAsyncSequence . HighLowWatermark ,
@@ -88,6 +88,7 @@ public final class KafkaConsumer {
88
88
/// or assign the consumer to a particular topic + partition pair using ``assign(topic:partition:offset:)``.
89
89
/// - Parameter config: The ``KafkaConfig`` for configuring the ``KafkaConsumer``.
90
90
/// - Parameter logger: A logger.
91
+ /// - Throws: A ``KafkaError`` if the initialization failed.
91
92
private init (
92
93
config: KafkaConfig ,
93
94
logger: Logger
@@ -104,7 +105,7 @@ public final class KafkaConsumer {
104
105
rd_kafka_poll_set_consumer ( handle)
105
106
}
106
107
guard result == RD_KAFKA_RESP_ERR_NO_ERROR else {
107
- throw KafkaError ( rawValue : result. rawValue )
108
+ throw KafkaError . rdKafkaError ( wrapping : result)
108
109
}
109
110
110
111
self . serialQueue = DispatchQueue ( label: " swift-kafka-gsoc.consumer.serial " )
@@ -136,6 +137,7 @@ public final class KafkaConsumer {
136
137
/// - Parameter groupID: Name of the consumer group that this ``KafkaConsumer`` will create / join.
137
138
/// - Parameter config: The ``KafkaConfig`` for configuring the ``KafkaConsumer``.
138
139
/// - Parameter logger: A logger.
140
+ /// - Throws: A ``KafkaError`` if the initialization failed.
139
141
public convenience init (
140
142
topics: [ String ] ,
141
143
groupID: String ,
@@ -145,7 +147,7 @@ public final class KafkaConsumer {
145
147
var config = config
146
148
if let configGroupID = config. value ( forKey: " group.id " ) {
147
149
if configGroupID != groupID {
148
- throw KafkaError ( description : " Group ID does not match with group ID found in the configuration " )
150
+ throw KafkaError . config ( reason : " Group ID does not match with group ID found in the configuration " )
149
151
}
150
152
} else {
151
153
try config. set ( groupID, forKey: " group.id " )
@@ -164,6 +166,7 @@ public final class KafkaConsumer {
164
166
/// - Parameter offset: The topic offset where reading begins. Defaults to the offset of the last read message.
165
167
/// - Parameter config: The ``KafkaConfig`` for configuring the ``KafkaConsumer``.
166
168
/// - Parameter logger: A logger.
169
+ /// - Throws: A ``KafkaError`` if the initialization failed.
167
170
/// - Note: This consumer ignores the `group.id` property of its `config`.
168
171
public convenience init (
169
172
topic: String ,
@@ -193,6 +196,7 @@ public final class KafkaConsumer {
193
196
/// Subscribe to the given list of `topics`.
194
197
/// The partition assignment happens automatically using `KafkaConsumer`'s consumer group.
195
198
/// - Parameter topics: An array of topic names to subscribe to.
199
+ /// - Throws: A ``KafkaError`` if subscribing to the topic list failed.
196
200
private func subscribe( topics: [ String ] ) throws {
197
201
assert ( !closed)
198
202
@@ -208,15 +212,16 @@ public final class KafkaConsumer {
208
212
rd_kafka_subscribe ( handle, subscribedTopicsPointer)
209
213
}
210
214
211
- guard result. rawValue == 0 else {
212
- throw KafkaError ( rawValue : result. rawValue )
215
+ guard result == RD_KAFKA_RESP_ERR_NO_ERROR else {
216
+ throw KafkaError . rdKafkaError ( wrapping : result)
213
217
}
214
218
}
215
219
216
220
/// Assign the ``KafkaConsumer`` to a specific `partition` of a `topic`.
217
221
/// - Parameter topic: Name of the topic that this ``KafkaConsumer`` will read from.
218
222
/// - Parameter partition: Partition that this ``KafkaConsumer`` will read from.
219
223
/// - Parameter offset: The topic offset where reading begins. Defaults to the offset of the last read message.
224
+ /// - Throws: A ``KafkaError`` if the consumer could not be assigned to the topic + partition pair.
220
225
private func assign(
221
226
topic: String ,
222
227
partition: KafkaPartition ,
@@ -239,7 +244,7 @@ public final class KafkaConsumer {
239
244
}
240
245
241
246
guard result == RD_KAFKA_RESP_ERR_NO_ERROR else {
242
- throw KafkaError ( rawValue : result. rawValue )
247
+ throw KafkaError . rdKafkaError ( wrapping : result)
243
248
}
244
249
}
245
250
@@ -257,8 +262,11 @@ public final class KafkaConsumer {
257
262
return
258
263
}
259
264
messageResult = . success( message)
265
+ } catch let kafkaError as KafkaError {
266
+ messageResult = . failure( kafkaError)
260
267
} catch {
261
- messageResult = . failure( error)
268
+ self . logger. error ( " KafkaConsumer caught error: \( error) " )
269
+ return
262
270
}
263
271
264
272
let yieldresult = self . messagesSource. yield ( messageResult)
@@ -275,6 +283,7 @@ public final class KafkaConsumer {
275
283
/// This method blocks for a maximum of `timeout` milliseconds.
276
284
/// - Parameter timeout: Maximum amount of milliseconds this method waits for a new message.
277
285
/// - Returns: A ``KafkaConsumerMessage`` or `nil` if there are no new messages.
286
+ /// - Throws: A ``KafkaError`` if the received message is an error message or malformed.
278
287
private func poll( timeout: Int32 = 100 ) throws -> KafkaConsumerMessage ? {
279
288
dispatchPrecondition ( condition: . onQueue( self . serialQueue) )
280
289
assert ( !closed)
@@ -307,6 +316,7 @@ public final class KafkaConsumer {
307
316
/// Mark `message` in the topic as read and request the next message from the topic.
308
317
/// This method is only used for manual offset management.
309
318
/// - Parameter message: Last received message that shall be marked as read.
319
+ /// - Throws: A ``KafkaError`` if committing failed.
310
320
/// - Warning: This method fails if the `enable.auto.commit` configuration property is set to `true`.
311
321
public func commitSync( _ message: KafkaConsumerMessage ) async throws {
312
322
try await self . serializeWithThrowingContinuation { ( continuation: CheckedContinuation < Void , Error > ) in
@@ -323,11 +333,11 @@ public final class KafkaConsumer {
323
333
private func _commitSync( _ message: KafkaConsumerMessage ) throws {
324
334
dispatchPrecondition ( condition: . onQueue( self . serialQueue) )
325
335
guard !self . closed else {
326
- throw KafkaError ( description : " Trying to invoke method on consumer that has been closed. " )
336
+ throw KafkaError . connectionClosed ( reason : " Tried to commit message offset on a closed consumer " )
327
337
}
328
338
329
339
guard self . config. value ( forKey: " enable.auto.commit " ) == " false " else {
330
- throw KafkaError ( description : " Committing manually only works if enable.auto.commit is set to false " )
340
+ throw KafkaError . config ( reason : " Committing manually only works if enable.auto.commit is set to false " )
331
341
}
332
342
333
343
let changesList = rd_kafka_topic_partition_list_new ( 1 )
@@ -352,7 +362,7 @@ public final class KafkaConsumer {
352
362
)
353
363
}
354
364
guard result == RD_KAFKA_RESP_ERR_NO_ERROR else {
355
- throw KafkaError ( rawValue : result. rawValue )
365
+ throw KafkaError . rdKafkaError ( wrapping : result)
356
366
}
357
367
return
358
368
}
@@ -371,7 +381,7 @@ public final class KafkaConsumer {
371
381
rd_kafka_topic_partition_list_destroy ( self . subscribedTopicsPointer)
372
382
373
383
guard result == RD_KAFKA_RESP_ERR_NO_ERROR else {
374
- let error = KafkaError ( rawValue : result. rawValue )
384
+ let error = KafkaError . rdKafkaError ( wrapping : result)
375
385
self . logger. error ( " Closing KafkaConsumer failed: \( error. description) " )
376
386
return
377
387
}
0 commit comments