@@ -95,7 +95,6 @@ async function produceViaPixy(fetchImpl: IFetchImpl, logger, pixyHost: string, t
   }

   const json = await res.json() as PixyPostTopicKeySyncResponse;
-  logger.debug({ res, json }, 'KafkaCache put returned');

   return json.offset;
 }
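
For context, `produceViaPixy` treats the response as a sync-produce acknowledgement and only ever reads its `offset` field. A minimal sketch of the shape this code depends on, assuming a kafka-pixy style sync produce endpoint (the real `PixyPostTopicKeySyncResponse` declaration is not shown in this diff and may carry more fields):

```ts
// Sketch only: the diff shows just `json.offset` being read.
// Any field beyond `offset` is an assumption, not confirmed by this code.
interface PixyPostTopicKeySyncResponseSketch {
  partition?: number; // assumed; not referenced by produceViaPixy
  offset: number;     // the only field the function actually uses
}
```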
@@ -209,9 +208,9 @@ export default class KafkaKeyValue {
     } = requestBody;

     const expectedTopic = this.topic;
-    this.logger.debug({ topic, expectedTopic }, 'Matching update event against expected topic');
+    this.logger.trace({ topic, expectedTopic }, 'Matching update event against expected topic');
     if (topic !== expectedTopic) {
-      this.logger.debug({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
+      this.logger.trace({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
       return;
     }

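
The `(mergingObject, message)` call signature suggests a pino-compatible logger, in which case the practical effect of the change is that these per-event lines disappear at the usual `info` and `debug` levels and only appear when the level is lowered to `trace`. A hedged illustration, assuming pino (the library may inject a different but compatible logger):

```ts
import pino from 'pino';

// At level 'debug', trace() calls are no-ops, so the per-event
// topic-matching lines above no longer reach the log output.
const logger = pino({ level: 'debug' });
logger.trace({ topic: 't1', expectedTopic: 't1' }, 'suppressed');
logger.debug({ topic: 't1' }, 'still emitted');

// Opt back in to the chatty per-event logging when debugging:
logger.level = 'trace';
logger.trace({ topic: 't1' }, 'now emitted');
```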
@@ -232,14 +231,14 @@ export default class KafkaKeyValue {
       });

       const updatesBeingPropagated = updatesToPropagate.map(async key => {
-        this.logger.debug({ key }, 'Received update event for key');
+        this.logger.trace({ key }, 'Received update event for key');
         const value = await this.get(key);
         this.updateHandlers.forEach(fn => fn(key, value));
       });

       await Promise.all(updatesBeingPropagated);
     } else {
-      this.logger.debug({ topic }, 'No update handlers registered, update event has no effect');
+      this.logger.trace({ topic }, 'No update handlers registered, update event has no effect');
     }

     // NOTE: Letting all handlers complete before updating the metric
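
Usage-wise, each handler invoked here receives `(key, value)`, where the value is re-fetched via `this.get(key)` before dispatch. A sketch of the consumer side, assuming a registration method named `onUpdate` (hypothetical name; the diff only shows `this.updateHandlers` being iterated):

```ts
// Hypothetical wiring; only the (key, value) handler signature is
// visible in the diff, the registration API name is assumed.
declare const cache: {
  onUpdate(fn: (key: string, value: any) => void): void; // assumed
  get(key: string): Promise<any>;
};

cache.onUpdate((key, value) => {
  // Called once per key in an update event, after the fresh value
  // has been fetched from the cache host.
  console.log(`key ${key} changed`, value);
});
```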
@@ -325,7 +324,6 @@ export default class KafkaKeyValue {
     const value = parseResponse(this.logger, res, this.config.gzip || false);

     parseTiming();
-    this.logger.debug({ key, value }, 'KafkaCache get value returned')

     this.updateLastSeenOffsetsFromHeader(res);

@@ -334,7 +332,7 @@ export default class KafkaKeyValue {

   async streamValues(onValue: (value: any) => void): Promise<void> {
     if (this.config.gzip) throw new Error('Unsuported method for gzipped topics!');
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache started');

     const streamTiming = this.metrics.kafka_key_value_stream_latency_seconds.startTimer({ cache_name: this.getCacheName() });
     const res = await this.fetchImpl(`${this.getCacheHost()}/cache/v1/values`);
@@ -344,7 +342,7 @@ export default class KafkaKeyValue {
     await streamResponseBody(this.logger, res.body, onValue);

     streamTiming();
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');

     this.updateLastSeenOffsetsFromHeader(res);
   }
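
The `streamValues` signature shown above takes a per-value callback and resolves once the response body has been fully consumed, so a caller can materialize the whole topic like this (sketch; construction of the `cache` instance is elided):

```ts
// Sketch: collect every value in the cache into an array.
// Note streamValues throws for gzip-configured topics, per the guard above.
async function snapshotValues(
  cache: { streamValues(onValue: (value: any) => void): Promise<void> }
): Promise<any[]> {
  const values: any[] = [];
  await cache.streamValues(v => values.push(v));
  return values;
}
```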