Skip to content

Commit 48a78b3

Browse files
authored
Merge pull request #33 from Yolean/adjust-logging-and-allow-filter-env
Adjust logging and allow for log level filtering via env KAFKA_KEYVALUE_LOG_LEVEL
2 parents 3817224 + 4a5f4af commit 48a78b3

File tree

2 files changed

+7
-11
lines changed

2 files changed

+7
-11
lines changed

src/KafkaKeyValue.ts

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,6 @@ async function produceViaPixy(fetchImpl: IFetchImpl, logger, pixyHost: string, t
9595
}
9696

9797
const json = await res.json() as PixyPostTopicKeySyncResponse;
98-
logger.debug({ res, json }, 'KafkaCache put returned');
9998

10099
return json.offset;
101100
}
@@ -209,9 +208,9 @@ export default class KafkaKeyValue {
209208
} = requestBody;
210209

211210
const expectedTopic = this.topic;
212-
this.logger.debug({ topic, expectedTopic }, 'Matching update event against expected topic');
211+
this.logger.trace({ topic, expectedTopic }, 'Matching update event against expected topic');
213212
if (topic !== expectedTopic) {
214-
this.logger.debug({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
213+
this.logger.trace({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
215214
return;
216215
}
217216

@@ -232,14 +231,14 @@ export default class KafkaKeyValue {
232231
});
233232

234233
const updatesBeingPropagated = updatesToPropagate.map(async key => {
235-
this.logger.debug({ key }, 'Received update event for key');
234+
this.logger.trace({ key }, 'Received update event for key');
236235
const value = await this.get(key);
237236
this.updateHandlers.forEach(fn => fn(key, value));
238237
});
239238

240239
await Promise.all(updatesBeingPropagated);
241240
} else {
242-
this.logger.debug({ topic }, 'No update handlers registered, update event has no effect');
241+
this.logger.trace({ topic }, 'No update handlers registered, update event has no effect');
243242
}
244243

245244
// NOTE: Letting all handlers complete before updating the metric
@@ -325,7 +324,6 @@ export default class KafkaKeyValue {
325324
const value = parseResponse(this.logger, res, this.config.gzip || false);
326325

327326
parseTiming();
328-
this.logger.debug({ key, value }, 'KafkaCache get value returned')
329327

330328
this.updateLastSeenOffsetsFromHeader(res);
331329

@@ -334,7 +332,7 @@ export default class KafkaKeyValue {
334332

335333
async streamValues(onValue: (value: any) => void): Promise<void> {
336334
if (this.config.gzip) throw new Error('Unsuported method for gzipped topics!');
337-
this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
335+
this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
338336

339337
const streamTiming = this.metrics.kafka_key_value_stream_latency_seconds.startTimer({ cache_name: this.getCacheName() });
340338
const res = await this.fetchImpl(`${this.getCacheHost()}/cache/v1/values`);
@@ -344,7 +342,7 @@ export default class KafkaKeyValue {
344342
await streamResponseBody(this.logger, res.body, onValue);
345343

346344
streamTiming();
347-
this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
345+
this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
348346

349347
this.updateLastSeenOffsetsFromHeader(res);
350348
}

src/logger.ts

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,7 @@ const globalOptions: LoggerOptions = {
2020
name: "no-logger-name-given",
2121
streams: [
2222
{
23-
level: 'debug',
24-
// TODO TypeScript didnt allow me to do this when building :(
25-
// level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
23+
level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
2624
stream: process.stdout
2725
}
2826
],

0 commit comments

Comments (0)