|
2 | 2 |
|
3 | 3 | const {
|
4 | 4 | addHook,
|
5 |
| - channel, |
6 |
| - AsyncResource |
| 5 | + channel |
7 | 6 | } = require('./helpers/instrument')
|
8 | 7 | const shimmer = require('../../datadog-shimmer')
|
9 | 8 |
|
@@ -32,7 +31,6 @@ const disabledHeaderWeakSet = new WeakSet()
|
32 | 31 | // we need to store the offset per partition per topic for the consumer to track offsets for DSM
|
33 | 32 | const latestConsumerOffsets = new Map()
|
34 | 33 |
|
35 |
| -// Customize the instrumentation for Confluent Kafka JavaScript |
36 | 34 | addHook({ name: '@confluentinc/kafka-javascript', versions: ['>=1.0.0'] }, (module) => {
|
37 | 35 | // Hook native module classes first
|
38 | 36 | instrumentBaseModule(module)
|
@@ -62,23 +60,25 @@ function instrumentBaseModule (module) {
|
62 | 60 |
|
63 | 61 | const brokers = this.globalConfig?.['bootstrap.servers']
|
64 | 62 |
|
65 |
| - const asyncResource = new AsyncResource('bound-anonymous-fn') |
66 |
| - return asyncResource.runInAsyncScope(() => { |
67 |
| - try { |
68 |
| - channels.producerStart.publish({ |
69 |
| - topic, |
70 |
| - messages: [{ key, value: message }], |
71 |
| - bootstrapServers: brokers |
72 |
| - }) |
| 63 | + const ctx = { |
| 64 | + topic, |
| 65 | + messages: [{ key, value: message }], |
| 66 | + bootstrapServers: brokers |
| 67 | + } |
73 | 68 |
|
74 |
| - const result = produce.apply(this, arguments) |
| 69 | + return channels.producerStart.runStores(ctx, () => { |
| 70 | + try { |
| 71 | + const headers = convertHeaders(ctx.messages[0].headers) |
| 72 | + const result = produce.apply(this, [topic, partition, message, key, timestamp, opaque, headers]) |
75 | 73 |
|
76 |
| - channels.producerCommit.publish() |
77 |
| - channels.producerFinish.publish() |
| 74 | + ctx.result = result |
| 75 | + channels.producerCommit.publish(ctx) |
| 76 | + channels.producerFinish.publish(ctx) |
78 | 77 | return result
|
79 | 78 | } catch (error) {
|
80 |
| - channels.producerError.publish(error) |
81 |
| - channels.producerFinish.publish() |
| 79 | + ctx.error = error |
| 80 | + channels.producerError.publish(ctx) |
| 81 | + channels.producerFinish.publish(ctx) |
82 | 82 | throw error
|
83 | 83 | }
|
84 | 84 | })
|
@@ -110,32 +110,39 @@ function instrumentBaseModule (module) {
|
110 | 110 | callback = numMessages
|
111 | 111 | }
|
112 | 112 |
|
| 113 | + const ctx = { |
| 114 | + groupId |
| 115 | + } |
113 | 116 | // Handle callback-based consumption
|
114 | 117 | if (typeof callback === 'function') {
|
115 | 118 | return consume.call(this, numMessages, function wrappedCallback (err, messages) {
|
116 | 119 | if (messages && messages.length > 0) {
|
117 | 120 | messages.forEach(message => {
|
118 |
| - channels.consumerStart.publish({ |
119 |
| - topic: message?.topic, |
120 |
| - partition: message?.partition, |
121 |
| - message, |
122 |
| - groupId |
123 |
| - }) |
| 121 | + ctx.topic = message?.topic |
| 122 | + ctx.partition = message?.partition |
| 123 | + ctx.message = message |
| 124 | + |
| 125 | + // TODO: We should be using publish here instead of runStores but we need bindStart to be called |
| 126 | + channels.consumerStart.runStores(ctx, () => {}) |
124 | 127 | updateLatestOffset(message?.topic, message?.partition, message?.offset, groupId)
|
125 | 128 | })
|
126 | 129 | }
|
127 | 130 |
|
128 | 131 | if (err) {
|
129 |
| - channels.consumerError.publish(err) |
| 132 | + ctx.error = err |
| 133 | + channels.consumerError.publish(ctx) |
130 | 134 | }
|
131 | 135 |
|
132 | 136 | try {
|
133 | 137 | const result = callback.apply(this, arguments)
|
134 |
| - channels.consumerFinish.publish() |
| 138 | + if (messages && messages.length > 0) { |
| 139 | + channels.consumerFinish.publish(ctx) |
| 140 | + } |
135 | 141 | return result
|
136 | 142 | } catch (error) {
|
137 |
| - channels.consumerError.publish(error) |
138 |
| - channels.consumerFinish.publish() |
| 143 | + ctx.error = error |
| 144 | + channels.consumerError.publish(ctx) |
| 145 | + channels.consumerFinish.publish(ctx) |
139 | 146 | throw error
|
140 | 147 | }
|
141 | 148 | })
|
@@ -204,45 +211,44 @@ function instrumentKafkaJS (kafkaJS) {
|
204 | 211 | return send.apply(this, arguments)
|
205 | 212 | }
|
206 | 213 |
|
207 |
| - const asyncResource = new AsyncResource('bound-anonymous-fn') |
208 |
| - return asyncResource.runInAsyncScope(() => { |
209 |
| - try { |
210 |
| - channels.producerStart.publish({ |
211 |
| - topic: payload?.topic, |
212 |
| - messages: payload?.messages || [], |
213 |
| - bootstrapServers: kafka._ddBrokers, |
214 |
| - disableHeaderInjection: disabledHeaderWeakSet.has(producer) |
215 |
| - }) |
| 214 | + const ctx = { |
| 215 | + topic: payload?.topic, |
| 216 | + messages: payload?.messages || [], |
| 217 | + bootstrapServers: kafka._ddBrokers, |
| 218 | + disableHeaderInjection: disabledHeaderWeakSet.has(producer) |
| 219 | + } |
216 | 220 |
|
| 221 | + return channels.producerStart.runStores(ctx, () => { |
| 222 | + try { |
217 | 223 | const result = send.apply(this, arguments)
|
218 | 224 |
|
219 |
| - result.then( |
220 |
| - asyncResource.bind(res => { |
221 |
| - channels.producerCommit.publish(res) |
222 |
| - channels.producerFinish.publish() |
223 |
| - }), |
224 |
| - asyncResource.bind(err => { |
225 |
| - if (err) { |
226 |
| - // Fixes bug where we would inject message headers for kafka brokers |
227 |
| - // that don't support headers (version <0.11). On the error, we disable |
228 |
| - // header injection. Tnfortunately the error name / type is not more specific. |
229 |
| - // This approach is implemented by other tracers as well. |
230 |
| - if (err.name === 'KafkaJSError' && err.type === 'ERR_UNKNOWN') { |
231 |
| - disabledHeaderWeakSet.add(producer) |
232 |
| - log.error('Kafka Broker responded with UNKNOWN_SERVER_ERROR (-1). ' + |
233 |
| - 'Please look at broker logs for more information. ' + |
234 |
| - 'Tracer message header injection for Kafka is disabled.') |
235 |
| - } |
236 |
| - channels.producerError.publish(err) |
| 225 | + result.then((res) => { |
| 226 | + ctx.result = res |
| 227 | + channels.producerCommit.publish(ctx) |
| 228 | + channels.producerFinish.publish(ctx) |
| 229 | + }, (err) => { |
| 230 | + if (err) { |
| 231 | + // Fixes bug where we would inject message headers for kafka brokers |
| 232 | + // that don't support headers (version <0.11). On the error, we disable |
| 233 | + // header injection. Unfortunately the error name / type is not more specific. |
| 234 | + // This approach is implemented by other tracers as well. |
| 235 | + if (err.name === 'KafkaJSError' && err.type === 'ERR_UNKNOWN') { |
| 236 | + disabledHeaderWeakSet.add(producer) |
| 237 | + log.error('Kafka Broker responded with UNKNOWN_SERVER_ERROR (-1). ' + |
| 238 | + 'Please look at broker logs for more information. ' + |
| 239 | + 'Tracer message header injection for Kafka is disabled.') |
237 | 240 | }
|
238 |
| - channels.producerFinish.publish() |
239 |
| - }) |
240 |
| - ) |
| 241 | + ctx.error = err |
| 242 | + channels.producerError.publish(ctx) |
| 243 | + } |
| 244 | + channels.producerFinish.publish(ctx) |
| 245 | + }) |
241 | 246 |
|
242 | 247 | return result
|
243 | 248 | } catch (e) {
|
244 |
| - channels.producerError.publish(e) |
245 |
| - channels.producerFinish.publish() |
| 249 | + ctx.error = e |
| 250 | + channels.producerError.publish(ctx) |
| 251 | + channels.producerFinish.publish(ctx) |
246 | 252 | throw e
|
247 | 253 | }
|
248 | 254 | })
|
@@ -350,33 +356,37 @@ function wrapKafkaCallback (callback, { startCh, commitCh, finishCh, errorCh },
|
350 | 356 | return function wrappedKafkaCallback (payload) {
|
351 | 357 | const commitPayload = getPayload(payload)
|
352 | 358 |
|
353 |
| - const asyncResource = new AsyncResource('bound-anonymous-fn') |
354 |
| - return asyncResource.runInAsyncScope(() => { |
355 |
| - startCh.publish(commitPayload) |
| 359 | + const ctx = { |
| 360 | + extractedArgs: commitPayload |
| 361 | + } |
356 | 362 |
|
| 363 | + return startCh.runStores(ctx, () => { |
357 | 364 | updateLatestOffset(commitPayload?.topic, commitPayload?.partition, commitPayload?.offset, commitPayload?.groupId)
|
358 | 365 |
|
359 | 366 | try {
|
360 | 367 | const result = callback.apply(this, arguments)
|
361 | 368 |
|
362 | 369 | if (result && typeof result.then === 'function') {
|
363 | 370 | return result
|
364 |
| - .then(asyncResource.bind(res => { |
365 |
| - finishCh.publish() |
| 371 | + .then((res) => { |
| 372 | + ctx.result = res |
| 373 | + finishCh.publish(ctx) |
366 | 374 | return res
|
367 |
| - })) |
368 |
| - .catch(asyncResource.bind(err => { |
369 |
| - errorCh.publish(err) |
370 |
| - finishCh.publish() |
| 375 | + }) |
| 376 | + .catch((err) => { |
| 377 | + ctx.error = err |
| 378 | + errorCh.publish(ctx) |
| 379 | + finishCh.publish(ctx) |
371 | 380 | throw err
|
372 |
| - })) |
| 381 | + }) |
373 | 382 | } else {
|
374 |
| - finishCh.publish() |
| 383 | + finishCh.publish(ctx) |
375 | 384 | return result
|
376 | 385 | }
|
377 | 386 | } catch (error) {
|
378 |
| - errorCh.publish(error) |
379 |
| - finishCh.publish() |
| 387 | + ctx.error = error |
| 388 | + errorCh.publish(ctx) |
| 389 | + finishCh.publish(ctx) |
380 | 390 | throw error
|
381 | 391 | }
|
382 | 392 | })
|
@@ -404,3 +414,8 @@ function updateLatestOffset (topic, partition, offset, groupId) {
|
404 | 414 | function getLatestOffsets () {
|
405 | 415 | return [...latestConsumerOffsets.values()]
|
406 | 416 | }
|
| 417 | + |
/**
 * Convert a headers object ({ key: value, ... }) into the array-of-single-entry
 * objects format expected by the native produce() API:
 * [{ key1: value1 }, { key2: value2 }, ...].
 *
 * @param {Object|null|undefined} headers - map of header names to values
 * @returns {Array<Object>|undefined} converted headers, or undefined when no
 *   headers object was provided (lets the native call keep its default)
 */
function convertHeaders (headers) {
  // Messages frequently carry no headers; Object.entries(undefined) would throw
  // a TypeError and break the instrumented produce() call, so guard first.
  if (headers == null) return undefined
  // One { key: value } object per header entry, with both key and value stringified.
  return Object.entries(headers).map(([key, value]) => ({ [key.toString()]: value.toString() }))
}
0 commit comments