
Commit 44b259a

feat: telemetry - loop duration & delay
Signed-off-by: Miroslav Bajtoš <oss@bajtos.net>
1 parent 3e20055 · commit 44b259a

2 files changed: +47 -7 lines

observer/bin/spark-observer.js

Lines changed: 10 additions & 7 deletions
@@ -3,7 +3,7 @@ import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
 import { ethers } from 'ethers'
 import * as Sentry from '@sentry/node'
 import timers from 'node:timers/promises'
-import { InfluxDB } from '@influxdata/influxdb-client'
+import { createInflux } from '../lib/telemetry.js'
 import assert from 'node:assert/strict'

 import { RPC_URL, rpcHeaders } from '../lib/config.js'
@@ -26,11 +26,7 @@ const provider = new ethers.JsonRpcProvider(fetchRequest, null, { polling: true

 const ieContract = new ethers.Contract(SparkImpactEvaluator.ADDRESS, SparkImpactEvaluator.ABI, provider)

-const influx = new InfluxDB({
-  url: 'https://eu-central-1-1.aws.cloud2.influxdata.com',
-  // spark-stats-observer-read
-  token: INFLUXDB_TOKEN
-})
+const { influx, recordTelemetry } = createInflux(INFLUXDB_TOKEN)

 const influxQueryApi = influx.getQueryApi('Filecoin Station')

@@ -47,7 +43,14 @@ const loop = async (name, fn, interval) => {
     }
     const dt = Date.now() - start
     console.log(`Loop "${name}" took ${dt}ms`)
-    await timers.setTimeout(interval - dt)
+    recordTelemetry(`loop_${name.replaceAll(' ', '_')}`, point => {
+      point.intField('interval_ms', interval)
+      point.intField('duration_ms', dt)
+      point.intField('delay_ms', interval - dt)
+    })
+    if (dt < interval) {
+      await timers.setTimeout(interval - dt)
+    }
   }
 }
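
For orientation, here is a minimal sketch of how the whole `loop` helper reads after this change. Everything outside the hunk (the `while`/`try` scaffolding and the `Sentry.captureException` call) is an assumption inferred from the visible context lines and the imports in this file, not part of the diff itself.

// Sketch only: code outside the hunk above is assumed, not taken from the commit.
const loop = async (name, fn, interval) => {
  while (true) {
    const start = Date.now()
    try {
      await fn()
    } catch (e) {
      console.error(e)
      Sentry.captureException(e)
    }
    const dt = Date.now() - start
    console.log(`Loop "${name}" took ${dt}ms`)

    // New: report how long this iteration took and how long we are about to sleep.
    recordTelemetry(`loop_${name.replaceAll(' ', '_')}`, point => {
      point.intField('interval_ms', interval)
      point.intField('duration_ms', dt)
      point.intField('delay_ms', interval - dt)
    })

    // New: skip the sleep when the iteration overran its interval; previously
    // `interval - dt` could be negative here.
    if (dt < interval) {
      await timers.setTimeout(interval - dt)
    }
  }
}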

observer/lib/telemetry.js

Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
+import { InfluxDB, Point } from '@influxdata/influxdb-client'
+import createDebug from 'debug'
+
+const debug = createDebug('spark:observer:telemetry')
+
+export const createInflux = token => {
+  const influx = new InfluxDB({
+    url: 'https://eu-central-1-1.aws.cloud2.influxdata.com',
+    // bucket permissions: spark-evaluate:read spark-observer:write
+    token
+  })
+  const writeClient = influx.getWriteApi(
+    'Filecoin Station', // org
+    'spark-observer', // bucket
+    'ms' // precision
+  )
+  setInterval(() => {
+    writeClient.flush().catch(console.error)
+  }, 10_000).unref()
+
+  return {
+    influx,
+
+    /**
+     * @param {string} name
+     * @param {(p: Point) => void} fn
+     */
+    recordTelemetry: (name, fn) => {
+      const point = new Point(name)
+      fn(point)
+      writeClient.writePoint(point)
+      debug('%s %o', name, point)
+    }
+  }
+}
+
+export { Point }
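
As a usage note, here is a minimal sketch of how a consumer records a point with the new helper. The measurement name and field values are illustrative only, and the token is assumed to come from the environment; the real call site in observer/bin/spark-observer.js passes INFLUXDB_TOKEN from the config module.

import { createInflux } from '../lib/telemetry.js'

// One shared client per process; buffered points are flushed every 10 seconds
// by the interval timer set up inside createInflux().
const { influx, recordTelemetry } = createInflux(process.env.INFLUXDB_TOKEN)

// Record a single measurement; the callback receives an influxdb-client Point.
// 'loop_observe_transfers' is a hypothetical measurement name for illustration.
recordTelemetry('loop_observe_transfers', point => {
  point.intField('interval_ms', 10_000)
  point.intField('duration_ms', 1_234)
  point.intField('delay_ms', 8_766)
})

// The raw InfluxDB client remains exposed for read queries:
const queryApi = influx.getQueryApi('Filecoin Station')

Because the flush timer is unref()'d, it will not keep the Node.js process alive on its own; it only drains buffered points while the observer is running.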
