Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 7 additions & 1 deletion stats/lib/handler.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ import {
fetchRetrievalSuccessRate,
fetchDealSummary,
fetchDailyRetrievalResultCodes,
fetchDailyMinerRSRSummary
fetchDailyMinerRSRSummary,
fetchDailyRetrievalTimings,
fetchDailyMinerRetrievalTimings
} from './stats-fetchers.js'

import { handlePlatformRoutes } from './platform-routes.js'
Expand Down Expand Up @@ -108,6 +110,10 @@ const handler = async (req, res, pgPools, SPARK_API_BASE_URL) => {
await respond(fetchMinersRSRSummary)
} else if (req.method === 'GET' && url === '/retrieval-result-codes/daily') {
await respond(fetchDailyRetrievalResultCodes)
} else if (req.method === 'GET' && url === '/retrieval-timings/daily') {
await respond(fetchDailyRetrievalTimings)
} else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'retrieval-timings' && segs[3] === 'summary') {
await respond(fetchDailyMinerRetrievalTimings, segs[1])
} else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'retrieval-success-rate' && segs[3] === 'summary') {
await respond(fetchDailyMinerRSRSummary, segs[1])
} else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'deals' && segs[3] === 'eligible' && segs[4] === 'summary') {
Expand Down
45 changes: 45 additions & 0 deletions stats/lib/stats-fetchers.js
Original file line number Diff line number Diff line change
Expand Up @@ -290,3 +290,48 @@ export const fetchDailyRetrievalResultCodes = async (pgPools, filter) => {
const stats = Object.entries(days).map(([day, rates]) => ({ day, rates }))
return stats
}

/**
 * Fetches daily global retrieval time statistics.
 *
 * The median is taken across every value in the unnested `ttfb_p50` arrays
 * for a given day, then rounded up to a whole millisecond with CEIL.
 *
 * @param {import('@filecoin-station/spark-stats-db').PgPools} pgPools
 * @param {import('./typings.js').DateRangeFilter} filter - inclusive date range
 * @returns {Promise<Array<{ day: string, ttfb_ms: number }>>} one row per day, ordered by day
 */
export const fetchDailyRetrievalTimings = async (pgPools, { from, to }) => {
  const { rows } = await pgPools.evaluate.query(`
    SELECT
      day::text,
      CEIL(percentile_cont(0.5) WITHIN GROUP (ORDER BY ttfb_p50_values)) AS ttfb_ms
    FROM retrieval_timings, UNNEST(ttfb_p50) AS ttfb_p50_values
    WHERE day >= $1 AND day <= $2
    GROUP BY day
    ORDER BY day
  `, [
    from,
    to
  ])
  return rows
}

/**
 * Fetches per miner daily retrieval time statistics
 * @param {import('@filecoin-station/spark-stats-db').PgPools} pgPools
 * @param {import('./typings.js').DateRangeFilter} filter
 * @param {string} minerId
 */
export const fetchDailyMinerRetrievalTimings = async (pgPools, filter, minerId) => {
  // Median TTFB per day for one miner, computed over all values in the
  // unnested ttfb_p50 arrays and rounded up to whole milliseconds.
  const sql = `
    SELECT
      day::text,
      miner_id,
      CEIL(percentile_cont(0.5) WITHIN GROUP (ORDER BY ttfb_p50_values)) AS ttfb_ms
    FROM retrieval_timings, UNNEST(ttfb_p50) AS ttfb_p50_values
    WHERE miner_id = $1 AND day >= $2 AND day <= $3
    GROUP BY day, miner_id
    ORDER BY day
  `
  const queryParams = [minerId, filter.from, filter.to]
  const result = await pgPools.evaluate.query(sql, queryParams)
  return result.rows
}
73 changes: 73 additions & 0 deletions stats/test/handler.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ describe('HTTP request handler', () => {
await pgPools.evaluate.query('DELETE FROM retrieval_stats')
await pgPools.evaluate.query('DELETE FROM daily_participants')
await pgPools.evaluate.query('DELETE FROM daily_deals')
await pgPools.evaluate.query('DELETE FROM retrieval_timings')
await pgPools.stats.query('DELETE FROM daily_scheduled_rewards')
await pgPools.stats.query('DELETE FROM daily_reward_transfers')
await pgPools.stats.query('DELETE FROM daily_retrieval_result_codes')
Expand Down Expand Up @@ -767,6 +768,63 @@ describe('HTTP request handler', () => {
])
})
})

// Integration tests for the two retrieval-timing endpoints: the global
// daily medians and the per-miner daily medians. Fixture rows are seeded
// on both sides of the queried range to prove the date filter is
// inclusive of its endpoints and excludes everything else.
describe('miner retrieval timing stats', () => {
  beforeEach(async () => {
    // before the range
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1one', timeToFirstByteP50: [1000] })
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1two', timeToFirstByteP50: [1000] })
    // in the range
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1one', timeToFirstByteP50: [1000] })
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1two', timeToFirstByteP50: [1000] })

    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', timeToFirstByteP50: [123, 345] })
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', timeToFirstByteP50: [654, 789] })
    // after the range
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1one', timeToFirstByteP50: [1000] })
    await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1two', timeToFirstByteP50: [1000] })
  })

  it('lists daily retrieval timings in given date range', async () => {
    const res = await fetch(
      new URL(
        '/retrieval-timings/daily?from=2024-01-10&to=2024-01-20',
        baseUrl
      ), {
        redirect: 'manual'
      }
    )
    await assertResponseStatus(res, 200)

    // The cast previously said `success_rate` (copied from the RSR tests);
    // this endpoint returns `ttfb_ms`.
    const stats = /** @type {{ day: string, ttfb_ms: number }[]} */(
      await res.json()
    )
    // 2024-01-10 pools [123, 345, 654, 789] across both miners:
    // percentile_cont(0.5) = (345 + 654) / 2 = 499.5, CEIL -> 500.
    assert.deepStrictEqual(stats, [
      { day: '2024-01-10', ttfb_ms: 500 },
      { day: '2024-01-20', ttfb_ms: 1000 }
    ])
  })

  it('lists daily retrieval timings summary for specified miner in given date range', async () => {
    const res = await fetch(
      new URL(
        '/miner/f1one/retrieval-timings/summary?from=2024-01-10&to=2024-01-20',
        baseUrl
      ), {
        redirect: 'manual'
      }
    )
    await assertResponseStatus(res, 200)

    // The cast previously said `success_rate` (copied from the RSR tests);
    // this endpoint returns `miner_id` and `ttfb_ms`.
    const stats = /** @type {{ day: string, miner_id: string, ttfb_ms: number }[]} */(
      await res.json()
    )
    // f1one on 2024-01-10 has [123, 345]: median = 234. f1two rows are
    // excluded by the miner filter.
    assert.deepStrictEqual(stats, [
      { day: '2024-01-10', miner_id: 'f1one', ttfb_ms: 234 },
      { day: '2024-01-20', miner_id: 'f1one', ttfb_ms: 1000 }
    ])
  })
})
})

/**
Expand Down Expand Up @@ -843,3 +901,18 @@ const givenDailyDealStats = async (pgPool, {
retrievable
])
}

/**
 * Seeds one `retrieval_timings` row for the handler tests.
 *
 * @param {import('../lib/platform-stats-fetchers.js').Queryable} pgPool
 * @param {object} data
 * @param {string} data.day
 * @param {string} data.minerId - falls back to 'f1test' when null/undefined
 * @param {number[]} data.timeToFirstByteP50
 */
const givenRetrievalTimings = async (pgPool, { day, minerId, timeToFirstByteP50 }) => {
  const values = [
    day,
    minerId ?? 'f1test',
    timeToFirstByteP50
  ]
  const sql = 'INSERT INTO retrieval_timings (day, miner_id, ttfb_p50) VALUES ($1, $2, $3)'
  await pgPool.query(sql, values)
}
Loading