
Commit d8b7e91

bajtos and juliangruber authored
feat: handle eligible deal stats via spark-api (#206)
Rework the endpoints `{miner/client/allocator}/:id/deals/eligible/summary` to return a (temporary) redirect to spark-api.

Signed-off-by: Miroslav Bajtoš <oss@bajtos.net>
Co-authored-by: Julian Gruber <julian@juliangruber.com>
1 parent 31313ec commit d8b7e91
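
In practice, callers now receive an HTTP 302 instead of an inline JSON body. A minimal sketch of the client-visible behaviour (Node 18+; the local host/port and the miner ID are hypothetical):

// Ask the stats service for an eligible-deal summary, but do not follow
// the redirect, so the new behaviour stays visible.
const res = await fetch('http://127.0.0.1:8080/miner/f01234/deals/eligible/summary', {
  redirect: 'manual'
})
console.log(res.status) // 302
console.log(res.headers.get('location'))
// e.g. https://api.filspark.com/miner/f01234/deals/eligible/summary

Clients that follow redirects (the default for fetch and for curl -L) keep working unchanged.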

File tree

10 files changed (+35 −235 lines)


.github/workflows/ci.yml

Lines changed: 0 additions & 6 deletions
@@ -25,11 +25,9 @@ jobs:
       env:
         DATABASE_URL: postgres://postgres:postgres@localhost:5432/spark_stats
         EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
-        API_DB_URL: postgres://postgres:postgres@localhost:5432/spark
         NPM_CONFIG_WORKSPACE: stats
       steps:
         - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark_evaluate"
-        - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark"
         - uses: actions/checkout@v4
         - uses: actions/setup-node@v4
           with:
@@ -57,11 +55,9 @@ jobs:
       env:
         DATABASE_URL: postgres://postgres:postgres@localhost:5432/spark_stats
         EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
-        API_DB_URL: postgres://postgres:postgres@localhost:5432/spark
         NPM_CONFIG_WORKSPACE: observer
       steps:
         - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark_evaluate"
-        - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark"
         - uses: actions/checkout@v4
         - uses: actions/setup-node@v4
           with:
@@ -100,11 +96,9 @@ jobs:
       env:
         DATABASE_URL: postgres://postgres:postgres@localhost:5432/spark_stats
         EVALUATE_DB_URL: postgres://postgres:postgres@localhost:5432/spark_evaluate
-        API_DB_URL: postgres://postgres:postgres@localhost:5432/spark
         NPM_CONFIG_WORKSPACE: observer
       steps:
         - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark_evaluate"
-        - run: psql "${DATABASE_URL}" -c "CREATE DATABASE spark"
         - uses: actions/checkout@v4
         - uses: actions/setup-node@v4
           with:

db/index.js

Lines changed: 3 additions & 25 deletions
@@ -1,5 +1,4 @@
 import { migrateWithPgClient as migrateEvaluateDB } from 'spark-evaluate/lib/migrate.js'
-import { migrate as migrateApiDB } from 'spark-api/migrations/index.js'
 import pg from 'pg'
 import { dirname, join } from 'node:path'
 import { fileURLToPath } from 'node:url'
@@ -9,18 +8,14 @@ import Postgrator from 'postgrator'
 /** @typedef {import('./typings.js').PgPools} PgPools */
 /** @typedef {import('./typings.js').PgPoolStats} PgPoolStats */
 /** @typedef {import('./typings.js').PgPoolEvaluate} PgPoolEvaluate */
-/** @typedef {import('./typings.js').PgPoolApi} PgPoolApi */
 /** @typedef {import('./typings.js').Queryable} Queryable */
 
-export { migrateEvaluateDB, migrateApiDB }
+export { migrateEvaluateDB }
 
 const {
   // DATABASE_URL points to `spark_stats` database managed by this monorepo
   DATABASE_URL = 'postgres://localhost:5432/spark_stats',
 
-  // API_DB_URL points to `spark` database managed by spark-api repo.
-  API_DB_URL = 'postgres://localhost:5432/spark',
-
   // EVALUATE_DB_URL points to `spark_evaluate` database managed by spark-evaluate repo.
   // Eventually, we should move the code updating stats from spark-evaluate to this repo
   // and then we won't need two connection strings.
@@ -84,32 +79,15 @@ export const getEvaluatePgPool = async () => {
   return evaluate
 }
 
-/**
- * @returns {Promise<PgPoolApi>}
- */
-export const getApiPgPool = async () => {
-  const stats = Object.assign(
-    new pg.Pool({
-      ...poolConfig,
-      connectionString: API_DB_URL
-    }),
-    /** @type {const} */({ db: 'api' })
-  )
-  stats.on('error', onError)
-  await stats.query('SELECT 1')
-  return stats
-}
-
 /**
  * @returns {Promise<PgPools>}
  */
 export const getPgPools = async () => {
   const stats = await getStatsPgPool()
   const evaluate = await getEvaluatePgPool()
-  const api = await getApiPgPool()
-  const end = async () => { await Promise.all([stats.end(), evaluate.end(), api.end()]) }
+  const end = async () => { await Promise.all([stats.end(), evaluate.end()]) }
 
-  return { stats, evaluate, api, end }
+  return { stats, evaluate, end }
 }
 
 /**
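
A minimal usage sketch of the slimmed-down pools object (assuming local spark_stats and spark_evaluate databases; the query is illustrative only):

import { getPgPools } from '@filecoin-station/spark-stats-db'

const pgPools = await getPgPools()
try {
  // Only the `stats` and `evaluate` pools remain after this change.
  const { rows } = await pgPools.stats.query('SELECT 1 AS ok')
  console.log(rows) // [ { ok: 1 } ]
} finally {
  // end() now closes just the two remaining pools.
  await pgPools.end()
}

Call sites that previously reached for pgPools.api now go through the redirect in stats/lib/handler.js instead.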

db/package.json

Lines changed: 0 additions & 1 deletion
@@ -14,7 +14,6 @@
   "dependencies": {
     "pg": "^8.12.0",
     "postgrator": "^7.2.0",
-    "spark-api": "https://github.com/filecoin-station/spark-api/archive/7075fb55b253d48d5d5eb4846f13a3f688d80437.tar.gz",
     "spark-evaluate": "filecoin-station/spark-evaluate#main"
   },
   "standard": {

db/typings.d.ts

Lines changed: 0 additions & 6 deletions
@@ -8,19 +8,13 @@ export interface PgPoolStats extends pg.Pool {
   db: 'stats'
 }
 
-export interface PgPoolApi extends pg.Pool {
-  db: 'api'
-}
-
 export type PgPool =
   | PgPoolEvaluate
   | PgPoolStats
-  | PgPoolApi
 
 export interface PgPools {
   stats: PgPoolStats;
   evaluate: PgPoolEvaluate;
-  api: PgPoolApi;
   end(): Promise<void>
 }
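
A small sketch of how the narrowed union behaves after this change (the helper is hypothetical, and it assumes `PgPool` is reachable through the package typings the same way `PgPools` is in stats/lib/handler.js):

/** @typedef {import('@filecoin-station/spark-stats-db').PgPool} PgPool */

/**
 * @param {PgPool} pool
 */
const describePool = (pool) => {
  // `pool.db` is now 'stats' | 'evaluate'; 'api' is no longer a member.
  return `pool connected to the ${pool.db} database`
}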

package-lock.json

Lines changed: 0 additions & 10 deletions
Some generated files are not rendered by default.

stats/bin/migrate.js

Lines changed: 0 additions & 2 deletions
@@ -1,11 +1,9 @@
 import {
   getPgPools,
-  migrateApiDB,
   migrateEvaluateDB,
   migrateStatsDB
 } from '@filecoin-station/spark-stats-db'
 
 const pgPools = await getPgPools()
 await migrateStatsDB(pgPools.stats)
 await migrateEvaluateDB(pgPools.evaluate)
-await migrateApiDB(pgPools.api)

stats/bin/spark-stats.js

Lines changed: 2 additions & 1 deletion
@@ -7,6 +7,7 @@ import { getPgPools } from '@filecoin-station/spark-stats-db'
 const {
   PORT = '8080',
   HOST = '127.0.0.1',
+  SPARK_API_BASE_URL = 'https://api.filspark.com/',
   REQUEST_LOGGING = 'true'
 } = process.env
 
@@ -17,7 +18,7 @@ const logger = {
   request: ['1', 'true'].includes(REQUEST_LOGGING) ? console.info : () => {}
 }
 
-const handler = createHandler({ pgPools, logger })
+const handler = createHandler({ SPARK_API_BASE_URL, pgPools, logger })
 const server = http.createServer(handler)
 console.log('Starting the http server on host %j port %s', HOST, PORT)
 server.listen(Number(PORT), HOST)

stats/lib/handler.js

Lines changed: 14 additions & 83 deletions
@@ -1,5 +1,5 @@
 import * as Sentry from '@sentry/node'
-import { json } from 'http-responders'
+import { redirect } from 'http-responders'
 
 import { getStatsWithFilterAndCaching } from './request-helpers.js'
 
@@ -22,18 +22,20 @@ import { handlePlatformRoutes } from './platform-routes.js'
 
 /**
  * @param {object} args
+ * @param {string} args.SPARK_API_BASE_URL
  * @param {import('@filecoin-station/spark-stats-db').PgPools} args.pgPools
  * @param {import('./typings.d.ts').Logger} args.logger
  * @returns
  */
 export const createHandler = ({
+  SPARK_API_BASE_URL,
   pgPools,
   logger
 }) => {
   return (req, res) => {
     const start = Date.now()
     logger.request(`${req.method} ${req.url} ...`)
-    handler(req, res, pgPools)
+    handler(req, res, pgPools, SPARK_API_BASE_URL)
       .catch(err => errorHandler(res, err, logger))
       .then(() => {
         logger.request(`${req.method} ${req.url} ${res.statusCode} (${Date.now() - start}ms)`)
@@ -73,8 +75,9 @@ const createRespondWithFetchFn =
  * @param {import('node:http').IncomingMessage} req
  * @param {import('node:http').ServerResponse} res
  * @param {import('@filecoin-station/spark-stats-db').PgPools} pgPools
+ * @param {string} SPARK_API_BASE_URL
  */
-const handler = async (req, res, pgPools) => {
+const handler = async (req, res, pgPools, SPARK_API_BASE_URL) => {
   // Caveat! `new URL('//foo', 'http://127.0.0.1')` would produce "http://foo/" - not what we want!
   const { pathname, searchParams } = new URL(`http://127.0.0.1${req.url}`)
   const segs = pathname.split('/').filter(Boolean)
@@ -102,11 +105,11 @@ const handler = async (req, res, pgPools) => {
   } else if (req.method === 'GET' && url === '/miners/retrieval-success-rate/summary') {
     await respond(fetchMinersRSRSummary)
   } else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'deals' && segs[3] === 'eligible' && segs[4] === 'summary') {
-    await getRetrievableDealsForMiner(req, res, pgPools.api, segs[1])
+    redirectToSparkApi(req, res, SPARK_API_BASE_URL)
   } else if (req.method === 'GET' && segs[0] === 'client' && segs[1] && segs[2] === 'deals' && segs[3] === 'eligible' && segs[4] === 'summary') {
-    await getRetrievableDealsForClient(req, res, pgPools.api, segs[1])
+    redirectToSparkApi(req, res, SPARK_API_BASE_URL)
   } else if (req.method === 'GET' && segs[0] === 'allocator' && segs[1] && segs[2] === 'deals' && segs[3] === 'eligible' && segs[4] === 'summary') {
-    await getRetrievableDealsForAllocator(req, res, pgPools.api, segs[1])
+    redirectToSparkApi(req, res, SPARK_API_BASE_URL)
   } else if (await handlePlatformRoutes(req, res, pgPools)) {
     // no-op, request was handled by handlePlatformRoute
   } else if (req.method === 'GET' && url === '/') {
@@ -141,86 +144,14 @@ const notFound = (res) => {
 }
 
 /**
- * @param {import('node:http').IncomingMessage} _req
+ * @param {import('node:http').IncomingMessage} req
  * @param {import('node:http').ServerResponse} res
- * @param {PgPools['api']} client
- * @param {string} minerId
+ * @param {string} SPARK_API_BASE_URL
  */
-const getRetrievableDealsForMiner = async (_req, res, client, minerId) => {
-  /** @type {{rows: {client_id: string; deal_count: number}[]}} */
-  const { rows } = await client.query(`
-    SELECT client_id, COUNT(cid)::INTEGER as deal_count FROM retrievable_deals
-    WHERE miner_id = $1 AND expires_at > now()
-    GROUP BY client_id
-    ORDER BY deal_count DESC, client_id ASC
-  `, [
-    minerId
-  ])
-
-  // Cache the response for 6 hours
-  res.setHeader('cache-control', `max-age=${6 * 3600}`)
-
-  const body = {
-    minerId,
-    dealCount: rows.reduce((sum, row) => sum + row.deal_count, 0),
-    clients:
-      rows.map(
-        // eslint-disable-next-line camelcase
-        ({ client_id, deal_count }) => ({ clientId: client_id, dealCount: deal_count })
-      )
-  }
-
-  json(res, body)
-}
-
-const getRetrievableDealsForClient = async (_req, res, client, clientId) => {
-  /** @type {{rows: {miner_id: string; deal_count: number}[]}} */
-  const { rows } = await client.query(`
-    SELECT miner_id, COUNT(cid)::INTEGER as deal_count FROM retrievable_deals
-    WHERE client_id = $1 AND expires_at > now()
-    GROUP BY miner_id
-    ORDER BY deal_count DESC, miner_id ASC
-  `, [
-    clientId
-  ])
-
+const redirectToSparkApi = (req, res, SPARK_API_BASE_URL) => {
   // Cache the response for 6 hours
   res.setHeader('cache-control', `max-age=${6 * 3600}`)
 
-  const body = {
-    clientId,
-    dealCount: rows.reduce((sum, row) => sum + row.deal_count, 0),
-    providers: rows.map(
-      // eslint-disable-next-line camelcase
-      ({ miner_id, deal_count }) => ({ minerId: miner_id, dealCount: deal_count })
-    )
-  }
-  json(res, body)
-}
-
-const getRetrievableDealsForAllocator = async (_req, res, client, allocatorId) => {
-  /** @type {{rows: {client_id: string; deal_count: number}[]}} */
-  const { rows } = await client.query(`
-    SELECT ac.client_id, COUNT(cid)::INTEGER as deal_count
-    FROM allocator_clients ac
-    LEFT JOIN retrievable_deals rd ON ac.client_id = rd.client_id
-    WHERE ac.allocator_id = $1 AND expires_at > now()
-    GROUP BY ac.client_id
-    ORDER BY deal_count DESC, ac.client_id ASC
-  `, [
-    allocatorId
-  ])
-
-  // Cache the response for 6 hours
-  res.setHeader('cache-control', `max-age=${6 * 3600}`)
-
-  const body = {
-    allocatorId,
-    dealCount: rows.reduce((sum, row) => sum + row.deal_count, 0),
-    clients: rows.map(
-      // eslint-disable-next-line camelcase
-      ({ client_id, deal_count }) => ({ clientId: client_id, dealCount: deal_count })
-    )
-  }
-  json(res, body)
+  const location = new URL(req.url, SPARK_API_BASE_URL).toString()
+  redirect(req, res, location, 302)
 }
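
For reference, a minimal sketch of how the Location value is computed (the base URL matches the default in stats/bin/spark-stats.js; the request path is hypothetical):

// `new URL(path, base)` resolves an absolute path against the base origin,
// which is exactly what redirectToSparkApi does with req.url.
const SPARK_API_BASE_URL = 'https://api.filspark.com/'
const location = new URL('/miner/f01234/deals/eligible/summary', SPARK_API_BASE_URL).toString()
console.log(location)
// → 'https://api.filspark.com/miner/f01234/deals/eligible/summary'

Using a 302 rather than a 301 keeps the move explicitly temporary, matching the commit message.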
