192 changes: 192 additions & 0 deletions tests/unit/handlers/pools.handlers.test.js
@@ -0,0 +1,192 @@
'use strict'

const test = require('brittle')
const {
  getPoolBalanceHistory,
  flattenTransactionResults,
  groupByBucket
} = require('../../../workers/lib/server/handlers/pools.handlers')

test('getPoolBalanceHistory - happy path', async (t) => {
  const mockCtx = {
    conf: {
      orks: [{ rpcPublicKey: 'key1' }]
    },
    net_r0: {
      jRequest: async () => {
        return [{
          ts: '1700006400000',
          transactions: [{
            username: 'user1',
            changed_balance: 0.001,
            mining_extra: { hash_rate: 611000000000000 }
          }]
        }]
      }
    }
  }

  const mockReq = {
    query: { start: 1700000000000, end: 1700100000000, range: '1D' },
    params: {}
  }

  const result = await getPoolBalanceHistory(mockCtx, mockReq, {})
  t.ok(result.log, 'should return log array')
  t.ok(Array.isArray(result.log), 'log should be array')
  t.ok(result.log.length > 0, 'should have entries')
  const entry = result.log[0]
  t.ok(entry.hashrate > 0, 'should include hashrate')
  t.ok(entry.revenue > 0, 'should include revenue')
  t.pass()
})

test('getPoolBalanceHistory - with pool filter', async (t) => {
  let capturedPayload = null
  const mockCtx = {
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: {
      jRequest: async (key, method, payload) => {
        capturedPayload = payload
        return [{
          ts: '1700006400000',
          transactions: [
            { username: 'user1', changed_balance: 0.001 },
            { username: 'user2', changed_balance: 0.002 }
          ]
        }]
      }
    }
  }

  const mockReq = {
    query: { start: 1700000000000, end: 1700100000000 },
    params: { pool: 'user1' }
  }

  const result = await getPoolBalanceHistory(mockCtx, mockReq, {})
  t.ok(result.log, 'should return log array')
  t.is(capturedPayload.query.pool, 'user1', 'should pass pool filter in RPC payload')
  t.pass()
})

test('getPoolBalanceHistory - "all" pool filter returns all pools', async (t) => {
  const mockCtx = {
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: {
      jRequest: async () => {
        return [{
          ts: '1700006400000',
          transactions: [
            { username: 'user1', changed_balance: 0.001 },
            { username: 'user2', changed_balance: 0.002 }
          ]
        }]
      }
    }
  }

  const mockReq = {
    query: { start: 1700000000000, end: 1700100000000 },
    params: { pool: 'all' }
  }

  const result = await getPoolBalanceHistory(mockCtx, mockReq, {})
  t.ok(result.log.length > 0, 'should return entries for all pools')
  t.pass()
})

test('getPoolBalanceHistory - missing start throws', async (t) => {
  const mockCtx = {
    conf: { orks: [] },
    net_r0: { jRequest: async () => ({}) }
  }

  try {
    await getPoolBalanceHistory(mockCtx, { query: { end: 1700100000000 }, params: {} }, {})
    t.fail('should have thrown')
  } catch (err) {
    t.is(err.message, 'ERR_MISSING_START_END', 'should throw missing start/end error')
  }
  t.pass()
})

test('getPoolBalanceHistory - invalid range throws', async (t) => {
  const mockCtx = {
    conf: { orks: [] },
    net_r0: { jRequest: async () => ({}) }
  }

  try {
    await getPoolBalanceHistory(mockCtx, { query: { start: 1700100000000, end: 1700000000000 }, params: {} }, {})
    t.fail('should have thrown')
  } catch (err) {
    t.is(err.message, 'ERR_INVALID_DATE_RANGE', 'should throw invalid range error')
  }
  t.pass()
})

test('getPoolBalanceHistory - empty ork results', async (t) => {
  const mockCtx = {
    conf: { orks: [{ rpcPublicKey: 'key1' }] },
    net_r0: { jRequest: async () => ({}) }
  }

  const result = await getPoolBalanceHistory(mockCtx, { query: { start: 1700000000000, end: 1700100000000 }, params: {} }, {})
  t.ok(result.log, 'should return log array')
  t.is(result.log.length, 0, 'log should be empty')
  t.pass()
})

test('flattenTransactionResults - extracts daily entries from ext-data', (t) => {
  const results = [
    [{
      ts: '1700006400000',
      transactions: [
        { username: 'user1', changed_balance: 0.001, mining_extra: { hash_rate: 500000 } },
        { username: 'user2', changed_balance: 0.002, mining_extra: { hash_rate: 600000 } }
      ]
    }]
  ]
  const entries = flattenTransactionResults(results)
  t.is(entries.length, 1, 'should have 1 daily entry')
  t.ok(entries[0].revenue > 0, 'should have revenue')
  t.ok(entries[0].hashrate > 0, 'should have hashrate')
  t.pass()
})

test('flattenTransactionResults - handles error results', (t) => {
  const results = [{ error: 'timeout' }]
  const entries = flattenTransactionResults(results)
  t.is(entries.length, 0, 'should be empty for errors')
  t.pass()
})

test('groupByBucket - groups by daily bucket', (t) => {
  const entries = [
    { ts: 1700006400000, revenue: 100 },
    { ts: 1700050000000, revenue: 200 },
    { ts: 1700092800000, revenue: 300 }
  ]
  const bucketSize = 86400000
  const buckets = groupByBucket(entries, bucketSize)
  t.ok(typeof buckets === 'object', 'should return object')
  t.ok(Object.keys(buckets).length >= 1, 'should have at least one bucket')
  t.pass()
})

test('groupByBucket - handles empty entries', (t) => {
  const buckets = groupByBucket([], 86400000)
  t.is(Object.keys(buckets).length, 0, 'should be empty')
  t.pass()
})

test('groupByBucket - handles missing timestamps', (t) => {
  const entries = [
    { revenue: 100 },
    { ts: 1700006400000, revenue: 200 }
  ]
  const buckets = groupByBucket(entries, 86400000)
  t.ok(Object.keys(buckets).length >= 1, 'should skip items without ts')
  t.pass()
})
39 changes: 39 additions & 0 deletions tests/unit/routes/pools.routes.test.js
@@ -0,0 +1,39 @@
'use strict'

const test = require('brittle')
const { testModuleStructure, testHandlerFunctions, testOnRequestFunctions } = require('../helpers/routeTestHelpers')
const { createRoutesForTest } = require('../helpers/mockHelpers')

const ROUTES_PATH = '../../../workers/lib/server/routes/pools.routes.js'

test('pools routes - module structure', (t) => {
  testModuleStructure(t, ROUTES_PATH, 'pools')
  t.pass()
})

test('pools routes - route definitions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  const routeUrls = routes.map(route => route.url)
  t.ok(routeUrls.includes('/auth/pools/:pool/balance-history'), 'should have balance-history route')
  t.pass()
})

test('pools routes - HTTP methods', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  routes.forEach(route => {
    t.is(route.method, 'GET', `route ${route.url} should be GET`)
  })
  t.pass()
})

test('pools routes - handler functions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  testHandlerFunctions(t, routes, 'pools')
  t.pass()
})

test('pools routes - onRequest functions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  testOnRequestFunctions(t, routes, 'pools')
  t.pass()
})
26 changes: 24 additions & 2 deletions workers/lib/constants.js
@@ -108,7 +108,10 @@ const ENDPOINTS = {
  THING_CONFIG: '/auth/thing-config',

  // WebSocket endpoint
  WEBSOCKET: '/ws'
  WEBSOCKET: '/ws',

  // Pools endpoints
  POOLS_BALANCE_HISTORY: '/auth/pools/:pool/balance-history'
}

const HTTP_METHODS = {
@@ -183,6 +186,22 @@ const STATUS_CODES = {
  INTERNAL_SERVER_ERROR: 500
}

const RPC_METHODS = {
  TAIL_LOG: 'tailLog',
  GET_WRK_EXT_DATA: 'getWrkExtData'
}

const MINERPOOL_EXT_DATA_KEYS = {
  TRANSACTIONS: 'transactions',
  STATS: 'stats'
}

const RANGE_BUCKETS = {
  '1D': 86400000, // 1 day in ms
  '1W': 604800000, // 7 days in ms
  '1M': 2592000000 // 30 days in ms
}

const RPC_TIMEOUT = 15000
const RPC_CONCURRENCY_LIMIT = 2

@@ -202,5 +221,8 @@ module.exports = {
  STATUS_CODES,
  RPC_TIMEOUT,
  RPC_CONCURRENCY_LIMIT,
  USER_SETTINGS_TYPE
  USER_SETTINGS_TYPE,
  RPC_METHODS,
  MINERPOOL_EXT_DATA_KEYS,
  RANGE_BUCKETS
}
115 changes: 115 additions & 0 deletions workers/lib/server/handlers/pools.handlers.js
@@ -0,0 +1,115 @@
'use strict'

const {
  RPC_METHODS,
  MINERPOOL_EXT_DATA_KEYS,
  RANGE_BUCKETS
} = require('../../constants')
const {
  requestRpcEachLimit,
  getStartOfDay
} = require('../../utils')

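/**
 * Returns pool balance history grouped into range buckets (1D/1W/1M).
 * Fetches minerpool transaction ext-data from each ork, flattens it into
 * daily entries and aggregates revenue and average hashrate per bucket.
 */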
async function getPoolBalanceHistory (ctx, req) {
  const start = Number(req.query.start)
  const end = Number(req.query.end)
  const range = req.query.range || '1D'
  const poolParam = req.params.pool || null
  const poolFilter = poolParam === 'all' ? null : poolParam

  if (!start || !end) {
    throw new Error('ERR_MISSING_START_END')
  }

  if (start >= end) {
    throw new Error('ERR_INVALID_DATE_RANGE')
  }

  const results = await requestRpcEachLimit(ctx, RPC_METHODS.GET_WRK_EXT_DATA, {
    type: 'minerpool',
    query: { key: MINERPOOL_EXT_DATA_KEYS.TRANSACTIONS, start, end, pool: poolFilter }
  })

  const dailyEntries = flattenTransactionResults(results)

  const bucketSize = RANGE_BUCKETS[range] || RANGE_BUCKETS['1D']
  const buckets = groupByBucket(dailyEntries, bucketSize)

  const log = Object.entries(buckets)
    .sort(([a], [b]) => Number(a) - Number(b))
    .map(([ts, entries]) => {
      const totalRevenue = entries.reduce((sum, e) => sum + (e.revenue || 0), 0)
      const hashrates = entries.filter(e => e.hashrate > 0)
      const avgHashrate = hashrates.length
        ? hashrates.reduce((sum, e) => sum + e.hashrate, 0) / hashrates.length
        : 0

      return {
        ts: Number(ts),
        balance: totalRevenue,
        hashrate: avgHashrate,
        revenue: totalRevenue
      }
    })

  return { log }
}

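/**
 * Flattens per-ork ext-data results into daily entries with summed revenue
 * and average hashrate; errored, empty or malformed results are skipped.
 */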
function flattenTransactionResults (results) {
  const daily = []
  for (const res of results) {
    if (!res || res.error) continue
    const data = Array.isArray(res) ? res : (res.data || res.result || [])
    if (!Array.isArray(data)) continue

    for (const entry of data) {
      if (!entry) continue
      const ts = Number(entry.ts)
      if (!ts) continue

      const txs = entry.transactions || []
      if (!Array.isArray(txs) || txs.length === 0) continue

      let revenue = 0
      let hashrate = 0
      let hashCount = 0

      for (const tx of txs) {
        if (!tx) continue
        revenue += Math.abs(tx.changed_balance || 0)
        if (tx.mining_extra?.hash_rate) {
          hashrate += tx.mining_extra.hash_rate
          hashCount++
        }
      }

      if (revenue === 0 && hashCount === 0) continue

      daily.push({
        ts: getStartOfDay(ts),
        revenue,
        hashrate: hashCount > 0 ? hashrate / hashCount : 0
      })
    }
  }

  return daily
}

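/**
 * Groups entries into buckets of `bucketSize` ms keyed by the bucket start
 * timestamp; entries without a timestamp are skipped.
 */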
function groupByBucket (entries, bucketSize) {
  const buckets = {}
  for (const entry of entries) {
    const ts = entry.ts
    if (!ts) continue
    const bucketTs = Math.floor(ts / bucketSize) * bucketSize
    if (!buckets[bucketTs]) buckets[bucketTs] = []
    buckets[bucketTs].push(entry)
  }
  return buckets
}

module.exports = {
  getPoolBalanceHistory,
  flattenTransactionResults,
  groupByBucket
}