diff --git a/config/facs/auth.config.json.example b/config/facs/auth.config.json.example index 369a204..7dbd3ae 100644 --- a/config/facs/auth.config.json.example +++ b/config/facs/auth.config.json.example @@ -23,7 +23,8 @@ "reporting:rw", "settings:rw", "ticket:rw", - "power_spot_forecast:rw" + "power_spot_forecast:rw", + "pool_manager:rw" ], "roles": { "admin": [ @@ -46,7 +47,8 @@ "reporting:rw", "settings:rw", "ticket:rw", - "power_spot_forecast:rw" + "power_spot_forecast:rw", + "pool_manager:rw" ], "reporting_tool_manager": [ "revenue:rw", @@ -136,8 +138,9 @@ "comments:r", "settings:r", "ticket:r", - "alerts:r" - ] + "alerts:r", + "pool_manager:r" + ] }, "roleManagement": { "admin": [ diff --git a/tests/integration/api.poolManager.test.js b/tests/integration/api.poolManager.test.js new file mode 100644 index 0000000..75011c3 --- /dev/null +++ b/tests/integration/api.poolManager.test.js @@ -0,0 +1,360 @@ +'use strict' + +const test = require('brittle') +const fs = require('fs') +const { createWorker } = require('tether-svc-test-helper').worker +const { setTimeout: sleep } = require('timers/promises') +const HttpFacility = require('bfx-facs-http') + +test('Pool Manager API', { timeout: 90000 }, async (main) => { + const baseDir = 'tests/integration' + let worker + let httpClient + const appNodePort = 5001 + const ip = '127.0.0.1' + const appNodeBaseUrl = `http://${ip}:${appNodePort}` + const testUser = 'poolmanager@test' + const encoding = 'json' + + main.teardown(async () => { + await httpClient.stop() + await worker.stop() + await sleep(2000) + fs.rmSync(`./${baseDir}/store`, { recursive: true, force: true }) + fs.rmSync(`./${baseDir}/status`, { recursive: true, force: true }) + fs.rmSync(`./${baseDir}/config`, { recursive: true, force: true }) + fs.rmSync(`./${baseDir}/db`, { recursive: true, force: true }) + }) + + const createConfig = () => { + if (!fs.existsSync(`./${baseDir}/config/facs`)) { + if (!fs.existsSync(`./${baseDir}/config`)) fs.mkdirSync(`./${baseDir}/config`) + fs.mkdirSync(`./${baseDir}/config/facs`) + } + if (!fs.existsSync(`./${baseDir}/db`)) fs.mkdirSync(`./${baseDir}/db`) + + const commonConf = { + dir_log: 'logs', + debug: 0, + orks: { 'cluster-1': { region: 'AB', rpcPublicKey: '' } }, + cacheTiming: {}, + featureConfig: {} + } + const netConf = { r0: {} } + const httpdConf = { h0: {} } + const httpdOauthConf = { + h0: { + method: 'google', + credentials: { client: { id: 'i', secret: 's' } }, + users: [{ email: testUser }] + } + } + const authConf = require('../../config/facs/auth.config.json') + fs.writeFileSync(`./${baseDir}/config/common.json`, JSON.stringify(commonConf)) + fs.writeFileSync(`./${baseDir}/config/facs/net.config.json`, JSON.stringify(netConf)) + fs.writeFileSync(`./${baseDir}/config/facs/httpd.config.json`, JSON.stringify(httpdConf)) + fs.writeFileSync(`./${baseDir}/config/facs/httpd-oauth2.config.json`, JSON.stringify(httpdOauthConf)) + fs.writeFileSync(`./${baseDir}/config/facs/auth.config.json`, JSON.stringify(authConf)) + } + + const mockMiners = [ + { + id: 'miner-001', + info: { model: 'Antminer S19 XP', ip_address: '192.168.1.100' }, + snap: { + ts: Date.now(), + config: { + pool_config: [ + { url: 'stratum+tcp://btc.f2pool.com:3333', username: 'tether.worker1' } + ] + }, + stats: { + status: 'mining', + pool_status: [{ pool: 'btc.f2pool.com:3333', status: 'Alive', accepted: 100, rejected: 1 }], + hashrate_mhs: { t_5m: 140000 } + } + }, + tags: { unit: 'unit-A', rack: 'rack-1' }, + alerts: {} + }, + { + id: 'miner-002', + info: { model: 
'Antminer S19 XP', ip_address: '192.168.1.101' },
+      snap: {
+        ts: Date.now(),
+        config: {
+          pool_config: [
+            { url: 'stratum+tcp://btc.f2pool.com:3333', username: 'tether.worker2' }
+          ]
+        },
+        stats: {
+          status: 'mining',
+          pool_status: [{ pool: 'btc.f2pool.com:3333', status: 'Alive', accepted: 150, rejected: 2 }],
+          hashrate_mhs: { t_5m: 145000 }
+        }
+      },
+      tags: { unit: 'unit-A', rack: 'rack-1' },
+      alerts: {}
+    },
+    {
+      id: 'miner-003',
+      info: { model: 'Whatsminer M50S', ip_address: '192.168.1.102' },
+      snap: {
+        ts: Date.now(),
+        config: {
+          pool_config: [
+            { url: 'stratum+tcp://ocean.xyz:3333', username: 'tether.worker3' }
+          ]
+        },
+        stats: {
+          status: 'mining',
+          pool_status: [{ pool: 'ocean.xyz:3333', status: 'Alive', accepted: 200, rejected: 3 }],
+          hashrate_mhs: { t_5m: 130000 }
+        }
+      },
+      tags: { unit: 'unit-B', rack: 'rack-2' },
+      alerts: { wrong_miner_pool: { ts: Date.now() } }
+    }
+  ]
+
+  const startWorker = async () => {
+    worker = createWorker({
+      env: 'test',
+      wtype: 'wrk-node-dashboard-test',
+      rack: 'test-rack',
+      tmpdir: baseDir,
+      storeDir: 'test-store',
+      serviceRoot: `${process.cwd()}/${baseDir}`,
+      port: appNodePort
+    })
+
+    await worker.start()
+    worker.worker.net_r0.jRequest = (publicKey, method, params) => {
+      if (method === 'listThings') {
+        return Promise.resolve(mockMiners)
+      }
+      if (method === 'applyThings') {
+        return Promise.resolve({ success: true, affected: params.query?.id?.$in?.length || 0 })
+      }
+      return Promise.resolve([])
+    }
+  }
+
+  const createHttpClient = async () => {
+    httpClient = new HttpFacility({}, { ns: 'c0', timeout: 30000, debug: false }, { env: 'test' })
+    await httpClient.start()
+  }
+
+  const getTestToken = async (email) => {
+    worker.worker.authLib._auth.addHandlers({
+      google: () => { return { email } }
+    })
+    const token = await worker.worker.auth_a0.authCallbackHandler('google', { ip })
+    return token
+  }
+
+  createConfig()
+  await startWorker()
+  await createHttpClient()
+  await sleep(2000)
+
+  const baseParams = 'regions=["AB"]'
+
+  await main.test('Api: auth/pool-manager/stats', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/stats?${baseParams}`
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.get(api, { encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return stats', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      try {
+        const res = await httpClient.get(api, { headers, encoding })
+        t.ok(res.body)
+        t.ok(typeof res.body.totalPools === 'number')
+        t.ok(typeof res.body.totalWorkers === 'number')
+        t.ok(typeof res.body.errors === 'number')
+        t.pass()
+      } catch (e) {
+        console.error('Stats error:', e)
+        t.fail()
+      }
+    })
+  })
+
+  await main.test('Api: auth/pool-manager/pools', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/pools?${baseParams}`
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.get(api, { encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return pools list', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      try {
+        const res = await httpClient.get(api, { headers, encoding })
+        t.ok(Array.isArray(res.body.pools))
+        if (res.body.pools.length > 0) {
+          t.ok(res.body.pools[0].name)
+          t.ok(res.body.pools[0].pool)
+        }
+        t.pass()
+      } catch (e) {
+        console.error('Pools error:', e)
+        t.fail()
+      }
+    })
+  })
+
+  await main.test('Api: auth/pool-manager/miners', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/miners?${baseParams}`
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.get(api, { encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return paginated miners', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      try {
+        const res = await httpClient.get(api, { headers, encoding })
+        t.ok(res.body)
+        t.ok(Array.isArray(res.body.miners))
+        t.ok(typeof res.body.total === 'number')
+        t.ok(typeof res.body.page === 'number')
+        t.ok(typeof res.body.limit === 'number')
+        t.pass()
+      } catch (e) {
+        console.error('Miners error:', e)
+        t.fail()
+      }
+    })
+
+    await n.test('api should support pagination params', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      const paginatedApi = `${api}&page=1&limit=10`
+      try {
+        const res = await httpClient.get(paginatedApi, { headers, encoding })
+        t.is(res.body.page, 1)
+        t.is(res.body.limit, 10)
+        t.pass()
+      } catch (e) {
+        console.error('Pagination error:', e)
+        t.fail()
+      }
+    })
+  })
+
+  await main.test('Api: auth/pool-manager/units', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/units?${baseParams}`
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.get(api, { encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return units list', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      try {
+        const res = await httpClient.get(api, { headers, encoding })
+        t.ok(Array.isArray(res.body.units))
+        t.pass()
+      } catch (e) {
+        console.error('Units error:', e)
+        t.fail()
+      }
+    })
+  })
+
+  await main.test('Api: auth/pool-manager/alerts', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/alerts?${baseParams}`
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.get(api, { encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return alerts list', async (t) => {
+      const token = await getTestToken(testUser)
+      const headers = { 'bfx-token': token }
+      try {
+        const res = await httpClient.get(api, { headers, encoding })
+        t.ok(Array.isArray(res.body.alerts))
+        if (res.body.alerts.length > 0) {
+          t.ok(res.body.alerts[0].type)
+          t.ok(res.body.alerts[0].minerId)
+          t.ok(res.body.alerts[0].severity)
+        }
+        t.pass()
+      } catch (e) {
+        console.error('Alerts error:', e)
+        t.fail()
+      }
+    })
+  })
+
+  await main.test('Api: auth/pool-manager/miners/assign', async (n) => {
+    const api = `${appNodeBaseUrl}/auth/pool-manager/miners/assign?${baseParams}`
+    const body = {
+      regions: ['AB'],
+      minerIds: ['miner-001', 'miner-002'],
+      pools: [{
+        url: 'stratum+tcp://newpool.com:3333',
+        worker_name: 'tether.newworker',
+        worker_password: 'x'
+      }]
+    }
+
+    await n.test('api should fail for missing auth token', async (t) => {
+      try {
+        await httpClient.post(api, { body, encoding })
+        t.fail()
+      } catch (e) {
+        t.is(e.response.message.includes('ERR_AUTH_FAIL'), true)
+      }
+    })
+
+    await n.test('api should succeed and return assignment result', 
async (t) => { + const token = await getTestToken(testUser) + const headers = { 'bfx-token': token } + try { + const res = await httpClient.post(api, { body, headers, encoding }) + t.ok(res.body) + t.ok(typeof res.body.success === 'boolean') + t.ok(typeof res.body.assigned === 'number') + t.pass() + } catch (e) { + console.error('Assign error:', e) + t.fail() + } + }) + }) +}) diff --git a/tests/unit/services.poolManager.test.js b/tests/unit/services.poolManager.test.js new file mode 100644 index 0000000..39e7e21 --- /dev/null +++ b/tests/unit/services.poolManager.test.js @@ -0,0 +1,463 @@ +'use strict' + +const test = require('brittle') + +const { + getPoolStats, + getPoolConfigs, + getMinersWithPools, + getUnitsWithPoolData, + getPoolAlerts, + assignPoolToMiners, + setPowerMode +} = require('../../workers/lib/server/services/poolManager') + +function createMockCtx (responseData) { + return { + conf: { + orks: [{ rpcPublicKey: 'key1' }] + }, + net_r0: { + jRequest: () => Promise.resolve(responseData) + } + } +} + +function createMockPoolStatsResponse (pools) { + return [{ + stats: pools.map(p => ({ + poolType: p.poolType || 'f2pool', + username: p.username || 'worker1', + hashrate: p.hashrate || 100000, + hashrate_1h: p.hashrate_1h || 100000, + hashrate_24h: p.hashrate_24h || 95000, + worker_count: p.worker_count || 5, + active_workers_count: p.active_workers_count || 4, + balance: p.balance || 0.001, + unsettled: p.unsettled || 0, + revenue_24h: p.revenue_24h || 0.0001, + yearlyBalances: p.yearlyBalances || [], + timestamp: p.timestamp || Date.now() + })) + }] +} + +function createMockMiner (id, options = {}) { + return { + id, + code: options.code || 'AM-S19XP-0001', + type: options.type || 'miner-am-s19xp', + tags: options.tags || ['t-miner', 'site-pintado', 'container-bitmain-imm-1'], + info: { + site: options.site || 'Pintado', + container: options.container || 'bitmain-imm-1', + serialNum: options.serialNum || 'HTM3X01', + nominalHashrateMhs: options.nominalHashrateMhs || 204000000 + }, + address: options.address || '192.168.1.100', + alerts: options.alerts || {} + } +} + +test('poolManager:getPoolStats returns correct aggregates', async function (t) { + const poolData = createMockPoolStatsResponse([ + { poolType: 'f2pool', username: 'worker1', hashrate: 100000, worker_count: 5, active_workers_count: 4, balance: 0.001 }, + { poolType: 'ocean', username: 'addr1', hashrate: 200000, worker_count: 10, active_workers_count: 8, balance: 0.002 } + ]) + + const mockCtx = createMockCtx(poolData) + + const result = await getPoolStats(mockCtx) + + t.is(result.totalPools, 2) + t.is(result.totalWorkers, 15) + t.is(result.activeWorkers, 12) + t.is(result.totalHashrate, 300000) + t.is(result.totalBalance, 0.003) + t.is(result.errors, 3) +}) + +test('poolManager:getPoolStats handles empty orks', async function (t) { + const mockCtx = { + conf: { orks: [] }, + net_r0: { jRequest: () => Promise.resolve([]) } + } + + const result = await getPoolStats(mockCtx) + + t.is(result.totalPools, 0) + t.is(result.totalWorkers, 0) + t.is(result.activeWorkers, 0) + t.is(result.totalHashrate, 0) + t.is(result.totalBalance, 0) +}) + +test('poolManager:getPoolStats deduplicates pools by key', async function (t) { + const poolData = createMockPoolStatsResponse([ + { poolType: 'f2pool', username: 'worker1', worker_count: 5, active_workers_count: 4 } + ]) + + const mockCtx = { + conf: { orks: [{ rpcPublicKey: 'key1' }, { rpcPublicKey: 'key2' }] }, + net_r0: { jRequest: () => Promise.resolve(poolData) } + } + + const 
result = await getPoolStats(mockCtx) + + t.is(result.totalPools, 1) + t.is(result.totalWorkers, 5) +}) + +test('poolManager:getPoolConfigs returns pool objects', async function (t) { + const poolData = createMockPoolStatsResponse([ + { poolType: 'f2pool', username: 'worker1', hashrate: 100000, balance: 0.001 }, + { poolType: 'ocean', username: 'addr1', hashrate: 200000, balance: 0.002 } + ]) + + const mockCtx = createMockCtx(poolData) + + const result = await getPoolConfigs(mockCtx) + + t.ok(Array.isArray(result)) + t.is(result.length, 2) + + const f2pool = result.find(p => p.pool === 'f2pool') + t.ok(f2pool) + t.is(f2pool.name, 'worker1') + t.is(f2pool.account, 'worker1') + t.is(f2pool.hashrate, 100000) + t.is(f2pool.balance, 0.001) +}) + +test('poolManager:getPoolConfigs returns empty for no data', async function (t) { + const mockCtx = createMockCtx([]) + + const result = await getPoolConfigs(mockCtx) + + t.ok(Array.isArray(result)) + t.is(result.length, 0) +}) + +test('poolManager:getMinersWithPools returns paginated results', async function (t) { + const miners = [] + for (let i = 0; i < 100; i++) { + miners.push(createMockMiner(`miner-${i}`, { code: `AM-S19XP-${i}` })) + } + + const mockCtx = createMockCtx(miners) + + const result = await getMinersWithPools(mockCtx, { page: 1, limit: 10 }) + + t.is(result.miners.length, 10) + t.is(result.total, 100) + t.is(result.page, 1) + t.is(result.limit, 10) + t.is(result.totalPages, 10) +}) + +test('poolManager:getMinersWithPools extracts model from type', async function (t) { + const miners = [ + createMockMiner('m1', { type: 'miner-am-s19xp', code: 'AM-S19XP-001' }), + createMockMiner('m2', { type: 'miner-wm-m56s', code: 'WM-M56S-001' }), + createMockMiner('m3', { type: 'miner-av-a1346', code: 'AV-A1346-001' }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getMinersWithPools(mockCtx, {}) + + t.is(result.miners[0].model, 'Antminer S19XP') + t.is(result.miners[1].model, 'Whatsminer M56S') + t.is(result.miners[2].model, 'Avalon A1346') +}) + +test('poolManager:getMinersWithPools filters by search', async function (t) { + const miners = [ + createMockMiner('miner-1', { code: 'AM-S19XP-0001', serialNum: 'HTM3X01' }), + createMockMiner('miner-2', { code: 'WM-M56S-0002', serialNum: 'WMT001' }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getMinersWithPools(mockCtx, { search: 'S19XP' }) + + t.is(result.total, 1) + t.is(result.miners[0].id, 'miner-1') +}) + +test('poolManager:getMinersWithPools filters by model', async function (t) { + const miners = [ + createMockMiner('m1', { type: 'miner-am-s19xp' }), + createMockMiner('m2', { type: 'miner-wm-m56s' }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getMinersWithPools(mockCtx, { model: 'whatsminer' }) + + t.is(result.total, 1) + t.is(result.miners[0].model, 'Whatsminer M56S') +}) + +test('poolManager:getMinersWithPools maps thing fields correctly', async function (t) { + const miners = [createMockMiner('miner-1', { + code: 'AM-S19XP-0165', + type: 'miner-am-s19xp', + site: 'Pintado', + container: 'bitmain-imm-1', + address: '10.0.0.1', + serialNum: 'HTM3X10', + nominalHashrateMhs: 204000000 + })] + + const mockCtx = createMockCtx(miners) + + const result = await getMinersWithPools(mockCtx, {}) + + const miner = result.miners[0] + t.is(miner.id, 'miner-1') + t.is(miner.code, 'AM-S19XP-0165') + t.is(miner.type, 'miner-am-s19xp') + t.is(miner.model, 'Antminer S19XP') + t.is(miner.site, 'Pintado') + t.is(miner.container, 
'bitmain-imm-1') + t.is(miner.ipAddress, '10.0.0.1') + t.is(miner.serialNum, 'HTM3X10') + t.is(miner.nominalHashrate, 204000000) +}) + +test('poolManager:getUnitsWithPoolData groups miners by container', async function (t) { + const miners = [ + createMockMiner('m1', { container: 'bitmain-imm-1' }), + createMockMiner('m2', { container: 'bitmain-imm-1' }), + createMockMiner('m3', { container: 'bitdeer-4a' }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getUnitsWithPoolData(mockCtx) + + t.ok(Array.isArray(result)) + t.is(result.length, 2) + + const imm1 = result.find(u => u.name === 'bitmain-imm-1') + t.ok(imm1) + t.is(imm1.minersCount, 2) +}) + +test('poolManager:getUnitsWithPoolData sums nominal hashrate', async function (t) { + const miners = [ + createMockMiner('m1', { container: 'unit-A', nominalHashrateMhs: 100000 }), + createMockMiner('m2', { container: 'unit-A', nominalHashrateMhs: 150000 }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getUnitsWithPoolData(mockCtx) + + const unitA = result.find(u => u.name === 'unit-A') + t.is(unitA.nominalHashrate, 250000) +}) + +test('poolManager:getUnitsWithPoolData extracts container from tags', async function (t) { + const miners = [ + createMockMiner('m1', { + container: undefined, + tags: ['t-miner', 'site-pintado', 'container-bitmain-imm-2'] + }) + ] + miners[0].info.container = undefined + + const mockCtx = createMockCtx(miners) + + const result = await getUnitsWithPoolData(mockCtx) + + t.is(result.length, 1) + t.is(result[0].name, 'bitmain-imm-2') +}) + +test('poolManager:getUnitsWithPoolData assigns unassigned for no container', async function (t) { + const miners = [ + createMockMiner('m1', { container: undefined, tags: ['t-miner'] }) + ] + miners[0].info.container = undefined + + const mockCtx = createMockCtx(miners) + + const result = await getUnitsWithPoolData(mockCtx) + + t.is(result[0].name, 'unassigned') +}) + +test('poolManager:getPoolAlerts returns pool-related alerts', async function (t) { + const miners = [ + createMockMiner('miner-1', { + alerts: { + wrong_miner_pool: { ts: Date.now() }, + wrong_miner_subaccount: { ts: Date.now() } + } + }), + createMockMiner('miner-2', { + alerts: { all_pools_dead: { ts: Date.now() } } + }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getPoolAlerts(mockCtx) + + t.ok(Array.isArray(result)) + t.is(result.length, 3) +}) + +test('poolManager:getPoolAlerts respects limit', async function (t) { + const miners = [] + for (let i = 0; i < 10; i++) { + miners.push(createMockMiner(`miner-${i}`, { + type: 'miner-am-s19xp', + code: `AM-S19XP-${i}`, + alerts: { wrong_miner_pool: { ts: Date.now() - i * 1000 } } + })) + } + + const mockCtx = createMockCtx(miners) + + const result = await getPoolAlerts(mockCtx, { limit: 5 }) + + t.is(result.length, 5) +}) + +test('poolManager:getPoolAlerts includes severity', async function (t) { + const miners = [ + createMockMiner('miner-1', { + alerts: { all_pools_dead: { ts: Date.now() } } + }) + ] + + const mockCtx = createMockCtx(miners) + + const result = await getPoolAlerts(mockCtx) + + t.is(result[0].severity, 'critical') + t.is(result[0].type, 'all_pools_dead') +}) + +test('poolManager:assignPoolToMiners validates miner IDs', async function (t) { + const mockCtx = createMockCtx({ success: true }) + + await t.exception(async () => { + await assignPoolToMiners(mockCtx, [], [{ url: 'stratum+tcp://pool.com:3333' }]) + }, /ERR_MINER_IDS_REQUIRED/) +}) + +test('poolManager:assignPoolToMiners 
validates pools', async function (t) { + const mockCtx = createMockCtx({ success: true }) + + await t.exception(async () => { + await assignPoolToMiners(mockCtx, ['miner-1'], []) + }, /ERR_POOLS_REQUIRED/) +}) + +test('poolManager:assignPoolToMiners calls RPC with correct params', async function (t) { + let capturedParams + const mockCtx = { + conf: { orks: [{ rpcPublicKey: 'key1' }] }, + net_r0: { + jRequest: (pk, method, params) => { + capturedParams = params + return Promise.resolve({ success: true, affected: 2 }) + } + } + } + + const result = await assignPoolToMiners(mockCtx, ['miner-1', 'miner-2'], [ + { url: 'stratum+tcp://pool.com:3333', worker_name: 'worker1', worker_password: 'x' } + ]) + + t.ok(capturedParams) + t.is(capturedParams.action, 'setupPools') + t.ok(capturedParams.params.pools) + t.is(capturedParams.params.pools[0].url, 'stratum+tcp://pool.com:3333') + t.is(capturedParams.params.pools[0].worker_name, 'worker1') + t.is(result.success, true) + t.is(result.assigned, 2) +}) + +test('poolManager:assignPoolToMiners handles multiple pools', async function (t) { + let capturedParams + const mockCtx = { + conf: { orks: [{ rpcPublicKey: 'key1' }] }, + net_r0: { + jRequest: (pk, method, params) => { + capturedParams = params + return Promise.resolve({ success: true, affected: 1 }) + } + } + } + + await assignPoolToMiners(mockCtx, ['miner-1'], [ + { url: 'stratum+tcp://primary.com:3333', worker_name: 'worker1' }, + { url: 'stratum+tcp://backup1.com:3333' }, + { url: 'stratum+tcp://backup2.com:3333' } + ]) + + t.is(capturedParams.params.pools.length, 3) + t.is(capturedParams.params.pools[0].url, 'stratum+tcp://primary.com:3333') + t.is(capturedParams.params.pools[1].url, 'stratum+tcp://backup1.com:3333') + t.is(capturedParams.params.pools[2].url, 'stratum+tcp://backup2.com:3333') +}) + +test('poolManager:setPowerMode validates miner IDs', async function (t) { + const mockCtx = createMockCtx({ success: true }) + + await t.exception(async () => { + await setPowerMode(mockCtx, [], 'sleep') + }, /ERR_MINER_IDS_REQUIRED/) +}) + +test('poolManager:setPowerMode validates power mode', async function (t) { + const mockCtx = createMockCtx({ success: true }) + + await t.exception(async () => { + await setPowerMode(mockCtx, ['miner-1'], 'invalid-mode') + }, /ERR_INVALID_POWER_MODE/) +}) + +test('poolManager:setPowerMode calls RPC with correct params', async function (t) { + let capturedParams + const mockCtx = { + conf: { orks: [{ rpcPublicKey: 'key1' }] }, + net_r0: { + jRequest: (pk, method, params) => { + capturedParams = params + return Promise.resolve({ success: true, affected: 2 }) + } + } + } + + const result = await setPowerMode(mockCtx, ['miner-1', 'miner-2'], 'sleep') + + t.ok(capturedParams) + t.is(capturedParams.action, 'setPowerMode') + t.is(capturedParams.params.mode, 'sleep') + t.ok(result.success) + t.is(result.affected, 2) + t.is(result.mode, 'sleep') +}) + +test('poolManager:setPowerMode accepts all valid modes', async function (t) { + const validModes = ['low', 'normal', 'high', 'sleep'] + + for (const mode of validModes) { + const mockCtx = { + conf: { orks: [{ rpcPublicKey: 'key1' }] }, + net_r0: { + jRequest: () => Promise.resolve({ success: true, affected: 1 }) + } + } + + const result = await setPowerMode(mockCtx, ['miner-1'], mode) + t.ok(result.success) + t.is(result.mode, mode) + } +}) diff --git a/workers/lib/constants.js b/workers/lib/constants.js index f1f407e..e7b8d6b 100644 --- a/workers/lib/constants.js +++ b/workers/lib/constants.js @@ -108,7 +108,16 @@ const 
ENDPOINTS = { THING_CONFIG: '/auth/thing-config', // WebSocket endpoint - WEBSOCKET: '/ws' + WEBSOCKET: '/ws', + + // Pool Manager endpoints + POOL_MANAGER_STATS: '/auth/pool-manager/stats', + POOL_MANAGER_POOLS: '/auth/pool-manager/pools', + POOL_MANAGER_MINERS: '/auth/pool-manager/miners', + POOL_MANAGER_UNITS: '/auth/pool-manager/units', + POOL_MANAGER_ALERTS: '/auth/pool-manager/alerts', + POOL_MANAGER_ASSIGN: '/auth/pool-manager/miners/assign', + POOL_MANAGER_POWER_MODE: '/auth/pool-manager/miners/power-mode' } const HTTP_METHODS = { @@ -183,6 +192,57 @@ const STATUS_CODES = { INTERNAL_SERVER_ERROR: 500 } +const LIST_THINGS = 'listThings' +const APPLY_THINGS = 'applyThings' +const GET_HISTORICAL_LOGS = 'getHistoricalLogs' + +const WORKER_TYPES = { + MINER: 'miner', + CONTAINER: 'container', + MINERPOOL: 'minerpool', + POWERMETER: 'powermeter', + ELECTRICITY: 'electricity' +} + +const CACHE_KEYS = { + POOL_MANAGER_STATS: 'pool-manager/stats', + POOL_MANAGER_POOLS: 'pool-manager/pools', + POOL_MANAGER_MINERS: 'pool-manager/miners', + POOL_MANAGER_UNITS: 'pool-manager/units', + POOL_MANAGER_ALERTS: 'pool-manager/alerts' +} + +const POOL_ALERT_TYPES = [ + 'all_pools_dead', + 'wrong_miner_pool', + 'wrong_miner_subaccount', + 'wrong_worker_name', + 'ip_worker_name' +] + +const MINER_POOL_STATUS = { + ONLINE: 'online', + OFFLINE: 'offline', + INACTIVE: 'inactive' +} + +const POWER_MODES = { + LOW: 'low', + NORMAL: 'normal', + HIGH: 'high', + SLEEP: 'sleep' +} + +const RPC_METHODS = { + LIST_THINGS: 'listThings', + GET_WRK_EXT_DATA: 'getWrkExtData' +} + +const MINERPOOL_EXT_DATA_KEYS = { + TRANSACTIONS: 'transactions', + STATS: 'stats' +} + const RPC_TIMEOUT = 15000 const RPC_CONCURRENCY_LIMIT = 2 @@ -202,5 +262,15 @@ module.exports = { STATUS_CODES, RPC_TIMEOUT, RPC_CONCURRENCY_LIMIT, - USER_SETTINGS_TYPE + USER_SETTINGS_TYPE, + LIST_THINGS, + APPLY_THINGS, + GET_HISTORICAL_LOGS, + WORKER_TYPES, + CACHE_KEYS, + POOL_ALERT_TYPES, + MINER_POOL_STATUS, + POWER_MODES, + RPC_METHODS, + MINERPOOL_EXT_DATA_KEYS } diff --git a/workers/lib/server/controllers/poolManager.js b/workers/lib/server/controllers/poolManager.js new file mode 100644 index 0000000..5ac4b3d --- /dev/null +++ b/workers/lib/server/controllers/poolManager.js @@ -0,0 +1,131 @@ +'use strict' + +const poolManagerService = require('../services/poolManager') + +const getStats = async (ctx, req) => { + return poolManagerService.getPoolStats(ctx) +} + +const getPools = async (ctx, req) => { + const pools = await poolManagerService.getPoolConfigs(ctx) + + return { + pools, + total: pools.length + } +} + +const getMiners = async (ctx, req) => { + const filters = { + search: req.query.search, + status: req.query.status, + poolUrl: req.query.poolUrl, + model: req.query.model, + page: parseInt(req.query.page) || 1, + limit: parseInt(req.query.limit) || 50 + } + + return poolManagerService.getMinersWithPools(ctx, filters) +} + +const getUnits = async (ctx, req) => { + const units = await poolManagerService.getUnitsWithPoolData(ctx) + + return { + units, + total: units.length + } +} + +const getAlerts = async (ctx, req) => { + const filters = { + limit: parseInt(req.query.limit) || 50 + } + + const alerts = await poolManagerService.getPoolAlerts(ctx, filters) + + return { + alerts, + total: alerts.length + } +} + +const assignPool = async (ctx, req) => { + const { write } = await ctx.authLib.getTokenPerms(req._info.authToken) + if (!write) { + throw new Error('ERR_WRITE_PERM_REQUIRED') + } + + const hasPoolManagerPerm = await 
ctx.authLib.tokenHasPerms( + req._info.authToken, + true, + ['pool_manager:rw'] + ) + if (!hasPoolManagerPerm) { + throw new Error('ERR_POOL_MANAGER_PERM_REQUIRED') + } + + const { minerIds, pools } = req.body + + if (!minerIds || !Array.isArray(minerIds) || minerIds.length === 0) { + throw new Error('ERR_MINER_IDS_REQUIRED') + } + + if (!pools || !Array.isArray(pools) || pools.length === 0) { + throw new Error('ERR_POOLS_REQUIRED') + } + + if (!pools[0]?.url) { + throw new Error('ERR_POOL_URL_REQUIRED') + } + + const auditInfo = { + user: req._info.user?.metadata?.email || 'unknown', + timestamp: Date.now() + } + + return poolManagerService.assignPoolToMiners(ctx, minerIds, pools, auditInfo) +} + +const setPowerMode = async (ctx, req) => { + const { write } = await ctx.authLib.getTokenPerms(req._info.authToken) + if (!write) { + throw new Error('ERR_WRITE_PERM_REQUIRED') + } + + const hasPoolManagerPerm = await ctx.authLib.tokenHasPerms( + req._info.authToken, + true, + ['pool_manager:rw'] + ) + if (!hasPoolManagerPerm) { + throw new Error('ERR_POOL_MANAGER_PERM_REQUIRED') + } + + const { minerIds, mode } = req.body + + if (!minerIds || !Array.isArray(minerIds) || minerIds.length === 0) { + throw new Error('ERR_MINER_IDS_REQUIRED') + } + + if (!mode) { + throw new Error('ERR_POWER_MODE_REQUIRED') + } + + const auditInfo = { + user: req._info.user?.metadata?.email || 'unknown', + timestamp: Date.now() + } + + return poolManagerService.setPowerMode(ctx, minerIds, mode, auditInfo) +} + +module.exports = { + getStats, + getPools, + getMiners, + getUnits, + getAlerts, + assignPool, + setPowerMode +} diff --git a/workers/lib/server/index.js b/workers/lib/server/index.js index ac884da..925d327 100644 --- a/workers/lib/server/index.js +++ b/workers/lib/server/index.js @@ -8,6 +8,7 @@ const globalRoutes = require('./routes/global.routes') const thingsRoutes = require('./routes/things.routes') const settingsRoutes = require('./routes/settings.routes') const wsRoutes = require('./routes/ws.routes') +const poolManagerRoutes = require('./routes/poolManager.routes') /** * Collect all routes into a flat array for server injection. 
@@ -22,7 +23,8 @@ function routes (ctx) {
     ...thingsRoutes(ctx),
     ...usersRoutes(ctx),
     ...settingsRoutes(ctx),
-    ...wsRoutes(ctx)
+    ...wsRoutes(ctx),
+    ...poolManagerRoutes(ctx)
   ]
 }
diff --git a/workers/lib/server/routes/poolManager.routes.js b/workers/lib/server/routes/poolManager.routes.js
new file mode 100644
index 0000000..6fb500b
--- /dev/null
+++ b/workers/lib/server/routes/poolManager.routes.js
@@ -0,0 +1,180 @@
+'use strict'
+
+const {
+  getStats,
+  getPools,
+  getMiners,
+  getUnits,
+  getAlerts,
+  assignPool,
+  setPowerMode
+} = require('../controllers/poolManager')
+const { ENDPOINTS, HTTP_METHODS, CACHE_KEYS } = require('../../constants')
+const { createCachedAuthRoute, createAuthRoute } = require('../lib/routeHelpers')
+
+const POOL_MANAGER_MINERS_SCHEMA = {
+  querystring: {
+    type: 'object',
+    properties: {
+      search: { type: 'string' },
+      status: { type: 'string' },
+      poolUrl: { type: 'string' },
+      model: { type: 'string' },
+      page: { type: 'integer' },
+      limit: { type: 'integer' },
+      overwriteCache: { type: 'boolean' }
+    }
+  }
+}
+
+const POOL_MANAGER_ALERTS_SCHEMA = {
+  querystring: {
+    type: 'object',
+    properties: {
+      limit: { type: 'integer' },
+      overwriteCache: { type: 'boolean' }
+    }
+  }
+}
+
+const POOL_MANAGER_CACHE_SCHEMA = {
+  querystring: {
+    type: 'object',
+    properties: {
+      overwriteCache: { type: 'boolean' }
+    }
+  }
+}
+
+const POOL_MANAGER_ASSIGN_SCHEMA = {
+  body: {
+    type: 'object',
+    properties: {
+      minerIds: {
+        type: 'array',
+        items: { type: 'string' }
+      },
+      pools: {
+        type: 'array',
+        items: {
+          type: 'object',
+          properties: {
+            url: { type: 'string' },
+            worker_name: { type: 'string' },
+            worker_password: { type: 'string' }
+          },
+          required: ['url']
+        },
+        minItems: 1,
+        maxItems: 3
+      }
+    },
+    required: ['minerIds', 'pools']
+  }
+}
+
+const POOL_MANAGER_POWER_MODE_SCHEMA = {
+  body: {
+    type: 'object',
+    properties: {
+      minerIds: {
+        type: 'array',
+        items: { type: 'string' }
+      },
+      mode: {
+        type: 'string',
+        enum: ['low', 'normal', 'high', 'sleep']
+      }
+    },
+    required: ['minerIds', 'mode']
+  }
+}
+
+module.exports = (ctx) => {
+  return [
+    {
+      method: HTTP_METHODS.GET,
+      url: ENDPOINTS.POOL_MANAGER_STATS,
+      schema: POOL_MANAGER_CACHE_SCHEMA,
+      ...createCachedAuthRoute(
+        ctx,
+        [CACHE_KEYS.POOL_MANAGER_STATS],
+        ENDPOINTS.POOL_MANAGER_STATS,
+        getStats
+      )
+    },
+
+    {
+      method: HTTP_METHODS.GET,
+      url: ENDPOINTS.POOL_MANAGER_POOLS,
+      schema: POOL_MANAGER_CACHE_SCHEMA,
+      ...createCachedAuthRoute(
+        ctx,
+        [CACHE_KEYS.POOL_MANAGER_POOLS],
+        ENDPOINTS.POOL_MANAGER_POOLS,
+        getPools
+      )
+    },
+
+    {
+      method: HTTP_METHODS.GET,
+      url: ENDPOINTS.POOL_MANAGER_MINERS,
+      schema: POOL_MANAGER_MINERS_SCHEMA,
+      ...createCachedAuthRoute(
+        ctx,
+        (req) => [
+          CACHE_KEYS.POOL_MANAGER_MINERS,
+          req.query.search,
+          req.query.status,
+          req.query.poolUrl,
+          req.query.model,
+          req.query.page,
+          req.query.limit
+        ],
+        ENDPOINTS.POOL_MANAGER_MINERS,
+        getMiners
+      )
+    },
+
+    {
+      method: HTTP_METHODS.GET,
+      url: ENDPOINTS.POOL_MANAGER_UNITS,
+      schema: POOL_MANAGER_CACHE_SCHEMA,
+      ...createCachedAuthRoute(
+        ctx,
+        [CACHE_KEYS.POOL_MANAGER_UNITS],
+        ENDPOINTS.POOL_MANAGER_UNITS,
+        getUnits
+      )
+    },
+
+    {
+      method: HTTP_METHODS.GET,
+      url: ENDPOINTS.POOL_MANAGER_ALERTS,
+      schema: POOL_MANAGER_ALERTS_SCHEMA,
+      ...createCachedAuthRoute(
+        ctx,
+        (req) => [
+          CACHE_KEYS.POOL_MANAGER_ALERTS,
+          req.query.limit
+        ],
+        ENDPOINTS.POOL_MANAGER_ALERTS,
+        getAlerts
+      )
+    },
+
+    {
+      method: HTTP_METHODS.POST,
+      url: ENDPOINTS.POOL_MANAGER_ASSIGN,
+      schema: POOL_MANAGER_ASSIGN_SCHEMA,
+      ...createAuthRoute(ctx, assignPool)
+    },
+
+    {
+      method: 
HTTP_METHODS.POST, + url: ENDPOINTS.POOL_MANAGER_POWER_MODE, + schema: POOL_MANAGER_POWER_MODE_SCHEMA, + ...createAuthRoute(ctx, setPowerMode) + } + ] +} diff --git a/workers/lib/server/services/poolManager.js b/workers/lib/server/services/poolManager.js new file mode 100644 index 0000000..4660880 --- /dev/null +++ b/workers/lib/server/services/poolManager.js @@ -0,0 +1,428 @@ +'use strict' + +const { + LIST_THINGS, + WORKER_TYPES, + POOL_ALERT_TYPES, + APPLY_THINGS, + POWER_MODES, + RPC_METHODS, + MINERPOOL_EXT_DATA_KEYS +} = require('../../constants') +const { + requestRpcMapLimit, + requestRpcEachLimit +} = require('../../utils') + +const getPoolStats = async (ctx) => { + const pools = await _fetchPoolStats(ctx) + + const totalWorkers = pools.reduce((sum, p) => sum + (p.workerCount || 0), 0) + const activeWorkers = pools.reduce((sum, p) => sum + (p.activeWorkerCount || 0), 0) + const totalHashrate = pools.reduce((sum, p) => sum + (p.hashrate || 0), 0) + const totalBalance = pools.reduce((sum, p) => sum + (p.balance || 0), 0) + + return { + totalPools: pools.length, + totalWorkers, + activeWorkers, + totalHashrate, + totalBalance, + errors: totalWorkers - activeWorkers + } +} + +const getPoolConfigs = async (ctx) => { + return _fetchPoolStats(ctx) +} + +const getMinersWithPools = async (ctx, filters = {}) => { + const { search, model, page = 1, limit = 50 } = filters + + const results = await requestRpcMapLimit(ctx, LIST_THINGS, { + type: WORKER_TYPES.MINER, + query: {} + }) + + let allMiners = [] + + results.forEach((clusterData) => { + if (!Array.isArray(clusterData)) return + + clusterData.forEach((thing) => { + if (!thing?.type?.startsWith('miner-')) return + + allMiners.push({ + id: thing.id, + code: thing.code, + type: thing.type, + model: _extractModelFromType(thing.type), + site: thing.info?.site || _extractTagValue(thing.tags, 'site-'), + container: thing.info?.container || _extractTagValue(thing.tags, 'container-'), + ipAddress: thing.address || null, + serialNum: thing.info?.serialNum || null, + nominalHashrate: thing.info?.nominalHashrateMhs || 0 + }) + }) + }) + + if (search) { + const s = search.toLowerCase() + allMiners = allMiners.filter(m => + m.id.toLowerCase().includes(s) || + (m.code && m.code.toLowerCase().includes(s)) || + (m.serialNum && m.serialNum.toLowerCase().includes(s)) || + (m.ipAddress && m.ipAddress.includes(s)) + ) + } + + if (model) { + const m = model.toLowerCase() + allMiners = allMiners.filter(miner => + miner.model.toLowerCase().includes(m) || + miner.type.toLowerCase().includes(m) + ) + } + + const total = allMiners.length + const startIdx = (page - 1) * limit + const paginatedMiners = allMiners.slice(startIdx, startIdx + limit) + + return { + miners: paginatedMiners, + total, + page, + limit, + totalPages: Math.ceil(total / limit) + } +} + +const getUnitsWithPoolData = async (ctx) => { + const results = await requestRpcMapLimit(ctx, LIST_THINGS, { + type: WORKER_TYPES.MINER, + query: {} + }) + + const unitsMap = new Map() + + results.forEach((clusterData) => { + if (!Array.isArray(clusterData)) return + + clusterData.forEach((thing) => { + if (!thing?.type?.startsWith('miner-')) return + + const container = thing.info?.container || + _extractTagValue(thing.tags, 'container-') || 'unassigned' + const site = thing.info?.site || _extractTagValue(thing.tags, 'site-') || '' + + if (!unitsMap.has(container)) { + unitsMap.set(container, { + name: container, + site, + miners: [], + totalNominalHashrate: 0 + }) + } + + const unitData = 
unitsMap.get(container) + unitData.miners.push(thing.id) + unitData.totalNominalHashrate += thing.info?.nominalHashrateMhs || 0 + }) + }) + + return Array.from(unitsMap.values()).map((unit) => ({ + name: unit.name, + site: unit.site, + minersCount: unit.miners.length, + nominalHashrate: unit.totalNominalHashrate + })) +} + +const getPoolAlerts = async (ctx, filters = {}) => { + const { limit = 50 } = filters + + const results = await requestRpcMapLimit(ctx, LIST_THINGS, { + type: WORKER_TYPES.MINER, + query: {} + }) + + const alerts = [] + + results.forEach((clusterData) => { + if (!Array.isArray(clusterData)) return + + clusterData.forEach((thing) => { + const minerAlerts = thing?.alerts || {} + + POOL_ALERT_TYPES.forEach((alertType) => { + if (minerAlerts[alertType]) { + alerts.push({ + id: `${thing.id}-${alertType}`, + type: alertType, + minerId: thing.id, + code: thing.code, + container: thing.info?.container || _extractTagValue(thing.tags, 'container-'), + severity: _getAlertSeverity(alertType), + message: _getAlertMessage(alertType, thing.code || thing.id), + timestamp: minerAlerts[alertType]?.ts || Date.now() + }) + } + }) + }) + }) + + alerts.sort((a, b) => b.timestamp - a.timestamp) + return alerts.slice(0, limit) +} + +const assignPoolToMiners = async (ctx, minerIds, pools, auditInfo = {}) => { + if (!Array.isArray(minerIds) || minerIds.length === 0) { + throw new Error('ERR_MINER_IDS_REQUIRED') + } + + if (!Array.isArray(pools) || pools.length === 0 || !pools[0]?.url) { + throw new Error('ERR_POOLS_REQUIRED') + } + + const formattedPools = pools.map(pool => ({ + url: pool.url, + worker_name: pool.worker_name || '', + worker_password: pool.worker_password || '' + })) + + if (ctx.logger && auditInfo.user) { + ctx.logger.info({ + action: 'pool_assignment', + user: auditInfo.user, + timestamp: auditInfo.timestamp, + minerCount: minerIds.length, + poolUrl: pools[0].url + }, 'Pool assignment initiated') + } + + const params = { + type: WORKER_TYPES.MINER, + query: { + id: { $in: minerIds } + }, + action: 'setupPools', + params: { pools: formattedPools } + } + + const results = await requestRpcEachLimit(ctx, APPLY_THINGS, params) + + let assigned = 0 + let failed = 0 + const details = [] + + results.forEach((clusterResult) => { + if (clusterResult?.success) { + assigned += clusterResult.affected || 0 + } else { + failed++ + } + + if (clusterResult?.details) { + details.push(...clusterResult.details) + } + }) + + if (ctx.logger && auditInfo.user) { + ctx.logger.info({ + action: 'pool_assignment_complete', + user: auditInfo.user, + timestamp: Date.now(), + assigned, + failed, + total: minerIds.length + }, 'Pool assignment completed') + } + + return { + success: failed === 0, + assigned, + failed, + total: minerIds.length, + details, + audit: { + user: auditInfo.user, + timestamp: auditInfo.timestamp + } + } +} + +const setPowerMode = async (ctx, minerIds, mode, auditInfo = {}) => { + if (!Array.isArray(minerIds) || minerIds.length === 0) { + throw new Error('ERR_MINER_IDS_REQUIRED') + } + + const validModes = Object.values(POWER_MODES) + if (!mode || !validModes.includes(mode)) { + throw new Error('ERR_INVALID_POWER_MODE') + } + + if (ctx.logger && auditInfo.user) { + ctx.logger.info({ + action: 'set_power_mode', + user: auditInfo.user, + timestamp: auditInfo.timestamp, + minerCount: minerIds.length, + mode + }, 'Power mode change initiated') + } + + const params = { + type: WORKER_TYPES.MINER, + query: { + id: { $in: minerIds } + }, + action: 'setPowerMode', + params: { mode } + } + + 
const results = await requestRpcEachLimit(ctx, APPLY_THINGS, params) + + let affected = 0 + let failed = 0 + const details = [] + + results.forEach((clusterResult) => { + if (clusterResult?.success) { + affected += clusterResult.affected || 0 + } else { + failed++ + } + + if (clusterResult?.details) { + details.push(...clusterResult.details) + } + }) + + if (ctx.logger && auditInfo.user) { + ctx.logger.info({ + action: 'set_power_mode_complete', + user: auditInfo.user, + timestamp: Date.now(), + affected, + failed, + total: minerIds.length, + mode + }, 'Power mode change completed') + } + + return { + success: failed === 0, + affected, + failed, + total: minerIds.length, + mode, + details, + audit: { + user: auditInfo.user, + timestamp: auditInfo.timestamp + } + } +} + +async function _fetchPoolStats (ctx) { + const results = await requestRpcMapLimit(ctx, RPC_METHODS.GET_WRK_EXT_DATA, { + type: 'minerpool', + query: { key: MINERPOOL_EXT_DATA_KEYS.STATS } + }) + + const pools = [] + const seen = new Set() + + for (const orkResult of results) { + if (!orkResult || orkResult.error) continue + const items = Array.isArray(orkResult) ? orkResult : [] + + for (const item of items) { + if (!item) continue + const stats = item.stats || [] + if (!Array.isArray(stats)) continue + + for (const stat of stats) { + if (!stat) continue + const poolKey = `${stat.poolType}:${stat.username}` + if (seen.has(poolKey)) continue + seen.add(poolKey) + + pools.push({ + name: stat.username || stat.poolType, + pool: stat.poolType, + account: stat.username, + status: 'active', + hashrate: stat.hashrate || 0, + hashrate1h: stat.hashrate_1h || 0, + hashrate24h: stat.hashrate_24h || 0, + workerCount: stat.worker_count || 0, + activeWorkerCount: stat.active_workers_count || 0, + balance: stat.balance || 0, + unsettled: stat.unsettled || 0, + revenue24h: stat.revenue_24h || stat.estimated_today_income || 0, + yearlyBalances: stat.yearlyBalances || [], + lastUpdated: stat.timestamp || null + }) + } + } + } + + return pools +} + +function _extractModelFromType (type) { + if (!type) return 'Unknown' + const models = { + 'miner-am': 'Antminer', + 'miner-wm': 'Whatsminer', + 'miner-av': 'Avalon' + } + + for (const [prefix, brand] of Object.entries(models)) { + if (type.startsWith(prefix)) { + const suffix = type.slice(prefix.length + 1).toUpperCase() + return suffix ? `${brand} ${suffix}` : brand + } + } + + return type +} + +function _extractTagValue (tags, prefix) { + if (!Array.isArray(tags)) return null + const tag = tags.find(t => typeof t === 'string' && t.startsWith(prefix)) + return tag ? tag.slice(prefix.length) : null +} + +function _getAlertSeverity (alertType) { + const severityMap = { + all_pools_dead: 'critical', + wrong_miner_pool: 'critical', + wrong_miner_subaccount: 'critical', + wrong_worker_name: 'medium', + ip_worker_name: 'medium' + } + return severityMap[alertType] || 'low' +} + +function _getAlertMessage (alertType, minerLabel) { + const messageMap = { + all_pools_dead: `All pools are dead - ${minerLabel}`, + wrong_miner_pool: `Pool URL mismatch - ${minerLabel}`, + wrong_miner_subaccount: `Wrong pool subaccount - ${minerLabel}`, + wrong_worker_name: `Incorrect worker name - ${minerLabel}`, + ip_worker_name: `Worker name uses IP address - ${minerLabel}` + } + return messageMap[alertType] || `Pool alert - ${minerLabel}` +} + +module.exports = { + getPoolStats, + getPoolConfigs, + getMinersWithPools, + getUnitsWithPoolData, + getPoolAlerts, + assignPoolToMiners, + setPowerMode +}