#!/usr/bin/env node
// Syncs log entries from production API to local MSSQL database.
// Usage: node scripts/sync-prod-logs.js [--since DATE] [--batch-size N]

const sql = require('mssql');
const https = require('https');
const http = require('http');
const fs = require('fs');
const path = require('path');

// --- Config ---

// Returns the value following `--flag` on the command line, or null when
// the flag is absent or has no value.
function argValue(flag) {
  const idx = process.argv.indexOf(flag);
  return idx !== -1 && idx + 1 < process.argv.length ? process.argv[idx + 1] : null;
}

const SINCE = argValue('--since') || '2026-03-01';
// Explicit radix; reject garbage early instead of letting NaN reach the
// `SELECT TOP ${BATCH_SIZE}` query below.
const BATCH_SIZE = Number.parseInt(argValue('--batch-size') || '100', 10);
if (!Number.isInteger(BATCH_SIZE) || BATCH_SIZE <= 0) {
  console.error('Error: --batch-size must be a positive integer');
  process.exit(1);
}

// Load .env — minimal KEY=VALUE parser. Tolerates CRLF line endings
// (previously a trailing \r was kept in every value on Windows-edited
// files) and skips `#` comment lines.
const envPath = path.join(__dirname, '..', '.env');
const envVars = {};
fs.readFileSync(envPath, 'utf8').split('\n').forEach((rawLine) => {
  const line = rawLine.replace(/\r$/, '');
  if (line.startsWith('#')) return;
  const match = line.match(/^([A-Z0-9_]+)=(.*)$/);
  if (match) envVars[match[1]] = match[2];
});

const API_URL = envVars.DEBUG_API_URL || 'https://api.dfx.swiss/v1';
const DEBUG_ADDRESS = envVars.DEBUG_ADDRESS;
const DEBUG_SIGNATURE = envVars.DEBUG_SIGNATURE;

// Local MSSQL connection settings (shape per the `mssql` package config).
const LOCAL_DB = {
  server: envVars.SQL_HOST || 'localhost',
  port: Number.parseInt(envVars.SQL_PORT || '1433', 10),
  user: envVars.SQL_USERNAME || 'sa',
  password: envVars.SQL_PASSWORD,
  database: envVars.SQL_DB || 'dfx',
  options: { encrypt: false, trustServerCertificate: true }, // local dev only
};

if (!LOCAL_DB.password) {
  console.error('Error: SQL_PASSWORD must be set in .env');
  process.exit(1);
}
// --- HTTP helper ---

/**
 * Performs a JSON HTTP request against the API and resolves with the
 * parsed response body (or raw text when the body is not valid JSON).
 *
 * Fixes over the previous version:
 *  - the query string (`url.search`) is now included in the request path
 *    (it was silently dropped before);
 *  - non-2xx responses reject instead of resolving with the error body,
 *    so callers' retry/re-auth logic actually fires — previously an
 *    expired token made the sync loop terminate early and silently.
 *
 * @param {string} urlPath - path relative to API_URL (e.g. 'auth').
 * @param {string} method - HTTP verb.
 * @param {object} [body] - optional JSON request body.
 * @returns {Promise<any>} parsed JSON body, or raw text if unparsable.
 */
function apiRequest(urlPath, method, body) {
  return new Promise((resolve, reject) => {
    const url = new URL(urlPath, API_URL.endsWith('/') ? API_URL : API_URL + '/');
    const lib = url.protocol === 'https:' ? https : http;
    const options = {
      hostname: url.hostname,
      port: url.port,
      path: url.pathname + url.search, // keep the query string
      method,
      headers: { 'Content-Type': 'application/json' },
    };
    // Bearer token is stashed on the function object by authenticate().
    if (apiRequest.token) {
      options.headers['Authorization'] = `Bearer ${apiRequest.token}`;
    }
    const req = lib.request(options, (res) => {
      let data = '';
      res.on('data', (chunk) => (data += chunk));
      res.on('end', () => {
        if (res.statusCode < 200 || res.statusCode >= 300) {
          reject(new Error(`HTTP ${res.statusCode} for ${method} ${options.path}: ${data.slice(0, 200)}`));
          return;
        }
        try { resolve(JSON.parse(data)); }
        catch { resolve(data); } // non-JSON body (e.g. empty response)
      });
    });
    req.on('error', reject);
    if (body) req.write(JSON.stringify(body));
    req.end();
  });
}
| 69 | + |
/**
 * Logs in with the debug credentials from .env and stores the resulting
 * JWT on `apiRequest.token` for subsequent calls.
 *
 * @throws {Error} when credentials are missing from .env, or when the
 *   API response carries no accessToken.
 */
async function authenticate() {
  // Fail with a clear message instead of sending `undefined` credentials.
  if (!DEBUG_ADDRESS || !DEBUG_SIGNATURE) {
    throw new Error('DEBUG_ADDRESS and DEBUG_SIGNATURE must be set in .env');
  }
  console.log(`Authenticating to ${API_URL}...`);
  const res = await apiRequest('auth', 'POST', {
    address: DEBUG_ADDRESS,
    signature: DEBUG_SIGNATURE,
  });
  if (!res.accessToken) throw new Error('Auth failed: ' + JSON.stringify(res));
  apiRequest.token = res.accessToken;
  console.log('Authenticated.');
}
| 80 | + |
/**
 * Runs a raw SQL string on the production database via the debug
 * endpoint. For SELECTs the callers below expect an array of row
 * objects back — TODO confirm the endpoint's response shape.
 * @param {string} query - SQL text executed remotely.
 * @returns {Promise<any>} endpoint response.
 */
async function execSql(query) {
  return apiRequest('gs/debug', 'POST', { sql: query });
}
| 84 | + |
/**
 * Inserts a single prod log row into the local `log` table, preserving
 * its identity value. IDENTITY_INSERT is toggled ON and back OFF inside
 * the same batch so it runs on the same pooled connection and leaves the
 * session clean (a session can only have it ON for one table at a time).
 *
 * @param {sql.ConnectionPool} pool - open local MSSQL pool.
 * @param {object} row - log row fetched from prod.
 * @returns {Promise<boolean>} true on insert, false when the row already
 *   exists (duplicate key); any other SQL error is rethrown.
 */
async function insertLogRow(pool, row) {
  const req = pool.request();
  req.input('id', sql.Int, row.id);
  req.input('updated', sql.DateTime2, row.updated);
  req.input('created', sql.DateTime2, row.created);
  req.input('system', sql.NVarChar(256), row.system);
  req.input('subsystem', sql.NVarChar(256), row.subsystem);
  req.input('severity', sql.NVarChar(256), row.severity);
  req.input('message', sql.NVarChar(sql.MAX), typeof row.message === 'string' ? row.message : JSON.stringify(row.message));
  req.input('category', sql.NVarChar(256), row.category || null);
  req.input('valid', sql.Bit, row.valid != null ? row.valid : null);

  try {
    await req.query(`
      SET IDENTITY_INSERT log ON;
      INSERT INTO log (id, updated, created, system, subsystem, severity, message, category, valid)
      VALUES (@id, @updated, @created, @system, @subsystem, @severity, @message, @category, @valid);
      SET IDENTITY_INSERT log OFF;
    `);
    return true;
  } catch (e) {
    // Row already synced on a previous run — not an error.
    if (e.message.includes('duplicate key') || e.message.includes('UNIQUE')) return false;
    throw e;
  }
}

/**
 * Fetches one batch of rows from prod with up to 3 attempts,
 * re-authenticating between attempts in case the token expired.
 *
 * @param {string} query - batch SELECT to run remotely.
 * @param {number} batchNum - 1-based batch index (for logging).
 * @param {number} lastId - current id cursor (for logging).
 * @returns {Promise<Array|null>} the rows, or null after 3 failures.
 */
async function fetchBatch(query, batchNum, lastId) {
  let retries = 3;
  while (retries > 0) {
    try {
      return await execSql(query);
    } catch (e) {
      retries--;
      if (retries > 0) {
        process.stdout.write(`\n Retry (${3 - retries}/3) after error: ${e.message}\n`);
        // Re-authenticate in case token expired
        try { await authenticate(); } catch {}
        await new Promise((r) => setTimeout(r, 2000));
      } else {
        console.error(`\n Failed batch ${batchNum} after 3 retries (lastId=${lastId}): ${e.message}`);
      }
    }
  }
  return null;
}

/**
 * Entry point: counts prod rows since SINCE, then pages through them by
 * id cursor and inserts each into the local DB, preserving identities.
 */
async function main() {
  // SINCE is interpolated into remote SQL below — insist on an ISO date
  // prefix so a malformed/hostile value cannot break the query.
  if (!/^\d{4}-\d{2}-\d{2}/.test(SINCE)) {
    throw new Error(`--since must be an ISO date (got '${SINCE}')`);
  }

  // Authenticate
  await authenticate();

  // Get total count
  const countResult = await execSql(`SELECT COUNT(*) as cnt FROM log WHERE created >= '${SINCE}'`);
  const total = countResult[0].cnt;
  console.log(`Total log entries since ${SINCE}: ${total}`);

  // Connect to local DB
  console.log('Connecting to local DB...');
  const pool = await sql.connect(LOCAL_DB);

  // NOTE: the old standalone `SET IDENTITY_INSERT log ON` was removed —
  // it ran on an arbitrary pooled connection (the setting is
  // session-scoped) and so did nothing reliable; insertLogRow toggles it
  // within each insert batch instead.
  try {
    const localCount = await pool.request().query('SELECT COUNT(*) as cnt FROM log');
    console.log(`Local log entries before sync: ${localCount.recordset[0].cnt}`);

    let lastId = 0;
    let inserted = 0;
    let errors = 0;
    let batchNum = 0;
    const totalBatches = Math.ceil(total / BATCH_SIZE);

    console.log(`Fetching in batches of ${BATCH_SIZE} (using id cursor)...`);

    while (true) {
      batchNum++;
      const query = `SELECT TOP ${BATCH_SIZE} id, updated, created, system, subsystem, severity, message, category, valid FROM log WHERE created >= '${SINCE}' AND id > ${lastId} ORDER BY id ASC`;

      process.stdout.write(`\r Batch ${batchNum}/${totalBatches} (inserted: ${inserted}/${total}, lastId: ${lastId})...`);

      const rows = await fetchBatch(query, batchNum, lastId);
      if (!rows) {
        errors++;
        // Skip past this id window and keep going. Previously hard-coded
        // to 100, which under-skipped when --batch-size was larger.
        lastId += BATCH_SIZE;
        continue;
      }

      if (!Array.isArray(rows) || rows.length === 0) {
        break; // cursor exhausted
      }

      // Insert batch
      for (const row of rows) {
        try {
          if (await insertLogRow(pool, row)) inserted++;
        } catch (e) {
          if (errors < 5) console.error(`\n Insert error (id=${row.id}): ${e.message}`);
          errors++;
        }
      }

      lastId = rows[rows.length - 1].id;
    }

    console.log(`\n\nDone!`);
    console.log(` Inserted: ${inserted}`);
    console.log(` Errors: ${errors}`);
    console.log(` Total in prod: ${total}`);

    // Verify
    const finalCount = await pool.request().query('SELECT COUNT(*) as cnt FROM log');
    console.log(` Local log entries after sync: ${finalCount.recordset[0].cnt}`);
  } finally {
    // Always release the pool, even when the sync loop throws.
    await pool.close();
  }
}

main().catch((e) => { console.error(e); process.exit(1); });