-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdeleter.js
More file actions
277 lines (230 loc) Β· 7.92 KB
/
deleter.js
File metadata and controls
277 lines (230 loc) Β· 7.92 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
const rimraf = require('rimraf')
const fetch = require('node-fetch')
const fs = require('fs')
const fsp = require('fsp')
const path = require('path')
const moment = require('moment')
const touch = require('touch')
const mkdirp = require('mkdirp')
const { AbortController } = require('abort-controller')
const config = require('./config.js').config()
// WARNING: disables TLS certificate verification for EVERY outgoing HTTPS
// request made by this process, not just the Forge API call below.
// NOTE(review): presumably needed for a self-signed Forge API cert — confirm,
// and prefer an https.Agent with an explicit CA bundle passed to node-fetch.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
// temp folder is used to cache site meta between deploys
// example.com/.forge-meta.json
// NOTE: when the second argument to path.resolve() is absolute (the
// production branch below), __dirname is ignored and the absolute path is
// returned as-is — so production resolves to /tmp/cache, /tmp/forgerc_sites.
const tmpDir = (process.env.NODE_ENV === 'development' ?
  path.resolve(__dirname, './tmp/') :
  path.resolve(__dirname, '/tmp/cache/')
)
// Per-site .forge-config marker files live here (see cleanSite/touchFile).
const forgercDir = (process.env.NODE_ENV === 'development' ?
  path.resolve(__dirname, './tmp/forgerc_sites') :
  path.resolve(__dirname, '/tmp/forgerc_sites/')
)
// Timestamped console logger used throughout this file.
const logger = (...args) => {
  console.log(`π ${moment().format('MM.DD.YYYY HH:mm:ss')} #`, ...args)
}
/*
 * In order to provide downtime support
 * we use time of the last successful check,
 * persisted across restarts in a file under tmpDir.
 */
const lastCheckedAtFile = path.join(tmpDir, '.deleter-last-check')
let lastCheckAt = null
try {
  lastCheckAt = new Date(fs.readFileSync(lastCheckedAtFile, 'utf-8'))
  // Guard against a corrupted/empty timestamp file yielding an Invalid Date,
  // which would make every subsequent `since` computation NaN.
  if (Number.isNaN(lastCheckAt.getTime())) {
    lastCheckAt = moment().toDate()
  }
} catch (err) {
  // First run (or unreadable file): start checking from "now".
  lastCheckAt = moment().toDate()
}
logger(`π Last check was performed ${moment(lastCheckAt).format('MM.DD HH:mm')}`)
// Persists the current time as the last successful check.
// Best-effort: a failed write is logged (previously the error was
// silently swallowed by `return err` inside the callback) but does not
// interrupt the polling loop, since lastCheckAt is already updated in memory.
const updateLastCheck = () => {
  lastCheckAt = new Date()
  fs.writeFile(lastCheckedAtFile, lastCheckAt.toString(), (err) => {
    if (err) logger(`Failed to persist last-check time: ${err.message}`)
  })
}
/*
 * Utility fs functions (Promise-compliant)
 */
// Recursively deletes `dir`, resolving once rimraf has finished
// (rejects with rimraf's error on failure).
const removeDir = (dir) => {
  logger(`βοΈ Removing dir ${dir}`)
  return new Promise((resolve, reject) => {
    rimraf(dir, (err) => (err ? reject(err) : resolve()))
  })
}
// Creates `fname` (and its parent directory) containing the placeholder
// text 'w00t'. Resolves once the file is written; rejects on mkdir or
// write failure.
//
// BUG FIX: the returned Promise previously never settled on success, and
// write errors were silently swallowed (`return err` inside the callback),
// so cleanSite's Promise.all could hang forever on this branch.
// NOTE(review): the original called `fsp.writeFile` with a Node-style
// callback; core `fs` matches that call shape exactly, so it is used here —
// confirm `fsp` (a promise wrapper) wasn't intended to be awaited instead.
const touchFile = (fname) => {
  logger(`π Touching file ${fname}`)
  return new Promise((resolve, reject) => {
    const directory = path.dirname(fname)
    logger(`π Creating dir ${directory}`)
    mkdirp(directory, (err) => {
      if (err) return reject(err)
      fs.writeFile(fname, 'w00t', (err) => {
        if (err) return reject(err)
        resolve()
      })
    })
  })
}
// Drops every cached artifact for `site` — both the bare host and its
// www.-prefixed variant — and, when the site carries its own config
// (site.has_config), re-creates empty .forge-config marker files so the
// server picks the config path back up on the next request.
const cleanSite = (site) => {
  logger(`π£ Cleaning site ${site.url}`)
  const hosts = [site.url, `www.${site.url}`]
  const removals = []
  for (const host of hosts) {
    removals.push(removeDir(path.join(tmpDir, host)))
    removals.push(removeDir(path.join(forgercDir, host)))
  }
  return Promise.all(removals).then(() => {
    if (!site.has_config) return
    return Promise.all(
      hosts.map((host) => touchFile(path.join(forgercDir, host, '.forge-config')))
    )
  })
}
// Strips a single leading "www." prefix from a URL.
// Tolerates null/undefined input by treating it as the empty string.
const normalizeUrl = (url) => {
  const value = url || ''
  if (value.startsWith('www.')) {
    return value.slice('www.'.length)
  }
  return value
}
// Normalizes the urls of `sites` (strips www.) and cleans each one's caches,
// then records the time of this check. Returns a Promise so callers MAY
// await completion (the existing caller ignores the return value, which
// remains backward-compatible).
//
// BUG FIX: the Promise.all was previously floating — no rejection handler —
// so one failed removeDir/touchFile produced an unhandled rejection (a crash
// on modern Node). Failures are now logged, and the last-check timestamp is
// updated after the cleanup attempt rather than before it starts.
const cleanSites = (sites) => {
  if (!sites.length) {
    // TODO: output this message with some throttle
    // e.g. once in X seconds only
    // logger('π Nothing to do...')
  }
  const normalizedUrls = sites.map((site) => {
    return Object.assign({}, site, { url: normalizeUrl(site.url) })
  })
  return Promise.all(normalizedUrls.map(cleanSite))
    .catch((err) => logger('Failed to clean some sites:', err))
    .then(() => updateLastCheck())
}
/*
 * Fetches deployed sites data from API
 * and cleans the caches
 */
const CHECK_INTERVAL = 5000
const API_TIMEOUT = 10000 // 10 seconds
const MAX_RETRIES = 3
const RETRY_DELAY_BASE = 1000 // 1 second base delay
// Promise-based delay helper used for retry backoff.
const sleep = (ms) => {
  return new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
}
/**
 * Fetches `url` as JSON with a hard timeout and exponential-backoff retries.
 *
 * Retryable failures: abort (timeout), ETIMEDOUT/ECONNRESET/ENOTFOUND network
 * errors, and HTTP 5xx — up to MAX_RETRIES attempts with delays of
 * RETRY_DELAY_BASE * 2^attempt. Non-retryable errors (and retryable ones that
 * exhaust the budget) are rethrown to the caller.
 *
 * NOTE: HTTP status is smuggled through the Error MESSAGE text
 * ("API returned status NNN: ...") and re-parsed with a regex both here and
 * in checkDeployed — keep these message formats in sync if editing.
 *
 * @param {string} url         full API URL to fetch
 * @param {number} retryCount  internal recursion counter, callers omit it
 * @returns {Promise<Object>}  parsed JSON body on HTTP 200
 * @throws on non-200 responses and unrecoverable network errors
 */
const makeApiRequest = async (url, retryCount = 0) => {
  // Belt-and-braces timeout: AbortController cancels the request, and the
  // node-fetch-specific `timeout` option below backs it up.
  const controller = new AbortController()
  const timeoutId = setTimeout(() => controller.abort(), API_TIMEOUT)
  try {
    const response = await fetch(url, {
      signal: controller.signal,
      timeout: API_TIMEOUT,
      headers: {
        'User-Agent': 'forge-server/1.0'
      }
    })
    clearTimeout(timeoutId)
    if(response.status != 200) {
      throw new Error(`API returned status ${response.status}: ${response.statusText}`)
    }
    return await response.json()
  } catch (err) {
    // Always clear the timer, or an abort could fire on a finished request.
    clearTimeout(timeoutId)
    // Check if error is due to HTTP status code (502, 503, 504 are retryable)
    const isHttpError = err.message && err.message.includes('API returned status')
    const httpStatusMatch = err.message && err.message.match(/status (\d+)/)
    const httpStatus = httpStatusMatch ? parseInt(httpStatusMatch[1]) : null
    const isRetryableHttpStatus = httpStatus && (httpStatus >= 500 && httpStatus < 600)
    // AbortError = our own timeout firing; the rest are transient network faults.
    const isRetryableError = err.name === 'AbortError' ||
      err.code === 'ETIMEDOUT' ||
      err.errno === 'ETIMEDOUT' ||
      err.code === 'ECONNRESET' ||
      err.code === 'ENOTFOUND' ||
      isRetryableHttpStatus
    if (isRetryableError && retryCount < MAX_RETRIES) {
      const delay = RETRY_DELAY_BASE * Math.pow(2, retryCount) // Exponential backoff
      if (isRetryableHttpStatus) {
        logger(`π Retrying API request after ${httpStatus} error in ${delay}ms (attempt ${retryCount + 1}/${MAX_RETRIES})`)
      } else {
        logger(`π Retrying API request in ${delay}ms (attempt ${retryCount + 1}/${MAX_RETRIES})`)
      }
      await sleep(delay)
      return makeApiRequest(url, retryCount + 1)
    }
    throw err
  }
}
/**
 * Polling loop: asks the Forge API for sites deployed since the last check
 * and kicks off cache cleanup for each. Reschedules itself every
 * CHECK_INTERVAL ms via setTimeout regardless of success or failure
 * (the `finally` block), so the loop never dies.
 */
const checkDeployed = async () => {
  // Overlap the window by 5 seconds so deploys that land exactly at the
  // last-check boundary are not missed (cleanup is idempotent, so
  // re-cleaning a site is harmless).
  let since = moment(lastCheckAt).unix() - 5;
  // Validate config
  if (!config.forge_api) {
    logger('β οΈ FORGE_API environment variable not set! Skipping API check.')
    setTimeout(checkDeployed, CHECK_INTERVAL)
    return
  }
  const url = `${config.forge_api}/deployed_sites.json?deployed_since=${since}`
  try {
    const json = await makeApiRequest(url)
    const sitesToUpdate = json.sites || []
    // NOTE: fire-and-forget — cleanup is not awaited before rescheduling.
    cleanSites(sitesToUpdate)
  } catch (err) {
    // Check if it's an HTTP error status
    // (parses the "API returned status NNN" message format produced by
    // makeApiRequest — keep the two in sync).
    const httpStatusMatch = err.message && err.message.match(/status (\d+)/)
    const httpStatus = httpStatusMatch ? parseInt(httpStatusMatch[1]) : null
    if (err.name === 'AbortError') {
      logger('π£ BOOM! Request timeout to Forge API after retries')
    } else if (err.code === 'ETIMEDOUT' || err.errno === 'ETIMEDOUT') {
      logger('π£ BOOM! Connection timeout to Forge API after retries:', err.message)
    } else if (httpStatus && httpStatus >= 500 && httpStatus < 600) {
      logger(`π£ BOOM! API returned ${httpStatus} error after retries: ${err.message}`)
    } else {
      logger('π£ BOOM! Error happened after retries', err)
    }
  } finally {
    setTimeout(checkDeployed, CHECK_INTERVAL)
  }
}
// Clear old cache files every 5 minutes
const CLEAR_INTERVAL = 5000*60;
const CACHE_MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds
// Scans tmpDir and removes entries whose mtime is older than
// CACHE_MAX_AGE_MS, logging a summary of successes and failures.
// A missing tmpDir (ENOENT) is normal before the first deploy and ignored.
//
// BUG FIX: directories were previously removed via the ASYNC rimraf callback,
// so `cleaned`/`errors` were incremented after the summary below had already
// been logged (dir removals never showed up in the counts), and callback
// errors escaped the surrounding try/catch. rimraf.sync keeps the counters
// accurate and routes failures through the per-file catch.
const cleanupOldCacheFiles = function() {
  try {
    const files = fs.readdirSync(tmpDir);
    let cleaned = 0;
    let errors = 0;
    files.forEach((file) => {
      try {
        const filePath = path.join(tmpDir, file);
        const stats = fs.statSync(filePath);
        // Remove files/directories older than 7 days
        const age = Date.now() - stats.mtimeMs;
        if (age > CACHE_MAX_AGE_MS) {
          if (stats.isDirectory()) {
            rimraf.sync(filePath)
          } else {
            fs.unlinkSync(filePath)
          }
          cleaned++
        }
      } catch (err) {
        logger(`β Error cleaning cache file ${file}: ${err.message}`)
        errors++
      }
    });
    if (cleaned > 0) {
      logger(`π§Ή Cleaned ${cleaned} old cache files/directories`)
    }
    if (errors > 0) {
      logger(`β οΈ Encountered ${errors} errors during cleanup`)
    }
  } catch (err) {
    if (err.code !== 'ENOENT') {
      logger(`β Error during cache cleanup: ${err.message}`)
    }
  }
}
// Bootstrap: start the API polling loop (it reschedules itself via
// setTimeout) and the periodic cache purge.
// The unused `var intervalTimer = null` was removed — the interval id was
// never stored or cleared anywhere in this file.
checkDeployed();
setInterval(cleanupOldCacheFiles, CLEAR_INTERVAL);