-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapi.js
More file actions
executable file
·411 lines (359 loc) · 16 KB
/
api.js
File metadata and controls
executable file
·411 lines (359 loc) · 16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
import express from 'express';
import multer from 'multer';
import axios from 'axios';
import fs from 'fs';
import path from 'path';
import { marked } from 'marked';
import helmet from 'helmet';
import rateLimit from 'express-rate-limit';
import { fileURLToPath } from 'url';
import winston from 'winston';
import unzipper from 'unzipper';
import { fileTypeFromBuffer } from 'file-type';
import { readChunkSync } from 'read-chunk';
import { v4 as uuidv4 } from 'uuid'
import Analyze from './analyze.js';
// Define __filename and __dirname using import.meta.url
// (ES modules do not provide the CommonJS __filename/__dirname globals)
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const app = express();
// Listen port comes from the environment, defaulting to 3000 for local runs
const port = process.env.PORT || 3000;
// Trust upstream proxies so req.ip reflects X-Forwarded-For.
// NOTE(review): `true` trusts every hop, which lets clients spoof their IP
// and weaken the per-IP rate limiter below — consider a hop count or CIDR
// list instead. TODO confirm deployment topology before changing.
app.set('trust proxy', true);
// Application-wide Winston logger: JSON lines with timestamps, written
// both to stdout and to app.log.
const logFormat = winston.format.combine(
  winston.format.timestamp(),
  winston.format.json()
);
const logger = winston.createLogger({
  level: 'info',
  format: logFormat,
  transports: [
    new winston.transports.Console(),
    new winston.transports.File({ filename: 'app.log' })
  ]
});
// Ensure the uploads and results directories exist before the server takes
// any work. { recursive: true } makes mkdirSync idempotent (no error when
// the directory already exists), which removes the exists-then-create race
// of the previous existsSync + mkdirSync pair.
const uploadsDir = path.join(__dirname, 'uploads');
fs.mkdirSync(uploadsDir, { recursive: true });
// Ensure the results directory exists
const resultsDir = path.join(__dirname, 'results');
fs.mkdirSync(resultsDir, { recursive: true });
// Parse an integer environment variable, falling back to a default when the
// variable is unset or not numeric. (Replaces three copy-pasted ternaries
// that each parsed the same variable twice.)
const envInt = (value, fallback) => {
  const parsed = Number.parseInt(value, 10);
  return Number.isNaN(parsed) ? fallback : parsed;
};
// Read file size, rate limit window, and max hits from environment variables
const FILE_SIZE_MB = envInt(process.env.FILE_SIZE_MB, 10); // Default is 10 MB
const RATE_LIMIT_S = envInt(process.env.RATE_LIMIT_S, 180); // Default 180 seconds (3 minutes)
const RATE_LIMIT_MAX = envInt(process.env.RATE_LIMIT_MAX, 10); // Default 10 requests per window
// Convert to the units multer and express-rate-limit expect
const FILE_SIZE_BYTES = FILE_SIZE_MB * 1024 * 1024;
const RATE_LIMIT_MS = RATE_LIMIT_S * 1000;
// Log the configured limits at startup
logger.info(`File size limit: ${FILE_SIZE_MB}MB (${FILE_SIZE_BYTES} bytes)`);
logger.info(`Rate limit window: ${RATE_LIMIT_S} seconds (${RATE_LIMIT_MS} ms), max ${RATE_LIMIT_MAX} requests per window`);
// Multer disk storage: uploads land in uploadsDir under a short random name.
const storage = multer.diskStorage({
  destination(req, file, cb) {
    cb(null, uploadsDir);
  },
  filename(req, file, cb) {
    // First UUID octet is a short unique-enough name for a transient upload;
    // stash it on the request so the route handler can locate the file.
    const shortId = uuidv4().split('-')[0];
    req.uploadName = shortId;
    cb(null, shortId);
  }
});
// Set CORS headers for all requests when ENABLE_CORS is set.
// NOTE: Access-Control-Allow-Credentials is deliberately NOT sent —
// browsers reject credentialed requests whose Allow-Origin is the "*"
// wildcard, so the original header combination could never take effect.
if (process.env.ENABLE_CORS === 'true') {
  logger.info('CORS is enabled for all origins');
  app.use((req, res, next) => {
    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
    // Answer preflight requests directly instead of letting them fall
    // through to the routes below.
    if (req.method === 'OPTIONS') {
      return res.sendStatus(204);
    }
    next();
  });
}
// Multer instance enforcing the configured per-file size cap
// (limit violations surface as MulterError in the error middleware below).
const upload = multer({
  storage,
  limits: { fileSize: FILE_SIZE_BYTES }
});
// Security headers on every response
app.use(helmet());
// Per-IP rate limiting to curb abuse; window and cap are env-configurable
const limiter = rateLimit({
  windowMs: RATE_LIMIT_MS,
  max: RATE_LIMIT_MAX
});
app.use(limiter);
// Log every incoming request: caller IP, method, URL, and (if any) body.
app.use((req, res, next) => {
  const { ip, method, url, body } = req;
  logger.info(`Incoming request: ${ip} ${method} ${url}`, { body });
  next();
});
// Best-effort removal of a file or extracted directory tree once processing
// finishes (or fails). force + recursive cover both files and directories;
// failures are logged, never thrown.
const deleteFile = async (target) => {
  const removal = fs.promises.rm(target, { recursive: true, force: true });
  try {
    await removal;
    logger.info(`Deleted target: ${target}`);
  } catch (err) {
    logger.error(`Failed to delete target: ${err.message}`);
  }
};
// Check the first 4 bytes of a file: a valid DMP starts with the ASCII
// magic "PAGE". Returns true when the header matches, false otherwise.
const checkFileHeader = (candidatePath) => {
  const headBytes = readChunkSync(candidatePath, { length: 4, startPosition: 0 });
  // latin1 maps each byte 0-255 to the same code point, matching the
  // original per-byte String.fromCharCode conversion.
  const magic = Buffer.from(headBytes).toString('latin1');
  if (magic === 'PAGE') {
    logger.info('File is a DMP with PAGE header');
    return true;
  }
  logger.warn(`Unsupported file header: ${magic}`);
  return false;
};
// Run the analyzer on a local file (or directory of files), persist the
// JSON result under a short random id, and reply to the client with that
// id. The analyzed target is always deleted afterwards, success or not.
const analyzeFile = async (filePath, res) => {
  logger.info(`Sending target: ${filePath} for analysis`);
  try {
    const rawResult = await Analyze(filePath);
    const parsed = JSON.parse(rawResult);
    // Short result id: first octet of a v4 UUID
    const resultId = uuidv4().split('-')[0];
    const resultPath = path.join(resultsDir, `${resultId}.json`);
    // Persist the pretty-printed result, then hand the id back to the client
    await fs.promises.writeFile(resultPath, JSON.stringify(parsed, null, 2), 'utf8');
    logger.info(`Analysis result saved: ${resultPath}`);
    res.json({ uuid: resultId });
  } catch (error) {
    logger.error(`Failed to analyze target: ${error.message}`);
    res.status(500).send("An error occurred while analyzing the file");
  } finally {
    await deleteFile(filePath);
  }
};
// GET endpoint to fetch a saved analysis result by its short id.
// SECURITY FIX: the id is validated against the generator's format (first
// octet of a v4 UUID = exactly 8 hex chars) before it touches the
// filesystem, so a crafted parameter (e.g. "..%2F..%2Fetc%2Fpasswd") can no
// longer traverse out of the results directory.
app.get('/:uuid', async (req, res) => {
  const { uuid } = req.params;
  if (!/^[0-9a-f]{8}$/i.test(uuid)) {
    logger.warn(`Rejected malformed result id: ${uuid}`);
    return res.status(404).send('Result not found');
  }
  const resultPath = path.join(resultsDir, `${uuid}.json`);
  logger.info(`Result requested: ${resultPath}`);
  try {
    // Read directly and treat ENOENT as 404 — avoids the existsSync/read race
    const data = await fs.promises.readFile(resultPath, 'utf8');
    logger.info(`Result served: ${resultPath}`);
    res.type('application/json').send(data);
  } catch (err) {
    if (err.code === 'ENOENT') {
      logger.info(`Result not found: ${resultPath}`);
      return res.status(404).send('Result not found');
    }
    logger.error(`Failed to fetch result for UUID ${uuid}: ${err.message}`);
    res.status(500).send('An error occurred while retrieving result');
  }
});
// PUT and POST endpoint to receive a .dmp file (multipart field "dmpFile")
// or a `url` query parameter pointing at one, and analyze it.
// Accepted payloads: a raw DMP (4-byte "PAGE" header) or a .zip containing
// up to 10 loose .dmp files, optionally inside a single "Minidump" directory.
const handleAnalyzeDmp = async (req, res) => {
  const uploadName = req.uploadName || uuidv4().split('-')[0]; // Set by multer's filename() for uploads
  const uploadPath = path.join(uploadsDir, `${uploadName}`);
  if (req.file) { // If a file is uploaded
    logger.info(`File uploaded: ${uploadPath}`);
  } else if (req.query.url) { // If a URL is provided
    const url = decodeURIComponent(req.query.url); // Decode the URL
    try {
      logger.info(`Fetching file from URL: ${url}`);
      const response = await axios({
        method: 'get',
        url,
        responseType: 'stream'
      });
      logger.info(`Writing file to: ${uploadPath}`);
      const writer = fs.createWriteStream(uploadPath);
      response.data.pipe(writer);
      await new Promise((resolve, reject) => {
        writer.on('finish', () => {
          logger.info(`File downloaded: ${uploadPath}`);
          resolve();
        });
        // BUG FIX: reject only. Previously this handler also sent a 500,
        // and the catch below then sent a second response
        // (ERR_HTTP_HEADERS_SENT).
        writer.on('error', reject);
      });
    } catch (error) {
      logger.error(`Failed to fetch URL: ${error.message}`);
      res.status(500).send(`An error occured while fetching URL: ${error.message}`);
      return; // Terminate on invalid URL or failed download
    }
  } else {
    logger.warn('No file or URL provided');
    res.status(400).send('No file or URL provided');
    // BUG FIX: without this return the handler fell through and crashed in
    // readFileSync on a path that was never written.
    return;
  }
  // Sniff the real content type from the file bytes (not client headers)
  const buffer = fs.readFileSync(uploadPath);
  const mimeTypeObj = await fileTypeFromBuffer(buffer);
  // If mimeTypeObj returns a valid response check that it is a zip,
  // otherwise it is not valid and we reject it
  if (mimeTypeObj) {
    logger.info(`File type is: ${mimeTypeObj.mime}`);
    if (mimeTypeObj.mime === 'application/zip') {
      logger.info(`.zip file uploaded`);
      const filePath = `${uploadPath}_dir`;
      // NOTE(review): unzipper.Extract is not guaranteed to guard against
      // zip-slip ("../" entry names) — confirm the installed version does.
      fs.createReadStream(uploadPath)
        .pipe(unzipper.Extract({ path: filePath }))
        .on('close', () => {
          logger.info(`.zip file extracted: ${filePath}`);
          deleteFile(uploadPath); // Delete zip file
          // Check for subdirectories and for more than 10 files in a zip.
          // If the checks fail delete the extracted directory.
          // Then check the contained files' headers, rejecting entirely when
          // no file is valid. Finally analyze the directory.
          fs.readdir(filePath, { withFileTypes: true }, (err, files) => {
            if (err) {
              logger.error(`Failed to read extracted directory: ${err.message}`);
              res.status(500).send(`An error occurred while reading the extracted directory: ${err.message}`);
              deleteFile(filePath);
              return;
            }
            // Log files contained in the archive
            logger.info('Files in the archive:');
            files.forEach(file => {
              logger.info(` - ${file.name} ${file.isDirectory() ? '(directory)' : '(file)'}`);
            });
            const hasSubdirectories = files.some(file => file.isDirectory());
            const hasMinidumpSubdirectory = files.some(file => file.isDirectory() && file.name === 'Minidump');
            // If there is a Minidump subdirectory analyze that directory.
            // We assume there are no invalid files in a Minidump directory;
            // testing shows the API won't choke on invalid files so meh.
            if (hasMinidumpSubdirectory) {
              logger.info('Archive contains Minidumps directory');
              // BUG FIX: was `${filePath}\\Minidump`, which embeds a literal
              // backslash in the name on POSIX systems; path.join is portable.
              const minidumpDir = path.join(filePath, 'Minidump');
              // List files in the Minidump directory
              const filesInMinidump = fs.readdirSync(minidumpDir);
              logger.info('Files in the Minidump directory:');
              filesInMinidump.forEach(file => {
                const miniPath = path.join(minidumpDir, file);
                const isDirectory = fs.statSync(miniPath).isDirectory();
                logger.info(` - ${file} ${isDirectory ? '(directory)' : '(file)'}`);
              });
              analyzeFile(minidumpDir, res);
            // if there are subdirectories that are not Minidump return 400
            } else if (hasSubdirectories) {
              logger.warn('Archive contains invalid subdirectories');
              res.status(400).send('Uploaded archive contains invalid subdirectories. .dmps must be loose files inside the single archive or in a Minidump directory');
              deleteFile(filePath);
            // If more than 10 files in an archive return 400
            } else if (files.length > 10) {
              logger.warn('Archive contains more than 10 files');
              res.status(400).send('Uploaded archive contains more than 10 files');
              deleteFile(filePath);
            // If no subdirectories validate the files then analyze
            } else {
              const validFiles = files.filter(file => checkFileHeader(path.join(filePath, file.name)));
              if (validFiles.length > 0) {
                analyzeFile(filePath, res);
              } else {
                logger.warn('Archive only contains unsupported file types');
                res.status(400).send('Uploaded archive only contains unsupported file types');
                deleteFile(filePath);
              }
            }
          });
        })
        .on('error', (err) => {
          logger.error(`Failed to extract .zip file: ${err.message}`);
          res.status(500).send(`An error occured while extracting .zip file: ${err.message}`);
          deleteFile(uploadPath);
        });
    } else {
      logger.warn('Unsupported file type');
      res.status(400).send('Unsupported file type');
      await deleteFile(uploadPath);
    }
  // If mimetype is undefined use the checkFileHeader function
  // to check first 4 bytes of the file, otherwise reject the file
  } else {
    if (checkFileHeader(uploadPath)) {
      const filePath = `${uploadPath}.dmp`; // analyzer expects a .dmp extension
      try {
        await fs.promises.rename(uploadPath, filePath);
        logger.info(`Renamed file: ${filePath}`);
      } catch (err) {
        logger.error('Failed to rename file:', err);
        res.status(500).send(`An error occurred while renaming file: ${err.message}`);
        await deleteFile(uploadPath);
        // BUG FIX: previously execution continued into analyzeFile after the
        // 500 was sent, producing a second response attempt.
        return;
      }
      analyzeFile(filePath, res);
    } else {
      res.status(400).send('Unsupported file header, file is not a valid .dmp');
      await deleteFile(uploadPath);
    }
  }
};
app.put('/analyze-dmp', upload.single('dmpFile'), handleAnalyzeDmp);
app.post('/analyze-dmp', upload.single('dmpFile'), handleAnalyzeDmp);
// GET endpoint to render the usage document (USAGE.md) as HTML.
// (Original comment said README.md, but the file actually read is USAGE.md.)
app.get('/', (req, res) => {
  const readmePath = path.join(__dirname, 'USAGE.md');
  fs.readFile(readmePath, 'utf8', (err, data) => {
    if (err) {
      logger.error(`Failed to read README file: ${err.message}`);
      // Typo fix in user-facing message: "occured" -> "occurred"
      res.status(500).send(`An error occurred while reading README file: ${err.message}`);
      return;
    }
    // mangle/headerIds silence legacy heading behavior.
    // NOTE(review): both options were removed from marked core in v5+ —
    // confirm the installed marked version still accepts them.
    const htmlContent = marked(data, { mangle: false, headerIds: false });
    res.send(htmlContent);
  });
});
// Delete saved analysis result files (*.json in resultsDir) whose
// modification time is more than seven days old. Errors are logged and
// swallowed so cleanup can never take down a request.
const cleanupOldResults = async () => {
  const WEEK_MS = 7 * 24 * 60 * 60 * 1000;
  const cutoff = Date.now() - WEEK_MS;
  try {
    const entries = await fs.promises.readdir(resultsDir);
    let deletedCount = 0;
    for (const entry of entries) {
      if (!entry.endsWith('.json')) continue; // only result files
      const entryPath = path.join(resultsDir, entry);
      const { mtimeMs } = await fs.promises.stat(entryPath);
      if (mtimeMs < cutoff) {
        await fs.promises.unlink(entryPath);
        logger.info(`Deleted old result file: ${entryPath}`);
        deletedCount += 1;
      }
    }
    if (deletedCount === 0) {
      logger.info('Cleanup ran: no old result files deleted.');
    }
  } catch (err) {
    logger.error(`Cleanup error: ${err.message}`);
  }
};
// Cleanup middleware: run after every request
// NOTE(review): this is registered AFTER the routes above, so Express only
// reaches it for requests no earlier handler answered (e.g. unknown paths);
// successfully handled requests never trigger cleanup. It also awaits a full
// directory scan inline on the request path — a periodic timer
// (setInterval(cleanupOldResults, ...)) would likely fit better. TODO confirm
// intended trigger before moving it.
app.use(async (req, res, next) => {
  await cleanupOldResults();
  next();
});
// Centralized error handler — last middleware in the chain. Multer
// size-limit violations become a 400 with a friendly message; anything
// else is logged with its stack and answered with a generic 500.
app.use((err, req, res, next) => {
  const isSizeLimit =
    err instanceof multer.MulterError && err.code === 'LIMIT_FILE_SIZE';
  if (isSizeLimit) {
    logger.warn(`File size exceeds the limit of ${FILE_SIZE_MB}MB`);
    return res.status(400).send(`File size exceeds the limit of ${FILE_SIZE_MB}MB`);
  }
  logger.error(`Unhandled failure: ${err.stack}`);
  res.status(500).send('Something broke, error is 500 but it might as well be 418');
});
// Start the Express server (port defaults to 3000 unless PORT is set)
app.listen(port, () => {
  logger.info(`App listening at http://localhost:${port}`);
});