Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions src/api/routes/backup.js
Original file line number Diff line number Diff line change
Expand Up @@ -184,8 +184,8 @@ router.post(
router.get(
'/',
(req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
(_req, res) => {
const backups = listBackups();
async (_req, res) => {
const backups = await listBackups();
res.json(backups);
},
Comment on lines +187 to 190
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Normalize error responses for async list/prune handlers.

These handlers await filesystem-backed operations without local error handling, so failures can bypass the route-specific JSON error contract used by neighboring endpoints.

🧩 Suggested consistency fix
 router.get(
   '/',
   (req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
   async (_req, res) => {
-    const backups = await listBackups();
-    res.json(backups);
+    try {
+      const backups = await listBackups();
+      return res.json(backups);
+    } catch (err) {
+      return res.status(500).json({ error: 'Failed to list backups', details: err.message });
+    }
   },
 );
@@
 router.post(
   '/prune',
   (req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
   async (req, res) => {
@@
-    const deleted = await pruneBackups(retention);
-    return res.json({ deleted, count: deleted.length });
+    try {
+      const deleted = await pruneBackups(retention);
+      return res.json({ deleted, count: deleted.length });
+    } catch (err) {
+      return res.status(500).json({ error: 'Failed to prune backups', details: err.message });
+    }
   },
 );

Also applies to: 423-443

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/api/routes/backup.js` around lines 187 - 190, The async route handlers
that call listBackups (and the similar prune handler at lines 423-443) lack
local error handling and can bypass the route's JSON error contract; wrap the
await calls in try/catch blocks inside the route functions (the async (_req,
res) => { ... } handler that calls listBackups and the prune handler that calls
the prune function), and on error return the same JSON error shape used by
neighboring endpoints (e.g., res.status(500).json({ error: 'backup_failed',
message: err.message }) or call next(err) if the router uses a centralized error
formatter) so all failures are normalized to the route-specific JSON error
response.

);
Expand Down Expand Up @@ -229,9 +229,9 @@ router.get(
router.post(
'/',
(req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
(_req, res) => {
async (_req, res) => {
try {
const meta = createBackup();
const meta = await createBackup();
return res.status(201).json({ id: meta.id, size: meta.size, createdAt: meta.createdAt });
} catch (err) {
return res.status(500).json({ error: 'Failed to create backup', details: err.message });
Expand Down Expand Up @@ -278,11 +278,11 @@ router.post(
router.get(
'/:id/download',
(req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
(req, res) => {
async (req, res) => {
const { id } = req.params;

try {
const payload = readBackup(id);
const payload = await readBackup(id);
const filename = `${id}.json`;
res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
res.setHeader('Content-Type', 'application/json');
Expand Down Expand Up @@ -420,7 +420,7 @@ router.post(
router.post(
'/prune',
(req, res, next) => requireGlobalAdmin('Backup access', req, res, next),
(req, res) => {
async (req, res) => {
const retention = req.body ?? {};
const errors = [];

Expand All @@ -439,7 +439,7 @@ router.post(
return res.status(400).json({ error: 'Invalid prune options', details: errors });
}

const deleted = pruneBackups(retention);
const deleted = await pruneBackups(retention);
return res.json({ deleted, count: deleted.length });
},
);
Expand Down
92 changes: 43 additions & 49 deletions src/modules/backup.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,7 @@
* @see https://github.com/VolvoxLLC/volvox-bot/issues/129
*/

import {
existsSync,
mkdirSync,
readdirSync,
readFileSync,
statSync,
unlinkSync,
writeFileSync,
} from 'node:fs';

// TODO: Consider switching to fs.promises for async operations to improve performance
// and avoid blocking the event loop with synchronous file system operations.
import { access, mkdir, readdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { SAFE_CONFIG_KEYS, SENSITIVE_FIELDS } from '../api/utils/configAllowlist.js';
Expand Down Expand Up @@ -45,12 +34,14 @@ let scheduledBackupInterval = null;
* Get or create the backup directory.
*
* @param {string} [dir] - Override backup directory path
* @returns {string} The backup directory path
* @returns {Promise<string>} The backup directory path
*/
export function getBackupDir(dir) {
export async function getBackupDir(dir) {
const backupDir = dir ?? DEFAULT_BACKUP_DIR;
if (!existsSync(backupDir)) {
mkdirSync(backupDir, { recursive: true });
try {
await access(backupDir);
} catch {
await mkdir(backupDir, { recursive: true });
}
return backupDir;
}
Expand Down Expand Up @@ -192,20 +183,20 @@ function makeBackupFilename(date = new Date()) {
* Create a timestamped backup of the current config in the backup directory.
*
* @param {string} [backupDir] - Override backup directory
* @returns {{id: string, path: string, size: number, createdAt: string}} Backup metadata
* @returns {Promise<{id: string, path: string, size: number, createdAt: string}>} Backup metadata
*/
export function createBackup(backupDir) {
const dir = getBackupDir(backupDir);
export async function createBackup(backupDir) {
const dir = await getBackupDir(backupDir);
const now = new Date();
const filename = makeBackupFilename(now);
const filePath = path.join(dir, filename);

const payload = exportConfig();
const json = JSON.stringify(payload, null, 2);

writeFileSync(filePath, json, 'utf8');
await writeFile(filePath, json, 'utf8');

const { size } = statSync(filePath);
const { size } = await stat(filePath);
const id = filename.replace('.json', '');

info('Config backup created', { id, path: filePath, size });
Expand All @@ -223,16 +214,17 @@ export function createBackup(backupDir) {
*
* @param {string} filename - Backup filename
* @param {string} dir - Directory containing the backup file
* @returns {{id: string, filename: string, createdAt: string, size: number} | null}
* @returns {Promise<{id: string, filename: string, createdAt: string, size: number} | null>}
*/
function parseBackupMeta(filename, dir) {
async function parseBackupMeta(filename, dir) {
const match = BACKUP_FILENAME_PATTERN.exec(filename);
if (!match) return null;

const filePath = path.join(dir, filename);
let size = 0;
try {
size = statSync(filePath).size;
const st = await stat(filePath);
size = st.size;
} catch {
return null;
}
Expand All @@ -252,22 +244,20 @@ function parseBackupMeta(filename, dir) {
* List all available backups, sorted newest first.
*
* @param {string} [backupDir] - Override backup directory
* @returns {Array<{id: string, filename: string, createdAt: string, size: number}>}
* @returns {Promise<Array<{id: string, filename: string, createdAt: string, size: number}>>}
*/
export function listBackups(backupDir) {
const dir = getBackupDir(backupDir);
export async function listBackups(backupDir) {
const dir = await getBackupDir(backupDir);

let files;
try {
files = readdirSync(dir);
files = await readdir(dir);
} catch {
return [];
}

const backups = files
.map((filename) => parseBackupMeta(filename, dir))
.filter(Boolean)
.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
const results = await Promise.all(files.map((filename) => parseBackupMeta(filename, dir)));
const backups = results.filter(Boolean).sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
Comment on lines +259 to +260
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🧹 Nitpick | 🔵 Trivial

Avoid unbounded stat fan-out in backup listing.

Promise.all(files.map(...)) scales linearly with file count and can hit file descriptor limits under large backup directories. Prefer sequential iteration or a small concurrency cap.

♻️ Suggested bounded approach
-  const results = await Promise.all(files.map((filename) => parseBackupMeta(filename, dir)));
-  const backups = results.filter(Boolean).sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
+  const backups = [];
+  for (const filename of files) {
+    const meta = await parseBackupMeta(filename, dir);
+    if (meta) backups.push(meta);
+  }
+  backups.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt));
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/modules/backup.js` around lines 259 - 260, The current
Promise.all(files.map(...)) in the backup listing causes an unbounded stat
fan-out; replace it with a bounded approach by iterating files with a controlled
concurrency or sequential loop to avoid hitting FD limits: either use a simple
for...of loop to await parseBackupMeta(filename, dir) one-by-one and push
non-null results into results, or use a small concurrency pool (e.g., limit
5-10) around parseBackupMeta to process files in batches; ensure you still
filter(Boolean) and sort the final backups as before (references: files,
parseBackupMeta, results, backups).


return backups;
}
Expand All @@ -277,26 +267,28 @@ export function listBackups(backupDir) {
*
* @param {string} id - Backup ID (filename without .json)
* @param {string} [backupDir] - Override backup directory
* @returns {Object} Parsed backup payload
* @returns {Promise<Object>} Parsed backup payload
* @throws {Error} If backup file not found or invalid
*/
export function readBackup(id, backupDir) {
export async function readBackup(id, backupDir) {
// Validate ID against strict pattern: backup-YYYY-MM-DDTHH-mm-ss-SSS-NNNN
const BACKUP_ID_PATTERN =
/^backup-[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}-[0-9]{2}-[0-9]{2}-[0-9]{3}-[0-9]{4}$/;
if (!BACKUP_ID_PATTERN.test(id)) {
throw new Error('Invalid backup ID');
}

const dir = getBackupDir(backupDir);
const dir = await getBackupDir(backupDir);
const filename = `${id}.json`;
const filePath = path.join(dir, filename);

if (!existsSync(filePath)) {
try {
await access(filePath);
} catch {
throw new Error(`Backup not found: ${id}`);
}

const raw = readFileSync(filePath, 'utf8');
const raw = await readFile(filePath, 'utf8');
Comment on lines +285 to +291
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Remove TOCTOU between existence check and file read.

The access() then readFile() sequence can race; if the file is removed in-between, raw ENOENT escapes your normalized “not found” handling and can leak internal path details upstream.

🛠️ Suggested fix (single read path with normalized errors)
-  try {
-    await access(filePath);
-  } catch {
-    throw new Error(`Backup not found: ${id}`);
-  }
-
-  const raw = await readFile(filePath, 'utf8');
+  let raw;
+  try {
+    raw = await readFile(filePath, 'utf8');
+  } catch (err) {
+    if (err && typeof err === 'object' && 'code' in err && err.code === 'ENOENT') {
+      throw new Error(`Backup not found: ${id}`);
+    }
+    throw new Error(`Failed to read backup: ${id}`);
+  }
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
try {
await access(filePath);
} catch {
throw new Error(`Backup not found: ${id}`);
}
const raw = readFileSync(filePath, 'utf8');
const raw = await readFile(filePath, 'utf8');
let raw;
try {
raw = await readFile(filePath, 'utf8');
} catch (err) {
if (err && typeof err === 'object' && 'code' in err && err.code === 'ENOENT') {
throw new Error(`Backup not found: ${id}`);
}
throw new Error(`Failed to read backup: ${id}`);
}
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/modules/backup.js` around lines 285 - 291, Replace the separate
access(filePath) check and readFile(filePath, 'utf8') with a single readFile
call and normalize errors: call readFile(filePath, 'utf8') inside a try/catch,
and if the caught error.code === 'ENOENT' throw new Error(`Backup not found:
${id}`) (otherwise rethrow or wrap other errors); remove the prior access(...)
branch so there is no TOCTOU window between access and readFile for filePath.

try {
return JSON.parse(raw);
} catch {
Expand All @@ -313,7 +305,7 @@ export function readBackup(id, backupDir) {
* @throws {Error} If backup not found or invalid
*/
export async function restoreBackup(id, backupDir) {
const payload = readBackup(id, backupDir);
const payload = await readBackup(id, backupDir);

const validationErrors = validateImportPayload(payload);
if (validationErrors.length > 0) {
Expand All @@ -338,12 +330,12 @@ export async function restoreBackup(id, backupDir) {
*
* @param {{daily?: number, weekly?: number}} [retention] - Retention counts
* @param {string} [backupDir] - Override backup directory
* @returns {string[]} IDs of deleted backups
* @returns {Promise<string[]>} IDs of deleted backups
*/
export function pruneBackups(retention, backupDir) {
export async function pruneBackups(retention, backupDir) {
const { daily = DEFAULT_RETENTION.daily, weekly = DEFAULT_RETENTION.weekly } = retention ?? {};
const dir = getBackupDir(backupDir);
const all = listBackups(dir);
const dir = await getBackupDir(backupDir);
const all = await listBackups(dir);

if (all.length === 0) return [];

Expand Down Expand Up @@ -372,7 +364,7 @@ export function pruneBackups(retention, backupDir) {
for (const backup of all) {
if (!toKeep.has(backup.id)) {
try {
unlinkSync(path.join(dir, backup.filename));
await unlink(path.join(dir, backup.filename));
deleted.push(backup.id);
info('Pruned old backup', { id: backup.id });
} catch (err) {
Expand Down Expand Up @@ -407,12 +399,14 @@ export function startScheduledBackups(opts = {}) {
info('Starting scheduled config backups', { intervalMs });

scheduledBackupInterval = setInterval(() => {
try {
createBackup(backupDir);
pruneBackups(retention, backupDir);
} catch (err) {
logError('Scheduled backup failed', { error: err.message });
}
void (async () => {
try {
await createBackup(backupDir);
await pruneBackups(retention, backupDir);
} catch (err) {
logError('Scheduled backup failed', { error: err.message });
}
})();
Comment on lines +402 to +409
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Prevent overlapping scheduled backup runs.

The interval callback launches async work and returns immediately; if one run exceeds intervalMs, subsequent ticks can overlap and race create/prune operations.

🛡️ Suggested in-flight guard
 let scheduledBackupInterval = null;
+let scheduledBackupInFlight = false;
@@
   scheduledBackupInterval = setInterval(() => {
+    if (scheduledBackupInFlight) {
+      warn('Previous scheduled backup run is still in progress — skipping tick');
+      return;
+    }
     void (async () => {
+      scheduledBackupInFlight = true;
       try {
         await createBackup(backupDir);
         await pruneBackups(retention, backupDir);
       } catch (err) {
         logError('Scheduled backup failed', { error: err.message });
+      } finally {
+        scheduledBackupInFlight = false;
       }
     })();
   }, intervalMs);
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/modules/backup.js` around lines 402 - 409, The scheduled backup IIFE can
overlap because it launches async work and returns immediately; add an in-flight
guard (e.g., a module-scoped boolean like isBackupRunning or a simple mutex)
checked at the start of the interval handler to skip if a run is already active,
set the flag true before calling createBackup(backupDir) and
pruneBackups(retention, backupDir), and ensure the flag is cleared in a finally
block so it is reset even on errors; also log when a tick is skipped to aid
observability.

}, intervalMs);
Comment on lines 401 to 410
Copy link

Copilot AI Mar 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

startScheduledBackups now kicks off an async IIFE inside setInterval without any in-flight guard. If createBackup/pruneBackups take longer than intervalMs, multiple runs can overlap, leading to concurrent backup/prune operations (race conditions, extra load, unexpected deletions). Consider serializing runs (e.g., a running flag that skips or queues the next tick, or using a recursive setTimeout scheduled after the prior run completes).

Copilot uses AI. Check for mistakes.

// Prevent the interval from keeping the process alive unnecessarily (e.g. in tests)
Expand Down
58 changes: 29 additions & 29 deletions tests/modules/backup.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -191,26 +191,26 @@ describe('importConfig', () => {
// --- createBackup / listBackups ---

describe('createBackup and listBackups', () => {
it('creates a backup file', () => {
const meta = createBackup(tmpDir);
it('creates a backup file', async () => {
const meta = await createBackup(tmpDir);
expect(meta.id).toMatch(/^backup-/);
expect(meta.size).toBeGreaterThan(0);
expect(meta.createdAt).toMatch(/^\d{4}-/);
});

it('lists created backups sorted newest first', async () => {
createBackup(tmpDir);
await createBackup(tmpDir);
await new Promise((r) => setTimeout(r, 10)); // ensure different timestamps (at least ms apart)
createBackup(tmpDir);
const backups = listBackups(tmpDir);
await createBackup(tmpDir);
const backups = await listBackups(tmpDir);
expect(backups.length).toBe(2);
expect(new Date(backups[0].createdAt) >= new Date(backups[1].createdAt)).toBe(true);
});

it('returns empty array when no backups', () => {
it('returns empty array when no backups', async () => {
const emptyDir = mkdtempSync(join(tmpdir(), 'empty-backup-'));
try {
expect(listBackups(emptyDir)).toEqual([]);
expect(await listBackups(emptyDir)).toEqual([]);
} finally {
rmSync(emptyDir, { recursive: true });
}
Expand All @@ -220,28 +220,28 @@ describe('createBackup and listBackups', () => {
// --- readBackup ---

describe('readBackup', () => {
it('reads a valid backup by id', () => {
const meta = createBackup(tmpDir);
const payload = readBackup(meta.id, tmpDir);
it('reads a valid backup by id', async () => {
const meta = await createBackup(tmpDir);
const payload = await readBackup(meta.id, tmpDir);
expect(payload).toHaveProperty('config');
expect(payload).toHaveProperty('exportedAt');
});

it('throws for unknown id', () => {
expect(() => readBackup('backup-9999-01-01T00-00-00-000-0000', tmpDir)).toThrow(
it('throws for unknown id', async () => {
await expect(readBackup('backup-9999-01-01T00-00-00-000-0000', tmpDir)).rejects.toThrow(
'Backup not found',
);
});

it('throws for path-traversal attempts', () => {
expect(() => readBackup('../etc/passwd', tmpDir)).toThrow('Invalid backup ID');
expect(() => readBackup('..\\windows\\system32', tmpDir)).toThrow('Invalid backup ID');
it('throws for path-traversal attempts', async () => {
await expect(readBackup('../etc/passwd', tmpDir)).rejects.toThrow('Invalid backup ID');
await expect(readBackup('..\\windows\\system32', tmpDir)).rejects.toThrow('Invalid backup ID');
});

it('throws for corrupted backup', () => {
it('throws for corrupted backup', async () => {
const badFile = join(tmpDir, 'backup-2020-01-01T00-00-00-000-0000.json');
writeFileSync(badFile, 'not json', 'utf8');
expect(() => readBackup('backup-2020-01-01T00-00-00-000-0000', tmpDir)).toThrow(
await expect(readBackup('backup-2020-01-01T00-00-00-000-0000', tmpDir)).rejects.toThrow(
'Backup file is corrupted',
);
});
Expand All @@ -251,7 +251,7 @@ describe('readBackup', () => {

describe('restoreBackup', () => {
it('restores config from a valid backup', async () => {
const meta = createBackup(tmpDir);
const meta = await createBackup(tmpDir);
const result = await restoreBackup(meta.id, tmpDir);
expect(result).toHaveProperty('applied');
expect(result).toHaveProperty('skipped');
Expand All @@ -270,25 +270,25 @@ describe('restoreBackup', () => {
// --- pruneBackups ---

describe('pruneBackups', () => {
it('keeps the N most recent backups', () => {
it('keeps the N most recent backups', async () => {
for (let i = 0; i < 5; i++) {
createBackup(tmpDir);
await createBackup(tmpDir);
}
const deleted = pruneBackups({ daily: 3, weekly: 0 }, tmpDir);
const deleted = await pruneBackups({ daily: 3, weekly: 0 }, tmpDir);
expect(deleted.length).toBe(2);
expect(listBackups(tmpDir).length).toBe(3);
expect((await listBackups(tmpDir)).length).toBe(3);
});

it('keeps zero backups when daily=0 and weekly=0', () => {
createBackup(tmpDir);
createBackup(tmpDir);
const deleted = pruneBackups({ daily: 0, weekly: 0 }, tmpDir);
it('keeps zero backups when daily=0 and weekly=0', async () => {
await createBackup(tmpDir);
await createBackup(tmpDir);
const deleted = await pruneBackups({ daily: 0, weekly: 0 }, tmpDir);
expect(deleted.length).toBe(2);
expect(listBackups(tmpDir).length).toBe(0);
expect((await listBackups(tmpDir)).length).toBe(0);
});

it('returns empty array when no backups exist', () => {
expect(pruneBackups({}, tmpDir)).toEqual([]);
it('returns empty array when no backups exist', async () => {
expect(await pruneBackups({}, tmpDir)).toEqual([]);
});
});

Expand Down
Loading