Skip to content

Commit 5e1a8e6

Browse files
committed
Fix sync tar.list when the file size is reduced while reading
Fix: #445 Fix: #446 Fix: GHSA-29xp-372q-xqph
1 parent 0fbeaed commit 5e1a8e6

File tree

2 files changed

+96
-3
lines changed

2 files changed

+96
-3
lines changed

src/list.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,13 +64,14 @@ const listFileSync = (opt: TarOptionsSyncFile) => {
6464
const readSize: number = opt.maxReadSize || 16 * 1024 * 1024
6565
if (stat.size < readSize) {
6666
const buf = Buffer.allocUnsafe(stat.size)
67-
fs.readSync(fd, buf, 0, stat.size, 0)
68-
p.end(buf)
67+
const read = fs.readSync(fd, buf, 0, stat.size, 0)
68+
p.end(read === buf.byteLength ? buf : buf.subarray(0, read))
6969
} else {
7070
let pos = 0
7171
const buf = Buffer.allocUnsafe(readSize)
7272
while (pos < stat.size) {
7373
const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
74+
if (bytesRead === 0) break
7475
pos += bytesRead
7576
p.write(buf.subarray(0, bytesRead))
7677
}

test/list.ts

Lines changed: 93 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import fs, { readFileSync } from 'fs'
1+
import fs, { readFileSync, Stats } from 'fs'
22
//@ts-ignore
33
import mutateFS from 'mutate-fs'
44
import { dirname, resolve } from 'path'
@@ -7,6 +7,7 @@ import { fileURLToPath } from 'url'
77
import { list } from '../dist/esm/list.js'
88
import { Parser } from '../dist/esm/parse.js'
99
import { ReadEntry } from '../dist/esm/read-entry.js'
10+
import { makeTar } from './fixtures/make-tar.js'
1011

1112
const __filename = fileURLToPath(import.meta.url)
1213
const __dirname = dirname(__filename)
@@ -276,3 +277,94 @@ t.test('typechecks', t => {
276277
t.type(p, Parser)
277278
t.end()
278279
})
280+
281+
// GHSA-29xp-372q-xqph
282+
t.test('reduce file size while synchronously reading', async t => {
283+
const data = makeTar([
284+
{
285+
type: 'File',
286+
path: 'a',
287+
size: 1,
288+
},
289+
'a',
290+
{
291+
type: 'File',
292+
path: 'b',
293+
size: 1,
294+
},
295+
'b',
296+
'',
297+
'',
298+
])
299+
const dataLen = data.byteLength
300+
const truncLen = 512 * 2
301+
const truncData = data.subarray(0, truncLen)
302+
303+
const setup = async (t: Test) => {
304+
const dir = t.testdir({ 'file.tar': data })
305+
const file = resolve(dir, 'file.tar')
306+
const { list } = await t.mockImport<
307+
typeof import('../src/list.js')
308+
>('../src/list.js', {
309+
'node:fs': t.createMock(fs, {
310+
fstatSync: (fd: number): Stats => {
311+
const st = fs.fstatSync(fd)
312+
// truncate the file before we have a chance to read
313+
fs.writeFileSync(file, truncData)
314+
return st
315+
},
316+
}),
317+
})
318+
319+
return { file, list }
320+
}
321+
322+
t.test(
323+
'gutcheck, reading normally reads the whole file',
324+
async t => {
325+
const dir = t.testdir({ 'file.tar': data })
326+
const file = resolve(dir, 'file.tar')
327+
const entries: string[] = []
328+
list({
329+
file,
330+
sync: true,
331+
maxReadSize: dataLen + 1,
332+
onReadEntry: e => entries.push(e.path),
333+
})
334+
t.strictSame(entries, ['a', 'b'])
335+
336+
entries.length = 0
337+
list({
338+
file,
339+
sync: true,
340+
maxReadSize: dataLen - 1,
341+
onReadEntry: e => entries.push(e.path),
342+
})
343+
t.strictSame(entries, ['a', 'b'])
344+
},
345+
)
346+
347+
t.test('read in one go', async t => {
348+
const { file, list } = await setup(t)
349+
const entries: string[] = []
350+
list({
351+
file,
352+
sync: true,
353+
maxReadSize: dataLen + 1,
354+
onReadEntry: e => entries.push(e.path),
355+
})
356+
t.strictSame(entries, ['a'])
357+
})
358+
359+
t.test('read in parts', async t => {
360+
const { file, list } = await setup(t)
361+
const entries: string[] = []
362+
list({
363+
file,
364+
sync: true,
365+
maxReadSize: dataLen / 4,
366+
onReadEntry: e => entries.push(e.path),
367+
})
368+
t.strictSame(entries, ['a'])
369+
})
370+
})

0 commit comments

Comments (0)