Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 44 additions & 10 deletions src/SharpCompress/IO/BufferedSubStream.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
Expand Down Expand Up @@ -28,14 +29,25 @@ protected override void Dispose(bool disposing)
#if DEBUG_STREAMS
this.DebugDispose(typeof(BufferedSubStream));
#endif
if (disposing) { }
if (_isDisposed)
{
return;
}
_isDisposed = true;

if (disposing && _cache is not null)
{
ArrayPool<byte>.Shared.Return(_cache);
_cache = null;
}
base.Dispose(disposing);
}

private int _cacheOffset;
private int _cacheLength;
private readonly byte[] _cache = new byte[32 << 10];
private byte[]? _cache = ArrayPool<byte>.Shared.Rent(81920);
private long origin;
private bool _isDisposed;

private long BytesLeftToRead { get; set; }

Expand All @@ -57,29 +69,51 @@ public override long Position

private void RefillCache()
{
    // Guard against use-after-dispose: once disposed, _cache has been
    // returned to ArrayPool and must not be read from or written to.
    if (_isDisposed)
    {
        throw new ObjectDisposedException(nameof(BufferedSubStream));
    }

    // Never read past the logical end of the sub-stream; _cache is
    // non-null here because the dispose guard above has passed.
    var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
    _cacheOffset = 0;
    if (count == 0)
    {
        _cacheLength = 0;
        return;
    }

    // Only seek if we're not already at the correct position.
    // This avoids expensive seek operations when reading sequentially.
    if (Stream.CanSeek && Stream.Position != origin)
    {
        Stream.Position = origin;
    }

    // Stream.Read may return fewer bytes than requested; advance the
    // bookkeeping by what was actually read, not by `count`.
    _cacheLength = Stream.Read(_cache, 0, count);
    origin += _cacheLength;
    BytesLeftToRead -= _cacheLength;
}

private async ValueTask RefillCacheAsync(CancellationToken cancellationToken)
{
var count = (int)Math.Min(BytesLeftToRead, _cache.Length);
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(BufferedSubStream));
}

var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
_cacheOffset = 0;
if (count == 0)
{
_cacheLength = 0;
return;
}
Stream.Position = origin;
// Only seek if we're not already at the correct position
// This avoids expensive seek operations when reading sequentially
if (Stream.CanSeek && Stream.Position != origin)
{
Stream.Position = origin;
}
_cacheLength = await Stream
.ReadAsync(_cache, 0, count, cancellationToken)
.ConfigureAwait(false);
Expand All @@ -102,7 +136,7 @@ public override int Read(byte[] buffer, int offset, int count)
}

count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}

Expand All @@ -120,7 +154,7 @@ public override int ReadByte()
}
}

return _cache[_cacheOffset++];
return _cache![_cacheOffset++];
}

public override async Task<int> ReadAsync(
Expand All @@ -143,7 +177,7 @@ CancellationToken cancellationToken
}

count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}

Expand All @@ -170,7 +204,7 @@ public override async ValueTask<int> ReadAsync(
}

count = Math.Min(count, _cacheLength - _cacheOffset);
_cache.AsSpan(_cacheOffset, count).CopyTo(buffer.Span);
_cache!.AsSpan(_cacheOffset, count).CopyTo(buffer.Span);
_cacheOffset += count;
}

Expand Down
1 change: 0 additions & 1 deletion src/SharpCompress/IO/SharpCompressStream.cs
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,6 @@ public override long Seek(long offset, SeekOrigin origin)
ValidateBufferState();
}

long orig = _internalPosition;
long targetPos;
// Calculate the absolute target position based on origin
switch (origin)
Expand Down
31 changes: 30 additions & 1 deletion tests/SharpCompress.Test/Streams/SharpCompressStreamTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
using System.Text;
using SharpCompress.Compressors.LZMA;
using SharpCompress.IO;
using SharpCompress.Test.Mocks;
using Xunit;

namespace SharpCompress.Test.Streams;
Expand Down Expand Up @@ -64,7 +65,14 @@ public void BufferReadAndSeekTest()
{
createData(ms);

using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using (
SharpCompressStream scs = new SharpCompressStream(
new ForwardOnlyStream(ms),
true,
false,
0x10000
)
)
{
IStreamStack stack = (IStreamStack)scs;

Expand All @@ -89,4 +97,25 @@ public void BufferReadAndSeekTest()
}
}
}

[Fact]
public void BufferedSubStream_DoubleDispose_DoesNotCorruptArrayPool()
{
    // Disposing twice must not hand the rented buffer back to
    // ArrayPool twice — double-return would let the pool give the
    // same array to two independent renters later.
    var payload = new byte[0x10000];
    using var source = new MemoryStream(payload);

    var subStream = new BufferedSubStream(source, 0, payload.Length);

    subStream.Dispose();

    // The second call must be a harmless no-op.
    subStream.Dispose();

    // Reaching this point without an exception is the success condition.
    Assert.True(true);
}
}
Loading