From d4380b6bb674120bd57d351ea83ef9cdc2c7ebfe Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 27 Nov 2025 16:48:09 +0000
Subject: [PATCH 01/26] Initial plan
From c082d4203b46cfd1a70ec4d4fec4d453a650e9e3 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 27 Nov 2025 16:58:32 +0000
Subject: [PATCH 02/26] Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.../Common/CompressionProgress.cs | 43 +++++++
.../IO/ProgressReportingStream.cs | 119 ++++++++++++++++++
src/SharpCompress/Writers/AbstractWriter.cs | 18 +++
src/SharpCompress/Writers/WriterOptions.cs | 7 ++
4 files changed, 187 insertions(+)
create mode 100644 src/SharpCompress/Common/CompressionProgress.cs
create mode 100644 src/SharpCompress/IO/ProgressReportingStream.cs
diff --git a/src/SharpCompress/Common/CompressionProgress.cs b/src/SharpCompress/Common/CompressionProgress.cs
new file mode 100644
index 000000000..67f479315
--- /dev/null
+++ b/src/SharpCompress/Common/CompressionProgress.cs
@@ -0,0 +1,43 @@
+namespace SharpCompress.Common;
+
+/// <summary>
+/// Represents progress information for compression operations.
+/// </summary>
+public sealed class CompressionProgress
+{
+ /// <summary>
+ /// Initializes a new instance of the <see cref="CompressionProgress"/> class.
+ /// </summary>
+ /// <param name="entryPath">The path of the entry being compressed.</param>
+ /// <param name="bytesRead">Number of bytes read from the source.</param>
+ /// <param name="totalBytes">Total bytes to be read from the source, or null if unknown.</param>
+ public CompressionProgress(string entryPath, long bytesRead, long? totalBytes)
+ {
+ EntryPath = entryPath;
+ BytesRead = bytesRead;
+ TotalBytes = totalBytes;
+ }
+
+ /// <summary>
+ /// Gets the path of the entry being compressed.
+ /// </summary>
+ public string EntryPath { get; }
+
+ /// <summary>
+ /// Gets the number of bytes read from the source so far.
+ /// </summary>
+ public long BytesRead { get; }
+
+ /// <summary>
+ /// Gets the total number of bytes to be read from the source, or null if unknown.
+ /// </summary>
+ public long? TotalBytes { get; }
+
+ /// <summary>
+ /// Gets the progress percentage (0-100), or null if total bytes is unknown.
+ /// </summary>
+ public double? PercentComplete =>
+ TotalBytes.HasValue && TotalBytes.Value > 0
+ ? (double)BytesRead / TotalBytes.Value * 100
+ : null;
+}
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
new file mode 100644
index 000000000..9192f9ca6
--- /dev/null
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -0,0 +1,119 @@
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+
+namespace SharpCompress.IO;
+
+/// <summary>
+/// A stream wrapper that reports progress as data is written.
+/// </summary>
+internal sealed class ProgressReportingStream : Stream
+{
+ private readonly Stream _baseStream;
+ private readonly IProgress<CompressionProgress> _progress;
+ private readonly string _entryPath;
+ private readonly long? _totalBytes;
+ private long _bytesWritten;
+
+ public ProgressReportingStream(
+ Stream baseStream,
+ IProgress<CompressionProgress> progress,
+ string entryPath,
+ long? totalBytes
+ )
+ {
+ _baseStream = baseStream;
+ _progress = progress;
+ _entryPath = entryPath;
+ _totalBytes = totalBytes;
+ }
+
+ public override bool CanRead => _baseStream.CanRead;
+
+ public override bool CanSeek => _baseStream.CanSeek;
+
+ public override bool CanWrite => _baseStream.CanWrite;
+
+ public override long Length => _baseStream.Length;
+
+ public override long Position
+ {
+ get => _baseStream.Position;
+ set => _baseStream.Position = value;
+ }
+
+ public override void Flush() => _baseStream.Flush();
+
+ public override int Read(byte[] buffer, int offset, int count) =>
+ _baseStream.Read(buffer, offset, count);
+
+ public override long Seek(long offset, SeekOrigin origin) =>
+ _baseStream.Seek(offset, origin);
+
+ public override void SetLength(long value) => _baseStream.SetLength(value);
+
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ _baseStream.Write(buffer, offset, count);
+ _bytesWritten += count;
+ ReportProgress();
+ }
+
+ public override void Write(ReadOnlySpan<byte> buffer)
+ {
+ _baseStream.Write(buffer);
+ _bytesWritten += buffer.Length;
+ ReportProgress();
+ }
+
+ public override async Task WriteAsync(
+ byte[] buffer,
+ int offset,
+ int count,
+ CancellationToken cancellationToken
+ )
+ {
+ await _baseStream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
+ _bytesWritten += count;
+ ReportProgress();
+ }
+
+ public override async ValueTask WriteAsync(
+ ReadOnlyMemory<byte> buffer,
+ CancellationToken cancellationToken = default
+ )
+ {
+ await _baseStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
+ _bytesWritten += buffer.Length;
+ ReportProgress();
+ }
+
+ public override void WriteByte(byte value)
+ {
+ _baseStream.WriteByte(value);
+ _bytesWritten++;
+ ReportProgress();
+ }
+
+ private void ReportProgress()
+ {
+ _progress.Report(new CompressionProgress(_entryPath, _bytesWritten, _totalBytes));
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ _baseStream.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ public override async ValueTask DisposeAsync()
+ {
+ await _baseStream.DisposeAsync().ConfigureAwait(false);
+ await base.DisposeAsync().ConfigureAwait(false);
+ }
+}
diff --git a/src/SharpCompress/Writers/AbstractWriter.cs b/src/SharpCompress/Writers/AbstractWriter.cs
index e75b93b6c..467fcb6b3 100644
--- a/src/SharpCompress/Writers/AbstractWriter.cs
+++ b/src/SharpCompress/Writers/AbstractWriter.cs
@@ -3,6 +3,7 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
+using SharpCompress.IO;
namespace SharpCompress.Writers;
@@ -22,6 +23,23 @@ public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptio
protected WriterOptions WriterOptions { get; } = writerOptions;
+ /// <summary>
+ /// Wraps the source stream with a progress-reporting stream if progress reporting is enabled.
+ /// </summary>
+ /// <param name="source">The source stream to wrap.</param>
+ /// <param name="entryPath">The path of the entry being written.</param>
+ /// <returns>A stream that reports progress, or the original stream if progress is not enabled.</returns>
+ protected Stream WrapWithProgress(Stream source, string entryPath)
+ {
+ if (WriterOptions.Progress is null)
+ {
+ return source;
+ }
+
+ long? totalBytes = source.CanSeek ? source.Length : null;
+ return new ProgressReportingStream(source, WriterOptions.Progress, entryPath, totalBytes);
+ }
+
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
public virtual async Task WriteAsync(
diff --git a/src/SharpCompress/Writers/WriterOptions.cs b/src/SharpCompress/Writers/WriterOptions.cs
index 361dfb550..4611dabc4 100644
--- a/src/SharpCompress/Writers/WriterOptions.cs
+++ b/src/SharpCompress/Writers/WriterOptions.cs
@@ -1,3 +1,4 @@
+using System;
using SharpCompress.Common;
using D = SharpCompress.Compressors.Deflate;
@@ -36,6 +37,12 @@ public WriterOptions(CompressionType compressionType, int compressionLevel)
///
public int CompressionLevel { get; set; }
+ /// <summary>
+ /// An optional progress reporter for tracking compression operations.
+ /// When set, progress updates will be reported as entries are written.
+ /// </summary>
+ public IProgress<CompressionProgress>? Progress { get; set; }
+
public static implicit operator WriterOptions(CompressionType compressionType) =>
new(compressionType);
}
From 0d487df61be7762abdbcb72ae56cdee8f2ebbf51 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 27 Nov 2025 19:15:10 +0000
Subject: [PATCH 03/26] Add IProgress support for compression operations with
tests
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.../IO/ProgressReportingStream.cs | 100 +++++++-----
src/SharpCompress/Writers/AbstractWriter.cs | 8 +-
src/SharpCompress/Writers/GZip/GZipWriter.cs | 3 +-
src/SharpCompress/Writers/Tar/TarWriter.cs | 6 +-
src/SharpCompress/Writers/Zip/ZipWriter.cs | 3 +-
.../CompressionProgressTests.cs | 143 ++++++++++++++++++
6 files changed, 223 insertions(+), 40 deletions(-)
create mode 100644 tests/SharpCompress.Test/CompressionProgressTests.cs
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index 9192f9ca6..8bb45092b 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -7,7 +7,8 @@
namespace SharpCompress.IO;
/// <summary>
-/// A stream wrapper that reports progress as data is written.
+/// A stream wrapper that reports progress as data is read from the source.
+/// Used to track compression progress by wrapping the source stream.
/// </summary>
internal sealed class ProgressReportingStream : Stream
{
@@ -15,19 +16,22 @@ internal sealed class ProgressReportingStream : Stream
private readonly IProgress<CompressionProgress> _progress;
private readonly string _entryPath;
private readonly long? _totalBytes;
- private long _bytesWritten;
+ private long _bytesRead;
+ private readonly bool _leaveOpen;
public ProgressReportingStream(
Stream baseStream,
IProgress<CompressionProgress> progress,
string entryPath,
- long? totalBytes
+ long? totalBytes,
+ bool leaveOpen = false
)
{
_baseStream = baseStream;
_progress = progress;
_entryPath = entryPath;
_totalBytes = totalBytes;
+ _leaveOpen = leaveOpen;
}
public override bool CanRead => _baseStream.CanRead;
@@ -46,65 +50,88 @@ public override long Position
public override void Flush() => _baseStream.Flush();
- public override int Read(byte[] buffer, int offset, int count) =>
- _baseStream.Read(buffer, offset, count);
-
- public override long Seek(long offset, SeekOrigin origin) =>
- _baseStream.Seek(offset, origin);
-
- public override void SetLength(long value) => _baseStream.SetLength(value);
-
- public override void Write(byte[] buffer, int offset, int count)
+ public override int Read(byte[] buffer, int offset, int count)
{
- _baseStream.Write(buffer, offset, count);
- _bytesWritten += count;
- ReportProgress();
+ var bytesRead = _baseStream.Read(buffer, offset, count);
+ if (bytesRead > 0)
+ {
+ _bytesRead += bytesRead;
+ ReportProgress();
+ }
+ return bytesRead;
}
- public override void Write(ReadOnlySpan buffer)
+ public override int Read(Span<byte> buffer)
{
- _baseStream.Write(buffer);
- _bytesWritten += buffer.Length;
- ReportProgress();
+ var bytesRead = _baseStream.Read(buffer);
+ if (bytesRead > 0)
+ {
+ _bytesRead += bytesRead;
+ ReportProgress();
+ }
+ return bytesRead;
}
- public override async Task WriteAsync(
+ public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
- await _baseStream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
- _bytesWritten += count;
- ReportProgress();
+ var bytesRead = await _baseStream
+ .ReadAsync(buffer, offset, count, cancellationToken)
+ .ConfigureAwait(false);
+ if (bytesRead > 0)
+ {
+ _bytesRead += bytesRead;
+ ReportProgress();
+ }
+ return bytesRead;
}
- public override async ValueTask WriteAsync(
- ReadOnlyMemory buffer,
+ public override async ValueTask<int> ReadAsync(
+ Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
- await _baseStream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
- _bytesWritten += buffer.Length;
- ReportProgress();
+ var bytesRead = await _baseStream
+ .ReadAsync(buffer, cancellationToken)
+ .ConfigureAwait(false);
+ if (bytesRead > 0)
+ {
+ _bytesRead += bytesRead;
+ ReportProgress();
+ }
+ return bytesRead;
}
- public override void WriteByte(byte value)
+ public override int ReadByte()
{
- _baseStream.WriteByte(value);
- _bytesWritten++;
- ReportProgress();
+ var value = _baseStream.ReadByte();
+ if (value != -1)
+ {
+ _bytesRead++;
+ ReportProgress();
+ }
+ return value;
}
+ public override long Seek(long offset, SeekOrigin origin) => _baseStream.Seek(offset, origin);
+
+ public override void SetLength(long value) => _baseStream.SetLength(value);
+
+ public override void Write(byte[] buffer, int offset, int count) =>
+ _baseStream.Write(buffer, offset, count);
+
private void ReportProgress()
{
- _progress.Report(new CompressionProgress(_entryPath, _bytesWritten, _totalBytes));
+ _progress.Report(new CompressionProgress(_entryPath, _bytesRead, _totalBytes));
}
protected override void Dispose(bool disposing)
{
- if (disposing)
+ if (disposing && !_leaveOpen)
{
_baseStream.Dispose();
}
@@ -113,7 +140,10 @@ protected override void Dispose(bool disposing)
public override async ValueTask DisposeAsync()
{
- await _baseStream.DisposeAsync().ConfigureAwait(false);
+ if (!_leaveOpen)
+ {
+ await _baseStream.DisposeAsync().ConfigureAwait(false);
+ }
await base.DisposeAsync().ConfigureAwait(false);
}
}
diff --git a/src/SharpCompress/Writers/AbstractWriter.cs b/src/SharpCompress/Writers/AbstractWriter.cs
index 467fcb6b3..d86ccc746 100644
--- a/src/SharpCompress/Writers/AbstractWriter.cs
+++ b/src/SharpCompress/Writers/AbstractWriter.cs
@@ -37,7 +37,13 @@ protected Stream WrapWithProgress(Stream source, string entryPath)
}
long? totalBytes = source.CanSeek ? source.Length : null;
- return new ProgressReportingStream(source, WriterOptions.Progress, entryPath, totalBytes);
+ return new ProgressReportingStream(
+ source,
+ WriterOptions.Progress,
+ entryPath,
+ totalBytes,
+ leaveOpen: true
+ );
}
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
diff --git a/src/SharpCompress/Writers/GZip/GZipWriter.cs b/src/SharpCompress/Writers/GZip/GZipWriter.cs
index 00100323e..ad1d59d77 100644
--- a/src/SharpCompress/Writers/GZip/GZipWriter.cs
+++ b/src/SharpCompress/Writers/GZip/GZipWriter.cs
@@ -47,7 +47,8 @@ public override void Write(string filename, Stream source, DateTime? modificatio
var stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
- source.CopyTo(stream);
+ var progressStream = WrapWithProgress(source, filename);
+ progressStream.CopyTo(stream);
_wroteToStream = true;
}
diff --git a/src/SharpCompress/Writers/Tar/TarWriter.cs b/src/SharpCompress/Writers/Tar/TarWriter.cs
index 96346e2bc..7d17d55a8 100644
--- a/src/SharpCompress/Writers/Tar/TarWriter.cs
+++ b/src/SharpCompress/Writers/Tar/TarWriter.cs
@@ -127,7 +127,8 @@ public void Write(string filename, Stream source, DateTime? modificationTime, lo
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
- size = source.TransferTo(OutputStream, realSize);
+ var progressStream = WrapWithProgress(source, filename);
+ size = progressStream.TransferTo(OutputStream, realSize);
PadTo512(size.Value);
}
@@ -159,7 +160,8 @@ public async Task WriteAsync(
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
- var written = await source
+ var progressStream = WrapWithProgress(source, filename);
+ var written = await progressStream
.TransferToAsync(OutputStream, realSize, cancellationToken)
.ConfigureAwait(false);
PadTo512(written);
diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.cs b/src/SharpCompress/Writers/Zip/ZipWriter.cs
index f867c8a91..1d5b5fc83 100644
--- a/src/SharpCompress/Writers/Zip/ZipWriter.cs
+++ b/src/SharpCompress/Writers/Zip/ZipWriter.cs
@@ -85,7 +85,8 @@ public override void Write(string entryPath, Stream source, DateTime? modificati
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
- source.CopyTo(output);
+ var progressStream = WrapWithProgress(source, entryPath);
+ progressStream.CopyTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
diff --git a/tests/SharpCompress.Test/CompressionProgressTests.cs b/tests/SharpCompress.Test/CompressionProgressTests.cs
new file mode 100644
index 000000000..576a8dd02
--- /dev/null
+++ b/tests/SharpCompress.Test/CompressionProgressTests.cs
@@ -0,0 +1,143 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Threading.Tasks;
+using SharpCompress.Common;
+using SharpCompress.Writers;
+using SharpCompress.Writers.Tar;
+using SharpCompress.Writers.Zip;
+using Xunit;
+
+namespace SharpCompress.Test;
+
+/// <summary>
+/// A synchronous progress implementation for testing.
+/// Unlike Progress&lt;T&gt;, this captures reports immediately without SynchronizationContext.
+/// </summary>
+internal sealed class TestProgress<T> : IProgress<T>
+{
+ private readonly List<T> _reports = new();
+
+ public IReadOnlyList<T> Reports => _reports;
+
+ public void Report(T value) => _reports.Add(value);
+}
+
+public class CompressionProgressTests : TestBase
+{
+ [Fact]
+ public void Zip_Write_ReportsProgress()
+ {
+ var progress = new TestProgress<CompressionProgress>();
+
+ using var archiveStream = new MemoryStream();
+ var options = new ZipWriterOptions(CompressionType.Deflate) { Progress = progress };
+
+ using (var writer = new ZipWriter(archiveStream, options))
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+ Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesRead);
+ Assert.Equal(100.0, lastReport.PercentComplete);
+ }
+
+ [Fact]
+ public void Tar_Write_ReportsProgress()
+ {
+ var progress = new TestProgress<CompressionProgress>();
+
+ using var archiveStream = new MemoryStream();
+ var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
+
+ using (var writer = new TarWriter(archiveStream, options))
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+ Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesRead);
+ Assert.Equal(100.0, lastReport.PercentComplete);
+ }
+
+ [Fact]
+ public void WriterOptions_WithoutProgress_DoesNotThrow()
+ {
+ using var archiveStream = new MemoryStream();
+ var options = new ZipWriterOptions(CompressionType.Deflate);
+ Assert.Null(options.Progress);
+
+ using (var writer = new ZipWriter(archiveStream, options))
+ {
+ var testData = new byte[100];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ Assert.True(archiveStream.Length > 0);
+ }
+
+ [Fact]
+ public void CompressionProgress_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
+ {
+ var progress = new CompressionProgress("test.txt", 100, null);
+ Assert.Null(progress.PercentComplete);
+ }
+
+ [Fact]
+ public void CompressionProgress_PercentComplete_WithZeroTotalBytes_ReturnsNull()
+ {
+ var progress = new CompressionProgress("test.txt", 0, 0);
+ Assert.Null(progress.PercentComplete);
+ }
+
+ [Fact]
+ public void CompressionProgress_Properties_AreSetCorrectly()
+ {
+ var progress = new CompressionProgress("path/to/file.txt", 500, 1000);
+
+ Assert.Equal("path/to/file.txt", progress.EntryPath);
+ Assert.Equal(500, progress.BytesRead);
+ Assert.Equal(1000, progress.TotalBytes);
+ Assert.Equal(50.0, progress.PercentComplete);
+ }
+
+ [Fact]
+ public async Task Tar_WriteAsync_ReportsProgress()
+ {
+ var progress = new TestProgress<CompressionProgress>();
+
+ using var archiveStream = new MemoryStream();
+ var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
+
+ using (var writer = new TarWriter(archiveStream, options))
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ await writer.WriteAsync("test.txt", sourceStream, DateTime.Now);
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesRead);
+ }
+}
From 0f374b27cf90e58906329f8c7969edb372a4d243 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 27 Nov 2025 19:17:10 +0000
Subject: [PATCH 04/26] Address code review: ProgressReportingStream now throws
on writes
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/IO/ProgressReportingStream.cs | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index 8bb45092b..af768462a 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -38,7 +38,7 @@ public ProgressReportingStream(
public override bool CanSeek => _baseStream.CanSeek;
- public override bool CanWrite => _baseStream.CanWrite;
+ public override bool CanWrite => false;
public override long Length => _baseStream.Length;
@@ -122,7 +122,9 @@ public override int ReadByte()
public override void SetLength(long value) => _baseStream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) =>
- _baseStream.Write(buffer, offset, count);
+ throw new NotSupportedException(
+ "ProgressReportingStream is designed for read operations to track compression progress."
+ );
private void ReportProgress()
{
From aa0356de9f2df0740aa00d49221c3973c9d6b627 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 29 Nov 2025 15:55:21 +0000
Subject: [PATCH 05/26] Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/Archives/AbstractArchive.cs | 28 ------
src/SharpCompress/Archives/IArchive.cs | 3 -
.../Archives/IArchiveExtractionListener.cs | 4 +-
.../Archives/Rar/RarArchiveEntry.cs | 8 +-
.../Common/CompressedBytesReadEventArgs.cs | 25 -----
.../Common/CompressionProgress.cs | 43 --------
.../FilePartExtractionBeginEventArgs.cs | 28 ------
.../Common/IExtractionListener.cs | 7 --
src/SharpCompress/Common/ProgressReport.cs | 43 ++++++++
.../Common/ReaderExtractionEventArgs.cs | 17 ----
.../Rar/MultiVolumeReadOnlyStream.cs | 38 +------
src/SharpCompress/IO/ListeningStream.cs | 97 ------------------
.../IO/ProgressReportingStream.cs | 26 +++--
src/SharpCompress/Readers/AbstractReader.cs | 98 +++++++++----------
src/SharpCompress/Readers/IReader.cs | 5 -
.../Readers/IReaderExtractionListener.cs | 8 --
src/SharpCompress/Readers/Rar/RarReader.cs | 6 +-
src/SharpCompress/Readers/ReaderOptions.cs | 7 ++
src/SharpCompress/Readers/ReaderProgress.cs | 21 ----
src/SharpCompress/Utility.cs | 66 -------------
src/SharpCompress/Writers/WriterOptions.cs | 2 +-
21 files changed, 119 insertions(+), 461 deletions(-)
delete mode 100644 src/SharpCompress/Common/CompressedBytesReadEventArgs.cs
delete mode 100644 src/SharpCompress/Common/CompressionProgress.cs
delete mode 100644 src/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs
delete mode 100644 src/SharpCompress/Common/IExtractionListener.cs
create mode 100644 src/SharpCompress/Common/ProgressReport.cs
delete mode 100644 src/SharpCompress/Common/ReaderExtractionEventArgs.cs
delete mode 100644 src/SharpCompress/IO/ListeningStream.cs
delete mode 100644 src/SharpCompress/Readers/IReaderExtractionListener.cs
delete mode 100644 src/SharpCompress/Readers/ReaderProgress.cs
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index 712db4de1..c4ea2857f 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -20,8 +20,6 @@ public abstract class AbstractArchive : IArchive, IArchiveExtra
public event EventHandler>? EntryExtractionBegin;
public event EventHandler>? EntryExtractionEnd;
- public event EventHandler? CompressedBytesRead;
- public event EventHandler? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -105,32 +103,6 @@ void IArchiveExtractionListener.EnsureEntriesLoaded()
_lazyVolumes.EnsureFullyLoaded();
}
- void IExtractionListener.FireCompressedBytesRead(
- long currentPartCompressedBytes,
- long compressedReadBytes
- ) =>
- CompressedBytesRead?.Invoke(
- this,
- new CompressedBytesReadEventArgs(
- currentFilePartCompressedBytesRead: currentPartCompressedBytes,
- compressedBytesRead: compressedReadBytes
- )
- );
-
- void IExtractionListener.FireFilePartExtractionBegin(
- string name,
- long size,
- long compressedSize
- ) =>
- FilePartExtractionBegin?.Invoke(
- this,
- new FilePartExtractionBeginEventArgs(
- compressedSize: compressedSize,
- size: size,
- name: name
- )
- );
-
///
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
diff --git a/src/SharpCompress/Archives/IArchive.cs b/src/SharpCompress/Archives/IArchive.cs
index 154529bfb..7ab31ccc6 100644
--- a/src/SharpCompress/Archives/IArchive.cs
+++ b/src/SharpCompress/Archives/IArchive.cs
@@ -10,9 +10,6 @@ public interface IArchive : IDisposable
event EventHandler> EntryExtractionBegin;
event EventHandler> EntryExtractionEnd;
- event EventHandler CompressedBytesRead;
- event EventHandler FilePartExtractionBegin;
-
IEnumerable Entries { get; }
IEnumerable Volumes { get; }
diff --git a/src/SharpCompress/Archives/IArchiveExtractionListener.cs b/src/SharpCompress/Archives/IArchiveExtractionListener.cs
index 7bc2ef34e..17e9a40e8 100644
--- a/src/SharpCompress/Archives/IArchiveExtractionListener.cs
+++ b/src/SharpCompress/Archives/IArchiveExtractionListener.cs
@@ -1,8 +1,6 @@
-using SharpCompress.Common;
-
namespace SharpCompress.Archives;
-internal interface IArchiveExtractionListener : IExtractionListener
+internal interface IArchiveExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
diff --git a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
index aaba6d1ec..69c54f310 100644
--- a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
+++ b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
@@ -76,7 +76,7 @@ public Stream OpenEntryStream()
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
- new MultiVolumeReadOnlyStream(Parts.Cast(), archive)
+ new MultiVolumeReadOnlyStream(Parts.Cast())
);
}
else
@@ -84,7 +84,7 @@ public Stream OpenEntryStream()
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
- new MultiVolumeReadOnlyStream(Parts.Cast(), archive)
+ new MultiVolumeReadOnlyStream(Parts.Cast())
);
}
@@ -100,7 +100,7 @@ public async Task OpenEntryStreamAsync(CancellationToken cancellationTok
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
- new MultiVolumeReadOnlyStream(Parts.Cast(), archive)
+ new MultiVolumeReadOnlyStream(Parts.Cast())
);
}
else
@@ -108,7 +108,7 @@ public async Task OpenEntryStreamAsync(CancellationToken cancellationTok
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
- new MultiVolumeReadOnlyStream(Parts.Cast(), archive)
+ new MultiVolumeReadOnlyStream(Parts.Cast())
);
}
diff --git a/src/SharpCompress/Common/CompressedBytesReadEventArgs.cs b/src/SharpCompress/Common/CompressedBytesReadEventArgs.cs
deleted file mode 100644
index 34ca461f1..000000000
--- a/src/SharpCompress/Common/CompressedBytesReadEventArgs.cs
+++ /dev/null
@@ -1,25 +0,0 @@
-using System;
-
-namespace SharpCompress.Common;
-
-public sealed class CompressedBytesReadEventArgs : EventArgs
-{
- public CompressedBytesReadEventArgs(
- long compressedBytesRead,
- long currentFilePartCompressedBytesRead
- )
- {
- CompressedBytesRead = compressedBytesRead;
- CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
- }
-
- ///
- /// Compressed bytes read for the current entry
- ///
- public long CompressedBytesRead { get; }
-
- ///
- /// Current file part read for Multipart files (e.g. Rar)
- ///
- public long CurrentFilePartCompressedBytesRead { get; }
-}
diff --git a/src/SharpCompress/Common/CompressionProgress.cs b/src/SharpCompress/Common/CompressionProgress.cs
deleted file mode 100644
index 67f479315..000000000
--- a/src/SharpCompress/Common/CompressionProgress.cs
+++ /dev/null
@@ -1,43 +0,0 @@
-namespace SharpCompress.Common;
-
-///
-/// Represents progress information for compression operations.
-///
-public sealed class CompressionProgress
-{
- ///
- /// Initializes a new instance of the class.
- ///
- /// The path of the entry being compressed.
- /// Number of bytes read from the source.
- /// Total bytes to be read from the source, or null if unknown.
- public CompressionProgress(string entryPath, long bytesRead, long? totalBytes)
- {
- EntryPath = entryPath;
- BytesRead = bytesRead;
- TotalBytes = totalBytes;
- }
-
- ///
- /// Gets the path of the entry being compressed.
- ///
- public string EntryPath { get; }
-
- ///
- /// Gets the number of bytes read from the source so far.
- ///
- public long BytesRead { get; }
-
- ///
- /// Gets the total number of bytes to be read from the source, or null if unknown.
- ///
- public long? TotalBytes { get; }
-
- ///
- /// Gets the progress percentage (0-100), or null if total bytes is unknown.
- ///
- public double? PercentComplete =>
- TotalBytes.HasValue && TotalBytes.Value > 0
- ? (double)BytesRead / TotalBytes.Value * 100
- : null;
-}
diff --git a/src/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs b/src/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs
deleted file mode 100644
index d5b8328cc..000000000
--- a/src/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs
+++ /dev/null
@@ -1,28 +0,0 @@
-using System;
-
-namespace SharpCompress.Common;
-
-public sealed class FilePartExtractionBeginEventArgs : EventArgs
-{
- public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
- {
- Name = name;
- Size = size;
- CompressedSize = compressedSize;
- }
-
- ///
- /// File name for the part for the current entry
- ///
- public string Name { get; }
-
- ///
- /// Uncompressed size of the current entry in the part
- ///
- public long Size { get; }
-
- ///
- /// Compressed size of the current entry in the part
- ///
- public long CompressedSize { get; }
-}
diff --git a/src/SharpCompress/Common/IExtractionListener.cs b/src/SharpCompress/Common/IExtractionListener.cs
deleted file mode 100644
index e1389810a..000000000
--- a/src/SharpCompress/Common/IExtractionListener.cs
+++ /dev/null
@@ -1,7 +0,0 @@
-namespace SharpCompress.Common;
-
-public interface IExtractionListener
-{
- void FireFilePartExtractionBegin(string name, long size, long compressedSize);
- void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
-}
diff --git a/src/SharpCompress/Common/ProgressReport.cs b/src/SharpCompress/Common/ProgressReport.cs
new file mode 100644
index 000000000..2d8368674
--- /dev/null
+++ b/src/SharpCompress/Common/ProgressReport.cs
@@ -0,0 +1,43 @@
+namespace SharpCompress.Common;
+
+/// <summary>
+/// Represents progress information for compression or extraction operations.
+/// </summary>
+public sealed class ProgressReport
+{
+ /// <summary>
+ /// Initializes a new instance of the <see cref="ProgressReport"/> class.
+ /// </summary>
+ /// <param name="entryPath">The path of the entry being processed.</param>
+ /// <param name="bytesTransferred">Number of bytes transferred so far.</param>
+ /// <param name="totalBytes">Total bytes to be transferred, or null if unknown.</param>
+ public ProgressReport(string entryPath, long bytesTransferred, long? totalBytes)
+ {
+ EntryPath = entryPath;
+ BytesTransferred = bytesTransferred;
+ TotalBytes = totalBytes;
+ }
+
+ /// <summary>
+ /// Gets the path of the entry being processed.
+ /// </summary>
+ public string EntryPath { get; }
+
+ /// <summary>
+ /// Gets the number of bytes transferred so far.
+ /// </summary>
+ public long BytesTransferred { get; }
+
+ /// <summary>
+ /// Gets the total number of bytes to be transferred, or null if unknown.
+ /// </summary>
+ public long? TotalBytes { get; }
+
+ /// <summary>
+ /// Gets the progress percentage (0-100), or null if total bytes is unknown.
+ /// </summary>
+ public double? PercentComplete =>
+ TotalBytes.HasValue && TotalBytes.Value > 0
+ ? (double)BytesTransferred / TotalBytes.Value * 100
+ : null;
+}
diff --git a/src/SharpCompress/Common/ReaderExtractionEventArgs.cs b/src/SharpCompress/Common/ReaderExtractionEventArgs.cs
deleted file mode 100644
index 7c4363e20..000000000
--- a/src/SharpCompress/Common/ReaderExtractionEventArgs.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-using System;
-using SharpCompress.Readers;
-
-namespace SharpCompress.Common;
-
-public sealed class ReaderExtractionEventArgs : EventArgs
-{
- internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
- {
- Item = entry;
- ReaderProgress = readerProgress;
- }
-
- public T Item { get; }
-
- public ReaderProgress? ReaderProgress { get; }
-}
diff --git a/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyStream.cs b/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyStream.cs
index d20fecd07..df1c59592 100644
--- a/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyStream.cs
+++ b/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyStream.cs
@@ -37,18 +37,8 @@ void IStreamStack.SetPosition(long position) { }
private IEnumerator filePartEnumerator;
private Stream currentStream;
- private readonly IExtractionListener streamListener;
-
- private long currentPartTotalReadBytes;
- private long currentEntryTotalReadBytes;
-
- internal MultiVolumeReadOnlyStream(
- IEnumerable parts,
- IExtractionListener streamListener
- )
+ internal MultiVolumeReadOnlyStream(IEnumerable parts)
{
- this.streamListener = streamListener;
-
filePartEnumerator = parts.GetEnumerator();
filePartEnumerator.MoveNext();
InitializeNextFilePart();
@@ -81,15 +71,7 @@ private void InitializeNextFilePart()
currentPosition = 0;
currentStream = filePartEnumerator.Current.GetCompressedStream();
- currentPartTotalReadBytes = 0;
-
CurrentCrc = filePartEnumerator.Current.FileHeader.FileCrc;
-
- streamListener.FireFilePartExtractionBegin(
- filePartEnumerator.Current.FilePartName,
- filePartEnumerator.Current.FileHeader.CompressedSize,
- filePartEnumerator.Current.FileHeader.UncompressedSize
- );
}
public override int Read(byte[] buffer, int offset, int count)
@@ -141,12 +123,6 @@ public override int Read(byte[] buffer, int offset, int count)
break;
}
}
- currentPartTotalReadBytes += totalRead;
- currentEntryTotalReadBytes += totalRead;
- streamListener.FireCompressedBytesRead(
- currentPartTotalReadBytes,
- currentEntryTotalReadBytes
- );
return totalRead;
}
@@ -206,12 +182,6 @@ System.Threading.CancellationToken cancellationToken
break;
}
}
- currentPartTotalReadBytes += totalRead;
- currentEntryTotalReadBytes += totalRead;
- streamListener.FireCompressedBytesRead(
- currentPartTotalReadBytes,
- currentEntryTotalReadBytes
- );
return totalRead;
}
@@ -270,12 +240,6 @@ public override async System.Threading.Tasks.ValueTask ReadAsync(
break;
}
}
- currentPartTotalReadBytes += totalRead;
- currentEntryTotalReadBytes += totalRead;
- streamListener.FireCompressedBytesRead(
- currentPartTotalReadBytes,
- currentEntryTotalReadBytes
- );
return totalRead;
}
#endif
diff --git a/src/SharpCompress/IO/ListeningStream.cs b/src/SharpCompress/IO/ListeningStream.cs
deleted file mode 100644
index fafe12b57..000000000
--- a/src/SharpCompress/IO/ListeningStream.cs
+++ /dev/null
@@ -1,97 +0,0 @@
-using System.IO;
-using SharpCompress.Common;
-
-namespace SharpCompress.IO;
-
-internal class ListeningStream : Stream, IStreamStack
-{
-#if DEBUG_STREAMS
- long IStreamStack.InstanceId { get; set; }
-#endif
- int IStreamStack.DefaultBufferSize { get; set; }
-
- Stream IStreamStack.BaseStream() => Stream;
-
- int IStreamStack.BufferSize
- {
- get => 0;
- set { return; }
- }
- int IStreamStack.BufferPosition
- {
- get => 0;
- set { return; }
- }
-
- void IStreamStack.SetPosition(long position) { }
-
- private long _currentEntryTotalReadBytes;
- private readonly IExtractionListener _listener;
-
- public ListeningStream(IExtractionListener listener, Stream stream)
- {
- Stream = stream;
- this._listener = listener;
-#if DEBUG_STREAMS
- this.DebugConstruct(typeof(ListeningStream));
-#endif
- }
-
- protected override void Dispose(bool disposing)
- {
-#if DEBUG_STREAMS
- this.DebugDispose(typeof(ListeningStream));
-#endif
- if (disposing)
- {
- Stream.Dispose();
- }
- base.Dispose(disposing);
- }
-
- public Stream Stream { get; }
-
- public override bool CanRead => Stream.CanRead;
-
- public override bool CanSeek => Stream.CanSeek;
-
- public override bool CanWrite => Stream.CanWrite;
-
- public override void Flush() => Stream.Flush();
-
- public override long Length => Stream.Length;
-
- public override long Position
- {
- get => Stream.Position;
- set => Stream.Position = value;
- }
-
- public override int Read(byte[] buffer, int offset, int count)
- {
- var read = Stream.Read(buffer, offset, count);
- _currentEntryTotalReadBytes += read;
- _listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
- return read;
- }
-
- public override int ReadByte()
- {
- var value = Stream.ReadByte();
- if (value == -1)
- {
- return -1;
- }
-
- ++_currentEntryTotalReadBytes;
- _listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
- return value;
- }
-
- public override long Seek(long offset, SeekOrigin origin) => Stream.Seek(offset, origin);
-
- public override void SetLength(long value) => Stream.SetLength(value);
-
- public override void Write(byte[] buffer, int offset, int count) =>
- Stream.Write(buffer, offset, count);
-}
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index af768462a..dc977a04a 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -8,20 +8,20 @@ namespace SharpCompress.IO;
///
/// A stream wrapper that reports progress as data is read from the source.
-/// Used to track compression progress by wrapping the source stream.
+/// Used to track compression or extraction progress by wrapping the source stream.
///
internal sealed class ProgressReportingStream : Stream
{
private readonly Stream _baseStream;
- private readonly IProgress _progress;
+ private readonly IProgress _progress;
private readonly string _entryPath;
private readonly long? _totalBytes;
- private long _bytesRead;
+ private long _bytesTransferred;
private readonly bool _leaveOpen;
public ProgressReportingStream(
Stream baseStream,
- IProgress progress,
+ IProgress progress,
string entryPath,
long? totalBytes,
bool leaveOpen = false
@@ -55,7 +55,7 @@ public override int Read(byte[] buffer, int offset, int count)
var bytesRead = _baseStream.Read(buffer, offset, count);
if (bytesRead > 0)
{
- _bytesRead += bytesRead;
+ _bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
@@ -66,7 +66,7 @@ public override int Read(Span buffer)
var bytesRead = _baseStream.Read(buffer);
if (bytesRead > 0)
{
- _bytesRead += bytesRead;
+ _bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
@@ -84,7 +84,7 @@ CancellationToken cancellationToken
.ConfigureAwait(false);
if (bytesRead > 0)
{
- _bytesRead += bytesRead;
+ _bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
@@ -95,12 +95,10 @@ public override async ValueTask ReadAsync(
CancellationToken cancellationToken = default
)
{
- var bytesRead = await _baseStream
- .ReadAsync(buffer, cancellationToken)
- .ConfigureAwait(false);
+ var bytesRead = await _baseStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (bytesRead > 0)
{
- _bytesRead += bytesRead;
+ _bytesTransferred += bytesRead;
ReportProgress();
}
return bytesRead;
@@ -111,7 +109,7 @@ public override int ReadByte()
var value = _baseStream.ReadByte();
if (value != -1)
{
- _bytesRead++;
+ _bytesTransferred++;
ReportProgress();
}
return value;
@@ -123,12 +121,12 @@ public override int ReadByte()
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException(
- "ProgressReportingStream is designed for read operations to track compression progress."
+ "ProgressReportingStream is designed for read operations to track progress."
);
private void ReportProgress()
{
- _progress.Report(new CompressionProgress(_entryPath, _bytesRead, _totalBytes));
+ _progress.Report(new ProgressReport(_entryPath, _bytesTransferred, _totalBytes));
}
protected override void Dispose(bool disposing)
diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs
index 17b74c2ae..35cd71009 100644
--- a/src/SharpCompress/Readers/AbstractReader.cs
+++ b/src/SharpCompress/Readers/AbstractReader.cs
@@ -5,13 +5,14 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
+using SharpCompress.IO;
namespace SharpCompress.Readers;
///
/// A generic push reader that reads unseekable comrpessed streams.
///
-public abstract class AbstractReader : IReader, IReaderExtractionListener
+public abstract class AbstractReader : IReader
where TEntry : Entry
where TVolume : Volume
{
@@ -19,11 +20,6 @@ public abstract class AbstractReader : IReader, IReaderExtracti
private IEnumerator? _entriesForCurrentReadStream;
private bool _wroteCurrentEntry;
- public event EventHandler>? EntryExtractionProgress;
-
- public event EventHandler? CompressedBytesRead;
- public event EventHandler? FilePartExtractionBegin;
-
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
{
ArchiveType = archiveType;
@@ -264,25 +260,66 @@ public async Task WriteEntryToAsync(
internal void Write(Stream writeStream)
{
- var streamListener = this as IReaderExtractionListener;
using Stream s = OpenEntryStream();
- s.TransferTo(writeStream, Entry, streamListener);
+ TransferWithProgress(s, writeStream, Entry);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
- var streamListener = this as IReaderExtractionListener;
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
- await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
+ await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
.ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
- await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
+ await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
.ConfigureAwait(false);
#endif
}
+ private void TransferWithProgress(Stream source, Stream destination, Entry entry)
+ {
+ var progress = Options.Progress;
+ var entryPath = entry.Key ?? string.Empty;
+ long? totalBytes = entry.Size > 0 ? entry.Size : null;
+ long transferred = 0;
+
+ var buffer = new byte[81920];
+ int bytesRead;
+ while ((bytesRead = source.Read(buffer, 0, buffer.Length)) > 0)
+ {
+ destination.Write(buffer, 0, bytesRead);
+ transferred += bytesRead;
+ progress?.Report(new ProgressReport(entryPath, transferred, totalBytes));
+ }
+ }
+
+ private async Task TransferWithProgressAsync(
+ Stream source,
+ Stream destination,
+ Entry entry,
+ CancellationToken cancellationToken
+ )
+ {
+ var progress = Options.Progress;
+ var entryPath = entry.Key ?? string.Empty;
+ long? totalBytes = entry.Size > 0 ? entry.Size : null;
+ long transferred = 0;
+
+ var buffer = new byte[81920];
+ int bytesRead;
+ while (
+ (bytesRead = await source.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
+ .ConfigureAwait(false)) > 0
+ )
+ {
+ await destination.WriteAsync(buffer, 0, bytesRead, cancellationToken)
+ .ConfigureAwait(false);
+ transferred += bytesRead;
+ progress?.Report(new ProgressReport(entryPath, transferred, totalBytes));
+ }
+ }
+
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
@@ -325,43 +362,4 @@ protected virtual Task GetEntryStreamAsync(
#endregion
IEntry IReader.Entry => Entry;
-
- void IExtractionListener.FireCompressedBytesRead(
- long currentPartCompressedBytes,
- long compressedReadBytes
- ) =>
- CompressedBytesRead?.Invoke(
- this,
- new CompressedBytesReadEventArgs(
- currentFilePartCompressedBytesRead: currentPartCompressedBytes,
- compressedBytesRead: compressedReadBytes
- )
- );
-
- void IExtractionListener.FireFilePartExtractionBegin(
- string name,
- long size,
- long compressedSize
- ) =>
- FilePartExtractionBegin?.Invoke(
- this,
- new FilePartExtractionBeginEventArgs(
- compressedSize: compressedSize,
- size: size,
- name: name
- )
- );
-
- void IReaderExtractionListener.FireEntryExtractionProgress(
- Entry entry,
- long bytesTransferred,
- int iterations
- ) =>
- EntryExtractionProgress?.Invoke(
- this,
- new ReaderExtractionEventArgs(
- entry,
- new ReaderProgress(entry, bytesTransferred, iterations)
- )
- );
}
diff --git a/src/SharpCompress/Readers/IReader.cs b/src/SharpCompress/Readers/IReader.cs
index c2d7c3c0e..574237088 100644
--- a/src/SharpCompress/Readers/IReader.cs
+++ b/src/SharpCompress/Readers/IReader.cs
@@ -8,11 +8,6 @@ namespace SharpCompress.Readers;
public interface IReader : IDisposable
{
- event EventHandler> EntryExtractionProgress;
-
- event EventHandler CompressedBytesRead;
- event EventHandler FilePartExtractionBegin;
-
ArchiveType ArchiveType { get; }
IEntry Entry { get; }
diff --git a/src/SharpCompress/Readers/IReaderExtractionListener.cs b/src/SharpCompress/Readers/IReaderExtractionListener.cs
deleted file mode 100644
index 49e5dae42..000000000
--- a/src/SharpCompress/Readers/IReaderExtractionListener.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-using SharpCompress.Common;
-
-namespace SharpCompress.Readers;
-
-public interface IReaderExtractionListener : IExtractionListener
-{
- void FireEntryExtractionProgress(Entry entry, long sizeTransferred, int iterations);
-}
diff --git a/src/SharpCompress/Readers/Rar/RarReader.cs b/src/SharpCompress/Readers/Rar/RarReader.cs
index 67af26842..9c6de26f8 100644
--- a/src/SharpCompress/Readers/Rar/RarReader.cs
+++ b/src/SharpCompress/Readers/Rar/RarReader.cs
@@ -108,8 +108,7 @@ protected override EntryStream GetEntryStream()
}
var stream = new MultiVolumeReadOnlyStream(
- CreateFilePartEnumerableForCurrentEntry().Cast(),
- this
+ CreateFilePartEnumerableForCurrentEntry().Cast()
);
if (Entry.IsRarV3)
{
@@ -136,8 +135,7 @@ protected override async System.Threading.Tasks.Task GetEntryStream
}
var stream = new MultiVolumeReadOnlyStream(
- CreateFilePartEnumerableForCurrentEntry().Cast(),
- this
+ CreateFilePartEnumerableForCurrentEntry().Cast()
);
if (Entry.IsRarV3)
{
diff --git a/src/SharpCompress/Readers/ReaderOptions.cs b/src/SharpCompress/Readers/ReaderOptions.cs
index 4ddfe94d2..cedf8bedb 100644
--- a/src/SharpCompress/Readers/ReaderOptions.cs
+++ b/src/SharpCompress/Readers/ReaderOptions.cs
@@ -1,3 +1,4 @@
+using System;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -21,4 +22,10 @@ public class ReaderOptions : OptionsBase
/// Provide a hint for the extension of the archive being read, can speed up finding the correct decoder. Should be without the leading period in the form like: tar.gz or zip
///
public string? ExtensionHint { get; set; }
+
+ ///
+ /// An optional progress reporter for tracking extraction operations.
+ /// When set, progress updates will be reported as entries are extracted.
+ ///
+ public IProgress? Progress { get; set; }
}
diff --git a/src/SharpCompress/Readers/ReaderProgress.cs b/src/SharpCompress/Readers/ReaderProgress.cs
deleted file mode 100644
index 2cffab9ab..000000000
--- a/src/SharpCompress/Readers/ReaderProgress.cs
+++ /dev/null
@@ -1,21 +0,0 @@
-using System;
-using SharpCompress.Common;
-
-namespace SharpCompress.Readers;
-
-public class ReaderProgress
-{
- private readonly IEntry _entry;
- public long BytesTransferred { get; }
- public int Iterations { get; }
-
- public int PercentageRead => (int)Math.Round(PercentageReadExact);
- public double PercentageReadExact => (float)BytesTransferred / _entry.Size * 100;
-
- public ReaderProgress(IEntry entry, long bytesTransferred, int iterations)
- {
- _entry = entry;
- BytesTransferred = bytesTransferred;
- Iterations = iterations;
- }
-}
diff --git a/src/SharpCompress/Utility.cs b/src/SharpCompress/Utility.cs
index d781c79b7..6c6276026 100644
--- a/src/SharpCompress/Utility.cs
+++ b/src/SharpCompress/Utility.cs
@@ -6,7 +6,6 @@
using System.Text;
using System.Threading;
using System.Threading.Tasks;
-using SharpCompress.Readers;
namespace SharpCompress;
@@ -216,34 +215,6 @@ public static long TransferTo(this Stream source, Stream destination, long maxLe
}
}
- public static long TransferTo(
- this Stream source,
- Stream destination,
- Common.Entry entry,
- IReaderExtractionListener readerExtractionListener
- )
- {
- var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE);
- try
- {
- var iterations = 0;
- long total = 0;
- int count;
- while ((count = source.Read(array, 0, array.Length)) != 0)
- {
- total += count;
- destination.Write(array, 0, count);
- iterations++;
- readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
- }
- return total;
- }
- finally
- {
- ArrayPool.Shared.Return(array);
- }
- }
-
public static async Task TransferToAsync(
this Stream source,
Stream destination,
@@ -290,43 +261,6 @@ await destination
}
}
- public static async Task TransferToAsync(
- this Stream source,
- Stream destination,
- Common.Entry entry,
- IReaderExtractionListener readerExtractionListener,
- CancellationToken cancellationToken = default
- )
- {
- var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE);
- try
- {
- var iterations = 0;
- long total = 0;
- int count;
- while (
- (
- count = await source
- .ReadAsync(array, 0, array.Length, cancellationToken)
- .ConfigureAwait(false)
- ) != 0
- )
- {
- total += count;
- await destination
- .WriteAsync(array, 0, count, cancellationToken)
- .ConfigureAwait(false);
- iterations++;
- readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
- }
- return total;
- }
- finally
- {
- ArrayPool.Shared.Return(array);
- }
- }
-
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
var size = maxSize;
diff --git a/src/SharpCompress/Writers/WriterOptions.cs b/src/SharpCompress/Writers/WriterOptions.cs
index 4611dabc4..46e145602 100644
--- a/src/SharpCompress/Writers/WriterOptions.cs
+++ b/src/SharpCompress/Writers/WriterOptions.cs
@@ -41,7 +41,7 @@ public WriterOptions(CompressionType compressionType, int compressionLevel)
/// An optional progress reporter for tracking compression operations.
/// When set, progress updates will be reported as entries are written.
///
- public IProgress? Progress { get; set; }
+ public IProgress? Progress { get; set; }
public static implicit operator WriterOptions(CompressionType compressionType) =>
new(compressionType);
From 8fc5ca5a71ec5f3ed66ac17116da512e74b39231 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 29 Nov 2025 17:40:10 +0000
Subject: [PATCH 06/26] Unify progress reporting: remove IExtractionListener
and add IProgress support for reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.../Archives/IArchiveEntryExtensions.cs | 29 ++---
.../IO/ProgressReportingStream.cs | 4 +-
src/SharpCompress/Readers/AbstractReader.cs | 27 ++++-
...rogressTests.cs => ProgressReportTests.cs} | 102 +++++++++++++++---
4 files changed, 119 insertions(+), 43 deletions(-)
rename tests/SharpCompress.Test/{CompressionProgressTests.cs => ProgressReportTests.cs} (54%)
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index 6e76f7f90..39d850ebb 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -2,7 +2,6 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
-using SharpCompress.IO;
namespace SharpCompress.Archives;
@@ -18,17 +17,8 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
- streamListener.FireFilePartExtractionBegin(
- archiveEntry.Key ?? "Key",
- archiveEntry.Size,
- archiveEntry.CompressedSize
- );
- var entryStream = archiveEntry.OpenEntryStream();
- using (entryStream)
- {
- using Stream s = new ListeningStream(streamListener, entryStream);
- s.CopyTo(streamToWriteTo);
- }
+ using var entryStream = archiveEntry.OpenEntryStream();
+ entryStream.CopyTo(streamToWriteTo);
streamListener.FireEntryExtractionEnd(archiveEntry);
}
@@ -46,17 +36,10 @@ public static async Task WriteToAsync(
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
- streamListener.FireFilePartExtractionBegin(
- archiveEntry.Key ?? "Key",
- archiveEntry.Size,
- archiveEntry.CompressedSize
- );
- var entryStream = archiveEntry.OpenEntryStream();
- using (entryStream)
- {
- using Stream s = new ListeningStream(streamListener, entryStream);
- await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
- }
+ using var entryStream = archiveEntry.OpenEntryStream();
+ await entryStream
+ .CopyToAsync(streamToWriteTo, 81920, cancellationToken)
+ .ConfigureAwait(false);
streamListener.FireEntryExtractionEnd(archiveEntry);
}
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index dc977a04a..3aa497afb 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -95,7 +95,9 @@ public override async ValueTask ReadAsync(
CancellationToken cancellationToken = default
)
{
- var bytesRead = await _baseStream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
+ var bytesRead = await _baseStream
+ .ReadAsync(buffer, cancellationToken)
+ .ConfigureAwait(false);
if (bytesRead > 0)
{
_bytesTransferred += bytesRead;
diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs
index 35cd71009..be9ace41a 100644
--- a/src/SharpCompress/Readers/AbstractReader.cs
+++ b/src/SharpCompress/Readers/AbstractReader.cs
@@ -277,11 +277,24 @@ await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
#endif
}
+ private static long? GetEntrySizeSafe(Entry entry)
+ {
+ try
+ {
+ var size = entry.Size;
+ return size > 0 ? size : null;
+ }
+ catch (NotImplementedException)
+ {
+ return null;
+ }
+ }
+
private void TransferWithProgress(Stream source, Stream destination, Entry entry)
{
var progress = Options.Progress;
var entryPath = entry.Key ?? string.Empty;
- long? totalBytes = entry.Size > 0 ? entry.Size : null;
+ long? totalBytes = GetEntrySizeSafe(entry);
long transferred = 0;
var buffer = new byte[81920];
@@ -303,17 +316,21 @@ CancellationToken cancellationToken
{
var progress = Options.Progress;
var entryPath = entry.Key ?? string.Empty;
- long? totalBytes = entry.Size > 0 ? entry.Size : null;
+ long? totalBytes = GetEntrySizeSafe(entry);
long transferred = 0;
var buffer = new byte[81920];
int bytesRead;
while (
- (bytesRead = await source.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
- .ConfigureAwait(false)) > 0
+ (
+ bytesRead = await source
+ .ReadAsync(buffer, 0, buffer.Length, cancellationToken)
+ .ConfigureAwait(false)
+ ) > 0
)
{
- await destination.WriteAsync(buffer, 0, bytesRead, cancellationToken)
+ await destination
+ .WriteAsync(buffer, 0, bytesRead, cancellationToken)
.ConfigureAwait(false);
transferred += bytesRead;
progress?.Report(new ProgressReport(entryPath, transferred, totalBytes));
diff --git a/tests/SharpCompress.Test/CompressionProgressTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs
similarity index 54%
rename from tests/SharpCompress.Test/CompressionProgressTests.cs
rename to tests/SharpCompress.Test/ProgressReportTests.cs
index 576a8dd02..883e09b6e 100644
--- a/tests/SharpCompress.Test/CompressionProgressTests.cs
+++ b/tests/SharpCompress.Test/ProgressReportTests.cs
@@ -3,6 +3,7 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
+using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using SharpCompress.Writers.Zip;
@@ -23,12 +24,12 @@ internal sealed class TestProgress : IProgress
public void Report(T value) => _reports.Add(value);
}
-public class CompressionProgressTests : TestBase
+public class ProgressReportTests : TestBase
{
[Fact]
public void Zip_Write_ReportsProgress()
{
- var progress = new TestProgress();
+ var progress = new TestProgress();
using var archiveStream = new MemoryStream();
var options = new ZipWriterOptions(CompressionType.Deflate) { Progress = progress };
@@ -46,14 +47,14 @@ public void Zip_Write_ReportsProgress()
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
var lastReport = progress.Reports[^1];
- Assert.Equal(10000, lastReport.BytesRead);
+ Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
[Fact]
public void Tar_Write_ReportsProgress()
{
- var progress = new TestProgress();
+ var progress = new TestProgress();
using var archiveStream = new MemoryStream();
var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
@@ -71,10 +72,50 @@ public void Tar_Write_ReportsProgress()
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
var lastReport = progress.Reports[^1];
- Assert.Equal(10000, lastReport.BytesRead);
+ Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
+ [Fact]
+ public void Zip_Read_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // First create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ // Now read it with progress reporting
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
+ {
+ while (reader.MoveToNextEntry())
+ {
+ if (!reader.Entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ reader.WriteEntryTo(extractedStream);
+ }
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
[Fact]
public void WriterOptions_WithoutProgress_DoesNotThrow()
{
@@ -94,26 +135,59 @@ public void WriterOptions_WithoutProgress_DoesNotThrow()
}
[Fact]
- public void CompressionProgress_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
+ public void ReaderOptions_WithoutProgress_DoesNotThrow()
+ {
+ // First create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = new byte[100];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ // Read without progress
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions();
+ Assert.Null(readerOptions.Progress);
+
+ using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
+ {
+ while (reader.MoveToNextEntry())
+ {
+ if (!reader.Entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ reader.WriteEntryTo(extractedStream);
+ }
+ }
+ }
+ }
+
+ [Fact]
+ public void ProgressReport_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
{
- var progress = new CompressionProgress("test.txt", 100, null);
+ var progress = new ProgressReport("test.txt", 100, null);
Assert.Null(progress.PercentComplete);
}
[Fact]
- public void CompressionProgress_PercentComplete_WithZeroTotalBytes_ReturnsNull()
+ public void ProgressReport_PercentComplete_WithZeroTotalBytes_ReturnsNull()
{
- var progress = new CompressionProgress("test.txt", 0, 0);
+ var progress = new ProgressReport("test.txt", 0, 0);
Assert.Null(progress.PercentComplete);
}
[Fact]
- public void CompressionProgress_Properties_AreSetCorrectly()
+ public void ProgressReport_Properties_AreSetCorrectly()
{
- var progress = new CompressionProgress("path/to/file.txt", 500, 1000);
+ var progress = new ProgressReport("path/to/file.txt", 500, 1000);
Assert.Equal("path/to/file.txt", progress.EntryPath);
- Assert.Equal(500, progress.BytesRead);
+ Assert.Equal(500, progress.BytesTransferred);
Assert.Equal(1000, progress.TotalBytes);
Assert.Equal(50.0, progress.PercentComplete);
}
@@ -121,7 +195,7 @@ public void CompressionProgress_Properties_AreSetCorrectly()
[Fact]
public async Task Tar_WriteAsync_ReportsProgress()
{
- var progress = new TestProgress();
+ var progress = new TestProgress();
using var archiveStream = new MemoryStream();
var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
@@ -138,6 +212,6 @@ public async Task Tar_WriteAsync_ReportsProgress()
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
var lastReport = progress.Reports[^1];
- Assert.Equal(10000, lastReport.BytesRead);
+ Assert.Equal(10000, lastReport.BytesTransferred);
}
}
From 7af029b5de8c12a7d7723809525433a8f91658ee Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 29 Nov 2025 17:48:50 +0000
Subject: [PATCH 07/26] Address code review: properly handle zero-sized entries
in progress reporting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/Readers/AbstractReader.cs | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs
index be9ace41a..b20abcfd3 100644
--- a/src/SharpCompress/Readers/AbstractReader.cs
+++ b/src/SharpCompress/Readers/AbstractReader.cs
@@ -282,7 +282,9 @@ await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
try
{
var size = entry.Size;
- return size > 0 ? size : null;
+ // Return the actual size (including 0 for empty entries)
+ // Negative values indicate unknown size
+ return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
From e2df7894f98bff34cd850512b0749d02d1c30677 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 30 Nov 2025 12:00:54 +0000
Subject: [PATCH 08/26] Remove IArchiveExtractionListener and add IProgress
support to Archive Entry extraction
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/Archives/AbstractArchive.cs | 20 ++--
src/SharpCompress/Archives/IArchive.cs | 3 -
.../Archives/IArchiveEntryExtensions.cs | 92 +++++++++++++--
.../Archives/IArchiveExtractionListener.cs | 8 --
.../Common/ArchiveExtractionEventArgs.cs | 10 --
.../SharpCompress.Test/ProgressReportTests.cs | 107 ++++++++++++++++++
6 files changed, 195 insertions(+), 45 deletions(-)
delete mode 100644 src/SharpCompress/Archives/IArchiveExtractionListener.cs
delete mode 100644 src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index c4ea2857f..05c508524 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -8,7 +8,7 @@
namespace SharpCompress.Archives;
-public abstract class AbstractArchive : IArchive, IArchiveExtractionListener
+public abstract class AbstractArchive : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -17,9 +17,6 @@ public abstract class AbstractArchive : IArchive, IArchiveExtra
private bool _disposed;
private readonly SourceStream? _sourceStream;
- public event EventHandler>? EntryExtractionBegin;
- public event EventHandler>? EntryExtractionEnd;
-
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -41,11 +38,10 @@ internal AbstractArchive(ArchiveType type)
public ArchiveType Type { get; }
- void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
- EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs(entry));
-
- void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
- EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs(entry));
+ ///
+ /// Gets the progress reporter for this archive, if one was set via ReaderOptions.
+ ///
+ internal IProgress? Progress => ReaderOptions.Progress;
private static Stream CheckStreams(Stream stream)
{
@@ -97,7 +93,7 @@ public virtual void Dispose()
}
}
- void IArchiveExtractionListener.EnsureEntriesLoaded()
+ internal void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
@@ -122,7 +118,7 @@ public IReader ExtractAllEntries()
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
- ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
+ EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -140,7 +136,7 @@ public bool IsComplete
{
get
{
- ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
+ EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
diff --git a/src/SharpCompress/Archives/IArchive.cs b/src/SharpCompress/Archives/IArchive.cs
index 7ab31ccc6..3ed7490d3 100644
--- a/src/SharpCompress/Archives/IArchive.cs
+++ b/src/SharpCompress/Archives/IArchive.cs
@@ -7,9 +7,6 @@ namespace SharpCompress.Archives;
public interface IArchive : IDisposable
{
- event EventHandler> EntryExtractionBegin;
- event EventHandler> EntryExtractionEnd;
-
IEnumerable Entries { get; }
IEnumerable Volumes { get; }
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index 39d850ebb..d4b50fe77 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -1,3 +1,4 @@
+using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -7,6 +8,8 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
+ private const int BufferSize = 81920;
+
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
{
if (archiveEntry.IsDirectory)
@@ -14,12 +17,31 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
- streamListener.EnsureEntriesLoaded();
- streamListener.FireEntryExtractionBegin(archiveEntry);
+ var archive = archiveEntry.Archive as dynamic;
+ archive.EnsureEntriesLoaded();
+
+ IProgress? progress = GetProgress(archiveEntry.Archive);
using var entryStream = archiveEntry.OpenEntryStream();
- entryStream.CopyTo(streamToWriteTo);
- streamListener.FireEntryExtractionEnd(archiveEntry);
+
+ if (progress is null)
+ {
+ entryStream.CopyTo(streamToWriteTo);
+ }
+ else
+ {
+ var entryPath = archiveEntry.Key ?? string.Empty;
+ long? totalBytes = GetEntrySizeSafe(archiveEntry);
+ long transferred = 0;
+
+ var buffer = new byte[BufferSize];
+ int bytesRead;
+ while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
+ {
+ streamToWriteTo.Write(buffer, 0, bytesRead);
+ transferred += bytesRead;
+ progress.Report(new ProgressReport(entryPath, transferred, totalBytes));
+ }
+ }
}
public static async Task WriteToAsync(
@@ -33,14 +55,60 @@ public static async Task WriteToAsync(
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
- streamListener.EnsureEntriesLoaded();
- streamListener.FireEntryExtractionBegin(archiveEntry);
+ var archive = archiveEntry.Archive as dynamic;
+ archive.EnsureEntriesLoaded();
+
+ IProgress? progress = GetProgress(archiveEntry.Archive);
using var entryStream = archiveEntry.OpenEntryStream();
- await entryStream
- .CopyToAsync(streamToWriteTo, 81920, cancellationToken)
- .ConfigureAwait(false);
- streamListener.FireEntryExtractionEnd(archiveEntry);
+
+ if (progress is null)
+ {
+ await entryStream
+ .CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
+ .ConfigureAwait(false);
+ }
+ else
+ {
+ var entryPath = archiveEntry.Key ?? string.Empty;
+ long? totalBytes = GetEntrySizeSafe(archiveEntry);
+ long transferred = 0;
+
+ var buffer = new byte[BufferSize];
+ int bytesRead;
+ while (
+ (
+ bytesRead = await entryStream
+ .ReadAsync(buffer, 0, buffer.Length, cancellationToken)
+ .ConfigureAwait(false)
+ ) > 0
+ )
+ {
+ await streamToWriteTo
+ .WriteAsync(buffer, 0, bytesRead, cancellationToken)
+ .ConfigureAwait(false);
+ transferred += bytesRead;
+ progress.Report(new ProgressReport(entryPath, transferred, totalBytes));
+ }
+ }
+ }
+
+ private static IProgress? GetProgress(IArchive archive)
+ {
+ // Try to get progress from the concrete archive type
+ return (archive as dynamic)?.Progress as IProgress;
+ }
+
+ private static long? GetEntrySizeSafe(IArchiveEntry entry)
+ {
+ try
+ {
+ var size = entry.Size;
+ return size >= 0 ? size : null;
+ }
+ catch (NotImplementedException)
+ {
+ return null;
+ }
}
///
diff --git a/src/SharpCompress/Archives/IArchiveExtractionListener.cs b/src/SharpCompress/Archives/IArchiveExtractionListener.cs
deleted file mode 100644
index 17e9a40e8..000000000
--- a/src/SharpCompress/Archives/IArchiveExtractionListener.cs
+++ /dev/null
@@ -1,8 +0,0 @@
-namespace SharpCompress.Archives;
-
-internal interface IArchiveExtractionListener
-{
- void EnsureEntriesLoaded();
- void FireEntryExtractionBegin(IArchiveEntry entry);
- void FireEntryExtractionEnd(IArchiveEntry entry);
-}
diff --git a/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs b/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
deleted file mode 100644
index 808177489..000000000
--- a/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-using System;
-
-namespace SharpCompress.Common;
-
-public class ArchiveExtractionEventArgs : EventArgs
-{
- internal ArchiveExtractionEventArgs(T entry) => Item = entry;
-
- public T Item { get; }
-}
diff --git a/tests/SharpCompress.Test/ProgressReportTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs
index 883e09b6e..bb6b3ab99 100644
--- a/tests/SharpCompress.Test/ProgressReportTests.cs
+++ b/tests/SharpCompress.Test/ProgressReportTests.cs
@@ -2,6 +2,8 @@
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
+using SharpCompress.Archives;
+using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
@@ -116,6 +118,82 @@ public void Zip_Read_ReportsProgress()
Assert.Equal(10000, lastReport.BytesTransferred);
}
+ [Fact]
+ public void ZipArchive_Entry_WriteTo_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // First create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ // Now open as archive and extract entry with progress
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using var archive = ZipArchive.Open(archiveStream, readerOptions);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ entry.WriteTo(extractedStream);
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
+ [Fact]
+ public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // First create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = new byte[10000];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ // Now open as archive and extract entry async with progress
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using var archive = ZipArchive.Open(archiveStream, readerOptions);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ await entry.WriteToAsync(extractedStream);
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
+
+ var lastReport = progress.Reports[^1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
[Fact]
public void WriterOptions_WithoutProgress_DoesNotThrow()
{
@@ -167,6 +245,35 @@ public void ReaderOptions_WithoutProgress_DoesNotThrow()
}
}
+ [Fact]
+ public void ZipArchive_WithoutProgress_DoesNotThrow()
+ {
+ // First create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = new byte[100];
+ Array.Fill(testData, (byte)'A');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("test.txt", sourceStream, DateTime.Now);
+ }
+
+ // Open archive and extract without progress
+ archiveStream.Position = 0;
+
+ using var archive = ZipArchive.Open(archiveStream);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ entry.WriteTo(extractedStream);
+ }
+ }
+ }
+
[Fact]
public void ProgressReport_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
{
From 0fdf9c74a8135bc4681bd75000c392b654810910 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 30 Nov 2025 12:07:17 +0000
Subject: [PATCH 09/26] Address code review: Replace dynamic with
IArchiveProgressInfo interface
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/Archives/AbstractArchive.cs | 10 +++++-----
.../Archives/IArchiveEntryExtensions.cs | 18 ++++++-----------
.../Archives/IArchiveProgressInfo.cs | 20 +++++++++++++++++++
3 files changed, 31 insertions(+), 17 deletions(-)
create mode 100644 src/SharpCompress/Archives/IArchiveProgressInfo.cs
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index 05c508524..0ba564adf 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -8,7 +8,7 @@
namespace SharpCompress.Archives;
-public abstract class AbstractArchive : IArchive
+public abstract class AbstractArchive : IArchive, IArchiveProgressInfo
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -41,7 +41,7 @@ internal AbstractArchive(ArchiveType type)
///
/// Gets the progress reporter for this archive, if one was set via ReaderOptions.
///
- internal IProgress? Progress => ReaderOptions.Progress;
+ IProgress? IArchiveProgressInfo.Progress => ReaderOptions.Progress;
private static Stream CheckStreams(Stream stream)
{
@@ -93,7 +93,7 @@ public virtual void Dispose()
}
}
- internal void EnsureEntriesLoaded()
+ void IArchiveProgressInfo.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
@@ -118,7 +118,7 @@ public IReader ExtractAllEntries()
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
- EnsureEntriesLoaded();
+ ((IArchiveProgressInfo)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -136,7 +136,7 @@ public bool IsComplete
{
get
{
- EnsureEntriesLoaded();
+ ((IArchiveProgressInfo)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index d4b50fe77..b419b6f0d 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -17,10 +17,10 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var archive = archiveEntry.Archive as dynamic;
- archive.EnsureEntriesLoaded();
+ var progressInfo = archiveEntry.Archive as IArchiveProgressInfo;
+ progressInfo?.EnsureEntriesLoaded();
- IProgress? progress = GetProgress(archiveEntry.Archive);
+ IProgress? progress = progressInfo?.Progress;
using var entryStream = archiveEntry.OpenEntryStream();
if (progress is null)
@@ -55,10 +55,10 @@ public static async Task WriteToAsync(
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var archive = archiveEntry.Archive as dynamic;
- archive.EnsureEntriesLoaded();
+ var progressInfo = archiveEntry.Archive as IArchiveProgressInfo;
+ progressInfo?.EnsureEntriesLoaded();
- IProgress? progress = GetProgress(archiveEntry.Archive);
+ IProgress? progress = progressInfo?.Progress;
using var entryStream = archiveEntry.OpenEntryStream();
if (progress is null)
@@ -92,12 +92,6 @@ await streamToWriteTo
}
}
- private static IProgress? GetProgress(IArchive archive)
- {
- // Try to get progress from the concrete archive type
- return (archive as dynamic)?.Progress as IProgress;
- }
-
private static long? GetEntrySizeSafe(IArchiveEntry entry)
{
try
diff --git a/src/SharpCompress/Archives/IArchiveProgressInfo.cs b/src/SharpCompress/Archives/IArchiveProgressInfo.cs
new file mode 100644
index 000000000..4708d1a5d
--- /dev/null
+++ b/src/SharpCompress/Archives/IArchiveProgressInfo.cs
@@ -0,0 +1,20 @@
+using System;
+using SharpCompress.Common;
+
+namespace SharpCompress.Archives;
+
+///
+/// Internal interface for archives that support progress reporting.
+///
+internal interface IArchiveProgressInfo
+{
+ ///
+ /// Gets the progress reporter for this archive, if one was set.
+ ///
+ IProgress? Progress { get; }
+
+ ///
+ /// Ensures all entries are loaded from the archive.
+ ///
+ void EnsureEntriesLoaded();
+}
From 14d432e22ddf56b1fc1b5f1127aa9b445dadef98 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 30 Nov 2025 13:04:40 +0000
Subject: [PATCH 10/26] Pass progress as parameter to WriteTo/WriteToAsync
instead of storing on archive
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
src/SharpCompress/Archives/AbstractArchive.cs | 13 +++-----
.../Archives/IArchiveEntryExtensions.cs | 32 +++++++++++++------
.../Archives/IArchiveProgressInfo.cs | 20 ------------
.../SharpCompress.Test/ProgressReportTests.cs | 14 ++++----
4 files changed, 32 insertions(+), 47 deletions(-)
delete mode 100644 src/SharpCompress/Archives/IArchiveProgressInfo.cs
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index 0ba564adf..c470b7f22 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -8,7 +8,7 @@
namespace SharpCompress.Archives;
-public abstract class AbstractArchive : IArchive, IArchiveProgressInfo
+public abstract class AbstractArchive : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -38,11 +38,6 @@ internal AbstractArchive(ArchiveType type)
public ArchiveType Type { get; }
- ///
- /// Gets the progress reporter for this archive, if one was set via ReaderOptions.
- ///
- IProgress? IArchiveProgressInfo.Progress => ReaderOptions.Progress;
-
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -93,7 +88,7 @@ public virtual void Dispose()
}
}
- void IArchiveProgressInfo.EnsureEntriesLoaded()
+ private void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
@@ -118,7 +113,7 @@ public IReader ExtractAllEntries()
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
- ((IArchiveProgressInfo)this).EnsureEntriesLoaded();
+ EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -136,7 +131,7 @@ public bool IsComplete
{
get
{
- ((IArchiveProgressInfo)this).EnsureEntriesLoaded();
+ EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index b419b6f0d..1db96e3ed 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -10,17 +10,23 @@ public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
- public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
+ ///
+ /// Extract entry to the specified stream.
+ ///
+ /// The archive entry to extract.
+ /// The stream to write the entry content to.
+ /// Optional progress reporter for tracking extraction progress.
+ public static void WriteTo(
+ this IArchiveEntry archiveEntry,
+ Stream streamToWriteTo,
+ IProgress? progress = null
+ )
{
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var progressInfo = archiveEntry.Archive as IArchiveProgressInfo;
- progressInfo?.EnsureEntriesLoaded();
-
- IProgress? progress = progressInfo?.Progress;
using var entryStream = archiveEntry.OpenEntryStream();
if (progress is null)
@@ -44,9 +50,17 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite
}
}
+ ///
+ /// Extract entry to the specified stream asynchronously.
+ ///
+ /// The archive entry to extract.
+ /// The stream to write the entry content to.
+ /// Optional progress reporter for tracking extraction progress.
+ /// Cancellation token.
public static async Task WriteToAsync(
this IArchiveEntry archiveEntry,
Stream streamToWriteTo,
+ IProgress? progress = null,
CancellationToken cancellationToken = default
)
{
@@ -55,10 +69,6 @@ public static async Task WriteToAsync(
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
- var progressInfo = archiveEntry.Archive as IArchiveProgressInfo;
- progressInfo?.EnsureEntriesLoaded();
-
- IProgress? progress = progressInfo?.Progress;
using var entryStream = archiveEntry.OpenEntryStream();
if (progress is null)
@@ -172,7 +182,9 @@ public static Task WriteToFileAsync(
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
- await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
+ await entry
+ .WriteToAsync(fs, progress: null, cancellationToken: cancellationToken)
+ .ConfigureAwait(false);
},
cancellationToken
);
diff --git a/src/SharpCompress/Archives/IArchiveProgressInfo.cs b/src/SharpCompress/Archives/IArchiveProgressInfo.cs
deleted file mode 100644
index 4708d1a5d..000000000
--- a/src/SharpCompress/Archives/IArchiveProgressInfo.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-using System;
-using SharpCompress.Common;
-
-namespace SharpCompress.Archives;
-
-///
-/// Internal interface for archives that support progress reporting.
-///
-internal interface IArchiveProgressInfo
-{
- ///
- /// Gets the progress reporter for this archive, if one was set.
- ///
- IProgress? Progress { get; }
-
- ///
- /// Ensures all entries are loaded from the archive.
- ///
- void EnsureEntriesLoaded();
-}
diff --git a/tests/SharpCompress.Test/ProgressReportTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs
index bb6b3ab99..52fb049cb 100644
--- a/tests/SharpCompress.Test/ProgressReportTests.cs
+++ b/tests/SharpCompress.Test/ProgressReportTests.cs
@@ -135,17 +135,16 @@ public void ZipArchive_Entry_WriteTo_ReportsProgress()
writer.Write("test.txt", sourceStream, DateTime.Now);
}
- // Now open as archive and extract entry with progress
+ // Now open as archive and extract entry with progress as parameter
archiveStream.Position = 0;
- var readerOptions = new ReaderOptions { Progress = progress };
- using var archive = ZipArchive.Open(archiveStream, readerOptions);
+ using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
- entry.WriteTo(extractedStream);
+ entry.WriteTo(extractedStream, progress);
}
}
@@ -173,17 +172,16 @@ public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
writer.Write("test.txt", sourceStream, DateTime.Now);
}
- // Now open as archive and extract entry async with progress
+ // Now open as archive and extract entry async with progress as parameter
archiveStream.Position = 0;
- var readerOptions = new ReaderOptions { Progress = progress };
- using var archive = ZipArchive.Open(archiveStream, readerOptions);
+ using var archive = ZipArchive.Open(archiveStream);
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
- await entry.WriteToAsync(extractedStream);
+ await entry.WriteToAsync(extractedStream, progress);
}
}
From 9291f5809186edd7584f30ace42468eac6450704 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Fri, 5 Dec 2025 11:44:17 +0000
Subject: [PATCH 11/26] Merge master and add comprehensive tests for archive
and reader progress
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.../IO/ProgressReportingStream.cs | 6 +
.../SharpCompress.Test/ProgressReportTests.cs | 330 ++++++++++++++++--
2 files changed, 312 insertions(+), 24 deletions(-)
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index 3aa497afb..ace639f62 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -61,6 +61,7 @@ public override int Read(byte[] buffer, int offset, int count)
return bytesRead;
}
+#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span buffer)
{
var bytesRead = _baseStream.Read(buffer);
@@ -71,6 +72,7 @@ public override int Read(Span buffer)
}
return bytesRead;
}
+#endif
public override async Task ReadAsync(
byte[] buffer,
@@ -90,6 +92,7 @@ CancellationToken cancellationToken
return bytesRead;
}
+#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask ReadAsync(
Memory buffer,
CancellationToken cancellationToken = default
@@ -105,6 +108,7 @@ public override async ValueTask ReadAsync(
}
return bytesRead;
}
+#endif
public override int ReadByte()
{
@@ -140,6 +144,7 @@ protected override void Dispose(bool disposing)
base.Dispose(disposing);
}
+#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (!_leaveOpen)
@@ -148,4 +153,5 @@ public override async ValueTask DisposeAsync()
}
await base.DisposeAsync().ConfigureAwait(false);
}
+#endif
}
diff --git a/tests/SharpCompress.Test/ProgressReportTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs
index 52fb049cb..37a23c8d6 100644
--- a/tests/SharpCompress.Test/ProgressReportTests.cs
+++ b/tests/SharpCompress.Test/ProgressReportTests.cs
@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
+using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
@@ -28,6 +29,16 @@ internal sealed class TestProgress : IProgress
public class ProgressReportTests : TestBase
{
+ private static byte[] CreateTestData(int size, byte fillValue)
+ {
+ var data = new byte[size];
+ for (var i = 0; i < size; i++)
+ {
+ data[i] = fillValue;
+ }
+ return data;
+ }
+
[Fact]
public void Zip_Write_ReportsProgress()
{
@@ -38,8 +49,7 @@ public void Zip_Write_ReportsProgress()
using (var writer = new ZipWriter(archiveStream, options))
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -48,7 +58,7 @@ public void Zip_Write_ReportsProgress()
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
@@ -63,8 +73,7 @@ public void Tar_Write_ReportsProgress()
using (var writer = new TarWriter(archiveStream, options))
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -73,7 +82,7 @@ public void Tar_Write_ReportsProgress()
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
Assert.Equal(100.0, lastReport.PercentComplete);
}
@@ -89,8 +98,7 @@ public void Zip_Read_ReportsProgress()
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -114,7 +122,7 @@ public void Zip_Read_ReportsProgress()
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
@@ -129,8 +137,7 @@ public void ZipArchive_Entry_WriteTo_ReportsProgress()
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -151,7 +158,7 @@ public void ZipArchive_Entry_WriteTo_ReportsProgress()
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
@@ -166,8 +173,7 @@ public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -188,7 +194,7 @@ public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
@@ -201,8 +207,7 @@ public void WriterOptions_WithoutProgress_DoesNotThrow()
using (var writer = new ZipWriter(archiveStream, options))
{
- var testData = new byte[100];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -219,8 +224,7 @@ public void ReaderOptions_WithoutProgress_DoesNotThrow()
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
- var testData = new byte[100];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -252,8 +256,7 @@ public void ZipArchive_WithoutProgress_DoesNotThrow()
var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
)
{
- var testData = new byte[100];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(100, (byte)'A');
using var sourceStream = new MemoryStream(testData);
writer.Write("test.txt", sourceStream, DateTime.Now);
}
@@ -297,6 +300,286 @@ public void ProgressReport_Properties_AreSetCorrectly()
Assert.Equal(50.0, progress.PercentComplete);
}
+ [Fact]
+ public void Tar_Read_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a tar archive first
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new TarWriter(
+ archiveStream,
+ new TarWriterOptions(CompressionType.None, true)
+ )
+ )
+ {
+ var testData = CreateTestData(10000, (byte)'B');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("data.bin", sourceStream, DateTime.Now);
+ }
+
+ // Now read it with progress reporting
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
+ {
+ while (reader.MoveToNextEntry())
+ {
+ if (!reader.Entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ reader.WriteEntryTo(extractedStream);
+ }
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("data.bin", p.EntryPath));
+
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
+ [Fact]
+ public void TarArchive_Entry_WriteTo_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a tar archive first
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new TarWriter(
+ archiveStream,
+ new TarWriterOptions(CompressionType.None, true)
+ )
+ )
+ {
+ var testData = CreateTestData(10000, (byte)'C');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("file.dat", sourceStream, DateTime.Now);
+ }
+
+ // Now open as archive and extract entry with progress as parameter
+ archiveStream.Position = 0;
+
+ using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ entry.WriteTo(extractedStream, progress);
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("file.dat", p.EntryPath));
+
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
+ [Fact]
+ public async Task TarArchive_Entry_WriteToAsync_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a tar archive first
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new TarWriter(
+ archiveStream,
+ new TarWriterOptions(CompressionType.None, true)
+ )
+ )
+ {
+ var testData = CreateTestData(10000, (byte)'D');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("async.dat", sourceStream, DateTime.Now);
+ }
+
+ // Now open as archive and extract entry async with progress as parameter
+ archiveStream.Position = 0;
+
+ using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ await entry.WriteToAsync(extractedStream, progress);
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("async.dat", p.EntryPath));
+
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
+ [Fact]
+ public void Zip_Read_MultipleEntries_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a zip archive with multiple entries
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData1 = CreateTestData(5000, (byte)'A');
+ using var sourceStream1 = new MemoryStream(testData1);
+ writer.Write("file1.txt", sourceStream1, DateTime.Now);
+
+ var testData2 = CreateTestData(8000, (byte)'B');
+ using var sourceStream2 = new MemoryStream(testData2);
+ writer.Write("file2.txt", sourceStream2, DateTime.Now);
+ }
+
+ // Now read it with progress reporting
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
+ {
+ while (reader.MoveToNextEntry())
+ {
+ if (!reader.Entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ reader.WriteEntryTo(extractedStream);
+ }
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+
+ // Should have reports for both files
+ var file1Reports = progress.Reports.Where(p => p.EntryPath == "file1.txt").ToList();
+ var file2Reports = progress.Reports.Where(p => p.EntryPath == "file2.txt").ToList();
+
+ Assert.NotEmpty(file1Reports);
+ Assert.NotEmpty(file2Reports);
+
+ // Verify final bytes for each file
+ Assert.Equal(5000, file1Reports[file1Reports.Count - 1].BytesTransferred);
+ Assert.Equal(8000, file2Reports[file2Reports.Count - 1].BytesTransferred);
+ }
+
+ [Fact]
+ public void ZipArchive_MultipleEntries_WriteTo_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a zip archive with multiple entries
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData1 = CreateTestData(5000, (byte)'A');
+ using var sourceStream1 = new MemoryStream(testData1);
+ writer.Write("entry1.txt", sourceStream1, DateTime.Now);
+
+ var testData2 = CreateTestData(7000, (byte)'B');
+ using var sourceStream2 = new MemoryStream(testData2);
+ writer.Write("entry2.txt", sourceStream2, DateTime.Now);
+ }
+
+ // Now open as archive and extract entries with progress as parameter
+ archiveStream.Position = 0;
+
+ using var archive = ZipArchive.Open(archiveStream);
+ foreach (var entry in archive.Entries)
+ {
+ if (!entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ entry.WriteTo(extractedStream, progress);
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+
+ // Should have reports for both files
+ var entry1Reports = progress.Reports.Where(p => p.EntryPath == "entry1.txt").ToList();
+ var entry2Reports = progress.Reports.Where(p => p.EntryPath == "entry2.txt").ToList();
+
+ Assert.NotEmpty(entry1Reports);
+ Assert.NotEmpty(entry2Reports);
+
+ // Verify final bytes for each entry
+ Assert.Equal(5000, entry1Reports[entry1Reports.Count - 1].BytesTransferred);
+ Assert.Equal(7000, entry2Reports[entry2Reports.Count - 1].BytesTransferred);
+ }
+
+ [Fact]
+ public async Task Zip_ReadAsync_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ // Create a zip archive
+ using var archiveStream = new MemoryStream();
+ using (
+ var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
+ )
+ {
+ var testData = CreateTestData(10000, (byte)'E');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("async_read.txt", sourceStream, DateTime.Now);
+ }
+
+ // Now read it with progress reporting
+ archiveStream.Position = 0;
+ var readerOptions = new ReaderOptions { Progress = progress };
+
+ using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
+ {
+ while (reader.MoveToNextEntry())
+ {
+ if (!reader.Entry.IsDirectory)
+ {
+ using var extractedStream = new MemoryStream();
+ await reader.WriteEntryToAsync(extractedStream);
+ }
+ }
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("async_read.txt", p.EntryPath));
+
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ }
+
+ [Fact]
+ public void GZip_Write_ReportsProgress()
+ {
+ var progress = new TestProgress();
+
+ using var archiveStream = new MemoryStream();
+ var options = new SharpCompress.Writers.GZip.GZipWriterOptions { Progress = progress };
+
+ using (var writer = new SharpCompress.Writers.GZip.GZipWriter(archiveStream, options))
+ {
+ var testData = CreateTestData(10000, (byte)'G');
+ using var sourceStream = new MemoryStream(testData);
+ writer.Write("gzip_test.txt", sourceStream, DateTime.Now);
+ }
+
+ Assert.NotEmpty(progress.Reports);
+ Assert.All(progress.Reports, p => Assert.Equal("gzip_test.txt", p.EntryPath));
+ Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
+
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
+ Assert.Equal(10000, lastReport.BytesTransferred);
+ Assert.Equal(100.0, lastReport.PercentComplete);
+ }
+
[Fact]
public async Task Tar_WriteAsync_ReportsProgress()
{
@@ -307,8 +590,7 @@ public async Task Tar_WriteAsync_ReportsProgress()
using (var writer = new TarWriter(archiveStream, options))
{
- var testData = new byte[10000];
- Array.Fill(testData, (byte)'A');
+ var testData = CreateTestData(10000, (byte)'A');
using var sourceStream = new MemoryStream(testData);
await writer.WriteAsync("test.txt", sourceStream, DateTime.Now);
}
@@ -316,7 +598,7 @@ public async Task Tar_WriteAsync_ReportsProgress()
Assert.NotEmpty(progress.Reports);
Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
- var lastReport = progress.Reports[^1];
+ var lastReport = progress.Reports[progress.Reports.Count - 1];
Assert.Equal(10000, lastReport.BytesTransferred);
}
}
From 3396f8fe006ed12cd4074091f3cc269f03658523 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Fri, 5 Dec 2025 11:55:29 +0000
Subject: [PATCH 12/26] Refactor to use ProgressReportingStream for progress
tracking
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.../Archives/IArchiveEntryExtensions.cs | 63 +++++-----------
src/SharpCompress/Readers/AbstractReader.cs | 71 +++++--------------
2 files changed, 37 insertions(+), 97 deletions(-)
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index 1db96e3ed..acf340ed8 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -3,6 +3,7 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
+using SharpCompress.IO;
namespace SharpCompress.Archives;
@@ -28,26 +29,8 @@ public static void WriteTo(
}
using var entryStream = archiveEntry.OpenEntryStream();
-
- if (progress is null)
- {
- entryStream.CopyTo(streamToWriteTo);
- }
- else
- {
- var entryPath = archiveEntry.Key ?? string.Empty;
- long? totalBytes = GetEntrySizeSafe(archiveEntry);
- long transferred = 0;
-
- var buffer = new byte[BufferSize];
- int bytesRead;
- while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
- {
- streamToWriteTo.Write(buffer, 0, bytesRead);
- transferred += bytesRead;
- progress.Report(new ProgressReport(entryPath, transferred, totalBytes));
- }
- }
+ var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
+ sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
///
@@ -70,36 +53,26 @@ public static async Task WriteToAsync(
}
using var entryStream = archiveEntry.OpenEntryStream();
+ var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
+ await sourceStream
+ .CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
+ .ConfigureAwait(false);
+ }
+ private static Stream WrapWithProgress(
+ Stream source,
+ IArchiveEntry entry,
+ IProgress? progress
+ )
+ {
if (progress is null)
{
- await entryStream
- .CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
- .ConfigureAwait(false);
+ return source;
}
- else
- {
- var entryPath = archiveEntry.Key ?? string.Empty;
- long? totalBytes = GetEntrySizeSafe(archiveEntry);
- long transferred = 0;
- var buffer = new byte[BufferSize];
- int bytesRead;
- while (
- (
- bytesRead = await entryStream
- .ReadAsync(buffer, 0, buffer.Length, cancellationToken)
- .ConfigureAwait(false)
- ) > 0
- )
- {
- await streamToWriteTo
- .WriteAsync(buffer, 0, bytesRead, cancellationToken)
- .ConfigureAwait(false);
- transferred += bytesRead;
- progress.Report(new ProgressReport(entryPath, transferred, totalBytes));
- }
- }
+ var entryPath = entry.Key ?? string.Empty;
+ long? totalBytes = GetEntrySizeSafe(entry);
+ return new ProgressReportingStream(source, progress, entryPath, totalBytes, leaveOpen: true);
}
private static long? GetEntrySizeSafe(IArchiveEntry entry)
diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs
index b20abcfd3..422230639 100644
--- a/src/SharpCompress/Readers/AbstractReader.cs
+++ b/src/SharpCompress/Readers/AbstractReader.cs
@@ -261,22 +261,36 @@ public async Task WriteEntryToAsync(
internal void Write(Stream writeStream)
{
using Stream s = OpenEntryStream();
- TransferWithProgress(s, writeStream, Entry);
+ var sourceStream = WrapWithProgress(s, Entry);
+ sourceStream.CopyTo(writeStream, 81920);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
- await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
- .ConfigureAwait(false);
+ var sourceStream = WrapWithProgress(s, Entry);
+ await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
- await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
- .ConfigureAwait(false);
+ var sourceStream = WrapWithProgress(s, Entry);
+ await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#endif
}
+ private Stream WrapWithProgress(Stream source, Entry entry)
+ {
+ var progress = Options.Progress;
+ if (progress is null)
+ {
+ return source;
+ }
+
+ var entryPath = entry.Key ?? string.Empty;
+ long? totalBytes = GetEntrySizeSafe(entry);
+ return new ProgressReportingStream(source, progress, entryPath, totalBytes, leaveOpen: true);
+ }
+
private static long? GetEntrySizeSafe(Entry entry)
{
try
@@ -292,53 +306,6 @@ await TransferWithProgressAsync(s, writeStream, Entry, cancellationToken)
}
}
- private void TransferWithProgress(Stream source, Stream destination, Entry entry)
- {
- var progress = Options.Progress;
- var entryPath = entry.Key ?? string.Empty;
- long? totalBytes = GetEntrySizeSafe(entry);
- long transferred = 0;
-
- var buffer = new byte[81920];
- int bytesRead;
- while ((bytesRead = source.Read(buffer, 0, buffer.Length)) > 0)
- {
- destination.Write(buffer, 0, bytesRead);
- transferred += bytesRead;
- progress?.Report(new ProgressReport(entryPath, transferred, totalBytes));
- }
- }
-
- private async Task TransferWithProgressAsync(
- Stream source,
- Stream destination,
- Entry entry,
- CancellationToken cancellationToken
- )
- {
- var progress = Options.Progress;
- var entryPath = entry.Key ?? string.Empty;
- long? totalBytes = GetEntrySizeSafe(entry);
- long transferred = 0;
-
- var buffer = new byte[81920];
- int bytesRead;
- while (
- (
- bytesRead = await source
- .ReadAsync(buffer, 0, buffer.Length, cancellationToken)
- .ConfigureAwait(false)
- ) > 0
- )
- {
- await destination
- .WriteAsync(buffer, 0, bytesRead, cancellationToken)
- .ConfigureAwait(false);
- transferred += bytesRead;
- progress?.Report(new ProgressReport(entryPath, transferred, totalBytes));
- }
- }
-
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
From c2f6055e3353bcade1b822618b9f964f6ad28b94 Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 10:26:45 +0000
Subject: [PATCH 13/26] format
---
src/SharpCompress/Archives/IArchiveEntryExtensions.cs | 8 +++++++-
src/SharpCompress/Readers/AbstractReader.cs | 8 +++++++-
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index acf340ed8..03545de9e 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -72,7 +72,13 @@ private static Stream WrapWithProgress(
var entryPath = entry.Key ?? string.Empty;
long? totalBytes = GetEntrySizeSafe(entry);
- return new ProgressReportingStream(source, progress, entryPath, totalBytes, leaveOpen: true);
+ return new ProgressReportingStream(
+ source,
+ progress,
+ entryPath,
+ totalBytes,
+ leaveOpen: true
+ );
}
private static long? GetEntrySizeSafe(IArchiveEntry entry)
diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs
index 422230639..cd37bb5ff 100644
--- a/src/SharpCompress/Readers/AbstractReader.cs
+++ b/src/SharpCompress/Readers/AbstractReader.cs
@@ -288,7 +288,13 @@ private Stream WrapWithProgress(Stream source, Entry entry)
var entryPath = entry.Key ?? string.Empty;
long? totalBytes = GetEntrySizeSafe(entry);
- return new ProgressReportingStream(source, progress, entryPath, totalBytes, leaveOpen: true);
+ return new ProgressReportingStream(
+ source,
+ progress,
+ entryPath,
+ totalBytes,
+ leaveOpen: true
+ );
}
private static long? GetEntrySizeSafe(Entry entry)
From c29407101598040fad6b04fa6859ecaec013a318 Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 10:55:06 +0000
Subject: [PATCH 14/26] Update
src/SharpCompress/Archives/IArchiveEntryExtensions.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
src/SharpCompress/Archives/IArchiveEntryExtensions.cs | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index 03545de9e..1ec3bfec1 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -38,13 +38,13 @@ public static void WriteTo(
///
/// The archive entry to extract.
/// The stream to write the entry content to.
- /// Optional progress reporter for tracking extraction progress.
/// Cancellation token.
+ /// Optional progress reporter for tracking extraction progress.
public static async Task WriteToAsync(
this IArchiveEntry archiveEntry,
Stream streamToWriteTo,
- IProgress? progress = null,
- CancellationToken cancellationToken = default
+ CancellationToken cancellationToken = default,
+ IProgress? progress = null
)
{
if (archiveEntry.IsDirectory)
From fd453e946d7236541a58ba2491344c375178c5b8 Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 10:55:24 +0000
Subject: [PATCH 15/26] Update src/SharpCompress/IO/ProgressReportingStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
src/SharpCompress/IO/ProgressReportingStream.cs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index ace639f62..86600c0ed 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -45,7 +45,7 @@ public ProgressReportingStream(
public override long Position
{
get => _baseStream.Position;
- set => _baseStream.Position = value;
+ set => throw new NotSupportedException("Directly setting Position is not supported in ProgressReportingStream to maintain progress tracking integrity.");
}
public override void Flush() => _baseStream.Flush();
From 1eaf3e6294fa4b0ba13899bb78af65ef119f36af Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 11:00:29 +0000
Subject: [PATCH 16/26] format with csharpier
---
src/SharpCompress/IO/ProgressReportingStream.cs | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/SharpCompress/IO/ProgressReportingStream.cs b/src/SharpCompress/IO/ProgressReportingStream.cs
index 86600c0ed..5ed028351 100644
--- a/src/SharpCompress/IO/ProgressReportingStream.cs
+++ b/src/SharpCompress/IO/ProgressReportingStream.cs
@@ -45,7 +45,10 @@ public ProgressReportingStream(
public override long Position
{
get => _baseStream.Position;
- set => throw new NotSupportedException("Directly setting Position is not supported in ProgressReportingStream to maintain progress tracking integrity.");
+ set =>
+ throw new NotSupportedException(
+ "Directly setting Position is not supported in ProgressReportingStream to maintain progress tracking integrity."
+ );
}
public override void Flush() => _baseStream.Flush();
From 618b4bbb8362a758ec80c2151e35d5c2f5964186 Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 11:04:08 +0000
Subject: [PATCH 17/26] try to tell agents to format
---
AGENTS.md | 4 +++-
build/Program.cs | 11 ++++++++++-
2 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/AGENTS.md b/AGENTS.md
index 4467fd553..c7b4a5a63 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -28,11 +28,13 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
## Code Formatting
+**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
+
- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
+- **To format code, run the task: `format` task (which runs `dotnet csharpier .` from project root)**
- Restore tools with: `dotnet tool restore`
- Format files from the project root with: `dotnet csharpier .`
-- **Run `dotnet csharpier .` from the project root after making code changes before committing**
- Configure your IDE to format on save using CSharpier for the best experience
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
diff --git a/build/Program.cs b/build/Program.cs
index b3bb2c47e..6141f6fc8 100644
--- a/build/Program.cs
+++ b/build/Program.cs
@@ -11,6 +11,7 @@
const string Build = "build";
const string Test = "test";
const string Format = "format";
+const string CheckFormat = "check-format";
const string Publish = "publish";
Target(
@@ -42,12 +43,20 @@ void RemoveDirectory(string d)
Target(
Format,
() =>
+ {
+ Run("dotnet", "tool restore");
+ Run("dotnet", "csharpier format .");
+ }
+);
+Target(
+ CheckFormat,
+ () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
-Target(Restore, [Format], () => Run("dotnet", "restore"));
+Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
Target(
Build,
From 40e72ad199ffb6b09b31abd9215c24b109ed978c Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Mon, 8 Dec 2025 11:11:51 +0000
Subject: [PATCH 18/26] fix AI edit
---
tests/SharpCompress.Test/ProgressReportTests.cs | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/tests/SharpCompress.Test/ProgressReportTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs
index 37a23c8d6..58ea43040 100644
--- a/tests/SharpCompress.Test/ProgressReportTests.cs
+++ b/tests/SharpCompress.Test/ProgressReportTests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
@@ -187,7 +188,7 @@ public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
- await entry.WriteToAsync(extractedStream, progress);
+ await entry.WriteToAsync(extractedStream, CancellationToken.None, progress);
}
}
@@ -409,7 +410,7 @@ public async Task TarArchive_Entry_WriteToAsync_ReportsProgress()
if (!entry.IsDirectory)
{
using var extractedStream = new MemoryStream();
- await entry.WriteToAsync(extractedStream, progress);
+ await entry.WriteToAsync(extractedStream, CancellationToken.None, progress);
}
}
From aaa97e2ce26ba230b464d17aec52579bb4fca791 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Fri, 19 Dec 2025 11:36:44 +0000
Subject: [PATCH 19/26] Merge master branch - add ZStandard compression support
and TarHeaderWriteFormat
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
---
.config/dotnet-tools.json | 2 +-
.github/workflows/dotnetcore.yml | 2 +-
src/SharpCompress/AssemblyInfo.cs | 3 +-
.../Common/Tar/Headers/TarHeader.cs | 133 +-
.../Tar/Headers/TarHeaderWriteFormat.cs | 7 +
src/SharpCompress/Common/Zip/ZipFilePart.cs | 2 +-
.../Compressors/LZMA/Registry.cs | 2 +-
.../Compressors/ZStandard/BitOperations.cs | 311 +
.../ZStandard/CompressionStream.cs | 301 +
.../Compressors/ZStandard/Compressor.cs | 204 +
.../Compressors/ZStandard/Constants.cs | 8 +
.../ZStandard/DecompressionStream.cs | 293 +
.../Compressors/ZStandard/Decompressor.cs | 176 +
.../Compressors/ZStandard/JobThreadPool.cs | 141 +
.../Compressors/ZStandard/SafeHandles.cs | 163 +
.../ZStandard/SynchronizationWrapper.cs | 22 +
.../Compressors/ZStandard/ThrowHelper.cs | 48 +
.../Compressors/ZStandard/UnmanagedObject.cs | 18 +
.../ZStandard/Unsafe/Allocations.cs | 52 +
.../ZStandard/Unsafe/BIT_CStream_t.cs | 14 +
.../ZStandard/Unsafe/BIT_DStream_status.cs | 16 +
.../ZStandard/Unsafe/BIT_DStream_t.cs | 13 +
.../Compressors/ZStandard/Unsafe/Bits.cs | 60 +
.../Compressors/ZStandard/Unsafe/Bitstream.cs | 739 +
.../ZStandard/Unsafe/BlockSummary.cs | 8 +
.../ZStandard/Unsafe/COVER_best_s.cs | 20 +
.../ZStandard/Unsafe/COVER_ctx_t.cs | 19 +
.../ZStandard/Unsafe/COVER_dictSelection.cs | 11 +
.../ZStandard/Unsafe/COVER_epoch_info_t.cs | 10 +
.../ZStandard/Unsafe/COVER_map_pair_t_s.cs | 7 +
.../ZStandard/Unsafe/COVER_map_s.cs | 9 +
.../ZStandard/Unsafe/COVER_segment_t.cs | 11 +
.../Unsafe/COVER_tryParameters_data_s.cs | 12 +
.../Compressors/ZStandard/Unsafe/Clevels.cs | 849 ++
.../Compressors/ZStandard/Unsafe/Compiler.cs | 61 +
.../Compressors/ZStandard/Unsafe/Cover.cs | 444 +
.../ZStandard/Unsafe/DTableDesc.cs | 12 +
.../ZStandard/Unsafe/EStats_ress_t.cs | 13 +
.../ZStandard/Unsafe/EntropyCommon.cs | 447 +
.../ZStandard/Unsafe/ErrorPrivate.cs | 110 +
.../ZStandard/Unsafe/EstimatedBlockSize.cs | 7 +
.../ZStandard/Unsafe/FASTCOVER_accel_t.cs | 19 +
.../ZStandard/Unsafe/FASTCOVER_ctx_t.cs | 19 +
.../Unsafe/FASTCOVER_tryParameters_data_s.cs | 12 +
.../Compressors/ZStandard/Unsafe/FPStats.cs | 7 +
.../ZStandard/Unsafe/FSE_CState_t.cs | 16 +
.../ZStandard/Unsafe/FSE_DState_t.cs | 12 +
.../ZStandard/Unsafe/FSE_DTableHeader.cs | 8 +
.../ZStandard/Unsafe/FSE_DecompressWksp.cs | 6 +
.../ZStandard/Unsafe/FSE_decode_t.cs | 8 +
.../ZStandard/Unsafe/FSE_repeat.cs | 13 +
.../Unsafe/FSE_symbolCompressionTransform.cs | 10 +
.../Compressors/ZStandard/Unsafe/Fastcover.cs | 761 ++
.../ZStandard/Unsafe/Fingerprint.cs | 7 +
.../Compressors/ZStandard/Unsafe/Fse.cs | 198 +
.../ZStandard/Unsafe/FseCompress.cs | 782 ++
.../ZStandard/Unsafe/FseDecompress.cs | 462 +
.../ZStandard/Unsafe/HIST_checkInput_e.cs | 7 +
.../ZStandard/Unsafe/HUF_CStream_t.cs | 22 +
.../ZStandard/Unsafe/HUF_CTableHeader.cs | 8 +
.../Unsafe/HUF_CompressWeightsWksp.cs | 9 +
.../ZStandard/Unsafe/HUF_DEltX1.cs | 11 +
.../ZStandard/Unsafe/HUF_DEltX2.cs | 12 +
.../Unsafe/HUF_DecompressFastArgs.cs | 49 +
.../Unsafe/HUF_ReadDTableX1_Workspace.cs | 10 +
.../Unsafe/HUF_ReadDTableX2_Workspace.cs | 307 +
.../ZStandard/Unsafe/HUF_WriteCTableWksp.cs | 10 +
.../Unsafe/HUF_buildCTable_wksp_tables.cs | 739 +
.../ZStandard/Unsafe/HUF_compress_tables_t.cs | 280 +
.../ZStandard/Unsafe/HUF_flags_e.cs | 44 +
.../ZStandard/Unsafe/HUF_nbStreams_e.cs | 7 +
.../ZStandard/Unsafe/HUF_repeat.cs | 13 +
.../Compressors/ZStandard/Unsafe/Hist.cs | 273 +
.../ZStandard/Unsafe/HufCompress.cs | 1825 +++
.../ZStandard/Unsafe/HufDecompress.cs | 2505 ++++
.../Compressors/ZStandard/Unsafe/Mem.cs | 162 +
.../Compressors/ZStandard/Unsafe/Pool.cs | 122 +
.../ZStandard/Unsafe/RSyncState_t.cs | 8 +
.../Compressors/ZStandard/Unsafe/Range.cs | 14 +
.../ZStandard/Unsafe/RawSeqStore_t.cs | 29 +
.../ZStandard/Unsafe/RoundBuff_t.cs | 28 +
.../ZStandard/Unsafe/SeqCollector.cs | 9 +
.../Compressors/ZStandard/Unsafe/SeqDef_s.cs | 14 +
.../ZStandard/Unsafe/SeqStore_t.cs | 27 +
.../ZStandard/Unsafe/SerialState.cs | 23 +
.../ZStandard/Unsafe/SymbolEncodingType_e.cs | 9 +
.../Compressors/ZStandard/Unsafe/SyncPoint.cs | 10 +
.../ZStandard/Unsafe/XXH32_canonical_t.cs | 10 +
.../ZStandard/Unsafe/XXH32_state_s.cs | 34 +
.../ZStandard/Unsafe/XXH64_canonical_t.cs | 9 +
.../ZStandard/Unsafe/XXH64_state_s.cs | 34 +
.../ZStandard/Unsafe/XXH_alignment.cs | 14 +
.../ZStandard/Unsafe/XXH_errorcode.cs | 13 +
.../Compressors/ZStandard/Unsafe/Xxhash.cs | 626 +
.../ZStandard/Unsafe/ZDICT_cover_params_t.cs | 30 +
.../Unsafe/ZDICT_fastCover_params_t.cs | 32 +
.../ZStandard/Unsafe/ZDICT_legacy_params_t.cs | 8 +
.../ZStandard/Unsafe/ZDICT_params_t.cs | 20 +
.../ZStandard/Unsafe/ZSTDMT_CCtxPool.cs | 12 +
.../ZStandard/Unsafe/ZSTDMT_CCtx_s.cs | 32 +
.../ZStandard/Unsafe/ZSTDMT_bufferPool_s.cs | 11 +
.../ZStandard/Unsafe/ZSTDMT_jobDescription.cs | 61 +
.../Unsafe/ZSTD_BlockCompressor_f.cs | 12 +
.../ZStandard/Unsafe/ZSTD_BuildCTableWksp.cs | 7 +
.../ZStandard/Unsafe/ZSTD_BuildSeqStore_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_CCtx_params_s.cs | 86 +
.../ZStandard/Unsafe/ZSTD_CCtx_s.cs | 91 +
.../ZStandard/Unsafe/ZSTD_CDict_s.cs | 30 +
.../ZStandard/Unsafe/ZSTD_CParamMode_e.cs | 29 +
.../ZStandard/Unsafe/ZSTD_DCtx_s.cs | 95 +
.../ZStandard/Unsafe/ZSTD_DDictHashSet.cs | 9 +
.../ZStandard/Unsafe/ZSTD_DDict_s.cs | 15 +
.../ZStandard/Unsafe/ZSTD_DefaultPolicy_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_EndDirective.cs | 21 +
.../ZStandard/Unsafe/ZSTD_ErrorCode.cs | 59 +
.../ZStandard/Unsafe/ZSTD_MatchState_t.cs | 66 +
.../ZStandard/Unsafe/ZSTD_OffsetInfo.cs | 7 +
.../ZStandard/Unsafe/ZSTD_OptPrice_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_ResetDirective.cs | 8 +
.../ZStandard/Unsafe/ZSTD_Sequence.cs | 40 +
.../ZStandard/Unsafe/ZSTD_SequenceLength.cs | 7 +
.../ZStandard/Unsafe/ZSTD_SequencePosition.cs | 13 +
.../ZStandard/Unsafe/ZSTD_blockSplitCtx.cs | 12 +
.../ZStandard/Unsafe/ZSTD_blockState_t.cs | 8 +
.../ZStandard/Unsafe/ZSTD_bounds.cs | 8 +
.../ZStandard/Unsafe/ZSTD_bufferMode_e.cs | 11 +
.../Unsafe/ZSTD_buffered_policy_e.cs | 12 +
.../ZStandard/Unsafe/ZSTD_cParameter.cs | 218 +
.../ZStandard/Unsafe/ZSTD_cStreamStage.cs | 8 +
.../Unsafe/ZSTD_compResetPolicy_e.cs | 14 +
.../Unsafe/ZSTD_compressedBlockState_t.cs | 7 +
.../Unsafe/ZSTD_compressionParameters.cs | 44 +
.../Unsafe/ZSTD_compressionStage_e.cs | 12 +
.../ZStandard/Unsafe/ZSTD_customMem.cs | 15 +
.../ZStandard/Unsafe/ZSTD_cwksp.cs | 110 +
.../Unsafe/ZSTD_cwksp_alloc_phase_e.cs | 12 +
.../Unsafe/ZSTD_cwksp_static_alloc_e.cs | 12 +
.../ZStandard/Unsafe/ZSTD_dParameter.cs | 38 +
.../ZStandard/Unsafe/ZSTD_dStage.cs | 13 +
.../ZStandard/Unsafe/ZSTD_dStreamStage.cs | 10 +
.../ZStandard/Unsafe/ZSTD_dictAttachPref_e.cs | 16 +
.../Unsafe/ZSTD_dictContentType_e.cs | 13 +
.../ZStandard/Unsafe/ZSTD_dictLoadMethod_e.cs | 10 +
.../ZStandard/Unsafe/ZSTD_dictMode_e.cs | 9 +
.../Unsafe/ZSTD_dictTableLoadMethod_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_dictUses_e.cs | 13 +
.../Unsafe/ZSTD_entropyCTablesMetadata_t.cs | 7 +
.../ZStandard/Unsafe/ZSTD_entropyCTables_t.cs | 7 +
.../ZStandard/Unsafe/ZSTD_entropyDTables_t.cs | 1342 ++
.../Unsafe/ZSTD_forceIgnoreChecksum_e.cs | 8 +
.../ZStandard/Unsafe/ZSTD_format_e.cs | 12 +
.../ZStandard/Unsafe/ZSTD_frameHeader.cs | 21 +
.../ZStandard/Unsafe/ZSTD_frameParameters.cs | 13 +
.../ZStandard/Unsafe/ZSTD_frameProgression.cs | 22 +
.../ZStandard/Unsafe/ZSTD_frameSizeInfo.cs | 14 +
.../ZStandard/Unsafe/ZSTD_frameType_e.cs | 7 +
.../Unsafe/ZSTD_fseCTablesMetadata_t.cs | 18 +
.../ZStandard/Unsafe/ZSTD_fseCTables_t.cs | 11 +
.../ZStandard/Unsafe/ZSTD_fseState.cs | 7 +
.../ZStandard/Unsafe/ZSTD_getAllMatchesFn.cs | 15 +
.../Unsafe/ZSTD_hufCTablesMetadata_t.cs | 16 +
.../ZStandard/Unsafe/ZSTD_hufCTables_t.cs | 279 +
.../ZStandard/Unsafe/ZSTD_inBuffer_s.cs | 16 +
.../Unsafe/ZSTD_indexResetPolicy_e.cs | 12 +
.../ZStandard/Unsafe/ZSTD_litLocation_e.cs | 13 +
.../Unsafe/ZSTD_literalCompressionMode_e.cs | 16 +
.../ZStandard/Unsafe/ZSTD_localDict.cs | 10 +
.../ZStandard/Unsafe/ZSTD_longLengthType_e.cs | 14 +
.../ZStandard/Unsafe/ZSTD_longOffset_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_match_t.cs | 13 +
.../ZStandard/Unsafe/ZSTD_nextInputType_e.cs | 11 +
.../ZStandard/Unsafe/ZSTD_optLdm_t.cs | 17 +
.../ZStandard/Unsafe/ZSTD_optimal_t.cs | 19 +
.../ZStandard/Unsafe/ZSTD_outBuffer_s.cs | 13 +
.../ZStandard/Unsafe/ZSTD_overlap_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_paramSwitch_e.cs | 13 +
.../ZStandard/Unsafe/ZSTD_parameters.cs | 7 +
.../ZStandard/Unsafe/ZSTD_prefixDict_s.cs | 8 +
.../Unsafe/ZSTD_refMultipleDDicts_e.cs | 8 +
.../ZStandard/Unsafe/ZSTD_resetTarget_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_seqSymbol.cs | 17 +
.../ZStandard/Unsafe/ZSTD_seqSymbol_header.cs | 10 +
.../ZStandard/Unsafe/ZSTD_sequenceFormat_e.cs | 10 +
.../ZStandard/Unsafe/ZSTD_strategy.cs | 15 +
.../Unsafe/ZSTD_symbolEncodingTypeStats_t.cs | 16 +
.../Unsafe/ZSTD_tableFillPurpose_e.cs | 7 +
.../ZStandard/Unsafe/ZSTD_window_t.cs | 25 +
.../Compressors/ZStandard/Unsafe/Zdict.cs | 640 +
.../Compressors/ZStandard/Unsafe/Zstd.cs | 10 +
.../ZStandard/Unsafe/ZstdCommon.cs | 48 +
.../ZStandard/Unsafe/ZstdCompress.cs | 11225 ++++++++++++++++
.../ZStandard/Unsafe/ZstdCompressInternal.cs | 1444 ++
.../ZStandard/Unsafe/ZstdCompressLiterals.cs | 314 +
.../ZStandard/Unsafe/ZstdCompressSequences.cs | 1211 ++
.../Unsafe/ZstdCompressSuperblock.cs | 976 ++
.../Compressors/ZStandard/Unsafe/ZstdCwksp.cs | 581 +
.../Compressors/ZStandard/Unsafe/ZstdDdict.cs | 292 +
.../ZStandard/Unsafe/ZstdDecompress.cs | 3420 +++++
.../ZStandard/Unsafe/ZstdDecompressBlock.cs | 3218 +++++
.../Unsafe/ZstdDecompressInternal.cs | 394 +
.../ZStandard/Unsafe/ZstdDoubleFast.cs | 1118 ++
.../Compressors/ZStandard/Unsafe/ZstdFast.cs | 1209 ++
.../ZStandard/Unsafe/ZstdInternal.cs | 638 +
.../Compressors/ZStandard/Unsafe/ZstdLazy.cs | 4661 +++++++
.../Compressors/ZStandard/Unsafe/ZstdLdm.cs | 940 ++
.../ZStandard/Unsafe/ZstdLdmGeartab.cs | 539 +
.../Compressors/ZStandard/Unsafe/ZstdOpt.cs | 2254 ++++
.../ZStandard/Unsafe/ZstdPresplit.cs | 295 +
.../ZStandard/Unsafe/ZstdmtCompress.cs | 1894 +++
.../ZStandard/Unsafe/_wksps_e__Union.cs | 16 +
.../ZStandard/Unsafe/algo_time_t.cs | 13 +
.../ZStandard/Unsafe/base_directive_e.cs | 7 +
.../ZStandard/Unsafe/blockProperties_t.cs | 8 +
.../ZStandard/Unsafe/blockType_e.cs | 9 +
.../Compressors/ZStandard/Unsafe/buffer_s.cs | 15 +
.../Compressors/ZStandard/Unsafe/dictItem.cs | 8 +
.../Compressors/ZStandard/Unsafe/inBuff_t.cs | 12 +
.../ZStandard/Unsafe/ldmEntry_t.cs | 7 +
.../ZStandard/Unsafe/ldmMatchCandidate_t.cs | 9 +
.../ZStandard/Unsafe/ldmParams_t.cs | 22 +
.../ZStandard/Unsafe/ldmRollingHashState_t.cs | 7 +
.../ZStandard/Unsafe/ldmState_t.cs | 170 +
.../Compressors/ZStandard/Unsafe/nodeElt_s.cs | 12 +
.../ZStandard/Unsafe/offsetCount_t.cs | 7 +
.../ZStandard/Unsafe/optState_t.cs | 53 +
.../Compressors/ZStandard/Unsafe/rankPos.cs | 7 +
.../ZStandard/Unsafe/rankValCol_t.cs | 6 +
.../Compressors/ZStandard/Unsafe/rawSeq.cs | 13 +
.../ZStandard/Unsafe/repcodes_s.cs | 6 +
.../ZStandard/Unsafe/searchMethod_e.cs | 8 +
.../ZStandard/Unsafe/seqState_t.cs | 17 +
.../ZStandard/Unsafe/seqStoreSplits.cs | 11 +
.../Compressors/ZStandard/Unsafe/seq_t.cs | 8 +
.../ZStandard/Unsafe/sortedSymbol_t.cs | 6 +
.../ZStandard/Unsafe/streaming_operation.cs | 8 +
.../Compressors/ZStandard/UnsafeHelper.cs | 107 +
.../Compressors/ZStandard/ZStandardStream.cs | 2 +-
.../ZStandard/ZstandardConstants.cs | 6 -
.../Compressors/ZStandard/ZstdException.cs | 12 +
src/SharpCompress/SharpCompress.csproj | 1 -
src/SharpCompress/Writers/Tar/TarWriter.cs | 4 +-
.../Writers/Tar/TarWriterOptions.cs | 15 +-
src/SharpCompress/Writers/Zip/ZipWriter.cs | 3 +-
src/SharpCompress/packages.lock.json | 23 -
tests/SharpCompress.Test/packages.lock.json | 43 +-
245 files changed, 55930 insertions(+), 82 deletions(-)
create mode 100644 src/SharpCompress/Common/Tar/Headers/TarHeaderWriteFormat.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/BitOperations.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/CompressionStream.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Compressor.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Constants.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Decompressor.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/JobThreadPool.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/SafeHandles.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/SynchronizationWrapper.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/ThrowHelper.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/UnmanagedObject.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Allocations.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/BIT_CStream_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/BIT_DStream_status.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/BIT_DStream_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Bits.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Bitstream.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/BlockSummary.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_best_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_ctx_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_dictSelection.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_epoch_info_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_map_pair_t_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_map_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_segment_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/COVER_tryParameters_data_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Clevels.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Compiler.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Cover.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/DTableDesc.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/EStats_ress_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/EntropyCommon.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ErrorPrivate.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/EstimatedBlockSize.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FASTCOVER_accel_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FASTCOVER_ctx_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FASTCOVER_tryParameters_data_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FPStats.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_CState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_DState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_DTableHeader.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_DecompressWksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_decode_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_repeat.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FSE_symbolCompressionTransform.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Fastcover.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Fingerprint.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Fse.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FseCompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/FseDecompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HIST_checkInput_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_CStream_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_CTableHeader.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_CompressWeightsWksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_DEltX1.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_DEltX2.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_DecompressFastArgs.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_ReadDTableX1_Workspace.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_ReadDTableX2_Workspace.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_WriteCTableWksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_buildCTable_wksp_tables.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_compress_tables_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_flags_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_nbStreams_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HUF_repeat.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Hist.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HufCompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/HufDecompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Mem.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Pool.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/RSyncState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Range.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/RawSeqStore_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/RoundBuff_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SeqCollector.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SeqDef_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SeqStore_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SerialState.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SymbolEncodingType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/SyncPoint.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH32_canonical_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH32_state_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH64_canonical_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH64_state_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH_alignment.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/XXH_errorcode.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Xxhash.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZDICT_cover_params_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZDICT_fastCover_params_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZDICT_legacy_params_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZDICT_params_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTDMT_CCtxPool.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTDMT_CCtx_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTDMT_bufferPool_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTDMT_jobDescription.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_BlockCompressor_f.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_BuildCTableWksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_BuildSeqStore_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_CCtx_params_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_CCtx_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_CDict_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_CParamMode_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_DCtx_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_DDictHashSet.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_DDict_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_DefaultPolicy_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_EndDirective.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_ErrorCode.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_MatchState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_OffsetInfo.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_OptPrice_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_ResetDirective.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_Sequence.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_SequenceLength.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_SequencePosition.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_blockSplitCtx.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_blockState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_bounds.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_bufferMode_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_buffered_policy_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_cParameter.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_cStreamStage.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_compResetPolicy_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_compressedBlockState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_compressionParameters.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_compressionStage_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_customMem.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_cwksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_cwksp_alloc_phase_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_cwksp_static_alloc_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dParameter.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dStage.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dStreamStage.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictAttachPref_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictContentType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictLoadMethod_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictMode_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictTableLoadMethod_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_dictUses_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_entropyCTablesMetadata_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_entropyCTables_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_entropyDTables_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_forceIgnoreChecksum_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_format_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_frameHeader.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_frameParameters.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_frameProgression.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_frameSizeInfo.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_frameType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_fseCTablesMetadata_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_fseCTables_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_fseState.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_getAllMatchesFn.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_hufCTablesMetadata_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_hufCTables_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_inBuffer_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_indexResetPolicy_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_litLocation_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_literalCompressionMode_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_localDict.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_longLengthType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_longOffset_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_match_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_nextInputType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_optLdm_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_optimal_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_outBuffer_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_overlap_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_paramSwitch_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_parameters.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_prefixDict_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_refMultipleDDicts_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_resetTarget_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_seqSymbol.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_seqSymbol_header.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_sequenceFormat_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_strategy.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_symbolEncodingTypeStats_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_tableFillPurpose_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZSTD_window_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Zdict.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/Zstd.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCommon.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressInternal.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressLiterals.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSequences.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSuperblock.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCwksp.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdDdict.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdDecompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdDecompressBlock.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdDecompressInternal.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdDoubleFast.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdFast.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdInternal.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLazy.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLdm.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLdmGeartab.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdOpt.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdPresplit.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdmtCompress.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/_wksps_e__Union.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/algo_time_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/base_directive_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/blockProperties_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/blockType_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/buffer_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/dictItem.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/inBuff_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ldmEntry_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ldmMatchCandidate_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ldmParams_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ldmRollingHashState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/ldmState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/nodeElt_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/offsetCount_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/optState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/rankPos.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/rankValCol_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/rawSeq.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/repcodes_s.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/searchMethod_e.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/seqState_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/seqStoreSplits.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/seq_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/sortedSymbol_t.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/Unsafe/streaming_operation.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/UnsafeHelper.cs
create mode 100644 src/SharpCompress/Compressors/ZStandard/ZstdException.cs
diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 03df23230..05325293b 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
- "version": "1.2.1",
+ "version": "1.2.3",
"commands": [
"csharpier"
],
diff --git a/.github/workflows/dotnetcore.yml b/.github/workflows/dotnetcore.yml
index 3f6bef75e..2ce8f4dd1 100644
--- a/.github/workflows/dotnetcore.yml
+++ b/.github/workflows/dotnetcore.yml
@@ -19,7 +19,7 @@ jobs:
with:
dotnet-version: 10.0.x
- run: dotnet run --project build/build.csproj
- - uses: actions/upload-artifact@v5
+ - uses: actions/upload-artifact@v6
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
diff --git a/src/SharpCompress/AssemblyInfo.cs b/src/SharpCompress/AssemblyInfo.cs
index e51d97fe0..c11eb8e02 100644
--- a/src/SharpCompress/AssemblyInfo.cs
+++ b/src/SharpCompress/AssemblyInfo.cs
@@ -1,7 +1,8 @@
using System;
using System.Runtime.CompilerServices;
-[assembly: CLSCompliant(true)]
+// CLSCompliant(false) is required because ZStandard integration uses unsafe code
+[assembly: CLSCompliant(false)]
[assembly: InternalsVisibleTo(
"SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]
diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
index 06a1f10a4..e6b7c265c 100644
--- a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
+++ b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
+using System.Collections.Generic;
using System.IO;
using System.Text;
@@ -9,8 +10,16 @@ internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
- public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
+ public TarHeader(
+ ArchiveEncoding archiveEncoding,
+ TarHeaderWriteFormat writeFormat = TarHeaderWriteFormat.GNU_TAR_LONG_LINK
+ )
+ {
+ ArchiveEncoding = archiveEncoding;
+ WriteFormat = writeFormat;
+ }
+ internal TarHeaderWriteFormat WriteFormat { get; set; }
internal string? Name { get; set; }
internal string? LinkName { get; set; }
@@ -30,6 +39,114 @@ internal sealed class TarHeader
private const int MAX_LONG_NAME_SIZE = 32768;
internal void Write(Stream output)
+ {
+ switch (WriteFormat)
+ {
+ case TarHeaderWriteFormat.GNU_TAR_LONG_LINK:
+ WriteGnuTarLongLink(output);
+ break;
+ case TarHeaderWriteFormat.USTAR:
+ WriteUstar(output);
+ break;
+ default:
+ throw new Exception("This should be impossible...");
+ }
+ }
+
+ internal void WriteUstar(Stream output)
+ {
+ var buffer = new byte[BLOCK_SIZE];
+
+ WriteOctalBytes(511, buffer, 100, 8); // file mode
+ WriteOctalBytes(0, buffer, 108, 8); // owner ID
+ WriteOctalBytes(0, buffer, 116, 8); // group ID
+
+ // NOTE: the USTAR "ustar" magic field at offset 257 is written further below, after the name fields.
+ var nameByteCount = ArchiveEncoding
+ .GetEncoding()
+ .GetByteCount(Name.NotNull("Name is null"));
+
+ if (nameByteCount > 100)
+ {
+ // if name is longer, try to split it into name and namePrefix
+
+ string fullName = Name.NotNull("Name is null");
+
+ // find all directory separators
+ List dirSeps = new List();
+ for (int i = 0; i < fullName.Length; i++)
+ {
+ if (fullName[i] == Path.DirectorySeparatorChar)
+ {
+ dirSeps.Add(i);
+ }
+ }
+
+ // find the right place to split the name
+ int splitIndex = -1;
+ for (int i = 0; i < dirSeps.Count; i++)
+ {
+ int count = ArchiveEncoding
+ .GetEncoding()
+ .GetByteCount(fullName.Substring(0, dirSeps[i]));
+ if (count < 155)
+ {
+ splitIndex = dirSeps[i];
+ }
+ else
+ {
+ break;
+ }
+ }
+
+ if (splitIndex == -1)
+ {
+ throw new Exception(
+ $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
+ );
+ }
+
+ string namePrefix = fullName.Substring(0, splitIndex);
+ string name = fullName.Substring(splitIndex + 1);
+
+ if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
+ throw new Exception(
+ $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
+ );
+
+ if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
+ throw new Exception(
+ $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
+ );
+
+ // write name prefix
+ WriteStringBytes(ArchiveEncoding.Encode(namePrefix), buffer, 345, 100);
+ // write partial name
+ WriteStringBytes(ArchiveEncoding.Encode(name), buffer, 100);
+ }
+ else
+ {
+ WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
+ }
+
+ WriteOctalBytes(Size, buffer, 124, 12);
+ var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
+ WriteOctalBytes(time, buffer, 136, 12);
+ buffer[156] = (byte)EntryType;
+
+ // write ustar magic field
+ WriteStringBytes(Encoding.ASCII.GetBytes("ustar"), buffer, 257, 6);
+ // write ustar version "00"
+ buffer[263] = 0x30;
+ buffer[264] = 0x30;
+
+ var crc = RecalculateChecksum(buffer);
+ WriteOctalBytes(crc, buffer, 148, 8);
+
+ output.Write(buffer, 0, buffer.Length);
+ }
+
+ internal void WriteGnuTarLongLink(Stream output)
{
var buffer = new byte[BLOCK_SIZE];
@@ -85,7 +202,7 @@ internal void Write(Stream output)
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
- Write(output);
+ WriteGnuTarLongLink(output);
}
}
@@ -241,6 +358,18 @@ private static void WriteStringBytes(ReadOnlySpan name, Span buffer,
buffer.Slice(i, length - i).Clear();
}
+ private static void WriteStringBytes(
+ ReadOnlySpan name,
+ Span buffer,
+ int offset,
+ int length
+ )
+ {
+ name.CopyTo(buffer.Slice(offset));
+ var i = Math.Min(length, name.Length);
+ buffer.Slice(offset + i, length - i).Clear();
+ }
+
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;
diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeaderWriteFormat.cs b/src/SharpCompress/Common/Tar/Headers/TarHeaderWriteFormat.cs
new file mode 100644
index 000000000..3a3a434ab
--- /dev/null
+++ b/src/SharpCompress/Common/Tar/Headers/TarHeaderWriteFormat.cs
@@ -0,0 +1,7 @@
+namespace SharpCompress.Common.Tar.Headers;
+
+public enum TarHeaderWriteFormat
+{
+ GNU_TAR_LONG_LINK,
+ USTAR,
+}
diff --git a/src/SharpCompress/Common/Zip/ZipFilePart.cs b/src/SharpCompress/Common/Zip/ZipFilePart.cs
index 77dc4abba..16eb8e1a9 100644
--- a/src/SharpCompress/Common/Zip/ZipFilePart.cs
+++ b/src/SharpCompress/Common/Zip/ZipFilePart.cs
@@ -13,8 +13,8 @@
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
+using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
-using ZstdSharp;
namespace SharpCompress.Common.Zip;
diff --git a/src/SharpCompress/Compressors/LZMA/Registry.cs b/src/SharpCompress/Compressors/LZMA/Registry.cs
index eb3e3bdd6..d71abded2 100644
--- a/src/SharpCompress/Compressors/LZMA/Registry.cs
+++ b/src/SharpCompress/Compressors/LZMA/Registry.cs
@@ -7,7 +7,7 @@
using SharpCompress.Compressors.Filters;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.Compressors.PPMd;
-using ZstdSharp;
+using SharpCompress.Compressors.ZStandard;
namespace SharpCompress.Compressors.LZMA;
diff --git a/src/SharpCompress/Compressors/ZStandard/BitOperations.cs b/src/SharpCompress/Compressors/ZStandard/BitOperations.cs
new file mode 100644
index 000000000..fc8e3108d
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/BitOperations.cs
@@ -0,0 +1,311 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+#if !NETCOREAPP3_0_OR_GREATER
+
+using System.Runtime.CompilerServices;
+using static SharpCompress.Compressors.ZStandard.UnsafeHelper;
+
+// Some routines inspired by the Stanford Bit Twiddling Hacks by Sean Eron Anderson:
+// http://graphics.stanford.edu/~seander/bithacks.html
+
+namespace System.Numerics
+{
+ ///
+ /// Utility methods for intrinsic bit-twiddling operations.
+ /// The methods use hardware intrinsics when available on the underlying platform,
+ /// otherwise they use optimized software fallbacks.
+ ///
+ public static unsafe class BitOperations
+ {
+ // NOTE: exposed as public (despite being an implementation detail) so aggressively-inlined call sites can reference it.
+ public static readonly byte* TrailingZeroCountDeBruijn = GetArrayPointer(
+ new byte[]
+ {
+ 00,
+ 01,
+ 28,
+ 02,
+ 29,
+ 14,
+ 24,
+ 03,
+ 30,
+ 22,
+ 20,
+ 15,
+ 25,
+ 17,
+ 04,
+ 08,
+ 31,
+ 27,
+ 13,
+ 23,
+ 21,
+ 19,
+ 16,
+ 07,
+ 26,
+ 12,
+ 18,
+ 06,
+ 11,
+ 05,
+ 10,
+ 09,
+ }
+ );
+
+ // NOTE: exposed as public (despite being an implementation detail) so aggressively-inlined call sites can reference it.
+ public static readonly byte* Log2DeBruijn = GetArrayPointer(
+ new byte[]
+ {
+ 00,
+ 09,
+ 01,
+ 10,
+ 13,
+ 21,
+ 02,
+ 29,
+ 11,
+ 14,
+ 16,
+ 18,
+ 22,
+ 25,
+ 03,
+ 30,
+ 08,
+ 12,
+ 20,
+ 28,
+ 15,
+ 17,
+ 24,
+ 07,
+ 19,
+ 27,
+ 23,
+ 06,
+ 26,
+ 05,
+ 04,
+ 31,
+ }
+ );
+
+ ///
+ /// Returns the integer (floor) log of the specified value, base 2.
+ /// Note that by convention, input value 0 returns 0 since log(0) is undefined.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int Log2(uint value)
+ {
+ // The 0->0 contract is fulfilled by setting the LSB to 1.
+ // Log(1) is 0, and setting the LSB for values > 1 does not change the log2 result.
+ value |= 1;
+
+ // value lzcnt actual expected
+ // ..0001 31 31-31 0
+ // ..0010 30 31-30 1
+ // 0010.. 2 31-2 29
+ // 0100.. 1 31-1 30
+ // 1000.. 0 31-0 31
+
+ // Software fallback (no hardware LZCNT/BSR on this target), with the
+ // conventional 0->0 contract (Log2(0) is undefined): smear the highest set
+ // bit downward, then use a DeBruijn multiply to locate it.
+
+ // Fill trailing zeros with ones, eg 00010010 becomes 00011111
+ value |= value >> 01;
+ value |= value >> 02;
+ value |= value >> 04;
+ value |= value >> 08;
+ value |= value >> 16;
+
+ // (value * 0x07C4ACDDu) >> 27 is always in [0, 31], so indexing the 32-entry table pointer is in bounds.
+ return Log2DeBruijn[
+ // Using deBruijn sequence, k=2, n=5 (2^5=32) : 0b_0000_0111_1100_0100_1010_1100_1101_1101u
+ (int)((value * 0x07C4ACDDu) >> 27)
+ ];
+ }
+
+ ///
+ /// Returns the integer (floor) log of the specified value, base 2.
+ /// Note that by convention, input value 0 returns 0 since log(0) is undefined.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int Log2(ulong value)
+ {
+ value |= 1;
+
+ uint hi = (uint)(value >> 32);
+
+ if (hi == 0)
+ {
+ return Log2((uint)value);
+ }
+
+ return 32 + Log2(hi);
+ }
+
+ ///
+ /// Count the number of trailing zero bits in an integer value.
+ /// Similar in behavior to the x86 instruction TZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int TrailingZeroCount(int value) => TrailingZeroCount((uint)value);
+
+ ///
+ /// Count the number of trailing zero bits in an integer value.
+ /// Similar in behavior to the x86 instruction TZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int TrailingZeroCount(uint value)
+ {
+ // Unguarded fallback contract is 0->0, BSF contract is 0->undefined
+ if (value == 0)
+ {
+ return 32;
+ }
+
+ // ((value & -value) * 0x077CB531u) >> 27 is always in [0, 31], so indexing the 32-entry table pointer is in bounds.
+ return TrailingZeroCountDeBruijn[
+ // Using deBruijn sequence, k=2, n=5 (2^5=32) : 0b_0000_0111_0111_1100_1011_0101_0011_0001u
+ (int)(((value & (uint)-(int)value) * 0x077CB531u) >> 27)
+ ]; // Multi-cast mitigates redundant conv.u8
+ }
+
+ ///
+ /// Count the number of trailing zero bits in a mask.
+ /// Similar in behavior to the x86 instruction TZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int TrailingZeroCount(long value) => TrailingZeroCount((ulong)value);
+
+ ///
+ /// Count the number of trailing zero bits in a mask.
+ /// Similar in behavior to the x86 instruction TZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int TrailingZeroCount(ulong value)
+ {
+ uint lo = (uint)value;
+
+ if (lo == 0)
+ {
+ return 32 + TrailingZeroCount((uint)(value >> 32));
+ }
+
+ return TrailingZeroCount(lo);
+ }
+
+ ///
+ /// Rotates the specified value left by the specified number of bits.
+ /// Similar in behavior to the x86 instruction ROL.
+ ///
+ /// The value to rotate.
+ /// The number of bits to rotate by.
+ /// Any value outside the range [0..31] is treated as congruent mod 32.
+ /// The rotated value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static uint RotateLeft(uint value, int offset) =>
+ (value << offset) | (value >> (32 - offset));
+
+ ///
+ /// Rotates the specified value left by the specified number of bits.
+ /// Similar in behavior to the x86 instruction ROL.
+ ///
+ /// The value to rotate.
+ /// The number of bits to rotate by.
+ /// Any value outside the range [0..63] is treated as congruent mod 64.
+ /// The rotated value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static ulong RotateLeft(ulong value, int offset) =>
+ (value << offset) | (value >> (64 - offset));
+
+ ///
+ /// Rotates the specified value right by the specified number of bits.
+ /// Similar in behavior to the x86 instruction ROR.
+ ///
+ /// The value to rotate.
+ /// The number of bits to rotate by.
+ /// Any value outside the range [0..31] is treated as congruent mod 32.
+ /// The rotated value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static uint RotateRight(uint value, int offset) =>
+ (value >> offset) | (value << (32 - offset));
+
+ ///
+ /// Rotates the specified value right by the specified number of bits.
+ /// Similar in behavior to the x86 instruction ROR.
+ ///
+ /// The value to rotate.
+ /// The number of bits to rotate by.
+ /// Any value outside the range [0..63] is treated as congruent mod 64.
+ /// The rotated value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static ulong RotateRight(ulong value, int offset) =>
+ (value >> offset) | (value << (64 - offset));
+
+ ///
+ /// Count the number of leading zero bits in a mask.
+ /// Similar in behavior to the x86 instruction LZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int LeadingZeroCount(uint value)
+ {
+ // Unguarded fallback contract is 0->31, BSR contract is 0->undefined
+ if (value == 0)
+ {
+ return 32;
+ }
+
+ // Software fallback: compute the floor-log2 via bit smearing plus the
+ // DeBruijn table, then convert it into a leading-zero count (31 ^ log2).
+
+ // Fill trailing zeros with ones, eg 00010010 becomes 00011111
+ value |= value >> 01;
+ value |= value >> 02;
+ value |= value >> 04;
+ value |= value >> 08;
+ value |= value >> 16;
+
+ // (value * 0x07C4ACDDu) >> 27 is always in [0, 31], so indexing the 32-entry table pointer is in bounds.
+ return 31
+ ^ Log2DeBruijn[
+ // DeBruijn sequence, k=2, n=5 (2^5=32): 0b_0000_0111_1100_0100_1010_1100_1101_1101u
+ (int)((value * 0x07C4ACDDu) >> 27)
+ ];
+ }
+
+ ///
+ /// Count the number of leading zero bits in a mask.
+ /// Similar in behavior to the x86 instruction LZCNT.
+ ///
+ /// The value.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int LeadingZeroCount(ulong value)
+ {
+ uint hi = (uint)(value >> 32);
+
+ if (hi == 0)
+ {
+ return 32 + LeadingZeroCount((uint)value);
+ }
+
+ return LeadingZeroCount(hi);
+ }
+ }
+}
+
+#endif
diff --git a/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs
new file mode 100644
index 000000000..92de03b34
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs
@@ -0,0 +1,301 @@
+using System;
+using System.Buffers;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using SharpCompress.Compressors.ZStandard.Unsafe;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+public class CompressionStream : Stream
+{
+ private readonly Stream innerStream;
+ private readonly byte[] outputBuffer;
+ private readonly bool preserveCompressor;
+ private readonly bool leaveOpen;
+ private Compressor? compressor;
+ private ZSTD_outBuffer_s output;
+
+ public CompressionStream(
+ Stream stream,
+ int level = Compressor.DefaultCompressionLevel,
+ int bufferSize = 0,
+ bool leaveOpen = true
+ )
+ : this(stream, new Compressor(level), bufferSize, false, leaveOpen) { }
+
+ public CompressionStream(
+ Stream stream,
+ Compressor compressor,
+ int bufferSize = 0,
+ bool preserveCompressor = true,
+ bool leaveOpen = true
+ )
+ {
+ if (stream == null)
+ throw new ArgumentNullException(nameof(stream));
+
+ if (!stream.CanWrite)
+ throw new ArgumentException("Stream is not writable", nameof(stream));
+
+ if (bufferSize < 0)
+ throw new ArgumentOutOfRangeException(nameof(bufferSize));
+
+ innerStream = stream;
+ this.compressor = compressor;
+ this.preserveCompressor = preserveCompressor;
+ this.leaveOpen = leaveOpen;
+
+ var outputBufferSize =
+ bufferSize > 0
+ ? bufferSize
+ : (int)Unsafe.Methods.ZSTD_CStreamOutSize().EnsureZstdSuccess();
+ outputBuffer = ArrayPool.Shared.Rent(outputBufferSize);
+ output = new ZSTD_outBuffer_s { pos = 0, size = (nuint)outputBufferSize };
+ }
+
+ public void SetParameter(ZSTD_cParameter parameter, int value)
+ {
+ EnsureNotDisposed();
+ compressor.NotNull().SetParameter(parameter, value);
+ }
+
+ public int GetParameter(ZSTD_cParameter parameter)
+ {
+ EnsureNotDisposed();
+ return compressor.NotNull().GetParameter(parameter);
+ }
+
+ public void LoadDictionary(byte[] dict)
+ {
+ EnsureNotDisposed();
+ compressor.NotNull().LoadDictionary(dict);
+ }
+
+ ~CompressionStream() => Dispose(false);
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+ public override async ValueTask DisposeAsync()
+#else
+ public async Task DisposeAsync()
+#endif
+ {
+ if (compressor == null)
+ return;
+
+ try
+ {
+ await FlushInternalAsync(ZSTD_EndDirective.ZSTD_e_end).ConfigureAwait(false);
+ }
+ finally
+ {
+ ReleaseUnmanagedResources();
+ GC.SuppressFinalize(this);
+ }
+ }
+
+ protected override void Dispose(bool disposing)
+ {
+ if (compressor == null)
+ return;
+
+ try
+ {
+ if (disposing)
+ FlushInternal(ZSTD_EndDirective.ZSTD_e_end);
+ }
+ finally
+ {
+ ReleaseUnmanagedResources();
+ }
+ }
+
+ private void ReleaseUnmanagedResources()
+ {
+ if (!preserveCompressor)
+ {
+ compressor.NotNull().Dispose();
+ }
+ compressor = null;
+
+ if (outputBuffer != null)
+ {
+ ArrayPool.Shared.Return(outputBuffer);
+ }
+
+ if (!leaveOpen)
+ {
+ innerStream.Dispose();
+ }
+ }
+
+ public override void Flush() => FlushInternal(ZSTD_EndDirective.ZSTD_e_flush);
+
+ public override async Task FlushAsync(CancellationToken cancellationToken) =>
+ await FlushInternalAsync(ZSTD_EndDirective.ZSTD_e_flush, cancellationToken)
+ .ConfigureAwait(false);
+
+ private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive);
+
+ private async Task FlushInternalAsync(
+ ZSTD_EndDirective directive,
+ CancellationToken cancellationToken = default
+ ) => await WriteInternalAsync(null, directive, cancellationToken).ConfigureAwait(false);
+
+ public override void Write(byte[] buffer, int offset, int count) =>
+ Write(new ReadOnlySpan(buffer, offset, count));
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+ public override void Write(ReadOnlySpan buffer) =>
+ WriteInternal(buffer, ZSTD_EndDirective.ZSTD_e_continue);
+#else
+ public void Write(ReadOnlySpan buffer) =>
+ WriteInternal(buffer, ZSTD_EndDirective.ZSTD_e_continue);
+#endif
+
+ private void WriteInternal(ReadOnlySpan buffer, ZSTD_EndDirective directive)
+ {
+ EnsureNotDisposed();
+
+ var input = new ZSTD_inBuffer_s
+ {
+ pos = 0,
+ size = buffer != null ? (nuint)buffer.Length : 0,
+ };
+ nuint remaining;
+ do
+ {
+ output.pos = 0;
+ remaining = CompressStream(ref input, buffer, directive);
+
+ var written = (int)output.pos;
+ if (written > 0)
+ innerStream.Write(outputBuffer, 0, written);
+ } while (
+ directive == ZSTD_EndDirective.ZSTD_e_continue ? input.pos < input.size : remaining > 0
+ );
+ }
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+ private async ValueTask WriteInternalAsync(
+ ReadOnlyMemory? buffer,
+ ZSTD_EndDirective directive,
+ CancellationToken cancellationToken = default
+ )
+#else
+ private async Task WriteInternalAsync(
+ ReadOnlyMemory? buffer,
+ ZSTD_EndDirective directive,
+ CancellationToken cancellationToken = default
+ )
+#endif
+
+ {
+ EnsureNotDisposed();
+
+ var input = new ZSTD_inBuffer_s
+ {
+ pos = 0,
+ size = buffer.HasValue ? (nuint)buffer.Value.Length : 0,
+ };
+ nuint remaining;
+ do
+ {
+ output.pos = 0;
+ remaining = CompressStream(
+ ref input,
+ buffer.HasValue ? buffer.Value.Span : null,
+ directive
+ );
+
+ var written = (int)output.pos;
+ if (written > 0)
+ await innerStream
+ .WriteAsync(outputBuffer, 0, written, cancellationToken)
+ .ConfigureAwait(false);
+ } while (
+ directive == ZSTD_EndDirective.ZSTD_e_continue ? input.pos < input.size : remaining > 0
+ );
+ }
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+
+ public override Task WriteAsync(
+ byte[] buffer,
+ int offset,
+ int count,
+ CancellationToken cancellationToken
+ ) => WriteAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken).AsTask();
+
+ public override async ValueTask WriteAsync(
+ ReadOnlyMemory buffer,
+ CancellationToken cancellationToken = default
+ ) =>
+ await WriteInternalAsync(buffer, ZSTD_EndDirective.ZSTD_e_continue, cancellationToken)
+ .ConfigureAwait(false);
+#else
+
+ public override Task WriteAsync(
+ byte[] buffer,
+ int offset,
+ int count,
+ CancellationToken cancellationToken
+ ) => WriteAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken);
+
+ public async Task WriteAsync(
+ ReadOnlyMemory buffer,
+ CancellationToken cancellationToken = default
+ ) =>
+ await WriteInternalAsync(buffer, ZSTD_EndDirective.ZSTD_e_continue, cancellationToken)
+ .ConfigureAwait(false);
+#endif
+
+ internal unsafe nuint CompressStream(
+ ref ZSTD_inBuffer_s input,
+ ReadOnlySpan inputBuffer,
+ ZSTD_EndDirective directive
+ )
+ {
+ fixed (byte* inputBufferPtr = inputBuffer)
+ fixed (byte* outputBufferPtr = outputBuffer)
+ {
+ input.src = inputBufferPtr;
+ output.dst = outputBufferPtr;
+ return compressor
+ .NotNull()
+ .CompressStream(ref input, ref output, directive)
+ .EnsureZstdSuccess();
+ }
+ }
+
+ public override bool CanRead => false;
+ public override bool CanSeek => false;
+ public override bool CanWrite => true;
+
+ public override long Length => throw new NotSupportedException();
+
+ public override long Position
+ {
+ get => throw new NotSupportedException();
+ set => throw new NotSupportedException();
+ }
+
+ public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
+
+ public override void SetLength(long value) => throw new NotSupportedException();
+
+ public override int Read(byte[] buffer, int offset, int count) =>
+ throw new NotSupportedException();
+
+ private void EnsureNotDisposed()
+ {
+ if (compressor == null)
+ throw new ObjectDisposedException(nameof(CompressionStream));
+ }
+
+ public void SetPledgedSrcSize(ulong pledgedSrcSize)
+ {
+ EnsureNotDisposed();
+ compressor.NotNull().SetPledgedSrcSize(pledgedSrcSize);
+ }
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/Compressor.cs b/src/SharpCompress/Compressors/ZStandard/Compressor.cs
new file mode 100644
index 000000000..668606016
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/Compressor.cs
@@ -0,0 +1,204 @@
+using System;
+using SharpCompress.Compressors.ZStandard.Unsafe;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+public unsafe class Compressor : IDisposable
+{
+ ///
+ /// Minimum negative compression level allowed
+ ///
+ public static int MinCompressionLevel => Unsafe.Methods.ZSTD_minCLevel();
+
+ ///
+ /// Maximum compression level available
+ ///
+ public static int MaxCompressionLevel => Unsafe.Methods.ZSTD_maxCLevel();
+
+ ///
+ /// Default compression level
+ ///
+ ///
+ public const int DefaultCompressionLevel = 3;
+
+ private int level = DefaultCompressionLevel;
+
+ private readonly SafeCctxHandle handle;
+
+ public int Level
+ {
+ get => level;
+ set
+ {
+ if (level != value)
+ {
+ level = value;
+ SetParameter(ZSTD_cParameter.ZSTD_c_compressionLevel, value);
+ }
+ }
+ }
+
+ public void SetParameter(ZSTD_cParameter parameter, int value)
+ {
+ using var cctx = handle.Acquire();
+ Unsafe.Methods.ZSTD_CCtx_setParameter(cctx, parameter, value).EnsureZstdSuccess();
+ }
+
+ public int GetParameter(ZSTD_cParameter parameter)
+ {
+ using var cctx = handle.Acquire();
+ int value;
+ Unsafe.Methods.ZSTD_CCtx_getParameter(cctx, parameter, &value).EnsureZstdSuccess();
+ return value;
+ }
+
+ public void LoadDictionary(byte[] dict)
+ {
+ var dictReadOnlySpan = new ReadOnlySpan(dict);
+ LoadDictionary(dictReadOnlySpan);
+ }
+
+ public void LoadDictionary(ReadOnlySpan dict)
+ {
+ using var cctx = handle.Acquire();
+ fixed (byte* dictPtr = dict)
+ Unsafe
+ .Methods.ZSTD_CCtx_loadDictionary(cctx, dictPtr, (nuint)dict.Length)
+ .EnsureZstdSuccess();
+ }
+
+ public Compressor(int level = DefaultCompressionLevel)
+ {
+ handle = SafeCctxHandle.Create();
+ Level = level;
+ }
+
+ public static int GetCompressBound(int length) =>
+ (int)Unsafe.Methods.ZSTD_compressBound((nuint)length);
+
+ public static ulong GetCompressBoundLong(ulong length) =>
+ Unsafe.Methods.ZSTD_compressBound((nuint)length);
+
+ public Span Wrap(ReadOnlySpan src)
+ {
+ var dest = new byte[GetCompressBound(src.Length)];
+ var length = Wrap(src, dest);
+ return new Span(dest, 0, length);
+ }
+
+ public int Wrap(byte[] src, byte[] dest, int offset) =>
+ Wrap(src, new Span(dest, offset, dest.Length - offset));
+
+ public int Wrap(ReadOnlySpan src, Span dest)
+ {
+ fixed (byte* srcPtr = src)
+ fixed (byte* destPtr = dest)
+ {
+ using var cctx = handle.Acquire();
+ return (int)
+ Unsafe
+ .Methods.ZSTD_compress2(
+ cctx,
+ destPtr,
+ (nuint)dest.Length,
+ srcPtr,
+ (nuint)src.Length
+ )
+ .EnsureZstdSuccess();
+ }
+ }
+
+ public int Wrap(ArraySegment src, ArraySegment dest) =>
+ Wrap((ReadOnlySpan)src, dest);
+
+ public int Wrap(
+ byte[] src,
+ int srcOffset,
+ int srcLength,
+ byte[] dst,
+ int dstOffset,
+ int dstLength
+ ) =>
+ Wrap(
+ new ReadOnlySpan(src, srcOffset, srcLength),
+ new Span(dst, dstOffset, dstLength)
+ );
+
+ public bool TryWrap(byte[] src, byte[] dest, int offset, out int written) =>
+ TryWrap(src, new Span(dest, offset, dest.Length - offset), out written);
+
+ public bool TryWrap(ReadOnlySpan src, Span dest, out int written)
+ {
+ fixed (byte* srcPtr = src)
+ fixed (byte* destPtr = dest)
+ {
+ nuint returnValue;
+ using (var cctx = handle.Acquire())
+ {
+ returnValue = Unsafe.Methods.ZSTD_compress2(
+ cctx,
+ destPtr,
+ (nuint)dest.Length,
+ srcPtr,
+ (nuint)src.Length
+ );
+ }
+
+ if (returnValue == unchecked(0 - (nuint)ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall))
+ {
+ written = default;
+ return false;
+ }
+
+ returnValue.EnsureZstdSuccess();
+ written = (int)returnValue;
+ return true;
+ }
+ }
+
+ public bool TryWrap(ArraySegment src, ArraySegment dest, out int written) =>
+ TryWrap((ReadOnlySpan)src, dest, out written);
+
+ public bool TryWrap(
+ byte[] src,
+ int srcOffset,
+ int srcLength,
+ byte[] dst,
+ int dstOffset,
+ int dstLength,
+ out int written
+ ) =>
+ TryWrap(
+ new ReadOnlySpan(src, srcOffset, srcLength),
+ new Span(dst, dstOffset, dstLength),
+ out written
+ );
+
+ public void Dispose()
+ {
+ handle.Dispose();
+ GC.SuppressFinalize(this);
+ }
+
+ internal nuint CompressStream(
+ ref ZSTD_inBuffer_s input,
+ ref ZSTD_outBuffer_s output,
+ ZSTD_EndDirective directive
+ )
+ {
+ fixed (ZSTD_inBuffer_s* inputPtr = &input)
+ fixed (ZSTD_outBuffer_s* outputPtr = &output)
+ {
+ using var cctx = handle.Acquire();
+ return Unsafe
+ .Methods.ZSTD_compressStream2(cctx, outputPtr, inputPtr, directive)
+ .EnsureZstdSuccess();
+ }
+ }
+
+ public void SetPledgedSrcSize(ulong pledgedSrcSize)
+ {
+ using var cctx = handle.Acquire();
+ Unsafe.Methods.ZSTD_CCtx_setPledgedSrcSize(cctx, pledgedSrcSize).EnsureZstdSuccess();
+ }
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/Constants.cs b/src/SharpCompress/Compressors/ZStandard/Constants.cs
new file mode 100644
index 000000000..cce84fc09
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/Constants.cs
@@ -0,0 +1,8 @@
+namespace SharpCompress.Compressors.ZStandard;
+
+internal class Constants
+{
+ //NOTE: https://docs.microsoft.com/en-us/dotnet/framework/configure-apps/file-schema/runtime/gcallowverylargeobjects-element#remarks
+ //NOTE: https://github.com/dotnet/runtime/blob/v5.0.0-rtm.20519.4/src/libraries/System.Private.CoreLib/src/System/Array.cs#L27
+ public const ulong MaxByteArrayLength = 0x7FFFFFC7;
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs
new file mode 100644
index 000000000..9864a8055
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs
@@ -0,0 +1,293 @@
+using System;
+using System.Buffers;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using SharpCompress.Compressors.ZStandard.Unsafe;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+/// <summary>
+/// A read-only <see cref="Stream"/> that decompresses ZStandard data from an inner stream.
+/// </summary>
+public class DecompressionStream : Stream
+{
+    private readonly Stream innerStream;
+    private readonly byte[] inputBuffer;
+    private readonly int inputBufferSize;
+    private readonly bool preserveDecompressor;
+    private readonly bool leaveOpen;
+    private readonly bool checkEndOfStream;
+    // Null after disposal; also serves as the disposed flag.
+    private Decompressor? decompressor;
+    private ZSTD_inBuffer_s input;
+    // Result of the last decompress call that made progress; non-zero at end of
+    // source means the current frame was truncated.
+    private nuint lastDecompressResult = 0;
+    // False while the decompressor context may still hold buffered output.
+    private bool contextDrained = true;
+
+    /// <summary>
+    /// Creates a decompression stream that owns a private <see cref="Decompressor"/>.
+    /// </summary>
+    public DecompressionStream(
+        Stream stream,
+        int bufferSize = 0,
+        bool checkEndOfStream = true,
+        bool leaveOpen = true
+    )
+        : this(stream, new Decompressor(), bufferSize, checkEndOfStream, false, leaveOpen) { }
+
+    /// <summary>
+    /// Creates a decompression stream over <paramref name="stream"/> using the
+    /// supplied <paramref name="decompressor"/>.
+    /// </summary>
+    /// <param name="bufferSize">Input buffer size; 0 selects zstd's recommended size.</param>
+    /// <param name="checkEndOfStream">Throw if the source ends in the middle of a frame.</param>
+    /// <param name="preserveDecompressor">When true, the decompressor is not disposed with this stream.</param>
+    /// <param name="leaveOpen">When true, the inner stream is not disposed with this stream.</param>
+    public DecompressionStream(
+        Stream stream,
+        Decompressor decompressor,
+        int bufferSize = 0,
+        bool checkEndOfStream = true,
+        bool preserveDecompressor = true,
+        bool leaveOpen = true
+    )
+    {
+        if (stream == null)
+            throw new ArgumentNullException(nameof(stream));
+
+        if (decompressor == null)
+            throw new ArgumentNullException(nameof(decompressor));
+
+        if (!stream.CanRead)
+            throw new ArgumentException("Stream is not readable", nameof(stream));
+
+        if (bufferSize < 0)
+            throw new ArgumentOutOfRangeException(nameof(bufferSize));
+
+        innerStream = stream;
+        this.decompressor = decompressor;
+        this.preserveDecompressor = preserveDecompressor;
+        this.leaveOpen = leaveOpen;
+        this.checkEndOfStream = checkEndOfStream;
+
+        inputBufferSize =
+            bufferSize > 0
+                ? bufferSize
+                : (int)Unsafe.Methods.ZSTD_DStreamInSize().EnsureZstdSuccess();
+        inputBuffer = ArrayPool<byte>.Shared.Rent(inputBufferSize);
+        // pos == size marks the buffer as fully consumed so the first Read refills it.
+        input = new ZSTD_inBuffer_s { pos = (nuint)inputBufferSize, size = (nuint)inputBufferSize };
+    }
+
+    /// <summary>Sets an advanced decompression parameter on the underlying context.</summary>
+    public void SetParameter(ZSTD_dParameter parameter, int value)
+    {
+        EnsureNotDisposed();
+        decompressor.NotNull().SetParameter(parameter, value);
+    }
+
+    /// <summary>Reads an advanced decompression parameter from the underlying context.</summary>
+    public int GetParameter(ZSTD_dParameter parameter)
+    {
+        EnsureNotDisposed();
+        return decompressor.NotNull().GetParameter(parameter);
+    }
+
+    /// <summary>Loads a dictionary for subsequent frames.</summary>
+    public void LoadDictionary(byte[] dict)
+    {
+        EnsureNotDisposed();
+        decompressor.NotNull().LoadDictionary(dict);
+    }
+
+    ~DecompressionStream() => Dispose(false);
+
+    protected override void Dispose(bool disposing)
+    {
+        if (decompressor == null)
+            return;
+
+        // Managed members (pool, inner stream, decompressor) may only be touched
+        // on the explicit Dispose path, never from the finalizer.
+        if (disposing)
+        {
+            if (!preserveDecompressor)
+            {
+                decompressor.Dispose();
+            }
+
+            ArrayPool<byte>.Shared.Return(inputBuffer);
+
+            if (!leaveOpen)
+            {
+                innerStream.Dispose();
+            }
+        }
+
+        decompressor = null;
+        base.Dispose(disposing);
+    }
+
+    public override int Read(byte[] buffer, int offset, int count) =>
+        Read(new Span<byte>(buffer, offset, count));
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+    public override int Read(Span<byte> buffer)
+#else
+    public int Read(Span<byte> buffer)
+#endif
+    {
+        EnsureNotDisposed();
+
+        // Guard against infinite loop (output.pos would never become non-zero)
+        if (buffer.Length == 0)
+        {
+            return 0;
+        }
+
+        var output = new ZSTD_outBuffer_s { pos = 0, size = (nuint)buffer.Length };
+        while (true)
+        {
+            // If there is still input available, or there might be data buffered in the decompressor context, flush that out
+            while (input.pos < input.size || !contextDrained)
+            {
+                nuint oldInputPos = input.pos;
+                nuint result = DecompressStream(ref output, buffer);
+                if (output.pos > 0 || oldInputPos != input.pos)
+                {
+                    // Keep result from last decompress call that made some progress, so we know if we're at end of frame
+                    lastDecompressResult = result;
+                }
+                // If decompression filled the output buffer, there might still be data buffered in the decompressor context
+                contextDrained = output.pos < output.size;
+                // If we have data to return, return it immediately, so we won't stall on Read
+                if (output.pos > 0)
+                {
+                    return (int)output.pos;
+                }
+            }
+
+            // Otherwise, read some more input
+            int bytesRead;
+            if ((bytesRead = innerStream.Read(inputBuffer, 0, inputBufferSize)) == 0)
+            {
+                if (checkEndOfStream && lastDecompressResult != 0)
+                {
+                    throw new EndOfStreamException("Premature end of stream");
+                }
+
+                return 0;
+            }
+
+            input.size = (nuint)bytesRead;
+            input.pos = 0;
+        }
+    }
+
+#if !NETSTANDARD2_0 && !NETFRAMEWORK
+    public override Task<int> ReadAsync(
+        byte[] buffer,
+        int offset,
+        int count,
+        CancellationToken cancellationToken
+    ) => ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken).AsTask();
+
+    public override async ValueTask<int> ReadAsync(
+        Memory<byte> buffer,
+        CancellationToken cancellationToken = default
+    )
+#else
+
+    public override Task<int> ReadAsync(
+        byte[] buffer,
+        int offset,
+        int count,
+        CancellationToken cancellationToken
+    ) => ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken);
+
+    public async Task<int> ReadAsync(
+        Memory<byte> buffer,
+        CancellationToken cancellationToken = default
+    )
+#endif
+    {
+        EnsureNotDisposed();
+
+        // Guard against infinite loop (output.pos would never become non-zero)
+        if (buffer.Length == 0)
+        {
+            return 0;
+        }
+
+        var output = new ZSTD_outBuffer_s { pos = 0, size = (nuint)buffer.Length };
+        while (true)
+        {
+            // If there is still input available, or there might be data buffered in the decompressor context, flush that out
+            while (input.pos < input.size || !contextDrained)
+            {
+                nuint oldInputPos = input.pos;
+                nuint result = DecompressStream(ref output, buffer.Span);
+                if (output.pos > 0 || oldInputPos != input.pos)
+                {
+                    // Keep result from last decompress call that made some progress, so we know if we're at end of frame
+                    lastDecompressResult = result;
+                }
+                // If decompression filled the output buffer, there might still be data buffered in the decompressor context
+                contextDrained = output.pos < output.size;
+                // If we have data to return, return it immediately, so we won't stall on Read
+                if (output.pos > 0)
+                {
+                    return (int)output.pos;
+                }
+            }
+
+            // Otherwise, read some more input
+            int bytesRead;
+            if (
+                (
+                    bytesRead = await innerStream
+                        .ReadAsync(inputBuffer, 0, inputBufferSize, cancellationToken)
+                        .ConfigureAwait(false)
+                ) == 0
+            )
+            {
+                if (checkEndOfStream && lastDecompressResult != 0)
+                {
+                    throw new EndOfStreamException("Premature end of stream");
+                }
+
+                return 0;
+            }
+
+            input.size = (nuint)bytesRead;
+            input.pos = 0;
+        }
+    }
+
+    // Pins the pooled input buffer and the caller's output span around the native call.
+    private unsafe nuint DecompressStream(ref ZSTD_outBuffer_s output, Span<byte> outputBuffer)
+    {
+        fixed (byte* inputBufferPtr = inputBuffer)
+        fixed (byte* outputBufferPtr = outputBuffer)
+        {
+            input.src = inputBufferPtr;
+            output.dst = outputBufferPtr;
+            return decompressor.NotNull().DecompressStream(ref input, ref output);
+        }
+    }
+
+    public override bool CanRead => true;
+    public override bool CanSeek => false;
+    public override bool CanWrite => false;
+
+    public override long Length => throw new NotSupportedException();
+
+    public override long Position
+    {
+        get => throw new NotSupportedException();
+        set => throw new NotSupportedException();
+    }
+
+    public override void Flush() => throw new NotSupportedException();
+
+    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
+
+    public override void SetLength(long value) => throw new NotSupportedException();
+
+    public override void Write(byte[] buffer, int offset, int count) =>
+        throw new NotSupportedException();
+
+    private void EnsureNotDisposed()
+    {
+        if (decompressor == null)
+            throw new ObjectDisposedException(nameof(DecompressionStream));
+    }
+
+#if NETSTANDARD2_0 || NETFRAMEWORK
+    // IAsyncDisposable is unavailable on these targets; provide a Task-based shim.
+    public virtual Task DisposeAsync()
+    {
+        try
+        {
+            Dispose();
+            return Task.CompletedTask;
+        }
+        catch (Exception exc)
+        {
+            return Task.FromException(exc);
+        }
+    }
+#endif
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/Decompressor.cs b/src/SharpCompress/Compressors/ZStandard/Decompressor.cs
new file mode 100644
index 000000000..8a63c4d9d
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/Decompressor.cs
@@ -0,0 +1,176 @@
+using System;
+using SharpCompress.Compressors.ZStandard.Unsafe;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+/// <summary>
+/// Managed wrapper around a native ZStandard decompression context (DCtx).
+/// </summary>
+public unsafe class Decompressor : IDisposable
+{
+    private readonly SafeDctxHandle handle;
+
+    public Decompressor()
+    {
+        handle = SafeDctxHandle.Create();
+    }
+
+    /// <summary>Sets an advanced decompression parameter on the context.</summary>
+    public void SetParameter(ZSTD_dParameter parameter, int value)
+    {
+        using var dctx = handle.Acquire();
+        Unsafe.Methods.ZSTD_DCtx_setParameter(dctx, parameter, value).EnsureZstdSuccess();
+    }
+
+    /// <summary>Reads back an advanced decompression parameter from the context.</summary>
+    public int GetParameter(ZSTD_dParameter parameter)
+    {
+        using var dctx = handle.Acquire();
+        int value;
+        Unsafe.Methods.ZSTD_DCtx_getParameter(dctx, parameter, &value).EnsureZstdSuccess();
+        return value;
+    }
+
+    /// <summary>Loads a dictionary to be used by subsequent decompressions.</summary>
+    public void LoadDictionary(byte[] dict)
+    {
+        var dictReadOnlySpan = new ReadOnlySpan<byte>(dict);
+        this.LoadDictionary(dictReadOnlySpan);
+    }
+
+    /// <summary>Loads a dictionary to be used by subsequent decompressions.</summary>
+    public void LoadDictionary(ReadOnlySpan<byte> dict)
+    {
+        using var dctx = handle.Acquire();
+        fixed (byte* dictPtr = dict)
+            Unsafe
+                .Methods.ZSTD_DCtx_loadDictionary(dctx, dictPtr, (nuint)dict.Length)
+                .EnsureZstdSuccess();
+    }
+
+    /// <summary>
+    /// Returns the decompressed size of <paramref name="src"/> as reported by
+    /// ZSTD_decompressBound (an upper bound when the frame header omits the exact size).
+    /// </summary>
+    public static ulong GetDecompressedSize(ReadOnlySpan<byte> src)
+    {
+        fixed (byte* srcPtr = src)
+            return Unsafe
+                .Methods.ZSTD_decompressBound(srcPtr, (nuint)src.Length)
+                .EnsureContentSizeOk();
+    }
+
+    public static ulong GetDecompressedSize(ArraySegment<byte> src) =>
+        GetDecompressedSize((ReadOnlySpan<byte>)src);
+
+    public static ulong GetDecompressedSize(byte[] src, int srcOffset, int srcLength) =>
+        GetDecompressedSize(new ReadOnlySpan<byte>(src, srcOffset, srcLength));
+
+    /// <summary>
+    /// Decompresses <paramref name="src"/> into a newly allocated buffer and returns
+    /// the written portion.
+    /// </summary>
+    /// <exception cref="ZstdException">
+    /// The decompressed content exceeds <paramref name="maxDecompressedSize"/> or the
+    /// maximum possible byte array size.</exception>
+    public Span<byte> Unwrap(ReadOnlySpan<byte> src, int maxDecompressedSize = int.MaxValue)
+    {
+        var expectedDstSize = GetDecompressedSize(src);
+        if (expectedDstSize > (ulong)maxDecompressedSize)
+            throw new ZstdException(
+                ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall,
+                $"Decompressed content size {expectedDstSize} is greater than {nameof(maxDecompressedSize)} {maxDecompressedSize}"
+            );
+        if (expectedDstSize > Constants.MaxByteArrayLength)
+            throw new ZstdException(
+                ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall,
+                $"Decompressed content size {expectedDstSize} is greater than max possible byte array size {Constants.MaxByteArrayLength}"
+            );
+
+        var dest = new byte[expectedDstSize];
+        var length = Unwrap(src, dest);
+        return new Span<byte>(dest, 0, length);
+    }
+
+    public int Unwrap(byte[] src, byte[] dest, int offset) =>
+        Unwrap(src, new Span<byte>(dest, offset, dest.Length - offset));
+
+    /// <summary>
+    /// Decompresses <paramref name="src"/> into <paramref name="dest"/> and returns
+    /// the number of bytes written; throws on any zstd error.
+    /// </summary>
+    public int Unwrap(ReadOnlySpan<byte> src, Span<byte> dest)
+    {
+        fixed (byte* srcPtr = src)
+        fixed (byte* destPtr = dest)
+        {
+            using var dctx = handle.Acquire();
+            return (int)
+                Unsafe
+                    .Methods.ZSTD_decompressDCtx(
+                        dctx,
+                        destPtr,
+                        (nuint)dest.Length,
+                        srcPtr,
+                        (nuint)src.Length
+                    )
+                    .EnsureZstdSuccess();
+        }
+    }
+
+    public int Unwrap(
+        byte[] src,
+        int srcOffset,
+        int srcLength,
+        byte[] dst,
+        int dstOffset,
+        int dstLength
+    ) =>
+        Unwrap(
+            new ReadOnlySpan<byte>(src, srcOffset, srcLength),
+            new Span<byte>(dst, dstOffset, dstLength)
+        );
+
+    public bool TryUnwrap(byte[] src, byte[] dest, int offset, out int written) =>
+        TryUnwrap(src, new Span<byte>(dest, offset, dest.Length - offset), out written);
+
+    /// <summary>
+    /// Like <see cref="Unwrap(ReadOnlySpan{byte}, Span{byte})"/> but returns false
+    /// instead of throwing when <paramref name="dest"/> is too small.
+    /// </summary>
+    public bool TryUnwrap(ReadOnlySpan<byte> src, Span<byte> dest, out int written)
+    {
+        fixed (byte* srcPtr = src)
+        fixed (byte* destPtr = dest)
+        {
+            nuint returnValue;
+            using (var dctx = handle.Acquire())
+            {
+                returnValue = Unsafe.Methods.ZSTD_decompressDCtx(
+                    dctx,
+                    destPtr,
+                    (nuint)dest.Length,
+                    srcPtr,
+                    (nuint)src.Length
+                );
+            }
+
+            // Zstd encodes errors as (0 - errorCode); intercept only dstSize_tooSmall here.
+            if (returnValue == unchecked(0 - (nuint)ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall))
+            {
+                written = default;
+                return false;
+            }
+
+            returnValue.EnsureZstdSuccess();
+            written = (int)returnValue;
+            return true;
+        }
+    }
+
+    public bool TryUnwrap(
+        byte[] src,
+        int srcOffset,
+        int srcLength,
+        byte[] dst,
+        int dstOffset,
+        int dstLength,
+        out int written
+    ) =>
+        TryUnwrap(
+            new ReadOnlySpan<byte>(src, srcOffset, srcLength),
+            new Span<byte>(dst, dstOffset, dstLength),
+            out written
+        );
+
+    public void Dispose()
+    {
+        handle.Dispose();
+        GC.SuppressFinalize(this);
+    }
+
+    /// <summary>
+    /// One step of streaming decompression; buffer pos fields are updated in place.
+    /// </summary>
+    internal nuint DecompressStream(ref ZSTD_inBuffer_s input, ref ZSTD_outBuffer_s output)
+    {
+        fixed (ZSTD_inBuffer_s* inputPtr = &input)
+        fixed (ZSTD_outBuffer_s* outputPtr = &output)
+        {
+            using var dctx = handle.Acquire();
+            return Unsafe
+                .Methods.ZSTD_decompressStream(dctx, outputPtr, inputPtr)
+                .EnsureZstdSuccess();
+        }
+    }
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/JobThreadPool.cs b/src/SharpCompress/Compressors/ZStandard/JobThreadPool.cs
new file mode 100644
index 000000000..e783940dc
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/JobThreadPool.cs
@@ -0,0 +1,141 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Threading;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+/// <summary>
+/// A small fixed-size thread pool that executes unmanaged function-pointer jobs
+/// taken from a bounded blocking queue.
+/// </summary>
+internal unsafe class JobThreadPool : IDisposable
+{
+    private int numThreads;
+    private readonly List<JobThread> threads;
+    private readonly BlockingCollection<Job> queue;
+
+    // A unit of work: an unmanaged function pointer plus its opaque state argument.
+    private struct Job
+    {
+        public void* function;
+        public void* opaque;
+    }
+
+    // A worker thread paired with its own cancellation source so individual
+    // threads can be retired by Resize.
+    private class JobThread
+    {
+        private Thread Thread { get; }
+        public CancellationTokenSource CancellationTokenSource { get; }
+
+        public JobThread(Thread thread)
+        {
+            CancellationTokenSource = new CancellationTokenSource();
+            Thread = thread;
+        }
+
+        public void Start()
+        {
+            // The JobThread itself is the worker's state object.
+            Thread.Start(this);
+        }
+
+        public void Cancel()
+        {
+            CancellationTokenSource.Cancel();
+        }
+
+        public void Join()
+        {
+            Thread.Join();
+        }
+    }
+
+    // Worker loop: take and run jobs until the queue completes or this thread is cancelled.
+    private void Worker(object? obj)
+    {
+        if (obj is not JobThread poolThread)
+            return;
+
+        var cancellationToken = poolThread.CancellationTokenSource.Token;
+        while (!queue.IsCompleted && !cancellationToken.IsCancellationRequested)
+        {
+            try
+            {
+                if (queue.TryTake(out var job, -1, cancellationToken))
+                    ((delegate* managed<void*, void>)job.function)(job.opaque);
+            }
+            // TryTake throws if the collection is completed concurrently; the loop
+            // condition then exits cleanly.
+            catch (InvalidOperationException) { }
+            catch (OperationCanceledException) { }
+        }
+    }
+
+    /// <summary>
+    /// Creates a pool with <paramref name="num"/> worker threads and a bounded queue.
+    /// </summary>
+    public JobThreadPool(int num, int queueSize)
+    {
+        numThreads = num;
+        queue = new BlockingCollection<Job>(queueSize + 1);
+        threads = new List<JobThread>(num);
+        for (var i = 0; i < numThreads; i++)
+            CreateThread();
+    }
+
+    private void CreateThread()
+    {
+        var poolThread = new JobThread(new Thread(Worker));
+        threads.Add(poolThread);
+        poolThread.Start();
+    }
+
+    /// <summary>
+    /// Grows or shrinks the pool to <paramref name="num"/> threads. Shrinking
+    /// cancels the excess threads but does not wait for them to exit.
+    /// </summary>
+    public void Resize(int num)
+    {
+        lock (threads)
+        {
+            if (num < numThreads)
+            {
+                for (var i = numThreads - 1; i >= num; i--)
+                {
+                    threads[i].Cancel();
+                    threads.RemoveAt(i);
+                }
+            }
+            else
+            {
+                for (var i = numThreads; i < num; i++)
+                    CreateThread();
+            }
+
+            // Update the count inside the lock so concurrent Resize calls cannot
+            // observe a stale numThreads.
+            numThreads = num;
+        }
+    }
+
+    /// <summary>Enqueues a job, blocking if the queue is full.</summary>
+    public void Add(void* function, void* opaque)
+    {
+        queue.Add(new Job { function = function, opaque = opaque });
+    }
+
+    /// <summary>Attempts to enqueue a job without blocking.</summary>
+    public bool TryAdd(void* function, void* opaque)
+    {
+        return queue.TryAdd(new Job { function = function, opaque = opaque });
+    }
+
+    /// <summary>
+    /// Completes the queue and waits for all workers to finish.
+    /// </summary>
+    /// <param name="cancel">When true, workers are cancelled instead of draining remaining jobs.</param>
+    public void Join(bool cancel = true)
+    {
+        queue.CompleteAdding();
+        List<JobThread> jobThreads;
+        lock (threads)
+            jobThreads = new List<JobThread>(threads);
+
+        if (cancel)
+        {
+            foreach (var thread in jobThreads)
+                thread.Cancel();
+        }
+
+        foreach (var thread in jobThreads)
+            thread.Join();
+    }
+
+    public void Dispose()
+    {
+        // NOTE(review): Dispose does not Join first; callers are expected to call
+        // Join before disposing, otherwise workers may race the disposed queue.
+        queue.Dispose();
+    }
+
+    public int Size()
+    {
+        // todo not implemented
+        // https://github.com/dotnet/runtime/issues/24200
+        return 0;
+    }
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs b/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs
new file mode 100644
index 000000000..3b49bdcec
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs
@@ -0,0 +1,163 @@
+using System;
+using System.Runtime.InteropServices;
+using SharpCompress.Compressors.ZStandard.Unsafe;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+/// <summary>
+/// Provides the base class for ZstdSharp safe handle implementations.
+/// </summary>
+/// <remarks>
+/// Even though ZstdSharp is a managed library, its internals are using unmanaged
+/// memory and we are using safe handles in the library's high-level API to ensure
+/// proper disposal of unmanaged resources and increase safety.
+/// </remarks>
+/// <seealso cref="SafeCctxHandle"/>
+/// <seealso cref="SafeDctxHandle"/>
+internal abstract unsafe class SafeZstdHandle : SafeHandle
+{
+ /// <summary>
+ /// Parameterless constructor is hidden. Use the static Create factory
+ /// method to create a new safe handle instance.
+ /// </summary>
+ protected SafeZstdHandle()
+ : base(IntPtr.Zero, true) { }
+
+ // A zero native pointer marks the handle as invalid/unallocated.
+ public sealed override bool IsInvalid => handle == IntPtr.Zero;
+}
+
+/// <summary>
+/// Safely wraps an unmanaged Zstd compression context.
+/// </summary>
+internal sealed unsafe class SafeCctxHandle : SafeZstdHandle
+{
+    /// <inheritdoc/>
+    private SafeCctxHandle() { }
+
+    /// <summary>
+    /// Creates a new instance of <see cref="SafeCctxHandle"/>.
+    /// </summary>
+    /// <exception cref="ZstdException">
+    /// Creation failed.</exception>
+    public static SafeCctxHandle Create()
+    {
+        var safeHandle = new SafeCctxHandle();
+        bool success = false;
+        try
+        {
+            var cctx = Unsafe.Methods.ZSTD_createCCtx();
+            if (cctx == null)
+                throw new ZstdException(ZSTD_ErrorCode.ZSTD_error_GENERIC, "Failed to create cctx");
+            safeHandle.SetHandle((IntPtr)cctx);
+            success = true;
+        }
+        finally
+        {
+            if (!success)
+            {
+                safeHandle.SetHandleAsInvalid();
+            }
+        }
+        return safeHandle;
+    }
+
+    /// <summary>
+    /// Acquires a reference to the safe handle.
+    /// </summary>
+    /// <returns>
+    /// A <see cref="SafeHandleHolder{T}"/> instance that can be implicitly converted to a pointer
+    /// to <see cref="ZSTD_CCtx_s"/>.
+    /// </returns>
+    public SafeHandleHolder<ZSTD_CCtx_s> Acquire() => new(this);
+
+    protected override bool ReleaseHandle()
+    {
+        return Unsafe.Methods.ZSTD_freeCCtx((ZSTD_CCtx_s*)handle) == 0;
+    }
+}
+
+/// <summary>
+/// Safely wraps an unmanaged Zstd decompression context.
+/// </summary>
+internal sealed unsafe class SafeDctxHandle : SafeZstdHandle
+{
+    /// <inheritdoc/>
+    private SafeDctxHandle() { }
+
+    /// <summary>
+    /// Creates a new instance of <see cref="SafeDctxHandle"/>.
+    /// </summary>
+    /// <exception cref="ZstdException">
+    /// Creation failed.</exception>
+    public static SafeDctxHandle Create()
+    {
+        var safeHandle = new SafeDctxHandle();
+        bool success = false;
+        try
+        {
+            var dctx = Unsafe.Methods.ZSTD_createDCtx();
+            if (dctx == null)
+                throw new ZstdException(ZSTD_ErrorCode.ZSTD_error_GENERIC, "Failed to create dctx");
+            safeHandle.SetHandle((IntPtr)dctx);
+            success = true;
+        }
+        finally
+        {
+            if (!success)
+            {
+                safeHandle.SetHandleAsInvalid();
+            }
+        }
+        return safeHandle;
+    }
+
+    /// <summary>
+    /// Acquires a reference to the safe handle.
+    /// </summary>
+    /// <returns>
+    /// A <see cref="SafeHandleHolder{T}"/> instance that can be implicitly converted to a pointer
+    /// to <see cref="ZSTD_DCtx_s"/>.
+    /// </returns>
+    public SafeHandleHolder<ZSTD_DCtx_s> Acquire() => new(this);
+
+    protected override bool ReleaseHandle()
+    {
+        return Unsafe.Methods.ZSTD_freeDCtx((ZSTD_DCtx_s*)handle) == 0;
+    }
+}
+
+/// <summary>
+/// Provides a convenient interface to safely acquire pointers of a specific type
+/// from a <see cref="SafeHandle"/>, by utilizing <see langword="using"/> blocks.
+/// </summary>
+/// <typeparam name="T">The type of pointers to return.</typeparam>
+/// <remarks>
+/// Safe handle holders can be <see cref="Dispose"/>d to decrement the safe handle's
+/// reference count, and can be implicitly converted to pointers to <typeparamref name="T"/>.
+/// </remarks>
+internal unsafe ref struct SafeHandleHolder<T>
+    where T : unmanaged
+{
+    private readonly SafeHandle _handle;
+
+    // Tracks whether DangerousAddRef succeeded so Dispose releases exactly once.
+    private bool _refAdded;
+
+    public SafeHandleHolder(SafeHandle safeHandle)
+    {
+        _handle = safeHandle;
+        _refAdded = false;
+        safeHandle.DangerousAddRef(ref _refAdded);
+    }
+
+    public static implicit operator T*(SafeHandleHolder<T> holder) =>
+        (T*)holder._handle.DangerousGetHandle();
+
+    public void Dispose()
+    {
+        if (_refAdded)
+        {
+            _handle.DangerousRelease();
+            _refAdded = false;
+        }
+    }
+}
diff --git a/src/SharpCompress/Compressors/ZStandard/SynchronizationWrapper.cs b/src/SharpCompress/Compressors/ZStandard/SynchronizationWrapper.cs
new file mode 100644
index 000000000..406cacd43
--- /dev/null
+++ b/src/SharpCompress/Compressors/ZStandard/SynchronizationWrapper.cs
@@ -0,0 +1,22 @@
+using System.Threading;
+
+namespace SharpCompress.Compressors.ZStandard;
+
+internal static unsafe class SynchronizationWrapper
+{
+ private static object UnwrapObject(void** obj) => UnmanagedObject.Unwrap