diff --git a/.editorconfig b/.editorconfig index eab3d4286..96f2a953b 100644 --- a/.editorconfig +++ b/.editorconfig @@ -368,6 +368,9 @@ dotnet_diagnostic.NX0001.severity = error dotnet_diagnostic.NX0002.severity = silent dotnet_diagnostic.NX0003.severity = silent +dotnet_diagnostic.VSTHRD110.severity = error +dotnet_diagnostic.VSTHRD107.severity = error + ########################################## # Styles ########################################## diff --git a/Directory.Packages.props b/Directory.Packages.props index 703b2803c..9f2e0c864 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -13,8 +13,11 @@ - - - + + + diff --git a/build/packages.lock.json b/build/packages.lock.json index 9a72d50dc..ddbe3e974 100644 --- a/build/packages.lock.json +++ b/build/packages.lock.json @@ -14,11 +14,51 @@ "resolved": "1.1.9", "contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw==" }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" + } + }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[8.0.0, )", + "resolved": "8.0.0", + "contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "8.0.0", + "Microsoft.SourceLink.Common": "8.0.0" + } + }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "SimpleExec": { "type": "Direct", "requested": "[13.0.0, )", "resolved": "13.0.0", "contentHash": 
"zcCR1pupa1wI1VqBULRiQKeHKKZOuJhi/K+4V5oO+rHJZlaOD53ViFo1c3PavDoMAfSn/FAXGAWpPoF57rwhYg==" + }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies.net461": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" + }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw==" } } } diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs index 672382ffc..12ab15a6b 100644 --- a/src/SharpCompress/Archives/AbstractArchive.cs +++ b/src/SharpCompress/Archives/AbstractArchive.cs @@ -1,14 +1,13 @@ -using System; using System.Collections.Generic; -using System.IO; using System.Linq; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Readers; namespace SharpCompress.Archives; -public abstract class AbstractArchive : IArchive +public abstract class AbstractArchive : IArchive, IAsyncArchive where TEntry : IArchiveEntry where TVolume : IVolume { @@ -26,6 +25,12 @@ internal AbstractArchive(ArchiveType type, SourceStream sourceStream) _sourceStream = sourceStream; _lazyVolumes = new LazyReadOnlyCollection(LoadVolumes(_sourceStream)); _lazyEntries = new LazyReadOnlyCollection(LoadEntries(Volumes)); + _lazyVolumesAsync = new LazyAsyncReadOnlyCollection( + LoadVolumesAsync(_sourceStream) + ); + _lazyEntriesAsync = new LazyAsyncReadOnlyCollection( + LoadEntriesAsync(_lazyVolumesAsync) + ); } internal AbstractArchive(ArchiveType type) @@ -34,19 +39,16 @@ internal AbstractArchive(ArchiveType type) ReaderOptions = new(); _lazyVolumes = new 
LazyReadOnlyCollection(Enumerable.Empty()); _lazyEntries = new LazyReadOnlyCollection(Enumerable.Empty()); + _lazyVolumesAsync = new LazyAsyncReadOnlyCollection( + AsyncEnumerableEx.Empty() + ); + _lazyEntriesAsync = new LazyAsyncReadOnlyCollection( + AsyncEnumerableEx.Empty() + ); } public ArchiveType Type { get; } - private static Stream CheckStreams(Stream stream) - { - if (!stream.CanSeek || !stream.CanRead) - { - throw new ArchiveException("Archive streams must be Readable and Seekable"); - } - return stream; - } - /// /// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive. /// @@ -72,6 +74,19 @@ private static Stream CheckStreams(Stream stream) protected abstract IEnumerable LoadVolumes(SourceStream sourceStream); protected abstract IEnumerable LoadEntries(IEnumerable volumes); + protected virtual IAsyncEnumerable LoadVolumesAsync(SourceStream sourceStream) => + LoadVolumes(sourceStream).ToAsyncEnumerable(); + + protected virtual async IAsyncEnumerable LoadEntriesAsync( + IAsyncEnumerable volumes + ) + { + foreach (var item in LoadEntries(await volumes.ToListAsync())) + { + yield return item; + } + } + IEnumerable IArchive.Entries => Entries.Cast(); IEnumerable IArchive.Volumes => _lazyVolumes.Cast(); @@ -118,6 +133,7 @@ public IReader ExtractAllEntries() } protected abstract IReader CreateReaderForSolidExtraction(); + protected abstract ValueTask CreateReaderForSolidExtractionAsync(); /// /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files). 
@@ -140,4 +156,67 @@ public bool IsComplete return Entries.All(x => x.IsComplete); } } + + #region Async Support + + private readonly LazyAsyncReadOnlyCollection _lazyVolumesAsync; + private readonly LazyAsyncReadOnlyCollection _lazyEntriesAsync; + + public virtual async ValueTask DisposeAsync() + { + if (!_disposed) + { + await foreach (var v in _lazyVolumesAsync) + { + v.Dispose(); + } + foreach (var v in _lazyEntriesAsync.GetLoaded().Cast()) + { + v.Close(); + } + _sourceStream?.Dispose(); + + _disposed = true; + } + } + + private async ValueTask EnsureEntriesLoadedAsync() + { + await _lazyEntriesAsync.EnsureFullyLoaded(); + await _lazyVolumesAsync.EnsureFullyLoaded(); + } + + public virtual IAsyncEnumerable EntriesAsync => _lazyEntriesAsync; + IAsyncEnumerable IAsyncArchive.EntriesAsync => + EntriesAsync.Cast(); + + public IAsyncEnumerable VolumesAsync => _lazyVolumesAsync.Cast(); + + public async ValueTask ExtractAllEntriesAsync() + { + if (!IsSolid && Type != ArchiveType.SevenZip) + { + throw new SharpCompressException( + "ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)." 
+ ); + } + await EnsureEntriesLoadedAsync(); + return await CreateReaderForSolidExtractionAsync(); + } + + public virtual ValueTask IsSolidAsync() => new(false); + + public async ValueTask IsCompleteAsync() + { + await EnsureEntriesLoadedAsync(); + return await EntriesAsync.All(x => x.IsComplete); + } + + public async ValueTask TotalSizeAsync() => + await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.CompressedSize); + + public async ValueTask TotalUncompressSizeAsync() => + await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.Size); + + #endregion } diff --git a/src/SharpCompress/Archives/AbstractWritableArchive.cs b/src/SharpCompress/Archives/AbstractWritableArchive.cs index 744d4ee2a..13fb66f9a 100644 --- a/src/SharpCompress/Archives/AbstractWritableArchive.cs +++ b/src/SharpCompress/Archives/AbstractWritableArchive.cs @@ -162,7 +162,7 @@ public void SaveTo(Stream stream, WriterOptions options) SaveTo(stream, options, OldEntries, newEntries); } - public async Task SaveToAsync( + public async ValueTask SaveToAsync( Stream stream, WriterOptions options, CancellationToken cancellationToken = default @@ -208,7 +208,7 @@ protected abstract void SaveTo( IEnumerable newEntries ); - protected abstract Task SaveToAsync( + protected abstract ValueTask SaveToAsync( Stream stream, WriterOptions options, IEnumerable oldEntries, diff --git a/src/SharpCompress/Archives/ArchiveFactory.cs b/src/SharpCompress/Archives/ArchiveFactory.cs index 94368ece5..e052c3e80 100644 --- a/src/SharpCompress/Archives/ArchiveFactory.cs +++ b/src/SharpCompress/Archives/ArchiveFactory.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Factories; using SharpCompress.IO; @@ -24,6 +26,28 @@ public static IArchive Open(Stream stream, ReaderOptions? 
readerOptions = null) return FindFactory(stream).Open(stream, readerOptions); } + /// + /// Opens an Archive for random access asynchronously + /// + /// + /// + /// + /// + public static async ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + readerOptions ??= new ReaderOptions(); + stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize); + var factory = await FindFactoryAsync(stream, cancellationToken) + .ConfigureAwait(false); + return await factory + .OpenAsync(stream, readerOptions, cancellationToken) + .ConfigureAwait(false); + } + public static IWritableArchive Create(ArchiveType type) { var factory = Factory @@ -49,6 +73,22 @@ public static IArchive Open(string filePath, ReaderOptions? options = null) return Open(new FileInfo(filePath), options); } + /// + /// Opens an Archive from a filepath asynchronously. + /// + /// + /// + /// + public static ValueTask OpenAsync( + string filePath, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + filePath.NotNullOrEmpty(nameof(filePath)); + return OpenAsync(new FileInfo(filePath), options, cancellationToken); + } + /// /// Constructor with a FileInfo object to an existing file. /// @@ -61,6 +101,25 @@ public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null) return FindFactory(fileInfo).Open(fileInfo, options); } + /// + /// Opens an Archive from a FileInfo object asynchronously. + /// + /// + /// + /// + public static async ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
options = null, + CancellationToken cancellationToken = default + ) + { + options ??= new ReaderOptions { LeaveStreamOpen = false }; + + var factory = await FindFactoryAsync(fileInfo, cancellationToken) + .ConfigureAwait(false); + return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false); + } + /// /// Constructor with IEnumerable FileInfo objects, multi and split support. /// @@ -87,6 +146,40 @@ public static IArchive Open(IEnumerable fileInfos, ReaderOptions? opti return FindFactory(fileInfo).Open(filesArray, options); } + /// + /// Opens a multi-part archive from files asynchronously. + /// + /// + /// + /// + public static async ValueTask OpenAsync( + IEnumerable fileInfos, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + fileInfos.NotNull(nameof(fileInfos)); + var filesArray = fileInfos.ToArray(); + if (filesArray.Length == 0) + { + throw new InvalidOperationException("No files to open"); + } + + var fileInfo = filesArray[0]; + if (filesArray.Length == 1) + { + return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false); + } + + fileInfo.NotNull(nameof(fileInfo)); + options ??= new ReaderOptions { LeaveStreamOpen = false }; + + var factory = FindFactory(fileInfo); + return await factory + .OpenAsync(filesArray, options, cancellationToken) + .ConfigureAwait(false); + } + /// /// Constructor with IEnumerable FileInfo objects, multi and split support. /// @@ -113,6 +206,41 @@ public static IArchive Open(IEnumerable streams, ReaderOptions? options return FindFactory(firstStream).Open(streamsArray, options); } + /// + /// Opens a multi-part archive from streams asynchronously. + /// + /// + /// + /// + public static async ValueTask OpenAsync( + IEnumerable streams, + ReaderOptions? 
options = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + streams.NotNull(nameof(streams)); + var streamsArray = streams.ToArray(); + if (streamsArray.Length == 0) + { + throw new InvalidOperationException("No streams"); + } + + var firstStream = streamsArray[0]; + if (streamsArray.Length == 1) + { + return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false); + } + + firstStream.NotNull(nameof(firstStream)); + options ??= new ReaderOptions(); + + var factory = FindFactory(firstStream); + return await factory + .OpenAsync(streamsArray, options, cancellationToken) + .ConfigureAwait(false); + } + /// /// Extract to specific directory, retaining filename /// @@ -166,6 +294,52 @@ private static T FindFactory(Stream stream) ); } + private static async ValueTask FindFactoryAsync( + FileInfo finfo, + CancellationToken cancellationToken + ) + where T : IFactory + { + finfo.NotNull(nameof(finfo)); + using Stream stream = finfo.OpenRead(); + return await FindFactoryAsync(stream, cancellationToken); + } + + private static async ValueTask FindFactoryAsync( + Stream stream, + CancellationToken cancellationToken + ) + where T : IFactory + { + stream.NotNull(nameof(stream)); + if (!stream.CanRead || !stream.CanSeek) + { + throw new ArgumentException("Stream should be readable and seekable"); + } + + var factories = Factory.Factories.OfType(); + + var startPosition = stream.Position; + + foreach (var factory in factories) + { + stream.Seek(startPosition, SeekOrigin.Begin); + + if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken)) + { + stream.Seek(startPosition, SeekOrigin.Begin); + + return factory; + } + } + + var extensions = string.Join(", ", factories.Select(item => item.Name)); + + throw new InvalidOperationException( + $"Cannot determine compressed stream type. 
Supported Archive Formats: {extensions}" + ); + } + public static bool IsArchive( string filePath, out ArchiveType? type, diff --git a/src/SharpCompress/Archives/AutoArchiveFactory.cs b/src/SharpCompress/Archives/AutoArchiveFactory.cs index 78313df54..7751c7e01 100644 --- a/src/SharpCompress/Archives/AutoArchiveFactory.cs +++ b/src/SharpCompress/Archives/AutoArchiveFactory.cs @@ -1,12 +1,14 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Readers; namespace SharpCompress.Archives; -class AutoArchiveFactory : IArchiveFactory +internal class AutoArchiveFactory : IArchiveFactory { public string Name => nameof(AutoArchiveFactory); @@ -20,11 +22,30 @@ public bool IsArchive( int bufferSize = ReaderOptions.DefaultBufferSize ) => throw new NotSupportedException(); + public ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) => throw new NotSupportedException(); + public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException(); public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) => ArchiveFactory.Open(stream, readerOptions); + public async ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => await ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken); + public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => ArchiveFactory.Open(fileInfo, readerOptions); + + public async ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) => await ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken); } diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.cs b/src/SharpCompress/Archives/GZip/GZipArchive.cs index 34e4c6484..7c8c08f7c 100644 --- a/src/SharpCompress/Archives/GZip/GZipArchive.cs +++ b/src/SharpCompress/Archives/GZip/GZipArchive.cs @@ -102,6 +102,70 @@ public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = nul ); } + /// + /// Opens a GZipArchive asynchronously from a stream. + /// + /// + /// + /// + public static ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, readerOptions)); + } + + /// + /// Opens a GZipArchive asynchronously from a FileInfo. + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfo, readerOptions)); + } + + /// + /// Opens a GZipArchive asynchronously from multiple streams. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(streams, readerOptions)); + } + + /// + /// Opens a GZipArchive asynchronously from multiple FileInfo objects. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfos, readerOptions)); + } + public static GZipArchive Create() => new(); /// @@ -138,10 +202,13 @@ public void SaveTo(FileInfo fileInfo) SaveTo(stream, new WriterOptions(CompressionType.GZip)); } - public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) => + public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) => SaveToAsync(new FileInfo(filePath), cancellationToken); - public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default) + public async ValueTask SaveToAsync( + FileInfo fileInfo, + CancellationToken cancellationToken = default + ) { using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write); await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken) @@ -167,6 +234,28 @@ public static bool IsGZipFile(Stream stream) return true; } + public static async ValueTask IsGZipFileAsync( + Stream stream, + CancellationToken cancellationToken = default + ) + { + // read the header on the first read + byte[] header = new byte[10]; + + // workitem 8501: handle edge case (decompress empty stream) + if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false)) + { + return false; + } + + if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8) + { + return false; + } + + return true; + } + internal GZipArchive() : base(ArchiveType.GZip) { } @@ -213,7 +302,7 @@ IEnumerable newEntries } } - protected override async Task SaveToAsync( + protected override async ValueTask SaveToAsync( Stream stream, WriterOptions options, IEnumerable oldEntries, @@ -250,4 +339,11 @@ protected override IReader CreateReaderForSolidExtraction() stream.Position = 0; return GZipReader.Open(stream); } + + protected override ValueTask CreateReaderForSolidExtractionAsync() + { + var 
stream = Volumes.Single().Stream; + stream.Position = 0; + return new(GZipReader.Open(stream)); + } } diff --git a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs index 62e4760b3..049c7262a 100644 --- a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs +++ b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs @@ -23,10 +23,12 @@ public virtual Stream OpenEntryStream() return Parts.Single().GetCompressedStream().NotNull(); } - public virtual Task OpenEntryStreamAsync(CancellationToken cancellationToken = default) + public async ValueTask OpenEntryStreamAsync( + CancellationToken cancellationToken = default + ) { // GZip synchronous implementation is fast enough, just wrap it - return Task.FromResult(OpenEntryStream()); + return OpenEntryStream(); } #region IArchiveEntry Members diff --git a/src/SharpCompress/Archives/IArchiveEntry.cs b/src/SharpCompress/Archives/IArchiveEntry.cs index 69b3a674e..a38e65a0c 100644 --- a/src/SharpCompress/Archives/IArchiveEntry.cs +++ b/src/SharpCompress/Archives/IArchiveEntry.cs @@ -17,7 +17,7 @@ public interface IArchiveEntry : IEntry /// Opens the current entry as a stream that will decompress as it is read asynchronously. /// Read the entire stream or use SkipEntry on EntryStream. /// - Task OpenEntryStreamAsync(CancellationToken cancellationToken = default); + ValueTask OpenEntryStreamAsync(CancellationToken cancellationToken = default); /// /// The archive can find all the parts of the archive needed to extract this entry. diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs index af2c9be45..3bf940351 100644 --- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs +++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs @@ -37,7 +37,7 @@ public void WriteTo(Stream streamToWriteTo, IProgress? progress /// The stream to write the entry content to. /// Cancellation token. 
/// Optional progress reporter for tracking extraction progress. - public async Task WriteToAsync( + public async ValueTask WriteToAsync( Stream streamToWriteTo, IProgress? progress = null, CancellationToken cancellationToken = default @@ -110,18 +110,20 @@ public void WriteToDirectory( /// /// Extract to specific directory asynchronously, retaining filename /// - public Task WriteToDirectoryAsync( + public async ValueTask WriteToDirectoryAsync( string destinationDirectory, ExtractionOptions? options = null, CancellationToken cancellationToken = default ) => - ExtractionMethods.WriteEntryToDirectoryAsync( - entry, - destinationDirectory, - options, - entry.WriteToFileAsync, - cancellationToken - ); + await ExtractionMethods + .WriteEntryToDirectoryAsync( + entry, + destinationDirectory, + options, + entry.WriteToFileAsync, + cancellationToken + ) + .ConfigureAwait(false); /// /// Extract to specific file @@ -141,21 +143,23 @@ public void WriteToFile(string destinationFileName, ExtractionOptions? options = /// /// Extract to specific file asynchronously /// - public Task WriteToFileAsync( + public async ValueTask WriteToFileAsync( string destinationFileName, ExtractionOptions? 
options = null, CancellationToken cancellationToken = default ) => - ExtractionMethods.WriteEntryToFileAsync( - entry, - destinationFileName, - options, - async (x, fm, ct) => - { - using var fs = File.Open(destinationFileName, fm); - await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false); - }, - cancellationToken - ); + await ExtractionMethods + .WriteEntryToFileAsync( + entry, + destinationFileName, + options, + async (x, fm, ct) => + { + using var fs = File.Open(destinationFileName, fm); + await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false); + }, + cancellationToken + ) + .ConfigureAwait(false); } } diff --git a/src/SharpCompress/Archives/IArchiveExtensions.cs b/src/SharpCompress/Archives/IArchiveExtensions.cs index 0d39c6e2c..c1d2ac987 100644 --- a/src/SharpCompress/Archives/IArchiveExtensions.cs +++ b/src/SharpCompress/Archives/IArchiveExtensions.cs @@ -1,8 +1,6 @@ using System; using System.Collections.Generic; using System.IO; -using System.Threading; -using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Readers; @@ -80,89 +78,5 @@ private void WriteToDirectoryInternal( ); } } - - /// - /// Extract to specific directory asynchronously with progress reporting and cancellation support - /// - /// The folder to extract into. - /// Extraction options. - /// Optional progress reporter for tracking extraction progress. - /// Optional cancellation token. - public async Task WriteToDirectoryAsync( - string destinationDirectory, - ExtractionOptions? options = null, - IProgress? 
progress = null, - CancellationToken cancellationToken = default - ) - { - // For solid archives (Rar, 7Zip), use the optimized reader-based approach - if (archive.IsSolid || archive.Type == ArchiveType.SevenZip) - { - using var reader = archive.ExtractAllEntries(); - await reader.WriteAllToDirectoryAsync( - destinationDirectory, - options, - cancellationToken - ); - } - else - { - // For non-solid archives, extract entries directly - await archive.WriteToDirectoryAsyncInternal( - destinationDirectory, - options, - progress, - cancellationToken - ); - } - } - - private async Task WriteToDirectoryAsyncInternal( - string destinationDirectory, - ExtractionOptions? options, - IProgress? progress, - CancellationToken cancellationToken - ) - { - // Prepare for progress reporting - var totalBytes = archive.TotalUncompressSize; - var bytesRead = 0L; - - // Tracking for created directories. - var seenDirectories = new HashSet(); - - // Extract - foreach (var entry in archive.Entries) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (entry.IsDirectory) - { - var dirPath = Path.Combine( - destinationDirectory, - entry.Key.NotNull("Entry Key is null") - ); - if ( - Path.GetDirectoryName(dirPath + "/") is { } parentDirectory - && seenDirectories.Add(dirPath) - ) - { - Directory.CreateDirectory(parentDirectory); - } - continue; - } - - // Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions - await entry - .WriteToDirectoryAsync(destinationDirectory, options, cancellationToken) - .ConfigureAwait(false); - - // Update progress - bytesRead += entry.Size; - progress?.Report( - new ProgressReport(entry.Key ?? 
string.Empty, bytesRead, totalBytes) - ); - } - } } } diff --git a/src/SharpCompress/Archives/IArchiveFactory.cs b/src/SharpCompress/Archives/IArchiveFactory.cs index 370e5c9fe..1c1253f6a 100644 --- a/src/SharpCompress/Archives/IArchiveFactory.cs +++ b/src/SharpCompress/Archives/IArchiveFactory.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Factories; using SharpCompress.Readers; @@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory /// reading options. IArchive Open(Stream stream, ReaderOptions? readerOptions = null); + /// + /// Opens an Archive for random access asynchronously. + /// + /// An open, readable and seekable stream. + /// reading options. + /// Cancellation token. + ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ); + /// /// Constructor with a FileInfo object to an existing file. /// /// the file to open. /// reading options. IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null); + + /// + /// Opens an Archive from a FileInfo object asynchronously. + /// + /// the file to open. + /// reading options. + /// Cancellation token. + ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ); } diff --git a/src/SharpCompress/Archives/IAsyncArchive.cs b/src/SharpCompress/Archives/IAsyncArchive.cs new file mode 100644 index 000000000..bd3f290ea --- /dev/null +++ b/src/SharpCompress/Archives/IAsyncArchive.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using SharpCompress.Common; +using SharpCompress.Readers; + +namespace SharpCompress.Archives; + +public interface IAsyncArchive : IAsyncDisposable +{ + IAsyncEnumerable EntriesAsync { get; } + IAsyncEnumerable VolumesAsync { get; } + + ArchiveType Type { get; } + + /// + /// Use this method to extract all entries in an archive in order. + /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be + /// extracted sequentially for the best performance. + /// + ValueTask ExtractAllEntriesAsync(); + + /// + /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files). + /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID. + /// + ValueTask IsSolidAsync(); + + /// + /// This checks to see if all the known entries have IsComplete = true + /// + ValueTask IsCompleteAsync(); + + /// + /// The total size of the files compressed in the archive. + /// + ValueTask TotalSizeAsync(); + + /// + /// The total size of the files as uncompressed in the archive. 
+ /// + ValueTask TotalUncompressSizeAsync(); +} diff --git a/src/SharpCompress/Archives/IAsyncArchiveExtensions.cs b/src/SharpCompress/Archives/IAsyncArchiveExtensions.cs new file mode 100644 index 000000000..b6b0cad1e --- /dev/null +++ b/src/SharpCompress/Archives/IAsyncArchiveExtensions.cs @@ -0,0 +1,93 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using SharpCompress.Common; +using SharpCompress.Readers; + +namespace SharpCompress.Archives; + +public static class IAsyncArchiveExtensions +{ + /// + /// Extract to specific directory asynchronously with progress reporting and cancellation support + /// + /// The archive to extract. + /// The folder to extract into. + /// Extraction options. + /// Optional progress reporter for tracking extraction progress. + /// Optional cancellation token. + public static async Task WriteToDirectoryAsync( + this IAsyncArchive archive, + string destinationDirectory, + ExtractionOptions? options = null, + IProgress? progress = null, + CancellationToken cancellationToken = default + ) + { + // For solid archives (Rar, 7Zip), use the optimized reader-based approach + if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip) + { + await using var reader = await archive.ExtractAllEntriesAsync(); + await reader.WriteAllToDirectoryAsync(destinationDirectory, options, cancellationToken); + } + else + { + // For non-solid archives, extract entries directly + await archive.WriteToDirectoryAsyncInternal( + destinationDirectory, + options, + progress, + cancellationToken + ); + } + } + + private static async Task WriteToDirectoryAsyncInternal( + this IAsyncArchive archive, + string destinationDirectory, + ExtractionOptions? options, + IProgress? 
progress, + CancellationToken cancellationToken + ) + { + // Prepare for progress reporting + var totalBytes = await archive.TotalUncompressSizeAsync(); + var bytesRead = 0L; + + // Tracking for created directories. + var seenDirectories = new HashSet(); + + // Extract + await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (entry.IsDirectory) + { + var dirPath = Path.Combine( + destinationDirectory, + entry.Key.NotNull("Entry Key is null") + ); + if ( + Path.GetDirectoryName(dirPath + "/") is { } parentDirectory + && seenDirectories.Add(dirPath) + ) + { + Directory.CreateDirectory(parentDirectory); + } + continue; + } + + // Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions + await entry + .WriteToDirectoryAsync(destinationDirectory, options, cancellationToken) + .ConfigureAwait(false); + + // Update progress + bytesRead += entry.Size; + progress?.Report(new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)); + } + } +} diff --git a/src/SharpCompress/Archives/IMultiArchiveFactory.cs b/src/SharpCompress/Archives/IMultiArchiveFactory.cs index c26b649f1..4fa94d7fe 100644 --- a/src/SharpCompress/Archives/IMultiArchiveFactory.cs +++ b/src/SharpCompress/Archives/IMultiArchiveFactory.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Factories; using SharpCompress.Readers; @@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory /// reading options. IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null); + /// + /// Opens a multi-part archive from streams asynchronously. + /// + /// + /// reading options. + /// Cancellation token. + ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ); + /// /// Constructor with IEnumerable Stream objects, multi and split support. /// /// /// reading options. IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOptions = null); + + /// + /// Opens a multi-part archive from files asynchronously. + /// + /// + /// reading options. + /// Cancellation token. + ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ); } diff --git a/src/SharpCompress/Archives/IWritableArchive.cs b/src/SharpCompress/Archives/IWritableArchive.cs index dde22a032..74d8da763 100644 --- a/src/SharpCompress/Archives/IWritableArchive.cs +++ b/src/SharpCompress/Archives/IWritableArchive.cs @@ -22,7 +22,7 @@ IArchiveEntry AddEntry( void SaveTo(Stream stream, WriterOptions options); - Task SaveToAsync( + ValueTask SaveToAsync( Stream stream, WriterOptions options, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Archives/IWritableArchiveExtensions.cs b/src/SharpCompress/Archives/IWritableArchiveExtensions.cs index 4defe6049..60ec83d85 100644 --- a/src/SharpCompress/Archives/IWritableArchiveExtensions.cs +++ b/src/SharpCompress/Archives/IWritableArchiveExtensions.cs @@ -44,14 +44,14 @@ WriterOptions options writableArchive.SaveTo(stream, options); } - public static Task SaveToAsync( + public static ValueTask SaveToAsync( this IWritableArchive writableArchive, string filePath, WriterOptions options, CancellationToken cancellationToken = default ) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken); - public static async Task SaveToAsync( + public static async ValueTask SaveToAsync( this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options, diff --git a/src/SharpCompress/Archives/Rar/RarArchive.cs b/src/SharpCompress/Archives/Rar/RarArchive.cs index 9acfdccc5..03b8d4d90 100644 --- 
a/src/SharpCompress/Archives/Rar/RarArchive.cs +++ b/src/SharpCompress/Archives/Rar/RarArchive.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Rar; using SharpCompress.Common.Rar.Headers; @@ -65,7 +67,13 @@ protected override IEnumerable LoadVolumes(SourceStream sourceStream) return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable(); } - protected override IReader CreateReaderForSolidExtraction() + protected override IReader CreateReaderForSolidExtraction() => + CreateReaderForSolidExtractionInternal(); + + protected override ValueTask CreateReaderForSolidExtractionAsync() => + new(CreateReaderForSolidExtractionInternal()); + + private RarReader CreateReaderForSolidExtractionInternal() { if (this.IsMultipartVolume()) { @@ -181,6 +189,70 @@ public static RarArchive Open(IEnumerable streams, ReaderOptions? reader ); } + /// + /// Opens a RarArchive asynchronously from a stream. + /// + /// + /// + /// + public static ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, readerOptions)); + } + + /// + /// Opens a RarArchive asynchronously from a FileInfo. + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfo, readerOptions)); + } + + /// + /// Opens a RarArchive asynchronously from multiple streams. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(streams, readerOptions)); + } + + /// + /// Opens a RarArchive asynchronously from multiple FileInfo objects. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfos, readerOptions)); + } + public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath)); public static bool IsRarFile(FileInfo fileInfo) diff --git a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs index 69c54f310..0fe259cca 100644 --- a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs +++ b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs @@ -92,7 +92,9 @@ public Stream OpenEntryStream() return stream; } - public async Task OpenEntryStreamAsync(CancellationToken cancellationToken = default) + public async ValueTask OpenEntryStreamAsync( + CancellationToken cancellationToken = default + ) { RarStream stream; if (IsRarV3) diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs index d9b5ba1aa..43f49abed 100644 --- a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs +++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs @@ -105,6 +105,70 @@ public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = ); } + /// + /// Opens a SevenZipArchive asynchronously from a stream. + /// + /// + /// + /// + public static ValueTask OpenAsync( + Stream stream, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, readerOptions)); + } + + /// + /// Opens a SevenZipArchive asynchronously from a FileInfo. + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfo, readerOptions)); + } + + /// + /// Opens a SevenZipArchive asynchronously from multiple streams. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(streams, readerOptions)); + } + + /// + /// Opens a SevenZipArchive asynchronously from multiple FileInfo objects. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfos, readerOptions)); + } + /// /// Constructor with a SourceStream able to handle FileInfo and Streams. 
/// @@ -201,6 +265,9 @@ private static bool SignatureMatch(Stream stream) protected override IReader CreateReaderForSolidExtraction() => new SevenZipReader(ReaderOptions, this); + protected override ValueTask CreateReaderForSolidExtractionAsync() => + new(new SevenZipReader(ReaderOptions, this)); + public override bool IsSolid => Entries .Where(x => !x.IsDirectory) diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs index 754c8c637..a0d4a50d8 100644 --- a/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs +++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs @@ -12,8 +12,9 @@ internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part) public Stream OpenEntryStream() => FilePart.GetCompressedStream(); - public Task OpenEntryStreamAsync(CancellationToken cancellationToken = default) => - Task.FromResult(OpenEntryStream()); + public async ValueTask OpenEntryStreamAsync( + CancellationToken cancellationToken = default + ) => OpenEntryStream(); public IArchive Archive { get; } diff --git a/src/SharpCompress/Archives/Tar/TarArchive.cs b/src/SharpCompress/Archives/Tar/TarArchive.cs index 2754fd9bb..1aeaf9a7a 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.cs @@ -103,6 +103,70 @@ public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null ); } + /// + /// Opens a TarArchive asynchronously from a stream. + /// + /// + /// + /// + public static ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, readerOptions)); + } + + /// + /// Opens a TarArchive asynchronously from a FileInfo. + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfo, readerOptions)); + } + + /// + /// Opens a TarArchive asynchronously from multiple streams. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(streams, readerOptions)); + } + + /// + /// Opens a TarArchive asynchronously from multiple FileInfo objects. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfos, readerOptions)); + } + public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath)); public static bool IsTarFile(FileInfo fileInfo) @@ -259,7 +323,7 @@ IEnumerable newEntries } } - protected override async Task SaveToAsync( + protected override async ValueTask SaveToAsync( Stream stream, WriterOptions options, IEnumerable oldEntries, @@ -302,4 +366,11 @@ protected override IReader CreateReaderForSolidExtraction() stream.Position = 0; return TarReader.Open(stream); } + + protected override ValueTask CreateReaderForSolidExtractionAsync() + { + var stream = Volumes.Single().Stream; + stream.Position = 0; + return new(TarReader.Open(stream)); + } } diff --git a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs index 8c0827917..cbea2c717 100644 --- a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs +++ b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs @@ -14,9 +14,9 @@ internal TarArchiveEntry(TarArchive archive, TarFilePart? 
part, CompressionType public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull(); - public virtual Task OpenEntryStreamAsync( + public async ValueTask OpenEntryStreamAsync( CancellationToken cancellationToken = default - ) => Task.FromResult(OpenEntryStream()); + ) => OpenEntryStream(); #region IArchiveEntry Members diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.cs b/src/SharpCompress/Archives/Zip/ZipArchive.cs index 57db85c2a..756bc8863 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.cs @@ -124,6 +124,70 @@ public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null ); } + /// + /// Opens a ZipArchive asynchronously from a stream. + /// + /// + /// + /// + public static ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, readerOptions)); + } + + /// + /// Opens a ZipArchive asynchronously from a FileInfo. + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfo, readerOptions)); + } + + /// + /// Opens a ZipArchive asynchronously from multiple streams. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(streams, readerOptions)); + } + + /// + /// Opens a ZipArchive asynchronously from multiple FileInfo objects. + /// + /// + /// + /// + public static ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(fileInfos, readerOptions)); + } + public static bool IsZipFile( string filePath, string? password = null, @@ -199,7 +263,95 @@ public static bool IsZipMulti( if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe { var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding()); - var x = z.ReadSeekableHeader(stream).FirstOrDefault(); + var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault(); + return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry; + } + else + { + return false; + } + } + return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + } + catch (CryptographicException) + { + return true; + } + catch + { + return false; + } + } + + public static async ValueTask IsZipFileAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null); + try + { + if (stream is not SharpCompressStream) + { + stream = new SharpCompressStream(stream, bufferSize: bufferSize); + } + + var header = await headerFactory + .ReadStreamHeaderAsync(stream) + .Where(x => x.ZipHeaderType != ZipHeaderType.Split) + .FirstOrDefaultAsync(); + if (header is null) + { + return false; + } + return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + } + catch (CryptographicException) + { + return true; + } + catch + { + return false; + } + } + + public static async ValueTask IsZipMultiAsync( + Stream stream, + string? 
password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null); + try + { + if (stream is not SharpCompressStream) + { + stream = new SharpCompressStream(stream, bufferSize: bufferSize); + } + + var header = headerFactory + .ReadStreamHeader(stream) + .FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split); + if (header is null) + { + if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe + { + var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding()); + ZipHeader? x = null; + await foreach ( + var h in z.ReadSeekableHeaderAsync(stream) + .WithCancellation(cancellationToken) + ) + { + x = h; + break; + } return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry; } else @@ -254,7 +406,9 @@ internal ZipArchive() protected override IEnumerable LoadEntries(IEnumerable volumes) { var vols = volumes.ToArray(); - foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream)) + foreach ( + var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true) + ) { if (h != null) { @@ -298,6 +452,59 @@ protected override IEnumerable LoadEntries(IEnumerable LoadEntriesAsync( + IAsyncEnumerable volumes + ) + { + var vols = await volumes.ToListAsync(); + var volsArray = vols.ToArray(); + + await foreach ( + var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream) + ) + { + if (h != null) + { + switch (h.ZipHeaderType) + { + case ZipHeaderType.DirectoryEntry: + { + var deh = (DirectoryEntryHeader)h; + Stream s; + if ( + deh.RelativeOffsetOfEntryHeader + deh.CompressedSize + > volsArray[deh.DiskNumberStart].Stream.Length + ) + { + var v = volsArray.Skip(deh.DiskNumberStart).ToArray(); + s = new SourceStream( + v[0].Stream, + i => i < v.Length ? 
v[i].Stream : null, + new ReaderOptions() { LeaveStreamOpen = true } + ); + } + else + { + s = volsArray[deh.DiskNumberStart].Stream; + } + + yield return new ZipArchiveEntry( + this, + new SeekableZipFilePart(headerFactory.NotNull(), deh, s) + ); + } + break; + case ZipHeaderType.DirectoryEnd: + { + var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty(); + volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes); + yield break; + } + } + } + } + } + public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate)); protected override void SaveTo( @@ -329,7 +536,7 @@ IEnumerable newEntries } } - protected override async Task SaveToAsync( + protected override async ValueTask SaveToAsync( Stream stream, WriterOptions options, IEnumerable oldEntries, @@ -385,4 +592,11 @@ protected override IReader CreateReaderForSolidExtraction() ((IStreamStack)stream).StackSeek(0); return ZipReader.Open(stream, ReaderOptions, Entries); } + + protected override ValueTask CreateReaderForSolidExtractionAsync() + { + var stream = Volumes.Single().Stream; + stream.Position = 0; + return new(ZipReader.Open(stream)); + } } diff --git a/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs b/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs index a6baf34b3..f59da4f66 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs @@ -13,9 +13,17 @@ internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? 
part) public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull(); - public virtual Task OpenEntryStreamAsync( + public async ValueTask OpenEntryStreamAsync( CancellationToken cancellationToken = default - ) => Task.FromResult(OpenEntryStream()); + ) + { + var part = Parts.Single(); + if (part is SeekableZipFilePart seekablePart) + { + return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull(); + } + return OpenEntryStream(); + } #region IArchiveEntry Members diff --git a/src/SharpCompress/Common/AsyncBinaryReader.cs b/src/SharpCompress/Common/AsyncBinaryReader.cs new file mode 100644 index 000000000..51da5d5cd --- /dev/null +++ b/src/SharpCompress/Common/AsyncBinaryReader.cs @@ -0,0 +1,95 @@ +using System; +using System.Buffers.Binary; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace SharpCompress.Common +{ + public sealed class AsyncBinaryReader : IDisposable + { + private readonly Stream _stream; + private readonly Stream _originalStream; + private readonly bool _leaveOpen; + private readonly byte[] _buffer = new byte[8]; + private bool _disposed; + + public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096) + { + _originalStream = stream ?? 
throw new ArgumentNullException(nameof(stream)); + _leaveOpen = leaveOpen; + + // Use the stream directly without wrapping in BufferedStream + // BufferedStream uses synchronous Read internally which doesn't work with async-only streams + // SharpCompress uses SharpCompressStream for buffering which supports true async reads + _stream = stream; + } + + public Stream BaseStream => _stream; + + public async ValueTask ReadByteAsync(CancellationToken ct = default) + { + await _stream.ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false); + return _buffer[0]; + } + + public async ValueTask ReadUInt16Async(CancellationToken ct = default) + { + await _stream.ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false); + return BinaryPrimitives.ReadUInt16LittleEndian(_buffer); + } + + public async ValueTask ReadUInt32Async(CancellationToken ct = default) + { + await _stream.ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false); + return BinaryPrimitives.ReadUInt32LittleEndian(_buffer); + } + + public async ValueTask ReadUInt64Async(CancellationToken ct = default) + { + await _stream.ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false); + return BinaryPrimitives.ReadUInt64LittleEndian(_buffer); + } + + public async ValueTask ReadBytesAsync(int count, CancellationToken ct = default) + { + var result = new byte[count]; + await _stream.ReadExactAsync(result, 0, count, ct).ConfigureAwait(false); + return result; + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _disposed = true; + + // Dispose the original stream if we own it + if (!_leaveOpen) + { + _originalStream.Dispose(); + } + } + +#if NET6_0_OR_GREATER + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + // Dispose the original stream if we own it + if (!_leaveOpen) + { + await _originalStream.DisposeAsync().ConfigureAwait(false); + } + } +#endif + } +} diff --git a/src/SharpCompress/Common/EntryStream.cs 
b/src/SharpCompress/Common/EntryStream.cs index 9e87e25e0..11d0e898f 100644 --- a/src/SharpCompress/Common/EntryStream.cs +++ b/src/SharpCompress/Common/EntryStream.cs @@ -56,7 +56,7 @@ public void SkipEntry() /// /// Asynchronously skip the rest of the entry stream. /// - public async Task SkipEntryAsync(CancellationToken cancellationToken = default) + public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default) { await this.SkipAsync(cancellationToken).ConfigureAwait(false); _completed = true; diff --git a/src/SharpCompress/Common/ExtractionMethods.cs b/src/SharpCompress/Common/ExtractionMethods.cs index 509524b15..787771de9 100644 --- a/src/SharpCompress/Common/ExtractionMethods.cs +++ b/src/SharpCompress/Common/ExtractionMethods.cs @@ -124,11 +124,11 @@ Action openAndWrite } } - public static async Task WriteEntryToDirectoryAsync( + public static async ValueTask WriteEntryToDirectoryAsync( IEntry entry, string destinationDirectory, ExtractionOptions? options, - Func writeAsync, + Func writeAsync, CancellationToken cancellationToken = default ) { @@ -197,11 +197,11 @@ public static async Task WriteEntryToDirectoryAsync( } } - public static async Task WriteEntryToFileAsync( + public static async ValueTask WriteEntryToFileAsync( IEntry entry, string destinationFileName, ExtractionOptions? options, - Func openAndWriteAsync, + Func openAndWriteAsync, CancellationToken cancellationToken = default ) { diff --git a/src/SharpCompress/Common/FilePart.cs b/src/SharpCompress/Common/FilePart.cs index 3548b6c1c..583dbf4b4 100644 --- a/src/SharpCompress/Common/FilePart.cs +++ b/src/SharpCompress/Common/FilePart.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace SharpCompress.Common; @@ -14,4 +16,8 @@ public abstract class FilePart internal abstract Stream? GetCompressedStream(); internal abstract Stream? 
GetRawStream(); internal bool Skipped { get; set; } + + internal virtual ValueTask GetCompressedStreamAsync( + CancellationToken cancellationToken = default + ) => new(GetCompressedStream()); } diff --git a/src/SharpCompress/Common/Zip/Headers/DirectoryEndHeader.cs b/src/SharpCompress/Common/Zip/Headers/DirectoryEndHeader.cs index 2e54a6ddd..7d35f3ea6 100644 --- a/src/SharpCompress/Common/Zip/Headers/DirectoryEndHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/DirectoryEndHeader.cs @@ -1,4 +1,5 @@ using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; @@ -19,6 +20,18 @@ internal override void Read(BinaryReader reader) Comment = reader.ReadBytes(CommentLength); } + internal override async ValueTask Read(AsyncBinaryReader reader) + { + VolumeNumber = await reader.ReadUInt16Async(); + FirstVolumeWithDirectory = await reader.ReadUInt16Async(); + TotalNumberOfEntriesInDisk = await reader.ReadUInt16Async(); + TotalNumberOfEntries = await reader.ReadUInt16Async(); + DirectorySize = await reader.ReadUInt32Async(); + DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async(); + CommentLength = await reader.ReadUInt16Async(); + Comment = await reader.ReadBytesAsync(CommentLength); + } + public ushort VolumeNumber { get; private set; } public ushort FirstVolumeWithDirectory { get; private set; } diff --git a/src/SharpCompress/Common/Zip/Headers/DirectoryEntryHeader.cs b/src/SharpCompress/Common/Zip/Headers/DirectoryEntryHeader.cs index 6a95a5bfb..a9ed564f6 100644 --- a/src/SharpCompress/Common/Zip/Headers/DirectoryEntryHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/DirectoryEntryHeader.cs @@ -1,5 +1,6 @@ using System.IO; using System.Linq; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; @@ -31,7 +32,37 @@ internal override void Read(BinaryReader reader) var extra = reader.ReadBytes(extraLength); var comment = reader.ReadBytes(commentLength); - // According to .ZIP File Format Specification + 
ProcessReadData(name, extra, comment); + } + + internal override async ValueTask Read(AsyncBinaryReader reader) + { + Version = await reader.ReadUInt16Async(); + VersionNeededToExtract = await reader.ReadUInt16Async(); + Flags = (HeaderFlags)await reader.ReadUInt16Async(); + CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async(); + OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async(); + OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async(); + Crc = await reader.ReadUInt32Async(); + CompressedSize = await reader.ReadUInt32Async(); + UncompressedSize = await reader.ReadUInt32Async(); + var nameLength = await reader.ReadUInt16Async(); + var extraLength = await reader.ReadUInt16Async(); + var commentLength = await reader.ReadUInt16Async(); + DiskNumberStart = await reader.ReadUInt16Async(); + InternalFileAttributes = await reader.ReadUInt16Async(); + ExternalFileAttributes = await reader.ReadUInt32Async(); + RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async(); + + var name = await reader.ReadBytesAsync(nameLength); + var extra = await reader.ReadBytesAsync(extraLength); + var comment = await reader.ReadBytesAsync(commentLength); + + ProcessReadData(name, extra, comment); + } + + private void ProcessReadData(byte[] name, byte[] extra, byte[] comment) + { // // For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT // diff --git a/src/SharpCompress/Common/Zip/Headers/IgnoreHeader.cs b/src/SharpCompress/Common/Zip/Headers/IgnoreHeader.cs index 5a587a7bc..9c648baf3 100644 --- a/src/SharpCompress/Common/Zip/Headers/IgnoreHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/IgnoreHeader.cs @@ -1,4 +1,5 @@ using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; @@ -8,4 +9,6 @@ public IgnoreHeader(ZipHeaderType type) : base(type) { } internal override void Read(BinaryReader reader) { } + + internal override ValueTask Read(AsyncBinaryReader reader) => 
default; } diff --git a/src/SharpCompress/Common/Zip/Headers/LocalEntryHeader.cs b/src/SharpCompress/Common/Zip/Headers/LocalEntryHeader.cs index 6fd3a9e2c..9091d454e 100644 --- a/src/SharpCompress/Common/Zip/Headers/LocalEntryHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/LocalEntryHeader.cs @@ -1,13 +1,12 @@ using System.IO; using System.Linq; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; -internal class LocalEntryHeader : ZipFileEntry +internal class LocalEntryHeader(IArchiveEncoding archiveEncoding) + : ZipFileEntry(ZipHeaderType.LocalEntry, archiveEncoding) { - public LocalEntryHeader(IArchiveEncoding archiveEncoding) - : base(ZipHeaderType.LocalEntry, archiveEncoding) { } - internal override void Read(BinaryReader reader) { Version = reader.ReadUInt16(); @@ -23,7 +22,29 @@ internal override void Read(BinaryReader reader) var name = reader.ReadBytes(nameLength); var extra = reader.ReadBytes(extraLength); - // According to .ZIP File Format Specification + ProcessReadData(name, extra); + } + + internal override async ValueTask Read(AsyncBinaryReader reader) + { + Version = await reader.ReadUInt16Async(); + Flags = (HeaderFlags)await reader.ReadUInt16Async(); + CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async(); + OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async(); + OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async(); + Crc = await reader.ReadUInt32Async(); + CompressedSize = await reader.ReadUInt32Async(); + UncompressedSize = await reader.ReadUInt32Async(); + var nameLength = await reader.ReadUInt16Async(); + var extraLength = await reader.ReadUInt16Async(); + var name = await reader.ReadBytesAsync(nameLength); + var extra = await reader.ReadBytesAsync(extraLength); + + ProcessReadData(name, extra); + } + + private void ProcessReadData(byte[] name, byte[] extra) + { // // For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT // diff 
--git a/src/SharpCompress/Common/Zip/Headers/SplitHeader.cs b/src/SharpCompress/Common/Zip/Headers/SplitHeader.cs index 4151a6cbf..29aaabaae 100644 --- a/src/SharpCompress/Common/Zip/Headers/SplitHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/SplitHeader.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; @@ -9,4 +10,7 @@ public SplitHeader() : base(ZipHeaderType.Split) { } internal override void Read(BinaryReader reader) => throw new NotImplementedException(); + + internal override ValueTask Read(AsyncBinaryReader reader) => + throw new NotImplementedException(); } diff --git a/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndHeader.cs b/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndHeader.cs index a74b4d1f0..b15b6f162 100644 --- a/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndHeader.cs @@ -1,4 +1,5 @@ using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; @@ -26,6 +27,25 @@ internal override void Read(BinaryReader reader) ); } + internal override async ValueTask Read(AsyncBinaryReader reader) + { + SizeOfDirectoryEndRecord = (long)await reader.ReadUInt64Async(); + VersionMadeBy = await reader.ReadUInt16Async(); + VersionNeededToExtract = await reader.ReadUInt16Async(); + VolumeNumber = await reader.ReadUInt32Async(); + FirstVolumeWithDirectory = await reader.ReadUInt32Async(); + TotalNumberOfEntriesInDisk = (long)await reader.ReadUInt64Async(); + TotalNumberOfEntries = (long)await reader.ReadUInt64Async(); + DirectorySize = (long)await reader.ReadUInt64Async(); + DirectoryStartOffsetRelativeToDisk = (long)await reader.ReadUInt64Async(); + DataSector = await reader.ReadBytesAsync( + (int)( + SizeOfDirectoryEndRecord + - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS + ) + ); + } + private const int 
SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44; public long SizeOfDirectoryEndRecord { get; private set; } diff --git a/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndLocatorHeader.cs b/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndLocatorHeader.cs index 3020d377e..8326be99a 100644 --- a/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndLocatorHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/Zip64DirectoryEndLocatorHeader.cs @@ -1,12 +1,10 @@ using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; -internal class Zip64DirectoryEndLocatorHeader : ZipHeader +internal class Zip64DirectoryEndLocatorHeader() : ZipHeader(ZipHeaderType.Zip64DirectoryEndLocator) { - public Zip64DirectoryEndLocatorHeader() - : base(ZipHeaderType.Zip64DirectoryEndLocator) { } - internal override void Read(BinaryReader reader) { FirstVolumeWithDirectory = reader.ReadUInt32(); @@ -14,6 +12,13 @@ internal override void Read(BinaryReader reader) TotalNumberOfVolumes = reader.ReadUInt32(); } + internal override async ValueTask Read(AsyncBinaryReader reader) + { + FirstVolumeWithDirectory = await reader.ReadUInt32Async(); + RelativeOffsetOfTheEndOfDirectoryRecord = (long)await reader.ReadUInt64Async(); + TotalNumberOfVolumes = await reader.ReadUInt32Async(); + } + public uint FirstVolumeWithDirectory { get; private set; } public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; } diff --git a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs index 374f54370..edcb29767 100644 --- a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs +++ b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs @@ -2,18 +2,14 @@ using System.Buffers.Binary; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; -internal abstract class ZipFileEntry : ZipHeader +internal 
abstract class ZipFileEntry(ZipHeaderType type, IArchiveEncoding archiveEncoding) + : ZipHeader(type) { - protected ZipFileEntry(ZipHeaderType type, IArchiveEncoding archiveEncoding) - : base(type) - { - Extra = new List(); - ArchiveEncoding = archiveEncoding; - } - internal bool IsDirectory { get @@ -30,7 +26,7 @@ internal bool IsDirectory internal Stream? PackedStream { get; set; } - internal IArchiveEncoding ArchiveEncoding { get; } + internal IArchiveEncoding ArchiveEncoding { get; } = archiveEncoding; internal string? Name { get; set; } @@ -44,7 +40,7 @@ internal bool IsDirectory internal long UncompressedSize { get; set; } - internal List Extra { get; set; } + internal List Extra { get; set; } = new(); public string? Password { get; set; } @@ -63,6 +59,24 @@ internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStr return encryptionData; } + internal async ValueTask ComposeEncryptionDataAsync( + Stream archiveStream, + CancellationToken cancellationToken = default + ) + { + if (archiveStream is null) + { + throw new ArgumentNullException(nameof(archiveStream)); + } + + var buffer = new byte[12]; + await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false); + + var encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer); + + return encryptionData; + } + internal WinzipAesEncryptionData? 
WinzipAesEncryptionData { get; set; } /// diff --git a/src/SharpCompress/Common/Zip/Headers/ZipHeader.cs b/src/SharpCompress/Common/Zip/Headers/ZipHeader.cs index 36d40a821..9ce1caa3a 100644 --- a/src/SharpCompress/Common/Zip/Headers/ZipHeader.cs +++ b/src/SharpCompress/Common/Zip/Headers/ZipHeader.cs @@ -1,18 +1,14 @@ using System.IO; +using System.Threading.Tasks; namespace SharpCompress.Common.Zip.Headers; -internal abstract class ZipHeader +internal abstract class ZipHeader(ZipHeaderType type) { - protected ZipHeader(ZipHeaderType type) - { - ZipHeaderType = type; - HasData = true; - } - - internal ZipHeaderType ZipHeaderType { get; } + internal ZipHeaderType ZipHeaderType { get; } = type; internal abstract void Read(BinaryReader reader); + internal abstract ValueTask Read(AsyncBinaryReader reader); - internal bool HasData { get; set; } + internal bool HasData { get; set; } = true; } diff --git a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs index e75727112..7dbf93baa 100644 --- a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common.Zip.Headers; namespace SharpCompress.Common.Zip; @@ -25,9 +27,24 @@ internal override Stream GetCompressedStream() return base.GetCompressedStream(); } + internal override async ValueTask GetCompressedStreamAsync( + CancellationToken cancellationToken = default + ) + { + if (!_isLocalHeaderLoaded) + { + await LoadLocalHeaderAsync(cancellationToken); + _isLocalHeaderLoaded = true; + } + return await base.GetCompressedStreamAsync(cancellationToken); + } + private void LoadLocalHeader() => Header = _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header); + private async ValueTask LoadLocalHeaderAsync(CancellationToken cancellationToken = default) => + Header = await 
_headerFactory.GetLocalHeaderAsync(BaseStream, (DirectoryEntryHeader)Header); + protected override Stream CreateBaseStream() { BaseStream.Position = Header.DataStartPosition.NotNull(); diff --git a/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs index 8d6349586..908566229 100644 --- a/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs +++ b/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading.Tasks; using SharpCompress.Common.Zip.Headers; using SharpCompress.IO; @@ -18,7 +19,74 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory internal SeekableZipHeaderFactory(string? password, IArchiveEncoding archiveEncoding) : base(StreamingMode.Seekable, password, archiveEncoding) { } - internal IEnumerable ReadSeekableHeader(Stream stream) + internal async IAsyncEnumerable ReadSeekableHeaderAsync(Stream stream) + { + var reader = new AsyncBinaryReader(stream); + + await SeekBackToHeaderAsync(stream, reader); + + var eocd_location = stream.Position; + var entry = new DirectoryEndHeader(); + await entry.Read(reader); + + if (entry.IsZip64) + { + _zip64 = true; + + // ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD + stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin); + uint zip64_locator = await reader.ReadUInt32Async(); + if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR) + { + throw new ArchiveException("Failed to locate the Zip64 Directory Locator"); + } + + var zip64Locator = new Zip64DirectoryEndLocatorHeader(); + await zip64Locator.Read(reader); + + stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin); + var zip64Signature = await reader.ReadUInt32Async(); + if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY) + { + throw new ArchiveException("Failed to locate the Zip64 Header"); + } + + var 
zip64Entry = new Zip64DirectoryEndHeader(); + await zip64Entry.Read(reader); + stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); + } + else + { + stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); + } + + var position = stream.Position; + while (true) + { + stream.Position = position; + var signature = await reader.ReadUInt32Async(); + var nextHeader = await ReadHeader(signature, reader, _zip64); + position = stream.Position; + + if (nextHeader is null) + { + yield break; + } + + if (nextHeader is DirectoryEntryHeader entryHeader) + { + //entry could be zero bytes so we need to know that. + entryHeader.HasData = entryHeader.CompressedSize != 0; + yield return entryHeader; + } + else if (nextHeader is DirectoryEndHeader endHeader) + { + yield return endHeader; + } + } + } + + internal IEnumerable ReadSeekableHeader(Stream stream, bool useSync) { var reader = new BinaryReader(stream); @@ -85,6 +153,73 @@ internal IEnumerable ReadSeekableHeader(Stream stream) } } + internal async IAsyncEnumerable ReadSeekableHeaderAsync(Stream stream, bool useSync) + { + var reader = new AsyncBinaryReader(stream); + + await SeekBackToHeaderAsync(stream, reader); + + var eocd_location = stream.Position; + var entry = new DirectoryEndHeader(); + await entry.Read(reader); + + if (entry.IsZip64) + { + _zip64 = true; + + // ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD + stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin); + var zip64_locator = await reader.ReadUInt32Async(); + if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR) + { + throw new ArchiveException("Failed to locate the Zip64 Directory Locator"); + } + + var zip64Locator = new Zip64DirectoryEndLocatorHeader(); + await zip64Locator.Read(reader); + + stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin); + var zip64Signature = await reader.ReadUInt32Async(); + if (zip64Signature != 
ZIP64_END_OF_CENTRAL_DIRECTORY) + { + throw new ArchiveException("Failed to locate the Zip64 Header"); + } + + var zip64Entry = new Zip64DirectoryEndHeader(); + await zip64Entry.Read(reader); + stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); + } + else + { + stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); + } + + var position = stream.Position; + while (true) + { + stream.Position = position; + var signature = await reader.ReadUInt32Async(); + var nextHeader = await ReadHeader(signature, reader, _zip64); + position = stream.Position; + + if (nextHeader is null) + { + yield break; + } + + if (nextHeader is DirectoryEntryHeader entryHeader) + { + //entry could be zero bytes so we need to know that. + entryHeader.HasData = entryHeader.CompressedSize != 0; + yield return entryHeader; + } + else if (nextHeader is DirectoryEndHeader endHeader) + { + yield return endHeader; + } + } + } + private static bool IsMatch(byte[] haystack, int position, byte[] needle) { for (var i = 0; i < needle.Length; i++) @@ -98,6 +233,45 @@ private static bool IsMatch(byte[] haystack, int position, byte[] needle) return true; } + private static async ValueTask SeekBackToHeaderAsync(Stream stream, AsyncBinaryReader reader) + { + // Minimum EOCD length + if (stream.Length < MINIMUM_EOCD_LENGTH) + { + throw new ArchiveException( + "Could not find Zip file Directory at the end of the file. File may be corrupted." + ); + } + + var len = + stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD + ? 
(int)stream.Length + : MAX_SEARCH_LENGTH_FOR_EOCD; + // We search for marker in reverse to find the first occurance + byte[] needle = { 0x06, 0x05, 0x4b, 0x50 }; + + stream.Seek(-len, SeekOrigin.End); + + var seek = await reader.ReadBytesAsync(len); + + // Search in reverse + Array.Reverse(seek); + + // don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files + var max_search_area = len; // - MINIMUM_EOCD_LENGTH; + + for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end) + { + if (IsMatch(seek, pos_from_end, needle)) + { + stream.Seek(-pos_from_end, SeekOrigin.End); + return; + } + } + + throw new ArchiveException("Failed to locate the Zip Header"); + } + private static void SeekBackToHeader(Stream stream, BinaryReader reader) { // Minimum EOCD length @@ -163,4 +337,31 @@ DirectoryEntryHeader directoryEntryHeader } return localEntryHeader; } + + internal async ValueTask GetLocalHeaderAsync( + Stream stream, + DirectoryEntryHeader directoryEntryHeader + ) + { + stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin); + var reader = new AsyncBinaryReader(stream); + var signature = await reader.ReadUInt32Async(); + if (await ReadHeader(signature, reader, _zip64) is not LocalEntryHeader localEntryHeader) + { + throw new InvalidOperationException(); + } + + // populate fields only known from the DirectoryEntryHeader + localEntryHeader.HasData = directoryEntryHeader.HasData; + localEntryHeader.ExternalFileAttributes = directoryEntryHeader.ExternalFileAttributes; + localEntryHeader.Comment = directoryEntryHeader.Comment; + + if (FlagUtility.HasFlag(localEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)) + { + localEntryHeader.Crc = directoryEntryHeader.Crc; + localEntryHeader.CompressedSize = directoryEntryHeader.CompressedSize; + localEntryHeader.UncompressedSize = directoryEntryHeader.UncompressedSize; + } + return localEntryHeader; + } } diff --git 
a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs index 5464a9cc8..312ea1263 100644 --- a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common.Zip.Headers; using SharpCompress.Compressors.Deflate; using SharpCompress.IO; @@ -31,6 +33,28 @@ internal override Stream GetCompressedStream() return _decompressionStream; } + internal override async ValueTask GetCompressedStreamAsync( + CancellationToken cancellationToken = default + ) + { + if (!Header.HasData) + { + return Stream.Null; + } + _decompressionStream = await CreateDecompressionStreamAsync( + await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken) + .ConfigureAwait(false), + Header.CompressionMethod, + cancellationToken + ) + .ConfigureAwait(false); + if (LeaveStreamOpen) + { + return SharpCompressStream.Create(_decompressionStream, leaveOpen: true); + } + return _decompressionStream; + } + internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream) { if (Header.IsDirectory) diff --git a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs index ff52244a4..479a5c2a1 100644 --- a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs +++ b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs @@ -2,6 +2,9 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using SharpCompress.Common; using SharpCompress.Common.Zip.Headers; using SharpCompress.IO; @@ -200,4 +203,331 @@ internal IEnumerable ReadStreamHeader(Stream stream) yield return header; } } + + /// + /// Reads ZIP headers asynchronously for streams that do not support synchronous reads. 
+ /// + internal IAsyncEnumerable ReadStreamHeaderAsync(Stream stream) => + new StreamHeaderAsyncEnumerable(this, stream); + + /// + /// Invokes the shared async header parsing logic on the base factory. + /// + private ValueTask ReadHeaderAsyncInternal( + uint headerBytes, + AsyncBinaryReader reader + ) => ReadHeader(headerBytes, reader); + + /// + /// Exposes the last parsed local entry header to the async enumerator so it can handle streaming data descriptors. + /// + private LocalEntryHeader? LastEntryHeader + { + get => _lastEntryHeader; + set => _lastEntryHeader = value; + } + + /// + /// Produces an async enumerator for streaming ZIP headers. + /// + private sealed class StreamHeaderAsyncEnumerable : IAsyncEnumerable + { + private readonly StreamingZipHeaderFactory _headerFactory; + private readonly Stream _stream; + + public StreamHeaderAsyncEnumerable(StreamingZipHeaderFactory headerFactory, Stream stream) + { + _headerFactory = headerFactory; + _stream = stream; + } + + public IAsyncEnumerator GetAsyncEnumerator( + CancellationToken cancellationToken = default + ) => new StreamHeaderAsyncEnumerator(_headerFactory, _stream, cancellationToken); + } + + /// + /// Async implementation of using to avoid sync reads. + /// + private sealed class StreamHeaderAsyncEnumerator : IAsyncEnumerator, IDisposable + { + private readonly StreamingZipHeaderFactory _headerFactory; + private readonly SharpCompressStream _rewindableStream; + private readonly AsyncBinaryReader _reader; + private readonly CancellationToken _cancellationToken; + private bool _completed; + + public StreamHeaderAsyncEnumerator( + StreamingZipHeaderFactory headerFactory, + Stream stream, + CancellationToken cancellationToken + ) + { + _headerFactory = headerFactory; + _rewindableStream = EnsureSharpCompressStream(stream); + _reader = new AsyncBinaryReader(_rewindableStream, leaveOpen: true); + _cancellationToken = cancellationToken; + } + + private ZipHeader? 
_current; + + public ZipHeader Current => + _current ?? throw new InvalidOperationException("No current header is available."); + + /// + /// Advances to the next ZIP header in the stream, honoring streaming data descriptors where applicable. + /// + public async ValueTask MoveNextAsync() + { + if (_completed) + { + return false; + } + + while (true) + { + _cancellationToken.ThrowIfCancellationRequested(); + + uint headerBytes; + var lastEntryHeader = _headerFactory.LastEntryHeader; + if ( + lastEntryHeader != null + && FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor) + ) + { + if (lastEntryHeader.Part is null) + { + continue; + } + + var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null; + + var crc = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + if (crc == POST_DATA_DESCRIPTOR) + { + crc = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + } + lastEntryHeader.Crc = crc; + + //attempt 32bit read + ulong compressedSize = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + ulong uncompressedSize = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + headerBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + + //check for zip64 sentinel or unexpected header + bool isSentinel = + compressedSize == 0xFFFFFFFF || uncompressedSize == 0xFFFFFFFF; + bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50; + + if (!isHeader && !isSentinel) + { + //reshuffle into 64-bit values + compressedSize = (uncompressedSize << 32) | compressedSize; + uncompressedSize = + ((ulong)headerBytes << 32) + | await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + headerBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + } + else if (isSentinel) + { + //standards-compliant zip64 descriptor + compressedSize = 
await _reader + .ReadUInt64Async(_cancellationToken) + .ConfigureAwait(false); + uncompressedSize = await _reader + .ReadUInt64Async(_cancellationToken) + .ConfigureAwait(false); + } + + lastEntryHeader.CompressedSize = (long)compressedSize; + lastEntryHeader.UncompressedSize = (long)uncompressedSize; + + if (pos.HasValue) + { + lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize; + } + } + else if (lastEntryHeader != null && lastEntryHeader.IsZip64) + { + if (lastEntryHeader.Part is null) + { + continue; + } + + var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null; + + headerBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + + _ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // version + _ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // flags + _ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // compressionMethod + _ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // lastModifiedDate + _ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // lastModifiedTime + + var crc = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + + if (crc == POST_DATA_DESCRIPTOR) + { + crc = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + } + lastEntryHeader.Crc = crc; + + // The DataDescriptor can be either 64bit or 32bit + var compressedSize = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + var uncompressedSize = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + + // Check if we have header or 64bit DataDescriptor + var testHeader = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50); + + var test64Bit = ((long)uncompressedSize << 32) | compressedSize; + if (test64Bit == lastEntryHeader.CompressedSize && testHeader) + { + 
lastEntryHeader.UncompressedSize = + ( + (long) + await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false) << 32 + ) | headerBytes; + headerBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + } + else + { + lastEntryHeader.UncompressedSize = uncompressedSize; + } + + if (pos.HasValue) + { + lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize; + + // 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04) + _rewindableStream.Position = pos.Value + 4; + } + } + else + { + headerBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + } + + _headerFactory.LastEntryHeader = null; + var header = await _headerFactory + .ReadHeaderAsyncInternal(headerBytes, _reader) + .ConfigureAwait(false); + if (header is null) + { + _completed = true; + return false; + } + + //entry could be zero bytes so we need to know that. + if (header.ZipHeaderType == ZipHeaderType.LocalEntry) + { + var localHeader = (LocalEntryHeader)header; + var directoryHeader = _headerFactory._entries?.FirstOrDefault(entry => + entry.Key == localHeader.Name + && localHeader.CompressedSize == 0 + && localHeader.UncompressedSize == 0 + && localHeader.Crc == 0 + && localHeader.IsDirectory == false + ); + + if (directoryHeader != null) + { + localHeader.UncompressedSize = directoryHeader.Size; + localHeader.CompressedSize = directoryHeader.CompressedSize; + localHeader.Crc = (uint)directoryHeader.Crc; + } + + // If we have CompressedSize, there is data to be read + if (localHeader.CompressedSize > 0) + { + header.HasData = true; + } // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor ) + else if (localHeader.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor)) + { + var nextHeaderBytes = await _reader + .ReadUInt32Async(_cancellationToken) + .ConfigureAwait(false); + ((IStreamStack)_rewindableStream).Rewind(sizeof(uint)); + + // Check if next data is PostDataDescriptor, 
streamed file with 0 length + header.HasData = !IsHeader(nextHeaderBytes); + } + else // We are not streaming and compressed size is 0, we have no data + { + header.HasData = false; + } + } + + _current = header; + return true; + } + } + + public ValueTask DisposeAsync() + { + Dispose(); + return default; + } + + /// + /// Disposes the underlying reader (without closing the archive stream). + /// + public void Dispose() + { + _reader.Dispose(); + } + + /// + /// Ensures the stream is a so header parsing can use rewind/buffer helpers. + /// + private static SharpCompressStream EnsureSharpCompressStream(Stream stream) + { + if (stream is SharpCompressStream sharpCompressStream) + { + return sharpCompressStream; + } + + // Ensure the stream is already a SharpCompressStream so the buffer/size is set. + // The original code wrapped this with RewindableStream; use SharpCompressStream so we can get the buffer size. + if (stream is SourceStream src) + { + return new SharpCompressStream( + stream, + src.ReaderOptions.LeaveStreamOpen, + bufferSize: src.ReaderOptions.BufferSize + ); + } + + throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream)); + } + } } diff --git a/src/SharpCompress/Common/Zip/ZipFilePart.cs b/src/SharpCompress/Common/Zip/ZipFilePart.cs index 16eb8e1a9..219f24fae 100644 --- a/src/SharpCompress/Common/Zip/ZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/ZipFilePart.cs @@ -2,6 +2,8 @@ using System.Buffers.Binary; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common.Zip.Headers; using SharpCompress.Compressors; using SharpCompress.Compressors.BZip2; @@ -264,4 +266,244 @@ protected Stream GetCryptoStream(Stream plainStream) } return plainStream; } + + internal override async ValueTask GetCompressedStreamAsync( + CancellationToken cancellationToken = default + ) + { + if (!Header.HasData) + { + return Stream.Null; + } + var decompressionStream = await 
CreateDecompressionStreamAsync( + await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken) + .ConfigureAwait(false), + Header.CompressionMethod, + cancellationToken + ) + .ConfigureAwait(false); + if (LeaveStreamOpen) + { + return SharpCompressStream.Create(decompressionStream, leaveOpen: true); + } + return decompressionStream; + } + + protected async Task GetCryptoStreamAsync( + Stream plainStream, + CancellationToken cancellationToken = default + ) + { + var isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted); + + if (Header.CompressedSize == 0 && isFileEncrypted) + { + throw new NotSupportedException("Cannot encrypt file with unknown size at start."); + } + + if ( + ( + Header.CompressedSize == 0 + && FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) + ) || Header.IsZip64 + ) + { + plainStream = SharpCompressStream.Create(plainStream, leaveOpen: true); //make sure AES doesn't close + } + else + { + plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close + } + + if (isFileEncrypted) + { + switch (Header.CompressionMethod) + { + case ZipCompressionMethod.None: + case ZipCompressionMethod.Shrink: + case ZipCompressionMethod.Reduce1: + case ZipCompressionMethod.Reduce2: + case ZipCompressionMethod.Reduce3: + case ZipCompressionMethod.Reduce4: + case ZipCompressionMethod.Deflate: + case ZipCompressionMethod.Deflate64: + case ZipCompressionMethod.BZip2: + case ZipCompressionMethod.LZMA: + case ZipCompressionMethod.PPMd: + { + return new PkwareTraditionalCryptoStream( + plainStream, + await Header + .ComposeEncryptionDataAsync(plainStream, cancellationToken) + .ConfigureAwait(false), + CryptoMode.Decrypt + ); + } + + case ZipCompressionMethod.WinzipAes: + { + if (Header.WinzipAesEncryptionData != null) + { + return new WinzipAesCryptoStream( + plainStream, + Header.WinzipAesEncryptionData, + Header.CompressedSize - 10 + ); + } + return plainStream; + } + + default: + { + 
throw new InvalidOperationException("Header.CompressionMethod is invalid"); + } + } + } + return plainStream; + } + + protected async Task CreateDecompressionStreamAsync( + Stream stream, + ZipCompressionMethod method, + CancellationToken cancellationToken = default + ) + { + switch (method) + { + case ZipCompressionMethod.None: + { + if (Header.CompressedSize is 0) + { + return new DataDescriptorStream(stream); + } + + return stream; + } + case ZipCompressionMethod.Shrink: + { + return new ShrinkStream( + stream, + CompressionMode.Decompress, + Header.CompressedSize, + Header.UncompressedSize + ); + } + case ZipCompressionMethod.Reduce1: + { + return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1); + } + case ZipCompressionMethod.Reduce2: + { + return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2); + } + case ZipCompressionMethod.Reduce3: + { + return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3); + } + case ZipCompressionMethod.Reduce4: + { + return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4); + } + case ZipCompressionMethod.Explode: + { + return new ExplodeStream( + stream, + Header.CompressedSize, + Header.UncompressedSize, + Header.Flags + ); + } + + case ZipCompressionMethod.Deflate: + { + return new DeflateStream(stream, CompressionMode.Decompress); + } + case ZipCompressionMethod.Deflate64: + { + return new Deflate64Stream(stream, CompressionMode.Decompress); + } + case ZipCompressionMethod.BZip2: + { + return new BZip2Stream(stream, CompressionMode.Decompress, false); + } + case ZipCompressionMethod.LZMA: + { + if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted)) + { + throw new NotSupportedException("LZMA with pkware encryption."); + } + var buffer = new byte[4]; + await stream.ReadFullyAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false); + var version = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(0, 2)); + var 
propsSize = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(2, 2)); + var props = new byte[propsSize]; + await stream + .ReadFullyAsync(props, 0, propsSize, cancellationToken) + .ConfigureAwait(false); + return new LzmaStream( + props, + stream, + Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1, + FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1) + ? -1 + : Header.UncompressedSize + ); + } + case ZipCompressionMethod.Xz: + { + return new XZStream(stream); + } + case ZipCompressionMethod.ZStandard: + { + return new DecompressionStream(stream); + } + case ZipCompressionMethod.PPMd: + { + var props = new byte[2]; + await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false); + return new PpmdStream(new PpmdProperties(props), stream, false); + } + case ZipCompressionMethod.WinzipAes: + { + var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes); + if (data is null) + { + throw new InvalidFormatException("No Winzip AES extra data found."); + } + + if (data.Length != 7) + { + throw new InvalidFormatException("Winzip data length is not 7."); + } + + var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes); + + if (compressedMethod != 0x01 && compressedMethod != 0x02) + { + throw new InvalidFormatException( + "Unexpected vendor version number for WinZip AES metadata" + ); + } + + var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2)); + if (vendorId != 0x4541) + { + throw new InvalidFormatException( + "Unexpected vendor ID for WinZip AES metadata" + ); + } + + return await CreateDecompressionStreamAsync( + stream, + (ZipCompressionMethod) + BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)), + cancellationToken + ); + } + default: + { + throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod); + } + } + } } diff --git a/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs 
b/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs index 865aba44a..7238dda5c 100644 --- a/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs +++ b/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs @@ -1,6 +1,8 @@ using System; using System.IO; using System.Linq; +using System.Threading.Tasks; +using SharpCompress; using SharpCompress.Common.Zip.Headers; using SharpCompress.IO; @@ -34,6 +36,82 @@ IArchiveEncoding archiveEncoding _archiveEncoding = archiveEncoding; } + protected async ValueTask ReadHeader( + uint headerBytes, + AsyncBinaryReader reader, + bool zip64 = false + ) + { + switch (headerBytes) + { + case ENTRY_HEADER_BYTES: + { + var entryHeader = new LocalEntryHeader(_archiveEncoding); + await entryHeader.Read(reader); + await LoadHeaderAsync(entryHeader, reader.BaseStream).ConfigureAwait(false); + + _lastEntryHeader = entryHeader; + return entryHeader; + } + case DIRECTORY_START_HEADER_BYTES: + { + var entry = new DirectoryEntryHeader(_archiveEncoding); + await entry.Read(reader); + return entry; + } + case POST_DATA_DESCRIPTOR: + { + if ( + _lastEntryHeader != null + && FlagUtility.HasFlag( + _lastEntryHeader.NotNull().Flags, + HeaderFlags.UsePostDataDescriptor + ) + ) + { + _lastEntryHeader.Crc = await reader.ReadUInt32Async(); + _lastEntryHeader.CompressedSize = zip64 + ? (long)await reader.ReadUInt64Async() + : await reader.ReadUInt32Async(); + _lastEntryHeader.UncompressedSize = zip64 + ? (long)await reader.ReadUInt64Async() + : await reader.ReadUInt32Async(); + } + else + { + await reader.ReadBytesAsync(zip64 ? 
20 : 12); + } + return null; + } + case DIGITAL_SIGNATURE: + return null; + case DIRECTORY_END_HEADER_BYTES: + { + var entry = new DirectoryEndHeader(); + await entry.Read(reader); + return entry; + } + case SPLIT_ARCHIVE_HEADER_BYTES: + { + return new SplitHeader(); + } + case ZIP64_END_OF_CENTRAL_DIRECTORY: + { + var entry = new Zip64DirectoryEndHeader(); + await entry.Read(reader); + return entry; + } + case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR: + { + var entry = new Zip64DirectoryEndLocatorHeader(); + await entry.Read(reader); + return entry; + } + default: + return null; + } + } + protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false) { switch (headerBytes) @@ -205,4 +283,82 @@ private void LoadHeader(ZipFileEntry entryHeader, Stream stream) //} } + + /// + /// Loads encryption metadata and stream positioning for a header using async reads where needed. + /// + private async ValueTask LoadHeaderAsync(ZipFileEntry entryHeader, Stream stream) + { + if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted)) + { + if ( + !entryHeader.IsDirectory + && entryHeader.CompressedSize == 0 + && FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor) + ) + { + throw new NotSupportedException( + "SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner." 
+ ); + } + + if (_password is null) + { + throw new CryptographicException("No password supplied for encrypted zip."); + } + + entryHeader.Password = _password; + + if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes) + { + var data = entryHeader.Extra.SingleOrDefault(x => + x.Type == ExtraDataType.WinZipAes + ); + if (data != null) + { + var keySize = (WinzipAesKeySize)data.DataBytes[4]; + + var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2]; + var passwordVerifyValue = new byte[2]; + await stream.ReadExactAsync(salt, 0, salt.Length).ConfigureAwait(false); + await stream.ReadExactAsync(passwordVerifyValue, 0, 2).ConfigureAwait(false); + + entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData( + keySize, + salt, + passwordVerifyValue, + _password + ); + + entryHeader.CompressedSize -= (uint)(salt.Length + 2); + } + } + } + + if (entryHeader.IsDirectory) + { + return; + } + + switch (_mode) + { + case StreamingMode.Seekable: + { + entryHeader.DataStartPosition = stream.Position; + stream.Position += entryHeader.CompressedSize; + break; + } + + case StreamingMode.Streaming: + { + entryHeader.PackedStream = stream; + break; + } + + default: + { + throw new InvalidFormatException("Invalid StreamingMode"); + } + } + } } diff --git a/src/SharpCompress/Compressors/ADC/ADCBase.cs b/src/SharpCompress/Compressors/ADC/ADCBase.cs index 35301b526..ad5218986 100644 --- a/src/SharpCompress/Compressors/ADC/ADCBase.cs +++ b/src/SharpCompress/Compressors/ADC/ADCBase.cs @@ -104,7 +104,7 @@ public static int Decompress(byte[] input, out byte[]? 
output, int bufferSize = /// Max size for decompressed data /// Cancellation token /// Result containing bytes read and decompressed data - public static async Task DecompressAsync( + public static async ValueTask DecompressAsync( byte[] input, int bufferSize = 262144, CancellationToken cancellationToken = default @@ -117,7 +117,7 @@ public static async Task DecompressAsync( /// Max size for decompressed data /// Cancellation token /// Result containing bytes read and decompressed data - public static async Task DecompressAsync( + public static async ValueTask DecompressAsync( Stream input, int bufferSize = 262144, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs index e2a757c62..d3c10f9b7 100644 --- a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs +++ b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs @@ -400,7 +400,7 @@ private void finish() } } - private async Task finishAsync(CancellationToken cancellationToken = default) + private async ValueTask finishAsync(CancellationToken cancellationToken = default) { if (_z is null) { @@ -646,7 +646,9 @@ private string ReadZeroTerminatedString() return _encoding.GetString(buffer, 0, buffer.Length); } - private async Task ReadZeroTerminatedStringAsync(CancellationToken cancellationToken) + private async ValueTask ReadZeroTerminatedStringAsync( + CancellationToken cancellationToken + ) { var list = new List(); var done = false; @@ -729,7 +731,9 @@ private int _ReadAndValidateGzipHeader() return totalBytesRead; } - private async Task _ReadAndValidateGzipHeaderAsync(CancellationToken cancellationToken) + private async ValueTask _ReadAndValidateGzipHeaderAsync( + CancellationToken cancellationToken + ) { var totalBytesRead = 0; diff --git a/src/SharpCompress/Compressors/LZMA/LZ/LzOutWindow.cs b/src/SharpCompress/Compressors/LZMA/LZ/LzOutWindow.cs index 276456fba..0866f718f 100644 --- 
a/src/SharpCompress/Compressors/LZMA/LZ/LzOutWindow.cs +++ b/src/SharpCompress/Compressors/LZMA/LZ/LzOutWindow.cs @@ -87,7 +87,7 @@ public void ReleaseStream() _stream = null; } - public async Task ReleaseStreamAsync(CancellationToken cancellationToken = default) + public async ValueTask ReleaseStreamAsync(CancellationToken cancellationToken = default) { await FlushAsync(cancellationToken).ConfigureAwait(false); _stream = null; @@ -112,7 +112,7 @@ private void Flush() _streamPos = _pos; } - private async Task FlushAsync(CancellationToken cancellationToken = default) + private async ValueTask FlushAsync(CancellationToken cancellationToken = default) { if (_stream is null) { @@ -303,7 +303,7 @@ public int CopyStream(Stream stream, int len) return len - size; } - public async Task CopyStreamAsync( + public async ValueTask CopyStreamAsync( Stream stream, int len, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Compressors/LZMA/LzmaStream.cs b/src/SharpCompress/Compressors/LZMA/LzmaStream.cs index 77e4c4947..26079966c 100644 --- a/src/SharpCompress/Compressors/LZMA/LzmaStream.cs +++ b/src/SharpCompress/Compressors/LZMA/LzmaStream.cs @@ -429,7 +429,7 @@ private async ValueTask DecodeChunkHeaderAsync(CancellationToken cancellationTok { var controlBuffer = new byte[1]; await _inputStream - .ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken) + .ReadExactAsync(controlBuffer, 0, 1, cancellationToken) .ConfigureAwait(false); var control = controlBuffer[0]; _inputPosition++; @@ -458,13 +458,13 @@ await _inputStream _availableBytes = (control & 0x1F) << 16; var buffer = new byte[2]; await _inputStream - .ReadExactlyAsync(buffer, 0, 2, cancellationToken) + .ReadExactAsync(buffer, 0, 2, cancellationToken) .ConfigureAwait(false); _availableBytes += (buffer[0] << 8) + buffer[1] + 1; _inputPosition += 2; await _inputStream - .ReadExactlyAsync(buffer, 0, 2, cancellationToken) + .ReadExactAsync(buffer, 0, 2, cancellationToken) .ConfigureAwait(false); 
_rangeDecoderLimit = (buffer[0] << 8) + buffer[1] + 1; _inputPosition += 2; @@ -473,7 +473,7 @@ await _inputStream { _needProps = false; await _inputStream - .ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken) + .ReadExactAsync(controlBuffer, 0, 1, cancellationToken) .ConfigureAwait(false); Properties[0] = controlBuffer[0]; _inputPosition++; @@ -502,7 +502,7 @@ await _inputStream _uncompressedChunk = true; var buffer = new byte[2]; await _inputStream - .ReadExactlyAsync(buffer, 0, 2, cancellationToken) + .ReadExactAsync(buffer, 0, 2, cancellationToken) .ConfigureAwait(false); _availableBytes = (buffer[0] << 8) + buffer[1] + 1; _inputPosition += 2; diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs b/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs index 19b0f3748..b57cd53f5 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs @@ -53,39 +53,4 @@ public static void Assert(bool expression) throw new InvalidOperationException("Assertion failed."); } } - - public static void ReadExact(this Stream stream, byte[] buffer, int offset, int length) - { - if (stream is null) - { - throw new ArgumentNullException(nameof(stream)); - } - - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } - - if (offset < 0 || offset > buffer.Length) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } - - if (length < 0 || length > buffer.Length - offset) - { - throw new ArgumentOutOfRangeException(nameof(length)); - } - - while (length > 0) - { - var fetched = stream.Read(buffer, offset, length); - if (fetched <= 0) - { - throw new EndOfStreamException(); - } - - offset += fetched; - length -= fetched; - } - } } diff --git a/src/SharpCompress/Compressors/Rar/RarStream.cs b/src/SharpCompress/Compressors/Rar/RarStream.cs index a4869075a..7f258bc5b 100644 --- a/src/SharpCompress/Compressors/Rar/RarStream.cs +++ b/src/SharpCompress/Compressors/Rar/RarStream.cs @@ 
-68,7 +68,7 @@ public void Initialize() _position = 0; } - public async Task InitializeAsync(CancellationToken cancellationToken = default) + public async ValueTask InitializeAsync(CancellationToken cancellationToken = default) { fetch = true; await unpack.DoUnpackAsync(fileHeader, readStream, this, cancellationToken); diff --git a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs index f5613d661..6f7a863ba 100644 --- a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs +++ b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs @@ -58,7 +58,7 @@ public static async Task ReadXZIntegerAsync( MaxBytes = 9; } - var LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false); + var LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); var Output = (ulong)LastByte & 0x7F; var i = 0; @@ -69,7 +69,7 @@ public static async Task ReadXZIntegerAsync( throw new InvalidFormatException(); } - LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false); + LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); if (LastByte == 0) { throw new InvalidFormatException(); @@ -79,37 +79,4 @@ public static async Task ReadXZIntegerAsync( } return Output; } - - public static async Task ReadByteAsync( - this BinaryReader reader, - CancellationToken cancellationToken = default - ) - { - var buffer = new byte[1]; - var bytesRead = await reader - .BaseStream.ReadAsync(buffer, 0, 1, cancellationToken) - .ConfigureAwait(false); - if (bytesRead != 1) - { - throw new EndOfStreamException(); - } - return buffer[0]; - } - - public static async Task ReadBytesAsync( - this BinaryReader reader, - int count, - CancellationToken cancellationToken = default - ) - { - var buffer = new byte[count]; - var bytesRead = await reader - .BaseStream.ReadAsync(buffer, 0, count, cancellationToken) - .ConfigureAwait(false); - if (bytesRead != count) - { - throw new 
EndOfStreamException(); - } - return buffer; - } } diff --git a/src/SharpCompress/Compressors/Xz/XZBlock.cs b/src/SharpCompress/Compressors/Xz/XZBlock.cs index 45e11745a..7c18e3b41 100644 --- a/src/SharpCompress/Compressors/Xz/XZBlock.cs +++ b/src/SharpCompress/Compressors/Xz/XZBlock.cs @@ -132,7 +132,7 @@ private void SkipPadding() _paddingSkipped = true; } - private async Task SkipPaddingAsync(CancellationToken cancellationToken = default) + private async ValueTask SkipPaddingAsync(CancellationToken cancellationToken = default) { var bytes = (BaseStream.Position - _startPosition) % 4; if (bytes > 0) @@ -158,7 +158,7 @@ private void CheckCrc() _crcChecked = true; } - private async Task CheckCrcAsync(CancellationToken cancellationToken = default) + private async ValueTask CheckCrcAsync(CancellationToken cancellationToken = default) { var crc = new byte[_checkSize]; await BaseStream.ReadAsync(crc, 0, _checkSize, cancellationToken).ConfigureAwait(false); @@ -194,7 +194,7 @@ private void LoadHeader() HeaderIsLoaded = true; } - private async Task LoadHeaderAsync(CancellationToken cancellationToken = default) + private async ValueTask LoadHeaderAsync(CancellationToken cancellationToken = default) { await ReadHeaderSizeAsync(cancellationToken).ConfigureAwait(false); var headerCache = await CacheHeaderAsync(cancellationToken).ConfigureAwait(false); @@ -218,7 +218,7 @@ private void ReadHeaderSize() } } - private async Task ReadHeaderSizeAsync(CancellationToken cancellationToken = default) + private async ValueTask ReadHeaderSizeAsync(CancellationToken cancellationToken = default) { var buffer = new byte[1]; await BaseStream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false); @@ -249,7 +249,7 @@ private byte[] CacheHeader() return blockHeaderWithoutCrc; } - private async Task CacheHeaderAsync(CancellationToken cancellationToken = default) + private async ValueTask CacheHeaderAsync(CancellationToken cancellationToken = default) { var blockHeaderWithoutCrc = new 
byte[BlockHeaderSize - 4]; blockHeaderWithoutCrc[0] = _blockHeaderSizeByte; diff --git a/src/SharpCompress/Compressors/Xz/XZFooter.cs b/src/SharpCompress/Compressors/Xz/XZFooter.cs index 09c95b1a0..67b68be7c 100644 --- a/src/SharpCompress/Compressors/Xz/XZFooter.cs +++ b/src/SharpCompress/Compressors/Xz/XZFooter.cs @@ -62,7 +62,7 @@ public void Process() } } - public async Task ProcessAsync(CancellationToken cancellationToken = default) + public async ValueTask ProcessAsync(CancellationToken cancellationToken = default) { var crc = await _reader .BaseStream.ReadLittleEndianUInt32Async(cancellationToken) diff --git a/src/SharpCompress/Compressors/Xz/XZHeader.cs b/src/SharpCompress/Compressors/Xz/XZHeader.cs index 1aee619bb..39f706b1d 100644 --- a/src/SharpCompress/Compressors/Xz/XZHeader.cs +++ b/src/SharpCompress/Compressors/Xz/XZHeader.cs @@ -41,7 +41,7 @@ public void Process() ProcessStreamFlags(); } - public async Task ProcessAsync(CancellationToken cancellationToken = default) + public async ValueTask ProcessAsync(CancellationToken cancellationToken = default) { CheckMagicBytes(await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false)); await ProcessStreamFlagsAsync(cancellationToken).ConfigureAwait(false); @@ -65,7 +65,7 @@ private void ProcessStreamFlags() } } - private async Task ProcessStreamFlagsAsync(CancellationToken cancellationToken = default) + private async ValueTask ProcessStreamFlagsAsync(CancellationToken cancellationToken = default) { var streamFlags = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false); var crc = await _reader diff --git a/src/SharpCompress/Compressors/Xz/XZIndex.cs b/src/SharpCompress/Compressors/Xz/XZIndex.cs index 9c5230911..3bc8ea422 100644 --- a/src/SharpCompress/Compressors/Xz/XZIndex.cs +++ b/src/SharpCompress/Compressors/Xz/XZIndex.cs @@ -41,7 +41,7 @@ public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified) return index; } - public static async Task 
FromStreamAsync( + public static async ValueTask FromStreamAsync( Stream stream, bool indexMarkerAlreadyVerified, CancellationToken cancellationToken = default @@ -71,7 +71,7 @@ public void Process() VerifyCrc32(); } - public async Task ProcessAsync(CancellationToken cancellationToken = default) + public async ValueTask ProcessAsync(CancellationToken cancellationToken = default) { if (!_indexMarkerAlreadyVerified) { @@ -100,7 +100,7 @@ private void VerifyIndexMarker() } } - private async Task VerifyIndexMarkerAsync(CancellationToken cancellationToken = default) + private async ValueTask VerifyIndexMarkerAsync(CancellationToken cancellationToken = default) { var marker = await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); if (marker != 0) @@ -122,7 +122,7 @@ private void SkipPadding() } } - private async Task SkipPaddingAsync(CancellationToken cancellationToken = default) + private async ValueTask SkipPaddingAsync(CancellationToken cancellationToken = default) { var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4; if (bytes > 0) @@ -143,7 +143,7 @@ private void VerifyCrc32() // TODO verify this matches } - private async Task VerifyCrc32Async(CancellationToken cancellationToken = default) + private async ValueTask VerifyCrc32Async(CancellationToken cancellationToken = default) { var crc = await _reader .BaseStream.ReadLittleEndianUInt32Async(cancellationToken) diff --git a/src/SharpCompress/Compressors/Xz/XZStream.cs b/src/SharpCompress/Compressors/Xz/XZStream.cs index ebd0924ed..1e3051d6b 100644 --- a/src/SharpCompress/Compressors/Xz/XZStream.cs +++ b/src/SharpCompress/Compressors/Xz/XZStream.cs @@ -142,7 +142,7 @@ private void ReadHeader() HeaderIsRead = true; } - private async Task ReadHeaderAsync(CancellationToken cancellationToken = default) + private async ValueTask ReadHeaderAsync(CancellationToken cancellationToken = default) { Header = await XZHeader .FromStreamAsync(BaseStream, cancellationToken) @@ -153,7 +153,7 @@ 
private async Task ReadHeaderAsync(CancellationToken cancellationToken = default private void ReadIndex() => Index = XZIndex.FromStream(BaseStream, true); - private async Task ReadIndexAsync(CancellationToken cancellationToken = default) => + private async ValueTask ReadIndexAsync(CancellationToken cancellationToken = default) => Index = await XZIndex .FromStreamAsync(BaseStream, true, cancellationToken) .ConfigureAwait(false); @@ -162,7 +162,7 @@ private async Task ReadIndexAsync(CancellationToken cancellationToken = default) private void ReadFooter() => Footer = XZFooter.FromStream(BaseStream); // TODO verify footer - private async Task ReadFooterAsync(CancellationToken cancellationToken = default) => + private async ValueTask ReadFooterAsync(CancellationToken cancellationToken = default) => Footer = await XZFooter .FromStreamAsync(BaseStream, cancellationToken) .ConfigureAwait(false); @@ -202,7 +202,7 @@ private int ReadBlocks(byte[] buffer, int offset, int count) return bytesRead; } - private async Task ReadBlocksAsync( + private async ValueTask ReadBlocksAsync( byte[] buffer, int offset, int count, diff --git a/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs index 92de03b34..af8865b41 100644 --- a/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs +++ b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs @@ -77,7 +77,7 @@ public void LoadDictionary(byte[] dict) #if !NETSTANDARD2_0 && !NETFRAMEWORK public override async ValueTask DisposeAsync() #else - public async Task DisposeAsync() + public async ValueTask DisposeAsync() #endif { if (compressor == null) @@ -137,7 +137,7 @@ await FlushInternalAsync(ZSTD_EndDirective.ZSTD_e_flush, cancellationToken) private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive); - private async Task FlushInternalAsync( + private async ValueTask FlushInternalAsync( ZSTD_EndDirective directive, 
CancellationToken cancellationToken = default ) => await WriteInternalAsync(null, directive, cancellationToken).ConfigureAwait(false); @@ -183,7 +183,7 @@ private async ValueTask WriteInternalAsync( CancellationToken cancellationToken = default ) #else - private async Task WriteInternalAsync( + private async ValueTask WriteInternalAsync( ReadOnlyMemory? buffer, ZSTD_EndDirective directive, CancellationToken cancellationToken = default @@ -235,14 +235,16 @@ await WriteInternalAsync(buffer, ZSTD_EndDirective.ZSTD_e_continue, cancellation .ConfigureAwait(false); #else - public override Task WriteAsync( + public override async Task WriteAsync( byte[] buffer, int offset, int count, CancellationToken cancellationToken - ) => WriteAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken); + ) => + await WriteAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken) + .ConfigureAwait(false); - public async Task WriteAsync( + public async ValueTask WriteAsync( ReadOnlyMemory buffer, CancellationToken cancellationToken = default ) => diff --git a/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs index 9864a8055..78af43513 100644 --- a/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs +++ b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs @@ -177,9 +177,9 @@ public override Task ReadAsync( int offset, int count, CancellationToken cancellationToken - ) => ReadAsync(new Memory(buffer, offset, count), cancellationToken); + ) => ReadAsync(new Memory(buffer, offset, count), cancellationToken).AsTask(); - public async Task ReadAsync( + public async ValueTask ReadAsync( Memory buffer, CancellationToken cancellationToken = default ) diff --git a/src/SharpCompress/Factories/AceFactory.cs b/src/SharpCompress/Factories/AceFactory.cs index 5b80ae24f..95f647ddb 100644 --- a/src/SharpCompress/Factories/AceFactory.cs +++ b/src/SharpCompress/Factories/AceFactory.cs @@ 
-3,6 +3,7 @@ using System.IO; using System.Linq; using System.Text; +using System.Threading; using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Ace.Headers; @@ -26,12 +27,21 @@ public override bool IsArchive( Stream stream, string? password = null, int bufferSize = ReaderOptions.DefaultBufferSize - ) - { - return AceHeader.IsArchive(stream); - } + ) => AceHeader.IsArchive(stream); public IReader OpenReader(Stream stream, ReaderOptions? options) => AceReader.Open(stream, options); + + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken = default + ) => new(AceReader.Open(stream, options)); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); } } diff --git a/src/SharpCompress/Factories/ArcFactory.cs b/src/SharpCompress/Factories/ArcFactory.cs index b5180afae..37984112a 100644 --- a/src/SharpCompress/Factories/ArcFactory.cs +++ b/src/SharpCompress/Factories/ArcFactory.cs @@ -4,6 +4,7 @@ using System.Linq; using System.Security.Cryptography; using System.Text; +using System.Threading; using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Readers; @@ -42,5 +43,17 @@ public override bool IsArchive( public IReader OpenReader(Stream stream, ReaderOptions? options) => ArcReader.Open(stream, options); + + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken = default + ) => new(ArcReader.Open(stream, options)); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? 
password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); } } diff --git a/src/SharpCompress/Factories/ArjFactory.cs b/src/SharpCompress/Factories/ArjFactory.cs index f6f7a3934..6e5f7a309 100644 --- a/src/SharpCompress/Factories/ArjFactory.cs +++ b/src/SharpCompress/Factories/ArjFactory.cs @@ -3,6 +3,7 @@ using System.IO; using System.Linq; using System.Text; +using System.Threading; using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Arj.Headers; @@ -33,5 +34,17 @@ public override bool IsArchive( public IReader OpenReader(Stream stream, ReaderOptions? options) => ArjReader.Open(stream, options); + + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken = default + ) => new(ArjReader.Open(stream, options)); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); } } diff --git a/src/SharpCompress/Factories/Factory.cs b/src/SharpCompress/Factories/Factory.cs index 4651ccb22..f28f3d249 100644 --- a/src/SharpCompress/Factories/Factory.cs +++ b/src/SharpCompress/Factories/Factory.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Readers; @@ -57,6 +59,24 @@ public abstract bool IsArchive( int bufferSize = ReaderOptions.DefaultBufferSize ); + public abstract ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ); + + /// + public virtual ValueTask IsArchiveAsync( + Stream stream, + string? 
password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(IsArchive(stream, password, bufferSize)); + } + /// public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null; @@ -92,4 +112,34 @@ out IReader? reader return false; } + + internal virtual async ValueTask<(bool, IAsyncReader?)> TryOpenReaderAsync( + SharpCompressStream stream, + ReaderOptions options, + CancellationToken cancellationToken + ) + { + if (this is IReaderFactory readerFactory) + { + long pos = ((IStreamStack)stream).GetPosition(); + + if ( + await IsArchiveAsync( + stream, + options.Password, + options.BufferSize, + cancellationToken + ) + ) + { + ((IStreamStack)stream).StackSeek(pos); + return ( + true, + await readerFactory.OpenReaderAsync(stream, options, cancellationToken) + ); + } + } + + return (false, null); + } } diff --git a/src/SharpCompress/Factories/GZipFactory.cs b/src/SharpCompress/Factories/GZipFactory.cs index 17f344cf0..48f5c63e5 100644 --- a/src/SharpCompress/Factories/GZipFactory.cs +++ b/src/SharpCompress/Factories/GZipFactory.cs @@ -1,6 +1,8 @@ using System.Collections.Generic; using System.IO; using System.IO.Compression; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Archives.GZip; using SharpCompress.Archives.Tar; @@ -46,6 +48,14 @@ public override bool IsArchive( int bufferSize = ReaderOptions.DefaultBufferSize ) => GZipArchive.IsGZipFile(stream); + /// + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) => GZipArchive.IsGZipFileAsync(stream, cancellationToken); + #endregion #region IArchiveFactory @@ -54,10 +64,30 @@ public override bool IsArchive( public IArchive Open(Stream stream, ReaderOptions? 
readerOptions = null) => GZipArchive.Open(stream, readerOptions); + /// + public ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => GZipArchive.OpenAsync(stream, readerOptions, cancellationToken); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); + /// public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => GZipArchive.Open(fileInfo, readerOptions); + /// + public ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken); + #endregion #region IMultiArchiveFactory @@ -66,10 +96,24 @@ public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => public IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null) => GZipArchive.Open(streams, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => GZipArchive.OpenAsync(streams, readerOptions, cancellationToken); + /// public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOptions = null) => GZipArchive.Open(fileInfos, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken); + #endregion #region IReaderFactory @@ -108,6 +152,17 @@ out IReader? reader public IReader OpenReader(Stream stream, ReaderOptions? options) => GZipReader.Open(stream, options); + /// + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? 
options, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(GZipReader.Open(stream, options)); + } + #endregion #region IWriterFactory @@ -122,6 +177,17 @@ public IWriter Open(Stream stream, WriterOptions writerOptions) return new GZipWriter(stream, new GZipWriterOptions(writerOptions)); } + /// + public ValueTask OpenAsync( + Stream stream, + WriterOptions writerOptions, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, writerOptions)); + } + #endregion #region IWriteableArchiveFactory diff --git a/src/SharpCompress/Factories/IFactory.cs b/src/SharpCompress/Factories/IFactory.cs index 63d5eeec6..a9dd4f5ac 100644 --- a/src/SharpCompress/Factories/IFactory.cs +++ b/src/SharpCompress/Factories/IFactory.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Readers; namespace SharpCompress.Factories; @@ -42,6 +44,20 @@ bool IsArchive( int bufferSize = ReaderOptions.DefaultBufferSize ); + /// + /// Returns true if the stream represents an archive of the format defined by this type asynchronously. + /// + /// A stream, pointing to the beginning of the archive. + /// optional password + /// buffer size for reading + /// cancellation token + ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ); + /// /// From a passed in archive (zip, rar, 7z, 001), return all parts. 
/// diff --git a/src/SharpCompress/Factories/RarFactory.cs b/src/SharpCompress/Factories/RarFactory.cs index 610999057..fb9e03abb 100644 --- a/src/SharpCompress/Factories/RarFactory.cs +++ b/src/SharpCompress/Factories/RarFactory.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Archives.Rar; using SharpCompress.Common; @@ -47,10 +49,30 @@ public override bool IsArchive( public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) => RarArchive.Open(stream, readerOptions); + /// + public ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => RarArchive.OpenAsync(stream, readerOptions, cancellationToken); + /// public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => RarArchive.Open(fileInfo, readerOptions); + /// + public ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => RarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); + #endregion #region IMultiArchiveFactory @@ -59,10 +81,24 @@ public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => public IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null) => RarArchive.Open(streams, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => RarArchive.OpenAsync(streams, readerOptions, cancellationToken); + /// public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? 
readerOptions = null) => RarArchive.Open(fileInfos, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => RarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken); + #endregion #region IReaderFactory @@ -71,5 +107,16 @@ public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOpt public IReader OpenReader(Stream stream, ReaderOptions? options) => RarReader.Open(stream, options); + /// + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(RarReader.Open(stream, options)); + } + #endregion } diff --git a/src/SharpCompress/Factories/SevenZipFactory.cs b/src/SharpCompress/Factories/SevenZipFactory.cs index 18dedbfdd..c387e3b30 100644 --- a/src/SharpCompress/Factories/SevenZipFactory.cs +++ b/src/SharpCompress/Factories/SevenZipFactory.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Archives.SevenZip; using SharpCompress.Common; @@ -42,10 +44,30 @@ public override bool IsArchive( public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) => SevenZipArchive.Open(stream, readerOptions); + /// + public ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => SevenZipArchive.OpenAsync(stream, readerOptions, cancellationToken); + /// public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => SevenZipArchive.Open(fileInfo, readerOptions); + /// + public ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) => SevenZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); + #endregion #region IMultiArchiveFactory @@ -54,10 +76,24 @@ public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => public IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null) => SevenZipArchive.Open(streams, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => SevenZipArchive.OpenAsync(streams, readerOptions, cancellationToken); + /// public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOptions = null) => SevenZipArchive.Open(fileInfos, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => SevenZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken); + #endregion #region reader diff --git a/src/SharpCompress/Factories/TarFactory.cs b/src/SharpCompress/Factories/TarFactory.cs index d32020fd7..4e22e0bd4 100644 --- a/src/SharpCompress/Factories/TarFactory.cs +++ b/src/SharpCompress/Factories/TarFactory.cs @@ -2,6 +2,8 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Archives.Tar; using SharpCompress.Common; @@ -59,6 +61,12 @@ public override bool IsArchive( int bufferSize = ReaderOptions.DefaultBufferSize ) => TarArchive.IsTarFile(stream); + public override ValueTask IsArchiveAsync( + Stream stream, + string? 
password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); + #endregion #region IArchiveFactory @@ -67,10 +75,24 @@ public override bool IsArchive( public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) => TarArchive.Open(stream, readerOptions); + /// + public ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => TarArchive.OpenAsync(stream, readerOptions, cancellationToken); + /// public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => TarArchive.Open(fileInfo, readerOptions); + /// + public ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken); + #endregion #region IMultiArchiveFactory @@ -79,10 +101,24 @@ public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => public IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null) => TarArchive.Open(streams, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => TarArchive.OpenAsync(streams, readerOptions, cancellationToken); + /// public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOptions = null) => TarArchive.Open(fileInfos, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken); + #endregion #region IReaderFactory @@ -234,6 +270,17 @@ TestOption testOption public IReader OpenReader(Stream stream, ReaderOptions? options) => TarReader.Open(stream, options); + /// + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? 
options, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(TarReader.Open(stream, options)); + } + #endregion #region IWriterFactory @@ -242,6 +289,17 @@ public IReader OpenReader(Stream stream, ReaderOptions? options) => public IWriter Open(Stream stream, WriterOptions writerOptions) => new TarWriter(stream, new TarWriterOptions(writerOptions)); + /// + public ValueTask OpenAsync( + Stream stream, + WriterOptions writerOptions, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, writerOptions)); + } + #endregion #region IWriteableArchiveFactory diff --git a/src/SharpCompress/Factories/ZStandardFactory.cs b/src/SharpCompress/Factories/ZStandardFactory.cs index a5c6d84f2..d534a8bb3 100644 --- a/src/SharpCompress/Factories/ZStandardFactory.cs +++ b/src/SharpCompress/Factories/ZStandardFactory.cs @@ -25,4 +25,10 @@ public override bool IsArchive( string? password = null, int bufferSize = 65536 ) => ZStandardStream.IsZStandard(stream); + + public override ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); } diff --git a/src/SharpCompress/Factories/ZipFactory.cs b/src/SharpCompress/Factories/ZipFactory.cs index 5c2fcad8d..a9e62f142 100644 --- a/src/SharpCompress/Factories/ZipFactory.cs +++ b/src/SharpCompress/Factories/ZipFactory.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Archives; using SharpCompress.Archives.Zip; using SharpCompress.Common; @@ -79,6 +81,55 @@ public override bool IsArchive( return false; } + public override ValueTask IsArchiveAsync( + Stream stream, + string? 
password = null, + int bufferSize = ReaderOptions.DefaultBufferSize + ) => new(IsArchive(stream, password, bufferSize)); + + /// + public override async ValueTask IsArchiveAsync( + Stream stream, + string? password = null, + int bufferSize = ReaderOptions.DefaultBufferSize, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + var startPosition = stream.CanSeek ? stream.Position : -1; + + // probe for single volume zip + + if (stream is not SharpCompressStream) // wrap to provide buffer bef + { + stream = new SharpCompressStream(stream, bufferSize: bufferSize); + } + + if (await ZipArchive.IsZipFileAsync(stream, password, bufferSize, cancellationToken)) + { + return true; + } + + // probe for a multipart zip + + if (!stream.CanSeek) + { + return false; + } + + stream.Position = startPosition; + + //test the zip (last) file of a multipart zip + if (await ZipArchive.IsZipMultiAsync(stream, password, bufferSize, cancellationToken)) + { + return true; + } + + stream.Position = startPosition; + + return false; + } + /// public override FileInfo? GetFilePart(int index, FileInfo part1) => ZipArchiveVolumeFactory.GetFilePart(index, part1); @@ -91,10 +142,24 @@ public override bool IsArchive( public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) => ZipArchive.Open(stream, readerOptions); + /// + public ValueTask OpenAsync( + Stream stream, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => ZipArchive.OpenAsync(stream, readerOptions, cancellationToken); + /// public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => ZipArchive.Open(fileInfo, readerOptions); + /// + public ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? 
readerOptions = null, + CancellationToken cancellationToken = default + ) => ZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken); + #endregion #region IMultiArchiveFactory @@ -103,10 +168,24 @@ public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) => public IArchive Open(IReadOnlyList streams, ReaderOptions? readerOptions = null) => ZipArchive.Open(streams, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList streams, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => ZipArchive.OpenAsync(streams, readerOptions, cancellationToken); + /// public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOptions = null) => ZipArchive.Open(fileInfos, readerOptions); + /// + public ValueTask OpenAsync( + IReadOnlyList fileInfos, + ReaderOptions? readerOptions = null, + CancellationToken cancellationToken = default + ) => ZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken); + #endregion #region IReaderFactory @@ -115,6 +194,17 @@ public IArchive Open(IReadOnlyList fileInfos, ReaderOptions? readerOpt public IReader OpenReader(Stream stream, ReaderOptions? options) => ZipReader.Open(stream, options); + /// + public ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(ZipReader.Open(stream, options)); + } + #endregion #region IWriterFactory @@ -123,6 +213,17 @@ public IReader OpenReader(Stream stream, ReaderOptions? 
options) => public IWriter Open(Stream stream, WriterOptions writerOptions) => new ZipWriter(stream, new ZipWriterOptions(writerOptions)); + /// + public ValueTask OpenAsync( + Stream stream, + WriterOptions writerOptions, + CancellationToken cancellationToken = default + ) + { + cancellationToken.ThrowIfCancellationRequested(); + return new(Open(stream, writerOptions)); + } + #endregion #region IWriteableArchiveFactory diff --git a/src/SharpCompress/LazyAsyncReadOnlyCollection.cs b/src/SharpCompress/LazyAsyncReadOnlyCollection.cs new file mode 100644 index 000000000..85caf6113 --- /dev/null +++ b/src/SharpCompress/LazyAsyncReadOnlyCollection.cs @@ -0,0 +1,103 @@ +#nullable disable +using System; +using System.Collections; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace SharpCompress; + +internal sealed class LazyAsyncReadOnlyCollection(IAsyncEnumerable source) + : IAsyncEnumerable +{ + private readonly List backing = new(); + private readonly IAsyncEnumerator source = source.GetAsyncEnumerator(); + private bool fullyLoaded; + + private class LazyLoader( + LazyAsyncReadOnlyCollection lazyReadOnlyCollection, + CancellationToken cancellationToken + ) : IAsyncEnumerator + { + private bool disposed; + private int index = -1; + + public ValueTask DisposeAsync() + { + if (!disposed) + { + disposed = true; + } + return default; + } + + public async ValueTask MoveNextAsync() + { + cancellationToken.ThrowIfCancellationRequested(); + if (index + 1 < lazyReadOnlyCollection.backing.Count) + { + index++; + return true; + } + if ( + !lazyReadOnlyCollection.fullyLoaded + && await lazyReadOnlyCollection.source.MoveNextAsync() + ) + { + lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current); + index++; + return true; + } + lazyReadOnlyCollection.fullyLoaded = true; + return false; + } + + #region IEnumerator Members + + public T Current => lazyReadOnlyCollection.backing[index]; + + #endregion + + #region 
IDisposable Members + + public void Dispose() + { + if (!disposed) + { + disposed = true; + } + } + + #endregion + } + + internal async ValueTask EnsureFullyLoaded() + { + if (!fullyLoaded) + { + var loader = new LazyLoader(this, CancellationToken.None); + while (await loader.MoveNextAsync()) + { + // Intentionally empty + } + fullyLoaded = true; + } + } + + internal IEnumerable GetLoaded() => backing; + + #region ICollection Members + + public void Add(T item) => throw new NotSupportedException(); + + public void Clear() => throw new NotSupportedException(); + + public bool IsReadOnly => true; + + public bool Remove(T item) => throw new NotSupportedException(); + + #endregion + + public IAsyncEnumerator GetAsyncEnumerator(CancellationToken cancellationToken = default) => + new LazyLoader(this, cancellationToken); +} diff --git a/src/SharpCompress/Polyfills/AsyncEnumerableExtensions.cs b/src/SharpCompress/Polyfills/AsyncEnumerableExtensions.cs new file mode 100644 index 000000000..785d4c328 --- /dev/null +++ b/src/SharpCompress/Polyfills/AsyncEnumerableExtensions.cs @@ -0,0 +1,101 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; + +namespace SharpCompress; + +public static class AsyncEnumerableEx +{ + public static async IAsyncEnumerable Empty() + where T : notnull + { + await Task.CompletedTask; + yield break; + } +} + +public static class EnumerableExtensions +{ + public static async IAsyncEnumerable ToAsyncEnumerable(this IEnumerable source) + { + await Task.CompletedTask; + foreach (var item in source) + { + yield return item; + } + } +} + +public static class AsyncEnumerableExtensions +{ + extension(IAsyncEnumerable source) + where T : notnull + { + public async ValueTask> ToListAsync() + { + var list = new List(); + await foreach (var item in source) + { + list.Add(item); + } + return list; + } + + public async IAsyncEnumerable Cast() + where TResult : class + { + await foreach (var item in source) + { + 
yield return (item as TResult).NotNull(); + } + } + + public async ValueTask All(Func predicate) + { + await foreach (var item in source) + { + if (!predicate(item)) + { + return false; + } + } + + return true; + } + + public async IAsyncEnumerable Where(Func predicate) + { + await foreach (var item in source) + { + if (predicate(item)) + { + yield return item; + } + } + } + + public async ValueTask FirstOrDefaultAsync() + { + await foreach (var item in source) + { + return item; // Returns the very first item found + } + + return default; // Returns null/default if the stream is empty + } + + public async ValueTask Aggregate( + TAccumulate seed, + Func func + ) + { + TAccumulate result = seed; + await foreach (var element in source) + { + result = func(result, element); + } + return result; + } + } +} diff --git a/src/SharpCompress/Polyfills/BinaryReaderExtensions.cs b/src/SharpCompress/Polyfills/BinaryReaderExtensions.cs new file mode 100644 index 000000000..24a0eaf66 --- /dev/null +++ b/src/SharpCompress/Polyfills/BinaryReaderExtensions.cs @@ -0,0 +1,65 @@ +using System; +using System.Buffers; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace SharpCompress; + +public static class BinaryReaderExtensions +{ + extension(BinaryReader reader) + { + public async ValueTask ReadByteAsync(CancellationToken cancellationToken = default) + { + var buffer = new byte[1]; + await reader + .BaseStream.ReadExactAsync(buffer, 0, 1, cancellationToken) + .ConfigureAwait(false); + return buffer[0]; + } + + public async ValueTask ReadBytesAsync( + int count, + CancellationToken cancellationToken = default + ) + { + if (count < 0) + { + throw new ArgumentOutOfRangeException(nameof(count), "Count must be non-negative."); + } + + if (count == 0) + { + return Array.Empty(); + } + + // For small allocations, direct allocation is more efficient than pooling + // due to ArrayPool overhead and the need to copy data to return array + if (count <= 256) + { 
+ var bytes = new byte[count]; + await reader + .BaseStream.ReadExactAsync(bytes, 0, count, cancellationToken) + .ConfigureAwait(false); + return bytes; + } + + // For larger allocations, use ArrayPool to reduce GC pressure + var buffer = ArrayPool.Shared.Rent(count); + try + { + await reader + .BaseStream.ReadExactAsync(buffer, 0, count, cancellationToken) + .ConfigureAwait(false); + var bytes = new byte[count]; + Array.Copy(buffer, 0, bytes, 0, count); + return bytes; + } + finally + { + ArrayPool.Shared.Return(buffer); + } + } + } +} diff --git a/src/SharpCompress/Polyfills/StreamExtensions.cs b/src/SharpCompress/Polyfills/StreamExtensions.cs index f00b274a1..6316416da 100644 --- a/src/SharpCompress/Polyfills/StreamExtensions.cs +++ b/src/SharpCompress/Polyfills/StreamExtensions.cs @@ -1,5 +1,3 @@ -#if NETFRAMEWORK || NETSTANDARD2_0 - using System; using System.Buffers; using System.IO; @@ -8,63 +6,62 @@ namespace SharpCompress; -internal static class StreamExtensions +public static class StreamExtensions { - internal static int Read(this Stream stream, Span buffer) + extension(Stream stream) { - var temp = ArrayPool.Shared.Rent(buffer.Length); - - try + public void Skip(long advanceAmount) { - var read = stream.Read(temp, 0, buffer.Length); - - temp.AsSpan(0, read).CopyTo(buffer); + if (stream.CanSeek) + { + stream.Position += advanceAmount; + return; + } - return read; - } - finally - { - ArrayPool.Shared.Return(temp); + using var readOnlySubStream = new IO.ReadOnlySubStream(stream, advanceAmount); + readOnlySubStream.CopyTo(Stream.Null); } - } - - internal static void Write(this Stream stream, ReadOnlySpan buffer) - { - var temp = ArrayPool.Shared.Rent(buffer.Length); - buffer.CopyTo(temp); + public void Skip() => stream.CopyTo(Stream.Null); - try + public Task SkipAsync(CancellationToken cancellationToken = default) { - stream.Write(temp, 0, buffer.Length); + cancellationToken.ThrowIfCancellationRequested(); + return stream.CopyToAsync(Stream.Null); } - 
finally + + internal int Read(Span buffer) { - ArrayPool.Shared.Return(temp); + var temp = ArrayPool.Shared.Rent(buffer.Length); + + try + { + var read = stream.Read(temp, 0, buffer.Length); + + temp.AsSpan(0, read).CopyTo(buffer); + + return read; + } + finally + { + ArrayPool.Shared.Return(temp); + } } - } - internal static async Task ReadExactlyAsync( - this Stream stream, - byte[] buffer, - int offset, - int count, - CancellationToken cancellationToken - ) - { - var totalRead = 0; - while (totalRead < count) + internal void Write(ReadOnlySpan buffer) { - var read = await stream - .ReadAsync(buffer, offset + totalRead, count - totalRead, cancellationToken) - .ConfigureAwait(false); - if (read == 0) + var temp = ArrayPool.Shared.Rent(buffer.Length); + + buffer.CopyTo(temp); + + try { - throw new EndOfStreamException(); + stream.Write(temp, 0, buffer.Length); + } + finally + { + ArrayPool.Shared.Return(temp); } - totalRead += read; } } } - -#endif diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs index cd37bb5ff..29e700aaa 100644 --- a/src/SharpCompress/Readers/AbstractReader.cs +++ b/src/SharpCompress/Readers/AbstractReader.cs @@ -12,12 +12,13 @@ namespace SharpCompress.Readers; /// /// A generic push reader that reads unseekable comrpessed streams. /// -public abstract class AbstractReader : IReader +public abstract class AbstractReader : IReader, IAsyncReader where TEntry : Entry where TVolume : Volume { private bool _completed; private IEnumerator? _entriesForCurrentReadStream; + private IAsyncEnumerator? _entriesForCurrentReadStreamAsync; private bool _wroteCurrentEntry; internal AbstractReader(ReaderOptions options, ArchiveType archiveType) @@ -36,9 +37,19 @@ internal AbstractReader(ReaderOptions options, ArchiveType archiveType) public abstract TVolume? Volume { get; } /// - /// Current file entry + /// Current file entry (from either sync or async enumeration). 
/// - public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current; + public TEntry Entry + { + get + { + if (_entriesForCurrentReadStreamAsync is not null) + { + return _entriesForCurrentReadStreamAsync.Current; + } + return _entriesForCurrentReadStream.NotNull().Current; + } + } #region IDisposable Members @@ -48,6 +59,15 @@ public virtual void Dispose() Volume?.Dispose(); } + public virtual async ValueTask DisposeAsync() + { + if (_entriesForCurrentReadStreamAsync is not null) + { + await _entriesForCurrentReadStreamAsync.DisposeAsync(); + } + Volume?.Dispose(); + } + #endregion public bool Cancelled { get; private set; } @@ -67,6 +87,12 @@ public void Cancel() public bool MoveToNextEntry() { + if (_entriesForCurrentReadStreamAsync is not null) + { + throw new InvalidOperationException( + $"{nameof(MoveToNextEntry)} cannot be used after {nameof(MoveToNextEntryAsync)} has been used." + ); + } if (_completed) { return false; @@ -92,7 +118,7 @@ public bool MoveToNextEntry() return false; } - public async Task MoveToNextEntryAsync(CancellationToken cancellationToken = default) + public async ValueTask MoveToNextEntryAsync(CancellationToken cancellationToken = default) { if (_completed) { @@ -102,16 +128,16 @@ public async Task MoveToNextEntryAsync(CancellationToken cancellationToken { throw new ReaderCancelledException("Reader has been cancelled."); } - if (_entriesForCurrentReadStream is null) + if (_entriesForCurrentReadStreamAsync is null) { - return LoadStreamForReading(RequestInitialStream()); + return await LoadStreamForReadingAsync(RequestInitialStream()); } if (!_wroteCurrentEntry) { await SkipEntryAsync(cancellationToken).ConfigureAwait(false); } _wroteCurrentEntry = false; - if (NextEntryForCurrentStream()) + if (await NextEntryForCurrentStreamAsync(cancellationToken)) { return true; } @@ -121,6 +147,12 @@ public async Task MoveToNextEntryAsync(CancellationToken cancellationToken protected bool LoadStreamForReading(Stream stream) { + if 
(_entriesForCurrentReadStreamAsync is not null) + { + throw new InvalidOperationException( + $"{nameof(LoadStreamForReading)} cannot be used after {nameof(LoadStreamForReadingAsync)} has been used." + ); + } _entriesForCurrentReadStream?.Dispose(); if (stream is null || !stream.CanRead) { @@ -134,14 +166,59 @@ protected bool LoadStreamForReading(Stream stream) return _entriesForCurrentReadStream.MoveNext(); } + protected async ValueTask LoadStreamForReadingAsync(Stream stream) + { + if (_entriesForCurrentReadStreamAsync is not null) + { + await _entriesForCurrentReadStreamAsync.DisposeAsync(); + } + if (stream is null || !stream.CanRead) + { + throw new MultipartStreamRequiredException( + "File is split into multiple archives: '" + + Entry.Key + + "'. A new readable stream is required. Use Cancel if it was intended." + ); + } + _entriesForCurrentReadStreamAsync = GetEntriesAsync(stream).GetAsyncEnumerator(); + return await _entriesForCurrentReadStreamAsync.MoveNextAsync(); + } + protected virtual Stream RequestInitialStream() => Volume.NotNull("Volume isn't loaded.").Stream; internal virtual bool NextEntryForCurrentStream() => _entriesForCurrentReadStream.NotNull().MoveNext(); + internal virtual ValueTask NextEntryForCurrentStreamAsync() => + _entriesForCurrentReadStreamAsync.NotNull().MoveNextAsync(); + + /// + /// Moves the current async enumerator to the next entry. 
+ /// + internal virtual ValueTask NextEntryForCurrentStreamAsync( + CancellationToken cancellationToken + ) + { + if (_entriesForCurrentReadStreamAsync is not null) + { + return _entriesForCurrentReadStreamAsync.MoveNextAsync(); + } + + return new ValueTask(NextEntryForCurrentStream()); + } + protected abstract IEnumerable GetEntries(Stream stream); + protected virtual async IAsyncEnumerable GetEntriesAsync(Stream stream) + { + await Task.CompletedTask; + foreach (var entry in GetEntries(stream)) + { + yield return entry; + } + } + #region Entry Skip/Write private void SkipEntry() @@ -152,7 +229,7 @@ private void SkipEntry() } } - private async Task SkipEntryAsync(CancellationToken cancellationToken) + private async ValueTask SkipEntryAsync(CancellationToken cancellationToken) { if (!Entry.IsDirectory) { @@ -182,7 +259,7 @@ private void Skip() s.SkipEntry(); } - private async Task SkipAsync(CancellationToken cancellationToken) + private async ValueTask SkipAsync(CancellationToken cancellationToken) { var part = Entry.Parts.First(); @@ -231,7 +308,7 @@ public void WriteEntryTo(Stream writableStream) _wroteCurrentEntry = true; } - public async Task WriteEntryToAsync( + public async ValueTask WriteEntryToAsync( Stream writableStream, CancellationToken cancellationToken = default ) @@ -265,14 +342,14 @@ internal void Write(Stream writeStream) sourceStream.CopyTo(writeStream, 81920); } - internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken) + internal async ValueTask WriteAsync(Stream writeStream, CancellationToken cancellationToken) { #if NETFRAMEWORK || NETSTANDARD2_0 - using Stream s = OpenEntryStream(); + using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false); var sourceStream = WrapWithProgress(s, Entry); await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false); #else - await using Stream s = OpenEntryStream(); + await using Stream s = await 
OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false); var sourceStream = WrapWithProgress(s, Entry); await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false); #endif @@ -323,7 +400,7 @@ public EntryStream OpenEntryStream() return stream; } - public async Task OpenEntryStreamAsync( + public async ValueTask OpenEntryStreamAsync( CancellationToken cancellationToken = default ) { @@ -347,11 +424,19 @@ protected EntryStream CreateEntryStream(Stream? decompressed) => protected virtual EntryStream GetEntryStream() => CreateEntryStream(Entry.Parts.First().GetCompressedStream()); - protected virtual Task GetEntryStreamAsync( + protected virtual async Task GetEntryStreamAsync( CancellationToken cancellationToken = default - ) => Task.FromResult(GetEntryStream()); + ) + { + var stream = await Entry + .Parts.First() + .GetCompressedStreamAsync(cancellationToken) + .ConfigureAwait(false); + return CreateEntryStream(stream); + } #endregion IEntry IReader.Entry => Entry; + IEntry IAsyncReader.Entry => Entry; } diff --git a/src/SharpCompress/Readers/IAsyncReader.cs b/src/SharpCompress/Readers/IAsyncReader.cs new file mode 100644 index 000000000..bd82ee482 --- /dev/null +++ b/src/SharpCompress/Readers/IAsyncReader.cs @@ -0,0 +1,41 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using SharpCompress.Common; + +namespace SharpCompress.Readers; + +public interface IAsyncReader : IAsyncDisposable +{ + ArchiveType ArchiveType { get; } + + IEntry Entry { get; } + + /// + /// Decompresses the current entry to the stream asynchronously. This cannot be called twice for the current entry. + /// + /// + /// + ValueTask WriteEntryToAsync( + Stream writableStream, + CancellationToken cancellationToken = default + ); + + bool Cancelled { get; } + void Cancel(); + + /// + /// Moves to the next entry asynchronously by reading more data from the underlying stream. This skips if data has not been read. 
+ /// + /// + /// + ValueTask MoveToNextEntryAsync(CancellationToken cancellationToken = default); + + /// + /// Opens the current entry asynchronously as a stream that will decompress as it is read. + /// Read the entire stream or use SkipEntry on EntryStream. + /// + /// + ValueTask OpenEntryStreamAsync(CancellationToken cancellationToken = default); +} diff --git a/src/SharpCompress/Readers/IAsyncReaderExtensions.cs b/src/SharpCompress/Readers/IAsyncReaderExtensions.cs new file mode 100644 index 000000000..2b9a6a6b2 --- /dev/null +++ b/src/SharpCompress/Readers/IAsyncReaderExtensions.cs @@ -0,0 +1,69 @@ +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using SharpCompress.Common; + +namespace SharpCompress.Readers; + +public static class IAsyncReaderExtensions +{ + extension(IAsyncReader reader) + { + /// + /// Extract to specific directory asynchronously, retaining filename + /// + public async ValueTask WriteEntryToDirectoryAsync( + string destinationDirectory, + ExtractionOptions? options = null, + CancellationToken cancellationToken = default + ) => + await ExtractionMethods + .WriteEntryToDirectoryAsync( + reader.Entry, + destinationDirectory, + options, + reader.WriteEntryToFileAsync, + cancellationToken + ) + .ConfigureAwait(false); + + /// + /// Extract to specific file asynchronously + /// + public async ValueTask WriteEntryToFileAsync( + string destinationFileName, + ExtractionOptions? 
options = null, + CancellationToken cancellationToken = default + ) => + await ExtractionMethods + .WriteEntryToFileAsync( + reader.Entry, + destinationFileName, + options, + async (x, fm, ct) => + { + using var fs = File.Open(destinationFileName, fm); + await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false); + }, + cancellationToken + ) + .ConfigureAwait(false); + + /// + /// Extract all remaining unread entries to specific directory asynchronously, retaining filename + /// + public async ValueTask WriteAllToDirectoryAsync( + string destinationDirectory, + ExtractionOptions? options = null, + CancellationToken cancellationToken = default + ) + { + while (await reader.MoveToNextEntryAsync(cancellationToken)) + { + await reader + .WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken) + .ConfigureAwait(false); + } + } + } +} diff --git a/src/SharpCompress/Readers/IReader.cs b/src/SharpCompress/Readers/IReader.cs index 574237088..a38d61d6c 100644 --- a/src/SharpCompress/Readers/IReader.cs +++ b/src/SharpCompress/Readers/IReader.cs @@ -1,7 +1,5 @@ using System; using System.IO; -using System.Threading; -using System.Threading.Tasks; using SharpCompress.Common; namespace SharpCompress.Readers; @@ -18,13 +16,6 @@ public interface IReader : IDisposable /// void WriteEntryTo(Stream writableStream); - /// - /// Decompresses the current entry to the stream asynchronously. This cannot be called twice for the current entry. - /// - /// - /// - Task WriteEntryToAsync(Stream writableStream, CancellationToken cancellationToken = default); - bool Cancelled { get; } void Cancel(); @@ -34,23 +25,9 @@ public interface IReader : IDisposable /// bool MoveToNextEntry(); - /// - /// Moves to the next entry asynchronously by reading more data from the underlying stream. This skips if data has not been read. 
- /// - /// - /// - Task MoveToNextEntryAsync(CancellationToken cancellationToken = default); - /// /// Opens the current entry as a stream that will decompress as it is read. /// Read the entire stream or use SkipEntry on EntryStream. /// EntryStream OpenEntryStream(); - - /// - /// Opens the current entry asynchronously as a stream that will decompress as it is read. - /// Read the entire stream or use SkipEntry on EntryStream. - /// - /// - Task OpenEntryStreamAsync(CancellationToken cancellationToken = default); } diff --git a/src/SharpCompress/Readers/IReaderExtensions.cs b/src/SharpCompress/Readers/IReaderExtensions.cs index 65c6b1fac..cfa7c13a2 100644 --- a/src/SharpCompress/Readers/IReaderExtensions.cs +++ b/src/SharpCompress/Readers/IReaderExtensions.cs @@ -1,6 +1,4 @@ using System.IO; -using System.Threading; -using System.Threading.Tasks; using SharpCompress.Common; namespace SharpCompress.Readers; @@ -66,62 +64,5 @@ public void WriteEntryToFile( reader.WriteEntryTo(fs); } ); - - /// - /// Extract to specific directory asynchronously, retaining filename - /// - public async Task WriteEntryToDirectoryAsync( - string destinationDirectory, - ExtractionOptions? options = null, - CancellationToken cancellationToken = default - ) => - await ExtractionMethods - .WriteEntryToDirectoryAsync( - reader.Entry, - destinationDirectory, - options, - reader.WriteEntryToFileAsync, - cancellationToken - ) - .ConfigureAwait(false); - - /// - /// Extract to specific file asynchronously - /// - public async Task WriteEntryToFileAsync( - string destinationFileName, - ExtractionOptions? 
options = null, - CancellationToken cancellationToken = default - ) => - await ExtractionMethods - .WriteEntryToFileAsync( - reader.Entry, - destinationFileName, - options, - async (x, fm, ct) => - { - using var fs = File.Open(destinationFileName, fm); - await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false); - }, - cancellationToken - ) - .ConfigureAwait(false); - - /// - /// Extract all remaining unread entries to specific directory asynchronously, retaining filename - /// - public async Task WriteAllToDirectoryAsync( - string destinationDirectory, - ExtractionOptions? options = null, - CancellationToken cancellationToken = default - ) - { - while (await reader.MoveToNextEntryAsync(cancellationToken)) - { - await reader - .WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken) - .ConfigureAwait(false); - } - } } } diff --git a/src/SharpCompress/Readers/IReaderFactory.cs b/src/SharpCompress/Readers/IReaderFactory.cs index 08c190a30..4757644ba 100644 --- a/src/SharpCompress/Readers/IReaderFactory.cs +++ b/src/SharpCompress/Readers/IReaderFactory.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace SharpCompress.Readers; @@ -11,4 +13,9 @@ public interface IReaderFactory : Factories.IFactory /// /// IReader OpenReader(Stream stream, ReaderOptions? options); + ValueTask OpenReaderAsync( + Stream stream, + ReaderOptions? options, + CancellationToken cancellationToken + ); } diff --git a/src/SharpCompress/Readers/ReaderFactory.cs b/src/SharpCompress/Readers/ReaderFactory.cs index b8b022b04..6102f1469 100644 --- a/src/SharpCompress/Readers/ReaderFactory.cs +++ b/src/SharpCompress/Readers/ReaderFactory.cs @@ -1,6 +1,8 @@ using System; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Factories; using SharpCompress.IO; @@ -15,12 +17,46 @@ public static IReader Open(string filePath, ReaderOptions? 
options = null) return Open(new FileInfo(filePath), options); } + /// + /// Opens a Reader from a filepath asynchronously + /// + /// + /// + /// + /// + public static ValueTask OpenAsync( + string filePath, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + filePath.NotNullOrEmpty(nameof(filePath)); + return OpenAsync(new FileInfo(filePath), options, cancellationToken); + } + public static IReader Open(FileInfo fileInfo, ReaderOptions? options = null) { options ??= new ReaderOptions { LeaveStreamOpen = false }; return Open(fileInfo.OpenRead(), options); } + /// + /// Opens a Reader from a FileInfo asynchronously + /// + /// + /// + /// + /// + public static ValueTask OpenAsync( + FileInfo fileInfo, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + options ??= new ReaderOptions { LeaveStreamOpen = false }; + return OpenAsync(fileInfo.OpenRead(), options, cancellationToken); + } + /// /// Opens a Reader for Non-seeking usage /// @@ -73,4 +109,77 @@ public static IReader Open(Stream stream, ReaderOptions? options = null) "Cannot determine compressed stream type. Supported Reader Formats: Ace, Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard" ); } + + public static async ValueTask OpenAsync( + Stream stream, + ReaderOptions? options = null, + CancellationToken cancellationToken = default + ) + { + stream.NotNull(nameof(stream)); + options ??= new ReaderOptions() { LeaveStreamOpen = false }; + + var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize); + + long pos = ((IStreamStack)bStream).GetPosition(); + + var factories = Factories.Factory.Factories.OfType(); + + Factory? 
testedFactory = null; + + if (!string.IsNullOrWhiteSpace(options.ExtensionHint)) + { + testedFactory = factories.FirstOrDefault(a => + a.GetSupportedExtensions() + .Contains(options.ExtensionHint, StringComparer.CurrentCultureIgnoreCase) + ); + if (testedFactory is IReaderFactory readerFactory) + { + ((IStreamStack)bStream).StackSeek(pos); + if ( + await testedFactory.IsArchiveAsync( + bStream, + options.Password, + options.BufferSize, + cancellationToken + ) + ) + { + ((IStreamStack)bStream).StackSeek(pos); + return await readerFactory + .OpenReaderAsync(bStream, options, cancellationToken) + .ConfigureAwait(false); + } + } + ((IStreamStack)bStream).StackSeek(pos); + } + + foreach (var factory in factories) + { + if (testedFactory == factory) + { + continue; // Already tested above + } + ((IStreamStack)bStream).StackSeek(pos); + if ( + factory is IReaderFactory readerFactory + && await factory.IsArchiveAsync( + bStream, + options.Password, + options.BufferSize, + cancellationToken + ) + ) + { + ((IStreamStack)bStream).StackSeek(pos); + return await readerFactory + .OpenReaderAsync(bStream, options, cancellationToken) + .ConfigureAwait(false); + } + } + + throw new InvalidFormatException( + "Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard" + ); + } } diff --git a/src/SharpCompress/Readers/Zip/ZipReader.cs b/src/SharpCompress/Readers/Zip/ZipReader.cs index 3a2578459..d15fa7e04 100644 --- a/src/SharpCompress/Readers/Zip/ZipReader.cs +++ b/src/SharpCompress/Readers/Zip/ZipReader.cs @@ -1,5 +1,8 @@ +using System; using System.Collections.Generic; using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.Zip; using SharpCompress.Common.Zip.Headers; @@ -91,4 +94,102 @@ protected override IEnumerable GetEntries(Stream stream) } } } + + /// + /// Returns entries asynchronously for streams that only support async reads. 
+ /// + protected override IAsyncEnumerable GetEntriesAsync(Stream stream) => + new ZipEntryAsyncEnumerable(_headerFactory, stream); + + /// + /// Adapts an async header sequence into an async entry sequence. + /// + private sealed class ZipEntryAsyncEnumerable : IAsyncEnumerable + { + private readonly StreamingZipHeaderFactory _headerFactory; + private readonly Stream _stream; + + public ZipEntryAsyncEnumerable(StreamingZipHeaderFactory headerFactory, Stream stream) + { + _headerFactory = headerFactory; + _stream = stream; + } + + public IAsyncEnumerator GetAsyncEnumerator( + CancellationToken cancellationToken = default + ) => new ZipEntryAsyncEnumerator(_headerFactory, _stream, cancellationToken); + } + + /// + /// Yields entries from streaming ZIP headers without requiring synchronous stream reads. + /// + private sealed class ZipEntryAsyncEnumerator : IAsyncEnumerator, IDisposable + { + private readonly Stream _stream; + private readonly IAsyncEnumerator _headerEnumerator; + private ZipEntry? _current; + + public ZipEntryAsyncEnumerator( + StreamingZipHeaderFactory headerFactory, + Stream stream, + CancellationToken cancellationToken + ) + { + _stream = stream; + _headerEnumerator = headerFactory + .ReadStreamHeaderAsync(stream) + .GetAsyncEnumerator(cancellationToken); + } + + public ZipEntry Current => + _current ?? throw new InvalidOperationException("No current entry is available."); + + /// + /// Advances to the next non-directory entry-relevant header and materializes a . + /// + public async ValueTask MoveNextAsync() + { + while (await _headerEnumerator.MoveNextAsync().ConfigureAwait(false)) + { + var header = _headerEnumerator.Current; + switch (header.ZipHeaderType) + { + case ZipHeaderType.LocalEntry: + _current = new ZipEntry( + new StreamingZipFilePart((LocalEntryHeader)header, _stream) + ); + return true; + case ZipHeaderType.DirectoryEntry: + // DirectoryEntry headers are intentionally skipped in streaming mode. 
+ break; + case ZipHeaderType.DirectoryEnd: + _current = null; + return false; + } + } + + _current = null; + return false; + } + + /// + /// Disposes the underlying header enumerator. + /// + public ValueTask DisposeAsync() + { + Dispose(); + return default; + } + + /// + /// Disposes the underlying header enumerator. + /// + public void Dispose() + { + if (_headerEnumerator is IDisposable disposable) + { + disposable.Dispose(); + } + } + } } diff --git a/src/SharpCompress/SharpCompress.csproj b/src/SharpCompress/SharpCompress.csproj index b621fa934..a48b0b7a7 100644 --- a/src/SharpCompress/SharpCompress.csproj +++ b/src/SharpCompress/SharpCompress.csproj @@ -37,18 +37,11 @@ $(DefineConstants);DEBUG_STREAMS - - - - - - - diff --git a/src/SharpCompress/Utility.cs b/src/SharpCompress/Utility.cs index 8f2d6788a..5fd18e6bb 100644 --- a/src/SharpCompress/Utility.cs +++ b/src/SharpCompress/Utility.cs @@ -12,7 +12,7 @@ namespace SharpCompress; internal static class Utility { //80kb is a good industry standard temporary buffer size - private const int TEMP_BUFFER_SIZE = 81920; + internal const int TEMP_BUFFER_SIZE = 81920; private static readonly HashSet invalidChars = new(Path.GetInvalidFileNameChars()); public static ReadOnlyCollection ToReadOnly(this IList items) => new(items); @@ -63,26 +63,6 @@ public static IEnumerable AsEnumerable(this T item) yield return item; } - public static void Skip(this Stream source, long advanceAmount) - { - if (source.CanSeek) - { - source.Position += advanceAmount; - return; - } - - using var readOnlySubStream = new IO.ReadOnlySubStream(source, advanceAmount); - readOnlySubStream.CopyTo(Stream.Null); - } - - public static void Skip(this Stream source) => source.CopyTo(Stream.Null); - - public static Task SkipAsync(this Stream source, CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - return source.CopyToAsync(Stream.Null); - } - public static DateTime DosDateToDateTime(ushort iDate, 
ushort iTime) { var year = (iDate / 512) + 1980; @@ -149,229 +129,257 @@ public static DateTime UnixTimeToDateTime(long unixtime) return sTime.AddSeconds(unixtime); } - public static long TransferTo(this Stream source, Stream destination, long maxLength) + extension(Stream source) { - var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE); - try + public long TransferTo(Stream destination, long maxLength) { - var maxReadSize = array.Length; - long total = 0; - var remaining = maxLength; - if (remaining < maxReadSize) - { - maxReadSize = (int)remaining; - } - while (ReadTransferBlock(source, array, maxReadSize, out var count)) - { - destination.Write(array, 0, count); - total += count; - if (remaining - count < 0) - { - break; - } - remaining -= count; - if (remaining < maxReadSize) - { - maxReadSize = (int)remaining; - } - } - return total; + // Use ReadOnlySubStream to limit reading and leverage framework's CopyTo + using var limitedStream = new IO.ReadOnlySubStream(source, maxLength); + limitedStream.CopyTo(destination, TEMP_BUFFER_SIZE); + return limitedStream.Position; } - finally + + public async ValueTask TransferToAsync( + Stream destination, + long maxLength, + CancellationToken cancellationToken = default + ) { - ArrayPool.Shared.Return(array); + // Use ReadOnlySubStream to limit reading and leverage framework's CopyToAsync + using var limitedStream = new IO.ReadOnlySubStream(source, maxLength); + await limitedStream + .CopyToAsync(destination, TEMP_BUFFER_SIZE, cancellationToken) + .ConfigureAwait(false); + return limitedStream.Position; } } - public static async Task TransferToAsync( - this Stream source, - Stream destination, - long maxLength, - CancellationToken cancellationToken = default - ) + extension(Stream source) { - var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE); - try + public async ValueTask SkipAsync( + long advanceAmount, + CancellationToken cancellationToken = default + ) { - var maxReadSize = array.Length; - long total = 0; - var 
remaining = maxLength; - if (remaining < maxReadSize) + if (source.CanSeek) { - maxReadSize = (int)remaining; + source.Position += advanceAmount; + return; } - while ( - await ReadTransferBlockAsync(source, array, maxReadSize, cancellationToken) - .ConfigureAwait(false) - is var (success, count) - && success - ) + + var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE); + try { - await destination - .WriteAsync(array, 0, count, cancellationToken) - .ConfigureAwait(false); - total += count; - if (remaining - count < 0) - { - break; - } - remaining -= count; - if (remaining < maxReadSize) + while (advanceAmount > 0) { - maxReadSize = (int)remaining; + var toRead = (int)Math.Min(array.Length, advanceAmount); + var read = await source + .ReadAsync(array, 0, toRead, cancellationToken) + .ConfigureAwait(false); + if (read <= 0) + { + break; + } + advanceAmount -= read; } } - return total; + finally + { + ArrayPool.Shared.Return(array); + } } - finally + +#if NET60_OR_GREATER + public bool ReadFully(byte[] buffer) { - ArrayPool.Shared.Return(array); + try + { + source.ReadExactly(buffer); + return true; + } + catch (EndOfStreamException) + { + return false; + } } - } - private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count) - { - var size = maxSize; - if (maxSize > array.Length) + public bool ReadFully(Span buffer) { - size = array.Length; + try + { + source.ReadExactly(buffer); + return true; + } + catch (EndOfStreamException) + { + return false; + } } - count = source.Read(array, 0, size); - return count != 0; - } - - private static async Task<(bool success, int count)> ReadTransferBlockAsync( - Stream source, - byte[] array, - int maxSize, - CancellationToken cancellationToken - ) - { - var size = maxSize; - if (maxSize > array.Length) +#else + public bool ReadFully(byte[] buffer) { - size = array.Length; + var total = 0; + int read; + while ((read = source.Read(buffer, total, buffer.Length - total)) > 0) + { + total += read; + if 
(total >= buffer.Length) + { + return true; + } + } + return (total >= buffer.Length); } - var count = await source.ReadAsync(array, 0, size, cancellationToken).ConfigureAwait(false); - return (count != 0, count); - } - public static async Task SkipAsync( - this Stream source, - long advanceAmount, - CancellationToken cancellationToken = default - ) - { - if (source.CanSeek) + public bool ReadFully(Span buffer) { - source.Position += advanceAmount; - return; + var total = 0; + int read; + while ((read = source.Read(buffer.Slice(total, buffer.Length - total))) > 0) + { + total += read; + if (total >= buffer.Length) + { + return true; + } + } + return (total >= buffer.Length); } +#endif - var array = ArrayPool.Shared.Rent(TEMP_BUFFER_SIZE); - try + public async ValueTask ReadFullyAsync( + byte[] buffer, + CancellationToken cancellationToken = default + ) { - while (advanceAmount > 0) + var total = 0; + int read; + while ( + ( + read = await source + .ReadAsync(buffer, total, buffer.Length - total, cancellationToken) + .ConfigureAwait(false) + ) > 0 + ) { - var toRead = (int)Math.Min(array.Length, advanceAmount); - var read = await source - .ReadAsync(array, 0, toRead, cancellationToken) - .ConfigureAwait(false); - if (read <= 0) + total += read; + if (total >= buffer.Length) { - break; + return true; } - advanceAmount -= read; } + return (total >= buffer.Length); } - finally + + public async ValueTask ReadFullyAsync( + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken = default + ) { - ArrayPool.Shared.Return(array); + var total = 0; + int read; + while ( + ( + read = await source + .ReadAsync(buffer, offset + total, count - total, cancellationToken) + .ConfigureAwait(false) + ) > 0 + ) + { + total += read; + if (total >= count) + { + return true; + } + } + return (total >= count); } } -#if NET60_OR_GREATER - - public static bool ReadFully(this Stream stream, byte[] buffer) + /// + /// Read exactly the requested number of bytes from a 
stream. Throws EndOfStreamException if not enough data is available. + /// + public static void ReadExact(this Stream stream, byte[] buffer, int offset, int length) { - try - { - stream.ReadExactly(buffer); - return true; - } - catch (EndOfStreamException) + if (stream is null) { - return false; + throw new ArgumentNullException(nameof(stream)); } - } - public static bool ReadFully(this Stream stream, Span buffer) - { - try + if (buffer is null) { - stream.ReadExactly(buffer); - return true; + throw new ArgumentNullException(nameof(buffer)); } - catch (EndOfStreamException) + + if (offset < 0 || offset > buffer.Length) { - return false; + throw new ArgumentOutOfRangeException(nameof(offset)); } - } -#else - public static bool ReadFully(this Stream stream, byte[] buffer) - { - var total = 0; - int read; - while ((read = stream.Read(buffer, total, buffer.Length - total)) > 0) + + if (length < 0 || length > buffer.Length - offset) { - total += read; - if (total >= buffer.Length) - { - return true; - } + throw new ArgumentOutOfRangeException(nameof(length)); } - return (total >= buffer.Length); - } - public static bool ReadFully(this Stream stream, Span buffer) - { - var total = 0; - int read; - while ((read = stream.Read(buffer.Slice(total, buffer.Length - total))) > 0) + while (length > 0) { - total += read; - if (total >= buffer.Length) + var fetched = stream.Read(buffer, offset, length); + if (fetched <= 0) { - return true; + throw new EndOfStreamException(); } + + offset += fetched; + length -= fetched; } - return (total >= buffer.Length); } -#endif - public static async Task ReadFullyAsync( + /// + /// Read exactly the requested number of bytes from a stream asynchronously. Throws EndOfStreamException if not enough data is available. 
+ /// + public static async ValueTask ReadExactAsync( this Stream stream, byte[] buffer, + int offset, + int length, CancellationToken cancellationToken = default ) { - var total = 0; - int read; - while ( - ( - read = await stream - .ReadAsync(buffer, total, buffer.Length - total, cancellationToken) - .ConfigureAwait(false) - ) > 0 - ) + if (stream is null) { - total += read; - if (total >= buffer.Length) + throw new ArgumentNullException(nameof(stream)); + } + + if (buffer is null) + { + throw new ArgumentNullException(nameof(buffer)); + } + + if (offset < 0 || offset > buffer.Length) + { + throw new ArgumentOutOfRangeException(nameof(offset)); + } + + if (length < 0 || length > buffer.Length - offset) + { + throw new ArgumentOutOfRangeException(nameof(length)); + } + + while (length > 0) + { + var fetched = await stream + .ReadAsync(buffer, offset, length, cancellationToken) + .ConfigureAwait(false); + if (fetched <= 0) { - return true; + throw new EndOfStreamException(); } + + offset += fetched; + length -= fetched; } - return (total >= buffer.Length); } public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim(); diff --git a/src/SharpCompress/Writers/AbstractWriter.cs b/src/SharpCompress/Writers/AbstractWriter.cs index d86ccc746..7dce62972 100644 --- a/src/SharpCompress/Writers/AbstractWriter.cs +++ b/src/SharpCompress/Writers/AbstractWriter.cs @@ -48,7 +48,7 @@ protected Stream WrapWithProgress(Stream source, string entryPath) public abstract void Write(string filename, Stream source, DateTime? modificationTime); - public virtual async Task WriteAsync( + public virtual async ValueTask WriteAsync( string filename, Stream source, DateTime? modificationTime, @@ -63,7 +63,7 @@ public virtual async Task WriteAsync( public abstract void WriteDirectory(string directoryName, DateTime? modificationTime); - public virtual async Task WriteDirectoryAsync( + public virtual async ValueTask WriteDirectoryAsync( string directoryName, DateTime? 
modificationTime, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Writers/IWriter.cs b/src/SharpCompress/Writers/IWriter.cs index d34d23986..b51b49722 100644 --- a/src/SharpCompress/Writers/IWriter.cs +++ b/src/SharpCompress/Writers/IWriter.cs @@ -10,14 +10,14 @@ public interface IWriter : IDisposable { ArchiveType WriterType { get; } void Write(string filename, Stream source, DateTime? modificationTime); - Task WriteAsync( + ValueTask WriteAsync( string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken = default ); void WriteDirectory(string directoryName, DateTime? modificationTime); - Task WriteDirectoryAsync( + ValueTask WriteDirectoryAsync( string directoryName, DateTime? modificationTime, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Writers/IWriterExtensions.cs b/src/SharpCompress/Writers/IWriterExtensions.cs index 9b6a268fc..9b8fb67b5 100644 --- a/src/SharpCompress/Writers/IWriterExtensions.cs +++ b/src/SharpCompress/Writers/IWriterExtensions.cs @@ -59,14 +59,14 @@ public static void WriteDirectory(this IWriter writer, string directoryName) => writer.WriteDirectory(directoryName, null); // Async extensions - public static Task WriteAsync( + public static ValueTask WriteAsync( this IWriter writer, string entryPath, Stream source, CancellationToken cancellationToken = default ) => writer.WriteAsync(entryPath, source, null, cancellationToken); - public static async Task WriteAsync( + public static async ValueTask WriteAsync( this IWriter writer, string entryPath, FileInfo source, @@ -83,14 +83,14 @@ await writer .ConfigureAwait(false); } - public static Task WriteAsync( + public static ValueTask WriteAsync( this IWriter writer, string entryPath, string source, CancellationToken cancellationToken = default ) => writer.WriteAsync(entryPath, new FileInfo(source), cancellationToken); - public static Task WriteAllAsync( + public static ValueTask WriteAllAsync( this 
IWriter writer, string directory, string searchPattern = "*", @@ -98,7 +98,7 @@ public static Task WriteAllAsync( CancellationToken cancellationToken = default ) => writer.WriteAllAsync(directory, searchPattern, null, option, cancellationToken); - public static async Task WriteAllAsync( + public static async ValueTask WriteAllAsync( this IWriter writer, string directory, string searchPattern = "*", @@ -125,7 +125,7 @@ await writer } } - public static Task WriteDirectoryAsync( + public static ValueTask WriteDirectoryAsync( this IWriter writer, string directoryName, CancellationToken cancellationToken = default diff --git a/src/SharpCompress/Writers/IWriterFactory.cs b/src/SharpCompress/Writers/IWriterFactory.cs index 094a55533..f933e8197 100644 --- a/src/SharpCompress/Writers/IWriterFactory.cs +++ b/src/SharpCompress/Writers/IWriterFactory.cs @@ -1,4 +1,6 @@ using System.IO; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Factories; namespace SharpCompress.Writers; @@ -6,4 +8,10 @@ namespace SharpCompress.Writers; public interface IWriterFactory : IFactory { IWriter Open(Stream stream, WriterOptions writerOptions); + + ValueTask OpenAsync( + Stream stream, + WriterOptions writerOptions, + CancellationToken cancellationToken = default + ); } diff --git a/src/SharpCompress/Writers/Tar/TarWriter.cs b/src/SharpCompress/Writers/Tar/TarWriter.cs index afad63bea..aec014540 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.cs @@ -103,7 +103,7 @@ public override void WriteDirectory(string directoryName, DateTime? modification header.Write(OutputStream); } - public override async Task WriteDirectoryAsync( + public override async ValueTask WriteDirectoryAsync( string directoryName, DateTime? modificationTime, CancellationToken cancellationToken = default @@ -134,14 +134,14 @@ public void Write(string filename, Stream source, DateTime? 
modificationTime, lo PadTo512(size.Value); } - public override async Task WriteAsync( + public override async ValueTask WriteAsync( string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken = default ) => await WriteAsync(filename, source, modificationTime, null, cancellationToken); - public async Task WriteAsync( + public async ValueTask WriteAsync( string filename, Stream source, DateTime? modificationTime, diff --git a/src/SharpCompress/Writers/WriterFactory.cs b/src/SharpCompress/Writers/WriterFactory.cs index 50e1fdfc8..b7ff5cd41 100644 --- a/src/SharpCompress/Writers/WriterFactory.cs +++ b/src/SharpCompress/Writers/WriterFactory.cs @@ -1,6 +1,8 @@ using System; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Common; namespace SharpCompress.Writers; @@ -20,4 +22,33 @@ public static IWriter Open(Stream stream, ArchiveType archiveType, WriterOptions throw new NotSupportedException("Archive Type does not have a Writer: " + archiveType); } + + /// + /// Opens a Writer asynchronously. + /// + /// The stream to write to. + /// The archive type. + /// Writer options. + /// Cancellation token. + /// A task that returns an IWriter. 
+ public static async ValueTask OpenAsync( + Stream stream, + ArchiveType archiveType, + WriterOptions writerOptions, + CancellationToken cancellationToken = default + ) + { + var factory = Factories + .Factory.Factories.OfType() + .FirstOrDefault(item => item.KnownArchiveType == archiveType); + + if (factory != null) + { + return await factory + .OpenAsync(stream, writerOptions, cancellationToken) + .ConfigureAwait(false); + } + + throw new NotSupportedException("Archive Type does not have a Writer: " + archiveType); + } } diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.cs b/src/SharpCompress/Writers/Zip/ZipWriter.cs index 8c4b96b6e..845856b8f 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriter.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriter.cs @@ -162,7 +162,7 @@ public override void WriteDirectory(string directoryName, DateTime? modification WriteDirectoryEntry(normalizedName, options); } - public override Task WriteDirectoryAsync( + public override async ValueTask WriteDirectoryAsync( string directoryName, DateTime? 
modificationTime, CancellationToken cancellationToken = default @@ -170,7 +170,7 @@ public override Task WriteDirectoryAsync( { // Synchronous implementation is sufficient for directory entries WriteDirectory(directoryName, modificationTime); - return Task.CompletedTask; + await Task.CompletedTask.ConfigureAwait(false); } private void WriteDirectoryEntry(string directoryPath, ZipWriterEntryOptions options) diff --git a/src/SharpCompress/packages.lock.json b/src/SharpCompress/packages.lock.json index 2975919f1..032c15c49 100644 --- a/src/SharpCompress/packages.lock.json +++ b/src/SharpCompress/packages.lock.json @@ -30,6 +30,12 @@ "Microsoft.SourceLink.Common": "8.0.0" } }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "System.Buffers": { "type": "Direct", "requested": "[4.6.1, )", @@ -126,6 +132,12 @@ "Microsoft.SourceLink.Common": "8.0.0" } }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "NETStandard.Library": { "type": "Direct", "requested": "[2.0.3, )", @@ -204,9 +216,18 @@ "net10.0": { "Microsoft.NET.ILLink.Tasks": { "type": "Direct", - "requested": "[10.0.0, )", - "resolved": "10.0.0", - "contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA==" + "requested": "[10.0.1, )", + "resolved": "10.0.1", + "contentHash": "ISahzLHsHY7vrwqr2p1YWZ+gsxoBRtH7gWRDK8fDUst9pp2He0GiesaqEfeX0V8QMCJM3eNEHGGpnIcPjFo2NQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": 
"vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" + } }, "Microsoft.SourceLink.GitHub": { "type": "Direct", @@ -218,11 +239,22 @@ "Microsoft.SourceLink.Common": "8.0.0" } }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "Microsoft.Build.Tasks.Git": { "type": "Transitive", "resolved": "8.0.0", "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" }, + "Microsoft.NETFramework.ReferenceAssemblies.net461": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" + }, "Microsoft.SourceLink.Common": { "type": "Transitive", "resolved": "8.0.0", @@ -232,9 +264,18 @@ "net8.0": { "Microsoft.NET.ILLink.Tasks": { "type": "Direct", - "requested": "[10.0.0, )", - "resolved": "10.0.0", - "contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA==" + "requested": "[8.0.22, )", + "resolved": "8.0.22", + "contentHash": "MhcMithKEiyyNkD2ZfbDZPmcOdi0GheGfg8saEIIEfD/fol3iHmcV8TsZkD4ZYz5gdUuoX4YtlVySUU7Sxl9SQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" + } }, "Microsoft.SourceLink.GitHub": { "type": "Direct", @@ -246,11 +287,22 @@ "Microsoft.SourceLink.Common": "8.0.0" } }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + 
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "Microsoft.Build.Tasks.Git": { "type": "Transitive", "resolved": "8.0.0", "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" }, + "Microsoft.NETFramework.ReferenceAssemblies.net461": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" + }, "Microsoft.SourceLink.Common": { "type": "Transitive", "resolved": "8.0.0", diff --git a/tests/SharpCompress.Performance/packages.lock.json b/tests/SharpCompress.Performance/packages.lock.json index 255570ad3..12a15aa7a 100644 --- a/tests/SharpCompress.Performance/packages.lock.json +++ b/tests/SharpCompress.Performance/packages.lock.json @@ -12,6 +12,31 @@ "JetBrains.Profiler.Api": "1.4.10" } }, + "Microsoft.NETFramework.ReferenceAssemblies": { + "type": "Direct", + "requested": "[1.0.3, )", + "resolved": "1.0.3", + "contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==", + "dependencies": { + "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" + } + }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[8.0.0, )", + "resolved": "8.0.0", + "contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "8.0.0", + "Microsoft.SourceLink.Common": "8.0.0" + } + }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "JetBrains.FormatRipper": { "type": "Transitive", "resolved": "2.4.0", @@ -33,6 +58,21 @@ "JetBrains.HabitatDetector": "1.4.5" } }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + 
"resolved": "8.0.0", + "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" + }, + "Microsoft.NETFramework.ReferenceAssemblies.net461": { + "type": "Transitive", + "resolved": "1.0.3", + "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" + }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw==" + }, "sharpcompress": { "type": "Project" } diff --git a/tests/SharpCompress.Test/AdcAsyncTest.cs b/tests/SharpCompress.Test/AdcAsyncTest.cs index 185174aaa..611c10e0c 100644 --- a/tests/SharpCompress.Test/AdcAsyncTest.cs +++ b/tests/SharpCompress.Test/AdcAsyncTest.cs @@ -8,7 +8,7 @@ namespace SharpCompress.Test; public class AdcAsyncTest : TestBase { [Fact] - public async Task TestAdcStreamAsyncWholeChunk() + public async ValueTask TestAdcStreamAsyncWholeChunk() { using var decFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_decompressed.bin")); var decompressed = new byte[decFs.Length]; @@ -24,7 +24,7 @@ public async Task TestAdcStreamAsyncWholeChunk() } [Fact] - public async Task TestAdcStreamAsync() + public async ValueTask TestAdcStreamAsync() { using var decFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_decompressed.bin")); var decompressed = new byte[decFs.Length]; @@ -46,7 +46,7 @@ public async Task TestAdcStreamAsync() } [Fact] - public async Task TestAdcStreamAsyncWithCancellation() + public async ValueTask TestAdcStreamAsyncWithCancellation() { using var cmpFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_compressed.bin")); using var decStream = new ADCStream(cmpFs); diff --git a/tests/SharpCompress.Test/Arc/ArcReaderAsyncTests.cs b/tests/SharpCompress.Test/Arc/ArcReaderAsyncTests.cs new file mode 100644 index 000000000..40ab08e11 --- /dev/null +++ 
b/tests/SharpCompress.Test/Arc/ArcReaderAsyncTests.cs @@ -0,0 +1,24 @@ +using System.Threading.Tasks; +using SharpCompress.Common; +using Xunit; + +namespace SharpCompress.Test.Arc; + +public class ArcReaderAsyncTests : ReaderTests +{ + public ArcReaderAsyncTests() + { + UseExtensionInsteadOfNameToVerify = true; + UseCaseInsensitiveToVerify = true; + } + + [Fact] + public async ValueTask Arc_Uncompressed_Read_Async() => + await ReadAsync("Arc.uncompressed.arc", CompressionType.None); + + [Fact] + public async ValueTask Arc_Squeezed_Read_Async() => await ReadAsync("Arc.squeezed.arc"); + + [Fact] + public async ValueTask Arc_Crunched_Read_Async() => await ReadAsync("Arc.crunched.arc"); +} diff --git a/tests/SharpCompress.Test/Arc/ArcReaderTests.cs b/tests/SharpCompress.Test/Arc/ArcReaderTests.cs index 915e2ae12..54f169c39 100644 --- a/tests/SharpCompress.Test/Arc/ArcReaderTests.cs +++ b/tests/SharpCompress.Test/Arc/ArcReaderTests.cs @@ -1,12 +1,5 @@ using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; using SharpCompress.Common; -using SharpCompress.Readers; -using SharpCompress.Readers.Arc; using Xunit; namespace SharpCompress.Test.Arc diff --git a/tests/SharpCompress.Test/ArchiveTests.cs b/tests/SharpCompress.Test/ArchiveTests.cs index 916c9e2c0..3001b4bde 100644 --- a/tests/SharpCompress.Test/ArchiveTests.cs +++ b/tests/SharpCompress.Test/ArchiveTests.cs @@ -9,6 +9,7 @@ using SharpCompress.Crypto; using SharpCompress.IO; using SharpCompress.Readers; +using SharpCompress.Test.Mocks; using SharpCompress.Writers; using SharpCompress.Writers.Zip; using Xunit; @@ -599,11 +600,18 @@ IEnumerable testArchives throwOnDispose: true ) ) - using (var archive = archiveFactory.Open(stream, readerOptions)) + await using ( + var archive = await archiveFactory.OpenAsync( + new AsyncOnlyStream(stream), + readerOptions + ) + ) { try { - foreach (var entry in archive.Entries.Where(entry => 
!entry.IsDirectory)) + await foreach ( + var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory) + ) { await entry.WriteToDirectoryAsync( SCRATCH_FILES_PATH, diff --git a/tests/SharpCompress.Test/BZip2/BZip2StreamAsyncTests.cs b/tests/SharpCompress.Test/BZip2/BZip2StreamAsyncTests.cs index c801ab0dc..0c2d62d8f 100644 --- a/tests/SharpCompress.Test/BZip2/BZip2StreamAsyncTests.cs +++ b/tests/SharpCompress.Test/BZip2/BZip2StreamAsyncTests.cs @@ -22,7 +22,7 @@ private byte[] CreateTestData(int size) } [Fact] - public async Task BZip2CompressDecompressAsyncTest() + public async ValueTask BZip2CompressDecompressAsyncTest() { var testData = CreateTestData(10000); byte[] compressed; @@ -83,7 +83,7 @@ public async Task BZip2CompressDecompressAsyncTest() } [Fact] - public async Task BZip2ReadAsyncWithCancellationTest() + public async ValueTask BZip2ReadAsyncWithCancellationTest() { var testData = Encoding.ASCII.GetBytes(new string('A', 5000)); // Repetitive data compresses well byte[] compressed; @@ -127,7 +127,7 @@ public async Task BZip2ReadAsyncWithCancellationTest() } [Fact] - public async Task BZip2MultipleAsyncWritesTest() + public async ValueTask BZip2MultipleAsyncWritesTest() { using (var memoryStream = new MemoryStream()) { @@ -179,7 +179,7 @@ public async Task BZip2MultipleAsyncWritesTest() } [Fact] - public async Task BZip2LargeDataAsyncTest() + public async ValueTask BZip2LargeDataAsyncTest() { var largeData = CreateTestData(100000); diff --git a/tests/SharpCompress.Test/ExtractAll.cs b/tests/SharpCompress.Test/ExtractAll.cs index 3e8b7d7a2..d2643fcd9 100644 --- a/tests/SharpCompress.Test/ExtractAll.cs +++ b/tests/SharpCompress.Test/ExtractAll.cs @@ -18,12 +18,12 @@ public class ExtractAllTests : TestBase [InlineData("7Zip.solid.7z")] [InlineData("7Zip.nonsolid.7z")] [InlineData("7Zip.LZMA.7z")] - public async Task ExtractAllEntriesAsync(string archivePath) + public async ValueTask ExtractAllEntriesAsync(string archivePath) { var testArchive = 
Path.Combine(TEST_ARCHIVES_PATH, archivePath); var options = new ExtractionOptions() { ExtractFullPath = true, Overwrite = true }; - using var archive = ArchiveFactory.Open(testArchive); + await using var archive = await ArchiveFactory.OpenAsync(testArchive); await archive.WriteToDirectoryAsync(SCRATCH_FILES_PATH, options); } diff --git a/tests/SharpCompress.Test/GZip/AsyncTests.cs b/tests/SharpCompress.Test/GZip/AsyncTests.cs index 562b82e88..3961ffc62 100644 --- a/tests/SharpCompress.Test/GZip/AsyncTests.cs +++ b/tests/SharpCompress.Test/GZip/AsyncTests.cs @@ -9,6 +9,7 @@ using SharpCompress.Compressors; using SharpCompress.Compressors.Deflate; using SharpCompress.Readers; +using SharpCompress.Test.Mocks; using SharpCompress.Writers; using Xunit; @@ -17,7 +18,7 @@ namespace SharpCompress.Test.GZip; public class AsyncTests : TestBase { [Fact] - public async Task Reader_Async_Extract_All() + public async ValueTask Reader_Async_Extract_All() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); #if NETFRAMEWORK @@ -25,7 +26,7 @@ public async Task Reader_Async_Extract_All() #else await using var stream = File.OpenRead(testArchive); #endif - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); await reader.WriteAllToDirectoryAsync( SCRATCH_FILES_PATH, @@ -42,7 +43,7 @@ await reader.WriteAllToDirectoryAsync( } [Fact] - public async Task Reader_Async_Extract_Single_Entry() + public async ValueTask Reader_Async_Extract_Single_Entry() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); #if NETFRAMEWORK @@ -50,9 +51,9 @@ public async Task Reader_Async_Extract_Single_Entry() #else await using var stream = File.OpenRead(testArchive); #endif - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if 
(!reader.Entry.IsDirectory) { @@ -70,7 +71,7 @@ public async Task Reader_Async_Extract_Single_Entry() } [Fact] - public async Task Archive_Entry_Async_Open_Stream() + public async ValueTask Archive_Entry_Async_Open_Stream() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); using var archive = ArchiveFactory.Open(testArchive); @@ -93,32 +94,49 @@ public async Task Archive_Entry_Async_Open_Stream() } [Fact] - public async Task Writer_Async_Write_Single_File() + public async ValueTask Writer_Async_Write_Single_File() { var outputPath = Path.Combine(SCRATCH_FILES_PATH, "async_test.zip"); + +#if NETFRAMEWORK using (var stream = File.Create(outputPath)) +#else + await using (var stream = File.Create(outputPath)) +#endif using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate)) { var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); + +#if NETFRAMEWORK using var fileStream = File.OpenRead(testFile); +#else + await using var fileStream = File.OpenRead(testFile); +#endif await writer.WriteAsync("test_entry.bin", fileStream, new DateTime(2023, 1, 1)); } // Verify the archive was created and contains the entry Assert.True(File.Exists(outputPath)); - using var archive = ZipArchive.Open(outputPath); + await using var archive = ZipArchive.Open(outputPath); Assert.Single(archive.Entries.Where(e => !e.IsDirectory)); } [Fact] - public async Task Async_With_Cancellation_Token() + public async ValueTask Async_With_Cancellation_Token() { using var cts = new CancellationTokenSource(); cts.CancelAfter(10000); // 10 seconds should be plenty var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); +#if NETFRAMEWORK using var stream = File.OpenRead(testArchive); - using var reader = ReaderFactory.Open(stream); +#else + await using var stream = File.OpenRead(testArchive); +#endif + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + cancellationToken: cts.Token + ); await 
reader.WriteAllToDirectoryAsync( SCRATCH_FILES_PATH, @@ -136,12 +154,17 @@ await reader.WriteAllToDirectoryAsync( } [Fact] - public async Task Stream_Extensions_Async() + public async ValueTask Stream_Extensions_Async() { var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); - using var inputStream = File.OpenRead(testFile); var outputPath = Path.Combine(SCRATCH_FILES_PATH, "async_copy.bin"); +#if NETFRAMEWORK + using var inputStream = File.OpenRead(testFile); using var outputStream = File.Create(outputPath); +#else + await using var inputStream = File.OpenRead(testFile); + await using var outputStream = File.Create(outputPath); +#endif // Test the async extension method var buffer = new byte[8192]; @@ -156,17 +179,25 @@ public async Task Stream_Extensions_Async() } [Fact] - public async Task EntryStream_ReadAsync_Works() + public async ValueTask EntryStream_ReadAsync_Works() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"); +#if NETFRAMEWORK using var stream = File.OpenRead(testArchive); - using var reader = ReaderFactory.Open(stream); +#else + await using var stream = File.OpenRead(testArchive); +#endif + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { - using var entryStream = reader.OpenEntryStream(); +#if NETFRAMEWORK + using var entryStream = await reader.OpenEntryStreamAsync(); +#else + await using var entryStream = await reader.OpenEntryStreamAsync(); +#endif var buffer = new byte[4096]; var totalRead = 0; int bytesRead; @@ -184,7 +215,7 @@ public async Task EntryStream_ReadAsync_Works() } [Fact] - public async Task CompressionStream_Async_ReadWrite() + public async ValueTask CompressionStream_Async_ReadWrite() { var testData = new byte[1024]; new Random(42).NextBytes(testData); @@ -192,8 +223,13 @@ public async Task CompressionStream_Async_ReadWrite() var compressedPath = 
Path.Combine(SCRATCH_FILES_PATH, "async_compressed.gz"); // Test async write with GZipStream +#if NETFRAMEWORK using (var fileStream = File.Create(compressedPath)) using (var gzipStream = new GZipStream(fileStream, CompressionMode.Compress)) +#else + await using (var fileStream = File.Create(compressedPath)) + await using (var gzipStream = new GZipStream(fileStream, CompressionMode.Compress)) +#endif { await gzipStream.WriteAsync(testData, 0, testData.Length); await gzipStream.FlushAsync(); @@ -201,10 +237,14 @@ public async Task CompressionStream_Async_ReadWrite() Assert.True(File.Exists(compressedPath)); Assert.True(new FileInfo(compressedPath).Length > 0); - +#if NETFRAMEWORK + using (var fileStream = File.OpenRead(compressedPath)) + using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress)) +#else // Test async read with GZipStream - using (var fileStream = File.OpenRead(compressedPath)) - using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress)) + await using (var fileStream = File.OpenRead(compressedPath)) + await using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress)) +#endif { var decompressed = new byte[testData.Length]; var totalRead = 0; diff --git a/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs index 016c6fc0b..caa30ffa1 100644 --- a/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/GZip/GZipArchiveAsyncTests.cs @@ -14,9 +14,13 @@ public class GZipArchiveAsyncTests : ArchiveTests public GZipArchiveAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task GZip_Archive_Generic_Async() + public async ValueTask GZip_Archive_Generic_Async() { +#if NETFRAMEWORK using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#else + await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#endif using (var archive = 
ArchiveFactory.Open(stream)) { var entry = archive.Entries.First(); @@ -36,10 +40,14 @@ public async Task GZip_Archive_Generic_Async() } [Fact] - public async Task GZip_Archive_Async() + public async ValueTask GZip_Archive_Async() { +#if NETFRAMEWORK using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) - using (var archive = GZipArchive.Open(stream)) +#else + await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#endif + await using (var archive = GZipArchive.Open(stream)) { var entry = archive.Entries.First(); await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull())); @@ -58,47 +66,72 @@ public async Task GZip_Archive_Async() } [Fact] - public async Task GZip_Archive_NoAdd_Async() + public async ValueTask GZip_Archive_NoAdd_Async() { var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg"); +#if NETFRAMEWORK using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); - using var archive = GZipArchive.Open(stream); +#else + await using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); +#endif + await using var archive = GZipArchive.Open(stream); Assert.Throws(() => archive.AddEntry("jpg\\test.jpg", jpg)); await archive.SaveToAsync(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar.gz")); } [Fact] - public async Task GZip_Archive_Multiple_Reads_Async() + public async ValueTask GZip_Archive_Multiple_Reads_Async() { var inputStream = new MemoryStream(); - using (var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#if NETFRAMEWORK + using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#else + await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"))) +#endif { - await fileStream.CopyToAsync(inputStream); + await stream.CopyToAsync(inputStream); inputStream.Position = 0; } - using var archive = GZipArchive.Open(inputStream); + + 
await using var archive = GZipArchive.Open(inputStream); var archiveEntry = archive.Entries.First(); MemoryStream tarStream; - using (var entryStream = archiveEntry.OpenEntryStream()) +#if NETFRAMEWORK + using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#else + await using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#endif { tarStream = new MemoryStream(); await entryStream.CopyToAsync(tarStream); } var size = tarStream.Length; - using (var entryStream = archiveEntry.OpenEntryStream()) +#if NETFRAMEWORK + using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#else + await using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#endif { tarStream = new MemoryStream(); await entryStream.CopyToAsync(tarStream); } Assert.Equal(size, tarStream.Length); - using (var entryStream = archiveEntry.OpenEntryStream()) +#if NETFRAMEWORK + using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#else + await using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#endif { var result = TarArchive.IsTarFile(entryStream); Assert.True(result); } Assert.Equal(size, tarStream.Length); - using (var entryStream = archiveEntry.OpenEntryStream()) +#if NETFRAMEWORK + using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#else + await using (var entryStream = await archiveEntry.OpenEntryStreamAsync()) +#endif { tarStream = new MemoryStream(); await entryStream.CopyToAsync(tarStream); diff --git a/tests/SharpCompress.Test/GZip/GZipReaderAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipReaderAsyncTests.cs index 20e9a34fb..befd1440b 100644 --- a/tests/SharpCompress.Test/GZip/GZipReaderAsyncTests.cs +++ b/tests/SharpCompress.Test/GZip/GZipReaderAsyncTests.cs @@ -14,16 +14,16 @@ public class GZipReaderAsyncTests : ReaderTests public GZipReaderAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task GZip_Reader_Generic_Async() => + public async ValueTask 
GZip_Reader_Generic_Async() => await ReadAsync("Tar.tar.gz", CompressionType.GZip); [Fact] - public async Task GZip_Reader_Generic2_Async() + public async ValueTask GZip_Reader_Generic2_Async() { //read only as GZip item using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")); - using var reader = GZipReader.Open(new SharpCompressStream(stream)); - while (reader.MoveToNextEntry()) + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); + while (await reader.MoveToNextEntryAsync()) { Assert.NotEqual(0, reader.Entry.Size); Assert.NotEqual(0, reader.Entry.Crc); @@ -31,7 +31,7 @@ public async Task GZip_Reader_Generic2_Async() // Use async overload for reading the entry if (!reader.Entry.IsDirectory) { - using var entryStream = reader.OpenEntryStream(); + using var entryStream = await reader.OpenEntryStreamAsync(); using var ms = new MemoryStream(); await entryStream.CopyToAsync(ms); } @@ -56,7 +56,7 @@ protected async Task ReadAsync( VerifyFiles(); } - private async Task ReadImplAsync( + private async ValueTask ReadImplAsync( string testArchive, CompressionType expectedCompression, ReaderOptions options @@ -70,7 +70,7 @@ ReaderOptions options bufferSize: options.BufferSize ); using var testStream = new TestStream(protectedStream); - using (var reader = ReaderFactory.Open(testStream, options)) + await using (var reader = await ReaderFactory.OpenAsync(testStream, options, default)) { await UseReaderAsync(reader, expectedCompression); protectedStream.ThrowOnDispose = false; @@ -82,9 +82,9 @@ ReaderOptions options Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message); } - private async Task UseReaderAsync(IReader reader, CompressionType expectedCompression) + private async ValueTask UseReaderAsync(IAsyncReader reader, CompressionType expectedCompression) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { diff --git 
a/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs index ead377edc..3ada3c524 100644 --- a/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs +++ b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs @@ -13,7 +13,7 @@ public GZipWriterAsyncTests() : base(ArchiveType.GZip) => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task GZip_Writer_Generic_Async() + public async ValueTask GZip_Writer_Generic_Async() { using ( Stream stream = File.Open( @@ -33,7 +33,7 @@ public async Task GZip_Writer_Generic_Async() } [Fact] - public async Task GZip_Writer_Async() + public async ValueTask GZip_Writer_Async() { using ( Stream stream = File.Open( @@ -61,7 +61,7 @@ public void GZip_Writer_Generic_Bad_Compression_Async() => }); [Fact] - public async Task GZip_Writer_Entry_Path_With_Dir_Async() + public async ValueTask GZip_Writer_Entry_Path_With_Dir_Async() { using ( Stream stream = File.Open( diff --git a/tests/SharpCompress.Test/Mocks/AsyncOnlyStream.cs b/tests/SharpCompress.Test/Mocks/AsyncOnlyStream.cs new file mode 100644 index 000000000..d0b363e0c --- /dev/null +++ b/tests/SharpCompress.Test/Mocks/AsyncOnlyStream.cs @@ -0,0 +1,70 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace SharpCompress.Test.Mocks; + +public class AsyncOnlyStream : Stream +{ + private readonly Stream _stream; + + public AsyncOnlyStream(Stream stream) + { + _stream = stream; + // Console.WriteLine("AsyncOnlyStream created"); + } + + public override bool CanRead => _stream.CanRead; + public override bool CanSeek => _stream.CanSeek; + public override bool CanWrite => _stream.CanWrite; + public override long Length => _stream.Length; + public override long Position + { + get => _stream.Position; + set => _stream.Position = value; + } + + public override void Flush() => _stream.Flush(); + + public override int Read(byte[] buffer, int offset, int count) + { + throw new 
NotSupportedException("Synchronous Read is not supported"); + } + + public override Task ReadAsync( + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken + ) + { + return _stream.ReadAsync(buffer, offset, count, cancellationToken); + } + +#if !NETFRAMEWORK && !NETSTANDARD2_0 + public override ValueTask ReadAsync( + Memory buffer, + CancellationToken cancellationToken = default + ) + { + return _stream.ReadAsync(buffer, cancellationToken); + } +#endif + + public override long Seek(long offset, SeekOrigin origin) => _stream.Seek(offset, origin); + + public override void SetLength(long value) => _stream.SetLength(value); + + public override void Write(byte[] buffer, int offset, int count) => + _stream.Write(buffer, offset, count); + + protected override void Dispose(bool disposing) + { + if (disposing) + { + _stream.Dispose(); + } + base.Dispose(disposing); + } +} diff --git a/tests/SharpCompress.Test/Mocks/TestStream.cs b/tests/SharpCompress.Test/Mocks/TestStream.cs index da7d65cc6..37e1e8085 100644 --- a/tests/SharpCompress.Test/Mocks/TestStream.cs +++ b/tests/SharpCompress.Test/Mocks/TestStream.cs @@ -1,4 +1,7 @@ -using System.IO; +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace SharpCompress.Test.Mocks; @@ -35,6 +38,20 @@ public override long Position public override int Read(byte[] buffer, int offset, int count) => stream.Read(buffer, offset, count); + public override Task ReadAsync( + byte[] buffer, + int offset, + int count, + CancellationToken cancellationToken + ) => stream.ReadAsync(buffer, offset, count, cancellationToken); + +#if !NETFRAMEWORK && !NETSTANDARD2_0 + public override ValueTask ReadAsync( + Memory buffer, + CancellationToken cancellationToken = default + ) => stream.ReadAsync(buffer, cancellationToken); +#endif + public override long Seek(long offset, SeekOrigin origin) => stream.Seek(offset, origin); public override void SetLength(long value) => 
stream.SetLength(value); diff --git a/tests/SharpCompress.Test/ProgressReportTests.cs b/tests/SharpCompress.Test/ProgressReportTests.cs index 75fa21164..2f0dd8021 100644 --- a/tests/SharpCompress.Test/ProgressReportTests.cs +++ b/tests/SharpCompress.Test/ProgressReportTests.cs @@ -7,7 +7,9 @@ using SharpCompress.Archives; using SharpCompress.Archives.Zip; using SharpCompress.Common; +using SharpCompress.IO; using SharpCompress.Readers; +using SharpCompress.Test.Mocks; using SharpCompress.Writers; using SharpCompress.Writers.Tar; using SharpCompress.Writers.Zip; @@ -164,7 +166,7 @@ public void ZipArchive_Entry_WriteTo_ReportsProgress() } [Fact] - public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress() + public async ValueTask ZipArchive_Entry_WriteToAsync_ReportsProgress() { var progress = new TestProgress(); @@ -383,7 +385,7 @@ public void TarArchive_Entry_WriteTo_ReportsProgress() } [Fact] - public async Task TarArchive_Entry_WriteToAsync_ReportsProgress() + public async ValueTask TarArchive_Entry_WriteToAsync_ReportsProgress() { var progress = new TestProgress(); @@ -519,7 +521,7 @@ public void ZipArchive_MultipleEntries_WriteTo_ReportsProgress() } [Fact] - public async Task Zip_ReadAsync_ReportsProgress() + public async ValueTask Zip_ReadAsync_ReportsProgress() { var progress = new TestProgress(); @@ -538,9 +540,14 @@ public async Task Zip_ReadAsync_ReportsProgress() archiveStream.Position = 0; var readerOptions = new ReaderOptions { Progress = progress }; - using (var reader = ReaderFactory.Open(archiveStream, readerOptions)) + await using ( + var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(archiveStream), + readerOptions + ) + ) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { @@ -582,7 +589,7 @@ public void GZip_Write_ReportsProgress() } [Fact] - public async Task Tar_WriteAsync_ReportsProgress() + public async ValueTask Tar_WriteAsync_ReportsProgress() { var 
progress = new TestProgress(); diff --git a/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs b/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs index 1c6a33f0a..acfe4c27a 100644 --- a/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs @@ -13,58 +13,58 @@ namespace SharpCompress.Test.Rar; public class RarArchiveAsyncTests : ArchiveTests { [Fact] - public async Task Rar_EncryptedFileAndHeader_Archive_Async() => + public async ValueTask Rar_EncryptedFileAndHeader_Archive_Async() => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", "test"); [Fact] - public async Task Rar_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() => + public async ValueTask Rar_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() => await Assert.ThrowsAsync( typeof(CryptographicException), async () => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", null) ); [Fact] - public async Task Rar5_EncryptedFileAndHeader_Archive_Async() => + public async ValueTask Rar5_EncryptedFileAndHeader_Archive_Async() => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test"); [Fact] - public async Task Rar5_EncryptedFileAndHeader_Archive_Err_Async() => + public async ValueTask Rar5_EncryptedFileAndHeader_Archive_Err_Async() => await Assert.ThrowsAsync( typeof(CryptographicException), async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "failed") ); [Fact] - public async Task Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() => + public async ValueTask Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() => await Assert.ThrowsAsync( typeof(CryptographicException), async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", null) ); [Fact] - public async Task Rar_EncryptedFileOnly_Archive_Async() => + public async ValueTask Rar_EncryptedFileOnly_Archive_Async() => await ReadRarPasswordAsync("Rar.encrypted_filesOnly.rar", "test"); [Fact] - 
public async Task Rar_EncryptedFileOnly_Archive_Err_Async() => + public async ValueTask Rar_EncryptedFileOnly_Archive_Err_Async() => await Assert.ThrowsAsync( typeof(CryptographicException), async () => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "failed") ); [Fact] - public async Task Rar5_EncryptedFileOnly_Archive_Async() => + public async ValueTask Rar5_EncryptedFileOnly_Archive_Async() => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "test"); [Fact] - public async Task Rar_Encrypted_Archive_Async() => + public async ValueTask Rar_Encrypted_Archive_Async() => await ReadRarPasswordAsync("Rar.Encrypted.rar", "test"); [Fact] - public async Task Rar5_Encrypted_Archive_Async() => + public async ValueTask Rar5_Encrypted_Archive_Async() => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test"); - private async Task ReadRarPasswordAsync(string testArchive, string? password) + private async ValueTask ReadRarPasswordAsync(string testArchive, string? 
password) { using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive))) using ( @@ -90,7 +90,7 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_Multi_Archive_Encrypted_Async() => + public async ValueTask Rar_Multi_Archive_Encrypted_Async() => await Assert.ThrowsAsync(async () => await ArchiveFileReadPasswordAsync("Rar.EncryptedParts.part01.rar", "test") ); @@ -116,24 +116,25 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_None_ArchiveStreamRead_Async() => + public async ValueTask Rar_None_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar.none.rar"); [Fact] - public async Task Rar5_None_ArchiveStreamRead_Async() => + public async ValueTask Rar5_None_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar5.none.rar"); [Fact] - public async Task Rar_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar.rar"); + public async ValueTask Rar_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar.rar"); [Fact] - public async Task Rar5_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar5.rar"); + public async ValueTask Rar5_ArchiveStreamRead_Async() => + await ArchiveStreamReadAsync("Rar5.rar"); [Fact] - public async Task Rar_test_invalid_exttime_ArchiveStreamRead_Async() => + public async ValueTask Rar_test_invalid_exttime_ArchiveStreamRead_Async() => await DoRar_test_invalid_exttime_ArchiveStreamReadAsync("Rar.test_invalid_exttime.rar"); - private async Task DoRar_test_invalid_exttime_ArchiveStreamReadAsync(string filename) + private async ValueTask DoRar_test_invalid_exttime_ArchiveStreamReadAsync(string filename) { using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)); using var archive = ArchiveFactory.Open(stream); @@ -147,7 +148,7 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_Jpg_ArchiveStreamRead_Async() + public async ValueTask Rar_Jpg_ArchiveStreamRead_Async() { using var stream = 
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg")); using (var archive = RarArchive.Open(stream, new ReaderOptions { LookForHeader = true })) @@ -164,14 +165,14 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_IsSolidArchiveCheck_Async() => + public async ValueTask Rar_IsSolidArchiveCheck_Async() => await DoRar_IsSolidArchiveCheckAsync("Rar.rar"); [Fact] - public async Task Rar5_IsSolidArchiveCheck_Async() => + public async ValueTask Rar5_IsSolidArchiveCheck_Async() => await DoRar_IsSolidArchiveCheckAsync("Rar5.rar"); - private async Task DoRar_IsSolidArchiveCheckAsync(string filename) + private async ValueTask DoRar_IsSolidArchiveCheckAsync(string filename) { using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename))) { @@ -189,10 +190,10 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_IsSolidEntryStreamCheck_Async() => + public async ValueTask Rar_IsSolidEntryStreamCheck_Async() => await DoRar_IsSolidEntryStreamCheckAsync("Rar.solid.rar"); - private async Task DoRar_IsSolidEntryStreamCheckAsync(string filename) + private async ValueTask DoRar_IsSolidEntryStreamCheckAsync(string filename) { using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)); using var archive = RarArchive.Open(stream); @@ -218,23 +219,23 @@ private async Task DoRar_IsSolidEntryStreamCheckAsync(string filename) } [Fact] - public async Task Rar_Solid_ArchiveStreamRead_Async() => + public async ValueTask Rar_Solid_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar.solid.rar"); [Fact] - public async Task Rar5_Solid_ArchiveStreamRead_Async() => + public async ValueTask Rar5_Solid_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar5.solid.rar"); [Fact] - public async Task Rar_Solid_StreamRead_Extract_All_Async() => + public async ValueTask Rar_Solid_StreamRead_Extract_All_Async() => await ArchiveStreamReadExtractAllAsync("Rar.solid.rar", CompressionType.Rar); [Fact] - public 
async Task Rar5_Solid_StreamRead_Extract_All_Async() => + public async ValueTask Rar5_Solid_StreamRead_Extract_All_Async() => await ArchiveStreamReadExtractAllAsync("Rar5.solid.rar", CompressionType.Rar); [Fact] - public async Task Rar_Multi_ArchiveStreamRead_Async() => + public async ValueTask Rar_Multi_ArchiveStreamRead_Async() => await DoRar_Multi_ArchiveStreamReadAsync( [ "Rar.multi.part01.rar", @@ -248,7 +249,7 @@ await DoRar_Multi_ArchiveStreamReadAsync( ); [Fact] - public async Task Rar5_Multi_ArchiveStreamRead_Async() => + public async ValueTask Rar5_Multi_ArchiveStreamRead_Async() => await DoRar_Multi_ArchiveStreamReadAsync( [ "Rar5.multi.part01.rar", @@ -261,7 +262,7 @@ await DoRar_Multi_ArchiveStreamReadAsync( false ); - private async Task DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid) + private async ValueTask DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid) { using var archive = RarArchive.Open( archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead) @@ -277,7 +278,7 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar5_MultiSolid_ArchiveStreamRead_Async() => + public async ValueTask Rar5_MultiSolid_ArchiveStreamRead_Async() => await DoRar_Multi_ArchiveStreamReadAsync( [ "Rar.multi.solid.part01.rar", @@ -291,24 +292,25 @@ await DoRar_Multi_ArchiveStreamReadAsync( ); [Fact] - public async Task RarNoneArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.none.rar"); + public async ValueTask RarNoneArchiveFileRead_Async() => + await ArchiveFileReadAsync("Rar.none.rar"); [Fact] - public async Task Rar5NoneArchiveFileRead_Async() => + public async ValueTask Rar5NoneArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.none.rar"); [Fact] - public async Task Rar_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.rar"); + public async ValueTask Rar_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.rar"); [Fact] - public async Task 
Rar5_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.rar"); + public async ValueTask Rar5_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.rar"); [Fact] - public async Task Rar_ArchiveFileRead_HasDirectories_Async() => + public async ValueTask Rar_ArchiveFileRead_HasDirectories_Async() => await DoRar_ArchiveFileRead_HasDirectoriesAsync("Rar.rar"); [Fact] - public async Task Rar5_ArchiveFileRead_HasDirectories_Async() => + public async ValueTask Rar5_ArchiveFileRead_HasDirectories_Async() => await DoRar_ArchiveFileRead_HasDirectoriesAsync("Rar5.rar"); private Task DoRar_ArchiveFileRead_HasDirectoriesAsync(string filename) @@ -321,7 +323,7 @@ private Task DoRar_ArchiveFileRead_HasDirectoriesAsync(string filename) } [Fact] - public async Task Rar_Jpg_ArchiveFileRead_Async() + public async ValueTask Rar_Jpg_ArchiveFileRead_Async() { using ( var archive = RarArchive.Open( @@ -342,15 +344,15 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Rar_Solid_ArchiveFileRead_Async() => + public async ValueTask Rar_Solid_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.solid.rar"); [Fact] - public async Task Rar5_Solid_ArchiveFileRead_Async() => + public async ValueTask Rar5_Solid_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.solid.rar"); [Fact] - public async Task Rar2_Multi_ArchiveStreamRead_Async() => + public async ValueTask Rar2_Multi_ArchiveStreamRead_Async() => await DoRar_Multi_ArchiveStreamReadAsync( [ "Rar2.multi.rar", @@ -365,14 +367,14 @@ await DoRar_Multi_ArchiveStreamReadAsync( ); [Fact] - public async Task Rar2_Multi_ArchiveFileRead_Async() => + public async ValueTask Rar2_Multi_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar2.multi.rar"); [Fact] - public async Task Rar2_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar2.rar"); + public async ValueTask Rar2_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar2.rar"); [Fact] - public async Task Rar15_ArchiveFileRead_Async() + 
public async ValueTask Rar15_ArchiveFileRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -420,11 +422,11 @@ public void Rar5_ArchiveVersionTest_Async() } [Fact] - public async Task Rar4_Multi_ArchiveFileRead_Async() => + public async ValueTask Rar4_Multi_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar4.multi.part01.rar"); [Fact] - public async Task Rar4_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar4.rar"); + public async ValueTask Rar4_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar4.rar"); [Fact] public void Rar_GetPartsSplit_Async() => @@ -471,7 +473,7 @@ public void Rar_GetPartsNew_Async() => ); [Fact] - public async Task Rar4_Multi_ArchiveStreamRead_Async() => + public async ValueTask Rar4_Multi_ArchiveStreamRead_Async() => await DoRar_Multi_ArchiveStreamReadAsync( [ "Rar4.multi.part01.rar", @@ -486,7 +488,7 @@ await DoRar_Multi_ArchiveStreamReadAsync( ); [Fact] - public async Task Rar4_Split_ArchiveStreamRead_Async() => + public async ValueTask Rar4_Split_ArchiveStreamRead_Async() => await ArchiveStreamMultiReadAsync( null, [ @@ -500,19 +502,19 @@ await ArchiveStreamMultiReadAsync( ); [Fact] - public async Task Rar4_Multi_ArchiveFirstFileRead_Async() => + public async ValueTask Rar4_Multi_ArchiveFirstFileRead_Async() => await ArchiveFileReadAsync("Rar4.multi.part01.rar"); [Fact] - public async Task Rar4_Split_ArchiveFirstFileRead_Async() => + public async ValueTask Rar4_Split_ArchiveFirstFileRead_Async() => await ArchiveFileReadAsync("Rar4.split.001"); [Fact] - public async Task Rar4_Split_ArchiveStreamFirstFileRead_Async() => + public async ValueTask Rar4_Split_ArchiveStreamFirstFileRead_Async() => await ArchiveStreamMultiReadAsync(null, ["Rar4.split.001"]); [Fact] - public async Task Rar4_Split_ArchiveOpen_Async() => + public async ValueTask Rar4_Split_ArchiveOpen_Async() => await ArchiveOpenStreamReadAsync( null, "Rar4.split.001", @@ -524,7 +526,7 @@ await 
ArchiveOpenStreamReadAsync( ); [Fact] - public async Task Rar4_Multi_ArchiveOpen_Async() => + public async ValueTask Rar4_Multi_ArchiveOpen_Async() => await ArchiveOpenStreamReadAsync( null, "Rar4.multi.part01.rar", @@ -555,11 +557,11 @@ public void Rar4_Multi_ArchiveOpenEntryVolumeIndexTest_Async() => ); [Fact] - public async Task Rar_Multi_ArchiveFileRead_Async() => + public async ValueTask Rar_Multi_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.multi.part01.rar"); [Fact] - public async Task Rar5_Multi_ArchiveFileRead_Async() => + public async ValueTask Rar5_Multi_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.multi.part01.rar"); [Fact] @@ -592,7 +594,7 @@ private void DoRar_IsFirstVolume_False(string notFirstFilename) } [Fact] - public async Task Rar5_CRC_Blake2_Archive_Async() => + public async ValueTask Rar5_CRC_Blake2_Archive_Async() => await ArchiveFileReadAsync("Rar5.crc_blake2.rar"); [Fact] @@ -625,7 +627,7 @@ public void Rar5_Encrypted_Iterate_Archive_Async() => "Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe" ); - private async Task ArchiveStreamReadAsync(string testArchive) + private async ValueTask ArchiveStreamReadAsync(string testArchive) { testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); using var stream = File.OpenRead(testArchive); @@ -640,18 +642,18 @@ await entry.WriteToDirectoryAsync( VerifyFiles(); } - private async Task ArchiveStreamReadExtractAllAsync( + private async ValueTask ArchiveStreamReadExtractAllAsync( string testArchive, CompressionType compression ) { testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); using var stream = File.OpenRead(testArchive); - using var archive = ArchiveFactory.Open(stream); - Assert.True(archive.IsSolid); - using (var reader = archive.ExtractAllEntries()) + await using var archive = await ArchiveFactory.OpenAsync(stream); + Assert.True(await archive.IsSolidAsync()); + await using (var reader = await archive.ExtractAllEntriesAsync()) { - while 
(reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { @@ -665,7 +667,7 @@ await reader.WriteEntryToDirectoryAsync( } VerifyFiles(); - foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory)) + await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory)) { await entry.WriteToDirectoryAsync( SCRATCH_FILES_PATH, @@ -675,7 +677,7 @@ await entry.WriteToDirectoryAsync( VerifyFiles(); } - private async Task ArchiveFileReadAsync(string testArchive) + private async ValueTask ArchiveFileReadAsync(string testArchive) { testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); using var archive = ArchiveFactory.Open(testArchive); @@ -689,7 +691,7 @@ await entry.WriteToDirectoryAsync( VerifyFiles(); } - private async Task ArchiveStreamMultiReadAsync( + private async ValueTask ArchiveStreamMultiReadAsync( ReaderOptions? readerOptions, params string[] testArchives ) @@ -706,7 +708,7 @@ await entry.WriteToDirectoryAsync( VerifyFiles(); } - private async Task ArchiveOpenStreamReadAsync( + private async ValueTask ArchiveOpenStreamReadAsync( ReaderOptions? 
readerOptions, params string[] testArchives ) diff --git a/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs b/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs index ed497c7b9..6fd783146 100644 --- a/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs @@ -7,6 +7,7 @@ using SharpCompress.Common; using SharpCompress.Readers; using SharpCompress.Readers.Rar; +using SharpCompress.Test.Mocks; using Xunit; namespace SharpCompress.Test.Rar; @@ -14,7 +15,7 @@ namespace SharpCompress.Test.Rar; public class RarReaderAsyncTests : ReaderTests { [Fact] - public async Task Rar_Multi_Reader_Async() => + public async ValueTask Rar_Multi_Reader_Async() => await DoRar_Multi_Reader_Async([ "Rar.multi.part01.rar", "Rar.multi.part02.rar", @@ -25,7 +26,7 @@ await DoRar_Multi_Reader_Async([ ]); [Fact] - public async Task Rar5_Multi_Reader_Async() => + public async ValueTask Rar5_Multi_Reader_Async() => await DoRar_Multi_Reader_Async([ "Rar5.multi.part01.rar", "Rar5.multi.part02.rar", @@ -35,7 +36,7 @@ await DoRar_Multi_Reader_Async([ "Rar5.multi.part06.rar", ]); - private async Task DoRar_Multi_Reader_Async(string[] archives) + private async ValueTask DoRar_Multi_Reader_Async(string[] archives) { using ( var reader = RarReader.Open( @@ -45,7 +46,7 @@ private async Task DoRar_Multi_Reader_Async(string[] archives) ) ) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { await reader.WriteEntryToDirectoryAsync( SCRATCH_FILES_PATH, @@ -57,7 +58,7 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Rar_Multi_Reader_Encrypted_Async() => + public async ValueTask Rar_Multi_Reader_Encrypted_Async() => await Assert.ThrowsAsync(async () => { string[] archives = @@ -78,7 +79,7 @@ await Assert.ThrowsAsync(async () => ) ) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { await reader.WriteEntryToDirectoryAsync( SCRATCH_FILES_PATH, @@ -90,7 +91,7 @@ 
await reader.WriteEntryToDirectoryAsync( }); [Fact] - public async Task Rar_Multi_Reader_Delete_Files_Async() => + public async ValueTask Rar_Multi_Reader_Delete_Files_Async() => await DoRar_Multi_Reader_Delete_Files_Async([ "Rar.multi.part01.rar", "Rar.multi.part02.rar", @@ -101,7 +102,7 @@ await DoRar_Multi_Reader_Delete_Files_Async([ ]); [Fact] - public async Task Rar5_Multi_Reader_Delete_Files_Async() => + public async ValueTask Rar5_Multi_Reader_Delete_Files_Async() => await DoRar_Multi_Reader_Delete_Files_Async([ "Rar5.multi.part01.rar", "Rar5.multi.part02.rar", @@ -111,7 +112,7 @@ await DoRar_Multi_Reader_Delete_Files_Async([ "Rar5.multi.part06.rar", ]); - private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives) + private async ValueTask DoRar_Multi_Reader_Delete_Files_Async(string[] archives) { foreach (var file in archives) { @@ -126,7 +127,7 @@ private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives) .ToList(); using (var reader = RarReader.Open(streams)) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { await reader.WriteEntryToDirectoryAsync( SCRATCH_FILES_PATH, @@ -147,48 +148,48 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Rar_None_Reader_Async() => + public async ValueTask Rar_None_Reader_Async() => await ReadAsync("Rar.none.rar", CompressionType.Rar); [Fact] - public async Task Rar5_None_Reader_Async() => + public async ValueTask Rar5_None_Reader_Async() => await ReadAsync("Rar5.none.rar", CompressionType.Rar); [Fact] - public async Task Rar_Reader_Async() => await ReadAsync("Rar.rar", CompressionType.Rar); + public async ValueTask Rar_Reader_Async() => await ReadAsync("Rar.rar", CompressionType.Rar); [Fact] - public async Task Rar5_Reader_Async() => await ReadAsync("Rar5.rar", CompressionType.Rar); + public async ValueTask Rar5_Reader_Async() => await ReadAsync("Rar5.rar", CompressionType.Rar); [Fact] - public async Task 
Rar5_CRC_Blake2_Reader_Async() => + public async ValueTask Rar5_CRC_Blake2_Reader_Async() => await ReadAsync("Rar5.crc_blake2.rar", CompressionType.Rar); [Fact] - public async Task Rar_EncryptedFileAndHeader_Reader_Async() => + public async ValueTask Rar_EncryptedFileAndHeader_Reader_Async() => await ReadRar_Async("Rar.encrypted_filesAndHeader.rar", "test"); [Fact] - public async Task Rar5_EncryptedFileAndHeader_Reader_Async() => + public async ValueTask Rar5_EncryptedFileAndHeader_Reader_Async() => await ReadRar_Async("Rar5.encrypted_filesAndHeader.rar", "test"); [Fact] - public async Task Rar_EncryptedFileOnly_Reader_Async() => + public async ValueTask Rar_EncryptedFileOnly_Reader_Async() => await ReadRar_Async("Rar.encrypted_filesOnly.rar", "test"); [Fact] - public async Task Rar5_EncryptedFileOnly_Reader_Async() => + public async ValueTask Rar5_EncryptedFileOnly_Reader_Async() => await ReadRar_Async("Rar5.encrypted_filesOnly.rar", "test"); [Fact] - public async Task Rar_Encrypted_Reader_Async() => + public async ValueTask Rar_Encrypted_Reader_Async() => await ReadRar_Async("Rar.Encrypted.rar", "test"); [Fact] - public async Task Rar5_Encrypted_Reader_Async() => + public async ValueTask Rar5_Encrypted_Reader_Async() => await ReadRar_Async("Rar5.encrypted_filesOnly.rar", "test"); - private async Task ReadRar_Async(string testArchive, string password) => + private async ValueTask ReadRar_Async(string testArchive, string password) => await ReadAsync( testArchive, CompressionType.Rar, @@ -196,15 +197,15 @@ await ReadAsync( ); [Fact] - public async Task Rar_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar.rar"); + public async ValueTask Rar_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar.rar"); [Fact] - public async Task Rar5_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar5.rar"); + public async ValueTask Rar5_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar5.rar"); - private async Task DoRar_Entry_Stream_Async(string 
filename) + private async ValueTask DoRar_Entry_Stream_Async(string filename) { using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename))) - using (var reader = ReaderFactory.Open(stream)) + await using (var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream))) { while (await reader.MoveToNextEntryAsync()) { @@ -243,14 +244,19 @@ private async Task DoRar_Entry_Stream_Async(string filename) } [Fact] - public async Task Rar_Reader_Audio_program_Async() + public async ValueTask Rar_Reader_Audio_program_Async() { using ( var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.Audio_program.rar")) ) - using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true })) + await using ( + var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + new ReaderOptions { LookForHeader = true } + ) + ) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType); await reader.WriteEntryToDirectoryAsync( @@ -266,12 +272,12 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Rar_Jpg_Reader_Async() + public async ValueTask Rar_Jpg_Reader_Async() { using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"))) using (var reader = RarReader.Open(stream, new ReaderOptions { LookForHeader = true })) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType); await reader.WriteEntryToDirectoryAsync( @@ -284,34 +290,37 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Rar_Solid_Reader_Async() => + public async ValueTask Rar_Solid_Reader_Async() => await ReadAsync("Rar.solid.rar", CompressionType.Rar); [Fact] - public async Task Rar_Comment_Reader_Async() => + public async ValueTask Rar_Comment_Reader_Async() => await ReadAsync("Rar.comment.rar", 
CompressionType.Rar); [Fact] - public async Task Rar5_Comment_Reader_Async() => + public async ValueTask Rar5_Comment_Reader_Async() => await ReadAsync("Rar5.comment.rar", CompressionType.Rar); [Fact] - public async Task Rar5_Solid_Reader_Async() => + public async ValueTask Rar5_Solid_Reader_Async() => await ReadAsync("Rar5.solid.rar", CompressionType.Rar); [Fact] - public async Task Rar_Solid_Skip_Reader_Async() => + public async ValueTask Rar_Solid_Skip_Reader_Async() => await DoRar_Solid_Skip_Reader_Async("Rar.solid.rar"); [Fact] - public async Task Rar5_Solid_Skip_Reader_Async() => + public async ValueTask Rar5_Solid_Skip_Reader_Async() => await DoRar_Solid_Skip_Reader_Async("Rar5.solid.rar"); - private async Task DoRar_Solid_Skip_Reader_Async(string filename) + private async ValueTask DoRar_Solid_Skip_Reader_Async(string filename) { using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)); - using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true }); - while (reader.MoveToNextEntry()) + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + new ReaderOptions { LookForHeader = true } + ); + while (await reader.MoveToNextEntryAsync()) { if (reader.Entry.Key.NotNull().Contains("jpg")) { @@ -325,16 +334,19 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Rar_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar.rar"); + public async ValueTask Rar_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar.rar"); [Fact] - public async Task Rar5_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar5.rar"); + public async ValueTask Rar5_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar5.rar"); - private async Task DoRar_Reader_Skip_Async(string filename) + private async ValueTask DoRar_Reader_Skip_Async(string filename) { using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)); - using var reader = ReaderFactory.Open(stream, 
new ReaderOptions { LookForHeader = true }); - while (reader.MoveToNextEntry()) + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + new ReaderOptions { LookForHeader = true } + ); + while (await reader.MoveToNextEntryAsync()) { if (reader.Entry.Key.NotNull().Contains("jpg")) { @@ -347,7 +359,7 @@ await reader.WriteEntryToDirectoryAsync( } } - private async Task ReadAsync( + private async ValueTask ReadAsync( string testArchive, CompressionType expectedCompression, ReaderOptions? readerOptions = null @@ -355,7 +367,10 @@ private async Task ReadAsync( { testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); using Stream stream = File.OpenRead(testArchive); - using var reader = ReaderFactory.Open(stream, readerOptions ?? new ReaderOptions()); + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + readerOptions ?? new ReaderOptions() + ); while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) diff --git a/tests/SharpCompress.Test/ReaderTests.cs b/tests/SharpCompress.Test/ReaderTests.cs index 897e6b329..67c4371c0 100644 --- a/tests/SharpCompress.Test/ReaderTests.cs +++ b/tests/SharpCompress.Test/ReaderTests.cs @@ -113,7 +113,7 @@ private void UseReader(IReader reader) protected async Task ReadAsync( string testArchive, - CompressionType expectedCompression, + CompressionType? expectedCompression = null, ReaderOptions? options = null, CancellationToken cancellationToken = default ) @@ -130,9 +130,9 @@ protected async Task ReadAsync( VerifyFiles(); } - private async Task ReadImplAsync( + private async ValueTask ReadImplAsync( string testArchive, - CompressionType expectedCompression, + CompressionType? 
expectedCompression, ReaderOptions options, CancellationToken cancellationToken = default ) @@ -145,7 +145,13 @@ private async Task ReadImplAsync( bufferSize: options.BufferSize ); using var testStream = new TestStream(protectedStream); - using (var reader = ReaderFactory.Open(testStream, options)) + await using ( + var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(testStream), + options, + cancellationToken + ) + ) { await UseReaderAsync(reader, expectedCompression, cancellationToken); protectedStream.ThrowOnDispose = false; @@ -157,9 +163,9 @@ private async Task ReadImplAsync( Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message); } - public async Task UseReaderAsync( - IReader reader, - CompressionType expectedCompression, + public async ValueTask UseReaderAsync( + IAsyncReader reader, + CompressionType? expectedCompression, CancellationToken cancellationToken = default ) { @@ -167,7 +173,11 @@ public async Task UseReaderAsync( { if (!reader.Entry.IsDirectory) { - Assert.Equal(expectedCompression, reader.Entry.CompressionType); + if (expectedCompression.HasValue) + { + Assert.Equal(expectedCompression, reader.Entry.CompressionType); + } + await reader.WriteEntryToDirectoryAsync( SCRATCH_FILES_PATH, new ExtractionOptions { ExtractFullPath = true, Overwrite = true }, diff --git a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs index 0029105c0..7a8718ee5 100644 --- a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs @@ -12,7 +12,7 @@ namespace SharpCompress.Test.SevenZip; public class SevenZipArchiveAsyncTests : ArchiveTests { [Fact] - public async Task SevenZipArchive_LZMA_AsyncStreamExtraction() + public async ValueTask SevenZipArchive_LZMA_AsyncStreamExtraction() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA.7z"); using var stream = 
File.OpenRead(testArchive); @@ -37,7 +37,7 @@ public async Task SevenZipArchive_LZMA_AsyncStreamExtraction() } [Fact] - public async Task SevenZipArchive_LZMA2_AsyncStreamExtraction() + public async ValueTask SevenZipArchive_LZMA2_AsyncStreamExtraction() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA2.7z"); using var stream = File.OpenRead(testArchive); @@ -62,7 +62,7 @@ public async Task SevenZipArchive_LZMA2_AsyncStreamExtraction() } [Fact] - public async Task SevenZipArchive_Solid_AsyncStreamExtraction() + public async ValueTask SevenZipArchive_Solid_AsyncStreamExtraction() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z"); using var stream = File.OpenRead(testArchive); @@ -87,7 +87,7 @@ public async Task SevenZipArchive_Solid_AsyncStreamExtraction() } [Fact] - public async Task SevenZipArchive_BZip2_AsyncStreamExtraction() + public async ValueTask SevenZipArchive_BZip2_AsyncStreamExtraction() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.BZip2.7z"); using var stream = File.OpenRead(testArchive); @@ -112,7 +112,7 @@ public async Task SevenZipArchive_BZip2_AsyncStreamExtraction() } [Fact] - public async Task SevenZipArchive_PPMd_AsyncStreamExtraction() + public async ValueTask SevenZipArchive_PPMd_AsyncStreamExtraction() { var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.PPMd.7z"); using var stream = File.OpenRead(testArchive); diff --git a/tests/SharpCompress.Test/SharpCompress.Test.csproj b/tests/SharpCompress.Test/SharpCompress.Test.csproj index 6ee632bd5..c16a1581a 100644 --- a/tests/SharpCompress.Test/SharpCompress.Test.csproj +++ b/tests/SharpCompress.Test/SharpCompress.Test.csproj @@ -23,7 +23,6 @@ - diff --git a/tests/SharpCompress.Test/Streams/LzmaStreamAsyncTests.cs b/tests/SharpCompress.Test/Streams/LzmaStreamAsyncTests.cs index 9f19401d7..e30e07975 100644 --- a/tests/SharpCompress.Test/Streams/LzmaStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Streams/LzmaStreamAsyncTests.cs @@ -10,7 +10,7 
@@ namespace SharpCompress.Test.Streams; public class LzmaStreamAsyncTests { [Fact] - public async Task TestLzma2Decompress1ByteAsync() + public async ValueTask TestLzma2Decompress1ByteAsync() { var properties = new byte[] { 0x01 }; var compressedData = new byte[] { 0x01, 0x00, 0x00, 0x58, 0x00 }; @@ -517,7 +517,7 @@ public async Task TestLzma2Decompress1ByteAsync() ]; [Fact] - public async Task TestLzmaBufferAsync() + public async ValueTask TestLzmaBufferAsync() { var input = new MemoryStream(LzmaData); using var output = new MemoryStream(); @@ -536,7 +536,7 @@ public async Task TestLzmaBufferAsync() } [Fact] - public async Task TestLzmaStreamEncodingWritesDataAsync() + public async ValueTask TestLzmaStreamEncodingWritesDataAsync() { using var inputStream = new MemoryStream(LzmaResultData); using MemoryStream outputStream = new(); @@ -547,7 +547,7 @@ public async Task TestLzmaStreamEncodingWritesDataAsync() } [Fact] - public async Task TestLzmaEncodingAccuracyAsync() + public async ValueTask TestLzmaEncodingAccuracyAsync() { var input = new MemoryStream(LzmaResultData); var compressed = new MemoryStream(); diff --git a/tests/SharpCompress.Test/Streams/RewindableStreamAsyncTest.cs b/tests/SharpCompress.Test/Streams/RewindableStreamAsyncTest.cs index 3893f5399..455f007fb 100644 --- a/tests/SharpCompress.Test/Streams/RewindableStreamAsyncTest.cs +++ b/tests/SharpCompress.Test/Streams/RewindableStreamAsyncTest.cs @@ -8,7 +8,7 @@ namespace SharpCompress.Test.Streams; public class RewindableStreamAsyncTest { [Fact] - public async Task TestRewindAsync() + public async ValueTask TestRewindAsync() { var ms = new MemoryStream(); var bw = new BinaryWriter(ms); @@ -46,7 +46,7 @@ public async Task TestRewindAsync() } [Fact] - public async Task TestIncompleteRewindAsync() + public async ValueTask TestIncompleteRewindAsync() { var ms = new MemoryStream(); var bw = new BinaryWriter(ms); diff --git a/tests/SharpCompress.Test/Streams/SharpCompressStreamAsyncTests.cs 
b/tests/SharpCompress.Test/Streams/SharpCompressStreamAsyncTests.cs index d806246da..390ad4795 100644 --- a/tests/SharpCompress.Test/Streams/SharpCompressStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Streams/SharpCompressStreamAsyncTests.cs @@ -26,7 +26,7 @@ private static void CreateData(MemoryStream ms) } [Fact] - public async Task BufferReadAsyncTest() + public async ValueTask BufferReadAsyncTest() { byte[] data = new byte[0x100000]; byte[] test = new byte[0x1000]; @@ -55,7 +55,7 @@ public async Task BufferReadAsyncTest() } [Fact] - public async Task BufferReadAndSeekAsyncTest() + public async ValueTask BufferReadAndSeekAsyncTest() { byte[] data = new byte[0x100000]; byte[] test = new byte[0x1000]; @@ -90,7 +90,7 @@ public async Task BufferReadAndSeekAsyncTest() } [Fact] - public async Task MultipleAsyncReadsTest() + public async ValueTask MultipleAsyncReadsTest() { byte[] data = new byte[0x100000]; byte[] test1 = new byte[0x800]; @@ -115,7 +115,7 @@ public async Task MultipleAsyncReadsTest() } [Fact] - public async Task LargeBufferAsyncReadTest() + public async ValueTask LargeBufferAsyncReadTest() { byte[] data = new byte[0x200000]; byte[] test = new byte[0x8000]; diff --git a/tests/SharpCompress.Test/Streams/ZLibBaseStreamAsyncTests.cs b/tests/SharpCompress.Test/Streams/ZLibBaseStreamAsyncTests.cs index 3512b8b76..a29477da9 100644 --- a/tests/SharpCompress.Test/Streams/ZLibBaseStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Streams/ZLibBaseStreamAsyncTests.cs @@ -12,7 +12,7 @@ namespace SharpCompress.Test.Streams; public class ZLibBaseStreamAsyncTests { [Fact] - public async Task TestChunkedZlibCompressesEverythingAsync() + public async ValueTask TestChunkedZlibCompressesEverythingAsync() { var plainData = new byte[] { @@ -61,7 +61,7 @@ public async Task TestChunkedZlibCompressesEverythingAsync() } [Fact] - public async Task Zlib_should_read_the_previously_written_message_async() + public async ValueTask 
Zlib_should_read_the_previously_written_message_async() { var message = new string('a', 131073); // 131073 causes the failure, but 131072 (-1) doesn't var bytes = Encoding.ASCII.GetBytes(message); @@ -83,7 +83,7 @@ await GetBytesAsync(byteBufferStream).ConfigureAwait(false) result.Should().Be(message); } - private async Task CompressAsync(Stream input, Stream output, int compressionLevel) + private async ValueTask CompressAsync(Stream input, Stream output, int compressionLevel) { using var zlibStream = new ZlibStream( SharpCompressStream.Create(output, leaveOpen: true), @@ -94,7 +94,7 @@ private async Task CompressAsync(Stream input, Stream output, int compressionLev await input.CopyToAsync(zlibStream).ConfigureAwait(false); } - private async Task DecompressAsync(Stream input, Stream output) + private async ValueTask DecompressAsync(Stream input, Stream output) { using var zlibStream = new ZlibStream( SharpCompressStream.Create(input, leaveOpen: true), @@ -103,7 +103,7 @@ private async Task DecompressAsync(Stream input, Stream output) await zlibStream.CopyToAsync(output).ConfigureAwait(false); } - private async Task GetBytesAsync(BufferedStream stream) + private async ValueTask GetBytesAsync(BufferedStream stream) { var bytes = new byte[stream.Length]; await stream.ReadAsync(bytes, 0, (int)stream.Length).ConfigureAwait(false); diff --git a/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs index 280d37b2d..9fae1c582 100644 --- a/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs @@ -19,10 +19,10 @@ public class TarArchiveAsyncTests : ArchiveTests public TarArchiveAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task TarArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Tar.tar"); + public async ValueTask TarArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Tar.tar"); [Fact] - public async Task 
Tar_FileName_Exactly_100_Characters_Async() + public async ValueTask Tar_FileName_Exactly_100_Characters_Async() { var archive = "Tar_FileName_Exactly_100_Characters.tar"; @@ -54,14 +54,14 @@ public async Task Tar_FileName_Exactly_100_Characters_Async() { Assert.Equal( "dummy filecontent", - await new StreamReader(entry.OpenEntryStream()).ReadLineAsync() + await new StreamReader(await entry.OpenEntryStreamAsync()).ReadLineAsync() ); } } } [Fact] - public async Task Tar_VeryLongFilepathReadback_Async() + public async ValueTask Tar_VeryLongFilepathReadback_Async() { var archive = "Tar_VeryLongFilepathReadback.tar"; @@ -98,14 +98,14 @@ public async Task Tar_VeryLongFilepathReadback_Async() { Assert.Equal( "dummy filecontent", - await new StreamReader(entry.OpenEntryStream()).ReadLineAsync() + await new StreamReader(await entry.OpenEntryStreamAsync()).ReadLineAsync() ); } } } [Fact] - public async Task Tar_Create_New_Async() + public async ValueTask Tar_Create_New_Async() { var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"); var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar"); @@ -121,7 +121,7 @@ public async Task Tar_Create_New_Async() } [Fact] - public async Task Tar_Random_Write_Add_Async() + public async ValueTask Tar_Random_Write_Add_Async() { var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg"); var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar"); @@ -137,7 +137,7 @@ public async Task Tar_Random_Write_Add_Async() } [Fact] - public async Task Tar_Random_Write_Remove_Async() + public async ValueTask Tar_Random_Write_Remove_Async() { var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.mod.tar"); var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar"); @@ -157,7 +157,7 @@ public async Task Tar_Random_Write_Remove_Async() [Theory] [InlineData(10)] [InlineData(128)] - public async Task Tar_Japanese_Name_Async(int length) + public async ValueTask Tar_Japanese_Name_Async(int length) { using var mstm = new 
MemoryStream(); var enc = new ArchiveEncoding { Default = Encoding.UTF8 }; @@ -183,7 +183,7 @@ public async Task Tar_Japanese_Name_Async(int length) } [Fact] - public async Task Tar_Read_One_At_A_Time_Async() + public async ValueTask Tar_Read_One_At_A_Time_Async() { var archiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 }; var tarWriterOptions = new TarWriterOptions(CompressionType.None, true) @@ -211,7 +211,7 @@ public async Task Tar_Read_One_At_A_Time_Async() { ++numberOfEntries; - using var tarEntryStream = entry.OpenEntryStream(); + using var tarEntryStream = await entry.OpenEntryStreamAsync(); using var testFileStream = new MemoryStream(); await tarEntryStream.CopyToAsync(testFileStream); Assert.Equal(testBytes.Length, testFileStream.Length); diff --git a/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs index 0bc93d833..4be595845 100644 --- a/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs @@ -15,17 +15,17 @@ public class TarReaderAsyncTests : ReaderTests public TarReaderAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task Tar_Reader_Async() => await ReadAsync("Tar.tar", CompressionType.None); + public async ValueTask Tar_Reader_Async() => await ReadAsync("Tar.tar", CompressionType.None); [Fact] - public async Task Tar_Skip_Async() + public async ValueTask Tar_Skip_Async() { using Stream stream = new ForwardOnlyStream( File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar")) ); - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); var x = 0; - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { @@ -42,43 +42,45 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Tar_Z_Reader_Async() => await ReadAsync("Tar.tar.Z", CompressionType.Lzw); 
+ public async ValueTask Tar_Z_Reader_Async() => + await ReadAsync("Tar.tar.Z", CompressionType.Lzw); [Fact] - public async Task Tar_BZip2_Reader_Async() => + public async ValueTask Tar_BZip2_Reader_Async() => await ReadAsync("Tar.tar.bz2", CompressionType.BZip2); [Fact] - public async Task Tar_GZip_Reader_Async() => + public async ValueTask Tar_GZip_Reader_Async() => await ReadAsync("Tar.tar.gz", CompressionType.GZip); [Fact] - public async Task Tar_ZStandard_Reader_Async() => + public async ValueTask Tar_ZStandard_Reader_Async() => await ReadAsync("Tar.tar.zst", CompressionType.ZStandard); [Fact] - public async Task Tar_LZip_Reader_Async() => + public async ValueTask Tar_LZip_Reader_Async() => await ReadAsync("Tar.tar.lz", CompressionType.LZip); [Fact] - public async Task Tar_Xz_Reader_Async() => await ReadAsync("Tar.tar.xz", CompressionType.Xz); + public async ValueTask Tar_Xz_Reader_Async() => + await ReadAsync("Tar.tar.xz", CompressionType.Xz); [Fact] - public async Task Tar_GZip_OldGnu_Reader_Async() => + public async ValueTask Tar_GZip_OldGnu_Reader_Async() => await ReadAsync("Tar.oldgnu.tar.gz", CompressionType.GZip); [Fact] - public async Task Tar_BZip2_Entry_Stream_Async() + public async ValueTask Tar_BZip2_Entry_Stream_Async() { using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"))) using (var reader = TarReader.Open(stream)) { - while (reader.MoveToNextEntry()) + while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) { Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType); - using var entryStream = reader.OpenEntryStream(); + using var entryStream = await reader.OpenEntryStreamAsync(); var file = Path.GetFileName(reader.Entry.Key); var folder = Path.GetDirectoryName(reader.Entry.Key) @@ -178,44 +180,51 @@ public void Tar_With_TarGz_With_Flushed_EntryStream_Async() } [Fact] - public async Task Tar_Broken_Stream_Async() + public async ValueTask Tar_Broken_Stream_Async() { var 
archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"); using Stream stream = File.OpenRead(archiveFullPath); - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); var memoryStream = new MemoryStream(); - Assert.True(reader.MoveToNextEntry()); - Assert.True(reader.MoveToNextEntry()); + Assert.True(await reader.MoveToNextEntryAsync()); + Assert.True(await reader.MoveToNextEntryAsync()); await reader.WriteEntryToAsync(memoryStream); stream.Close(); - Assert.Throws(() => reader.MoveToNextEntry()); + await Assert.ThrowsAsync(async () => + await reader.MoveToNextEntryAsync() + ); } [Fact] - public async Task Tar_Corrupted_Async() + public async ValueTask Tar_Corrupted_Async() { var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, "TarCorrupted.tar"); using Stream stream = File.OpenRead(archiveFullPath); - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); var memoryStream = new MemoryStream(); - Assert.True(reader.MoveToNextEntry()); - Assert.True(reader.MoveToNextEntry()); + Assert.True(await reader.MoveToNextEntryAsync()); + Assert.True(await reader.MoveToNextEntryAsync()); await reader.WriteEntryToAsync(memoryStream); stream.Close(); - Assert.Throws(() => reader.MoveToNextEntry()); + await Assert.ThrowsAsync(async () => + await reader.MoveToNextEntryAsync() + ); } #if LINUX [Fact] - public async Task Tar_GZip_With_Symlink_Entries_Async() + public async ValueTask Tar_GZip_With_Symlink_Entries_Async() { using Stream stream = File.OpenRead( Path.Combine(TEST_ARCHIVES_PATH, "TarWithSymlink.tar.gz") ); - using var reader = TarReader.Open(stream); - while (reader.MoveToNextEntry()) + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(stream), + new ReaderOptions { LookForHeader = true } + ); + while (await reader.MoveToNextEntryAsync()) { if (reader.Entry.IsDirectory) { diff 
--git a/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs index f94a39373..290a01edc 100644 --- a/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs +++ b/tests/SharpCompress.Test/Tar/TarWriterAsyncTests.cs @@ -21,7 +21,7 @@ public TarWriterAsyncTests() : base(ArchiveType.Tar) => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task Tar_Writer_Async() => + public async ValueTask Tar_Writer_Async() => await WriteAsync( CompressionType.None, "Tar.noEmptyDirs.tar", @@ -30,7 +30,7 @@ await WriteAsync( ); [Fact] - public async Task Tar_BZip2_Writer_Async() => + public async ValueTask Tar_BZip2_Writer_Async() => await WriteAsync( CompressionType.BZip2, "Tar.noEmptyDirs.tar.bz2", @@ -39,7 +39,7 @@ await WriteAsync( ); [Fact] - public async Task Tar_LZip_Writer_Async() => + public async ValueTask Tar_LZip_Writer_Async() => await WriteAsync( CompressionType.LZip, "Tar.noEmptyDirs.tar.lz", @@ -48,7 +48,7 @@ await WriteAsync( ); [Fact] - public async Task Tar_Rar_Write_Async() => + public async ValueTask Tar_Rar_Write_Async() => await Assert.ThrowsAsync(async () => await WriteAsync( CompressionType.Rar, @@ -60,7 +60,7 @@ await WriteAsync( [Theory] [InlineData(true)] [InlineData(false)] - public async Task Tar_Finalize_Archive_Async(bool finalizeArchive) + public async ValueTask Tar_Finalize_Archive_Async(bool finalizeArchive) { using var stream = new MemoryStream(); using Stream content = File.OpenRead(Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg")); diff --git a/tests/SharpCompress.Test/TestBase.cs b/tests/SharpCompress.Test/TestBase.cs index 184e64420..702d6bca0 100644 --- a/tests/SharpCompress.Test/TestBase.cs +++ b/tests/SharpCompress.Test/TestBase.cs @@ -3,22 +3,25 @@ using System.IO; using System.Linq; using System.Text; +using System.Threading; +using System.Threading.Tasks; using SharpCompress.Readers; using Xunit; namespace SharpCompress.Test; -public class TestBase : IDisposable +public 
class TestBase : IAsyncDisposable { - private readonly string SOLUTION_BASE_PATH; - protected readonly string TEST_ARCHIVES_PATH; - protected readonly string ORIGINAL_FILES_PATH; - protected readonly string MISC_TEST_FILES_PATH; - private readonly string SCRATCH_BASE_PATH; - protected readonly string SCRATCH_FILES_PATH; - protected readonly string SCRATCH2_FILES_PATH; + private static readonly string SOLUTION_BASE_PATH; + public static readonly string TEST_ARCHIVES_PATH; + public static readonly string ORIGINAL_FILES_PATH; + public static readonly string MISC_TEST_FILES_PATH; + private static readonly string SCRATCH_BASE_PATH; - protected TestBase() + private static readonly string SCRATCH_DIRECTORY; + private static readonly string SCRATCH2_DIRECTORY; + + static TestBase() { var index = AppDomain.CurrentDomain.BaseDirectory.IndexOf( "SharpCompress.Test", @@ -31,19 +34,34 @@ protected TestBase() ORIGINAL_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Original"); MISC_TEST_FILES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "MiscTest"); - SCRATCH_BASE_PATH = Path.Combine( - SOLUTION_BASE_PATH, - "TestArchives", - Guid.NewGuid().ToString() - ); - SCRATCH_FILES_PATH = Path.Combine(SCRATCH_BASE_PATH, "Scratch"); - SCRATCH2_FILES_PATH = Path.Combine(SCRATCH_BASE_PATH, "Scratch2"); + SCRATCH_BASE_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives"); + SCRATCH_DIRECTORY = Path.Combine(SCRATCH_BASE_PATH, "Scratch"); + SCRATCH2_DIRECTORY = Path.Combine(SCRATCH_BASE_PATH, "Scratch2"); + + Directory.CreateDirectory(SCRATCH_DIRECTORY); + Directory.CreateDirectory(SCRATCH2_DIRECTORY); + } + + private readonly Guid _testGuid = Guid.NewGuid(); + protected readonly string SCRATCH_FILES_PATH; + protected readonly string SCRATCH2_FILES_PATH; + + protected TestBase() + { + SCRATCH_FILES_PATH = Path.Combine(SCRATCH_DIRECTORY, _testGuid.ToString()); + SCRATCH2_FILES_PATH = Path.Combine(SCRATCH2_DIRECTORY, _testGuid.ToString()); 
Directory.CreateDirectory(SCRATCH_FILES_PATH); Directory.CreateDirectory(SCRATCH2_FILES_PATH); } - public void Dispose() => Directory.Delete(SCRATCH_BASE_PATH, true); + //always use async dispose since we have I/O and sync Dispose doesn't wait when using xunit + public async ValueTask DisposeAsync() + { + await Task.CompletedTask; + Directory.Delete(SCRATCH_FILES_PATH, true); + Directory.Delete(SCRATCH2_FILES_PATH, true); + } public void VerifyFiles() { diff --git a/tests/SharpCompress.Test/UtilityTests.cs b/tests/SharpCompress.Test/UtilityTests.cs index cbd573042..c0455d740 100644 --- a/tests/SharpCompress.Test/UtilityTests.cs +++ b/tests/SharpCompress.Test/UtilityTests.cs @@ -4,6 +4,7 @@ using System.IO; using System.Linq; using System.Text; +using System.Threading.Tasks; using Xunit; namespace SharpCompress.Test; @@ -157,6 +158,97 @@ public void ReadFully_Span_EmptyBuffer_ReturnsTrue() #endregion + #region ReadByteAsync Tests + + [Fact] + public async ValueTask ReadByteAsync_ReadsOneByte() + { + var data = new byte[] { 42, 1, 2, 3 }; + using var stream = new MemoryStream(data); + using var reader = new BinaryReader(stream); + + var result = await reader.ReadByteAsync(); + + Assert.Equal(42, result); + Assert.Equal(1, stream.Position); + } + + [Fact] + public async ValueTask ReadByteAsync_EmptyStream_ThrowsEndOfStreamException() + { + using var stream = new MemoryStream(); + using var reader = new BinaryReader(stream); + + await Assert.ThrowsAsync(async () => await reader.ReadByteAsync()); + } + + [Fact] + public async ValueTask ReadByteAsync_MultipleReads_ReadsSequentially() + { + var data = new byte[] { 1, 2, 3 }; + using var stream = new MemoryStream(data); + using var reader = new BinaryReader(stream); + + var first = await reader.ReadByteAsync(); + var second = await reader.ReadByteAsync(); + var third = await reader.ReadByteAsync(); + + Assert.Equal(1, first); + Assert.Equal(2, second); + Assert.Equal(3, third); + } + + #endregion + + #region ReadBytesAsync 
Tests + + [Fact] + public async ValueTask ReadBytesAsync_ReadsExactlyRequiredBytes() + { + var data = new byte[] { 1, 2, 3, 4, 5 }; + using var stream = new MemoryStream(data); + using var reader = new BinaryReader(stream); + + var result = await reader.ReadBytesAsync(3); + + Assert.Equal(new byte[] { 1, 2, 3 }, result); + Assert.Equal(3, stream.Position); + } + + [Fact] + public async ValueTask ReadBytesAsync_NotEnoughData_ThrowsEndOfStreamException() + { + var data = new byte[] { 1, 2, 3 }; + using var stream = new MemoryStream(data); + using var reader = new BinaryReader(stream); + + await Assert.ThrowsAsync(async () => await reader.ReadBytesAsync(5)); + } + + [Fact] + public async ValueTask ReadBytesAsync_EmptyStream_ThrowsEndOfStreamException() + { + using var stream = new MemoryStream(); + using var reader = new BinaryReader(stream); + + await Assert.ThrowsAsync(async () => await reader.ReadBytesAsync(1)); + } + + [Fact] + public async ValueTask ReadBytesAsync_ZeroBytes_ReturnsEmptyArray() + { + var data = new byte[] { 1, 2, 3 }; + using var stream = new MemoryStream(data); + using var reader = new BinaryReader(stream); + + var result = await reader.ReadBytesAsync(0); + + Assert.Empty(result); + Assert.Equal(0, stream.Position); + } + + #endregion + #region Skip Tests [Fact] diff --git a/tests/SharpCompress.Test/WriterTests.cs b/tests/SharpCompress.Test/WriterTests.cs index 5212fab5d..984d3b91a 100644 --- a/tests/SharpCompress.Test/WriterTests.cs +++ b/tests/SharpCompress.Test/WriterTests.cs @@ -5,6 +5,7 @@ using SharpCompress.Common; using SharpCompress.IO; using SharpCompress.Readers; +using SharpCompress.Test.Mocks; using SharpCompress.Writers; namespace SharpCompress.Test; @@ -62,7 +63,11 @@ protected async Task WriteAsync( CancellationToken cancellationToken = default ) { - using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive))) + using ( + Stream stream = new AsyncOnlyStream( + File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, 
archive)) + ) + ) { var writerOptions = new WriterOptions(compressionType) { LeaveStreamOpen = true }; @@ -87,13 +92,15 @@ await writer.WriteAllAsync( readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default; - using var reader = ReaderFactory.Open( - SharpCompressStream.Create(stream, leaveOpen: true), - readerOptions + await using var reader = await ReaderFactory.OpenAsync( + new AsyncOnlyStream(SharpCompressStream.Create(stream, leaveOpen: true)), + readerOptions, + cancellationToken ); - reader.WriteAllToDirectory( + await reader.WriteAllToDirectoryAsync( SCRATCH_FILES_PATH, - new ExtractionOptions { ExtractFullPath = true } + new ExtractionOptions { ExtractFullPath = true }, + cancellationToken ); } VerifyFiles(); diff --git a/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs index 8ccd569d8..bff4d7c34 100644 --- a/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs @@ -26,7 +26,7 @@ private static async Task ReadBytesAsync(XZBlock block, int bytesToRead) } [Fact] - public async Task OnFindIndexBlockThrowAsync() + public async ValueTask OnFindIndexBlockThrowAsync() { var bytes = new byte[] { 0 }; using Stream indexBlockStream = new MemoryStream(bytes); @@ -38,7 +38,7 @@ await Assert.ThrowsAsync(async () => } [Fact] - public async Task CrcIncorrectThrowsAsync() + public async ValueTask CrcIncorrectThrowsAsync() { var bytes = (byte[])Compressed.Clone(); bytes[20]++; @@ -53,7 +53,7 @@ public async Task CrcIncorrectThrowsAsync() } [Fact] - public async Task CanReadMAsync() + public async ValueTask CanReadMAsync() { var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal( @@ -63,7 +63,7 @@ await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false) } [Fact] - public async Task CanReadMaryAsync() + public async ValueTask CanReadMaryAsync() { var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal( @@ -81,7 +81,7 @@ 
await ReadBytesAsync(xzBlock, 2).ConfigureAwait(false) } [Fact] - public async Task CanReadPoemWithStreamReaderAsync() + public async ValueTask CanReadPoemWithStreamReaderAsync() { var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); var sr = new StreamReader(xzBlock); @@ -89,7 +89,7 @@ public async Task CanReadPoemWithStreamReaderAsync() } [Fact] - public async Task NoopWhenNoPaddingAsync() + public async ValueTask NoopWhenNoPaddingAsync() { // CompressedStream's only block has no padding. var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); @@ -99,7 +99,7 @@ public async Task NoopWhenNoPaddingAsync() } [Fact] - public async Task SkipsPaddingWhenPresentAsync() + public async ValueTask SkipsPaddingWhenPresentAsync() { // CompressedIndexedStream's first block has 1-byte padding. var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8); @@ -109,7 +109,7 @@ public async Task SkipsPaddingWhenPresentAsync() } [Fact] - public async Task HandlesPaddingInUnalignedBlockAsync() + public async ValueTask HandlesPaddingInUnalignedBlockAsync() { var compressedUnaligned = new byte[Compressed.Length + 1]; Compressed.CopyTo(compressedUnaligned, 1); diff --git a/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs index 5fc11c39c..74fb94c00 100644 --- a/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs @@ -9,7 +9,7 @@ namespace SharpCompress.Test.Xz; public class XzHeaderAsyncTests : XzTestsBase { [Fact] - public async Task ChecksMagicNumberAsync() + public async ValueTask ChecksMagicNumberAsync() { var bytes = (byte[])Compressed.Clone(); bytes[3]++; @@ -24,7 +24,7 @@ public async Task ChecksMagicNumberAsync() } [Fact] - public async Task CorruptHeaderThrowsAsync() + public async ValueTask CorruptHeaderThrowsAsync() { var bytes = (byte[])Compressed.Clone(); bytes[8]++; @@ -39,7 +39,7 @@ public async Task CorruptHeaderThrowsAsync() } [Fact] - 
public async Task BadVersionIfCrcOkButStreamFlagUnknownAsync() + public async ValueTask BadVersionIfCrcOkButStreamFlagUnknownAsync() { var bytes = (byte[])Compressed.Clone(); byte[] streamFlags = [0x00, 0xF4]; @@ -57,7 +57,7 @@ public async Task BadVersionIfCrcOkButStreamFlagUnknownAsync() } [Fact] - public async Task ProcessesBlockCheckTypeAsync() + public async ValueTask ProcessesBlockCheckTypeAsync() { var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); @@ -66,7 +66,7 @@ public async Task ProcessesBlockCheckTypeAsync() } [Fact] - public async Task CanCalculateBlockCheckSizeAsync() + public async ValueTask CanCalculateBlockCheckSizeAsync() { var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); @@ -75,7 +75,7 @@ public async Task CanCalculateBlockCheckSizeAsync() } [Fact] - public async Task ProcessesStreamHeaderFromFactoryAsync() + public async ValueTask ProcessesStreamHeaderFromFactoryAsync() { var header = await XZHeader.FromStreamAsync(CompressedStream).ConfigureAwait(false); Assert.Equal(CheckType.CRC64, header.BlockCheckType); diff --git a/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs index f1f0982a0..02a96e9be 100644 --- a/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs @@ -24,7 +24,7 @@ public void RecordsStreamStartOnInit() } [Fact] - public async Task ThrowsIfHasNoIndexMarkerAsync() + public async ValueTask ThrowsIfHasNoIndexMarkerAsync() { using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]); var br = new BinaryReader(badStream); @@ -35,7 +35,7 @@ await index.ProcessAsync().ConfigureAwait(false) } [Fact] - public async Task ReadsNoRecordAsync() + public async ValueTask ReadsNoRecordAsync() { var br = new BinaryReader(CompressedEmptyStream); var index = new XZIndex(br, false); @@ -44,7 +44,7 @@ public async Task ReadsNoRecordAsync() } [Fact] - public async Task ReadsOneRecordAsync() + public 
async ValueTask ReadsOneRecordAsync() { var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); @@ -53,7 +53,7 @@ public async Task ReadsOneRecordAsync() } [Fact] - public async Task ReadsMultipleRecordsAsync() + public async ValueTask ReadsMultipleRecordsAsync() { var br = new BinaryReader(CompressedIndexedStream); var index = new XZIndex(br, false); @@ -62,7 +62,7 @@ public async Task ReadsMultipleRecordsAsync() } [Fact] - public async Task ReadsFirstRecordAsync() + public async ValueTask ReadsFirstRecordAsync() { var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); @@ -71,7 +71,7 @@ public async Task ReadsFirstRecordAsync() } [Fact] - public async Task SkipsPaddingAsync() + public async ValueTask SkipsPaddingAsync() { // Index with 3-byte padding. using Stream badStream = new MemoryStream([ diff --git a/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs index 7d4ba386c..53f97bb67 100644 --- a/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs @@ -8,7 +8,7 @@ namespace SharpCompress.Test.Xz; public class XzStreamAsyncTests : XzTestsBase { [Fact] - public async Task CanReadEmptyStreamAsync() + public async ValueTask CanReadEmptyStreamAsync() { var xz = new XZStream(CompressedEmptyStream); using var sr = new StreamReader(xz); @@ -17,7 +17,7 @@ public async Task CanReadEmptyStreamAsync() } [Fact] - public async Task CanReadStreamAsync() + public async ValueTask CanReadStreamAsync() { var xz = new XZStream(CompressedStream); using var sr = new StreamReader(xz); @@ -26,7 +26,7 @@ public async Task CanReadStreamAsync() } [Fact] - public async Task CanReadIndexedStreamAsync() + public async ValueTask CanReadIndexedStreamAsync() { var xz = new XZStream(CompressedIndexedStream); using var sr = new StreamReader(xz); diff --git a/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs 
b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs index 223ad969a..2c9435533 100644 --- a/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs @@ -25,34 +25,34 @@ public Zip64AsyncTests() //[Fact] [Trait("format", "zip64")] - public async Task Zip64_Single_Large_File_Async() => + public async ValueTask Zip64_Single_Large_File_Async() => await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false); //[Fact] [Trait("format", "zip64")] - public async Task Zip64_Two_Large_Files_Async() => + public async ValueTask Zip64_Two_Large_Files_Async() => await RunSingleTestAsync(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false); - [Fact] + //[Fact] [Trait("format", "zip64")] - public async Task Zip64_Two_Small_files_Async() => + public async ValueTask Zip64_Two_Small_files_Async() => // Multiple files, does not require zip64 await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false); - [Fact] + // [Fact] [Trait("format", "zip64")] - public async Task Zip64_Two_Small_files_stream_Async() => + public async ValueTask Zip64_Two_Small_files_stream_Async() => await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true); - [Fact] + // [Fact] [Trait("format", "zip64")] - public async Task Zip64_Two_Small_Files_Zip64_Async() => + public async ValueTask Zip64_Two_Small_Files_Zip64_Async() => // Multiple files, use zip64 even though it is not required await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false); - [Fact] + // [Fact] [Trait("format", "zip64")] - public async Task Zip64_Single_Large_File_Fail_Async() + public async ValueTask Zip64_Single_Large_File_Fail_Async() { try { @@ -63,9 +63,9 @@ public async Task Zip64_Single_Large_File_Fail_Async() catch (NotSupportedException) { } } - [Fact] + // [Fact] [Trait("zip64", "true")] - public async Task Zip64_Single_Large_File_Zip64_Streaming_Fail_Async() + public async ValueTask 
Zip64_Single_Large_File_Zip64_Streaming_Fail_Async() { try { @@ -76,9 +76,9 @@ public async Task Zip64_Single_Large_File_Zip64_Streaming_Fail_Async() catch (NotSupportedException) { } } - [Fact] + // [Fact] [Trait("zip64", "true")] - public async Task Zip64_Single_Large_File_Streaming_Fail_Async() + public async ValueTask Zip64_Single_Large_File_Streaming_Fail_Async() { try { @@ -89,7 +89,7 @@ public async Task Zip64_Single_Large_File_Streaming_Fail_Async() catch (NotSupportedException) { } } - public async Task RunSingleTestAsync( + public async ValueTask RunSingleTestAsync( long files, long filesize, bool setZip64, @@ -158,7 +158,7 @@ await CreateZipArchiveAsync( } } - public async Task CreateZipArchiveAsync( + public async ValueTask CreateZipArchiveAsync( string filename, long files, long filesize, @@ -192,7 +192,7 @@ bool forwardOnly } } - public async Task> ReadForwardOnlyAsync(string filename) + public async ValueTask> ReadForwardOnlyAsync(string filename) { long count = 0; long size = 0; diff --git a/tests/SharpCompress.Test/Zip/Zip64Tests.cs b/tests/SharpCompress.Test/Zip/Zip64Tests.cs index e92c8d52c..43dc18743 100644 --- a/tests/SharpCompress.Test/Zip/Zip64Tests.cs +++ b/tests/SharpCompress.Test/Zip/Zip64Tests.cs @@ -34,25 +34,25 @@ public void Zip64_Two_Large_Files() => // One single file, requires zip64 RunSingleTest(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false); - [Fact] + //[Fact] [Trait("format", "zip64")] public void Zip64_Two_Small_files() => // Multiple files, does not require zip64 RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false); - [Fact] + //[Fact] [Trait("format", "zip64")] public void Zip64_Two_Small_files_stream() => // Multiple files, does not require zip64, and works with streams RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true); - [Fact] + //[Fact] [Trait("format", "zip64")] public void Zip64_Two_Small_Files_Zip64() => // Multiple files, use zip64 even though it is not required 
RunSingleTest(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false); - [Fact] + // [Fact] [Trait("format", "zip64")] public void Zip64_Single_Large_File_Fail() { @@ -65,7 +65,7 @@ public void Zip64_Single_Large_File_Fail() catch (NotSupportedException) { } } - [Fact] + //[Fact] [Trait("zip64", "true")] public void Zip64_Single_Large_File_Zip64_Streaming_Fail() { @@ -78,7 +78,7 @@ public void Zip64_Single_Large_File_Zip64_Streaming_Fail() catch (NotSupportedException) { } } - [Fact] + // [Fact] [Trait("zip64", "true")] public void Zip64_Single_Large_File_Streaming_Fail() { diff --git a/tests/SharpCompress.Test/Zip/ZipArchiveAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipArchiveAsyncTests.cs index cd93a3c18..41420f983 100644 --- a/tests/SharpCompress.Test/Zip/ZipArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipArchiveAsyncTests.cs @@ -7,6 +7,7 @@ using SharpCompress.Archives.Zip; using SharpCompress.Common; using SharpCompress.Compressors.Deflate; +using SharpCompress.Test.Mocks; using SharpCompress.Writers; using SharpCompress.Writers.Zip; using Xunit; @@ -18,59 +19,59 @@ public class ZipArchiveAsyncTests : ArchiveTests public ZipArchiveAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task Zip_ZipX_ArchiveStreamRead_Async() => + public async ValueTask Zip_ZipX_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.zipx"); [Fact] - public async Task Zip_BZip2_Streamed_ArchiveStreamRead_Async() => + public async ValueTask Zip_BZip2_Streamed_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.bzip2.dd.zip"); [Fact] - public async Task Zip_BZip2_ArchiveStreamRead_Async() => + public async ValueTask Zip_BZip2_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.bzip2.zip"); [Fact] - public async Task Zip_Deflate_Streamed2_ArchiveStreamRead_Async() => + public async ValueTask Zip_Deflate_Streamed2_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.deflate.dd-.zip"); [Fact] - public 
async Task Zip_Deflate_Streamed_ArchiveStreamRead_Async() => + public async ValueTask Zip_Deflate_Streamed_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.deflate.dd.zip"); [Fact] - public async Task Zip_Deflate_ArchiveStreamRead_Async() => + public async ValueTask Zip_Deflate_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.deflate.zip"); [Fact] - public async Task Zip_Deflate64_ArchiveStreamRead_Async() => + public async ValueTask Zip_Deflate64_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.deflate64.zip"); [Fact] - public async Task Zip_LZMA_Streamed_ArchiveStreamRead_Async() => + public async ValueTask Zip_LZMA_Streamed_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.lzma.dd.zip"); [Fact] - public async Task Zip_LZMA_ArchiveStreamRead_Async() => + public async ValueTask Zip_LZMA_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.lzma.zip"); [Fact] - public async Task Zip_PPMd_Streamed_ArchiveStreamRead_Async() => + public async ValueTask Zip_PPMd_Streamed_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.ppmd.dd.zip"); [Fact] - public async Task Zip_PPMd_ArchiveStreamRead_Async() => + public async ValueTask Zip_PPMd_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.ppmd.zip"); [Fact] - public async Task Zip_None_ArchiveStreamRead_Async() => + public async ValueTask Zip_None_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.none.zip"); [Fact] - public async Task Zip_Zip64_ArchiveStreamRead_Async() => + public async ValueTask Zip_Zip64_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Zip.zip64.zip"); [Fact] - public async Task Zip_Shrink_ArchiveStreamRead_Async() + public async ValueTask Zip_Shrink_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -78,7 +79,7 @@ public async Task Zip_Shrink_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Implode_ArchiveStreamRead_Async() + 
public async ValueTask Zip_Implode_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -86,7 +87,7 @@ public async Task Zip_Implode_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Reduce1_ArchiveStreamRead_Async() + public async ValueTask Zip_Reduce1_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -94,7 +95,7 @@ public async Task Zip_Reduce1_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Reduce2_ArchiveStreamRead_Async() + public async ValueTask Zip_Reduce2_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -102,7 +103,7 @@ public async Task Zip_Reduce2_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Reduce3_ArchiveStreamRead_Async() + public async ValueTask Zip_Reduce3_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -110,7 +111,7 @@ public async Task Zip_Reduce3_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Reduce4_ArchiveStreamRead_Async() + public async ValueTask Zip_Reduce4_ArchiveStreamRead_Async() { UseExtensionInsteadOfNameToVerify = true; UseCaseInsensitiveToVerify = true; @@ -118,7 +119,7 @@ public async Task Zip_Reduce4_ArchiveStreamRead_Async() } [Fact] - public async Task Zip_Random_Write_Remove_Async() + public async ValueTask Zip_Random_Write_Remove_Async() { var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip"); var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip"); @@ -140,7 +141,7 @@ public async Task Zip_Random_Write_Remove_Async() } [Fact] - public async Task Zip_Random_Write_Add_Async() + public async ValueTask Zip_Random_Write_Add_Async() { var jpg = Path.Combine(ORIGINAL_FILES_PATH, "jpg", "test.jpg"); var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.mod.zip"); @@ -160,7 +161,7 @@ public async Task 
Zip_Random_Write_Add_Async() } [Fact] - public async Task Zip_Create_New_Async() + public async ValueTask Zip_Create_New_Async() { var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.noEmptyDirs.zip"); var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip"); @@ -179,12 +180,12 @@ public async Task Zip_Create_New_Async() } [Fact] - public async Task Zip_Deflate_Entry_Stream_Async() + public async ValueTask Zip_Deflate_Entry_Stream_Async() { using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip"))) - using (var archive = ZipArchive.Open(stream)) + await using (var archive = await ZipArchive.OpenAsync(new AsyncOnlyStream(stream))) { - foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory)) + await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory)) { await entry.WriteToDirectoryAsync( SCRATCH_FILES_PATH, @@ -196,10 +197,10 @@ await entry.WriteToDirectoryAsync( } [Fact] - public async Task Zip_Deflate_Archive_WriteToDirectoryAsync() + public async ValueTask Zip_Deflate_Archive_WriteToDirectoryAsync() { using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip"))) - using (var archive = ZipArchive.Open(stream)) + await using (var archive = await ZipArchive.OpenAsync(new AsyncOnlyStream(stream))) { await archive.WriteToDirectoryAsync( SCRATCH_FILES_PATH, @@ -210,13 +211,13 @@ await archive.WriteToDirectoryAsync( } [Fact] - public async Task Zip_Deflate_Archive_WriteToDirectoryAsync_WithProgress() + public async ValueTask Zip_Deflate_Archive_WriteToDirectoryAsync_WithProgress() { var progressReports = new System.Collections.Generic.List(); var progress = new Progress(report => progressReports.Add(report)); using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip"))) - using (var archive = ZipArchive.Open(stream)) + await using (var archive = await ZipArchive.OpenAsync(new AsyncOnlyStream(stream))) { 
await archive.WriteToDirectoryAsync( SCRATCH_FILES_PATH, diff --git a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs index a2a1c0cca..067e99537 100644 --- a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs @@ -379,7 +379,6 @@ var file in Directory.EnumerateFiles( archive.SaveTo(scratchPath, writerOptions); } CompareArchivesByPath(unmodified, scratchPath, Encoding.GetEncoding(866)); - Directory.Delete(SCRATCH_FILES_PATH, true); } /// @@ -449,7 +448,6 @@ var file in Directory.EnumerateFiles( ) ); } - Directory.Delete(SCRATCH_FILES_PATH, true); } [Fact] diff --git a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs index eae1b22e2..c04c9e930 100644 --- a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs @@ -36,7 +36,7 @@ public class ZipTypesLevelsWithCrcRatioAsyncTests : ArchiveTests [InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032 [InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038 [InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002 - public async Task Zip_Create_Archive_With_3_Files_Crc32_Test_Async( + public async ValueTask Zip_Create_Archive_With_3_Files_Crc32_Test_Async( CompressionType compressionType, int compressionLevel, int sizeMb, @@ -110,7 +110,7 @@ await writer.WriteAsync( [InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002 [InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032 [InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002 - public async Task Zip_WriterFactory_Crc32_Test_Async( + public async ValueTask Zip_WriterFactory_Crc32_Test_Async( CompressionType compressionType, int compressionLevel, int sizeMb, @@ -153,7 +153,7 @@ await 
writer.WriteAsync( using var archive = ZipArchive.Open(zipStream); var entry = archive.Entries.Single(e => !e.IsDirectory); - using var entryStream = entry.OpenEntryStream(); + using var entryStream = await entry.OpenEntryStreamAsync(); using var extractedStream = new MemoryStream(); await entryStream.CopyToAsync(extractedStream); @@ -177,7 +177,7 @@ await writer.WriteAsync( [InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004 [InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032 [InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004 - public async Task Zip_ZipArchiveOpen_Crc32_Test_Async( + public async ValueTask Zip_ZipArchiveOpen_Crc32_Test_Async( CompressionType compressionType, int compressionLevel, int sizeMb, @@ -208,7 +208,7 @@ await writer.WriteAsync( using var archive = ZipArchive.Open(zipStream); var entry = archive.Entries.Single(e => !e.IsDirectory); - using var entryStream = entry.OpenEntryStream(); + using var entryStream = await entry.OpenEntryStreamAsync(); using var extractedStream = new MemoryStream(); await entryStream.CopyToAsync(extractedStream); @@ -238,7 +238,7 @@ await writer.WriteAsync( } // Helper method for async archive content verification - private async Task VerifyArchiveContentAsync( + private async ValueTask VerifyArchiveContentAsync( MemoryStream zipStream, Dictionary expectedFiles ) @@ -254,7 +254,7 @@ private async Task VerifyArchiveContentAsync( ); var expected = expectedFiles[entry.Key!]; - using var entryStream = entry.OpenEntryStream(); + using var entryStream = await entry.OpenEntryStreamAsync(); using var extractedStream = new MemoryStream(); await entryStream.CopyToAsync(extractedStream); diff --git a/tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs index 2892be577..33a673f47 100644 --- a/tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs @@ 
-1,7 +1,9 @@ using System; using System.IO; +using System.Threading; using System.Threading.Tasks; using SharpCompress.Common; +using SharpCompress.IO; using SharpCompress.Readers; using SharpCompress.Readers.Zip; using SharpCompress.Test.Mocks; @@ -14,11 +16,11 @@ public class ZipReaderAsyncTests : ReaderTests public ZipReaderAsyncTests() => UseExtensionInsteadOfNameToVerify = true; [Fact] - public async Task Issue_269_Double_Skip_Async() + public async ValueTask Issue_269_Double_Skip_Async() { var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip"); using Stream stream = new ForwardOnlyStream(File.OpenRead(path)); - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); var count = 0; while (await reader.MoveToNextEntryAsync()) { @@ -34,36 +36,36 @@ public async Task Issue_269_Double_Skip_Async() } [Fact] - public async Task Zip_Zip64_Streamed_Read_Async() => + public async ValueTask Zip_Zip64_Streamed_Read_Async() => await ReadAsync("Zip.zip64.zip", CompressionType.Deflate); [Fact] - public async Task Zip_ZipX_Streamed_Read_Async() => + public async ValueTask Zip_ZipX_Streamed_Read_Async() => await ReadAsync("Zip.zipx", CompressionType.LZMA); [Fact] - public async Task Zip_BZip2_Streamed_Read_Async() => + public async ValueTask Zip_BZip2_Streamed_Read_Async() => await ReadAsync("Zip.bzip2.dd.zip", CompressionType.BZip2); [Fact] - public async Task Zip_BZip2_Read_Async() => + public async ValueTask Zip_BZip2_Read_Async() => await ReadAsync("Zip.bzip2.zip", CompressionType.BZip2); [Fact] - public async Task Zip_Deflate_Streamed2_Read_Async() => + public async ValueTask Zip_Deflate_Streamed2_Read_Async() => await ReadAsync("Zip.deflate.dd-.zip", CompressionType.Deflate); [Fact] - public async Task Zip_Deflate_Streamed_Read_Async() => + public async ValueTask Zip_Deflate_Streamed_Read_Async() => await ReadAsync("Zip.deflate.dd.zip", CompressionType.Deflate); [Fact] - public 
async Task Zip_Deflate_Streamed_Skip_Async() + public async ValueTask Zip_Deflate_Streamed_Skip_Async() { using Stream stream = new ForwardOnlyStream( File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip")) ); - using var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream)); var x = 0; while (await reader.MoveToNextEntryAsync()) { @@ -82,39 +84,39 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Zip_Deflate_Read_Async() => + public async ValueTask Zip_Deflate_Read_Async() => await ReadAsync("Zip.deflate.zip", CompressionType.Deflate); [Fact] - public async Task Zip_Deflate64_Read_Async() => + public async ValueTask Zip_Deflate64_Read_Async() => await ReadAsync("Zip.deflate64.zip", CompressionType.Deflate64); [Fact] - public async Task Zip_LZMA_Streamed_Read_Async() => + public async ValueTask Zip_LZMA_Streamed_Read_Async() => await ReadAsync("Zip.lzma.dd.zip", CompressionType.LZMA); [Fact] - public async Task Zip_LZMA_Read_Async() => + public async ValueTask Zip_LZMA_Read_Async() => await ReadAsync("Zip.lzma.zip", CompressionType.LZMA); [Fact] - public async Task Zip_PPMd_Streamed_Read_Async() => + public async ValueTask Zip_PPMd_Streamed_Read_Async() => await ReadAsync("Zip.ppmd.dd.zip", CompressionType.PPMd); [Fact] - public async Task Zip_PPMd_Read_Async() => + public async ValueTask Zip_PPMd_Read_Async() => await ReadAsync("Zip.ppmd.zip", CompressionType.PPMd); [Fact] - public async Task Zip_None_Read_Async() => + public async ValueTask Zip_None_Read_Async() => await ReadAsync("Zip.none.zip", CompressionType.None); [Fact] - public async Task Zip_Deflate_NoEmptyDirs_Read_Async() => + public async ValueTask Zip_Deflate_NoEmptyDirs_Read_Async() => await ReadAsync("Zip.deflate.noEmptyDirs.zip", CompressionType.Deflate); [Fact] - public async Task Zip_BZip2_PkwareEncryption_Read_Async() + public async ValueTask Zip_BZip2_PkwareEncryption_Read_Async() { using ( 
Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip")) @@ -137,12 +139,12 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Zip_Reader_Disposal_Test_Async() + public async ValueTask Zip_Reader_Disposal_Test_Async() { using var stream = new TestStream( File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip")) ); - using (var reader = ReaderFactory.Open(stream)) + await using (var reader = await ReaderFactory.OpenAsync(new AsyncOnlyStream(stream))) { while (await reader.MoveToNextEntryAsync()) { @@ -159,12 +161,14 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Zip_Reader_Disposal_Test2_Async() + public async ValueTask Zip_Reader_Disposal_Test2_Async() { using var stream = new TestStream( - File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip")) + new AsyncOnlyStream( + File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip")) + ) ); - var reader = ReaderFactory.Open(stream); + await using var reader = await ReaderFactory.OpenAsync(stream); while (await reader.MoveToNextEntryAsync()) { if (!reader.Entry.IsDirectory) @@ -179,12 +183,12 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Zip_LZMA_WinzipAES_Read_Async() => + public async ValueTask Zip_LZMA_WinzipAES_Read_Async() => await Assert.ThrowsAsync(async () => { using ( - Stream stream = File.OpenRead( - Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.WinzipAES.zip") + Stream stream = new AsyncOnlyStream( + File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.WinzipAES.zip")) ) ) using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" })) @@ -205,11 +209,11 @@ await reader.WriteEntryToDirectoryAsync( }); [Fact] - public async Task Zip_Deflate_WinzipAES_Read_Async() + public async ValueTask Zip_Deflate_WinzipAES_Read_Async() { using ( - Stream stream = File.OpenRead( - Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip") + Stream stream = new 
AsyncOnlyStream( + File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip")) ) ) using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" })) @@ -230,10 +234,14 @@ await reader.WriteEntryToDirectoryAsync( } [Fact] - public async Task Zip_Deflate_ZipCrypto_Read_Async() + public async ValueTask Zip_Deflate_ZipCrypto_Read_Async() { var count = 0; - using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip"))) + using ( + Stream stream = new AsyncOnlyStream( + File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip")) + ) + ) using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" })) { while (await reader.MoveToNextEntryAsync()) diff --git a/tests/SharpCompress.Test/Zip/ZipWriterAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipWriterAsyncTests.cs index bd37e9144..897e95bda 100644 --- a/tests/SharpCompress.Test/Zip/ZipWriterAsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipWriterAsyncTests.cs @@ -11,7 +11,7 @@ public ZipWriterAsyncTests() : base(ArchiveType.Zip) { } [Fact] - public async Task Zip_Deflate_Write_Async() => + public async ValueTask Zip_Deflate_Write_Async() => await WriteAsync( CompressionType.Deflate, "Zip.deflate.noEmptyDirs.zip", @@ -20,7 +20,7 @@ await WriteAsync( ); [Fact] - public async Task Zip_BZip2_Write_Async() => + public async ValueTask Zip_BZip2_Write_Async() => await WriteAsync( CompressionType.BZip2, "Zip.bzip2.noEmptyDirs.zip", @@ -29,7 +29,7 @@ await WriteAsync( ); [Fact] - public async Task Zip_None_Write_Async() => + public async ValueTask Zip_None_Write_Async() => await WriteAsync( CompressionType.None, "Zip.none.noEmptyDirs.zip", @@ -38,7 +38,7 @@ await WriteAsync( ); [Fact] - public async Task Zip_LZMA_Write_Async() => + public async ValueTask Zip_LZMA_Write_Async() => await WriteAsync( CompressionType.LZMA, "Zip.lzma.noEmptyDirs.zip", @@ -47,7 +47,7 @@ await WriteAsync( ); [Fact] - public async Task Zip_PPMd_Write_Async() => + 
public async ValueTask Zip_PPMd_Write_Async() => await WriteAsync( CompressionType.PPMd, "Zip.ppmd.noEmptyDirs.zip", @@ -56,7 +56,7 @@ await WriteAsync( ); [Fact] - public async Task Zip_Rar_Write_Async() => + public async ValueTask Zip_Rar_Write_Async() => await Assert.ThrowsAsync(async () => await WriteAsync( CompressionType.Rar, diff --git a/tests/SharpCompress.Test/packages.lock.json b/tests/SharpCompress.Test/packages.lock.json index 7f87d400f..baea090e9 100644 --- a/tests/SharpCompress.Test/packages.lock.json +++ b/tests/SharpCompress.Test/packages.lock.json @@ -29,6 +29,22 @@ "Microsoft.NETFramework.ReferenceAssemblies.net48": "1.0.3" } }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[8.0.0, )", + "resolved": "8.0.0", + "contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "8.0.0", + "Microsoft.SourceLink.Common": "8.0.0" + } + }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "Mono.Posix.NETStandard": { "type": "Direct", "requested": "[1.0.0, )", @@ -55,6 +71,11 @@ "Microsoft.TestPlatform.ObjectModel": "17.13.0" } }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" + }, "Microsoft.CodeCoverage": { "type": "Transitive", "resolved": "18.0.1", @@ -65,6 +86,11 @@ "resolved": "1.0.3", "contentHash": "zMk4D+9zyiEWByyQ7oPImPN/Jhpj166Ky0Nlla4eXlNL8hI/BtSJsgR8Inldd4NNpIAH3oh8yym0W2DrhXdSLQ==" }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw==" + }, 
"Microsoft.TestPlatform.ObjectModel": { "type": "Transitive", "resolved": "17.13.0", @@ -222,6 +248,22 @@ "Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3" } }, + "Microsoft.SourceLink.GitHub": { + "type": "Direct", + "requested": "[8.0.0, )", + "resolved": "8.0.0", + "contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==", + "dependencies": { + "Microsoft.Build.Tasks.Git": "8.0.0", + "Microsoft.SourceLink.Common": "8.0.0" + } + }, + "Microsoft.VisualStudio.Threading.Analyzers": { + "type": "Direct", + "requested": "[17.14.15, )", + "resolved": "17.14.15", + "contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw==" + }, "Mono.Posix.NETStandard": { "type": "Direct", "requested": "[1.0.0, )", @@ -245,6 +287,11 @@ "resolved": "3.1.5", "contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA==" }, + "Microsoft.Build.Tasks.Git": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ==" + }, "Microsoft.CodeCoverage": { "type": "Transitive", "resolved": "18.0.1", @@ -255,6 +302,11 @@ "resolved": "1.0.3", "contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA==" }, + "Microsoft.SourceLink.Common": { + "type": "Transitive", + "resolved": "8.0.0", + "contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw==" + }, "Microsoft.TestPlatform.ObjectModel": { "type": "Transitive", "resolved": "18.0.1",