diff --git a/.editorconfig b/.editorconfig index ec46ee724..e94ae1dda 100644 --- a/.editorconfig +++ b/.editorconfig @@ -257,60 +257,73 @@ csharp_style_namespace_declarations = file_scoped ########################################## [*.{cs,csx,cake,vb,vbx}] -dotnet_diagnostic.CA1000.severity = suggestion -dotnet_diagnostic.CA1001.severity = error -dotnet_diagnostic.CA1018.severity = error +dotnet_diagnostic.CA1000.severity = error # do not declare static members on generic types +dotnet_diagnostic.CA1001.severity = error # disposable field owners should be disposable +dotnet_diagnostic.CA1018.severity = error # mark custom attributes with AttributeUsage dotnet_diagnostic.CA1036.severity = silent -dotnet_diagnostic.CA1051.severity = suggestion -dotnet_diagnostic.CA1068.severity = error -dotnet_diagnostic.CA1069.severity = error -dotnet_diagnostic.CA1304.severity = error -dotnet_diagnostic.CA1305.severity = suggestion -dotnet_diagnostic.CA1307.severity = suggestion -dotnet_diagnostic.CA1309.severity = suggestion -dotnet_diagnostic.CA1310.severity = error -dotnet_diagnostic.CA1507.severity = suggestion -dotnet_diagnostic.CA1513.severity = suggestion -dotnet_diagnostic.CA1707.severity = suggestion -dotnet_diagnostic.CA1708.severity = suggestion -dotnet_diagnostic.CA1711.severity = suggestion -dotnet_diagnostic.CA1716.severity = suggestion -dotnet_diagnostic.CA1720.severity = suggestion -dotnet_diagnostic.CA1725.severity = suggestion -dotnet_diagnostic.CA1805.severity = suggestion -dotnet_diagnostic.CA1816.severity = suggestion -dotnet_diagnostic.CA1822.severity = suggestion -dotnet_diagnostic.CA1825.severity = error +dotnet_diagnostic.CA1051.severity = suggestion # do not declare visible instance fields +dotnet_diagnostic.CA1068.severity = error # cancellation token parameters must come last +dotnet_diagnostic.CA1069.severity = error # enums should not have duplicate values +dotnet_diagnostic.CA1304.severity = error # specify CultureInfo for culture-sensitive operations 
+dotnet_diagnostic.CA1305.severity = error # specify IFormatProvider +dotnet_diagnostic.CA1307.severity = error # specify StringComparison for clarity +dotnet_diagnostic.CA1309.severity = error # use ordinal StringComparison +dotnet_diagnostic.CA1310.severity = error # specify StringComparison for correctness +dotnet_diagnostic.CA1507.severity = error # use nameof in place of string literals +dotnet_diagnostic.CA1513.severity = suggestion # use ObjectDisposedException throw helper +dotnet_diagnostic.CA1707.severity = suggestion # identifiers should not contain underscores +dotnet_diagnostic.CA1708.severity = suggestion # identifiers should differ by more than case +dotnet_diagnostic.CA1711.severity = suggestion # identifiers should not have incorrect suffixes +dotnet_diagnostic.CA1716.severity = suggestion # identifiers should not match language keywords +dotnet_diagnostic.CA1720.severity = suggestion # identifiers should not contain type names +dotnet_diagnostic.CA1725.severity = error # parameter names should match base declaration +dotnet_diagnostic.CA1805.severity = suggestion # avoid unnecessary default value initialization +dotnet_diagnostic.CA1816.severity = suggestion # call GC.SuppressFinalize correctly +dotnet_diagnostic.CA1822.severity = suggestion # mark members static when possible +dotnet_diagnostic.CA1825.severity = error # avoid zero-length array allocations dotnet_diagnostic.CA1826.severity = silent -dotnet_diagnostic.CA1827.severity = error -dotnet_diagnostic.CA1829.severity = suggestion -dotnet_diagnostic.CA1834.severity = error -dotnet_diagnostic.CA1845.severity = suggestion -dotnet_diagnostic.CA1848.severity = suggestion -dotnet_diagnostic.CA1852.severity = suggestion +dotnet_diagnostic.CA1827.severity = error # use Any() instead of Count()/LongCount() checks +dotnet_diagnostic.CA1829.severity = error # use Length or Count property instead of LINQ Count() +dotnet_diagnostic.CA1834.severity = error # prefer StringBuilder.Append(char) for single 
chars +dotnet_diagnostic.CA1845.severity = error # use span-based string.Concat overloads +dotnet_diagnostic.CA1848.severity = error # use LoggerMessage for high-performance logging +dotnet_diagnostic.CA1852.severity = suggestion # seal types that are not intended for inheritance dotnet_diagnostic.CA1860.severity = silent -dotnet_diagnostic.CA2016.severity = suggestion -dotnet_diagnostic.CA2201.severity = error -dotnet_diagnostic.CA2206.severity = error -dotnet_diagnostic.CA2208.severity = error -dotnet_diagnostic.CA2211.severity = error -dotnet_diagnostic.CA2249.severity = error -dotnet_diagnostic.CA2251.severity = error +dotnet_diagnostic.CA2016.severity = error # forward CancellationToken to invoked methods +dotnet_diagnostic.CA2201.severity = error # do not throw reserved or overly general exceptions +dotnet_diagnostic.CA2206.severity = error # enforce CA2206 usage guidance +dotnet_diagnostic.CA2208.severity = error # instantiate ArgumentException types correctly +dotnet_diagnostic.CA2211.severity = error # non-constant fields should not be visible +dotnet_diagnostic.CA2249.severity = error # prefer string.Contains over string.IndexOf checks +dotnet_diagnostic.CA2251.severity = error # use string.Equals over string.Compare equality checks dotnet_diagnostic.CA2252.severity = none -dotnet_diagnostic.CA2254.severity = suggestion - -dotnet_diagnostic.CS0169.severity = error -dotnet_diagnostic.CS0219.severity = error -dotnet_diagnostic.CS0649.severity = suggestion -dotnet_diagnostic.CS1998.severity = error -dotnet_diagnostic.CS8602.severity = error -dotnet_diagnostic.CS8604.severity = error -dotnet_diagnostic.CS8618.severity = error -dotnet_diagnostic.CS0618.severity = suggestion -dotnet_diagnostic.CS4014.severity = error -dotnet_diagnostic.CS8600.severity = error -dotnet_diagnostic.CS8603.severity = error -dotnet_diagnostic.CS8625.severity = error +dotnet_diagnostic.CA2254.severity = error # logging message templates should be static expressions + +; High volume 
analyzers requiring extensive refactoring - set to suggestion temporarily +dotnet_diagnostic.CA1835.severity = suggestion # prefer Memory-based async overloads +dotnet_diagnostic.CA1510.severity = error # use ArgumentNullException.ThrowIfNull +dotnet_diagnostic.CA1512.severity = error # use ArgumentOutOfRangeException throw helpers +dotnet_diagnostic.CA1844.severity = suggestion # provide memory-based async stream overrides +dotnet_diagnostic.CA1825.severity = error # avoid zero-length array allocations +dotnet_diagnostic.CA1712.severity = suggestion # do not prefix enum values with type name +dotnet_diagnostic.CA2022.severity = suggestion # avoid inexact reads with Stream.Read +dotnet_diagnostic.CA1850.severity = error # prefer static HashData over ComputeHash +dotnet_diagnostic.CA2263.severity = error # prefer generic overload when type is known +dotnet_diagnostic.CA2012.severity = error # use ValueTasks correctly +dotnet_diagnostic.CA1001.severity = error # disposable field owners should be disposable + +dotnet_diagnostic.CS0169.severity = error # field is never used +dotnet_diagnostic.CS0219.severity = error # variable assigned but never used +dotnet_diagnostic.CS0649.severity = error # field is never assigned and remains default +dotnet_diagnostic.CS1998.severity = error # async method lacks await operators +dotnet_diagnostic.CS8602.severity = error # possible null reference dereference +dotnet_diagnostic.CS8604.severity = error # possible null reference argument +dotnet_diagnostic.CS8618.severity = error # non-nullable member is uninitialized +dotnet_diagnostic.CS0618.severity = error # obsolete member usage +dotnet_diagnostic.CS4014.severity = error # unawaited task call +dotnet_diagnostic.CS8600.severity = error # possible null to non-nullable conversion +dotnet_diagnostic.CS8603.severity = error # possible null reference return +dotnet_diagnostic.CS8625.severity = error # cannot assign null to non-nullable reference dotnet_diagnostic.BL0005.severity = 
suggestion @@ -318,9 +331,9 @@ dotnet_diagnostic.MVC1000.severity = suggestion dotnet_diagnostic.RZ10012.severity = error -dotnet_diagnostic.IDE0004.severity = error # redundant cast +dotnet_diagnostic.IDE0004.severity = suggestion # redundant cast dotnet_diagnostic.IDE0005.severity = suggestion -dotnet_diagnostic.IDE0007.severity = error # Use var +dotnet_diagnostic.IDE0007.severity = suggestion # Use var dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements dotnet_diagnostic.IDE0010.severity = silent # populate switch dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified @@ -334,7 +347,7 @@ dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property dotnet_diagnostic.IDE0033.severity = error # prefer tuple name dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type -dotnet_diagnostic.IDE0040.severity = error # modifiers required +dotnet_diagnostic.IDE0040.severity = suggestion # modifiers required dotnet_diagnostic.IDE0041.severity = error # simplify null dotnet_diagnostic.IDE0042.severity = error # deconstruct variable dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible @@ -348,6 +361,55 @@ dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters dotnet_diagnostic.IDE0061.severity = suggestion # local expression body dotnet_diagnostic.IDE0062.severity = suggestion # local to static dotnet_diagnostic.IDE0063.severity = error # simplify using + +[src/**/*.cs] +dotnet_diagnostic.VSTHRD002.severity = error # avoid sync waits on async operations +dotnet_diagnostic.VSTHRD100.severity = error # avoid async void methods +dotnet_diagnostic.VSTHRD101.severity = error # avoid unsupported async delegates +dotnet_diagnostic.VSTHRD102.severity = error # implement internal logic asynchronously +dotnet_diagnostic.VSTHRD103.severity = error # use async methods from async methods 
+dotnet_diagnostic.VSTHRD104.severity = error # offer async alternatives when possible +dotnet_diagnostic.VSTHRD107.severity = error # await task within using expression +dotnet_diagnostic.VSTHRD110.severity = error # observe result of async calls +dotnet_diagnostic.VSTHRD111.severity = error # use ConfigureAwait(bool) +dotnet_diagnostic.VSTHRD112.severity = error # implement System.IAsyncDisposable +dotnet_diagnostic.VSTHRD113.severity = error # check for System.IAsyncDisposable +dotnet_diagnostic.VSTHRD114.severity = error # avoid returning null from Task methods +dotnet_diagnostic.VSTHRD200.severity = suggestion # use Async suffix naming convention + +[build/**/*.cs] +dotnet_diagnostic.VSTHRD001.severity = none # avoid legacy thread switching methods (disabled for build scripts) +dotnet_diagnostic.VSTHRD002.severity = none # avoid sync waits on async operations (disabled for build scripts) +dotnet_diagnostic.VSTHRD003.severity = none # avoid awaiting foreign tasks (disabled for build scripts) +dotnet_diagnostic.VSTHRD004.severity = none # await SwitchToMainThreadAsync (disabled for build scripts) +dotnet_diagnostic.VSTHRD010.severity = none # invoke single-threaded types on main thread (disabled for build scripts) +dotnet_diagnostic.VSTHRD011.severity = none # use AsyncLazy (disabled for build scripts) +dotnet_diagnostic.VSTHRD012.severity = none # provide JoinableTaskFactory where allowed (disabled for build scripts) +dotnet_diagnostic.VSTHRD100.severity = none # avoid async void methods (disabled for build scripts) +dotnet_diagnostic.VSTHRD101.severity = none # avoid unsupported async delegates (disabled for build scripts) +dotnet_diagnostic.VSTHRD102.severity = none # implement internal logic asynchronously (disabled for build scripts) +dotnet_diagnostic.VSTHRD103.severity = none # use async methods from async methods (disabled for build scripts) +dotnet_diagnostic.VSTHRD104.severity = none # offer async alternatives when possible (disabled for build scripts) 
+dotnet_diagnostic.VSTHRD105.severity = none # avoid TaskScheduler.Current assumptions (disabled for build scripts) +dotnet_diagnostic.VSTHRD106.severity = none # use InvokeAsync for async events (disabled for build scripts) +dotnet_diagnostic.VSTHRD107.severity = none # await task within using expression (disabled for build scripts) +dotnet_diagnostic.VSTHRD108.severity = none # assert thread affinity unconditionally (disabled for build scripts) +dotnet_diagnostic.VSTHRD109.severity = none # switch instead of assert in async methods (disabled for build scripts) +dotnet_diagnostic.VSTHRD110.severity = none # observe result of async calls (disabled for build scripts) +dotnet_diagnostic.VSTHRD111.severity = none # use ConfigureAwait(bool) (disabled for build scripts) +dotnet_diagnostic.VSTHRD112.severity = none # implement System.IAsyncDisposable (disabled for build scripts) +dotnet_diagnostic.VSTHRD113.severity = none # check for System.IAsyncDisposable (disabled for build scripts) +dotnet_diagnostic.VSTHRD114.severity = none # avoid returning null from Task methods (disabled for build scripts) +dotnet_diagnostic.VSTHRD115.severity = none # avoid explicit null SynchronizationContext in JTC (disabled for build scripts) +dotnet_diagnostic.VSTHRD200.severity = none # use Async suffix naming convention (disabled for build scripts) + +[tests/**/*.cs] +dotnet_diagnostic.CA1861.severity = suggestion # avoid constant arrays as arguments +dotnet_diagnostic.CA1305.severity = suggestion # specify IFormatProvider +dotnet_diagnostic.CA1307.severity = suggestion # specify StringComparison for clarity +dotnet_diagnostic.IDE0042.severity = suggestion +dotnet_diagnostic.IDE0051.severity = suggestion +dotnet_diagnostic.IDE0063.severity = suggestion dotnet_diagnostic.IDE0066.severity = suggestion # switch expression dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified dotnet_diagnostic.IDE0078.severity = 
suggestion # use pattern matching @@ -359,7 +421,7 @@ dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation -dotnet_diagnostic.IDE0290.severity = error # Primary Constructor +dotnet_diagnostic.IDE0290.severity = suggestion # Primary Constructor dotnet_diagnostic.IDE0300.severity = suggestion # Collection dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList @@ -367,8 +429,8 @@ dotnet_diagnostic.NX0001.severity = error dotnet_diagnostic.NX0002.severity = silent dotnet_diagnostic.NX0003.severity = silent -dotnet_diagnostic.VSTHRD110.severity = error -dotnet_diagnostic.VSTHRD107.severity = error +dotnet_diagnostic.VSTHRD110.severity = error # observe result of async calls +dotnet_diagnostic.VSTHRD107.severity = error # await task within using expression ########################################## # Styles diff --git a/Directory.Build.props b/Directory.Build.props index f03c2d6e9..277c525f0 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -8,8 +8,6 @@ true true true - False - False true true true diff --git a/build/Program.cs b/build/Program.cs index bfeb60b9f..1c36e0a93 100644 --- a/build/Program.cs +++ b/build/Program.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Globalization; using System.IO; using System.Linq; using System.Runtime.InteropServices; @@ -114,14 +115,19 @@ IEnumerable GetFiles(string d) { var (version, isPrerelease) = await GetVersion(); Console.WriteLine($"VERSION={version}"); - Console.WriteLine($"PRERELEASE={isPrerelease.ToString().ToLower()}"); + Console.WriteLine( + $"PRERELEASE={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}" + ); // Write to environment file for GitHub Actions var 
githubOutput = Environment.GetEnvironmentVariable("GITHUB_OUTPUT"); if (!string.IsNullOrEmpty(githubOutput)) { File.AppendAllText(githubOutput, $"version={version}\n"); - File.AppendAllText(githubOutput, $"prerelease={isPrerelease.ToString().ToLower()}\n"); + File.AppendAllText( + githubOutput, + $"prerelease={isPrerelease.ToString().ToLower(CultureInfo.InvariantCulture)}\n" + ); } } ); @@ -363,9 +369,13 @@ IEnumerable GetFiles(string d) : "⚪"; if (timeChange > 25 || memChange > 25) + { hasRegressions = true; + } if (timeChange < -25 || memChange < -25) + { hasImprovements = true; + } output.Add( $"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |" @@ -545,7 +555,10 @@ static async Task GetGitOutput(string command, string args) } catch (Exception ex) { - throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex); + throw new InvalidOperationException( + $"Git command failed: git {command} {args}\n{ex.Message}", + ex + ); } } @@ -575,12 +588,12 @@ static Dictionary ParseBenchmarkResults(string markdown var line = lines[i].Trim(); // Look for table rows with benchmark data - if (line.StartsWith("|") && line.Contains("'") && i > 0) + if (line.StartsWith('|') && line.Contains("'", StringComparison.Ordinal) && i > 0) { var parts = line.Split('|', StringSplitOptions.TrimEntries); if (parts.Length >= 5) { - var method = parts[1].Replace("'", "'"); + var method = parts[1].Replace("'", "'", StringComparison.Ordinal); var meanStr = parts[2]; // Find Allocated column - it's usually the last column or labeled "Allocated" @@ -588,10 +601,10 @@ static Dictionary ParseBenchmarkResults(string markdown for (int j = parts.Length - 2; j >= 2; j--) { if ( - parts[j].Contains("KB") - || parts[j].Contains("MB") - || parts[j].Contains("GB") - || parts[j].Contains("B") + parts[j].Contains("KB", StringComparison.Ordinal) + || parts[j].Contains("MB", 
StringComparison.Ordinal) + || parts[j].Contains("GB", StringComparison.Ordinal) + || parts[j].Contains('B', StringComparison.Ordinal) ) { memoryStr = parts[j]; @@ -624,17 +637,21 @@ static Dictionary ParseBenchmarkResults(string markdown static double ParseTimeValue(string timeStr) { if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA") + { return 0; + } // Remove thousands separators and parse - timeStr = timeStr.Replace(",", "").Trim(); + timeStr = timeStr.Replace(",", "", StringComparison.Ordinal).Trim(); var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)"); if (!match.Success) + { return 0; + } - var value = double.Parse(match.Groups[1].Value); - var unit = match.Groups[2].Value.ToLower(); + var value = double.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture); + var unit = match.Groups[2].Value.ToLower(CultureInfo.InvariantCulture); // Convert to microseconds for comparison return unit switch @@ -650,16 +667,20 @@ static double ParseTimeValue(string timeStr) static double ParseMemoryValue(string memStr) { if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA") + { return 0; + } - memStr = memStr.Replace(",", "").Trim(); + memStr = memStr.Replace(",", "", StringComparison.Ordinal).Trim(); var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)"); if (!match.Success) + { return 0; + } - var value = double.Parse(match.Groups[1].Value); - var unit = match.Groups[2].Value.ToUpper(); + var value = double.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture); + var unit = match.Groups[2].Value.ToUpper(CultureInfo.InvariantCulture); // Convert to KB for comparison return unit switch @@ -675,7 +696,9 @@ static double ParseMemoryValue(string memStr) static double CalculateChange(double baseline, double current) { if (baseline == 0) + { return 0; + } return ((current - baseline) / baseline) * 100; } diff --git a/src/SharpCompress/Archives/ArchiveVolumeFactory.cs b/src/SharpCompress/Archives/ArchiveVolumeFactory.cs 
index 26d7c6409..9bf86ce25 100644 --- a/src/SharpCompress/Archives/ArchiveVolumeFactory.cs +++ b/src/SharpCompress/Archives/ArchiveVolumeFactory.cs @@ -19,7 +19,9 @@ internal abstract class ArchiveVolumeFactory part1.DirectoryName!, String.Concat( m.Groups[1].Value, - (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0') + (index + 1) + .ToString(global::SharpCompress.Common.Constants.DefaultCultureInfo) + .PadLeft(m.Groups[2].Value.Length, '0') ) ) ); diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.Async.cs b/src/SharpCompress/Archives/GZip/GZipArchive.Async.cs index 67443ab92..49f03937d 100644 --- a/src/SharpCompress/Archives/GZip/GZipArchive.Async.cs +++ b/src/SharpCompress/Archives/GZip/GZipArchive.Async.cs @@ -40,10 +40,7 @@ protected override async ValueTask SaveToAsync( { throw new InvalidFormatException("Only one entry is allowed in a GZip Archive"); } - await using var writer = new GZipWriter( - stream, - options as GZipWriterOptions ?? new GZipWriterOptions(options) - ); + await using var writer = new GZipWriter(stream, options); await foreach ( var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false) ) diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.cs b/src/SharpCompress/Archives/GZip/GZipArchive.cs index 06000d0a2..e47bf7c6f 100644 --- a/src/SharpCompress/Archives/GZip/GZipArchive.cs +++ b/src/SharpCompress/Archives/GZip/GZipArchive.cs @@ -37,7 +37,7 @@ public void SaveTo(FileInfo fileInfo) } protected override GZipArchiveEntry CreateEntryInternal( - string filePath, + string key, Stream source, long size, DateTime? 
modified, @@ -48,13 +48,11 @@ bool closeStream { throw new InvalidFormatException("Only one entry is allowed in a GZip Archive"); } - return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream); + return new GZipWritableArchiveEntry(this, source, key, size, modified, closeStream); } - protected override GZipArchiveEntry CreateDirectoryEntry( - string directoryPath, - DateTime? modified - ) => throw new NotSupportedException("GZip archives do not support directory entries."); + protected override GZipArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) => + throw new NotSupportedException("GZip archives do not support directory entries."); protected override void SaveTo( Stream stream, @@ -67,10 +65,7 @@ IEnumerable newEntries { throw new InvalidFormatException("Only one entry is allowed in a GZip Archive"); } - using var writer = new GZipWriter( - stream, - options as GZipWriterOptions ?? new GZipWriterOptions(options) - ); + using var writer = new GZipWriter(stream, options); foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) { using var entryStream = entry.OpenEntryStream(); diff --git a/src/SharpCompress/Archives/Rar/RarArchive.Factory.cs b/src/SharpCompress/Archives/Rar/RarArchive.Factory.cs index edf745905..707049d39 100644 --- a/src/SharpCompress/Archives/Rar/RarArchive.Factory.cs +++ b/src/SharpCompress/Archives/Rar/RarArchive.Factory.cs @@ -31,7 +31,7 @@ public static ValueTask OpenAsyncArchive( return new((IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions)); } - public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null) + public static IRarArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null) { filePath.NotNullOrEmpty(nameof(filePath)); var fileInfo = new FileInfo(filePath); @@ -39,24 +39,24 @@ public static IRarArchive OpenArchive(string filePath, ReaderOptions? 
options = new SourceStream( fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), - options ?? new ReaderOptions() + readerOptions ?? new ReaderOptions() ) ); } - public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null) + public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) { fileInfo.NotNull(nameof(fileInfo)); return new RarArchive( new SourceStream( fileInfo, i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo), - options ?? new ReaderOptions() + readerOptions ?? new ReaderOptions() ) ); } - public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = null) + public static IRarArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); @@ -65,7 +65,9 @@ public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = nu throw new ArgumentException("Stream must be seekable", nameof(stream)); } - return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions())); + return new RarArchive( + new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions()) + ); } public static IRarArchive OpenArchive( diff --git a/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs b/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs index 1bd98f58f..33ad21a96 100644 --- a/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs +++ b/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs @@ -19,7 +19,9 @@ internal static class RarArchiveVolumeFactory part1.DirectoryName!, String.Concat( m.Groups[1].Value, - (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'), + (index + 1) + .ToString(global::SharpCompress.Common.Constants.DefaultCultureInfo) + .PadLeft(m.Groups[2].Value.Length, '0'), m.Groups[3].Value ) ) @@ -39,7 +41,15 @@ internal static class RarArchiveVolumeFactory index == 0 ? 
m.Groups[2].Value + m.Groups[3].Value : (char)(m.Groups[2].Value[0] + ((index - 1) / 100)) - + (index - 1).ToString("D4").Substring(2) + + (index - 1) + .ToString( + "D4", + global::SharpCompress + .Common + .Constants + .DefaultCultureInfo + ) + .Substring(2) ) ) ); diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs index db06ec9ab..30adbaf90 100644 --- a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs +++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs @@ -37,7 +37,7 @@ public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) { - fileInfo.NotNull("fileInfo"); + fileInfo.NotNull(nameof(fileInfo)); return new SevenZipArchive( new SourceStream( fileInfo, @@ -81,7 +81,7 @@ public static IArchive OpenArchive( public static IArchive OpenArchive(Stream stream, ReaderOptions? 
readerOptions = null) { - stream.NotNull("stream"); + stream.NotNull(nameof(stream)); if (stream is not { CanSeek: true }) { diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs index 710e490c1..943547df3 100644 --- a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs +++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs @@ -6,7 +6,7 @@ using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.SevenZip; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.IO; using SharpCompress.Readers; diff --git a/src/SharpCompress/Archives/Tar/TarArchive.Async.cs b/src/SharpCompress/Archives/Tar/TarArchive.Async.cs index c27ff0f9e..ac584e1ba 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.Async.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.Async.cs @@ -25,10 +25,7 @@ protected override async ValueTask SaveToAsync( CancellationToken cancellationToken = default ) { - using var writer = new TarWriter( - stream, - options as TarWriterOptions ?? 
new TarWriterOptions(options) - ); + using var writer = new TarWriter(stream, options); await foreach ( var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false) ) diff --git a/src/SharpCompress/Archives/Tar/TarArchive.Factory.cs b/src/SharpCompress/Archives/Tar/TarArchive.Factory.cs index e1775875f..3c3caa3f8 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.Factory.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.Factory.cs @@ -226,7 +226,7 @@ public static bool IsTarFile(Stream stream) var isEmptyArchive = tarHeader.Name?.Length == 0 && tarHeader.Size == 0 - && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType); + && IsDefined(tarHeader.EntryType); return readSucceeded || isEmptyArchive; } catch (Exception) @@ -254,7 +254,7 @@ public static async ValueTask IsTarFileAsync( var isEmptyArchive = tarHeader.Name?.Length == 0 && tarHeader.Size == 0 - && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType); + && IsDefined(tarHeader.EntryType); return readSucceeded || isEmptyArchive; } catch (Exception) @@ -269,4 +269,13 @@ public static async ValueTask IsTarFileAsync( public static ValueTask> CreateAsyncArchive() => new(new TarArchive()); + + private static bool IsDefined(EntryType value) + { +#if LEGACY_DOTNET + return Enum.IsDefined(typeof(EntryType), value); +#else + return Enum.IsDefined(value); +#endif + } } diff --git a/src/SharpCompress/Archives/Tar/TarArchive.cs b/src/SharpCompress/Archives/Tar/TarArchive.cs index 860fb4829..fdd8ac3a2 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.cs @@ -182,7 +182,7 @@ var header in TarHeaderFactory.ReadHeader( } protected override TarArchiveEntry CreateEntryInternal( - string filePath, + string key, Stream source, long size, DateTime? 
modified, @@ -192,16 +192,14 @@ bool closeStream this, source, CompressionType.Unknown, - filePath, + key, size, modified, closeStream ); - protected override TarArchiveEntry CreateDirectoryEntry( - string directoryPath, - DateTime? modified - ) => new TarWritableArchiveEntry(this, directoryPath, modified); + protected override TarArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) => + new TarWritableArchiveEntry(this, key, modified); protected override void SaveTo( Stream stream, @@ -210,10 +208,7 @@ protected override void SaveTo( IEnumerable newEntries ) { - using var writer = new TarWriter( - stream, - options as TarWriterOptions ?? new TarWriterOptions(options) - ); + using var writer = new TarWriter(stream, options); foreach (var entry in oldEntries.Concat(newEntries)) { if (entry.IsDirectory) diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.Async.cs b/src/SharpCompress/Archives/Zip/ZipArchive.Async.cs index 4f17405ca..75255b25d 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.Async.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.Async.cs @@ -84,10 +84,7 @@ protected override async ValueTask SaveToAsync( CancellationToken cancellationToken = default ) { - using var writer = new ZipWriter( - stream, - options as ZipWriterOptions ?? new ZipWriterOptions(options) - ); + using var writer = new ZipWriter(stream, options); await foreach ( var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false) ) diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs b/src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs index 2635f42fe..dc4aad385 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs @@ -170,7 +170,7 @@ public static bool IsZipFile(Stream stream, string? 
password = null) { return false; } - return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + return IsDefined(header.ZipHeaderType); } catch (CryptographicException) { @@ -203,7 +203,7 @@ public static bool IsZipMulti(Stream stream, string? password = null) return false; } } - return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + return IsDefined(header.ZipHeaderType); } catch (CryptographicException) { @@ -234,7 +234,7 @@ public static async ValueTask IsZipFileAsync( { return false; } - return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + return IsDefined(header.ZipHeaderType); } catch (CryptographicException) { @@ -261,9 +261,11 @@ public static async ValueTask IsZipMultiAsync( var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null); try { - var header = headerFactory - .ReadStreamHeader(stream) - .FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split); + var header = await headerFactory + .ReadStreamHeaderAsync(stream) + .Where(x => x.ZipHeaderType != ZipHeaderType.Split) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); if (header is null) { if (stream.CanSeek) @@ -286,7 +288,7 @@ var h in z.ReadSeekableHeaderAsync(stream) return false; } } - return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType); + return IsDefined(header.ZipHeaderType); } catch (CryptographicException) { @@ -297,4 +299,13 @@ var h in z.ReadSeekableHeaderAsync(stream) return false; } } + + private static bool IsDefined(ZipHeaderType value) + { +#if LEGACY_DOTNET + return Enum.IsDefined(typeof(ZipHeaderType), value); +#else + return Enum.IsDefined(value); +#endif + } } diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.cs b/src/SharpCompress/Archives/Zip/ZipArchive.cs index 1d009f03e..206048142 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.cs @@ -34,14 +34,14 @@ internal ZipArchive(SourceStream sourceStream) internal 
ZipArchive() : base(ArchiveType.Zip) { } - protected override IEnumerable LoadVolumes(SourceStream stream) + protected override IEnumerable LoadVolumes(SourceStream sourceStream) { - stream.LoadAllParts(); + sourceStream.LoadAllParts(); //stream.Position = 0; - var streams = stream.Streams.ToList(); + var streams = sourceStream.Streams.ToList(); var idx = 0; - if (streams.Count() > 1) + if (streams.Count > 1) { //check if second stream is zip header without changing position var headerProbeStream = streams[1]; @@ -51,7 +51,7 @@ protected override IEnumerable LoadVolumes(SourceStream stream) headerProbeStream.Position = startPosition; if (isZip) { - stream.IsVolumes = true; + sourceStream.IsVolumes = true; var tmp = streams[0]; streams.RemoveAt(0); @@ -61,7 +61,7 @@ protected override IEnumerable LoadVolumes(SourceStream stream) } } - return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable(); + return new ZipVolume(sourceStream, ReaderOptions, idx++).AsEnumerable(); } protected override IEnumerable LoadEntries(IEnumerable volumes) @@ -127,10 +127,7 @@ protected override void SaveTo( IEnumerable newEntries ) { - using var writer = new ZipWriter( - stream, - options as ZipWriterOptions ?? new ZipWriterOptions(options) - ); + using var writer = new ZipWriter(stream, options); foreach (var entry in oldEntries.Concat(newEntries)) { if (entry.IsDirectory) @@ -153,17 +150,15 @@ IEnumerable newEntries } protected override ZipArchiveEntry CreateEntryInternal( - string filePath, + string key, Stream source, long size, DateTime? modified, bool closeStream - ) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream); + ) => new ZipWritableArchiveEntry(this, source, key, size, modified, closeStream); - protected override ZipArchiveEntry CreateDirectoryEntry( - string directoryPath, - DateTime? 
modified - ) => new ZipWritableArchiveEntry(this, directoryPath, modified); + protected override ZipArchiveEntry CreateDirectoryEntry(string key, DateTime? modified) => + new ZipWritableArchiveEntry(this, key, modified); protected override IReader CreateReaderForSolidExtraction() { diff --git a/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs b/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs index 0f0b65173..257c6db07 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs @@ -21,7 +21,9 @@ internal static class ZipArchiveVolumeFactory String.Concat( m.Groups[1].Value, Regex.Replace(m.Groups[2].Value, @"[^xz]", ""), - index.ToString().PadLeft(2, '0') + index + .ToString(global::SharpCompress.Common.Constants.DefaultCultureInfo) + .PadLeft(2, '0') ) ) ); diff --git a/src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs index 8f8a031e6..4f4aecbfc 100644 --- a/src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs +++ b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs @@ -14,11 +14,11 @@ public sealed partial class AceFileHeader /// Supports both ACE 1.0 and ACE 2.0 formats. 
/// public override async ValueTask ReadAsync( - Stream stream, + Stream reader, CancellationToken cancellationToken = default ) { - var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false); + var headerData = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false); if (headerData.Length == 0) { return null; @@ -104,7 +104,7 @@ public sealed partial class AceFileHeader } // Store the data start position - DataStartPosition = stream.Position; + DataStartPosition = reader.Position; return this; } diff --git a/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs index 9a9688108..1c71d18d5 100644 --- a/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs +++ b/src/SharpCompress/Common/Ace/Headers/AceFileHeader.cs @@ -56,9 +56,9 @@ public AceFileHeader(IArchiveEncoding archiveEncoding) /// Returns null if no more entries or end of archive. /// Supports both ACE 1.0 and ACE 2.0 formats. /// - public override AceHeader? Read(Stream stream) + public override AceHeader? Read(Stream reader) { - var headerData = ReadHeader(stream); + var headerData = ReadHeader(reader); if (headerData.Length == 0) { return null; @@ -144,7 +144,7 @@ public AceFileHeader(IArchiveEncoding archiveEncoding) } // Store the data start position - DataStartPosition = stream.Position; + DataStartPosition = reader.Position; return this; } diff --git a/src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs index 290cd199b..b21ae450a 100644 --- a/src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs +++ b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs @@ -15,11 +15,11 @@ public sealed partial class AceMainHeader /// Supports both ACE 1.0 and ACE 2.0 formats. 
/// public override async ValueTask ReadAsync( - Stream stream, + Stream reader, CancellationToken cancellationToken = default ) { - var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false); + var headerData = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false); if (headerData.Length == 0) { return null; diff --git a/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs index fd74354f9..01a19bcf0 100644 --- a/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs +++ b/src/SharpCompress/Common/Ace/Headers/AceMainHeader.cs @@ -32,9 +32,9 @@ public AceMainHeader(IArchiveEncoding archiveEncoding) /// Returns header if this is a valid ACE archive. /// Supports both ACE 1.0 and ACE 2.0 formats. /// - public override AceHeader? Read(Stream stream) + public override AceHeader? Read(Stream reader) { - var headerData = ReadHeader(stream); + var headerData = ReadHeader(reader); if (headerData.Length == 0) { return null; diff --git a/src/SharpCompress/Common/Arc/ArcFilePart.Async.cs b/src/SharpCompress/Common/Arc/ArcFilePart.Async.cs index 6b00e3ff5..4af922889 100644 --- a/src/SharpCompress/Common/Arc/ArcFilePart.Async.cs +++ b/src/SharpCompress/Common/Arc/ArcFilePart.Async.cs @@ -2,6 +2,7 @@ using System.IO; using System.Threading; using System.Threading.Tasks; +using SharpCompress.Compressors.ArcLzw; using SharpCompress.Compressors.Lzw; using SharpCompress.Compressors.RLE90; using SharpCompress.Compressors.Squeezed; diff --git a/src/SharpCompress/Common/Arc/ArcFilePart.cs b/src/SharpCompress/Common/Arc/ArcFilePart.cs index 4a5231a18..d104396ef 100644 --- a/src/SharpCompress/Common/Arc/ArcFilePart.cs +++ b/src/SharpCompress/Common/Arc/ArcFilePart.cs @@ -8,6 +8,7 @@ using SharpCompress.Common.Tar; using SharpCompress.Common.Tar.Headers; using SharpCompress.Common.Zip.Headers; +using SharpCompress.Compressors.ArcLzw; using SharpCompress.Compressors.Lzw; using 
SharpCompress.Compressors.RLE90; using SharpCompress.Compressors.Squeezed; diff --git a/src/SharpCompress/Common/Arj/ArjFilePart.cs b/src/SharpCompress/Common/Arj/ArjFilePart.cs index f8bb2311a..d4f540675 100644 --- a/src/SharpCompress/Common/Arj/ArjFilePart.cs +++ b/src/SharpCompress/Common/Arj/ArjFilePart.cs @@ -26,43 +26,34 @@ internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream) internal override Stream GetCompressedStream() { - if (_stream != null) + Stream compressedStream; + switch (Header.CompressionMethod) { - Stream compressedStream; - switch (Header.CompressionMethod) - { - case CompressionMethod.Stored: - compressedStream = new ReadOnlySubStream( - _stream, - Header.DataStartPosition, - Header.CompressedSize - ); - break; - case CompressionMethod.CompressedMost: - case CompressionMethod.Compressed: - case CompressionMethod.CompressedFaster: - if (Header.OriginalSize > 128 * 1024) - { - throw new NotSupportedException( - "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB" - ); - } - compressedStream = new LhaStream( - _stream, - (int)Header.OriginalSize - ); - break; - case CompressionMethod.CompressedFastest: - compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize); - break; - default: + case CompressionMethod.Stored: + compressedStream = new ReadOnlySubStream( + _stream, + Header.DataStartPosition, + Header.CompressedSize + ); + break; + case CompressionMethod.CompressedMost: + case CompressionMethod.Compressed: + case CompressionMethod.CompressedFaster: + if (Header.OriginalSize > 128 * 1024) + { throw new NotSupportedException( - "CompressionMethod: " + Header.CompressionMethod + "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB" ); - } - return compressedStream; + } + compressedStream = new LhaStream(_stream, (int)Header.OriginalSize); + break; + case CompressionMethod.CompressedFastest: + compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize); + 
break; + default: + throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod); } - return _stream.NotNull(); + return compressedStream; } internal override Stream GetRawStream() => _stream; diff --git a/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs b/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs index a12eb01d6..d0dd4ad37 100644 --- a/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs +++ b/src/SharpCompress/Common/Arj/Headers/ArjHeader.cs @@ -18,7 +18,6 @@ public enum ArjHeaderType public abstract partial class ArjHeader { - private const int FIRST_HDR_SIZE = 34; private const ushort ARJ_MAGIC = 0xEA60; public ArjHeader(ArjHeaderType type) @@ -138,7 +137,11 @@ protected List ReadExtendedHeaders(Stream reader) public static FileType FileTypeFromByte(byte value) { +#if LEGACY_DOTNET return Enum.IsDefined(typeof(FileType), value) ? (FileType)value : Headers.FileType.Unknown; +#else + return Enum.IsDefined((FileType)value) ? (FileType)value : Headers.FileType.Unknown; +#endif } public static bool IsArchive(Stream stream) diff --git a/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.Async.cs b/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.Async.cs index c55583251..5b5dab70a 100644 --- a/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.Async.cs +++ b/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.Async.cs @@ -7,16 +7,16 @@ namespace SharpCompress.Common.Arj.Headers; public partial class ArjLocalHeader { public override async ValueTask ReadAsync( - Stream stream, + Stream reader, CancellationToken cancellationToken = default ) { - var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false); + var body = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false); if (body.Length > 0) { - await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false); + await ReadExtendedHeadersAsync(reader, cancellationToken).ConfigureAwait(false); var header = LoadFrom(body); - 
header.DataStartPosition = stream.Position; + header.DataStartPosition = reader.Position; return header; } return null; diff --git a/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs b/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs index d37121a9e..5e5145f54 100644 --- a/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs +++ b/src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.cs @@ -43,14 +43,14 @@ public ArjLocalHeader(ArchiveEncoding archiveEncoding) archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding)); } - public override ArjHeader? Read(Stream stream) + public override ArjHeader? Read(Stream reader) { - var body = ReadHeader(stream); + var body = ReadHeader(reader); if (body.Length > 0) { - ReadExtendedHeaders(stream); + ReadExtendedHeaders(reader); var header = LoadFrom(body); - header.DataStartPosition = stream.Position; + header.DataStartPosition = reader.Position; return header; } return null; diff --git a/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.Async.cs b/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.Async.cs index f337b0d3c..2271877c5 100644 --- a/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.Async.cs +++ b/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.Async.cs @@ -7,12 +7,12 @@ namespace SharpCompress.Common.Arj.Headers; public partial class ArjMainHeader { public override async ValueTask ReadAsync( - Stream stream, + Stream reader, CancellationToken cancellationToken = default ) { - var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false); - await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false); + var body = await ReadHeaderAsync(reader, cancellationToken).ConfigureAwait(false); + await ReadExtendedHeadersAsync(reader, cancellationToken).ConfigureAwait(false); return LoadFrom(body); } } diff --git a/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs b/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs index 3f4586131..e81d2669b 
100644 --- a/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs +++ b/src/SharpCompress/Common/Arj/Headers/ArjMainHeader.cs @@ -10,9 +10,6 @@ namespace SharpCompress.Common.Arj.Headers; public partial class ArjMainHeader : ArjHeader { - private const int FIRST_HDR_SIZE = 34; - private const ushort ARJ_MAGIC = 0xEA60; - public ArchiveEncoding ArchiveEncoding { get; } public int ArchiverVersionNumber { get; private set; } @@ -40,10 +37,10 @@ public ArjMainHeader(ArchiveEncoding archiveEncoding) archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding)); } - public override ArjHeader? Read(Stream stream) + public override ArjHeader? Read(Stream reader) { - var body = ReadHeader(stream); - ReadExtendedHeaders(stream); + var body = ReadHeader(reader); + ReadExtendedHeaders(reader); return LoadFrom(body); } diff --git a/src/SharpCompress/Common/Arj/Headers/DosDateTime.cs b/src/SharpCompress/Common/Arj/Headers/DosDateTime.cs index c882e2126..3f913faa9 100644 --- a/src/SharpCompress/Common/Arj/Headers/DosDateTime.cs +++ b/src/SharpCompress/Common/Arj/Headers/DosDateTime.cs @@ -32,5 +32,6 @@ public DosDateTime(long dosValue) } } - public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss"); + public override string ToString() => + DateTime.ToString("yyyy-MM-dd HH:mm:ss", Constants.DefaultCultureInfo); } diff --git a/src/SharpCompress/Common/Constants.cs b/src/SharpCompress/Common/Constants.cs index b9ffcbe37..5fba5ed9d 100644 --- a/src/SharpCompress/Common/Constants.cs +++ b/src/SharpCompress/Common/Constants.cs @@ -1,3 +1,5 @@ +using System.Globalization; + namespace SharpCompress.Common; public static class Constants @@ -38,4 +40,6 @@ public static class Constants /// /// public static int RewindableBufferSize { get; set; } = 81920; + + public static CultureInfo DefaultCultureInfo { get; set; } = CultureInfo.InvariantCulture; } diff --git a/src/SharpCompress/Common/EntryStream.cs b/src/SharpCompress/Common/EntryStream.cs index 
4ebd100ab..b8ccf339c 100644 --- a/src/SharpCompress/Common/EntryStream.cs +++ b/src/SharpCompress/Common/EntryStream.cs @@ -41,7 +41,11 @@ protected override void Dispose(bool disposing) { if (Utility.UseSyncOverAsyncDispose()) { +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits +#pragma warning disable CA2012 SkipEntryAsync().GetAwaiter().GetResult(); +#pragma warning restore CA2012 +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits } else { diff --git a/src/SharpCompress/Common/FlagUtility.cs b/src/SharpCompress/Common/FlagUtility.cs index 8f272b9d2..47b7f7ea8 100644 --- a/src/SharpCompress/Common/FlagUtility.cs +++ b/src/SharpCompress/Common/FlagUtility.cs @@ -46,7 +46,11 @@ public static bool HasFlag(ulong bitField, T flag) /// Flag to test /// public static bool HasFlag(T bitField, T flag) - where T : struct => HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag)); + where T : struct => + HasFlag( + Convert.ToInt64(bitField, Constants.DefaultCultureInfo), + Convert.ToInt64(flag, Constants.DefaultCultureInfo) + ); /// /// Returns true if the flag is set on the specified bit field. 
@@ -82,5 +86,10 @@ public static long SetFlag(long bitField, long flag, bool on) /// bool /// The flagged variable with the flag changed public static long SetFlag(T bitField, T flag, bool on) - where T : struct => SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on); + where T : struct => + SetFlag( + Convert.ToInt64(bitField, Constants.DefaultCultureInfo), + Convert.ToInt64(flag, Constants.DefaultCultureInfo), + on + ); } diff --git a/src/SharpCompress/Common/Rar/AsyncMarkingBinaryReader.cs b/src/SharpCompress/Common/Rar/AsyncMarkingBinaryReader.cs index 5b58e0a22..27d0e3244 100644 --- a/src/SharpCompress/Common/Rar/AsyncMarkingBinaryReader.cs +++ b/src/SharpCompress/Common/Rar/AsyncMarkingBinaryReader.cs @@ -8,7 +8,10 @@ namespace SharpCompress.Common.Rar; -internal class AsyncMarkingBinaryReader +internal class AsyncMarkingBinaryReader : IDisposable +#if NET8_0_OR_GREATER + , IAsyncDisposable +#endif { private readonly AsyncBinaryReader _reader; @@ -91,8 +94,8 @@ public virtual async ValueTask ReadInt64Async( } public async ValueTask ReadRarVIntAsync( - CancellationToken cancellationToken = default, - int maxBytes = 10 + int maxBytes = 10, + CancellationToken cancellationToken = default ) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false); private async ValueTask DoReadRarVIntAsync( @@ -187,4 +190,10 @@ private async ValueTask DoReadRarVIntUInt32Async( throw new FormatException("malformed vint"); } + + public virtual void Dispose() => _reader.Dispose(); + +#if NET8_0_OR_GREATER + public virtual ValueTask DisposeAsync() => _reader.DisposeAsync(); +#endif } diff --git a/src/SharpCompress/Common/Rar/CryptKey3.cs b/src/SharpCompress/Common/Rar/CryptKey3.cs index 93e451b1e..950f3ad66 100644 --- a/src/SharpCompress/Common/Rar/CryptKey3.cs +++ b/src/SharpCompress/Common/Rar/CryptKey3.cs @@ -1,18 +1,23 @@ -#nullable disable - +using System; +using System.Diagnostics.CodeAnalysis; using System.Security.Cryptography; using 
System.Text; using SharpCompress.Common.Rar.Headers; namespace SharpCompress.Common.Rar; +[SuppressMessage( + "Security", + "CA5350:Do Not Use Weak Cryptographic Algorithms", + Justification = "RAR3 key derivation is SHA-1 based by format definition." +)] internal class CryptKey3 : ICryptKey { const int AES_128 = 128; - private string _password; + private readonly string _password; - public CryptKey3(string password) => _password = password ?? ""; + public CryptKey3(string? password) => _password = password ?? string.Empty; public ICryptoTransform Transformer(byte[] salt) { @@ -32,7 +37,9 @@ public ICryptoTransform Transformer(byte[] salt) rawPassword[i + rawLength] = salt[i]; } +#if LEGACY_DOTNET var msgDigest = SHA1.Create(); +#endif const int noOfRounds = (1 << 18); const int iblock = 3; @@ -50,11 +57,19 @@ public ICryptoTransform Transformer(byte[] salt) if (i % (noOfRounds / EncryptionConstV5.SIZE_INITV) == 0) { +#if LEGACY_DOTNET digest = msgDigest.ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock)); +#else + digest = SHA1.HashData(data.AsSpan(0, (i + 1) * (rawPassword.Length + iblock))); +#endif aesIV[i / (noOfRounds / EncryptionConstV5.SIZE_INITV)] = digest[19]; } } +#if LEGACY_DOTNET digest = msgDigest.ComputeHash(data); +#else + digest = SHA1.HashData(data); +#endif //slow code ends var aesKey = new byte[EncryptionConstV5.SIZE_INITV]; diff --git a/src/SharpCompress/Common/Rar/CryptKey5.cs b/src/SharpCompress/Common/Rar/CryptKey5.cs index 90778c5af..490a3c540 100644 --- a/src/SharpCompress/Common/Rar/CryptKey5.cs +++ b/src/SharpCompress/Common/Rar/CryptKey5.cs @@ -14,8 +14,8 @@ internal class CryptKey5 : ICryptKey private string _password; private Rar5CryptoInfo _cryptoInfo; - private byte[] _pswCheck = { }; - private byte[] _hashKey = { }; + private byte[] _pswCheck = []; + private byte[] _hashKey = []; public CryptKey5(string? 
password, Rar5CryptoInfo rar5CryptoInfo) { @@ -34,8 +34,13 @@ private static List GenerateRarPBKDF2Key( int keyLength ) { - using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password)); + var passwordBytes = Encoding.UTF8.GetBytes(password); +#if LEGACY_DOTNET + using var hmac = new HMACSHA256(passwordBytes); var block = hmac.ComputeHash(salt); +#else + var block = HMACSHA256.HashData(passwordBytes, salt); +#endif var finalHash = (byte[])block.Clone(); var loop = new int[] { iterations, 17, 17 }; @@ -45,7 +50,11 @@ int keyLength { for (var i = 1; i < loop[x]; i++) { +#if LEGACY_DOTNET block = hmac.ComputeHash(block); +#else + block = HMACSHA256.HashData(passwordBytes, block); +#endif for (var j = 0; j < finalHash.Length; j++) { finalHash[j] ^= block[j]; diff --git a/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.Async.cs b/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.Async.cs index 031fe6bdb..eb34ec71e 100644 --- a/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.Async.cs +++ b/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.Async.cs @@ -1,5 +1,3 @@ -#nullable disable - using System.Threading; using System.Threading.Tasks; using SharpCompress.Common.Rar; diff --git a/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs b/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs index 1f5ab837b..bdb107fc5 100644 --- a/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs +++ b/src/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs @@ -1,5 +1,3 @@ -#nullable disable - using SharpCompress.Common.Rar; using SharpCompress.IO; diff --git a/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.Async.cs b/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.Async.cs index 9be7d4e9b..9a9b3065d 100644 --- a/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.Async.cs +++ b/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.Async.cs @@ -44,9 +44,7 @@ await reader PosAv = await 
reader.ReadInt32Async(cancellationToken).ConfigureAwait(false); if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER)) { - EncryptionVersion = await reader - .ReadByteAsync(cancellationToken) - .ConfigureAwait(false); + _ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); } } } diff --git a/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs b/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs index d3b5ff25a..1f6f147d7 100644 --- a/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs +++ b/src/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs @@ -29,7 +29,7 @@ protected sealed override void ReadFinish(MarkingBinaryReader reader) PosAv = reader.ReadInt32(); if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER)) { - EncryptionVersion = reader.ReadByte(); + _ = reader.ReadByte(); } } } @@ -44,8 +44,6 @@ protected sealed override void ReadFinish(MarkingBinaryReader reader) internal int? PosAv { get; private set; } - private byte? EncryptionVersion { get; set; } - public bool? IsEncrypted => IsRar5 ? null : HasFlag(ArchiveFlagsV4.PASSWORD); public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING); diff --git a/src/SharpCompress/Common/Rar/Headers/FileHeader.Async.cs b/src/SharpCompress/Common/Rar/Headers/FileHeader.Async.cs index a80d121e9..31024882d 100644 --- a/src/SharpCompress/Common/Rar/Headers/FileHeader.Async.cs +++ b/src/SharpCompress/Common/Rar/Headers/FileHeader.Async.cs @@ -79,7 +79,7 @@ await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false) CompressionMethod = (byte)((compressionInfo >> 7) & 0x7); WindowSize = IsDirectory ? 
0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf); - HostOs = await reader + _ = await reader .ReadRarVIntByteAsync(cancellationToken: cancellationToken) .ConfigureAwait(false); @@ -222,7 +222,7 @@ CancellationToken cancellationToken .ReadUInt32Async(cancellationToken) .ConfigureAwait(false); - HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); + _ = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false); diff --git a/src/SharpCompress/Common/Rar/Headers/FileHeader.cs b/src/SharpCompress/Common/Rar/Headers/FileHeader.cs index 966dc2bc9..cbf785039 100644 --- a/src/SharpCompress/Common/Rar/Headers/FileHeader.cs +++ b/src/SharpCompress/Common/Rar/Headers/FileHeader.cs @@ -72,7 +72,7 @@ private void ReadFromReaderV5(MarkingBinaryReader reader) // Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB. WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf); - HostOs = reader.ReadRarVIntByte(); + _ = reader.ReadRarVIntByte(); var nameSize = reader.ReadRarVIntUInt16(); @@ -197,7 +197,7 @@ private void ReadFromReaderV4(MarkingBinaryReader reader) var lowUncompressedSize = reader.ReadUInt32(); - HostOs = reader.ReadByte(); + _ = reader.ReadByte(); FileCrc = reader.ReadBytes(4); @@ -415,7 +415,6 @@ internal byte[]? FileCrc internal byte[]? R4Salt { get; private set; } internal Rar5CryptoInfo? 
Rar5CryptoInfo { get; private set; } - private byte HostOs { get; set; } internal uint FileAttributes { get; private set; } internal long CompressedSize { get; private set; } internal long UncompressedSize { get; private set; } diff --git a/src/SharpCompress/Common/Rar/Headers/MarkHeader.Async.cs b/src/SharpCompress/Common/Rar/Headers/MarkHeader.Async.cs index 19112f786..588a39168 100644 --- a/src/SharpCompress/Common/Rar/Headers/MarkHeader.Async.cs +++ b/src/SharpCompress/Common/Rar/Headers/MarkHeader.Async.cs @@ -122,7 +122,11 @@ public static async ValueTask ReadAsync( { if (!leaveStreamOpen) { +#if LEGACY_DOTNET stream.Dispose(); +#else + await stream.DisposeAsync().ConfigureAwait(false); +#endif } throw new InvalidFormatException("Error trying to read rar signature.", e); } diff --git a/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs b/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs index b45f98bbb..6285c4d2c 100644 --- a/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs +++ b/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs @@ -42,4 +42,20 @@ internal bool Equals(byte[] bytes) } public bool Equals(NewSubHeaderType? other) => other is not null && Equals(other._bytes); + + public override bool Equals(object? 
obj) => obj is NewSubHeaderType other && Equals(other); + + public override int GetHashCode() + { + unchecked + { + var hash = 17; + foreach (byte value in _bytes) + { + hash = (hash * 31) + value; + } + + return hash; + } + } } diff --git a/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs b/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs index f86e4a38b..2456e18ac 100644 --- a/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs +++ b/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs @@ -43,10 +43,17 @@ public static Rar5CryptoInfo Create(MarkingBinaryReader reader, bool readInitV) cryptoInfo.PswCheck = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK); var _pswCheckCsm = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK_CSUM); +#if LEGACY_DOTNET var sha = SHA256.Create(); cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck) .AsSpan() .StartsWith(_pswCheckCsm.AsSpan()); +#else + cryptoInfo.UsePswCheck = SHA256 + .HashData(cryptoInfo.PswCheck) + .AsSpan() + .StartsWith(_pswCheckCsm.AsSpan()); +#endif } return cryptoInfo; } @@ -98,10 +105,17 @@ await reader .ReadBytesAsync(EncryptionConstV5.SIZE_PSWCHECK_CSUM, CancellationToken.None) .ConfigureAwait(false); +#if LEGACY_DOTNET var sha = SHA256.Create(); cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck) .AsSpan() .StartsWith(_pswCheckCsm.AsSpan()); +#else + cryptoInfo.UsePswCheck = SHA256 + .HashData(cryptoInfo.PswCheck) + .AsSpan() + .StartsWith(_pswCheckCsm.AsSpan()); +#endif } return cryptoInfo; } @@ -118,9 +132,9 @@ public async ValueTask ReadInitVAsync(AsyncMarkingBinaryReader reader) => public int LG2Count = 0; - public byte[] InitV = { }; + public byte[] InitV = []; - public byte[] Salt = { }; + public byte[] Salt = []; - public byte[] PswCheck = { }; + public byte[] PswCheck = []; } diff --git a/src/SharpCompress/Common/Rar/RarEntry.cs b/src/SharpCompress/Common/Rar/RarEntry.cs index 9c0f7ae5f..3393f06ca 100644 --- a/src/SharpCompress/Common/Rar/RarEntry.cs +++ 
b/src/SharpCompress/Common/Rar/RarEntry.cs @@ -76,6 +76,7 @@ protected RarEntry(IReaderOptions readerOptions) public override string ToString() => string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}", Key, CompressedSize, diff --git a/src/SharpCompress/Common/Rar/RarVolume.cs b/src/SharpCompress/Common/Rar/RarVolume.cs index 2e2286d9f..e2bc6a457 100644 --- a/src/SharpCompress/Common/Rar/RarVolume.cs +++ b/src/SharpCompress/Common/Rar/RarVolume.cs @@ -140,7 +140,7 @@ private void EnsureArchiveHeaderLoaded() } // we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream - GetVolumeFileParts().First(); + _ = GetVolumeFileParts().First(); Stream.Position = 0; } } @@ -249,7 +249,9 @@ private async ValueTask EnsureArchiveHeaderLoadedAsync(CancellationToken cancell } // we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream +#pragma warning disable CA2016 // Forward token if available; polyfill FirstAsync has no token overload await GetVolumeFilePartsAsync(cancellationToken).FirstAsync().ConfigureAwait(false); +#pragma warning restore CA2016 Stream.Position = 0; } } diff --git a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.Async.cs b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.Async.cs index e3f8cb9fd..ae2ff2321 100644 --- a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.Async.cs +++ b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.Async.cs @@ -1,10 +1,8 @@ -#nullable disable - using System.IO; using System.Threading; using System.Threading.Tasks; using SharpCompress.Compressors.LZMA; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; namespace SharpCompress.Common.SevenZip; diff --git a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs index 
f4eb08517..3c4615223 100644 --- a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs +++ b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs @@ -1,10 +1,8 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; using SharpCompress.Compressors.LZMA; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; namespace SharpCompress.Common.SevenZip; @@ -18,7 +16,7 @@ internal partial class ArchiveDatabase internal List _packSizes = new(); internal List _packCrCs = new(); internal List _folders = new(); - internal List _numUnpackStreamsVector; + internal List _numUnpackStreamsVector = null!; internal List _files = new(); internal List _packStreamStartPositions = new(); @@ -35,7 +33,7 @@ internal void Clear() _packSizes.Clear(); _packCrCs.Clear(); _folders.Clear(); - _numUnpackStreamsVector = null; + _numUnpackStreamsVector = null!; _files.Clear(); _packStreamStartPositions.Clear(); diff --git a/src/SharpCompress/Common/SevenZip/ArchiveReader.Async.cs b/src/SharpCompress/Common/SevenZip/ArchiveReader.Async.cs index f81c52b99..a9ab2109e 100644 --- a/src/SharpCompress/Common/SevenZip/ArchiveReader.Async.cs +++ b/src/SharpCompress/Common/SevenZip/ArchiveReader.Async.cs @@ -1,13 +1,13 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; using System.Threading; using System.Threading.Tasks; +using SharpCompress.Compressors.Deflate64; using SharpCompress.Compressors.LZMA; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.IO; +using BlockType = SharpCompress.Compressors.LZMA.Utilities.BlockType; namespace SharpCompress.Common.SevenZip; @@ -167,10 +167,6 @@ private async ValueTask> ReadAndDecodePackedStreamsAsync( CancellationToken cancellationToken ) { -#if DEBUG - Log.WriteLine("-- ReadAndDecodePackedStreamsAsync --"); - Log.PushIndent(); -#endif try { ReadStreamsInfo( @@ -236,12 +232,7 @@ await 
outStream } return dataVector; } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private async ValueTask ReadHeaderAsync( @@ -250,10 +241,6 @@ private async ValueTask ReadHeaderAsync( CancellationToken cancellationToken ) { -#if DEBUG - Log.WriteLine("-- ReadHeaderAsync --"); - Log.PushIndent(); -#endif try { var type = ReadId(); @@ -264,7 +251,7 @@ CancellationToken cancellationToken type = ReadId(); } - List dataVector = null; + List? dataVector = null; if (type == BlockType.AdditionalStreamsInfo) { dataVector = await ReadAndDecodePackedStreamsAsync( @@ -322,9 +309,6 @@ out digests } var numFiles = ReadNum(); -#if DEBUG - Log.WriteLine("NumFiles: " + numFiles); -#endif db._files = new List(numFiles); for (var i = 0; i < numFiles; i++) { @@ -332,8 +316,8 @@ out digests } var emptyStreamVector = new BitVector(numFiles); - BitVector emptyFileVector = null; - BitVector antiFileVector = null; + BitVector emptyFileVector = null!; + BitVector antiFileVector = null!; var numEmptyStreams = 0; for (; ; ) @@ -351,26 +335,14 @@ out digests case BlockType.Name: using (var streamSwitch = new CStreamSwitch()) { - streamSwitch.Set(this, dataVector); -#if DEBUG - Log.Write("FileNames:"); -#endif + streamSwitch.Set(this, dataVector ?? []); for (var i = 0; i < db._files.Count; i++) { db._files[i].Name = _currentReader.ReadString(); -#if DEBUG - Log.Write(" " + db._files[i].Name); -#endif } -#if DEBUG - Log.WriteLine(); -#endif } break; case BlockType.WinAttributes: -#if DEBUG - Log.Write("WinAttributes:"); -#endif ReadAttributeVector( dataVector, numFiles, @@ -384,150 +356,70 @@ out digests } db._files[i].Attrib = attr; -#if DEBUG - Log.Write( - " " + (attr.HasValue ? 
attr.Value.ToString("x8") : "n/a") - ); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.EmptyStream: emptyStreamVector = ReadBitVector(numFiles); -#if DEBUG - - Log.Write("EmptyStream: "); -#endif for (var i = 0; i < emptyStreamVector.Length; i++) { if (emptyStreamVector[i]) { -#if DEBUG - Log.Write("x"); -#endif numEmptyStreams++; } - else - { -#if DEBUG - Log.Write("."); -#endif - } + else { } } -#if DEBUG - Log.WriteLine(); -#endif emptyFileVector = new BitVector(numEmptyStreams); antiFileVector = new BitVector(numEmptyStreams); break; case BlockType.EmptyFile: emptyFileVector = ReadBitVector(numEmptyStreams); -#if DEBUG - Log.Write("EmptyFile: "); - for (var i = 0; i < numEmptyStreams; i++) - { - Log.Write(emptyFileVector[i] ? "x" : "."); - } - Log.WriteLine(); -#endif break; case BlockType.Anti: antiFileVector = ReadBitVector(numEmptyStreams); -#if DEBUG - Log.Write("Anti: "); - for (var i = 0; i < numEmptyStreams; i++) - { - Log.Write(antiFileVector[i] ? "x" : "."); - } - Log.WriteLine(); -#endif break; case BlockType.StartPos: -#if DEBUG - Log.Write("StartPos:"); -#endif ReadNumberVector( dataVector, numFiles, delegate(int i, long? startPos) { db._files[i].StartPos = startPos; -#if DEBUG - Log.Write( - " " + (startPos.HasValue ? startPos.Value.ToString() : "n/a") - ); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.CTime: -#if DEBUG - Log.Write("CTime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].CTime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.ATime: -#if DEBUG - Log.Write("ATime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].ATime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? 
time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.MTime: -#if DEBUG - Log.Write("MTime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].MTime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.Dummy: -#if DEBUG - Log.Write("Dummy: " + size); -#endif for (long j = 0; j < size; j++) { if (ReadByte() != 0) @@ -572,11 +464,6 @@ out digests } } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } } diff --git a/src/SharpCompress/Common/SevenZip/ArchiveReader.cs b/src/SharpCompress/Common/SevenZip/ArchiveReader.cs index bb19b4b14..020e8e405 100644 --- a/src/SharpCompress/Common/SevenZip/ArchiveReader.cs +++ b/src/SharpCompress/Common/SevenZip/ArchiveReader.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using System.Diagnostics; @@ -7,20 +5,22 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; +using SharpCompress.Compressors.Deflate64; using SharpCompress.Compressors.LZMA; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.IO; +using BlockType = SharpCompress.Compressors.LZMA.Utilities.BlockType; namespace SharpCompress.Common.SevenZip; internal partial class ArchiveReader { - internal Stream _stream; + internal Stream _stream = null!; internal Stack _readerStack = new(); - internal DataReader _currentReader; + internal DataReader _currentReader = null!; internal long _streamOrigin; internal long _streamEnding; - internal byte[] _header; + internal byte[] _header = null!; private readonly Dictionary _cachedStreams = new(); @@ -54,9 +54,6 @@ private void ReadBytes(byte[] buffer, int offset, int length) => { return null; } -#if DEBUG - Log.WriteLine("ReadId: {0}", (BlockType)id); -#endif return (BlockType)id; 
} @@ -130,12 +127,12 @@ private BitVector ReadOptionalBitVector(int length) return ReadBitVector(length); } - private void ReadNumberVector(List dataVector, int numFiles, Action action) + private void ReadNumberVector(List? dataVector, int numFiles, Action action) { var defined = ReadOptionalBitVector(numFiles); using var streamSwitch = new CStreamSwitch(); - streamSwitch.Set(this, dataVector); + streamSwitch.Set(this, dataVector ?? []); for (var i = 0; i < numFiles; i++) { @@ -164,7 +161,7 @@ private DateTime TranslateTime(long time) => } private void ReadDateTimeVector( - List dataVector, + List? dataVector, int numFiles, Action action ) => @@ -175,14 +172,14 @@ private void ReadDateTimeVector( ); private void ReadAttributeVector( - List dataVector, + List? dataVector, int numFiles, Action action ) { var boolVector = ReadOptionalBitVector(numFiles); using var streamSwitch = new CStreamSwitch(); - streamSwitch.Set(this, dataVector); + streamSwitch.Set(this, dataVector ?? []); for (var i = 0; i < numFiles; i++) { if (boolVector[i]) @@ -202,25 +199,14 @@ private void ReadAttributeVector( private void GetNextFolderItem(CFolder folder) { -#if DEBUG - Log.WriteLine("-- GetNextFolderItem --"); - Log.PushIndent(); -#endif try { var numCoders = ReadNum(); -#if DEBUG - Log.WriteLine("NumCoders: " + numCoders); -#endif folder._coders = new List(numCoders); var numInStreams = 0; var numOutStreams = 0; for (var i = 0; i < numCoders; i++) { -#if DEBUG - Log.WriteLine("-- Coder --"); - Log.PushIndent(); -#endif try { var coder = new CCoderInfo(); @@ -230,18 +216,6 @@ private void GetNextFolderItem(CFolder folder) var idSize = (mainByte & 0xF); var longId = new byte[idSize]; ReadBytes(longId, 0, idSize); -#if DEBUG - Log.WriteLine( - "MethodId: " - + string.Join( - "", - Enumerable - .Range(0, idSize) - .Select(x => longId[x].ToString("x2")) - .ToArray() - ) - ); -#endif if (idSize > 8) { throw new NotSupportedException(); @@ -257,21 +231,9 @@ private void 
GetNextFolderItem(CFolder folder) { coder._numInStreams = ReadNum(); coder._numOutStreams = ReadNum(); -#if DEBUG - Log.WriteLine( - "Complex Stream (In: " - + coder._numInStreams - + " - Out: " - + coder._numOutStreams - + ")" - ); -#endif } else { -#if DEBUG - Log.WriteLine("Simple Stream (In: 1 - Out: 1)"); -#endif coder._numInStreams = 1; coder._numOutStreams = 1; } @@ -281,15 +243,6 @@ private void GetNextFolderItem(CFolder folder) var propsSize = ReadNum(); coder._props = new byte[propsSize]; ReadBytes(coder._props, 0, propsSize); -#if DEBUG - Log.WriteLine( - "Settings: " - + string.Join( - "", - coder._props.Select(bt => bt.ToString("x2")).ToArray() - ) - ); -#endif } if ((mainByte & 0x80) != 0) @@ -300,33 +253,18 @@ private void GetNextFolderItem(CFolder folder) numInStreams += coder._numInStreams; numOutStreams += coder._numOutStreams; } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } var numBindPairs = numOutStreams - 1; folder._bindPairs = new List(numBindPairs); -#if DEBUG - Log.WriteLine("BindPairs: " + numBindPairs); - Log.PushIndent(); -#endif for (var i = 0; i < numBindPairs; i++) { var bp = new CBindPair(); bp._inIndex = ReadNum(); bp._outIndex = ReadNum(); folder._bindPairs.Add(bp); -#if DEBUG - Log.WriteLine("#" + i + " - In: " + bp._inIndex + " - Out: " + bp._outIndex); -#endif } -#if DEBUG - Log.PopIndent(); -#endif if (numInStreams < numBindPairs) { @@ -342,9 +280,6 @@ private void GetNextFolderItem(CFolder folder) { if (folder.FindBindPairForInStream(i) < 0) { -#if DEBUG - Log.WriteLine("Single PackStream: #" + i); -#endif folder._packStreams.Add(i); break; } @@ -357,37 +292,18 @@ private void GetNextFolderItem(CFolder folder) } else { -#if DEBUG - Log.WriteLine("Multiple PackStreams ..."); - Log.PushIndent(); -#endif for (var i = 0; i < numPackStreams; i++) { var num = ReadNum(); -#if DEBUG - Log.WriteLine("#" + i + " - " + num); -#endif folder._packStreams.Add(num); } -#if DEBUG - Log.PopIndent(); -#endif } } - 
finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private List ReadHashDigests(int count) { -#if DEBUG - Log.Write("ReadHashDigests:"); -#endif - var defined = ReadOptionalBitVector(count); var digests = new List(count); for (var i = 0; i < count; i++) @@ -395,23 +311,13 @@ private void GetNextFolderItem(CFolder folder) if (defined[i]) { var crc = ReadUInt32(); -#if DEBUG - Log.Write(" " + crc.ToString("x8")); -#endif digests.Add(crc); } else { -#if DEBUG - Log.Write(" ########"); -#endif digests.Add(null); } } -#if DEBUG - - Log.WriteLine(); -#endif return digests; } @@ -421,40 +327,21 @@ private void ReadPackInfo( out List packCrCs ) { -#if DEBUG - Log.WriteLine("-- ReadPackInfo --"); - Log.PushIndent(); -#endif try { - packCrCs = null; + packCrCs = null!; dataOffset = checked((long)ReadNumber()); -#if DEBUG - Log.WriteLine("DataOffset: " + dataOffset); -#endif var numPackStreams = ReadNum(); -#if DEBUG - Log.WriteLine("NumPackStreams: " + numPackStreams); -#endif WaitAttribute(BlockType.Size); packSizes = new List(numPackStreams); -#if DEBUG - Log.Write("Sizes:"); -#endif for (var i = 0; i < numPackStreams; i++) { var size = checked((long)ReadNumber()); -#if DEBUG - Log.Write(" " + size); -#endif packSizes.Add(size); } -#if DEBUG - Log.WriteLine(); -#endif BlockType? type; for (; ; ) @@ -481,31 +368,19 @@ out List packCrCs } } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } - private void ReadUnpackInfo(List dataVector, out List folders) + private void ReadUnpackInfo(List? dataVector, out List folders) { -#if DEBUG - Log.WriteLine("-- ReadUnpackInfo --"); - Log.PushIndent(); -#endif try { WaitAttribute(BlockType.Folder); var numFolders = ReadNum(); -#if DEBUG - Log.WriteLine("NumFolders: {0}", numFolders); -#endif using (var streamSwitch = new CStreamSwitch()) { - streamSwitch.Set(this, dataVector); + streamSwitch.Set(this, dataVector ?? 
[]); //folders.Clear(); //folders.Reserve(numFolders); @@ -521,27 +396,15 @@ private void ReadUnpackInfo(List dataVector, out List folders) } WaitAttribute(BlockType.CodersUnpackSize); -#if DEBUG - Log.WriteLine("UnpackSizes:"); -#endif for (var i = 0; i < numFolders; i++) { var folder = folders[i]; -#if DEBUG - Log.Write(" #" + i + ":"); -#endif var numOutStreams = folder.GetNumOutStreams(); for (var j = 0; j < numOutStreams; j++) { var size = checked((long)ReadNumber()); -#if DEBUG - Log.Write(" " + size); -#endif folder._unpackSizes.Add(size); } -#if DEBUG - Log.WriteLine(); -#endif } for (; ; ) @@ -565,12 +428,7 @@ private void ReadUnpackInfo(List dataVector, out List folders) SkipData(); } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private void ReadSubStreamsInfo( @@ -580,13 +438,9 @@ private void ReadSubStreamsInfo( out List digests ) { -#if DEBUG - Log.WriteLine("-- ReadSubStreamsInfo --"); - Log.PushIndent(); -#endif try { - numUnpackStreamsInFolders = null; + numUnpackStreamsInFolders = null!; BlockType? 
type; for (; ; ) @@ -595,20 +449,11 @@ out List digests if (type == BlockType.NumUnpackStream) { numUnpackStreamsInFolders = new List(folders.Count); -#if DEBUG - Log.Write("NumUnpackStreams:"); -#endif for (var i = 0; i < folders.Count; i++) { var num = ReadNum(); -#if DEBUG - Log.Write(" " + num); -#endif numUnpackStreamsInFolders.Add(num); } -#if DEBUG - Log.WriteLine(); -#endif continue; } if (type is BlockType.Crc or BlockType.Size) @@ -641,26 +486,17 @@ out List digests { continue; } -#if DEBUG - Log.Write("#{0} StreamSizes:", i); -#endif long sum = 0; for (var j = 1; j < numSubstreams; j++) { if (type == BlockType.Size) { var size = checked((long)ReadNumber()); -#if DEBUG - Log.Write(" " + size); -#endif unpackSizes.Add(size); sum += size; } } unpackSizes.Add(folders[i].GetUnpackSize() - sum); -#if DEBUG - Log.WriteLine(" - rest: " + unpackSizes.Last()); -#endif } if (type == BlockType.Size) { @@ -679,7 +515,7 @@ out List digests numDigestsTotal += numSubstreams; } - digests = null; + digests = null!; for (; ; ) { @@ -696,7 +532,7 @@ out List digests var folder = folders[i]; if (numSubstreams == 1 && folder.UnpackCrcDefined) { - digests.Add(folder._unpackCrc.Value); + digests.Add(folder._unpackCrc!.Value); } else { @@ -732,16 +568,11 @@ out List digests type = ReadId(); } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private void ReadStreamsInfo( - List dataVector, + List? 
dataVector, out long dataOffset, out List packSizes, out List packCrCs, @@ -751,19 +582,15 @@ private void ReadStreamsInfo( out List digests ) { -#if DEBUG - Log.WriteLine("-- ReadStreamsInfo --"); - Log.PushIndent(); -#endif try { dataOffset = long.MinValue; - packSizes = null; - packCrCs = null; - folders = null; - numUnpackStreamsInFolders = null; - unpackSizes = null; - digests = null; + packSizes = null!; + packCrCs = null!; + folders = null!; + numUnpackStreamsInFolders = null!; + unpackSizes = null!; + digests = null!; for (; ; ) { @@ -779,7 +606,7 @@ out List digests break; case BlockType.SubStreamsInfo: ReadSubStreamsInfo( - folders, + folders!, out numUnpackStreamsInFolders, out unpackSizes, out digests @@ -790,20 +617,11 @@ out digests } } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private List ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass) { -#if DEBUG - Log.WriteLine("-- ReadAndDecodePackedStreams --"); - Log.PushIndent(); -#endif try { ReadStreamsInfo( @@ -864,20 +682,11 @@ out var digests } return dataVector; } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword) { -#if DEBUG - Log.WriteLine("-- ReadHeader --"); - Log.PushIndent(); -#endif try { var type = ReadId(); @@ -888,7 +697,7 @@ private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword) type = ReadId(); } - List dataVector = null; + List? 
dataVector = null; if (type == BlockType.AdditionalStreamsInfo) { dataVector = ReadAndDecodePackedStreams( @@ -944,9 +753,6 @@ out digests } var numFiles = ReadNum(); -#if DEBUG - Log.WriteLine("NumFiles: " + numFiles); -#endif db._files = new List(numFiles); for (var i = 0; i < numFiles; i++) { @@ -954,8 +760,8 @@ out digests } var emptyStreamVector = new BitVector(numFiles); - BitVector emptyFileVector = null; - BitVector antiFileVector = null; + BitVector emptyFileVector = null!; + BitVector antiFileVector = null!; var numEmptyStreams = 0; for (; ; ) @@ -973,26 +779,14 @@ out digests case BlockType.Name: using (var streamSwitch = new CStreamSwitch()) { - streamSwitch.Set(this, dataVector); -#if DEBUG - Log.Write("FileNames:"); -#endif + streamSwitch.Set(this, dataVector ?? []); for (var i = 0; i < db._files.Count; i++) { db._files[i].Name = _currentReader.ReadString(); -#if DEBUG - Log.Write(" " + db._files[i].Name); -#endif } -#if DEBUG - Log.WriteLine(); -#endif } break; case BlockType.WinAttributes: -#if DEBUG - Log.Write("WinAttributes:"); -#endif ReadAttributeVector( dataVector, numFiles, @@ -1026,150 +820,70 @@ out digests } db._files[i].Attrib = attr; -#if DEBUG - Log.Write( - " " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a") - ); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.EmptyStream: emptyStreamVector = ReadBitVector(numFiles); -#if DEBUG - - Log.Write("EmptyStream: "); -#endif for (var i = 0; i < emptyStreamVector.Length; i++) { if (emptyStreamVector[i]) { -#if DEBUG - Log.Write("x"); -#endif numEmptyStreams++; } - else - { -#if DEBUG - Log.Write("."); -#endif - } + else { } } -#if DEBUG - Log.WriteLine(); -#endif emptyFileVector = new BitVector(numEmptyStreams); antiFileVector = new BitVector(numEmptyStreams); break; case BlockType.EmptyFile: emptyFileVector = ReadBitVector(numEmptyStreams); -#if DEBUG - Log.Write("EmptyFile: "); - for (var i = 0; i < numEmptyStreams; i++) - { - Log.Write(emptyFileVector[i] ? 
"x" : "."); - } - Log.WriteLine(); -#endif break; case BlockType.Anti: antiFileVector = ReadBitVector(numEmptyStreams); -#if DEBUG - Log.Write("Anti: "); - for (var i = 0; i < numEmptyStreams; i++) - { - Log.Write(antiFileVector[i] ? "x" : "."); - } - Log.WriteLine(); -#endif break; case BlockType.StartPos: -#if DEBUG - Log.Write("StartPos:"); -#endif ReadNumberVector( dataVector, numFiles, delegate(int i, long? startPos) { db._files[i].StartPos = startPos; -#if DEBUG - Log.Write( - " " + (startPos.HasValue ? startPos.Value.ToString() : "n/a") - ); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.CTime: -#if DEBUG - Log.Write("CTime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].CTime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.ATime: -#if DEBUG - Log.Write("ATime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].ATime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.MTime: -#if DEBUG - Log.Write("MTime:"); -#endif ReadDateTimeVector( dataVector, numFiles, delegate(int i, DateTime? time) { db._files[i].MTime = time; -#if DEBUG - Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a")); -#endif } ); -#if DEBUG - Log.WriteLine(); -#endif break; case BlockType.Dummy: -#if DEBUG - Log.Write("Dummy: " + size); -#endif for (long j = 0; j < size; j++) { if (ReadByte() != 0) @@ -1215,12 +929,7 @@ out digests } } } - finally - { -#if DEBUG - Log.PopIndent(); -#endif - } + finally { } } #endregion @@ -1445,7 +1154,7 @@ public override long Seek(long offset, SeekOrigin origin) => public override void SetLength(long value) => throw new NotSupportedException(); - private Stream _stream; + private Stream? 
_stream; private long _rem; private int _currentIndex; @@ -1457,7 +1166,7 @@ private void ProcessEmptyFiles() ) { OpenFile(); - _stream.Dispose(); + _stream.NotNull().Dispose(); _stream = null; _currentIndex++; } @@ -1466,12 +1175,10 @@ private void ProcessEmptyFiles() private void OpenFile() { var index = _startIndex + _currentIndex; -#if DEBUG - Log.WriteLine(_db._files[index].Name); -#endif - if (_db._files[index].Crc.HasValue) + var crc = _db._files[index].Crc; + if (crc.HasValue) { - _stream = new CrcCheckStream(_db._files[index].Crc.Value); + _stream = new CrcCheckStream(crc.Value); } else { @@ -1568,11 +1275,10 @@ public Stream OpenStream(ArchiveDatabase db, int fileIndex) return new ReadOnlySubStream(s, db._files[fileIndex].Size); } - public void Extract(ArchiveDatabase db, int[] indices) + public void Extract(ArchiveDatabase db, int[]? indices) { - var allFilesMode = (indices is null); - - var numItems = allFilesMode ? db._files.Count : indices.Length; + var allFilesMode = indices is null; + var numItems = allFilesMode ? db._files.Count : indices!.Length; if (numItems == 0) { @@ -1582,7 +1288,7 @@ public void Extract(ArchiveDatabase db, int[] indices) var extractFolderInfoVector = new List(); for (var i = 0; i < numItems; i++) { - var fileIndex = allFilesMode ? i : indices[i]; + var fileIndex = allFilesMode ? 
i : indices![i]; var folderIndex = db._fileIndexToFolderIndexMap[fileIndex]; if (folderIndex == -1) @@ -1608,7 +1314,7 @@ public void Extract(ArchiveDatabase db, int[] indices) } } - byte[] buffer = null; + byte[] buffer = null!; foreach (var efi in extractFolderInfoVector) { int startIndex; diff --git a/src/SharpCompress/Common/SevenZip/CCoderInfo.cs b/src/SharpCompress/Common/SevenZip/CCoderInfo.cs index d53c44f01..ce1a8808d 100644 --- a/src/SharpCompress/Common/SevenZip/CCoderInfo.cs +++ b/src/SharpCompress/Common/SevenZip/CCoderInfo.cs @@ -1,11 +1,9 @@ -#nullable disable - namespace SharpCompress.Common.SevenZip; internal class CCoderInfo { internal CMethodId _methodId; - internal byte[] _props; + internal byte[]? _props; internal int _numInStreams; internal int _numOutStreams; } diff --git a/src/SharpCompress/Common/SevenZip/CFileItem.cs b/src/SharpCompress/Common/SevenZip/CFileItem.cs index 254a444f1..5305bc5d5 100644 --- a/src/SharpCompress/Common/SevenZip/CFileItem.cs +++ b/src/SharpCompress/Common/SevenZip/CFileItem.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; namespace SharpCompress.Common.SevenZip; @@ -10,7 +8,7 @@ internal class CFileItem public uint? Attrib { get; internal set; } public uint? ExtendedAttrib { get; internal set; } public uint? 
Crc { get; internal set; } - public string Name { get; internal set; } + public string Name { get; internal set; } = string.Empty; public bool HasStream { get; internal set; } public bool IsDir { get; internal set; } diff --git a/src/SharpCompress/Common/SevenZip/CStreamSwitch.cs b/src/SharpCompress/Common/SevenZip/CStreamSwitch.cs index 9238a5a14..8b8139188 100644 --- a/src/SharpCompress/Common/SevenZip/CStreamSwitch.cs +++ b/src/SharpCompress/Common/SevenZip/CStreamSwitch.cs @@ -15,9 +15,6 @@ public void Dispose() if (_active) { _active = false; -#if DEBUG - Log.WriteLine("[end of switch]"); -#endif } if (_needRemove) @@ -50,19 +47,11 @@ public void Set(ArchiveReader archive, List dataVector) throw new InvalidOperationException(); } -#if DEBUG - Log.WriteLine("[switch to stream {0}]", dataIndex); -#endif _archive = archive; _archive.AddByteStream(dataVector[dataIndex], 0, dataVector[dataIndex].Length); _needRemove = true; _active = true; } - else - { -#if DEBUG - Log.WriteLine("[inline data]"); -#endif - } + else { } } } diff --git a/src/SharpCompress/Common/SevenZip/DataReader.cs b/src/SharpCompress/Common/SevenZip/DataReader.cs index f280a3c10..e04d2950f 100644 --- a/src/SharpCompress/Common/SevenZip/DataReader.cs +++ b/src/SharpCompress/Common/SevenZip/DataReader.cs @@ -76,9 +76,6 @@ public void SkipData(long size) } Offset += (int)size; -#if DEBUG - Log.WriteLine("SkipData {0}", size); -#endif } public void SkipData() => SkipData(checked((long)ReadNumber())); diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeader.Async.cs b/src/SharpCompress/Common/Tar/Headers/TarHeader.Async.cs index 75471aa90..9b3bf22f1 100644 --- a/src/SharpCompress/Common/Tar/Headers/TarHeader.Async.cs +++ b/src/SharpCompress/Common/Tar/Headers/TarHeader.Async.cs @@ -3,6 +3,7 @@ using System.Buffers.Binary; using System.Collections.Generic; using System.IO; +using System.IO.Compression; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -26,7 +27,7 @@ 
internal async ValueTask WriteAsync( await WriteUstarAsync(output, cancellationToken).ConfigureAwait(false); break; default: - throw new Exception("This should be impossible..."); + throw new InvalidOperationException("This should be impossible..."); } } @@ -58,9 +59,15 @@ private async ValueTask WriteUstarAsync(Stream output, CancellationToken cancell int splitIndex = -1; for (int i = 0; i < dirSeps.Count; i++) { +#if NET8_0_OR_GREATER + int count = ArchiveEncoding + .GetEncoding() + .GetByteCount(fullName.AsSpan(0, dirSeps[i])); +#else int count = ArchiveEncoding .GetEncoding() .GetByteCount(fullName.Substring(0, dirSeps[i])); +#endif if (count < 155) { splitIndex = dirSeps[i]; @@ -73,7 +80,7 @@ private async ValueTask WriteUstarAsync(Stream output, CancellationToken cancell if (splitIndex == -1) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!" ); } @@ -83,14 +90,14 @@ private async ValueTask WriteUstarAsync(Stream output, CancellationToken cancell if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!" ); } if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!" 
); } @@ -259,7 +266,7 @@ internal async ValueTask ReadAsync(AsyncBinaryReader reader) LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime(); Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls(); - if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic)) + if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic, StringComparison.Ordinal)) { var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls(); diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs index 06224cac5..83018a3f5 100644 --- a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs +++ b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs @@ -3,6 +3,7 @@ using System.Buffers.Binary; using System.Collections.Generic; using System.IO; +using System.IO.Compression; using System.Text; using System.Threading.Tasks; @@ -51,7 +52,7 @@ internal void Write(Stream output) WriteUstar(output); break; default: - throw new Exception("This should be impossible..."); + throw new InvalidOperationException("This should be impossible..."); } } @@ -88,9 +89,15 @@ internal void WriteUstar(Stream output) int splitIndex = -1; for (int i = 0; i < dirSeps.Count; i++) { +#if NET8_0_OR_GREATER + int count = ArchiveEncoding + .GetEncoding() + .GetByteCount(fullName.AsSpan(0, dirSeps[i])); +#else int count = ArchiveEncoding .GetEncoding() .GetByteCount(fullName.Substring(0, dirSeps[i])); +#endif if (count < 155) { splitIndex = dirSeps[i]; @@ -103,7 +110,7 @@ internal void WriteUstar(Stream output) if (splitIndex == -1) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!" 
); } @@ -113,14 +120,14 @@ internal void WriteUstar(Stream output) if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!" ); } if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100) { - throw new Exception( + throw new InvalidDataException( $"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!" ); } @@ -292,7 +299,7 @@ internal bool Read(BinaryReader reader) LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime(); Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls(); - if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic)) + if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic, StringComparison.Ordinal)) { var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls(); @@ -450,7 +457,7 @@ private static long ReadAsciiInt64(byte[] buffer, int offset, int count) { return 0; } - return Convert.ToInt64(s); + return Convert.ToInt64(s, Constants.DefaultCultureInfo); } private static readonly byte[] eightSpaces = diff --git a/src/SharpCompress/Common/Tar/TarReadOnlySubStream.cs b/src/SharpCompress/Common/Tar/TarReadOnlySubStream.cs index d6c178898..987751a34 100644 --- a/src/SharpCompress/Common/Tar/TarReadOnlySubStream.cs +++ b/src/SharpCompress/Common/Tar/TarReadOnlySubStream.cs @@ -6,7 +6,6 @@ namespace SharpCompress.Common.Tar; internal class TarReadOnlySubStream : Stream { private readonly Stream _stream; - private readonly bool _useSyncOverAsyncDispose; private bool _isDisposed; private long _amountRead; @@ -14,7 +13,6 @@ internal class TarReadOnlySubStream : Stream public TarReadOnlySubStream(Stream stream, long bytesToRead, bool useSyncOverAsyncDispose) { _stream = stream; - _useSyncOverAsyncDispose = useSyncOverAsyncDispose; BytesLeftToRead = bytesToRead; } 
@@ -22,6 +20,7 @@ protected override void Dispose(bool disposing) { if (_isDisposed) { + base.Dispose(disposing); return; } @@ -39,7 +38,11 @@ protected override void Dispose(bool disposing) { if (Utility.UseSyncOverAsyncDispose()) { +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits +#pragma warning disable CA2012 _stream.SkipAsync(512 - bytesInLastBlock).GetAwaiter().GetResult(); +#pragma warning restore CA2012 +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits } else { @@ -47,6 +50,7 @@ protected override void Dispose(bool disposing) } } } + base.Dispose(disposing); } #if !LEGACY_DOTNET @@ -54,6 +58,7 @@ public override async System.Threading.Tasks.ValueTask DisposeAsync() { if (_isDisposed) { + await base.DisposeAsync().ConfigureAwait(false); return; } @@ -71,6 +76,7 @@ public override async System.Threading.Tasks.ValueTask DisposeAsync() } GC.SuppressFinalize(this); + await base.DisposeAsync().ConfigureAwait(false); } #endif diff --git a/src/SharpCompress/Common/Volume.Async.cs b/src/SharpCompress/Common/Volume.Async.cs index 7b37be714..3a07aecea 100644 --- a/src/SharpCompress/Common/Volume.Async.cs +++ b/src/SharpCompress/Common/Volume.Async.cs @@ -9,7 +9,7 @@ public virtual async ValueTask DisposeAsync() { #if LEGACY_DOTNET _actualStream.Dispose(); - await Task.CompletedTask; + await Task.CompletedTask.ConfigureAwait(false); #else await _actualStream.DisposeAsync().ConfigureAwait(false); #endif diff --git a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.Async.cs b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.Async.cs index 5cc805b7b..f541fc8d0 100644 --- a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.Async.cs +++ b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.Async.cs @@ -12,10 +12,7 @@ internal async ValueTask ComposeEncryptionDataA CancellationToken cancellationToken = default ) { - if (archiveStream is null) - { - throw new ArgumentNullException(nameof(archiveStream)); - } + 
ThrowHelper.ThrowIfNull(archiveStream); var buffer = new byte[12]; await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false); diff --git a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs index c5bf85e65..b62b6a670 100644 --- a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs +++ b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs @@ -44,10 +44,7 @@ internal bool IsDirectory internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream) { - if (archiveStream is null) - { - throw new ArgumentNullException(nameof(archiveStream)); - } + ThrowHelper.ThrowIfNull(archiveStream); var buffer = new byte[12]; archiveStream.ReadFully(buffer); @@ -95,7 +92,7 @@ protected void LoadExtra(byte[] extra) } var type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i)); - if (!Enum.IsDefined(typeof(ExtraDataType), type)) + if (!IsDefined(type)) { type = ExtraDataType.NotImplementedExtraData; } @@ -135,4 +132,13 @@ protected void LoadExtra(byte[] extra) internal uint ExternalFileAttributes { get; set; } internal string? 
Comment { get; set; } + + private static bool IsDefined(ExtraDataType type) + { +#if LEGACY_DOTNET + return Enum.IsDefined(typeof(ExtraDataType), type); +#else + return Enum.IsDefined(type); +#endif + } } diff --git a/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.Async.cs b/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.Async.cs index a17b04d24..32a0a7bf6 100644 --- a/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.Async.cs +++ b/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.Async.cs @@ -20,10 +20,7 @@ CancellationToken cancellationToken throw new NotSupportedException("This stream does not encrypt via Read()"); } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); var temp = new byte[count]; var readBytes = await _stream diff --git a/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.cs b/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.cs index 65e233478..2efd036ea 100644 --- a/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.cs +++ b/src/SharpCompress/Common/Zip/PkwareTraditionalCryptoStream.cs @@ -48,10 +48,7 @@ public override int Read(byte[] buffer, int offset, int count) throw new NotSupportedException("This stream does not encrypt via Read()"); } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); var temp = new byte[count]; var readBytes = _stream.Read(temp, 0, count); diff --git a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs index c0c64524b..63d46bb69 100644 --- a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs +++ b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs @@ -69,10 +69,7 @@ public byte[] Decrypt(byte[] cipherText, int length) public byte[] Encrypt(byte[] plainText, int length) { - if (plainText is null) - { - throw new 
ArgumentNullException(nameof(plainText)); - } + ThrowHelper.ThrowIfNull(plainText); if (length > plainText.Length) { diff --git a/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.Async.cs b/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.Async.cs index deb4dd10a..766f4ecb2 100644 --- a/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.Async.cs +++ b/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.Async.cs @@ -13,6 +13,7 @@ public override async ValueTask DisposeAsync() { if (_isDisposed) { + await base.DisposeAsync().ConfigureAwait(false); return; } _isDisposed = true; @@ -27,6 +28,7 @@ public override async ValueTask DisposeAsync() ArrayPool.Shared.Return(authBytes); await _stream.DisposeAsync().ConfigureAwait(false); } + await base.DisposeAsync().ConfigureAwait(false); } #endif diff --git a/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs b/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs index 167f14616..bb7cff311 100644 --- a/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs +++ b/src/SharpCompress/Common/Zip/WinzipAesCryptoStream.cs @@ -10,7 +10,7 @@ namespace SharpCompress.Common.Zip; internal partial class WinzipAesCryptoStream : Stream { private const int BLOCK_SIZE_IN_BYTES = 16; - private readonly SymmetricAlgorithm _cipher; + private readonly Aes _cipher; private readonly byte[] _counter = new byte[BLOCK_SIZE_IN_BYTES]; private readonly Stream _stream; private readonly ICryptoTransform _transform; @@ -35,7 +35,7 @@ long length _transform = _cipher.CreateEncryptor(winzipAesEncryptionData.KeyBytes, iv); } - private SymmetricAlgorithm CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData) + private Aes CreateCipher(WinzipAesEncryptionData winzipAesEncryptionData) { var cipher = Aes.Create(); cipher.BlockSize = BLOCK_SIZE_IN_BYTES * 8; @@ -63,6 +63,7 @@ protected override void Dispose(bool disposing) { if (_isDisposed) { + base.Dispose(disposing); return; } _isDisposed = true; @@ -74,7 +75,11 @@ protected override void 
Dispose(bool disposing) var ten = ArrayPool.Shared.Rent(10); try { +#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits +#pragma warning disable CA2012 _stream.ReadFullyAsync(ten, 0, 10).GetAwaiter().GetResult(); +#pragma warning restore CA2012 +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits } finally { @@ -88,6 +93,7 @@ protected override void Dispose(bool disposing) } _stream.Dispose(); } + base.Dispose(disposing); } private async Task ReadAuthBytesAsync() diff --git a/src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs b/src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs index 9925c5934..2da74a625 100644 --- a/src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs +++ b/src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs @@ -1,10 +1,16 @@ using System; using System.Buffers.Binary; +using System.Diagnostics.CodeAnalysis; using System.Security.Cryptography; using System.Text; namespace SharpCompress.Common.Zip; +[SuppressMessage( + "Security", + "CA5379:Rfc2898DeriveBytes might be using a weak hash algorithm", + Justification = "WinZip AES specification requires PBKDF2 with SHA-1." 
+)] internal class WinzipAesEncryptionData { private const int RFC2898_ITERATIONS = 1000; diff --git a/src/SharpCompress/Common/Zip/ZipFilePart.cs b/src/SharpCompress/Common/Zip/ZipFilePart.cs index 61e838bfe..b45ef1fc6 100644 --- a/src/SharpCompress/Common/Zip/ZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/ZipFilePart.cs @@ -140,31 +140,25 @@ protected Stream CreateDecompressionStream(Stream stream, ZipCompressionMethod m { throw new NotSupportedException("LZMA with pkware encryption."); } - using ( - var reader = new BinaryReader( - stream, - System.Text.Encoding.Default, - leaveOpen: true - ) - ) + + using var reader = new BinaryReader( + stream, + System.Text.Encoding.Default, + leaveOpen: true + ); + reader.ReadUInt16(); // LZMA version + var propsLength = reader.ReadUInt16(); + var props = reader.ReadBytes(propsLength); + context = context with { - reader.ReadUInt16(); // LZMA version - var propsLength = reader.ReadUInt16(); - var props = new byte[propsLength]; - reader.Read(props, 0, props.Length); - context = context with - { - Properties = props, - InputSize = - Header.CompressedSize > 0 - ? Header.CompressedSize - 4 - props.Length - : -1, - OutputSize = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1) - ? -1 - : Header.UncompressedSize, - }; - return providers.CreateDecompressStream(compressionType, stream, context); - } + Properties = props, + InputSize = + Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1, + OutputSize = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1) + ? 
-1 + : Header.UncompressedSize, + }; + return providers.CreateDecompressStream(compressionType, stream, context); } case ZipCompressionMethod.PPMd: { diff --git a/src/SharpCompress/Compressors/ADC/ADCStream.Async.cs b/src/SharpCompress/Compressors/ADC/ADCStream.Async.cs index 22941be58..d2e535cd6 100644 --- a/src/SharpCompress/Compressors/ADC/ADCStream.Async.cs +++ b/src/SharpCompress/Compressors/ADC/ADCStream.Async.cs @@ -47,18 +47,9 @@ public override async Task ReadAsync( { return 0; } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } - if (count < 0) - { - throw new ArgumentOutOfRangeException(nameof(count)); - } - if (offset < buffer.GetLowerBound(0)) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } + ThrowHelper.ThrowIfNull(buffer); + ThrowHelper.ThrowIfNegative(count); + ThrowHelper.ThrowIfLessThan(offset, buffer.GetLowerBound(0)); if ((offset + count) > buffer.GetLength(0)) { throw new ArgumentOutOfRangeException(nameof(count)); diff --git a/src/SharpCompress/Compressors/ADC/ADCStream.cs b/src/SharpCompress/Compressors/ADC/ADCStream.cs index cb67d9d22..f2ef155eb 100644 --- a/src/SharpCompress/Compressors/ADC/ADCStream.cs +++ b/src/SharpCompress/Compressors/ADC/ADCStream.cs @@ -110,18 +110,9 @@ public override int Read(byte[] buffer, int offset, int count) { return 0; } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } - if (count < 0) - { - throw new ArgumentOutOfRangeException(nameof(count)); - } - if (offset < buffer.GetLowerBound(0)) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } + ThrowHelper.ThrowIfNull(buffer); + ThrowHelper.ThrowIfNegative(count); + ThrowHelper.ThrowIfLessThan(offset, buffer.GetLowerBound(0)); if ((offset + count) > buffer.GetLength(0)) { throw new ArgumentOutOfRangeException(nameof(count)); diff --git a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.Async.cs b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.Async.cs index 
fbcc45729..56496c9a1 100644 --- a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.Async.cs +++ b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.Async.cs @@ -1,10 +1,11 @@ using System; -using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; using SharpCompress.Compressors.RLE90; +namespace SharpCompress.Compressors.ArcLzw; + public partial class ArcLzwStream { public override async Task ReadAsync( @@ -33,7 +34,7 @@ CancellationToken cancellationToken totalRead += read; } var decoded = Decompress(data, _useCrunched); - var result = decoded.Count(); + var result = decoded.Count; if (_useCrunched) { var unpacked = RLE.UnpackRLE(decoded.ToArray()); diff --git a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs index f9308228c..29373659c 100644 --- a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs +++ b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs @@ -3,7 +3,8 @@ using System.IO; using System.Linq; using SharpCompress.Compressors.RLE90; -using SharpCompress.Compressors.Squeezed; + +namespace SharpCompress.Compressors.ArcLzw; public partial class ArcLzwStream : Stream { @@ -175,7 +176,7 @@ public override int Read(byte[] buffer, int offset, int count) var data = new byte[_compressedSize]; _stream.Read(data, 0, _compressedSize); var decoded = Decompress(data, _useCrunched); - var result = decoded.Count(); + var result = decoded.Count; if (_useCrunched) { var unpacked = RLE.UnpackRLE(decoded.ToArray()); diff --git a/src/SharpCompress/Compressors/ArcLzw/BitReader.cs b/src/SharpCompress/Compressors/ArcLzw/BitReader.cs index ecca0907f..5c2a9cc85 100644 --- a/src/SharpCompress/Compressors/ArcLzw/BitReader.cs +++ b/src/SharpCompress/Compressors/ArcLzw/BitReader.cs @@ -1,5 +1,7 @@ using System; +namespace SharpCompress.Compressors.ArcLzw; + public partial class ArcLzwStream { public class BitReader diff --git a/src/SharpCompress/Compressors/Arj/HuffmanTree.cs 
b/src/SharpCompress/Compressors/Arj/HuffmanTree.cs index 63bf89682..2e4ddd57c 100644 --- a/src/SharpCompress/Compressors/Arj/HuffmanTree.cs +++ b/src/SharpCompress/Compressors/Arj/HuffmanTree.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.IO; using System.Text; +using SharpCompress.Common; namespace SharpCompress.Compressors.Arj; @@ -61,10 +62,7 @@ public void SetSingle(int value) public void BuildTree(byte[] lengths, int count) { - if (lengths == null) - { - throw new ArgumentNullException(nameof(lengths)); - } + ThrowHelper.ThrowIfNull(lengths); if (count < 0 || count > lengths.Length) { @@ -85,10 +83,7 @@ public void BuildTree(byte[] lengths, int count) public void BuildTree(byte[] valueLengths) { - if (valueLengths == null) - { - throw new ArgumentNullException(nameof(valueLengths)); - } + ThrowHelper.ThrowIfNull(valueLengths); if (valueLengths.Length > TreeEntry.MAX_INDEX / 2) { @@ -195,7 +190,11 @@ void FormatStep(int index, string prefix) var node = _tree[index]; if (node.Type == NodeType.Leaf) { - result.AppendLine($"{prefix} -> {node.LeafValue}"); + result + .Append(prefix) + .Append(" -> ") + .Append(node.LeafValue.ToString(Constants.DefaultCultureInfo)) + .AppendLine(); } else { diff --git a/src/SharpCompress/Compressors/Arj/LHDecoderStream.Async.cs b/src/SharpCompress/Compressors/Arj/LHDecoderStream.Async.cs index 037c56ba3..f978b0a80 100644 --- a/src/SharpCompress/Compressors/Arj/LHDecoderStream.Async.cs +++ b/src/SharpCompress/Compressors/Arj/LHDecoderStream.Async.cs @@ -92,10 +92,7 @@ CancellationToken cancellationToken throw new ObjectDisposedException(nameof(LHDecoderStream)); } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { diff --git a/src/SharpCompress/Compressors/Arj/LHDecoderStream.cs b/src/SharpCompress/Compressors/Arj/LHDecoderStream.cs index 52b51a734..c60ef8715 100644 --- 
a/src/SharpCompress/Compressors/Arj/LHDecoderStream.cs +++ b/src/SharpCompress/Compressors/Arj/LHDecoderStream.cs @@ -8,7 +8,6 @@ namespace SharpCompress.Compressors.Arj; public sealed partial class LHDecoderStream : Stream { private readonly BitReader _bitReader; - private readonly Stream _stream; // Buffer containing *all* bytes decoded so far. private readonly List _buffer = new(); @@ -22,7 +21,6 @@ public sealed partial class LHDecoderStream : Stream public LHDecoderStream(Stream compressedStream, int originalSize) { - _stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream)); if (!compressedStream.CanRead) { throw new ArgumentException( @@ -37,8 +35,6 @@ public LHDecoderStream(Stream compressedStream, int originalSize) _finishedDecoding = (originalSize == 0); } - public Stream BaseStream => _stream; - public override bool CanRead => true; public override bool CanSeek => false; public override bool CanWrite => false; @@ -123,10 +119,7 @@ public override int Read(byte[] buffer, int offset, int count) throw new ObjectDisposedException(nameof(LHDecoderStream)); } - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { @@ -179,4 +172,13 @@ public override int Read(byte[] buffer, int offset, int count) public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException(); + + protected override void Dispose(bool disposing) + { + if (disposing && !_disposed) + { + _disposed = true; + } + base.Dispose(disposing); + } } diff --git a/src/SharpCompress/Compressors/Arj/LhaStream.Async.cs b/src/SharpCompress/Compressors/Arj/LhaStream.Async.cs index 26d6fe709..0dfe1a881 100644 --- a/src/SharpCompress/Compressors/Arj/LhaStream.Async.cs +++ b/src/SharpCompress/Compressors/Arj/LhaStream.Async.cs @@ -1,11 +1,12 @@ using System; using System.IO; +using System.IO.Compression; using 
System.Threading; using System.Threading.Tasks; namespace SharpCompress.Compressors.Arj; -public sealed partial class LhaStream +public sealed partial class LhaStream { public override async Task ReadAsync( byte[] buffer, @@ -14,13 +15,10 @@ public override async Task ReadAsync( CancellationToken cancellationToken ) { - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || (offset + count) > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } if (_producedBytes >= _originalSize) @@ -116,7 +114,7 @@ private async ValueTask ReadTempTreeAsync(CancellationToken cancellationToken) if (numCodes > NUM_TEMP_CODELEN) { - throw new Exception("temporary codelen table has invalid size"); + throw new InvalidDataException("temporary codelen table has invalid size"); } // read actual lengths @@ -132,7 +130,7 @@ private async ValueTask ReadTempTreeAsync(CancellationToken cancellationToken) if (3 + skip > numCodes) { - throw new Exception("temporary codelen table has invalid size"); + throw new InvalidDataException("temporary codelen table has invalid size"); } for (int i = 3 + skip; i < numCodes; i++) @@ -161,7 +159,7 @@ private async ValueTask ReadCommandTreeAsync(CancellationToken cancellationToken if (numCodes > NUM_COMMANDS) { - throw new Exception("commands codelen table has invalid size"); + throw new InvalidDataException("commands codelen table has invalid size"); } int index = 0; diff --git a/src/SharpCompress/Compressors/Arj/LhaStream.cs b/src/SharpCompress/Compressors/Arj/LhaStream.cs index 76071b45c..651954053 100644 --- a/src/SharpCompress/Compressors/Arj/LhaStream.cs +++ b/src/SharpCompress/Compressors/Arj/LhaStream.cs @@ -1,23 +1,23 @@ using System; using System.Data; using System.IO; +using System.IO.Compression; using System.Linq; namespace SharpCompress.Compressors.Arj; [CLSCompliant(true)] -public sealed 
partial class LhaStream : Stream - where C : ILhaDecoderConfig, new() +public sealed partial class LhaStream : Stream + where TDecoderConfig : ILhaDecoderConfig, new() { private readonly BitReader _bitReader; - private readonly Stream _stream; private readonly HuffTree _commandTree; private readonly HuffTree _offsetTree; private int _remainingCommands; private (int offset, int count)? _copyProgress; private readonly RingBuffer _ringBuffer; - private readonly C _config = new C(); + private readonly TDecoderConfig _config = new TDecoderConfig(); private const int NUM_COMMANDS = 510; private const int NUM_TEMP_CODELEN = 20; @@ -27,7 +27,6 @@ public sealed partial class LhaStream : Stream public LhaStream(Stream compressedStream, int originalSize) { - _stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream)); _bitReader = new BitReader(compressedStream); _ringBuffer = _config.RingBuffer; _commandTree = new HuffTree(NUM_COMMANDS * 2); @@ -58,13 +57,10 @@ public override void Write(byte[] buffer, int offset, int count) => public override int Read(byte[] buffer, int offset, int count) { - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || (offset + count) > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } if (_producedBytes >= _originalSize) @@ -137,7 +133,7 @@ private void ReadTempTree() if (numCodes > NUM_TEMP_CODELEN) { - throw new Exception("temporary codelen table has invalid size"); + throw new InvalidDataException("temporary codelen table has invalid size"); } // read actual lengths @@ -152,7 +148,7 @@ private void ReadTempTree() if (3 + skip > numCodes) { - throw new Exception("temporary codelen table has invalid size"); + throw new InvalidDataException("temporary codelen table has invalid size"); } for (int i = 3 + skip; i < numCodes; i++) @@ -180,7 +176,7 @@ private void 
ReadCommandTree() if (numCodes > NUM_COMMANDS) { - throw new Exception("commands codelen table has invalid size"); + throw new InvalidDataException("commands codelen table has invalid size"); } int index = 0; diff --git a/src/SharpCompress/Compressors/BZip2/BZip2Stream.cs b/src/SharpCompress/Compressors/BZip2/BZip2Stream.cs index 57b729ba3..0813b088f 100644 --- a/src/SharpCompress/Compressors/BZip2/BZip2Stream.cs +++ b/src/SharpCompress/Compressors/BZip2/BZip2Stream.cs @@ -55,6 +55,7 @@ protected override void Dispose(bool disposing) { if (isDisposed || leaveOpen) { + base.Dispose(disposing); return; } isDisposed = true; @@ -62,6 +63,7 @@ protected override void Dispose(bool disposing) { stream.Dispose(); } + base.Dispose(disposing); } public CompressionMode Mode { get; private set; } diff --git a/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs b/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs index f846c3eb1..4710aaf5e 100644 --- a/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs +++ b/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs @@ -447,6 +447,7 @@ protected override void Dispose(bool disposing) { if (disposed) { + base.Dispose(disposing); return; } @@ -460,6 +461,7 @@ protected override void Dispose(bool disposing) } bsStream = null; } + base.Dispose(disposing); } public void Finish() diff --git a/src/SharpCompress/Compressors/Deflate/DeflateManager.cs b/src/SharpCompress/Compressors/Deflate/DeflateManager.cs index 933f57529..a7b643cb7 100644 --- a/src/SharpCompress/Compressors/Deflate/DeflateManager.cs +++ b/src/SharpCompress/Compressors/Deflate/DeflateManager.cs @@ -1706,7 +1706,11 @@ CompressionStrategy strategy if (memLevel < 1 || memLevel > MEM_LEVEL_MAX) { throw new ZlibException( - string.Format("memLevel must be in the range 1.. {0}", MEM_LEVEL_MAX) + string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "memLevel must be in the range 1.. 
{0}", + MEM_LEVEL_MAX + ) ); } @@ -1876,7 +1880,13 @@ _codec.OutputBuffer is null _codec.Message = _ErrorMessage[ ZlibConstants.Z_NEED_DICT - (ZlibConstants.Z_STREAM_ERROR) ]; - throw new ZlibException(string.Format("Something is fishy. [{0}]", _codec.Message)); + throw new ZlibException( + string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Something is fishy. [{0}]", + _codec.Message + ) + ); //return ZlibConstants.Z_STREAM_ERROR; } diff --git a/src/SharpCompress/Compressors/Deflate/DeflateStream.cs b/src/SharpCompress/Compressors/Deflate/DeflateStream.cs index d9249d1f7..6e212db68 100644 --- a/src/SharpCompress/Compressors/Deflate/DeflateStream.cs +++ b/src/SharpCompress/Compressors/Deflate/DeflateStream.cs @@ -120,6 +120,7 @@ public int BufferSize { throw new ZlibException( string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin diff --git a/src/SharpCompress/Compressors/Deflate/GZipStream.cs b/src/SharpCompress/Compressors/Deflate/GZipStream.cs index b0b132161..04c7a5d95 100644 --- a/src/SharpCompress/Compressors/Deflate/GZipStream.cs +++ b/src/SharpCompress/Compressors/Deflate/GZipStream.cs @@ -116,6 +116,7 @@ public int BufferSize { throw new ZlibException( string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin @@ -402,7 +403,11 @@ public string? FileName { return; } +#if LEGACY_DOTNET if (_fileName.Contains('/')) +#else + if (_fileName.Contains('/', StringComparison.Ordinal)) +#endif { _fileName = _fileName.Replace('/', '\\'); } @@ -411,7 +416,11 @@ public string? 
FileName throw new InvalidOperationException("Illegal filename"); } +#if LEGACY_DOTNET if (_fileName.Contains('\\')) +#else + if (_fileName.Contains('\\', StringComparison.Ordinal)) +#endif { // trim any leading path _fileName = Path.GetFileName(_fileName); diff --git a/src/SharpCompress/Compressors/Deflate/Inflate.cs b/src/SharpCompress/Compressors/Deflate/Inflate.cs index aff2c6590..664f1bf48 100644 --- a/src/SharpCompress/Compressors/Deflate/Inflate.cs +++ b/src/SharpCompress/Compressors/Deflate/Inflate.cs @@ -1746,6 +1746,7 @@ internal int Inflate(FlushType flush) { mode = InflateManagerMode.BAD; _codec.Message = string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "unknown compression method (0x{0:X2})", method ); @@ -1756,6 +1757,7 @@ internal int Inflate(FlushType flush) { mode = InflateManagerMode.BAD; _codec.Message = string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "invalid window size ({0})", (method >> 4) + 8 ); @@ -1945,7 +1947,13 @@ internal int Inflate(FlushType flush) return ZlibConstants.Z_STREAM_END; case InflateManagerMode.BAD: - throw new ZlibException(string.Format("Bad state ({0})", _codec.Message)); + throw new ZlibException( + string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Bad state ({0})", + _codec.Message + ) + ); default: throw new ZlibException("Stream error."); diff --git a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs index 513029db9..467780002 100644 --- a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs +++ b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs @@ -275,7 +275,14 @@ private void finish() var verb = (_wantCompress ? 
"de" : "in") + "flating"; if (_z.Message is null) { - throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc)); + throw new ZlibException( + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "{0}: (rc = {1})", + verb, + rc + ) + ); } throw new ZlibException(verb + ": " + _z.Message); } @@ -344,6 +351,7 @@ private void finish() { throw new ZlibException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Protocol error. AvailableBytesIn={0}, expected 8", _z.AvailableBytesIn + bytesRead ) @@ -364,6 +372,7 @@ private void finish() { throw new ZlibException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Bad CRC32 in GZIP stream. (actual({0:X8})!=expected({1:X8}))", crc32_actual, crc32_expected @@ -375,6 +384,7 @@ private void finish() { throw new ZlibException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Bad size in GZIP stream. (actual({0})!=expected({1}))", isize_actual, isize_expected @@ -413,7 +423,14 @@ private async ValueTask finishAsync(CancellationToken cancellationToken = defaul var verb = (_wantCompress ? 
"de" : "in") + "flating"; if (_z.Message is null) { - throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc)); + throw new ZlibException( + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "{0}: (rc = {1})", + verb, + rc + ) + ); } throw new ZlibException(verb + ": " + _z.Message); } @@ -869,7 +886,12 @@ public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count) if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END) { throw new ZlibException( - String.Format("Deflating: rc={0} msg={1}", rc, _z.Message) + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Deflating: rc={0} msg={1}", + rc, + _z.Message + ) ); } @@ -883,18 +905,9 @@ public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count) return rc; } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } - if (count < 0) - { - throw new ArgumentOutOfRangeException(nameof(count)); - } - if (offset < buffer.GetLowerBound(0)) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } + ThrowHelper.ThrowIfNull(buffer); + ThrowHelper.ThrowIfNegative(count); + ThrowHelper.ThrowIfLessThan(offset, buffer.GetLowerBound(0)); if ((offset + count) > buffer.GetLength(0)) { throw new ArgumentOutOfRangeException(nameof(count)); @@ -931,6 +944,7 @@ public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count) { throw new ZlibException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "{0}flating: rc={1} msg={2}", (_wantCompress ? 
"de" : "in"), rc, @@ -970,7 +984,12 @@ public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count) if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END) { throw new ZlibException( - String.Format("Deflating: rc={0} msg={1}", rc, _z.Message) + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Deflating: rc={0} msg={1}", + rc, + _z.Message + ) ); } } @@ -1059,7 +1078,12 @@ CancellationToken cancellationToken if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END) { throw new ZlibException( - String.Format("Deflating: rc={0} msg={1}", rc, _z.Message) + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Deflating: rc={0} msg={1}", + rc, + _z.Message + ) ); } @@ -1073,18 +1097,9 @@ CancellationToken cancellationToken return rc; } - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } - if (count < 0) - { - throw new ArgumentOutOfRangeException(nameof(count)); - } - if (offset < buffer.GetLowerBound(0)) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } + ThrowHelper.ThrowIfNull(buffer); + ThrowHelper.ThrowIfNegative(count); + ThrowHelper.ThrowIfLessThan(offset, buffer.GetLowerBound(0)); if ((offset + count) > buffer.GetLength(0)) { throw new ArgumentOutOfRangeException(nameof(count)); @@ -1123,6 +1138,7 @@ CancellationToken cancellationToken { throw new ZlibException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "{0}flating: rc={1} msg={2}", (_wantCompress ? 
"de" : "in"), rc, @@ -1162,7 +1178,12 @@ CancellationToken cancellationToken if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END) { throw new ZlibException( - String.Format("Deflating: rc={0} msg={1}", rc, _z.Message) + String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, + "Deflating: rc={0} msg={1}", + rc, + _z.Message + ) ); } } diff --git a/src/SharpCompress/Compressors/Deflate/ZlibCodec.cs b/src/SharpCompress/Compressors/Deflate/ZlibCodec.cs index 7c49c780b..b00f93b8b 100644 --- a/src/SharpCompress/Compressors/Deflate/ZlibCodec.cs +++ b/src/SharpCompress/Compressors/Deflate/ZlibCodec.cs @@ -696,6 +696,7 @@ internal void flush_pending() { throw new ZlibException( string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Invalid State. (pending.Length={0}, pendingCount={1})", dstate.pending.Length, dstate.pendingCount diff --git a/src/SharpCompress/Compressors/Deflate/ZlibStream.cs b/src/SharpCompress/Compressors/Deflate/ZlibStream.cs index 417c32839..bfdd82cd2 100644 --- a/src/SharpCompress/Compressors/Deflate/ZlibStream.cs +++ b/src/SharpCompress/Compressors/Deflate/ZlibStream.cs @@ -107,6 +107,7 @@ public int BufferSize { throw new ZlibException( string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Don't be silly. {0} bytes?? 
Use a bigger buffer, at least {1}.", value, ZlibConstants.WorkingBufferSizeMin diff --git a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.Async.cs b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.Async.cs index 8182cc578..64652872e 100644 --- a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.Async.cs +++ b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.Async.cs @@ -1,5 +1,4 @@ using System; -using System.Diagnostics; using System.IO; using System.Runtime.InteropServices; using System.Threading; @@ -12,13 +11,13 @@ namespace SharpCompress.Compressors.Deflate64; public sealed partial class Deflate64Stream { public override async Task ReadAsync( - byte[] array, + byte[] buffer, int offset, int count, CancellationToken cancellationToken ) { - ValidateParameters(array, offset, count); + ValidateParameters(buffer, offset, count); EnsureNotDisposed(); int bytesRead; @@ -27,7 +26,7 @@ CancellationToken cancellationToken while (true) { - bytesRead = _inflater.Inflate(array, currentOffset, remainingCount); + bytesRead = _inflater.Inflate(buffer, currentOffset, remainingCount); currentOffset += bytesRead; remainingCount -= bytesRead; @@ -39,10 +38,6 @@ CancellationToken cancellationToken if (_inflater.Finished()) { // if we finished decompressing, we can't have anything left in the outputwindow. - Debug.Assert( - _inflater.AvailableOutput == 0, - "We should have copied all stuff out!" - ); break; } diff --git a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs index 80b23dd82..59d40dc73 100644 --- a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs +++ b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. 
using System; -using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; using System.Threading; @@ -23,10 +22,7 @@ public sealed partial class Deflate64Stream : Stream public Deflate64Stream(Stream stream, CompressionMode mode) { - if (stream is null) - { - throw new ArgumentNullException(nameof(stream)); - } + ThrowHelper.ThrowIfNull(stream); if (mode != CompressionMode.Decompress) { @@ -73,9 +69,9 @@ public override long Seek(long offset, SeekOrigin origin) => public override void SetLength(long value) => throw new NotSupportedException("Deflate64: not supported"); - public override int Read(byte[] array, int offset, int count) + public override int Read(byte[] buffer, int offset, int count) { - ValidateParameters(array, offset, count); + ValidateParameters(buffer, offset, count); EnsureNotDisposed(); int bytesRead; @@ -84,7 +80,7 @@ public override int Read(byte[] array, int offset, int count) while (true) { - bytesRead = _inflater.Inflate(array, currentOffset, remainingCount); + bytesRead = _inflater.Inflate(buffer, currentOffset, remainingCount); currentOffset += bytesRead; remainingCount -= bytesRead; @@ -96,10 +92,6 @@ public override int Read(byte[] array, int offset, int count) if (_inflater.Finished()) { // if we finished decompressing, we can't have anything left in the outputwindow. - Debug.Assert( - _inflater.AvailableOutput == 0, - "We should have copied all stuff out!" 
- ); break; } @@ -123,20 +115,11 @@ public override int Read(byte[] array, int offset, int count) private void ValidateParameters(byte[] array, int offset, int count) { - if (array is null) - { - throw new ArgumentNullException(nameof(array)); - } + ThrowHelper.ThrowIfNull(array); - if (offset < 0) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } + ThrowHelper.ThrowIfNegative(offset); - if (count < 0) - { - throw new ArgumentOutOfRangeException(nameof(count)); - } + ThrowHelper.ThrowIfNegative(count); if (array.Length - offset < count) { @@ -160,7 +143,7 @@ private static void ThrowStreamClosedException() => private static void ThrowCannotWriteToDeflateManagedStreamException() => throw new InvalidOperationException("Deflate64: cannot write to this stream"); - public override void Write(byte[] array, int offset, int count) => + public override void Write(byte[] buffer, int offset, int count) => ThrowCannotWriteToDeflateManagedStreamException(); // This is called by Dispose: diff --git a/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs b/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs index 304a2a78e..6bac3c518 100644 --- a/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs +++ b/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs @@ -2,8 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using System.Diagnostics; - namespace SharpCompress.Compressors.Deflate64; internal sealed class DeflateInput @@ -16,10 +14,8 @@ internal sealed class DeflateInput internal void ConsumeBytes(int n) { - Debug.Assert(n <= Count, "Should use more bytes than what we have in the buffer"); StartIndex += n; Count -= n; - Debug.Assert(StartIndex + Count <= Buffer.Length, "Input buffer is in invalid state!"); } internal InputState DumpState() => new(Count, StartIndex); diff --git a/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs b/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs index bc8214a31..2cb290519 100644 --- a/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs +++ b/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. using System; -using System.Diagnostics; namespace SharpCompress.Compressors.Deflate64; @@ -964,7 +963,6 @@ public static uint BitReverse(uint code, int length) { uint newCode = 0; - Debug.Assert(length > 0 && length <= 16, "Invalid len"); do { newCode |= (code & 1); diff --git a/src/SharpCompress/Compressors/Deflate64/HuffmanTree.cs b/src/SharpCompress/Compressors/Deflate64/HuffmanTree.cs index aac88b059..e98cfc698 100644 --- a/src/SharpCompress/Compressors/Deflate64/HuffmanTree.cs +++ b/src/SharpCompress/Compressors/Deflate64/HuffmanTree.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. using System; -using System.Diagnostics; using SharpCompress.Common; namespace SharpCompress.Compressors.Deflate64; @@ -35,9 +34,6 @@ internal sealed class HuffmanTree private readonly short[] _left; private readonly short[] _right; private readonly byte[] _codeLengthArray; -#if DEBUG - private uint[]? 
_codeArrayDebug; -#endif private readonly int _tableMask; @@ -48,12 +44,6 @@ internal sealed class HuffmanTree public HuffmanTree(byte[] codeLengths) { - Debug.Assert( - codeLengths.Length == MAX_LITERAL_TREE_ELEMENTS - || codeLengths.Length == MAX_DIST_TREE_ELEMENTS - || codeLengths.Length == NUMBER_OF_CODE_LENGTH_TREE_ELEMENTS, - "we only expect three kinds of Length here" - ); _codeLengthArray = codeLengths; if (_codeLengthArray.Length == MAX_LITERAL_TREE_ELEMENTS) @@ -152,9 +142,6 @@ private uint[] CalculateHuffmanCode() private void CreateTable() { var codeArray = CalculateHuffmanCode(); -#if DEBUG - _codeArrayDebug = codeArray; -#endif var avail = (short)_codeLengthArray.Length; @@ -237,11 +224,6 @@ private void CreateTable() throw new InvalidFormatException("Deflate64: invalid Huffman data"); } - Debug.Assert( - value < 0, - "CreateTable: Only negative numbers are used for tree pointers!" - ); - if ((start & codeBitMask) == 0) { // if current bit is 0, go change the left array diff --git a/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs b/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs index 5caf257d0..4b3fdeded 100644 --- a/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs +++ b/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs @@ -572,7 +572,6 @@ private bool DecodeBlock(out bool endOfBlockCodeSeen) throw new ZlibException("Deflate64: invalid data"); } _extraBits = S_EXTRA_LENGTH_BITS[symbol]; - Debug.Assert(_extraBits != 0, "We handle other cases separately!"); } _length = symbol; goto case InflaterState.HaveInitialLength; diff --git a/src/SharpCompress/Compressors/Deflate64/InputBuffer.cs b/src/SharpCompress/Compressors/Deflate64/InputBuffer.cs index 28d61e422..ec4033e1c 100644 --- a/src/SharpCompress/Compressors/Deflate64/InputBuffer.cs +++ b/src/SharpCompress/Compressors/Deflate64/InputBuffer.cs @@ -5,7 +5,6 @@ #nullable disable using System; -using System.Diagnostics; namespace 
SharpCompress.Compressors.Deflate64; @@ -38,8 +37,6 @@ internal sealed class InputBuffer /// Returns false if input is not sufficient to make this true. public bool EnsureBitsAvailable(int count) { - Debug.Assert(0 < count && count <= 16, "count is invalid."); - // manual inlining to improve perf if (_bitsInBuffer < count) { @@ -106,8 +103,6 @@ public uint TryLoad16Bits() /// Gets count bits from the input buffer. Returns -1 if not enough bits available. public int GetBits(int count) { - Debug.Assert(0 < count && count <= 16, "count is invalid."); - if (!EnsureBitsAvailable(count)) { return -1; @@ -127,12 +122,6 @@ public int GetBits(int count) /// Returns the number of bytes copied, 0 if no byte is available. public int CopyTo(byte[] output, int offset, int length) { - Debug.Assert(output != null); - Debug.Assert(offset >= 0); - Debug.Assert(length >= 0); - Debug.Assert(offset <= output.Length - length); - Debug.Assert((_bitsInBuffer % 8) == 0); - // Copy the bytes in bitBuffer first. var bytesFromBitBuffer = 0; while (_bitsInBuffer > 0 && length > 0) @@ -175,12 +164,6 @@ public int CopyTo(byte[] output, int offset, int length) /// public void SetInput(byte[] buffer, int offset, int length) { - Debug.Assert(buffer != null); - Debug.Assert(offset >= 0); - Debug.Assert(length >= 0); - Debug.Assert(offset <= buffer.Length - length); - Debug.Assert(_start == _end); - _buffer = buffer; _start = offset; _end = offset + length; @@ -189,10 +172,6 @@ public void SetInput(byte[] buffer, int offset, int length) /// Skip n bits in the buffer. public void SkipBits(int n) { - Debug.Assert( - _bitsInBuffer >= n, - "No enough bits in the buffer, Did you call EnsureBitsAvailable?" 
- ); _bitBuffer >>= n; _bitsInBuffer -= n; } diff --git a/src/SharpCompress/Compressors/Deflate64/OutputWindow.cs b/src/SharpCompress/Compressors/Deflate64/OutputWindow.cs index 90b65cd37..e476e6344 100644 --- a/src/SharpCompress/Compressors/Deflate64/OutputWindow.cs +++ b/src/SharpCompress/Compressors/Deflate64/OutputWindow.cs @@ -3,7 +3,6 @@ // See the LICENSE file in the project root for more information. using System; -using System.Diagnostics; namespace SharpCompress.Compressors.Deflate64; @@ -29,7 +28,6 @@ internal sealed class OutputWindow /// Add a byte to output window. public void Write(byte b) { - Debug.Assert(_bytesUsed < WINDOW_SIZE, "Can't add byte when window is full!"); _window[_end++] = b; _end &= WINDOW_MASK; ++_bytesUsed; @@ -37,8 +35,6 @@ public void Write(byte b) public void WriteLengthDistance(int length, int distance) { - Debug.Assert((_bytesUsed + length) <= WINDOW_SIZE, "No Enough space"); - // move backwards distance bytes in the output stream, // and copy length bytes from this position to the output stream. 
_bytesUsed += length; @@ -143,10 +139,6 @@ public int CopyTo(byte[] output, int offset, int length) } Array.Copy(_window, copyEnd - length, output, offset, length); _bytesUsed -= copied; - Debug.Assert( - _bytesUsed >= 0, - "check this function and find why we copied more bytes than we have" - ); return copied; } } diff --git a/src/SharpCompress/Compressors/Explode/ExplodeStream.Async.cs b/src/SharpCompress/Compressors/Explode/ExplodeStream.Async.cs index 6eb1afef3..16dbfdd9b 100644 --- a/src/SharpCompress/Compressors/Explode/ExplodeStream.Async.cs +++ b/src/SharpCompress/Compressors/Explode/ExplodeStream.Async.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.IO.Compression; using System.Threading; using System.Threading.Tasks; using SharpCompress.Common.Zip.Headers; @@ -278,7 +279,7 @@ CancellationToken cancellationToken if (literalResult.returnCode != 0) { - throw new Exception("Error decoding literal value"); + throw new InvalidDataException("Error decoding literal value"); } huftPointer = literalResult.huftPointer; @@ -318,7 +319,7 @@ await NeedBitsAsync(numOfUncodedLowerDistanceBits, cancellationToken) if (distanceResult.returnCode != 0) { - throw new Exception("Error decoding distance high bits"); + throw new InvalidDataException("Error decoding distance high bits"); } huftPointer = distanceResult.huftPointer; @@ -334,7 +335,7 @@ await NeedBitsAsync(numOfUncodedLowerDistanceBits, cancellationToken) if (lengthResult.returnCode != 0) { - throw new Exception("Error decoding coded length"); + throw new InvalidDataException("Error decoding coded length"); } huftPointer = lengthResult.huftPointer; diff --git a/src/SharpCompress/Compressors/Explode/ExplodeStream.cs b/src/SharpCompress/Compressors/Explode/ExplodeStream.cs index 3d1aa2fd6..977cd629e 100644 --- a/src/SharpCompress/Compressors/Explode/ExplodeStream.cs +++ b/src/SharpCompress/Compressors/Explode/ExplodeStream.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.IO.Compression; 
using SharpCompress.Common.Zip.Headers; namespace SharpCompress.Compressors.Explode; @@ -696,7 +697,7 @@ out _ ) != 0 ) { - throw new Exception("Error decoding literal value"); + throw new InvalidDataException("Error decoding literal value"); } nextByte = (byte)huftPointer.Value; @@ -735,7 +736,7 @@ out _ ) != 0 ) { - throw new Exception("Error decoding distance high bits"); + throw new InvalidDataException("Error decoding distance high bits"); } distance = windowIndex - (distance + huftPointer.Value); /* construct offset */ @@ -751,7 +752,7 @@ out int extraBitLength ) != 0 ) { - throw new Exception("Error decoding coded length"); + throw new InvalidDataException("Error decoding coded length"); } length = huftPointer.Value; diff --git a/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs b/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs index bbe91196e..d14b85640 100644 --- a/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs +++ b/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs @@ -18,9 +18,9 @@ public enum Alignment : int ARCH_x86_ALIGNMENT = 1, ARCH_PowerPC_ALIGNMENT = 4, ARCH_IA64_ALIGNMENT = 16, - ARCH_ARM_ALIGNMENT = 4, + ARCH_ARM_ALIGNMENT = ARCH_PowerPC_ALIGNMENT, ARCH_ARMTHUMB_ALIGNMENT = 2, - ARCH_SPARC_ALIGNMENT = 4, + ARCH_SPARC_ALIGNMENT = ARCH_PowerPC_ALIGNMENT, } [MethodImpl(MethodImplOptions.AggressiveInlining)] diff --git a/src/SharpCompress/Compressors/Filters/DeltaFilter.cs b/src/SharpCompress/Compressors/Filters/DeltaFilter.cs index 5f7593ce9..82a76ae9c 100644 --- a/src/SharpCompress/Compressors/Filters/DeltaFilter.cs +++ b/src/SharpCompress/Compressors/Filters/DeltaFilter.cs @@ -4,7 +4,6 @@ namespace SharpCompress.Compressors.Filters; internal class DeltaFilter : Filter { - private const int DISTANCE_MIN = 1; private const int DISTANCE_MAX = 256; private const int DISTANCE_MASK = DISTANCE_MAX - 1; diff --git a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs 
b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs index 48fd28b13..557456c8d 100644 --- a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs +++ b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs @@ -4,7 +4,7 @@ using System.Text; using System.Threading; using System.Threading.Tasks; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; namespace SharpCompress.Compressors.LZMA; diff --git a/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs b/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs index 9b89993c3..9f2c10115 100644 --- a/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs +++ b/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs @@ -88,13 +88,8 @@ public uint Decode(RangeDecoder decoder) private bool _mFinished; private bool _isDisposed; - public Bcj2DecoderStream(Stream[] streams, byte[] info, long limit) + public Bcj2DecoderStream(Stream[] streams) { - if (info != null && info.Length > 0) - { - throw new NotSupportedException(); - } - if (streams.Length != 4) { throw new NotSupportedException(); diff --git a/src/SharpCompress/Compressors/LZMA/DecoderRegistry.Async.cs b/src/SharpCompress/Compressors/LZMA/DecoderRegistry.Async.cs index a3d3a2ea9..84e03e7ac 100644 --- a/src/SharpCompress/Compressors/LZMA/DecoderRegistry.Async.cs +++ b/src/SharpCompress/Compressors/LZMA/DecoderRegistry.Async.cs @@ -7,7 +7,7 @@ using SharpCompress.Compressors.BZip2; using SharpCompress.Compressors.Deflate; using SharpCompress.Compressors.Filters; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.Compressors.PPMd; using SharpCompress.Compressors.ZStandard; @@ -18,7 +18,7 @@ internal static partial class DecoderRegistry internal static async ValueTask CreateDecoderStreamAsync( CMethodId id, Stream[] inStreams, - byte[] info, + byte[]? 
info, IPasswordProvider pass, long limit, CancellationToken cancellationToken @@ -33,18 +33,26 @@ CancellationToken cancellationToken } return inStreams.Single(); case K_DELTA: - return new DeltaFilter(false, inStreams.Single(), info); + return new DeltaFilter(false, inStreams.Single(), info.NotNull()); case K_LZMA: case K_LZMA2: return await LzmaStream - .CreateAsync(info, inStreams.Single(), -1, limit, null, info.Length < 5, false) + .CreateAsync( + info.NotNull(), + inStreams.Single(), + -1, + limit, + null, + info.NotNull().Length < 5, + false + ) .ConfigureAwait(false); case CMethodId.K_AES_ID: - return new AesDecoderStream(inStreams.Single(), info, pass, limit); + return new AesDecoderStream(inStreams.Single(), info.NotNull(), pass, limit); case K_BCJ: return new BCJFilter(false, inStreams.Single()); case K_BCJ2: - return new Bcj2DecoderStream(inStreams, info, limit); + return new Bcj2DecoderStream(inStreams); case K_PPC: return new BCJFilterPPC(false, inStreams.Single()); case K_IA64: @@ -71,7 +79,7 @@ CancellationToken cancellationToken case K_PPMD: return await PpmdStream .CreateAsync( - new PpmdProperties(info), + new PpmdProperties(info.NotNull()), inStreams.Single(), false, cancellationToken diff --git a/src/SharpCompress/Compressors/LZMA/DecoderStream.cs b/src/SharpCompress/Compressors/LZMA/DecoderStream.cs index 3963358b9..ac50ac522 100644 --- a/src/SharpCompress/Compressors/LZMA/DecoderStream.cs +++ b/src/SharpCompress/Compressors/LZMA/DecoderStream.cs @@ -4,7 +4,7 @@ using System.Threading.Tasks; using SharpCompress.Common; using SharpCompress.Common.SevenZip; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.IO; namespace SharpCompress.Compressors.LZMA; diff --git a/src/SharpCompress/Compressors/LZMA/LZ/LzBinTree.cs b/src/SharpCompress/Compressors/LZMA/LZ/LzBinTree.cs index b123e5d59..bc0a8a7ae 100644 --- a/src/SharpCompress/Compressors/LZMA/LZ/LzBinTree.cs +++ 
b/src/SharpCompress/Compressors/LZMA/LZ/LzBinTree.cs @@ -91,10 +91,7 @@ public void Create( uint keepAddBufferAfter ) { - if (historySize > K_MAX_VAL_FOR_NORMALIZE - 256) - { - throw new ArgumentOutOfRangeException(nameof(historySize)); - } + ThrowHelper.ThrowIfGreaterThan(historySize, K_MAX_VAL_FOR_NORMALIZE - 256); _cutValue = 16 + (matchMaxLen >> 1); var windowReservSize = diff --git a/src/SharpCompress/Compressors/LZMA/LZipStream.cs b/src/SharpCompress/Compressors/LZMA/LZipStream.cs index 262aa0e64..f507bce70 100644 --- a/src/SharpCompress/Compressors/LZMA/LZipStream.cs +++ b/src/SharpCompress/Compressors/LZMA/LZipStream.cs @@ -100,6 +100,7 @@ protected override void Dispose(bool disposing) { if (_disposed) { + base.Dispose(disposing); return; } _disposed = true; @@ -112,6 +113,7 @@ protected override void Dispose(bool disposing) _originalStream?.Dispose(); } } + base.Dispose(disposing); } public CompressionMode Mode { get; } diff --git a/src/SharpCompress/Compressors/LZMA/Log.cs b/src/SharpCompress/Compressors/LZMA/Log.cs index c2b5651e4..204831aa0 100644 --- a/src/SharpCompress/Compressors/LZMA/Log.cs +++ b/src/SharpCompress/Compressors/LZMA/Log.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.Diagnostics; namespace SharpCompress.Compressors.LZMA; @@ -28,68 +27,44 @@ private static void EnsureIndent() if (NEEDS_INDENT) { NEEDS_INDENT = false; -#if DEBUG_LZMA - Debug.Write(INDENT.Peek()); -#endif } } public static void Write(object value) { EnsureIndent(); -#if DEBUG_LZMA - Debug.Write(value); -#endif } public static void Write(string text) { EnsureIndent(); -#if DEBUG_LZMA - Debug.Write(text); -#endif } public static void Write(string format, params object[] args) { EnsureIndent(); -#if DEBUG_LZMA - Debug.Write(string.Format(format, args)); -#endif } public static void WriteLine() { -#if DEBUG_LZMA - Debug.WriteLine(""); -#endif NEEDS_INDENT = true; } public static void WriteLine(object value) { EnsureIndent(); -#if DEBUG_LZMA 
- Debug.WriteLine(value); -#endif NEEDS_INDENT = true; } public static void WriteLine(string text) { EnsureIndent(); -#if DEBUG_LZMA - Debug.WriteLine(text); -#endif NEEDS_INDENT = true; } public static void WriteLine(string format, params object[] args) { EnsureIndent(); -#if DEBUG_LZMA - Debug.WriteLine(string.Format(format, args)); -#endif NEEDS_INDENT = true; } } diff --git a/src/SharpCompress/Compressors/LZMA/LzmaDecoder.Async.cs b/src/SharpCompress/Compressors/LZMA/LzmaDecoder.Async.cs index b1d061e89..d8f7c51b4 100644 --- a/src/SharpCompress/Compressors/LZMA/LzmaDecoder.Async.cs +++ b/src/SharpCompress/Compressors/LZMA/LzmaDecoder.Async.cs @@ -9,8 +9,17 @@ namespace SharpCompress.Compressors.LZMA; -public partial class Decoder : ICoder, ISetDecoderProperties +public partial class Decoder : IAsyncDisposable { + public async ValueTask DisposeAsync() + { + if (_outWindow is not null) + { + await _outWindow.DisposeAsync().ConfigureAwait(false); + _outWindow = null; + } + } + partial class LenDecoder { public async ValueTask DecodeAsync( diff --git a/src/SharpCompress/Compressors/LZMA/LzmaDecoder.cs b/src/SharpCompress/Compressors/LZMA/LzmaDecoder.cs index d12adbfff..49a0e4c28 100644 --- a/src/SharpCompress/Compressors/LZMA/LzmaDecoder.cs +++ b/src/SharpCompress/Compressors/LZMA/LzmaDecoder.cs @@ -9,8 +9,14 @@ namespace SharpCompress.Compressors.LZMA; -public partial class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream +public partial class Decoder : ICoder, ISetDecoderProperties, IDisposable { + public void Dispose() + { + _outWindow?.Dispose(); + _outWindow = null; + } + private partial class LenDecoder { private BitDecoder _choice = new(); diff --git a/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs b/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs index e9fa1bab9..d674cf450 100644 --- a/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs +++ b/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs @@ -281,9 +281,6 @@ public void SetPrices(uint 
posState, uint numSymbols, uint[] prices, uint st) } } - private const uint K_NUM_LEN_SPEC_SYMBOLS = - Base.K_NUM_LOW_LEN_SYMBOLS + Base.K_NUM_MID_LEN_SYMBOLS; - private class LenPriceTableEncoder : LenEncoder { private readonly uint[] _prices = new uint[ @@ -1232,12 +1229,6 @@ private uint GetOptimum(uint position, out uint backRes) } } - private bool ChangePair(uint smallDist, uint bigDist) - { - const int kDif = 7; - return (smallDist < ((uint)(1) << (32 - kDif)) && bigDist >= (smallDist << kDif)); - } - private void WriteEndMarker(uint posState) { if (!_writeEndMark) diff --git a/src/SharpCompress/Compressors/LZMA/Registry.cs b/src/SharpCompress/Compressors/LZMA/Registry.cs index 23db75fe5..ff734ef76 100644 --- a/src/SharpCompress/Compressors/LZMA/Registry.cs +++ b/src/SharpCompress/Compressors/LZMA/Registry.cs @@ -5,7 +5,7 @@ using SharpCompress.Compressors.BZip2; using SharpCompress.Compressors.Deflate; using SharpCompress.Compressors.Filters; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.Compressors.PPMd; using SharpCompress.Compressors.ZStandard; @@ -34,7 +34,7 @@ internal static partial class DecoderRegistry internal static Stream CreateDecoderStream( CMethodId id, Stream[] inStreams, - byte[] info, + byte[]? 
info, IPasswordProvider pass, long limit ) @@ -48,16 +48,16 @@ long limit } return inStreams.Single(); case K_DELTA: - return new DeltaFilter(false, inStreams.Single(), info); + return new DeltaFilter(false, inStreams.Single(), info.NotNull()); case K_LZMA: case K_LZMA2: - return LzmaStream.Create(info, inStreams.Single(), -1, limit); + return LzmaStream.Create(info.NotNull(), inStreams.Single(), -1, limit); case CMethodId.K_AES_ID: - return new AesDecoderStream(inStreams.Single(), info, pass, limit); + return new AesDecoderStream(inStreams.Single(), info.NotNull(), pass, limit); case K_BCJ: return new BCJFilter(false, inStreams.Single()); case K_BCJ2: - return new Bcj2DecoderStream(inStreams, info, limit); + return new Bcj2DecoderStream(inStreams); case K_PPC: return new BCJFilterPPC(false, inStreams.Single()); case K_IA64: @@ -75,7 +75,11 @@ long limit case K_B_ZIP2: return BZip2Stream.Create(inStreams.Single(), CompressionMode.Decompress, true); case K_PPMD: - return PpmdStream.Create(new PpmdProperties(info), inStreams.Single(), false); + return PpmdStream.Create( + new PpmdProperties(info.NotNull()), + inStreams.Single(), + false + ); case K_DEFLATE: return new DeflateStream(inStreams.Single(), CompressionMode.Decompress); case K_ZSTD: diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs b/src/SharpCompress/Compressors/LZMA/Utilities/BlockType.cs similarity index 56% rename from src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs rename to src/SharpCompress/Compressors/LZMA/Utilities/BlockType.cs index b57cd53f5..8a4d7ddf6 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/Utils.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilities/BlockType.cs @@ -1,8 +1,4 @@ -using System; -using System.Diagnostics; -using System.IO; - -namespace SharpCompress.Compressors.LZMA.Utilites; +namespace SharpCompress.Compressors.LZMA.Utilities; internal enum BlockType : byte { @@ -37,20 +33,3 @@ internal enum BlockType : byte #endregion } - -internal static 
class Utils -{ - [Conditional("DEBUG")] - public static void Assert(bool expression) - { - if (!expression) - { - if (Debugger.IsAttached) - { - Debugger.Break(); - } - - throw new InvalidOperationException("Assertion failed."); - } - } -} diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.Async.cs b/src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.Async.cs similarity index 93% rename from src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.Async.cs rename to src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.Async.cs index 8ba2901b2..8ff18ef7f 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.Async.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.Async.cs @@ -3,7 +3,7 @@ using System.Threading; using System.Threading.Tasks; -namespace SharpCompress.Compressors.LZMA.Utilites; +namespace SharpCompress.Compressors.LZMA.Utilities; internal partial class CrcBuilderStream : Stream { diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.cs b/src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.cs similarity index 94% rename from src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.cs rename to src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.cs index 523e3e1f3..1c5acb636 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/CrcBuilderStream.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilities/CrcBuilderStream.cs @@ -1,9 +1,7 @@ using System; using System.IO; -using System.Threading; -using System.Threading.Tasks; -namespace SharpCompress.Compressors.LZMA.Utilites; +namespace SharpCompress.Compressors.LZMA.Utilities; internal partial class CrcBuilderStream : Stream { diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/CrcCheckStream.cs b/src/SharpCompress/Compressors/LZMA/Utilities/CrcCheckStream.cs similarity index 58% rename from src/SharpCompress/Compressors/LZMA/Utilites/CrcCheckStream.cs rename to 
src/SharpCompress/Compressors/LZMA/Utilities/CrcCheckStream.cs index c90ebfda3..2c657519f 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/CrcCheckStream.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilities/CrcCheckStream.cs @@ -1,65 +1,29 @@ using System; using System.Buffers; -using System.Diagnostics; using System.IO; using System.Threading; using System.Threading.Tasks; -namespace SharpCompress.Compressors.LZMA.Utilites; +namespace SharpCompress.Compressors.LZMA.Utilities; [CLSCompliant(false)] -public class CrcCheckStream : Stream +public class CrcCheckStream(uint crc) : Stream { - private readonly uint _mExpectedCrc; - private uint _mCurrentCrc; + private uint _mCurrentCrc = Crc.INIT_CRC; private bool _mClosed; private readonly long[] _mBytes = ArrayPool.Shared.Rent(256); - private long _mLength; - - public CrcCheckStream(uint crc) - { - _mExpectedCrc = crc; - _mCurrentCrc = Crc.INIT_CRC; - } protected override void Dispose(bool disposing) { - //Nanook - is not equal here - _mCurrentCrc is yet to be negated - //if (_mCurrentCrc != _mExpectedCrc) - //{ - // throw new InvalidOperationException(); - //} try { if (disposing && !_mClosed) { _mClosed = true; _mCurrentCrc = Crc.Finish(_mCurrentCrc); //now becomes equal -#if DEBUG - if (_mCurrentCrc == _mExpectedCrc) - { - Debug.WriteLine("CRC ok: " + _mExpectedCrc.ToString("x8")); - } - else - { - Debugger.Break(); - Debug.WriteLine("bad CRC"); - } - var lengthInv = 1.0 / _mLength; - double entropy = 0; - for (var i = 0; i < 256; i++) - { - if (_mBytes[i] != 0) - { - var p = lengthInv * _mBytes[i]; - entropy -= p * Math.Log(p, 256); - } - } - Debug.WriteLine("entropy: " + (int)(entropy * 100) + "%"); -#endif - if (_mCurrentCrc != _mExpectedCrc) //moved test to here + if (_mCurrentCrc != crc) //moved test to here { throw new InvalidOperationException(); } @@ -97,7 +61,6 @@ public override int Read(byte[] buffer, int offset, int count) => public override void Write(byte[] buffer, int offset, int count) { - 
_mLength += count; for (var i = 0; i < count; i++) { _mBytes[buffer[offset + i]]++; diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs b/src/SharpCompress/Compressors/LZMA/Utilities/IPasswordProvider.cs similarity index 60% rename from src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs rename to src/SharpCompress/Compressors/LZMA/Utilities/IPasswordProvider.cs index d7d787d22..868b4a529 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilities/IPasswordProvider.cs @@ -1,4 +1,4 @@ -namespace SharpCompress.Compressors.LZMA.Utilites; +namespace SharpCompress.Compressors.LZMA.Utilities; internal interface IPasswordProvider { diff --git a/src/SharpCompress/Compressors/Lzw/LzwStream.Async.cs b/src/SharpCompress/Compressors/Lzw/LzwStream.Async.cs index c6103e101..3303bbd98 100644 --- a/src/SharpCompress/Compressors/Lzw/LzwStream.Async.cs +++ b/src/SharpCompress/Compressors/Lzw/LzwStream.Async.cs @@ -38,6 +38,7 @@ public static async ValueTask IsLzwStreamAsync( { throw new IncompleteArchiveException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}", hdr[0], hdr[1] @@ -325,6 +326,7 @@ private async ValueTask ParseHeaderAsync(CancellationToken cancellationToken) { throw new IncompleteArchiveException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Wrong LZW header. Magic bytes don't match. 
0x{0:x2} 0x{1:x2}", hdr[0], hdr[1] diff --git a/src/SharpCompress/Compressors/Lzw/LzwStream.cs b/src/SharpCompress/Compressors/Lzw/LzwStream.cs index 274abca0a..e09e4397c 100644 --- a/src/SharpCompress/Compressors/Lzw/LzwStream.cs +++ b/src/SharpCompress/Compressors/Lzw/LzwStream.cs @@ -64,6 +64,7 @@ public static bool IsLzwStream(Stream stream) { throw new IncompleteArchiveException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}", hdr[0], hdr[1] @@ -397,6 +398,7 @@ private void ParseHeader() { throw new IncompleteArchiveException( String.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}", hdr[0], hdr[1] @@ -559,6 +561,7 @@ protected override void Dispose(bool disposing) baseInputStream.Dispose(); } } + base.Dispose(disposing); } #endregion Stream Overrides @@ -580,10 +583,10 @@ protected override void Dispose(bool disposing) private const int TBL_FIRST = TBL_CLEAR + 1; - private int[] tabPrefix = new int[0]; // - private byte[] tabSuffix = new byte[0]; // + private int[] tabPrefix = []; // + private byte[] tabSuffix = []; // private readonly int[] zeros = new int[256]; - private byte[] stack = new byte[0]; // + private byte[] stack = []; // // various state private bool blockMode; diff --git a/src/SharpCompress/Compressors/PPMd/I1/Model.cs b/src/SharpCompress/Compressors/PPMd/I1/Model.cs index 2c462285a..7b32fc098 100644 --- a/src/SharpCompress/Compressors/PPMd/I1/Model.cs +++ b/src/SharpCompress/Compressors/PPMd/I1/Model.cs @@ -151,15 +151,9 @@ public Model() /// public void Encode(Stream target, Stream source, PpmdProperties properties) { - if (target is null) - { - throw new ArgumentNullException(nameof(target)); - } + ThrowHelper.ThrowIfNull(target); - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } + ThrowHelper.ThrowIfNull(source); EncodeStart(properties); 
EncodeBlock(target, source, true); @@ -239,15 +233,9 @@ internal void EncodeBlock(Stream target, Stream source, bool final) /// public void Decode(Stream target, Stream source, PpmdProperties properties) { - if (target is null) - { - throw new ArgumentNullException(nameof(target)); - } + ThrowHelper.ThrowIfNull(target); - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } + ThrowHelper.ThrowIfNull(source); DecodeStart(source, properties); var buffer = new byte[65536]; diff --git a/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs b/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs index d1a895452..cf798cd26 100644 --- a/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs +++ b/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs @@ -41,26 +41,8 @@ public Pointer(uint address, byte[] memory) /// public byte this[int offset] { - get - { -#if DEBUG - if (_address == 0) - { - throw new InvalidOperationException("The pointer being indexed is a null pointer."); - } -#endif - return _memory[_address + offset]; - } - set - { -#if DEBUG - if (_address == 0) - { - throw new InvalidOperationException("The pointer being indexed is a null pointer."); - } -#endif - _memory[_address + offset] = value; - } + get { return _memory[_address + offset]; } + set { _memory[_address + offset] = value; } } /// @@ -94,12 +76,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator +(Pointer pointer, int offset) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer is a null pointer."); - } -#endif pointer._address = (uint)(pointer._address + offset); return pointer; } @@ -112,12 +88,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator +(Pointer pointer, uint offset) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer is a null pointer."); - } -#endif pointer._address += offset; 
return pointer; } @@ -129,12 +99,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator ++(Pointer pointer) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer being incremented is a null pointer."); - } -#endif pointer._address++; return pointer; } @@ -147,12 +111,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator -(Pointer pointer, int offset) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer is a null pointer."); - } -#endif pointer._address = (uint)(pointer._address - offset); return pointer; } @@ -165,12 +123,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator -(Pointer pointer, uint offset) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer is a null pointer."); - } -#endif pointer._address -= offset; return pointer; } @@ -182,12 +134,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static Pointer operator --(Pointer pointer) { -#if DEBUG - if (pointer._address == 0) - { - throw new InvalidOperationException("The pointer being decremented is a null pointer."); - } -#endif pointer._address--; return pointer; } @@ -200,20 +146,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// The number of bytes between the two pointers. public static uint operator -(Pointer pointer1, Pointer pointer2) { -#if DEBUG - if (pointer1._address == 0) - { - throw new InvalidOperationException( - "The pointer to the left of the subtraction operator is a null pointer." - ); - } - if (pointer2._address == 0) - { - throw new InvalidOperationException( - "The pointer to the right of the subtraction operator is a null pointer." 
- ); - } -#endif return pointer1._address - pointer2._address; } @@ -225,20 +157,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static bool operator <(Pointer pointer1, Pointer pointer2) { -#if DEBUG - if (pointer1._address == 0) - { - throw new InvalidOperationException( - "The pointer to the left of the less than operator is a null pointer." - ); - } - if (pointer2._address == 0) - { - throw new InvalidOperationException( - "The pointer to the right of the less than operator is a null pointer." - ); - } -#endif return pointer1._address < pointer2._address; } @@ -250,20 +168,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static bool operator <=(Pointer pointer1, Pointer pointer2) { -#if DEBUG - if (pointer1._address == 0) - { - throw new InvalidOperationException( - "The pointer to the left of the less than or equal to operator is a null pointer." - ); - } - if (pointer2._address == 0) - { - throw new InvalidOperationException( - "The pointer to the right of the less than or equal to operator is a null pointer." - ); - } -#endif return pointer1._address <= pointer2._address; } @@ -275,20 +179,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static bool operator >(Pointer pointer1, Pointer pointer2) { -#if DEBUG - if (pointer1._address == 0) - { - throw new InvalidOperationException( - "The pointer to the left of the greater than operator is a null pointer." - ); - } - if (pointer2._address == 0) - { - throw new InvalidOperationException( - "The pointer to the right of the greater than operator is a null pointer." 
- ); - } -#endif return pointer1._address > pointer2._address; } @@ -300,20 +190,6 @@ public static implicit operator Pointer(Model.PpmContext context) => /// public static bool operator >=(Pointer pointer1, Pointer pointer2) { -#if DEBUG - if (pointer1._address == 0) - { - throw new InvalidOperationException( - "The pointer to the left of the greater than or equal to operator is a null pointer." - ); - } - if (pointer2._address == 0) - { - throw new InvalidOperationException( - "The pointer to the right of the greater than or equal to operator is a null pointer." - ); - } -#endif return pointer1._address >= pointer2._address; } diff --git a/src/SharpCompress/Compressors/PPMd/PpmdStream.cs b/src/SharpCompress/Compressors/PPMd/PpmdStream.cs index 022f7df0a..93791dc03 100644 --- a/src/SharpCompress/Compressors/PPMd/PpmdStream.cs +++ b/src/SharpCompress/Compressors/PPMd/PpmdStream.cs @@ -90,10 +90,7 @@ public static async ValueTask CreateAsync( CancellationToken cancellationToken = default ) { - if (stream is null) - { - throw new ArgumentNullException(nameof(stream)); - } + ThrowHelper.ThrowIfNull(stream); if (properties.Version == PpmdVersion.H && compress) { @@ -154,7 +151,11 @@ await instance } catch { +#if LEGACY_DOTNET instance.Dispose(); +#else + await instance.DisposeAsync().ConfigureAwait(false); +#endif throw; } } @@ -167,21 +168,21 @@ await instance public override void Flush() { } - protected override void Dispose(bool isDisposing) + protected override void Dispose(bool disposing) { if (_isDisposed) { return; } _isDisposed = true; - if (isDisposing) + if (disposing) { if (_compress) { _model.EncodeBlock(_stream, new MemoryStream(), true); } } - base.Dispose(isDisposing); + base.Dispose(disposing); } public override long Length => throw new NotSupportedException(); diff --git a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.Async.cs b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.Async.cs index e47ac2e1d..95f82d2c6 100644 --- 
a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.Async.cs +++ b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.Async.cs @@ -14,14 +14,11 @@ public override async Task ReadAsync( CancellationToken cancellationToken ) { - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } int bytesWritten = 0; diff --git a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs index 178a22205..d7bffe85d 100644 --- a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs +++ b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs @@ -53,14 +53,11 @@ public override void Write(byte[] buffer, int offset, int count) => public override int Read(byte[] buffer, int offset, int count) { - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } int bytesWritten = 0; diff --git a/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyAsyncStream.Async.cs b/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyAsyncStream.Async.cs index 64f461d43..09d90e06d 100644 --- a/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyAsyncStream.Async.cs +++ b/src/SharpCompress/Compressors/Rar/MultiVolumeReadOnlyAsyncStream.Async.cs @@ -35,7 +35,9 @@ protected override void Dispose(bool disposing) { base.Dispose(disposing); //acceptable for now? 
+#pragma warning disable VSTHRD002 // Avoid problematic synchronous waits filePartEnumerator.DisposeAsync().AsTask().GetAwaiter().GetResult(); +#pragma warning restore VSTHRD002 // Avoid problematic synchronous waits currentStream = null; } diff --git a/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs b/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs index fa47181ad..41ae7a49a 100644 --- a/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs +++ b/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs @@ -54,7 +54,6 @@ internal class BLAKE2S internal byte[] b; internal int bufferPosition; internal UInt32 lastNodeFlag; - UInt32[] dummy; public BLAKE2S() { @@ -62,7 +61,6 @@ public BLAKE2S() t = new uint[2]; f = new uint[2]; b = new byte[BLAKE2S_BLOCK_SIZE]; - dummy = new uint[2]; } }; @@ -83,7 +81,7 @@ public BLAKE2SP() BLAKE2SP _blake2sp; - byte[] _hash = { }; + byte[] _hash = []; private RarBLAKE2spStream( IRarUnpack unpack, diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs index c4548ef84..929c4eb72 100644 --- a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs +++ b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs @@ -27,7 +27,7 @@ public override void Dispose() if (!disposed) { base.Dispose(); - if (!externalWindow && window is not null) + if (window is not null) { ArrayPool.Shared.Return(window); window = null; @@ -91,8 +91,6 @@ public int Char private BlockTypes unpBlockType; - private bool externalWindow; - private long writtenFileSize; private bool ppmError; diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_async.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_async.cs index 59004430e..03b61ed91 100644 --- a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_async.cs +++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_async.cs @@ -73,7 +73,7 @@ private async Task Unpack5Async(bool Solid, 
CancellationToken cancellationToken if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr) { - await UnpWriteBufAsync(cancellationToken); + await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false); if (WrittenFileSize > DestUnpSize) { return; @@ -215,7 +215,7 @@ private async Task Unpack5Async(bool Solid, CancellationToken cancellationToken continue; } } - await UnpWriteBufAsync(cancellationToken); + await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false); } private async Task ReadFilterAsync( diff --git a/src/SharpCompress/Compressors/Shrink/BitStream.cs b/src/SharpCompress/Compressors/Shrink/BitStream.cs index fc0d5e536..77df3fa31 100644 --- a/src/SharpCompress/Compressors/Shrink/BitStream.cs +++ b/src/SharpCompress/Compressors/Shrink/BitStream.cs @@ -19,7 +19,7 @@ internal class BitStream 31U, 63U, (uint)sbyte.MaxValue, - (uint)byte.MaxValue, + byte.MaxValue, 511U, 1023U, 2047U, @@ -27,7 +27,7 @@ internal class BitStream 8191U, 16383U, (uint)short.MaxValue, - (uint)ushort.MaxValue, + ushort.MaxValue, }; public BitStream(byte[] src, int srcLen) @@ -62,7 +62,7 @@ public int NextBits(int nbits) _bitsLeft += 8; } } - result = (int)((long)_bitBuffer & (long)_maskBits[nbits]); + result = (int)(_bitBuffer & _maskBits[nbits]); _bitBuffer >>= nbits; _bitsLeft -= nbits; return result; diff --git a/src/SharpCompress/Compressors/Shrink/ShrinkStream.cs b/src/SharpCompress/Compressors/Shrink/ShrinkStream.cs index dfd61834a..97da74311 100644 --- a/src/SharpCompress/Compressors/Shrink/ShrinkStream.cs +++ b/src/SharpCompress/Compressors/Shrink/ShrinkStream.cs @@ -7,7 +7,6 @@ namespace SharpCompress.Compressors.Shrink; internal partial class ShrinkStream : Stream { private Stream inStream; - private CompressionMode _compressionMode; private ulong _compressedSize; private long _uncompressedSize; @@ -24,7 +23,6 @@ long uncompressedSize ) { inStream = stream; - _compressionMode = compressionMode; _compressedSize = (ulong)compressedSize; 
_uncompressedSize = uncompressedSize; diff --git a/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs b/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs index d1f179b3b..f9c4439c4 100644 --- a/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs +++ b/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs @@ -10,7 +10,6 @@ namespace SharpCompress.Compressors.Squeezed; public partial class SqueezeStream : Stream { private readonly Stream _stream; - private readonly int _compressedSize; private const int NUMVALS = 257; private const int SPEOF = 256; @@ -19,7 +18,6 @@ public partial class SqueezeStream : Stream private SqueezeStream(Stream stream, int compressedSize) { _stream = stream ?? throw new ArgumentNullException(nameof(stream)); - _compressedSize = compressedSize; } public static SqueezeStream Create(Stream stream, int compressedSize) @@ -63,43 +61,41 @@ public override int Read(byte[] buffer, int offset, int count) private Stream BuildDecodedStream() { - using (var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true)) - { - int numnodes = binaryReader.ReadUInt16(); + using var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true); + int numnodes = binaryReader.ReadUInt16(); - if (numnodes >= NUMVALS || numnodes == 0) - { - return new MemoryStream(Array.Empty()); - } + if (numnodes >= NUMVALS || numnodes == 0) + { + return new MemoryStream(Array.Empty()); + } - var dnode = new int[numnodes, 2]; - for (int j = 0; j < numnodes; j++) - { - dnode[j, 0] = binaryReader.ReadInt16(); - dnode[j, 1] = binaryReader.ReadInt16(); - } + var dnode = new int[numnodes, 2]; + for (int j = 0; j < numnodes; j++) + { + dnode[j, 0] = binaryReader.ReadInt16(); + dnode[j, 1] = binaryReader.ReadInt16(); + } - var bitReader = new BitReader(_stream); - var huffmanDecoded = new MemoryStream(); - int i = 0; + var bitReader = new BitReader(_stream); + var huffmanDecoded = new MemoryStream(); + int i = 0; - while (true) + 
while (true) + { + i = dnode[i, bitReader.ReadBit() ? 1 : 0]; + if (i < 0) { - i = dnode[i, bitReader.ReadBit() ? 1 : 0]; - if (i < 0) + i = -(i + 1); + if (i == SPEOF) { - i = -(i + 1); - if (i == SPEOF) - { - break; - } - huffmanDecoded.WriteByte((byte)i); - i = 0; + break; } + huffmanDecoded.WriteByte((byte)i); + i = 0; } - - huffmanDecoded.Position = 0; - return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length); } + + huffmanDecoded.Position = 0; + return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length); } } diff --git a/src/SharpCompress/Compressors/Xz/Crc32.cs b/src/SharpCompress/Compressors/Xz/Crc32.cs index 611238c58..be303898a 100644 --- a/src/SharpCompress/Compressors/Xz/Crc32.cs +++ b/src/SharpCompress/Compressors/Xz/Crc32.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; namespace SharpCompress.Compressors.Xz; @@ -10,7 +8,7 @@ public static class Crc32 public const uint DefaultPolynomial = 0xedb88320u; public const uint DefaultSeed = 0xffffffffu; - private static uint[] defaultTable; + private static uint[]? defaultTable; public static uint Compute(byte[] buffer) => Compute(DefaultSeed, buffer); diff --git a/src/SharpCompress/Compressors/Xz/Crc64.cs b/src/SharpCompress/Compressors/Xz/Crc64.cs index 2c7df9d5f..e37d04a94 100644 --- a/src/SharpCompress/Compressors/Xz/Crc64.cs +++ b/src/SharpCompress/Compressors/Xz/Crc64.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; namespace SharpCompress.Compressors.Xz; @@ -9,7 +7,7 @@ public static class Crc64 { public const ulong DefaultSeed = 0x0; - internal static ulong[] Table; + internal static ulong[]? 
Table; public const ulong Iso3309Polynomial = 0xD800000000000000; diff --git a/src/SharpCompress/Compressors/Xz/Filters/BlockFilter.cs b/src/SharpCompress/Compressors/Xz/Filters/BlockFilter.cs index eba9b1acc..c819af8c9 100644 --- a/src/SharpCompress/Compressors/Xz/Filters/BlockFilter.cs +++ b/src/SharpCompress/Compressors/Xz/Filters/BlockFilter.cs @@ -40,12 +40,12 @@ private enum FilterTypes : ulong public static BlockFilter Read(BinaryReader reader) { var filterType = (FilterTypes)reader.ReadXZInteger(); - if (!FILTER_MAP.ContainsKey(filterType)) + if (!FILTER_MAP.TryGetValue(filterType, out var createFilter)) { throw new NotImplementedException($"Filter {filterType} has not yet been implemented"); } - var filter = FILTER_MAP[filterType](); + var filter = createFilter(); var sizeOfProperties = reader.ReadXZInteger(); if (sizeOfProperties > int.MaxValue) diff --git a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.Async.cs b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.Async.cs index 05b95ac4a..e348b68f8 100644 --- a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.Async.cs +++ b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.Async.cs @@ -10,18 +10,15 @@ internal static partial class MultiByteIntegers { public static async ValueTask ReadXZIntegerAsync( this BinaryReader reader, - CancellationToken cancellationToken = default, - int MaxBytes = 9 + int maxBytes = 9, + CancellationToken cancellationToken = default ) { - if (MaxBytes <= 0) - { - throw new ArgumentOutOfRangeException(nameof(MaxBytes)); - } + ThrowHelper.ThrowIfNegativeOrZero(maxBytes); - if (MaxBytes > 9) + if (maxBytes > 9) { - MaxBytes = 9; + maxBytes = 9; } var LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false); @@ -30,7 +27,7 @@ public static async ValueTask ReadXZIntegerAsync( var i = 0; while ((LastByte & 0x80) != 0) { - if (++i >= MaxBytes) + if (++i >= maxBytes) { throw new InvalidFormatException(); } diff --git 
a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs index 52dbd5b8d..bf9f29d26 100644 --- a/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs +++ b/src/SharpCompress/Compressors/Xz/MultiByteIntegers.cs @@ -10,10 +10,7 @@ internal static partial class MultiByteIntegers { public static ulong ReadXZInteger(this BinaryReader reader, int MaxBytes = 9) { - if (MaxBytes <= 0) - { - throw new ArgumentOutOfRangeException(nameof(MaxBytes)); - } + ThrowHelper.ThrowIfNegativeOrZero(MaxBytes); if (MaxBytes > 9) { diff --git a/src/SharpCompress/Compressors/Xz/XZBlock.Async.cs b/src/SharpCompress/Compressors/Xz/XZBlock.Async.cs index 78d2de3d8..08c0da70c 100644 --- a/src/SharpCompress/Compressors/Xz/XZBlock.Async.cs +++ b/src/SharpCompress/Compressors/Xz/XZBlock.Async.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; diff --git a/src/SharpCompress/Compressors/Xz/XZBlock.cs b/src/SharpCompress/Compressors/Xz/XZBlock.cs index 39c3f0b5c..9642cd51f 100644 --- a/src/SharpCompress/Compressors/Xz/XZBlock.cs +++ b/src/SharpCompress/Compressors/Xz/XZBlock.cs @@ -19,7 +19,6 @@ public sealed partial class XZBlock : XZReadOnlyStream public ulong? 
UncompressedSize { get; private set; } public Stack Filters { get; private set; } = new(); public bool HeaderIsLoaded { get; private set; } - private CheckType _checkType; private readonly int _checkSize; private bool _streamConnected; private int _numFilters; @@ -33,7 +32,6 @@ public sealed partial class XZBlock : XZReadOnlyStream public XZBlock(Stream stream, CheckType checkType, int checkSize) : base(stream) { - _checkType = checkType; _checkSize = checkSize; _startPosition = stream.Position; } diff --git a/src/SharpCompress/Compressors/Xz/XZIndex.Async.cs b/src/SharpCompress/Compressors/Xz/XZIndex.Async.cs index b2feaabee..4a2d9e503 100644 --- a/src/SharpCompress/Compressors/Xz/XZIndex.Async.cs +++ b/src/SharpCompress/Compressors/Xz/XZIndex.Async.cs @@ -33,7 +33,9 @@ public async ValueTask ProcessAsync(CancellationToken cancellationToken = defaul await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false); } - NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false); + NumberOfRecords = await _reader + .ReadXZIntegerAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); for (ulong i = 0; i < NumberOfRecords; i++) { Records.Add( diff --git a/src/SharpCompress/Compressors/Xz/XZIndexRecord.Async.cs b/src/SharpCompress/Compressors/Xz/XZIndexRecord.Async.cs index 9d6d6f6c9..812b18534 100644 --- a/src/SharpCompress/Compressors/Xz/XZIndexRecord.Async.cs +++ b/src/SharpCompress/Compressors/Xz/XZIndexRecord.Async.cs @@ -13,8 +13,9 @@ public static async ValueTask FromBinaryReaderAsync( ) { var record = new XZIndexRecord(); - record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false); - record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken) + record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken: cancellationToken) 
.ConfigureAwait(false); return record; } diff --git a/src/SharpCompress/Compressors/Xz/XZStream.cs b/src/SharpCompress/Compressors/Xz/XZStream.cs index 489b449a8..ea193431f 100644 --- a/src/SharpCompress/Compressors/Xz/XZStream.cs +++ b/src/SharpCompress/Compressors/Xz/XZStream.cs @@ -12,10 +12,7 @@ namespace SharpCompress.Compressors.Xz; public sealed partial class XZStream : XZReadOnlyStream { public XZStream(Stream baseStream) - : base(baseStream) - { - _baseStream = baseStream; - } + : base(baseStream) { } protected override void Dispose(bool disposing) { @@ -48,7 +45,6 @@ private void AssertBlockCheckTypeIsSupported() } } - private readonly Stream _baseStream; public XZHeader Header { get; private set; } public XZIndex Index { get; private set; } public XZFooter Footer { get; private set; } diff --git a/src/SharpCompress/Compressors/ZStandard/CompressionStream.Async.cs b/src/SharpCompress/Compressors/ZStandard/CompressionStream.Async.cs index 7a16b500b..ba2371a53 100644 --- a/src/SharpCompress/Compressors/ZStandard/CompressionStream.Async.cs +++ b/src/SharpCompress/Compressors/ZStandard/CompressionStream.Async.cs @@ -16,6 +16,13 @@ public async ValueTask DisposeAsync() { if (compressor == null) { +#if LEGACY_DOTNET + Dispose(true); + GC.SuppressFinalize(this); + await Task.CompletedTask.ConfigureAwait(false); +#else + await base.DisposeAsync().ConfigureAwait(false); +#endif return; } @@ -28,6 +35,12 @@ public async ValueTask DisposeAsync() ReleaseUnmanagedResources(); GC.SuppressFinalize(this); } +#if LEGACY_DOTNET + Dispose(true); + await Task.CompletedTask.ConfigureAwait(false); +#else + await base.DisposeAsync().ConfigureAwait(false); +#endif } public override async Task FlushAsync(CancellationToken cancellationToken) => diff --git a/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs index 21e341ce0..432fcb2df 100644 --- a/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs 
+++ b/src/SharpCompress/Compressors/ZStandard/CompressionStream.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Buffers; using System.IO; using System.Threading; @@ -32,20 +32,14 @@ public CompressionStream( bool leaveOpen = true ) { - if (stream == null) - { - throw new ArgumentNullException(nameof(stream)); - } + SharpCompress.ThrowHelper.ThrowIfNull(stream); if (!stream.CanWrite) { throw new ArgumentException("Stream is not writable", nameof(stream)); } - if (bufferSize < 0) - { - throw new ArgumentOutOfRangeException(nameof(bufferSize)); - } + SharpCompress.ThrowHelper.ThrowIfNegative(bufferSize); innerStream = stream; this.compressor = compressor; @@ -84,6 +78,7 @@ protected override void Dispose(bool disposing) { if (compressor == null) { + base.Dispose(disposing); return; } @@ -98,6 +93,7 @@ protected override void Dispose(bool disposing) { ReleaseUnmanagedResources(); } + base.Dispose(disposing); } private void ReleaseUnmanagedResources() @@ -121,7 +117,8 @@ private void ReleaseUnmanagedResources() public override void Flush() => FlushInternal(ZSTD_EndDirective.ZSTD_e_flush); - private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive); + private void FlushInternal(ZSTD_EndDirective directive) => + WriteInternal(ReadOnlySpan.Empty, directive); public override void Write(byte[] buffer, int offset, int count) => Write(new ReadOnlySpan(buffer, offset, count)); @@ -138,11 +135,7 @@ private void WriteInternal(ReadOnlySpan buffer, ZSTD_EndDirective directiv { EnsureNotDisposed(); - var input = new ZSTD_inBuffer_s - { - pos = 0, - size = buffer != null ? 
(nuint)buffer.Length : 0, - }; + var input = new ZSTD_inBuffer_s { pos = 0, size = (nuint)buffer.Length }; nuint remaining; do { diff --git a/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs index 97d289774..abb55e098 100644 --- a/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs +++ b/src/SharpCompress/Compressors/ZStandard/DecompressionStream.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Buffers; using System.IO; using System.Threading; @@ -37,20 +37,14 @@ public DecompressionStream( bool leaveOpen = true ) { - if (stream == null) - { - throw new ArgumentNullException(nameof(stream)); - } + SharpCompress.ThrowHelper.ThrowIfNull(stream); if (!stream.CanRead) { throw new ArgumentException("Stream is not readable", nameof(stream)); } - if (bufferSize < 0) - { - throw new ArgumentOutOfRangeException(nameof(bufferSize)); - } + SharpCompress.ThrowHelper.ThrowIfNegative(bufferSize); innerStream = stream; this.decompressor = decompressor; @@ -90,6 +84,7 @@ protected override void Dispose(bool disposing) { if (decompressor == null) { + base.Dispose(disposing); return; } @@ -108,6 +103,7 @@ protected override void Dispose(bool disposing) { innerStream.Dispose(); } + base.Dispose(disposing); } public override int Read(byte[] buffer, int offset, int count) => diff --git a/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs b/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs index 79190428a..896dd19ef 100644 --- a/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs +++ b/src/SharpCompress/Compressors/ZStandard/SafeHandles.cs @@ -32,7 +32,7 @@ protected SafeZstdHandle() internal sealed unsafe class SafeCctxHandle : SafeZstdHandle { /// - private SafeCctxHandle() { } + internal SafeCctxHandle() { } /// /// Creates a new instance of . 
@@ -85,7 +85,7 @@ protected override bool ReleaseHandle() internal sealed unsafe class SafeDctxHandle : SafeZstdHandle { /// - private SafeDctxHandle() { } + internal SafeDctxHandle() { } /// /// Creates a new instance of . diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompress.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompress.cs index be97cccb5..0242d47b0 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompress.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompress.cs @@ -1,5 +1,4 @@ using System; -using System.Diagnostics; using System.Numerics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -2838,12 +2837,6 @@ public static ZSTD_frameProgression ZSTD_getFrameProgression(ZSTD_CCtx_s* cctx) { ZSTD_frameProgression fp; nuint buffered = cctx->inBuff == null ? 0 : cctx->inBuffPos - cctx->inToCompress; -#if DEBUG - if (buffered != 0) - { - assert(cctx->inBuffPos >= cctx->inToCompress); - } -#endif assert(buffered <= 1 << 17); fp.ingested = cctx->consumedSrcSize + buffered; fp.consumed = cctx->consumedSrcSize; @@ -2868,7 +2861,6 @@ public static nuint ZSTD_toFlushNow(ZSTD_CCtx_s* cctx) return 0; } - [Conditional("DEBUG")] private static void ZSTD_assertEqualCParams( ZSTD_compressionParameters cParams1, ZSTD_compressionParameters cParams2 @@ -4617,12 +4609,6 @@ private static nuint ZSTD_buildSeqStore(ZSTD_CCtx_s* zc, void* src, nuint srcSiz byte* @base = ms->window.@base; byte* istart = (byte*)src; uint curr = (uint)(istart - @base); -#if DEBUG - if (sizeof(nint) == 8) - { - assert(istart - @base < unchecked((nint)(uint)-1)); - } -#endif if (curr > ms->nextToUpdate + 384) { ms->nextToUpdate = @@ -7160,12 +7146,6 @@ ZSTD_tableFillPurpose_e tfp if (srcSize > unchecked((uint)-1) - (MEM_64bits ? 
3500U * (1 << 20) : 2000U * (1 << 20))) { assert(ZSTD_window_isEmpty(ms->window) != 0); -#if DEBUG - if (loadLdmDict != 0) - { - assert(ZSTD_window_isEmpty(ls->window) != 0); - } -#endif } ZSTD_window_update(&ms->window, src, srcSize, 0); @@ -9227,35 +9207,7 @@ ZSTD_EndDirective flushMode zcs->stableIn_notConsumed = 0; } -#if DEBUG - if (zcs->appliedParams.inBufferMode == ZSTD_bufferMode_e.ZSTD_bm_buffered) - { - assert(zcs->inBuff != null); - assert(zcs->inBuffSize > 0); - } -#endif - -#if DEBUG - if (zcs->appliedParams.outBufferMode == ZSTD_bufferMode_e.ZSTD_bm_buffered) - { - assert(zcs->outBuff != null); - assert(zcs->outBuffSize > 0); - } -#endif - -#if DEBUG - if (input->src == null) - { - assert(input->size == 0); - } -#endif assert(input->pos <= input->size); -#if DEBUG - if (output->dst == null) - { - assert(output->size == 0); - } -#endif assert(output->pos <= output->size); assert((uint)flushMode <= (uint)ZSTD_EndDirective.ZSTD_e_end); while (someMoreWork != 0) @@ -9413,12 +9365,6 @@ ZSTD_EndDirective flushMode zcs->inBuffTarget = zcs->blockSizeMax; } -#if DEBUG - if (lastBlock == 0) - { - assert(zcs->inBuffTarget <= zcs->inBuffSize); - } -#endif zcs->inToCompress = zcs->inBuffPos; } else @@ -9445,12 +9391,6 @@ ZSTD_EndDirective flushMode } zcs->frameEnded = lastBlock; -#if DEBUG - if (lastBlock != 0) - { - assert(ip == iend); - } -#endif } if (cDst == op) diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressLiterals.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressLiterals.cs index 8f203faa0..c4af37ebc 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressLiterals.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressLiterals.cs @@ -279,22 +279,15 @@ int bmi2 switch (lhSize) { case 3: -#if DEBUG - if (singleStream == 0) - { - assert(srcSize >= 6); - } - -#endif - { - uint lhc = - (uint)hType - + ((singleStream == 0 ? 
1U : 0U) << 2) - + ((uint)srcSize << 4) - + ((uint)cLitSize << 14); - MEM_writeLE24(ostart, lhc); - break; - } + { + uint lhc = + (uint)hType + + ((singleStream == 0 ? 1U : 0U) << 2) + + ((uint)srcSize << 4) + + ((uint)cLitSize << 14); + MEM_writeLE24(ostart, lhc); + break; + } case 4: assert(srcSize >= 6); diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSequences.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSequences.cs index d9e010896..10dca3cbc 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSequences.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCompressSequences.cs @@ -729,13 +729,6 @@ ZSTD_strategy strategy : unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_GENERIC)); nuint NCountCost = ZSTD_NCountCost(count, max, nbSeq, FSELog); nuint compressedCost = (NCountCost << 3) + ZSTD_entropyCost(count, max, nbSeq); -#if DEBUG - if (isDefaultAllowed != default) - { - assert(!ERR_isError(basicCost)); - assert(!(*repeatMode == FSE_repeat.FSE_repeat_valid && ERR_isError(repeatCost))); - } -#endif assert(!ERR_isError(NCountCost)); assert(compressedCost < unchecked((nuint)(-(int)ZSTD_ErrorCode.ZSTD_error_maxCode))); diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCwksp.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCwksp.cs index c11759976..455ab8614 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCwksp.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdCwksp.cs @@ -1,4 +1,3 @@ -using System.Diagnostics; using System.Runtime.CompilerServices; using static SharpCompress.Compressors.ZStandard.UnsafeHelper; @@ -7,7 +6,6 @@ namespace SharpCompress.Compressors.ZStandard.Unsafe; public static unsafe partial class Methods { [MethodImpl(MethodImplOptions.AggressiveInlining)] - [Conditional("DEBUG")] private static void ZSTD_cwksp_assert_internal_consistency(ZSTD_cwksp* ws) { assert(ws->workspace <= ws->objectEnd); diff --git 
a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLazy.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLazy.cs index a1ed13a03..178160b1d 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLazy.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdLazy.cs @@ -3604,14 +3604,6 @@ ZSTD_dictMode_e dictMode } } -#if DEBUG - if (isDxS != 0) - { - assert(offset_1 <= dictAndPrefixLength); - assert(offset_2 <= dictAndPrefixLength); - } -#endif - ms->lazySkipping = 0; if (searchMethod == searchMethod_e.search_rowHash) { diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdOpt.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdOpt.cs index 08efcf671..00066de52 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdOpt.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdOpt.cs @@ -1043,12 +1043,6 @@ uint mls { assert(matchIndex + matchLength >= dictLimit); match = @base + matchIndex; -#if DEBUG - if (matchIndex >= dictLimit) - { - assert(memcmp(match, ip, matchLength) == 0); - } -#endif matchLength += ZSTD_count(ip + matchLength, match + matchLength, iLimit); } else diff --git a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdmtCompress.cs b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdmtCompress.cs index 701ed7306..04ca1cd42 100644 --- a/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdmtCompress.cs +++ b/src/SharpCompress/Compressors/ZStandard/Unsafe/ZstdmtCompress.cs @@ -735,12 +735,6 @@ private static void ZSTDMT_compressionJob(void* jobDescription) byte* op = ostart; byte* oend = op + dstBuff.capacity; int chunkNb; -#if DEBUG - if (sizeof(nuint) > sizeof(int)) - { - assert(job->src.size < unchecked(2147483647 * chunkSize)); - } -#endif assert(job->cSize == 0); for (chunkNb = 1; chunkNb < nbChunks; chunkNb++) { @@ -798,13 +792,6 @@ private static void ZSTDMT_compressionJob(void* jobDescription) } } -#if DEBUG - if (job->firstJob == 0) - { - 
assert(ZSTD_window_hasExtDict(cctx->blockState.matchState.window) == 0); - } -#endif - ZSTD_CCtx_trace(cctx, 0); _endJob: ZSTDMT_serialState_ensureFinished(job->serial, job->jobID, job->cSize); @@ -1211,12 +1198,6 @@ private static nuint ZSTDMT_toFlushNow(ZSTDMT_CCtx_s* mtctx) assert(flushed <= produced); assert(jobPtr->consumed <= jobPtr->src.size); toFlush = produced - flushed; -#if DEBUG - if (toFlush == 0) - { - assert(jobPtr->consumed < jobPtr->src.size); - } -#endif } SynchronizationWrapper.Exit(&mtctx->jobs[wJobID].job_mutex); diff --git a/src/SharpCompress/Compressors/ZStandard/UnsafeHelper.cs b/src/SharpCompress/Compressors/ZStandard/UnsafeHelper.cs index 34325f70e..a55976443 100644 --- a/src/SharpCompress/Compressors/ZStandard/UnsafeHelper.cs +++ b/src/SharpCompress/Compressors/ZStandard/UnsafeHelper.cs @@ -1,5 +1,4 @@ using System; -using System.Diagnostics; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; @@ -21,11 +20,7 @@ public static unsafe class UnsafeHelper #else var ptr = (void*)Marshal.AllocHGlobal((nint)size); #endif -#if DEBUG - return PoisonMemory(ptr, size); -#else return ptr; -#endif } [MethodImpl(MethodImplOptions.AggressiveInlining)] @@ -85,7 +80,6 @@ public static void free(void* ptr) return destination; } - [Conditional("DEBUG")] [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void assert(bool condition, string? 
message = null) { diff --git a/src/SharpCompress/Crypto/BlockTransformer.cs b/src/SharpCompress/Crypto/BlockTransformer.cs index ed9ead44a..d3196aaad 100644 --- a/src/SharpCompress/Crypto/BlockTransformer.cs +++ b/src/SharpCompress/Crypto/BlockTransformer.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Security.Cryptography; diff --git a/src/SharpCompress/Crypto/Crc32Stream.cs b/src/SharpCompress/Crypto/Crc32Stream.cs index 1cbe88845..4a65587e9 100644 --- a/src/SharpCompress/Crypto/Crc32Stream.cs +++ b/src/SharpCompress/Crypto/Crc32Stream.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.IO; @@ -15,7 +13,7 @@ public sealed class Crc32Stream : Stream public const uint DEFAULT_POLYNOMIAL = 0xedb88320u; public const uint DEFAULT_SEED = 0xffffffffu; - private static uint[] _defaultTable; + private static uint[]? _defaultTable; public Crc32Stream(Stream stream) : this(stream, DEFAULT_POLYNOMIAL, DEFAULT_SEED) { } diff --git a/src/SharpCompress/Factories/LzwFactory.cs b/src/SharpCompress/Factories/LzwFactory.cs index c4797be54..cbfabb60e 100644 --- a/src/SharpCompress/Factories/LzwFactory.cs +++ b/src/SharpCompress/Factories/LzwFactory.cs @@ -92,7 +92,7 @@ public ValueTask OpenAsyncReader( ) { cancellationToken.ThrowIfCancellationRequested(); - return LzwReader.OpenAsyncReader(stream, options); + return LzwReader.OpenAsyncReader(stream, options, cancellationToken); } #endregion diff --git a/src/SharpCompress/IO/CountingStream.cs b/src/SharpCompress/IO/CountingStream.cs index ff6df3d59..ecc36cf79 100644 --- a/src/SharpCompress/IO/CountingStream.cs +++ b/src/SharpCompress/IO/CountingStream.cs @@ -9,7 +9,6 @@ namespace SharpCompress.IO; internal class CountingStream : Stream { private readonly Stream _stream; - private readonly bool _leaveOpen; private long _bytesWritten; public CountingStream(Stream stream) @@ -59,7 +58,7 @@ public override void WriteByte(byte value) protected override void Dispose(bool disposing) { - if (disposing && 
!_leaveOpen) + if (disposing) { _stream.Dispose(); } diff --git a/src/SharpCompress/IO/IStreamStack.cs b/src/SharpCompress/IO/IStreamStack.cs index d156ab757..c24a9c90a 100644 --- a/src/SharpCompress/IO/IStreamStack.cs +++ b/src/SharpCompress/IO/IStreamStack.cs @@ -1,7 +1,6 @@ using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using System.Text; diff --git a/src/SharpCompress/IO/MarkingBinaryReader.cs b/src/SharpCompress/IO/MarkingBinaryReader.cs index be26df86a..859022638 100644 --- a/src/SharpCompress/IO/MarkingBinaryReader.cs +++ b/src/SharpCompress/IO/MarkingBinaryReader.cs @@ -1,15 +1,14 @@ using System; using System.Buffers.Binary; using System.IO; +using System.Text; using SharpCompress.Common; namespace SharpCompress.IO; -internal class MarkingBinaryReader : BinaryReader +internal class MarkingBinaryReader(Stream stream) + : BinaryReader(stream, Encoding.UTF8, leaveOpen: true) //always leave the stream open { - public MarkingBinaryReader(Stream stream) - : base(stream) { } - public virtual long CurrentReadByteCount { get; protected set; } public virtual void Mark() => CurrentReadByteCount = 0; @@ -47,6 +46,7 @@ public override byte[] ReadBytes(int count) { throw new InvalidFormatException( string.Format( + global::SharpCompress.Common.Constants.DefaultCultureInfo, "Could not read the requested amount of bytes. End of stream reached. 
Requested: {0} Read: {1}", count, bytes.Length diff --git a/src/SharpCompress/IO/ReadOnlySubStream.cs b/src/SharpCompress/IO/ReadOnlySubStream.cs index 763000deb..33317c0e3 100644 --- a/src/SharpCompress/IO/ReadOnlySubStream.cs +++ b/src/SharpCompress/IO/ReadOnlySubStream.cs @@ -1,5 +1,4 @@ using System; -using System.Diagnostics; using System.IO; namespace SharpCompress.IO; diff --git a/src/SharpCompress/IO/SeekableSharpCompressStream.cs b/src/SharpCompress/IO/SeekableSharpCompressStream.cs index 28b67c4e2..a2602a953 100644 --- a/src/SharpCompress/IO/SeekableSharpCompressStream.cs +++ b/src/SharpCompress/IO/SeekableSharpCompressStream.cs @@ -25,10 +25,7 @@ internal sealed partial class SeekableSharpCompressStream : SharpCompressStream public SeekableSharpCompressStream(Stream stream, bool leaveStreamOpen = false) : base(Null, true, false, null) { - if (stream is null) - { - throw new ArgumentNullException(nameof(stream)); - } + ThrowHelper.ThrowIfNull(stream); if (!stream.CanSeek) { throw new ArgumentException("Stream must be seekable", nameof(stream)); diff --git a/src/SharpCompress/IO/SharpCompressStream.Async.cs b/src/SharpCompress/IO/SharpCompressStream.Async.cs index a36a9862e..159238aa3 100644 --- a/src/SharpCompress/IO/SharpCompressStream.Async.cs +++ b/src/SharpCompress/IO/SharpCompressStream.Async.cs @@ -275,6 +275,7 @@ public override async ValueTask DisposeAsync() _ringBuffer?.Dispose(); _ringBuffer = null; } + await base.DisposeAsync().ConfigureAwait(false); } #endif } diff --git a/src/SharpCompress/LazyAsyncReadOnlyCollection.cs b/src/SharpCompress/LazyAsyncReadOnlyCollection.cs index 6d1525936..88d08fdff 100644 --- a/src/SharpCompress/LazyAsyncReadOnlyCollection.cs +++ b/src/SharpCompress/LazyAsyncReadOnlyCollection.cs @@ -1,4 +1,3 @@ -#nullable disable using System; using System.Collections; using System.Collections.Generic; diff --git a/src/SharpCompress/LazyReadOnlyCollection.cs b/src/SharpCompress/LazyReadOnlyCollection.cs index 
eee60bc76..2a7e95ffb 100644 --- a/src/SharpCompress/LazyReadOnlyCollection.cs +++ b/src/SharpCompress/LazyReadOnlyCollection.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections; using System.Collections.Generic; @@ -43,7 +41,7 @@ public void Dispose() #region IEnumerator Members - object IEnumerator.Current => Current; + object IEnumerator.Current => Current!; public bool MoveNext() { diff --git a/src/SharpCompress/NotNullExtensions.cs b/src/SharpCompress/NotNullExtensions.cs index 0f5ba99cd..4c25e008d 100644 --- a/src/SharpCompress/NotNullExtensions.cs +++ b/src/SharpCompress/NotNullExtensions.cs @@ -52,7 +52,7 @@ public static T NotNull( ) where T : class { - ArgumentNullException.ThrowIfNull(obj, paramName); + ThrowHelper.ThrowIfNull(obj, paramName); return obj; } @@ -63,7 +63,11 @@ public static T NotNull( ) where T : struct { - ArgumentNullException.ThrowIfNull(obj, paramName); + if (!obj.HasValue) + { + throw new ArgumentNullException(paramName); + } + return obj.Value; } #endif diff --git a/src/SharpCompress/Polyfills/StringExtensions.cs b/src/SharpCompress/Polyfills/StringExtensions.cs index c83dee70e..84e1cbbe4 100644 --- a/src/SharpCompress/Polyfills/StringExtensions.cs +++ b/src/SharpCompress/Polyfills/StringExtensions.cs @@ -1,4 +1,6 @@ -#if LEGACY_DOTNET +using System; + +#if LEGACY_DOTNET namespace SharpCompress; @@ -10,4 +12,9 @@ internal static bool EndsWith(this string text, char value) => internal static bool Contains(this string text, char value) => text.IndexOf(value) > -1; } +[AttributeUsage( + AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue +)] +public class NotNullAttribute : Attribute; + #endif diff --git a/src/SharpCompress/Providers/CompressionProviderRegistry.cs b/src/SharpCompress/Providers/CompressionProviderRegistry.cs index 27de3181b..c061dc261 100644 --- a/src/SharpCompress/Providers/CompressionProviderRegistry.cs +++ 
b/src/SharpCompress/Providers/CompressionProviderRegistry.cs @@ -275,10 +275,7 @@ public ValueTask CreateDecompressStreamAsync( /// If provider is null. public CompressionProviderRegistry With(ICompressionProvider provider) { - if (provider is null) - { - throw new ArgumentNullException(nameof(provider)); - } + ThrowHelper.ThrowIfNull(provider); var newProviders = new Dictionary(_providers) { diff --git a/src/SharpCompress/Readers/AbstractReader.Async.cs b/src/SharpCompress/Readers/AbstractReader.Async.cs index 0a507a34d..3c3e01df0 100644 --- a/src/SharpCompress/Readers/AbstractReader.Async.cs +++ b/src/SharpCompress/Readers/AbstractReader.Async.cs @@ -126,10 +126,7 @@ public async ValueTask WriteEntryToAsync( ); } - if (writableStream is null) - { - throw new ArgumentNullException(nameof(writableStream)); - } + ThrowHelper.ThrowIfNull(writableStream); if (!writableStream.CanWrite) { throw new ArgumentException( diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs index 5dd143819..73a282d41 100644 --- a/src/SharpCompress/Readers/AbstractReader.cs +++ b/src/SharpCompress/Readers/AbstractReader.cs @@ -190,10 +190,7 @@ public void WriteEntryTo(Stream writableStream) throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once."); } - if (writableStream is null) - { - throw new ArgumentNullException(nameof(writableStream)); - } + ThrowHelper.ThrowIfNull(writableStream); if (!writableStream.CanWrite) { throw new ArgumentException( diff --git a/src/SharpCompress/Readers/Ace/AceReader.Factory.cs b/src/SharpCompress/Readers/Ace/AceReader.Factory.cs index a9e3e8767..d3adddd03 100644 --- a/src/SharpCompress/Readers/Ace/AceReader.Factory.cs +++ b/src/SharpCompress/Readers/Ace/AceReader.Factory.cs @@ -15,12 +15,12 @@ public partial class AceReader /// Opens an AceReader for non-seeking usage with a single volume. /// /// The stream containing the ACE archive. - /// Reader options. + /// Reader options. 
/// An AceReader instance. - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new SingleVolumeAceReader(stream, options ?? new ReaderOptions()); + return new SingleVolumeAceReader(stream, readerOptions ?? new ReaderOptions()); } /// diff --git a/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs b/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs index 0b000f128..c8abfae91 100644 --- a/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs +++ b/src/SharpCompress/Readers/Ace/MultiVolumeAceReader.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections; using System.Collections.Generic; @@ -15,7 +13,7 @@ namespace SharpCompress.Readers.Ace; internal class MultiVolumeAceReader : AceReader { private readonly IEnumerator streams; - private Stream tempStream; + private Stream? tempStream; internal MultiVolumeAceReader(IEnumerable streams, ReaderOptions options) : base(options) => this.streams = streams.GetEnumerator(); @@ -54,13 +52,13 @@ private class MultiVolumeStreamEnumerator : IEnumerable, IEnumerator nextReadableStreams; - private Stream tempStream; + private Stream? tempStream; private bool isFirst = true; internal MultiVolumeStreamEnumerator( MultiVolumeAceReader r, IEnumerator nextReadableStreams, - Stream tempStream + Stream? 
tempStream ) { reader = r; @@ -72,7 +70,7 @@ Stream tempStream IEnumerator IEnumerable.GetEnumerator() => this; - public FilePart Current { get; private set; } + public FilePart Current { get; private set; } = null!; public void Dispose() { } diff --git a/src/SharpCompress/Readers/Arc/ArcReader.cs b/src/SharpCompress/Readers/Arc/ArcReader.cs index d641b554b..376d598e2 100644 --- a/src/SharpCompress/Readers/Arc/ArcReader.cs +++ b/src/SharpCompress/Readers/Arc/ArcReader.cs @@ -20,12 +20,12 @@ private ArcReader(Stream stream, ReaderOptions options) /// Opens an ArcReader for Non-seeking usage with a single volume /// /// - /// + /// /// - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new ArcReader(stream, options ?? new ReaderOptions()); + return new ArcReader(stream, readerOptions ?? new ReaderOptions()); } protected override IEnumerable GetEntries(Stream stream) diff --git a/src/SharpCompress/Readers/Arj/ArjReader.cs b/src/SharpCompress/Readers/Arj/ArjReader.cs index 857a5ec90..d5a0ae741 100644 --- a/src/SharpCompress/Readers/Arj/ArjReader.cs +++ b/src/SharpCompress/Readers/Arj/ArjReader.cs @@ -27,12 +27,12 @@ internal ArjReader(ReaderOptions options) /// Opens an ArjReader for Non-seeking usage with a single volume /// /// - /// + /// /// - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new SingleVolumeArjReader(stream, options ?? new ReaderOptions()); + return new SingleVolumeArjReader(stream, readerOptions ?? 
new ReaderOptions()); } /// diff --git a/src/SharpCompress/Readers/Arj/MultiVolumeArjReader.cs b/src/SharpCompress/Readers/Arj/MultiVolumeArjReader.cs index 547541ef8..1eb4a9eae 100644 --- a/src/SharpCompress/Readers/Arj/MultiVolumeArjReader.cs +++ b/src/SharpCompress/Readers/Arj/MultiVolumeArjReader.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections; using System.Collections.Generic; @@ -16,7 +14,7 @@ namespace SharpCompress.Readers.Arj; internal class MultiVolumeArjReader : ArjReader { private readonly IEnumerator streams; - private Stream tempStream; + private Stream? tempStream; internal MultiVolumeArjReader(IEnumerable streams, ReaderOptions options) : base(options) => this.streams = streams.GetEnumerator(); @@ -55,13 +53,13 @@ private class MultiVolumeStreamEnumerator : IEnumerable, IEnumerator nextReadableStreams; - private Stream tempStream; + private Stream? tempStream; private bool isFirst = true; internal MultiVolumeStreamEnumerator( MultiVolumeArjReader r, IEnumerator nextReadableStreams, - Stream tempStream + Stream? tempStream ) { reader = r; @@ -73,7 +71,7 @@ Stream tempStream IEnumerator IEnumerable.GetEnumerator() => this; - public FilePart Current { get; private set; } + public FilePart Current { get; private set; } = null!; public void Dispose() { } diff --git a/src/SharpCompress/Readers/GZip/GZipReader.Factory.cs b/src/SharpCompress/Readers/GZip/GZipReader.Factory.cs index 5f1542e96..2a05e3232 100644 --- a/src/SharpCompress/Readers/GZip/GZipReader.Factory.cs +++ b/src/SharpCompress/Readers/GZip/GZipReader.Factory.cs @@ -52,9 +52,9 @@ public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? readerOptions return OpenReader(fileInfo.OpenRead(), readerOptions); } - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new GZipReader(stream, options ?? 
new ReaderOptions()); + return new GZipReader(stream, readerOptions ?? new ReaderOptions()); } } diff --git a/src/SharpCompress/Readers/Lzw/LzwReader.Factory.cs b/src/SharpCompress/Readers/Lzw/LzwReader.Factory.cs index 008562c66..6e17e2225 100644 --- a/src/SharpCompress/Readers/Lzw/LzwReader.Factory.cs +++ b/src/SharpCompress/Readers/Lzw/LzwReader.Factory.cs @@ -52,9 +52,9 @@ public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? readerOptions return OpenReader(fileInfo.OpenRead(), readerOptions); } - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new LzwReader(stream, options ?? new ReaderOptions()); + return new LzwReader(stream, readerOptions ?? new ReaderOptions()); } } diff --git a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.Async.cs b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.Async.cs index 1f9d4463f..620b28c6f 100644 --- a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.Async.cs +++ b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.Async.cs @@ -1,5 +1,3 @@ -#nullable disable - using System.Collections; using System.Collections.Generic; using System.IO; @@ -26,13 +24,13 @@ private class MultiVolumeStreamAsyncEnumerator { private readonly MultiVolumeRarReader reader; private readonly IEnumerator nextReadableStreams; - private Stream tempStream; + private Stream? tempStream; private bool isFirst = true; internal MultiVolumeStreamAsyncEnumerator( MultiVolumeRarReader r, IEnumerator nextReadableStreams, - Stream tempStream + Stream? 
tempStream ) { reader = r; @@ -40,7 +38,7 @@ Stream tempStream this.tempStream = tempStream; } - public FilePart Current { get; private set; } + public FilePart Current { get; private set; } = null!; public async ValueTask MoveNextAsync() { diff --git a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs index dd811bc51..5456ff6d5 100644 --- a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs +++ b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs @@ -1,5 +1,3 @@ -#nullable disable - using System.Collections; using System.Collections.Generic; using System.IO; @@ -14,7 +12,7 @@ namespace SharpCompress.Readers.Rar; internal partial class MultiVolumeRarReader : RarReader { private readonly IEnumerator streams; - private Stream tempStream; + private Stream? tempStream; internal MultiVolumeRarReader(IEnumerable streams, ReaderOptions options) : base(options) => this.streams = streams.GetEnumerator(); @@ -55,13 +53,13 @@ private class MultiVolumeStreamEnumerator : IEnumerable, IEnumerator nextReadableStreams; - private Stream tempStream; + private Stream? tempStream; private bool isFirst = true; internal MultiVolumeStreamEnumerator( MultiVolumeRarReader r, IEnumerator nextReadableStreams, - Stream tempStream + Stream? tempStream ) { reader = r; @@ -73,7 +71,7 @@ Stream tempStream IEnumerator IEnumerable.GetEnumerator() => this; - public FilePart Current { get; private set; } + public FilePart Current { get; private set; } = null!; public void Dispose() { } diff --git a/src/SharpCompress/Readers/Rar/RarReader.cs b/src/SharpCompress/Readers/Rar/RarReader.cs index 741f7155f..df0063629 100644 --- a/src/SharpCompress/Readers/Rar/RarReader.cs +++ b/src/SharpCompress/Readers/Rar/RarReader.cs @@ -40,16 +40,16 @@ public override void Dispose() public override RarVolume? Volume => volume; - public static IReader OpenReader(string filePath, ReaderOptions? 
options = null) + public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null) { filePath.NotNullOrEmpty(nameof(filePath)); - return OpenReader(new FileInfo(filePath), options); + return OpenReader(new FileInfo(filePath), readerOptions); } - public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? options = null) + public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? readerOptions = null) { - options ??= new ReaderOptions { LeaveStreamOpen = false }; - return OpenReader(fileInfo.OpenRead(), options); + readerOptions ??= new ReaderOptions { LeaveStreamOpen = false }; + return OpenReader(fileInfo.OpenRead(), readerOptions); } public static IReader OpenReader(IEnumerable filePaths, ReaderOptions? options = null) @@ -67,12 +67,12 @@ public static IReader OpenReader(IEnumerable fileInfos, ReaderOptions? /// Opens a RarReader for Non-seeking usage with a single volume /// /// - /// + /// /// - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new SingleVolumeRarReader(stream, options ?? new ReaderOptions()); + return new SingleVolumeRarReader(stream, readerOptions ?? new ReaderOptions()); } /// diff --git a/src/SharpCompress/Readers/Tar/TarReader.Factory.cs b/src/SharpCompress/Readers/Tar/TarReader.Factory.cs index f537ecab9..2bb9d8496 100644 --- a/src/SharpCompress/Readers/Tar/TarReader.Factory.cs +++ b/src/SharpCompress/Readers/Tar/TarReader.Factory.cs @@ -82,16 +82,16 @@ public static ValueTask OpenAsyncReader( public static async ValueTask OpenAsyncReader( Stream stream, - ReaderOptions? options = null, + ReaderOptions? 
readerOptions = null, CancellationToken cancellationToken = default ) { cancellationToken.ThrowIfCancellationRequested(); stream.NotNull(nameof(stream)); - options ??= new ReaderOptions(); + readerOptions ??= new ReaderOptions(); var sharpCompressStream = SharpCompressStream.Create( stream, - bufferSize: options.RewindableBufferSize + bufferSize: readerOptions.RewindableBufferSize ); long pos = sharpCompressStream.Position; foreach (var wrapper in TarWrapper.Wrappers) @@ -110,8 +110,8 @@ public static async ValueTask OpenAsyncReader( var testStream = await CreateProbeDecompressionStreamAsync( sharpCompressStream, wrapper.CompressionType, - options.Providers, - options, + readerOptions.Providers, + readerOptions, cancellationToken ) .ConfigureAwait(false); @@ -120,7 +120,7 @@ await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(fa ) { sharpCompressStream.Position = pos; - return new TarReader(sharpCompressStream, options, wrapper.CompressionType); + return new TarReader(sharpCompressStream, readerOptions, wrapper.CompressionType); } if (wrapper.CompressionType != CompressionType.None) @@ -130,7 +130,7 @@ await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(fa } sharpCompressStream.Position = pos; - return new TarReader(sharpCompressStream, options, CompressionType.None); + return new TarReader(sharpCompressStream, readerOptions, CompressionType.None); } public static ValueTask OpenAsyncReader( @@ -160,15 +160,15 @@ public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? readerOptions /// Opens a TarReader for Non-seeking usage with a single volume /// /// - /// + /// /// - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? 
readerOptions = null) { stream.NotNull(nameof(stream)); - options ??= new ReaderOptions(); + readerOptions ??= new ReaderOptions(); var sharpCompressStream = SharpCompressStream.Create( stream, - bufferSize: options.RewindableBufferSize + bufferSize: readerOptions.RewindableBufferSize ); long pos = sharpCompressStream.Position; foreach (var wrapper in TarWrapper.Wrappers) @@ -183,13 +183,13 @@ public static IReader OpenReader(Stream stream, ReaderOptions? options = null) var testStream = CreateProbeDecompressionStream( sharpCompressStream, wrapper.CompressionType, - options.Providers, - options + readerOptions.Providers, + readerOptions ); if (TarArchive.IsTarFile(testStream)) { sharpCompressStream.Position = pos; - return new TarReader(sharpCompressStream, options, wrapper.CompressionType); + return new TarReader(sharpCompressStream, readerOptions, wrapper.CompressionType); } if (wrapper.CompressionType != CompressionType.None) @@ -199,6 +199,6 @@ public static IReader OpenReader(Stream stream, ReaderOptions? options = null) } sharpCompressStream.Position = pos; - return new TarReader(sharpCompressStream, options, CompressionType.None); + return new TarReader(sharpCompressStream, readerOptions, CompressionType.None); } } diff --git a/src/SharpCompress/Readers/Zip/ZipReader.cs b/src/SharpCompress/Readers/Zip/ZipReader.cs index ac1b4419b..91dff5bfe 100644 --- a/src/SharpCompress/Readers/Zip/ZipReader.cs +++ b/src/SharpCompress/Readers/Zip/ZipReader.cs @@ -43,12 +43,12 @@ private ZipReader(Stream stream, ReaderOptions options, IEnumerable en /// Opens a ZipReader for Non-seeking usage with a single volume /// /// - /// + /// /// - public static IReader OpenReader(Stream stream, ReaderOptions? options = null) + public static IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null) { stream.NotNull(nameof(stream)); - return new ZipReader(stream, options ?? new ReaderOptions()); + return new ZipReader(stream, readerOptions ?? 
new ReaderOptions()); } public static IReader OpenReader( diff --git a/src/SharpCompress/ThrowHelper.cs b/src/SharpCompress/ThrowHelper.cs new file mode 100644 index 000000000..9d598107f --- /dev/null +++ b/src/SharpCompress/ThrowHelper.cs @@ -0,0 +1,71 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; + +namespace SharpCompress; + +internal static class ThrowHelper +{ + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfNull([NotNull] object? argument, string? paramName = null) + { + if (argument is null) + { + throw new ArgumentNullException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfNegative(int value, string? paramName = null) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfNegative(long value, string? paramName = null) + { + if (value < 0) + { + throw new ArgumentOutOfRangeException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfNegativeOrZero(int value, string? paramName = null) + { + if (value <= 0) + { + throw new ArgumentOutOfRangeException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfLessThan(int value, int other, string? paramName = null) + { + if (value < other) + { + throw new ArgumentOutOfRangeException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfGreaterThan(int value, int other, string? paramName = null) + { + if (value > other) + { + throw new ArgumentOutOfRangeException(paramName); + } + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void ThrowIfGreaterThan(uint value, uint other, string? 
paramName = null) + { + if (value > other) + { + throw new ArgumentOutOfRangeException(paramName); + } + } +} diff --git a/src/SharpCompress/Utility.Async.cs b/src/SharpCompress/Utility.Async.cs index e8e6b0c3e..79df1f9cb 100644 --- a/src/SharpCompress/Utility.Async.cs +++ b/src/SharpCompress/Utility.Async.cs @@ -20,15 +20,16 @@ public async ValueTask ReadExactAsync( CancellationToken cancellationToken = default ) { +#if LEGACY_DOTNET if (source is null) { - throw new ArgumentNullException(nameof(source)); + throw new ArgumentNullException(); } +#else + ThrowHelper.ThrowIfNull(source); +#endif - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || offset > buffer.Length) { diff --git a/src/SharpCompress/Utility.cs b/src/SharpCompress/Utility.cs index de3a1ddcc..cdc78549c 100644 --- a/src/SharpCompress/Utility.cs +++ b/src/SharpCompress/Utility.cs @@ -247,15 +247,16 @@ public bool ReadFully(Span buffer) /// public void ReadExact(byte[] buffer, int offset, int length) { +#if LEGACY_DOTNET if (source is null) { - throw new ArgumentNullException(nameof(source)); + throw new ArgumentNullException(); } +#else + ThrowHelper.ThrowIfNull(source); +#endif - if (buffer is null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ThrowHelper.ThrowIfNull(buffer); if (offset < 0 || offset > buffer.Length) { diff --git a/src/SharpCompress/Writers/AbstractWriter.cs b/src/SharpCompress/Writers/AbstractWriter.cs index c653d73cd..60ddb1b16 100644 --- a/src/SharpCompress/Writers/AbstractWriter.cs +++ b/src/SharpCompress/Writers/AbstractWriter.cs @@ -8,7 +8,6 @@ namespace SharpCompress.Writers; -#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. 
public abstract partial class AbstractWriter(ArchiveType type, IWriterOptions writerOptions) : IWriter, IAsyncWriter @@ -19,8 +18,7 @@ public abstract partial class AbstractWriter(ArchiveType type, IWriterOptions wr protected void InitializeStream(Stream stream) => OutputStream = stream; - protected Stream OutputStream { get; private set; } -#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. + protected Stream? OutputStream { get; private set; } public ArchiveType WriterType { get; } = type; @@ -57,7 +55,7 @@ protected virtual void Dispose(bool isDisposing) { if (isDisposing) { - OutputStream.Dispose(); + OutputStream?.Dispose(); } } diff --git a/src/SharpCompress/Writers/GZip/GZipWriter.Async.cs b/src/SharpCompress/Writers/GZip/GZipWriter.Async.cs index 478373aa6..a1aa613ab 100644 --- a/src/SharpCompress/Writers/GZip/GZipWriter.Async.cs +++ b/src/SharpCompress/Writers/GZip/GZipWriter.Async.cs @@ -19,7 +19,7 @@ public override async ValueTask WriteAsync( { throw new ArgumentException("Can only write a single stream to a GZip file."); } - var stream = (GZipStream)OutputStream; + var stream = (GZipStream)OutputStream.NotNull(); stream.FileName = filename; stream.LastModified = modificationTime; var progressStream = WrapWithProgress(source, filename); diff --git a/src/SharpCompress/Writers/GZip/GZipWriter.Factory.cs b/src/SharpCompress/Writers/GZip/GZipWriter.Factory.cs index 715bdc22f..01e6edb56 100644 --- a/src/SharpCompress/Writers/GZip/GZipWriter.Factory.cs +++ b/src/SharpCompress/Writers/GZip/GZipWriter.Factory.cs @@ -24,9 +24,9 @@ public static IWriter OpenWriter(Stream stream, GZipWriterOptions writerOptions) return new GZipWriter(stream, writerOptions); } - public static IAsyncWriter OpenAsyncWriter(string path, GZipWriterOptions writerOptions) + public static IAsyncWriter OpenAsyncWriter(string stream, GZipWriterOptions writerOptions) { - return 
(IAsyncWriter)OpenWriter(path, writerOptions); + return (IAsyncWriter)OpenWriter(stream, writerOptions); } public static IAsyncWriter OpenAsyncWriter(Stream stream, GZipWriterOptions writerOptions) diff --git a/src/SharpCompress/Writers/GZip/GZipWriter.cs b/src/SharpCompress/Writers/GZip/GZipWriter.cs index 41e2ef1e2..eebac6164 100644 --- a/src/SharpCompress/Writers/GZip/GZipWriter.cs +++ b/src/SharpCompress/Writers/GZip/GZipWriter.cs @@ -43,7 +43,7 @@ protected override void Dispose(bool isDisposing) if (isDisposing) { //dispose here to finish the GZip, GZip won't close the underlying stream - OutputStream.Dispose(); + OutputStream.NotNull().Dispose(); } base.Dispose(isDisposing); } @@ -63,7 +63,7 @@ public override void Write(string filename, Stream source, DateTime? modificatio } var progressStream = WrapWithProgress(source, filename); - progressStream.CopyTo(OutputStream, Constants.BufferSize); + progressStream.CopyTo(OutputStream.NotNull(), Constants.BufferSize); _wroteToStream = true; } diff --git a/src/SharpCompress/Writers/Tar/TarWriter.Async.cs b/src/SharpCompress/Writers/Tar/TarWriter.Async.cs index db5e00d4e..c3cc7bff9 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.Async.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.Async.cs @@ -29,7 +29,7 @@ public override async ValueTask WriteDirectoryAsync( header.Name = normalizedName; header.Size = 0; header.EntryType = EntryType.Directory; - await header.WriteAsync(OutputStream, cancellationToken).ConfigureAwait(false); + await header.WriteAsync(OutputStream.NotNull(), cancellationToken).ConfigureAwait(false); } /// @@ -67,10 +67,10 @@ public async ValueTask WriteAsync( header.LastModifiedTime = modificationTime ?? 
TarHeader.EPOCH; header.Name = NormalizeFilename(filename); header.Size = realSize; - await header.WriteAsync(OutputStream, cancellationToken).ConfigureAwait(false); + await header.WriteAsync(OutputStream.NotNull(), cancellationToken).ConfigureAwait(false); var progressStream = WrapWithProgress(source, filename); var written = await progressStream - .TransferToAsync(OutputStream, realSize, cancellationToken) + .TransferToAsync(OutputStream.NotNull(), realSize, cancellationToken) .ConfigureAwait(false); await PadTo512Async(written, cancellationToken).ConfigureAwait(false); } @@ -81,6 +81,7 @@ private async ValueTask PadTo512Async(long size, CancellationToken cancellationT if (zeros > 0) { await OutputStream + .NotNull() .WriteAsync(new byte[zeros], 0, zeros, cancellationToken) .ConfigureAwait(false); } diff --git a/src/SharpCompress/Writers/Tar/TarWriter.Factory.cs b/src/SharpCompress/Writers/Tar/TarWriter.Factory.cs index d70774780..a3c6bf6f4 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.Factory.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.Factory.cs @@ -24,9 +24,9 @@ public static IWriter OpenWriter(Stream stream, TarWriterOptions writerOptions) return new TarWriter(stream, writerOptions); } - public static IAsyncWriter OpenAsyncWriter(string path, TarWriterOptions writerOptions) + public static IAsyncWriter OpenAsyncWriter(string stream, TarWriterOptions writerOptions) { - return (IAsyncWriter)OpenWriter(path, writerOptions); + return (IAsyncWriter)OpenWriter(stream, writerOptions); } public static IAsyncWriter OpenAsyncWriter(Stream stream, TarWriterOptions writerOptions) diff --git a/src/SharpCompress/Writers/Tar/TarWriter.cs b/src/SharpCompress/Writers/Tar/TarWriter.cs index 01a3685f1..127759034 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.cs @@ -65,7 +65,11 @@ private string NormalizeFilename(string filename) { filename = filename.Replace('\\', '/'); +#if LEGACY_DOTNET var pos = 
filename.IndexOf(':'); +#else + var pos = filename.IndexOf(':', StringComparison.Ordinal); +#endif if (pos >= 0) { filename = filename.Remove(0, pos + 1); @@ -98,7 +102,7 @@ public override void WriteDirectory(string directoryName, DateTime? modification header.Name = normalizedName; header.Size = 0; header.EntryType = EntryType.Directory; - header.Write(OutputStream); + header.Write(OutputStream.NotNull()); } public void Write(string filename, Stream source, DateTime? modificationTime, long? size) @@ -115,9 +119,9 @@ public void Write(string filename, Stream source, DateTime? modificationTime, lo header.LastModifiedTime = modificationTime ?? TarHeader.EPOCH; header.Name = NormalizeFilename(filename); header.Size = realSize; - header.Write(OutputStream); + header.Write(OutputStream.NotNull()); var progressStream = WrapWithProgress(source, filename); - size = progressStream.TransferTo(OutputStream, realSize); + size = progressStream.TransferTo(OutputStream.NotNull(), realSize); PadTo512(size.Value); } @@ -125,7 +129,7 @@ private void PadTo512(long size) { var zeros = unchecked((int)(((size + 511L) & ~511L) - size)); - OutputStream.Write(stackalloc byte[zeros]); + OutputStream.NotNull().Write(stackalloc byte[zeros]); } protected override void Dispose(bool isDisposing) @@ -134,7 +138,7 @@ protected override void Dispose(bool isDisposing) { if (finalizeArchiveOnClose) { - OutputStream.Write(stackalloc byte[1024]); + OutputStream.NotNull().Write(stackalloc byte[1024]); } // Use IFinishable interface for generic finalization if (OutputStream is IFinishable finishable) diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.Async.cs b/src/SharpCompress/Writers/Zip/ZipWriter.Async.cs index 53c1af452..8c53f8fa8 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriter.Async.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriter.Async.cs @@ -14,7 +14,7 @@ public partial class ZipWriter /// Asynchronously writes an entry to the ZIP archive. 
/// public override async ValueTask WriteAsync( - string entryPath, + string filename, Stream source, DateTime? modificationTime, CancellationToken cancellationToken = default @@ -22,7 +22,7 @@ public override async ValueTask WriteAsync( { cancellationToken.ThrowIfCancellationRequested(); await WriteAsync( - entryPath, + filename, source, new ZipWriterEntryOptions { ModificationDateTime = modificationTime }, cancellationToken diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.Factory.cs b/src/SharpCompress/Writers/Zip/ZipWriter.Factory.cs index c4083aeae..d21ab7e2b 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriter.Factory.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriter.Factory.cs @@ -24,9 +24,9 @@ public static IWriter OpenWriter(Stream stream, ZipWriterOptions writerOptions) return new ZipWriter(stream, writerOptions); } - public static IAsyncWriter OpenAsyncWriter(string path, ZipWriterOptions writerOptions) + public static IAsyncWriter OpenAsyncWriter(string stream, ZipWriterOptions writerOptions) { - return (IAsyncWriter)OpenWriter(path, writerOptions); + return (IAsyncWriter)OpenWriter(stream, writerOptions); } public static IAsyncWriter OpenAsyncWriter(Stream stream, ZipWriterOptions writerOptions) diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.cs b/src/SharpCompress/Writers/Zip/ZipWriter.cs index 6ab572c2c..26ee920d2 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriter.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriter.cs @@ -59,7 +59,7 @@ protected override void Dispose(bool isDisposing) ulong size = 0; foreach (var entry in entries) { - size += entry.Write(OutputStream); + size += entry.Write(OutputStream.NotNull()); } WriteEndRecord(size); } @@ -78,9 +78,9 @@ private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compr _ => throw new InvalidFormatException("Invalid compression method: " + compressionType), }; - public override void Write(string entryPath, Stream source, DateTime? 
modificationTime) => + public override void Write(string filename, Stream source, DateTime? modificationTime) => Write( - entryPath, + filename, source, new ZipWriterEntryOptions() { ModificationDateTime = modificationTime } ); @@ -133,7 +133,11 @@ private string NormalizeFilename(string filename) { filename = filename.Replace('\\', '/'); +#if LEGACY_DOTNET var pos = filename.IndexOf(':'); +#else + var pos = filename.IndexOf(':', StringComparison.Ordinal); +#endif if (pos >= 0) { filename = filename.Remove(0, pos + 1); @@ -208,7 +212,7 @@ bool useZip64 ) { // We err on the side of caution until the zip specification clarifies how to support this - if (!OutputStream.CanSeek && useZip64) + if (!OutputStream.NotNull().CanSeek && useZip64) { throw new NotSupportedException( "Zip64 extensions are not supported on non-seekable streams" @@ -222,26 +226,26 @@ bool useZip64 Span intBuf = stackalloc byte[4]; BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES); - OutputStream.Write(intBuf); + OutputStream.NotNull().Write(intBuf); if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate) { - if (OutputStream.CanSeek && useZip64) + if (OutputStream.NotNull().CanSeek && useZip64) { - OutputStream.Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64 + OutputStream.NotNull().Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64 } else { - OutputStream.Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible + OutputStream.NotNull().Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible } } else { - OutputStream.Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA + OutputStream.NotNull().Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA } var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8) ? 
HeaderFlags.Efs : 0; - if (!OutputStream.CanSeek) + if (!OutputStream.NotNull().CanSeek) { flags |= HeaderFlags.UsePostDataDescriptor; @@ -252,35 +256,35 @@ bool useZip64 } BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags); - OutputStream.Write(intBuf.Slice(0, 2)); + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo); - OutputStream.Write(intBuf.Slice(0, 2)); // zipping method + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); // zipping method BinaryPrimitives.WriteUInt32LittleEndian( intBuf, zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime() ); - OutputStream.Write(intBuf); + OutputStream.NotNull().Write(intBuf); // zipping date and time - OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }); + OutputStream.NotNull().Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }); // unused CRC, un/compressed size, updated later BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length); - OutputStream.Write(intBuf.Slice(0, 2)); // filename length + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); // filename length var extralength = 0; - if (OutputStream.CanSeek && useZip64) + if (OutputStream.NotNull().CanSeek && useZip64) { extralength = 2 + 2 + 8 + 8; } BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength); - OutputStream.Write(intBuf.Slice(0, 2)); // extra length - OutputStream.Write(encodedFilename, 0, encodedFilename.Length); + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); // extra length + OutputStream.NotNull().Write(encodedFilename, 0, encodedFilename.Length); if (extralength != 0) { - OutputStream.Write(new byte[extralength], 0, extralength); // reserve space for zip64 data + OutputStream.NotNull().Write(new byte[extralength], 0, extralength); // reserve space for zip64 data entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length); } @@ -291,11 
+295,11 @@ private void WriteFooter(uint crc, uint compressed, uint uncompressed) { Span intBuf = stackalloc byte[4]; BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc); - OutputStream.Write(intBuf); + OutputStream.NotNull().Write(intBuf); BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed); - OutputStream.Write(intBuf); + OutputStream.NotNull().Write(intBuf); BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed); - OutputStream.Write(intBuf); + OutputStream.NotNull().Write(intBuf); } private void WriteEndRecord(ulong size) @@ -315,57 +319,57 @@ private void WriteEndRecord(ulong size) var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8; // Write zip64 end of central directory record - OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 }); + OutputStream.NotNull().Write(stackalloc byte[] { 80, 75, 6, 6 }); BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen); - OutputStream.Write(intBuf); // Size of zip64 end of central directory record + OutputStream.NotNull().Write(intBuf); // Size of zip64 end of central directory record BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45); - OutputStream.Write(intBuf.Slice(0, 2)); // Made by + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); // Made by BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45); - OutputStream.Write(intBuf.Slice(0, 2)); // Version needed + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); // Version needed BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0); - OutputStream.Write(intBuf.Slice(0, 4)); // Disk number - OutputStream.Write(intBuf.Slice(0, 4)); // Central dir disk + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); // Disk number + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); // Central dir disk // TODO: entries.Count is int, so max 2^31 files BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count); - OutputStream.Write(intBuf); // Entries in this disk - OutputStream.Write(intBuf); // Total entries + OutputStream.NotNull().Write(intBuf); // 
Entries in this disk + OutputStream.NotNull().Write(intBuf); // Total entries BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size); - OutputStream.Write(intBuf); // Central Directory size + OutputStream.NotNull().Write(intBuf); // Central Directory size BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition); - OutputStream.Write(intBuf); // Disk offset + OutputStream.NotNull().Write(intBuf); // Disk offset // Write zip64 end of central directory locator - OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 }); + OutputStream.NotNull().Write(stackalloc byte[] { 80, 75, 6, 7 }); BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0); - OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); // Entry disk BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size); - OutputStream.Write(intBuf); // Offset to the zip64 central directory + OutputStream.NotNull().Write(intBuf); // Offset to the zip64 central directory BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1); - OutputStream.Write(intBuf.Slice(0, 4)); // Number of disks + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); // Number of disks streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4); } // Write normal end of central directory record - OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }); + OutputStream.NotNull().Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }); BinaryPrimitives.WriteUInt16LittleEndian( intBuf, (ushort)(entries.Count < 0xFFFF ? 
entries.Count : 0xFFFF) ); - OutputStream.Write(intBuf.Slice(0, 2)); - OutputStream.Write(intBuf.Slice(0, 2)); + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue); - OutputStream.Write(intBuf.Slice(0, 4)); + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue); - OutputStream.Write(intBuf.Slice(0, 4)); + OutputStream.NotNull().Write(intBuf.Slice(0, 4)); var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment); BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length); - OutputStream.Write(intBuf.Slice(0, 2)); - OutputStream.Write(encodedComment, 0, encodedComment.Length); + OutputStream.NotNull().Write(intBuf.Slice(0, 2)); + OutputStream.NotNull().Write(encodedComment, 0, encodedComment.Length); } #region Nested type: ZipWritingStream diff --git a/src/SharpCompress/Writers/Zip/ZipWriterEntryOptions.cs b/src/SharpCompress/Writers/Zip/ZipWriterEntryOptions.cs index d51f2bd13..ef601c218 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriterEntryOptions.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriterEntryOptions.cs @@ -44,26 +44,6 @@ public int? CompressionLevel } } - /// - /// When CompressionType.Deflate is used, this property is referenced. - /// Valid range: 0-9 (0=no compression, 6=default, 9=best compression). - /// When null, uses the archive's default compression level. - /// - /// - /// This property is deprecated. Use instead. - /// - [Obsolete( - "Use CompressionLevel property instead. This property will be removed in a future version." - )] - public CompressionLevel? DeflateCompressionLevel - { - get => - CompressionLevel.HasValue - ? (CompressionLevel)Math.Min(CompressionLevel.Value, 9) - : null; - set => CompressionLevel = value.HasValue ? (int)value.Value : null; - } - public string? EntryComment { get; set; } public DateTime? 
ModificationDateTime { get; set; } diff --git a/tests/SharpCompress.Performance/LargeMemoryStream.cs b/tests/SharpCompress.Performance/LargeMemoryStream.cs index 6822e5d01..f8922f9f0 100644 --- a/tests/SharpCompress.Performance/LargeMemoryStream.cs +++ b/tests/SharpCompress.Performance/LargeMemoryStream.cs @@ -85,14 +85,11 @@ public override void Flush() public override int Read(byte[] buffer, int offset, int count) { ThrowIfDisposed(); - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ArgumentNullException.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } long length = Length; @@ -130,14 +127,11 @@ public override int Read(byte[] buffer, int offset, int count) public override void Write(byte[] buffer, int offset, int count) { ThrowIfDisposed(); - if (buffer == null) - { - throw new ArgumentNullException(nameof(buffer)); - } + ArgumentNullException.ThrowIfNull(buffer); if (offset < 0 || count < 0 || offset + count > buffer.Length) { - throw new ArgumentOutOfRangeException(); + throw new ArgumentOutOfRangeException(nameof(offset)); } int bytesWritten = 0; @@ -292,10 +286,7 @@ public byte[] ToArray() private void ThrowIfDisposed() { - if (_isDisposed) - { - throw new ObjectDisposedException(GetType().Name); - } + ObjectDisposedException.ThrowIf(_isDisposed, this); } protected override void Dispose(bool disposing) diff --git a/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs b/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs index 620f307c2..d5ec94c4e 100644 --- a/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs +++ b/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs @@ -16,6 +16,6 @@ public void BZip2_Reader_Factory() Stream stream = new MemoryStream( new byte[] { 0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35 } ); - Assert.Throws(typeof(InvalidOperationException), () => 
ReaderFactory.OpenReader(stream)); + Assert.Throws(() => ReaderFactory.OpenReader(stream)); } } diff --git a/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs index 60e115c08..993e16696 100644 --- a/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs +++ b/tests/SharpCompress.Test/GZip/GZipWriterAsyncTests.cs @@ -23,7 +23,7 @@ public async ValueTask GZip_Writer_Generic_Async() FileAccess.Write ) ) - using ( + await using ( var writer = WriterFactory.OpenAsyncWriter( new AsyncOnlyStream(stream), ArchiveType.GZip, @@ -81,7 +81,7 @@ public async ValueTask GZip_Writer_Entry_Path_With_Dir_Async() FileAccess.Write ) ) - using (var writer = new GZipWriter(new AsyncOnlyStream(stream))) + await using (var writer = new GZipWriter(new AsyncOnlyStream(stream))) { var path = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"); await writer.WriteAsync(path, path); diff --git a/tests/SharpCompress.Test/MarkingBinaryReaderParityTests.cs b/tests/SharpCompress.Test/MarkingBinaryReaderParityTests.cs index 5dfeb5b8e..ff4646b7b 100644 --- a/tests/SharpCompress.Test/MarkingBinaryReaderParityTests.cs +++ b/tests/SharpCompress.Test/MarkingBinaryReaderParityTests.cs @@ -26,7 +26,7 @@ public MarkingBinaryReaderParityTests() public void Mark_Resets_ByteCount() { using var stream = new MemoryStream(_testData); - var reader = new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.ReadBytes(10); Assert.Equal(10, reader.CurrentReadByteCount); @@ -58,7 +58,7 @@ public async Task Mark_Resets_ByteCount_Async() public void ReadByte_Updates_ByteCount() { using var stream = new MemoryStream(_testData); - var reader = new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.Mark(); reader.ReadByte(); @@ -86,7 +86,7 @@ public async Task ReadByte_Updates_ByteCount_Async() public void ReadBytes_Updates_ByteCount() { using var stream = new MemoryStream(_testData); - var reader = 
new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.Mark(); reader.ReadBytes(16); @@ -114,7 +114,7 @@ public async Task ReadBytes_Updates_ByteCount_Async() public void ReadUInt16_Updates_ByteCount() { using var stream = new MemoryStream(_testData); - var reader = new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.Mark(); reader.ReadUInt16(); @@ -136,7 +136,7 @@ public async Task ReadUInt16_Updates_ByteCount_Async() public void ReadUInt32_Updates_ByteCount() { using var stream = new MemoryStream(_testData); - var reader = new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.Mark(); reader.ReadUInt32(); @@ -160,7 +160,7 @@ public void ReadRarVInt_Updates_ByteCount() // Create valid RAR v-int data: 0x05 (value 5, no continuation bit) var data = new byte[] { 0x05, 0x85, 0x01, 0x00 }; // 0x05, then 0x85 0x01 (value 5 + 128 = 133) using var stream = new MemoryStream(data); - var reader = new MarkingBinaryReader(stream); + using var reader = new MarkingBinaryReader(stream); reader.Mark(); // Read a single-byte v-int (value 5, no continuation bit) @@ -197,7 +197,7 @@ public async Task Sync_Async_ByteCount_Parity() { using var syncStream = new MemoryStream(_testData); using var asyncStream = new MemoryStream(_testData); - var syncReader = new MarkingBinaryReader(syncStream); + using var syncReader = new MarkingBinaryReader(syncStream); var asyncReader = new AsyncMarkingBinaryReader(asyncStream); syncReader.Mark(); @@ -229,7 +229,7 @@ public async Task Sync_Async_ByteCount_Parity_Alt() { using var syncStream = new MemoryStream(_testData); using var asyncStream = new MemoryStream(_testData); - var syncReader = new MarkingBinaryReader(syncStream); + using var syncReader = new MarkingBinaryReader(syncStream); var asyncReader = new AsyncMarkingBinaryReader(asyncStream); syncReader.Mark(); diff --git a/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs 
b/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs index bed63133e..bba174d6f 100644 --- a/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs @@ -4,7 +4,7 @@ using SharpCompress.Archives; using SharpCompress.Archives.Rar; using SharpCompress.Common; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.Readers; using SharpCompress.Test.Mocks; using Xunit; diff --git a/tests/SharpCompress.Test/Rar/RarArchiveTests.cs b/tests/SharpCompress.Test/Rar/RarArchiveTests.cs index 72029d56d..f2bdcb00a 100644 --- a/tests/SharpCompress.Test/Rar/RarArchiveTests.cs +++ b/tests/SharpCompress.Test/Rar/RarArchiveTests.cs @@ -4,7 +4,7 @@ using SharpCompress.Archives; using SharpCompress.Archives.Rar; using SharpCompress.Common; -using SharpCompress.Compressors.LZMA.Utilites; +using SharpCompress.Compressors.LZMA.Utilities; using SharpCompress.Readers; using SharpCompress.Test.Mocks; using Xunit; @@ -19,9 +19,8 @@ public void Rar_EncryptedFileAndHeader_Archive() => [Fact] public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() => - Assert.Throws( - typeof(CryptographicException), - () => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null) + Assert.Throws(() => + ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null) ); [Fact] @@ -30,16 +29,14 @@ public void Rar5_EncryptedFileAndHeader_Archive() => [Fact] public void Rar5_EncryptedFileAndHeader_Archive_Err() => - Assert.Throws( - typeof(CryptographicException), - () => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed") + Assert.Throws(() => + ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed") ); [Fact] public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() => - Assert.Throws( - typeof(CryptographicException), - () => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null) + Assert.Throws(() => + ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null) 
); [Fact] @@ -48,9 +45,8 @@ public void Rar_EncryptedFileOnly_Archive() => [Fact] public void Rar_EncryptedFileOnly_Archive_Err() => - Assert.Throws( - typeof(CryptographicException), - () => ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed") + Assert.Throws(() => + ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed") ); [Fact] diff --git a/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs b/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs index eb25b76cb..f74f2be9c 100644 --- a/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Rar/RarReaderAsyncTests.cs @@ -212,7 +212,9 @@ private async ValueTask DoRar_Entry_Stream_Async(string filename) var file = Path.GetFileName(reader.Entry.Key).NotNull(); var folder = Path.GetDirectoryName(reader.Entry.Key) - ?? throw new ArgumentNullException(); + ?? throw new InvalidOperationException( + "Entry key must have a directory name." + ); var destdir = Path.Combine(SCRATCH_FILES_PATH, folder); if (!Directory.Exists(destdir)) { diff --git a/tests/SharpCompress.Test/Rar/RarReaderTests.cs b/tests/SharpCompress.Test/Rar/RarReaderTests.cs index beb3ad0e1..008db7fd1 100644 --- a/tests/SharpCompress.Test/Rar/RarReaderTests.cs +++ b/tests/SharpCompress.Test/Rar/RarReaderTests.cs @@ -195,7 +195,9 @@ private void DoRar_Entry_Stream(string filename) var file = Path.GetFileName(reader.Entry.Key).NotNull(); var folder = Path.GetDirectoryName(reader.Entry.Key) - ?? throw new ArgumentNullException(); + ?? throw new InvalidOperationException( + "Entry key must have a directory name." 
+ ); var destdir = Path.Combine(SCRATCH_FILES_PATH, folder); if (!Directory.Exists(destdir)) { diff --git a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs index 0b77c0400..6f5b6b84f 100644 --- a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs +++ b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs @@ -35,9 +35,8 @@ public void SevenZipArchive_LZMAAES_PathRead() => [Fact] public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest() => - Assert.Throws( - typeof(CryptographicException), - () => ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null }) + Assert.Throws<CryptographicException>(() => + ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null }) ); //was failing with ArgumentNullException not CryptographicException like rar [Fact] diff --git a/tests/SharpCompress.Test/Streams/LzwStreamAsyncTests.cs b/tests/SharpCompress.Test/Streams/LzwStreamAsyncTests.cs index 25edafb0a..e91089cde 100644 --- a/tests/SharpCompress.Test/Streams/LzwStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Streams/LzwStreamAsyncTests.cs @@ -110,7 +110,7 @@ public async Task LzwStream_ReadAsync_EmptyBuffer() using var stream = File.OpenRead(testArchive); using var lzwStream = new LzwStream(stream); - var buffer = new byte[0]; + var buffer = Array.Empty<byte>(); int bytesRead = await lzwStream.ReadAsync(buffer, 0, 0).ConfigureAwait(false); Assert.Equal(0, bytesRead); diff --git a/tests/SharpCompress.Test/Streams/SharpCompressStreamErrorAsyncTest.cs b/tests/SharpCompress.Test/Streams/SharpCompressStreamErrorAsyncTest.cs index ebd225c05..8d59ddaa1 100644 --- a/tests/SharpCompress.Test/Streams/SharpCompressStreamErrorAsyncTest.cs +++ b/tests/SharpCompress.Test/Streams/SharpCompressStreamErrorAsyncTest.cs @@ -40,7 +40,10 @@ public override void Write(byte[] buffer, int offset, int count) => protected override void Dispose(bool disposing) { if (disposing) + { _baseStream.Dispose(); + } + 
base.Dispose(disposing); } } diff --git a/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekAsyncTest.cs b/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekAsyncTest.cs index 9d17febf9..3a93960f1 100644 --- a/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekAsyncTest.cs +++ b/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekAsyncTest.cs @@ -40,7 +40,10 @@ public override void Write(byte[] buffer, int offset, int count) => protected override void Dispose(bool disposing) { if (disposing) + { _baseStream.Dispose(); + } + base.Dispose(disposing); } } diff --git a/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekTest.cs b/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekTest.cs index b2313489d..05d3dc21d 100644 --- a/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekTest.cs +++ b/tests/SharpCompress.Test/Streams/SharpCompressStreamSeekTest.cs @@ -43,7 +43,10 @@ public override void Write(byte[] buffer, int offset, int count) => protected override void Dispose(bool disposing) { if (disposing) + { _baseStream.Dispose(); + } + base.Dispose(disposing); } } diff --git a/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs index 194a07d8c..2a085408d 100644 --- a/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs +++ b/tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs @@ -69,10 +69,10 @@ await archive2.EntriesAsync.Select(entry => entry.Key).ToListAsync() await foreach (var entry in archive2.EntriesAsync) { - Assert.Equal( - "dummy filecontent", - await new StreamReader(await entry.OpenEntryStreamAsync()).ReadLineAsync() - ); + using (var sr = new StreamReader(await entry.OpenEntryStreamAsync())) + { + Assert.Equal("dummy filecontent", await sr.ReadLineAsync()); + } } } } @@ -127,10 +127,10 @@ await archive2.EntriesAsync.Select(entry => entry.Key).ToListAsync() await foreach (var entry in archive2.EntriesAsync) { - Assert.Equal( - "dummy filecontent", - await new 
StreamReader(await entry.OpenEntryStreamAsync()).ReadLineAsync() - ); + using (var sr = new StreamReader(await entry.OpenEntryStreamAsync())) + { + Assert.Equal("dummy filecontent", await sr.ReadLineAsync()); + } } } #if LEGACY_DOTNET diff --git a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs index 488e39148..008bb1450 100644 --- a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs +++ b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs @@ -71,10 +71,10 @@ public void Tar_FileName_Exactly_100_Characters() foreach (var entry in archive2.Entries) { - Assert.Equal( - "dummy filecontent", - new StreamReader(entry.OpenEntryStream()).ReadLine() - ); + using (var sr = new StreamReader(entry.OpenEntryStream())) + { + Assert.Equal("dummy filecontent", sr.ReadLine()); + } } } } @@ -137,10 +137,10 @@ public void Tar_VeryLongFilepathReadback() foreach (var entry in archive2.Entries) { - Assert.Equal( - "dummy filecontent", - new StreamReader(entry.OpenEntryStream()).ReadLine() - ); + using (var sr = new StreamReader(entry.OpenEntryStream())) + { + Assert.Equal("dummy filecontent", sr.ReadLine()); + } } } } diff --git a/tests/SharpCompress.Test/UtilityTests.cs b/tests/SharpCompress.Test/UtilityTests.cs index c0455d740..ea3e902ca 100644 --- a/tests/SharpCompress.Test/UtilityTests.cs +++ b/tests/SharpCompress.Test/UtilityTests.cs @@ -101,7 +101,7 @@ public void ReadFully_ByteArray_EmptyBuffer_ReturnsTrue() { var data = new byte[] { 1, 2, 3 }; using var stream = new MemoryStream(data); - var buffer = new byte[0]; + var buffer = Array.Empty<byte>(); var result = stream.ReadFully(buffer); @@ -149,7 +149,7 @@ public void ReadFully_Span_EmptyBuffer_ReturnsTrue() { var data = new byte[] { 1, 2, 3 }; using var stream = new MemoryStream(data); - Span<byte> buffer = new byte[0]; + Span<byte> buffer = Array.Empty<byte>(); var result = stream.ReadFully(buffer); diff --git a/tests/SharpCompress.Test/WriterTests.cs b/tests/SharpCompress.Test/WriterTests.cs index 
4b66c0d9c..2e4417359 100644 --- a/tests/SharpCompress.Test/WriterTests.cs +++ b/tests/SharpCompress.Test/WriterTests.cs @@ -70,7 +70,12 @@ protected async Task WriteAsync( writerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default; - using var writer = WriterFactory.OpenAsyncWriter(stream, _type, writerOptions); + using var writer = WriterFactory.OpenAsyncWriter( + stream, + _type, + writerOptions, + cancellationToken + ); await writer.WriteAllAsync( ORIGINAL_FILES_PATH, "*", diff --git a/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs index bff4d7c34..6460ed53b 100644 --- a/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs @@ -30,7 +30,7 @@ public async ValueTask OnFindIndexBlockThrowAsync() { var bytes = new byte[] { 0 }; using Stream indexBlockStream = new MemoryStream(bytes); - var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8); await Assert.ThrowsAsync(async () => { await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false); @@ -44,7 +44,7 @@ public async ValueTask CrcIncorrectThrowsAsync() bytes[20]++; using Stream badCrcStream = new MemoryStream(bytes); Rewind(badCrcStream); - var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8); var ex = await Assert.ThrowsAsync(async () => { await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false); @@ -55,7 +55,7 @@ public async ValueTask CrcIncorrectThrowsAsync() [Fact] public async ValueTask CanReadMAsync() { - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal( Encoding.ASCII.GetBytes("M"), await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false) @@ -65,7 +65,7 @@ await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false) [Fact] public async ValueTask 
CanReadMaryAsync() { - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal( Encoding.ASCII.GetBytes("M"), await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false) @@ -83,8 +83,8 @@ await ReadBytesAsync(xzBlock, 2).ConfigureAwait(false) [Fact] public async ValueTask CanReadPoemWithStreamReaderAsync() { - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); Assert.Equal(await sr.ReadToEndAsync().ConfigureAwait(false), Original); } @@ -92,8 +92,8 @@ public async ValueTask CanReadPoemWithStreamReaderAsync() public async ValueTask NoopWhenNoPaddingAsync() { // CompressedStream's only block has no padding. - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(0L, CompressedStream.Position % 4L); } @@ -102,8 +102,8 @@ public async ValueTask NoopWhenNoPaddingAsync() public async ValueTask SkipsPaddingWhenPresentAsync() { // CompressedIndexedStream's first block has 1-byte padding. - var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(0L, CompressedIndexedStream.Position % 4L); } @@ -117,8 +117,8 @@ public async ValueTask HandlesPaddingInUnalignedBlockAsync() compressedUnalignedStream.Position = 13; // Compressed's only block has no padding. 
- var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(1L, compressedUnalignedStream.Position % 4L); } diff --git a/tests/SharpCompress.Test/Xz/XZBlockTests.cs b/tests/SharpCompress.Test/Xz/XZBlockTests.cs index 44245075b..3b06008e2 100644 --- a/tests/SharpCompress.Test/Xz/XZBlockTests.cs +++ b/tests/SharpCompress.Test/Xz/XZBlockTests.cs @@ -29,7 +29,7 @@ public void OnFindIndexBlockThrow() { var bytes = new byte[] { 0 }; using Stream indexBlockStream = new MemoryStream(bytes); - var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8); Assert.Throws(() => { ReadBytes(xzBlock, 1); @@ -43,7 +43,7 @@ public void CrcIncorrectThrows() bytes[20]++; using Stream badCrcStream = new MemoryStream(bytes); Rewind(badCrcStream); - var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8); var ex = Assert.Throws(() => { ReadBytes(xzBlock, 1); @@ -54,14 +54,14 @@ public void CrcIncorrectThrows() [Fact] public void CanReadM() { - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(xzBlock, 1)); } [Fact] public void CanReadMary() { - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(xzBlock, 1)); Assert.Equal(Encoding.ASCII.GetBytes("a"), ReadBytes(xzBlock, 1)); Assert.Equal(Encoding.ASCII.GetBytes("ry"), ReadBytes(xzBlock, 2)); @@ -71,7 +71,7 @@ public void CanReadMary() public void CanReadPoemWithStreamReader() { 
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var sr = new StreamReader(xzBlock); Assert.Equal(sr.ReadToEnd(), Original); } @@ -79,8 +79,8 @@ public void CanReadPoemWithStreamReader() public void NoopWhenNoPadding() { // CompressedStream's only block has no padding. - var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); sr.ReadToEnd(); Assert.Equal(0L, CompressedStream.Position % 4L); } @@ -89,8 +89,8 @@ public void NoopWhenNoPadding() public void SkipsPaddingWhenPresent() { // CompressedIndexedStream's first block has 1-byte padding. - var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); sr.ReadToEnd(); Assert.Equal(0L, CompressedIndexedStream.Position % 4L); } @@ -104,8 +104,8 @@ public void HandlesPaddingInUnalignedBlock() compressedUnalignedStream.Position = 13; // Compressed's only block has no padding. 
- var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8); - var sr = new StreamReader(xzBlock); + using var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8); + using var sr = new StreamReader(xzBlock); sr.ReadToEnd(); Assert.Equal(1L, compressedUnalignedStream.Position % 4L); } diff --git a/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs index 74fb94c00..61296b912 100644 --- a/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs @@ -14,7 +14,7 @@ public async ValueTask ChecksMagicNumberAsync() var bytes = (byte[])Compressed.Clone(); bytes[3]++; using Stream badMagicNumberStream = new MemoryStream(bytes); - var br = new BinaryReader(badMagicNumberStream); + using var br = new BinaryReader(badMagicNumberStream); var header = new XZHeader(br); var ex = await Assert.ThrowsAsync(async () => { @@ -29,7 +29,7 @@ public async ValueTask CorruptHeaderThrowsAsync() var bytes = (byte[])Compressed.Clone(); bytes[8]++; using Stream badCrcStream = new MemoryStream(bytes); - var br = new BinaryReader(badCrcStream); + using var br = new BinaryReader(badCrcStream); var header = new XZHeader(br); var ex = await Assert.ThrowsAsync(async () => { @@ -47,7 +47,7 @@ public async ValueTask BadVersionIfCrcOkButStreamFlagUnknownAsync() streamFlags.CopyTo(bytes, 6); crc.CopyTo(bytes, 8); using Stream badFlagStream = new MemoryStream(bytes); - var br = new BinaryReader(badFlagStream); + using var br = new BinaryReader(badFlagStream); var header = new XZHeader(br); var ex = await Assert.ThrowsAsync(async () => { @@ -59,7 +59,7 @@ public async ValueTask BadVersionIfCrcOkButStreamFlagUnknownAsync() [Fact] public async ValueTask ProcessesBlockCheckTypeAsync() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); await header.ProcessAsync().ConfigureAwait(false); 
Assert.Equal(CheckType.CRC64, header.BlockCheckType); @@ -68,7 +68,7 @@ public async ValueTask ProcessesBlockCheckTypeAsync() [Fact] public async ValueTask CanCalculateBlockCheckSizeAsync() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); await header.ProcessAsync().ConfigureAwait(false); Assert.Equal(8, header.BlockCheckSize); diff --git a/tests/SharpCompress.Test/Xz/XZHeaderTests.cs b/tests/SharpCompress.Test/Xz/XZHeaderTests.cs index c8f3ac5e6..ab9bd3150 100644 --- a/tests/SharpCompress.Test/Xz/XZHeaderTests.cs +++ b/tests/SharpCompress.Test/Xz/XZHeaderTests.cs @@ -13,7 +13,7 @@ public void ChecksMagicNumber() var bytes = (byte[])Compressed.Clone(); bytes[3]++; using Stream badMagicNumberStream = new MemoryStream(bytes); - var br = new BinaryReader(badMagicNumberStream); + using var br = new BinaryReader(badMagicNumberStream); var header = new XZHeader(br); var ex = Assert.Throws(() => { @@ -28,7 +28,7 @@ public void CorruptHeaderThrows() var bytes = (byte[])Compressed.Clone(); bytes[8]++; using Stream badCrcStream = new MemoryStream(bytes); - var br = new BinaryReader(badCrcStream); + using var br = new BinaryReader(badCrcStream); var header = new XZHeader(br); var ex = Assert.Throws(() => { @@ -46,7 +46,7 @@ public void BadVersionIfCrcOkButStreamFlagUnknown() streamFlags.CopyTo(bytes, 6); crc.CopyTo(bytes, 8); using Stream badFlagStream = new MemoryStream(bytes); - var br = new BinaryReader(badFlagStream); + using var br = new BinaryReader(badFlagStream); var header = new XZHeader(br); var ex = Assert.Throws(() => { @@ -58,7 +58,7 @@ public void BadVersionIfCrcOkButStreamFlagUnknown() [Fact] public void ProcessesBlockCheckType() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); header.Process(); Assert.Equal(CheckType.CRC64, header.BlockCheckType); @@ -67,7 +67,7 @@ public void 
ProcessesBlockCheckType() [Fact] public void CanCalculateBlockCheckSize() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var header = new XZHeader(br); header.Process(); Assert.Equal(8, header.BlockCheckSize); diff --git a/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs index 02a96e9be..b740ddcde 100644 --- a/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs @@ -18,7 +18,7 @@ public class XzIndexAsyncTests : XzTestsBase public void RecordsStreamStartOnInit() { using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); Assert.Equal(0, index.StreamStartPosition); } @@ -27,7 +27,7 @@ public void RecordsStreamStartOnInit() public async ValueTask ThrowsIfHasNoIndexMarkerAsync() { using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); await Assert.ThrowsAsync(async () => await index.ProcessAsync().ConfigureAwait(false) @@ -37,7 +37,7 @@ await index.ProcessAsync().ConfigureAwait(false) [Fact] public async ValueTask ReadsNoRecordAsync() { - var br = new BinaryReader(CompressedEmptyStream); + using var br = new BinaryReader(CompressedEmptyStream); var index = new XZIndex(br, false); await index.ProcessAsync().ConfigureAwait(false); Assert.Equal((ulong)0, index.NumberOfRecords); @@ -46,7 +46,7 @@ public async ValueTask ReadsNoRecordAsync() [Fact] public async ValueTask ReadsOneRecordAsync() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); await index.ProcessAsync().ConfigureAwait(false); Assert.Equal((ulong)1, index.NumberOfRecords); @@ -55,7 +55,7 @@ public async ValueTask 
ReadsOneRecordAsync() [Fact] public async ValueTask ReadsMultipleRecordsAsync() { - var br = new BinaryReader(CompressedIndexedStream); + using var br = new BinaryReader(CompressedIndexedStream); var index = new XZIndex(br, false); await index.ProcessAsync().ConfigureAwait(false); Assert.Equal((ulong)2, index.NumberOfRecords); @@ -64,7 +64,7 @@ public async ValueTask ReadsMultipleRecordsAsync() [Fact] public async ValueTask ReadsFirstRecordAsync() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); await index.ProcessAsync().ConfigureAwait(false); Assert.Equal((ulong)OriginalBytes.Length, index.Records[0].UncompressedSize); @@ -89,7 +89,7 @@ public async ValueTask SkipsPaddingAsync() 0xC9, 0xFF, ]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); await index.ProcessAsync().ConfigureAwait(false); Assert.Equal(0L, badStream.Position % 4L); diff --git a/tests/SharpCompress.Test/Xz/XZIndexTests.cs b/tests/SharpCompress.Test/Xz/XZIndexTests.cs index 6930e8692..022dcd972 100644 --- a/tests/SharpCompress.Test/Xz/XZIndexTests.cs +++ b/tests/SharpCompress.Test/Xz/XZIndexTests.cs @@ -1,4 +1,4 @@ -using System.IO; +using System.IO; using SharpCompress.Common; using SharpCompress.Compressors.Xz; using Xunit; @@ -17,7 +17,7 @@ public class XzIndexTests : XzTestsBase public void RecordsStreamStartOnInit() { using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); Assert.Equal(0, index.StreamStartPosition); } @@ -26,7 +26,7 @@ public void RecordsStreamStartOnInit() public void ThrowsIfHasNoIndexMarker() { using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); Assert.Throws(() 
=> index.Process()); } @@ -34,7 +34,7 @@ public void ThrowsIfHasNoIndexMarker() [Fact] public void ReadsNoRecord() { - var br = new BinaryReader(CompressedEmptyStream); + using var br = new BinaryReader(CompressedEmptyStream); var index = new XZIndex(br, false); index.Process(); Assert.Equal((ulong)0, index.NumberOfRecords); @@ -43,7 +43,7 @@ public void ReadsNoRecord() [Fact] public void ReadsOneRecord() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); index.Process(); Assert.Equal((ulong)1, index.NumberOfRecords); @@ -52,7 +52,7 @@ public void ReadsOneRecord() [Fact] public void ReadsMultipleRecords() { - var br = new BinaryReader(CompressedIndexedStream); + using var br = new BinaryReader(CompressedIndexedStream); var index = new XZIndex(br, false); index.Process(); Assert.Equal((ulong)2, index.NumberOfRecords); @@ -61,7 +61,7 @@ public void ReadsMultipleRecords() [Fact] public void ReadsFirstRecord() { - var br = new BinaryReader(CompressedStream); + using var br = new BinaryReader(CompressedStream); var index = new XZIndex(br, false); index.Process(); Assert.Equal((ulong)OriginalBytes.Length, index.Records[0].UncompressedSize); @@ -86,7 +86,7 @@ public void SkipsPadding() 0xC9, 0xFF, ]); - var br = new BinaryReader(badStream); + using var br = new BinaryReader(badStream); var index = new XZIndex(br, false); index.Process(); Assert.Equal(0L, badStream.Position % 4L); diff --git a/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs b/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs index 9402e7c41..8f8e59b8b 100644 --- a/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs +++ b/tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs @@ -12,7 +12,7 @@ public class XzStreamAsyncTests : XzTestsBase [Fact] public async ValueTask CanReadEmptyStreamAsync() { - var xz = new XZStream(CompressedEmptyStream); + using var xz = new XZStream(CompressedEmptyStream); using var sr = new 
StreamReader(new AsyncOnlyStream(xz)); var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(OriginalEmpty, uncompressed); @@ -21,7 +21,7 @@ public async ValueTask CanReadEmptyStreamAsync() [Fact] public async ValueTask CanReadStreamAsync() { - var xz = new XZStream(CompressedStream); + using var xz = new XZStream(CompressedStream); using var sr = new StreamReader(new AsyncOnlyStream(xz)); var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(Original, uncompressed); @@ -30,7 +30,7 @@ public async ValueTask CanReadStreamAsync() [Fact] public async ValueTask CanReadIndexedStreamAsync() { - var xz = new XZStream(CompressedIndexedStream); + using var xz = new XZStream(CompressedIndexedStream); using var sr = new StreamReader(new AsyncOnlyStream(xz)); var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false); Assert.Equal(OriginalIndexed, uncompressed); diff --git a/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs index 7e1b143a2..2d7a0c7b3 100644 --- a/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs @@ -173,7 +173,7 @@ bool forwardOnly var opts = new ZipWriterOptions(CompressionType.Deflate) { UseZip64 = setZip64 }; // Use no compression to ensure we hit the limits (actually inflates a bit, but seems better than using method==Store) - var eo = new ZipWriterEntryOptions { DeflateCompressionLevel = CompressionLevel.None }; + var eo = new ZipWriterEntryOptions { CompressionLevel = 0 }; using var zip = File.OpenWrite(filename); using var st = forwardOnly ? (Stream)new ForwardOnlyStream(zip) : zip; @@ -199,33 +199,30 @@ public async ValueTask> ReadForwardOnlyAsync(string filename) ZipEntry? 
prev = null; using (var fs = File.OpenRead(filename)) { - var rd = await ReaderFactory.OpenAsyncReader( + await using var rd = await ReaderFactory.OpenAsyncReader( new AsyncOnlyStream(fs), new ReaderOptions { LookForHeader = false } ); - await using (rd) + while (await rd.MoveToNextEntryAsync()) { - while (await rd.MoveToNextEntryAsync()) - { #if LEGACY_DOTNET - using (var entryStream = await rd.OpenEntryStreamAsync()) - { - await entryStream.SkipEntryAsync(); - } + using (var entryStream = await rd.OpenEntryStreamAsync()) + { + await entryStream.SkipEntryAsync(); + } #else - await using (var entryStream = await rd.OpenEntryStreamAsync()) - { - await entryStream.SkipEntryAsync(); - } + await using (var entryStream = await rd.OpenEntryStreamAsync()) + { + await entryStream.SkipEntryAsync(); + } #endif - count++; - if (prev != null) - { - size += prev.Size; - } - - prev = (ZipEntry)rd.Entry; + count++; + if (prev != null) + { + size += prev.Size; } + + prev = (ZipEntry)rd.Entry; } } diff --git a/tests/SharpCompress.Test/Zip/Zip64Tests.cs b/tests/SharpCompress.Test/Zip/Zip64Tests.cs index 6242b8773..68d27625b 100644 --- a/tests/SharpCompress.Test/Zip/Zip64Tests.cs +++ b/tests/SharpCompress.Test/Zip/Zip64Tests.cs @@ -158,7 +158,7 @@ bool forwardOnly var opts = new ZipWriterOptions(CompressionType.Deflate) { UseZip64 = setZip64 }; // Use no compression to ensure we hit the limits (actually inflates a bit, but seems better than using method==Store) - var eo = new ZipWriterEntryOptions { DeflateCompressionLevel = CompressionLevel.None }; + var eo = new ZipWriterEntryOptions { CompressionLevel = 0 }; using var zip = File.OpenWrite(filename); using var st = forwardOnly ? 
(Stream)new ForwardOnlyStream(zip) : zip; diff --git a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs index c57d50267..7228a2004 100644 --- a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using System.Threading.Tasks; @@ -76,7 +75,6 @@ await writer.WriteAsync( // Calculate and output actual compression ratio var originalSize = file1Data.Length + file2Data.Length + file3Data.Length; var actualRatio = (double)zipStream.Length / originalSize; - //Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); // Verify compression occurred (except for None compression type) if (compressionType != CompressionType.None) @@ -148,7 +146,6 @@ await writer.WriteAsync( // Calculate and output actual compression ratio var actualRatio = (double)zipStream.Length / testData.Length; - //Debug.WriteLine($"Zip_WriterFactory_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); VerifyCompressionRatio( testData.Length, @@ -212,7 +209,6 @@ await writer.WriteAsync( // Calculate and output actual compression ratio var actualRatio = (double)zipStream.Length / testData.Length; - //Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); // Verify the archive zipStream.Position = 0; diff --git a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcTests.cs b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcTests.cs index fb9226bed..03ce4e1e5 100644 --- 
a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcTests.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using SharpCompress.Archives.Zip; @@ -69,7 +68,6 @@ float expectedRatio // Calculate and output actual compression ratio var originalSize = file1Data.Length + file2Data.Length + file3Data.Length; var actualRatio = (double)zipStream.Length / originalSize; - //Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); // Verify compression occurred (except for None compression type) if (compressionType != CompressionType.None) @@ -135,7 +133,6 @@ float expectedRatio // Calculate and output actual compression ratio var actualRatio = (double)zipStream.Length / testData.Length; - //Debug.WriteLine($"Zip_WriterFactory_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); VerifyCompressionRatio( testData.Length, @@ -197,7 +194,6 @@ float expectedRatio // Calculate and output actual compression ratio var actualRatio = (double)zipStream.Length / testData.Length; - //Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}"); // Verify the archive zipStream.Position = 0;