diff --git a/BizHawk.Client.Common/BizHawk.Client.Common.csproj b/BizHawk.Client.Common/BizHawk.Client.Common.csproj
index 9816bf2bfb..0b878042f5 100644
--- a/BizHawk.Client.Common/BizHawk.Client.Common.csproj
+++ b/BizHawk.Client.Common/BizHawk.Client.Common.csproj
@@ -255,8 +255,337 @@
+    [329 added csproj lines whose XML content (the <Compile Include="SharpCompress\..." /> items for the new source files below) was lost in extraction]
@@ -318,7 +647,9 @@
BizHawk.Bizware.BizwareGL
- [1 removed XML line lost in extraction]
+ [3 added XML lines lost in extraction]
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/AbstractArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/AbstractArchive.cs
new file mode 100644
index 0000000000..2981f734c8
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/AbstractArchive.cs
@@ -0,0 +1,179 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives
+{
+ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
+ where TEntry : IArchiveEntry
+ where TVolume : IVolume
+ {
+ private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
+ private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
+
+ public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
+ public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
+
+ public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
+ public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
+
+ protected ReaderOptions ReaderOptions { get; }
+
+ private bool disposed;
+
+#if !NO_FILE
+ internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
+ {
+ Type = type;
+ if (!fileInfo.Exists)
+ {
+ throw new ArgumentException("File does not exist: " + fileInfo.FullName);
+ }
+ ReaderOptions = readerOptions;
+ readerOptions.LeaveStreamOpen = false;
+ lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
+ lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
+ }
+
+
+ protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
+#endif
+
+ internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
+ {
+ Type = type;
+ ReaderOptions = readerOptions;
+ lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
+ lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
+ }
+
+ internal AbstractArchive(ArchiveType type)
+ {
+ Type = type;
+ lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
+ lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
+ }
+
+ public ArchiveType Type { get; }
+
+ void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
+ {
+ EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
+ }
+
+ void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
+ {
+ EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
+ }
+
+ private static Stream CheckStreams(Stream stream)
+ {
+ if (!stream.CanSeek || !stream.CanRead)
+ {
+ throw new ArgumentException("Archive streams must be Readable and Seekable");
+ }
+ return stream;
+ }
+
+ /// <summary>
+ /// Returns a ReadOnlyCollection of all the entries across the one or many parts of the archive.
+ /// </summary>
+ public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
+
+ /// <summary>
+ /// Returns a ReadOnlyCollection of all the volumes across the one or many parts of the archive.
+ /// </summary>
+ public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
+
+ /// <summary>
+ /// The total size of the files compressed in the archive.
+ /// </summary>
+ public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
+
+ /// <summary>
+ /// The total size of the files as uncompressed in the archive.
+ /// </summary>
+ public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
+
+ protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
+ protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
+
+ IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
+
+ IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
+
+ public virtual void Dispose()
+ {
+ if (!disposed)
+ {
+ lazyVolumes.ForEach(v => v.Dispose());
+ lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
+ disposed = true;
+ }
+ }
+
+ void IArchiveExtractionListener.EnsureEntriesLoaded()
+ {
+ lazyEntries.EnsureFullyLoaded();
+ lazyVolumes.EnsureFullyLoaded();
+ }
+
+ void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
+ {
+ CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
+ {
+ CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
+ CompressedBytesRead = compressedReadBytes
+ });
+ }
+
+ void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
+ {
+ FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
+ {
+ CompressedSize = compressedSize,
+ Size = size,
+ Name = name
+ });
+ }
+
+ /// <summary>
+ /// Use this method to extract all entries in an archive in order.
+ /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
+ /// extracted sequentially for the best performance.
+ ///
+ /// This method will load all entry information from the archive.
+ ///
+ /// WARNING: this will reuse the underlying stream for the archive. Errors may
+ /// occur if this is used at the same time as other extraction methods on this instance.
+ /// </summary>
+ /// <returns></returns>
+ public IReader ExtractAllEntries()
+ {
+ ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
+ return CreateReaderForSolidExtraction();
+ }
+
+ protected abstract IReader CreateReaderForSolidExtraction();
+
+ /// <summary>
+ /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
+ /// </summary>
+ public virtual bool IsSolid { get { return false; } }
+
+ /// <summary>
+ /// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
+ /// </summary>
+ public bool IsComplete
+ {
+ get
+ {
+ ((IArchiveExtractionListener)this).EnsureEntriesLoaded();
+ return Entries.All(x => x.IsComplete);
+ }
+ }
+ }
+}
\ No newline at end of file
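
A minimal sketch of the sequential-extraction path exposed above (archive path hypothetical): ExtractAllEntries forces all entries to load, then hands back an IReader that walks the archive in stored order, which is the supported route for SOLID archives.

    using System.IO;
    using SharpCompress.Archives;
    using SharpCompress.Readers;

    class SolidExtractionSketch
    {
        static void Main()
        {
            using (var archive = ArchiveFactory.Open(@"fixtures\solid.rar")) // hypothetical path
            using (IReader reader = archive.ExtractAllEntries())
            {
                while (reader.MoveToNextEntry())
                {
                    if (reader.Entry.IsDirectory)
                    {
                        continue;
                    }
                    // stream each entry to disk in archive order; no random access
                    using (var output = File.Create(Path.GetFileName(reader.Entry.Key)))
                    {
                        reader.WriteEntryTo(output);
                    }
                }
            }
        }
    }
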
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/AbstractWritableArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/AbstractWritableArchive.cs
new file mode 100644
index 0000000000..5f38d459ee
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/AbstractWritableArchive.cs
@@ -0,0 +1,147 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Readers;
+using SharpCompress.Writers;
+
+namespace SharpCompress.Archives
+{
+ public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>, IWritableArchive
+ where TEntry : IArchiveEntry
+ where TVolume : IVolume
+ {
+ private readonly List<TEntry> newEntries = new List<TEntry>();
+ private readonly List<TEntry> removedEntries = new List<TEntry>();
+
+ private readonly List<TEntry> modifiedEntries = new List<TEntry>();
+ private bool hasModifications;
+
+ internal AbstractWritableArchive(ArchiveType type)
+ : base(type)
+ {
+ }
+
+ internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
+ : base(type, stream.AsEnumerable(), readerFactoryOptions)
+ {
+ }
+
+#if !NO_FILE
+ internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
+ : base(type, fileInfo, readerFactoryOptions)
+ {
+ }
+#endif
+
+ public override ICollection<TEntry> Entries
+ {
+ get
+ {
+ if (hasModifications)
+ {
+ return modifiedEntries;
+ }
+ return base.Entries;
+ }
+ }
+
+ private void RebuildModifiedCollection()
+ {
+ hasModifications = true;
+ newEntries.RemoveAll(v => removedEntries.Contains(v));
+ modifiedEntries.Clear();
+ modifiedEntries.AddRange(OldEntries.Concat(newEntries));
+ }
+
+ private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
+
+ public void RemoveEntry(TEntry entry)
+ {
+ if (!removedEntries.Contains(entry))
+ {
+ removedEntries.Add(entry);
+ RebuildModifiedCollection();
+ }
+ }
+
+ void IWritableArchive.RemoveEntry(IArchiveEntry entry)
+ {
+ RemoveEntry((TEntry)entry);
+ }
+
+ public TEntry AddEntry(string key, Stream source,
+ long size = 0, DateTime? modified = null)
+ {
+ return AddEntry(key, source, false, size, modified);
+ }
+
+ IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
+ {
+ return AddEntry(key, source, closeStream, size, modified);
+ }
+
+ public TEntry AddEntry(string key, Stream source, bool closeStream,
+ long size = 0, DateTime? modified = null)
+ {
+ if (key.StartsWith("/")
+ || key.StartsWith("\\"))
+ {
+ key = key.Substring(1);
+ }
+ if (DoesKeyMatchExisting(key))
+ {
+ throw new ArchiveException("Cannot add entry with duplicate key: " + key);
+ }
+ var entry = CreateEntry(key, source, size, modified, closeStream);
+ newEntries.Add(entry);
+ RebuildModifiedCollection();
+ return entry;
+ }
+
+ private bool DoesKeyMatchExisting(string key)
+ {
+ foreach (var path in Entries.Select(x => x.Key))
+ {
+ var p = path.Replace('/', '\\');
+ if (p.StartsWith("\\"))
+ {
+ p = p.Substring(1);
+ }
+ // compare every existing key; returning on the first entry would skip the rest
+ if (string.Equals(p, key, StringComparison.OrdinalIgnoreCase))
+ {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void SaveTo(Stream stream, WriterOptions options)
+ {
+ //reset streams of new entries
+ newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
+ SaveTo(stream, options, OldEntries, newEntries);
+ }
+
+ protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
+ bool closeStream)
+ {
+ if (!source.CanRead || !source.CanSeek)
+ {
+ throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
+ }
+ return CreateEntryInternal(key, source, size, modified, closeStream);
+ }
+
+ protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
+ bool closeStream);
+
+ protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
+
+ public override void Dispose()
+ {
+ base.Dispose();
+ newEntries.Cast<Entry>().ForEach(x => x.Close());
+ removedEntries.Cast<Entry>().ForEach(x => x.Close());
+ modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
+ }
+ }
+}
\ No newline at end of file
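
A sketch of the staging model these members implement, assuming the vendored copy also carries ZipArchive (ArchiveFactory above references it): AddEntry only records the entry, so source streams must stay open and seekable until SaveTo serializes everything.

    using System.IO;
    using SharpCompress.Archives.Zip;
    using SharpCompress.Common;
    using SharpCompress.Writers;

    class WritableArchiveSketch
    {
        static void Main()
        {
            using (var archive = ZipArchive.Create())
            using (var source = File.OpenRead("readme.txt")) // hypothetical input file
            {
                // staged only; compression happens inside SaveTo
                archive.AddEntry("docs/readme.txt", source, closeStream: false, size: source.Length);
                using (var output = File.Create("out.zip"))
                {
                    archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));
                }
            }
        }
    }
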
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/ArchiveFactory.cs b/BizHawk.Client.Common/SharpCompress/Archives/ArchiveFactory.cs
new file mode 100644
index 0000000000..41435e2254
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/ArchiveFactory.cs
@@ -0,0 +1,153 @@
+using System;
+using System.IO;
+using SharpCompress.Archives.GZip;
+using SharpCompress.Archives.Rar;
+using SharpCompress.Archives.SevenZip;
+using SharpCompress.Archives.Tar;
+using SharpCompress.Archives.Zip;
+using SharpCompress.Common;
+using SharpCompress.Compressors.LZMA;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives
+{
+ public class ArchiveFactory
+ {
+ /// <summary>
+ /// Opens an Archive for random access
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ /// <returns></returns>
+ public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
+ {
+ stream.CheckNotNull("stream");
+ if (!stream.CanRead || !stream.CanSeek)
+ {
+ throw new ArgumentException("Stream should be readable and seekable");
+ }
+ readerOptions = readerOptions ?? new ReaderOptions();
+ if (ZipArchive.IsZipFile(stream, null))
+ {
+ stream.Seek(0, SeekOrigin.Begin);
+ return ZipArchive.Open(stream, readerOptions);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (SevenZipArchive.IsSevenZipFile(stream))
+ {
+ stream.Seek(0, SeekOrigin.Begin);
+ return SevenZipArchive.Open(stream, readerOptions);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (GZipArchive.IsGZipFile(stream))
+ {
+ stream.Seek(0, SeekOrigin.Begin);
+ return GZipArchive.Open(stream, readerOptions);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (RarArchive.IsRarFile(stream, readerOptions))
+ {
+ stream.Seek(0, SeekOrigin.Begin);
+ return RarArchive.Open(stream, readerOptions);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (TarArchive.IsTarFile(stream))
+ {
+ stream.Seek(0, SeekOrigin.Begin);
+ return TarArchive.Open(stream, readerOptions);
+ }
+ throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
+ }
+
+ public static IWritableArchive Create(ArchiveType type)
+ {
+ switch (type)
+ {
+ case ArchiveType.Zip:
+ {
+ return ZipArchive.Create();
+ }
+ case ArchiveType.Tar:
+ {
+ return TarArchive.Create();
+ }
+ case ArchiveType.GZip:
+ {
+ return GZipArchive.Create();
+ }
+ default:
+ {
+ throw new NotSupportedException("Cannot create Archives of type: " + type);
+ }
+ }
+ }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="options"></param>
+ public static IArchive Open(string filePath, ReaderOptions options = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return Open(new FileInfo(filePath), options);
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="options"></param>
+ public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ options = options ?? new ReaderOptions { LeaveStreamOpen = false };
+ using (var stream = fileInfo.OpenRead())
+ {
+ if (ZipArchive.IsZipFile(stream, null))
+ {
+ return ZipArchive.Open(fileInfo, options);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (SevenZipArchive.IsSevenZipFile(stream))
+ {
+ return SevenZipArchive.Open(fileInfo, options);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (GZipArchive.IsGZipFile(stream))
+ {
+ return GZipArchive.Open(fileInfo, options);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (RarArchive.IsRarFile(stream, options))
+ {
+ return RarArchive.Open(fileInfo, options);
+ }
+ stream.Seek(0, SeekOrigin.Begin);
+ if (TarArchive.IsTarFile(stream))
+ {
+ return TarArchive.Open(fileInfo, options);
+ }
+ throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
+ }
+ }
+
+ /// <summary>
+ /// Extract to specific directory, retaining filename
+ /// </summary>
+ public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
+ ExtractionOptions options = null)
+ {
+ using (IArchive archive = Open(sourceArchive))
+ {
+ foreach (IArchiveEntry entry in archive.Entries)
+ {
+ entry.WriteToDirectory(destinationDirectory, options);
+ }
+ }
+ }
+#endif
+ }
+}
\ No newline at end of file
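
A sketch of the intended call pattern, with hypothetical paths; the factory sniffs the header bytes in the order above (Zip, 7Zip, GZip, Rar, Tar) rather than trusting the file extension:

    using SharpCompress.Archives;
    using SharpCompress.Common;

    class ArchiveFactorySketch
    {
        static void Main()
        {
            using (var archive = ArchiveFactory.Open(@"roms\game.7z")) // hypothetical path
            {
                foreach (var entry in archive.Entries)
                {
                    if (!entry.IsDirectory)
                    {
                        entry.WriteToDirectory("extracted",
                            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
                    }
                }
            }
        }
    }
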
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchive.cs
new file mode 100644
index 0000000000..1ae40c5cc0
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchive.cs
@@ -0,0 +1,188 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.GZip;
+using SharpCompress.Readers;
+using SharpCompress.Readers.GZip;
+using SharpCompress.Writers;
+using SharpCompress.Writers.GZip;
+
+namespace SharpCompress.Archives.GZip
+{
+ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
+ {
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="readerOptions"></param>
+ public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
+ }
+#endif
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
+ {
+ stream.CheckNotNull("stream");
+ return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
+ }
+
+ public static GZipArchive Create()
+ {
+ return new GZipArchive();
+ }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="options"></param>
+ internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
+ : base(ArchiveType.GZip, fileInfo, options)
+ {
+ }
+
+ protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
+ {
+ return new GZipVolume(file, ReaderOptions).AsEnumerable();
+ }
+
+ public static bool IsGZipFile(string filePath)
+ {
+ return IsGZipFile(new FileInfo(filePath));
+ }
+
+ public static bool IsGZipFile(FileInfo fileInfo)
+ {
+ if (!fileInfo.Exists)
+ {
+ return false;
+ }
+ using (Stream stream = fileInfo.OpenRead())
+ {
+ return IsGZipFile(stream);
+ }
+ }
+
+ public void SaveTo(string filePath)
+ {
+ SaveTo(new FileInfo(filePath));
+ }
+
+ public void SaveTo(FileInfo fileInfo)
+ {
+ using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
+ {
+ SaveTo(stream, new WriterOptions(CompressionType.GZip));
+ }
+ }
+#endif
+
+ public static bool IsGZipFile(Stream stream)
+ {
+ // read the header on the first read
+ byte[] header = new byte[10];
+
+ // workitem 8501: handle edge case (decompress empty stream)
+ if (!stream.ReadFully(header))
+ {
+ return false;
+ }
+
+ if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="options"></param>
+ internal GZipArchive(Stream stream, ReaderOptions options)
+ : base(ArchiveType.GZip, stream, options)
+ {
+ }
+
+ internal GZipArchive()
+ : base(ArchiveType.GZip)
+ {
+ }
+
+ protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
+ bool closeStream)
+ {
+ if (Entries.Any())
+ {
+ throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
+ }
+ return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
+ }
+
+ protected override void SaveTo(Stream stream, WriterOptions options,
+ IEnumerable<GZipArchiveEntry> oldEntries,
+ IEnumerable<GZipArchiveEntry> newEntries)
+ {
+ if (Entries.Count > 1)
+ {
+ throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
+ }
+ using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
+ {
+ foreach (var entry in oldEntries.Concat(newEntries)
+ .Where(x => !x.IsDirectory))
+ {
+ using (var entryStream = entry.OpenEntryStream())
+ {
+ writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
+ }
+ }
+ }
+ }
+
+ protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
+ {
+ return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
+ }
+
+ protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
+ {
+ Stream stream = volumes.Single().Stream;
+ yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
+ }
+
+ protected override IReader CreateReaderForSolidExtraction()
+ {
+ var stream = Volumes.Single().Stream;
+ stream.Position = 0;
+ return GZipReader.Open(stream);
+ }
+ }
+}
\ No newline at end of file
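
IsGZipFile consumes the first ten bytes (the fixed gzip member header) to check the 0x1F 0x8B magic and the deflate method id, and it does not rewind. A probing sketch, file name hypothetical:

    using System;
    using System.IO;
    using SharpCompress.Archives.GZip;

    class GZipProbeSketch
    {
        static void Main()
        {
            using (var stream = File.OpenRead("suspect.bin")) // hypothetical file
            {
                bool isGZip = GZipArchive.IsGZipFile(stream);
                stream.Position = 0; // the probe left the stream 10 bytes in
                if (isGZip)
                {
                    using (var archive = GZipArchive.Open(stream))
                    {
                        Console.WriteLine(archive.Entries.Count); // gzip carries exactly one entry
                    }
                }
            }
        }
    }
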
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
new file mode 100644
index 0000000000..7f417171e4
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
@@ -0,0 +1,34 @@
+using System.IO;
+using System.Linq;
+using SharpCompress.Common.GZip;
+
+namespace SharpCompress.Archives.GZip
+{
+ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
+ {
+ internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
+ : base(part)
+ {
+ Archive = archive;
+ }
+
+ public virtual Stream OpenEntryStream()
+ {
+ //this is to reset the stream to be read multiple times
+ var part = Parts.Single() as GZipFilePart;
+ if (part.GetRawStream().Position != part.EntryStartPosition)
+ {
+ part.GetRawStream().Position = part.EntryStartPosition;
+ }
+ return Parts.Single().GetCompressedStream();
+ }
+
+ #region IArchiveEntry Members
+
+ public IArchive Archive { get; }
+
+ public bool IsComplete => true;
+
+ #endregion
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs
new file mode 100644
index 0000000000..8bf96f3928
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs
@@ -0,0 +1,66 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common;
+using SharpCompress.IO;
+
+namespace SharpCompress.Archives.GZip
+{
+ internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
+ {
+ private readonly bool closeStream;
+ private readonly Stream stream;
+
+ internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
+ string path, long size, DateTime? lastModified, bool closeStream)
+ : base(archive, null)
+ {
+ this.stream = stream;
+ Key = path;
+ Size = size;
+ LastModifiedTime = lastModified;
+ this.closeStream = closeStream;
+ }
+
+ public override long Crc => 0;
+
+ public override string Key { get; }
+
+ public override long CompressedSize => 0;
+
+ public override long Size { get; }
+
+ public override DateTime? LastModifiedTime { get; }
+
+ public override DateTime? CreatedTime => null;
+
+ public override DateTime? LastAccessedTime => null;
+
+ public override DateTime? ArchivedTime => null;
+
+ public override bool IsEncrypted => false;
+
+ public override bool IsDirectory => false;
+
+ public override bool IsSplitAfter => false;
+
+ internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
+
+ Stream IWritableArchiveEntry.Stream => stream;
+
+ public override Stream OpenEntryStream()
+ {
+ //ensure new stream is at the start, this could be reset
+ stream.Seek(0, SeekOrigin.Begin);
+ return new NonDisposingStream(stream);
+ }
+
+ internal override void Close()
+ {
+ if (closeStream)
+ {
+ stream.Dispose();
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/IArchive.cs
new file mode 100644
index 0000000000..2ba84a399b
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IArchive.cs
@@ -0,0 +1,49 @@
+using System;
+using System.Collections.Generic;
+using SharpCompress.Common;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives
+{
+ public interface IArchive : IDisposable
+ {
+ event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
+ event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
+
+ event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
+ event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
+
+ IEnumerable<IArchiveEntry> Entries { get; }
+ IEnumerable<IVolume> Volumes { get; }
+
+ ArchiveType Type { get; }
+
+ /// <summary>
+ /// Use this method to extract all entries in an archive in order.
+ /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
+ /// extracted sequentially for the best performance.
+ /// </summary>
+ IReader ExtractAllEntries();
+
+ /// <summary>
+ /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
+ /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
+ /// </summary>
+ bool IsSolid { get; }
+
+ /// <summary>
+ /// This checks to see if all the known entries have IsComplete = true
+ /// </summary>
+ bool IsComplete { get; }
+
+ /// <summary>
+ /// The total size of the files compressed in the archive.
+ /// </summary>
+ long TotalSize { get; }
+
+ /// <summary>
+ /// The total size of the files as uncompressed in the archive.
+ /// </summary>
+ long TotalUncompressSize { get; }
+ }
+}
\ No newline at end of file
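
A short sketch of wiring the IArchive events for progress reporting; the handlers and messages are illustrative only:

    using System;
    using SharpCompress.Archives;

    static class ProgressSketch
    {
        static void Attach(IArchive archive)
        {
            archive.EntryExtractionBegin += (s, e) =>
                Console.WriteLine("extracting " + e.Item.Key);
            archive.CompressedBytesRead += (s, e) =>
                Console.WriteLine(e.CompressedBytesRead + " compressed bytes read");
        }
    }
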
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntry.cs
new file mode 100644
index 0000000000..43e681b454
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntry.cs
@@ -0,0 +1,24 @@
+using System.IO;
+using SharpCompress.Common;
+
+namespace SharpCompress.Archives
+{
+ public interface IArchiveEntry : IEntry
+ {
+ /// <summary>
+ /// Opens the current entry as a stream that will decompress as it is read.
+ /// Read the entire stream or use SkipEntry on EntryStream.
+ /// </summary>
+ Stream OpenEntryStream();
+
+ /// <summary>
+ /// The archive can find all the parts of the archive needed to extract this entry.
+ /// </summary>
+ bool IsComplete { get; }
+
+ /// <summary>
+ /// The archive instance this entry belongs to
+ /// </summary>
+ IArchive Archive { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntryExtensions.cs b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntryExtensions.cs
new file mode 100644
index 0000000000..e1716fb30c
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -0,0 +1,70 @@
+using System.IO;
+using SharpCompress.Common;
+using SharpCompress.IO;
+
+namespace SharpCompress.Archives
+{
+ public static class IArchiveEntryExtensions
+ {
+ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
+ {
+ if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
+ {
+ throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
+ }
+
+ if (archiveEntry.IsDirectory)
+ {
+ throw new ExtractionException("Entry is a file directory and cannot be extracted.");
+ }
+
+ var streamListener = archiveEntry.Archive as IArchiveExtractionListener;
+ streamListener.EnsureEntriesLoaded();
+ streamListener.FireEntryExtractionBegin(archiveEntry);
+ streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
+ var entryStream = archiveEntry.OpenEntryStream();
+ if (entryStream == null)
+ {
+ return;
+ }
+ using (entryStream)
+ {
+ using (Stream s = new ListeningStream(streamListener, entryStream))
+ {
+ s.TransferTo(streamToWriteTo);
+ }
+ }
+ streamListener.FireEntryExtractionEnd(archiveEntry);
+ }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Extract to specific directory, retaining filename
+ /// </summary>
+ public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
+ ExtractionOptions options = null)
+ {
+ ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
+ entry.WriteToFile);
+ }
+
+ /// <summary>
+ /// Extract to specific file
+ /// </summary>
+ public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
+ ExtractionOptions options = null)
+ {
+ ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
+ (x, fm) =>
+ {
+ using (FileStream fs = File.Open(destinationFileName, fm))
+ {
+ entry.WriteTo(fs);
+ }
+ });
+ }
+#endif
+ }
+}
\ No newline at end of file
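
WriteTo refuses random access on solid Rar archives and otherwise copies the decompressed entry into any writable stream; a sketch that buffers one entry in memory, entry key hypothetical:

    using System.IO;
    using System.Linq;
    using SharpCompress.Archives;

    static class WriteToSketch
    {
        static byte[] ReadEntry(IArchive archive, string key) // e.g. key = "data/config.xml"
        {
            var entry = archive.Entries.Single(e => e.Key == key);
            using (var buffer = new MemoryStream())
            {
                entry.WriteTo(buffer); // throws InvalidFormatException on solid Rar
                return buffer.ToArray();
            }
        }
    }
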
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtensions.cs b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtensions.cs
new file mode 100644
index 0000000000..7b66966631
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtensions.cs
@@ -0,0 +1,26 @@
+#if !NO_FILE
+using System.Linq;
+using SharpCompress.Common;
+
+#endif
+
+namespace SharpCompress.Archives
+{
+ public static class IArchiveExtensions
+ {
+#if !NO_FILE
+
+ /// <summary>
+ /// Extract to specific directory, retaining filename
+ /// </summary>
+ public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
+ ExtractionOptions options = null)
+ {
+ foreach (IArchiveEntry entry in archive.Entries.Where(x => !x.IsDirectory))
+ {
+ entry.WriteToDirectory(destinationDirectory, options);
+ }
+ }
+#endif
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtractionListener.cs b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtractionListener.cs
new file mode 100644
index 0000000000..9ce07e8ae6
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IArchiveExtractionListener.cs
@@ -0,0 +1,11 @@
+using SharpCompress.Common;
+
+namespace SharpCompress.Archives
+{
+ internal interface IArchiveExtractionListener : IExtractionListener
+ {
+ void EnsureEntriesLoaded();
+ void FireEntryExtractionBegin(IArchiveEntry entry);
+ void FireEntryExtractionEnd(IArchiveEntry entry);
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchive.cs
new file mode 100644
index 0000000000..380d681482
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchive.cs
@@ -0,0 +1,15 @@
+using System;
+using System.IO;
+using SharpCompress.Writers;
+
+namespace SharpCompress.Archives
+{
+ public interface IWritableArchive : IArchive
+ {
+ void RemoveEntry(IArchiveEntry entry);
+
+ IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
+
+ void SaveTo(Stream stream, WriterOptions options);
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveEntry.cs
new file mode 100644
index 0000000000..044eb1c7b7
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveEntry.cs
@@ -0,0 +1,9 @@
+using System.IO;
+
+namespace SharpCompress.Archives
+{
+ internal interface IWritableArchiveEntry
+ {
+ Stream Stream { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveExtensions.cs b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveExtensions.cs
new file mode 100644
index 0000000000..bee42a4994
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/IWritableArchiveExtensions.cs
@@ -0,0 +1,63 @@
+#if !NO_FILE
+using System;
+#endif
+using System.IO;
+using SharpCompress.Writers;
+
+namespace SharpCompress.Archives
+{
+ public static class IWritableArchiveExtensions
+ {
+#if !NO_FILE
+
+ public static void AddEntry(this IWritableArchive writableArchive,
+ string entryPath, string filePath)
+ {
+ var fileInfo = new FileInfo(filePath);
+ if (!fileInfo.Exists)
+ {
+ throw new FileNotFoundException("Could not AddEntry: " + filePath);
+ }
+ writableArchive.AddEntry(entryPath, new FileInfo(filePath).OpenRead(), true, fileInfo.Length,
+ fileInfo.LastWriteTime);
+ }
+
+ public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
+ {
+ writableArchive.SaveTo(new FileInfo(filePath), options);
+ }
+
+ public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
+ {
+ using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
+ {
+ writableArchive.SaveTo(stream, options);
+ }
+ }
+
+ public static void AddAllFromDirectory(
+ this IWritableArchive writableArchive,
+ string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
+ {
+#if NET35
+ foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
+#else
+ foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
+#endif
+ {
+ var fileInfo = new FileInfo(path);
+ writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
+ fileInfo.LastWriteTime);
+ }
+ }
+ public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
+ {
+ if (!fileInfo.Exists)
+ {
+ throw new ArgumentException("FileInfo does not exist.");
+ }
+ return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
+ }
+#endif
+ }
+}
\ No newline at end of file
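
The directory helpers pair naturally; a packing sketch with hypothetical paths. Note AddAllFromDirectory keys each entry as path.Substring(filePath.Length), so whether the source path ends with a separator changes the stored names.

    using SharpCompress.Archives;
    using SharpCompress.Archives.Tar;
    using SharpCompress.Common;
    using SharpCompress.Writers;

    class PackDirectorySketch
    {
        static void Main()
        {
            using (var archive = TarArchive.Create())
            {
                archive.AddAllFromDirectory(@"C:\stage"); // hypothetical folder
                archive.SaveTo(@"C:\out\stage.tar", new WriterOptions(CompressionType.None));
            }
        }
    }
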
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs
new file mode 100644
index 0000000000..7932caadfe
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs
@@ -0,0 +1,46 @@
+
+#if !NO_FILE
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.IO;
+using SharpCompress.Common.Rar;
+using SharpCompress.Common.Rar.Headers;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives.Rar
+{
+ ///
+ /// A rar part based on a FileInfo object
+ ///
+ internal class FileInfoRarArchiveVolume : RarVolume
+ {
+ internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
+ : base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
+ {
+ FileInfo = fileInfo;
+ FileParts = GetVolumeFileParts().ToReadOnly();
+ }
+
+ private static ReaderOptions FixOptions(ReaderOptions options)
+ {
+ //make sure we're closing streams with fileinfo
+ options.LeaveStreamOpen = false;
+ return options;
+ }
+
+ internal ReadOnlyCollection<RarFilePart> FileParts { get; }
+
+ internal FileInfo FileInfo { get; }
+
+ internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
+ {
+ return new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
+ }
+
+ internal override IEnumerable<RarFilePart> ReadFileParts()
+ {
+ return FileParts;
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs
new file mode 100644
index 0000000000..4b31a774aa
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs
@@ -0,0 +1,28 @@
+
+#if !NO_FILE
+using System.IO;
+using SharpCompress.Common.Rar.Headers;
+
+namespace SharpCompress.Archives.Rar
+{
+ internal class FileInfoRarFilePart : SeekableFilePart
+ {
+ internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
+ : base(mh, fh, volume.Stream, password)
+ {
+ FileInfo = fi;
+ }
+
+ internal FileInfo FileInfo { get; }
+
+ internal override string FilePartName
+ {
+ get
+ {
+ return "Rar File: " + FileInfo.FullName
+ + " File Entry: " + FileHeader.FileName;
+ }
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.Extensions.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.Extensions.cs
new file mode 100644
index 0000000000..7eefef9c07
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.Extensions.cs
@@ -0,0 +1,23 @@
+using System.Linq;
+
+namespace SharpCompress.Archives.Rar
+{
+ public static class RarArchiveExtensions
+ {
+ ///
+ /// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
+ ///
+ public static bool IsFirstVolume(this RarArchive archive)
+ {
+ return archive.Volumes.First().IsFirstVolume;
+ }
+
+ ///
+ /// RarArchive is part of a multi-part archive.
+ ///
+ public static bool IsMultipartVolume(this RarArchive archive)
+ {
+ return archive.Volumes.First().IsMultiVolume;
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.cs
new file mode 100644
index 0000000000..52ffaa1c82
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchive.cs
@@ -0,0 +1,148 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.Rar;
+using SharpCompress.Common.Rar.Headers;
+using SharpCompress.Compressors.Rar;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Rar;
+
+namespace SharpCompress.Archives.Rar
+{
+ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
+ {
+ internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
+ internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="options"></param>
+ internal RarArchive(FileInfo fileInfo, ReaderOptions options)
+ : base(ArchiveType.Rar, fileInfo, options)
+ {
+ }
+
+ protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
+ {
+ return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
+ }
+#endif
+
+ /// <summary>
+ /// Takes multiple seekable Streams for a multi-part archive
+ /// </summary>
+ /// <param name="streams"></param>
+ /// <param name="options"></param>
+ internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
+ : base(ArchiveType.Rar, streams, options)
+ {
+ }
+
+ protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
+ {
+ return RarArchiveEntryFactory.GetEntries(this, volumes);
+ }
+
+ protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
+ {
+ return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
+ }
+
+ protected override IReader CreateReaderForSolidExtraction()
+ {
+ var stream = Volumes.First().Stream;
+ stream.Position = 0;
+ return RarReader.Open(stream, ReaderOptions);
+ }
+
+ public override bool IsSolid => Volumes.First().IsSolidArchive;
+
+ #region Creation
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="options"></param>
+ public static RarArchive Open(string filePath, ReaderOptions options = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="options"></param>
+ public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ return new RarArchive(fileInfo, options ?? new ReaderOptions());
+ }
+#endif
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="options"></param>
+ public static RarArchive Open(Stream stream, ReaderOptions options = null)
+ {
+ stream.CheckNotNull("stream");
+ return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Takes multiple seekable Streams for a multi-part archive
+ /// </summary>
+ /// <param name="streams"></param>
+ /// <param name="options"></param>
+ public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
+ {
+ streams.CheckNotNull("streams");
+ return new RarArchive(streams, options ?? new ReaderOptions());
+ }
+
+#if !NO_FILE
+ public static bool IsRarFile(string filePath)
+ {
+ return IsRarFile(new FileInfo(filePath));
+ }
+
+ public static bool IsRarFile(FileInfo fileInfo)
+ {
+ if (!fileInfo.Exists)
+ {
+ return false;
+ }
+ using (Stream stream = fileInfo.OpenRead())
+ {
+ return IsRarFile(stream);
+ }
+ }
+#endif
+
+ public static bool IsRarFile(Stream stream, ReaderOptions options = null)
+ {
+ try
+ {
+ MarkHeader.Read(stream, true, false);
+ return true;
+ }
+ catch
+ {
+ return false;
+ }
+ }
+
+ #endregion
+ }
+}
\ No newline at end of file
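
For multi-part sets the FileInfo overloads matter: LoadVolumes probes the next volume names on disk (see RarArchiveVolumeFactory below), while the stream overloads expect the caller to supply every part. A sketch, file names hypothetical:

    using SharpCompress.Archives.Rar;

    class RarSketch
    {
        static void Main()
        {
            // opening the first volume pulls in game.part2.rar, game.part3.rar, ... automatically
            using (var archive = RarArchive.Open(@"dumps\game.part1.rar")) // hypothetical path
            {
                System.Console.WriteLine("solid: " + archive.IsSolid);
                System.Console.WriteLine("volumes: " + archive.Volumes.Count);
            }
        }
    }
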
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntry.cs
new file mode 100644
index 0000000000..3f2360b2b8
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntry.cs
@@ -0,0 +1,89 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.Rar;
+using SharpCompress.Common.Rar.Headers;
+using SharpCompress.Compressors.Rar;
+
+namespace SharpCompress.Archives.Rar
+{
+ public class RarArchiveEntry : RarEntry, IArchiveEntry
+ {
+ private readonly ICollection<RarFilePart> parts;
+ private readonly RarArchive archive;
+
+ internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
+ {
+ this.parts = parts.ToList();
+ this.archive = archive;
+ }
+
+ public override CompressionType CompressionType => CompressionType.Rar;
+
+ public IArchive Archive => archive;
+
+ internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();
+
+ internal override FileHeader FileHeader => parts.First().FileHeader;
+
+ public override long Crc
+ {
+ get
+ {
+ CheckIncomplete();
+ return parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc;
+ }
+ }
+
+ public override long Size
+ {
+ get
+ {
+ CheckIncomplete();
+ return parts.First().FileHeader.UncompressedSize;
+ }
+ }
+
+ public override long CompressedSize
+ {
+ get
+ {
+ CheckIncomplete();
+ return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
+ }
+ }
+
+ public Stream OpenEntryStream()
+ {
+ if (archive.IsSolid)
+ {
+ throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
+ }
+
+ if (IsRarV3)
+ {
+ return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
+ }
+
+ return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
+ }
+
+ public bool IsComplete
+ {
+ get
+ {
+ return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
+ }
+ }
+
+ private void CheckIncomplete()
+ {
+ if (!IsComplete)
+ {
+ throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs
new file mode 100644
index 0000000000..e41c024dcd
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs
@@ -0,0 +1,47 @@
+using System.Collections.Generic;
+using SharpCompress.Common.Rar;
+
+namespace SharpCompress.Archives.Rar
+{
+ internal static class RarArchiveEntryFactory
+ {
+ private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
+ {
+ foreach (RarVolume rarPart in parts)
+ {
+ foreach (RarFilePart fp in rarPart.ReadFileParts())
+ {
+ yield return fp;
+ }
+ }
+ }
+
+ private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(IEnumerable<RarVolume> parts)
+ {
+ var groupedParts = new List<RarFilePart>();
+ foreach (RarFilePart fp in GetFileParts(parts))
+ {
+ groupedParts.Add(fp);
+
+ if (!fp.FileHeader.IsSplitAfter)
+ {
+ yield return groupedParts;
+ groupedParts = new List<RarFilePart>();
+ }
+ }
+ if (groupedParts.Count > 0)
+ {
+ yield return groupedParts;
+ }
+ }
+
+ internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
+ IEnumerable<RarVolume> rarParts)
+ {
+ foreach (var groupedParts in GetMatchedFileParts(rarParts))
+ {
+ yield return new RarArchiveEntry(archive, groupedParts);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs
new file mode 100644
index 0000000000..57eb3c694f
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs
@@ -0,0 +1,147 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common.Rar;
+using SharpCompress.Readers;
+#if !NO_FILE
+using System.Linq;
+using System.Text;
+using SharpCompress.Common.Rar.Headers;
+#endif
+
+namespace SharpCompress.Archives.Rar
+{
+ internal static class RarArchiveVolumeFactory
+ {
+ internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
+ {
+ foreach (Stream s in streams)
+ {
+ if (!s.CanRead || !s.CanSeek)
+ {
+ throw new ArgumentException("Stream is not readable and seekable");
+ }
+ StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
+ yield return part;
+ }
+ }
+
+#if !NO_FILE
+ internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
+ {
+ FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
+ yield return part;
+
+ ArchiveHeader ah = part.ArchiveHeader;
+ if (!ah.IsVolume)
+ {
+ yield break; //if file isn't volume then there is no reason to look
+ }
+ fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
+ //we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
+ while (fileInfo != null && fileInfo.Exists)
+ {
+ part = new FileInfoRarArchiveVolume(fileInfo, options);
+
+ fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
+ yield return part;
+ }
+ }
+
+ private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
+ {
+ if (currentFilePart == null)
+ {
+ return null;
+ }
+ bool oldNumbering = ah.OldNumberingFormat
+ || currentFilePart.MarkHeader.OldNumberingFormat;
+ if (oldNumbering)
+ {
+ return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
+ }
+ else
+ {
+ return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
+ }
+ }
+
+ private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
+ {
+ // .rar, .r00, .r01, ...
+ string extension = currentFileInfo.Extension;
+
+ StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
+ buffer.Append(currentFileInfo.FullName.Substring(0,
+ currentFileInfo.FullName.Length - extension.Length));
+ if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
+ {
+ buffer.Append(".r00");
+ }
+ else
+ {
+ int num = 0;
+ if (int.TryParse(extension.Substring(2, 2), out num))
+ {
+ num++;
+ buffer.Append(".r");
+ if (num < 10)
+ {
+ buffer.Append('0');
+ }
+ buffer.Append(num);
+ }
+ else
+ {
+ ThrowInvalidFileName(currentFileInfo);
+ }
+ }
+ return new FileInfo(buffer.ToString());
+ }
+
+ private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
+ {
+ // part1.rar, part2.rar, ...
+ string extension = currentFileInfo.Extension;
+ if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
+ {
+ throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
+ }
+ int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
+ if (startIndex < 0)
+ {
+ ThrowInvalidFileName(currentFileInfo);
+ }
+ StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
+ buffer.Append(currentFileInfo.FullName, 0, startIndex);
+ int num = 0;
+ string numString = currentFileInfo.FullName.Substring(startIndex + 5,
+ currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
+ startIndex - 5);
+ buffer.Append(".part");
+ if (int.TryParse(numString, out num))
+ {
+ num++;
+ for (int i = 0; i < numString.Length - num.ToString().Length; i++)
+ {
+ buffer.Append('0');
+ }
+ buffer.Append(num);
+ }
+ else
+ {
+ ThrowInvalidFileName(currentFileInfo);
+ }
+ buffer.Append(".rar");
+ return new FileInfo(buffer.ToString());
+ }
+
+ private static void ThrowInvalidFileName(FileInfo fileInfo)
+ {
+ throw new ArgumentException("Filename invalid or next archive could not be found:"
+ + fileInfo.FullName);
+ }
+
+#endif
+ }
+}
\ No newline at end of file
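
As a worked example of the two schemes the factory walks: old numbering advances archive.rar to archive.r00, then archive.r01, archive.r02, and so on; new numbering advances archive.part1.rar to archive.part2.rar and keeps zero padding, so archive.part09.rar is followed by archive.part10.rar. GetParts stops at the first candidate name that does not exist on disk.
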
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/SeekableFilePart.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/SeekableFilePart.cs
new file mode 100644
index 0000000000..b7d3affcb9
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/SeekableFilePart.cs
@@ -0,0 +1,33 @@
+using System.IO;
+using SharpCompress.Common.Rar;
+using SharpCompress.Common.Rar.Headers;
+
+namespace SharpCompress.Archives.Rar
+{
+ internal class SeekableFilePart : RarFilePart
+ {
+ private readonly Stream stream;
+ private readonly string password;
+
+ internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
+ : base(mh, fh)
+ {
+ this.stream = stream;
+ this.password = password;
+ }
+
+ internal override Stream GetCompressedStream()
+ {
+ stream.Position = FileHeader.DataStartPosition;
+#if !NO_CRYPTO
+ if (FileHeader.R4Salt != null)
+ {
+ return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
+ }
+#endif
+ return stream;
+ }
+
+ internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs b/BizHawk.Client.Common/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs
new file mode 100644
index 0000000000..92602ae9a5
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs
@@ -0,0 +1,27 @@
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common.Rar;
+using SharpCompress.Common.Rar.Headers;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives.Rar
+{
+ internal class StreamRarArchiveVolume : RarVolume
+ {
+ internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
+ : base(StreamingMode.Seekable, stream, options)
+ {
+ }
+
+ internal override IEnumerable<RarFilePart> ReadFileParts()
+ {
+ return GetVolumeFileParts();
+ }
+
+ internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
+ {
+ return new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
new file mode 100644
index 0000000000..9cf6414281
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
@@ -0,0 +1,226 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.SevenZip;
+using SharpCompress.Compressors.LZMA.Utilites;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Archives.SevenZip
+{
+ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
+ {
+ private ArchiveDatabase database;
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="readerOptions"></param>
+ public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
+ }
+#endif
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
+ {
+ stream.CheckNotNull("stream");
+ return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
+ }
+
+#if !NO_FILE
+ internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
+ : base(ArchiveType.SevenZip, fileInfo, readerOptions)
+ {
+ }
+
+ protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
+ {
+ return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
+ }
+
+ public static bool IsSevenZipFile(string filePath)
+ {
+ return IsSevenZipFile(new FileInfo(filePath));
+ }
+
+ public static bool IsSevenZipFile(FileInfo fileInfo)
+ {
+ if (!fileInfo.Exists)
+ {
+ return false;
+ }
+ using (Stream stream = fileInfo.OpenRead())
+ {
+ return IsSevenZipFile(stream);
+ }
+ }
+#endif
+
+ internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
+ : base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
+ {
+ }
+
+ internal SevenZipArchive()
+ : base(ArchiveType.SevenZip)
+ {
+ }
+
+ protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
+ {
+ foreach (Stream s in streams)
+ {
+ if (!s.CanRead || !s.CanSeek)
+ {
+ throw new ArgumentException("Stream is not readable and seekable");
+ }
+ SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
+ yield return volume;
+ }
+ }
+
+ protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
+ {
+ var stream = volumes.Single().Stream;
+ LoadFactory(stream);
+ for (int i = 0; i < database._files.Count; i++)
+ {
+ var file = database._files[i];
+ yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
+ }
+ }
+
+ private void LoadFactory(Stream stream)
+ {
+ if (database == null)
+ {
+ stream.Position = 0;
+ var reader = new ArchiveReader();
+ reader.Open(stream);
+ database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
+ }
+ }
+
+ public static bool IsSevenZipFile(Stream stream)
+ {
+ try
+ {
+ return SignatureMatch(stream);
+ }
+ catch
+ {
+ return false;
+ }
+ }
+
+ private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
+
+ private static bool SignatureMatch(Stream stream)
+ {
+ BinaryReader reader = new BinaryReader(stream);
+ byte[] signatureBytes = reader.ReadBytes(6);
+ return signatureBytes.BinaryEquals(SIGNATURE);
+ }
+
+ protected override IReader CreateReaderForSolidExtraction()
+ {
+ return new SevenZipReader(ReaderOptions, this);
+ }
+
+ public override bool IsSolid { get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; } }
+
+ public override long TotalSize
+ {
+ get
+ {
+ int i = Entries.Count; // touching Entries forces the lazy archive database to load before summing pack sizes
+ return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
+ }
+ }
+
+ private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
+ {
+ private readonly SevenZipArchive archive;
+ private CFolder currentFolder;
+ private Stream currentStream;
+ private CFileItem currentItem;
+
+ internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
+ : base(readerOptions, ArchiveType.SevenZip)
+ {
+ this.archive = archive;
+ }
+
+ public override SevenZipVolume Volume => archive.Volumes.Single();
+
+ protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
+ {
+ List<SevenZipArchiveEntry> entries = archive.Entries.ToList();
+ stream.Position = 0;
+ foreach (var dir in entries.Where(x => x.IsDirectory))
+ {
+ yield return dir;
+ }
+ foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
+ {
+ currentFolder = group.Key;
+ if (group.Key == null)
+ {
+ currentStream = Stream.Null;
+ }
+ else
+ {
+ currentStream = archive.database.GetFolderStream(stream, currentFolder, new PasswordProvider(Options.Password));
+ }
+ foreach (var entry in group)
+ {
+ currentItem = entry.FilePart.Header;
+ yield return entry;
+ }
+ }
+ }
+
+ protected override EntryStream GetEntryStream()
+ {
+ return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
+ }
+ }
+
+ private class PasswordProvider : IPasswordProvider
+ {
+ private readonly string _password;
+
+ public PasswordProvider(string password)
+ {
+ _password = password;
+ }
+
+ public string CryptoGetTextPassword()
+ {
+ return _password;
+ }
+ }
+ }
+}
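
IsSolid here reports true only when entries span more than one 7z folder (compression block); random access through OpenEntryStream effectively re-reads an entry's containing folder, while the SevenZipReader above walks each folder once. A linear-extraction sketch, path hypothetical:

    using SharpCompress.Archives.SevenZip;

    class SevenZipSketch
    {
        static void Main()
        {
            using (var archive = SevenZipArchive.Open(@"sets\collection.7z")) // hypothetical path
            using (var reader = archive.ExtractAllEntries())
            {
                while (reader.MoveToNextEntry())
                {
                    if (!reader.Entry.IsDirectory)
                    {
                        System.Console.WriteLine(reader.Entry.Key);
                    }
                }
            }
        }
    }
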
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs
new file mode 100644
index 0000000000..ea80b5cbfd
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs
@@ -0,0 +1,28 @@
+using System.IO;
+using SharpCompress.Common.SevenZip;
+
+namespace SharpCompress.Archives.SevenZip
+{
+ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
+ {
+ internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
+ : base(part)
+ {
+ Archive = archive;
+ }
+
+ public Stream OpenEntryStream()
+ {
+ return FilePart.GetCompressedStream();
+ }
+
+ public IArchive Archive { get; }
+
+ public bool IsComplete => true;
+
+ /// <summary>
+ /// This is a 7Zip Anti item
+ /// </summary>
+ public bool IsAnti => FilePart.Header.IsAnti;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchive.cs
new file mode 100644
index 0000000000..2ac7ce446c
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchive.cs
@@ -0,0 +1,206 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.Tar;
+using SharpCompress.Common.Tar.Headers;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Tar;
+using SharpCompress.Writers;
+using SharpCompress.Writers.Tar;
+
+namespace SharpCompress.Archives.Tar
+{
+ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
+ {
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="readerOptions"></param>
+ public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
+ }
+#endif
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
+ {
+ stream.CheckNotNull("stream");
+ return new TarArchive(stream, readerOptions ?? new ReaderOptions());
+ }
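+
+ // Illustrative use of the stream overload (a sketch; "rom.tar" is a hypothetical path):
+ //
+ //     using (var archive = TarArchive.Open(File.OpenRead("rom.tar")))
+ //     foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
+ //     {
+ //         using (var s = entry.OpenEntryStream()) { /* consume the entry */ }
+ //     }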
+
+#if !NO_FILE
+
+ public static bool IsTarFile(string filePath)
+ {
+ return IsTarFile(new FileInfo(filePath));
+ }
+
+ public static bool IsTarFile(FileInfo fileInfo)
+ {
+ if (!fileInfo.Exists)
+ {
+ return false;
+ }
+ using (Stream stream = fileInfo.OpenRead())
+ {
+ return IsTarFile(stream);
+ }
+ }
+#endif
+
+ public static bool IsTarFile(Stream stream)
+ {
+ try
+ {
+ TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
+ bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
+ bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
+ return readSucceeded || isEmptyArchive;
+ }
+ catch
+ {
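+ // swallowed deliberately: a failed header read simply means the stream is not a tar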
+ }
+ return false;
+ }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
+ : base(ArchiveType.Tar, fileInfo, readerOptions)
+ {
+ }
+
+ protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
+ {
+ return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
+ }
+#endif
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ internal TarArchive(Stream stream, ReaderOptions readerOptions)
+ : base(ArchiveType.Tar, stream, readerOptions)
+ {
+ }
+
+ internal TarArchive()
+ : base(ArchiveType.Tar)
+ {
+ }
+
+ protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
+ {
+ return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
+ }
+
+ protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
+ {
+ Stream stream = volumes.Single().Stream;
+ TarHeader previousHeader = null;
+ foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
+ {
+ if (header != null)
+ {
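+ // a GNU tar 'LongName' pseudo-entry carries the real name (longer than the classic
+ // 100-byte field) of the entry that follows it; stash it and patch the next header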
+ if (header.EntryType == EntryType.LongName)
+ {
+ previousHeader = header;
+ }
+ else
+ {
+ if (previousHeader != null)
+ {
+ var entry = new TarArchiveEntry(this, new TarFilePart(previousHeader, stream),
+ CompressionType.None);
+
+ var oldStreamPos = stream.Position;
+
+ using (var entryStream = entry.OpenEntryStream())
+ {
+ using (var memoryStream = new MemoryStream())
+ {
+ entryStream.TransferTo(memoryStream);
+ memoryStream.Position = 0;
+ var bytes = memoryStream.ToArray();
+
+ header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
+ }
+ }
+
+ stream.Position = oldStreamPos;
+
+ previousHeader = null;
+ }
+ yield return new TarArchiveEntry(this, new TarFilePart(header, stream), CompressionType.None);
+ }
+ }
+ }
+ }
+
+ public static TarArchive Create()
+ {
+ return new TarArchive();
+ }
+
+ protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
+ long size, DateTime? modified, bool closeStream)
+ {
+ return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
+ closeStream);
+ }
+
+ protected override void SaveTo(Stream stream, WriterOptions options,
+ IEnumerable oldEntries,
+ IEnumerable newEntries)
+ {
+ using (var writer = new TarWriter(stream, new TarWriterOptions(options)))
+ {
+ foreach (var entry in oldEntries.Concat(newEntries)
+ .Where(x => !x.IsDirectory))
+ {
+ using (var entryStream = entry.OpenEntryStream())
+ {
+ writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
+ }
+ }
+ }
+ }
+
+ protected override IReader CreateReaderForSolidExtraction()
+ {
+ var stream = Volumes.Single().Stream;
+ stream.Position = 0;
+ return TarReader.Open(stream);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchiveEntry.cs
new file mode 100644
index 0000000000..51a0a49bfb
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarArchiveEntry.cs
@@ -0,0 +1,29 @@
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.Tar;
+
+namespace SharpCompress.Archives.Tar
+{
+ public class TarArchiveEntry : TarEntry, IArchiveEntry
+ {
+ internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
+ : base(part, compressionType)
+ {
+ Archive = archive;
+ }
+
+ public virtual Stream OpenEntryStream()
+ {
+ return Parts.Single().GetCompressedStream();
+ }
+
+ #region IArchiveEntry Members
+
+ public IArchive Archive { get; }
+
+ public bool IsComplete => true;
+
+ #endregion
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs
new file mode 100644
index 0000000000..8e693d523c
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs
@@ -0,0 +1,65 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common;
+using SharpCompress.IO;
+
+namespace SharpCompress.Archives.Tar
+{
+ internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
+ {
+ private readonly bool closeStream;
+ private readonly Stream stream;
+
+ internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
+ string path, long size, DateTime? lastModified, bool closeStream)
+ : base(archive, null, compressionType)
+ {
+ this.stream = stream;
+ Key = path;
+ Size = size;
+ LastModifiedTime = lastModified;
+ this.closeStream = closeStream;
+ }
+
+ public override long Crc => 0;
+
+ public override string Key { get; }
+
+ public override long CompressedSize => 0;
+
+ public override long Size { get; }
+
+ public override DateTime? LastModifiedTime { get; }
+
+ public override DateTime? CreatedTime => null;
+
+ public override DateTime? LastAccessedTime => null;
+
+ public override DateTime? ArchivedTime => null;
+
+ public override bool IsEncrypted => false;
+
+ public override bool IsDirectory => false;
+
+ public override bool IsSplitAfter => false;
+
+ internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
+
+ Stream IWritableArchiveEntry.Stream => stream;
+
+ public override Stream OpenEntryStream()
+ {
+ // rewind to the start: the entry stream may be opened more than once
+ stream.Seek(0, SeekOrigin.Begin);
+ return new NonDisposingStream(stream);
+ }
+
+ internal override void Close()
+ {
+ if (closeStream)
+ {
+ stream.Dispose();
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchive.cs b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchive.cs
new file mode 100644
index 0000000000..f0889668c4
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchive.cs
@@ -0,0 +1,214 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common;
+using SharpCompress.Common.Zip;
+using SharpCompress.Common.Zip.Headers;
+using SharpCompress.Compressors.Deflate;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Zip;
+using SharpCompress.Writers;
+using SharpCompress.Writers.Zip;
+
+namespace SharpCompress.Archives.Zip
+{
+ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
+ {
+ private readonly SeekableZipHeaderFactory headerFactory;
+
+ /// <summary>
+ /// Gets or sets the compression level applied to files added to the archive,
+ /// if the compression method is set to deflate
+ /// </summary>
+ public CompressionLevel DeflateCompressionLevel { get; set; }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor expects a filepath to an existing file.
+ /// </summary>
+ /// <param name="filePath"></param>
+ /// <param name="readerOptions"></param>
+ public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
+ {
+ filePath.CheckNotNullOrEmpty("filePath");
+ return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
+ }
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
+ {
+ fileInfo.CheckNotNull("fileInfo");
+ return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
+ }
+#endif
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
+ {
+ stream.CheckNotNull("stream");
+ return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
+ }
+
+#if !NO_FILE
+
+ public static bool IsZipFile(string filePath, string password = null)
+ {
+ return IsZipFile(new FileInfo(filePath), password);
+ }
+
+ public static bool IsZipFile(FileInfo fileInfo, string password = null)
+ {
+ if (!fileInfo.Exists)
+ {
+ return false;
+ }
+ using (Stream stream = fileInfo.OpenRead())
+ {
+ return IsZipFile(stream, password);
+ }
+ }
+#endif
+
+ public static bool IsZipFile(Stream stream, string password = null)
+ {
+ StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
+ try
+ {
+ ZipHeader header =
+ headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
+ if (header == null)
+ {
+ return false;
+ }
+ return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
+ }
+ catch (CryptographicException)
+ {
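+ // header decryption failed, which still identifies the stream as a (password-protected) zip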
+ return true;
+ }
+ catch
+ {
+ return false;
+ }
+ }
+
+#if !NO_FILE
+
+ /// <summary>
+ /// Constructor with a FileInfo object to an existing file.
+ /// </summary>
+ /// <param name="fileInfo"></param>
+ /// <param name="readerOptions"></param>
+ internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
+ : base(ArchiveType.Zip, fileInfo, readerOptions)
+ {
+ headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
+ }
+
+ protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
+ {
+ return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
+ }
+#endif
+
+ internal ZipArchive()
+ : base(ArchiveType.Zip)
+ {
+ }
+
+ /// <summary>
+ /// Takes a seekable Stream as a source
+ /// </summary>
+ /// <param name="stream"></param>
+ /// <param name="readerOptions"></param>
+ internal ZipArchive(Stream stream, ReaderOptions readerOptions)
+ : base(ArchiveType.Zip, stream, readerOptions)
+ {
+ headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
+ }
+
+ protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
+ {
+ return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
+ }
+
+ protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
+ {
+ var volume = volumes.Single();
+ Stream stream = volume.Stream;
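+ // a seekable zip is enumerated via its central directory, so only DirectoryEntry
+ // and DirectoryEnd headers are expected here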
+ foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
+ {
+ if (h != null)
+ {
+ switch (h.ZipHeaderType)
+ {
+ case ZipHeaderType.DirectoryEntry:
+ {
+ yield return new ZipArchiveEntry(this,
+ new SeekableZipFilePart(headerFactory,
+ h as DirectoryEntryHeader,
+ stream));
+ }
+ break;
+ case ZipHeaderType.DirectoryEnd:
+ {
+ byte[] bytes = (h as DirectoryEndHeader).Comment;
+ volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
+ yield break;
+ }
+ }
+ }
+ }
+ }
+
+ public void SaveTo(Stream stream)
+ {
+ SaveTo(stream, new WriterOptions(CompressionType.Deflate));
+ }
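+
+ // Illustrative round trip (a sketch; AddEntry lives on AbstractWritableArchive and the
+ // stream names are hypothetical):
+ //
+ //     using (var archive = ZipArchive.Create())
+ //     {
+ //         archive.AddEntry("file.txt", sourceStream, closeStream: false);
+ //         archive.SaveTo(outputStream);  // deflate by default, per the overload above
+ //     }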
+
+ protected override void SaveTo(Stream stream, WriterOptions options,
+ IEnumerable oldEntries,
+ IEnumerable newEntries)
+ {
+ using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
+ {
+ foreach (var entry in oldEntries.Concat(newEntries)
+ .Where(x => !x.IsDirectory))
+ {
+ using (var entryStream = entry.OpenEntryStream())
+ {
+ writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
+ }
+ }
+ }
+ }
+
+ protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
+ bool closeStream)
+ {
+ return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
+ }
+
+ public static ZipArchive Create()
+ {
+ return new ZipArchive();
+ }
+
+ protected override IReader CreateReaderForSolidExtraction()
+ {
+ var stream = Volumes.Single().Stream;
+ stream.Position = 0;
+ return ZipReader.Open(stream, ReaderOptions);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchiveEntry.cs
new file mode 100644
index 0000000000..2f1f80f2cc
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipArchiveEntry.cs
@@ -0,0 +1,30 @@
+using System.IO;
+using System.Linq;
+using SharpCompress.Common.Zip;
+
+namespace SharpCompress.Archives.Zip
+{
+ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
+ {
+ internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
+ : base(part)
+ {
+ Archive = archive;
+ }
+
+ public virtual Stream OpenEntryStream()
+ {
+ return Parts.Single().GetCompressedStream();
+ }
+
+ #region IArchiveEntry Members
+
+ public IArchive Archive { get; }
+
+ public bool IsComplete => true;
+
+ #endregion
+
+ public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipWritableArchiveEntry.cs b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipWritableArchiveEntry.cs
new file mode 100644
index 0000000000..4cd1fe6140
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Archives/Zip/ZipWritableArchiveEntry.cs
@@ -0,0 +1,68 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common;
+using SharpCompress.IO;
+
+namespace SharpCompress.Archives.Zip
+{
+ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
+ {
+ private readonly bool closeStream;
+ private readonly Stream stream;
+ private bool isDisposed;
+
+ internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
+ DateTime? lastModified, bool closeStream)
+ : base(archive, null)
+ {
+ this.stream = stream;
+ Key = path;
+ Size = size;
+ LastModifiedTime = lastModified;
+ this.closeStream = closeStream;
+ }
+
+ public override long Crc => 0;
+
+ public override string Key { get; }
+
+ public override long CompressedSize => 0;
+
+ public override long Size { get; }
+
+ public override DateTime? LastModifiedTime { get; }
+
+ public override DateTime? CreatedTime => null;
+
+ public override DateTime? LastAccessedTime => null;
+
+ public override DateTime? ArchivedTime => null;
+
+ public override bool IsEncrypted => false;
+
+ public override bool IsDirectory => false;
+
+ public override bool IsSplitAfter => false;
+
+ internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
+
+ Stream IWritableArchiveEntry.Stream => stream;
+
+ public override Stream OpenEntryStream()
+ {
+ // rewind to the start: the entry stream may be opened more than once
+ stream.Seek(0, SeekOrigin.Begin);
+ return new NonDisposingStream(stream);
+ }
+
+ internal override void Close()
+ {
+ if (closeStream && !isDisposed)
+ {
+ stream.Dispose();
+ isDisposed = true;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Buffers/ArrayPool.cs b/BizHawk.Client.Common/SharpCompress/Buffers/ArrayPool.cs
new file mode 100644
index 0000000000..d81ed4c07b
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Buffers/ArrayPool.cs
@@ -0,0 +1,119 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+#if NETCORE
+using System.Runtime.CompilerServices;
+using System.Threading;
+
+namespace SharpCompress.Buffers
+{
+ /// <summary>
+ /// Provides a resource pool that enables reusing instances of type <typeparamref name="T"/>[].
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// Renting and returning buffers with an <see cref="ArrayPool{T}"/> can increase performance
+ /// in situations where arrays are created and destroyed frequently, resulting in significant
+ /// memory pressure on the garbage collector.
+ /// </para>
+ /// <para>
+ /// This class is thread-safe. All members may be used by multiple threads concurrently.
+ /// </para>
+ /// </remarks>
+ internal abstract class ArrayPool<T>
+ {
+ /// <summary>The lazily-initialized shared pool instance.</summary>
+ private static ArrayPool<T> s_sharedInstance = null;
+
+ /// <summary>
+ /// Retrieves a shared <see cref="ArrayPool{T}"/> instance.
+ /// </summary>
+ /// <remarks>
+ /// The shared pool provides a default implementation of <see cref="ArrayPool{T}"/>
+ /// that's intended for general applicability. It maintains arrays of multiple sizes, and
+ /// may hand back a larger array than was actually requested, but will never hand back a smaller
+ /// array than was requested. Renting a buffer from it with <see cref="Rent"/> will result in an
+ /// existing buffer being taken from the pool if an appropriate buffer is available or in a new
+ /// buffer being allocated if one is not available.
+ /// </remarks>
+ public static ArrayPool<T> Shared
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get { return Volatile.Read(ref s_sharedInstance) ?? EnsureSharedCreated(); }
+ }
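+
+ // Typical rent/return pattern (an illustrative sketch only; Rent may hand back a longer
+ // array than requested, so track the requested length separately):
+ //
+ //     byte[] buffer = ArrayPool<byte>.Shared.Rent(4096);
+ //     try { /* fill and use buffer */ }
+ //     finally { ArrayPool<byte>.Shared.Return(buffer); }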
+
+ /// <summary>Ensures that <see cref="s_sharedInstance"/> has been initialized to a pool and returns it.</summary>
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static ArrayPool<T> EnsureSharedCreated()
+ {
+ Interlocked.CompareExchange(ref s_sharedInstance, Create(), null);
+ return s_sharedInstance;
+ }
+
+ /// <summary>
+ /// Creates a new <see cref="ArrayPool{T}"/> instance using default configuration options.
+ /// </summary>
+ /// <returns>A new <see cref="ArrayPool{T}"/> instance.</returns>
+ public static ArrayPool<T> Create()
+ {
+ return new DefaultArrayPool<T>();
+ }
+
+ /// <summary>
+ /// Creates a new <see cref="ArrayPool{T}"/> instance using custom configuration options.
+ /// </summary>
+ /// <param name="maxArrayLength">The maximum length of array instances that may be stored in the pool.</param>
+ /// <param name="maxArraysPerBucket">
+ /// The maximum number of array instances that may be stored in each bucket in the pool. The pool
+ /// groups arrays of similar lengths into buckets for faster access.
+ /// </param>
+ /// <returns>A new <see cref="ArrayPool{T}"/> instance with the specified configuration options.</returns>
+ /// <remarks>
+ /// The created pool will group arrays into buckets, with no more than <paramref name="maxArraysPerBucket"/>
+ /// in each bucket and with those arrays not exceeding <paramref name="maxArrayLength"/> in length.
+ /// </remarks>
+ public static ArrayPool<T> Create(int maxArrayLength, int maxArraysPerBucket)
+ {
+ return new DefaultArrayPool<T>(maxArrayLength, maxArraysPerBucket);
+ }
+
+ /// <summary>
+ /// Retrieves a buffer that is at least the requested length.
+ /// </summary>
+ /// <param name="minimumLength">The minimum length of the array needed.</param>
+ /// <returns>
+ /// An array of type <typeparamref name="T"/>[] that is at least <paramref name="minimumLength"/> in length.
+ /// </returns>
+ /// <remarks>
+ /// This buffer is loaned to the caller and should be returned to the same pool via
+ /// <see cref="Return"/> so that it may be reused in subsequent usage of <see cref="Rent"/>.
+ /// It is not a fatal error to not return a rented buffer, but failure to do so may lead to
+ /// decreased application performance, as the pool may need to create a new buffer to replace
+ /// the one lost.
+ /// </remarks>
+ public abstract T[] Rent(int minimumLength);
+
+ /// <summary>
+ /// Returns to the pool an array that was previously obtained via <see cref="Rent"/> on the same
+ /// <see cref="ArrayPool{T}"/> instance.
+ /// </summary>
+ /// <param name="array">
+ /// The buffer previously obtained from <see cref="Rent"/> to return to the pool.
+ /// </param>
+ /// <param name="clearArray">
+ /// If true and if the pool will store the buffer to enable subsequent reuse,
+ /// <see cref="Return"/> will clear <paramref name="array"/> of its contents so that a subsequent consumer via
+ /// <see cref="Rent"/> will not see the previous consumer's content. If false or if the pool will release the buffer,
+ /// the array's contents are left unchanged.
+ /// </param>
+ /// <remarks>
+ /// Once a buffer has been returned to the pool, the caller gives up all ownership of the buffer
+ /// and must not use it. The reference returned from a given call to <see cref="Rent"/> must only be
+ /// returned via <see cref="Return"/> once. The default <see cref="ArrayPool{T}"/>
+ /// may hold onto the returned buffer in order to rent it again, or it may release the returned buffer
+ /// if it's determined that the pool already has enough buffers stored.
+ /// </remarks>
+ public abstract void Return(T[] array, bool clearArray = false);
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPool.cs b/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPool.cs
new file mode 100644
index 0000000000..43cd2c37e8
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPool.cs
@@ -0,0 +1,144 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+#if NETCORE
+using System;
+
+namespace SharpCompress.Buffers
+{
+ internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
+ {
+ /// <summary>The default maximum length of each array in the pool (2^20).</summary>
+ private const int DefaultMaxArrayLength = 1024 * 1024;
+ /// <summary>The default maximum number of arrays per bucket that are available for rent.</summary>
+ private const int DefaultMaxNumberOfArraysPerBucket = 50;
+ /// <summary>Lazily-allocated empty array used when arrays of length 0 are requested.</summary>
+ private static T[] s_emptyArray; // we support contracts earlier than those with Array.Empty<T>()
+
+ private readonly Bucket[] _buckets;
+
+ internal DefaultArrayPool() : this(DefaultMaxArrayLength, DefaultMaxNumberOfArraysPerBucket)
+ {
+ }
+
+ internal DefaultArrayPool(int maxArrayLength, int maxArraysPerBucket)
+ {
+ if (maxArrayLength <= 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(maxArrayLength));
+ }
+ if (maxArraysPerBucket <= 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(maxArraysPerBucket));
+ }
+
+ // Our bucketing algorithm has a min length of 2^4 and a max length of 2^30.
+ // Constrain the actual max used to those values.
+ const int MinimumArrayLength = 0x10, MaximumArrayLength = 0x40000000;
+ if (maxArrayLength > MaximumArrayLength)
+ {
+ maxArrayLength = MaximumArrayLength;
+ }
+ else if (maxArrayLength < MinimumArrayLength)
+ {
+ maxArrayLength = MinimumArrayLength;
+ }
+
+ // Create the buckets.
+ int poolId = Id;
+ int maxBuckets = Utilities.SelectBucketIndex(maxArrayLength);
+ var buckets = new Bucket[maxBuckets + 1];
+ for (int i = 0; i < buckets.Length; i++)
+ {
+ buckets[i] = new Bucket(Utilities.GetMaxSizeForBucket(i), maxArraysPerBucket, poolId);
+ }
+ _buckets = buckets;
+ }
+
+ /// <summary>Gets an ID for the pool to use with events.</summary>
+ private int Id => GetHashCode();
+
+ public override T[] Rent(int minimumLength)
+ {
+ // Arrays can't be smaller than zero. We allow requesting zero-length arrays (even though
+ // pooling such an array isn't valuable) as it's a valid length array, and we want the pool
+ // to be usable in general instead of using `new`, even for computed lengths.
+ if (minimumLength < 0)
+ {
+ throw new ArgumentOutOfRangeException(nameof(minimumLength));
+ }
+ else if (minimumLength == 0)
+ {
+ // No need for events with the empty array. Our pool is effectively infinite
+ // and we'll never allocate for rents and never store for returns.
+ return s_emptyArray ?? (s_emptyArray = new T[0]);
+ }
+
+ T[] buffer = null;
+
+ int index = Utilities.SelectBucketIndex(minimumLength);
+ if (index < _buckets.Length)
+ {
+ // Search for an array starting at the 'index' bucket. If the bucket is empty, bump up to the
+ // next higher bucket and try that one, but only try at most a few buckets.
+ const int MaxBucketsToTry = 2;
+ int i = index;
+ do
+ {
+ // Attempt to rent from the bucket. If we get a buffer from it, return it.
+ buffer = _buckets[i].Rent();
+ if (buffer != null)
+ {
+ return buffer;
+ }
+ }
+ while (++i < _buckets.Length && i != index + MaxBucketsToTry);
+
+ // The pool was exhausted for this buffer size. Allocate a new buffer with a size corresponding
+ // to the appropriate bucket.
+ buffer = new T[_buckets[index]._bufferLength];
+ }
+ else
+ {
+ // The request was for a size too large for the pool. Allocate an array of exactly the requested length.
+ // When it's returned to the pool, we'll simply throw it away.
+ buffer = new T[minimumLength];
+ }
+
+ return buffer;
+ }
+
+ public override void Return(T[] array, bool clearArray = false)
+ {
+ if (array == null)
+ {
+ throw new ArgumentNullException(nameof(array));
+ }
+ else if (array.Length == 0)
+ {
+ // Ignore empty arrays. When a zero-length array is rented, we return a singleton
+ // rather than actually taking a buffer out of the lowest bucket.
+ return;
+ }
+
+ // Determine with what bucket this array length is associated
+ int bucket = Utilities.SelectBucketIndex(array.Length);
+
+ // If we can tell that the buffer was allocated, drop it. Otherwise, check if we have space in the pool
+ if (bucket < _buckets.Length)
+ {
+ // Clear the array if the user requests
+ if (clearArray)
+ {
+ Array.Clear(array, 0, array.Length);
+ }
+
+ // Return the buffer to its bucket. In the future, we might consider having Return return false
+ // instead of dropping a bucket, in which case we could try to return to a lower-sized bucket,
+ // just as how in Rent we allow renting from a higher-sized bucket.
+ _buckets[bucket].Return(array);
+ }
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPoolBucket.cs b/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPoolBucket.cs
new file mode 100644
index 0000000000..3012488911
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Buffers/DefaultArrayPoolBucket.cs
@@ -0,0 +1,111 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+#if NETCORE
+using System;
+using System.Diagnostics;
+using System.Threading;
+
+namespace SharpCompress.Buffers
+{
+ internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
+ {
+ /// <summary>Provides a thread-safe bucket containing buffers that can be Rent'd and Return'd.</summary>
+ private sealed class Bucket
+ {
+ internal readonly int _bufferLength;
+ private readonly T[][] _buffers;
+ private readonly int _poolId;
+
+ private SpinLock _lock; // do not make this readonly; it's a mutable struct
+ private int _index;
+
+ /// <summary>
+ /// Creates the pool with numberOfBuffers arrays where each buffer is of bufferLength length.
+ /// </summary>
+ internal Bucket(int bufferLength, int numberOfBuffers, int poolId)
+ {
+ _lock = new SpinLock(Debugger.IsAttached); // only enable thread tracking if debugger is attached; it adds non-trivial overheads to Enter/Exit
+ _buffers = new T[numberOfBuffers][];
+ _bufferLength = bufferLength;
+ _poolId = poolId;
+ }
+
+ /// <summary>Gets an ID for the bucket to use with events.</summary>
+ internal int Id => GetHashCode();
+
+ /// <summary>Takes an array from the bucket. If the bucket is empty, returns null.</summary>
+ internal T[] Rent()
+ {
+ T[][] buffers = _buffers;
+ T[] buffer = null;
+
+ // While holding the lock, grab whatever is at the next available index and
+ // update the index. We do as little work as possible while holding the spin
+ // lock to minimize contention with other threads. The try/finally is
+ // necessary to properly handle thread aborts on platforms which have them.
+ bool lockTaken = false, allocateBuffer = false;
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index < buffers.Length)
+ {
+ buffer = buffers[_index];
+ buffers[_index++] = null;
+ allocateBuffer = buffer == null;
+ }
+ }
+ finally
+ {
+ if (lockTaken) _lock.Exit(false);
+ }
+
+ // While we were holding the lock, we grabbed whatever was at the next available index, if
+ // there was one. If we tried and if we got back null, that means we hadn't yet allocated
+ // for that slot, in which case we should do so now.
+ if (allocateBuffer)
+ {
+ buffer = new T[_bufferLength];
+ }
+
+ return buffer;
+ }
+
+ /// <summary>
+ /// Attempts to return the buffer to the bucket. If the bucket is full, the buffer is simply
+ /// dropped; a buffer whose length does not match this bucket's is rejected with an exception.
+ /// </summary>
+ internal void Return(T[] array)
+ {
+ // Check to see if the buffer is the correct size for this bucket
+ if (array.Length != _bufferLength)
+ {
+ throw new ArgumentException("Buffer not from pool", nameof(array));
+ }
+
+ // While holding the spin lock, if there's room available in the bucket,
+ // put the buffer into the next available slot. Otherwise, we just drop it.
+ // The try/finally is necessary to properly handle thread aborts on platforms
+ // which have them.
+ bool lockTaken = false;
+ try
+ {
+ _lock.Enter(ref lockTaken);
+
+ if (_index != 0)
+ {
+ _buffers[--_index] = array;
+ }
+ }
+ finally
+ {
+ if (lockTaken) _lock.Exit(false);
+ }
+ }
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Buffers/Utilities.cs b/BizHawk.Client.Common/SharpCompress/Buffers/Utilities.cs
new file mode 100644
index 0000000000..f4100e37d5
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Buffers/Utilities.cs
@@ -0,0 +1,38 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+#if NETCORE
+using System.Diagnostics;
+using System.Runtime.CompilerServices;
+
+namespace SharpCompress.Buffers
+{
+ internal static class Utilities
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal static int SelectBucketIndex(int bufferSize)
+ {
+ Debug.Assert(bufferSize > 0);
+
+ uint bitsRemaining = ((uint)bufferSize - 1) >> 4;
+
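+ // Buckets are sized in powers of two starting at 16 (bucket 0 -> 16, bucket 1 -> 32, ...),
+ // so the cascade below computes ceil(log2(bufferSize / 16)) without a loop.
+ // Worked example: bufferSize = 3500 gives bitsRemaining = 218; the shifts accumulate
+ // poolIndex = 7 and the leftover bit adds 1, selecting bucket 8 (GetMaxSizeForBucket(8) = 4096).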
+ int poolIndex = 0;
+ if (bitsRemaining > 0xFFFF) { bitsRemaining >>= 16; poolIndex = 16; }
+ if (bitsRemaining > 0xFF) { bitsRemaining >>= 8; poolIndex += 8; }
+ if (bitsRemaining > 0xF) { bitsRemaining >>= 4; poolIndex += 4; }
+ if (bitsRemaining > 0x3) { bitsRemaining >>= 2; poolIndex += 2; }
+ if (bitsRemaining > 0x1) { bitsRemaining >>= 1; poolIndex += 1; }
+
+ return poolIndex + (int)bitsRemaining;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal static int GetMaxSizeForBucket(int binIndex)
+ {
+ int maxSize = 16 << binIndex;
+ Debug.Assert(maxSize >= 0);
+ return maxSize;
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ArchiveEncoding.cs b/BizHawk.Client.Common/SharpCompress/Common/ArchiveEncoding.cs
new file mode 100644
index 0000000000..e546503068
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ArchiveEncoding.cs
@@ -0,0 +1,77 @@
+using System;
+using System.Text;
+
+namespace SharpCompress.Common
+{
+ public class ArchiveEncoding
+ {
+ /// <summary>
+ /// Default encoding to use when archive format doesn't specify one.
+ /// </summary>
+ public Encoding Default { get; set; }
+
+ /// <summary>
+ /// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
+ /// </summary>
+ public Encoding Password { get; set; }
+
+ /// <summary>
+ /// Set this encoding when you want to force it for all encoding operations.
+ /// </summary>
+ public Encoding Forced { get; set; }
+
+ /// <summary>
+ /// Set this when you want to use a custom method for all decoding operations.
+ /// </summary>
+ /// <returns>string Func(bytes, index, length)</returns>
+ public Func<byte[], int, int, string> CustomDecoder { get; set; }
+
+ public ArchiveEncoding()
+ {
+#if NETSTANDARD1_0
+ Default = Encoding.GetEncoding("cp437");
+ Password = Encoding.GetEncoding("cp437");
+#else
+ Default = Encoding.GetEncoding(437);
+ Password = Encoding.GetEncoding(437);
+#endif
+ }
+
+#if NETSTANDARD1_3 || NETSTANDARD2_0
+ static ArchiveEncoding()
+ {
+ Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
+ }
+#endif
+
+ public string Decode(byte[] bytes)
+ {
+ return Decode(bytes, 0, bytes.Length);
+ }
+
+ public string Decode(byte[] bytes, int start, int length)
+ {
+ return GetDecoder().Invoke(bytes, start, length);
+ }
+
+ public string DecodeUTF8(byte[] bytes)
+ {
+ return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
+ }
+
+ public byte[] Encode(string str)
+ {
+ return GetEncoding().GetBytes(str);
+ }
+
+ public Encoding GetEncoding()
+ {
+ return Forced ?? Default ?? Encoding.UTF8;
+ }
+
+ public Func<byte[], int, int, string> GetDecoder()
+ {
+ return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ArchiveException.cs b/BizHawk.Client.Common/SharpCompress/Common/ArchiveException.cs
new file mode 100644
index 0000000000..18207c641f
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ArchiveException.cs
@@ -0,0 +1,12 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class ArchiveException : Exception
+ {
+ public ArchiveException(string message)
+ : base(message)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ArchiveExtractionEventArgs.cs b/BizHawk.Client.Common/SharpCompress/Common/ArchiveExtractionEventArgs.cs
new file mode 100644
index 0000000000..b1c9fc757b
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ArchiveExtractionEventArgs.cs
@@ -0,0 +1,14 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class ArchiveExtractionEventArgs : EventArgs
+ {
+ internal ArchiveExtractionEventArgs(T entry)
+ {
+ Item = entry;
+ }
+
+ public T Item { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ArchiveType.cs b/BizHawk.Client.Common/SharpCompress/Common/ArchiveType.cs
new file mode 100644
index 0000000000..a0d3097a24
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ArchiveType.cs
@@ -0,0 +1,11 @@
+namespace SharpCompress.Common
+{
+ public enum ArchiveType
+ {
+ Rar,
+ Zip,
+ Tar,
+ SevenZip,
+ GZip
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/CompressedBytesReadEventArgs.cs b/BizHawk.Client.Common/SharpCompress/Common/CompressedBytesReadEventArgs.cs
new file mode 100644
index 0000000000..fdae9c4d15
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/CompressedBytesReadEventArgs.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class CompressedBytesReadEventArgs : EventArgs
+ {
+ /// <summary>
+ /// Compressed bytes read for the current entry
+ /// </summary>
+ public long CompressedBytesRead { get; internal set; }
+
+ /// <summary>
+ /// Current file part read for Multipart files (e.g. Rar)
+ /// </summary>
+ public long CurrentFilePartCompressedBytesRead { get; internal set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/CompressionType.cs b/BizHawk.Client.Common/SharpCompress/Common/CompressionType.cs
new file mode 100644
index 0000000000..23ed354fec
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/CompressionType.cs
@@ -0,0 +1,19 @@
+namespace SharpCompress.Common
+{
+ public enum CompressionType
+ {
+ None,
+ GZip,
+ BZip2,
+ PPMd,
+ Deflate,
+ Rar,
+ LZMA,
+ BCJ,
+ BCJ2,
+ LZip,
+ Xz,
+ Unknown,
+ Deflate64
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/CryptographicException.cs b/BizHawk.Client.Common/SharpCompress/Common/CryptographicException.cs
new file mode 100644
index 0000000000..450cd237b7
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/CryptographicException.cs
@@ -0,0 +1,12 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class CryptographicException : Exception
+ {
+ public CryptographicException(string message)
+ : base(message)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Entry.cs b/BizHawk.Client.Common/SharpCompress/Common/Entry.cs
new file mode 100644
index 0000000000..c12cdf6f8c
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Entry.cs
@@ -0,0 +1,91 @@
+using System;
+using System.Collections.Generic;
+
+namespace SharpCompress.Common
+{
+ public abstract class Entry : IEntry
+ {
+ /// <summary>
+ /// The File's 32 bit CRC Hash
+ /// </summary>
+ public abstract long Crc { get; }
+
+ /// <summary>
+ /// The string key of the file internal to the Archive.
+ /// </summary>
+ public abstract string Key { get; }
+
+ /// <summary>
+ /// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
+ /// </summary>
+ public abstract string LinkTarget { get; }
+
+ /// <summary>
+ /// The compressed file size
+ /// </summary>
+ public abstract long CompressedSize { get; }
+
+ /// <summary>
+ /// The compression type
+ /// </summary>
+ public abstract CompressionType CompressionType { get; }
+
+ /// <summary>
+ /// The uncompressed file size
+ /// </summary>
+ public abstract long Size { get; }
+
+ /// <summary>
+ /// The entry last modified time in the archive, if recorded
+ /// </summary>
+ public abstract DateTime? LastModifiedTime { get; }
+
+ /// <summary>
+ /// The entry create time in the archive, if recorded
+ /// </summary>
+ public abstract DateTime? CreatedTime { get; }
+
+ /// <summary>
+ /// The entry last accessed time in the archive, if recorded
+ /// </summary>
+ public abstract DateTime? LastAccessedTime { get; }
+
+ /// <summary>
+ /// The entry time when archived, if recorded
+ /// </summary>
+ public abstract DateTime? ArchivedTime { get; }
+
+ /// <summary>
+ /// Entry is password protected and encrypted and cannot be extracted.
+ /// </summary>
+ public abstract bool IsEncrypted { get; }
+
+ /// <summary>
+ /// Entry is directory.
+ /// </summary>
+ public abstract bool IsDirectory { get; }
+
+ /// <summary>
+ /// Entry is split among multiple volumes
+ /// </summary>
+ public abstract bool IsSplitAfter { get; }
+
+ /// <inheritdoc />
+ public override string ToString()
+ {
+ return Key;
+ }
+
+ internal abstract IEnumerable<FilePart> Parts { get; }
+ internal bool IsSolid { get; set; }
+
+ internal virtual void Close()
+ {
+ }
+
+ /// <summary>
+ /// Entry file attribute.
+ /// </summary>
+ public virtual int? Attrib => throw new NotImplementedException();
+ }
+}
diff --git a/BizHawk.Client.Common/SharpCompress/Common/EntryStream.cs b/BizHawk.Client.Common/SharpCompress/Common/EntryStream.cs
new file mode 100644
index 0000000000..fe7fd3e748
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/EntryStream.cs
@@ -0,0 +1,92 @@
+using System;
+using System.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common
+{
+ public class EntryStream : Stream
+ {
+ private readonly IReader _reader;
+ private readonly Stream _stream;
+ private bool _completed;
+ private bool _isDisposed;
+
+ internal EntryStream(IReader reader, Stream stream)
+ {
+ _reader = reader;
+ _stream = stream;
+ }
+
+ /// <summary>
+ /// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
+ /// </summary>
+ public void SkipEntry()
+ {
+ this.Skip();
+ _completed = true;
+ }
+
+ protected override void Dispose(bool disposing)
+ {
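+ // drain any unread bytes first so the underlying reader is left positioned at the next entry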
+ if (!(_completed || _reader.Cancelled))
+ {
+ SkipEntry();
+ }
+ if (_isDisposed)
+ {
+ return;
+ }
+ _isDisposed = true;
+ base.Dispose(disposing);
+ _stream.Dispose();
+ }
+
+ public override bool CanRead => true;
+
+ public override bool CanSeek => false;
+
+ public override bool CanWrite => false;
+
+ public override void Flush()
+ {
+ }
+
+ public override long Length => _stream.Length;
+
+ public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
+
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ int read = _stream.Read(buffer, offset, count);
+ if (read <= 0)
+ {
+ _completed = true;
+ }
+ return read;
+ }
+
+ public override int ReadByte()
+ {
+ int value = _stream.ReadByte();
+ if (value == -1)
+ {
+ _completed = true;
+ }
+ return value;
+ }
+
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override void SetLength(long value)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ throw new NotSupportedException();
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ExtractionException.cs b/BizHawk.Client.Common/SharpCompress/Common/ExtractionException.cs
new file mode 100644
index 0000000000..be5e688c3a
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ExtractionException.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class ExtractionException : Exception
+ {
+ public ExtractionException(string message)
+ : base(message)
+ {
+ }
+
+ public ExtractionException(string message, Exception inner)
+ : base(message, inner)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ExtractionMethods.cs b/BizHawk.Client.Common/SharpCompress/Common/ExtractionMethods.cs
new file mode 100644
index 0000000000..15efd22010
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ExtractionMethods.cs
@@ -0,0 +1,98 @@
+#if !NO_FILE
+using System;
+using System.IO;
+#endif
+
+namespace SharpCompress.Common
+{
+ internal static class ExtractionMethods
+ {
+
+#if !NO_FILE
+ /// <summary>
+ /// Extract to specific directory, retaining filename
+ /// </summary>
+ public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
+ ExtractionOptions options, Action<string, ExtractionOptions> write)
+ {
+ string destinationFileName;
+ string file = Path.GetFileName(entry.Key);
+ string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
+
+ options = options ?? new ExtractionOptions()
+ {
+ Overwrite = true
+ };
+
+ if (options.ExtractFullPath)
+ {
+ string folder = Path.GetDirectoryName(entry.Key);
+ string destdir = Path.GetFullPath(
+ Path.Combine(fullDestinationDirectoryPath, folder)
+ );
+
+ if (!Directory.Exists(destdir))
+ {
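+ // guard against "Zip Slip": a crafted entry path like ..\..\evil.dll must not escape the destination root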
+ if (!destdir.StartsWith(fullDestinationDirectoryPath))
+ {
+ throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
+ }
+
+ Directory.CreateDirectory(destdir);
+ }
+ destinationFileName = Path.Combine(destdir, file);
+ }
+ else
+ {
+ destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
+ }
+
+ if (!entry.IsDirectory)
+ {
+ destinationFileName = Path.GetFullPath(destinationFileName);
+
+ if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
+ {
+ throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
+ }
+ write(destinationFileName, options);
+ }
+ else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
+ {
+ Directory.CreateDirectory(destinationFileName);
+ }
+ }
+
+ public static void WriteEntryToFile(IEntry entry, string destinationFileName,
+ ExtractionOptions options,
+ Action<string, FileMode> openAndWrite)
+ {
+ if (entry.LinkTarget != null)
+ {
+ if (options.WriteSymbolicLink == null)
+ {
+ throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
+ }
+ options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
+ }
+ else
+ {
+ FileMode fm = FileMode.Create;
+ options = options ?? new ExtractionOptions()
+ {
+ Overwrite = true
+ };
+
+ if (!options.Overwrite)
+ {
+ fm = FileMode.CreateNew;
+ }
+
+ openAndWrite(destinationFileName, fm);
+ entry.PreserveExtractionOptions(destinationFileName, options);
+ }
+ }
+#endif
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ExtractionOptions.cs b/BizHawk.Client.Common/SharpCompress/Common/ExtractionOptions.cs
new file mode 100644
index 0000000000..7f6e1efcc9
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ExtractionOptions.cs
@@ -0,0 +1,34 @@
+namespace SharpCompress.Common
+{
+ public class ExtractionOptions
+ {
+ /// <summary>
+ /// overwrite target if it exists
+ /// </summary>
+ public bool Overwrite { get; set; }
+
+ /// <summary>
+ /// extract with internal directory structure
+ /// </summary>
+ public bool ExtractFullPath { get; set; }
+
+ /// <summary>
+ /// preserve file time
+ /// </summary>
+ public bool PreserveFileTime { get; set; }
+
+ /// <summary>
+ /// preserve windows file attributes
+ /// </summary>
+ public bool PreserveAttributes { get; set; }
+
+ /// <summary>
+ /// Delegate for writing symbolic links to disk.
+ /// sourcePath is where the symlink is created.
+ /// targetPath is what the symlink refers to.
+ /// </summary>
+ public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
+
+ public SymbolicLinkWriterDelegate WriteSymbolicLink;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/FilePart.cs b/BizHawk.Client.Common/SharpCompress/Common/FilePart.cs
new file mode 100644
index 0000000000..85bdc894f6
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/FilePart.cs
@@ -0,0 +1,20 @@
+using System.IO;
+
+namespace SharpCompress.Common
+{
+ public abstract class FilePart
+ {
+ protected FilePart(ArchiveEncoding archiveEncoding)
+ {
+ ArchiveEncoding = archiveEncoding;
+ }
+
+ internal ArchiveEncoding ArchiveEncoding { get; }
+
+ internal abstract string FilePartName { get; }
+
+ internal abstract Stream GetCompressedStream();
+ internal abstract Stream GetRawStream();
+ internal bool Skipped { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs b/BizHawk.Client.Common/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs
new file mode 100644
index 0000000000..913f2093ff
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/FilePartExtractionBeginEventArgs.cs
@@ -0,0 +1,22 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class FilePartExtractionBeginEventArgs : EventArgs
+ {
+ /// <summary>
+ /// File name for the part for the current entry
+ /// </summary>
+ public string Name { get; internal set; }
+
+ /// <summary>
+ /// Uncompressed size of the current entry in the part
+ /// </summary>
+ public long Size { get; internal set; }
+
+ /// <summary>
+ /// Compressed size of the current entry in the part
+ /// </summary>
+ public long CompressedSize { get; internal set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/FlagUtility.cs b/BizHawk.Client.Common/SharpCompress/Common/FlagUtility.cs
new file mode 100644
index 0000000000..8a3aaee1fa
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/FlagUtility.cs
@@ -0,0 +1,108 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ internal static class FlagUtility
+ {
+ /// <summary>
+ /// Returns true if the flag is set on the specified bit field.
+ /// Currently only works with 32-bit bitfields.
+ /// </summary>
+ /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to test</param>
+ /// <returns></returns>
+ public static bool HasFlag<T>(long bitField, T flag)
+ where T : struct
+ {
+ return HasFlag(bitField, Convert.ToInt64(flag));
+ }
+
+ /// <summary>
+ /// Returns true if the flag is set on the specified bit field.
+ /// Currently only works with 32-bit bitfields.
+ /// </summary>
+ /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to test</param>
+ /// <returns></returns>
+ public static bool HasFlag<T>(ulong bitField, T flag)
+ where T : struct
+ {
+ return HasFlag(bitField, Convert.ToUInt64(flag));
+ }
+
+ /// <summary>
+ /// Returns true if the flag is set on the specified bit field.
+ /// Currently only works with 32-bit bitfields.
+ /// </summary>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to test</param>
+ /// <returns></returns>
+ public static bool HasFlag(ulong bitField, ulong flag)
+ {
+ return ((bitField & flag) == flag);
+ }
+
+ public static bool HasFlag(short bitField, short flag)
+ {
+ return ((bitField & flag) == flag);
+ }
+
+ /// <summary>
+ /// Returns true if the flag is set on the specified bit field.
+ /// Currently only works with 32-bit bitfields.
+ /// </summary>
+ /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to test</param>
+ /// <returns></returns>
+ public static bool HasFlag<T>(T bitField, T flag)
+ where T : struct
+ {
+ return HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
+ }
+
+ /// <summary>
+ /// Returns true if the flag is set on the specified bit field.
+ /// Currently only works with 32-bit bitfields.
+ /// </summary>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to test</param>
+ /// <returns></returns>
+ public static bool HasFlag(long bitField, long flag)
+ {
+ return ((bitField & flag) == flag);
+ }
+
+ /// <summary>
+ /// Sets a bit-field to either on or off for the specified flag.
+ /// </summary>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to change</param>
+ /// <param name="on">bool</param>
+ /// <returns>The flagged variable with the flag changed</returns>
+ public static long SetFlag(long bitField, long flag, bool on)
+ {
+ if (on)
+ {
+ return bitField | flag;
+ }
+ return bitField & (~flag);
+ }
+
+ /// <summary>
+ /// Sets a bit-field to either on or off for the specified flag.
+ /// </summary>
+ /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
+ /// <param name="bitField">Flagged variable</param>
+ /// <param name="flag">Flag to change</param>
+ /// <param name="on">bool</param>
+ /// <returns>The flagged variable with the flag changed</returns>
+ public static long SetFlag<T>(T bitField, T flag, bool on)
+ where T : struct
+ {
+ return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipEntry.cs b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipEntry.cs
new file mode 100644
index 0000000000..8b2d3e9378
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipEntry.cs
@@ -0,0 +1,50 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+
+namespace SharpCompress.Common.GZip
+{
+ public class GZipEntry : Entry
+ {
+ private readonly GZipFilePart _filePart;
+
+ internal GZipEntry(GZipFilePart filePart)
+ {
+ _filePart = filePart;
+ }
+
+ public override CompressionType CompressionType => CompressionType.GZip;
+
+ public override long Crc => 0;
+
+ public override string Key => _filePart.FilePartName;
+
+ public override string LinkTarget => null;
+
+ public override long CompressedSize => 0;
+
+ public override long Size => 0;
+
+ public override DateTime? LastModifiedTime => _filePart.DateModified;
+
+ public override DateTime? CreatedTime => null;
+
+ public override DateTime? LastAccessedTime => null;
+
+ public override DateTime? ArchivedTime => null;
+
+ public override bool IsEncrypted => false;
+
+ public override bool IsDirectory => false;
+
+ public override bool IsSplitAfter => false;
+
+ internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
+
+ internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
+ {
+ yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipFilePart.cs b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipFilePart.cs
new file mode 100644
index 0000000000..1f942e7459
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipFilePart.cs
@@ -0,0 +1,120 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Common.Tar.Headers;
+using SharpCompress.Compressors;
+using SharpCompress.Compressors.Deflate;
+using SharpCompress.Converters;
+using System.Text;
+
+namespace SharpCompress.Common.GZip
+{
+ internal class GZipFilePart : FilePart
+ {
+ private string _name;
+ private readonly Stream _stream;
+
+ internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
+ : base(archiveEncoding)
+ {
+ ReadAndValidateGzipHeader(stream);
+ EntryStartPosition = stream.Position;
+ _stream = stream;
+ }
+
+ internal long EntryStartPosition { get; }
+
+ internal DateTime? DateModified { get; private set; }
+
+ internal override string FilePartName => _name;
+
+ internal override Stream GetCompressedStream()
+ {
+ return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
+ }
+
+ internal override Stream GetRawStream()
+ {
+ return _stream;
+ }
+
+ private void ReadAndValidateGzipHeader(Stream stream)
+ {
+ // read the header on the first read
+ byte[] header = new byte[10];
+ int n = stream.Read(header, 0, header.Length);
+
+ // workitem 8501: handle edge case (decompress empty stream)
+ if (n == 0)
+ {
+ return;
+ }
+
+ if (n != 10)
+ {
+ throw new ZlibException("Not a valid GZIP stream.");
+ }
+
+ if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
+ {
+ throw new ZlibException("Bad GZIP header.");
+ }
+
+ Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
+ DateModified = TarHeader.EPOCH.AddSeconds(timet);
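+ // FLG bits per RFC 1952: 0x02 FHCRC, 0x04 FEXTRA, 0x08 FNAME, 0x10 FCOMMENT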
+ if ((header[3] & 0x04) == 0x04)
+ {
+ // read and discard extra field
+ n = stream.Read(header, 0, 2); // 2-byte length field
+
+ Int16 extraLength = (Int16)(header[0] + header[1] * 256);
+ byte[] extra = new byte[extraLength];
+
+ if (!stream.ReadFully(extra))
+ {
+ throw new ZlibException("Unexpected end-of-file reading GZIP header.");
+ }
+ n = extraLength;
+ }
+ if ((header[3] & 0x08) == 0x08)
+ {
+ _name = ReadZeroTerminatedString(stream);
+ }
+ if ((header[3] & 0x10) == 0x10)
+ {
+ ReadZeroTerminatedString(stream);
+ }
+ if ((header[3] & 0x02) == 0x02)
+ {
+ stream.Read(header, 0, 2); // FHCRC is a two-byte CRC16 of the header; read and discard both bytes
+ }
+ }
+
+ private string ReadZeroTerminatedString(Stream stream)
+ {
+ byte[] buf1 = new byte[1];
+ var list = new List<byte>();
+ bool done = false;
+ do
+ {
+ // workitem 7740
+ int n = stream.Read(buf1, 0, 1);
+ if (n != 1)
+ {
+ throw new ZlibException("Unexpected EOF reading GZIP header.");
+ }
+ if (buf1[0] == 0)
+ {
+ done = true;
+ }
+ else
+ {
+ list.Add(buf1[0]);
+ }
+ }
+ while (!done);
+ byte[] buffer = list.ToArray();
+ return ArchiveEncoding.Decode(buffer);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipVolume.cs b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipVolume.cs
new file mode 100644
index 0000000000..7da73560e9
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/GZip/GZipVolume.cs
@@ -0,0 +1,25 @@
+using System.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common.GZip
+{
+ public class GZipVolume : Volume
+ {
+ public GZipVolume(Stream stream, ReaderOptions options)
+ : base(stream, options)
+ {
+ }
+
+#if !NO_FILE
+ public GZipVolume(FileInfo fileInfo, ReaderOptions options)
+ : base(fileInfo.OpenRead(), options)
+ {
+ options.LeaveStreamOpen = false;
+ }
+#endif
+
+ public override bool IsFirstVolume => true;
+
+ public override bool IsMultiVolume => true;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/IEntry.Extensions.cs b/BizHawk.Client.Common/SharpCompress/Common/IEntry.Extensions.cs
new file mode 100644
index 0000000000..76d75a8f53
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/IEntry.Extensions.cs
@@ -0,0 +1,51 @@
+
+#if !NO_FILE
+using System.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common
+{
+ internal static class EntryExtensions
+ {
+ internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
+ ExtractionOptions options)
+ {
+ if (options.PreserveFileTime || options.PreserveAttributes)
+ {
+ FileInfo nf = new FileInfo(destinationFileName);
+ if (!nf.Exists)
+ {
+ return;
+ }
+
+ // update file time to original packed time
+ if (options.PreserveFileTime)
+ {
+ if (entry.CreatedTime.HasValue)
+ {
+ nf.CreationTime = entry.CreatedTime.Value;
+ }
+
+ if (entry.LastModifiedTime.HasValue)
+ {
+ nf.LastWriteTime = entry.LastModifiedTime.Value;
+ }
+
+ if (entry.LastAccessedTime.HasValue)
+ {
+ nf.LastAccessTime = entry.LastAccessedTime.Value;
+ }
+ }
+
+ if (options.PreserveAttributes)
+ {
+ if (entry.Attrib.HasValue)
+ {
+ nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
+ }
+ }
+ }
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/IEntry.cs b/BizHawk.Client.Common/SharpCompress/Common/IEntry.cs
new file mode 100644
index 0000000000..84cfcb33a7
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/IEntry.cs
@@ -0,0 +1,22 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public interface IEntry
+ {
+ CompressionType CompressionType { get; }
+ DateTime? ArchivedTime { get; }
+ long CompressedSize { get; }
+ long Crc { get; }
+ DateTime? CreatedTime { get; }
+ string Key { get; }
+ string LinkTarget { get; }
+ bool IsDirectory { get; }
+ bool IsEncrypted { get; }
+ bool IsSplitAfter { get; }
+ DateTime? LastAccessedTime { get; }
+ DateTime? LastModifiedTime { get; }
+ long Size { get; }
+ int? Attrib { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/IExtractionListener.cs b/BizHawk.Client.Common/SharpCompress/Common/IExtractionListener.cs
new file mode 100644
index 0000000000..b6fb50fcd5
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/IExtractionListener.cs
@@ -0,0 +1,8 @@
+namespace SharpCompress.Common
+{
+ internal interface IExtractionListener
+ {
+ void FireFilePartExtractionBegin(string name, long size, long compressedSize);
+ void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/IVolume.cs b/BizHawk.Client.Common/SharpCompress/Common/IVolume.cs
new file mode 100644
index 0000000000..d5dac255ec
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/IVolume.cs
@@ -0,0 +1,12 @@
+using System;
+
+#if !NO_FILE
+using System.IO;
+#endif
+
+namespace SharpCompress.Common
+{
+ public interface IVolume : IDisposable
+ {
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/IncompleteArchiveException.cs b/BizHawk.Client.Common/SharpCompress/Common/IncompleteArchiveException.cs
new file mode 100644
index 0000000000..78d567f455
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/IncompleteArchiveException.cs
@@ -0,0 +1,10 @@
+namespace SharpCompress.Common
+{
+ public class IncompleteArchiveException : ArchiveException
+ {
+ public IncompleteArchiveException(string message)
+ : base(message)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/InvalidFormatException.cs b/BizHawk.Client.Common/SharpCompress/Common/InvalidFormatException.cs
new file mode 100644
index 0000000000..fa141cb465
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/InvalidFormatException.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class InvalidFormatException : ExtractionException
+ {
+ public InvalidFormatException(string message)
+ : base(message)
+ {
+ }
+
+ public InvalidFormatException(string message, Exception inner)
+ : base(message, inner)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/MultiVolumeExtractionException.cs b/BizHawk.Client.Common/SharpCompress/Common/MultiVolumeExtractionException.cs
new file mode 100644
index 0000000000..d9b97fa3e5
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/MultiVolumeExtractionException.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class MultiVolumeExtractionException : ExtractionException
+ {
+ public MultiVolumeExtractionException(string message)
+ : base(message)
+ {
+ }
+
+ public MultiVolumeExtractionException(string message, Exception inner)
+ : base(message, inner)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/MultipartStreamRequiredException.cs b/BizHawk.Client.Common/SharpCompress/Common/MultipartStreamRequiredException.cs
new file mode 100644
index 0000000000..cf030ed613
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/MultipartStreamRequiredException.cs
@@ -0,0 +1,10 @@
+namespace SharpCompress.Common
+{
+ public class MultipartStreamRequiredException : ExtractionException
+ {
+ public MultipartStreamRequiredException(string message)
+ : base(message)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/OptionsBase.cs b/BizHawk.Client.Common/SharpCompress/Common/OptionsBase.cs
new file mode 100644
index 0000000000..b98f0a6020
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/OptionsBase.cs
@@ -0,0 +1,13 @@
+
+namespace SharpCompress.Common
+{
+ public class OptionsBase
+ {
+ /// <summary>
+ /// SharpCompress will keep the supplied streams open. Default is true.
+ /// </summary>
+ public bool LeaveStreamOpen { get; set; } = true;
+
+ public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/PasswordProtectedException.cs b/BizHawk.Client.Common/SharpCompress/Common/PasswordProtectedException.cs
new file mode 100644
index 0000000000..58dc766036
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/PasswordProtectedException.cs
@@ -0,0 +1,17 @@
+using System;
+
+namespace SharpCompress.Common
+{
+ public class PasswordProtectedException : ExtractionException
+ {
+ public PasswordProtectedException(string message)
+ : base(message)
+ {
+ }
+
+ public PasswordProtectedException(string message, Exception inner)
+ : base(message, inner)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/AVHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/AVHeader.cs
new file mode 100644
index 0000000000..51dda2cf25
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/AVHeader.cs
@@ -0,0 +1,30 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class AvHeader : RarHeader
+ {
+ public AvHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Av)
+ {
+ if (IsRar5)
+ throw new InvalidFormatException("unexpected rar5 record");
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ UnpackVersion = reader.ReadByte();
+ Method = reader.ReadByte();
+ AvVersion = reader.ReadByte();
+ AvInfoCrc = reader.ReadInt32();
+ }
+
+ internal int AvInfoCrc { get; private set; }
+
+ internal byte UnpackVersion { get; private set; }
+
+ internal byte Method { get; private set; }
+
+ internal byte AvVersion { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs
new file mode 100644
index 0000000000..389a069764
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveCryptHeader.cs
@@ -0,0 +1,57 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class ArchiveCryptHeader : RarHeader
+ {
+
+ private const int CRYPT_VERSION = 0; // Supported encryption version.
+ private const int SIZE_SALT50 = 16;
+ private const int SIZE_SALT30 = 8;
+ private const int SIZE_INITV = 16;
+ private const int SIZE_PSWCHECK = 8;
+ private const int SIZE_PSWCHECK_CSUM = 4;
+ private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PBKDF2 iteration count.
+ private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
+
+
+ private bool _usePswCheck;
+ private uint _lg2Count; // Log2 of PBKDF2 repetition count.
+ private byte[] _salt;
+ private byte[] _pswCheck;
+ private byte[] _pswCheckCsm;
+
+ public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Crypt)
+ {
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ var cryptVersion = reader.ReadRarVIntUInt32();
+ if (cryptVersion > CRYPT_VERSION)
+ {
+ //error?
+ return;
+ }
+ var encryptionFlags = reader.ReadRarVIntUInt32();
+ _usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
+ _lg2Count = reader.ReadRarVIntByte(1);
+
+
+ //UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
+ if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
+ {
+ //error?
+ return;
+ }
+
+ _salt = reader.ReadBytes(SIZE_SALT50);
+ if (_usePswCheck)
+ {
+ _pswCheck = reader.ReadBytes(SIZE_PSWCHECK);
+ _pswCheckCsm = reader.ReadBytes(SIZE_PSWCHECK_CSUM);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs
new file mode 100644
index 0000000000..72b4bd4b91
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ArchiveHeader.cs
@@ -0,0 +1,81 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class ArchiveHeader : RarHeader
+ {
+ public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Archive)
+ {
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ if (IsRar5)
+ {
+ Flags = reader.ReadRarVIntUInt16();
+ if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
+ {
+ VolumeNumber = (int)reader.ReadRarVIntUInt32();
+ }
+ // later: we may have a locator record if we need it
+ //if (ExtraSize != 0) {
+ // ReadLocator(reader);
+ //}
+ }
+ else
+ {
+ Flags = HeaderFlags;
+ HighPosAv = reader.ReadInt16();
+ PosAv = reader.ReadInt32();
+ if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
+ {
+ EncryptionVersion = reader.ReadByte();
+ }
+ }
+ }
+
+ private void ReadLocator(MarkingBinaryReader reader) {
+ var size = reader.ReadRarVIntUInt16();
+ var type = reader.ReadRarVIntUInt16();
+ if (type != 1) throw new InvalidFormatException("expected locator record");
+ var flags = reader.ReadRarVIntUInt16();
+ const ushort hasQuickOpenOffset = 0x01;
+ const ushort hasRecoveryOffset = 0x02;
+ ulong quickOpenOffset = 0;
+ if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
+ quickOpenOffset = reader.ReadRarVInt();
+ }
+ ulong recoveryOffset = 0;
+ if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
+ recoveryOffset = reader.ReadRarVInt();
+ }
+ }
+
+ private ushort Flags { get; set; }
+
+ private bool HasFlag(ushort flag)
+ {
+ return (Flags & flag) == flag;
+ }
+
+ internal int? VolumeNumber { get; private set; }
+
+ internal short? HighPosAv { get; private set; }
+
+ internal int? PosAv { get; private set; }
+
+ private byte? EncryptionVersion { get; set; }
+
+ public bool? IsEncrypted => IsRar5 ? (bool?)null : HasFlag(ArchiveFlagsV4.PASSWORD);
+
+ public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);
+
+ public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);
+
+ // RAR5: Volume number field is present. True for all volumes except first.
+ public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);
+
+ public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/CommentHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/CommentHeader.cs
new file mode 100644
index 0000000000..4845ce2bd5
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/CommentHeader.cs
@@ -0,0 +1,28 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class CommentHeader : RarHeader
+ {
+ protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Comment)
+ {
+ if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ UnpSize = reader.ReadInt16();
+ UnpVersion = reader.ReadByte();
+ UnpMethod = reader.ReadByte();
+ CommCrc = reader.ReadInt16();
+ }
+
+ internal short UnpSize { get; private set; }
+
+ internal byte UnpVersion { get; private set; }
+
+ internal byte UnpMethod { get; private set; }
+ internal short CommCrc { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/EndArchiveHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/EndArchiveHeader.cs
new file mode 100644
index 0000000000..926a8dfe19
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/EndArchiveHeader.cs
@@ -0,0 +1,43 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class EndArchiveHeader : RarHeader
+ {
+ public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.EndArchive)
+ {
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ if (IsRar5)
+ {
+ Flags = reader.ReadRarVIntUInt16();
+ }
+ else
+ {
+ Flags = HeaderFlags;
+ if (HasFlag(EndArchiveFlagsV4.DATA_CRC))
+ {
+ ArchiveCrc = reader.ReadInt32();
+ }
+ if (HasFlag(EndArchiveFlagsV4.VOLUME_NUMBER))
+ {
+ VolumeNumber = reader.ReadInt16();
+ }
+ }
+ }
+
+ private ushort Flags { get; set; }
+
+ private bool HasFlag(ushort flag)
+ {
+ return (Flags & flag) == flag;
+ }
+
+ internal int? ArchiveCrc { get; private set; }
+
+ internal short? VolumeNumber { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileHeader.cs
new file mode 100644
index 0000000000..09ded0fc62
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileHeader.cs
@@ -0,0 +1,452 @@
+#if !Rar2017_64bit
+using nint = System.Int32;
+using nuint = System.UInt32;
+using size_t = System.UInt32;
+#else
+using nint = System.Int64;
+using nuint = System.UInt64;
+using size_t = System.UInt64;
+#endif
+
+using SharpCompress.IO;
+using System;
+using System.IO;
+using System.Text;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class FileHeader : RarHeader
+ {
+ private uint _fileCrc;
+
+ public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
+ : base(header, reader, headerType)
+ {
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ if (IsRar5)
+ {
+ ReadFromReaderV5(reader);
+ }
+ else
+ {
+ ReadFromReaderV4(reader);
+ }
+ }
+
+ private void ReadFromReaderV5(MarkingBinaryReader reader)
+ {
+ Flags = reader.ReadRarVIntUInt16();
+
+ var lvalue = checked((long)reader.ReadRarVInt());
+
+ // long.MaxValue causes the unpack code to finish when the input stream is exhausted
+ UncompressedSize = HasFlag(FileFlagsV5.UNPACKED_SIZE_UNKNOWN) ? long.MaxValue : lvalue;
+
+ FileAttributes = reader.ReadRarVIntUInt32();
+
+ if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
+ FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
+ }
+
+ if (HasFlag(FileFlagsV5.HAS_CRC32)) {
+ FileCrc = reader.ReadUInt32();
+ }
+
+ var compressionInfo = reader.ReadRarVIntUInt16();
+
+ // Lower 6 bits (0x003f mask) contain the version of compression algorithm, resulting in possible 0 - 63 values. Current version is 0.
+ // "+ 50" to not mix with old RAR format algorithms. For example,
+ // we may need to use the compression algorithm 15 in the future,
+ // but it was already used in RAR 1.5 and Unpack needs to distinguish
+ // them.
+ CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
+
+ // 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
+ // It can be set only for file headers and is never set for service headers.
+ IsSolid = (compressionInfo & 0x40) == 0x40;
+
+ // Bits 8 - 10 (0x0380 mask) define the compression method. Currently only values 0 - 5 are used. 0 means no compression.
+ CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
+
+ // Bits 11 - 14 (0x3c00 mask) define the minimum dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
+ WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
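+
+ // Worked example (illustrative value, not from any particular archive):
+ // compressionInfo = 0x11C0 -> version = 0x11C0 & 0x3f = 0 (algorithm 50),
+ // solid = (0x11C0 & 0x40) != 0 -> true, method = (0x11C0 >> 7) & 0x7 = 3 (normal),
+ // dictionary = (0x11C0 >> 10) & 0xf = 4 -> WindowSize = 0x20000 << 4 = 2 MB.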
+
+ HostOs = reader.ReadRarVIntByte();
+
+ var nameSize = reader.ReadRarVIntUInt16();
+
+ // Variable length field containing Name length bytes in UTF-8 format without trailing zero.
+ // For file header this is a name of archived file. Forward slash character is used as the path separator both for Unix and Windows names.
+ // Backslashes are treated as a part of name for Unix names and as invalid character for Windows file names. Type of name is defined by Host OS field.
+ //
+ // TODO: not sure if anything needs to be done to handle the following:
+ // If Unix file name contains any high ASCII characters which cannot be correctly converted to Unicode and UTF-8
+ // we map such characters to the 0xE080 - 0xE0FF private use Unicode area and insert the 0xFFFE Unicode non-character
+ // to resulting string to indicate that it contains mapped characters, which need to be converted back when extracting.
+ // Concrete position of 0xFFFE is not defined, we need to search the entire string for it. Such mapped names are not
+ // portable and can be correctly unpacked only on the same system where they were created.
+ //
+ // For service header this field contains a name of service header. Now the following names are used:
+ // CMT Archive comment
+ // QO Archive quick open data
+ // ACL NTFS file permissions
+ // STM NTFS alternate data stream
+ // RR Recovery record
+ var b = reader.ReadBytes(nameSize);
+ FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
+
+ // the extra size seems to be redundant since we know the total header size
+ if (ExtraSize != RemainingHeaderBytes(reader))
+ {
+ throw new InvalidFormatException("rar5 header size / extra size inconsistency");
+ }
+
+ isEncryptedRar5 = false;
+
+ while (RemainingHeaderBytes(reader) > 0) {
+ var size = reader.ReadRarVIntUInt16();
+ int n = RemainingHeaderBytes(reader);
+ var type = reader.ReadRarVIntUInt16();
+ switch (type) {
+ //TODO
+ case 1: // file encryption
+ {
+ isEncryptedRar5 = true;
+
+ //var version = reader.ReadRarVIntByte();
+ //if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
+ }
+ break;
+ // case 2: // file hash
+ // {
+ //
+ // }
+ // break;
+ case 3: // file time
+ {
+ ushort flags = reader.ReadRarVIntUInt16();
+ var isWindowsTime = (flags & 1) == 0;
+ if ((flags & 0x2) == 0x2) {
+ FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
+ }
+ if ((flags & 0x4) == 0x4) {
+ FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
+ }
+ if ((flags & 0x8) == 0x8) {
+ FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
+ }
+ }
+ break;
+//TODO
+// case 4: // file version
+// {
+//
+// }
+// break;
+// case 5: // file system redirection
+// {
+//
+// }
+// break;
+// case 6: // unix owner
+// {
+//
+// }
+// break;
+// case 7: // service data
+// {
+//
+// }
+// break;
+
+ default:
+ // skip unknown record types to allow new record types to be added in the future
+ break;
+ }
+ // drain any trailing bytes of extra record
+ int did = n - RemainingHeaderBytes(reader);
+ int drain = size - did;
+ if (drain > 0)
+ {
+ reader.ReadBytes(drain);
+ }
+ }
+
+ if (AdditionalDataSize != 0) {
+ CompressedSize = AdditionalDataSize;
+ }
+ }
+
+
+ private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
+ {
+ if (isWindowsTime)
+ {
+ return DateTime.FromFileTime(reader.ReadInt64());
+ }
+ else
+ {
+ return Utility.UnixTimeToDateTime(reader.ReadUInt32());
+ }
+ }
+
+ private static string ConvertPathV5(string path)
+ {
+#if NO_FILE
+ // not sure what to do here
+ throw new NotImplementedException("TODO");
+#else
+ if (Path.DirectorySeparatorChar == '\\')
+ {
+ // replace embedded \\ with valid filename char
+ return path.Replace('\\', '-').Replace('/', '\\');
+ }
+ return path;
+#endif
+ }
+
+
+ private void ReadFromReaderV4(MarkingBinaryReader reader)
+ {
+ Flags = HeaderFlags;
+ IsSolid = HasFlag(FileFlagsV4.SOLID);
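+ // V4 encodes the dictionary size in flag bits 5-7: 0 -> 64 KB, 1 -> 128 KB, ... 6 -> 4 MB,
+ // while all three bits set (0x00e0) marks a directory entry, handled via IsDirectory.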
+ WindowSize = IsDirectory ? 0U : ((size_t)0x10000) << ((Flags & FileFlagsV4.WINDOW_MASK) >> 5);
+
+ uint lowUncompressedSize = reader.ReadUInt32();
+
+ HostOs = reader.ReadByte();
+
+ FileCrc = reader.ReadUInt32();
+
+ FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadUInt32());
+
+ CompressionAlgorithm = reader.ReadByte();
+ CompressionMethod = (byte)(reader.ReadByte() - 0x30);
+
+ short nameSize = reader.ReadInt16();
+
+ FileAttributes = reader.ReadUInt32();
+
+ uint highCompressedSize = 0;
+ uint highUncompressedSize = 0;
+ if (HasFlag(FileFlagsV4.LARGE))
+ {
+ highCompressedSize = reader.ReadUInt32();
+ highUncompressedSize = reader.ReadUInt32();
+ }
+ else
+ {
+ if (lowUncompressedSize == 0xffffffff)
+ {
+ lowUncompressedSize = 0xffffffff;
+ highUncompressedSize = int.MaxValue;
+ }
+ }
+ CompressedSize = UInt32To64(highCompressedSize, checked((uint)AdditionalDataSize));
+ UncompressedSize = UInt32To64(highUncompressedSize, lowUncompressedSize);
+
+ nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
+
+ byte[] fileNameBytes = reader.ReadBytes(nameSize);
+
+ const int saltSize = 8;
+ const int newLhdSize = 32;
+
+ switch (HeaderCode)
+ {
+ case HeaderCodeV.RAR4_FILE_HEADER:
+ {
+ if (HasFlag(FileFlagsV4.UNICODE))
+ {
+ int length = 0;
+ while (length < fileNameBytes.Length
+ && fileNameBytes[length] != 0)
+ {
+ length++;
+ }
+ if (length != nameSize)
+ {
+ length++;
+ FileName = FileNameDecoder.Decode(fileNameBytes, length);
+ }
+ else
+ {
+ FileName = ArchiveEncoding.Decode(fileNameBytes);
+ }
+ }
+ else
+ {
+ FileName = ArchiveEncoding.Decode(fileNameBytes);
+ }
+ FileName = ConvertPathV4(FileName);
+ }
+ break;
+ case HeaderCodeV.RAR4_NEW_SUB_HEADER:
+ {
+ int datasize = HeaderSize - newLhdSize - nameSize;
+ if (HasFlag(FileFlagsV4.SALT))
+ {
+ datasize -= saltSize;
+ }
+ if (datasize > 0)
+ {
+ SubData = reader.ReadBytes(datasize);
+ }
+
+ if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
+ {
+ RecoverySectors = SubData[8] + (SubData[9] << 8)
+ + (SubData[10] << 16) + (SubData[11] << 24);
+ }
+ }
+ break;
+ }
+
+ if (HasFlag(FileFlagsV4.SALT))
+ {
+ R4Salt = reader.ReadBytes(saltSize);
+ }
+ if (HasFlag(FileFlagsV4.EXT_TIME))
+ {
+ // verify that the end of the header hasn't been reached before reading the Extended Time.
+ // some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
+ if (RemainingHeaderBytes(reader) >= 2)
+ {
+ ushort extendedFlags = reader.ReadUInt16();
+ FileLastModifiedTime = ProcessExtendedTimeV4(extendedFlags, FileLastModifiedTime, reader, 0);
+ FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
+ FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
+ FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
+ }
+ }
+ }
+
+ private static long UInt32To64(uint x, uint y)
+ {
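+ // e.g. UInt32To64(0x1, 0x2) == 0x1_0000_0002: x supplies the high 32 bits, y the low 32.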
+ long l = x;
+ l <<= 32;
+ return l + y;
+ }
+
+ private static DateTime? ProcessExtendedTimeV4(ushort extendedFlags, DateTime? time, MarkingBinaryReader reader, int i)
+ {
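+ // Each of the four timestamps (mtime, ctime, atime, archived time) gets a 4-bit
+ // field in extendedFlags, highest nibble first. Bit 3: field present; bit 2 clear:
+ // one second is added; bits 0-1: how many extra 100ns-precision bytes follow.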
+ uint rmode = (uint)extendedFlags >> (3 - i) * 4;
+ if ((rmode & 8) == 0)
+ {
+ return null;
+ }
+ if (i != 0)
+ {
+ uint dosTime = reader.ReadUInt32();
+ time = Utility.DosDateToDateTime(dosTime);
+ }
+ if ((rmode & 4) == 0)
+ {
+ time = time.Value.AddSeconds(1);
+ }
+ uint nanosecondHundreds = 0;
+ int count = (int)rmode & 3;
+ for (int j = 0; j < count; j++)
+ {
+ byte b = reader.ReadByte();
+ nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
+ }
+
+ // nanosecondHundreds is in 100ns units (10^-7 s); convert to milliseconds (10^-3 s)
+ return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
+ }
+
+ private static string ConvertPathV4(string path)
+ {
+#if NO_FILE
+ return path.Replace('\\', '/');
+#else
+ if (Path.DirectorySeparatorChar == '/')
+ {
+ return path.Replace('\\', '/');
+ }
+ else if (Path.DirectorySeparatorChar == '\\')
+ {
+ return path.Replace('/', '\\');
+ }
+ return path;
+#endif
+ }
+
+ public override string ToString()
+ {
+ return FileName;
+ }
+
+ private ushort Flags { get; set; }
+
+ private bool HasFlag(ushort flag)
+ {
+ return (Flags & flag) == flag;
+ }
+
+ internal uint FileCrc
+ {
+ get {
+ if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
+//!!! rar5:
+ throw new InvalidOperationException("TODO rar5");
+ }
+ return _fileCrc;
+ }
+ private set => _fileCrc = value;
+ }
+
+ // 0 - storing
+ // 1 - fastest compression
+ // 2 - fast compression
+ // 3 - normal compression
+ // 4 - good compression
+ // 5 - best compression
+ internal byte CompressionMethod { get; private set; }
+ internal bool IsStored => CompressionMethod == 0;
+
+ // eg (see DoUnpack())
+ //case 15: // rar 1.5 compression
+ //case 20: // rar 2.x compression
+ //case 26: // files larger than 2GB
+ //case 29: // rar 3.x compression
+ //case 50: // RAR 5.0 compression algorithm.
+ internal byte CompressionAlgorithm { get; private set; }
+
+ public bool IsSolid { get; private set; }
+
+ // unused for UnpackV1 implementation (limitation)
+ internal size_t WindowSize { get; private set; }
+
+ internal byte[] R4Salt { get; private set; }
+
+ private byte HostOs { get; set; }
+ internal uint FileAttributes { get; private set; }
+ internal long CompressedSize { get; private set; }
+ internal long UncompressedSize { get; private set; }
+ internal string FileName { get; private set; }
+ internal byte[] SubData { get; private set; }
+ internal int RecoverySectors { get; private set; }
+ internal long DataStartPosition { get; set; }
+ public Stream PackedStream { get; set; }
+
+ public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
+
+ public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
+
+ private bool isEncryptedRar5 = false;
+ public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
+
+ internal DateTime? FileLastModifiedTime { get; private set; }
+
+ internal DateTime? FileCreatedTime { get; private set; }
+
+ internal DateTime? FileLastAccessedTime { get; private set; }
+
+ internal DateTime? FileArchivedTime { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileNameDecoder.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileNameDecoder.cs
new file mode 100644
index 0000000000..98da75dae6
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/FileNameDecoder.cs
@@ -0,0 +1,78 @@
+using System.Text;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ /// <summary>
+ /// This is for the crazy Rar encoding that I don't understand
+ /// </summary>
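+ /// <remarks>
+ /// Rough sketch of the scheme as implemented below: the first byte supplies a
+ /// default "high byte"; each output character is then chosen by a 2-bit opcode
+ /// pulled from flag bytes: 0 = next byte as-is, 1 = next byte combined with the
+ /// default high byte, 2 = an explicit little-endian UTF-16 pair, 3 = a run copied
+ /// from the raw name bytes, optionally shifted by a correction byte.
+ /// </remarks>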
+ internal static class FileNameDecoder
+ {
+ internal static int GetChar(byte[] name, int pos)
+ {
+ return name[pos] & 0xff;
+ }
+
+ internal static string Decode(byte[] name, int encPos)
+ {
+ int decPos = 0;
+ int flags = 0;
+ int flagBits = 0;
+
+ int low = 0;
+ int high = 0;
+ int highByte = GetChar(name, encPos++);
+ StringBuilder buf = new StringBuilder();
+ while (encPos < name.Length)
+ {
+ if (flagBits == 0)
+ {
+ flags = GetChar(name, encPos++);
+ flagBits = 8;
+ }
+ switch (flags >> 6)
+ {
+ case 0:
+ buf.Append((char)(GetChar(name, encPos++)));
+ ++decPos;
+ break;
+
+ case 1:
+ buf.Append((char)(GetChar(name, encPos++) + (highByte << 8)));
+ ++decPos;
+ break;
+
+ case 2:
+ low = GetChar(name, encPos);
+ high = GetChar(name, encPos + 1);
+ buf.Append((char)((high << 8) + low));
+ ++decPos;
+ encPos += 2;
+ break;
+
+ case 3:
+ int length = GetChar(name, encPos++);
+ if ((length & 0x80) != 0)
+ {
+ int correction = GetChar(name, encPos++);
+ for (length = (length & 0x7f) + 2; length > 0 && decPos < name.Length; length--, decPos++)
+ {
+ low = (GetChar(name, decPos) + correction) & 0xff;
+ buf.Append((char)((highByte << 8) + low));
+ }
+ }
+ else
+ {
+ for (length += 2; length > 0 && decPos < name.Length; length--, decPos++)
+ {
+ buf.Append((char)(GetChar(name, decPos)));
+ }
+ }
+ break;
+ }
+ flags = (flags << 2) & 0xff;
+ flagBits -= 2;
+ }
+ return buf.ToString();
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/Flags.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/Flags.cs
new file mode 100644
index 0000000000..0c19079b2a
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/Flags.cs
@@ -0,0 +1,149 @@
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal enum HeaderType : byte
+ {
+ Null,
+ Mark,
+ Archive,
+ File,
+ Service,
+ Comment,
+ Av,
+ Protect,
+ Sign,
+ NewSub,
+ EndArchive,
+ Crypt
+ }
+
+ internal static class HeaderCodeV
+ {
+ public const byte RAR4_MARK_HEADER = 0x72;
+ public const byte RAR4_ARCHIVE_HEADER = 0x73;
+ public const byte RAR4_FILE_HEADER = 0x74;
+ public const byte RAR4_COMMENT_HEADER = 0x75;
+ public const byte RAR4_AV_HEADER = 0x76;
+ public const byte RAR4_SUB_HEADER = 0x77;
+ public const byte RAR4_PROTECT_HEADER = 0x78;
+ public const byte RAR4_SIGN_HEADER = 0x79;
+ public const byte RAR4_NEW_SUB_HEADER = 0x7a;
+ public const byte RAR4_END_ARCHIVE_HEADER = 0x7b;
+
+ public const byte RAR5_ARCHIVE_HEADER = 0x01;
+ public const byte RAR5_FILE_HEADER = 0x02;
+ public const byte RAR5_SERVICE_HEADER = 0x03;
+ public const byte RAR5_ARCHIVE_ENCRYPTION_HEADER = 0x04;
+ public const byte RAR5_END_ARCHIVE_HEADER = 0x05;
+ }
+
+ internal static class HeaderFlagsV4
+ {
+ public const ushort HAS_DATA = 0x8000;
+ }
+
+ internal static class EncryptionFlagsV5
+ {
+ // RAR 5.0 archive encryption header specific flags.
+ public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
+
+ public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
+ public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
+ }
+
+ internal static class HeaderFlagsV5
+ {
+ public const ushort HAS_EXTRA = 0x0001;
+ public const ushort HAS_DATA = 0x0002;
+ public const ushort KEEP = 0x0004; // block must be kept during an update
+ public const ushort SPLIT_BEFORE = 0x0008;
+ public const ushort SPLIT_AFTER = 0x0010;
+ public const ushort CHILD = 0x0020; // ??? Block depends on preceding file block.
+ public const ushort PRESERVE_CHILD = 0x0040; // ???? Preserve a child block if host block is modified
+ }
+
+ internal static class ArchiveFlagsV4
+ {
+ public const ushort VOLUME = 0x0001;
+ public const ushort COMMENT = 0x0002;
+ public const ushort LOCK = 0x0004;
+ public const ushort SOLID = 0x0008;
+ public const ushort NEW_NUMBERING = 0x0010;
+ public const ushort AV = 0x0020;
+ public const ushort PROTECT = 0x0040;
+ public const ushort PASSWORD = 0x0080;
+ public const ushort FIRST_VOLUME = 0x0100;
+ public const ushort ENCRYPT_VER = 0x0200;
+ }
+
+ internal static class ArchiveFlagsV5
+ {
+ public const ushort VOLUME = 0x0001;
+ public const ushort HAS_VOLUME_NUMBER = 0x0002;
+ public const ushort SOLID = 0x0004;
+ public const ushort PROTECT = 0x0008;
+ public const ushort LOCK = 0x0010;
+ }
+
+ internal static class HostOsV4
+ {
+ public const byte MS_DOS = 0;
+ public const byte OS2 = 1;
+ public const byte WIN32 = 2;
+ public const byte UNIX = 3;
+ public const byte MAC_OS = 4;
+ public const byte BE_OS = 5;
+ }
+
+ internal static class HostOsV5
+ {
+ public const byte WINDOWS = 0;
+ public const byte UNIX = 1;
+ }
+
+ internal static class FileFlagsV4
+ {
+ public const ushort SPLIT_BEFORE = 0x0001;
+ public const ushort SPLIT_AFTER = 0x0002;
+ public const ushort PASSWORD = 0x0004;
+ public const ushort COMMENT = 0x0008;
+ public const ushort SOLID = 0x0010;
+
+ public const ushort WINDOW_MASK = 0x00e0;
+ public const ushort WINDOW64 = 0x0000;
+ public const ushort WINDOW128 = 0x0020;
+ public const ushort WINDOW256 = 0x0040;
+ public const ushort WINDOW512 = 0x0060;
+ public const ushort WINDOW1024 = 0x0080;
+ public const ushort WINDOW2048 = 0x00a0;
+ public const ushort WINDOW4096 = 0x00c0;
+ public const ushort DIRECTORY = 0x00e0;
+
+ public const ushort LARGE = 0x0100;
+ public const ushort UNICODE = 0x0200;
+ public const ushort SALT = 0x0400;
+ public const ushort VERSION = 0x0800;
+ public const ushort EXT_TIME = 0x1000;
+ public const ushort EXT_FLAGS = 0x2000;
+ }
+
+ internal static class FileFlagsV5
+ {
+ public const ushort DIRECTORY = 0x0001;
+ public const ushort HAS_MOD_TIME = 0x0002;
+ public const ushort HAS_CRC32 = 0x0004;
+ public const ushort UNPACKED_SIZE_UNKNOWN = 0x0008;
+ }
+
+ internal static class EndArchiveFlagsV4
+ {
+ public const ushort NEXT_VOLUME = 0x0001;
+ public const ushort DATA_CRC = 0x0002;
+ public const ushort REV_SPACE = 0x0004;
+ public const ushort VOLUME_NUMBER = 0x0008;
+ }
+
+ internal static class EndArchiveFlagsV5
+ {
+ public const ushort HAS_NEXT_VOLUME = 0x0001;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/IRarHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/IRarHeader.cs
new file mode 100644
index 0000000000..bbc03593cf
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/IRarHeader.cs
@@ -0,0 +1,7 @@
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal interface IRarHeader
+ {
+ HeaderType HeaderType { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/MarkHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/MarkHeader.cs
new file mode 100644
index 0000000000..3a27cf69f3
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/MarkHeader.cs
@@ -0,0 +1,96 @@
+using System;
+using System.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class MarkHeader : IRarHeader
+ {
+ private const int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136
+
+ internal bool OldNumberingFormat { get; private set; }
+
+ public bool IsRar5 { get; }
+
+ private MarkHeader(bool isRar5)
+ {
+ IsRar5 = isRar5;
+ }
+
+ public HeaderType HeaderType => HeaderType.Mark;
+
+ private static byte GetByte(Stream stream)
+ {
+ var b = stream.ReadByte();
+ if (b != -1)
+ {
+ return (byte)b;
+ }
+ throw new EndOfStreamException();
+ }
+
+ public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
+ {
+ int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
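+ // Self-extracting (SFX) archives prepend an executable stub, so when lookForHeader
+ // is set the signature is scanned for within the first MAX_SFX_SIZE bytes rather
+ // than being required at offset zero.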
+ try
+ {
+ int start = -1;
+ var b = GetByte(stream); start++;
+ while (start <= maxScanIndex)
+ {
+ // Rar old signature: 52 45 7E 5E
+ // Rar4 signature: 52 61 72 21 1A 07 00
+ // Rar5 signature: 52 61 72 21 1A 07 01 00
+ if (b == 0x52)
+ {
+ b = GetByte(stream); start++;
+ if (b == 0x61)
+ {
+ b = GetByte(stream); start++;
+ if (b != 0x72) continue;
+ b = GetByte(stream); start++;
+ if (b != 0x21) continue;
+ b = GetByte(stream); start++;
+ if (b != 0x1a) continue;
+ b = GetByte(stream); start++;
+ if (b != 0x07) continue;
+
+ b = GetByte(stream); start++;
+ if (b == 1)
+ {
+ b = GetByte(stream); start++;
+ if (b != 0) continue;
+ return new MarkHeader(true); // Rar5
+ }
+ else if (b == 0)
+ {
+ return new MarkHeader(false); // Rar4
+ }
+ }
+ else if (b == 0x45)
+ {
+ b = GetByte(stream); start++;
+ if (b != 0x7e) continue;
+ b = GetByte(stream); start++;
+ if (b != 0x5e) continue;
+ throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
+ }
+ }
+ else
+ {
+ b = GetByte(stream); start++;
+ }
+ }
+ }
+ catch (Exception e)
+ {
+ if (!leaveStreamOpen)
+ {
+ stream.Dispose();
+ }
+ throw new InvalidFormatException("Error trying to read rar signature.", e);
+ }
+
+ throw new InvalidFormatException("Rar signature not found");
+ }
+ }
+}
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs
new file mode 100644
index 0000000000..84d50aee2a
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs
@@ -0,0 +1,55 @@
+using System;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
+ {
+ internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new NewSubHeaderType('C', 'M', 'T');
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new NewSubHeaderType(new byte[]{'A','C','L'});
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new NewSubHeaderType(new byte[]{'S','T','M'});
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new NewSubHeaderType(new byte[]{'U','O','W'});
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new NewSubHeaderType(new byte[]{'A','V'});
+
+ internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new NewSubHeaderType('R', 'R');
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new NewSubHeaderType(new byte[]{'E','A','2'});
+
+ //internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new NewSubHeaderType(new byte[]{'E','A','B','E'});
+
+ private readonly byte[] _bytes;
+
+ private NewSubHeaderType(params char[] chars)
+ {
+ _bytes = new byte[chars.Length];
+ for (int i = 0; i < chars.Length; ++i)
+ {
+ _bytes[i] = (byte)chars[i];
+ }
+ }
+
+ internal bool Equals(byte[] bytes)
+ {
+ if (_bytes.Length != bytes.Length)
+ {
+ return false;
+ }
+ for (int i = 0; i < bytes.Length; ++i)
+ {
+ if (_bytes[i] != bytes[i])
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public bool Equals(NewSubHeaderType other)
+ {
+ return Equals(other._bytes);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ProtectHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ProtectHeader.cs
new file mode 100644
index 0000000000..f7f0e8ba9e
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/ProtectHeader.cs
@@ -0,0 +1,28 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ // ProtectHeader is part of the Recovery Record feature
+ internal class ProtectHeader : RarHeader
+ {
+ public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Protect)
+ {
+ if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ Version = reader.ReadByte();
+ RecSectors = reader.ReadUInt16();
+ TotalBlocks = reader.ReadUInt32();
+ Mark = reader.ReadBytes(8);
+ }
+
+ internal uint DataSize => checked((uint)AdditionalDataSize);
+ internal byte Version { get; private set; }
+ internal ushort RecSectors { get; private set; }
+ internal uint TotalBlocks { get; private set; }
+ internal byte[] Mark { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeader.cs
new file mode 100644
index 0000000000..b15f650078
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeader.cs
@@ -0,0 +1,130 @@
+using System;
+using System.IO;
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ // http://www.forensicswiki.org/w/images/5/5b/RARFileStructure.txt
+ // https://www.rarlab.com/technote.htm
+ internal class RarHeader : IRarHeader
+ {
+ private readonly HeaderType _headerType;
+ private readonly bool _isRar5;
+
+ internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
+ {
+ try
+ {
+ return new RarHeader(reader, isRar5, archiveEncoding);
+ }
+ catch (EndOfStreamException)
+ {
+ return null;
+ }
+ }
+
+ private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
+ {
+ _headerType = HeaderType.Null;
+ _isRar5 = isRar5;
+ ArchiveEncoding = archiveEncoding;
+ if (IsRar5)
+ {
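+ // RAR5 header layout (per the technote linked above): CRC32 of the header,
+ // a vint header size counting from the first field after it (hence the Mark()
+ // below), then vint header type, vint header flags, and optional extra-area
+ // and data-area sizes.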
+ HeaderCrc = reader.ReadUInt32();
+ reader.ResetCrc();
+ HeaderSize = (int)reader.ReadRarVIntUInt32(3);
+ reader.Mark();
+ HeaderCode = reader.ReadRarVIntByte();
+ HeaderFlags = reader.ReadRarVIntUInt16(2);
+
+ if (HasHeaderFlag(HeaderFlagsV5.HAS_EXTRA))
+ {
+ ExtraSize = reader.ReadRarVIntUInt32();
+ }
+ if (HasHeaderFlag(HeaderFlagsV5.HAS_DATA))
+ {
+ AdditionalDataSize = (long)reader.ReadRarVInt();
+ }
+ } else {
+ reader.Mark();
+ HeaderCrc = reader.ReadUInt16();
+ reader.ResetCrc();
+ HeaderCode = reader.ReadByte();
+ HeaderFlags = reader.ReadUInt16();
+ HeaderSize = reader.ReadInt16();
+ if (HasHeaderFlag(HeaderFlagsV4.HAS_DATA))
+ {
+ AdditionalDataSize = reader.ReadUInt32();
+ }
+ }
+ }
+
+ protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType) {
+ _headerType = headerType;
+ _isRar5 = header.IsRar5;
+ HeaderCrc = header.HeaderCrc;
+ HeaderCode = header.HeaderCode;
+ HeaderFlags = header.HeaderFlags;
+ HeaderSize = header.HeaderSize;
+ ExtraSize = header.ExtraSize;
+ AdditionalDataSize = header.AdditionalDataSize;
+ ArchiveEncoding = header.ArchiveEncoding;
+ ReadFinish(reader);
+
+ int n = RemainingHeaderBytes(reader);
+ if (n > 0)
+ {
+ reader.ReadBytes(n);
+ }
+
+ VerifyHeaderCrc(reader.GetCrc32());
+ }
+
+ protected int RemainingHeaderBytes(MarkingBinaryReader reader) {
+ return checked(HeaderSize - (int)reader.CurrentReadByteCount);
+ }
+
+ protected virtual void ReadFinish(MarkingBinaryReader reader)
+ {
+ throw new NotImplementedException();
+ }
+
+ private void VerifyHeaderCrc(uint crc32)
+ {
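+ // V4 headers store only the low 16 bits of the CRC32, so truncate before
+ // comparing; V5 headers store the full 32-bit value.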
+ var b = (IsRar5 ? crc32 : (ushort)crc32) == HeaderCrc;
+ if (!b)
+ {
+ throw new InvalidFormatException("rar header crc mismatch");
+ }
+ }
+
+ public HeaderType HeaderType => _headerType;
+
+ protected bool IsRar5 => _isRar5;
+
+ protected uint HeaderCrc { get; }
+
+ internal byte HeaderCode { get; }
+
+ protected ushort HeaderFlags { get; }
+
+ protected bool HasHeaderFlag(ushort flag)
+ {
+ return (HeaderFlags & flag) == flag;
+ }
+
+ protected int HeaderSize { get; }
+
+ internal ArchiveEncoding ArchiveEncoding { get; }
+
+ /// <summary>
+ /// Extra header size.
+ /// </summary>
+ protected uint ExtraSize { get; }
+
+ /// <summary>
+ /// Size of additional data (eg file contents)
+ /// </summary>
+ protected long AdditionalDataSize { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
new file mode 100644
index 0000000000..8c4f34f7ac
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs
@@ -0,0 +1,192 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class RarHeaderFactory
+ {
+ private bool _isRar5;
+
+ internal RarHeaderFactory(StreamingMode mode, ReaderOptions options)
+ {
+ StreamingMode = mode;
+ Options = options;
+ }
+
+ private ReaderOptions Options { get; }
+ internal StreamingMode StreamingMode { get; }
+ internal bool IsEncrypted { get; private set; }
+
+ internal IEnumerable<IRarHeader> ReadHeaders(Stream stream)
+ {
+ var markHeader = MarkHeader.Read(stream, Options.LeaveStreamOpen, Options.LookForHeader);
+ _isRar5 = markHeader.IsRar5;
+ yield return markHeader;
+
+ RarHeader header;
+ while ((header = TryReadNextHeader(stream)) != null)
+ {
+ yield return header;
+ if (header.HeaderType == HeaderType.EndArchive)
+ {
+ // End of archive marker. RAR does not read anything after this header, which lets
+ // third-party tools append extra information such as a digital signature to the archive.
+ yield break;
+ }
+ }
+ }
+
+ private RarHeader TryReadNextHeader(Stream stream)
+ {
+ RarCrcBinaryReader reader;
+ if (!IsEncrypted)
+ {
+ reader = new RarCrcBinaryReader(stream);
+ }
+ else
+ {
+#if !NO_CRYPTO
+ if (Options.Password == null)
+ {
+ throw new CryptographicException("Encrypted Rar archive has no password specified.");
+ }
+ reader = new RarCryptoBinaryReader(stream, Options.Password);
+#else
+ throw new CryptographicException("Rar encryption unsupported on this platform");
+#endif
+ }
+
+ var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
+ if (header == null)
+ {
+ return null;
+ }
+ switch (header.HeaderCode)
+ {
+ case HeaderCodeV.RAR5_ARCHIVE_HEADER:
+ case HeaderCodeV.RAR4_ARCHIVE_HEADER:
+ {
+ var ah = new ArchiveHeader(header, reader);
+ if (ah.IsEncrypted == true)
+ {
+ //!!! rar5 we don't know yet
+ IsEncrypted = true;
+ }
+ return ah;
+ }
+
+ case HeaderCodeV.RAR4_PROTECT_HEADER:
+ {
+ var ph = new ProtectHeader(header, reader);
+ // skip the recovery record data, we do not use it.
+ switch (StreamingMode)
+ {
+ case StreamingMode.Seekable:
+ {
+ reader.BaseStream.Position += ph.DataSize;
+ }
+ break;
+ case StreamingMode.Streaming:
+ {
+ reader.BaseStream.Skip(ph.DataSize);
+ }
+ break;
+ default:
+ {
+ throw new InvalidFormatException("Invalid StreamingMode");
+ }
+ }
+
+ return ph;
+ }
+
+ case HeaderCodeV.RAR5_SERVICE_HEADER:
+ {
+ var fh = new FileHeader(header, reader, HeaderType.Service);
+ SkipData(fh, reader);
+ return fh;
+ }
+
+ case HeaderCodeV.RAR4_NEW_SUB_HEADER:
+ {
+ var fh = new FileHeader(header, reader, HeaderType.NewSub);
+ SkipData(fh, reader);
+ return fh;
+ }
+
+ case HeaderCodeV.RAR5_FILE_HEADER:
+ case HeaderCodeV.RAR4_FILE_HEADER:
+ {
+ var fh = new FileHeader(header, reader, HeaderType.File);
+ switch (StreamingMode)
+ {
+ case StreamingMode.Seekable:
+ {
+ fh.DataStartPosition = reader.BaseStream.Position;
+ reader.BaseStream.Position += fh.CompressedSize;
+ }
+ break;
+ case StreamingMode.Streaming:
+ {
+ var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
+ if (fh.R4Salt == null)
+ {
+ fh.PackedStream = ms;
+ }
+ else
+ {
+#if !NO_CRYPTO
+ fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
+#else
+ throw new NotSupportedException("RarCrypto not supported");
+#endif
+ }
+ }
+ break;
+ default:
+ {
+ throw new InvalidFormatException("Invalid StreamingMode");
+ }
+ }
+ return fh;
+ }
+ case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
+ case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
+ {
+ return new EndArchiveHeader(header, reader);
+ }
+ case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
+ {
+ var ch = new ArchiveCryptHeader(header, reader);
+ IsEncrypted = true;
+ return ch;
+ }
+ default:
+ {
+ throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
+ }
+ }
+ }
+
+ private void SkipData(FileHeader fh, RarCrcBinaryReader reader) {
+ switch (StreamingMode) {
+ case StreamingMode.Seekable: {
+ fh.DataStartPosition = reader.BaseStream.Position;
+ reader.BaseStream.Position += fh.CompressedSize;
+ }
+ break;
+ case StreamingMode.Streaming: {
+ //skip the data because it's useless?
+ reader.BaseStream.Skip(fh.CompressedSize);
+ }
+ break;
+ default: {
+ throw new InvalidFormatException("Invalid StreamingMode");
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/SignHeader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/SignHeader.cs
new file mode 100644
index 0000000000..58b3baefbd
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/Headers/SignHeader.cs
@@ -0,0 +1,26 @@
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar.Headers
+{
+ internal class SignHeader : RarHeader
+ {
+ protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
+ : base(header, reader, HeaderType.Sign)
+ {
+ if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
+ }
+
+ protected override void ReadFinish(MarkingBinaryReader reader)
+ {
+ CreationTime = reader.ReadInt32();
+ ArcNameSize = reader.ReadInt16();
+ UserNameSize = reader.ReadInt16();
+ }
+
+ internal int CreationTime { get; private set; }
+
+ internal short ArcNameSize { get; private set; }
+
+ internal short UserNameSize { get; private set; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCrcBinaryReader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCrcBinaryReader.cs
new file mode 100644
index 0000000000..ddfd8b9848
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCrcBinaryReader.cs
@@ -0,0 +1,50 @@
+using System.IO;
+using SharpCompress.Compressors.Rar;
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.Rar
+{
+ internal class RarCrcBinaryReader : MarkingBinaryReader
+ {
+ private uint _currentCrc;
+
+ public RarCrcBinaryReader(Stream stream)
+ : base(stream)
+ {
+ }
+
+ public uint GetCrc32()
+ {
+ return ~_currentCrc;
+ }
+
+ public void ResetCrc()
+ {
+ _currentCrc = 0xffffffff;
+ }
+
+ protected void UpdateCrc(byte b)
+ {
+ _currentCrc = RarCRC.CheckCrc(_currentCrc, b);
+ }
+
+ protected byte[] ReadBytesNoCrc(int count)
+ {
+ return base.ReadBytes(count);
+ }
+
+ public override byte ReadByte()
+ {
+ var b = base.ReadByte();
+ _currentCrc = RarCRC.CheckCrc(_currentCrc, b);
+ return b;
+ }
+
+ public override byte[] ReadBytes(int count)
+ {
+ var result = base.ReadBytes(count);
+ _currentCrc = RarCRC.CheckCrc(_currentCrc, result, 0, result.Length);
+ return result;
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs
new file mode 100644
index 0000000000..66c0d60688
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs
@@ -0,0 +1,115 @@
+#if !NO_CRYPTO
+using System.Collections.Generic;
+using System.IO;
+
+namespace SharpCompress.Common.Rar
+{
+ internal class RarCryptoBinaryReader : RarCrcBinaryReader
+ {
+ private RarRijndael _rijndael;
+ private byte[] _salt;
+ private readonly string _password;
+ private readonly Queue<byte> _data = new Queue<byte>();
+ private long _readCount;
+
+ public RarCryptoBinaryReader(Stream stream, string password)
+ : base(stream)
+ {
+ _password = password;
+
+ // coderb: not sure why this was being done at this logical point
+ //SkipQueue();
+ byte[] salt = ReadBytes(8);
+ InitializeAes(salt);
+ }
+
+ // track read count ourselves rather than using the underlying stream since we buffer
+ public override long CurrentReadByteCount
+ {
+ get => _readCount;
+ protected set
+ {
+ // ignore
+ }
+ }
+
+ public override void Mark()
+ {
+ _readCount = 0;
+ }
+
+ private bool UseEncryption => _salt != null;
+
+ internal void InitializeAes(byte[] salt)
+ {
+ _salt = salt;
+ _rijndael = RarRijndael.InitializeFrom(_password, salt);
+ }
+
+ public override byte ReadByte()
+ {
+ if (UseEncryption)
+ {
+ return ReadAndDecryptBytes(1)[0];
+ }
+
+ _readCount++;
+ return base.ReadByte();
+ }
+
+ public override byte[] ReadBytes(int count)
+ {
+ if (UseEncryption)
+ {
+ return ReadAndDecryptBytes(count);
+ }
+
+ _readCount += count;
+ return base.ReadBytes(count);
+ }
+
+ private byte[] ReadAndDecryptBytes(int count)
+ {
+ int queueSize = _data.Count;
+ int sizeToRead = count - queueSize;
+
+ if (sizeToRead > 0)
+ {
+ int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
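+ // (~n + 1) is two's-complement negation, so this rounds sizeToRead up to the
+ // next multiple of the 16-byte AES block: e.g. 5 -> 16, 16 -> 16, 17 -> 32.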
+ for (int i = 0; i < alignedSize / 16; i++)
+ {
+ byte[] cipherText = ReadBytesNoCrc(16);
+ var readBytes = _rijndael.ProcessBlock(cipherText);
+ foreach (var readByte in readBytes)
+ _data.Enqueue(readByte);
+ }
+ }
+
+ var decryptedBytes = new byte[count];
+
+ for (int i = 0; i < count; i++)
+ {
+ var b = _data.Dequeue();
+ decryptedBytes[i] = b;
+ UpdateCrc(b);
+ }
+
+ _readCount += count;
+ return decryptedBytes;
+ }
+
+ public void ClearQueue()
+ {
+ _data.Clear();
+ }
+
+ public void SkipQueue()
+ {
+ var position = BaseStream.Position;
+ BaseStream.Position = position + _data.Count;
+ ClearQueue();
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoWrapper.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoWrapper.cs
new file mode 100644
index 0000000000..50e1520ce8
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarCryptoWrapper.cs
@@ -0,0 +1,99 @@
+
+#if !NO_CRYPTO
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace SharpCompress.Common.Rar
+{
+ internal class RarCryptoWrapper : Stream
+ {
+ private readonly Stream _actualStream;
+ private readonly byte[] _salt;
+ private RarRijndael _rijndael;
+ private readonly Queue<byte> _data = new Queue<byte>();
+
+ public RarCryptoWrapper(Stream actualStream, string password, byte[] salt)
+ {
+ _actualStream = actualStream;
+ _salt = salt;
+ _rijndael = RarRijndael.InitializeFrom(password, salt);
+ }
+
+ public override void Flush()
+ {
+ throw new NotSupportedException();
+ }
+
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override void SetLength(long value)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (_salt == null)
+ {
+ return _actualStream.Read(buffer, offset, count);
+ }
+ return ReadAndDecrypt(buffer, offset, count);
+ }
+
+ public int ReadAndDecrypt(byte[] buffer, int offset, int count)
+ {
+ int queueSize = _data.Count;
+ int sizeToRead = count - queueSize;
+
+ if (sizeToRead > 0)
+ {
+ int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
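+ // round sizeToRead up to the next multiple of the 16-byte AES block,
+ // as in RarCryptoBinaryReader: e.g. 5 -> 16, 16 -> 16, 17 -> 32.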
+ for (int i = 0; i < alignedSize / 16; i++)
+ {
+ byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
+ _actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
+
+ var readBytes = _rijndael.ProcessBlock(cipherText);
+ foreach (var readByte in readBytes)
+ _data.Enqueue(readByte);
+
+ }
+
+ for (int i = 0; i < count; i++)
+ buffer[offset + i] = _data.Dequeue();
+ }
+ return count;
+ }
+
+ public override void Write(byte[] buffer, int offset, int count)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override bool CanRead => true;
+
+ public override bool CanSeek => false;
+
+ public override bool CanWrite => false;
+
+ public override long Length => throw new NotSupportedException();
+
+ public override long Position { get; set; }
+
+ protected override void Dispose(bool disposing)
+ {
+ if (_rijndael != null)
+ {
+ _rijndael.Dispose();
+ _rijndael = null;
+ }
+ base.Dispose(disposing);
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarEntry.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarEntry.cs
new file mode 100644
index 0000000000..c461f42a44
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarEntry.cs
@@ -0,0 +1,65 @@
+using System;
+using SharpCompress.Common.Rar.Headers;
+
+namespace SharpCompress.Common.Rar
+{
+ public abstract class RarEntry : Entry
+ {
+ internal abstract FileHeader FileHeader { get; }
+
+ /// <summary>
+ /// As the V2017 port isn't complete, add this check to use the legacy Rar code.
+ /// </summary>
+ internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
+
+ /// <summary>
+ /// The File's 32 bit CRC Hash
+ /// </summary>
+ public override long Crc => FileHeader.FileCrc;
+
+ /// <summary>
+ /// The path of the file internal to the Rar Archive.
+ /// </summary>
+ public override string Key => FileHeader.FileName;
+
+ public override string LinkTarget => null;
+
+ /// <summary>
+ /// The entry last modified time in the archive, if recorded
+ /// </summary>
+ public override DateTime? LastModifiedTime => FileHeader.FileLastModifiedTime;
+
+ /// <summary>
+ /// The entry created time in the archive, if recorded
+ /// </summary>
+ public override DateTime? CreatedTime => FileHeader.FileCreatedTime;
+
+ /// <summary>
+ /// The entry last accessed time in the archive, if recorded
+ /// </summary>
+ public override DateTime? LastAccessedTime => FileHeader.FileLastAccessedTime;
+
+ /// <summary>
+ /// The entry time when archived, if recorded
+ /// </summary>
+ public override DateTime? ArchivedTime => FileHeader.FileArchivedTime;
+
+ /// <summary>
+ /// Entry is password protected and encrypted and cannot be extracted.
+ /// </summary>
+ public override bool IsEncrypted => FileHeader.IsEncrypted;
+
+ /// <summary>
+ /// Entry is a directory.
+ /// </summary>
+ public override bool IsDirectory => FileHeader.IsDirectory;
+
+ public override bool IsSplitAfter => FileHeader.IsSplitAfter;
+
+ public override string ToString()
+ {
+ return string.Format("Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",
+ Key, CompressedSize, Size, Crc);
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarFilePart.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarFilePart.cs
new file mode 100644
index 0000000000..5c05cc8091
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarFilePart.cs
@@ -0,0 +1,27 @@
+using System.IO;
+using SharpCompress.Common.Rar.Headers;
+
+namespace SharpCompress.Common.Rar
+{
+ /// <summary>
+ /// This represents a single file part that exists in a rar volume. A compressed file is one or many file parts that are spread across one or many rar parts.
+ /// </summary>
+ internal abstract class RarFilePart : FilePart
+ {
+ internal RarFilePart(MarkHeader mh, FileHeader fh)
+ : base(fh.ArchiveEncoding)
+ {
+ MarkHeader = mh;
+ FileHeader = fh;
+ }
+
+ internal MarkHeader MarkHeader { get; }
+
+ internal FileHeader FileHeader { get; }
+
+ internal override Stream GetRawStream()
+ {
+ return null;
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarRijndael.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarRijndael.cs
new file mode 100644
index 0000000000..824cb64e73
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarRijndael.cs
@@ -0,0 +1,121 @@
+#if !NO_CRYPTO
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Security.Cryptography;
+using System.Text;
+using SharpCompress.Crypto;
+
+namespace SharpCompress.Common.Rar
+{
+ internal class RarRijndael : IDisposable
+ {
+ internal const int CRYPTO_BLOCK_SIZE = 16;
+
+ private readonly string _password;
+ private readonly byte[] _salt;
+ private byte[] _aesInitializationVector;
+ private RijndaelEngine _rijndael;
+
+ private RarRijndael(string password, byte[] salt)
+ {
+ _password = password;
+ _salt = salt;
+ }
+
+ private byte[] ComputeHash(byte[] input)
+ {
+ var sha = SHA1.Create();
+ return sha.ComputeHash(input);
+ }
+
+ private void Initialize()
+ {
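+ // RAR3 key derivation as implemented below: the password bytes interleaved with
+ // zero bytes (pseudo UTF-16LE) plus the 8-byte salt, with a 3-byte round counter,
+ // are appended to a growing buffer for 2^18 rounds. At every 2^14th round one IV
+ // byte is taken from the SHA-1 of the buffer so far; the SHA-1 of the full buffer
+ // (byte-reordered) becomes the 128-bit AES key.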
+
+ _rijndael = new RijndaelEngine();
+ _aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
+ int rawLength = 2*_password.Length;
+ byte[] rawPassword = new byte[rawLength + 8];
+ byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
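+ // The password is widened to two bytes per character and the 8-byte salt is
+ // appended. Note: interleaving zero bytes below matches UTF-16LE only for
+ // ASCII passwords, since the source bytes come from a UTF-8 encoding.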
+ for (int i = 0; i < _password.Length; i++)
+ {
+ rawPassword[i*2] = passwordBytes[i];
+ rawPassword[i*2 + 1] = 0;
+ }
+ for (int i = 0; i < _salt.Length; i++)
+ {
+ rawPassword[i + rawLength] = _salt[i];
+ }
+
+
+ const int noOfRounds = (1 << 18);
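+ // Key derivation as implemented below: SHA-1 over an ever-growing buffer of
+ // (password + salt + 3-byte counter) blocks for 2^18 rounds; one IV byte is
+ // sampled from byte 19 of an intermediate digest every noOfRounds/16 rounds.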
+ IList<byte> bytes = new List<byte>();
+ byte[] digest;
+
+ //TODO slow code below, find ways to optimize
+ for (int i = 0; i < noOfRounds; i++)
+ {
+ bytes.AddRange(rawPassword);
+
+ bytes.AddRange(new[]
+ {
+ (byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
+ });
+ if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
+ {
+ digest = ComputeHash(bytes.ToArray());
+ _aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
+ }
+ }
+
+ digest = ComputeHash(bytes.ToArray());
+ //slow code ends
+
+ byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
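+ // The first 16 bytes of the final digest form the AES key, with the bytes of
+ // each 32-bit word reversed by the big-endian pack-and-shift loop below.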
+ for (int i = 0; i < 4; i++)
+ {
+ for (int j = 0; j < 4; j++)
+ {
+ aesKey[i*4 + j] = (byte)
+ (((digest[i*4]*0x1000000) & 0xff000000 |
+ (uint) ((digest[i*4 + 1]*0x10000) & 0xff0000) |
+ (uint) ((digest[i*4 + 2]*0x100) & 0xff00) |
+ (uint) (digest[i*4 + 3] & 0xff)) >> (j*8));
+ }
+ }
+
+ _rijndael.Init(false, new KeyParameter(aesKey));
+
+ }
+
+ public static RarRijndael InitializeFrom(string password, byte[] salt)
+ {
+ var rijndael = new RarRijndael(password, salt);
+ rijndael.Initialize();
+ return rijndael;
+ }
+
+ public byte[] ProcessBlock(byte[] cipherText)
+ {
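+ // Manual CBC decryption: decrypt the raw block, XOR it with the current IV,
+ // then make this ciphertext block the IV for the next call.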
+ var plainText = new byte[CRYPTO_BLOCK_SIZE];
+ var decryptedBytes = new List<byte>();
+ _rijndael.ProcessBlock(cipherText, 0, plainText, 0);
+
+ for (int j = 0; j < plainText.Length; j++)
+ {
+ decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
+ }
+
+ for (int j = 0; j < _aesInitializationVector.Length; j++)
+ {
+ _aesInitializationVector[j] = cipherText[j];
+ }
+ return decryptedBytes.ToArray();
+ }
+
+ public void Dispose()
+ {
+ }
+ }
+}
+#endif
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/Rar/RarVolume.cs b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarVolume.cs
new file mode 100644
index 0000000000..265db11c63
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/Rar/RarVolume.cs
@@ -0,0 +1,112 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using SharpCompress.Common.Rar.Headers;
+using SharpCompress.IO;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common.Rar
+{
+ /// <summary>
+ /// A RarArchiveVolume is a single rar file that may or may not be part of a split RarArchive. A Rar Archive is made up of one to many Rar Parts.
+ /// </summary>
+ public abstract class RarVolume : Volume
+ {
+ private readonly RarHeaderFactory _headerFactory;
+
+ internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
+ : base(stream, options)
+ {
+ _headerFactory = new RarHeaderFactory(mode, options);
+ }
+
+ internal ArchiveHeader ArchiveHeader { get; private set; }
+
+ internal StreamingMode Mode => _headerFactory.StreamingMode;
+
+ internal abstract IEnumerable<RarFilePart> ReadFileParts();
+
+ internal abstract RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader);
+
+ internal IEnumerable<RarFilePart> GetVolumeFileParts()
+ {
+ MarkHeader lastMarkHeader = null;
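+ // Headers arrive in order: Mark, then Archive, then File headers. Each file
+ // part pairs the most recent mark header with its own file header.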
+ foreach (var header in _headerFactory.ReadHeaders(Stream))
+ {
+ switch (header.HeaderType)
+ {
+ case HeaderType.Mark:
+ {
+ lastMarkHeader = header as MarkHeader;
+ }
+ break;
+ case HeaderType.Archive:
+ {
+ ArchiveHeader = header as ArchiveHeader;
+ }
+ break;
+ case HeaderType.File:
+ {
+ var fh = header as FileHeader;
+ yield return CreateFilePart(lastMarkHeader, fh);
+ }
+ break;
+ }
+ }
+ }
+
+ private void EnsureArchiveHeaderLoaded()
+ {
+ if (ArchiveHeader == null)
+ {
+ if (Mode == StreamingMode.Streaming)
+ {
+ throw new InvalidOperationException("ArchiveHeader should never been null in a streaming read.");
+ }
+
+ // we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream
+ GetVolumeFileParts().First();
+ Stream.Position = 0;
+ }
+ }
+
+ /// <summary>
+ /// RarArchive is the first volume of a multi-part archive.
+ /// Only Rar 3.0 format and higher.
+ /// </summary>
+ public override bool IsFirstVolume
+ {
+ get
+ {
+ EnsureArchiveHeaderLoaded();
+ return ArchiveHeader.IsFirstVolume;
+ }
+ }
+
+ /// <summary>
+ /// RarArchive is part of a multi-part archive.
+ /// </summary>
+ public override bool IsMultiVolume
+ {
+ get
+ {
+ EnsureArchiveHeaderLoaded();
+ return ArchiveHeader.IsVolume;
+ }
+ }
+
+ /// <summary>
+ /// RarArchive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
+ /// Currently, SharpCompress cannot decompress SOLID archives.
+ /// </summary>
+ public bool IsSolidArchive
+ {
+ get
+ {
+ EnsureArchiveHeaderLoaded();
+ return ArchiveHeader.IsSolid;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/ReaderExtractionEventArgs.cs b/BizHawk.Client.Common/SharpCompress/Common/ReaderExtractionEventArgs.cs
new file mode 100644
index 0000000000..aadc563c7e
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/ReaderExtractionEventArgs.cs
@@ -0,0 +1,17 @@
+using System;
+using SharpCompress.Readers;
+
+namespace SharpCompress.Common
+{
+ public class ReaderExtractionEventArgs<T> : EventArgs
+ {
+ internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
+ {
+ Item = entry;
+ ReaderProgress = readerProgress;
+ }
+
+ public T Item { get; }
+ public ReaderProgress ReaderProgress { get; }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveDatabase.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveDatabase.cs
new file mode 100644
index 0000000000..e827eb20ea
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveDatabase.cs
@@ -0,0 +1,182 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using SharpCompress.Compressors.LZMA;
+using SharpCompress.Compressors.LZMA.Utilites;
+
+namespace SharpCompress.Common.SevenZip
+{
+ internal class ArchiveDatabase
+ {
+ internal byte _majorVersion;
+ internal byte _minorVersion;
+ internal long _startPositionAfterHeader;
+ internal long _dataStartPosition;
+
+ internal List<long> _packSizes = new List<long>();
+ internal List<uint?> _packCrCs = new List<uint?>();
+ internal List<CFolder> _folders = new List<CFolder>();
+ internal List<int> _numUnpackStreamsVector;
+ internal List<CFileItem> _files = new List<CFileItem>();
+
+ internal List<long> _packStreamStartPositions = new List<long>();
+ internal List<int> _folderStartFileIndex = new List<int>();
+ internal List<int> _fileIndexToFolderIndexMap = new List<int>();
+
+ internal IPasswordProvider PasswordProvider { get; }
+
+ public ArchiveDatabase(IPasswordProvider passwordProvider)
+ {
+ PasswordProvider = passwordProvider;
+ }
+
+ internal void Clear()
+ {
+ _packSizes.Clear();
+ _packCrCs.Clear();
+ _folders.Clear();
+ _numUnpackStreamsVector = null;
+ _files.Clear();
+
+ _packStreamStartPositions.Clear();
+ _folderStartFileIndex.Clear();
+ _fileIndexToFolderIndexMap.Clear();
+ }
+
+ internal bool IsEmpty()
+ {
+ return _packSizes.Count == 0
+ && _packCrCs.Count == 0
+ && _folders.Count == 0
+ && _numUnpackStreamsVector.Count == 0
+ && _files.Count == 0;
+ }
+
+ private void FillStartPos()
+ {
+ _packStreamStartPositions.Clear();
+
+ long startPos = 0;
+ for (int i = 0; i < _packSizes.Count; i++)
+ {
+ _packStreamStartPositions.Add(startPos);
+ startPos += _packSizes[i];
+ }
+ }
+
+ private void FillFolderStartFileIndex()
+ {
+ _folderStartFileIndex.Clear();
+ _fileIndexToFolderIndexMap.Clear();
+
+ int folderIndex = 0;
+ int indexInFolder = 0;
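+ // Walk the files in order, mapping each run of files that have data onto the
+ // next non-empty folder per _numUnpackStreamsVector; files without a stream
+ // that fall outside any folder map to -1.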
+ for (int i = 0; i < _files.Count; i++)
+ {
+ CFileItem file = _files[i];
+
+ bool emptyStream = !file.HasStream;
+
+ if (emptyStream && indexInFolder == 0)
+ {
+ _fileIndexToFolderIndexMap.Add(-1);
+ continue;
+ }
+
+ if (indexInFolder == 0)
+ {
+ // v3.13 incorrectly worked with empty folders
+ // v4.07: Loop for skipping empty folders
+ for (;;)
+ {
+ if (folderIndex >= _folders.Count)
+ {
+ throw new InvalidOperationException();
+ }
+
+ _folderStartFileIndex.Add(i); // check it
+
+ if (_numUnpackStreamsVector[folderIndex] != 0)
+ {
+ break;
+ }
+
+ folderIndex++;
+ }
+ }
+
+ _fileIndexToFolderIndexMap.Add(folderIndex);
+
+ if (emptyStream)
+ {
+ continue;
+ }
+
+ indexInFolder++;
+
+ if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
+ {
+ folderIndex++;
+ indexInFolder = 0;
+ }
+ }
+ }
+
+ public void Fill()
+ {
+ FillStartPos();
+ FillFolderStartFileIndex();
+ }
+
+ internal long GetFolderStreamPos(CFolder folder, int indexInFolder)
+ {
+ int index = folder._firstPackStreamId + indexInFolder;
+ return _dataStartPosition + _packStreamStartPositions[index];
+ }
+
+ internal long GetFolderFullPackSize(int folderIndex)
+ {
+ int packStreamIndex = _folders[folderIndex]._firstPackStreamId;
+ CFolder folder = _folders[folderIndex];
+
+ long size = 0;
+ for (int i = 0; i < folder._packStreams.Count; i++)
+ {
+ size += _packSizes[packStreamIndex + i];
+ }
+
+ return size;
+ }
+
+ internal Stream GetFolderStream(Stream stream, CFolder folder, IPasswordProvider pw)
+ {
+ int packStreamIndex = folder._firstPackStreamId;
+ long folderStartPackPos = GetFolderStreamPos(folder, 0);
+ List<long> packSizes = new List<long>();
+ for (int j = 0; j < folder._packStreams.Count; j++)
+ {
+ packSizes.Add(_packSizes[packStreamIndex + j]);
+ }
+
+ return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
+ }
+
+ private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
+ {
+ return _packSizes[_folders[folderIndex]._firstPackStreamId + streamIndex];
+ }
+
+ private long GetFilePackSize(int fileIndex)
+ {
+ int folderIndex = _fileIndexToFolderIndexMap[fileIndex];
+ if (folderIndex != -1)
+ {
+ if (_folderStartFileIndex[folderIndex] == fileIndex)
+ {
+ return GetFolderFullPackSize(folderIndex);
+ }
+ }
+ return 0;
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveReader.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveReader.cs
new file mode 100644
index 0000000000..2093a83106
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/ArchiveReader.cs
@@ -0,0 +1,1591 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using SharpCompress.Compressors.LZMA;
+using SharpCompress.Compressors.LZMA.Utilites;
+using SharpCompress.IO;
+
+namespace SharpCompress.Common.SevenZip
+{
+ internal class ArchiveReader
+ {
+ internal Stream _stream;
+ internal Stack<DataReader> _readerStack = new Stack<DataReader>();
+ internal DataReader _currentReader;
+ internal long _streamOrigin;
+ internal long _streamEnding;
+ internal byte[] _header;
+
+ private readonly Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
+
+ internal void AddByteStream(byte[] buffer, int offset, int length)
+ {
+ _readerStack.Push(_currentReader);
+ _currentReader = new DataReader(buffer, offset, length);
+ }
+
+ internal void DeleteByteStream()
+ {
+ _currentReader = _readerStack.Pop();
+ }
+
+ #region Private Methods - Data Reader
+
+ internal Byte ReadByte()
+ {
+ return _currentReader.ReadByte();
+ }
+
+ private void ReadBytes(byte[] buffer, int offset, int length)
+ {
+ _currentReader.ReadBytes(buffer, offset, length);
+ }
+
+ private ulong ReadNumber()
+ {
+ return _currentReader.ReadNumber();
+ }
+
+ internal int ReadNum()
+ {
+ return _currentReader.ReadNum();
+ }
+
+ private uint ReadUInt32()
+ {
+ return _currentReader.ReadUInt32();
+ }
+
+ private ulong ReadUInt64()
+ {
+ return _currentReader.ReadUInt64();
+ }
+
+ private BlockType? ReadId()
+ {
+ ulong id = _currentReader.ReadNumber();
+ if (id > 25)
+ {
+ return null;
+ }
+#if DEBUG
+ Log.WriteLine("ReadId: {0}", (BlockType)id);
+#endif
+ return (BlockType)id;
+ }
+
+ private void SkipData(long size)
+ {
+ _currentReader.SkipData(size);
+ }
+
+ private void SkipData()
+ {
+ _currentReader.SkipData();
+ }
+
+ private void WaitAttribute(BlockType attribute)
+ {
+ for (;;)
+ {
+ BlockType? type = ReadId();
+ if (type == attribute)
+ {
+ return;
+ }
+ if (type == BlockType.End)
+ {
+ throw new InvalidOperationException();
+ }
+ SkipData();
+ }
+ }
+
+ private void ReadArchiveProperties()
+ {
+ while (ReadId() != BlockType.End)
+ {
+ SkipData();
+ }
+ }
+
+ #endregion
+
+ #region Private Methods - Reader Utilities
+
+ private BitVector ReadBitVector(int length)
+ {
+ var bits = new BitVector(length);
+
+ byte data = 0;
+ byte mask = 0;
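+ // Bits are packed most-significant-bit first; a fresh byte is fetched every 8 bits.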
+
+ for (int i = 0; i < length; i++)
+ {
+ if (mask == 0)
+ {
+ data = ReadByte();
+ mask = 0x80;
+ }
+
+ if ((data & mask) != 0)
+ {
+ bits.SetBit(i);
+ }
+
+ mask >>= 1;
+ }
+
+ return bits;
+ }
+
+ private BitVector ReadOptionalBitVector(int length)
+ {
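+ // A non-zero lead byte means "all items defined"; otherwise an explicit bit
+ // vector of the given length follows.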
+ byte allTrue = ReadByte();
+ if (allTrue != 0)
+ {
+ return new BitVector(length, true);
+ }
+
+ return ReadBitVector(length);
+ }
+
+ private void ReadNumberVector(List<byte[]> dataVector, int numFiles, Action<int, long?> action)
+ {
+ var defined = ReadOptionalBitVector(numFiles);
+
+ using (CStreamSwitch streamSwitch = new CStreamSwitch())
+ {
+ streamSwitch.Set(this, dataVector);
+
+ for (int i = 0; i < numFiles; i++)
+ {
+ if (defined[i])
+ {
+ action(i, checked((long)ReadUInt64()));
+ }
+ else
+ {
+ action(i, null);
+ }
+ }
+ }
+ }
+
+ private DateTime TranslateTime(long time)
+ {
+ // FILETIME = 100-nanosecond intervals since January 1, 1601 (UTC)
+ return DateTime.FromFileTimeUtc(time).ToLocalTime();
+ }
+
+ private DateTime? TranslateTime(long? time)
+ {
+ if (time.HasValue && time.Value >= 0 && time.Value <= 2650467743999999999) //maximum Windows file time 31.12.9999
+ {
+ return TranslateTime(time.Value);
+ }
+ return null;
+ }
+
+ private void ReadDateTimeVector(List<byte[]> dataVector, int numFiles, Action<int, DateTime?> action)
+ {
+ ReadNumberVector(dataVector, numFiles, (index, value) => action(index, TranslateTime(value)));
+ }
+
+ private void ReadAttributeVector(List<byte[]> dataVector, int numFiles, Action<int, uint?> action)
+ {
+ BitVector boolVector = ReadOptionalBitVector(numFiles);
+ using (var streamSwitch = new CStreamSwitch())
+ {
+ streamSwitch.Set(this, dataVector);
+ for (int i = 0; i < numFiles; i++)
+ {
+ if (boolVector[i])
+ {
+ action(i, ReadUInt32());
+ }
+ else
+ {
+ action(i, null);
+ }
+ }
+ }
+ }
+
+ #endregion
+
+ #region Private Methods
+
+ private void GetNextFolderItem(CFolder folder)
+ {
+#if DEBUG
+ Log.WriteLine("-- GetNextFolderItem --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ int numCoders = ReadNum();
+#if DEBUG
+ Log.WriteLine("NumCoders: " + numCoders);
+#endif
+ folder._coders = new List<CCoderInfo>(numCoders);
+ int numInStreams = 0;
+ int numOutStreams = 0;
+ for (int i = 0; i < numCoders; i++)
+ {
+#if DEBUG
+ Log.WriteLine("-- Coder --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ CCoderInfo coder = new CCoderInfo();
+ folder._coders.Add(coder);
+
+ byte mainByte = ReadByte();
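+ // Coder flag byte: low 4 bits give the codec id length, 0x10 marks a complex
+ // coder with explicit stream counts, 0x20 means codec properties follow, and
+ // 0x80 (alternative methods) is unsupported here.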
+ int idSize = (mainByte & 0xF);
+ byte[] longId = new byte[idSize];
+ ReadBytes(longId, 0, idSize);
+#if DEBUG
+ Log.WriteLine("MethodId: " + String.Join("", Enumerable.Range(0, idSize).Select(x => longId[x].ToString("x2")).ToArray()));
+#endif
+ if (idSize > 8)
+ {
+ throw new NotSupportedException();
+ }
+ ulong id = 0;
+ for (int j = 0; j < idSize; j++)
+ {
+ id |= (ulong)longId[idSize - 1 - j] << (8 * j);
+ }
+ coder._methodId = new CMethodId(id);
+
+ if ((mainByte & 0x10) != 0)
+ {
+ coder._numInStreams = ReadNum();
+ coder._numOutStreams = ReadNum();
+#if DEBUG
+ Log.WriteLine("Complex Stream (In: " + coder._numInStreams + " - Out: " + coder._numOutStreams + ")");
+#endif
+ }
+ else
+ {
+#if DEBUG
+ Log.WriteLine("Simple Stream (In: 1 - Out: 1)");
+#endif
+ coder._numInStreams = 1;
+ coder._numOutStreams = 1;
+ }
+
+ if ((mainByte & 0x20) != 0)
+ {
+ int propsSize = ReadNum();
+ coder._props = new byte[propsSize];
+ ReadBytes(coder._props, 0, propsSize);
+#if DEBUG
+ Log.WriteLine("Settings: " + String.Join("", coder._props.Select(bt => bt.ToString("x2")).ToArray()));
+#endif
+ }
+
+ if ((mainByte & 0x80) != 0)
+ {
+ throw new NotSupportedException();
+ }
+
+ numInStreams += coder._numInStreams;
+ numOutStreams += coder._numOutStreams;
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ int numBindPairs = numOutStreams - 1;
+ folder._bindPairs = new List<CBindPair>(numBindPairs);
+#if DEBUG
+ Log.WriteLine("BindPairs: " + numBindPairs);
+ Log.PushIndent();
+#endif
+ for (int i = 0; i < numBindPairs; i++)
+ {
+ CBindPair bp = new CBindPair();
+ bp._inIndex = ReadNum();
+ bp._outIndex = ReadNum();
+ folder._bindPairs.Add(bp);
+#if DEBUG
+ Log.WriteLine("#" + i + " - In: " + bp._inIndex + " - Out: " + bp._outIndex);
+#endif
+ }
+#if DEBUG
+ Log.PopIndent();
+#endif
+
+ if (numInStreams < numBindPairs)
+ {
+ throw new NotSupportedException();
+ }
+
+ int numPackStreams = numInStreams - numBindPairs;
+
+ //folder.PackStreams.Reserve(numPackStreams);
+ if (numPackStreams == 1)
+ {
+ for (int i = 0; i < numInStreams; i++)
+ {
+ if (folder.FindBindPairForInStream(i) < 0)
+ {
+#if DEBUG
+ Log.WriteLine("Single PackStream: #" + i);
+#endif
+ folder._packStreams.Add(i);
+ break;
+ }
+ }
+
+ if (folder._packStreams.Count != 1)
+ {
+ throw new NotSupportedException();
+ }
+ }
+ else
+ {
+#if DEBUG
+ Log.WriteLine("Multiple PackStreams ...");
+ Log.PushIndent();
+#endif
+ for (int i = 0; i < numPackStreams; i++)
+ {
+ var num = ReadNum();
+#if DEBUG
+ Log.WriteLine("#" + i + " - " + num);
+#endif
+ folder._packStreams.Add(num);
+ }
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private List<uint?> ReadHashDigests(int count)
+ {
+#if DEBUG
+ Log.Write("ReadHashDigests:");
+#endif
+
+ var defined = ReadOptionalBitVector(count);
+ var digests = new List<uint?>(count);
+ for (int i = 0; i < count; i++)
+ {
+ if (defined[i])
+ {
+ uint crc = ReadUInt32();
+#if DEBUG
+ Log.Write(" " + crc.ToString("x8"));
+#endif
+ digests.Add(crc);
+ }
+ else
+ {
+#if DEBUG
+ Log.Write(" ########");
+#endif
+ digests.Add(null);
+ }
+ }
+#if DEBUG
+
+ Log.WriteLine();
+#endif
+ return digests;
+ }
+
+ private void ReadPackInfo(out long dataOffset, out List<long> packSizes, out List<uint?> packCrCs)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadPackInfo --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ packCrCs = null;
+
+ dataOffset = checked((long)ReadNumber());
+#if DEBUG
+ Log.WriteLine("DataOffset: " + dataOffset);
+#endif
+
+ int numPackStreams = ReadNum();
+#if DEBUG
+ Log.WriteLine("NumPackStreams: " + numPackStreams);
+#endif
+
+ WaitAttribute(BlockType.Size);
+ packSizes = new List<long>(numPackStreams);
+#if DEBUG
+ Log.Write("Sizes:");
+#endif
+ for (int i = 0; i < numPackStreams; i++)
+ {
+ var size = checked((long)ReadNumber());
+#if DEBUG
+ Log.Write(" " + size);
+#endif
+ packSizes.Add(size);
+ }
+#if DEBUG
+ Log.WriteLine();
+#endif
+
+ BlockType? type;
+ for (;;)
+ {
+ type = ReadId();
+ if (type == BlockType.End)
+ {
+ break;
+ }
+ if (type == BlockType.Crc)
+ {
+ packCrCs = ReadHashDigests(numPackStreams);
+ continue;
+ }
+ SkipData();
+ }
+
+ if (packCrCs == null)
+ {
+ packCrCs = new List<uint?>(numPackStreams);
+ for (int i = 0; i < numPackStreams; i++)
+ {
+ packCrCs.Add(null);
+ }
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private void ReadUnpackInfo(List<byte[]> dataVector, out List<CFolder> folders)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadUnpackInfo --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ WaitAttribute(BlockType.Folder);
+ int numFolders = ReadNum();
+#if DEBUG
+ Log.WriteLine("NumFolders: {0}", numFolders);
+#endif
+
+ using (CStreamSwitch streamSwitch = new CStreamSwitch())
+ {
+ streamSwitch.Set(this, dataVector);
+
+ //folders.Clear();
+ //folders.Reserve(numFolders);
+ folders = new List<CFolder>(numFolders);
+ int index = 0;
+ for (int i = 0; i < numFolders; i++)
+ {
+ var f = new CFolder {_firstPackStreamId = index};
+ folders.Add(f);
+ GetNextFolderItem(f);
+ index += f._packStreams.Count;
+ }
+ }
+
+ WaitAttribute(BlockType.CodersUnpackSize);
+#if DEBUG
+ Log.WriteLine("UnpackSizes:");
+#endif
+ for (int i = 0; i < numFolders; i++)
+ {
+ CFolder folder = folders[i];
+#if DEBUG
+ Log.Write(" #" + i + ":");
+#endif
+ int numOutStreams = folder.GetNumOutStreams();
+ for (int j = 0; j < numOutStreams; j++)
+ {
+ long size = checked((long)ReadNumber());
+#if DEBUG
+ Log.Write(" " + size);
+#endif
+ folder._unpackSizes.Add(size);
+ }
+#if DEBUG
+ Log.WriteLine();
+#endif
+ }
+
+ for (;;)
+ {
+ BlockType? type = ReadId();
+ if (type == BlockType.End)
+ {
+ return;
+ }
+
+ if (type == BlockType.Crc)
+ {
+ List<uint?> crcs = ReadHashDigests(numFolders);
+ for (int i = 0; i < numFolders; i++)
+ {
+ folders[i]._unpackCrc = crcs[i];
+ }
+ continue;
+ }
+
+ SkipData();
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private void ReadSubStreamsInfo(List<CFolder> folders, out List<int> numUnpackStreamsInFolders,
+ out List<long> unpackSizes, out List<uint?> digests)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadSubStreamsInfo --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ numUnpackStreamsInFolders = null;
+
+ BlockType? type;
+ for (;;)
+ {
+ type = ReadId();
+ if (type == BlockType.NumUnpackStream)
+ {
+ numUnpackStreamsInFolders = new List<int>(folders.Count);
+#if DEBUG
+ Log.Write("NumUnpackStreams:");
+#endif
+ for (int i = 0; i < folders.Count; i++)
+ {
+ var num = ReadNum();
+#if DEBUG
+ Log.Write(" " + num);
+#endif
+ numUnpackStreamsInFolders.Add(num);
+ }
+#if DEBUG
+ Log.WriteLine();
+#endif
+ continue;
+ }
+ if (type == BlockType.Crc || type == BlockType.Size)
+ {
+ break;
+ }
+ if (type == BlockType.End)
+ {
+ break;
+ }
+ SkipData();
+ }
+
+ if (numUnpackStreamsInFolders == null)
+ {
+ numUnpackStreamsInFolders = new List<int>(folders.Count);
+ for (int i = 0; i < folders.Count; i++)
+ {
+ numUnpackStreamsInFolders.Add(1);
+ }
+ }
+
+ unpackSizes = new List<long>(folders.Count);
+ for (int i = 0; i < numUnpackStreamsInFolders.Count; i++)
+ {
+ // v3.13 incorrectly worked with empty folders
+ // v4.07: we check that folder is empty
+ int numSubstreams = numUnpackStreamsInFolders[i];
+ if (numSubstreams == 0)
+ {
+ continue;
+ }
+#if DEBUG
+ Log.Write("#{0} StreamSizes:", i);
+#endif
+ long sum = 0;
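+ // Only the first numSubstreams - 1 sizes are stored; the last substream's
+ // size is derived below from the folder's total unpack size.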
+ for (int j = 1; j < numSubstreams; j++)
+ {
+ if (type == BlockType.Size)
+ {
+ long size = checked((long)ReadNumber());
+#if DEBUG
+ Log.Write(" " + size);
+#endif
+ unpackSizes.Add(size);
+ sum += size;
+ }
+ }
+ unpackSizes.Add(folders[i].GetUnpackSize() - sum);
+#if DEBUG
+ Log.WriteLine(" - rest: " + unpackSizes.Last());
+#endif
+ }
+ if (type == BlockType.Size)
+ {
+ type = ReadId();
+ }
+
+ int numDigests = 0;
+ int numDigestsTotal = 0;
+ for (int i = 0; i < folders.Count; i++)
+ {
+ int numSubstreams = numUnpackStreamsInFolders[i];
+ if (numSubstreams != 1 || !folders[i].UnpackCrcDefined)
+ {
+ numDigests += numSubstreams;
+ }
+ numDigestsTotal += numSubstreams;
+ }
+
+ digests = null;
+
+ for (;;)
+ {
+ if (type == BlockType.Crc)
+ {
+ digests = new List<uint?>(numDigestsTotal);
+
+ List<uint?> digests2 = ReadHashDigests(numDigests);
+
+ int digestIndex = 0;
+ for (int i = 0; i < folders.Count; i++)
+ {
+ int numSubstreams = numUnpackStreamsInFolders[i];
+ CFolder folder = folders[i];
+ if (numSubstreams == 1 && folder.UnpackCrcDefined)
+ {
+ digests.Add(folder._unpackCrc.Value);
+ }
+ else
+ {
+ for (int j = 0; j < numSubstreams; j++, digestIndex++)
+ {
+ digests.Add(digests2[digestIndex]);
+ }
+ }
+ }
+
+ if (digestIndex != numDigests || numDigestsTotal != digests.Count)
+ {
+ Debugger.Break();
+ }
+ }
+ else if (type == BlockType.End)
+ {
+ if (digests == null)
+ {
+ digests = new List<uint?>(numDigestsTotal);
+ for (int i = 0; i < numDigestsTotal; i++)
+ {
+ digests.Add(null);
+ }
+ }
+ return;
+ }
+ else
+ {
+ SkipData();
+ }
+
+ type = ReadId();
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private void ReadStreamsInfo(
+ List<byte[]> dataVector,
+ out long dataOffset,
+ out List<long> packSizes,
+ out List<uint?> packCrCs,
+ out List<CFolder> folders,
+ out List<int> numUnpackStreamsInFolders,
+ out List<long> unpackSizes,
+ out List<uint?> digests)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadStreamsInfo --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ dataOffset = long.MinValue;
+ packSizes = null;
+ packCrCs = null;
+ folders = null;
+ numUnpackStreamsInFolders = null;
+ unpackSizes = null;
+ digests = null;
+
+ for (;;)
+ {
+ switch (ReadId())
+ {
+ case BlockType.End:
+ return;
+ case BlockType.PackInfo:
+ ReadPackInfo(out dataOffset, out packSizes, out packCrCs);
+ break;
+ case BlockType.UnpackInfo:
+ ReadUnpackInfo(dataVector, out folders);
+ break;
+ case BlockType.SubStreamsInfo:
+ ReadSubStreamsInfo(folders, out numUnpackStreamsInFolders, out unpackSizes, out digests);
+ break;
+ default:
+ throw new InvalidOperationException();
+ }
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private List<byte[]> ReadAndDecodePackedStreams(long baseOffset, IPasswordProvider pass)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadAndDecodePackedStreams --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ long dataStartPos;
+ List<long> packSizes;
+ List<uint?> packCrCs;
+ List<CFolder> folders;
+ List<int> numUnpackStreamsInFolders;
+ List<long> unpackSizes;
+ List<uint?> digests;
+
+ ReadStreamsInfo(null,
+ out dataStartPos,
+ out packSizes,
+ out packCrCs,
+ out folders,
+ out numUnpackStreamsInFolders,
+ out unpackSizes,
+ out digests);
+
+ dataStartPos += baseOffset;
+
+ var dataVector = new List<byte[]>(folders.Count);
+ int packIndex = 0;
+ foreach (var folder in folders)
+ {
+ long oldDataStartPos = dataStartPos;
+ long[] myPackSizes = new long[folder._packStreams.Count];
+ for (int i = 0; i < myPackSizes.Length; i++)
+ {
+ long packSize = packSizes[packIndex + i];
+ myPackSizes[i] = packSize;
+ dataStartPos += packSize;
+ }
+
+ var outStream = DecoderStreamHelper.CreateDecoderStream(_stream, oldDataStartPos, myPackSizes,
+ folder, pass);
+
+ int unpackSize = checked((int)folder.GetUnpackSize());
+ byte[] data = new byte[unpackSize];
+ outStream.ReadExact(data, 0, data.Length);
+ if (outStream.ReadByte() >= 0)
+ {
+ throw new InvalidOperationException("Decoded stream is longer than expected.");
+ }
+ dataVector.Add(data);
+
+ if (folder.UnpackCrcDefined)
+ {
+ if (Crc.Finish(Crc.Update(Crc.INIT_CRC, data, 0, unpackSize)) != folder._unpackCrc)
+ {
+ throw new InvalidOperationException("Decoded stream does not match expected CRC.");
+ }
+ }
+ }
+ return dataVector;
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ private void ReadHeader(ArchiveDatabase db, IPasswordProvider getTextPassword)
+ {
+#if DEBUG
+ Log.WriteLine("-- ReadHeader --");
+ Log.PushIndent();
+#endif
+ try
+ {
+ BlockType? type = ReadId();
+
+ if (type == BlockType.ArchiveProperties)
+ {
+ ReadArchiveProperties();
+ type = ReadId();
+ }
+
+ List<byte[]> dataVector = null;
+ if (type == BlockType.AdditionalStreamsInfo)
+ {
+ dataVector = ReadAndDecodePackedStreams(db._startPositionAfterHeader, getTextPassword);
+ type = ReadId();
+ }
+
+ List<long> unpackSizes;
+ List<uint?> digests;
+
+ if (type == BlockType.MainStreamsInfo)
+ {
+ ReadStreamsInfo(dataVector,
+ out db._dataStartPosition,
+ out db._packSizes,
+ out db._packCrCs,
+ out db._folders,
+ out db._numUnpackStreamsVector,
+ out unpackSizes,
+ out digests);
+
+ db._dataStartPosition += db._startPositionAfterHeader;
+ type = ReadId();
+ }
+ else
+ {
+ unpackSizes = new List<long>(db._folders.Count);
+ digests = new List<uint?>(db._folders.Count);
+ db._numUnpackStreamsVector = new List<int>(db._folders.Count);
+ for (int i = 0; i < db._folders.Count; i++)
+ {
+ var folder = db._folders[i];
+ unpackSizes.Add(folder.GetUnpackSize());
+ digests.Add(folder._unpackCrc);
+ db._numUnpackStreamsVector.Add(1);
+ }
+ }
+
+ db._files.Clear();
+
+ if (type == BlockType.End)
+ {
+ return;
+ }
+
+ if (type != BlockType.FilesInfo)
+ {
+ throw new InvalidOperationException();
+ }
+
+ int numFiles = ReadNum();
+#if DEBUG
+ Log.WriteLine("NumFiles: " + numFiles);
+#endif
+ db._files = new List<CFileItem>(numFiles);
+ for (int i = 0; i < numFiles; i++)
+ {
+ db._files.Add(new CFileItem());
+ }
+
+ BitVector emptyStreamVector = new BitVector(numFiles);
+ BitVector emptyFileVector = null;
+ BitVector antiFileVector = null;
+ int numEmptyStreams = 0;
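+ // Three related bit vectors: EmptyStream marks files with no data; among
+ // those, EmptyFile separates zero-length files from directories, and Anti
+ // marks deletion ("anti") entries; see the fix-up loop after this switch.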
+
+ for (;;)
+ {
+ type = ReadId();
+ if (type == BlockType.End)
+ {
+ break;
+ }
+
+ long size = checked((long)ReadNumber()); // TODO: throw invalid data on negative
+ int oldPos = _currentReader.Offset;
+ switch (type)
+ {
+ case BlockType.Name:
+ using (var streamSwitch = new CStreamSwitch())
+ {
+ streamSwitch.Set(this, dataVector);
+#if DEBUG
+ Log.Write("FileNames:");
+#endif
+ for (int i = 0; i < db._files.Count; i++)
+ {
+ db._files[i].Name = _currentReader.ReadString();
+#if DEBUG
+ Log.Write(" " + db._files[i].Name);
+#endif
+ }
+#if DEBUG
+ Log.WriteLine();
+#endif
+ }
+ break;
+ case BlockType.WinAttributes:
+#if DEBUG
+ Log.Write("WinAttributes:");
+#endif
+ ReadAttributeVector(dataVector, numFiles, delegate(int i, uint? attr)
+ {
+ // Some third party implementations established an unofficial extension
+ // of the 7z archive format by placing posix file attributes in the high
+ // bits of the windows file attributes. This makes use of the fact that
+ // the official implementation does not perform checks on this value.
+ //
+ // Newer versions of the official 7z GUI client will try to parse this
+ // extension, thus acknowledging the unofficial use of these bits.
+ //
+ // For us it is safe to just discard the upper bits if they are set and
+ // keep the windows attributes from the lower bits (which should be set
+ // properly even if posix file attributes are present, in order to be
+ // compatible with older 7z archive readers)
+ //
+ // Note that the 15th bit is used by some implementations to indicate
+ // presence of the extension, but not all implementations do that so
+ // we can't trust that bit and must ignore it.
+ //
+ if (attr.HasValue && (attr.Value >> 16) != 0)
+ {
+ attr = attr.Value & 0x7FFFu;
+ }
+
+ db._files[i].Attrib = attr;
+#if DEBUG
+ Log.Write(" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a"));
+#endif
+ });
+#if DEBUG
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.EmptyStream:
+ emptyStreamVector = ReadBitVector(numFiles);
+#if DEBUG
+
+ Log.Write("EmptyStream: ");
+#endif
+ for (int i = 0; i < emptyStreamVector.Length; i++)
+ {
+ if (emptyStreamVector[i])
+ {
+#if DEBUG
+ Log.Write("x");
+#endif
+ numEmptyStreams++;
+ }
+ else
+ {
+#if DEBUG
+ Log.Write(".");
+#endif
+ }
+ }
+#if DEBUG
+ Log.WriteLine();
+#endif
+
+ emptyFileVector = new BitVector(numEmptyStreams);
+ antiFileVector = new BitVector(numEmptyStreams);
+ break;
+ case BlockType.EmptyFile:
+ emptyFileVector = ReadBitVector(numEmptyStreams);
+#if DEBUG
+ Log.Write("EmptyFile: ");
+ for (int i = 0; i < numEmptyStreams; i++)
+ {
+ Log.Write(emptyFileVector[i] ? "x" : ".");
+ }
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.Anti:
+ antiFileVector = ReadBitVector(numEmptyStreams);
+#if DEBUG
+ Log.Write("Anti: ");
+ for (int i = 0; i < numEmptyStreams; i++)
+ {
+ Log.Write(antiFileVector[i] ? "x" : ".");
+ }
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.StartPos:
+#if DEBUG
+ Log.Write("StartPos:");
+#endif
+ ReadNumberVector(dataVector, numFiles, delegate(int i, long? startPos)
+ {
+ db._files[i].StartPos = startPos;
+#if DEBUG
+ Log.Write(" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a"));
+#endif
+ });
+#if DEBUG
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.CTime:
+#if DEBUG
+ Log.Write("CTime:");
+#endif
+ ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
+ {
+ db._files[i].CTime = time;
+#if DEBUG
+ Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
+#endif
+ });
+#if DEBUG
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.ATime:
+#if DEBUG
+ Log.Write("ATime:");
+#endif
+ ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
+ {
+ db._files[i].ATime = time;
+#if DEBUG
+ Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
+#endif
+ });
+#if DEBUG
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.MTime:
+#if DEBUG
+ Log.Write("MTime:");
+#endif
+ ReadDateTimeVector(dataVector, numFiles, delegate(int i, DateTime? time)
+ {
+ db._files[i].MTime = time;
+#if DEBUG
+ Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
+#endif
+ });
+#if DEBUG
+ Log.WriteLine();
+#endif
+ break;
+ case BlockType.Dummy:
+#if DEBUG
+ Log.Write("Dummy: " + size);
+#endif
+ for (long j = 0; j < size; j++)
+ {
+ if (ReadByte() != 0)
+ {
+ throw new InvalidOperationException();
+ }
+ }
+ break;
+ default:
+ SkipData(size);
+ break;
+ }
+
+ // since 0.3 record sizes must be correct
+ bool checkRecordsSize = (db._majorVersion > 0 || db._minorVersion > 2);
+ if (checkRecordsSize && _currentReader.Offset - oldPos != size)
+ {
+ throw new InvalidOperationException();
+ }
+ }
+
+ int emptyFileIndex = 0;
+ int sizeIndex = 0;
+ for (int i = 0; i < numFiles; i++)
+ {
+ CFileItem file = db._files[i];
+ file.HasStream = !emptyStreamVector[i];
+ if (file.HasStream)
+ {
+ file.IsDir = false;
+ file.IsAnti = false;
+ file.Size = unpackSizes[sizeIndex];
+ file.Crc = digests[sizeIndex];
+ sizeIndex++;
+ }
+ else
+ {
+ file.IsDir = !emptyFileVector[emptyFileIndex];
+ file.IsAnti = antiFileVector[emptyFileIndex];
+ emptyFileIndex++;
+ file.Size = 0;
+ file.Crc = null;
+ }
+ }
+ }
+ finally
+ {
+#if DEBUG
+ Log.PopIndent();
+#endif
+ }
+ }
+
+ #endregion
+
+ #region Public Methods
+
+ public void Open(Stream stream)
+ {
+ Close();
+
+ _streamOrigin = stream.Position;
+ _streamEnding = stream.Length;
+
+ // TODO: Check Signature!
+ _header = new byte[0x20];
+ for (int offset = 0; offset < 0x20;)
+ {
+ int delta = stream.Read(_header, offset, 0x20 - offset);
+ if (delta == 0)
+ {
+ throw new EndOfStreamException();
+ }
+ offset += delta;
+ }
+
+ _stream = stream;
+ }
+
+ public void Close()
+ {
+ if (_stream != null)
+ {
+ _stream.Dispose();
+ }
+
+ foreach (var stream in _cachedStreams.Values)
+ {
+ stream.Dispose();
+ }
+
+ _cachedStreams.Clear();
+ }
+
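+ // Sketch of typical use of this reader (illustrative only; the stream path
+ // and password provider are placeholders):
+ // var reader = new ArchiveReader();
+ // reader.Open(File.OpenRead("archive.7z"));
+ // ArchiveDatabase db = reader.ReadDatabase(passwordProvider); // any IPasswordProvider
+ // using (Stream s = reader.OpenStream(db, 0)) { /* decoded bytes of entry 0 */ }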
+ public ArchiveDatabase ReadDatabase(IPasswordProvider pass)
+ {
+ var db = new ArchiveDatabase(pass);
+ db.Clear();
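+ // Layout of the 0x20-byte signature header read in Open(): bytes 0-5 hold the
+ // 7z signature, 6/7 the format version, 8-11 a CRC over the next-header
+ // fields, 0x0C-0x13 the next header offset, 0x14-0x1B its size, 0x1C-0x1F its CRC.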
+
+ db._majorVersion = _header[6];
+ db._minorVersion = _header[7];
+
+ if (db._majorVersion != 0)
+ {
+ throw new InvalidOperationException();
+ }
+
+ uint crcFromArchive = DataReader.Get32(_header, 8);
+ long nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
+ long nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
+ uint nextHeaderCrc = DataReader.Get32(_header, 0x1C);
+
+ uint crc = Crc.INIT_CRC;
+ crc = Crc.Update(crc, nextHeaderOffset);
+ crc = Crc.Update(crc, nextHeaderSize);
+ crc = Crc.Update(crc, nextHeaderCrc);
+ crc = Crc.Finish(crc);
+
+ if (crc != crcFromArchive)
+ {
+ throw new InvalidOperationException();
+ }
+
+ db._startPositionAfterHeader = _streamOrigin + 0x20;
+
+ // empty header is ok
+ if (nextHeaderSize == 0)
+ {
+ db.Fill();
+ return db;
+ }
+
+ if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > Int32.MaxValue)
+ {
+ throw new InvalidOperationException();
+ }
+
+ if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
+ {
+ throw new IndexOutOfRangeException();
+ }
+
+ _stream.Seek(nextHeaderOffset, SeekOrigin.Current);
+
+ byte[] header = new byte[nextHeaderSize];
+ _stream.ReadExact(header, 0, header.Length);
+
+ if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
+ {
+ throw new InvalidOperationException();
+ }
+
+ using (CStreamSwitch streamSwitch = new CStreamSwitch())
+ {
+ streamSwitch.Set(this, header);
+
+ BlockType? type = ReadId();
+ if (type != BlockType.Header)
+ {
+ if (type != BlockType.EncodedHeader)
+ {
+ throw new InvalidOperationException();
+ }
+
+ var dataVector = ReadAndDecodePackedStreams(db._startPositionAfterHeader, db.PasswordProvider);
+
+ // compressed header without content is odd but ok
+ if (dataVector.Count == 0)
+ {
+ db.Fill();
+ return db;
+ }
+
+ if (dataVector.Count != 1)
+ {
+ throw new InvalidOperationException();
+ }
+
+ streamSwitch.Set(this, dataVector[0]);
+
+ if (ReadId() != BlockType.Header)
+ {
+ throw new InvalidOperationException();
+ }
+ }
+
+ ReadHeader(db, db.PasswordProvider);
+ }
+ db.Fill();
+ return db;
+ }
+
+ internal class CExtractFolderInfo
+ {
+ internal int _fileIndex;
+ internal int _folderIndex;
+ internal List<bool> _extractStatuses = new List<bool>();
+
+ internal CExtractFolderInfo(int fileIndex, int folderIndex)
+ {
+ _fileIndex = fileIndex;
+ _folderIndex = folderIndex;
+ if (fileIndex != -1)
+ {
+ _extractStatuses.Add(true);
+ }
+ }
+ }
+
+ private class FolderUnpackStream : Stream
+ {
+ private readonly ArchiveDatabase _db;
+ private readonly int _startIndex;
+ private readonly List<bool> _extractStatuses;
+
+ public FolderUnpackStream(ArchiveDatabase db, int p, int startIndex, List<bool> list)
+ {
+ _db = db;
+ _startIndex = startIndex;
+ _extractStatuses = list;
+ }
+
+ #region Stream
+
+ // This stream is only ever written to by the extractor; reading is not supported.
+ public override bool CanRead => false;
+
+ public override bool CanSeek => false;
+
+ public override bool CanWrite => true;
+
+ public override void Flush()
+ {
+ throw new NotSupportedException();
+ }
+
+ public override long Length => throw new NotSupportedException();
+
+ public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
+
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override long Seek(long offset, SeekOrigin origin)
+ {
+ throw new NotSupportedException();
+ }
+
+ public override void SetLength(long value)
+ {
+ throw new NotSupportedException();
+ }
+
+ private Stream _stream;
+ private long _rem;
+ private int _currentIndex;
+
+ private void ProcessEmptyFiles()
+ {
+ while (_currentIndex < _extractStatuses.Count && _db._files[_startIndex + _currentIndex].Size == 0)
+ {
+ OpenFile();
+ _stream.Dispose();
+ _stream = null;
+ _currentIndex++;
+ }
+ }
+
+ private void OpenFile()
+ {
+ int index = _startIndex + _currentIndex;
+#if DEBUG
+ Log.WriteLine(_db._files[index].Name);
+#endif
+ if (_db._files[index].CrcDefined)
+ {
+ _stream = new CrcCheckStream(_db._files[index].Crc.Value);
+ }
+ else
+ {
+ _stream = new MemoryStream();
+ }
+ _rem = _db._files[index].Size;
+ }
+
+ public override void Write(byte[] buffer, int offset, int count)
+ {
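+ // Demultiplexes one folder's decoded byte stream into its consecutive files:
+ // bytes go to the current file until its size is exhausted, then the next
+ // file (skipping empty ones) is opened.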
+ while (count != 0)
+ {
+ if (_stream != null)
+ {
+ int write = count;
+ if (write > _rem)
+ {
+ write = (int)_rem;
+ }
+ _stream.Write(buffer, offset, write);
+ count -= write;
+ _rem -= write;
+ offset += write;
+ if (_rem == 0)
+ {
+ _stream.Dispose();
+ _stream = null;
+ _currentIndex++;
+ ProcessEmptyFiles();
+ }
+ }
+ else
+ {
+ ProcessEmptyFiles();
+ if (_currentIndex == _extractStatuses.Count)
+ {
+ // we support partial extracting
+ Debugger.Break();
+ throw new NotSupportedException();
+ }
+ OpenFile();
+ }
+ }
+ }
+
+ #endregion
+ }
+
+ private Stream GetCachedDecoderStream(ArchiveDatabase db, int folderIndex)
+ {
+ Stream s;
+ if (!_cachedStreams.TryGetValue(folderIndex, out s))
+ {
+ CFolder folderInfo = db._folders[folderIndex];
+ int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
+ long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
+ List<long> packSizes = new List<long>();
+ for (int j = 0; j < folderInfo._packStreams.Count; j++)
+ {
+ packSizes.Add(db._packSizes[packStreamIndex + j]);
+ }
+
+ s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
+ db.PasswordProvider);
+ _cachedStreams.Add(folderIndex, s);
+ }
+ return s;
+ }
+
+ public Stream OpenStream(ArchiveDatabase db, int fileIndex)
+ {
+ int folderIndex = db._fileIndexToFolderIndexMap[fileIndex];
+ int numFilesInFolder = db._numUnpackStreamsVector[folderIndex];
+ int firstFileIndex = db._folderStartFileIndex[folderIndex];
+ if (firstFileIndex > fileIndex || fileIndex - firstFileIndex >= numFilesInFolder)
+ {
+ throw new InvalidOperationException();
+ }
+
+ int skipCount = fileIndex - firstFileIndex;
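+ // Entries in a folder share a single decoder stream, so reaching this entry
+ // means seeking past the decoded bytes of every earlier entry in the folder.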
+ long skipSize = 0;
+ for (int i = 0; i < skipCount; i++)
+ {
+ skipSize += db._files[firstFileIndex + i].Size;
+ }
+
+ Stream s = GetCachedDecoderStream(db, folderIndex);
+ s.Position = skipSize;
+ return new ReadOnlySubStream(s, db._files[fileIndex].Size);
+ }
+
+ public void Extract(ArchiveDatabase db, int[] indices)
+ {
+ int numItems;
+ bool allFilesMode = (indices == null);
+ if (allFilesMode)
+ {
+ numItems = db._files.Count;
+ }
+ else
+ {
+ numItems = indices.Length;
+ }
+
+ if (numItems == 0)
+ {
+ return;
+ }
+
+ List<CExtractFolderInfo> extractFolderInfoVector = new List<CExtractFolderInfo>();
+ for (int i = 0; i < numItems; i++)
+ {
+ int fileIndex = allFilesMode ? i : indices[i];
+
+ int folderIndex = db._fileIndexToFolderIndexMap[fileIndex];
+ if (folderIndex == -1)
+ {
+ extractFolderInfoVector.Add(new CExtractFolderInfo(fileIndex, -1));
+ continue;
+ }
+
+ if (extractFolderInfoVector.Count == 0 || folderIndex != extractFolderInfoVector.Last()._folderIndex)
+ {
+ extractFolderInfoVector.Add(new CExtractFolderInfo(-1, folderIndex));
+ }
+
+ CExtractFolderInfo efi = extractFolderInfoVector.Last();
+
+ int startIndex = db._folderStartFileIndex[folderIndex];
+ for (int index = efi._extractStatuses.Count; index <= fileIndex - startIndex; index++)
+ {
+ efi._extractStatuses.Add(index == fileIndex - startIndex);
+ }
+ }
+
+ foreach (CExtractFolderInfo efi in extractFolderInfoVector)
+ {
+ int startIndex;
+ if (efi._fileIndex != -1)
+ {
+ startIndex = efi._fileIndex;
+ }
+ else
+ {
+ startIndex = db._folderStartFileIndex[efi._folderIndex];
+ }
+
+ var outStream = new FolderUnpackStream(db, 0, startIndex, efi._extractStatuses);
+
+ if (efi._fileIndex != -1)
+ {
+ continue;
+ }
+
+ int folderIndex = efi._folderIndex;
+ CFolder folderInfo = db._folders[folderIndex];
+
+ int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
+ long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
+
+ List<long> packSizes = new List<long>();
+ for (int j = 0; j < folderInfo._packStreams.Count; j++)
+ {
+ packSizes.Add(db._packSizes[packStreamIndex + j]);
+ }
+
+ // TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
+
+ Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
+ folderInfo, db.PasswordProvider);
+ byte[] buffer = new byte[4 << 10];
+ for (;;)
+ {
+ int processed = s.Read(buffer, 0, buffer.Length);
+ if (processed == 0)
+ {
+ break;
+ }
+ outStream.Write(buffer, 0, processed);
+ }
+ }
+ }
+
+ public IEnumerable<CFileItem> GetFiles(ArchiveDatabase db)
+ {
+ return db._files;
+ }
+
+ public int GetFileIndex(ArchiveDatabase db, CFileItem item)
+ {
+ return db._files.IndexOf(item);
+ }
+
+ #endregion
+ }
+}
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CBindPair.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CBindPair.cs
new file mode 100644
index 0000000000..ad947967a4
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CBindPair.cs
@@ -0,0 +1,8 @@
+namespace SharpCompress.Common.SevenZip
+{
+ internal class CBindPair
+ {
+ internal int _inIndex;
+ internal int _outIndex;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CCoderInfo.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CCoderInfo.cs
new file mode 100644
index 0000000000..035722991f
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CCoderInfo.cs
@@ -0,0 +1,10 @@
+namespace SharpCompress.Common.SevenZip
+{
+ internal class CCoderInfo
+ {
+ internal CMethodId _methodId;
+ internal byte[] _props;
+ internal int _numInStreams;
+ internal int _numOutStreams;
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFileItem.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFileItem.cs
new file mode 100644
index 0000000000..450cbdfc5c
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFileItem.cs
@@ -0,0 +1,36 @@
+using System;
+
+namespace SharpCompress.Common.SevenZip
+{
+ internal class CFileItem
+ {
+ public long Size { get; internal set; }
+ public uint? Attrib { get; internal set; }
+ public uint? Crc { get; internal set; }
+ public string Name { get; internal set; }
+
+ public bool HasStream { get; internal set; }
+ public bool IsDir { get; internal set; }
+
+ public bool CrcDefined => Crc != null;
+
+ public bool AttribDefined => Attrib != null;
+
+ public void SetAttrib(uint attrib)
+ {
+ Attrib = attrib;
+ }
+
+ public DateTime? CTime { get; internal set; }
+ public DateTime? ATime { get; internal set; }
+ public DateTime? MTime { get; internal set; }
+
+ public long? StartPos { get; internal set; }
+ public bool IsAnti { get; internal set; }
+
+ internal CFileItem()
+ {
+ HasStream = true;
+ }
+ }
+}
\ No newline at end of file
diff --git a/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFolder.cs b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFolder.cs
new file mode 100644
index 0000000000..4a68cf505e
--- /dev/null
+++ b/BizHawk.Client.Common/SharpCompress/Common/SevenZip/CFolder.cs
@@ -0,0 +1,188 @@
+using System;
+using System.Collections.Generic;
+using SharpCompress.Compressors.LZMA;
+
+namespace SharpCompress.Common.SevenZip
+{
+ internal class CFolder
+ {
+ internal List<CCoderInfo> _coders = new List<CCoderInfo>();
+ internal List<CBindPair> _bindPairs = new List<CBindPair>();
+ internal List<int> _packStreams = new List<int>();
+ internal int _firstPackStreamId;
+ internal List<long> _unpackSizes = new List<long>();
+ internal uint? _unpackCrc;
+
+ internal bool UnpackCrcDefined => _unpackCrc != null;
+
+ public long GetUnpackSize()
+ {
+ if (_unpackSizes.Count == 0)
+ {
+ return 0;
+ }
+
+ for (int i = _unpackSizes.Count - 1; i >= 0; i--)
+ {
+ if (FindBindPairForOutStream(i) < 0)
+ {
+ return _unpackSizes[i];
+ }
+ }
+
+ throw new Exception();
+ }
+
+ public int GetNumOutStreams()
+ {
+ int count = 0;
+ for (int i = 0; i < _coders.Count; i++)
+ {
+ count += _coders[i]._numOutStreams;
+ }
+
+ return count;
+ }
+
+ public int FindBindPairForInStream(int inStreamIndex)
+ {
+ for (int i = 0; i < _bindPairs.Count; i++)
+ {
+ if (_bindPairs[i]._inIndex == inStreamIndex)
+ {
+ return i;
+ }
+ }
+
+ return -1;
+ }
+
+ public int FindBindPairForOutStream(int outStreamIndex)
+ {
+ for (int i = 0; i < _bindPairs.Count; i++)
+ {
+ if (_bindPairs[i]._outIndex == outStreamIndex)
+ {
+ return i;
+ }
+ }
+
+ return -1;
+ }
+
+ public int FindPackStreamArrayIndex(int inStreamIndex)
+ {
+ for (int i = 0; i < _packStreams.Count; i++)
+ {
+ if (_packStreams[i] == inStreamIndex)
+ {
+ return i;
+ }
+ }
+
+ return -1;
+ }
+
+ public bool IsEncrypted()
+ {
+ for (int i = _coders.Count - 1; i >= 0; i--)
+ {
+ if (_coders[i]._methodId == CMethodId.K_AES)
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ public bool CheckStructure()
+ {
+ const int kNumCodersMax = 32; // don't change it
+ const int kMaskSize = 32; // it must be >= kNumCodersMax
+ const int kNumBindsMax = 32;
+
+ if (_coders.Count > kNumCodersMax || _bindPairs.Count > kNumBindsMax)
+ {
+ return false;
+ }
+
+ {
+ var v = new BitVector(_bindPairs.Count + _packStreams.Count);
+
+ for (int i = 0; i < _bindPairs.Count; i++)
+ {
+ if (v.GetAndSet(_bindPairs[i]._inIndex))
+ {
+ return false;
+ }
+ }
+
+ for (int i = 0; i < _packStreams.Count; i++)
+ {
+ if (v.GetAndSet(_packStreams[i]))
+ {
+ return false;
+ }
+ }
+ }
+
+ {
+ var v = new BitVector(_unpackSizes.Count);
+ for (int i = 0; i < _bindPairs.Count; i++)
+ {
+ if (v.GetAndSet(_bindPairs[i]._outIndex))
+ {
+ return false;
+ }
+ }
+ }
+
+ uint[] mask = new uint[kMaskSize];
+
+ {
+ List<int> inStreamToCoder = new List<int>();
+ List<int> outStreamToCoder = new List