SharpCompress: remove source and add .dll

This commit is contained in:
Asnivor 2019-01-25 20:53:43 +00:00
parent 0f5b4555b4
commit ab6acb1065
333 changed files with 6 additions and 55053 deletions

View File

@ -56,6 +56,9 @@
<HintPath>..\output\dll\nlua\NLua.dll</HintPath>
<Private>False</Private>
</Reference>
<Reference Include="SharpCompress">
<HintPath>..\References\SharpCompress.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Data.SQLite, Version=1.0.105.2, Culture=neutral, PublicKeyToken=db937bc2d44ff139, processorArchitecture=AMD64">
@@ -258,334 +261,6 @@
<Compile Include="SharpCompressArchiveHandler.cs" />
<Compile Include="SevenZipSharpArchiveHandler.cs" />
<Compile Include="SevenZipWriter.cs" />
<Compile Include="SharpCompress\Archives\AbstractArchive.cs" />
<Compile Include="SharpCompress\Archives\AbstractWritableArchive.cs" />
<Compile Include="SharpCompress\Archives\ArchiveFactory.cs" />
<Compile Include="SharpCompress\Archives\GZip\GZipArchive.cs" />
<Compile Include="SharpCompress\Archives\GZip\GZipArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\GZip\GZipWritableArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\IArchive.cs" />
<Compile Include="SharpCompress\Archives\IArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\IArchiveEntryExtensions.cs" />
<Compile Include="SharpCompress\Archives\IArchiveExtensions.cs" />
<Compile Include="SharpCompress\Archives\IArchiveExtractionListener.cs" />
<Compile Include="SharpCompress\Archives\IWritableArchive.cs" />
<Compile Include="SharpCompress\Archives\IWritableArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\IWritableArchiveExtensions.cs" />
<Compile Include="SharpCompress\Archives\Rar\FileInfoRarArchiveVolume.cs" />
<Compile Include="SharpCompress\Archives\Rar\FileInfoRarFilePart.cs" />
<Compile Include="SharpCompress\Archives\Rar\RarArchive.cs" />
<Compile Include="SharpCompress\Archives\Rar\RarArchive.Extensions.cs" />
<Compile Include="SharpCompress\Archives\Rar\RarArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\Rar\RarArchiveEntryFactory.cs" />
<Compile Include="SharpCompress\Archives\Rar\RarArchiveVolumeFactory.cs" />
<Compile Include="SharpCompress\Archives\Rar\SeekableFilePart.cs" />
<Compile Include="SharpCompress\Archives\Rar\StreamRarArchiveVolume.cs" />
<Compile Include="SharpCompress\Archives\SevenZip\SevenZipArchive.cs" />
<Compile Include="SharpCompress\Archives\SevenZip\SevenZipArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\Tar\TarArchive.cs" />
<Compile Include="SharpCompress\Archives\Tar\TarArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\Tar\TarWritableArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\Zip\ZipArchive.cs" />
<Compile Include="SharpCompress\Archives\Zip\ZipArchiveEntry.cs" />
<Compile Include="SharpCompress\Archives\Zip\ZipWritableArchiveEntry.cs" />
<Compile Include="SharpCompress\Buffers\ArrayPool.cs" />
<Compile Include="SharpCompress\Buffers\DefaultArrayPool.cs" />
<Compile Include="SharpCompress\Buffers\DefaultArrayPoolBucket.cs" />
<Compile Include="SharpCompress\Buffers\Utilities.cs" />
<Compile Include="SharpCompress\Common\ArchiveEncoding.cs" />
<Compile Include="SharpCompress\Common\ArchiveException.cs" />
<Compile Include="SharpCompress\Common\ArchiveExtractionEventArgs.cs" />
<Compile Include="SharpCompress\Common\ArchiveType.cs" />
<Compile Include="SharpCompress\Common\CompressedBytesReadEventArgs.cs" />
<Compile Include="SharpCompress\Common\CompressionType.cs" />
<Compile Include="SharpCompress\Common\CryptographicException.cs" />
<Compile Include="SharpCompress\Common\Entry.cs" />
<Compile Include="SharpCompress\Common\EntryStream.cs" />
<Compile Include="SharpCompress\Common\ExtractionException.cs" />
<Compile Include="SharpCompress\Common\ExtractionMethods.cs" />
<Compile Include="SharpCompress\Common\ExtractionOptions.cs" />
<Compile Include="SharpCompress\Common\FilePart.cs" />
<Compile Include="SharpCompress\Common\FilePartExtractionBeginEventArgs.cs" />
<Compile Include="SharpCompress\Common\FlagUtility.cs" />
<Compile Include="SharpCompress\Common\GZip\GZipEntry.cs" />
<Compile Include="SharpCompress\Common\GZip\GZipFilePart.cs" />
<Compile Include="SharpCompress\Common\GZip\GZipVolume.cs" />
<Compile Include="SharpCompress\Common\IEntry.cs" />
<Compile Include="SharpCompress\Common\IEntry.Extensions.cs" />
<Compile Include="SharpCompress\Common\IExtractionListener.cs" />
<Compile Include="SharpCompress\Common\IncompleteArchiveException.cs" />
<Compile Include="SharpCompress\Common\InvalidFormatException.cs" />
<Compile Include="SharpCompress\Common\IVolume.cs" />
<Compile Include="SharpCompress\Common\MultipartStreamRequiredException.cs" />
<Compile Include="SharpCompress\Common\MultiVolumeExtractionException.cs" />
<Compile Include="SharpCompress\Common\OptionsBase.cs" />
<Compile Include="SharpCompress\Common\PasswordProtectedException.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\ArchiveCryptHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\ArchiveHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\AVHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\CommentHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\EndArchiveHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\FileHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\FileNameDecoder.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\Flags.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\IRarHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\MarkHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\NewSubHeaderType.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\ProtectHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\RarHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\RarHeaderFactory.cs" />
<Compile Include="SharpCompress\Common\Rar\Headers\SignHeader.cs" />
<Compile Include="SharpCompress\Common\Rar\RarCrcBinaryReader.cs" />
<Compile Include="SharpCompress\Common\Rar\RarCryptoBinaryReader.cs" />
<Compile Include="SharpCompress\Common\Rar\RarCryptoWrapper.cs" />
<Compile Include="SharpCompress\Common\Rar\RarEntry.cs" />
<Compile Include="SharpCompress\Common\Rar\RarFilePart.cs" />
<Compile Include="SharpCompress\Common\Rar\RarRijndael.cs" />
<Compile Include="SharpCompress\Common\Rar\RarVolume.cs" />
<Compile Include="SharpCompress\Common\ReaderExtractionEventArgs.cs" />
<Compile Include="SharpCompress\Common\SevenZip\ArchiveDatabase.cs" />
<Compile Include="SharpCompress\Common\SevenZip\ArchiveReader.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CBindPair.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CCoderInfo.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CFileItem.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CFolder.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CMethodId.cs" />
<Compile Include="SharpCompress\Common\SevenZip\CStreamSwitch.cs" />
<Compile Include="SharpCompress\Common\SevenZip\DataReader.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipEntry.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipFilePart.cs" />
<Compile Include="SharpCompress\Common\SevenZip\SevenZipVolume.cs" />
<Compile Include="SharpCompress\Common\Tar\Headers\EntryType.cs" />
<Compile Include="SharpCompress\Common\Tar\Headers\TarHeader.cs" />
<Compile Include="SharpCompress\Common\Tar\TarEntry.cs" />
<Compile Include="SharpCompress\Common\Tar\TarFilePart.cs" />
<Compile Include="SharpCompress\Common\Tar\TarHeaderFactory.cs" />
<Compile Include="SharpCompress\Common\Tar\TarReadOnlySubStream.cs" />
<Compile Include="SharpCompress\Common\Tar\TarVolume.cs" />
<Compile Include="SharpCompress\Common\Volume.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\DirectoryEndHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\DirectoryEntryHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\HeaderFlags.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\IgnoreHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\LocalEntryHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\LocalEntryHeaderExtraFactory.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\SplitHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\Zip64DirectoryEndHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\Zip64DirectoryEndLocatorHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\ZipFileEntry.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\ZipHeader.cs" />
<Compile Include="SharpCompress\Common\Zip\Headers\ZipHeaderType.cs" />
<Compile Include="SharpCompress\Common\Zip\PkwareTraditionalCryptoStream.cs" />
<Compile Include="SharpCompress\Common\Zip\PkwareTraditionalEncryptionData.cs" />
<Compile Include="SharpCompress\Common\Zip\SeekableZipFilePart.cs" />
<Compile Include="SharpCompress\Common\Zip\SeekableZipHeaderFactory.cs" />
<Compile Include="SharpCompress\Common\Zip\StreamingZipFilePart.cs" />
<Compile Include="SharpCompress\Common\Zip\StreamingZipHeaderFactory.cs" />
<Compile Include="SharpCompress\Common\Zip\WinzipAesCryptoStream.cs" />
<Compile Include="SharpCompress\Common\Zip\WinzipAesEncryptionData.cs" />
<Compile Include="SharpCompress\Common\Zip\WinzipAesKeySize.cs" />
<Compile Include="SharpCompress\Common\Zip\ZipCompressionMethod.cs" />
<Compile Include="SharpCompress\Common\Zip\ZipEntry.cs" />
<Compile Include="SharpCompress\Common\Zip\ZipFilePart.cs" />
<Compile Include="SharpCompress\Common\Zip\ZipHeaderFactory.cs" />
<Compile Include="SharpCompress\Common\Zip\ZipVolume.cs" />
<Compile Include="SharpCompress\Compressors\ADC\ADCBase.cs" />
<Compile Include="SharpCompress\Compressors\ADC\ADCStream.cs" />
<Compile Include="SharpCompress\Compressors\BZip2\BZip2Constants.cs" />
<Compile Include="SharpCompress\Compressors\BZip2\BZip2Stream.cs" />
<Compile Include="SharpCompress\Compressors\BZip2\CBZip2InputStream.cs" />
<Compile Include="SharpCompress\Compressors\BZip2\CBZip2OutputStream.cs" />
<Compile Include="SharpCompress\Compressors\BZip2\CRC.cs" />
<Compile Include="SharpCompress\Compressors\CompressionMode.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\BlockType.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\Deflate64Stream.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\DeflateInput.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\FastEncoderStatus.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\HuffmanTree.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\InflaterManaged.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\InflaterState.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\InputBuffer.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\Match.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\MatchState.cs" />
<Compile Include="SharpCompress\Compressors\Deflate64\OutputWindow.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\CRC32.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\DeflateManager.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\DeflateStream.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\FlushType.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\GZipStream.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\Inflate.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\InfTree.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\Tree.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\Zlib.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\ZlibBaseStream.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\ZlibCodec.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\ZlibConstants.cs" />
<Compile Include="SharpCompress\Compressors\Deflate\ZlibStream.cs" />
<Compile Include="SharpCompress\Compressors\Filters\BCJ2Filter.cs" />
<Compile Include="SharpCompress\Compressors\Filters\BCJFilter.cs" />
<Compile Include="SharpCompress\Compressors\Filters\Filter.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\AesDecoderStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Bcj2DecoderStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\BitVector.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\CRC.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\DecoderStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\ICoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Log.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZipStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaBase.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaDecoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaEncoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaEncoderProperties.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LzmaStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzBinTree.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzInWindow.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\LZ\LzOutWindow.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoder.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoderBit.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\RangeCoder\RangeCoderBitTree.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Registry.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilites\CrcBuilderStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilites\CrcCheckStream.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilites\IPasswordProvider.cs" />
<Compile Include="SharpCompress\Compressors\LZMA\Utilites\Utils.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\FreqData.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\ModelPPM.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\Pointer.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\PPMContext.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\RangeCoder.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\RarMemBlock.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\RarNode.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\SEE2Context.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\State.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\StateRef.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\H\SubAllocator.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\Allocator.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\Coder.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\MemoryNode.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\Model.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\ModelRestorationMethod.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\Pointer.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\PpmContext.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\PpmState.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\I1\See2Context.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\PpmdProperties.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\PpmdStream.cs" />
<Compile Include="SharpCompress\Compressors\PPMd\PpmdVersion.cs" />
<Compile Include="SharpCompress\Compressors\Rar\IRarUnpack.cs" />
<Compile Include="SharpCompress\Compressors\Rar\MultiVolumeReadOnlyStream.cs" />
<Compile Include="SharpCompress\Compressors\Rar\RarCRC.cs" />
<Compile Include="SharpCompress\Compressors\Rar\RarCrcStream.cs" />
<Compile Include="SharpCompress\Compressors\Rar\RarStream.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\AudioVariables.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\BitDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\CodeType.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\Decode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\DistDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\FilterType.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\LitDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\LowDistDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\MultDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\PackDef.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Decode\RepDecode.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\PPM\BlockTypes.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Unpack.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Unpack15.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Unpack20.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\Unpack50.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\UnpackFilter.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\UnpackInline.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV1\UnpackUtility.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\BitInput.getbits_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\BitInput.getbits_hpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\FragmentedWindow.unpack50frag_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\PackDef.compress_hpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.rawint_hpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpack15_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpack20_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpack30_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpack50_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpackinline_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\Unpack.unpack_cpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\UnpackV2017\unpack_hpp.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\BitInput.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\RarVM.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMCmdFlags.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMCommands.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMFlags.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMOpType.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMPreparedCommand.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMPreparedOperand.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMPreparedProgram.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMStandardFilters.cs" />
<Compile Include="SharpCompress\Compressors\Rar\VM\VMStandardFilterSignature.cs" />
<Compile Include="SharpCompress\Compressors\Xz\BinaryUtils.cs" />
<Compile Include="SharpCompress\Compressors\Xz\CheckType.cs" />
<Compile Include="SharpCompress\Compressors\Xz\Crc32.cs" />
<Compile Include="SharpCompress\Compressors\Xz\Crc64.cs" />
<Compile Include="SharpCompress\Compressors\Xz\Filters\BlockFilter.cs" />
<Compile Include="SharpCompress\Compressors\Xz\Filters\Lzma2Filter.cs" />
<Compile Include="SharpCompress\Compressors\Xz\MultiByteIntegers.cs" />
<Compile Include="SharpCompress\Compressors\Xz\ReadOnlyStream.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZBlock.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZFooter.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZHeader.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZIndex.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZIndexMarkerReachedException.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZIndexRecord.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZReadOnlyStream.cs" />
<Compile Include="SharpCompress\Compressors\Xz\XZStream.cs" />
<Compile Include="SharpCompress\Converters\DataConverter.cs" />
<Compile Include="SharpCompress\Crypto\Crc32Stream.cs" />
<Compile Include="SharpCompress\Crypto\CryptoException.cs" />
<Compile Include="SharpCompress\Crypto\DataLengthException.cs" />
<Compile Include="SharpCompress\Crypto\IBlockCipher.cs" />
<Compile Include="SharpCompress\Crypto\ICipherParameters.cs" />
<Compile Include="SharpCompress\Crypto\KeyParameter.cs" />
<Compile Include="SharpCompress\Crypto\RijndaelEngine.cs" />
<Compile Include="SharpCompress\EnumExtensions.cs" />
<Compile Include="SharpCompress\IO\BufferedSubStream.cs" />
<Compile Include="SharpCompress\IO\CountingWritableSubStream.cs" />
<Compile Include="SharpCompress\IO\ListeningStream.cs" />
<Compile Include="SharpCompress\IO\MarkingBinaryReader.cs" />
<Compile Include="SharpCompress\IO\NonDisposingStream.cs" />
<Compile Include="SharpCompress\IO\ReadOnlySubStream.cs" />
<Compile Include="SharpCompress\IO\RewindableStream.cs" />
<Compile Include="SharpCompress\IO\StreamingMode.cs" />
<Compile Include="SharpCompress\Lazy.cs" />
<Compile Include="SharpCompress\LazyReadOnlyCollection.cs" />
<Compile Include="SharpCompress\Readers\AbstractReader.cs" />
<Compile Include="SharpCompress\Readers\GZip\GZipReader.cs" />
<Compile Include="SharpCompress\Readers\IReader.cs" />
<Compile Include="SharpCompress\Readers\IReaderExtensions.cs" />
<Compile Include="SharpCompress\Readers\IReaderExtractionListener.cs" />
<Compile Include="SharpCompress\Readers\Rar\MultiVolumeRarReader.cs" />
<Compile Include="SharpCompress\Readers\Rar\NonSeekableStreamFilePart.cs" />
<Compile Include="SharpCompress\Readers\Rar\RarReader.cs" />
<Compile Include="SharpCompress\Readers\Rar\RarReaderEntry.cs" />
<Compile Include="SharpCompress\Readers\Rar\RarReaderVolume.cs" />
<Compile Include="SharpCompress\Readers\Rar\SingleVolumeRarReader.cs" />
<Compile Include="SharpCompress\Readers\ReaderFactory.cs" />
<Compile Include="SharpCompress\Readers\ReaderOptions.cs" />
<Compile Include="SharpCompress\Readers\ReaderProgress.cs" />
<Compile Include="SharpCompress\Readers\Tar\TarReader.cs" />
<Compile Include="SharpCompress\Readers\Zip\ZipReader.cs" />
<Compile Include="SharpCompress\ReadOnlyCollection.cs" />
<Compile Include="SharpCompress\Utility.cs" />
<Compile Include="SharpCompress\Writers\AbstractWriter.cs" />
<Compile Include="SharpCompress\Writers\GZip\GZipWriter.cs" />
<Compile Include="SharpCompress\Writers\GZip\GZipWriterOptions.cs" />
<Compile Include="SharpCompress\Writers\IWriter.cs" />
<Compile Include="SharpCompress\Writers\IWriterExtensions.cs" />
<Compile Include="SharpCompress\Writers\Tar\TarWriter.cs" />
<Compile Include="SharpCompress\Writers\Tar\TarWriterOptions.cs" />
<Compile Include="SharpCompress\Writers\WriterFactory.cs" />
<Compile Include="SharpCompress\Writers\WriterOptions.cs" />
<Compile Include="SharpCompress\Writers\Zip\ZipCentralDirectoryEntry.cs" />
<Compile Include="SharpCompress\Writers\Zip\ZipWriter.cs" />
<Compile Include="SharpCompress\Writers\Zip\ZipWriterEntryOptions.cs" />
<Compile Include="SharpCompress\Writers\Zip\ZipWriterOptions.cs" />
<Compile Include="SharpZipWriter.cs" />
<Compile Include="SystemInfo.cs" />
<Compile Include="tools\Cheat.cs" />
@@ -647,10 +322,6 @@
<Name>BizHawk.Bizware.BizwareGL</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<Content Include="SharpCompress\Compressors\Rar\UnpackV2017\notes.txt" />
<Content Include="SharpCompress\LICENSE.txt" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<PropertyGroup>
<PreBuildEvent>

View File

@@ -1,179 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
/// <summary>
/// Base class for all random-access archive implementations. Lazily loads
/// volumes and entries on first enumeration and raises extraction progress
/// events on behalf of derived archive types.
/// </summary>
/// <typeparam name="TEntry">Concrete entry type for the archive format.</typeparam>
/// <typeparam name="TVolume">Concrete volume type for the archive format.</typeparam>
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
where TVolume : IVolume
{
// Volumes/entries are materialized on demand and cached after first enumeration.
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
// Raised immediately before/after an individual entry is extracted.
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
// Progress events: bytes consumed from the compressed source, and the start of each file part.
public event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
// Options supplied at construction; exposed to derived types.
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
#if !NO_FILE
/// <summary>
/// Opens an archive from a file on disk. LeaveStreamOpen is forced to false
/// because this instance owns the streams it opens from the file.
/// </summary>
/// <exception cref="ArgumentException">The file does not exist.</exception>
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
if (!fileInfo.Exists)
{
throw new ArgumentException("File does not exist: " + fileInfo.FullName);
}
ReaderOptions = readerOptions;
readerOptions.LeaveStreamOpen = false;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(fileInfo));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
/// <summary>Format-specific volume loading from a file on disk.</summary>
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
/// <summary>
/// Opens an archive from one or more caller-supplied streams. Each stream is
/// validated (readable and seekable) lazily as it is enumerated.
/// </summary>
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
Type = type;
ReaderOptions = readerOptions;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(streams.Select(CheckStreams)));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
/// <summary>Creates an empty archive with no volumes or entries (used when writing from scratch).</summary>
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
// The archive format (Rar, Zip, Tar, ...), fixed at construction.
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
// Guard: random-access archives require streams that are both readable and seekable.
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
{
throw new ArgumentException("Archive streams must be Readable and Seekable");
}
return stream;
}
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
/// <summary>Format-specific volume loading from the supplied streams.</summary>
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
/// <summary>Format-specific entry loading from the loaded volumes.</summary>
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
/// <summary>
/// Disposes all loaded volumes and closes any entries that were already
/// materialized. Subsequent calls are no-ops.
/// </summary>
public virtual void Dispose()
{
if (!disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
// Only entries that were actually loaded need to be closed.
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
disposed = true;
}
}
// Forces full enumeration of both lazy collections so the archive is completely parsed.
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs
{
CurrentFilePartCompressedBytesRead = currentPartCompressedBytes,
CompressedBytesRead = compressedReadBytes
});
}
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs
{
CompressedSize = compressedSize,
Size = size,
Name = name
});
}
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
/// <summary>Creates the format-specific sequential reader used by <see cref="ExtractAllEntries"/>.</summary>
protected abstract IReader CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}
}
}

View File

@@ -1,147 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;

namespace SharpCompress.Archives
{
    /// <summary>
    /// Base class for archives that support adding and removing entries and then
    /// saving the result. Tracks new and removed entries on top of the read-only
    /// view provided by <see cref="AbstractArchive{TEntry,TVolume}"/>.
    /// </summary>
    /// <typeparam name="TEntry">Concrete entry type for the archive format.</typeparam>
    /// <typeparam name="TVolume">Concrete volume type for the archive format.</typeparam>
    public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>, IWritableArchive
        where TEntry : IArchiveEntry
        where TVolume : IVolume
    {
        // Entries added since the archive was opened.
        private readonly List<TEntry> newEntries = new List<TEntry>();
        // Entries marked for removal (still present in the underlying archive).
        private readonly List<TEntry> removedEntries = new List<TEntry>();
        // Combined view (old minus removed, plus new); valid once hasModifications is set.
        private readonly List<TEntry> modifiedEntries = new List<TEntry>();
        private bool hasModifications;

        internal AbstractWritableArchive(ArchiveType type)
            : base(type)
        {
        }

        internal AbstractWritableArchive(ArchiveType type, Stream stream, ReaderOptions readerFactoryOptions)
            : base(type, stream.AsEnumerable(), readerFactoryOptions)
        {
        }

#if !NO_FILE
        internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
            : base(type, fileInfo, readerFactoryOptions)
        {
        }
#endif

        /// <summary>
        /// The current set of entries: the base archive's entries until the archive
        /// has been modified, thereafter the rebuilt modified collection.
        /// </summary>
        public override ICollection<TEntry> Entries
        {
            get
            {
                if (hasModifications)
                {
                    return modifiedEntries;
                }
                return base.Entries;
            }
        }

        // Recomputes modifiedEntries after any add/remove.
        private void RebuildModifiedCollection()
        {
            hasModifications = true;
            // An entry that was added and then removed disappears entirely.
            newEntries.RemoveAll(v => removedEntries.Contains(v));
            modifiedEntries.Clear();
            modifiedEntries.AddRange(OldEntries.Concat(newEntries));
        }

        // Pre-existing entries that have not been marked for removal.
        private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }

        /// <summary>Marks an entry for removal; it is excluded from future saves.</summary>
        public void RemoveEntry(TEntry entry)
        {
            if (!removedEntries.Contains(entry))
            {
                removedEntries.Add(entry);
                RebuildModifiedCollection();
            }
        }

        void IWritableArchive.RemoveEntry(IArchiveEntry entry)
        {
            RemoveEntry((TEntry)entry);
        }

        /// <summary>Adds an entry whose source stream is left open after saving.</summary>
        public TEntry AddEntry(string key, Stream source,
                               long size = 0, DateTime? modified = null)
        {
            return AddEntry(key, source, false, size, modified);
        }

        IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
        {
            return AddEntry(key, source, closeStream, size, modified);
        }

        /// <summary>
        /// Adds an entry under <paramref name="key"/>, reading data from <paramref name="source"/>.
        /// </summary>
        /// <exception cref="ArchiveException">An entry with the same key already exists.</exception>
        public TEntry AddEntry(string key, Stream source, bool closeStream,
                               long size = 0, DateTime? modified = null)
        {
            // Keys are stored without a leading path separator.
            if (key.StartsWith("/")
                || key.StartsWith("\\"))
            {
                key = key.Substring(1);
            }
            if (DoesKeyMatchExisting(key))
            {
                throw new ArchiveException("Cannot add entry with duplicate key: " + key);
            }
            var entry = CreateEntry(key, source, size, modified, closeStream);
            newEntries.Add(entry);
            RebuildModifiedCollection();
            return entry;
        }

        // Case-insensitive key comparison after normalizing separators to '\'.
        // BUG FIX: the original returned the comparison result for the FIRST entry
        // unconditionally from inside the loop, so a duplicate key matching any
        // entry other than the first was never detected. All entries are now
        // checked, returning true on the first match.
        private bool DoesKeyMatchExisting(string key)
        {
            foreach (var path in Entries.Select(x => x.Key))
            {
                var p = path.Replace('/', '\\');
                if (p.StartsWith("\\"))
                {
                    p = p.Substring(1);
                }
                if (string.Equals(p, key, StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// Writes the archive (old entries minus removals, plus additions) to
        /// <paramref name="stream"/> using the supplied writer options.
        /// </summary>
        public void SaveTo(Stream stream, WriterOptions options)
        {
            //reset streams of new entries
            newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
            SaveTo(stream, options, OldEntries, newEntries);
        }

        /// <summary>Validates the source stream then delegates to the format-specific factory.</summary>
        /// <exception cref="ArgumentException">The stream is not readable and seekable.</exception>
        protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
                                     bool closeStream)
        {
            if (!source.CanRead || !source.CanSeek)
            {
                throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
            }
            return CreateEntryInternal(key, source, size, modified, closeStream);
        }

        protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
                                                      bool closeStream);

        protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);

        /// <summary>Disposes the base archive, then closes all tracked entries.</summary>
        public override void Dispose()
        {
            base.Dispose();
            newEntries.Cast<Entry>().ForEach(x => x.Close());
            removedEntries.Cast<Entry>().ForEach(x => x.Close());
            modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
        }
    }
}

View File

@ -1,153 +0,0 @@
using System;
using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
/// <summary>
/// Entry point for opening an existing archive (with format auto-detection)
/// or creating a new writable one.
/// </summary>
public class ArchiveFactory
{
    /// <summary>
    /// Opens an Archive for random access
    /// </summary>
    /// <param name="stream">readable, seekable stream positioned at the archive start</param>
    /// <param name="readerOptions"></param>
    /// <returns></returns>
    public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
    {
        stream.CheckNotNull("stream");
        if (!stream.CanRead || !stream.CanSeek)
        {
            throw new ArgumentException("Stream should be readable and seekable");
        }
        readerOptions = readerOptions ?? new ReaderOptions();
        // Each IsXxxFile probe consumes bytes, so the stream is rewound before
        // every probe and again before handing it to the matching Open().
        if (ZipArchive.IsZipFile(stream, null))
        {
            stream.Seek(0, SeekOrigin.Begin);
            return ZipArchive.Open(stream, readerOptions);
        }
        stream.Seek(0, SeekOrigin.Begin);
        if (SevenZipArchive.IsSevenZipFile(stream))
        {
            stream.Seek(0, SeekOrigin.Begin);
            return SevenZipArchive.Open(stream, readerOptions);
        }
        stream.Seek(0, SeekOrigin.Begin);
        if (GZipArchive.IsGZipFile(stream))
        {
            stream.Seek(0, SeekOrigin.Begin);
            return GZipArchive.Open(stream, readerOptions);
        }
        stream.Seek(0, SeekOrigin.Begin);
        if (RarArchive.IsRarFile(stream, readerOptions))
        {
            stream.Seek(0, SeekOrigin.Begin);
            return RarArchive.Open(stream, readerOptions);
        }
        stream.Seek(0, SeekOrigin.Begin);
        if (TarArchive.IsTarFile(stream))
        {
            stream.Seek(0, SeekOrigin.Begin);
            return TarArchive.Open(stream, readerOptions);
        }
        // NOTE(review): the message mentions LZip but no LZip probe is performed
        // in this method — confirm against the LZip support elsewhere in the library.
        throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
    }

    /// <summary>
    /// Creates an empty writable archive of the given type.
    /// Only Zip, Tar and GZip support writing.
    /// </summary>
    public static IWritableArchive Create(ArchiveType type)
    {
        switch (type)
        {
            case ArchiveType.Zip:
            {
                return ZipArchive.Create();
            }
            case ArchiveType.Tar:
            {
                return TarArchive.Create();
            }
            case ArchiveType.GZip:
            {
                return GZipArchive.Create();
            }
            default:
            {
                throw new NotSupportedException("Cannot create Archives of type: " + type);
            }
        }
    }

#if !NO_FILE
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static IArchive Open(string filePath, ReaderOptions options = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), options);
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        options = options ?? new ReaderOptions { LeaveStreamOpen = false };
        // A temporary read stream is used only for format sniffing; the matching
        // Open(FileInfo, ...) overload re-opens the file itself.
        using (var stream = fileInfo.OpenRead())
        {
            if (ZipArchive.IsZipFile(stream, null))
            {
                return ZipArchive.Open(fileInfo, options);
            }
            stream.Seek(0, SeekOrigin.Begin);
            if (SevenZipArchive.IsSevenZipFile(stream))
            {
                return SevenZipArchive.Open(fileInfo, options);
            }
            stream.Seek(0, SeekOrigin.Begin);
            if (GZipArchive.IsGZipFile(stream))
            {
                return GZipArchive.Open(fileInfo, options);
            }
            stream.Seek(0, SeekOrigin.Begin);
            if (RarArchive.IsRarFile(stream, options))
            {
                return RarArchive.Open(fileInfo, options);
            }
            stream.Seek(0, SeekOrigin.Begin);
            if (TarArchive.IsTarFile(stream))
            {
                return TarArchive.Open(fileInfo, options);
            }
            throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
        }
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
                                        ExtractionOptions options = null)
    {
        using (IArchive archive = Open(sourceArchive))
        {
            foreach (IArchiveEntry entry in archive.Entries)
            {
                entry.WriteToDirectory(destinationDirectory, options);
            }
        }
    }
#endif
}
}

View File

@ -1,188 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip
{
/// <summary>
/// Random-access and writable view over a GZip file. A GZip archive holds at
/// most one entry; both the add and save paths enforce this.
/// </summary>
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
    }
#endif

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
    {
        stream.CheckNotNull("stream");
        return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Creates an empty writable GZip archive.
    /// </summary>
    public static GZipArchive Create()
    {
        return new GZipArchive();
    }

#if !NO_FILE
    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
        : base(ArchiveType.GZip, fileInfo, options)
    {
    }

    // A GZip file is always a single volume.
    protected override IEnumerable<GZipVolume> LoadVolumes(FileInfo file)
    {
        return new GZipVolume(file, ReaderOptions).AsEnumerable();
    }

    public static bool IsGZipFile(string filePath)
    {
        return IsGZipFile(new FileInfo(filePath));
    }

    public static bool IsGZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using (Stream stream = fileInfo.OpenRead())
        {
            return IsGZipFile(stream);
        }
    }

    public void SaveTo(string filePath)
    {
        SaveTo(new FileInfo(filePath));
    }

    // Overwrites (FileMode.Create) the target file with the current entry set.
    public void SaveTo(FileInfo fileInfo)
    {
        using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
        {
            SaveTo(stream, new WriterOptions(CompressionType.GZip));
        }
    }
#endif

    /// <summary>
    /// Probes the stream for the GZip magic bytes (0x1F 0x8B) and the deflate
    /// compression-method byte (8). Consumes up to 10 header bytes and does NOT
    /// rewind the stream; the caller is responsible for seeking back.
    /// </summary>
    public static bool IsGZipFile(Stream stream)
    {
        // read the header on the first read
        byte[] header = new byte[10];

        // workitem 8501: handle edge case (decompress empty stream)
        if (!stream.ReadFully(header))
        {
            return false;
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            return false;
        }

        return true;
    }

    /// <summary>
    /// Takes multiple seekable Streams for a multi-part archive
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="options"></param>
    internal GZipArchive(Stream stream, ReaderOptions options)
        : base(ArchiveType.GZip, stream, options)
    {
    }

    internal GZipArchive()
        : base(ArchiveType.GZip)
    {
    }

    // Rejects a second entry: the GZip format holds exactly one member here.
    protected override GZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
                                                            bool closeStream)
    {
        if (Entries.Any())
        {
            throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
        }
        return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
    }

    protected override void SaveTo(Stream stream, WriterOptions options,
                                   IEnumerable<GZipArchiveEntry> oldEntries,
                                   IEnumerable<GZipArchiveEntry> newEntries)
    {
        if (Entries.Count > 1)
        {
            throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
        }
        using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
        {
            foreach (var entry in oldEntries.Concat(newEntries)
                                            .Where(x => !x.IsDirectory))
            {
                using (var entryStream = entry.OpenEntryStream())
                {
                    writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
                }
            }
        }
    }

    protected override IEnumerable<GZipVolume> LoadVolumes(IEnumerable<Stream> streams)
    {
        return new GZipVolume(streams.First(), ReaderOptions).AsEnumerable();
    }

    // Exactly one volume and one entry are expected for GZip.
    protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
    {
        Stream stream = volumes.Single().Stream;
        yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return GZipReader.Open(stream);
    }
}
}

View File

@ -1,34 +0,0 @@
using System.IO;
using System.Linq;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip
{
/// <summary>
/// Archive-level view of the single entry inside a GZip file.
/// </summary>
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
    internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
        : base(part)
    {
        Archive = archive;
    }

    /// <summary>
    /// Opens the entry for decompression, rewinding the underlying raw stream
    /// first so the same entry can be read more than once.
    /// </summary>
    public virtual Stream OpenEntryStream()
    {
        var filePart = Parts.Single() as GZipFilePart;
        var rawStream = filePart.GetRawStream();
        if (rawStream.Position != filePart.EntryStartPosition)
        {
            rawStream.Position = filePart.EntryStartPosition;
        }
        return filePart.GetCompressedStream();
    }

    #region IArchiveEntry Members

    /// <summary>The archive instance this entry belongs to.</summary>
    public IArchive Archive { get; }

    /// <summary>A single-stream GZip entry is always fully present.</summary>
    public bool IsComplete => true;

    #endregion
}
}

View File

@ -1,66 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.GZip
{
/// <summary>
/// A pending (not yet written) GZip entry backed by a caller-supplied stream.
/// Created by GZipArchive.CreateEntryInternal; consumed when the archive is saved.
/// </summary>
internal class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
    // Whether this entry owns (and must dispose) the source stream on Close().
    private readonly bool closeStream;
    private readonly Stream stream;

    internal GZipWritableArchiveEntry(GZipArchive archive, Stream stream,
                                      string path, long size, DateTime? lastModified, bool closeStream)
        : base(archive, null)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    // CRC and compressed size are unknown until the entry is actually written.
    public override long Crc => 0;

    public override string Key { get; }

    public override long CompressedSize => 0;

    public override long Size { get; }

    public override DateTime? LastModifiedTime { get; }

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;

    public override bool IsSplitAfter => false;

    // A pending entry has no file parts; it is materialized only on save.
    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        // Wrapped so that disposing the returned stream leaves the source open.
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        if (closeStream)
        {
            stream.Dispose();
        }
    }
}
}

View File

@ -1,49 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives
{
/// <summary>
/// Random-access view over an archive: enumerable entries and volumes plus
/// extraction progress events.
/// </summary>
public interface IArchive : IDisposable
{
    // Raised around individual entry extraction and as compressed bytes are consumed.
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
    event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
    event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;

    /// <summary>All entries discovered in the archive.</summary>
    IEnumerable<IArchiveEntry> Entries { get; }

    /// <summary>The physical volumes (files/streams) making up the archive.</summary>
    IEnumerable<IVolume> Volumes { get; }

    /// <summary>The detected archive format.</summary>
    ArchiveType Type { get; }

    /// <summary>
    /// Use this method to extract all entries in an archive in order.
    /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
    /// extracted sequentially for the best performance.
    /// </summary>
    IReader ExtractAllEntries();

    /// <summary>
    /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
    /// </summary>
    bool IsSolid { get; }

    /// <summary>
    /// This checks to see if all the known entries have IsComplete = true
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The total size of the files compressed in the archive.
    /// </summary>
    long TotalSize { get; }

    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    long TotalUncompressSize { get; }
}
}

View File

@ -1,24 +0,0 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Archives
{
/// <summary>
/// An entry within a random-access archive; extends <see cref="IEntry"/> with
/// archive-aware members.
/// </summary>
public interface IArchiveEntry : IEntry
{
    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Stream OpenEntryStream();

    /// <summary>
    /// The archive can find all the parts of the archive needed to extract this entry.
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The archive instance this entry belongs to
    /// </summary>
    IArchive Archive { get; }
}
}

View File

@ -1,70 +0,0 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives
{
/// <summary>
/// Extraction helpers for <see cref="IArchiveEntry"/>: write an entry to a
/// stream, a directory, or a file, firing the archive's extraction events.
/// </summary>
public static class IArchiveEntryExtensions
{
    /// <summary>
    /// Decompresses the entry into <paramref name="streamToWriteTo"/>.
    /// </summary>
    /// <exception cref="InvalidFormatException">the entry belongs to a SOLID Rar archive (random access unsupported).</exception>
    /// <exception cref="ExtractionException">the entry is a directory.</exception>
    public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
    {
        if (archiveEntry.Archive.Type == ArchiveType.Rar && archiveEntry.Archive.IsSolid)
        {
            throw new InvalidFormatException("Cannot use Archive random access on SOLID Rar files.");
        }
        if (archiveEntry.IsDirectory)
        {
            throw new ExtractionException("Entry is a file directory and cannot be extracted.");
        }

        // BUG FIX: the previous 'as' cast was dereferenced without a null check,
        // producing a NullReferenceException for any IArchive implementation that
        // does not implement IArchiveExtractionListener. A direct cast fails fast
        // with a descriptive InvalidCastException instead.
        var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
        streamListener.EnsureEntriesLoaded();
        streamListener.FireEntryExtractionBegin(archiveEntry);
        streamListener.FireFilePartExtractionBegin(archiveEntry.Key, archiveEntry.Size, archiveEntry.CompressedSize);
        var entryStream = archiveEntry.OpenEntryStream();
        if (entryStream == null)
        {
            return;
        }
        using (entryStream)
        {
            // ListeningStream forwards read progress to the listener while copying.
            using (Stream s = new ListeningStream(streamListener, entryStream))
            {
                s.TransferTo(streamToWriteTo);
            }
        }
        streamListener.FireEntryExtractionEnd(archiveEntry);
    }

#if !NO_FILE
    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
                                        ExtractionOptions options = null)
    {
        ExtractionMethods.WriteEntryToDirectory(entry, destinationDirectory, options,
                                                entry.WriteToFile);
    }

    /// <summary>
    /// Extract to specific file
    /// </summary>
    public static void WriteToFile(this IArchiveEntry entry, string destinationFileName,
                                   ExtractionOptions options = null)
    {
        ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
                                           (x, fm) =>
                                           {
                                               using (FileStream fs = File.Open(destinationFileName, fm))
                                               {
                                                   entry.WriteTo(fs);
                                               }
                                           });
    }
#endif
}
}

View File

@ -1,26 +0,0 @@
#if !NO_FILE
using System.Linq;
using SharpCompress.Common;
#endif
namespace SharpCompress.Archives
{
/// <summary>
/// Whole-archive extraction helpers for <see cref="IArchive"/>.
/// </summary>
public static class IArchiveExtensions
{
#if !NO_FILE
    /// <summary>
    /// Extracts every file entry into <paramref name="destinationDirectory"/>,
    /// keeping each entry's filename. Directory entries are skipped.
    /// </summary>
    public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
                                        ExtractionOptions options = null)
    {
        var fileEntries = archive.Entries.Where(entry => !entry.IsDirectory);
        foreach (var fileEntry in fileEntries)
        {
            fileEntry.WriteToDirectory(destinationDirectory, options);
        }
    }
#endif
}
}

View File

@ -1,11 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives
{
/// <summary>
/// Internal hook implemented by archives so extraction helpers can lazily load
/// entries and raise the per-entry extraction events.
/// </summary>
internal interface IArchiveExtractionListener : IExtractionListener
{
    // Forces the (possibly lazy) entry collection to be fully read.
    void EnsureEntriesLoaded();
    void FireEntryExtractionBegin(IArchiveEntry entry);
    void FireEntryExtractionEnd(IArchiveEntry entry);
}
}

View File

@ -1,15 +0,0 @@
using System;
using System.IO;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
/// <summary>
/// An archive whose entry set can be modified and then written back out.
/// </summary>
public interface IWritableArchive : IArchive
{
    /// <summary>Marks an existing entry for removal from the saved output.</summary>
    void RemoveEntry(IArchiveEntry entry);

    /// <summary>Adds a new entry backed by <paramref name="source"/> under <paramref name="key"/>.</summary>
    IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);

    /// <summary>Writes the current entry set to <paramref name="stream"/>.</summary>
    void SaveTo(Stream stream, WriterOptions options);
}
}

View File

@ -1,9 +0,0 @@
using System.IO;
namespace SharpCompress.Archives
{
/// <summary>
/// Internal marker for pending entries: exposes the source stream so the save
/// path can rewind it before writing.
/// </summary>
internal interface IWritableArchiveEntry
{
    Stream Stream { get; }
}
}

View File

@ -1,63 +0,0 @@
#if !NO_FILE
using System;
#endif
using System.IO;
using SharpCompress.Writers;
namespace SharpCompress.Archives
{
/// <summary>
/// File-system conveniences for <see cref="IWritableArchive"/>: add files or
/// whole directories, and save to a path.
/// </summary>
public static class IWritableArchiveExtensions
{
#if !NO_FILE
    /// <summary>
    /// Adds the file at <paramref name="filePath"/> as entry <paramref name="entryPath"/>.
    /// The opened stream is owned by the archive (closeStream: true).
    /// </summary>
    /// <exception cref="FileNotFoundException">the file does not exist.</exception>
    public static void AddEntry(this IWritableArchive writableArchive,
                                string entryPath, string filePath)
    {
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException("Could not AddEntry: " + filePath);
        }
        // Reuse the already-constructed FileInfo instead of building a second
        // one for the same path (the original allocated it twice).
        writableArchive.AddEntry(entryPath, fileInfo.OpenRead(), true, fileInfo.Length,
                                 fileInfo.LastWriteTime);
    }

    /// <summary>Saves the archive to the given path, overwriting any existing file.</summary>
    public static void SaveTo(this IWritableArchive writableArchive, string filePath, WriterOptions options)
    {
        writableArchive.SaveTo(new FileInfo(filePath), options);
    }

    public static void SaveTo(this IWritableArchive writableArchive, FileInfo fileInfo, WriterOptions options)
    {
        using (var stream = fileInfo.Open(FileMode.Create, FileAccess.Write))
        {
            writableArchive.SaveTo(stream, options);
        }
    }

    /// <summary>
    /// Adds every file under <paramref name="filePath"/> (recursively by default),
    /// keying each entry by its path relative to <paramref name="filePath"/>.
    /// </summary>
    public static void AddAllFromDirectory(
        this IWritableArchive writableArchive,
        string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
    {
#if NET35
        foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
#else
        foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
#endif
        {
            var fileInfo = new FileInfo(path);
            writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
                                     fileInfo.LastWriteTime);
        }
    }

    /// <summary>Adds an existing file as entry <paramref name="key"/>.</summary>
    /// <exception cref="ArgumentException">the file does not exist.</exception>
    public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            throw new ArgumentException("FileInfo does not exist.");
        }
        return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
    }
#endif
}
}

View File

@ -1,46 +0,0 @@

#if !NO_FILE
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
    internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options)
        : base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
    {
        FileInfo = fileInfo;
        // File parts are read eagerly and cached, so ReadFileParts() can be
        // called repeatedly without re-parsing the volume.
        FileParts = GetVolumeFileParts().ToReadOnly();
    }

    // NOTE(review): mutates the caller-supplied options instance, not a copy —
    // the caller's ReaderOptions.LeaveStreamOpen is changed as a side effect.
    private static ReaderOptions FixOptions(ReaderOptions options)
    {
        //make sure we're closing streams with fileinfo
        options.LeaveStreamOpen = false;
        return options;
    }

    internal ReadOnlyCollection<RarFilePart> FileParts { get; }

    internal FileInfo FileInfo { get; }

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader)
    {
        return new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
    }

    internal override IEnumerable<RarFilePart> ReadFileParts()
    {
        return FileParts;
    }
}
}
#endif

View File

@ -1,28 +0,0 @@

#if !NO_FILE
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// A seekable RAR file part that remembers which physical file it came from,
/// so multi-volume traversal can compute the next volume's filename.
/// </summary>
internal class FileInfoRarFilePart : SeekableFilePart
{
    internal FileInfoRarFilePart(FileInfoRarArchiveVolume volume, string password, MarkHeader mh, FileHeader fh, FileInfo fi)
        : base(mh, fh, volume.Stream, password)
    {
        FileInfo = fi;
    }

    // The physical .rar/.rNN file this part was read from.
    internal FileInfo FileInfo { get; }

    // Human-readable identifier used in progress/extraction reporting.
    internal override string FilePartName
    {
        get
        {
            return "Rar File: " + FileInfo.FullName
                   + " File Entry: " + FileHeader.FileName;
        }
    }
}
}
#endif

View File

@ -1,23 +0,0 @@
using System.Linq;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// Multi-volume convenience queries for <see cref="RarArchive"/>.
/// </summary>
public static class RarArchiveExtensions
{
    /// <summary>
    /// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
    /// </summary>
    public static bool IsFirstVolume(this RarArchive archive) => archive.Volumes.First().IsFirstVolume;

    /// <summary>
    /// RarArchive is part of a multi-part archive.
    /// </summary>
    public static bool IsMultipartVolume(this RarArchive archive) => archive.Volumes.First().IsMultiVolume;
}
}

View File

@ -1,148 +0,0 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// Random-access view over a (possibly multi-volume) RAR archive.
/// </summary>
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
    // Unpackers are created lazily and shared by all entries of this archive:
    // UnpackV2017 for RAR5-era data, UnpackV1 for older (RAR v3) data.
    internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
    internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());

#if !NO_FILE
    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    internal RarArchive(FileInfo fileInfo, ReaderOptions options)
        : base(ArchiveType.Rar, fileInfo, options)
    {
    }

    // Discovers sibling volume files (.r00/.part2.rar style) from the first file.
    protected override IEnumerable<RarVolume> LoadVolumes(FileInfo file)
    {
        return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
    }
#endif

    /// <summary>
    /// Takes multiple seekable Streams for a multi-part archive
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="options"></param>
    internal RarArchive(IEnumerable<Stream> streams, ReaderOptions options)
        : base(ArchiveType.Rar, streams, options)
    {
    }

    protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
    {
        return RarArchiveEntryFactory.GetEntries(this, volumes);
    }

    protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
    {
        return RarArchiveVolumeFactory.GetParts(streams, ReaderOptions);
    }

    // SOLID archives must be extracted sequentially; a streaming reader does that.
    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.First().Stream;
        stream.Position = 0;
        return RarReader.Open(stream, ReaderOptions);
    }

    public override bool IsSolid => Volumes.First().IsSolidArchive;

    #region Creation

#if !NO_FILE
    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static RarArchive Open(string filePath, ReaderOptions options = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        return new RarArchive(fileInfo, options ?? new ReaderOptions());
    }
#endif

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="options"></param>
    public static RarArchive Open(Stream stream, ReaderOptions options = null)
    {
        stream.CheckNotNull("stream");
        return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
    }

    /// <summary>
    /// Takes multiple seekable Streams for a multi-part archive
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="options"></param>
    public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
    {
        streams.CheckNotNull("streams");
        return new RarArchive(streams, options ?? new ReaderOptions());
    }

#if !NO_FILE
    public static bool IsRarFile(string filePath)
    {
        return IsRarFile(new FileInfo(filePath));
    }

    public static bool IsRarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using (Stream stream = fileInfo.OpenRead())
        {
            return IsRarFile(stream);
        }
    }
#endif

    // Probes for a RAR mark header; consumes bytes and does not rewind.
    // NOTE(review): the options parameter is accepted but unused here.
    public static bool IsRarFile(Stream stream, ReaderOptions options = null)
    {
        try
        {
            MarkHeader.Read(stream, true, false);
            return true;
        }
        catch
        {
            // Any parse failure simply means "not a RAR file" for this probe.
            return false;
        }
    }

    #endregion
}
}

View File

@ -1,89 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// A logical RAR entry, possibly split across several file parts spanning
/// multiple volumes.
/// </summary>
public class RarArchiveEntry : RarEntry, IArchiveEntry
{
    private readonly ICollection<RarFilePart> parts;
    private readonly RarArchive archive;

    internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
    {
        this.parts = parts.ToList();
        this.archive = archive;
    }

    public override CompressionType CompressionType => CompressionType.Rar;

    public IArchive Archive => archive;

    internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();

    internal override FileHeader FileHeader => parts.First().FileHeader;

    // CRC, Size and CompressedSize all require every part to be present
    // (CheckIncomplete) before they can be reported reliably.
    public override long Crc
    {
        get
        {
            CheckIncomplete();
            // The final part (the one not split after) carries the file CRC.
            return parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc;
        }
    }

    public override long Size
    {
        get
        {
            CheckIncomplete();
            return parts.First().FileHeader.UncompressedSize;
        }
    }

    public override long CompressedSize
    {
        get
        {
            CheckIncomplete();
            // Compressed size is the sum over all parts of the split entry.
            return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
        }
    }

    /// <summary>
    /// Opens a decompressing stream over all parts. Not available for SOLID
    /// archives, which must be extracted sequentially.
    /// </summary>
    public Stream OpenEntryStream()
    {
        if (archive.IsSolid)
        {
            throw new InvalidOperationException("Use ExtractAllEntries to extract SOLID archives.");
        }

        // Legacy (v3) entries use the old unpacker; everything else the 2017 one.
        if (IsRarV3)
        {
            return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
        }

        return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
    }

    // Complete when the terminating part (not split after) is present.
    public bool IsComplete
    {
        get
        {
            return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
        }
    }

    private void CheckIncomplete()
    {
        if (!IsComplete)
        {
            throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
        }
    }
}
}

View File

@ -1,47 +0,0 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// Groups the raw file parts read from RAR volumes into logical archive
/// entries (a split file yields one entry spanning several parts).
/// </summary>
internal static class RarArchiveEntryFactory
{
    // Flattens all parts from all volumes, in volume order.
    private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
    {
        foreach (RarVolume rarPart in parts)
        {
            foreach (RarFilePart fp in rarPart.ReadFileParts())
            {
                yield return fp;
            }
        }
    }

    // Accumulates consecutive parts until one is not "split after", which
    // terminates a logical file; a trailing incomplete group is still yielded
    // so incomplete entries remain visible.
    private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(IEnumerable<RarVolume> parts)
    {
        var groupedParts = new List<RarFilePart>();
        foreach (RarFilePart fp in GetFileParts(parts))
        {
            groupedParts.Add(fp);

            if (!fp.FileHeader.IsSplitAfter)
            {
                yield return groupedParts;
                groupedParts = new List<RarFilePart>();
            }
        }
        if (groupedParts.Count > 0)
        {
            yield return groupedParts;
        }
    }

    internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
                                                            IEnumerable<RarVolume> rarParts)
    {
        foreach (var groupedParts in GetMatchedFileParts(rarParts))
        {
            yield return new RarArchiveEntry(archive, groupedParts);
        }
    }
}
}

View File

@ -1,147 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
#if !NO_FILE
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
#endif
namespace SharpCompress.Archives.Rar
{
/// <summary>
/// Builds the sequence of RAR volumes for an archive, either from explicit
/// streams or by walking sibling files on disk using RAR volume-naming rules.
/// </summary>
internal static class RarArchiveVolumeFactory
{
    internal static IEnumerable<RarVolume> GetParts(IEnumerable<Stream> streams, ReaderOptions options)
    {
        foreach (Stream s in streams)
        {
            if (!s.CanRead || !s.CanSeek)
            {
                throw new ArgumentException("Stream is not readable and seekable");
            }
            StreamRarArchiveVolume part = new StreamRarArchiveVolume(s, options);
            yield return part;
        }
    }

#if !NO_FILE
    // Yields the first volume, then keeps deriving the next volume's filename
    // from the current one until a file is missing.
    internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
    {
        FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
        yield return part;

        ArchiveHeader ah = part.ArchiveHeader;
        if (!ah.IsVolume)
        {
            yield break; //if file isn't volume then there is no reason to look
        }
        fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
        //we use fileinfo because rar is dumb and looks at file names rather than archive info for another volume
        while (fileInfo != null && fileInfo.Exists)
        {
            part = new FileInfoRarArchiveVolume(fileInfo, options);
            fileInfo = GetNextFileInfo(ah, part.FileParts.FirstOrDefault() as FileInfoRarFilePart);
            yield return part;
        }
    }

    // Dispatches on old (.rar/.r00/.r01) vs new (.part1.rar/.part2.rar) naming.
    private static FileInfo GetNextFileInfo(ArchiveHeader ah, FileInfoRarFilePart currentFilePart)
    {
        if (currentFilePart == null)
        {
            return null;
        }
        bool oldNumbering = ah.OldNumberingFormat
                            || currentFilePart.MarkHeader.OldNumberingFormat;
        if (oldNumbering)
        {
            return FindNextFileWithOldNumbering(currentFilePart.FileInfo);
        }
        else
        {
            return FindNextFileWithNewNumbering(currentFilePart.FileInfo);
        }
    }

    private static FileInfo FindNextFileWithOldNumbering(FileInfo currentFileInfo)
    {
        // .rar, .r00, .r01, ...
        string extension = currentFileInfo.Extension;

        StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
        buffer.Append(currentFileInfo.FullName.Substring(0,
                                                         currentFileInfo.FullName.Length - extension.Length));

        // The volume after .rar is .r00; after that the two-digit suffix increments.
        if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
        {
            buffer.Append(".r00");
        }
        else
        {
            int num = 0;
            if (int.TryParse(extension.Substring(2, 2), out num))
            {
                num++;
                buffer.Append(".r");
                if (num < 10)
                {
                    buffer.Append('0');
                }
                buffer.Append(num);
            }
            else
            {
                ThrowInvalidFileName(currentFileInfo);
            }
        }
        return new FileInfo(buffer.ToString());
    }

    private static FileInfo FindNextFileWithNewNumbering(FileInfo currentFileInfo)
    {
        // part1.rar, part2.rar, ...
        string extension = currentFileInfo.Extension;
        if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) != 0)
        {
            throw new ArgumentException("Invalid extension, expected 'rar': " + currentFileInfo.FullName);
        }
        int startIndex = currentFileInfo.FullName.LastIndexOf(".part");
        if (startIndex < 0)
        {
            ThrowInvalidFileName(currentFileInfo);
        }
        StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
        buffer.Append(currentFileInfo.FullName, 0, startIndex);
        int num = 0;
        string numString = currentFileInfo.FullName.Substring(startIndex + 5,
                                                              currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
                                                              startIndex - 5);
        buffer.Append(".part");
        if (int.TryParse(numString, out num))
        {
            num++;
            // Preserve the zero-padding width of the original number string.
            for (int i = 0; i < numString.Length - num.ToString().Length; i++)
            {
                buffer.Append('0');
            }
            buffer.Append(num);
        }
        else
        {
            ThrowInvalidFileName(currentFileInfo);
        }
        buffer.Append(".rar");
        return new FileInfo(buffer.ToString());
    }

    private static void ThrowInvalidFileName(FileInfo fileInfo)
    {
        throw new ArgumentException("Filename invalid or next archive could not be found:"
                                    + fileInfo.FullName);
    }
#endif
}
}

View File

@ -1,33 +0,0 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
{
// A single file's data region inside a seekable RAR volume. Seeks the shared
// volume stream to the entry's data and, for salted (RAR4-encrypted) entries,
// wraps it in a decrypting stream.
internal class SeekableFilePart : RarFilePart
{
    private readonly Stream _volumeStream;
    private readonly string _password;

    internal SeekableFilePart(MarkHeader mh, FileHeader fh, Stream stream, string password)
        : base(mh, fh)
    {
        _volumeStream = stream;
        _password = password;
    }

    internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;

    internal override Stream GetCompressedStream()
    {
        // Position the shared stream at this entry's packed data.
        _volumeStream.Position = FileHeader.DataStartPosition;
#if !NO_CRYPTO
        if (FileHeader.R4Salt != null)
        {
            // Salted header means the entry data is encrypted (RAR4 scheme).
            return new RarCryptoWrapper(_volumeStream, _password, FileHeader.R4Salt);
        }
#endif
        return _volumeStream;
    }
}
}

View File

@ -1,27 +0,0 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
// A RAR volume backed by a caller-supplied seekable stream. File parts are
// produced as SeekableFilePart instances over the shared stream.
internal class StreamRarArchiveVolume : RarVolume
{
    internal StreamRarArchiveVolume(Stream stream, ReaderOptions options)
        : base(StreamingMode.Seekable, stream, options)
    {
    }

    internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new SeekableFilePart(markHeader, fileHeader, Stream, ReaderOptions.Password);
}
}

View File

@ -1,226 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
    // Parsed 7z metadata (file table, folders, pack sizes); populated lazily
    // by LoadFactory on first entry enumeration.
    private ArchiveDatabase database;

#if !NO_FILE
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath">Path to an existing 7z archive.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">Existing 7z archive file.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
    }
#endif

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream">Seekable, readable stream positioned at the archive.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
    {
        stream.CheckNotNull("stream");
        return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
    }

#if !NO_FILE
    internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
        : base(ArchiveType.SevenZip, fileInfo, readerOptions)
    {
    }

    protected override IEnumerable<SevenZipVolume> LoadVolumes(FileInfo file)
    {
        // 7z archives opened here are treated as single-volume.
        return new SevenZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
    }

    public static bool IsSevenZipFile(string filePath)
    {
        return IsSevenZipFile(new FileInfo(filePath));
    }

    public static bool IsSevenZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using (Stream stream = fileInfo.OpenRead())
        {
            return IsSevenZipFile(stream);
        }
    }
#endif

    internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
        : base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
    {
    }

    internal SevenZipArchive()
        : base(ArchiveType.SevenZip)
    {
    }

    protected override IEnumerable<SevenZipVolume> LoadVolumes(IEnumerable<Stream> streams)
    {
        foreach (Stream s in streams)
        {
            // Header parsing and entry extraction both require random access.
            if (!s.CanRead || !s.CanSeek)
            {
                throw new ArgumentException("Stream is not readable and seekable");
            }
            SevenZipVolume volume = new SevenZipVolume(s, ReaderOptions);
            yield return volume;
        }
    }

    protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(IEnumerable<SevenZipVolume> volumes)
    {
        var stream = volumes.Single().Stream;
        // Ensure the archive headers have been parsed into `database`.
        LoadFactory(stream);
        for (int i = 0; i < database._files.Count; i++)
        {
            var file = database._files[i];
            yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
        }
    }

    // Parses the archive headers into `database` exactly once (no-op afterwards).
    private void LoadFactory(Stream stream)
    {
        if (database == null)
        {
            stream.Position = 0;
            var reader = new ArchiveReader();
            reader.Open(stream);
            database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
        }
    }

    /// <summary>
    /// Sniffs whether the stream starts with the 6-byte 7z signature. Never throws;
    /// consumes up to 6 bytes from the stream.
    /// </summary>
    public static bool IsSevenZipFile(Stream stream)
    {
        try
        {
            return SignatureMatch(stream);
        }
        catch
        {
            return false;
        }
    }

    // "7z" followed by 0xBC 0xAF 0x27 0x1C: the fixed 7z file signature.
    private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};

    private static bool SignatureMatch(Stream stream)
    {
        BinaryReader reader = new BinaryReader(stream);
        byte[] signatureBytes = reader.ReadBytes(6);
        return signatureBytes.BinaryEquals(SIGNATURE);
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        return new SevenZipReader(ReaderOptions, this);
    }

    // Solid when the non-directory entries span more than one compressed folder.
    public override bool IsSolid { get { return Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; } }

    public override long TotalSize
    {
        get
        {
            // Touching Entries forces the lazy entry enumeration, which populates
            // `database` before _packSizes is read below. NOTE(review): relies on
            // the Entries getter triggering LoadEntries — confirm in AbstractArchive.
            int i = Entries.Count;
            return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
        }
    }

    // Reader used for sequential ("solid") extraction: entries are yielded grouped
    // by compressed folder so each folder stream is decoded only once, in order.
    private class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
    {
        private readonly SevenZipArchive archive;
        // Extraction cursor: the folder currently being decoded, its decoded
        // stream, and the header of the entry currently being served.
        private CFolder currentFolder;
        private Stream currentStream;
        private CFileItem currentItem;

        internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
            : base(readerOptions, ArchiveType.SevenZip)
        {
            this.archive = archive;
        }

        public override SevenZipVolume Volume => archive.Volumes.Single();

        protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
        {
            List<SevenZipArchiveEntry> entries = archive.Entries.ToList();
            stream.Position = 0;
            // Directories first: they carry no folder data to decode.
            foreach (var dir in entries.Where(x => x.IsDirectory))
            {
                yield return dir;
            }
            foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
            {
                currentFolder = group.Key;
                if (group.Key == null)
                {
                    // Entries with no folder have no packed data — presumably
                    // zero-length files; serve an empty stream.
                    currentStream = Stream.Null;
                }
                else
                {
                    currentStream = archive.database.GetFolderStream(stream, currentFolder, new PasswordProvider(Options.Password));
                }
                foreach (var entry in group)
                {
                    currentItem = entry.FilePart.Header;
                    yield return entry;
                }
            }
        }

        protected override EntryStream GetEntryStream()
        {
            // Each entry consumes the next currentItem.Size bytes of the decoded
            // folder stream.
            return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
        }
    }

    // Adapts the configured password string to the IPasswordProvider callback
    // used by the 7z decoder.
    private class PasswordProvider : IPasswordProvider
    {
        private readonly string _password;

        public PasswordProvider(string password)
        {
            _password = password;
        }

        public string CryptoGetTextPassword()
        {
            return _password;
        }
    }
}
}

View File

@ -1,28 +0,0 @@
using System.IO;
using SharpCompress.Common.SevenZip;
namespace SharpCompress.Archives.SevenZip
{
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
    internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
        : base(part)
    {
        Archive = archive;
    }

    // The owning archive this entry was enumerated from.
    public IArchive Archive { get; }

    // 7z entries are never split across volumes here.
    public bool IsComplete => true;

    /// <summary>
    /// This is a 7Zip Anti item
    /// </summary>
    public bool IsAnti => FilePart.Header.IsAnti;

    public Stream OpenEntryStream() => FilePart.GetCompressedStream();
}
}

View File

@ -1,206 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar
{
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath">Path to an existing tar archive.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">Existing tar archive file.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
    }
#endif

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream">Seekable stream positioned at the archive start.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
    {
        stream.CheckNotNull("stream");
        return new TarArchive(stream, readerOptions ?? new ReaderOptions());
    }

#if !NO_FILE
    public static bool IsTarFile(string filePath)
    {
        return IsTarFile(new FileInfo(filePath));
    }

    public static bool IsTarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using (Stream stream = fileInfo.OpenRead())
        {
            return IsTarFile(stream);
        }
    }
#endif

    /// <summary>
    /// Sniffs whether the stream looks like a tar archive by attempting to read one
    /// header at the current position. Never throws, but consumes stream data.
    /// </summary>
    public static bool IsTarFile(Stream stream)
    {
        try
        {
            TarHeader tarHeader = new TarHeader(new ArchiveEncoding());
            bool readSucceeded = tarHeader.Read(new BinaryReader(stream));
            // Accept an all-blank header as well: a tar file may legitimately
            // start with an empty (zero-filled) record.
            bool isEmptyArchive = tarHeader.Name.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
            return readSucceeded || isEmptyArchive;
        }
        catch
        {
            // Any parse error simply means "not a tar file".
        }
        return false;
    }

#if !NO_FILE
    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">Existing tar archive file.</param>
    /// <param name="readerOptions">Reader options (never null here).</param>
    internal TarArchive(FileInfo fileInfo, ReaderOptions readerOptions)
        : base(ArchiveType.Tar, fileInfo, readerOptions)
    {
    }

    protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
    {
        // Tar archives are single-volume.
        return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
    }
#endif

    /// <summary>
    /// Takes multiple seekable Streams for a multi-part archive
    /// </summary>
    /// <param name="stream">Seekable source stream.</param>
    /// <param name="readerOptions">Reader options (never null here).</param>
    internal TarArchive(Stream stream, ReaderOptions readerOptions)
        : base(ArchiveType.Tar, stream, readerOptions)
    {
    }

    // Creates an empty, writable archive (see Create()).
    internal TarArchive()
        : base(ArchiveType.Tar)
    {
    }

    protected override IEnumerable<TarVolume> LoadVolumes(IEnumerable<Stream> streams)
    {
        return new TarVolume(streams.First(), ReaderOptions).AsEnumerable();
    }

    protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
    {
        Stream stream = volumes.Single().Stream;
        // A "LongName" pseudo-entry carries the real (over-length) name of the
        // entry that follows it; remember it and apply it to the next header.
        TarHeader previousHeader = null;
        foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
        {
            if (header != null)
            {
                if (header.EntryType == EntryType.LongName)
                {
                    previousHeader = header;
                }
                else
                {
                    if (previousHeader != null)
                    {
                        // Read the long-name entry's data block: its payload is
                        // the name of the current header.
                        var entry = new TarArchiveEntry(this, new TarFilePart(previousHeader, stream),
                                                        CompressionType.None);
                        var oldStreamPos = stream.Position;
                        using (var entryStream = entry.OpenEntryStream())
                        {
                            using (var memoryStream = new MemoryStream())
                            {
                                entryStream.TransferTo(memoryStream);
                                memoryStream.Position = 0;
                                var bytes = memoryStream.ToArray();
                                header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
                            }
                        }
                        // Restore the position disturbed by reading the name payload.
                        stream.Position = oldStreamPos;
                        previousHeader = null;
                    }
                    yield return new TarArchiveEntry(this, new TarFilePart(header, stream), CompressionType.None);
                }
            }
        }
    }

    /// <summary>
    /// Creates a new, empty tar archive that entries can be added to and saved.
    /// </summary>
    public static TarArchive Create()
    {
        return new TarArchive();
    }

    protected override TarArchiveEntry CreateEntryInternal(string filePath, Stream source,
                                                           long size, DateTime? modified, bool closeStream)
    {
        return new TarWritableArchiveEntry(this, source, CompressionType.Unknown, filePath, size, modified,
                                           closeStream);
    }

    protected override void SaveTo(Stream stream, WriterOptions options,
                                   IEnumerable<TarArchiveEntry> oldEntries,
                                   IEnumerable<TarArchiveEntry> newEntries)
    {
        using (var writer = new TarWriter(stream, new TarWriterOptions(options)))
        {
            // Directory entries are skipped; only file data is written.
            foreach (var entry in oldEntries.Concat(newEntries)
                                            .Where(x => !x.IsDirectory))
            {
                using (var entryStream = entry.OpenEntryStream())
                {
                    writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
                }
            }
        }
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return TarReader.Open(stream);
    }
}
}

View File

@ -1,29 +0,0 @@
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
namespace SharpCompress.Archives.Tar
{
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
    internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
        : base(part, compressionType)
    {
        Archive = archive;
    }

    #region IArchiveEntry Members

    // The owning archive this entry was enumerated from.
    public IArchive Archive { get; }

    // Tar entries are never split across volumes.
    public bool IsComplete => true;

    #endregion

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
}
}

View File

@ -1,65 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.Tar
{
// A pending (not yet written) tar entry backed by a caller-supplied stream.
// Metadata is fixed at construction; the data stream is re-read on each save.
internal class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;
    // Guards against double-dispose of the source stream. This matches
    // ZipWritableArchiveEntry, which already tracked disposal; the tar
    // variant previously disposed on every Close call.
    private bool isDisposed;

    internal TarWritableArchiveEntry(TarArchive archive, Stream stream, CompressionType compressionType,
                                     string path, long size, DateTime? lastModified, bool closeStream)
        : base(archive, null, compressionType)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    public override long Crc => 0;
    public override string Key { get; }
    public override long CompressedSize => 0;
    public override long Size { get; }
    public override DateTime? LastModifiedTime { get; }
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => false;
    public override bool IsSplitAfter => false;

    // Pending entries have no on-disk parts yet.
    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    /// <summary>
    /// Returns the pending source stream rewound to the start, wrapped so the
    /// caller's Dispose does not close it (it may be read again on a later save).
    /// </summary>
    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return new NonDisposingStream(stream);
    }

    internal override void Close()
    {
        // Dispose the source stream at most once, and only if this entry owns it.
        if (closeStream && !isDisposed)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}
}

View File

@ -1,214 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip
{
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
    // Reads headers from the seekable source stream; stays null for archives
    // created from scratch via Create().
    private readonly SeekableZipHeaderFactory headerFactory;

    /// <summary>
    /// Gets or sets the compression level applied to files added to the archive,
    /// if the compression method is set to deflate
    /// </summary>
    public CompressionLevel DeflateCompressionLevel { get; set; }

#if !NO_FILE
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath">Path to an existing zip archive.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
    {
        filePath.CheckNotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">Existing zip archive file.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
    {
        fileInfo.CheckNotNull("fileInfo");
        return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
    }
#endif

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream">Seekable stream positioned at the archive start.</param>
    /// <param name="readerOptions">Optional reader options; defaults are used when null.</param>
    public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
    {
        stream.CheckNotNull("stream");
        return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
    }

#if !NO_FILE
    public static bool IsZipFile(string filePath, string password = null)
    {
        return IsZipFile(new FileInfo(filePath), password);
    }

    public static bool IsZipFile(FileInfo fileInfo, string password = null)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using (Stream stream = fileInfo.OpenRead())
        {
            return IsZipFile(stream, password);
        }
    }
#endif

    /// <summary>
    /// Sniffs whether the stream contains zip headers. Never throws, but
    /// consumes stream data.
    /// </summary>
    public static bool IsZipFile(Stream stream, string password = null)
    {
        StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
        try
        {
            ZipHeader header =
                headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
            if (header == null)
            {
                return false;
            }
            return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
        }
        catch (CryptographicException)
        {
            // Parsing got far enough to hit encryption: it is a zip file.
            return true;
        }
        catch
        {
            return false;
        }
    }

#if !NO_FILE
    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">Existing zip archive file.</param>
    /// <param name="readerOptions">Reader options (never null here).</param>
    internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
        : base(ArchiveType.Zip, fileInfo, readerOptions)
    {
        headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
    }

    protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
    {
        return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
    }
#endif

    // Creates an empty, writable archive (see Create()); headerFactory stays null.
    internal ZipArchive()
        : base(ArchiveType.Zip)
    {
    }

    /// <summary>
    /// Takes multiple seekable Streams for a multi-part archive
    /// </summary>
    /// <param name="stream">Seekable source stream.</param>
    /// <param name="readerOptions">Reader options (never null here).</param>
    internal ZipArchive(Stream stream, ReaderOptions readerOptions)
        : base(ArchiveType.Zip, stream, readerOptions)
    {
        headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
    }

    protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
    {
        return new ZipVolume(streams.First(), ReaderOptions).AsEnumerable();
    }

    protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
    {
        var volume = volumes.Single();
        Stream stream = volume.Stream;
        // Walk the seekable headers; entries come from directory records and
        // enumeration stops at the end-of-directory record.
        foreach (ZipHeader h in headerFactory.ReadSeekableHeader(stream))
        {
            if (h != null)
            {
                switch (h.ZipHeaderType)
                {
                    case ZipHeaderType.DirectoryEntry:
                    {
                        yield return new ZipArchiveEntry(this,
                                                         new SeekableZipFilePart(headerFactory,
                                                                                 h as DirectoryEntryHeader,
                                                                                 stream));
                    }
                    break;
                    case ZipHeaderType.DirectoryEnd:
                    {
                        // The end record carries the archive-level comment.
                        byte[] bytes = (h as DirectoryEndHeader).Comment;
                        volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
                        yield break;
                    }
                }
            }
        }
    }

    /// <summary>
    /// Saves the archive to the stream using Deflate compression.
    /// </summary>
    public void SaveTo(Stream stream)
    {
        SaveTo(stream, new WriterOptions(CompressionType.Deflate));
    }

    protected override void SaveTo(Stream stream, WriterOptions options,
                                   IEnumerable<ZipArchiveEntry> oldEntries,
                                   IEnumerable<ZipArchiveEntry> newEntries)
    {
        using (var writer = new ZipWriter(stream, new ZipWriterOptions(options)))
        {
            // Directory entries are skipped; only file data is rewritten.
            foreach (var entry in oldEntries.Concat(newEntries)
                                            .Where(x => !x.IsDirectory))
            {
                using (var entryStream = entry.OpenEntryStream())
                {
                    writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
                }
            }
        }
    }

    protected override ZipArchiveEntry CreateEntryInternal(string filePath, Stream source, long size, DateTime? modified,
                                                           bool closeStream)
    {
        return new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
    }

    /// <summary>
    /// Creates a new, empty zip archive that entries can be added to and saved.
    /// </summary>
    public static ZipArchive Create()
    {
        return new ZipArchive();
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return ZipReader.Open(stream, ReaderOptions);
    }
}
}

View File

@ -1,30 +0,0 @@
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip
{
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
    internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart part)
        : base(part)
    {
        Archive = archive;
    }

    #region IArchiveEntry Members

    // The owning archive this entry was enumerated from.
    public IArchive Archive { get; }

    // Entries loaded from the central directory are always complete.
    public bool IsComplete => true;

    #endregion

    // Per-entry comment taken from the entry's central-directory part.
    public string Comment => (Parts.Single() as SeekableZipFilePart).Comment;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
}
}

View File

@ -1,68 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Archives.Zip
{
// A pending (not yet written) zip entry backed by a caller-supplied stream.
// Metadata is fixed at construction; the data stream is re-read on each save.
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
    private readonly Stream _source;
    private readonly bool _ownsStream;
    private bool _closed;

    internal ZipWritableArchiveEntry(ZipArchive archive, Stream stream, string path, long size,
                                     DateTime? lastModified, bool closeStream)
        : base(archive, null)
    {
        _source = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        _ownsStream = closeStream;
    }

    public override string Key { get; }
    public override long Size { get; }
    public override DateTime? LastModifiedTime { get; }
    public override long Crc => 0;
    public override long CompressedSize => 0;
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => false;
    public override bool IsSplitAfter => false;

    // Pending entries have no on-disk parts yet.
    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => _source;

    public override Stream OpenEntryStream()
    {
        // Rewind so repeated saves re-read the pending data from the start, and
        // hand out a wrapper whose Dispose leaves the source stream open.
        _source.Seek(0, SeekOrigin.Begin);
        return new NonDisposingStream(_source);
    }

    internal override void Close()
    {
        // Dispose the source stream at most once, and only if this entry owns it.
        if (_ownsStream && !_closed)
        {
            _source.Dispose();
            _closed = true;
        }
    }
}
}

View File

@ -1,119 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Buffers
{
/// <summary>
/// Provides a resource pool that enables reusing instances of type <see cref="T:T[]"/>.
/// </summary>
/// <remarks>
/// <para>
/// Renting and returning buffers with an <see cref="ArrayPool{T}"/> can increase performance
/// in situations where arrays are created and destroyed frequently, resulting in significant
/// memory pressure on the garbage collector.
/// </para>
/// <para>
/// This class is thread-safe. All members may be used by multiple threads concurrently.
/// </para>
/// </remarks>
internal abstract class ArrayPool<T>
{
    /// <summary>The lazily-initialized shared pool instance.</summary>
    private static ArrayPool<T> s_sharedInstance = null;

    /// <summary>
    /// Retrieves a shared <see cref="ArrayPool{T}"/> instance.
    /// </summary>
    /// <remarks>
    /// The shared pool provides a default implementation of <see cref="ArrayPool{T}"/>
    /// that's intended for general applicability. It maintains arrays of multiple sizes, and
    /// may hand back a larger array than was actually requested, but will never hand back a smaller
    /// array than was requested. Renting a buffer from it with <see cref="Rent"/> will result in an
    /// existing buffer being taken from the pool if an appropriate buffer is available or in a new
    /// buffer being allocated if one is not available.
    /// </remarks>
    public static ArrayPool<T> Shared
    {
        // Volatile read keeps the fast path lock-free; the slow path below
        // performs the one-time initialization.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        get { return Volatile.Read(ref s_sharedInstance) ?? EnsureSharedCreated(); }
    }

    /// <summary>Ensures that <see cref="s_sharedInstance"/> has been initialized to a pool and returns it.</summary>
    [MethodImpl(MethodImplOptions.NoInlining)]
    private static ArrayPool<T> EnsureSharedCreated()
    {
        // CompareExchange makes racing initializers safe: a losing thread's pool
        // is discarded and every caller observes the same winning instance.
        Interlocked.CompareExchange(ref s_sharedInstance, Create(), null);
        return s_sharedInstance;
    }

    /// <summary>
    /// Creates a new <see cref="ArrayPool{T}"/> instance using default configuration options.
    /// </summary>
    /// <returns>A new <see cref="ArrayPool{T}"/> instance.</returns>
    public static ArrayPool<T> Create()
    {
        return new DefaultArrayPool<T>();
    }

    /// <summary>
    /// Creates a new <see cref="ArrayPool{T}"/> instance using custom configuration options.
    /// </summary>
    /// <param name="maxArrayLength">The maximum length of array instances that may be stored in the pool.</param>
    /// <param name="maxArraysPerBucket">
    /// The maximum number of array instances that may be stored in each bucket in the pool. The pool
    /// groups arrays of similar lengths into buckets for faster access.
    /// </param>
    /// <returns>A new <see cref="ArrayPool{T}"/> instance with the specified configuration options.</returns>
    /// <remarks>
    /// The created pool will group arrays into buckets, with no more than <paramref name="maxArraysPerBucket"/>
    /// in each bucket and with those arrays not exceeding <paramref name="maxArrayLength"/> in length.
    /// </remarks>
    public static ArrayPool<T> Create(int maxArrayLength, int maxArraysPerBucket)
    {
        return new DefaultArrayPool<T>(maxArrayLength, maxArraysPerBucket);
    }

    /// <summary>
    /// Retrieves a buffer that is at least the requested length.
    /// </summary>
    /// <param name="minimumLength">The minimum length of the array needed.</param>
    /// <returns>
    /// An <see cref="T:T[]"/> that is at least <paramref name="minimumLength"/> in length.
    /// </returns>
    /// <remarks>
    /// This buffer is loaned to the caller and should be returned to the same pool via
    /// <see cref="Return"/> so that it may be reused in subsequent usage of <see cref="Rent"/>.
    /// It is not a fatal error to not return a rented buffer, but failure to do so may lead to
    /// decreased application performance, as the pool may need to create a new buffer to replace
    /// the one lost.
    /// </remarks>
    public abstract T[] Rent(int minimumLength);

    /// <summary>
    /// Returns to the pool an array that was previously obtained via <see cref="Rent"/> on the same
    /// <see cref="ArrayPool{T}"/> instance.
    /// </summary>
    /// <param name="array">
    /// The buffer previously obtained from <see cref="Rent"/> to return to the pool.
    /// </param>
    /// <param name="clearArray">
    /// If <c>true</c> and if the pool will store the buffer to enable subsequent reuse, <see cref="Return"/>
    /// will clear <paramref name="array"/> of its contents so that a subsequent consumer via <see cref="Rent"/>
    /// will not see the previous consumer's content. If <c>false</c> or if the pool will release the buffer,
    /// the array's contents are left unchanged.
    /// </param>
    /// <remarks>
    /// Once a buffer has been returned to the pool, the caller gives up all ownership of the buffer
    /// and must not use it. The reference returned from a given call to <see cref="Rent"/> must only be
    /// returned via <see cref="Return"/> once. The default <see cref="ArrayPool{T}"/>
    /// may hold onto the returned buffer in order to rent it again, or it may release the returned buffer
    /// if it's determined that the pool already has enough buffers stored.
    /// </remarks>
    public abstract void Return(T[] array, bool clearArray = false);
}
}
#endif

View File

@ -1,144 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
    /// <summary>The default maximum length of each array in the pool (2^20).</summary>
    private const int DefaultMaxArrayLength = 1024 * 1024;

    /// <summary>The default maximum number of arrays per bucket that are available for rent.</summary>
    private const int DefaultMaxNumberOfArraysPerBucket = 50;

    /// <summary>Lazily-allocated empty array used when arrays of length 0 are requested.</summary>
    private static T[] s_emptyArray; // we support contracts earlier than those with Array.Empty<T>()

    // One bucket per size class; Utilities.SelectBucketIndex maps a length to its bucket.
    private readonly Bucket[] _buckets;

    internal DefaultArrayPool() : this(DefaultMaxArrayLength, DefaultMaxNumberOfArraysPerBucket)
    {
    }

    /// <summary>Creates the pool with the given limits.</summary>
    /// <param name="maxArrayLength">Largest array length the pool will retain (clamped to [2^4, 2^30]).</param>
    /// <param name="maxArraysPerBucket">Maximum arrays retained per size bucket.</param>
    /// <exception cref="ArgumentOutOfRangeException">Either argument is not positive.</exception>
    internal DefaultArrayPool(int maxArrayLength, int maxArraysPerBucket)
    {
        if (maxArrayLength <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(maxArrayLength));
        }
        if (maxArraysPerBucket <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(maxArraysPerBucket));
        }

        // Our bucketing algorithm has a min length of 2^4 and a max length of 2^30.
        // Constrain the actual max used to those values.
        const int MinimumArrayLength = 0x10, MaximumArrayLength = 0x40000000;
        if (maxArrayLength > MaximumArrayLength)
        {
            maxArrayLength = MaximumArrayLength;
        }
        else if (maxArrayLength < MinimumArrayLength)
        {
            maxArrayLength = MinimumArrayLength;
        }

        // Create the buckets.
        int poolId = Id;
        int maxBuckets = Utilities.SelectBucketIndex(maxArrayLength);
        var buckets = new Bucket[maxBuckets + 1];
        for (int i = 0; i < buckets.Length; i++)
        {
            buckets[i] = new Bucket(Utilities.GetMaxSizeForBucket(i), maxArraysPerBucket, poolId);
        }
        _buckets = buckets;
    }

    /// <summary>Gets an ID for the pool to use with events.</summary>
    private int Id => GetHashCode();

    public override T[] Rent(int minimumLength)
    {
        // Arrays can't be smaller than zero. We allow requesting zero-length arrays (even though
        // pooling such an array isn't valuable) as it's a valid length array, and we want the pool
        // to be usable in general instead of using `new`, even for computed lengths.
        if (minimumLength < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(minimumLength));
        }
        else if (minimumLength == 0)
        {
            // No need for events with the empty array. Our pool is effectively infinite
            // and we'll never allocate for rents and never store for returns.
            return s_emptyArray ?? (s_emptyArray = new T[0]);
        }

        T[] buffer = null;
        int index = Utilities.SelectBucketIndex(minimumLength);
        if (index < _buckets.Length)
        {
            // Search for an array starting at the 'index' bucket. If the bucket is empty, bump up to the
            // next higher bucket and try that one, but only try at most a few buckets.
            const int MaxBucketsToTry = 2;
            int i = index;
            do
            {
                // Attempt to rent from the bucket. If we get a buffer from it, return it.
                buffer = _buckets[i].Rent();
                if (buffer != null)
                {
                    return buffer;
                }
            }
            while (++i < _buckets.Length && i != index + MaxBucketsToTry);

            // The pool was exhausted for this buffer size. Allocate a new buffer with a size corresponding
            // to the appropriate bucket.
            buffer = new T[_buckets[index]._bufferLength];
        }
        else
        {
            // The request was for a size too large for the pool. Allocate an array of exactly the requested length.
            // When it's returned to the pool, we'll simply throw it away.
            buffer = new T[minimumLength];
        }
        return buffer;
    }

    public override void Return(T[] array, bool clearArray = false)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        else if (array.Length == 0)
        {
            // Ignore empty arrays. When a zero-length array is rented, we return a singleton
            // rather than actually taking a buffer out of the lowest bucket.
            return;
        }

        // Determine with what bucket this array length is associated
        int bucket = Utilities.SelectBucketIndex(array.Length);

        // If we can tell that the buffer was allocated, drop it. Otherwise, check if we have space in the pool
        if (bucket < _buckets.Length)
        {
            // Clear the array if the user requests
            if (clearArray)
            {
                Array.Clear(array, 0, array.Length);
            }

            // Return the buffer to its bucket. In the future, we might consider having Return return false
            // instead of dropping a bucket, in which case we could try to return to a lower-sized bucket,
            // just as how in Rent we allow renting from a higher-sized bucket.
            _buckets[bucket].Return(array);
        }
    }
}
}
#endif

View File

@ -1,111 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
using System.Diagnostics;
using System.Threading;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
    /// <summary>Provides a thread-safe bucket containing buffers that can be Rent'd and Return'd.</summary>
    private sealed class Bucket
    {
        internal readonly int _bufferLength;            // length of every buffer held by this bucket
        private readonly T[][] _buffers;                // slots below _index are rented out (null)
        private readonly int _poolId;
        private SpinLock _lock; // do not make this readonly; it's a mutable struct
        private int _index;     // index of the next available buffer slot

        /// <summary>
        /// Creates the pool with numberOfBuffers arrays where each buffer is of bufferLength length.
        /// Buffers are allocated lazily on first Rent of each slot.
        /// </summary>
        internal Bucket(int bufferLength, int numberOfBuffers, int poolId)
        {
            _lock = new SpinLock(Debugger.IsAttached); // only enable thread tracking if debugger is attached; it adds non-trivial overheads to Enter/Exit
            _buffers = new T[numberOfBuffers][];
            _bufferLength = bufferLength;
            _poolId = poolId;
        }

        /// <summary>Gets an ID for the bucket to use with events.</summary>
        internal int Id => GetHashCode();

        /// <summary>Takes an array from the bucket. If the bucket is empty, returns null.</summary>
        internal T[] Rent()
        {
            T[][] buffers = _buffers;
            T[] buffer = null;
            // While holding the lock, grab whatever is at the next available index and
            // update the index. We do as little work as possible while holding the spin
            // lock to minimize contention with other threads. The try/finally is
            // necessary to properly handle thread aborts on platforms which have them.
            bool lockTaken = false, allocateBuffer = false;
            try
            {
                _lock.Enter(ref lockTaken);
                if (_index < buffers.Length)
                {
                    buffer = buffers[_index];
                    buffers[_index++] = null;
                    allocateBuffer = buffer == null; // slot never populated yet → allocate outside the lock
                }
            }
            finally
            {
                if (lockTaken) _lock.Exit(false);
            }
            // While we were holding the lock, we grabbed whatever was at the next available index, if
            // there was one. If we tried and if we got back null, that means we hadn't yet allocated
            // for that slot, in which case we should do so now.
            if (allocateBuffer)
            {
                buffer = new T[_bufferLength];
            }
            return buffer;
        }

        /// <summary>
        /// Attempts to return the buffer to the bucket. If successful, the buffer will be stored
        /// in the bucket and true will be returned; otherwise, the buffer won't be stored, and false
        /// will be returned.
        /// </summary>
        internal void Return(T[] array)
        {
            // Check to see if the buffer is the correct size for this bucket
            if (array.Length != _bufferLength)
            {
                throw new ArgumentException("Buffer not from pool", nameof(array));
            }
            // While holding the spin lock, if there's room available in the bucket,
            // put the buffer into the next available slot. Otherwise, we just drop it.
            // The try/finally is necessary to properly handle thread aborts on platforms
            // which have them.
            bool lockTaken = false;
            try
            {
                _lock.Enter(ref lockTaken);
                if (_index != 0)
                {
                    _buffers[--_index] = array;
                }
            }
            finally
            {
                if (lockTaken) _lock.Exit(false);
            }
        }
    }
}
}
#endif

View File

@ -1,38 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace SharpCompress.Buffers
{
internal static class Utilities
{
    /// <summary>
    /// Maps a requested buffer size to the index of the pool bucket that serves it.
    /// Bucket 0 holds 16-element buffers; each subsequent bucket doubles the length,
    /// so sizes 1..16 map to 0, 17..32 to 1, 33..64 to 2, and so on.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static int SelectBucketIndex(int bufferSize)
    {
        Debug.Assert(bufferSize > 0);

        // The bucket index is the bit length of ((bufferSize - 1) / 16),
        // i.e. ceil(log2(bufferSize / 16)) clamped at 0.
        uint bits = ((uint)bufferSize - 1) >> 4;
        int bucketIndex = 0;
        while (bits != 0)
        {
            bits >>= 1;
            bucketIndex++;
        }
        return bucketIndex;
    }

    /// <summary>Returns the buffer length stored by the bucket at <paramref name="binIndex"/> (16 doubled per bucket).</summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static int GetMaxSizeForBucket(int binIndex)
    {
        int maxSize = 16 << binIndex;
        Debug.Assert(maxSize >= 0);
        return maxSize;
    }
}
}
#endif

View File

@ -1,77 +0,0 @@
using System;
using System.Text;
namespace SharpCompress.Common
{
/// <summary>
/// Bundles the encodings used for entry names and passwords, with optional
/// forced-encoding and custom-decoder overrides. Defaults to code page 437
/// (the historical DOS/zip default).
/// </summary>
public class ArchiveEncoding
{
    /// <summary>
    /// Default encoding to use when archive format doesn't specify one.
    /// </summary>
    public Encoding Default { get; set; }

    /// <summary>
    /// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
    /// </summary>
    public Encoding Password { get; set; }

    /// <summary>
    /// Set this encoding when you want to force it for all encoding operations.
    /// </summary>
    public Encoding Forced { get; set; }

    /// <summary>
    /// Set this when you want to use a custom method for all decoding operations.
    /// </summary>
    /// <returns>string Func(bytes, index, length)</returns>
    public Func<byte[], int, int, string> CustomDecoder { get; set; }

    public ArchiveEncoding()
    {
        // NETSTANDARD1_0 lacks the numeric code-page overload of GetEncoding.
#if NETSTANDARD1_0
        Default = Encoding.GetEncoding("cp437");
        Password = Encoding.GetEncoding("cp437");
#else
        Default = Encoding.GetEncoding(437);
        Password = Encoding.GetEncoding(437);
#endif
    }

#if NETSTANDARD1_3 || NETSTANDARD2_0
    // On .NET Core/Standard, legacy code pages (like 437) require the
    // CodePages provider to be registered before GetEncoding can resolve them.
    static ArchiveEncoding()
    {
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    }
#endif

    /// <summary>Decodes the whole byte array using the configured decoder.</summary>
    public string Decode(byte[] bytes)
    {
        return Decode(bytes, 0, bytes.Length);
    }

    /// <summary>Decodes a slice of the byte array using the configured decoder.</summary>
    public string Decode(byte[] bytes, int start, int length)
    {
        return GetDecoder().Invoke(bytes, start, length);
    }

    /// <summary>Decodes the bytes as UTF-8, ignoring all configured overrides.</summary>
    public string DecodeUTF8(byte[] bytes)
    {
        return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
    }

    /// <summary>Encodes the string with the effective encoding (Forced, then Default, then UTF-8).</summary>
    public byte[] Encode(string str)
    {
        return GetEncoding().GetBytes(str);
    }

    /// <summary>Resolves the effective encoding: Forced wins, then Default, then UTF-8.</summary>
    public Encoding GetEncoding()
    {
        return Forced ?? Default ?? Encoding.UTF8;
    }

    /// <summary>Resolves the effective decoder: CustomDecoder wins, otherwise the effective encoding.</summary>
    public Func<byte[], int, int, string> GetDecoder()
    {
        return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
    }
}
}

View File

@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Base exception for errors encountered while reading or writing an archive.
/// </summary>
public class ArchiveException : Exception
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public ArchiveException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes the exception with a description and the underlying cause.
    /// Added for consistency with ExtractionException so callers can preserve
    /// the original failure instead of discarding it.
    /// </summary>
    public ArchiveException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Event payload carrying the archive item an extraction event refers to.
/// </summary>
public class ArchiveExtractionEventArgs<T> : EventArgs
{
    internal ArchiveExtractionEventArgs(T entry) => Item = entry;

    /// <summary>The item associated with this extraction event.</summary>
    public T Item { get; }
}
}

View File

@ -1,11 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>Identifies the container format of an archive.</summary>
public enum ArchiveType
{
    Rar,
    Zip,
    Tar,
    SevenZip,
    GZip
}

View File

@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Progress payload reporting how many compressed bytes have been consumed
/// while reading the current entry.
/// </summary>
public class CompressedBytesReadEventArgs : EventArgs
{
    /// <summary>
    /// Compressed bytes read for the current entry
    /// </summary>
    public long CompressedBytesRead { get; internal set; }

    /// <summary>
    /// Current file part read for Multipart files (e.g. Rar)
    /// </summary>
    public long CurrentFilePartCompressedBytesRead { get; internal set; }
}
}

View File

@ -1,19 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>Identifies the compression algorithm applied to an entry's data.</summary>
public enum CompressionType
{
    None,
    GZip,
    BZip2,
    PPMd,
    Deflate,
    Rar,
    LZMA,
    BCJ,
    BCJ2,
    LZip,
    Xz,
    Unknown,
    Deflate64
}
}

View File

@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when archive decryption fails (wrong password, unsupported or
/// corrupt encryption data).
/// </summary>
public class CryptographicException : Exception
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public CryptographicException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes the exception with a description and the underlying cause,
    /// so the original cryptographic failure is preserved for callers.
    /// </summary>
    public CryptographicException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,91 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common
{
/// <summary>
/// Base class for a single entry (file or directory) inside an archive,
/// exposing its metadata and, internally, the file parts that hold its data.
/// </summary>
public abstract class Entry : IEntry
{
    /// <summary>
    /// The File's 32 bit CRC Hash
    /// </summary>
    public abstract long Crc { get; }

    /// <summary>
    /// The string key of the file internal to the Archive.
    /// </summary>
    public abstract string Key { get; }

    /// <summary>
    /// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
    /// </summary>
    public abstract string LinkTarget { get; }

    /// <summary>
    /// The compressed file size
    /// </summary>
    public abstract long CompressedSize { get; }

    /// <summary>
    /// The compression type
    /// </summary>
    public abstract CompressionType CompressionType { get; }

    /// <summary>
    /// The uncompressed file size
    /// </summary>
    public abstract long Size { get; }

    /// <summary>
    /// The entry last modified time in the archive, if recorded
    /// </summary>
    public abstract DateTime? LastModifiedTime { get; }

    /// <summary>
    /// The entry create time in the archive, if recorded
    /// </summary>
    public abstract DateTime? CreatedTime { get; }

    /// <summary>
    /// The entry last accessed time in the archive, if recorded
    /// </summary>
    public abstract DateTime? LastAccessedTime { get; }

    /// <summary>
    /// The entry time when archived, if recorded
    /// </summary>
    public abstract DateTime? ArchivedTime { get; }

    /// <summary>
    /// Entry is password protected and encrypted and cannot be extracted.
    /// </summary>
    public abstract bool IsEncrypted { get; }

    /// <summary>
    /// Entry is directory.
    /// </summary>
    public abstract bool IsDirectory { get; }

    /// <summary>
    /// Entry is split among multiple volumes
    /// </summary>
    public abstract bool IsSplitAfter { get; }

    /// <inheritdoc/>
    public override string ToString()
    {
        return Key;
    }

    // The file part(s) that contain this entry's compressed data
    // (multiple parts for multi-volume archives).
    internal abstract IEnumerable<FilePart> Parts { get; }

    // True when the entry belongs to a solid block (shares a compression
    // dictionary with preceding entries).
    internal bool IsSolid { get; set; }

    // Hook for subclasses holding per-entry resources; default is a no-op.
    internal virtual void Close()
    {
    }

    /// <summary>
    /// Entry file attribute.
    /// </summary>
    public virtual int? Attrib => throw new NotImplementedException();
}
}

View File

@ -1,92 +0,0 @@
using System;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
/// <summary>
/// Read-only, forward-only stream over a single archive entry. Disposing the
/// stream drains any unread bytes so the owning reader stays positioned at
/// the next entry.
/// </summary>
public class EntryStream : Stream
{
    private readonly IReader _reader;
    private readonly Stream _stream;
    private bool _completed;  // true once the entry's data is fully consumed (or skipped)
    private bool _isDisposed;

    internal EntryStream(IReader reader, Stream stream)
    {
        _reader = reader;
        _stream = stream;
    }

    /// <summary>
    /// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
    /// </summary>
    public void SkipEntry()
    {
        this.Skip();
        _completed = true;
    }

    protected override void Dispose(bool disposing)
    {
        // Drain the remaining entry bytes unless the entry was fully read or
        // the reader was cancelled; otherwise the reader would be mispositioned.
        if (!(_completed || _reader.Cancelled))
        {
            SkipEntry();
        }
        if (_isDisposed)
        {
            return;
        }
        _isDisposed = true;
        base.Dispose(disposing);
        _stream.Dispose();
    }

    public override bool CanRead => true;

    public override bool CanSeek => false;

    public override bool CanWrite => false;

    public override void Flush()
    {
        // No-op: the stream is read-only.
    }

    public override long Length => _stream.Length;

    public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }

    public override int Read(byte[] buffer, int offset, int count)
    {
        int read = _stream.Read(buffer, offset, count);
        // Only a genuine end-of-stream marks the entry complete. A zero-count
        // request legitimately returns 0 without reaching EOF, so it must not
        // flag completion (which would make Dispose skip draining the entry).
        if (read <= 0 && count != 0)
        {
            _completed = true;
        }
        return read;
    }

    public override int ReadByte()
    {
        int value = _stream.ReadByte();
        if (value == -1)
        {
            _completed = true;
        }
        return value;
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotSupportedException();
    }
}
}

View File

@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Base exception for failures that occur while extracting entry data.
/// </summary>
public class ExtractionException : Exception
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public ExtractionException(string message)
        : base(message)
    {
    }

    /// <summary>Initializes the exception with a description and the underlying cause.</summary>
    public ExtractionException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,98 +0,0 @@
#if !NO_FILE
using System;
using System.IO;
#endif
namespace SharpCompress.Common
{
/// <summary>
/// Shared helpers for extracting entries to the file system, including the
/// path-containment checks that prevent entries from escaping the destination
/// directory ("Zip Slip").
/// </summary>
internal static class ExtractionMethods
{
#if !NO_FILE
    // Ensures the path ends with a directory separator so prefix checks cannot
    // be fooled by sibling directories that merely share a prefix
    // (e.g. "C:\dest" must not match "C:\dest-evil\x").
    private static string WithTrailingSeparator(string path)
    {
        return path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
            ? path
            : path + Path.DirectorySeparatorChar;
    }

    // True when candidate is the destination root itself or lies strictly
    // inside it. Both arguments must already be full paths.
    private static bool IsWithinDestination(string fullDestinationDirectoryPath, string candidate)
    {
        return string.Equals(candidate, fullDestinationDirectoryPath, StringComparison.Ordinal)
               || candidate.StartsWith(WithTrailingSeparator(fullDestinationDirectoryPath), StringComparison.Ordinal);
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteEntryToDirectory(IEntry entry, string destinationDirectory,
                                             ExtractionOptions options, Action<string, ExtractionOptions> write)
    {
        string destinationFileName;
        string file = Path.GetFileName(entry.Key);
        string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
        options = options ?? new ExtractionOptions()
        {
            Overwrite = true
        };
        if (options.ExtractFullPath)
        {
            string folder = Path.GetDirectoryName(entry.Key);
            string destdir = Path.GetFullPath(
                Path.Combine(fullDestinationDirectoryPath, folder)
            );
            if (!Directory.Exists(destdir))
            {
                // Entry keys containing ".." must not create directories
                // outside the destination root.
                if (!IsWithinDestination(fullDestinationDirectoryPath, destdir))
                {
                    throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
                }
                Directory.CreateDirectory(destdir);
            }
            destinationFileName = Path.Combine(destdir, file);
        }
        else
        {
            destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
        }
        if (!entry.IsDirectory)
        {
            destinationFileName = Path.GetFullPath(destinationFileName);
            // Re-check after normalization: the final file path must stay
            // inside the destination root.
            if (!IsWithinDestination(fullDestinationDirectoryPath, destinationFileName))
            {
                throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
            }
            write(destinationFileName, options);
        }
        else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
        {
            Directory.CreateDirectory(destinationFileName);
        }
    }

    /// <summary>
    /// Writes a single entry to <paramref name="destinationFileName"/>: symlink
    /// entries are delegated to ExtractionOptions.WriteSymbolicLink, regular
    /// entries are written via <paramref name="openAndWrite"/> with the file
    /// mode implied by the Overwrite option.
    /// </summary>
    public static void WriteEntryToFile(IEntry entry, string destinationFileName,
                                        ExtractionOptions options,
                                        Action<string, FileMode> openAndWrite)
    {
        // Normalize options up front so a null options argument cannot cause a
        // NullReferenceException on the symlink path below.
        options = options ?? new ExtractionOptions()
        {
            Overwrite = true
        };
        if (entry.LinkTarget != null)
        {
            if (null == options.WriteSymbolicLink)
            {
                throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
            }
            options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
        }
        else
        {
            FileMode fm = FileMode.Create;
            if (!options.Overwrite)
            {
                // Fail instead of clobbering an existing file.
                fm = FileMode.CreateNew;
            }
            openAndWrite(destinationFileName, fm);
            entry.PreserveExtractionOptions(destinationFileName, options);
        }
    }
#endif
}
}

View File

@ -1,34 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>
/// Options controlling how entries are written to disk during extraction.
/// </summary>
public class ExtractionOptions
{
    /// <summary>
    /// overwrite target if it exists
    /// </summary>
    public bool Overwrite { get; set; }

    /// <summary>
    /// extract with internal directory structure
    /// </summary>
    public bool ExtractFullPath { get; set; }

    /// <summary>
    /// preserve file time
    /// </summary>
    public bool PreserveFileTime { get; set; }

    /// <summary>
    /// preserve windows file attributes
    /// </summary>
    public bool PreserveAttributes { get; set; }

    /// <summary>
    /// Delegate for writing symbolic links to disk.
    /// sourcePath is where the symlink is created.
    /// targetPath is what the symlink refers to.
    /// </summary>
    public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);

    // When null, extracting a symlink entry is an error (see ExtractionMethods).
    public SymbolicLinkWriterDelegate WriteSymbolicLink;
}
}

View File

@ -1,20 +0,0 @@
using System.IO;
namespace SharpCompress.Common
{
public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@ -1,22 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Event payload raised when extraction of a file part begins.
/// </summary>
public class FilePartExtractionBeginEventArgs : EventArgs
{
    /// <summary>
    /// File name for the part for the current entry
    /// </summary>
    public string Name { get; internal set; }

    /// <summary>
    /// Uncompressed size of the current entry in the part
    /// </summary>
    public long Size { get; internal set; }

    /// <summary>
    /// Compressed size of the current entry in the part
    /// </summary>
    public long CompressedSize { get; internal set; }
}
}

View File

@ -1,108 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Helpers for testing and setting bit flags on integral and enum values.
/// </summary>
internal static class FlagUtility
{
    /// <summary>
    /// Returns true if the flag is set on the specified 64-bit bit field.
    /// </summary>
    /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to test</param>
    /// <returns>true if all bits of <paramref name="flag"/> are set</returns>
    public static bool HasFlag<T>(long bitField, T flag)
        where T : struct
    {
        // BUG FIX: previously called HasFlag(bitField, flag), which resolved
        // back to this same generic overload and recursed until StackOverflow.
        // Convert the flag so the non-generic (long, long) overload is chosen.
        return HasFlag(bitField, Convert.ToInt64(flag));
    }

    /// <summary>
    /// Returns true if the flag is set on the specified unsigned 64-bit bit field.
    /// </summary>
    /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to test</param>
    /// <returns>true if all bits of <paramref name="flag"/> are set</returns>
    public static bool HasFlag<T>(ulong bitField, T flag)
        where T : struct
    {
        // BUG FIX: same self-recursion as the signed overload above.
        return HasFlag(bitField, Convert.ToUInt64(flag));
    }

    /// <summary>
    /// Returns true if the flag is set on the specified unsigned bit field.
    /// </summary>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to test</param>
    /// <returns>true if all bits of <paramref name="flag"/> are set</returns>
    public static bool HasFlag(ulong bitField, ulong flag)
    {
        return ((bitField & flag) == flag);
    }

    /// <summary>
    /// Returns true if the flag is set on the specified 16-bit bit field.
    /// </summary>
    public static bool HasFlag(short bitField, short flag)
    {
        return ((bitField & flag) == flag);
    }

    /// <summary>
    /// Returns true if the flag is set on the specified enum bit field.
    /// </summary>
    /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to test</param>
    /// <returns>true if all bits of <paramref name="flag"/> are set</returns>
    public static bool HasFlag<T>(T bitField, T flag)
        where T : struct
    {
        return HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
    }

    /// <summary>
    /// Returns true if the flag is set on the specified bit field.
    /// </summary>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to test</param>
    /// <returns>true if all bits of <paramref name="flag"/> are set</returns>
    public static bool HasFlag(long bitField, long flag)
    {
        return ((bitField & flag) == flag);
    }

    /// <summary>
    /// Sets a bit-field to either on or off for the specified flag.
    /// </summary>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to change</param>
    /// <param name="on">bool</param>
    /// <returns>The flagged variable with the flag changed</returns>
    public static long SetFlag(long bitField, long flag, bool on)
    {
        if (on)
        {
            return bitField | flag;
        }
        return bitField & (~flag);
    }

    /// <summary>
    /// Sets a bit-field to either on or off for the specified flag.
    /// </summary>
    /// <typeparam name="T">Enumeration with Flags attribute</typeparam>
    /// <param name="bitField">Flagged variable</param>
    /// <param name="flag">Flag to change</param>
    /// <param name="on">bool</param>
    /// <returns>The flagged variable with the flag changed</returns>
    public static long SetFlag<T>(T bitField, T flag, bool on)
        where T : struct
    {
        return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
    }
}
}

View File

@ -1,50 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
/// <summary>
/// The single entry of a gzip stream (gzip wraps exactly one file).
/// </summary>
public class GZipEntry : Entry
{
    private readonly GZipFilePart _filePart;

    internal GZipEntry(GZipFilePart filePart)
    {
        _filePart = filePart;
    }

    public override CompressionType CompressionType => CompressionType.GZip;

    // NOTE(review): gzip stores CRC32 and uncompressed size in the stream
    // trailer, which is not parsed here — so Crc, CompressedSize and Size
    // are all reported as 0.
    public override long Crc => 0;

    // Entry name from the gzip FNAME header field, if present.
    public override string Key => _filePart.FilePartName;

    public override string LinkTarget => null;

    public override long CompressedSize => 0;

    public override long Size => 0;

    // Modification time from the gzip MTIME header field.
    public override DateTime? LastModifiedTime => _filePart.DateModified;

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();

    // A gzip stream always yields exactly one entry.
    internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
    {
        yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
    }
}
}

View File

@ -1,120 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
/// <summary>
/// Parses the gzip member header (RFC 1952) on construction and exposes the
/// deflate-compressed payload that follows it.
/// </summary>
internal class GZipFilePart : FilePart
{
    private string _name;
    private readonly Stream _stream;

    internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
        : base(archiveEncoding)
    {
        ReadAndValidateGzipHeader(stream);
        EntryStartPosition = stream.Position;
        _stream = stream;
    }

    /// <summary>Stream position of the first byte of compressed data (just past the header).</summary>
    internal long EntryStartPosition { get; }

    /// <summary>Modification time from the header's MTIME field (seconds since Unix epoch).</summary>
    internal DateTime? DateModified { get; private set; }

    // Original file name from the FNAME field; null when the flag was absent.
    internal override string FilePartName => _name;

    internal override Stream GetCompressedStream()
    {
        return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
    }

    internal override Stream GetRawStream()
    {
        return _stream;
    }

    /// <summary>
    /// Reads the fixed 10-byte gzip header plus any optional fields indicated
    /// by the FLG byte (header[3]), leaving the stream positioned at the
    /// compressed data. Throws ZlibException on malformed input.
    /// </summary>
    private void ReadAndValidateGzipHeader(Stream stream)
    {
        // read the header on the first read
        byte[] header = new byte[10];
        int n = stream.Read(header, 0, header.Length);

        // workitem 8501: handle edge case (decompress empty stream)
        if (n == 0)
        {
            return;
        }
        if (n != 10)
        {
            throw new ZlibException("Not a valid GZIP stream.");
        }
        // Magic bytes 1F 8B, compression method 8 (deflate).
        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            throw new ZlibException("Bad GZIP header.");
        }

        Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
        DateModified = TarHeader.EPOCH.AddSeconds(timet);

        // FEXTRA: 2-byte little-endian length followed by that many bytes.
        if ((header[3] & 0x04) == 0x04)
        {
            n = stream.Read(header, 0, 2); // 2-byte length field
            if (n != 2)
            {
                throw new ZlibException("Unexpected end-of-file reading GZIP header.");
            }
            Int16 extraLength = (Int16)(header[0] + header[1] * 256);
            byte[] extra = new byte[extraLength];
            if (!stream.ReadFully(extra))
            {
                throw new ZlibException("Unexpected end-of-file reading GZIP header.");
            }
            n = extraLength;
        }
        // FNAME: zero-terminated original file name.
        if ((header[3] & 0x08) == 0x08)
        {
            _name = ReadZeroTerminatedString(stream);
        }
        // FCOMMENT: zero-terminated comment; read and discard.
        if ((header[3] & 0x10) == 0x010)
        {
            ReadZeroTerminatedString(stream);
        }
        // FHCRC: per RFC 1952 this is a TWO-byte CRC16 of the header.
        // Previously only one byte was consumed, leaving the stream
        // misaligned by one byte before the deflate data.
        if ((header[3] & 0x02) == 0x02)
        {
            stream.ReadByte(); // CRC16 low byte, ignore
            stream.ReadByte(); // CRC16 high byte, ignore
        }
    }

    /// <summary>
    /// Reads bytes up to (and consuming) a NUL terminator and decodes them
    /// with the configured ArchiveEncoding.
    /// </summary>
    private string ReadZeroTerminatedString(Stream stream)
    {
        byte[] buf1 = new byte[1];
        var list = new List<byte>();
        bool done = false;
        do
        {
            // workitem 7740
            int n = stream.Read(buf1, 0, 1);
            if (n != 1)
            {
                throw new ZlibException("Unexpected EOF reading GZIP header.");
            }
            if (buf1[0] == 0)
            {
                done = true;
            }
            else
            {
                list.Add(buf1[0]);
            }
        }
        while (!done);
        byte[] buffer = list.ToArray();
        return ArchiveEncoding.Decode(buffer);
    }
}
}

View File

@ -1,25 +0,0 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
{
/// <summary>
/// Volume wrapper for a gzip stream (gzip has a single volume).
/// </summary>
public class GZipVolume : Volume
{
    public GZipVolume(Stream stream, ReaderOptions options)
        : base(stream, options)
    {
    }

#if !NO_FILE
    public GZipVolume(FileInfo fileInfo, ReaderOptions options)
        : base(fileInfo.OpenRead(), options)
    {
        // The stream was opened here, so this volume owns it and must close it.
        options.LeaveStreamOpen = false;
    }
#endif

    public override bool IsFirstVolume => true;

    // NOTE(review): reported as true even though gzip is single-volume —
    // presumably relied on by callers; confirm before changing.
    public override bool IsMultiVolume => true;
}
}

View File

@ -1,51 +0,0 @@

#if !NO_FILE
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
internal static class EntryExtensions
{
    /// <summary>
    /// Applies the entry's recorded timestamps and (Windows) file attributes
    /// to the freshly extracted file, as requested by the options.
    /// </summary>
    internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
                                                   ExtractionOptions options)
    {
        if (options.PreserveFileTime || options.PreserveAttributes)
        {
            FileInfo nf = new FileInfo(destinationFileName);
            if (!nf.Exists)
            {
                // Nothing was written (e.g. symlink handled elsewhere); nothing to preserve.
                return;
            }

            // update file time to original packed time
            if (options.PreserveFileTime)
            {
                if (entry.CreatedTime.HasValue)
                {
                    nf.CreationTime = entry.CreatedTime.Value;
                }
                if (entry.LastModifiedTime.HasValue)
                {
                    nf.LastWriteTime = entry.LastModifiedTime.Value;
                }
                if (entry.LastAccessedTime.HasValue)
                {
                    nf.LastAccessTime = entry.LastAccessedTime.Value;
                }
            }

            if (options.PreserveAttributes)
            {
                if (entry.Attrib.HasValue)
                {
                    nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
                }
            }
        }
    }
}
}
#endif

View File

@ -1,22 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Metadata contract for a single archive entry (file or directory).
/// </summary>
public interface IEntry
{
    /// <summary>Compression algorithm used for this entry's data.</summary>
    CompressionType CompressionType { get; }

    /// <summary>Time the entry was archived, if recorded.</summary>
    DateTime? ArchivedTime { get; }

    /// <summary>Compressed size of the entry's data.</summary>
    long CompressedSize { get; }

    /// <summary>The entry's 32-bit CRC hash.</summary>
    long Crc { get; }

    /// <summary>Creation time, if recorded.</summary>
    DateTime? CreatedTime { get; }

    /// <summary>Path/name of the entry inside the archive.</summary>
    string Key { get; }

    /// <summary>Symlink target; null when the entry is not a symlink.</summary>
    string LinkTarget { get; }

    /// <summary>True when the entry is a directory.</summary>
    bool IsDirectory { get; }

    /// <summary>True when the entry is encrypted and requires a password.</summary>
    bool IsEncrypted { get; }

    /// <summary>True when the entry continues in a following volume.</summary>
    bool IsSplitAfter { get; }

    /// <summary>Last access time, if recorded.</summary>
    DateTime? LastAccessedTime { get; }

    /// <summary>Last modification time, if recorded.</summary>
    DateTime? LastModifiedTime { get; }

    /// <summary>Uncompressed size of the entry's data.</summary>
    long Size { get; }

    /// <summary>Raw file attribute bits, if recorded.</summary>
    int? Attrib { get; }
}
}

View File

@ -1,8 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>
/// Receives progress notifications while an entry's parts are extracted.
/// </summary>
internal interface IExtractionListener
{
    // Signals that extraction of a named file part is starting.
    void FireFilePartExtractionBegin(string name, long size, long compressedSize);

    // Reports cumulative compressed bytes consumed for the current part and entry.
    void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}
}

View File

@ -1,12 +0,0 @@
using System;
#if !NO_FILE
using System.IO;
#endif
namespace SharpCompress.Common
{
/// <summary>
/// Marker for a disposable archive volume (one physical stream/file of an archive).
/// </summary>
public interface IVolume : IDisposable
{
}
}

View File

@ -1,10 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when an archive ends before all expected data/headers were read.
/// </summary>
public class IncompleteArchiveException : ArchiveException
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public IncompleteArchiveException(string message)
        : base(message)
    {
    }
}
}

View File

@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when archive data does not match the expected format.
/// </summary>
public class InvalidFormatException : ExtractionException
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public InvalidFormatException(string message)
        : base(message)
    {
    }

    /// <summary>Initializes the exception with a description and the underlying cause.</summary>
    public InvalidFormatException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when extraction fails because required additional volumes are
/// missing or unreadable.
/// </summary>
public class MultiVolumeExtractionException : ExtractionException
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public MultiVolumeExtractionException(string message)
        : base(message)
    {
    }

    /// <summary>Initializes the exception with a description and the underlying cause.</summary>
    public MultiVolumeExtractionException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,10 +0,0 @@
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when an entry spans multiple volumes but only a single stream was supplied.
/// </summary>
public class MultipartStreamRequiredException : ExtractionException
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public MultipartStreamRequiredException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes the exception with a description and the underlying cause,
    /// for consistency with the other ExtractionException subclasses.
    /// </summary>
    public MultipartStreamRequiredException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,13 +0,0 @@

namespace SharpCompress.Common
{
/// <summary>
/// Common options shared by readers, writers and archives.
/// </summary>
public class OptionsBase
{
    /// <summary>
    /// SharpCompress will keep the supplied streams open. Default is true.
    /// </summary>
    public bool LeaveStreamOpen { get; set; } = true;

    /// <summary>Encodings used for entry names and passwords; defaults to code page 437.</summary>
    public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}
}

View File

@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
/// <summary>
/// Thrown when an entry requires a password that was not supplied or was wrong.
/// </summary>
public class PasswordProtectedException : ExtractionException
{
    /// <summary>Initializes the exception with a description of the failure.</summary>
    public PasswordProtectedException(string message)
        : base(message)
    {
    }

    /// <summary>Initializes the exception with a description and the underlying cause.</summary>
    public PasswordProtectedException(string message, Exception inner)
        : base(message, inner)
    {
    }
}
}

View File

@ -1,30 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// RAR4 authenticity-verification (AV) record. This record type does not
/// exist in RAR5, so encountering it in a RAR5 stream is a format error.
/// </summary>
internal class AvHeader : RarHeader
{
    public AvHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Av)
    {
        if (IsRar5)
            throw new InvalidFormatException("unexpected rar5 record");
    }

    // Reads the AV-specific fields that follow the common header fields.
    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        UnpackVersion = reader.ReadByte();
        Method = reader.ReadByte();
        AvVersion = reader.ReadByte();
        AvInfoCrc = reader.ReadInt32();
    }

    // Raw fields as stored in the record; not interpreted further here.
    internal int AvInfoCrc { get; private set; }
    internal byte UnpackVersion { get; private set; }
    internal byte Method { get; private set; }
    internal byte AvVersion { get; private set; }
}
}

View File

@ -1,57 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// RAR5 archive encryption header: holds the KDF parameters (PBKDF2 salt and
/// iteration count) and optional password-check values used to decrypt the
/// headers that follow.
/// </summary>
internal class ArchiveCryptHeader : RarHeader
{
    private const int CRYPT_VERSION = 0; // Supported encryption version.
    private const int SIZE_SALT50 = 16;
    private const int SIZE_SALT30 = 8;
    private const int SIZE_INITV = 16;
    private const int SIZE_PSWCHECK = 8;
    private const int SIZE_PSWCHECK_CSUM = 4;
    private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PDKDF2 iteration count.
    private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.

    private bool _usePswCheck;   // header carries password-check values
    private uint _lg2Count;      // Log2 of PBKDF2 repetition count.
    private byte[] _salt;        // PBKDF2 salt (16 bytes for RAR5)
    private byte[] _pswCheck;    // quick password-check value
    private byte[] _pswCheckCsm; // checksum of the password-check value

    public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Crypt)
    {
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        var cryptVersion = reader.ReadRarVIntUInt32();
        if (cryptVersion > CRYPT_VERSION)
        {
            // Unknown future version: silently stop parsing this record.
            //error?
            return;
        }
        var encryptionFlags = reader.ReadRarVIntUInt32();
        _usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
        _lg2Count = reader.ReadRarVIntByte(1);

        //UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
        if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
        {
            // Iteration count beyond the accepted maximum: stop parsing.
            //error?
            return;
        }
        _salt = reader.ReadBytes(SIZE_SALT50);
        if (_usePswCheck)
        {
            _pswCheck = reader.ReadBytes(SIZE_PSWCHECK);
            _pswCheckCsm = reader.ReadBytes(SIZE_PSWCHECK_CSUM);
        }
    }
}
}

View File

@ -1,81 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// The main archive header, parsed for both RAR4 and RAR5 layouts. Exposes
/// archive-wide properties such as volume/solid/encryption flags.
/// </summary>
internal class ArchiveHeader : RarHeader
{
    public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Archive)
    {
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            Flags = reader.ReadRarVIntUInt16();
            if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
            {
                VolumeNumber = (int)reader.ReadRarVIntUInt32();
            }
            // later: we may have a locator record if we need it
            //if (ExtraSize != 0) {
            //    ReadLocator(reader);
            //}
        }
        else
        {
            // RAR4 stores the flags in the common header.
            Flags = HeaderFlags;
            HighPosAv = reader.ReadInt16();
            PosAv = reader.ReadInt32();
            if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
            {
                EncryptionVersion = reader.ReadByte();
            }
        }
    }

    // Currently unused: kept for the commented-out locator handling above.
    private void ReadLocator(MarkingBinaryReader reader) {
        var size = reader.ReadRarVIntUInt16();
        var type = reader.ReadRarVIntUInt16();
        if (type != 1) throw new InvalidFormatException("expected locator record");
        var flags = reader.ReadRarVIntUInt16();
        const ushort hasQuickOpenOffset = 0x01;
        const ushort hasRecoveryOffset = 0x02;
        ulong quickOpenOffset = 0;
        if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
            quickOpenOffset = reader.ReadRarVInt();
        }
        ulong recoveryOffset = 0;
        if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
            recoveryOffset = reader.ReadRarVInt();
        }
    }

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag)
    {
        return (Flags & flag) == flag;
    }

    internal int? VolumeNumber { get; private set; }

    internal short? HighPosAv { get; private set; }

    internal int? PosAv { get; private set; }

    private byte? EncryptionVersion { get; set; }

    // RAR5 signals encryption via a separate crypt header, so this is unknowable here.
    public bool? IsEncrypted => IsRar5 ? (bool?)null : HasFlag(ArchiveFlagsV4.PASSWORD);

    public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);

    public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);

    // RAR5: Volume number field is present. True for all volumes except first.
    public bool IsFirstVolume => IsRar5 ? VolumeNumber == null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);

    public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}
}

View File

@ -1,28 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// RAR4 comment record (compressed archive comment). Not a RAR5 record type.
/// </summary>
internal class CommentHeader : RarHeader
{
    protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Comment)
    {
        if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
    }

    // Reads the comment-specific fields following the common header.
    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        UnpSize = reader.ReadInt16();
        UnpVersion = reader.ReadByte();
        UnpMethod = reader.ReadByte();
        CommCrc = reader.ReadInt16();
    }

    // Raw record fields; the comment data itself is not decompressed here.
    internal short UnpSize { get; private set; }
    internal byte UnpVersion { get; private set; }
    internal byte UnpMethod { get; private set; }
    internal short CommCrc { get; private set; }
}
}

View File

@ -1,43 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// End-of-archive record, parsed for both RAR4 and RAR5 layouts.
/// </summary>
internal class EndArchiveHeader : RarHeader
{
    public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.EndArchive)
    {
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            Flags = reader.ReadRarVIntUInt16();
        }
        else
        {
            // RAR4 stores the flags in the common header; optional trailing
            // fields are present only when the corresponding flag is set.
            Flags = HeaderFlags;
            if (HasFlag(EndArchiveFlagsV4.DATA_CRC))
            {
                ArchiveCrc = reader.ReadInt32();
            }
            if (HasFlag(EndArchiveFlagsV4.VOLUME_NUMBER))
            {
                VolumeNumber = reader.ReadInt16();
            }
        }
    }

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag)
    {
        return (Flags & flag) == flag;
    }

    // Present only in RAR4 when the respective flags are set.
    internal int? ArchiveCrc { get; private set; }
    internal short? VolumeNumber { get; private set; }
}
}

View File

@ -1,452 +0,0 @@
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
using SharpCompress.IO;
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Rar.Headers
{
internal class FileHeader : RarHeader
{
    // Header shared by file records, RAR5 service records and RAR4 new-sub
    // records. The on-disk layout is entirely different between RAR4 and RAR5;
    // each format has its own reader below and the read order is the contract.
    private uint _fileCrc;

    public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
        : base(header, reader, headerType)
    {
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            ReadFromReaderV5(reader);
        }
        else
        {
            ReadFromReaderV4(reader);
        }
    }

    private void ReadFromReaderV5(MarkingBinaryReader reader)
    {
        Flags = reader.ReadRarVIntUInt16();
        var lvalue = checked((long)reader.ReadRarVInt());
        // long.MaxValue causes the unpack code to finish when the input stream is exhausted
        UncompressedSize = HasFlag(FileFlagsV5.UNPACKED_SIZE_UNKNOWN) ? long.MaxValue : lvalue;
        FileAttributes = reader.ReadRarVIntUInt32();
        if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
            FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
        }
        if (HasFlag(FileFlagsV5.HAS_CRC32)) {
            FileCrc = reader.ReadUInt32();
        }
        var compressionInfo = reader.ReadRarVIntUInt16();
        // Lower 6 bits (0x003f mask) contain the version of compression algorithm, resulting in possible 0 - 63 values. Current version is 0.
        // "+ 50" to not mix with old RAR format algorithms. For example,
        // we may need to use the compression algorithm 15 in the future,
        // but it was already used in RAR 1.5 and Unpack needs to distinguish
        // them.
        CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
        // 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
        // It can be set only for file headers and is never set for service headers.
        IsSolid = (compressionInfo & 0x40) == 0x40;
        // Bits 8 - 10 (0x0380 mask) define the compression method. Currently only values 0 - 5 are used. 0 means no compression.
        CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
        // Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
        WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
        HostOs = reader.ReadRarVIntByte();
        var nameSize = reader.ReadRarVIntUInt16();
        // Variable length field containing Name length bytes in UTF-8 format without trailing zero.
        // For file header this is a name of archived file. Forward slash character is used as the path separator both for Unix and Windows names.
        // Backslashes are treated as a part of name for Unix names and as invalid character for Windows file names. Type of name is defined by Host OS field.
        //
        // TODO: not sure if anything needs to be done to handle the following:
        // If Unix file name contains any high ASCII characters which cannot be correctly converted to Unicode and UTF-8
        // we map such characters to to 0xE080 - 0xE0FF private use Unicode area and insert 0xFFFE Unicode non-character
        // to resulting string to indicate that it contains mapped characters, which need to be converted back when extracting.
        // Concrete position of 0xFFFE is not defined, we need to search the entire string for it. Such mapped names are not
        // portable and can be correctly unpacked only on the same system where they were created.
        //
        // For service header this field contains a name of service header. Now the following names are used:
        // CMT Archive comment
        // QO Archive quick open data
        // ACL NTFS file permissions
        // STM NTFS alternate data stream
        // RR Recovery record
        var b = reader.ReadBytes(nameSize);
        FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
        // extra size seems to be redudant since we know the total header size
        if (ExtraSize != RemainingHeaderBytes(reader))
        {
            throw new InvalidFormatException("rar5 header size / extra size inconsistency");
        }
        isEncryptedRar5 = false;
        // Extra area: a sequence of (size, type, payload) records. Unknown
        // record types are skipped so future format additions are tolerated.
        while (RemainingHeaderBytes(reader) > 0) {
            var size = reader.ReadRarVIntUInt16();
            int n = RemainingHeaderBytes(reader);
            var type = reader.ReadRarVIntUInt16();
            switch (type) {
                //TODO
                case 1: // file encryption
                    {
                        isEncryptedRar5 = true;
                        //var version = reader.ReadRarVIntByte();
                        //if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
                    }
                    break;
                // case 2: // file hash
                // {
                //
                // }
                // break;
                case 3: // file time
                    {
                        // Bit 0 selects the timestamp format (0 = Windows FILETIME,
                        // 1 = Unix time); bits 1-3 gate the three optional stamps.
                        ushort flags = reader.ReadRarVIntUInt16();
                        var isWindowsTime = (flags & 1) == 0;
                        if ((flags & 0x2) == 0x2) {
                            FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                        }
                        if ((flags & 0x4) == 0x4) {
                            FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                        }
                        if ((flags & 0x8) == 0x8) {
                            FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                        }
                    }
                    break;
                //TODO
                // case 4: // file version
                // {
                //
                // }
                // break;
                // case 5: // file system redirection
                // {
                //
                // }
                // break;
                // case 6: // unix owner
                // {
                //
                // }
                // break;
                // case 7: // service data
                // {
                //
                // }
                // break;
                default:
                    // skip unknown record types to allow new record types to be added in the future
                    break;
            }
            // drain any trailing bytes of extra record
            int did = n - RemainingHeaderBytes(reader);
            int drain = size - did;
            if (drain > 0)
            {
                reader.ReadBytes(drain);
            }
        }
        if (AdditionalDataSize != 0) {
            CompressedSize = AdditionalDataSize;
        }
    }

    private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
    {
        if (isWindowsTime)
        {
            // 64-bit Windows FILETIME
            return DateTime.FromFileTime(reader.ReadInt64());
        }
        else
        {
            // 32-bit Unix timestamp
            return Utility.UnixTimeToDateTime(reader.ReadUInt32());
        }
    }

    // RAR5 names always use '/' as separator; map to the platform separator.
    private static string ConvertPathV5(string path)
    {
#if NO_FILE
        // not sure what to do here
        throw new NotImplementedException("TODO");
#else
        if (Path.DirectorySeparatorChar == '\\')
        {
            // replace embedded \\ with valid filename char
            return path.Replace('\\', '-').Replace('/', '\\');
        }
        return path;
#endif
    }

    private void ReadFromReaderV4(MarkingBinaryReader reader)
    {
        Flags = HeaderFlags;
        IsSolid = HasFlag(FileFlagsV4.SOLID);
        WindowSize = IsDirectory ? 0U : ((size_t)0x10000) << ((Flags & FileFlagsV4.WINDOW_MASK) >> 5);
        uint lowUncompressedSize = reader.ReadUInt32();
        HostOs = reader.ReadByte();
        FileCrc = reader.ReadUInt32();
        FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadUInt32());
        CompressionAlgorithm = reader.ReadByte();
        // method byte is stored as ASCII '0'..'5'
        CompressionMethod = (byte)(reader.ReadByte() - 0x30);
        short nameSize = reader.ReadInt16();
        FileAttributes = reader.ReadUInt32();
        uint highCompressedSize = 0;
        uint highUncompressedkSize = 0;
        if (HasFlag(FileFlagsV4.LARGE))
        {
            // 64-bit sizes: high halves follow the fixed fields
            highCompressedSize = reader.ReadUInt32();
            highUncompressedkSize = reader.ReadUInt32();
        }
        else
        {
            if (lowUncompressedSize == 0xffffffff)
            {
                // NOTE(review): the low-word assignment is a no-op (it already
                // equals 0xffffffff); the intent appears to be "size unknown,
                // use a huge sentinel" — confirm against unrar before changing.
                lowUncompressedSize = 0xffffffff;
                highUncompressedkSize = int.MaxValue;
            }
        }
        CompressedSize = UInt32To64(highCompressedSize, checked((uint)AdditionalDataSize));
        UncompressedSize = UInt32To64(highUncompressedkSize, lowUncompressedSize);
        // clamp name length to 4 KB as a sanity limit
        nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
        byte[] fileNameBytes = reader.ReadBytes(nameSize);
        const int saltSize = 8;
        const int newLhdSize = 32;
        switch (HeaderCode)
        {
            case HeaderCodeV.RAR4_FILE_HEADER:
                {
                    if (HasFlag(FileFlagsV4.UNICODE))
                    {
                        // Name may be "ansi\0packed-unicode": if a NUL is found
                        // before the end, the remainder is the packed encoding.
                        int length = 0;
                        while (length < fileNameBytes.Length
                               && fileNameBytes[length] != 0)
                        {
                            length++;
                        }
                        if (length != nameSize)
                        {
                            length++;
                            FileName = FileNameDecoder.Decode(fileNameBytes, length);
                        }
                        else
                        {
                            FileName = ArchiveEncoding.Decode(fileNameBytes);
                        }
                    }
                    else
                    {
                        FileName = ArchiveEncoding.Decode(fileNameBytes);
                    }
                    FileName = ConvertPathV4(FileName);
                }
                break;
            case HeaderCodeV.RAR4_NEW_SUB_HEADER:
                {
                    // Sub-header payload (everything after the fixed fields and name).
                    int datasize = HeaderSize - newLhdSize - nameSize;
                    if (HasFlag(FileFlagsV4.SALT))
                    {
                        datasize -= saltSize;
                    }
                    if (datasize > 0)
                    {
                        SubData = reader.ReadBytes(datasize);
                    }
                    if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
                    {
                        // recovery record: sector count is a little-endian int at offset 8
                        RecoverySectors = SubData[8] + (SubData[9] << 8)
                                          + (SubData[10] << 16) + (SubData[11] << 24);
                    }
                }
                break;
        }
        if (HasFlag(FileFlagsV4.SALT))
        {
            R4Salt = reader.ReadBytes(saltSize);
        }
        if (HasFlag(FileFlagsV4.EXT_TIME))
        {
            // verify that the end of the header hasn't been reached before reading the Extended Time.
            // some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
            if (RemainingHeaderBytes(reader) >= 2)
            {
                ushort extendedFlags = reader.ReadUInt16();
                FileLastModifiedTime = ProcessExtendedTimeV4(extendedFlags, FileLastModifiedTime, reader, 0);
                FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
                FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
                FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
            }
        }
    }

    // Combines a high and low 32-bit word into a 64-bit value.
    private static long UInt32To64(uint x, uint y)
    {
        long l = x;
        l <<= 32;
        return l + y;
    }

    // Decodes one of the four optional RAR4 extended timestamps; `i` selects
    // which 4-bit nibble of `extendedFlags` describes this stamp.
    private static DateTime? ProcessExtendedTimeV4(ushort extendedFlags, DateTime? time, MarkingBinaryReader reader, int i)
    {
        uint rmode = (uint)extendedFlags >> (3 - i) * 4;
        if ((rmode & 8) == 0)
        {
            // stamp not present
            return null;
        }
        if (i != 0)
        {
            // stamps other than mtime carry their own DOS base time
            uint dosTime = reader.ReadUInt32();
            time = Utility.DosDateToDateTime(dosTime);
        }
        if ((rmode & 4) == 0)
        {
            time = time.Value.AddSeconds(1);
        }
        uint nanosecondHundreds = 0;
        int count = (int)rmode & 3;
        for (int j = 0; j < count; j++)
        {
            byte b = reader.ReadByte();
            nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
        }
        //10^-7 to 10^-3
        return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
    }

    // RAR4 names may use either separator; normalize to the platform's.
    private static string ConvertPathV4(string path)
    {
#if NO_FILE
        return path.Replace('\\', '/');
#else
        if (Path.DirectorySeparatorChar == '/')
        {
            return path.Replace('\\', '/');
        }
        else if (Path.DirectorySeparatorChar == '\\')
        {
            return path.Replace('/', '\\');
        }
        return path;
#endif
    }

    public override string ToString()
    {
        return FileName;
    }

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag)
    {
        return (Flags & flag) == flag;
    }

    internal uint FileCrc
    {
        get {
            if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32)) {
                //!!! rar5:
                throw new InvalidOperationException("TODO rar5");
            }
            return _fileCrc;
        }
        private set => _fileCrc = value;
    }

    // 0 - storing
    // 1 - fastest compression
    // 2 - fast compression
    // 3 - normal compression
    // 4 - good compression
    // 5 - best compression
    internal byte CompressionMethod { get; private set; }
    internal bool IsStored => CompressionMethod == 0;

    // eg (see DoUnpack())
    //case 15: // rar 1.5 compression
    //case 20: // rar 2.x compression
    //case 26: // files larger than 2GB
    //case 29: // rar 3.x compression
    //case 50: // RAR 5.0 compression algorithm.
    internal byte CompressionAlgorithm { get; private set; }

    public bool IsSolid { get; private set; }

    // unused for UnpackV1 implementation (limitation)
    internal size_t WindowSize { get; private set; }

    // RAR4 encryption salt (present only when FileFlagsV4.SALT was set)
    internal byte[] R4Salt { get; private set; }

    private byte HostOs { get; set; }
    internal uint FileAttributes { get; private set; }
    internal long CompressedSize { get; private set; }
    internal long UncompressedSize { get; private set; }
    internal string FileName { get; private set; }
    internal byte[] SubData { get; private set; }
    internal int RecoverySectors { get; private set; }
    internal long DataStartPosition { get; set; }
    public Stream PackedStream { get; set; }

    public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);

    public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);

    private bool isEncryptedRar5 = false;
    public bool IsEncrypted => IsRar5 ? isEncryptedRar5: HasFlag(FileFlagsV4.PASSWORD);

    internal DateTime? FileLastModifiedTime { get; private set; }
    internal DateTime? FileCreatedTime { get; private set; }
    internal DateTime? FileLastAccessedTime { get; private set; }
    internal DateTime? FileArchivedTime { get; private set; }
}
}

View File

@ -1,78 +0,0 @@
using System.Text;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// This is for the crazy Rar encoding that I don't understand
/// </summary>
/// <summary>
/// Decoder for the packed RAR4 unicode filename encoding: one shared "high
/// byte" followed by a stream of 2-bit opcodes (consumed MSB-first from flag
/// bytes) that describe how each output character is reconstructed.
/// </summary>
internal static class FileNameDecoder
{
    /// <summary>Returns the byte at <paramref name="pos"/> as an unsigned value.</summary>
    internal static int GetChar(byte[] name, int pos)
    {
        return name[pos] & 0xff;
    }

    internal static string Decode(byte[] name, int encPos)
    {
        var result = new StringBuilder();
        int outPos = 0;

        // Current flag byte and how many opcode bits remain in it.
        int opcodes = 0;
        int opcodeBits = 0;

        // The first encoded byte is the default high byte for the whole name.
        int defaultHigh = GetChar(name, encPos++);

        while (encPos < name.Length)
        {
            if (opcodeBits == 0)
            {
                opcodes = GetChar(name, encPos++);
                opcodeBits = 8;
            }

            int opcode = opcodes >> 6;
            if (opcode == 0)
            {
                // Plain single byte.
                result.Append((char)GetChar(name, encPos++));
                outPos++;
            }
            else if (opcode == 1)
            {
                // Low byte combined with the shared high byte.
                result.Append((char)(GetChar(name, encPos++) + (defaultHigh << 8)));
                outPos++;
            }
            else if (opcode == 2)
            {
                // Full 16-bit character stored little-endian.
                int lo = GetChar(name, encPos);
                int hi = GetChar(name, encPos + 1);
                result.Append((char)((hi << 8) + lo));
                outPos++;
                encPos += 2;
            }
            else
            {
                // Run: copy bytes from the buffer at the decode position,
                // optionally applying a correction and the shared high byte.
                int runLength = GetChar(name, encPos++);
                if ((runLength & 0x80) != 0)
                {
                    int correction = GetChar(name, encPos++);
                    for (runLength = (runLength & 0x7f) + 2; runLength > 0 && outPos < name.Length; runLength--, outPos++)
                    {
                        int lo = (GetChar(name, outPos) + correction) & 0xff;
                        result.Append((char)((defaultHigh << 8) + lo));
                    }
                }
                else
                {
                    for (runLength += 2; runLength > 0 && outPos < name.Length; runLength--, outPos++)
                    {
                        result.Append((char)GetChar(name, outPos));
                    }
                }
            }

            // Advance to the next 2-bit opcode within the flag byte.
            opcodes = (opcodes << 2) & 0xff;
            opcodeBits -= 2;
        }
        return result.ToString();
    }
}
}

View File

@ -1,149 +0,0 @@
namespace SharpCompress.Common.Rar.Headers
{
// Logical header categories produced by the parser (not on-disk values;
// see HeaderCodeV for the raw header codes).
internal enum HeaderType : byte
{
    Null,
    Mark,
    Archive,
    File,
    Service,
    Comment,
    Av,
    Protect,
    Sign,
    NewSub,
    EndArchive,
    Crypt
}

// Raw on-disk header type codes for RAR4 (0x72-0x7b) and RAR5 (0x01-0x05).
internal static class HeaderCodeV
{
    public const byte RAR4_MARK_HEADER = 0x72;
    public const byte RAR4_ARCHIVE_HEADER = 0x73;
    public const byte RAR4_FILE_HEADER = 0x74;
    public const byte RAR4_COMMENT_HEADER = 0x75;
    public const byte RAR4_AV_HEADER = 0x76;
    public const byte RAR4_SUB_HEADER = 0x77;
    public const byte RAR4_PROTECT_HEADER = 0x78;
    public const byte RAR4_SIGN_HEADER = 0x79;
    public const byte RAR4_NEW_SUB_HEADER = 0x7a;
    public const byte RAR4_END_ARCHIVE_HEADER = 0x7b;
    public const byte RAR5_ARCHIVE_HEADER = 0x01;
    public const byte RAR5_FILE_HEADER = 0x02;
    public const byte RAR5_SERVICE_HEADER = 0x03;
    public const byte RAR5_ARCHIVE_ENCRYPTION_HEADER = 0x04;
    public const byte RAR5_END_ARCHIVE_HEADER = 0x05;
}

// RAR4 common-header flag bits.
internal static class HeaderFlagsV4
{
    public const ushort HAS_DATA = 0x8000;
}

internal static class EncryptionFlagsV5
{
    // RAR 5.0 archive encryption header specific flags.
    public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
    public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
    public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}

// RAR5 common-header flag bits.
internal static class HeaderFlagsV5
{
    public const ushort HAS_EXTRA = 0x0001;
    public const ushort HAS_DATA = 0x0002;
    public const ushort KEEP = 0x0004; // block must be kept during an update
    public const ushort SPLIT_BEFORE = 0x0008;
    public const ushort SPLIT_AFTER = 0x0010;
    public const ushort CHILD = 0x0020; // ??? Block depends on preceding file block.
    public const ushort PRESERVE_CHILD = 0x0040; // ???? Preserve a child block if host block is modified
}

// RAR4 archive-header flag bits.
internal static class ArchiveFlagsV4
{
    public const ushort VOLUME = 0x0001;
    public const ushort COMMENT = 0x0002;
    public const ushort LOCK = 0x0004;
    public const ushort SOLID = 0x0008;
    public const ushort NEW_NUMBERING = 0x0010;
    public const ushort AV = 0x0020;
    public const ushort PROTECT = 0x0040;
    public const ushort PASSWORD = 0x0080;
    public const ushort FIRST_VOLUME = 0x0100;
    public const ushort ENCRYPT_VER = 0x0200;
}

// RAR5 archive-header flag bits.
internal static class ArchiveFlagsV5
{
    public const ushort VOLUME = 0x0001;
    public const ushort HAS_VOLUME_NUMBER = 0x0002;
    public const ushort SOLID = 0x0004;
    public const ushort PROTECT = 0x0008;
    public const ushort LOCK = 0x0010;
}

// Host OS values stored in RAR4 file headers.
internal static class HostOsV4
{
    public const byte MS_DOS = 0;
    public const byte OS2 = 1;
    public const byte WIN32 = 2;
    public const byte UNIX = 3;
    public const byte MAC_OS = 4;
    public const byte BE_OS = 5;
}

// Host OS values stored in RAR5 file headers.
internal static class HostOsV5
{
    public const byte WINDOWS = 0;
    public const byte UNIX = 1;
}

// RAR4 file-header flag bits. WINDOW_MASK selects the dictionary size;
// the all-ones window value (0x00e0) doubles as the directory marker.
internal static class FileFlagsV4
{
    public const ushort SPLIT_BEFORE = 0x0001;
    public const ushort SPLIT_AFTER = 0x0002;
    public const ushort PASSWORD = 0x0004;
    public const ushort COMMENT = 0x0008;
    public const ushort SOLID = 0x0010;
    public const ushort WINDOW_MASK = 0x00e0;
    public const ushort WINDOW64 = 0x0000;
    public const ushort WINDOW128 = 0x0020;
    public const ushort WINDOW256 = 0x0040;
    public const ushort WINDOW512 = 0x0060;
    public const ushort WINDOW1024 = 0x0080;
    public const ushort WINDOW2048 = 0x00a0;
    public const ushort WINDOW4096 = 0x00c0;
    public const ushort DIRECTORY = 0x00e0;
    public const ushort LARGE = 0x0100;
    public const ushort UNICODE = 0x0200;
    public const ushort SALT = 0x0400;
    public const ushort VERSION = 0x0800;
    public const ushort EXT_TIME = 0x1000;
    public const ushort EXT_FLAGS = 0x2000;
}

// RAR5 file-header flag bits.
internal static class FileFlagsV5
{
    public const ushort DIRECTORY = 0x0001;
    public const ushort HAS_MOD_TIME = 0x0002;
    public const ushort HAS_CRC32 = 0x0004;
    public const ushort UNPACKED_SIZE_UNKNOWN = 0x0008;
}

// RAR4 end-of-archive flag bits.
internal static class EndArchiveFlagsV4
{
    public const ushort NEXT_VOLUME = 0x0001;
    public const ushort DATA_CRC = 0x0002;
    public const ushort REV_SPACE = 0x0004;
    public const ushort VOLUME_NUMBER = 0x0008;
}

// RAR5 end-of-archive flag bits.
internal static class EndArchiveFlagsV5
{
    public const ushort HAS_NEXT_VOLUME = 0x0001;
}
}

View File

@ -1,7 +0,0 @@
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// Minimal contract shared by every parsed RAR header record; exposes the
/// logical category of the record (see <see cref="HeaderType"/>).
/// </summary>
internal interface IRarHeader
{
    HeaderType HeaderType { get; }
}
}

View File

@ -1,96 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class MarkHeader : IRarHeader
{
    // Maximum number of bytes to scan when the signature may be preceded by an
    // SFX executable stub.
    private const int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136

    // Declared for the pre-4 numbering format; never assigned within this class
    // (encountering that signature throws instead).
    internal bool OldNumberingFormat { get; private set; }

    // True when the RAR5 signature (trailing 01 00) was matched.
    public bool IsRar5 { get; }

    private MarkHeader(bool isRar5)
    {
        IsRar5 = isRar5;
    }

    public HeaderType HeaderType => HeaderType.Mark;

    // Reads one byte, converting the stream's -1 end marker into an exception.
    private static byte GetByte(Stream stream)
    {
        var b = stream.ReadByte();
        if (b != -1)
        {
            return (byte)b;
        }
        throw new EndOfStreamException();
    }

    /// <summary>
    /// Scans <paramref name="stream"/> forward for a RAR signature and returns
    /// the matching mark header. When <paramref name="lookForHeader"/> is false
    /// the signature must start at the current position. Throws
    /// InvalidFormatException when no signature is found, a pre-4 archive is
    /// detected, or the stream ends; the stream is disposed on failure unless
    /// <paramref name="leaveStreamOpen"/> is set.
    /// </summary>
    public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
    {
        int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
        try
        {
            int start = -1;
            var b = GetByte(stream); start++;
            while (start <= maxScanIndex)
            {
                // Rar old signature: 52 45 7E 5E
                // Rar4 signature:    52 61 72 21 1A 07 00
                // Rar5 signature:    52 61 72 21 1A 07 01 00
                // Each `continue` resumes the scan with the byte that broke the match.
                if (b == 0x52)
                {
                    b = GetByte(stream); start++;
                    if (b == 0x61)
                    {
                        b = GetByte(stream); start++;
                        if (b != 0x72) continue;
                        b = GetByte(stream); start++;
                        if (b != 0x21) continue;
                        b = GetByte(stream); start++;
                        if (b != 0x1a) continue;
                        b = GetByte(stream); start++;
                        if (b != 0x07) continue;
                        b = GetByte(stream); start++;
                        if (b == 1)
                        {
                            b = GetByte(stream); start++;
                            if (b != 0) continue;
                            return new MarkHeader(true); // Rar5
                        }
                        else if (b == 0)
                        {
                            return new MarkHeader(false); // Rar4
                        }
                    }
                    else if (b == 0x45)
                    {
                        b = GetByte(stream); start++;
                        if (b != 0x7e) continue;
                        b = GetByte(stream); start++;
                        if (b != 0x5e) continue;
                        throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
                    }
                }
                else
                {
                    b = GetByte(stream); start++;
                }
            }
        }
        catch (Exception e)
        {
            if (!leaveStreamOpen)
            {
                stream.Dispose();
            }
            throw new InvalidFormatException("Error trying to read rar signature.", e);
        }
        throw new InvalidFormatException("Rar signature not found");
    }
}
}

View File

@ -1,55 +0,0 @@
using System;
namespace SharpCompress.Common.Rar.Headers
{
/// <summary>
/// Identifier for a RAR4 "new sub" header payload, stored as a short ASCII
/// tag (eg "CMT", "RR") and compared byte-for-byte against the header's name.
/// </summary>
internal class NewSubHeaderType : IEquatable<NewSubHeaderType>
{
    // Tags recognized by this implementation; others (ACL, STM, ...) are unused.
    internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new NewSubHeaderType('C', 'M', 'T');
    internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new NewSubHeaderType('R', 'R');

    private readonly byte[] _tag;

    private NewSubHeaderType(params char[] chars)
    {
        _tag = new byte[chars.Length];
        int i = 0;
        foreach (char c in chars)
        {
            _tag[i++] = (byte)c;
        }
    }

    /// <summary>True when <paramref name="bytes"/> matches this tag exactly.</summary>
    internal bool Equals(byte[] bytes)
    {
        if (bytes.Length != _tag.Length)
        {
            return false;
        }
        for (int i = 0; i < _tag.Length; ++i)
        {
            if (_tag[i] != bytes[i])
            {
                return false;
            }
        }
        return true;
    }

    public bool Equals(NewSubHeaderType other)
    {
        return Equals(other._tag);
    }
}
}

View File

@ -1,28 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
// ProtectHeader is part of the Recovery Record feature
// ProtectHeader is part of the Recovery Record feature
internal class ProtectHeader : RarHeader
{
    // RAR4-only record; a RAR5 stream must never contain it.
    public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Protect)
    {
        if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
    }

    // Reads the fixed-layout tail, in on-disk field order.
    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        Version = reader.ReadByte();
        RecSectors = reader.ReadUInt16();
        TotalBlocks = reader.ReadUInt32();
        Mark = reader.ReadBytes(8);
    }

    // Size of the recovery data that follows this header (skipped by the factory).
    internal uint DataSize => checked((uint)AdditionalDataSize);
    internal byte Version { get; private set; }
    internal ushort RecSectors { get; private set; }
    internal uint TotalBlocks { get; private set; }
    internal byte[] Mark { get; private set; }
}
}

View File

@ -1,130 +0,0 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
// http://www.forensicswiki.org/w/images/5/5b/RARFileStructure.txt
// https://www.rarlab.com/technote.htm
// http://www.forensicswiki.org/w/images/5/5b/RARFileStructure.txt
// https://www.rarlab.com/technote.htm
internal class RarHeader : IRarHeader
{
    // Base class for all RAR header records. Parsing is two-phase: the private
    // constructor reads the fields common to every header, then a subclass
    // constructor (via the protected constructor) finishes with ReadFinish and
    // verifies the header CRC.
    private readonly HeaderType _headerType;
    private readonly bool _isRar5;

    // Returns null (instead of throwing) when the stream ends at a header
    // boundary, which is how the factory detects end-of-input.
    internal static RarHeader TryReadBase(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
    {
        try
        {
            return new RarHeader(reader, isRar5, archiveEncoding);
        }
        catch (EndOfStreamException)
        {
            return null;
        }
    }

    private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
    {
        _headerType = HeaderType.Null;
        _isRar5 = isRar5;
        ArchiveEncoding = archiveEncoding;
        if (IsRar5)
        {
            // RAR5: CRC is over everything after the size field, so the CRC is
            // reset and the reader Mark()ed accordingly.
            HeaderCrc = reader.ReadUInt32();
            reader.ResetCrc();
            HeaderSize = (int)reader.ReadRarVIntUInt32(3);
            reader.Mark();
            HeaderCode = reader.ReadRarVIntByte();
            HeaderFlags = reader.ReadRarVIntUInt16(2);
            if (HasHeaderFlag(HeaderFlagsV5.HAS_EXTRA))
            {
                ExtraSize = reader.ReadRarVIntUInt32();
            }
            if (HasHeaderFlag(HeaderFlagsV5.HAS_DATA))
            {
                AdditionalDataSize = (long)reader.ReadRarVInt();
            }
        } else {
            // RAR4: fixed little-endian layout; Mark() is taken before the CRC
            // field so HeaderSize accounting includes it.
            reader.Mark();
            HeaderCrc = reader.ReadUInt16();
            reader.ResetCrc();
            HeaderCode = reader.ReadByte();
            HeaderFlags = reader.ReadUInt16();
            HeaderSize = reader.ReadInt16();
            if (HasHeaderFlag(HeaderFlagsV4.HAS_DATA))
            {
                AdditionalDataSize = reader.ReadUInt32();
            }
        }
    }

    // Promotion constructor: copies the already-parsed common fields, lets the
    // subclass read its specific tail, drains any leftover header bytes, then
    // checks the CRC over everything consumed since Mark().
    protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType) {
        _headerType = headerType;
        _isRar5 = header.IsRar5;
        HeaderCrc = header.HeaderCrc;
        HeaderCode = header.HeaderCode;
        HeaderFlags = header.HeaderFlags;
        HeaderSize = header.HeaderSize;
        ExtraSize = header.ExtraSize;
        AdditionalDataSize = header.AdditionalDataSize;
        ArchiveEncoding = header.ArchiveEncoding;
        ReadFinish(reader);
        int n = RemainingHeaderBytes(reader);
        if (n > 0)
        {
            reader.ReadBytes(n);
        }
        VerifyHeaderCrc(reader.GetCrc32());
    }

    // Header bytes not yet consumed, relative to the reader's Mark().
    protected int RemainingHeaderBytes(MarkingBinaryReader reader) {
        return checked(HeaderSize - (int)reader.CurrentReadByteCount);
    }

    // Subclasses override to read their type-specific fields.
    protected virtual void ReadFinish(MarkingBinaryReader reader)
    {
        throw new NotImplementedException();
    }

    private void VerifyHeaderCrc(uint crc32)
    {
        // RAR4 stores only the low 16 bits of the CRC32.
        var b = (IsRar5 ? crc32 : (ushort)crc32) == HeaderCrc;
        if (!b)
        {
            throw new InvalidFormatException("rar header crc mismatch");
        }
    }

    public HeaderType HeaderType => _headerType;

    protected bool IsRar5 => _isRar5;

    protected uint HeaderCrc { get; }

    internal byte HeaderCode { get; }

    protected ushort HeaderFlags { get; }

    protected bool HasHeaderFlag(ushort flag)
    {
        return (HeaderFlags & flag) == flag;
    }

    protected int HeaderSize { get; }

    internal ArchiveEncoding ArchiveEncoding { get; }

    /// <summary>
    /// Extra header size.
    /// </summary>
    protected uint ExtraSize { get; }

    /// <summary>
    /// Size of additional data (eg file contents)
    /// </summary>
    protected long AdditionalDataSize { get; }
}
}

View File

@ -1,192 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar.Headers
{
internal class RarHeaderFactory
{
    // Streams RAR headers from raw input: reads the mark header to detect the
    // format version, then parses each subsequent header, positioning (or
    // draining) the stream past any attached data so the next header follows.
    private bool _isRar5;

    internal RarHeaderFactory(StreamingMode mode, ReaderOptions options)
    {
        StreamingMode = mode;
        Options = options;
    }

    private ReaderOptions Options { get; }
    internal StreamingMode StreamingMode { get; }
    // Set once an archive-level encryption header (or encrypted archive flag)
    // has been seen; later headers are then read through a decrypting reader.
    internal bool IsEncrypted { get; private set; }

    /// <summary>
    /// Lazily yields every header in <paramref name="stream"/>, starting with
    /// the mark header and stopping after an end-of-archive header (anything
    /// after it — eg a digital signature — is deliberately left unread).
    /// </summary>
    internal IEnumerable<IRarHeader> ReadHeaders(Stream stream)
    {
        var markHeader = MarkHeader.Read(stream, Options.LeaveStreamOpen, Options.LookForHeader);
        _isRar5 = markHeader.IsRar5;
        yield return markHeader;

        RarHeader header;
        while ((header = TryReadNextHeader(stream)) != null)
        {
            yield return header;
            if (header.HeaderType == HeaderType.EndArchive)
            {
                // End of archive marker. RAR does not read anything after this header letting to use third
                // party tools to add extra information such as a digital signature to archive.
                yield break;
            }
        }
    }

    // Reads the next header, or null at end of stream.
    private RarHeader TryReadNextHeader(Stream stream)
    {
        RarCrcBinaryReader reader;
        if (!IsEncrypted)
        {
            reader = new RarCrcBinaryReader(stream);
        }
        else
        {
#if !NO_CRYPTO
            if (Options.Password == null)
            {
                throw new CryptographicException("Encrypted Rar archive has no password specified.");
            }
            reader = new RarCryptoBinaryReader(stream, Options.Password);
#else
            throw new CryptographicException("Rar encryption unsupported on this platform");
#endif
        }

        var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
        if (header == null)
        {
            return null;
        }
        // Promote the base header to its concrete type and advance the stream
        // past any payload so the next header is positioned correctly.
        switch (header.HeaderCode)
        {
            case HeaderCodeV.RAR5_ARCHIVE_HEADER:
            case HeaderCodeV.RAR4_ARCHIVE_HEADER:
                {
                    var ah = new ArchiveHeader(header, reader);
                    if (ah.IsEncrypted == true)
                    {
                        //!!! rar5 we don't know yet
                        IsEncrypted = true;
                    }
                    return ah;
                }

            case HeaderCodeV.RAR4_PROTECT_HEADER:
                {
                    var ph = new ProtectHeader(header, reader);
                    // skip the recovery record data, we do not use it.
                    switch (StreamingMode)
                    {
                        case StreamingMode.Seekable:
                            {
                                reader.BaseStream.Position += ph.DataSize;
                            }
                            break;
                        case StreamingMode.Streaming:
                            {
                                reader.BaseStream.Skip(ph.DataSize);
                            }
                            break;
                        default:
                            {
                                throw new InvalidFormatException("Invalid StreamingMode");
                            }
                    }
                    return ph;
                }

            case HeaderCodeV.RAR5_SERVICE_HEADER:
                {
                    var fh = new FileHeader(header, reader, HeaderType.Service);
                    SkipData(fh, reader);
                    return fh;
                }

            case HeaderCodeV.RAR4_NEW_SUB_HEADER:
                {
                    var fh = new FileHeader(header, reader, HeaderType.NewSub);
                    SkipData(fh, reader);
                    return fh;
                }

            case HeaderCodeV.RAR5_FILE_HEADER:
            case HeaderCodeV.RAR4_FILE_HEADER:
                {
                    var fh = new FileHeader(header, reader, HeaderType.File);
                    switch (StreamingMode)
                    {
                        case StreamingMode.Seekable:
                            {
                                // remember where the packed data starts, then jump past it
                                fh.DataStartPosition = reader.BaseStream.Position;
                                reader.BaseStream.Position += fh.CompressedSize;
                            }
                            break;
                        case StreamingMode.Streaming:
                            {
                                // expose the packed data as a bounded sub-stream,
                                // wrapped for decryption when a RAR4 salt is present
                                var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
                                if (fh.R4Salt == null)
                                {
                                    fh.PackedStream = ms;
                                }
                                else
                                {
#if !NO_CRYPTO
                                    fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
#else
                                    throw new NotSupportedException("RarCrypto not supported");
#endif
                                }
                            }
                            break;
                        default:
                            {
                                throw new InvalidFormatException("Invalid StreamingMode");
                            }
                    }
                    return fh;
                }

            case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
            case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
                {
                    return new EndArchiveHeader(header, reader);
                }

            case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
                {
                    var ch = new ArchiveCryptHeader(header, reader);
                    IsEncrypted = true;
                    return ch;
                }

            default:
                {
                    throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
                }
        }
    }

    // Advances the stream past a service/new-sub header's payload.
    private void SkipData(FileHeader fh, RarCrcBinaryReader reader) {
        switch (StreamingMode) {
            case StreamingMode.Seekable: {
                fh.DataStartPosition = reader.BaseStream.Position;
                reader.BaseStream.Position += fh.CompressedSize;
            }
                break;
            case StreamingMode.Streaming: {
                //skip the data because it's useless?
                reader.BaseStream.Skip(fh.CompressedSize);
            }
                break;
            default: {
                throw new InvalidFormatException("Invalid StreamingMode");
            }
        }
    }
}
}

View File

@ -1,26 +0,0 @@
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
internal class SignHeader : RarHeader
{
    // RAR4-only signature record; a RAR5 stream must never contain it.
    protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Sign)
    {
        if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
    }

    // Reads the fixed-layout tail, in on-disk field order.
    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        CreationTime = reader.ReadInt32();
        ArcNameSize = reader.ReadInt16();
        UserNameSize = reader.ReadInt16();
    }

    internal int CreationTime { get; private set; }
    internal short ArcNameSize { get; private set; }
    internal short UserNameSize { get; private set; }
}
}

View File

@ -1,50 +0,0 @@
using System.IO;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// Binary reader that folds every byte it reads into a running CRC32, so a
/// header's checksum can be verified after parsing. The CRC is kept in its
/// pre-inverted form and finalized (inverted) by <see cref="GetCrc32"/>.
/// </summary>
internal class RarCrcBinaryReader : MarkingBinaryReader
{
    private uint _crc;

    public RarCrcBinaryReader(Stream stream)
        : base(stream)
    {
    }

    /// <summary>Finalized CRC32 of everything read since the last reset.</summary>
    public uint GetCrc32() => ~_crc;

    /// <summary>Restarts CRC accumulation from the standard seed.</summary>
    public void ResetCrc() => _crc = 0xffffffff;

    /// <summary>Folds a single externally-obtained byte into the CRC.</summary>
    protected void UpdateCrc(byte b) => _crc = RarCRC.CheckCrc(_crc, b);

    /// <summary>Reads bytes without touching the CRC (eg ciphertext).</summary>
    protected byte[] ReadBytesNoCrc(int count) => base.ReadBytes(count);

    public override byte ReadByte()
    {
        byte value = base.ReadByte();
        _crc = RarCRC.CheckCrc(_crc, value);
        return value;
    }

    public override byte[] ReadBytes(int count)
    {
        byte[] buffer = base.ReadBytes(count);
        _crc = RarCRC.CheckCrc(_crc, buffer, 0, buffer.Length);
        return buffer;
    }
}
}

View File

@ -1,115 +0,0 @@
#if !NO_CRYPTO
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
{
internal class RarCryptoBinaryReader : RarCrcBinaryReader
{
    // CRC-tracking reader that transparently decrypts RAR4 AES data. Ciphertext
    // is consumed in 16-byte blocks; decrypted bytes are buffered in _data so
    // arbitrary-length reads can be served.
    private RarRijndael _rijndael;
    private byte[] _salt;
    private readonly string _password;
    private readonly Queue<byte> _data = new Queue<byte>();
    private long _readCount;

    public RarCryptoBinaryReader(Stream stream, string password)
        : base(stream)
    {
        _password = password;

        // coderb: not sure why this was being done at this logical point
        //SkipQueue();
        // Note: this 8-byte salt read happens before _salt is set, so it goes
        // through the non-decrypting path below.
        byte[] salt = ReadBytes(8);
        InitializeAes(salt);
    }

    // track read count ourselves rather than using the underlying stream since we buffer
    public override long CurrentReadByteCount
    {
        get => _readCount;
        protected set
        {
            // ignore
        }
    }

    public override void Mark()
    {
        _readCount = 0;
    }

    private bool UseEncryption => _salt != null;

    internal void InitializeAes(byte[] salt)
    {
        _salt = salt;
        _rijndael = RarRijndael.InitializeFrom(_password, salt);
    }

    public override byte ReadByte()
    {
        if (UseEncryption)
        {
            return ReadAndDecryptBytes(1)[0];
        }

        _readCount++;
        return base.ReadByte();
    }

    public override byte[] ReadBytes(int count)
    {
        if (UseEncryption)
        {
            return ReadAndDecryptBytes(count);
        }

        _readCount += count;
        return base.ReadBytes(count);
    }

    // Tops the queue up to at least `count` decrypted bytes (reading whole
    // 16-byte cipher blocks, bypassing the CRC), then dequeues `count` bytes,
    // feeding each into the CRC as plaintext.
    private byte[] ReadAndDecryptBytes(int count)
    {
        int queueSize = _data.Count;
        int sizeToRead = count - queueSize;

        if (sizeToRead > 0)
        {
            // round the shortfall up to the AES block size
            int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
            for (int i = 0; i < alignedSize / 16; i++)
            {
                //long ax = System.currentTimeMillis();
                byte[] cipherText = ReadBytesNoCrc(16);
                var readBytes = _rijndael.ProcessBlock(cipherText);
                foreach (var readByte in readBytes)
                    _data.Enqueue(readByte);
            }
        }

        var decryptedBytes = new byte[count];

        for (int i = 0; i < count; i++)
        {
            var b = _data.Dequeue();
            decryptedBytes[i] = b;
            UpdateCrc(b);
        }

        _readCount += count;
        return decryptedBytes;
    }

    public void ClearQueue()
    {
        _data.Clear();
    }

    // Discards buffered plaintext and moves the underlying stream past the
    // corresponding ciphertext.
    public void SkipQueue()
    {
        var position = BaseStream.Position;
        BaseStream.Position = position + _data.Count;
        ClearQueue();
    }
}
}
#endif

View File

@ -1,99 +0,0 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// Read-only decrypting wrapper over a packed RAR4 stream. Ciphertext is
/// consumed from the underlying stream in 16-byte AES blocks and the decrypted
/// bytes buffered in a queue, so callers can read arbitrary lengths.
/// </summary>
internal class RarCryptoWrapper : Stream
{
    private readonly Stream _actualStream;
    private readonly byte[] _salt;
    private RarRijndael _rijndael;
    private readonly Queue<byte> _data = new Queue<byte>();

    public RarCryptoWrapper(Stream actualStream, string password, byte[] salt)
    {
        _actualStream = actualStream;
        _salt = salt;
        _rijndael = RarRijndael.InitializeFrom(password, salt);
    }

    public override void Flush()
    {
        throw new NotSupportedException();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        if (_salt == null)
        {
            return _actualStream.Read(buffer, offset, count);
        }
        return ReadAndDecrypt(buffer, offset, count);
    }

    public int ReadAndDecrypt(byte[] buffer, int offset, int count)
    {
        int queueSize = _data.Count;
        int sizeToRead = count - queueSize;

        if (sizeToRead > 0)
        {
            // Round the shortfall up to the 16-byte AES block size.
            int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
            for (int i = 0; i < alignedSize / 16; i++)
            {
                byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
                // Stream.Read may return fewer bytes than requested, so loop
                // until the block is full (or the stream is exhausted, in which
                // case the zero-padded remainder is decrypted as-is).
                int filled = 0;
                while (filled < RarRijndael.CRYPTO_BLOCK_SIZE)
                {
                    int n = _actualStream.Read(cipherText, filled, RarRijndael.CRYPTO_BLOCK_SIZE - filled);
                    if (n <= 0)
                    {
                        break;
                    }
                    filled += n;
                }
                var readBytes = _rijndael.ProcessBlock(cipherText);
                foreach (var readByte in readBytes)
                {
                    _data.Enqueue(readByte);
                }
            }
        }

        // BUG FIX: this dequeue loop was previously nested inside the
        // `sizeToRead > 0` branch, so when the queue already held enough
        // decrypted bytes nothing was copied into `buffer` even though `count`
        // was still returned. It must run unconditionally.
        for (int i = 0; i < count; i++)
        {
            buffer[offset + i] = _data.Dequeue();
        }
        return count;
    }

    public override void Write(byte[] buffer, int offset, int count)
    {
        throw new NotSupportedException();
    }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position { get; set; }

    protected override void Dispose(bool disposing)
    {
        if (_rijndael != null)
        {
            _rijndael.Dispose();
            _rijndael = null;
        }
        base.Dispose(disposing);
    }
}
}
#endif

View File

@ -1,65 +0,0 @@
using System;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// Base class for RAR archive/reader entries; all metadata is forwarded from
/// the underlying <see cref="FileHeader"/> record.
/// </summary>
public abstract class RarEntry : Entry
{
    // The parsed file header this entry wraps; supplied by the concrete subclass.
    internal abstract FileHeader FileHeader { get; }

    /// <summary>
    /// As the V2017 port isn't complete, add this check to use the legacy Rar code.
    /// Algorithm ids 29 and 36 identify the RAR 3.x format.
    /// </summary>
    internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;

    /// <summary>
    /// The File's 32 bit CRC Hash
    /// </summary>
    public override long Crc => FileHeader.FileCrc;

    /// <summary>
    /// The path of the file internal to the Rar Archive.
    /// </summary>
    public override string Key => FileHeader.FileName;

    // RAR entries carry no link-target metadata here.
    public override string LinkTarget => null;

    /// <summary>
    /// The entry last modified time in the archive, if recorded
    /// </summary>
    public override DateTime? LastModifiedTime => FileHeader.FileLastModifiedTime;

    /// <summary>
    /// The entry create time in the archive, if recorded
    /// </summary>
    public override DateTime? CreatedTime => FileHeader.FileCreatedTime;

    /// <summary>
    /// The entry last accessed time in the archive, if recorded
    /// </summary>
    public override DateTime? LastAccessedTime => FileHeader.FileLastAccessedTime;

    /// <summary>
    /// The entry time when archived, if recorded
    /// </summary>
    public override DateTime? ArchivedTime => FileHeader.FileArchivedTime;

    /// <summary>
    /// Entry is password protected and encrypted and cannot be extracted.
    /// </summary>
    public override bool IsEncrypted => FileHeader.IsEncrypted;

    /// <summary>
    /// Entry is a directory.
    /// </summary>
    public override bool IsDirectory => FileHeader.IsDirectory;

    // True when the file's data continues in the next volume of a split archive.
    public override bool IsSplitAfter => FileHeader.IsSplitAfter;

    public override string ToString()
    {
        return string.Format("Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",
                             Key, CompressedSize, Size, Crc);
    }
}
}

View File

@ -1,27 +0,0 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// This represents a single file part that exists in a rar volume. A compressed file is one or many file parts that are spread across one or may rar parts.
/// </summary>
/// <summary>
/// This represents a single file part that exists in a rar volume. A compressed
/// file is one or many file parts that are spread across one or many rar parts.
/// </summary>
internal abstract class RarFilePart : FilePart
{
    internal RarFilePart(MarkHeader mh, FileHeader fh)
        : base(fh.ArchiveEncoding)
    {
        MarkHeader = mh;
        FileHeader = fh;
    }

    // The volume's signature header this part was found under.
    internal MarkHeader MarkHeader { get; }

    // The file header describing this part's data.
    internal FileHeader FileHeader { get; }

    // RAR parts expose no raw (undecoded) stream; subclasses provide the
    // compressed stream instead.
    internal override Stream GetRawStream()
    {
        return null;
    }
}
}

View File

@ -1,121 +0,0 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// RAR3 AES-128 decryptor: derives the key and IV from the password and salt
/// using RAR's SHA-1 based scheme, then decrypts 16-byte blocks in CBC fashion
/// (the previous ciphertext block becomes the next IV).
/// </summary>
internal class RarRijndael : IDisposable
{
    internal const int CRYPTO_BLOCK_SIZE = 16;

    private readonly string _password;
    private readonly byte[] _salt;
    private byte[] _aesInitializationVector;
    private RijndaelEngine _rijndael;

    private RarRijndael(string password, byte[] salt)
    {
        _password = password;
        _salt = salt;
    }

    private byte[] ComputeHash(byte[] input)
    {
        // FIX: SHA1 instances are IDisposable and this method is invoked
        // repeatedly from the key-derivation loop — the original leaked one
        // instance per call.
        using (var sha = SHA1.Create())
        {
            return sha.ComputeHash(input);
        }
    }

    /// <summary>
    /// Derives the AES key and IV: 2^18 rounds of SHA-1 over the interleaved
    /// password bytes + salt + round counter, sampling one IV byte every
    /// 1/16th of the rounds.
    /// </summary>
    private void Initialize()
    {
        _rijndael = new RijndaelEngine();
        _aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
        int rawLength = 2*_password.Length;
        byte[] rawPassword = new byte[rawLength + 8];
        // NOTE(review): UTF8.GetBytes is indexed per character below, which
        // matches RAR's UTF-16LE layout only for single-byte characters —
        // confirm behavior for non-ASCII passwords.
        byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
        for (int i = 0; i < _password.Length; i++)
        {
            // Interleave password bytes with zero bytes (UTF-16LE style).
            rawPassword[i*2] = passwordBytes[i];
            rawPassword[i*2 + 1] = 0;
        }
        for (int i = 0; i < _salt.Length; i++)
        {
            rawPassword[i + rawLength] = _salt[i];
        }

        const int noOfRounds = (1 << 18);
        IList<byte> bytes = new List<byte>();
        byte[] digest;

        //TODO slow code below, find ways to optimize
        for (int i = 0; i < noOfRounds; i++)
        {
            bytes.AddRange(rawPassword);
            bytes.AddRange(new[]
            {
                (byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
            });
            if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
            {
                // One IV byte is sampled from the running digest every 2^14 rounds.
                digest = ComputeHash(bytes.ToArray());
                _aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
            }
        }
        digest = ComputeHash(bytes.ToArray());
        //slow code ends

        // Fold the final digest into the 16-byte AES key (RAR's byte order).
        byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
        for (int i = 0; i < 4; i++)
        {
            for (int j = 0; j < 4; j++)
            {
                aesKey[i*4 + j] = (byte)
                    (((digest[i*4]*0x1000000) & 0xff000000 |
                      (uint) ((digest[i*4 + 1]*0x10000) & 0xff0000) |
                      (uint) ((digest[i*4 + 2]*0x100) & 0xff00) |
                      (uint) (digest[i*4 + 3] & 0xff)) >> (j*8));
            }
        }

        _rijndael.Init(false, new KeyParameter(aesKey));
    }

    /// <summary>Creates and fully initializes a decryptor for the given credentials.</summary>
    public static RarRijndael InitializeFrom(string password, byte[] salt)
    {
        var rijndael = new RarRijndael(password, salt);
        rijndael.Initialize();
        return rijndael;
    }

    /// <summary>
    /// Decrypts one 16-byte block, XORs it with the current IV (CBC), and
    /// promotes the ciphertext block to be the next IV.
    /// </summary>
    public byte[] ProcessBlock(byte[] cipherText)
    {
        var plainText = new byte[CRYPTO_BLOCK_SIZE];
        var decryptedBytes = new List<byte>();
        _rijndael.ProcessBlock(cipherText, 0, plainText, 0);

        for (int j = 0; j < plainText.Length; j++)
        {
            decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16]));
        }
        for (int j = 0; j < _aesInitializationVector.Length; j++)
        {
            _aesInitializationVector[j] = cipherText[j];
        }
        return decryptedBytes.ToArray();
    }

    public void Dispose()
    {
        // No unmanaged state; SHA1 instances are disposed per call in ComputeHash.
    }
}
}
#endif

View File

@ -1,112 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar
{
/// <summary>
/// A RarArchiveVolume is a single rar file that may or may not be a split RarArchive. A Rar Archive is one to many Rar Parts
/// </summary>
/// <summary>
/// A RarArchiveVolume is a single rar file that may or may not be a split RarArchive. A Rar Archive is one to many Rar Parts
/// </summary>
public abstract class RarVolume : Volume
{
    private readonly RarHeaderFactory _headerFactory;

    internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options)
        : base(stream, options)
    {
        _headerFactory = new RarHeaderFactory(mode, options);
    }

    // Populated lazily as GetVolumeFileParts() walks the headers.
    internal ArchiveHeader ArchiveHeader { get; private set; }

    internal StreamingMode Mode => _headerFactory.StreamingMode;

    internal abstract IEnumerable<RarFilePart> ReadFileParts();

    internal abstract RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader);

    // Lazily enumerates the volume's headers, remembering the most recent
    // mark/archive headers and yielding a file part per file header.
    internal IEnumerable<RarFilePart> GetVolumeFileParts()
    {
        MarkHeader lastMarkHeader = null;
        foreach (var header in _headerFactory.ReadHeaders(Stream))
        {
            switch (header.HeaderType)
            {
                case HeaderType.Mark:
                {
                    lastMarkHeader = header as MarkHeader;
                }
                break;
                case HeaderType.Archive:
                {
                    ArchiveHeader = header as ArchiveHeader;
                }
                break;
                case HeaderType.File:
                {
                    var fh = header as FileHeader;
                    yield return CreateFilePart(lastMarkHeader, fh);
                }
                break;
            }
        }
    }

    // Forces the archive header to be parsed (seekable mode only), then
    // rewinds so later consumers see the stream from the start.
    private void EnsureArchiveHeaderLoaded()
    {
        if (ArchiveHeader == null)
        {
            if (Mode == StreamingMode.Streaming)
            {
                throw new InvalidOperationException("ArchiveHeader should never been null in a streaming read.");
            }

            // we only want to load the archive header to avoid overhead but have to do the nasty thing and reset the stream
            GetVolumeFileParts().First();
            Stream.Position = 0;
        }
    }

    /// <summary>
    /// RarArchive is the first volume of a multi-part archive.
    /// Only Rar 3.0 format and higher
    /// </summary>
    public override bool IsFirstVolume
    {
        get
        {
            EnsureArchiveHeaderLoaded();
            return ArchiveHeader.IsFirstVolume;
        }
    }

    /// <summary>
    /// RarArchive is part of a multi-part archive.
    /// </summary>
    public override bool IsMultiVolume
    {
        get
        {
            EnsureArchiveHeaderLoaded();
            return ArchiveHeader.IsVolume;
        }
    }

    /// <summary>
    /// RarArchive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// Currently, SharpCompress cannot decompress SOLID archives.
    /// </summary>
    public bool IsSolidArchive
    {
        get
        {
            EnsureArchiveHeaderLoaded();
            return ArchiveHeader.IsSolid;
        }
    }
}
}

View File

@ -1,17 +0,0 @@
using System;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
/// <summary>
/// Event payload raised while a reader extracts an entry: carries the entry
/// itself plus optional progress information.
/// </summary>
public class ReaderExtractionEventArgs<T> : EventArgs
{
    /// <summary>The entry being extracted.</summary>
    public T Item { get; }

    /// <summary>Extraction progress tracker, or null when progress is not reported.</summary>
    public ReaderProgress ReaderProgress { get; }

    internal ReaderExtractionEventArgs(T entry, ReaderProgress readerProgress = null)
    {
        Item = entry;
        ReaderProgress = readerProgress;
    }
}
}

View File

@ -1,182 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// In-memory model of a 7z archive's header: pack streams, folders (coder
/// graphs) and file entries, plus derived lookup tables built by Fill().
/// </summary>
internal class ArchiveDatabase
{
    internal byte _majorVersion;
    internal byte _minorVersion;
    internal long _startPositionAfterHeader;
    internal long _dataStartPosition;

    // Raw tables parsed from the header.
    internal List<long> _packSizes = new List<long>();
    internal List<uint?> _packCrCs = new List<uint?>();
    internal List<CFolder> _folders = new List<CFolder>();
    internal List<int> _numUnpackStreamsVector;
    internal List<CFileItem> _files = new List<CFileItem>();

    // Derived tables (see Fill()).
    internal List<long> _packStreamStartPositions = new List<long>();
    internal List<int> _folderStartFileIndex = new List<int>();
    internal List<int> _fileIndexToFolderIndexMap = new List<int>();

    internal IPasswordProvider PasswordProvider { get; }

    public ArchiveDatabase(IPasswordProvider passwordProvider)
    {
        PasswordProvider = passwordProvider;
    }

    // Resets every table; note _numUnpackStreamsVector becomes null here.
    internal void Clear()
    {
        _packSizes.Clear();
        _packCrCs.Clear();
        _folders.Clear();
        _numUnpackStreamsVector = null;
        _files.Clear();
        _packStreamStartPositions.Clear();
        _folderStartFileIndex.Clear();
        _fileIndexToFolderIndexMap.Clear();
    }

    // NOTE(review): dereferences _numUnpackStreamsVector, which Clear() sets
    // to null — this would throw if called right after Clear(); confirm callers
    // always assign the vector before testing emptiness.
    internal bool IsEmpty()
    {
        return _packSizes.Count == 0
               && _packCrCs.Count == 0
               && _folders.Count == 0
               && _numUnpackStreamsVector.Count == 0
               && _files.Count == 0;
    }

    // Computes the cumulative start offset of each pack stream from _packSizes.
    private void FillStartPos()
    {
        _packStreamStartPositions.Clear();
        long startPos = 0;
        for (int i = 0; i < _packSizes.Count; i++)
        {
            _packStreamStartPositions.Add(startPos);
            startPos += _packSizes[i];
        }
    }

    // Builds the file-index <-> folder-index mappings. Files without a stream
    // that precede any folder content map to folder -1.
    private void FillFolderStartFileIndex()
    {
        _folderStartFileIndex.Clear();
        _fileIndexToFolderIndexMap.Clear();

        int folderIndex = 0;
        int indexInFolder = 0;
        for (int i = 0; i < _files.Count; i++)
        {
            CFileItem file = _files[i];

            bool emptyStream = !file.HasStream;

            if (emptyStream && indexInFolder == 0)
            {
                _fileIndexToFolderIndexMap.Add(-1);
                continue;
            }

            if (indexInFolder == 0)
            {
                // v3.13 incorrectly worked with empty folders
                // v4.07: Loop for skipping empty folders
                for (;;)
                {
                    if (folderIndex >= _folders.Count)
                    {
                        throw new InvalidOperationException();
                    }

                    _folderStartFileIndex.Add(i); // check it

                    if (_numUnpackStreamsVector[folderIndex] != 0)
                    {
                        break;
                    }

                    folderIndex++;
                }
            }

            _fileIndexToFolderIndexMap.Add(folderIndex);

            if (emptyStream)
            {
                continue;
            }

            indexInFolder++;

            if (indexInFolder >= _numUnpackStreamsVector[folderIndex])
            {
                folderIndex++;
                indexInFolder = 0;
            }
        }
    }

    // Rebuilds all derived tables after the raw tables have been parsed.
    public void Fill()
    {
        FillStartPos();
        FillFolderStartFileIndex();
    }

    // Absolute stream position of the folder's indexInFolder-th pack stream.
    internal long GetFolderStreamPos(CFolder folder, int indexInFolder)
    {
        int index = folder._firstPackStreamId + indexInFolder;
        return _dataStartPosition + _packStreamStartPositions[index];
    }

    // Total packed size of all pack streams belonging to the folder.
    internal long GetFolderFullPackSize(int folderIndex)
    {
        int packStreamIndex = _folders[folderIndex]._firstPackStreamId;
        CFolder folder = _folders[folderIndex];
        long size = 0;
        for (int i = 0; i < folder._packStreams.Count; i++)
        {
            size += _packSizes[packStreamIndex + i];
        }
        return size;
    }

    // Builds a decoder stream for the folder's coder chain over `stream`.
    internal Stream GetFolderStream(Stream stream, CFolder folder, IPasswordProvider pw)
    {
        int packStreamIndex = folder._firstPackStreamId;
        long folderStartPackPos = GetFolderStreamPos(folder, 0);
        List<long> packSizes = new List<long>();
        for (int j = 0; j < folder._packStreams.Count; j++)
        {
            packSizes.Add(_packSizes[packStreamIndex + j]);
        }

        return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
    }

    private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
    {
        return _packSizes[_folders[folderIndex]._firstPackStreamId + streamIndex];
    }

    // Pack size attributed to a file: the whole folder's pack size for the
    // first file of a folder, 0 for the rest (they share the folder's data).
    private long GetFilePackSize(int fileIndex)
    {
        int folderIndex = _fileIndexToFolderIndexMap[fileIndex];
        if (folderIndex != -1)
        {
            if (_folderStartFileIndex[folderIndex] == fileIndex)
            {
                return GetFolderFullPackSize(folderIndex);
            }
        }
        return 0;
    }
}
}

View File

@ -1,8 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
// A bind pair connects one coder's output stream to another coder's input
// stream inside a 7z folder's coder graph.
internal class CBindPair
{
    internal int _inIndex;  // index of the bound input stream
    internal int _outIndex; // index of the output stream feeding it
}
}

View File

@ -1,10 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
// Describes one coder (codec instance) inside a 7z folder: which method it
// uses, its codec-specific properties blob, and its stream arity.
internal class CCoderInfo
{
    internal CMethodId _methodId;  // codec identifier (LZMA, AES, ...)
    internal byte[] _props;        // raw codec properties from the header
    internal int _numInStreams;    // number of input streams it consumes
    internal int _numOutStreams;   // number of output streams it produces
}
}

View File

@ -1,36 +0,0 @@
using System;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// One file (or directory / anti-file) entry parsed from a 7z header.
/// </summary>
internal class CFileItem
{
    internal CFileItem()
    {
        // Entries are assumed to carry data unless the header says otherwise.
        HasStream = true;
    }

    public string Name { get; internal set; }
    public long Size { get; internal set; }
    public uint? Attrib { get; internal set; }
    public uint? Crc { get; internal set; }
    public bool HasStream { get; internal set; }
    public bool IsDir { get; internal set; }
    public bool IsAnti { get; internal set; }
    public long? StartPos { get; internal set; }

    public DateTime? CTime { get; internal set; }
    public DateTime? ATime { get; internal set; }
    public DateTime? MTime { get; internal set; }

    /// <summary>True once a CRC was recorded for this entry.</summary>
    public bool CrcDefined => Crc != null;

    /// <summary>True once file attributes were recorded for this entry.</summary>
    public bool AttribDefined => Attrib != null;

    public void SetAttrib(uint attrib) => Attrib = attrib;
}
}

View File

@ -1,188 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// A 7z "folder": a small dataflow graph of coders connected by bind pairs,
/// fed by pack streams, producing one final unpacked output stream.
/// </summary>
internal class CFolder
{
    internal List<CCoderInfo> _coders = new List<CCoderInfo>();
    internal List<CBindPair> _bindPairs = new List<CBindPair>();
    internal List<int> _packStreams = new List<int>();
    internal int _firstPackStreamId;
    internal List<long> _unpackSizes = new List<long>();
    internal uint? _unpackCrc;

    internal bool UnpackCrcDefined => _unpackCrc != null;

    // Size of the folder's final output: the out-stream that is not consumed
    // by any bind pair (i.e. the graph's sink).
    public long GetUnpackSize()
    {
        if (_unpackSizes.Count == 0)
        {
            return 0;
        }
        for (int i = _unpackSizes.Count - 1; i >= 0; i--)
        {
            if (FindBindPairForOutStream(i) < 0)
            {
                return _unpackSizes[i];
            }
        }
        throw new Exception();
    }

    // Total number of output streams across all coders.
    public int GetNumOutStreams()
    {
        int count = 0;
        for (int i = 0; i < _coders.Count; i++)
        {
            count += _coders[i]._numOutStreams;
        }
        return count;
    }

    // Index of the bind pair consuming the given in-stream, or -1.
    public int FindBindPairForInStream(int inStreamIndex)
    {
        for (int i = 0; i < _bindPairs.Count; i++)
        {
            if (_bindPairs[i]._inIndex == inStreamIndex)
            {
                return i;
            }
        }
        return -1;
    }

    // Index of the bind pair consuming the given out-stream, or -1.
    public int FindBindPairForOutStream(int outStreamIndex)
    {
        for (int i = 0; i < _bindPairs.Count; i++)
        {
            if (_bindPairs[i]._outIndex == outStreamIndex)
            {
                return i;
            }
        }
        return -1;
    }

    // Position of the given in-stream within the pack-stream list, or -1.
    public int FindPackStreamArrayIndex(int inStreamIndex)
    {
        for (int i = 0; i < _packStreams.Count; i++)
        {
            if (_packStreams[i] == inStreamIndex)
            {
                return i;
            }
        }
        return -1;
    }

    // True when any coder in the chain is the AES codec.
    public bool IsEncrypted()
    {
        for (int i = _coders.Count - 1; i >= 0; i--)
        {
            if (_coders[i]._methodId == CMethodId.K_AES)
            {
                return true;
            }
        }
        return false;
    }

    // Validates the coder graph: counts within fixed limits, no stream bound
    // or packed twice, and no cycles among coders (detected by computing the
    // transitive closure of the coder-dependency mask and checking the diagonal).
    public bool CheckStructure()
    {
        const int kNumCodersMax = 32; // don't change it
        const int kMaskSize = 32; // it must be >= kNumCodersMax
        const int kNumBindsMax = 32;

        if (_coders.Count > kNumCodersMax || _bindPairs.Count > kNumBindsMax)
        {
            return false;
        }

        {
            // Each in-stream may be used at most once, as either a bind-pair
            // input or a pack stream.
            var v = new BitVector(_bindPairs.Count + _packStreams.Count);

            for (int i = 0; i < _bindPairs.Count; i++)
            {
                if (v.GetAndSet(_bindPairs[i]._inIndex))
                {
                    return false;
                }
            }

            for (int i = 0; i < _packStreams.Count; i++)
            {
                if (v.GetAndSet(_packStreams[i]))
                {
                    return false;
                }
            }
        }

        {
            // Each out-stream may feed at most one bind pair.
            var v = new BitVector(_unpackSizes.Count);
            for (int i = 0; i < _bindPairs.Count; i++)
            {
                if (v.GetAndSet(_bindPairs[i]._outIndex))
                {
                    return false;
                }
            }
        }

        uint[] mask = new uint[kMaskSize];

        {
            // Map every global stream index to its owning coder, then record
            // coder-to-coder dependencies induced by the bind pairs.
            List<int> inStreamToCoder = new List<int>();
            List<int> outStreamToCoder = new List<int>();
            for (int i = 0; i < _coders.Count; i++)
            {
                CCoderInfo coder = _coders[i];
                for (int j = 0; j < coder._numInStreams; j++)
                {
                    inStreamToCoder.Add(i);
                }
                for (int j = 0; j < coder._numOutStreams; j++)
                {
                    outStreamToCoder.Add(i);
                }
            }

            for (int i = 0; i < _bindPairs.Count; i++)
            {
                CBindPair bp = _bindPairs[i];
                mask[inStreamToCoder[bp._inIndex]] |= (1u << outStreamToCoder[bp._outIndex]);
            }
        }

        // Transitive closure of the dependency mask.
        for (int i = 0; i < kMaskSize; i++)
        {
            for (int j = 0; j < kMaskSize; j++)
            {
                if (((1u << j) & mask[i]) != 0)
                {
                    mask[i] |= mask[j];
                }
            }
        }

        // A coder depending (transitively) on itself means the graph has a cycle.
        for (int i = 0; i < kMaskSize; i++)
        {
            if (((1u << i) & mask[i]) != 0)
            {
                return false;
            }
        }

        return true;
    }
}
}

View File

@ -1,57 +0,0 @@
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// A 7z codec identifier, stored as a variable-length big-endian integer.
/// </summary>
internal struct CMethodId : IEquatable<CMethodId>
{
    public const ulong K_COPY_ID = 0;
    public const ulong K_LZMA_ID = 0x030101;
    public const ulong K_LZMA2_ID = 0x21;
    public const ulong K_AES_ID = 0x06F10701;

    public static readonly CMethodId K_COPY = new CMethodId(K_COPY_ID);
    public static readonly CMethodId K_LZMA = new CMethodId(K_LZMA_ID);
    public static readonly CMethodId K_LZMA2 = new CMethodId(K_LZMA2_ID);
    public static readonly CMethodId K_AES = new CMethodId(K_AES_ID);

    public readonly ulong _id;

    public CMethodId(ulong id)
    {
        _id = id;
    }

    public override int GetHashCode()
    {
        return _id.GetHashCode();
    }

    // Pattern match replaces the original is-check + cast pair; declaring
    // IEquatable<CMethodId> lets generic collections compare without boxing.
    public override bool Equals(object obj)
    {
        return obj is CMethodId other && Equals(other);
    }

    public bool Equals(CMethodId other)
    {
        return _id == other._id;
    }

    public static bool operator ==(CMethodId left, CMethodId right)
    {
        return left._id == right._id;
    }

    public static bool operator !=(CMethodId left, CMethodId right)
    {
        return left._id != right._id;
    }

    /// <summary>Number of bytes needed to encode the id (0 for the copy codec).</summary>
    public int GetLength()
    {
        int bytes = 0;
        for (ulong value = _id; value != 0; value >>= 8)
        {
            bytes++;
        }
        return bytes;
    }
}
}

View File

@ -1,69 +0,0 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// RAII-style helper that temporarily redirects an ArchiveReader's input to an
/// in-memory byte vector while parsing a header section, restoring the
/// previous stream on Dispose.
/// </summary>
internal struct CStreamSwitch : IDisposable
{
    private ArchiveReader _archive;
    private bool _needRemove; // true when we pushed a byte stream that must be popped
    private bool _active;

    public void Dispose()
    {
        if (_active)
        {
            _active = false;
#if DEBUG
            Log.WriteLine("[end of switch]");
#endif
        }

        if (_needRemove)
        {
            _needRemove = false;
            _archive.DeleteByteStream();
        }
    }

    // Unconditionally switch to the given data vector.
    public void Set(ArchiveReader archive, byte[] dataVector)
    {
        Dispose();
        _archive = archive;
        _archive.AddByteStream(dataVector, 0, dataVector.Length);
        _needRemove = true;
        _active = true;
    }

    // Conditional switch driven by the header's "external" flag byte: non-zero
    // selects one of the provided data vectors, zero means the data follows
    // inline in the current stream (no redirection needed).
    public void Set(ArchiveReader archive, List<byte[]> dataVector)
    {
        Dispose();
        _active = true;

        byte external = archive.ReadByte();
        if (external != 0)
        {
            int dataIndex = archive.ReadNum();
            if (dataIndex < 0 || dataIndex >= dataVector.Count)
            {
                throw new InvalidOperationException();
            }

#if DEBUG
            Log.WriteLine("[switch to stream {0}]", dataIndex);
#endif
            _archive = archive;
            _archive.AddByteStream(dataVector[dataIndex], 0, dataVector[dataIndex].Length);
            _needRemove = true;
            _active = true;
        }
        else
        {
#if DEBUG
            Log.WriteLine("[inline data]");
#endif
        }
    }
}
}

View File

@ -1,186 +0,0 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// Forward-only reader over a byte-array window, implementing the primitive
/// decoders used by the 7z header parser (little-endian integers, the 7z
/// variable-length number encoding, and null-terminated UTF-16 strings).
/// </summary>
internal class DataReader
{
    #region Static Methods

    /// <summary>Reads a little-endian 32-bit value from <paramref name="buffer"/> at <paramref name="offset"/>.</summary>
    public static uint Get32(byte[] buffer, int offset)
    {
        return buffer[offset]
               + ((uint)buffer[offset + 1] << 8)
               + ((uint)buffer[offset + 2] << 16)
               + ((uint)buffer[offset + 3] << 24);
    }

    /// <summary>Reads a little-endian 64-bit value from <paramref name="buffer"/> at <paramref name="offset"/>.</summary>
    public static ulong Get64(byte[] buffer, int offset)
    {
        return buffer[offset]
               + ((ulong)buffer[offset + 1] << 8)
               + ((ulong)buffer[offset + 2] << 16)
               + ((ulong)buffer[offset + 3] << 24)
               + ((ulong)buffer[offset + 4] << 32)
               + ((ulong)buffer[offset + 5] << 40)
               + ((ulong)buffer[offset + 6] << 48)
               + ((ulong)buffer[offset + 7] << 56);
    }

    #endregion

    #region Variables

    private readonly byte[] _buffer;
    private readonly int _ending; // exclusive upper bound of the readable window

    #endregion

    #region Public Methods

    /// <summary>Wraps the window of <paramref name="buffer"/> starting at <paramref name="offset"/> spanning <paramref name="length"/> bytes.</summary>
    public DataReader(byte[] buffer, int offset, int length)
    {
        _buffer = buffer;
        Offset = offset;
        _ending = offset + length;
    }

    /// <summary>Current read position within the underlying buffer.</summary>
    public int Offset { get; private set; }

    /// <summary>Reads one byte; throws <see cref="EndOfStreamException"/> past the window.</summary>
    public byte ReadByte()
    {
        if (Offset >= _ending)
        {
            throw new EndOfStreamException();
        }

        return _buffer[Offset++];
    }

    /// <summary>Copies <paramref name="length"/> bytes into <paramref name="buffer"/>.</summary>
    public void ReadBytes(byte[] buffer, int offset, int length)
    {
        if (length > _ending - Offset)
        {
            throw new EndOfStreamException();
        }

        // Bulk copy replaces the original byte-by-byte loop.
        Array.Copy(_buffer, Offset, buffer, offset, length);
        Offset += length;
    }

    /// <summary>Advances the position by <paramref name="size"/> bytes without reading.</summary>
    public void SkipData(long size)
    {
        if (size > _ending - Offset)
        {
            throw new EndOfStreamException();
        }

        Offset += (int)size;
#if DEBUG
        Log.WriteLine("SkipData {0}", size);
#endif
    }

    /// <summary>Reads a length prefix (7z number encoding), then skips that many bytes.</summary>
    public void SkipData()
    {
        SkipData(checked((long)ReadNumber()));
    }

    /// <summary>
    /// Decodes a 7z variable-length number: the first byte's high bits mark,
    /// from the top down, how many full little-endian bytes follow; the
    /// remaining low bits of the first byte supply the most significant part.
    /// </summary>
    public ulong ReadNumber()
    {
        if (Offset >= _ending)
        {
            throw new EndOfStreamException();
        }

        byte firstByte = _buffer[Offset++];
        byte mask = 0x80;
        ulong value = 0;

        for (int i = 0; i < 8; i++)
        {
            if ((firstByte & mask) == 0)
            {
                // Remaining low bits of the first byte are the high part.
                ulong highPart = firstByte & (mask - 1u);
                value += highPart << (i * 8);
                return value;
            }

            if (Offset >= _ending)
            {
                throw new EndOfStreamException();
            }

            value |= (ulong)_buffer[Offset++] << (8 * i);
            mask >>= 1;
        }

        return value;
    }

    /// <summary>Reads a 7z number and narrows it to a non-negative int.</summary>
    public int ReadNum()
    {
        ulong value = ReadNumber();
        if (value > Int32.MaxValue)
        {
            throw new NotSupportedException();
        }

        return (int)value;
    }

    public uint ReadUInt32()
    {
        if (Offset + 4 > _ending)
        {
            throw new EndOfStreamException();
        }

        uint res = Get32(_buffer, Offset);
        Offset += 4;
        return res;
    }

    public ulong ReadUInt64()
    {
        if (Offset + 8 > _ending)
        {
            throw new EndOfStreamException();
        }

        ulong res = Get64(_buffer, Offset);
        Offset += 8;
        return res;
    }

    /// <summary>Reads a UTF-16LE string terminated by a two-byte NUL.</summary>
    public string ReadString()
    {
        int ending = Offset;

        for (;;)
        {
            if (ending + 2 > _ending)
            {
                throw new EndOfStreamException();
            }

            if (_buffer[ending] == 0 && _buffer[ending + 1] == 0)
            {
                break;
            }

            ending += 2;
        }

        string str = Encoding.Unicode.GetString(_buffer, Offset, ending - Offset);
        Offset = ending + 2; // skip the terminator
        return str;
    }

    #endregion
}
}

View File

@ -1,45 +0,0 @@
using System;
using System.Collections.Generic;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// Archive entry for 7z files; all metadata is forwarded from the underlying
/// <see cref="SevenZipFilePart"/> header.
/// </summary>
public class SevenZipEntry : Entry
{
    internal SevenZipEntry(SevenZipFilePart filePart)
    {
        FilePart = filePart;
    }

    internal SevenZipFilePart FilePart { get; }

    public override CompressionType CompressionType => FilePart.CompressionType;

    // CRC defaults to 0 when the header recorded none.
    public override long Crc => FilePart.Header.Crc ?? 0;

    public override string Key => FilePart.Header.Name;

    public override string LinkTarget => null;

    // Per-entry compressed size is not tracked: files share a folder's packed data.
    public override long CompressedSize => 0;

    public override long Size => FilePart.Header.Size;

    public override DateTime? LastModifiedTime => FilePart.Header.MTime;

    // 7z headers as parsed here expose only the modified time.
    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => FilePart.Header.IsDir;

    public override bool IsSplitAfter => false;

    public override int? Attrib => (int)FilePart.Header.Attrib;

    internal override IEnumerable<FilePart> Parts => FilePart.AsEnumerable<FilePart>();
}
}

View File

@ -1,106 +0,0 @@
using System;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// One file's data within a 7z archive. Several files can share a single
/// decoded folder stream; this part skips its siblings' bytes to reach its own.
/// </summary>
internal class SevenZipFilePart : FilePart
{
    private CompressionType? _type; // lazily resolved from the folder's first coder
    private readonly Stream _stream;
    private readonly ArchiveDatabase _database;

    internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry, ArchiveEncoding archiveEncoding)
        : base(archiveEncoding)
    {
        _stream = stream;
        _database = database;
        Index = index;
        Header = fileEntry;
        if (Header.HasStream)
        {
            Folder = database._folders[database._fileIndexToFolderIndexMap[index]];
        }
    }

    internal CFileItem Header { get; }

    // Null for stream-less entries (e.g. directories).
    internal CFolder Folder { get; }

    internal int Index { get; }

    internal override string FilePartName => Header.Name;

    internal override Stream GetRawStream()
    {
        return null;
    }

    // Decodes the owning folder and positions past preceding files' bytes,
    // returning a sub-stream limited to this file's size.
    internal override Stream GetCompressedStream()
    {
        if (!Header.HasStream)
        {
            return null;
        }
        var folderStream = _database.GetFolderStream(_stream, Folder, _database.PasswordProvider);

        int firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder)];
        int skipCount = Index - firstFileIndex;
        long skipSize = 0;
        for (int i = 0; i < skipCount; i++)
        {
            skipSize += _database._files[firstFileIndex + i].Size;
        }
        if (skipSize > 0)
        {
            folderStream.Skip(skipSize);
        }
        return new ReadOnlySubStream(folderStream, Header.Size);
    }

    public CompressionType CompressionType
    {
        get
        {
            if (_type == null)
            {
                _type = GetCompression();
            }
            return _type.Value;
        }
    }

    //copied from DecoderRegistry
    private const uint K_COPY = 0x0;
    private const uint K_DELTA = 3;
    private const uint K_LZMA2 = 0x21;
    private const uint K_LZMA = 0x030101;
    private const uint K_PPMD = 0x030401;
    private const uint K_BCJ = 0x03030103;
    private const uint K_BCJ2 = 0x0303011B;
    private const uint K_DEFLATE = 0x040108;
    private const uint K_B_ZIP2 = 0x040202;

    // Maps the folder's first coder to a CompressionType. Codecs outside
    // LZMA/LZMA2/PPMd/BZip2 (copy, delta, BCJ filters, deflate) currently
    // throw NotImplementedException.
    internal CompressionType GetCompression()
    {
        var coder = Folder._coders.First();
        switch (coder._methodId._id)
        {
            case K_LZMA:
            case K_LZMA2:
            {
                return CompressionType.LZMA;
            }
            case K_PPMD:
            {
                return CompressionType.PPMd;
            }
            case K_B_ZIP2:
            {
                return CompressionType.BZip2;
            }
            default:
                throw new NotImplementedException();
        }
    }
}
}

View File

@ -1,14 +0,0 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip
{
/// <summary>
/// Volume wrapper for a 7z file. 7z archives handled here are single-volume,
/// so no extra state beyond the base <see cref="Volume"/> is needed.
/// </summary>
public class SevenZipVolume : Volume
{
    public SevenZipVolume(Stream stream, ReaderOptions readerFactoryOptions)
        : base(stream, readerFactoryOptions)
    {
    }
}
}

View File

@ -1,19 +0,0 @@
namespace SharpCompress.Common.Tar.Headers
{
// Tar entry kinds; values mirror the header's "typeflag" byte.
internal enum EntryType : byte
{
    File = 0,                        // pre-POSIX headers use a NUL typeflag
    OldFile = (byte)'0',
    HardLink = (byte)'1',
    SymLink = (byte)'2',
    CharDevice = (byte)'3',
    BlockDevice = (byte)'4',
    Directory = (byte)'5',
    Fifo = (byte)'6',
    LongLink = (byte)'K',            // GNU extension: long link target follows
    LongName = (byte)'L',            // GNU extension: long file name follows
    SparseFile = (byte)'S',
    VolumeHeader = (byte)'V',
    GlobalExtendedHeader = (byte)'g' // PAX global extended header
}
}

Some files were not shown because too many files have changed in this diff Show More