DiscSystem: misc improvements and bugfixes; add CDFS parser and a DiscID system for PSX and PSP discs.

This commit is contained in:
zeromus 2013-06-25 08:31:48 +00:00
parent 1de395ca2a
commit b7dbb91419
12 changed files with 1381 additions and 87 deletions

View File

@ -419,6 +419,13 @@
<Compile Include="DiscSystem\CCD_format.cs">
<SubType>Code</SubType>
</Compile>
<Compile Include="DiscSystem\cdfs\EndianBitConverter.cs" />
<Compile Include="DiscSystem\cdfs\ISODirectoryNode.cs" />
<Compile Include="DiscSystem\cdfs\ISOFile.cs" />
<Compile Include="DiscSystem\cdfs\ISOFileNode.cs" />
<Compile Include="DiscSystem\cdfs\ISONode.cs" />
<Compile Include="DiscSystem\cdfs\ISONodeRecord.cs" />
<Compile Include="DiscSystem\cdfs\ISOVolumeDescriptor.cs" />
<Compile Include="DiscSystem\CUE_format.cs">
<SubType>Code</SubType>
</Compile>
@ -429,6 +436,7 @@
<Compile Include="DiscSystem\Disc.cs">
<SubType>Code</SubType>
</Compile>
<Compile Include="DiscSystem\Disc.Id.cs" />
<Compile Include="DiscSystem\DiscTOC.cs">
<SubType>Code</SubType>
</Compile>
@ -503,6 +511,7 @@
<Content Include="Consoles\PC Engine\Compat.txt" />
<Content Include="Consoles\Sega\Genesis\Compat.txt" />
<Content Include="Consoles\Sega\SMS\Compat.txt" />
<Content Include="DiscSystem\cdfs\notes.txt" />
<Content Include="Notes.txt" />
</ItemGroup>
<ItemGroup>

View File

@ -12,15 +12,25 @@ namespace BizHawk.DiscSystem
/// <summary>
/// finds a file in the same directory with an extension alternate to the supplied one.
/// If two are found, an exception is thrown (later, we may have heuristics to try to acquire the desired content)
/// TODO - this whole concept could be turned into a gigantic FileResolver class and be way more powerful
/// </summary>
string FindAlternateExtensionFile(string path, bool caseSensitive)
string FindAlternateExtensionFile(string path, bool caseSensitive, string baseDir)
{
string targetFile = Path.GetFileName(path);
string targetFragment = Path.GetFileNameWithoutExtension(path);
var di = new FileInfo(path).Directory;
//if the directory doesnt exist, it may be because it was a full path or something. try an alternate base directory
if (!di.Exists)
di = new DirectoryInfo(baseDir);
var results = new List<FileInfo>();
foreach (var fi in di.GetFiles())
{
//dont acquire cue files...
if (Path.GetExtension(fi.FullName).ToLower() == ".cue")
continue;
string fragment = Path.GetFileNameWithoutExtension(fi.FullName);
//match files with differing extensions
int cmp = string.Compare(fragment, targetFragment, !caseSensitive);
@ -36,11 +46,12 @@ namespace BizHawk.DiscSystem
return results[0].FullName;
}
void FromCuePathInternal(string cuePath, CueBinPrefs prefs)
//cue files can get their data from other sources using this
Dictionary<string, string> CueFileResolver = new Dictionary<string, string>();
void FromCueInternal(Cue cue, string cueDir, CueBinPrefs prefs)
{
string cueDir = Path.GetDirectoryName(cuePath);
var cue = new Cue();
cue.LoadFromPath(cuePath);
//TODO - add cue directory to CueBinPrefs???? could make things cleaner...
var session = new DiscTOC.Session();
session.num = 1;
@ -56,6 +67,9 @@ namespace BizHawk.DiscSystem
string blobPath = Path.Combine(cueDir, cue_file.Path);
if (CueFileResolver.ContainsKey(cue_file.Path))
blobPath = CueFileResolver[cue_file.Path];
int blob_sectorsize = Cue.BINSectorSizeForTrackType(cue_file.Tracks[0].TrackType);
int blob_length_aba, blob_leftover;
IBlob cue_blob = null;
@ -63,11 +77,11 @@ namespace BizHawk.DiscSystem
//try any way we can to acquire a file
if (!File.Exists(blobPath) && prefs.ExtensionAware)
{
blobPath = FindAlternateExtensionFile(blobPath, prefs.CaseSensitive);
blobPath = FindAlternateExtensionFile(blobPath, prefs.CaseSensitive, cueDir);
}
if (!File.Exists(blobPath))
throw new DiscReferenceException(blobPath,"");
throw new DiscReferenceException(blobPath, "");
//some simple rules to mutate the file type if we received something fishy
string blobPathExt = Path.GetExtension(blobPath).ToLower();
@ -90,7 +104,7 @@ namespace BizHawk.DiscSystem
}
else if (cue_file.FileType == Cue.CueFileType.ECM)
{
if(!Blob_ECM.IsECM(blobPath))
if (!Blob_ECM.IsECM(blobPath))
{
throw new DiscReferenceException(blobPath, "an ECM file was specified or detected, but it isn't a valid ECM file. You've got issues. Consult your iso vendor.");
}
@ -265,7 +279,7 @@ namespace BizHawk.DiscSystem
{
case ETrackType.Audio: //all 2352 bytes are present
case ETrackType.Mode1_2352: //2352 bytes are present, containing 2048 bytes of user data as well as ECM
case ETrackType.Mode2_2352: //2352 bytes are present, containing 2336 bytes of user data, with no ECM
case ETrackType.Mode2_2352: //2352 bytes are present, containing the entirety of a mode2 sector (could be form0,1,2)
{
//these cases are all 2352 bytes
//in all these cases, either no ECM is present or ECM is provided.
@ -273,7 +287,15 @@ namespace BizHawk.DiscSystem
Sector_RawBlob sector_rawblob = new Sector_RawBlob();
sector_rawblob.Blob = cue_blob;
sector_rawblob.Offset = (long)blob_timestamp * 2352;
Sector_Raw sector_raw = new Sector_Raw();
Sector_Mode1_or_Mode2_2352 sector_raw;
if(cue_track.TrackType == ETrackType.Mode1_2352)
sector_raw = new Sector_Mode1_2352();
else if (cue_track.TrackType == ETrackType.Audio)
sector_raw = new Sector_Mode1_2352(); //TODO should probably make a new sector adapter which errors if 2048B are requested
else if (cue_track.TrackType == ETrackType.Mode2_2352)
sector_raw = new Sector_Mode2_2352();
else throw new InvalidOperationException();
sector_raw.BaseSector = sector_rawblob;
//take care to handle final sectors that are too short.
if (is_last_aba_in_track && blob_leftover > 0)
@ -338,6 +360,14 @@ namespace BizHawk.DiscSystem
TOC.length_aba += toc_session.length_aba;
}
}
void FromCuePathInternal(string cuePath, CueBinPrefs prefs)
{
string cueDir = Path.GetDirectoryName(cuePath);
var cue = new Cue();
cue.LoadFromPath(cuePath);
FromCueInternal(cue, cueDir, prefs);
}
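FromCuePathInternal is now just a thin wrapper over FromCueInternal, and the new CueFileResolver dictionary lets a cue's FILE entries be redirected to blobs that live somewhere else. A hedged sketch of how these pieces combine (hypothetical call site inside Disc; the cue text and paths are made up for illustration):
//hedged sketch, not part of this commit
string cueText = "FILE \"game.bin\" BINARY\n  TRACK 01 MODE1/2352\n    INDEX 01 00:00:00";
var cue = new Cue();
cue.LoadFromString(cueText);
CueFileResolver["game.bin"] = @"C:\dumps\game.bin"; //redirect the in-cue FILE entry to the real blob
FromCueInternal(cue, "", new CueBinPrefs());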
}
public class Cue
@ -451,8 +481,13 @@ namespace BizHawk.DiscSystem
{
FileInfo fiCue = new FileInfo(cuePath);
if (!fiCue.Exists) throw new FileNotFoundException();
File.ReadAllText(cuePath);
TextReader tr = new StreamReader(cuePath);
string cueString = File.ReadAllText(cuePath);
LoadFromString(cueString);
}
public void LoadFromString(string cueString)
{
TextReader tr = new StringReader(cueString);
bool track_has_pregap = false;
bool track_has_postgap = false;

View File

@ -60,6 +60,80 @@ namespace BizHawk.DiscSystem
}
}
/// <summary>
/// Allows you to stream data off a disc
/// </summary>
public class DiscStream : System.IO.Stream
{
int SectorSize;
int NumSectors;
Disc Disc;
long currPosition;
int cachedSector;
byte[] cachedSectorBuffer;
public static DiscStream Open_LBA_2048(Disc disc)
{
var ret = new DiscStream();
ret._Open_LBA_2048(disc);
return ret;
}
void _Open_LBA_2048(Disc disc)
{
SectorSize = 2048;
this.Disc = disc;
NumSectors = disc.LBACount;
currPosition = 0;
cachedSector = -1;
cachedSectorBuffer = new byte[SectorSize];
}
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return true; } }
public override bool CanWrite { get { return false; } }
public override void Flush() { throw new NotImplementedException(); }
public override long Length { get { return NumSectors * SectorSize; } }
public override long Position
{
get { return currPosition; }
set
{
currPosition = value;
//invalidate the cached sector..
//as a later optimization, we could actually intelligently decide if this is necessary
cachedSector = -1;
}
}
public override int Read(byte[] buffer, int offset, int count)
{
long remain = Length - currPosition;
if (count > remain)
count = (int)Math.Min(remain,int.MaxValue);
Disc.READLBA_Flat_Implementation(currPosition, buffer, offset, count, (a, b, c) => Disc.ReadLBA_2048(a, b, c), SectorSize, cachedSectorBuffer, ref cachedSector);
currPosition += count;
return count;
}
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
switch (origin)
{
case System.IO.SeekOrigin.Begin: Position = offset; break;
case System.IO.SeekOrigin.Current: Position += offset; break;
case System.IO.SeekOrigin.End: Position = Length - offset; break;
}
return Position;
}
public override void SetLength(long value) { throw new NotImplementedException(); }
public override void Write(byte[] buffer, int offset, int count) { throw new NotImplementedException(); }
}
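For context, a hedged sketch (assuming a Disc instance named disc) of how this stream pairs with the new CDFS parser; this mirrors what DetectDiscType does later in the commit:
//hedged sketch, not part of this commit
var isoStream = DiscStream.Open_LBA_2048(disc); //2048-byte-per-sector logical view of the disc
var iso = new ISOParser.ISOFile();
if (iso.Parse(isoStream))
iso.Print(); //dump the parsed directory tree for debugging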
public partial class Disc
{
/// <summary>
@ -87,9 +161,7 @@ namespace BizHawk.DiscSystem
internal void ReadABA_2048(int aba, byte[] buffer, int offset)
{
byte[] temp = new byte[2352];
Sectors[aba].Sector.Read(temp, offset);
Array.Copy(temp, 16, buffer, offset, 2048);
Sectors[aba].Sector.Read_2048(buffer, offset);
}
/// <summary>
@ -100,16 +172,37 @@ namespace BizHawk.DiscSystem
{
int secsize = 2352;
byte[] lba_buf = new byte[secsize];
while(length > 0)
int sectorHint = -1;
READLBA_Flat_Implementation(disc_offset, buffer, offset, length, (a, b, c) => ReadLBA_2352(a, b, c), secsize, lba_buf, ref sectorHint);
}
/// <summary>
/// reads logical data from a flat disc address space
/// useful for plucking data from a known location on the disc
/// </summary>
public void ReadLBA_2048_Flat(long disc_offset, byte[] buffer, int offset, int length)
{
int secsize = 2048;
byte[] lba_buf = new byte[secsize];
int sectorHint = -1;
READLBA_Flat_Implementation(disc_offset, buffer, offset, length, (a, b, c) => ReadLBA_2048(a, b, c), secsize, lba_buf, ref sectorHint);
}
internal void READLBA_Flat_Implementation(long disc_offset, byte[] buffer, int offset, int length, Action<int, byte[], int> sectorReader, int sectorSize, byte[] sectorBuf, ref int sectorBufferHint)
{
//sectorBufferHint is the number of the sector currently held in sectorBuf; it lets sectorBuf serve as a one-sector cache so a run of small reads doesn't re-read the same sector from the disc
while (length > 0)
{
int lba = (int)(disc_offset / secsize);
int lba_within = (int)(disc_offset % secsize);
int lba = (int)(disc_offset / sectorSize);
int lba_within = (int)(disc_offset % sectorSize);
int todo = length;
int remains_in_lba = secsize - lba_within;
int remains_in_lba = sectorSize - lba_within;
if (remains_in_lba < todo)
todo = remains_in_lba;
ReadLBA_2352(lba, lba_buf, 0);
Array.Copy(lba_buf, lba_within, buffer, offset, todo);
if(sectorBufferHint != lba)
sectorReader(lba, sectorBuf, 0);
sectorBufferHint = lba;
Array.Copy(sectorBuf, lba_within, buffer, offset, todo);
offset += todo;
length -= todo;
disc_offset += todo;
@ -205,19 +298,5 @@ namespace BizHawk.DiscSystem
}
return "no data track found";
}
/// <summary>
/// this isn't quite right...
/// </summary>
/// <returns></returns>
public bool DetectSegaSaturn()
{
byte[] data = new byte[2048];
ReadLBA_2048(0, data, 0);
byte[] cmp = System.Text.Encoding.ASCII.GetBytes("SEGA SEGASATURN");
byte[] cmp2 = new byte[15];
Buffer.BlockCopy(data, 0, cmp2, 0, 15);
return System.Linq.Enumerable.SequenceEqual(cmp, cmp2);
}
}
}
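A small illustration of what the new flat 2048-byte read is good for: plucking a known structure off the disc by byte offset. This hedged sketch (assuming a Disc named disc) checks the ISO9660 descriptor signature that sits at the start of sector 16:
//hedged sketch, not part of this commit
byte[] sig = new byte[5];
disc.ReadLBA_2048_Flat(16 * 2048 + 1, sig, 0, 5); //bytes 1..5 of the first volume descriptor
bool looksLikeCdfs = System.Text.Encoding.ASCII.GetString(sig) == "CD001";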

View File

@ -0,0 +1,85 @@
using System;
using System.Collections.Generic;
//disc type identification logic
namespace BizHawk.DiscSystem
{
public enum DiscType
{
/// <summary>
/// Nothing is known about this disc type
/// </summary>
UnknownFormat,
/// <summary>
/// This is definitely a CDFS disc, but we can't identify anything more about it
/// </summary>
UnknownCDFS,
/// <summary>
/// Sony PSX
/// </summary>
SonyPSX,
/// <summary>
/// Sony PSP
/// </summary>
SonyPSP,
/// <summary>
/// Sega Saturn
/// </summary>
SegaSaturn,
/// <summary>
/// It's not clear whether we can ever have enough info to ID a TurboCD disc (we're using hashes)
/// </summary>
TurboCD
}
public partial class Disc
{
/// <summary>
/// Attempts to determine the type of the disc.
/// In the future, we might return a struct or a class with more detailed information
/// </summary>
public DiscType DetectDiscType()
{
//sega doesnt put anything identifying in the cdfs volume info. but its consistent about putting its own header here in sector 0
if (DetectSegaSaturn()) return DiscType.SegaSaturn;
//we dont know how to detect TurboCD.
//an emulator frontend will likely just guess TurboCD if the disc is UnknownFormat
var iso = new ISOParser.ISOFile();
bool isIso = iso.Parse(DiscStream.Open_LBA_2048(this));
if (isIso)
{
var appId = System.Text.Encoding.ASCII.GetString(iso.VolumeDescriptors[0].ApplicationIdentifier).TrimEnd('\0', ' ');
if (appId == "PLAYSTATION")
return DiscType.SonyPSX;
if(appId == "PSP GAME")
return DiscType.SonyPSP;
return DiscType.UnknownCDFS;
}
return DiscType.UnknownFormat;
}
/// <summary>
/// This is a reasonable approach to ID Saturn.
/// </summary>
bool DetectSegaSaturn()
{
byte[] data = new byte[2048];
ReadLBA_2048(0, data, 0);
byte[] cmp = System.Text.Encoding.ASCII.GetBytes("SEGA SEGASATURN");
byte[] cmp2 = new byte[15];
Buffer.BlockCopy(data, 0, cmp2, 0, 15);
return System.Linq.Enumerable.SequenceEqual(cmp, cmp2);
}
}
}
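A hedged sketch of how a frontend might consume the new identification (the routing shown is illustrative only):
//hedged sketch, not part of this commit
switch (disc.DetectDiscType())
{
case DiscType.SonyPSX: Console.WriteLine("PSX disc"); break;
case DiscType.SonyPSP: Console.WriteLine("PSP disc"); break;
case DiscType.SegaSaturn: Console.WriteLine("Saturn disc"); break;
case DiscType.UnknownCDFS: Console.WriteLine("some other CDFS disc"); break;
default: Console.WriteLine("unknown format; a frontend might guess TurboCD here"); break;
}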

View File

@ -59,31 +59,41 @@ using System.Collections.Generic;
//2048 bytes packed into 2352:
//12 bytes sync(00 ff ff ff ff ff ff ff ff ff ff 00)
//3 bytes sector address (min+A0),sec,frac //does this correspond to ccd `point` field in the TOC entries?
//sector mode byte (0: silence; 1: 2048Byte mode (EDC,ECC,CIRC), 2: 2352Byte mode (CIRC only)
//user data: 2336 bytes
//sector mode byte (0: silence; 1: 2048Byte mode (EDC,ECC,CIRC); 2: mode2 (could be 2336 [vanilla mode2], 2048 [xa mode2 form1], 2324 [xa mode2 form2]))
//cue sheets may use mode1_2048 (and the error coding needs to be regenerated to get accurate raw data) or mode1_2352 (the entire sector is present)
//mode2_2352 is the only kind of mode2, by necessity
//audio is a different mode, seems to be just 2352 bytes with no sync, header or error correction. i guess the CIRC error correction is still there
namespace BizHawk.DiscSystem
{
public partial class Disc : IDisposable
{
//TODO - separate these into Read_2352 and Read_2048 (optimizations can be made by ISector implementors depending on what is requested)
//(for example, avoiding the 2048 byte sector creating the ECC data and then immediately discarding it)
public interface ISector
{
int Read(byte[] buffer, int offset);
/// <summary>
/// reads the entire sector, raw
/// </summary>
int Read(byte[] buffer, int offset); //todo - rename to Read_2352
/// <summary>
/// reads 2048 bytes of userdata.. precisely what this means isnt always 100% certain (for instance mode2 form 0 has 2336 bytes of userdata instead of 2048)..
/// ..but its certain enough for this to be useful
/// </summary>
int Read_2048(byte[] buffer, int offset);
}
/// <summary>
/// Presently, an IBlob doesn't need to work multithreadedly. It's quite an onerous demand. This should probably be managed by the Disc class somehow, or by the user making another Disc.
/// Presently, an IBlob doesn't need to work multithreadedly. It's quite an onerous demand.
/// This should probably be managed by the Disc class somehow, or by the user making another Disc.
/// </summary>
public interface IBlob : IDisposable
{
/// <summary>
/// what a weird parameter order. normally the dest buffer would be first. weird.
/// </summary>
/// <param name="byte_pos">location in the blob to read from</param>
/// <param name="buffer">destination buffer for read data</param>
/// <param name="offset">offset into destination buffer</param>
/// <param name="count">amount to read</param>
int Read(long byte_pos, byte[] buffer, int offset, int count);
}
@ -132,15 +142,9 @@ namespace BizHawk.DiscSystem
}
}
class Sector_RawSilence : ISector
{
public int Read(byte[] buffer, int offset)
{
Array.Clear(buffer, 0, 2352);
return 2352;
}
}
/// <summary>
/// this ISector is dumb and only knows how to drag chunks off a source blob
/// </summary>
class Sector_RawBlob : ISector
{
public IBlob Blob;
@ -149,39 +153,108 @@ namespace BizHawk.DiscSystem
{
return Blob.Read(Offset, buffer, offset, 2352);
}
public int Read_2048(byte[] buffer, int offset)
{
return Blob.Read(Offset, buffer, offset, 2048);
}
}
/// <summary>
/// this ISector always returns zeroes
/// </summary>
class Sector_Zero : ISector
{
public int Read(byte[] buffer, int offset)
{
for (int i = 0; i < 2352; i++)
buffer[offset + i] = 0;
Array.Clear(buffer, 0, 2352);
return 2352;
}
public int Read_2048(byte[] buffer, int offset)
{
Array.Clear(buffer, 0, 2048);
return 2048;
}
}
/// <summary>
/// this ISector adapts another ISector by always returning zeroes
/// TODO I dont like the way this works. I think blobs should get adapted instead to zero-pad to a certain length.
/// </summary>
class Sector_ZeroPad : ISector
{
public ISector BaseSector;
public int BaseLength;
public int Read(byte[] buffer, int offset)
{
return _Read(buffer, offset, 2352);
}
public int Read_2048(byte[] buffer, int offset)
{
return _Read(buffer, offset, 2048);
}
int _Read(byte[] buffer, int offset, int amount)
{
int read = BaseSector.Read(buffer, offset);
if(read < BaseLength) return read;
for (int i = BaseLength; i < 2352; i++)
for (int i = BaseLength; i < amount; i++)
buffer[offset + i] = 0;
return 2352;
return amount;
}
}
class Sector_Raw : ISector
abstract class Sector_Mode1_or_Mode2_2352 : ISector
{
public ISector BaseSector;
public int Read(byte[] buffer, int offset)
public abstract int Read(byte[] buffer, int offset);
public abstract int Read_2048(byte[] buffer, int offset);
}
/// <summary>
/// This ISector is a raw MODE1 sector
/// </summary>
class Sector_Mode1_2352 : Sector_Mode1_or_Mode2_2352
{
public override int Read(byte[] buffer, int offset)
{
return BaseSector.Read(buffer, offset);
}
public override int Read_2048(byte[] buffer, int offset)
{
//to get 2048 bytes out of this sector type, start 16 bytes in
int ret = BaseSector.Read(TempSector, 0);
Buffer.BlockCopy(TempSector, 16, buffer, offset, 2048);
System.Diagnostics.Debug.Assert(buffer != TempSector);
return 2048;
}
[ThreadStatic]
static byte[] TempSector = new byte[2352];
}
/// <summary>
/// this ISector is a raw MODE2 sector. could be form 0,1,2... who can say? supposedly:
/// To tell the different Mode 2s apart you have to examine bytes 16-23 of the sector (the first 8 bytes of Mode Data).
/// If bytes 16-19 are not the same as 20-23, then it is Mode 2. If they are equal and bit 5 is on (0x20), then it is Mode 2 Form 2. Otherwise it is Mode 2 Form 1.
/// ...but we're not using this information in any way
/// </summary>
class Sector_Mode2_2352 : Sector_Mode1_or_Mode2_2352
{
public override int Read(byte[] buffer, int offset)
{
return BaseSector.Read(buffer, offset);
}
public override int Read_2048(byte[] buffer, int offset)
{
//to get 2048 bytes out of this sector type, start 24 bytes in
int ret = BaseSector.Read(TempSector, 0);
Buffer.BlockCopy(TempSector, 24, buffer, offset, 2048);
System.Diagnostics.Debug.Assert(buffer != TempSector);
return 2048;
}
[ThreadStatic]
static byte[] TempSector = new byte[2352];
}
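For reference, a hedged helper (not part of this commit) spelling out the user-data offsets the two adapters above hard-code; the byte counts follow the standard 2352-byte sector layouts:
//hedged sketch, not part of this commit
static int UserDataOffsetIn2352(ETrackType type)
{
switch (type)
{
case ETrackType.Mode1_2352: return 16; //12 sync + 4 header
case ETrackType.Mode2_2352: return 24; //12 sync + 4 header + 8 subheader (form 1)
default: throw new ArgumentException("no 2048-byte view for this track type");
}
}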
protected static byte BCD_Byte(byte val)
@ -201,6 +274,9 @@ namespace BizHawk.DiscSystem
public IBlob BaseBlob;
}
/// <summary>
/// this ISector is a MODE1 sector that is generating itself from an underlying MODE1/2048 userdata piece
/// </summary>
class Sector_Mode1_2048 : ISector
{
public Sector_Mode1_2048(int ABA)
@ -218,6 +294,13 @@ namespace BizHawk.DiscSystem
public long Offset;
byte[] extra_data;
bool has_extra_data;
public int Read_2048(byte[] buffer, int offset)
{
//this is easy. we only have 2048 bytes, and 2048 bytes were requested
return Blob.BaseBlob.Read(Offset, buffer, offset, 2048);
}
public int Read(byte[] buffer, int offset)
{
//user data
@ -335,39 +418,52 @@ namespace BizHawk.DiscSystem
void FromIsoPathInternal(string isoPath)
{
var session = new DiscTOC.Session();
session.num = 1;
TOC.Sessions.Add(session);
var track = new DiscTOC.Track();
track.num = 1;
session.Tracks.Add(track);
var index = new DiscTOC.Index();
index.num = 0;
track.Indexes.Add(index);
index = new DiscTOC.Index();
index.num = 1;
track.Indexes.Add(index);
//make a fake cue file to represent this iso file
string isoCueWrapper = @"
FILE ""xarp.barp.marp.farp"" BINARY
TRACK 01 MODE1/2048
INDEX 01 00:00:00
";
var fiIso = new FileInfo(isoPath);
Blob_RawFile blob = new Blob_RawFile();
blob.PhysicalPath = fiIso.FullName;
Blobs.Add(blob);
int num_aba = (int)(fiIso.Length / 2048);
track.length_aba = num_aba;
if (fiIso.Length % 2048 != 0)
throw new InvalidOperationException("invalid iso file (size not multiple of 2048)");
//TODO - handle this with Final Fantasy 9 cd1.iso
string cueDir = "";
var cue = new Cue();
CueFileResolver["xarp.barp.marp.farp"] = isoPath;
cue.LoadFromString(isoCueWrapper);
FromCueInternal(cue, cueDir, new CueBinPrefs());
var ecmCacheBlob = new ECMCacheBlob(blob);
for (int i = 0; i < num_aba; i++)
{
Sector_Mode1_2048 sector = new Sector_Mode1_2048(i+150);
sector.Blob = ecmCacheBlob;
sector.Offset = i * 2048;
Sectors.Add(new SectorEntry(sector));
}
//var session = new DiscTOC.Session();
//session.num = 1;
//TOC.Sessions.Add(session);
//var track = new DiscTOC.Track();
//track.num = 1;
//session.Tracks.Add(track);
//var index = new DiscTOC.Index();
//index.num = 0;
//track.Indexes.Add(index);
//index = new DiscTOC.Index();
//index.num = 1;
//track.Indexes.Add(index);
TOC.AnalyzeLengthsFromIndexLengths();
//var fiIso = new FileInfo(isoPath);
//Blob_RawFile blob = new Blob_RawFile();
//blob.PhysicalPath = fiIso.FullName;
//Blobs.Add(blob);
//int num_aba = (int)(fiIso.Length / 2048);
//track.length_aba = num_aba;
//if (fiIso.Length % 2048 != 0)
// throw new InvalidOperationException("invalid iso file (size not multiple of 2048)");
////TODO - handle this with Final Fantasy 9 cd1.iso
//var ecmCacheBlob = new ECMCacheBlob(blob);
//for (int i = 0; i < num_aba; i++)
//{
// Sector_Mode1_2048 sector = new Sector_Mode1_2048(i+150);
// sector.Blob = ecmCacheBlob;
// sector.Offset = i * 2048;
// Sectors.Add(new SectorEntry(sector));
//}
//TOC.AnalyzeLengthsFromIndexLengths();
}

View File

@ -0,0 +1,117 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ISOParser {
/// <summary>
/// Helper class to convert big and little endian numbers from a byte
/// array to a value.
///
/// This code was modified from the endian bit converter presented by
/// Robert Unoki in his blog post:
/// http://blogs.msdn.com/robunoki/archive/2006/04/05/568737.aspx
///
/// I have added support for more data types and the ability to
/// specify an offset into the array to be converted where the value
/// begins.
/// </summary>
public class EndianBitConverter {
#region Static Constructors
/// <summary>
/// Build a converter from little endian to the system endian-ness.
/// </summary>
/// <returns>The converter</returns>
public static EndianBitConverter CreateForLittleEndian() {
return new EndianBitConverter(!BitConverter.IsLittleEndian);
}
/// <summary>
/// Build a converter from big endian to the system endian-ness.
/// </summary>
/// <returns>The converter</returns>
public static EndianBitConverter CreateForBigEndian() {
return new EndianBitConverter(BitConverter.IsLittleEndian);
}
#endregion
#region Private Properties
/// <summary>
/// Keep track of whether we need to swap the bytes or not
/// </summary>
private bool swap;
#endregion
#region Private Constructor
/// <summary>
/// Create the converter with the given endian-ness.
/// </summary>
/// <param name="swapBytes">Whether or not to swap bytes.</param>
private EndianBitConverter(bool swapBytes) {
swap = swapBytes;
}
#endregion
#region 16-bit
public Int16 ToInt16(byte[] data) {
return ToInt16(data, 0);
}
public Int16 ToInt16(byte[] data, int offset) {
byte[] corrected;
if (swap) {
corrected = (byte[])data.Clone();
Array.Reverse(corrected, offset, 2);
}
else {
corrected = data;
}
return BitConverter.ToInt16(corrected, offset);
}
#endregion
#region 32-bit
public Int32 ToInt32(byte[] data) {
return ToInt32(data, 0);
}
public Int32 ToInt32(byte[] data, int offset) {
byte[] corrected;
if (swap) {
corrected = (byte[])data.Clone();
Array.Reverse(corrected, offset, 4);
}
else {
corrected = data;
}
return BitConverter.ToInt32(corrected, offset);
}
#endregion
#region 64-bit
public Int64 ToInt64(byte[] data) {
return ToInt64(data, 0);
}
public Int64 ToInt64(byte[] data, int offset) {
byte[] corrected;
if (swap) {
corrected = (byte[])data.Clone();
Array.Reverse(corrected, offset, 8);
}
else {
corrected = data;
}
return BitConverter.ToInt64(corrected, offset);
}
#endregion
}
}
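A short illustration of why both converters exist: ISO9660 records most integers twice, once little-endian and once big-endian. A hedged sketch (assuming pvd holds the 2048 user bytes of sector 16, e.g. via ReadLBA_2048_Flat); offsets 80 and 84 are where the primary volume descriptor keeps the two copies of its sector count (NumberOfSectors in this parser):
//hedged sketch, not part of this commit
var le = EndianBitConverter.CreateForLittleEndian();
var be = EndianBitConverter.CreateForBigEndian();
int sectorsLE = le.ToInt32(pvd, 80); //little-endian copy
int sectorsBE = be.ToInt32(pvd, 84); //big-endian copy; should match sectorsLE on a sane image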

View File

@ -0,0 +1,145 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
namespace ISOParser {
/// <summary>
/// Representation of a directory in the file system.
/// </summary>
public class ISODirectoryNode : ISONode {
#region Public Properties
/// <summary>
/// The children in this directory.
/// </summary>
public Dictionary<string,ISONode> Children;
#endregion
#region Construction
/// <summary>
/// Constructor.
/// </summary>
/// <param name="record">The node for this directory.</param>
public ISODirectoryNode( ISONodeRecord record )
: base( record ) {
this.Children = new Dictionary<string, ISONode>();
}
#endregion
#region Parsing
/// <summary>
/// Parse the children based on the data in this directory.
/// </summary>
/// <param name="s">The stream to parse from.</param>
/// <param name="visited">The set of already handled
/// files/directories.</param>
public void Parse(Stream s, Dictionary<long, ISONode> visited) {
// Go to the beginning of the set of directories
s.Seek(this.Offset*ISOFile.SECTOR_SIZE, SeekOrigin.Begin);
List<ISONodeRecord> records = new List<ISONodeRecord>();
// Read the directory entries
while(s.Position < ((this.Offset*ISOFile.SECTOR_SIZE)+this.Length)) {
ISONode node;
ISONodeRecord record;
// Read the record
record = new ISONodeRecord();
record.Parse(s);
//zero 24-jun-2013 - improved validity checks
//theres nothing here!
if (record.Length == 0)
{
break;
}
else
{
// Check if we already have this node
if (visited.ContainsKey(record.OffsetOfData))
{
// Get the node
node = visited[record.OffsetOfData];
}
else
{
// Create the node from the record
if (record.IsFile())
{
node = new ISOFileNode(record);
}
else if (record.IsDirectory())
{
node = new ISODirectoryNode(record);
}
else
{
node = new ISONode(record);
}
// Keep track that we've now seen the node and are parsing it
visited.Add(node.Offset, node);
}
// Add the node as a child
this.Children.Add(record.Name, node);
}
}
long currentPosition = s.Position;
// Iterate over directories...
foreach(KeyValuePair<string,ISONode> child in this.Children) {
// Parse this node
if( child.Key != ISONodeRecord.CURRENT_DIRECTORY &&
child.Key != ISONodeRecord.PARENT_DIRECTORY &&
child.Value is ISODirectoryNode ) {
((ISODirectoryNode)child.Value).Parse(s, visited);
}
}
s.Seek(currentPosition, SeekOrigin.Begin);
}
#endregion
#region Printing
/// <summary>
/// Print out this node's children.
/// </summary>
/// <param name="depth">The number of "tabs" to indent this directory.</param>
public void Print(int depth) {
// Get the tabs string
string tabs = "";
for (int i = 0; i < depth; i++) {
tabs += " ";
}
// Get the names and sort
string[] names = this.Children.Keys.ToArray();
Array.Sort(names);
// Print the directory names recursively
foreach (string s in names) {
ISONode n = this.Children[s];
Console.WriteLine(tabs + s);
if (s != ISONodeRecord.CURRENT_DIRECTORY &&
s != ISONodeRecord.PARENT_DIRECTORY &&
n is ISODirectoryNode) {
((ISODirectoryNode)n).Print(depth + 1);
}
}
}
#endregion
}
}
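A hedged sketch of consuming the parsed tree (assumes an ISOFile named iso whose Parse() returned true; only members added in this commit are used):
//hedged sketch, not part of this commit
foreach (var kvp in iso.Root.Children)
{
if (kvp.Key == ISONodeRecord.CURRENT_DIRECTORY || kvp.Key == ISONodeRecord.PARENT_DIRECTORY)
continue;
if (kvp.Value is ISOFileNode)
Console.WriteLine("{0}: sector {1}, {2} bytes", kvp.Key, kvp.Value.Offset, kvp.Value.Length);
}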

View File

@ -0,0 +1,135 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace ISOParser {
/// <summary>
/// This class is meant to parse disk images as specified by ISO9660.
/// Specifically, it should work for most disk images that are created
/// by standard disk imaging software. This class is by no means
/// robust to all variations of ISO9660.
/// Also, this class does not currently support the UDF file system.
///
/// TODO: Add functions to enumerate a directory or visit a file...
///
/// The information for building class came from three primary sources:
/// 1. The ISO9660 wikipedia article:
/// http://en.wikipedia.org/wiki/ISO_9660
/// 2. ISO9660 Simplified for DOS/Windows
/// http://alumnus.caltech.edu/~pje/iso9660.html
/// 3. The ISO 9660 File System
/// http://users.telenet.be/it3.consultants.bvba/handouts/ISO9960.html
/// </summary>
public class ISOFile {
#region Constants
/// <summary>
/// We are hard coding the SECTOR_SIZE
/// </summary>
public const int SECTOR_SIZE = 2048;
#endregion
#region Public Members
/// <summary>
/// This is a list of all the volume descriptors in the disk image.
/// NOTE: The first entry should be the primary volume.
/// </summary>
public List<ISOVolumeDescriptor> VolumeDescriptors;
/// <summary>
/// The Directory that is the root of this file system
/// </summary>
public ISODirectoryNode Root;
#endregion
#region Construction
/// <summary>
/// Construct the ISO file data structures, but leave everything
/// blank.
/// </summary>
public ISOFile() {
}
#endregion
#region Parsing
/// <summary>
/// Parse the given stream to populate the iso information
/// </summary>
/// <param name="s">The stream which we are using to parse the image.
/// Should already be located at the start of the image.</param>
public bool Parse(Stream s, int startSector=16)
{
this.VolumeDescriptors = new List<ISOVolumeDescriptor>();
Root = null;
long startPosition = s.Position;
byte[] buffer = new byte[ISOFile.SECTOR_SIZE];
// Seek through the first volume descriptor
s.Seek(startPosition + (SECTOR_SIZE * startSector), SeekOrigin.Begin);
// Read one or more volume descriptors
do {
//zero 24-jun-2013 - improved validity checks
ISOVolumeDescriptor desc = new ISOVolumeDescriptor();
bool isValid = desc.Parse(s);
if (!isValid) return false;
if (desc.IsTerminator())
break;
else if (desc.Type < 4)
this.VolumeDescriptors.Add(desc);
else
//found a volume descriptor of incorrect type.. maybe this isnt a cdfs
//supposedly these exist.. wait for one to show up
return false;
} while(true);
//zero 24-jun-2013 - well, my very first test iso had 2 volume descriptors.
// Check to make sure we only read one volume descriptor
// Finding more could be an error with the disk.
//if (this.VolumeDescriptors.Count != 1) {
// Console.WriteLine("Strange ISO format...");
// return;
//}
//zero 24-jun-2013 - if theres no volume descriptors, we're gonna call this not a cdfs
if (VolumeDescriptors.Count == 0) return false;
// Visit all the directories and get the offset of each directory/file
// We need to keep track of the directories and files we have visited in case there are loops.
Dictionary<long, ISONode> visitedNodes = new Dictionary<long,ISONode>();
// Create (and visit) the root node
this.Root = new ISODirectoryNode(this.VolumeDescriptors[0].RootDirectoryRecord);
visitedNodes.Add(this.Root.Offset, this.Root);
this.Root.Parse(s, visitedNodes);
return true;
}
#endregion
#region Printing
/// <summary>
/// Print the directory tree for the image.
/// </summary>
public void Print() {
// DEBUGGING: Now print out the directory structure
this.Root.Print(0);
}
#endregion
}
}
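Since Parse takes any seekable Stream positioned at the start of the image, it also works against a plain 2048-byte-per-sector .iso on disk, not just a DiscStream. A hedged sketch (the path is made up):
//hedged sketch, not part of this commit
using (var fs = System.IO.File.OpenRead(@"C:\dumps\game.iso"))
{
var iso = new ISOFile();
if (iso.Parse(fs))
Console.WriteLine("found {0} volume descriptor(s)", iso.VolumeDescriptors.Count);
}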

View File

@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ISOParser {
/// <summary>
/// Representation of a file in the file system.
/// </summary>
public class ISOFileNode : ISONode {
#region Construction
/// <summary>
/// Constructor.
/// </summary>
/// <param name="record">The record to construct from.</param>
public ISOFileNode( ISONodeRecord record ) : base( record ) {
// Do Nothing
}
#endregion
}
}

View File

@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ISOParser {
/// <summary>
/// Abstract class to represent a file/directory node
/// </summary>
public class ISONode {
#region Public Properties
/// <summary>
/// The record this node was created from.
/// </summary>
public ISONodeRecord FirstRecord;
/// <summary>
/// The sector offset of the file/directory data
/// </summary>
public long Offset;
/// <summary>
/// The byte length of the file/directory data.
/// </summary>
public long Length;
#endregion
#region Construction
/// <summary>
/// Constructor.
/// TODO: Make this constructor protected???
/// </summary>
/// <param name="record">The ISONodeRecord to construct from.</param>
public ISONode( ISONodeRecord record ) {
this.FirstRecord = record;
this.Offset = record.OffsetOfData;
this.Length = record.LengthOfData;
}
#endregion
}
}

View File

@ -0,0 +1,208 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace ISOParser {
/// <summary>
/// Class to represent the file/directory information read from the disk.
/// </summary>
public class ISONodeRecord {
#region Constants
/// <summary>
/// String representing the current directory entry
/// </summary>
public const string CURRENT_DIRECTORY = ".";
/// <summary>
/// String representing the parent directory entry
/// </summary>
public const string PARENT_DIRECTORY = "..";
#endregion
#region Public Properties
/// <summary>
/// The length of the record in bytes.
/// </summary>
public byte Length;
/// <summary>
/// The file offset of the data for this file/directory (in sectors).
/// </summary>
public long OffsetOfData;
/// <summary>
/// The length of the data for this file/directory (in bytes).
/// </summary>
public long LengthOfData;
/// <summary>
/// The file/directory creation year since 1900.
/// </summary>
public byte Year;
/// <summary>
/// The file/directory creation month.
/// </summary>
public byte Month;
/// <summary>
/// The file/directory creation day.
/// </summary>
public byte Day;
/// <summary>
/// The file/directory creation hour.
/// </summary>
public byte Hour;
/// <summary>
/// The file/directory creation minute.
/// </summary>
public byte Minute;
/// <summary>
/// The file/directory creation second.
/// </summary>
public byte Second;
/// <summary>
/// The file time offset from GMT.
/// </summary>
public byte TimeZoneOffset;
/// <summary>
/// Flags representing the attributes of this file/directory.
/// </summary>
public byte Flags;
/// <summary>
/// The length of the file/directory name.
/// </summary>
public byte NameLength;
/// <summary>
/// The file/directory name.
/// </summary>
public string Name;
#endregion
#region Construction
/// <summary>
/// Constructor
/// </summary>
public ISONodeRecord() {
// Set initial values
this.Length = 0;
this.OffsetOfData = 0;
this.LengthOfData = 0;
this.Year = 0;
this.Month = 0;
this.Day = 0;
this.Hour = 0;
this.Minute = 0;
this.Second = 0;
this.TimeZoneOffset = 0;
this.Flags = 0;
this.NameLength = 0;
this.Name = null;
}
#endregion
#region File/Directory Methods
/// <summary>
/// Return true if the record represents a file.
/// </summary>
/// <returns>True if a file.</returns>
public bool IsFile() {
return ((this.Flags >> 1) & 0x01) == 0;
}
/// <summary>
/// Return true if the record represents a directory.
/// </summary>
/// <returns>True if a directory.</returns>
public bool IsDirectory() {
return ((this.Flags >> 1) & 0x01) == 1;
}
#endregion
#region Parsing
/// <summary>
/// Parse the record from an array and offset.
/// </summary>
/// <param name="data">The array to parse from.</param>
/// <param name="cursor">The offset to start parsing at.</param>
public void Parse(byte[] data, int cursor) {
// Put the array into a memory stream and pass to the main parsing function
MemoryStream s = new MemoryStream(data);
s.Seek(cursor, SeekOrigin.Begin);
this.Parse(s);
}
/// <summary>
/// Parse the node record from the given stream.
/// </summary>
/// <param name="s">The stream to parse from.</param>
public void Parse(Stream s) {
EndianBitConverter bc = EndianBitConverter.CreateForLittleEndian();
long startPosition = s.Position;
byte[] buffer = new byte[ISOFile.SECTOR_SIZE];
// Get the length
s.Read(buffer, 0, 1);
this.Length = buffer[0];
//the number of sectors in the attribute record
s.Read(buffer, 0, 1);
// Read Data Offset
s.Read(buffer, 0, 8);
this.OffsetOfData = (long)bc.ToInt32(buffer);
// Read Data Length
s.Read(buffer, 0, 8);
this.LengthOfData = (long)bc.ToInt32(buffer);
// Read the time and flags
s.Read(buffer, 0, 8);
this.Year = buffer[0];
this.Month = buffer[1];
this.Day = buffer[2];
this.Hour = buffer[3];
this.Minute = buffer[4];
this.Second = buffer[5];
this.TimeZoneOffset = buffer[6];
this.Flags = buffer[7];
s.Read(buffer, 0, 6);
// Read the name length
s.Read(buffer, 0, 1);
this.NameLength = buffer[0];
// Read the directory name
s.Read(buffer, 0, this.NameLength);
if (this.NameLength == 1 && (buffer[0] == 0 || buffer[0] == 1)) {
if (buffer[0] == 0)
this.Name = ISONodeRecord.CURRENT_DIRECTORY;
else
this.Name = ISONodeRecord.PARENT_DIRECTORY;
}
else {
this.Name = ASCIIEncoding.ASCII.GetString(buffer, 0, this.NameLength);
}
// Seek to end
s.Seek(startPosition + this.Length, SeekOrigin.Begin);
}
#endregion
}
}
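The file/directory distinction comes entirely from bit 1 of the flags byte; a hedged one-line equivalent of IsDirectory() (record is a hypothetical ISONodeRecord):
//hedged sketch, not part of this commit
bool isDir = (record.Flags & 0x02) != 0; //bit 1 set => directory, clear => file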

View File

@ -0,0 +1,320 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace ISOParser {
/// <summary>
/// Represents a volume descriptor for a disk image.
/// </summary>
public class ISOVolumeDescriptor {
#region Constants
/// <summary>
/// We are handling the parsing by reading the entire header and
/// extracting the appropriate bytes.
///
/// This is done for performance reasons.
/// </summary>
private const int LENGTH_SHORT_IDENTIFIER = 32;
private const int LENGTH_IDENTIFIER = 37;
private const int LENGTH_LONG_IDENTIFIER = 128;
private const int LENGTH_ROOT_DIRECTORY_RECORD = 34;
private const int LENGTH_TIME = 17;
private const int LENGTH_RESERVED = 512;
#endregion
#region Public Properties
/// <summary>
/// The type of this volume descriptor; only 1 and 255 are supported
/// </summary>
public byte Type;
/// <summary>
/// The system identifier
/// </summary>
public byte[] SystemIdentifier;
/// <summary>
/// The volume identifier
/// </summary>
public byte[] VolumeIdentifier;
/// <summary>
/// The number of sectors on the disk
/// </summary>
public int NumberOfSectors;
/// <summary>
/// Volume Set Size (should be 1)
/// </summary>
public int VolumeSetSize;
/// <summary>
/// Volume Sequence Number (should be 1)
/// </summary>
public int VolumeSequenceNumber;
/// <summary>
/// Sector Size (should be 2048)
/// </summary>
public int SectorSize;
/// <summary>
/// Size of the path table
/// </summary>
public int PathTableSize;
/// <summary>
/// Sector offset of the first path table
/// </summary>
public int OffsetOfFirstLittleEndianPathTable;
/// <summary>
/// Sector offset of the second path table
/// </summary>
public int OffsetOfSecondLittleEndianPathTable;
/// <summary>
/// Sector offset of the first path table
/// </summary>
public int OffsetOfFirstBigEndianPathTable;
/// <summary>
/// Sector offset of the second path table
/// </summary>
public int OffsetOfSecondBigEndianPathTable;
/// <summary>
/// The root directory record
/// </summary>
public ISONodeRecord RootDirectoryRecord;
/// <summary>
/// The volume set identifier
/// </summary>
public byte[] VolumeSetIdentifier;
/// <summary>
/// The publisher identifier
/// </summary>
public byte[] PublisherIdentifier;
/// <summary>
/// The data preparer identifier
/// </summary>
public byte[] DataPreparerIdentifier;
/// <summary>
/// The application identifier
/// </summary>
public byte[] ApplicationIdentifier;
/// <summary>
/// The copyright identifier
/// </summary>
public byte[] CopyrightFileIdentifier;
/// <summary>
/// The abstract file identifier
/// </summary>
public byte[] AbstractFileIdentifier;
/// <summary>
/// The bibliographical file identifier
/// </summary>
public byte[] BibliographicalFileIdentifier;
/// <summary>
/// The time and date the volume was created
/// </summary>
public byte[] VolumeCreationDateTime;
/// <summary>
/// The time and date the volume was last modified
/// </summary>
public byte[] LastModifiedDateTime;
/// <summary>
/// The time and date the volume expires
/// </summary>
public byte[] ExpirationDateTime;
/// <summary>
/// The time and date when the volume is effective
/// </summary>
public byte[] EffectiveDateTime;
/// <summary>
/// Extra reserved data
/// </summary>
public byte[] Reserved;
#endregion
#region Construction
/// <summary>
/// Constructor.
/// </summary>
public ISOVolumeDescriptor() {
// Set everything to the default value
this.Type = 0;
this.SystemIdentifier = new byte[LENGTH_SHORT_IDENTIFIER];
this.VolumeIdentifier = new byte[LENGTH_SHORT_IDENTIFIER];
this.NumberOfSectors = 0;
this.VolumeSetSize = 1;
this.VolumeSequenceNumber = 1;
this.SectorSize = ISOFile.SECTOR_SIZE;
this.PathTableSize = 0;
this.OffsetOfFirstLittleEndianPathTable = 0;
this.OffsetOfSecondLittleEndianPathTable = 0;
this.OffsetOfFirstBigEndianPathTable = 0;
this.OffsetOfSecondBigEndianPathTable = 0;
this.RootDirectoryRecord = new ISONodeRecord();
this.VolumeSetIdentifier = new byte[LENGTH_LONG_IDENTIFIER];
this.PublisherIdentifier = new byte[LENGTH_LONG_IDENTIFIER];
this.DataPreparerIdentifier = new byte[LENGTH_LONG_IDENTIFIER];
this.ApplicationIdentifier = new byte[LENGTH_LONG_IDENTIFIER];
this.CopyrightFileIdentifier = new byte[LENGTH_IDENTIFIER];
this.AbstractFileIdentifier = new byte[LENGTH_IDENTIFIER];
this.BibliographicalFileIdentifier = new byte[LENGTH_IDENTIFIER];
this.VolumeCreationDateTime = new byte[LENGTH_TIME];
this.LastModifiedDateTime = new byte[LENGTH_TIME];
this.ExpirationDateTime = new byte[LENGTH_TIME];
this.EffectiveDateTime = new byte[LENGTH_TIME];
this.Reserved = new byte[LENGTH_RESERVED];
}
#endregion
#region Parsing
/// <summary>
/// Parse the volume descriptor header.
/// </summary>
/// <param name="s">The stream to parse from.</param>
public bool Parse(Stream s) {
EndianBitConverter bc = EndianBitConverter.CreateForLittleEndian();
EndianBitConverter bcBig = EndianBitConverter.CreateForBigEndian();
long startPosition = s.Position;
byte[] buffer = new byte[ISOFile.SECTOR_SIZE];
// Read the entire structure
s.Read(buffer, 0, ISOFile.SECTOR_SIZE);
// Get the type
this.Type = buffer[0];
//zero 24-jun-2013 - validate
// "CD001" + 0x01
if (buffer[1] == 'C' && buffer[2] == 'D' && buffer[3] == '0' && buffer[4] == '0' && buffer[5] == '1' && buffer[6] == 0x01)
{
//it seems to be a valid volume descriptor
}
else
{
return false;
}
// Handle the primary volume information
if (this.Type == 1) {
int cursor = 8;
// Get the system identifier
Array.Copy(buffer, cursor,
this.SystemIdentifier, 0, LENGTH_SHORT_IDENTIFIER);
cursor += LENGTH_SHORT_IDENTIFIER;
// Get the volume identifier
Array.Copy(buffer, cursor,
this.VolumeIdentifier, 0, LENGTH_SHORT_IDENTIFIER);
cursor += LENGTH_SHORT_IDENTIFIER;
cursor += 8;
// Get the total number of sectors
this.NumberOfSectors = bc.ToInt32(buffer, cursor);
cursor += 8;
cursor += 32;
this.VolumeSetSize = bc.ToInt16(buffer, cursor);
cursor += 4;
this.VolumeSequenceNumber = bc.ToInt16(buffer, cursor);
cursor += 4;
this.SectorSize = bc.ToInt16(buffer, cursor);
cursor += 4;
this.PathTableSize = bc.ToInt32(buffer, cursor);
cursor += 8;
this.OffsetOfFirstLittleEndianPathTable = bc.ToInt32(buffer, cursor);
cursor += 4;
this.OffsetOfSecondLittleEndianPathTable = bc.ToInt32(buffer, cursor);
cursor += 4;
this.OffsetOfFirstBigEndianPathTable = bcBig.ToInt32(buffer, cursor);
cursor += 4;
this.OffsetOfSecondBigEndianPathTable = bcBig.ToInt32(buffer, cursor);
cursor += 4;
this.RootDirectoryRecord.Parse(buffer, cursor);
cursor += LENGTH_ROOT_DIRECTORY_RECORD;
Array.Copy(buffer, cursor,
this.VolumeSetIdentifier, 0, LENGTH_LONG_IDENTIFIER);
cursor += LENGTH_LONG_IDENTIFIER;
Array.Copy(buffer, cursor,
this.PublisherIdentifier, 0, LENGTH_LONG_IDENTIFIER);
cursor += LENGTH_LONG_IDENTIFIER;
Array.Copy(buffer, cursor,
this.DataPreparerIdentifier, 0, LENGTH_LONG_IDENTIFIER);
cursor += LENGTH_LONG_IDENTIFIER;
Array.Copy(buffer, cursor,
this.ApplicationIdentifier, 0, LENGTH_LONG_IDENTIFIER);
cursor += LENGTH_LONG_IDENTIFIER;
Array.Copy(buffer, cursor,
this.CopyrightFileIdentifier, 0, LENGTH_IDENTIFIER);
cursor += LENGTH_IDENTIFIER;
Array.Copy(buffer, cursor,
this.AbstractFileIdentifier, 0, LENGTH_IDENTIFIER);
cursor += LENGTH_IDENTIFIER;
Array.Copy(buffer, cursor,
this.BibliographicalFileIdentifier, 0, LENGTH_IDENTIFIER);
cursor += LENGTH_IDENTIFIER;
Array.Copy(buffer, cursor,
this.VolumeCreationDateTime, 0, LENGTH_TIME);
cursor += LENGTH_TIME;
Array.Copy(buffer, cursor,
this.LastModifiedDateTime, 0, LENGTH_TIME);
cursor += LENGTH_TIME;
Array.Copy(buffer, cursor,
this.ExpirationDateTime, 0, LENGTH_TIME);
cursor += LENGTH_TIME;
Array.Copy(buffer, cursor,
this.EffectiveDateTime, 0, LENGTH_TIME);
cursor += LENGTH_TIME;
cursor += 1;
cursor += 1;
Array.Copy(buffer, cursor,
this.Reserved, 0, LENGTH_RESERVED);
cursor += LENGTH_RESERVED;
}
return true;
}
#endregion
#region Type Information
/// <summary>
/// Returns true if this is the terminator volume descriptor.
/// </summary>
/// <returns>True if the terminator.</returns>
public bool IsTerminator() {
return (this.Type == 255);
}
#endregion
}
}