//BizHawk/BizHawk.Emulation.DiscSystem/Disc.cs


using System;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.IO;
using System.Collections.Generic;
//http://www.pctechguide.com/iso-9660-data-format-for-cds-cd-roms-cd-rs-and-cd-rws
//http://linux.die.net/man/1/cue2toc
//http://cdemu.sourceforge.net/project.php#sf
//apparently cdrdao is the ultimate linux tool for doing this stuff, but it doesn't support DAO96 (or other DAO modes) which would be necessary to extract P-Q subchannels
//(cdrdao only supports R-W)
//here is a featureset list of windows cd burning programs (useful for cuesheet compatibility info)
//http://www.dcsoft.com/cue_mastering_progs.htm
//good
//http://linux-sxs.org/bedtime/cdapi.html
//http://en.wikipedia.org/wiki/Track_%28CD%29
//http://docs.google.com/viewer?a=v&q=cache:imNKye05zIEJ:www.13thmonkey.org/documentation/SCSI/mmc-r10a.pdf+q+subchannel+TOC+format&hl=en&gl=us&pid=bl&srcid=ADGEEShtYqlluBX2lgxTL3pVsXwk6lKMIqSmyuUCX4RJ3DntaNq5vI2pCvtkyze-fumj7vvrmap6g1kOg5uAVC0IxwU_MRhC5FB0c_PQ2BlZQXDD7P3GeNaAjDeomelKaIODrhwOoFNb&sig=AHIEtbRXljAcFjeBn3rMb6tauHWjSNMYrw
//r:\consoles\~docs\yellowbook
//http://digitalx.org/cue-sheet/examples/
//
//"qemu cdrom emulator"
//http://www.koders.com/c/fid7171440DEC7C18B932715D671DEE03743111A95A.aspx
//less good
//http://www.cyberciti.biz/faq/getting-volume-information-from-cds-iso-images/
//http://www.cims.nyu.edu/cgi-systems/man.cgi?section=7I&topic=cdio
//ideas:
/*
* do some stuff asynchronously. for example, decoding mp3 sectors.
* keep a list of 'blobs' (giant bins or decoded wavs likely) which can reference the disk
* keep a list of sectors and the blob/offset from which they pull -- also whether the sector is available
* if it is not available and something requests it then it will have to block while that sector gets generated
* perhaps the blobs know how to resolve themselves and the requested sector can be immediately resolved (priority boost)
* mp3 blobs should be hashed and dropped in %TEMP% as a wav decode
*/
//here is an MIT licensed C mp3 decoder
//http://core.fluendo.com/gstreamer/src/gst-fluendo-mp3/
/*information on saturn TOC and session data structures is on pdf page 58 of System Library User's Manual;
* as seen in yabause, there are 1000 u32s in this format:
* Ctrl[4bit] Adr[4bit] StartFrameAddressFAD[24bit] (nonexisting tracks are 0xFFFFFFFF)
* Followed by First Track Information, Last Track Information..
* Ctrl[4bit] Adr[4bit] FirstTrackNumber/LastTrackNumber[8bit] and then some stuff I don't understand
* ..and Read Out Information:
* Ctrl[4bit] Adr[4bit] ReadOutStartFrameAddress[24bit]
*
* Also there is some stuff about FAD of sessions.
* This should be generated by the saturn core, but we need to make sure we pass down enough information to do it
*/
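//a minimal sketch (an assumption based on the layout described above, not taken from yabause or a saturn core)
//of how one of those 1000 u32 TOC entries might be packed:
//  uint tocEntry = (uint)((ctrl & 0xF) << 28) | (uint)((adr & 0xF) << 24) | (fad & 0xFFFFFF);
//  //nonexistent tracks would simply hold 0xFFFFFFFF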
//2048 bytes packed into 2352:
//12 bytes sync(00 ff ff ff ff ff ff ff ff ff ff 00)
//3 bytes sector address (min+A0),sec,frac //does this correspond to ccd `point` field in the TOC entries?
//sector mode byte (0: silence; 1: 2048-byte mode (EDC,ECC,CIRC); 2: mode2 (could be 2336 [vanilla mode2], 2048 [xa mode2 form1], 2324 [xa mode2 form2]))
//cue sheets may use mode1_2048 (and the error coding needs to be regenerated to get accurate raw data) or mode1_2352 (the entire sector is present)
//audio is a different mode, seems to be just 2352 bytes with no sync, header or error correction. i guess the CIRC error correction is still there
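//byte map of a raw 2352-byte MODE1 sector as regenerated by Sector_Mode1_2048.Read_2352 below:
//  [0..11]      sync (00 FF FF FF FF FF FF FF FF FF FF 00)
//  [12..14]     BCD address: amin, asec, afrac
//  [15]         mode byte (1)
//  [16..2063]   2048 bytes of user data
//  [2064..2067] EDC
//  [2068..2075] reserved (zero)
//  [2076..2351] ECC (172 P-parity + 104 Q-parity bytes)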
namespace BizHawk.Emulation.DiscSystem
{
public partial class Disc : IDisposable
{
public interface ISector
{
/// <summary>
/// reads the entire sector, raw
/// </summary>
int Read_2352(byte[] buffer, int offset);
/// <summary>
/// reads 2048 bytes of user data. precisely what this means isn't always 100% certain (for instance, formless mode 2 has 2336 bytes of user data instead of 2048),
/// but it's certain enough for this to be useful
/// </summary>
int Read_2048(byte[] buffer, int offset);
}
/// <summary>
/// Presently, an IBlob doesn't need to be thread-safe; that would be quite an onerous demand.
/// This should probably be managed by the Disc class somehow, or by the user making another Disc.
/// </summary>
public interface IBlob : IDisposable
{
/// <summary>
/// note the unusual parameter order: normally the destination buffer would come first
/// </summary>
/// <param name="byte_pos">location in the blob to read from</param>
/// <param name="buffer">destination buffer for read data</param>
/// <param name="offset">offset into destination buffer</param>
/// <param name="count">amount to read</param>
int Read(long byte_pos, byte[] buffer, int offset, int count);
}
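//illustrative usage only: reading one raw sector that starts at byte 'sectorStart' of a blob would look like
//blob.Read(sectorStart, buf, 0, 2352), with the return value indicating how many bytes were actually read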
public class Blob_ZeroPadAdapter : IBlob
{
public Blob_ZeroPadAdapter(IBlob baseBlob, long padFrom, long padLen)
{
this.baseBlob = baseBlob;
this.padFrom = padFrom;
this.padLen = padLen;
}
public int Read(long byte_pos, byte[] buffer, int offset, int count)
{
throw new NotImplementedException("Blob_ZeroPadAdapter hasn't been tested yet! please report this!");
//something about this seems unnecessarily complex, but... i dunno.
/*
//figure out how much remains until the zero-padding begins
long remain = padFrom - byte_pos;
int todo;
if (remain < count)
todo = (int)remain;
else todo = count;
//read up until the zero-padding
int totalRead = 0;
int readed = baseBlob.Read(byte_pos, buffer, offset, todo);
totalRead += readed;
offset += todo;
//if we didnt read enough, we certainly shouldnt try to read any more
if (readed < todo)
return readed;
//if that was all we needed, then we're done
count -= todo;
if (count == 0)
return totalRead;
//if we need more, it must come from zero-padding
remain = padLen;
if (remain < count)
todo = (int)remain;
else todo = count;
Array.Clear(buffer, offset, todo);
totalRead += todo;
return totalRead;
*/
}
public void Dispose()
{
baseBlob.Dispose();
}
IBlob baseBlob;
long padFrom;
long padLen;
}
class Blob_RawFile : IBlob
{
public string PhysicalPath {
get { return physicalPath; }
set
{
physicalPath = value;
length = new FileInfo(physicalPath).Length;
}
}
string physicalPath;
long length;
public long Offset = 0;
BufferedStream fs;
public void Dispose()
{
if (fs != null)
{
fs.Dispose();
fs = null;
}
}
public int Read(long byte_pos, byte[] buffer, int offset, int count)
{
//use quite a large buffer, because normally we will be reading these sequentially but in small chunks.
//this enhances performance considerably
const int buffersize = 2352 * 75 * 2;
if (fs == null)
fs = new BufferedStream(new FileStream(physicalPath, FileMode.Open, FileAccess.Read, FileShare.Read), buffersize);
long target = byte_pos + Offset;
if(fs.Position != target)
fs.Position = target;
return fs.Read(buffer, offset, count);
}
public long Length
{
get
{
return length;
}
}
}
/// <summary>
/// this ISector is dumb and only knows how to drag chunks off a source blob
/// </summary>
class Sector_RawBlob : ISector
{
public IBlob Blob;
public long Offset;
public int Read_2352(byte[] buffer, int offset)
{
return Blob.Read(Offset, buffer, offset, 2352);
}
public int Read_2048(byte[] buffer, int offset)
{
return Blob.Read(Offset, buffer, offset, 2048);
}
}
/// <summary>
/// this ISector always returns zeroes
/// </summary>
class Sector_Zero : ISector
{
public int Read_2352(byte[] buffer, int offset)
{
Array.Clear(buffer, offset, 2352);
return 2352;
}
public int Read_2048(byte[] buffer, int offset)
{
Array.Clear(buffer, offset, 2048);
return 2048;
}
}
// ------ replaced by Blob_ZeroPadAdapter
///// <summary>
///// this ISector adapts another ISector by always returning zeroes
///// TODO I dont like the way this works. I think blobs should get adapted instead to zero-pad to a certain length.
///// </summary>
//class Sector_ZeroPad : ISector
//{
// public ISector BaseSector;
// public int BaseLength;
// public int Read(byte[] buffer, int offset)
// {
// return _Read(buffer, offset, 2352);
// }
// public int Read_2048(byte[] buffer, int offset)
// {
// return _Read(buffer, offset, 2352);
// }
// int _Read(byte[] buffer, int offset, int amount)
// {
// int read = BaseSector.Read(buffer, offset);
// if(read < BaseLength) return read;
// for (int i = BaseLength; i < amount; i++)
// buffer[offset + i] = 0;
// return amount;
// }
//}
abstract class Sector_Mode1_or_Mode2_2352 : ISector
{
public ISector BaseSector;
public abstract int Read_2352(byte[] buffer, int offset);
public abstract int Read_2048(byte[] buffer, int offset);
}
/// <summary>
/// This ISector is a raw MODE1 sector
/// </summary>
class Sector_Mode1_2352 : Sector_Mode1_or_Mode2_2352
{
public override int Read_2352(byte[] buffer, int offset)
{
return BaseSector.Read_2352(buffer, offset);
}
public override int Read_2048(byte[] buffer, int offset)
{
//to get 2048 bytes out of this sector type, start 16 bytes in
if (TempSector == null) TempSector = new byte[2352]; //[ThreadStatic] field initializers only run for the first thread
int ret = BaseSector.Read_2352(TempSector, 0);
Buffer.BlockCopy(TempSector, 16, buffer, offset, 2048);
System.Diagnostics.Debug.Assert(buffer != TempSector);
return 2048;
}
[ThreadStatic]
static byte[] TempSector;
}
/// <summary>
/// this ISector is a raw MODE2 sector. could be form 0,1,2... who can say? supposedly:
/// To tell the different Mode 2s apart you have to examine bytes 16-23 of the sector (the first 8 bytes of Mode Data).
/// If bytes 16-19 are not the same as 20-23, then it is Mode 2. If they are equal and bit 5 is on (0x20), then it is Mode 2 Form 2. Otherwise it is Mode 2 Form 1.
/// ...but we're not using this information in any way
/// </summary>
class Sector_Mode2_2352 : Sector_Mode1_or_Mode2_2352
{
public override int Read_2352(byte[] buffer, int offset)
{
return BaseSector.Read_2352(buffer, offset);
}
public override int Read_2048(byte[] buffer, int offset)
{
//to get 2048 bytes out of this sector type, start 24 bytes in
if (TempSector == null) TempSector = new byte[2352]; //[ThreadStatic] field initializers only run for the first thread
int ret = BaseSector.Read_2352(TempSector, 0);
Buffer.BlockCopy(TempSector, 24, buffer, offset, 2048);
System.Diagnostics.Debug.Assert(buffer != TempSector);
return 2048;
}
[ThreadStatic]
static byte[] TempSector;
}
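//a minimal sketch of the mode 2 form detection described in the Sector_Mode2_2352 comment above.
//it is not used anywhere in this file, and the choice of byte 18 as the submode byte is an assumption,
//since that comment only says "bit 5" without naming the byte.
//returns 0 for plain (formless) mode 2, 1 for form 1, 2 for form 2.
static int GuessMode2Form(byte[] rawSector2352)
{
//bytes 16-19 are the XA subheader, bytes 20-23 are its copy
for (int i = 0; i < 4; i++)
if (rawSector2352[16 + i] != rawSector2352[20 + i])
return 0; //copies differ: plain (formless) mode 2
//copies match: bit 5 (0x20) of the submode byte (assumed to be byte 18) distinguishes form 2 from form 1
return (rawSector2352[18] & 0x20) != 0 ? 2 : 1;
}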
protected static byte BCD_Byte(byte val)
{
byte ret = (byte)(val % 10);
ret += (byte)(16 * (val / 10));
return ret;
}
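//e.g. BCD_Byte(59) == 0x59 and BCD_Byte(7) == 0x07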
//a blob that also has an ECM cache associated with it. maybe one day.
class ECMCacheBlob
{
public ECMCacheBlob(IBlob blob)
{
BaseBlob = blob;
}
public IBlob BaseBlob;
}
/// <summary>
/// this ISector is a MODE1 sector that is generating itself from an underlying MODE1/2048 userdata piece
/// </summary>
class Sector_Mode1_2048 : ISector
{
public Sector_Mode1_2048(int ABA)
{
byte aba_min = (byte)(ABA / 60 / 75);
byte aba_sec = (byte)((ABA / 75) % 60);
byte aba_frac = (byte)(ABA % 75);
bcd_aba_min = BCD_Byte(aba_min);
bcd_aba_sec = BCD_Byte(aba_sec);
bcd_aba_frac = BCD_Byte(aba_frac);
}
byte bcd_aba_min, bcd_aba_sec, bcd_aba_frac;
public ECMCacheBlob Blob;
public long Offset;
byte[] extra_data;
bool has_extra_data;
public int Read_2048(byte[] buffer, int offset)
{
//this is easy. we only have 2048 bytes, and 2048 bytes were requested
return Blob.BaseBlob.Read(Offset, buffer, offset, 2048);
}
public int Read_2352(byte[] buffer, int offset)
{
//user data
int read = Blob.BaseBlob.Read(Offset, buffer, offset + 16, 2048);
//if we read the 2048 physical bytes OK, then return the complete sector
if (read == 2048 && has_extra_data)
{
Buffer.BlockCopy(extra_data, 0, buffer, offset, 16);
Buffer.BlockCopy(extra_data, 16, buffer, offset + 2064, 4 + 8 + 172 + 104);
return 2352;
}
//sync
buffer[offset + 0] = 0x00; buffer[offset + 1] = 0xFF; buffer[offset + 2] = 0xFF; buffer[offset + 3] = 0xFF;
buffer[offset + 4] = 0xFF; buffer[offset + 5] = 0xFF; buffer[offset + 6] = 0xFF; buffer[offset + 7] = 0xFF;
buffer[offset + 8] = 0xFF; buffer[offset + 9] = 0xFF; buffer[offset + 10] = 0xFF; buffer[offset + 11] = 0x00;
//sector address
buffer[offset + 12] = bcd_aba_min;
buffer[offset + 13] = bcd_aba_sec;
buffer[offset + 14] = bcd_aba_frac;
//mode 1
buffer[offset + 15] = 1;
//calculate EDC and poke into the sector
uint edc = ECM.EDC_Calc(buffer, offset, 2064);
ECM.PokeUint(buffer, offset + 2064, edc);
//intermediate
for (int i = 0; i < 8; i++) buffer[offset + 2068 + i] = 0;
//ECC
ECM.ECC_Populate(buffer, offset, buffer, offset, false);
//VALIDATION - check our homemade algorithms against code derived from ECM
////EDC
//GPL_ECM.edc_validateblock(buffer, 2064, buffer, offset + 2064);
////ECC
//GPL_ECM.ecc_validate(buffer, offset, false);
//if we read the 2048 physical bytes OK, then return the complete sector
if (read == 2048)
{
extra_data = new byte[16 + 4 + 8 + 172 + 104];
Buffer.BlockCopy(buffer, offset, extra_data, 0, 16);
Buffer.BlockCopy(buffer, offset + 2064, extra_data, 16, 4 + 8 + 172 + 104);
has_extra_data = true;
return 2352;
}
//otherwise, return a smaller value to indicate an error
else return read;
}
}
//this is a physical 2352 byte sector.
public class SectorEntry
{
public SectorEntry(ISector sec) { Sector = sec; }
public ISector Sector;
//todo - add some PARAMETER fields to this, so that the ISector can use them (so that each ISector doesn't have to be constructed also)
//also then, maybe this could be a struct
//q-subchannel stuff. can be returned directly, or built into the entire subcode sector if you want
/// <summary>
/// ADR and CONTROL
/// </summary>
public byte q_status;
/// <summary>
/// BCD indications of the current track number and index
/// </summary>
public BCD2 q_tno, q_index;
/// <summary>
/// track-relative timestamp
/// </summary>
public BCD2 q_min, q_sec, q_frame;
/// <summary>
/// absolute timestamp
/// </summary>
public BCD2 q_amin, q_asec, q_aframe;
public void Read_SubchannelQ(byte[] buffer, int offset)
{
buffer[offset + 0] = q_status;
buffer[offset + 1] = q_tno.BCDValue;
buffer[offset + 2] = q_index.BCDValue;
buffer[offset + 3] = q_min.BCDValue;
buffer[offset + 4] = q_sec.BCDValue;
buffer[offset + 5] = q_frame.BCDValue;
buffer[offset + 6] = 0;
buffer[offset + 7] = q_amin.BCDValue;
buffer[offset + 8] = q_asec.BCDValue;
buffer[offset + 9] = q_aframe.BCDValue;
ushort crc16 = CRC16_CCITT.Calculate(buffer, offset, 10);
//CRC is stored inverted and big endian
buffer[offset + 10] = (byte)(~(crc16 >> 8));
buffer[offset + 11] = (byte)(~(crc16));
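//e.g. a crc16 of 0x1234 is stored as 0xED 0xCB (each byte inverted, high byte first)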
}
}
public List<IBlob> Blobs = new List<IBlob>();
public List<SectorEntry> Sectors = new List<SectorEntry>();
public DiscTOC TOC = new DiscTOC();
public void Dispose()
{
foreach (var blob in Blobs)
{
blob.Dispose();
}
}
void FromIsoPathInternal(string isoPath)
{
//make a fake cue file to represent this iso file
string isoCueWrapper = @"
FILE ""xarp.barp.marp.farp"" BINARY
TRACK 01 MODE1/2048
INDEX 01 00:00:00
";
string cueDir = "";
var cue = new Cue();
CueFileResolver["xarp.barp.marp.farp"] = isoPath;
cue.LoadFromString(isoCueWrapper);
FromCueInternal(cue, cueDir, new CueBinPrefs());
//var session = new DiscTOC.Session();
//session.num = 1;
//TOC.Sessions.Add(session);
//var track = new DiscTOC.Track();
//track.num = 1;
//session.Tracks.Add(track);
//var index = new DiscTOC.Index();
//index.num = 0;
//track.Indexes.Add(index);
//index = new DiscTOC.Index();
//index.num = 1;
//track.Indexes.Add(index);
//var fiIso = new FileInfo(isoPath);
//Blob_RawFile blob = new Blob_RawFile();
//blob.PhysicalPath = fiIso.FullName;
//Blobs.Add(blob);
//int num_aba = (int)(fiIso.Length / 2048);
//track.length_aba = num_aba;
//if (fiIso.Length % 2048 != 0)
// throw new InvalidOperationException("invalid iso file (size not multiple of 2048)");
////TODO - handle this with Final Fantasy 9 cd1.iso
//var ecmCacheBlob = new ECMCacheBlob(blob);
//for (int i = 0; i < num_aba; i++)
//{
// Sector_Mode1_2048 sector = new Sector_Mode1_2048(i+150);
// sector.Blob = ecmCacheBlob;
// sector.Offset = i * 2048;
// Sectors.Add(new SectorEntry(sector));
//}
//TOC.AnalyzeLengthsFromIndexLengths();
}
public CueBin DumpCueBin(string baseName, CueBinPrefs prefs)
{
if (TOC.Sessions.Count > 1)
throw new NotSupportedException("can't dump cue+bin with more than 1 session yet");
CueBin ret = new CueBin();
ret.baseName = baseName;
ret.disc = this;
if (!prefs.OneBlobPerTrack)
{
//this is the preferred mode of dumping things. we will always write full sectors.
string cue = TOC.GenerateCUE_OneBin(prefs);
var bfd = new CueBin.BinFileDescriptor();
bfd.name = baseName + ".bin";
ret.cue = string.Format("FILE \"{0}\" BINARY\n", bfd.name) + cue;
ret.bins.Add(bfd);
bfd.SectorSize = 2352;
//skip the mandatory track 1 pregap! cue+bin files do not contain it
for (int i = 150; i < TOC.length_aba; i++)
{
bfd.abas.Add(i);
bfd.aba_zeros.Add(false);
}
}
else
{
//we build our own cue here (unlike above) because we need to build the cue and the output data at the same time
StringBuilder sbCue = new StringBuilder();
for (int i = 0; i < TOC.Sessions[0].Tracks.Count; i++)
{
var track = TOC.Sessions[0].Tracks[i];
var bfd = new CueBin.BinFileDescriptor();
bfd.name = baseName + string.Format(" (Track {0:D2}).bin", track.num);
bfd.SectorSize = Cue.BINSectorSizeForTrackType(track.TrackType);
ret.bins.Add(bfd);
int aba = 0;
//skip the mandatory track 1 pregap! cue+bin files do not contain it
if (i == 0) aba = 150;
for (; aba < track.length_aba; aba++)
{
int thisaba = track.Indexes[0].aba + aba;
bfd.abas.Add(thisaba);
bfd.aba_zeros.Add(false);
}
sbCue.AppendFormat("FILE \"{0}\" BINARY\n", bfd.name);
sbCue.AppendFormat(" TRACK {0:D2} {1}\n", track.num, Cue.TrackTypeStringForTrackType(track.TrackType));
foreach (var index in track.Indexes)
{
int x = index.aba - track.Indexes[0].aba;
if (index.num == 0 && index.aba == track.Indexes[1].aba)
{
//don't emit index 0 when it is the same as index 1; it is illegal for some reason
}
//else if (i==0 && index.num == 0)
//{
// //don't generate the first index, it is illogical
//}
else
{
//track 1 included the lead-in at the beginning of it. sneak past that.
//if (i == 0) x -= 150;
sbCue.AppendFormat(" INDEX {0:D2} {1}\n", index.num, new Timestamp(x).Value);
}
}
}
ret.cue = sbCue.ToString();
}
return ret;
}
/// <summary>
/// NOT USED RIGHT NOW. AMBIGUOUS, ANYWAY.
/// "bin" is an ill-defined concept.
/// </summary>
[Obsolete]
void DumpBin_2352(string binPath)
{
byte[] temp = new byte[2352];
//a cue's bin probably doesn't contain the first 150 sectors, so skip it
using (FileStream fs = new FileStream(binPath, FileMode.Create, FileAccess.Write, FileShare.None))
for (int i = 150; i < Sectors.Count; i++)
{
ReadLBA_2352(i, temp, 0);
fs.Write(temp, 0, 2352);
}
}
public static Disc FromCuePath(string cuePath, CueBinPrefs prefs)
{
var ret = new Disc();
ret.FromCuePathInternal(cuePath, prefs);
ret.TOC.GeneratePoints();
ret.PopulateQSubchannel();
return ret;
}
/// <summary>
/// THIS HASN'T BEEN TESTED IN A LONG TIME. DOES IT WORK?
/// </summary>
public static Disc FromIsoPath(string isoPath)
{
var ret = new Disc();
ret.FromIsoPathInternal(isoPath);
ret.TOC.GeneratePoints();
ret.PopulateQSubchannel();
return ret;
}
/// <summary>
/// creates subchannel Q data track for this disc
/// </summary>
void PopulateQSubchannel()
{
int aba = 0;
int dpIndex = 0;
while (aba < Sectors.Count)
{
if (dpIndex < TOC.Points.Count - 1)
{
if (aba >= TOC.Points[dpIndex + 1].ABA)
{
dpIndex++;
}
}
var dp = TOC.Points[dpIndex];
var se = Sectors[aba];
int control = 0;
//choose a control byte depending on whether this is an audio or data track
if(dp.Track.TrackType == ETrackType.Audio)
control = (int)Q_Control.StereoNoPreEmph;
else control = (int)Q_Control.DataUninterrupted;
//we always use ADR=1 (mode-1 q block)
//this could be more sophisticated but it is almost useless for emulation (only useful for catalog/ISRC numbers)
int adr = 1;
se.q_status = (byte)(adr | (control << 4));
se.q_tno = BCD2.FromDecimal(dp.TrackNum);
se.q_index = BCD2.FromDecimal(dp.IndexNum);
int track_relative_aba = aba - dp.Track.Indexes[1].aba;
track_relative_aba = Math.Abs(track_relative_aba);
Timestamp track_relative_timestamp = new Timestamp(track_relative_aba);
se.q_min = BCD2.FromDecimal(track_relative_timestamp.MIN);
se.q_sec = BCD2.FromDecimal(track_relative_timestamp.SEC);
se.q_frame = BCD2.FromDecimal(track_relative_timestamp.FRAC);
Timestamp absolute_timestamp = new Timestamp(aba);
se.q_amin = BCD2.FromDecimal(absolute_timestamp.MIN);
se.q_asec = BCD2.FromDecimal(absolute_timestamp.SEC);
se.q_aframe = BCD2.FromDecimal(absolute_timestamp.FRAC);
aba++;
}
}
static byte IntToBCD(int n)
{
int ones;
int tens = Math.DivRem(n,10,out ones);
return (byte)((tens<<4)|ones);
}
private enum Q_Control
{
StereoNoPreEmph = 0,
StereoPreEmph = 1,
MonoNoPreemph = 8,
MonoPreEmph = 9,
DataUninterrupted = 4,
DataIncremental = 5,
CopyProhibitedMask = 0,
CopyPermittedMask = 2,
}
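//as used in PopulateQSubchannel above, the control value occupies the high nibble of q_status
//and ADR the low nibble: q_status = (byte)(adr | (control << 4))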
}
/// <summary>
/// encapsulates a 2-digit BCD number as used in various places in the CD specs
/// </summary>
public struct BCD2
{
/// <summary>
/// The raw BCD value. you can't do math on this number! but you may be asked to supply it to a game program.
/// The largest number it can logically contain is 99
/// </summary>
public byte BCDValue;
/// <summary>
/// The derived decimal value. you can do math on this! the largest number it can logically contain is 99.
/// </summary>
public int DecimalValue
{
get { return (BCDValue & 0xF) + ((BCDValue >> 4) & 0xF) * 10; }
set { BCDValue = IntToBCD(value); }
}
/// <summary>
/// makes a BCD2 from a decimal number. don't supply a number > 99 or you might not like the results
/// </summary>
public static BCD2 FromDecimal(int d)
{
BCD2 ret = new BCD2();
ret.DecimalValue = d;
return ret;
}
static byte IntToBCD(int n)
{
int ones;
int tens = Math.DivRem(n, 10, out ones);
return (byte)((tens << 4) | ones);
}
}
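//e.g. BCD2.FromDecimal(42).BCDValue == 0x42, and a BCD2 whose BCDValue is 0x42 has DecimalValue 42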
public class Timestamp
{
/// <summary>
/// creates timestamp of 00:00:00
/// </summary>
public Timestamp()
{
Value = "00:00:00";
}
/// <summary>
/// creates a timestamp from a string in the form mm:ss:ff
/// </summary>
public Timestamp(string value)
{
this.Value = value;
MIN = int.Parse(value.Substring(0, 2));
SEC = int.Parse(value.Substring(3, 2));
FRAC = int.Parse(value.Substring(6, 2));
ABA = MIN * 60 * 75 + SEC * 75 + FRAC;
}
public readonly string Value;
public readonly int MIN, SEC, FRAC, ABA;
/// <summary>
/// creates timestamp from supplied ABA
/// </summary>
public Timestamp(int ABA)
{
this.ABA = ABA;
MIN = ABA / (60 * 75);
SEC = (ABA / 75) % 60;
FRAC = ABA % 75;
Value = string.Format("{0:D2}:{1:D2}:{2:D2}", MIN, SEC, FRAC);
}
}
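//e.g. new Timestamp("00:02:00").ABA == 150, and new Timestamp(150).Value == "00:02:00"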
public enum ETrackType
{
Mode1_2352,
Mode1_2048,
Mode2_2352,
Audio
}
public class CueBinPrefs
{
/// <summary>
/// Controls general operations: should the output be split into several blobs, or just use one?
/// </summary>
public bool OneBlobPerTrack;
/// <summary>
/// NOT SUPPORTED YET (just here as a reminder). If choosing OneBlobPerTrack, you may wish to write wave files for audio tracks.
/// </summary>
//public bool DumpWaveFiles;
/// <summary>
/// turn this on to dump bins instead of just cues
/// </summary>
public bool ReallyDumpBin;
/// <summary>
/// Dump bins to bitbucket instead of disk
/// </summary>
public bool DumpToBitbucket;
/// <summary>
/// dump a .sub.q along with bins. one day we'll want to dump the entire subcode, but really Q is all that's important for debugging most things
/// </summary>
public bool DumpSubchannelQ;
/// <summary>
/// generate remarks and other annotations to help humans understand what's going on, but which will confuse many cue parsers
/// </summary>
public bool AnnotateCue;
/// <summary>
/// EVIL: in theory this would attempt to generate pregap commands to save disc space, but I think this is a bad idea.
/// it would also be useful for OneBinPerTrack mode in making wave files.
/// HOWEVER - by the time we've loaded things up into our canonical format, we don't know which 'pregaps' are safe for turning back into pregaps
/// Because they might sometimes contain data (gapless audio discs). So we would have to inspect a series of sectors to look for silence.
/// And even still, the ECC information might be important. So, forget it.
/// NEVER USE OR IMPLEMENT THIS
/// </summary>
//public bool PreferPregapCommand = false;
/// <summary>
/// some cue parsers can't handle sessions, so better not to emit a session command then; multi-session discs will then be broken
/// </summary>
public bool SingleSession;
/// <summary>
/// enables various extension-aware behaviours.
/// enables auto-search for files with the same name but differing extension.
/// enables auto-detection of situations where cue blobfiles are indicating the wrong type in the cuefile
/// </summary>
public bool ExtensionAware = false;
/// <summary>
/// whenever we have a choice, use case sensitivity in searching for files
/// </summary>
public bool CaseSensitive = false;
/// <summary>
/// DO NOT CHANGE THIS! All sectors will be written with ECM data. It's a waste of space, but it is exact. (not completely supported yet)
/// </summary>
public bool DumpECM = true;
}
/// <summary>
/// Encapsulates an in-memory cue+bin (complete cuesheet and a little registry of files)
/// it will be based on a disc (from which it can read sectors to avoid burning through extra memory)
/// TODO - we must merge this with whatever reads in cue+bin
/// </summary>
public class CueBin
{
public string cue;
public string baseName;
public Disc disc;
public class BinFileDescriptor
{
public string name;
public List<int> abas = new List<int>();
//todo - do we really need this? i dont think so...
public List<bool> aba_zeros = new List<bool>();
public int SectorSize;
}
public List<BinFileDescriptor> bins = new List<BinFileDescriptor>();
//NOT SUPPORTED RIGHT NOW
//public string CreateRedumpReport()
//{
// if (disc.TOC.Sessions[0].Tracks.Count != bins.Count)
// throw new InvalidOperationException("Cannot generate redump report on CueBin lacking OneBinPerTrack property");
// StringBuilder sb = new StringBuilder();
// for (int i = 0; i < disc.TOC.Sessions[0].Tracks.Count; i++)
// {
// var track = disc.TOC.Sessions[0].Tracks[i];
// var bfd = bins[i];
// //dump the track
// byte[] dump = new byte[track.length_aba * 2352];
// //TODO ????????? post-ABA unknown
// //for (int aba = 0; aba < track.length_aba; aba++)
// // disc.ReadLBA_2352(bfd.lbas[lba],dump,lba*2352);
// string crc32 = string.Format("{0:X8}", CRC32.Calculate(dump));
// string md5 = Util.Hash_MD5(dump, 0, dump.Length);
// string sha1 = Util.Hash_SHA1(dump, 0, dump.Length);
// int pregap = track.Indexes[1].lba - track.Indexes[0].lba;
// Timestamp pregap_ts = new Timestamp(pregap);
// Timestamp len_ts = new Timestamp(track.length_lba);
// sb.AppendFormat("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\n",
// i,
// Cue.RedumpTypeStringForTrackType(track.TrackType),
// pregap_ts.Value,
// len_ts.Value,
// track.length_lba,
// track.length_lba*Cue.BINSectorSizeForTrackType(track.TrackType),
// crc32,
// md5,
// sha1
// );
// }
// return sb.ToString();
//}
public void Dump(string directory, CueBinPrefs prefs)
{
ProgressReport pr = new ProgressReport();
Dump(directory, prefs, pr);
}
public void Dump(string directory, CueBinPrefs prefs, ProgressReport progress)
{
byte[] subQ_temp = new byte[12];
progress.TaskCount = 2;
progress.Message = "Generating Cue";
progress.ProgressEstimate = 1;
progress.ProgressCurrent = 0;
progress.InfoPresent = true;
string cuePath = Path.Combine(directory, baseName + ".cue");
if (prefs.DumpToBitbucket) { }
else File.WriteAllText(cuePath, cue);
progress.Message = "Writing bin(s)";
progress.TaskCurrent = 1;
progress.ProgressEstimate = bins.Sum(bfd => bfd.abas.Count);
progress.ProgressCurrent = 0;
if(!prefs.ReallyDumpBin) return;
foreach (var bfd in bins)
{
int sectorSize = bfd.SectorSize;
byte[] temp = new byte[2352];
byte[] empty = new byte[2352];
string trackBinFile = bfd.name;
string trackBinPath = Path.Combine(directory, trackBinFile);
string subQPath = Path.ChangeExtension(trackBinPath, ".sub.q");
Stream fsSubQ = null;
Stream fs;
if(prefs.DumpToBitbucket)
fs = Stream.Null;
else fs = new FileStream(trackBinPath, FileMode.Create, FileAccess.Write, FileShare.None);
try
{
if (prefs.DumpSubchannelQ)
if (prefs.DumpToBitbucket)
fsSubQ = Stream.Null;
else fsSubQ = new FileStream(subQPath, FileMode.Create, FileAccess.Write, FileShare.None);
for (int i = 0; i < bfd.abas.Count; i++)
{
if (progress.CancelSignal) return;
progress.ProgressCurrent++;
int aba = bfd.abas[i];
if (bfd.aba_zeros[i])
{
fs.Write(empty, 0, sectorSize);
}
else
{
if (sectorSize == 2352)
disc.ReadABA_2352(aba, temp, 0);
else if (sectorSize == 2048) disc.ReadABA_2048(aba, temp, 0);
else throw new InvalidOperationException();
fs.Write(temp, 0, sectorSize);
//write subQ if necessary
if (fsSubQ != null)
{
disc.Sectors[aba].Read_SubchannelQ(subQ_temp, 0);
fsSubQ.Write(subQ_temp, 0, 12);
}
}
}
}
finally
{
fs.Dispose();
if (fsSubQ != null) fsSubQ.Dispose();
}
}
}
}
}