diff --git a/DiscoHawk/Wave/WaveFile.cs b/DiscoHawk/Wave/WaveFile.cs
new file mode 100644
index 0000000000..58ea1b96fc
--- /dev/null
+++ b/DiscoHawk/Wave/WaveFile.cs
@@ -0,0 +1,97 @@
+/* Wave File Library
+ * A simple library to write a wave file
+ *
+ * Garrett Hoofman
+ * 10/21/08
+ * http://www.visionsofafar.com
+ * */
+
+/* Wave File Format
+Reference : http://ccrma.stanford.edu/CCRMA/Courses/422/projects/WaveFormat/
+The canonical WAVE format starts with the RIFF header:
+0    4   ChunkID         Contains the letters "RIFF" in ASCII form
+                         (0x52494646 big-endian form).
+4    4   ChunkSize       36 + SubChunk2Size, or more precisely:
+                         4 + (8 + SubChunk1Size) + (8 + SubChunk2Size)
+                         This is the size of the rest of the chunk
+                         following this number. This is the size of the
+                         entire file in bytes minus 8 bytes for the
+                         two fields not included in this count:
+                         ChunkID and ChunkSize.
+8    4   Format          Contains the letters "WAVE"
+                         (0x57415645 big-endian form).
+
+The "WAVE" format consists of two subchunks: "fmt " and "data":
+The "fmt " subchunk describes the sound data's format:
+12   4   Subchunk1ID     Contains the letters "fmt "
+                         (0x666d7420 big-endian form).
+16   4   Subchunk1Size   16 for PCM. This is the size of the
+                         rest of the Subchunk which follows this number.
+20   2   AudioFormat     PCM = 1 (i.e. Linear quantization)
+                         Values other than 1 indicate some
+                         form of compression.
+22   2   NumChannels     Mono = 1, Stereo = 2, etc.
+24   4   SampleRate      8000, 44100, etc.
+28   4   ByteRate        == SampleRate * NumChannels * BitsPerSample/8
+32   2   BlockAlign      == NumChannels * BitsPerSample/8
+                         The number of bytes for one sample including
+                         all channels. I wonder what happens when
+                         this number isn't an integer?
+34   2   BitsPerSample   8 bits = 8, 16 bits = 16, etc.
+     2   ExtraParamSize  if PCM, then doesn't exist
+     X   ExtraParams     space for extra parameters
+
+The "data" subchunk contains the size of the data and the actual sound:
+36   4   Subchunk2ID     Contains the letters "data"
+                         (0x64617461 big-endian form).
+40   4   Subchunk2Size   == NumSamples * NumChannels * BitsPerSample/8
+                         This is the number of bytes in the data.
+                         You can also think of this as the size
+                         of the read of the subchunk following this
+                         number.
+44   *   Data            The actual sound data.
+*/
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.IO;
+
+namespace WaveLibrary
+{
+    public class WaveFile
+    {
+        WaveHeader header;
+        WavefmtSubChunk fmt;
+        WavedataSubChunk data;
+
+        public WaveFile(int channels, int bitsPerSample, int sampleRate)
+        {
+            header = new WaveHeader();
+            fmt = new WavefmtSubChunk(channels, bitsPerSample, sampleRate);
+        }
+
+        public void SetData(byte[] SoundData, int numSamples)
+        {
+            data = new WavedataSubChunk(numSamples, fmt.NumChannels, fmt.BitsPerSample, SoundData);
+        }
+
+        public void WriteFile(string file)
+        {
+            FileStream fs = File.Create(file);
+
+            //Set the total file chunk size
+            //Has to be set here because we might not know what the actual Data size was until now
+            header.SetChunkSize(fmt.Size, data.Size);
+
+            header.WriteHeader(fs);
+            fmt.Writefmt(fs);
+            data.WriteData(fs);
+
+            fs.Close();
+            fs.Dispose();
+        }
+
+        public int NumChannels { get { return fmt.NumChannels; } }
+        public int BitsPerSample { get { return fmt.BitsPerSample; } }
+    }
+}
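As a quick usage sketch (not part of this change): a caller builds the interleaved, little-endian PCM byte buffer itself and then hands it to WaveFile. The example below assumes 16-bit stereo at 44100 Hz; the output path "tone.wav" and the sketch class name are hypothetical.

    // Illustrative sketch only; assumes the WaveLibrary classes added in this diff.
    using System;
    using WaveLibrary;

    static class WaveFileUsageSketch
    {
        static void Main()
        {
            const int sampleRate = 44100, channels = 2, bits = 16;
            const int numSamples = 44100; // one second of audio
            byte[] pcm = new byte[numSamples * channels * (bits / 8)];

            for (int i = 0; i < numSamples; i++)
            {
                // 440 Hz sine, written to both channels as little-endian 16-bit samples
                short s = (short)(Math.Sin(2 * Math.PI * 440 * i / sampleRate) * short.MaxValue);
                int o = i * channels * (bits / 8);
                pcm[o] = (byte)(s & 0xFF); pcm[o + 1] = (byte)((s >> 8) & 0xFF); // left channel
                pcm[o + 2] = pcm[o]; pcm[o + 3] = pcm[o + 1];                    // right channel
            }

            var wav = new WaveFile(channels, bits, sampleRate);
            wav.SetData(pcm, numSamples);
            wav.WriteFile("tone.wav"); // hypothetical output path
        }
    }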
diff --git a/DiscoHawk/Wave/WaveHeader.cs b/DiscoHawk/Wave/WaveHeader.cs
new file mode 100644
index 0000000000..892ccba4fc
--- /dev/null
+++ b/DiscoHawk/Wave/WaveHeader.cs
@@ -0,0 +1,40 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.IO;
+
+namespace WaveLibrary
+{
+    class WaveHeader
+    {
+        string ChunkID = "RIFF";
+        int ChunkSize = 0;
+        string Format = "WAVE"; //Specify WAVE; AVI could also be used for a RIFF format
+
+        public WaveHeader()
+        {
+        }
+
+        public void SetChunkSize(int fmtSubChunkSize, int dataSubChunkSize)
+        {
+            ChunkSize = 4 + (8 + fmtSubChunkSize) + (8 + dataSubChunkSize);
+        }
+
+        public void WriteHeader(FileStream fs)
+        {
+            //ChunkID
+            byte[] riff = Encoding.ASCII.GetBytes(ChunkID);
+            fs.Write(riff, 0, riff.Length);
+
+            //Chunk Size
+            byte[] chunkSize = BitConverter.GetBytes(ChunkSize);
+            fs.Write(chunkSize, 0, chunkSize.Length);
+
+            //Data Type
+            byte[] wave = Encoding.ASCII.GetBytes(Format);
+            fs.Write(wave, 0, wave.Length);
+        }
+
+        public int Chunk_Size { get { return ChunkSize; } }
+    }
+}
diff --git a/DiscoHawk/Wave/WavedataSubChunk.cs b/DiscoHawk/Wave/WavedataSubChunk.cs
new file mode 100644
index 0000000000..cab37af0d8
--- /dev/null
+++ b/DiscoHawk/Wave/WavedataSubChunk.cs
@@ -0,0 +1,36 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.IO;
+
+namespace WaveLibrary
+{
+    class WavedataSubChunk
+    {
+        string SubChunk2ID = "data";
+        int SubChunk2Size;
+        byte[] SoundData;
+
+        public WavedataSubChunk(int NumSamples, int NumChannels, int BitsPerSample, byte[] SoundData)
+        {
+            SubChunk2Size = NumSamples * NumChannels * (BitsPerSample / 8);
+            this.SoundData = SoundData;
+        }
+
+        public void WriteData(FileStream fs)
+        {
+            //Chunk ID
+            byte[] _subChunk2ID = Encoding.ASCII.GetBytes(SubChunk2ID);
+            fs.Write(_subChunk2ID, 0, _subChunk2ID.Length);
+
+            //Chunk Size
+            byte[] _subChunk2Size = BitConverter.GetBytes(SubChunk2Size);
+            fs.Write(_subChunk2Size, 0, _subChunk2Size.Length);
+
+            //Wave Sound Data
+            fs.Write(SoundData, 0, SoundData.Length);
+        }
+
+        public int Size { get { return SubChunk2Size; } }
+    }
+}
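To make the size bookkeeping concrete with the same one-second stereo example: WavedataSubChunk reports Size = 44100 * 2 * (16 / 8) = 176400 bytes, so WaveHeader.SetChunkSize(16, 176400) stores 4 + (8 + 16) + (8 + 176400) = 176436, which is the on-disk file length minus the 8 bytes occupied by "RIFF" and ChunkSize themselves. A few illustrative statements (not library code) that could be dropped into a test method:

    // Sanity check of the RIFF size arithmetic (illustrative only).
    int fmtSize = 16;                                   // Subchunk1Size for PCM
    int dataSize = 44100 * 2 * (16 / 8);                // Subchunk2Size = 176400 bytes
    int chunkSize = 4 + (8 + fmtSize) + (8 + dataSize); // 176436, same as 36 + dataSize
    int fileSize = 8 + chunkSize;                       // 176444 bytes written to disk
    System.Diagnostics.Debug.Assert(chunkSize == 36 + dataSize);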
diff --git a/DiscoHawk/Wave/WavefmtSubChunk.cs b/DiscoHawk/Wave/WavefmtSubChunk.cs
new file mode 100644
index 0000000000..fc711938e1
--- /dev/null
+++ b/DiscoHawk/Wave/WavefmtSubChunk.cs
@@ -0,0 +1,65 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.IO;
+
+namespace WaveLibrary
+{
+    class WavefmtSubChunk
+    {
+        string SubChunk1ID = "fmt ";
+        int Subchunk1Size = 16; //For PCM
+        int AudioFormat = 1; //For no compression
+        public int NumChannels = 2; //1 For Mono, 2 For Stereo
+        int SampleRate = 22050;
+        int ByteRate;
+        int BlockAlign;
+        public int BitsPerSample = 16;
+
+        public WavefmtSubChunk(int channels, int bitsPerSamples, int sampleRate)
+        {
+            BitsPerSample = bitsPerSamples;
+            NumChannels = channels;
+            SampleRate = sampleRate;
+            ByteRate = SampleRate * NumChannels * (BitsPerSample / 8);
+            BlockAlign = NumChannels * (BitsPerSample / 8);
+        }
+
+        public void Writefmt(FileStream fs)
+        {
+            //Chunk ID
+            byte[] _subchunk1ID = Encoding.ASCII.GetBytes(SubChunk1ID);
+            fs.Write(_subchunk1ID, 0, _subchunk1ID.Length);
+
+            //Chunk Size
+            byte[] _subchunk1Size = BitConverter.GetBytes(Subchunk1Size);
+            fs.Write(_subchunk1Size, 0, _subchunk1Size.Length);
+
+            //Audio Format (PCM)
+            byte[] _audioFormat = BitConverter.GetBytes(AudioFormat);
+            fs.Write(_audioFormat, 0, 2);
+
+            //Number of Channels (1 or 2)
+            byte[] _numChannels = BitConverter.GetBytes(NumChannels);
+            fs.Write(_numChannels, 0, 2);
+
+            //Sample Rate
+            byte[] _sampleRate = BitConverter.GetBytes(SampleRate);
+            fs.Write(_sampleRate, 0, _sampleRate.Length);
+
+            //Byte Rate
+            byte[] _byteRate = BitConverter.GetBytes(ByteRate);
+            fs.Write(_byteRate, 0, _byteRate.Length);
+
+            //Block Align
+            byte[] _blockAlign = BitConverter.GetBytes(BlockAlign);
+            fs.Write(_blockAlign, 0, 2);
+
+            //Bits Per Sample
+            byte[] _bitsPerSample = BitConverter.GetBytes(BitsPerSample);
+            fs.Write(_bitsPerSample, 0, 2);
+        }
+
+        public int Size { get { return Subchunk1Size; } }
+    }
+}
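One design note on Writefmt: writing only the first two bytes of BitConverter.GetBytes(int) yields the correct 16-bit fields because BitConverter follows the host byte order, which is little-endian on the x86 machines this code is expected to run on. A hedged alternative sketch (not a proposed change to this diff) expresses the same fmt layout with explicit field widths via BinaryWriter, which always emits little-endian regardless of host:

    // Alternative sketch (illustrative only): explicit field widths for the "fmt " subchunk.
    using System.IO;
    using System.Text;

    static class FmtChunkSketch
    {
        public static void WriteFmtChunk(Stream s, short channels, int sampleRate, short bitsPerSample)
        {
            var bw = new BinaryWriter(s, Encoding.ASCII);
            bw.Write(Encoding.ASCII.GetBytes("fmt "));              // Subchunk1ID (4 bytes)
            bw.Write(16);                                           // Subchunk1Size for PCM (4 bytes)
            bw.Write((short)1);                                     // AudioFormat = 1, PCM (2 bytes)
            bw.Write(channels);                                     // NumChannels (2 bytes)
            bw.Write(sampleRate);                                   // SampleRate (4 bytes)
            bw.Write(sampleRate * channels * (bitsPerSample / 8));  // ByteRate (4 bytes)
            bw.Write((short)(channels * (bitsPerSample / 8)));      // BlockAlign (2 bytes)
            bw.Write(bitsPerSample);                                // BitsPerSample (2 bytes)
            bw.Flush();
        }
    }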