using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Text;

using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using ICSharpCode.SharpZipLib.Zip.Compression;

using BizHawk.Emulation.Common;
using BizHawk.Client.Common;

namespace BizHawk.Client.EmuHawk
{
    /// <summary>
    /// implements IVideoWriter, outputting to format "JMD".
    /// this is the JPC-rr multidump format; there are no filesize limits, and resolution
    /// can switch dynamically, so each dump is always one file.
    /// dumps can be processed with the JPC-rr streamtools or JMDSource (avisynth).
    /// </summary>
    [VideoWriter("jmd", "JMD writer", "Writes a JPC-rr multidump file (JMD). These can be read and further processed with jpc-streamtools. One JMD file contains all audio (uncompressed) and video (compressed).")]
    class JMDWriter : IVideoWriter
    {
        /// <summary>
        /// carries private compression information data
        /// </summary>
        class CodecToken : IDisposable
        {
            public void Dispose() { }

            /// <summary>
            /// how hard the zlib compressor works
            /// </summary>
            public int compressionlevel { get; set; }

            /// <summary>
            /// number of threads to be used for video compression (sort of)
            /// </summary>
            public int numthreads { get; set; }

            /// <summary>
            /// instantiates a CodecToken with default parameters
            /// </summary>
            public CodecToken()
            {
                compressionlevel = Deflater.DEFAULT_COMPRESSION;
                numthreads = 3;
            }
        }

        /// <summary>
        /// stores compression parameters
        /// </summary>
        CodecToken token;

        /// <summary>
        /// fps numerator, constant
        /// </summary>
        int fpsnum;

        /// <summary>
        /// fps denominator, constant
        /// </summary>
        int fpsden;

        /// <summary>
        /// audio samplerate, constant
        /// </summary>
        int audiosamplerate;

        /// <summary>
        /// audio number of channels, constant; 1 or 2 only
        /// </summary>
        int audiochannels;

        /// <summary>
        /// audio bits per sample, constant; only 16 supported
        /// </summary>
        int audiobits;

        /// <summary>
        /// actual disk file being written
        /// </summary>
        JMDfile jmdfile;

        /// <summary>
        /// metadata for a movie.
        /// not needed if we aren't dumping a movie.
        /// </summary>
        class MovieMetaData
        {
            /// <summary>
            /// name of the game (rom)
            /// </summary>
            public string gamename;

            /// <summary>
            /// author(s) names
            /// </summary>
            public string authors;

            /// <summary>
            /// total length of the movie, in milliseconds
            /// </summary>
            public UInt64 lengthms;

            /// <summary>
            /// number of rerecords
            /// </summary>
            public UInt64 rerecords;
        }

        /// <summary>
        /// represents the metadata for the active movie (if applicable)
        /// </summary>
        MovieMetaData moviemetadata;

        /// <summary>
        /// represents a JMD file packet ready to be written, except for sorting and timestamp offset
        /// </summary>
        class JMDPacket
        {
            public UInt16 stream;
            public UInt64 timestamp; // final muxed timestamp will be relative to previous
            public byte subtype;
            public byte[] data;
        }

        /// <summary>
        /// writes JMDfile packets to an underlying bytestream.
        /// handles one video, one pcm audio, and one metadata track.
        /// </summary>
        class JMDfile
        {
            /// <summary>
            /// current timestamp position
            /// </summary>
            UInt64 timestampoff;

            /// <summary>
            /// total number of video frames written
            /// </summary>
            UInt64 totalframes;

            /// <summary>
            /// total number of sample pairs written
            /// </summary>
            UInt64 totalsamples;

            /// <summary>
            /// fps of the video stream is fpsnum/fpsden
            /// </summary>
            int fpsnum;

            /// <summary>
            /// fps of the video stream is fpsnum/fpsden
            /// </summary>
            int fpsden;

            /// <summary>
            /// audio samplerate in hz
            /// </summary>
            int audiosamplerate;

            /// <summary>
            /// true if input will be stereo; mono otherwise.
            /// output stream is always stereo.
            /// </summary>
            bool stereo;

            /// <summary>
            /// underlying bytestream that is being written to
            /// </summary>
            Stream f;

            public JMDfile(Stream f, int fpsnum, int fpsden, int audiosamplerate, bool stereo)
            {
                if (!f.CanWrite)
                    throw new ArgumentException("Stream must be writable!");

                this.f = f;
                this.fpsnum = fpsnum;
                this.fpsden = fpsden;
                this.audiosamplerate = audiosamplerate;
                this.stereo = stereo;

                timestampoff = 0;
                totalframes = 0;
                totalsamples = 0;

                astorage = new Queue<JMDPacket>();
                vstorage = new Queue<JMDPacket>();

                writeheader();
            }
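
            // Informal sketch of the container layout as written by this class (derived from
            // writeheader/writeActual below; not a complete JMD specification):
            //
            //   magic:          BE16 0xFFFF, then the 14 ASCII bytes "JPCRRMULTIDUMP"
            //   channel table:  BE16 channel count, then per channel:
            //                   BE16 channel number, BE16 channel type (0 video, 1 pcm audio,
            //                   5 metadata), BE16 zero ("no name", presumably a name length)
            //   packets:        BE16 channel, BE32 timestamp delta in ns relative to the
            //                   previous packet, one subtype byte, variable-length payload
            //                   size, then the payload bytes
            //
            // Timestamp deltas larger than 0xFFFFFFFF are expressed by first emitting six
            // 0xFF "skip" bytes per 0xFFFFFFFF ns of elapsed time.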

            /// <summary>
            /// write header to the JPC file.
            /// assumes one video, one audio, and one metadata stream, with hardcoded IDs.
            /// </summary>
            void writeheader()
            {
                // write JPC MAGIC
                writeBE16(0xffff);
                f.Write(Encoding.ASCII.GetBytes("JPCRRMULTIDUMP"), 0, 14);

                // write channel table
                writeBE16(3); // number of streams

                // for each stream
                writeBE16(0); // channel 0
                writeBE16(0); // video
                writeBE16(0); // no name

                writeBE16(1); // channel 1
                writeBE16(1); // pcm audio
                writeBE16(0); // no name

                writeBE16(2); // channel 2
                writeBE16(5); // metadata
                writeBE16(0); // no name
            }

            /// <summary>
            /// write metadata for a movie file.
            /// can be called at any time.
            /// </summary>
            /// <param name="mmd">metadata to write</param>
            public void writemetadata(MovieMetaData mmd)
            {
                byte[] temp;

                // write metadatas
                writeBE16(2); // data channel
                writeBE32(0); // timestamp (same time as previous packet)
                f.WriteByte(71); // gamename
                temp = Encoding.UTF8.GetBytes(mmd.gamename);
                writeVar(temp.Length);
                f.Write(temp, 0, temp.Length);

                writeBE16(2);
                writeBE32(0);
                f.WriteByte(65); // authors
                temp = Encoding.UTF8.GetBytes(mmd.authors);
                writeVar(temp.Length);
                f.Write(temp, 0, temp.Length);

                writeBE16(2);
                writeBE32(0);
                f.WriteByte(76); // length
                writeVar(8);
                writeBE64(mmd.lengthms * 1000000);

                writeBE16(2);
                writeBE32(0);
                f.WriteByte(82); // rerecords
                writeVar(8);
                writeBE64(mmd.rerecords);
            }

            /// <summary>
            /// write big endian 16 bit unsigned
            /// </summary>
            void writeBE16(UInt16 v)
            {
                byte[] b = new byte[2];
                b[0] = (byte)(v >> 8);
                b[1] = (byte)(v & 255);
                f.Write(b, 0, 2);
            }

            /// <summary>
            /// write big endian 32 bit unsigned
            /// </summary>
            void writeBE32(UInt32 v)
            {
                byte[] b = new byte[4];
                b[0] = (byte)(v >> 24);
                b[1] = (byte)(v >> 16);
                b[2] = (byte)(v >> 8);
                b[3] = (byte)(v & 255);
                f.Write(b, 0, 4);
            }

            /// <summary>
            /// write big endian 64 bit unsigned
            /// </summary>
            void writeBE64(UInt64 v)
            {
                byte[] b = new byte[8];
                for (int i = 7; i >= 0; i--)
                {
                    b[i] = (byte)(v & 255);
                    v >>= 8;
                }
                f.Write(b, 0, 8);
            }

            /// <summary>
            /// write variable length value.
            /// encoding is similar to MIDI.
            /// </summary>
            void writeVar(UInt64 v)
            {
                byte[] b = new byte[10];
                int i = 0;
                while (v > 0)
                {
                    if (i > 0)
                        b[i++] = (byte)((v & 127) | 128);
                    else
                        b[i++] = (byte)(v & 127);
                    v /= 128;
                }
                if (i == 0)
                    f.WriteByte(0);
                else
                    for (; i > 0; i--)
                        f.WriteByte(b[i - 1]);
            }

            /// <summary>
            /// write variable length value.
            /// encoding is similar to MIDI.
            /// </summary>
            void writeVar(int v)
            {
                if (v < 0)
                    throw new ArgumentException("length cannot be less than 0!");
                writeVar((UInt64)v);
            }
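
            // Worked example of the variable-length encoding above (illustrative only):
            //   writeVar(0)   => 0x00
            //   writeVar(127) => 0x7F
            //   writeVar(300) => 0x82 0x2C   (300 = 2*128 + 44; the high group carries bit 7)
            // i.e. 7-bit groups, most significant first, with bit 7 set on every byte except
            // the last -- the same idea as a MIDI variable-length quantity.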

            /// <summary>
            /// creates a timestamp out of fps value
            /// </summary>
            /// <param name="rate">fpsnum</param>
            /// <param name="scale">fpsden</param>
            /// <param name="pos">frame position</param>
            /// <returns>timestamp in nanoseconds</returns>
            static UInt64 timestampcalc(int rate, int scale, UInt64 pos)
            {
                // rate/scale events per second
                // timestamp is in nanoseconds
                // round down, consistent with JPC-rr apparently?
                var b = new System.Numerics.BigInteger(pos) * scale * 1000000000 / rate;
                return (UInt64)b;
            }

            /// <summary>
            /// actually write a packet to file.
            /// timestamp sequence must be nondecreasing.
            /// </summary>
            void writeActual(JMDPacket j)
            {
                if (j.timestamp < timestampoff)
                    throw new ArithmeticException("JMD Timestamp problem?");

                UInt64 timestampout = j.timestamp - timestampoff;
                while (timestampout > 0xffffffff)
                {
                    timestampout -= 0xffffffff;
                    // write timestamp skipper
                    for (int i = 0; i < 6; i++)
                        f.WriteByte(0xff);
                }
                timestampoff = j.timestamp;

                writeBE16(j.stream);
                writeBE32((UInt32)timestampout);
                f.WriteByte(j.subtype);
                writeVar((UInt64)j.data.LongLength);
                f.Write(j.data, 0, j.data.Length);
            }

            /// <summary>
            /// assemble JMDPacket and send to packetqueue
            /// </summary>
            /// <param name="source">zlibed frame with width and height prepended</param>
            public void AddVideo(byte[] source)
            {
                var j = new JMDPacket();
                j.stream = 0;
                j.subtype = 1; // zlib compressed, other possibility is 0 = uncompressed
                j.data = source;
                j.timestamp = timestampcalc(fpsnum, fpsden, totalframes);
                totalframes++;
                writevideo(j);
            }

            /// <summary>
            /// assemble JMDPacket and send to packetqueue.
            /// one audio packet is split up into many JMD packets, since JMD requires only 2 samples (1 left, 1 right) per packet.
            /// </summary>
            public void AddSamples(short[] samples)
            {
                if (!stereo)
                    for (int i = 0; i < samples.Length; i++)
                        doaudiopacket(samples[i], samples[i]);
                else
                    for (int i = 0; i < samples.Length / 2; i++)
                        doaudiopacket(samples[2 * i], samples[2 * i + 1]);
            }

            /// <summary>
            /// helper function: makes a JMDPacket out of one sample pair and adds it to the order queue
            /// </summary>
            /// <param name="l">left sample</param>
            /// <param name="r">right sample</param>
            void doaudiopacket(short l, short r)
            {
                var j = new JMDPacket();
                j.stream = 1;
                j.subtype = 1; // raw PCM audio
                j.data = new byte[4];
                j.data[0] = (byte)(l >> 8);
                j.data[1] = (byte)(l & 255);
                j.data[2] = (byte)(r >> 8);
                j.data[3] = (byte)(r & 255);
                j.timestamp = timestampcalc(audiosamplerate, 1, totalsamples);
                totalsamples++;
                writesound(j);
            }

            // ensure outputs are in order.
            // JMD packets must be in nondecreasing timestamp order, but there's no obligation
            // for us to be handed frames and samples in that order.  this code is a bit
            // overcomplex, to handle edge cases that may not be a problem with the current system?
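
            // Illustrative numbers (example values, not from the original source): at 60 fps
            // (fpsnum = 60, fpsden = 1) timestampcalc yields 0; 16,666,666; 33,333,333; ... ns
            // for successive frames, while 44100 Hz audio yields 0; 22,675; 45,351; ... ns for
            // successive sample pairs.  The two queues below merge those streams into a single
            // nondecreasing sequence before anything is written to disk.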

            /// <summary>
            /// collection of JMDPackets yet to be written (audio)
            /// </summary>
            Queue<JMDPacket> astorage;

            /// <summary>
            /// collection of JMDPackets yet to be written (video)
            /// </summary>
            Queue<JMDPacket> vstorage;

            /// <summary>
            /// add a sound packet to the file write queue.
            /// will be written when order-appropriate wrt video.
            /// the sound packets added must be internally ordered (but need not match video order).
            /// </summary>
            void writesound(JMDPacket j)
            {
                while (vstorage.Count > 0)
                {
                    var p = vstorage.Peek();
                    if (p.timestamp <= j.timestamp)
                        writeActual(vstorage.Dequeue());
                    else
                        break;
                }
                astorage.Enqueue(j);
            }

            /// <summary>
            /// add a video packet to the file write queue.
            /// will be written when order-appropriate wrt audio.
            /// the video packets added must be internally ordered (but need not match audio order).
            /// </summary>
            void writevideo(JMDPacket j)
            {
                while (astorage.Count > 0)
                {
                    var p = astorage.Peek();
                    if (p.timestamp <= j.timestamp)
                        writeActual(astorage.Dequeue());
                    else
                        break;
                }
                vstorage.Enqueue(j);
            }

            /// <summary>
            /// flush all remaining JMDPackets to file.
            /// call before closing the file.
            /// </summary>
            void flushpackets()
            {
                while (astorage.Count > 0 && vstorage.Count > 0)
                {
                    var ap = astorage.Peek();
                    var av = vstorage.Peek();
                    if (ap.timestamp <= av.timestamp)
                        writeActual(astorage.Dequeue());
                    else
                        writeActual(vstorage.Dequeue());
                }
                while (astorage.Count > 0)
                    writeActual(astorage.Dequeue());
                while (vstorage.Count > 0)
                    writeActual(vstorage.Dequeue());
            }

            /// <summary>
            /// flush any remaining packets and close the underlying stream
            /// </summary>
            public void Close()
            {
                flushpackets();
                f.Close();
            }
        }
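
        // Rough calling sequence expected from the host (a sketch inferred from the methods
        // in this class, not an authoritative statement of the IVideoWriter contract):
        //
        //   var w = new JMDWriter();
        //   w.SetVideoCodecToken(token);          // or SetDefaultVideoCodecToken()
        //   w.SetMovieParameters(fpsnum, fpsden);
        //   w.SetAudioParameters(44100, 2, 16);   // example values
        //   w.SetMetaData(...);                   // optional; before OpenFile
        //   w.OpenFile("movie.jmd");
        //   // ... AddFrame(...) / AddSamples(...) once per emulated frame ...
        //   w.CloseFile();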

        /// <summary>
        /// sets default (probably wrong) parameters
        /// </summary>
        public JMDWriter()
        {
            fpsnum = 25;
            fpsden = 1;
            audiosamplerate = 22050;
            audiochannels = 1;
            audiobits = 8;
            token = null;
            moviemetadata = null;
        }

        public void Dispose()
        {
            // we have no unmanaged resources
        }

        /// <summary>
        /// sets the codec token to be used for video compression
        /// </summary>
        public void SetVideoCodecToken(IDisposable token)
        {
            if (token is CodecToken)
                this.token = (CodecToken)token;
            else
                throw new ArgumentException("codec token must be of right type");
        }

        /// <summary>
        /// obtain a set of recording compression parameters
        /// </summary>
        /// <param name="hwnd">hwnd to attach to if the user is shown a config dialog</param>
        /// <returns>codec token; dispose of it when you're done with it</returns>
        public IDisposable AcquireVideoCodecToken(System.Windows.Forms.IWin32Window hwnd)
        {
            CodecToken ret = new CodecToken();

            // load from config and sanitize
            int t = Math.Min(Math.Max(Global.Config.JMDThreads, 1), 6);
            int c = Math.Min(Math.Max(Global.Config.JMDCompression, Deflater.NO_COMPRESSION), Deflater.BEST_COMPRESSION);

            if (!JMDForm.DoCompressionDlg(ref t, ref c, 1, 6, Deflater.NO_COMPRESSION, Deflater.BEST_COMPRESSION, hwnd))
                return null;

            Global.Config.JMDThreads = ret.numthreads = t;
            Global.Config.JMDCompression = ret.compressionlevel = c;

            return ret;
        }

        /// <summary>
        /// set framerate to fpsnum/fpsden (assumed to be unchanging over the life of the stream)
        /// </summary>
        public void SetMovieParameters(int fpsnum, int fpsden)
        {
            this.fpsnum = fpsnum;
            this.fpsden = fpsden;
        }

        /// <summary>
        /// set resolution parameters (width x height).
        /// must be set before the file is opened; can be changed later.
        /// should always match IVideoProvider.
        /// </summary>
        public void SetVideoParameters(int width, int height)
        {
            // each frame is dumped independently with its own resolution tag, so we don't care to store this
        }

        /// <summary>
        /// set audio parameters.  cannot change later.
        /// </summary>
        public void SetAudioParameters(int sampleRate, int channels, int bits)
        {
            // the sampleRate limits are arbitrary, just to catch things which are probably silly-wrong.
            // if a larger range of sampling rates is needed, it should be supported.
            if (sampleRate < 8000 || sampleRate > 96000 || channels < 1 || channels > 2 || bits != 16)
                throw new ArgumentException("Audio parameters out of range!");

            audiosamplerate = sampleRate;
            audiochannels = channels;
            audiobits = bits;
        }

        /// <summary>
        /// opens a recording stream.
        /// set a video codec token first.
        /// </summary>
        public void OpenFile(string baseName)
        {
            string ext = Path.GetExtension(baseName);
            if (ext == null || ext.ToLower() != ".jmd")
                baseName = baseName + ".jmd";

            jmdfile = new JMDfile(File.Open(baseName, FileMode.Create), fpsnum, fpsden, audiosamplerate, audiochannels == 2);

            if (moviemetadata != null)
                jmdfile.writemetadata(moviemetadata);

            // start up the worker thread.
            // problem: since audio chunks and video frames both go through here, exactly how much work
            // the zlib workers get is not known without knowing how the emulator will chunk audio packets.
            // this shouldn't affect results, though, just performance.
            threadQ = new System.Collections.Concurrent.BlockingCollection<object>(token.numthreads * 2);
            workerT = new System.Threading.Thread(new System.Threading.ThreadStart(threadproc));
            workerT.Start();
            GzipFrameDelegate = new GzipFrameD(GzipFrame);
        }

        // some of this code is copied from AviWriter... not sure how, if at all, it should be abstracted

        /// <summary>
        /// blocking threadsafe queue, used for communication between the main program and the file writing thread
        /// </summary>
        System.Collections.Concurrent.BlockingCollection<object> threadQ;

        /// <summary>
        /// file writing thread; most of the work happens here
        /// </summary>
        System.Threading.Thread workerT;

        /// <summary>
        /// file writing thread's loop
        /// </summary>
        void threadproc()
        {
            try
            {
                while (true)
                {
                    Object o = threadQ.Take();
                    if (o is IAsyncResult)
                        jmdfile.AddVideo(GzipFrameDelegate.EndInvoke((IAsyncResult)o));
                    else if (o is short[])
                        jmdfile.AddSamples((short[])o);
                    else
                        // anything else is assumed to be quit time
                        return;
                }
            }
            catch (Exception e)
            {
                System.Windows.Forms.MessageBox.Show("JMD Worker Thread died:\n\n" + e);
                return;
            }
        }

        /// <summary>
        /// close recording stream
        /// </summary>
        public void CloseFile()
        {
            threadQ.Add(new Object()); // acts as stop message
            workerT.Join();
            jmdfile.Close();
        }

        /// <summary>
        /// makes a copy of an IVideoProvider.
        /// handles conversion to a byte array suitable for compression by zlib.
        /// </summary>
        class VideoCopy
        {
            public byte[] VideoBuffer;
            public int BufferWidth;
            public int BufferHeight;

            public VideoCopy(IVideoProvider c)
            {
                int[] vb = c.GetVideoBuffer();
                VideoBuffer = new byte[vb.Length * sizeof(int)];
                // we have to switch RGB ordering here
                for (int i = 0; i < vb.Length; i++)
                {
                    VideoBuffer[i * 4 + 0] = (byte)(vb[i] >> 16);
                    VideoBuffer[i * 4 + 1] = (byte)(vb[i] >> 8);
                    VideoBuffer[i * 4 + 2] = (byte)(vb[i] & 255);
                    VideoBuffer[i * 4 + 3] = 0;
                }
                //Buffer.BlockCopy(vb, 0, VideoBuffer, 0, VideoBuffer.Length);
                BufferWidth = c.BufferWidth;
                BufferHeight = c.BufferHeight;
            }
        }
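
        // Payload layout produced by GzipFrame below (informal note): two big-endian 16-bit
        // values (width, height) followed by a raw zlib (deflate) stream of the frame's pixels,
        // 4 bytes per pixel in the order R, G, B, 0 with alpha discarded, as arranged by
        // VideoCopy above (assuming the IVideoProvider buffer is packed as 0x00RRGGBB ints).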

        /// <summary>
        /// deflates (zlib) a VideoCopy, returning a byte array suitable for insertion into a JMD file.
        /// the byte array includes width and height dimensions at the beginning.
        /// this is run asynchronously for speedup, as compressing can be slow.
        /// </summary>
        /// <param name="v">video frame to compress</param>
        /// <returns>zlib compressed frame, with width and height prepended</returns>
        byte[] GzipFrame(VideoCopy v)
        {
            MemoryStream m = new MemoryStream();

            // write frame width and height first
            m.WriteByte((byte)(v.BufferWidth >> 8));
            m.WriteByte((byte)(v.BufferWidth & 255));
            m.WriteByte((byte)(v.BufferHeight >> 8));
            m.WriteByte((byte)(v.BufferHeight & 255));

            var g = new DeflaterOutputStream(m, new Deflater(token.compressionlevel));
            g.IsStreamOwner = false; // leave the memory stream open so we can pick its contents
            g.Write(v.VideoBuffer, 0, v.VideoBuffer.Length);
            g.Flush();
            g.Close();

            byte[] ret = m.GetBuffer();
            Array.Resize(ref ret, (int)m.Length);
            m.Close();
            return ret;
        }

        /// <summary>
        /// delegate for GzipFrame
        /// </summary>
        /// <param name="v">VideoCopy to compress</param>
        /// <returns>zlib compressed stream with width and height prepended</returns>
        delegate byte[] GzipFrameD(VideoCopy v);

        /// <summary>
        /// delegate for GzipFrame
        /// </summary>
        GzipFrameD GzipFrameDelegate;

        /// <summary>
        /// adds a frame to the stream
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            if (!workerT.IsAlive)
                // signal some sort of error?
                return;
            threadQ.Add(GzipFrameDelegate.BeginInvoke(new VideoCopy(source), null, null));
        }

        /// <summary>
        /// adds audio samples to the stream.
        /// no attempt is made to sync this to the video.
        /// </summary>
        public void AddSamples(short[] samples)
        {
            if (!workerT.IsAlive)
                // signal some sort of error?
                return;
            threadQ.Add((short[])samples.Clone());
        }

        /// <summary>
        /// set metadata parameters; should be called before opening the file
        /// </summary>
        public void SetMetaData(string gameName, string authors, UInt64 lengthMS, UInt64 rerecords)
        {
            moviemetadata = new MovieMetaData();
            moviemetadata.gamename = gameName;
            moviemetadata.authors = authors;
            moviemetadata.lengthms = lengthMS;
            moviemetadata.rerecords = rerecords;
        }

        public string DesiredExtension()
        {
            return "jmd";
        }

        public void SetDefaultVideoCodecToken()
        {
            CodecToken ct = new CodecToken();

            // load from config and sanitize
            int t = Math.Min(Math.Max(Global.Config.JMDThreads, 1), 6);
            int c = Math.Min(Math.Max(Global.Config.JMDCompression, Deflater.NO_COMPRESSION), Deflater.BEST_COMPRESSION);

            ct.compressionlevel = c;
            ct.numthreads = t;

            token = ct;
        }

        public void SetFrame(int frame) { }

        public bool UsesAudio { get { return true; } }
        public bool UsesVideo { get { return true; } }
    }
}