diff --git a/BizHawk.MultiClient/FFmpegWriter.cs b/BizHawk.MultiClient/FFmpegWriter.cs
index a3b1ffe1eb..64b5aebddd 100644
--- a/BizHawk.MultiClient/FFmpegWriter.cs
+++ b/BizHawk.MultiClient/FFmpegWriter.cs
@@ -2,16 +2,14 @@
using System.Collections.Generic;
using System.Linq;
using System.Text;
-using System.Net.Sockets;
-using System.Net;
using System.Diagnostics;
namespace BizHawk.MultiClient
{
/// <summary>
- /// uses tcp sockets to launch an external ffmpeg process and encode
+ /// uses pipes to launch an external ffmpeg process and encode
/// </summary>
- class FFmpegWriter : WavWriterV, IVideoWriter
+ class FFmpegWriter : IVideoWriter
{
/// <summary>
/// handle to external ffmpeg process
@@ -43,12 +41,15 @@ namespace BizHawk.MultiClient
/// </summary>
const int consolebuffer = 5;
- public new void OpenFile(string baseName)
+ /// <summary>
+ /// muxer handle for the current segment
+ /// </summary>
+ NutMuxer muxer;
+
+ public void OpenFile(string baseName)
{
string s = System.IO.Path.GetFileNameWithoutExtension(baseName);
- base.OpenFile(s + ".wav");
-
this.baseName = s;
segment = 0;
@@ -67,11 +68,7 @@ namespace BizHawk.MultiClient
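+ // ffmpeg now reads a NUT container from stdin ("-f nut -i -") instead of raw BGRA frames;
+ // video is encoded losslessly with libx264rgb at crf 0 and audio is passed through as pcm_s16le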
ffmpeg.StartInfo.Arguments = String.Format
(
- "-y -f rawvideo -pix_fmt bgra -s {0}x{1} -r {2}/{3} -i - -vcodec libx264rgb -crf 0 \"{4}.mkv\"",
- width,
- height,
- fpsnum,
- fpsden,
+ "-y -f nut -i - -vcodec libx264rgb -acodec pcm_s16le -crf 0 \"{0}.mkv\"",
filename
);
@@ -90,6 +87,8 @@ namespace BizHawk.MultiClient
ffmpeg.Start();
ffmpeg.BeginErrorReadLine();
+
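+ // the muxer wraps ffmpeg's stdin, so video and audio both travel down the single pipe as one NUT stream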
+ muxer = new NutMuxer(width, height, fpsnum, fpsden, sampleRate, channels, ffmpeg.StandardInput.BaseStream);
}
/// <summary>
@@ -112,21 +111,22 @@ namespace BizHawk.MultiClient
/// </summary>
void CloseFileSegment()
{
- ffmpeg.StandardInput.Close();
+ muxer.Finish();
+ //ffmpeg.StandardInput.Close();
// how long should we wait here?
ffmpeg.WaitForExit(20000);
ffmpeg = null;
stderr = null;
commandline = null;
+ muxer = null;
}
- public new void CloseFile()
+ public void CloseFile()
{
CloseFileSegment();
baseName = null;
- base.CloseFile();
}
/// <summary>
@@ -152,7 +152,7 @@ namespace BizHawk.MultiClient
}
- public new void AddFrame(IVideoProvider source)
+ public void AddFrame(IVideoProvider source)
{
if (source.BufferWidth != width || source.BufferHeight != height)
SetVideoParameters(source.BufferWidth, source.BufferHeight);
@@ -162,8 +162,11 @@ namespace BizHawk.MultiClient
var a = source.GetVideoBuffer();
var b = new byte[a.Length * sizeof (int)];
Buffer.BlockCopy(a, 0, b, 0, b.Length);
+
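+ // hand the raw BGRA frame to the muxer, which timestamps it and wraps it in a NUT frame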
+ muxer.writevideoframe(b);
+
// have to do binary write!
- ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
+ //ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
}
@@ -177,12 +180,12 @@ namespace BizHawk.MultiClient
}
}
- public new IDisposable AcquireVideoCodecToken(IntPtr hwnd)
+ public IDisposable AcquireVideoCodecToken(IntPtr hwnd)
{
return new FFmpegWriterToken();
}
- public new void SetVideoCodecToken(IDisposable token)
+ public void SetVideoCodecToken(IDisposable token)
{
// nyi
}
@@ -190,22 +193,22 @@ namespace BizHawk.MultiClient
/// <summary>
/// video params
/// </summary>
- int fpsnum, fpsden, width, height;
+ int fpsnum, fpsden, width, height, sampleRate, channels;
- public new void SetMovieParameters(int fpsnum, int fpsden)
+ public void SetMovieParameters(int fpsnum, int fpsden)
{
this.fpsnum = fpsnum;
this.fpsden = fpsden;
}
- public new void SetVideoParameters(int width, int height)
+ public void SetVideoParameters(int width, int height)
{
this.width = width;
this.height = height;
- /* ffmpeg theoretically supports variable resolution videos, but there's no way to
- * signal that metadata in a raw pipe. so if we're currently in a segment,
- * start a new one */
+ /* ffmpeg theoretically supports variable resolution videos, but in practice that's not handled very well,
+ * so we start a new segment.
+ */
if (ffmpeg != null)
{
CloseFileSegment();
@@ -215,15 +218,28 @@ namespace BizHawk.MultiClient
}
- public new void SetMetaData(string gameName, string authors, ulong lengthMS, ulong rerecords)
+ public void SetMetaData(string gameName, string authors, ulong lengthMS, ulong rerecords)
{
// can be implemented with ffmpeg "-metadata" parameter???
// nyi
}
- public new void Dispose()
+ public void Dispose()
{
- base.Dispose();
+ }
+
+
+ public void AddSamples(short[] samples)
+ {
+ muxer.writeaudioframe(samples);
+ }
+
+ public void SetAudioParameters(int sampleRate, int channels, int bits)
+ {
+ if (bits != 16)
+ throw new ArgumentOutOfRangeException("bits", "sampling depth must be 16 bits!");
+ this.sampleRate = sampleRate;
+ this.channels = channels;
}
}
}
diff --git a/BizHawk.MultiClient/NutMuxer.cs b/BizHawk.MultiClient/NutMuxer.cs
index a84539aea5..5252a22425 100644
--- a/BizHawk.MultiClient/NutMuxer.cs
+++ b/BizHawk.MultiClient/NutMuxer.cs
@@ -13,7 +13,7 @@ namespace BizHawk.MultiClient
/// </summary>
class NutMuxer
{
- /* TODO: timestamp sanitization (like JMDWriter) */
+ // this code isn't really any good for general purpose nut creation
/// <summary>
@@ -143,7 +143,7 @@ namespace BizHawk.MultiClient
/// seems to be different than standard CRC32?????
/// </summary>
/// <param name="buf"></param>
- /// <returns></returns>
+ /// <returns>crc32, nut variant</returns>
static uint NutCRC32(byte[] buf)
{
uint crc = 0;
@@ -176,6 +176,11 @@ namespace BizHawk.MultiClient
StartCode startcode;
Stream underlying;
+ /// <summary>
+ /// create a new NutPacket
+ /// </summary>
+ /// <param name="startcode">startcode for this packet</param>
+ /// <param name="underlying">stream to write to</param>
public NutPacket(StartCode startcode, Stream underlying)
{
data = new MemoryStream();
@@ -306,13 +311,22 @@ namespace BizHawk.MultiClient
/// </summary>
bool audiodone;
+ /// <summary>
+ /// video packets waiting to be written
+ /// </summary>
+ Queue<NutFrame> videoqueue;
+ /// <summary>
+ /// audio packets waiting to be written
+ /// </summary>
+ Queue<NutFrame> audioqueue;
+
/// <summary>
/// write out the main header
/// </summary>
void writemainheader()
{
- // note: this tag not actually part of main headers
+ // note: this file start tag is not actually part of the main headers
var tmp = Encoding.ASCII.GetBytes("nut/multimedia container\0");
output.Write(tmp, 0, tmp.Length);
@@ -396,16 +410,121 @@ namespace BizHawk.MultiClient
}
/// <summary>
- /// writes a syncpoint header with already coded universal timestamp
+ /// stores a single frame with syncpoint, in mux-ready form
+ /// used because reordering of audio and video can be needed for proper interleave
/// </summary>
- void writesyncpoint(ulong global_key_pts)
+ class NutFrame
{
- var header = new NutPacket(NutPacket.StartCode.Syncpoint, output);
- WriteVarU(global_key_pts, header); // global_key_pts; file starts at time 0
- WriteVarU(1, header); // back_ptr_div_16 ?????????????????????????????
- header.Flush();
+ /// <summary>
+ /// data ready to be written to stream/disk
+ /// </summary>
+ byte[] data;
+
+ /// <summary>
+ /// presentation timestamp
+ /// </summary>
+ ulong pts;
+
+ /// <summary>
+ /// fraction of the specified timebase
+ /// </summary>
+ ulong ptsnum, ptsden;
+
+ /// <summary>
+ /// build the complete mux-ready data for a single frame
+ /// </summary>
+ /// <param name="payload">frame data</param>
+ /// <param name="pts">presentation timestamp</param>
+ /// <param name="ptsnum">numerator of timebase</param>
+ /// <param name="ptsden">denominator of timebase</param>
+ /// <param name="ptsindex">which timestamp base is used, assumed to be also stream number</param>
+ public NutFrame(byte[] payload, ulong pts, ulong ptsnum, ulong ptsden, int ptsindex)
+ {
+ this.pts = pts;
+ this.ptsnum = ptsnum;
+ this.ptsden = ptsden;
+
+ var frame = new MemoryStream();
+
+ // create syncpoint
+ var sync = new NutPacket(NutPacket.StartCode.Syncpoint, frame);
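+ // per the NUT spec, a coded global timestamp appears to fold the timebase id into the value
+ // (t * time_base_count + time_base_id); with two timebases (video = 0, audio = 1) that is pts * 2 + ptsindex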
+ WriteVarU(pts * 2 + (ulong)ptsindex, sync); // global_key_pts
+ WriteVarU(1, sync); // back_ptr_div_16, this is wrong
+ sync.Flush();
+
+
+ var frameheader = new MemoryStream();
+ frameheader.WriteByte(0); // frame_code
+ // frame_flags = FLAG_CODED, so:
+ int flags = 0;
+ flags |= 1 << 0; // FLAG_KEY
+ if (payload.Length == 0)
+ flags |= 1 << 1; // FLAG_EOR
+ flags |= 1 << 3; // FLAG_CODED_PTS
+ flags |= 1 << 4; // FLAG_STREAM_ID
+ flags |= 1 << 5; // FLAG_SIZE_MSB
+ flags |= 1 << 6; // FLAG_CHECKSUM
+ WriteVarU(flags, frameheader);
+ WriteVarU(ptsindex, frameheader); // stream_id
+ WriteVarU(pts + 256, frameheader); // coded_pts = pts + 1 << msb_pts_shift
+ WriteVarU(payload.Length, frameheader); // data_size_msb
+
+ var frameheaderarr = frameheader.ToArray();
+ frame.Write(frameheaderarr, 0, frameheaderarr.Length);
+ WriteBE32(NutCRC32(frameheaderarr), frame); // checksum
+ frame.Write(payload, 0, payload.Length);
+
+ data = frame.ToArray();
+ }
+
+ /// <summary>
+ /// compare two NutFrames by pts
+ /// </summary>
+ /// <param name="lhs"></param>
+ /// <param name="rhs"></param>
+ /// <returns></returns>
+ public static bool operator <=(NutFrame lhs, NutFrame rhs)
+ {
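+ // frames are stamped at pts * ptsnum / ptsden seconds; compare by cross-multiplying
+ // in BigInteger so large timestamps can't overflow or lose precision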
+ BigInteger left = new BigInteger(lhs.pts);
+ left = left * lhs.ptsnum * rhs.ptsden;
+ BigInteger right = new BigInteger(rhs.pts);
+ right = right * rhs.ptsnum * lhs.ptsden;
+
+ return left <= right;
+ }
+ public static bool operator >=(NutFrame lhs, NutFrame rhs)
+ {
+ BigInteger left = new BigInteger(lhs.pts);
+ left = left * lhs.ptsnum * rhs.ptsden;
+ BigInteger right = new BigInteger(rhs.pts);
+ right = right * rhs.ptsnum * lhs.ptsden;
+
+ return left >= right;
+ }
+
+
+ static NutFrame()
+ {
+ dbg = new StreamWriter(".\\nutframe.txt", false);
+ }
+
+ static StreamWriter dbg;
+
+ /// <summary>
+ /// write out frame, with syncpoint and all headers
+ /// </summary>
+ /// <param name="dest"></param>
+ public void WriteData(Stream dest)
+ {
+ dest.Write(data, 0, data.Length);
+ dbg.WriteLine(string.Format("{0},{1},{2}", pts, ptsnum, ptsden));
+ }
+
}
+
/// <summary>
/// write a video frame to the stream
/// </summary>
@@ -416,37 +535,15 @@ namespace BizHawk.MultiClient
throw new Exception("Can't write data after end of relevance!");
if (data.Length == 0)
videodone = true;
- writesyncpoint(videopts * 2 + 0);
- writeframe(data, 0, videopts);
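+ // video timebase is fpsden/fpsnum seconds per tick, so videopts simply counts frames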
+ var f = new NutFrame(data, videopts, (ulong) avparams.fpsden, (ulong) avparams.fpsnum, 0);
videopts++;
+ videoqueue.Enqueue(f);
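+ // drain any queued audio due at or before this video frame, keeping the output in timestamp order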
+ while (audioqueue.Count > 0 && f >= audioqueue.Peek())
+ audioqueue.Dequeue().WriteData(output);
}
- void writeframe(byte[] data, int stream_id, ulong pts)
- {
- var frameheader = new MemoryStream();
- frameheader.WriteByte(0); // frame_code
- // frame_flags = FLAG_CODED, so:
- int flags = 0;
- flags |= 1 << 0; // FLAG_KEY
- if (data.Length == 0)
- flags |= 1 << 1; // FLAG_EOR
- flags |= 1 << 3; // FLAG_CODED_PTS
- flags |= 1 << 4; // FLAG_STREAM_ID
- flags |= 1 << 5; // FLAG_SIZE_MSB
- flags |= 1 << 6; // FLAG_CHECKSUM
- WriteVarU(flags, frameheader);
- WriteVarU(stream_id, frameheader); // stream_id
- WriteVarU(pts + 256, frameheader); // coded_pts = pts + 1 << msb_pts_shift
- WriteVarU(data.Length, frameheader); // data_size_msb
-
- var frameheaderarr = frameheader.ToArray();
- output.Write(frameheaderarr, 0, frameheaderarr.Length);
- WriteBE32(NutCRC32(frameheaderarr), output); // checksum
- output.Write(data, 0, data.Length);
-
- }
/// <summary>
/// write an audio frame to the stream
@@ -461,9 +558,11 @@ namespace BizHawk.MultiClient
if (data.Length == 0)
audiodone = true;
- writesyncpoint(audiopts * 2 + 1);
- writeframe(data, 1, audiopts);
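+ // audio timebase is 1/samplerate seconds per tick, so audiopts counts samples (per channel)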
+ var f = new NutFrame(data, audiopts, 1, (ulong)avparams.samplerate, 1);
audiopts += (ulong)samples.Length / (ulong)avparams.channels;
+ audioqueue.Enqueue(f);
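+ // likewise, drain any queued video due at or before this audio frame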
+ while (videoqueue.Count > 0 && f >= videoqueue.Peek())
+ videoqueue.Dequeue().WriteData(output);
}
/// <summary>
@@ -491,6 +590,9 @@ namespace BizHawk.MultiClient
audiopts = 0;
videopts = 0;
+ audioqueue = new Queue<NutFrame>();
+ videoqueue = new Queue<NutFrame>();
+
writemainheader();
writevideoheader();
writeaudioheader();
@@ -510,6 +612,20 @@ namespace BizHawk.MultiClient
if (!audiodone)
writeaudioframe(new short[0]);
+ // flush any remaining queued packets
+
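+ // whichever stream ended later may still have frames queued; write what's left in timestamp order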
+ while (audioqueue.Count > 0 && videoqueue.Count > 0)
+ {
+ if (audioqueue.Peek() <= videoqueue.Peek())
+ audioqueue.Dequeue().WriteData(output);
+ else
+ videoqueue.Dequeue().WriteData(output);
+ }
+ while (audioqueue.Count > 0)
+ audioqueue.Dequeue().WriteData(output);
+ while (videoqueue.Count > 0)
+ videoqueue.Dequeue().WriteData(output);
+
output.Close();
output = null;
}