NutMuxer.cs: add actual pts-based stream interleave code
FFmpegWriter.cs: use NutMuxer to create a muxed raw AV stream to send to ffmpeg, so ffmpeg can output audio and video from a single stdin pipe
parent 36860f5c16
commit e1a3f687b5
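Sketched below is the overall wiring these two changes add up to: FFmpegWriter launches ffmpeg reading NUT from stdin ("-f nut -i -") and hands ffmpeg's stdin stream to a NutMuxer, which interleaves video and audio into it. This is a rough standalone sketch under assumptions, not the project's code; the output name, dimensions, and rates are example values, and NutMuxer stands for the class changed in this commit.

using System.Diagnostics;

// Launch ffmpeg so it reads a NUT stream from stdin and encodes it (mirrors the argument change below).
var ffmpeg = new Process();
ffmpeg.StartInfo.FileName = "ffmpeg"; // assumed to be on PATH
ffmpeg.StartInfo.Arguments = "-y -f nut -i - -vcodec libx264rgb -acodec pcm_s16le -crf 0 \"out.mkv\"";
ffmpeg.StartInfo.UseShellExecute = false;      // required for stream redirection
ffmpeg.StartInfo.RedirectStandardInput = true; // the muxed A/V stream is written here
ffmpeg.Start();

// One muxer per segment, writing straight into ffmpeg's stdin, as the diff does.
var muxer = new NutMuxer(640, 480, 60, 1, 44100, 2, ffmpeg.StandardInput.BaseStream);
// per emulated frame: muxer.writevideoframe(bgraBytes); muxer.writeaudioframe(samples);
// on close: muxer.Finish(); then ffmpeg.WaitForExit(...);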
FFmpegWriter.cs
@@ -2,16 +2,14 @@
 using System.Collections.Generic;
 using System.Linq;
 using System.Text;
-using System.Net.Sockets;
-using System.Net;
 using System.Diagnostics;
 
 namespace BizHawk.MultiClient
 {
 /// <summary>
-/// uses tcp sockets to launch an external ffmpeg process and encode
+/// uses pipes to launch an external ffmpeg process and encode
 /// </summary>
-class FFmpegWriter : WavWriterV, IVideoWriter
+class FFmpegWriter : IVideoWriter
 {
 /// <summary>
 /// handle to external ffmpeg process
@@ -43,12 +41,15 @@ namespace BizHawk.MultiClient
 /// </summary>
 const int consolebuffer = 5;
 
-public new void OpenFile(string baseName)
+/// <summary>
+/// muxer handle for the current segment
+/// </summary>
+NutMuxer muxer;
+
+public void OpenFile(string baseName)
 {
 string s = System.IO.Path.GetFileNameWithoutExtension(baseName);
 
-base.OpenFile(s + ".wav");
-
 this.baseName = s;
 
 segment = 0;
@@ -67,11 +68,7 @@ namespace BizHawk.MultiClient
 
 ffmpeg.StartInfo.Arguments = String.Format
 (
-"-y -f rawvideo -pix_fmt bgra -s {0}x{1} -r {2}/{3} -i - -vcodec libx264rgb -crf 0 \"{4}.mkv\"",
-width,
-height,
-fpsnum,
-fpsden,
+"-y -f nut -i - -vcodec libx264rgb -acodec pcm_s16le -crf 0 \"{0}.mkv\"",
 filename
 );
 
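For concreteness, with hypothetical values (256x224 video at 60/1 fps, a segment named movie_0), the old and new argument strings expand to roughly:

old: -y -f rawvideo -pix_fmt bgra -s 256x224 -r 60/1 -i - -vcodec libx264rgb -crf 0 "movie_0.mkv"
new: -y -f nut -i - -vcodec libx264rgb -acodec pcm_s16le -crf 0 "movie_0.mkv"

With NUT input, the frame size, pixel format, frame rate, and audio parameters travel inside the container headers instead of on the command line, which is what lets audio share the same stdin pipe.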
@@ -90,6 +87,8 @@ namespace BizHawk.MultiClient
 
 ffmpeg.Start();
 ffmpeg.BeginErrorReadLine();
+
+muxer = new NutMuxer(width, height, fpsnum, fpsden, sampleRate, channels, ffmpeg.StandardInput.BaseStream);
 }
 
 /// <summary>
@@ -112,21 +111,22 @@ namespace BizHawk.MultiClient
 /// </summary>
 void CloseFileSegment()
 {
-ffmpeg.StandardInput.Close();
+muxer.Finish();
+//ffmpeg.StandardInput.Close();
 
 // how long should we wait here?
 ffmpeg.WaitForExit(20000);
 ffmpeg = null;
 stderr = null;
 commandline = null;
+muxer = null;
 }
 
 
-public new void CloseFile()
+public void CloseFile()
 {
 CloseFileSegment();
 baseName = null;
-base.CloseFile();
 }
 
 /// <summary>
@@ -152,7 +152,7 @@ namespace BizHawk.MultiClient
 }
 
 
-public new void AddFrame(IVideoProvider source)
+public void AddFrame(IVideoProvider source)
 {
 if (source.BufferWidth != width || source.BufferHeight != height)
 SetVideoParameters(source.BufferWidth, source.BufferHeight);
@@ -162,8 +162,11 @@ namespace BizHawk.MultiClient
 var a = source.GetVideoBuffer();
 var b = new byte[a.Length * sizeof (int)];
 Buffer.BlockCopy(a, 0, b, 0, b.Length);
+
+muxer.writevideoframe(b);
+
 // have to do binary write!
-ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
+//ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
 }
 
 
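AddFrame still flattens the emulator's int-per-pixel BGRA buffer into bytes before handing it to the muxer: the byte array is sizeof(int) = 4 times as long, and Buffer.BlockCopy is a straight byte-for-byte copy. A minimal isolated sketch of that step (dimensions are example values):

// Sketch: convert an int[] BGRA buffer to raw bytes, as AddFrame does above.
int[] pixels = new int[256 * 224];                  // one int per BGRA pixel (example size)
byte[] raw = new byte[pixels.Length * sizeof(int)]; // 4 bytes per pixel
Buffer.BlockCopy(pixels, 0, raw, 0, raw.Length);    // no per-pixel conversion, just a copy
// raw is what would be passed to muxer.writevideoframe(raw);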
@@ -177,12 +180,12 @@ namespace BizHawk.MultiClient
 }
 }
 
-public new IDisposable AcquireVideoCodecToken(IntPtr hwnd)
+public IDisposable AcquireVideoCodecToken(IntPtr hwnd)
 {
 return new FFmpegWriterToken();
 }
 
-public new void SetVideoCodecToken(IDisposable token)
+public void SetVideoCodecToken(IDisposable token)
 {
 // nyi
 }
@@ -190,22 +193,22 @@ namespace BizHawk.MultiClient
 /// <summary>
 /// video params
 /// </summary>
-int fpsnum, fpsden, width, height;
+int fpsnum, fpsden, width, height, sampleRate, channels;
 
-public new void SetMovieParameters(int fpsnum, int fpsden)
+public void SetMovieParameters(int fpsnum, int fpsden)
 {
 this.fpsnum = fpsnum;
 this.fpsden = fpsden;
 }
 
-public new void SetVideoParameters(int width, int height)
+public void SetVideoParameters(int width, int height)
 {
 this.width = width;
 this.height = height;
 
-/* ffmpeg theoretically supports variable resolution videos, but there's no way to
- * signal that metadata in a raw pipe. so if we're currently in a segment,
- * start a new one */
+/* ffmpeg theoretically supports variable resolution videos, but in practice that's not handled very well.
+ * so we start a new segment.
+ */
 if (ffmpeg != null)
 {
 CloseFileSegment();
@@ -215,15 +218,28 @@ namespace BizHawk.MultiClient
 }
 
 
-public new void SetMetaData(string gameName, string authors, ulong lengthMS, ulong rerecords)
+public void SetMetaData(string gameName, string authors, ulong lengthMS, ulong rerecords)
 {
 // can be implemented with ffmpeg "-metadata" parameter???
 // nyi
 }
 
-public new void Dispose()
+public void Dispose()
 {
-base.Dispose();
 }
+
+
+public void AddSamples(short[] samples)
+{
+muxer.writeaudioframe(samples);
+}
+
+public void SetAudioParameters(int sampleRate, int channels, int bits)
+{
+if (bits != 16)
+throw new ArgumentOutOfRangeException("sampling depth must be 16 bits!");
+this.sampleRate = sampleRate;
+this.channels = channels;
+}
 }
 }
NutMuxer.cs
@@ -13,7 +13,7 @@ namespace BizHawk.MultiClient
 /// </summary>
 class NutMuxer
 {
-/* TODO: timestamp sanitization (like JMDWriter) */
+// this code isn't really any good for general purpose nut creation
 
 
 /// <summary>
@@ -143,7 +143,7 @@ namespace BizHawk.MultiClient
 /// seems to be different than standard CRC32?????
 /// </summary>
 /// <param name="buf"></param>
-/// <returns></returns>
+/// <returns>crc32, nut variant</returns>
 static uint NutCRC32(byte[] buf)
 {
 uint crc = 0;
@@ -176,6 +176,11 @@ namespace BizHawk.MultiClient
 StartCode startcode;
 Stream underlying;
 
+/// <summary>
+/// create a new NutPacket
+/// </summary>
+/// <param name="startcode">startcode for this packet</param>
+/// <param name="underlying">stream to write to</param>
 public NutPacket(StartCode startcode, Stream underlying)
 {
 data = new MemoryStream();
@@ -306,13 +311,22 @@ namespace BizHawk.MultiClient
 /// </summary>
 bool audiodone;
 
+/// <summary>
+/// video packets waiting to be written
+/// </summary>
+Queue<NutFrame> videoqueue;
+/// <summary>
+/// audio packets waiting to be written
+/// </summary>
+Queue<NutFrame> audioqueue;
+
 
 /// <summary>
 /// write out the main header
 /// </summary>
 void writemainheader()
 {
-// note: this tag not actually part of main headers
+// note: this file starttag not actually part of main headers
 var tmp = Encoding.ASCII.GetBytes("nut/multimedia container\0");
 output.Write(tmp, 0, tmp.Length);
 
@@ -396,16 +410,121 @@ namespace BizHawk.MultiClient
 }
 
 /// <summary>
-/// writes a syncpoint header with already coded universal timestamp
+/// stores a single frame with syncpoint, in mux-ready form
+/// used because reordering of audio and video can be needed for proper interleave
 /// </summary>
-void writesyncpoint(ulong global_key_pts)
+class NutFrame
 {
-var header = new NutPacket(NutPacket.StartCode.Syncpoint, output);
-WriteVarU(global_key_pts, header); // global_key_pts; file starts at time 0
-WriteVarU(1, header); // back_ptr_div_16 ?????????????????????????????
-header.Flush();
+/// <summary>
+/// data ready to be written to stream/disk
+/// </summary>
+byte[] data;
+
+/// <summary>
+/// presentation timestamp
+/// </summary>
+ulong pts;
+
+/// <summary>
+/// fraction of the specified timebase
+/// </summary>
+ulong ptsnum, ptsden;
+
+/// <summary>
+///
+/// </summary>
+/// <param name="payload">frame data</param>
+/// <param name="pts">presentation timestamp</param>
+/// <param name="ptsnum">numerator of timebase</param>
+/// <param name="ptsden">denominator of timebase</param>
+/// <param name="ptsindex">which timestamp base is used, assumed to be also stream number</param>
+public NutFrame(byte[] payload, ulong pts, ulong ptsnum, ulong ptsden, int ptsindex)
+{
+this.pts = pts;
+this.ptsnum = ptsnum;
+this.ptsden = ptsden;
+
+var frame = new MemoryStream();
+
+// create syncpoint
+var sync = new NutPacket(NutPacket.StartCode.Syncpoint, frame);
+WriteVarU(pts * 2 + (ulong)ptsindex, sync); // global_key_pts
+WriteVarU(1, sync); // back_ptr_div_16, this is wrong
+sync.Flush();
+
+
+var frameheader = new MemoryStream();
+frameheader.WriteByte(0); // frame_code
+// frame_flags = FLAG_CODED, so:
+int flags = 0;
+flags |= 1 << 0; // FLAG_KEY
+if (payload.Length == 0)
+flags |= 1 << 1; // FLAG_EOR
+flags |= 1 << 3; // FLAG_CODED_PTS
+flags |= 1 << 4; // FLAG_STREAM_ID
+flags |= 1 << 5; // FLAG_SIZE_MSB
+flags |= 1 << 6; // FLAG_CHECKSUM
+WriteVarU(flags, frameheader);
+WriteVarU(ptsindex, frameheader); // stream_id
+WriteVarU(pts + 256, frameheader); // coded_pts = pts + 1 << msb_pts_shift
+WriteVarU(payload.Length, frameheader); // data_size_msb
+
+var frameheaderarr = frameheader.ToArray();
+frame.Write(frameheaderarr, 0, frameheaderarr.Length);
+WriteBE32(NutCRC32(frameheaderarr), frame); // checksum
+frame.Write(payload, 0, payload.Length);
+
+data = frame.ToArray();
+}
+
+/// <summary>
+/// compare two NutFrames by pts
+/// </summary>
+/// <param name="lhs"></param>
+/// <param name="rhs"></param>
+/// <returns></returns>
+public static bool operator <=(NutFrame lhs, NutFrame rhs)
+{
+BigInteger left = new BigInteger(lhs.pts);
+left = left * lhs.ptsnum * rhs.ptsden;
+BigInteger right = new BigInteger(rhs.pts);
+right = right * rhs.ptsnum * lhs.ptsden;
+
+return left <= right;
+}
+public static bool operator >=(NutFrame lhs, NutFrame rhs)
+{
+BigInteger left = new BigInteger(lhs.pts);
+left = left * lhs.ptsnum * rhs.ptsden;
+BigInteger right = new BigInteger(rhs.pts);
+right = right * rhs.ptsnum * lhs.ptsden;
+
+return left >= right;
+}
+
+
+static NutFrame()
+{
+dbg = new StreamWriter(".\\nutframe.txt", false);
+}
+
+static StreamWriter dbg;
+
+/// <summary>
+/// write out frame, with syncpoint and all headers
+/// </summary>
+/// <param name="dest"></param>
+public void WriteData(Stream dest)
+{
+
+
+dest.Write(data, 0, data.Length);
+dbg.WriteLine(string.Format("{0},{1},{2}", pts, ptsnum, ptsden));
+}
+
 }
 
+
 /// <summary>
 /// write a video frame to the stream
 /// </summary>
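The two comparison operators above put timestamps from different timebases on a common scale by cross-multiplying (lhs.pts * lhs.ptsnum * rhs.ptsden against rhs.pts * rhs.ptsnum * lhs.ptsden), with BigInteger so the products cannot overflow. A worked example with made-up rates, 60 fps video and 44100 Hz audio: video frame pts=3 uses timebase 1/60 and audio frame pts=2205 uses timebase 1/44100, so both sides cross-multiply to 132300 (0.05 s) and the frames compare equal:

// Hypothetical check mirroring operator <= above (values are examples, not from the diff).
var video = new System.Numerics.BigInteger(3) * 1 * 44100;  // video pts * ptsnum * audio ptsden
var audio = new System.Numerics.BigInteger(2205) * 1 * 60;  // audio pts * ptsnum * video ptsden
System.Console.WriteLine(video <= audio);                   // True: both sides are 132300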
@@ -416,37 +535,15 @@ namespace BizHawk.MultiClient
 throw new Exception("Can't write data after end of relevance!");
 if (data.Length == 0)
 videodone = true;
-writesyncpoint(videopts * 2 + 0);
-writeframe(data, 0, videopts);
+var f = new NutFrame(data, videopts, (ulong) avparams.fpsden, (ulong) avparams.fpsnum, 0);
 videopts++;
+videoqueue.Enqueue(f);
+while (audioqueue.Count > 0 && f >= audioqueue.Peek())
+audioqueue.Dequeue().WriteData(output);
 }
 
 
 
-void writeframe(byte[] data, int stream_id, ulong pts)
-{
-var frameheader = new MemoryStream();
-frameheader.WriteByte(0); // frame_code
-// frame_flags = FLAG_CODED, so:
-int flags = 0;
-flags |= 1 << 0; // FLAG_KEY
-if (data.Length == 0)
-flags |= 1 << 1; // FLAG_EOR
-flags |= 1 << 3; // FLAG_CODED_PTS
-flags |= 1 << 4; // FLAG_STREAM_ID
-flags |= 1 << 5; // FLAG_SIZE_MSB
-flags |= 1 << 6; // FLAG_CHECKSUM
-WriteVarU(flags, frameheader);
-WriteVarU(stream_id, frameheader); // stream_id
-WriteVarU(pts + 256, frameheader); // coded_pts = pts + 1 << msb_pts_shift
-WriteVarU(data.Length, frameheader); // data_size_msb
-
-var frameheaderarr = frameheader.ToArray();
-output.Write(frameheaderarr, 0, frameheaderarr.Length);
-WriteBE32(NutCRC32(frameheaderarr), output); // checksum
-output.Write(data, 0, data.Length);
-
-}
 
 /// <summary>
 /// write an audio frame to the stream
@@ -461,9 +558,11 @@ namespace BizHawk.MultiClient
 if (data.Length == 0)
 audiodone = true;
 
-writesyncpoint(audiopts * 2 + 1);
-writeframe(data, 1, audiopts);
+var f = new NutFrame(data, audiopts, 1, (ulong)avparams.samplerate, 1);
 audiopts += (ulong)samples.Length / (ulong)avparams.channels;
+audioqueue.Enqueue(f);
+while (videoqueue.Count > 0 && f >= videoqueue.Peek())
+videoqueue.Dequeue().WriteData(output);
 }
 
 /// <summary>
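Taken together, writevideoframe and writeaudioframe implement a simple merge: each call queues its own frame, then drains the other stream's queue while the frame at its head is due no later than the frame just produced; a stream's own frames are only flushed by the opposite stream's writes, or by Finish below. A generic sketch of that pattern, detached from the NUT specifics (the names and types here are illustrative, not from the codebase):

using System;
using System.Collections.Generic;

static class InterleaveSketch
{
    // Illustrative two-queue interleave in the shape of writevideoframe/writeaudioframe above.
    // TFrame and dueNoLaterThan are stand-ins; the real code compares NutFrame pts values.
    public static void OnFrame<TFrame>(TFrame frame, Queue<TFrame> mine, Queue<TFrame> other,
                                       Func<TFrame, TFrame, bool> dueNoLaterThan, Action<TFrame> emit)
    {
        mine.Enqueue(frame);
        // Frames buffered on the other stream that are due no later than this one can be
        // written out now, keeping the output roughly sorted by timestamp.
        while (other.Count > 0 && dueNoLaterThan(other.Peek(), frame))
            emit(other.Dequeue());
    }
}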
@@ -491,6 +590,9 @@ namespace BizHawk.MultiClient
 audiopts = 0;
 videopts = 0;
 
+audioqueue = new Queue<NutFrame>();
+videoqueue = new Queue<NutFrame>();
+
 writemainheader();
 writevideoheader();
 writeaudioheader();
@@ -510,6 +612,20 @@ namespace BizHawk.MultiClient
 if (!audiodone)
 writeaudioframe(new short[0]);
 
+// flush any remaining queued packets
+
+while (audioqueue.Count > 0 && videoqueue.Count > 0)
+{
+if (audioqueue.Peek() <= videoqueue.Peek())
+audioqueue.Dequeue().WriteData(output);
+else
+videoqueue.Dequeue().WriteData(output);
+}
+while (audioqueue.Count > 0)
+audioqueue.Dequeue().WriteData(output);
+while (videoqueue.Count > 0)
+videoqueue.Dequeue().WriteData(output);
+
 output.Close();
 output = null;
 }