Qt: Use FFmpeg to convert additional camera formats, if available

Vicki Pfau 2022-09-09 16:37:56 -07:00
parent 550866fcac
commit e87f7b7b68
3 changed files with 82 additions and 0 deletions


@@ -106,6 +106,7 @@ Misc:
 - Qt: Boot both a multiboot image and ROM with CLI args (closes mgba.io/i/1941)
 - Qt: Improve cheat parsing (fixes mgba.io/i/2297)
 - Qt: Change lossless setting to use WavPack audio
+- Qt: Use FFmpeg to convert additional camera formats, if available
 - SDL: Support exposing an axis directly as the gyro value (closes mgba.io/i/2531)
 - Windows: Attach to console if present
 - Vita: Add bilinear filtering option (closes mgba.io/i/344)


@@ -23,6 +23,9 @@ bool VideoDumper::present(const QVideoFrame& frame) {
 	QVideoFrame::PixelFormat vFormat = mappedFrame.pixelFormat();
 	QImage::Format format = QVideoFrame::imageFormatFromPixelFormat(vFormat);
 	bool swap = false;
+#ifdef USE_FFMPEG
+	bool useScaler = false;
+#endif
 	if (format == QImage::Format_Invalid) {
 		if (vFormat < QVideoFrame::Format_BGRA5658_Premultiplied) {
 			vFormat = static_cast<QVideoFrame::PixelFormat>(vFormat - QVideoFrame::Format_BGRA32 + QVideoFrame::Format_ARGB32);
@@ -34,11 +37,68 @@ bool VideoDumper::present(const QVideoFrame& frame) {
 			}
 			swap = true;
 		} else {
+#ifdef USE_FFMPEG
+			enum AVPixelFormat pixelFormat;
+			switch (vFormat) {
+			case QVideoFrame::Format_YUV420P:
+				pixelFormat = AV_PIX_FMT_YUV420P;
+				break;
+			case QVideoFrame::Format_YUV422P:
+				pixelFormat = AV_PIX_FMT_YUV422P;
+				break;
+			case QVideoFrame::Format_YUYV:
+				pixelFormat = AV_PIX_FMT_YUYV422;
+				break;
+			case QVideoFrame::Format_UYVY:
+				pixelFormat = AV_PIX_FMT_UYVY422;
+				break;
+			case QVideoFrame::Format_NV12:
+				pixelFormat = AV_PIX_FMT_NV12;
+				break;
+			case QVideoFrame::Format_NV21:
+				pixelFormat = AV_PIX_FMT_NV21;
+				break;
+			default:
 				return false;
 			}
+			format = QImage::Format_RGB888;
+			useScaler = true;
+			if (pixelFormat != m_pixfmt || m_scalerSize != mappedFrame.size()) {
+				if (m_scaler) {
+					sws_freeContext(m_scaler);
+				}
+				m_scaler = sws_getContext(mappedFrame.width(), mappedFrame.height(), pixelFormat,
+				                          mappedFrame.width(), mappedFrame.height(), AV_PIX_FMT_RGB24,
+				                          SWS_POINT, nullptr, nullptr, nullptr);
+				m_scalerSize = mappedFrame.size();
+				m_pixfmt = pixelFormat;
+			}
+#else
+			return false;
+#endif
+		}
 	}
 	uchar* bits = mappedFrame.bits();
+#ifdef USE_FFMPEG
+	QImage image;
+	if (!useScaler) {
+		image = QImage(bits, mappedFrame.width(), mappedFrame.height(), mappedFrame.bytesPerLine(), format);
+	}
+	if (useScaler) {
+		image = QImage(mappedFrame.width(), mappedFrame.height(), format);
+		const uint8_t* planes[8] = {0};
+		int strides[8] = {0};
+		for (int plane = 0; plane < mappedFrame.planeCount(); ++plane) {
+			planes[plane] = mappedFrame.bits(plane);
+			strides[plane] = mappedFrame.bytesPerLine(plane);
+		}
+		uint8_t* outBits = image.bits();
+		int outStride = image.bytesPerLine();
+		sws_scale(m_scaler, planes, strides, 0, mappedFrame.height(), &outBits, &outStride);
+	} else
+#else
 	QImage image(bits, mappedFrame.width(), mappedFrame.height(), mappedFrame.bytesPerLine(), format);
+#endif
 	if (swap) {
 		image = image.rgbSwapped();
 	} else if (surfaceFormat().scanLineDirection() != QVideoSurfaceFormat::BottomToTop) {
@@ -66,5 +126,13 @@ QList<QVideoFrame::PixelFormat> VideoDumper::supportedPixelFormats(QAbstractVide
 	list.append(QVideoFrame::Format_BGRA32_Premultiplied);
 	list.append(QVideoFrame::Format_BGR565);
 	list.append(QVideoFrame::Format_BGR555);
+#ifdef USE_FFMPEG
+	list.append(QVideoFrame::Format_YUYV);
+	list.append(QVideoFrame::Format_UYVY);
+	list.append(QVideoFrame::Format_YUV422P);
+	list.append(QVideoFrame::Format_YUV420P);
+	list.append(QVideoFrame::Format_NV12);
+	list.append(QVideoFrame::Format_NV21);
+#endif
 	return list;
 }
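
Editor's note: present() now caches an SwsContext keyed on the incoming pixel format and frame size, then runs sws_scale() over the mapped planes to produce an RGB888 QImage. The lone `} else` emitted just before `#else` deliberately chains onto the following `if (swap)`, so frames that already went through swscale skip the RGB-swap/mirror fix-ups. The snippet below is not part of the commit; it is a minimal standalone sketch of the same sws_getContext/sws_scale pattern, converting one packed YUYV frame to RGB24. The frame size, the flat-grey test data, and the build line are illustrative assumptions.

// Sketch only: convert a synthetic YUYV 4:2:2 frame to RGB24 with libswscale.
// Build (assumption): g++ yuyv2rgb.cpp $(pkg-config --cflags --libs libswscale libavutil)
#include <cstdint>
#include <cstdio>
#include <vector>

extern "C" {
#include <libswscale/swscale.h>
}

int main() {
	const int width = 176;  // illustrative size, not taken from the commit
	const int height = 144;

	// Packed YUYV: 2 bytes per pixel; every byte 0x80 gives a flat grey frame.
	std::vector<uint8_t> yuyv(width * height * 2, 0x80);
	std::vector<uint8_t> rgb(width * height * 3);

	// One context per (format, size) pair; a long-lived consumer would cache it,
	// as the commit does with m_scaler, m_pixfmt, and m_scalerSize.
	SwsContext* scaler = sws_getContext(width, height, AV_PIX_FMT_YUYV422,
	                                    width, height, AV_PIX_FMT_RGB24,
	                                    SWS_POINT, nullptr, nullptr, nullptr);
	if (!scaler) {
		return 1;
	}

	const uint8_t* srcPlanes[1] = { yuyv.data() };
	int srcStrides[1] = { width * 2 };
	uint8_t* dstPlanes[1] = { rgb.data() };
	int dstStrides[1] = { width * 3 };

	sws_scale(scaler, srcPlanes, srcStrides, 0, height, dstPlanes, dstStrides);
	sws_freeContext(scaler);

	std::printf("first RGB pixel: %d %d %d\n", rgb[0], rgb[1], rgb[2]);
	return 0;
}

SWS_POINT is the cheapest scaler flag; since source and destination sizes match here (and in the patch), swscale only performs the pixel-format conversion.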


@@ -7,6 +7,12 @@
 #include <QAbstractVideoSurface>
 
+#ifdef USE_FFMPEG
+extern "C" {
+#include <libswscale/swscale.h>
+}
+#endif
+
 namespace QGBA {
 
 class VideoDumper : public QAbstractVideoSurface {
@@ -20,6 +26,13 @@ public:
 signals:
 	void imageAvailable(const QImage& image);
 
+private:
+#ifdef USE_FFMPEG
+	AVPixelFormat m_pixfmt = AV_PIX_FMT_NONE;
+	SwsContext* m_scaler = nullptr;
+	QSize m_scalerSize;
+#endif
 };
 
 }
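
Editor's note: VideoDumper is a QAbstractVideoSurface, so under Qt 5 Multimedia it can be handed to a camera as the viewfinder sink and will emit imageAvailable() for each converted frame. The sketch below shows one plausible wiring; the include path, the startCapture() helper, and the lambda body are illustrative assumptions, not part of this commit.

// Sketch only: feeding a camera into VideoDumper under Qt 5 Multimedia.
#include <QCamera>
#include <QImage>
#include <QObject>

#include "VideoDumper.h" // hypothetical include path for the class above

void startCapture(QGBA::VideoDumper* dumper) {
	auto* camera = new QCamera(dumper);  // default camera; parented to the surface
	camera->setViewfinder(dumper);       // QAbstractVideoSurface as the viewfinder sink
	QObject::connect(dumper, &QGBA::VideoDumper::imageAvailable, [](const QImage& image) {
		// present() has already produced an RGB QImage at this point, via swscale
		// when the camera delivered a YUV- or NV12-style format.
		Q_UNUSED(image);
	});
	camera->start();
}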