Core: Refactor GBAAVStream into mAVStream

Jeffrey Pfau 2016-02-07 18:02:18 -08:00
parent d86440e04f
commit 2eaaaa8491
18 changed files with 60 additions and 67 deletions

View File

@@ -8,27 +8,18 @@
#include "util/common.h"
#include "core/config.h"
#if !defined(MINIMAL_CORE) || MINIMAL_CORE < 2
#include "core/directories.h"
#endif
#ifndef MINIMAL_CORE
#include "core/input.h"
#endif
#include "core/config.h"
#include "core/interface.h"
struct VFile;
struct mRTCSource;
struct mCoreConfig;
#ifdef COLOR_16_BIT
typedef uint16_t color_t;
#define BYTES_PER_PIXEL 2
#else
typedef uint32_t color_t;
#define BYTES_PER_PIXEL 4
#endif
struct blip_t;
struct mCoreSync;
struct mCore {
void* cpu;

View File

@@ -8,6 +8,24 @@
#include "util/common.h"
struct mCore;
#ifdef COLOR_16_BIT
typedef uint16_t color_t;
#define BYTES_PER_PIXEL 2
#else
typedef uint32_t color_t;
#define BYTES_PER_PIXEL 4
#endif
struct blip_t;
struct mAVStream {
void (*postVideoFrame)(struct mAVStream*, const color_t* buffer, size_t stride);
void (*postAudioFrame)(struct mAVStream*, int16_t left, int16_t right);
void (*postAudioBuffer)(struct mAVStream*, struct blip_t* left, struct blip_t* right);
};
struct mKeyCallback {
uint16_t (*readKeys)(struct mKeyCallback*);
};

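For reference, the new mAVStream is consumed the way the encoders later in this commit use it: a frontend embeds the struct as the first member of its own state, fills in the callbacks it needs, and leaves the rest NULL (the core checks each pointer before calling it). A minimal sketch under those assumptions; MyEncoder and its init function are hypothetical, and only the struct layout and callback signatures come from this commit:

#include "core/interface.h"

struct MyEncoder {
	struct mAVStream d; // must stay first so the stream pointer can be cast back
	unsigned framesSeen;
};

static void _postVideoFrame(struct mAVStream* stream, const color_t* pixels, size_t stride) {
	struct MyEncoder* encoder = (struct MyEncoder*) stream; // recover the embedding struct
	UNUSED(pixels);
	UNUSED(stride);
	++encoder->framesSeen;
}

void MyEncoderInit(struct MyEncoder* encoder) {
	encoder->d.postVideoFrame = _postVideoFrame;
	encoder->d.postAudioFrame = NULL; // unused callbacks stay NULL, as in the GIF encoder below
	encoder->d.postAudioBuffer = NULL;
	encoder->framesSeen = 0;
}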
View File

@@ -321,7 +321,7 @@ static void _sample(struct GBAAudio* audio) {
mCoreSyncProduceAudio(audio->p->sync, wait);
if (wait && audio->p->stream && audio->p->stream->postAudioBuffer) {
audio->p->stream->postAudioBuffer(audio->p->stream, audio);
audio->p->stream->postAudioBuffer(audio->p->stream, audio->psg.left, audio->psg.right);
}
}

View File

@@ -912,7 +912,10 @@ void GBAFrameEnded(struct GBA* gba) {
}
if (gba->stream && gba->stream->postVideoFrame) {
gba->stream->postVideoFrame(gba->stream, gba->video.renderer);
const color_t* pixels;
unsigned stride;
gba->video.renderer->getPixels(gba->video.renderer, &stride, (const void**) &pixels);
gba->stream->postVideoFrame(gba->stream, pixels, stride);
}
if (gba->memory.hw.devices & (HW_GB_PLAYER | HW_GB_PLAYER_DETECTION)) {

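With this change the core resolves the frame itself through the renderer's getPixels and passes the stream a raw buffer plus a stride counted in pixels, not bytes; byte-oriented consumers scale by BYTES_PER_PIXEL, as the FFmpeg and ImageMagick encoders below do. A rough sketch of the receiving side, assuming a hypothetical _copyFrame helper and the GBA frame dimensions from gba/video.h; the callback signature is the one added to core/interface.h above:

#include <string.h>
#include "core/interface.h"
#include "gba/video.h"

// Copy the visible GBA frame out of the buffer handed to postVideoFrame.
// stride is in pixels, so the per-row byte count is width * BYTES_PER_PIXEL.
static void _copyFrame(color_t* dst, const color_t* pixels, size_t stride) {
	size_t row;
	for (row = 0; row < VIDEO_VERTICAL_PIXELS; ++row) {
		memcpy(&dst[row * VIDEO_HORIZONTAL_PIXELS], &pixels[row * stride], VIDEO_HORIZONTAL_PIXELS * BYTES_PER_PIXEL);
	}
}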
View File

@@ -116,7 +116,7 @@ struct GBA {
GBALogHandler logHandler;
enum GBALogLevel logLevel;
struct GBAAVStream* stream;
struct mAVStream* stream;
struct mKeyCallback* keyCallback;
struct mStopCallback* stopCallback;

View File

@@ -43,12 +43,6 @@ struct GBAVideoRenderer;
typedef void (*GBALogHandler)(struct GBAThread*, enum GBALogLevel, const char* format, va_list args);
struct GBAAVStream {
void (*postVideoFrame)(struct GBAAVStream*, struct GBAVideoRenderer* renderer);
void (*postAudioFrame)(struct GBAAVStream*, int16_t left, int16_t right);
void (*postAudioBuffer)(struct GBAAVStream*, struct GBAAudio*);
};
extern const int GBA_LUX_LEVELS[10];
struct GBALuminanceSource {

View File

@@ -46,7 +46,7 @@ struct GBAThread {
const char* fname;
const char* movie;
int activeKeys;
struct GBAAVStream* stream;
struct mAVStream* stream;
struct Configuration* overrides;
enum GBAIdleLoopOptimization idleOptimization;
bool bootBios;

View File

@@ -52,7 +52,7 @@ static enum {
// TODO: Move into context
static void* outputBuffer;
static struct GBAAVStream stream;
static struct mAVStream stream;
static int16_t* audioLeft = 0;
static int16_t* audioRight = 0;
static size_t audioPos = 0;
@@ -165,7 +165,7 @@ static void _csndPlaySound(u32 flags, u32 sampleRate, float vol, void* left, voi
CSND_SetChnRegs(flags | SOUND_CHANNEL(9), pright, pright, size, volumes, volumes);
}
static void _postAudioBuffer(struct GBAAVStream* stream, struct GBAAudio* audio);
static void _postAudioBuffer(struct mAVStream* stream, blip_t* left, blip_t* right);
static void _drawStart(void) {
ctrGpuBeginDrawing();
@@ -504,11 +504,11 @@ static int32_t _readGyroZ(struct mRotationSource* source) {
return rotation->gyro.y << 18L; // Yes, y
}
static void _postAudioBuffer(struct GBAAVStream* stream, struct GBAAudio* audio) {
static void _postAudioBuffer(struct mAVStream* stream, blip_t* left, blip_t* right) {
UNUSED(stream);
if (hasSound == CSND_SUPPORTED) {
blip_read_samples(audio->psg.left, &audioLeft[audioPos], AUDIO_SAMPLES, false);
blip_read_samples(audio->psg.right, &audioRight[audioPos], AUDIO_SAMPLES, false);
blip_read_samples(left, &audioLeft[audioPos], AUDIO_SAMPLES, false);
blip_read_samples(right, &audioRight[audioPos], AUDIO_SAMPLES, false);
GSPGPU_FlushDataCache(&audioLeft[audioPos], AUDIO_SAMPLES * sizeof(int16_t));
GSPGPU_FlushDataCache(&audioRight[audioPos], AUDIO_SAMPLES * sizeof(int16_t));
audioPos = (audioPos + AUDIO_SAMPLES) % AUDIO_SAMPLE_BUFFER;
@@ -526,8 +526,8 @@ static void _postAudioBuffer(struct GBAAVStream* stream, struct GBAAudio* audio)
while (dspBuffer[bufferId].status == NDSP_WBUF_QUEUED || dspBuffer[bufferId].status == NDSP_WBUF_PLAYING) {
bufferId = (bufferId + 1) & (DSP_BUFFERS - 1);
if (bufferId == startId) {
blip_clear(audio->psg.left);
blip_clear(audio->psg.right);
blip_clear(left);
blip_clear(right);
return;
}
}
@@ -535,8 +535,8 @@ static void _postAudioBuffer(struct GBAAVStream* stream, struct GBAAudio* audio)
memset(&dspBuffer[bufferId], 0, sizeof(dspBuffer[bufferId]));
dspBuffer[bufferId].data_pcm16 = tmpBuf;
dspBuffer[bufferId].nsamples = AUDIO_SAMPLES;
blip_read_samples(audio->psg.left, dspBuffer[bufferId].data_pcm16, AUDIO_SAMPLES, true);
blip_read_samples(audio->psg.right, dspBuffer[bufferId].data_pcm16 + 1, AUDIO_SAMPLES, true);
blip_read_samples(left, dspBuffer[bufferId].data_pcm16, AUDIO_SAMPLES, true);
blip_read_samples(right, dspBuffer[bufferId].data_pcm16 + 1, AUDIO_SAMPLES, true);
DSP_FlushDataCache(dspBuffer[bufferId].data_pcm16, AUDIO_SAMPLES * 2 * sizeof(int16_t));
ndspChnWaveBufAdd(0, &dspBuffer[bufferId]);
}

View File

@@ -22,8 +22,8 @@
#include <libavresample/avresample.h>
#include <libswscale/swscale.h>
static void _ffmpegPostVideoFrame(struct GBAAVStream*, struct GBAVideoRenderer* renderer);
static void _ffmpegPostAudioFrame(struct GBAAVStream*, int16_t left, int16_t right);
static void _ffmpegPostVideoFrame(struct mAVStream*, const color_t* pixels, size_t stride);
static void _ffmpegPostAudioFrame(struct mAVStream*, int16_t left, int16_t right);
enum {
PREFERRED_SAMPLE_RATE = 0x8000
@@ -360,7 +360,7 @@ bool FFmpegEncoderIsOpen(struct FFmpegEncoder* encoder) {
return !!encoder->context;
}
void _ffmpegPostAudioFrame(struct GBAAVStream* stream, int16_t left, int16_t right) {
void _ffmpegPostAudioFrame(struct mAVStream* stream, int16_t left, int16_t right) {
struct FFmpegEncoder* encoder = (struct FFmpegEncoder*) stream;
if (!encoder->context || !encoder->audioCodec) {
return;
@@ -419,14 +419,11 @@ void _ffmpegPostAudioFrame(struct GBAAVStream* stream, int16_t left, int16_t rig
av_free_packet(&packet);
}
void _ffmpegPostVideoFrame(struct GBAAVStream* stream, struct GBAVideoRenderer* renderer) {
void _ffmpegPostVideoFrame(struct mAVStream* stream, const color_t* pixels, size_t stride) {
struct FFmpegEncoder* encoder = (struct FFmpegEncoder*) stream;
if (!encoder->context) {
return;
}
const uint8_t* pixels;
unsigned stride;
renderer->getPixels(renderer, &stride, (const void**) &pixels);
stride *= BYTES_PER_PIXEL;
AVPacket packet;

View File

@@ -11,7 +11,7 @@
#include <libavformat/avformat.h>
struct FFmpegEncoder {
struct GBAAVStream d;
struct mAVStream d;
struct AVFormatContext* context;
unsigned audioBitrate;

View File

@@ -8,14 +8,13 @@
#include "gba/video.h"
#include "util/string.h"
static void _magickPostVideoFrame(struct GBAAVStream*, struct GBAVideoRenderer* renderer);
static void _magickPostAudioFrame(struct GBAAVStream*, int16_t left, int16_t right);
static void _magickPostVideoFrame(struct mAVStream*, const color_t* pixels, size_t stride);
void ImageMagickGIFEncoderInit(struct ImageMagickGIFEncoder* encoder) {
encoder->wand = 0;
encoder->d.postVideoFrame = _magickPostVideoFrame;
encoder->d.postAudioFrame = _magickPostAudioFrame;
encoder->d.postAudioFrame = 0;
encoder->d.postAudioBuffer = 0;
encoder->frameskip = 2;
@@ -59,7 +58,7 @@ bool ImageMagickGIFEncoderIsOpen(struct ImageMagickGIFEncoder* encoder) {
return !!encoder->wand;
}
static void _magickPostVideoFrame(struct GBAAVStream* stream, struct GBAVideoRenderer* renderer) {
static void _magickPostVideoFrame(struct mAVStream* stream, const color_t* pixels, size_t stride) {
struct ImageMagickGIFEncoder* encoder = (struct ImageMagickGIFEncoder*) stream;
if (encoder->currentFrame % (encoder->frameskip + 1)) {
@@ -67,12 +66,10 @@ static void _magickPostVideoFrame(struct GBAAVStream* stream, struct GBAVideoRen
return;
}
const uint8_t* pixels;
unsigned stride;
renderer->getPixels(renderer, &stride, (const void**) &pixels);
const uint8_t* p8 = (const uint8_t*) pixels;
size_t row;
for (row = 0; row < VIDEO_VERTICAL_PIXELS; ++row) {
memcpy(&encoder->frame[row * VIDEO_HORIZONTAL_PIXELS], &pixels[row * 4 * stride], VIDEO_HORIZONTAL_PIXELS * 4);
memcpy(&encoder->frame[row * VIDEO_HORIZONTAL_PIXELS], &p8[row * 4 * stride], VIDEO_HORIZONTAL_PIXELS * 4);
}
MagickConstituteImage(encoder->wand, VIDEO_HORIZONTAL_PIXELS, VIDEO_VERTICAL_PIXELS, "RGBP", CharPixel, encoder->frame);
@@ -92,10 +89,3 @@ static void _magickPostVideoFrame(struct GBAAVStream* stream, struct GBAVideoRen
MagickSetImageDelay(encoder->wand, nts - ts);
++encoder->currentFrame;
}
static void _magickPostAudioFrame(struct GBAAVStream* stream, int16_t left, int16_t right) {
UNUSED(stream);
UNUSED(left);
UNUSED(right);
// This is a video-only format...
}

View File

@@ -14,7 +14,7 @@
#include <wand/MagickWand.h>
struct ImageMagickGIFEncoder {
struct GBAAVStream d;
struct mAVStream d;
MagickWand* wand;
char* outfile;
uint32_t* frame;

View File

@@ -29,7 +29,7 @@ static retro_set_rumble_state_t rumbleCallback;
static void GBARetroLog(struct mLogger* logger, int category, enum mLogLevel level, const char* format, va_list args);
static void _postAudioBuffer(struct GBAAVStream*, struct GBAAudio* audio);
static void _postAudioBuffer(struct mAVStream*, blip_t* left, blip_t* right);
static void _setRumble(struct mRumble* rumble, int enable);
static uint8_t _readLux(struct GBALuminanceSource* lux);
static void _updateLux(struct GBALuminanceSource* lux);
@@ -39,7 +39,7 @@ static void* outputBuffer;
static void* data;
static size_t dataSize;
static void* savedata;
static struct GBAAVStream stream;
static struct mAVStream stream;
static int rumbleLevel;
static struct CircleBuffer rumbleHistory;
static struct mRumble rumble;
@@ -463,11 +463,11 @@ void GBARetroLog(struct mLogger* logger, int category, enum mLogLevel level, con
logCallback(retroLevel, "%s: %s\n", mLogCategoryName(category), message);
}
static void _postAudioBuffer(struct GBAAVStream* stream, struct GBAAudio* audio) {
static void _postAudioBuffer(struct mAVStream* stream, blip_t* left, blip_t* right) {
UNUSED(stream);
int16_t samples[SAMPLES * 2];
blip_read_samples(audio->psg.left, samples, SAMPLES, true);
blip_read_samples(audio->psg.right, samples + 1, SAMPLES, true);
blip_read_samples(left, samples, SAMPLES, true);
blip_read_samples(right, samples + 1, SAMPLES, true);
audioCallback(samples, SAMPLES);
}

View File

@@ -25,14 +25,14 @@ public:
GIFView(QWidget* parent = nullptr);
virtual ~GIFView();
GBAAVStream* getStream() { return &m_encoder.d; }
mAVStream* getStream() { return &m_encoder.d; }
public slots:
void startRecording();
void stopRecording();
signals:
void recordingStarted(GBAAVStream*);
void recordingStarted(mAVStream*);
void recordingStopped();
private slots:

View File

@@ -873,7 +873,7 @@ void GameController::enableTurbo() {
threadContinue();
}
void GameController::setAVStream(GBAAVStream* stream) {
void GameController::setAVStream(mAVStream* stream) {
threadInterrupt();
m_threadContext.stream = stream;
if (isLoaded()) {

View File

@@ -136,7 +136,7 @@ public slots:
void setMute(bool);
void setTurbo(bool, bool forced = true);
void setTurboSpeed(float ratio = -1);
void setAVStream(GBAAVStream*);
void setAVStream(mAVStream*);
void clearAVStream();
void reloadAudioDriver();
void setSaveStateExtdata(int flags);

View File

@@ -37,14 +37,14 @@ public:
VideoView(QWidget* parent = nullptr);
virtual ~VideoView();
GBAAVStream* getStream() { return &m_encoder.d; }
mAVStream* getStream() { return &m_encoder.d; }
public slots:
void startRecording();
void stopRecording();
signals:
void recordingStarted(GBAAVStream*);
void recordingStarted(mAVStream*);
void recordingStopped();
private slots:

View File

@@ -416,7 +416,7 @@ void Window::openROMInfo() {
void Window::openVideoWindow() {
if (!m_videoView) {
m_videoView = new VideoView();
connect(m_videoView, SIGNAL(recordingStarted(GBAAVStream*)), m_controller, SLOT(setAVStream(GBAAVStream*)));
connect(m_videoView, SIGNAL(recordingStarted(mAVStream*)), m_controller, SLOT(setAVStream(mAVStream*)));
connect(m_videoView, SIGNAL(recordingStopped()), m_controller, SLOT(clearAVStream()), Qt::DirectConnection);
connect(m_controller, SIGNAL(gameStopped(GBAThread*)), m_videoView, SLOT(stopRecording()));
connect(m_controller, SIGNAL(gameStopped(GBAThread*)), m_videoView, SLOT(close()));
@@ -430,7 +430,7 @@ void Window::openVideoWindow() {
void Window::openGIFWindow() {
if (!m_gifView) {
m_gifView = new GIFView();
connect(m_gifView, SIGNAL(recordingStarted(GBAAVStream*)), m_controller, SLOT(setAVStream(GBAAVStream*)));
connect(m_gifView, SIGNAL(recordingStarted(mAVStream*)), m_controller, SLOT(setAVStream(mAVStream*)));
connect(m_gifView, SIGNAL(recordingStopped()), m_controller, SLOT(clearAVStream()), Qt::DirectConnection);
connect(m_controller, SIGNAL(gameStopped(GBAThread*)), m_gifView, SLOT(stopRecording()));
connect(m_controller, SIGNAL(gameStopped(GBAThread*)), m_gifView, SLOT(close()));