mirror of https://github.com/mgba-emu/mgba.git
Merge branch 'master' (early part) into medusa
commit 5c274942e2

CHANGES
@@ -55,6 +55,8 @@ Other fixes:
- All: Fix several memory leaks
- LR35902: Fix trailing whitespace in disassembly
- Qt: Fix adjusting magnification in tile viewer when not fitting to window
- FFmpeg: Improve initialization reliability and cleanup
- Wii: Fix aspect ratio (fixes mgba.io/i/500)
Misc:
- GBA Savedata: EEPROM performance fixes
- GBA Savedata: Automatically map 1Mbit Flash files as 1Mbit Flash
@@ -71,6 +73,7 @@ Misc:
- GBA BIOS: Add timings for HLE BIOS math functions (fixes mgba.io/i/1396)
- Debugger: Make tracing compatible with breakpoints/watchpoints
- Debugger: Print breakpoint/watchpoint number when inserting
- Qt: Open a message box for Qt frontend errors

0.7.1: (2019-02-24)
Bugfixes:

@@ -44,11 +44,9 @@ void FFmpegEncoderInit(struct FFmpegEncoder* encoder) {
encoder->d.postAudioBuffer = 0;
encoder->d.videoFrameRateChanged = _ffmpegSetVideoFrameRate;

encoder->audioCodec = 0;
encoder->videoCodec = 0;
encoder->audio = NULL;
encoder->video = NULL;
encoder->containerFormat = 0;
encoder->audioCodec = NULL;
encoder->videoCodec = NULL;
encoder->containerFormat = NULL;
FFmpegEncoderSetAudio(encoder, "flac", 0);
FFmpegEncoderSetVideo(encoder, "png", 0);
FFmpegEncoderSetContainer(encoder, "matroska");
@@ -57,10 +55,18 @@ void FFmpegEncoderInit(struct FFmpegEncoder* encoder) {
encoder->iheight = GBA_VIDEO_VERTICAL_PIXELS;
encoder->frameCycles = VIDEO_TOTAL_LENGTH;
encoder->cycles = GBA_ARM7TDMI_FREQUENCY;
encoder->resampleContext = 0;
encoder->absf = 0;
encoder->context = 0;
encoder->resampleContext = NULL;
encoder->absf = NULL;
encoder->context = NULL;
encoder->scaleContext = NULL;
encoder->audio = NULL;
encoder->audioStream = NULL;
encoder->audioFrame = NULL;
encoder->audioBuffer = NULL;
encoder->postaudioBuffer = NULL;
encoder->video = NULL;
encoder->videoStream = NULL;
encoder->videoFrame = NULL;
}

bool FFmpegEncoderSetAudio(struct FFmpegEncoder* encoder, const char* acodec, unsigned abr) {
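The two FFmpegEncoderInit hunks above clear every pointer member (codec names, contexts, streams, frames, buffers) before anything can fail, so FFmpegEncoderClose can run safely at any stage. A minimal standalone sketch of that init/teardown pattern, using a hypothetical struct rather than mGBA's actual FFmpegEncoder:

    #include <stdlib.h>

    /* Hypothetical encoder-like object; the field names are illustrative only. */
    struct Recorder {
        char* path;
        void* codecContext;
        void* frameBuffer;
    };

    void RecorderInit(struct Recorder* r) {
        /* Zero every pointer up front so teardown is always safe to call. */
        r->path = NULL;
        r->codecContext = NULL;
        r->frameBuffer = NULL;
    }

    void RecorderClose(struct Recorder* r) {
        /* Guard each free and re-NULL the field, making close idempotent. */
        if (r->frameBuffer) {
            free(r->frameBuffer);
            r->frameBuffer = NULL;
        }
        if (r->codecContext) {
            free(r->codecContext);
            r->codecContext = NULL;
        }
        if (r->path) {
            free(r->path);
            r->path = NULL;
        }
    }

Closing twice, or closing after a partially failed setup, then becomes a no-op instead of a double free.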
@@ -146,6 +152,12 @@ bool FFmpegEncoderSetVideo(struct FFmpegEncoder* encoder, const char* vcodec, un
{ AV_PIX_FMT_YUV444P, 5 },
{ AV_PIX_FMT_YUV420P, 6 }
};

if (!vcodec) {
encoder->videoCodec = 0;
return true;
}

AVCodec* codec = avcodec_find_encoder_by_name(vcodec);
if (!codec) {
return false;
@@ -189,13 +201,13 @@ bool FFmpegEncoderVerifyContainer(struct FFmpegEncoder* encoder) {
AVOutputFormat* oformat = av_guess_format(encoder->containerFormat, 0, 0);
AVCodec* acodec = avcodec_find_encoder_by_name(encoder->audioCodec);
AVCodec* vcodec = avcodec_find_encoder_by_name(encoder->videoCodec);
if ((encoder->audioCodec && !acodec) || !vcodec || !oformat) {
if ((encoder->audioCodec && !acodec) || (encoder->videoCodec && !vcodec) || !oformat) {
return false;
}
if (encoder->audioCodec && !avformat_query_codec(oformat, acodec->id, FF_COMPLIANCE_EXPERIMENTAL)) {
return false;
}
if (!avformat_query_codec(oformat, vcodec->id, FF_COMPLIANCE_EXPERIMENTAL)) {
if (encoder->videoCodec && !avformat_query_codec(oformat, vcodec->id, FF_COMPLIANCE_EXPERIMENTAL)) {
return false;
}
return true;
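FFmpegEncoderVerifyContainer now only consults the video codec when one is actually configured. In isolation, the codec/container compatibility check it builds on looks roughly like this against the FFmpeg 3.x/4.x API (the helper name and the strict treatment of an "unknown" result are assumptions, not mGBA's code; newer FFmpeg versions want const AVCodec pointers here):

    #include <stdbool.h>
    #include <libavformat/avformat.h>

    /* Can the named encoder be muxed into the named container?
     * Sketch only: a negative (unknown) result from avformat_query_codec()
     * is treated as a refusal. */
    static bool canMux(const char* codecName, const char* containerName) {
        AVCodec* codec = avcodec_find_encoder_by_name(codecName);
        AVOutputFormat* format = av_guess_format(containerName, NULL, NULL);
        if (!codec || !format) {
            return false;
        }
        return avformat_query_codec(format, codec->id, FF_COMPLIANCE_EXPERIMENTAL) > 0;
    }

For example, canMux("libx264", "matroska") mirrors the per-codec checks the function above performs for whichever audio and video codecs are configured.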
@@ -204,7 +216,11 @@ bool FFmpegEncoderVerifyContainer(struct FFmpegEncoder* encoder) {
bool FFmpegEncoderOpen(struct FFmpegEncoder* encoder, const char* outfile) {
AVCodec* acodec = avcodec_find_encoder_by_name(encoder->audioCodec);
AVCodec* vcodec = avcodec_find_encoder_by_name(encoder->videoCodec);
if ((encoder->audioCodec && !acodec) || !vcodec || !FFmpegEncoderVerifyContainer(encoder)) {
if ((encoder->audioCodec && !acodec) || (encoder->videoCodec && !vcodec) || !FFmpegEncoderVerifyContainer(encoder)) {
return false;
}

if (encoder->context) {
return false;
}

@@ -244,8 +260,12 @@ bool FFmpegEncoderOpen(struct FFmpegEncoder* encoder, const char* outfile) {
encoder->audio->flags |= CODEC_FLAG_GLOBAL_HEADER;
#endif
}
avcodec_open2(encoder->audio, acodec, &opts);
int res = avcodec_open2(encoder->audio, acodec, &opts);
av_dict_free(&opts);
if (res < 0) {
FFmpegEncoderClose(encoder);
return false;
}
#if LIBAVCODEC_VERSION_MAJOR >= 55
encoder->audioFrame = av_frame_alloc();
#else
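The audio hunk above captures avcodec_open2()'s return value, frees the AVDictionary unconditionally, and tears the encoder down on failure instead of continuing with a half-opened codec. A reduced sketch of that pattern against the FFmpeg 3.x/4.x API (the helper, the sample parameters, and the "threads" option are illustrative assumptions, not mGBA's values):

    #include <libavcodec/avcodec.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/dict.h>

    /* Open an audio encoder by name; returns NULL on any failure. */
    static AVCodecContext* openAudioEncoder(const char* name) {
        AVCodec* codec = avcodec_find_encoder_by_name(name);
        if (!codec) {
            return NULL;
        }
        AVCodecContext* ctx = avcodec_alloc_context3(codec);
        if (!ctx) {
            return NULL;
        }
        ctx->sample_rate = 44100;
        ctx->sample_fmt = codec->sample_fmts ? codec->sample_fmts[0] : AV_SAMPLE_FMT_S16;
        ctx->channels = 2;
        ctx->channel_layout = AV_CH_LAYOUT_STEREO;

        AVDictionary* opts = NULL;
        av_dict_set(&opts, "threads", "auto", 0);

        int res = avcodec_open2(ctx, codec, &opts);
        av_dict_free(&opts); /* Free the options whether or not the open succeeded. */
        if (res < 0) {
            avcodec_free_context(&ctx); /* Tear down instead of leaking a half-opened context. */
            return NULL;
        }
        return ctx;
    }

A call such as openAudioEncoder("flac") either yields a fully opened context or cleans up after itself, which is the same contract the diff gives FFmpegEncoderOpen.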
@@ -295,129 +315,151 @@ bool FFmpegEncoderOpen(struct FFmpegEncoder* encoder, const char* outfile) {
#endif
}

if (vcodec) {
#ifdef FFMPEG_USE_CODECPAR
encoder->videoStream = avformat_new_stream(encoder->context, NULL);
encoder->video = avcodec_alloc_context3(vcodec);
encoder->videoStream = avformat_new_stream(encoder->context, NULL);
encoder->video = avcodec_alloc_context3(vcodec);
#else
encoder->videoStream = avformat_new_stream(encoder->context, vcodec);
encoder->video = encoder->videoStream->codec;
encoder->videoStream = avformat_new_stream(encoder->context, vcodec);
encoder->video = encoder->videoStream->codec;
#endif
encoder->video->bit_rate = encoder->videoBitrate;
encoder->video->width = encoder->width;
encoder->video->height = encoder->height;
encoder->video->time_base = (AVRational) { encoder->frameCycles, encoder->cycles };
encoder->video->framerate = (AVRational) { encoder->cycles, encoder->frameCycles };
encoder->video->pix_fmt = encoder->pixFormat;
encoder->video->gop_size = 60;
encoder->video->max_b_frames = 3;
if (encoder->context->oformat->flags & AVFMT_GLOBALHEADER) {
encoder->video->bit_rate = encoder->videoBitrate;
encoder->video->width = encoder->width;
encoder->video->height = encoder->height;
encoder->video->time_base = (AVRational) { encoder->frameCycles, encoder->cycles };
encoder->video->framerate = (AVRational) { encoder->cycles, encoder->frameCycles };
encoder->video->pix_fmt = encoder->pixFormat;
encoder->video->gop_size = 60;
encoder->video->max_b_frames = 3;
if (encoder->context->oformat->flags & AVFMT_GLOBALHEADER) {
#ifdef AV_CODEC_FLAG_GLOBAL_HEADER
encoder->video->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
encoder->video->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
#else
encoder->video->flags |= CODEC_FLAG_GLOBAL_HEADER;
encoder->video->flags |= CODEC_FLAG_GLOBAL_HEADER;
#endif
}

if (encoder->video->codec->id == AV_CODEC_ID_H264 &&
(strcasecmp(encoder->containerFormat, "mp4") ||
strcasecmp(encoder->containerFormat, "m4v") ||
strcasecmp(encoder->containerFormat, "mov"))) {
// QuickTime and a few other things require YUV420
encoder->video->pix_fmt = AV_PIX_FMT_YUV420P;
}

if (strcmp(vcodec->name, "libx264") == 0) {
// Try to adaptively figure out when you can use a slower encoder
if (encoder->width * encoder->height > 1000000) {
av_opt_set(encoder->video->priv_data, "preset", "superfast", 0);
} else if (encoder->width * encoder->height > 500000) {
av_opt_set(encoder->video->priv_data, "preset", "veryfast", 0);
} else {
av_opt_set(encoder->video->priv_data, "preset", "faster", 0);
}
if (encoder->videoBitrate == 0) {
av_opt_set(encoder->video->priv_data, "crf", "0", 0);

if (encoder->video->codec->id == AV_CODEC_ID_H264 &&
(strcasecmp(encoder->containerFormat, "mp4") ||
strcasecmp(encoder->containerFormat, "m4v") ||
strcasecmp(encoder->containerFormat, "mov"))) {
// QuickTime and a few other things require YUV420
encoder->video->pix_fmt = AV_PIX_FMT_YUV420P;
}

if (strcmp(vcodec->name, "libx264") == 0) {
// Try to adaptively figure out when you can use a slower encoder
if (encoder->width * encoder->height > 1000000) {
av_opt_set(encoder->video->priv_data, "preset", "superfast", 0);
} else if (encoder->width * encoder->height > 500000) {
av_opt_set(encoder->video->priv_data, "preset", "veryfast", 0);
} else {
av_opt_set(encoder->video->priv_data, "preset", "faster", 0);
}
if (encoder->videoBitrate == 0) {
av_opt_set(encoder->video->priv_data, "crf", "0", 0);
encoder->video->pix_fmt = AV_PIX_FMT_YUV444P;
}
}
if (strcmp(vcodec->name, "libvpx-vp9") == 0 && encoder->videoBitrate == 0) {
av_opt_set(encoder->video->priv_data, "lossless", "1", 0);
encoder->video->pix_fmt = AV_PIX_FMT_YUV444P;
}
}
if (strcmp(vcodec->name, "libvpx-vp9") == 0 && encoder->videoBitrate == 0) {
av_opt_set(encoder->video->priv_data, "lossless", "1", 0);
encoder->video->pix_fmt = AV_PIX_FMT_YUV444P;
}

avcodec_open2(encoder->video, vcodec, 0);
if (avcodec_open2(encoder->video, vcodec, 0) < 0) {
FFmpegEncoderClose(encoder);
return false;
}
#if LIBAVCODEC_VERSION_MAJOR >= 55
encoder->videoFrame = av_frame_alloc();
encoder->videoFrame = av_frame_alloc();
#else
encoder->videoFrame = avcodec_alloc_frame();
encoder->videoFrame = avcodec_alloc_frame();
#endif
encoder->videoFrame->format = encoder->video->pix_fmt;
encoder->videoFrame->width = encoder->video->width;
encoder->videoFrame->height = encoder->video->height;
encoder->videoFrame->pts = 0;
_ffmpegSetVideoDimensions(&encoder->d, encoder->iwidth, encoder->iheight);
av_image_alloc(encoder->videoFrame->data, encoder->videoFrame->linesize, encoder->video->width, encoder->video->height, encoder->video->pix_fmt, 32);
encoder->videoFrame->format = encoder->video->pix_fmt;
encoder->videoFrame->width = encoder->video->width;
encoder->videoFrame->height = encoder->video->height;
encoder->videoFrame->pts = 0;
_ffmpegSetVideoDimensions(&encoder->d, encoder->iwidth, encoder->iheight);
av_image_alloc(encoder->videoFrame->data, encoder->videoFrame->linesize, encoder->video->width, encoder->video->height, encoder->video->pix_fmt, 32);
#ifdef FFMPEG_USE_CODECPAR
avcodec_parameters_from_context(encoder->videoStream->codecpar, encoder->video);
avcodec_parameters_from_context(encoder->videoStream->codecpar, encoder->video);
#endif
}

if (avio_open(&encoder->context->pb, outfile, AVIO_FLAG_WRITE) < 0) {
if (avio_open(&encoder->context->pb, outfile, AVIO_FLAG_WRITE) < 0 || avformat_write_header(encoder->context, 0) < 0) {
FFmpegEncoderClose(encoder);
return false;
}
return avformat_write_header(encoder->context, 0) >= 0;
return true;
}

void FFmpegEncoderClose(struct FFmpegEncoder* encoder) {
if (!encoder->context) {
return;
if (encoder->context && encoder->context->pb) {
av_write_trailer(encoder->context);
avio_close(encoder->context->pb);
}
av_write_trailer(encoder->context);
avio_close(encoder->context->pb);

if (encoder->audioCodec) {
if (encoder->postaudioBuffer) {
av_free(encoder->postaudioBuffer);
if (encoder->audioBuffer) {
av_free(encoder->audioBuffer);
}
encoder->postaudioBuffer = NULL;
}
if (encoder->audioBuffer) {
av_free(encoder->audioBuffer);
encoder->audioBuffer = NULL;
}

if (encoder->audioFrame) {
#if LIBAVCODEC_VERSION_MAJOR >= 55
av_frame_free(&encoder->audioFrame);
#else
avcodec_free_frame(&encoder->audioFrame);
#endif
}
if (encoder->audio) {
avcodec_close(encoder->audio);
encoder->audio = NULL;

if (encoder->resampleContext) {
#ifdef USE_LIBAVRESAMPLE
avresample_close(encoder->resampleContext);
#else
swr_free(&encoder->resampleContext);
#endif
}

if (encoder->absf) {
#ifdef FFMPEG_USE_NEW_BSF
av_bsf_free(&encoder->absf);
#else
av_bitstream_filter_close(encoder->absf);
encoder->absf = 0;
#endif
}
}

#if LIBAVCODEC_VERSION_MAJOR >= 55
av_frame_free(&encoder->videoFrame);
if (encoder->resampleContext) {
#ifdef USE_LIBAVRESAMPLE
avresample_close(encoder->resampleContext);
encoder->resampleContext = NULL;
#else
avcodec_free_frame(&encoder->videoFrame);
swr_free(&encoder->resampleContext);
#endif
avcodec_close(encoder->video);
encoder->video = NULL;
}

sws_freeContext(encoder->scaleContext);
encoder->scaleContext = NULL;
if (encoder->absf) {
#ifdef FFMPEG_USE_NEW_BSF
av_bsf_free(&encoder->absf);
#else
av_bitstream_filter_close(encoder->absf);
encoder->absf = NULL;
#endif
}

avformat_free_context(encoder->context);
encoder->context = 0;
if (encoder->videoFrame) {
#if LIBAVCODEC_VERSION_MAJOR >= 55
av_frame_free(&encoder->videoFrame);
#else
avcodec_free_frame(&encoder->videoFrame);
#endif
}

if (encoder->video) {
avcodec_close(encoder->video);
encoder->video = NULL;
}

if (encoder->scaleContext) {
sws_freeContext(encoder->scaleContext);
encoder->scaleContext = NULL;
}

if (encoder->context) {
avformat_free_context(encoder->context);
encoder->context = NULL;
}
}

bool FFmpegEncoderIsOpen(struct FFmpegEncoder* encoder) {
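The large hunk above folds avformat_write_header() into the same failure check as avio_open() and relies on FFmpegEncoderClose() being safe to call at any stage of setup. A compact sketch of that open-or-clean-up pattern for a bare muxer (format name and path are placeholders; a real encoder would add and configure streams before writing the header):

    #include <libavformat/avformat.h>

    /* Create a muxer context, open the output file and write the header in one
     * guarded sequence; on any failure, everything allocated so far is released. */
    static AVFormatContext* openMuxer(const char* path, const char* formatName) {
        AVFormatContext* ctx = NULL;
        if (avformat_alloc_output_context2(&ctx, NULL, formatName, path) < 0 || !ctx) {
            return NULL;
        }
        /* In real code, add streams here before avformat_write_header(). */
        if (avio_open(&ctx->pb, path, AVIO_FLAG_WRITE) < 0 ||
            avformat_write_header(ctx, NULL) < 0) {
            /* Close whatever was opened; both calls tolerate partial setup. */
            if (ctx->pb) {
                avio_closep(&ctx->pb);
            }
            avformat_free_context(ctx);
            return NULL;
        }
        return ctx;
    }

The caller only ever sees a fully usable context or NULL, which is the same guarantee the diff gives FFmpegEncoderOpen.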
@@ -461,7 +503,7 @@ void _ffmpegPostAudioFrame(struct mAVStream* stream, int16_t left, int16_t right
#if LIBAVCODEC_VERSION_MAJOR >= 55
av_frame_make_writable(encoder->audioFrame);
#endif
if (swr_get_out_samples(encoder->resampleContext, encoder->audioBufferSize / 4) < encoder->audioFrame->nb_samples) {
if (swr_get_out_samples(encoder->resampleContext, 1) < encoder->audioFrame->nb_samples) {
swr_convert(encoder->resampleContext, NULL, 0, (const uint8_t**) &encoder->audioBuffer, encoder->audioBufferSize / 4);
return;
}
@@ -532,7 +574,7 @@ void _ffmpegPostAudioFrame(struct mAVStream* stream, int16_t left, int16_t right

void _ffmpegPostVideoFrame(struct mAVStream* stream, const color_t* pixels, size_t stride) {
struct FFmpegEncoder* encoder = (struct FFmpegEncoder*) stream;
if (!encoder->context) {
if (!encoder->context || !encoder->videoCodec) {
return;
}
stride *= BYTES_PER_PIXEL;
@@ -576,29 +618,30 @@ void _ffmpegPostVideoFrame(struct mAVStream* stream, const color_t* pixels, size

static void _ffmpegSetVideoDimensions(struct mAVStream* stream, unsigned width, unsigned height) {
struct FFmpegEncoder* encoder = (struct FFmpegEncoder*) stream;
if (!encoder->context || !encoder->videoCodec) {
return;
}
encoder->iwidth = width;
encoder->iheight = height;
if (encoder->video) {
if (encoder->scaleContext) {
sws_freeContext(encoder->scaleContext);
}
encoder->scaleContext = sws_getContext(encoder->iwidth, encoder->iheight,
if (encoder->scaleContext) {
sws_freeContext(encoder->scaleContext);
}
encoder->scaleContext = sws_getContext(encoder->iwidth, encoder->iheight,
#ifdef COLOR_16_BIT
#ifdef COLOR_5_6_5
AV_PIX_FMT_RGB565,
AV_PIX_FMT_RGB565,
#else
AV_PIX_FMT_BGR555,
AV_PIX_FMT_BGR555,
#endif
#else
#ifndef USE_LIBAV
AV_PIX_FMT_0BGR32,
AV_PIX_FMT_0BGR32,
#else
AV_PIX_FMT_BGR32,
AV_PIX_FMT_BGR32,
#endif
#endif
encoder->videoFrame->width, encoder->videoFrame->height, encoder->video->pix_fmt,
SWS_POINT, 0, 0, 0);
}
encoder->videoFrame->width, encoder->videoFrame->height, encoder->video->pix_fmt,
SWS_POINT, 0, 0, 0);
}

static void _ffmpegSetVideoFrameRate(struct mAVStream* stream, unsigned numerator, unsigned denominator) {

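_ffmpegSetVideoDimensions above rebuilds the swscale context whenever the input size changes, choosing the source pixel format at compile time and converting with SWS_POINT. A reduced sketch of the same sws_getContext()/sws_scale() usage for a single frame (the RGBA source format and the tightly packed stride are assumptions, not mGBA's configuration):

    #include <libavutil/frame.h>
    #include <libswscale/swscale.h>

    /* Convert a tightly packed RGBA buffer into an already allocated AVFrame. */
    static int convertFrame(const uint8_t* rgba, int width, int height, AVFrame* out) {
        struct SwsContext* scaler = sws_getContext(width, height, AV_PIX_FMT_RGBA,
                                                   out->width, out->height,
                                                   (enum AVPixelFormat) out->format,
                                                   SWS_POINT, NULL, NULL, NULL);
        if (!scaler) {
            return -1;
        }
        const uint8_t* srcPlanes[1] = { rgba };
        const int srcStride[1] = { width * 4 };
        sws_scale(scaler, srcPlanes, srcStride, 0, height, out->data, out->linesize);
        sws_freeContext(scaler);
        return 0;
    }

SWS_POINT (nearest neighbour) is a reasonable choice here because the conversion is format-only or integer scaling of small GBA-sized frames, so no smoothing is wanted.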
@@ -5,11 +5,14 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "LogController.h"

#include <QMessageBox>

#include "ConfigController.h"

using namespace QGBA;

LogController LogController::s_global(mLOG_ALL);
int LogController::s_qtCat{-1};

LogController::LogController(int levels, QObject* parent)
: QObject(parent)
@@ -18,6 +21,7 @@ LogController::LogController(int levels, QObject* parent)
mLogFilterSet(&m_filter, "gba.bios", mLOG_STUB | mLOG_FATAL);
mLogFilterSet(&m_filter, "core.status", mLOG_ALL & ~mLOG_DEBUG);
m_filter.defaultLevels = levels;
s_qtCat = mLogCategoryById("platform.qt");

if (this != &s_global) {
connect(&s_global, &LogController::logPosted, this, &LogController::postLog);
@@ -65,6 +69,11 @@ void LogController::postLog(int level, int category, const QString& string) {
*m_logStream << line << endl;
}
}
if (category == s_qtCat && level == mLOG_ERROR && this == &s_global) {
QMessageBox* dialog = new QMessageBox(QMessageBox::Critical, tr("An error occurred"), string, QMessageBox::Ok);
dialog->setAttribute(Qt::WA_DeleteOnClose);
dialog->show();
}
emit logPosted(level, category, string);
}

@@ -85,6 +85,7 @@ private:
std::unique_ptr<QTextStream> m_logStream;

static LogController s_global;
static int s_qtCat;
};

#define LOG(C, L) (*LogController::global())(mLOG_ ## L, _mLOG_CAT_ ## C)

@@ -51,9 +51,9 @@ void mSDLGLCommonInit(struct mSDLRenderer* renderer) {
#else
SDL_GL_SetAttribute(SDL_GL_SWAP_CONTROL, 1);
#ifdef COLOR_16_BIT
SDL_SetVideoMode(renderer->viewportWidth, renderer->viewportHeight, 16, SDL_OPENGL | (SDL_FULLSCREEN * renderer->fullscreen));
SDL_SetVideoMode(renderer->viewportWidth, renderer->viewportHeight, 16, SDL_OPENGL | SDL_RESIZABLE | (SDL_FULLSCREEN * renderer->fullscreen));
#else
SDL_SetVideoMode(renderer->viewportWidth, renderer->viewportHeight, 32, SDL_OPENGL | (SDL_FULLSCREEN * renderer->fullscreen));
SDL_SetVideoMode(renderer->viewportWidth, renderer->viewportHeight, 32, SDL_OPENGL | SDL_RESIZABLE | (SDL_FULLSCREEN * renderer->fullscreen));
#endif
SDL_WM_SetCaption(projectName, "");
#endif

@@ -52,14 +52,18 @@ void mSDLGLRunloop(struct mSDLRenderer* renderer, void* user) {
while (mCoreThreadIsActive(context)) {
while (SDL_PollEvent(&event)) {
mSDLHandleEvent(context, &renderer->player, &event);
#if SDL_VERSION_ATLEAST(2, 0, 0)
// Event handling can change the size of the screen
if (renderer->player.windowUpdated) {
#if SDL_VERSION_ATLEAST(2, 0, 0)
SDL_GetWindowSize(renderer->window, &renderer->viewportWidth, &renderer->viewportHeight);
#else
renderer->viewportWidth = renderer->player.newWidth;
renderer->viewportHeight = renderer->player.newHeight;
mSDLGLCommonInit(renderer);
#endif
mSDLGLDoViewport(renderer->viewportWidth, renderer->viewportHeight, v);
renderer->player.windowUpdated = 0;
}
#endif
}
renderer->core->desiredVideoDimensions(renderer->core, &renderer->width, &renderer->height);
if (renderer->width != v->width || renderer->height != v->height) {

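Both SDL runloops in this commit consume a windowUpdated flag set by the event handler, then re-query the window size and reset the viewport. A trimmed SDL2-only sketch of the same flow (window and GL context creation omitted; the function and flag names are placeholders, not mGBA's):

    #include <SDL.h>
    #include <SDL_opengl.h>
    #include <stdbool.h>

    /* Poll events and keep the GL viewport in sync with the window size.
     * Assumes `window` was created with SDL_WINDOW_RESIZABLE. */
    static void handleEvents(SDL_Window* window, bool* running) {
        SDL_Event event;
        bool windowUpdated = false;
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                *running = false;
            }
            if (event.type == SDL_WINDOWEVENT &&
                event.window.event == SDL_WINDOWEVENT_SIZE_CHANGED) {
                windowUpdated = true;
            }
        }
        if (windowUpdated) {
            int width;
            int height;
            SDL_GetWindowSize(window, &width, &height);
            glViewport(0, 0, width, height);
        }
    }

Deferring the viewport update until after the event queue is drained, as the diff does, avoids reconfiguring the renderer once per intermediate resize event.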
@@ -128,14 +128,18 @@ void mSDLGLES2Runloop(struct mSDLRenderer* renderer, void* user) {
while (mCoreThreadIsActive(context)) {
while (SDL_PollEvent(&event)) {
mSDLHandleEvent(context, &renderer->player, &event);
#if SDL_VERSION_ATLEAST(2, 0, 0)
// Event handling can change the size of the screen
if (renderer->player.windowUpdated) {
#if SDL_VERSION_ATLEAST(2, 0, 0)
SDL_GetWindowSize(renderer->window, &renderer->viewportWidth, &renderer->viewportHeight);
#else
renderer->viewportWidth = renderer->player.newWidth;
renderer->viewportHeight = renderer->player.newHeight;
mSDLGLCommonInit(renderer);
#endif
mSDLGLDoViewport(renderer->viewportWidth, renderer->viewportHeight, v);
renderer->player.windowUpdated = 0;
}
#endif
}

if (mCoreSyncWaitFrameStart(&context->impl->sync)) {

@@ -634,6 +634,12 @@ void mSDLHandleEvent(struct mCoreThread* context, struct mSDLPlayer* sdlContext,
case SDL_WINDOWEVENT:
_mSDLHandleWindowEvent(sdlContext, &event->window);
break;
#else
case SDL_VIDEORESIZE:
sdlContext->newWidth = event->resize.w;
sdlContext->newHeight = event->resize.h;
sdlContext->windowUpdated = 1;
break;
#endif
case SDL_KEYDOWN:
case SDL_KEYUP:

@@ -63,10 +63,10 @@ struct mSDLPlayer {
size_t playerId;
struct mInputMap* bindings;
struct SDL_JoystickCombo* joystick;
int windowUpdated;
#if SDL_VERSION_ATLEAST(2, 0, 0)
SDL_Window* window;
int fullscreen;
int windowUpdated;

struct mSDLRumble {
struct mRumble d;
@@ -76,6 +76,9 @@ struct mSDLPlayer {
float activeLevel;
struct CircleBuffer history;
} rumble;
#else
int newWidth;
int newHeight;
#endif

struct mSDLRotation {

@@ -102,7 +102,7 @@ static struct mRotationSource rotation;
static GXRModeObj* vmode;
static float wAdjust;
static float hAdjust;
static float wStretch = 1.0f;
static float wStretch = 0.9f;
static float hStretch = 0.9f;
static float guiScale = GUI_SCALE;
static Mtx model, view, modelview;
@@ -195,6 +195,9 @@ static void reconfigureScreen(struct mGUIRunner* runner) {
break;
}

vmode->viWidth = 704;
vmode->viXOrigin = 8;

VIDEO_SetBlack(true);
VIDEO_Configure(vmode);

@@ -319,7 +322,7 @@ int main(int argc, char* argv[]) {

struct mGUIRunner runner = {
.params = {
720, 480,
640, 480,
font, "",
_drawStart, _drawEnd,
_pollInput, _pollCursor,