win32: handle unicode when loading glsl shaders

OV2 2018-05-24 18:19:00 +02:00
parent 4b770e0997
commit ba76c1c2be
3 changed files with 13 additions and 2 deletions


@@ -14,10 +14,13 @@
#include "gl_core_3_1.h"
#include <direct.h>
#ifdef UNICODE
#define chdir(dir) _wchdir(Utf8ToWide(dir))
#define realpath(src, resolved) _twfullpath(resolved, src, PATH_MAX)
#else
#define chdir(dir) _chdir(dir)
#define realpath(src, resolved) _fullpath(resolved, src, PATH_MAX)
#endif
#endif
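
These macros route the POSIX-style calls used by the GLSL shader loader through the wide-character CRT functions. They rely on a Utf8ToWide() helper defined elsewhere in the port; a minimal sketch of such a conversion using MultiByteToWideChar follows (the real helper's name resolution and allocation strategy are assumptions, not shown in this diff):

    #include <windows.h>
    #include <stdlib.h>

    // Sketch only: converts a UTF-8 string to a newly allocated wide string.
    // The project's actual Utf8ToWide may manage its buffer differently.
    static wchar_t *Utf8ToWide_sketch(const char *utf8)
    {
        int len = MultiByteToWideChar(CP_UTF8, 0, utf8, -1, NULL, 0);
        wchar_t *wide = (wchar_t *)malloc(len * sizeof(wchar_t));
        if (wide)
            MultiByteToWideChar(CP_UTF8, 0, utf8, -1, wide, len);
        return wide;
    }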


@@ -229,4 +229,11 @@ extern "C" int _twopen(const char *filename, int oflag, int pmode) {
    return _wopen(Utf8ToWide(filename), oflag, pmode);
}
extern "C" void _twfullpath(char* dst, const char* src, int len) {
    wchar_t *resolved = _wfullpath(NULL, Utf8ToWide(src), MAX_PATH);
    strncpy(dst, WideToUtf8(resolved), len);
    dst[len - 1] = '\0';
    return;
}
#endif // UNICODE
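
_twfullpath converts the UTF-8 input to a wide string, resolves it with _wfullpath, and converts the result back to UTF-8 for the caller's buffer. The WideToUtf8() counterpart is likewise defined elsewhere in the port; a comparable sketch using WideCharToMultiByte (again an assumption about the real helper, not part of this commit):

    // Sketch only: converts a wide string to a newly allocated UTF-8 string.
    static char *WideToUtf8_sketch(const wchar_t *wide)
    {
        int len = WideCharToMultiByte(CP_UTF8, 0, wide, -1, NULL, 0, NULL, NULL);
        char *utf8 = (char *)malloc(len);
        if (utf8)
            WideCharToMultiByte(CP_UTF8, 0, wide, -1, utf8, len, NULL, NULL);
        return utf8;
    }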


@@ -198,6 +198,7 @@ extern "C" {
FILE *_tfwopen(const char *filename, const char *mode );
int _twremove(const char *filename );
int _twopen(const char *filename, int oflag, int pmode);
void _twfullpath(char* dst, const char* src, int len);
#ifdef __cplusplus
}
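
With these declarations in place, the shader-loading code can keep calling realpath() and chdir() on UTF-8 paths unchanged; on a UNICODE build the macros expand to the wide-character wrappers above. A hypothetical call site (shader_path and shader_dir are illustrative names, not taken from the commit):

    char resolved[PATH_MAX];
    // On a UNICODE build this expands to _twfullpath(resolved, shader_path, PATH_MAX);
    // note that _twfullpath returns void, so there is no return value to check.
    realpath(shader_path, resolved);
    // Change into the shader's directory so relative paths inside the preset resolve;
    // on a UNICODE build chdir() expands to _wchdir(Utf8ToWide(shader_dir)).
    chdir(shader_dir);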