Merge pull request #17815 from DisasterMo/master
Update the core localisation sync scripts, workflows and instructions
This commit is contained in:
commit
21c5c5db36
|
@ -0,0 +1,32 @@
|
|||
# Uploads source texts and any present translations to Crowdin.
|
||||
|
||||
name: Crowdin Translations Initial Setup
|
||||
|
||||
# On manual run only.
|
||||
on:
|
||||
workflow_dispatch
|
||||
|
||||
jobs:
|
||||
upload_source_file:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Java JDK
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
java-version: 18
|
||||
distribution: zulu
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Initial synchronisation
|
||||
shell: bash
|
||||
env:
|
||||
CROWDIN_API_KEY: ${{ secrets.CROWDIN_API_KEY }}
|
||||
run: |
|
||||
python3 intl/initial_sync.py $CROWDIN_API_KEY "<CORE_NAME>" "<PATH/TO>/libretro_core_options.h"
|
|
@ -0,0 +1,37 @@
|
|||
# Prepare source texts & upload them to Crowdin
|
||||
|
||||
name: Crowdin Source Texts Upload
|
||||
|
||||
# on change to the English texts
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- '<PATH/TO>/libretro_core_options.h'
|
||||
|
||||
jobs:
|
||||
upload_source_file:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Java JDK
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
java-version: 18
|
||||
distribution: zulu
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Upload Source
|
||||
shell: bash
|
||||
env:
|
||||
CROWDIN_API_KEY: ${{ secrets.CROWDIN_API_KEY }}
|
||||
run: |
|
||||
python3 intl/upload_workflow.py $CROWDIN_API_KEY "<CORE_NAME>" "<PATH/TO>/libretro_core_options.h"
|
|
@ -0,0 +1,55 @@
|
|||
# Download translations from Crowdin & recreate libretro_core_options_intl.h
|
||||
|
||||
name: Crowdin Translation Sync
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# please choose a random time & weekday to avoid all repos syncing at the same time
|
||||
- cron: '<0-59> <0-23> * * 5' # Fridays at , UTC
|
||||
|
||||
jobs:
|
||||
create_intl_file:
|
||||
permissions:
|
||||
contents: write # 'write' access to repository contents
|
||||
pull-requests: write # 'write' access to pull requests
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Java JDK
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
java-version: 18
|
||||
distribution: zulu
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false # otherwise, the token used is the GITHUB_TOKEN, instead of your personal access token.
|
||||
fetch-depth: 0 # otherwise, there would be errors pushing refs to the destination repository.
|
||||
|
||||
- name: Create intl file
|
||||
shell: bash
|
||||
env:
|
||||
CROWDIN_API_KEY: ${{ secrets.CROWDIN_API_KEY }}
|
||||
run: |
|
||||
python3 intl/download_workflow.py $CROWDIN_API_KEY "<CORE_NAME>" "<PATH/TO>/libretro_core_options_intl.h"
|
||||
|
||||
- name: Commit files
|
||||
run: |
|
||||
git config --local user.email "github-actions@github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git add intl/*_workflow.py "<PATH/TO>/libretro_core_options_intl.h"
|
||||
git commit -m "Fetch translations & Recreate libretro_core_options_intl.h"
|
||||
|
||||
- name: GitHub Push
|
||||
uses: ad-m/github-push-action@v0.8.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: ${{ github.ref }}
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
## Requirements
|
||||
|
||||
Make sure the core is libretro conformant:
|
||||
both `libretro_core_options.h`, containing the English texts, and
|
||||
`libretro_core_options_intl.h`, containing all already existing
|
||||
translations, if any, must be present in the same directory.
|
||||
|
||||
> Please note: `libretro_core_options_intl.h` does not need to contain
|
||||
anything if no translations exist or if none of them should be preserved.
|
||||
|
||||
The scripts are not compatible with text filled in by macros or during run time.
|
||||
The procedure should not fail, but those texts will not be made translatable.
|
||||
|
||||
Also, please verify the existence and correct use of
|
||||
|
||||
`#ifdef HAVE_LANGEXTRA`
|
||||
|
||||
and/or
|
||||
|
||||
`#ifndef HAVE_NO_LANGEXTRA`
|
||||
|
||||
preprocessor directives in `libretro_core_options.h` to remove any
|
||||
references to additional languages on platforms which cannot handle them,
|
||||
e.g. due to limited RAM.
|
||||
For an example, refer to an up-to-date core, like [gambatte-libretro](https://github.com/libretro/gambatte-libretro/blob/master/libgambatte/libretro/libretro_core_options.h).
|
||||
|
||||
> Make sure `options_intl` in `libretro_core_options.h` correctly references the `intl` options, or the translations will not be applied!
|
||||
|
||||
## Adding automatic Crowdin sync
|
||||
|
||||
Place the `intl` and `.github` folders, including their contents, into the root
|
||||
of the repository.
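
With the core options files from the requirements above, the repository then looks roughly like this (the `<PATH/TO>` part is an illustrative placeholder and differs per core):

```
<repo root>/
├── .github/workflows/crowdin_prep.yml
├── .github/workflows/crowdin_translate.yml
├── intl/
└── <PATH/TO>/libretro_core_options.h
    <PATH/TO>/libretro_core_options_intl.h
```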
|
||||
|
||||
In `.github/workflows` are two files:
|
||||
`crowdin_prep.yml` & `crowdin_translate.yml`.
|
||||
Each of them contains placeholders that need to be replaced.
|
||||
|
||||
For convenience, one can run `intl/activate.py`, which will try to find
|
||||
the `libretro_core_options.h` file as well as identify the core name to
|
||||
fill those placeholders with.
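
For reference, the substitution it performs amounts to roughly the following sketch (the core name `mycore` and the path `src/libretro_core_options.h` are hypothetical examples):

```python
# Minimal sketch of the placeholder substitution done by intl/activate.py
# for crowdin_prep.yml; the core name and path below are hypothetical.
with open(".github/workflows/crowdin_prep.yml", "r") as wf_file:
    workflow = wf_file.read()

workflow = workflow.replace("<CORE_NAME>", "mycore")
workflow = workflow.replace("<PATH/TO>/libretro_core_options.h",
                            "src/libretro_core_options.h")

with open(".github/workflows/crowdin_prep.yml", "w") as wf_file:
    wf_file.write(workflow)
```

`crowdin_translate.yml` is handled the same way, with a randomly chosen `<0-59> <0-23>` cron time filled in as well.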
|
||||
|
||||
Even then, one should still check whether it produced the correct result:
|
||||
|
||||
For `crowdin_prep.yml`:
|
||||
> **NOTE:** Please verify that this workflow watches the correct branch!
|
||||
Uploads happen whenever `libretro_core_options.h` on that branch is changed.
|
||||
|
||||
- `<PATH/TO>/libretro_core_options.h` (x2)
|
||||
- replace with the full path from the root of the repo to the
|
||||
`libretro_core_options.h` file
|
||||
|
||||
- `<CORE_NAME>`
|
||||
- the name of the core (or repo)
|
||||
|
||||
And for `crowdin_translate.yml`:
|
||||
- `<0-59> <0-23>`
|
||||
- Minute and hour at which the sync will happen.
|
||||
The script will generate a random time for this, to avoid
|
||||
stressing GitHub & Crowdin with many simultaneous runs.
|
||||
|
||||
- `<CORE_NAME>`
|
||||
- same as above
|
||||
|
||||
- `<PATH/TO>/libretro_core_options_intl.h` (x2)
|
||||
- replace with the full path from the root of the repo to the
|
||||
`libretro_core_options_intl.h` file
|
||||
|
||||
Create a Pull Request and ask a Crowdin project manager, either on [Crowdin](https://crowdin.com/project/retroarch) or, preferably, on [Discord](https://discord.gg/xuMbcVuF) in the `retroarch-translations` channel, to provide you with an access token. Create an Actions repository secret on GitHub named `CROWDIN_API_KEY` for this access token.
|
||||
|
||||
<!-- TODO: set correct permissions https://github.com/marketplace/actions/github-push -->
|
||||
When everything is ready, run the "Crowdin Translations Initial Setup" workflow manually to upload the source texts and any translations to Crowdin.
|
||||
|
||||
> You may either disable the initial workflow or even remove it from your repository. Running it more than once is strongly discouraged: it may interfere with the newest translations, which are usually not yet incorporated into the repository.
|
||||
|
||||
Finally, it is recommended to run the "Crowdin Translation Sync" workflow manually once. If a "Permission to \<repository> denied" error occurs, you might need to configure the GITHUB_TOKEN with the appropriate access rights, [see here](https://github.com/marketplace/actions/github-push#requirements-and-prerequisites).
|
||||
|
||||
## (For Crowdin project managers) Creating an access token
|
||||
|
||||
To create an access token, navigate to the account settings via your profile picture in the top right. Change to the API tab. Here you should find a `New Token` button.
|
||||
|
||||
Name the token after the core/repository that will receive it. The following permissions should be set:
|
||||
|
||||
- Projects
|
||||
- read
|
||||
- Source files & strings
|
||||
- read & write
|
||||
- Translations
|
||||
- read & write
|
||||
- (optional) Translation status
|
||||
- read
|
||||
|
||||
> Please provide these access tokens to the core developers in a private message and delete them after successful setup. Do not share tokens publicly or store them in plain text long term!
|
|
@ -0,0 +1,4 @@
|
|||
__pycache__
|
||||
crowdin-cli.jar
|
||||
*.h
|
||||
*.json
|
|
@ -0,0 +1,70 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import glob
|
||||
import random as r
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.basename(DIR_PATH) != "intl":
|
||||
raise RuntimeError("Script is not in intl folder!")
|
||||
|
||||
BASE_PATH = os.path.dirname(DIR_PATH)
|
||||
WORKFLOW_PATH = os.path.join(BASE_PATH, ".github", "workflows")
|
||||
PREP_WF = os.path.join(WORKFLOW_PATH, "crowdin_prep.yml")
|
||||
TRANSLATE_WF = os.path.join(WORKFLOW_PATH, "crowdin_translate.yml")
|
||||
CORE_NAME = os.path.basename(BASE_PATH)
|
||||
CORE_OP_FILE = os.path.join(BASE_PATH, "**", "libretro_core_options.h")
|
||||
|
||||
core_options_hits = glob.glob(CORE_OP_FILE, recursive=True)
|
||||
|
||||
if len(core_options_hits) == 0:
|
||||
raise RuntimeError("libretro_core_options.h not found!")
|
||||
elif len(core_options_hits) > 1:
|
||||
print("More than one libretro_core_options.h file found:\n\n")
|
||||
for i, file in enumerate(core_options_hits):
|
||||
print(f"{i} {file}\n")
|
||||
|
||||
while True:
|
||||
user_choice = input("Please choose one ('q' will exit): ")
|
||||
if user_choice == 'q':
|
||||
exit(0)
|
||||
elif user_choice.isdigit() and int(user_choice) < len(core_options_hits):
|
||||
core_op_file = core_options_hits[int(user_choice)]
|
||||
break
|
||||
else:
|
||||
print("Please make a valid choice!\n\n")
|
||||
else:
|
||||
core_op_file = core_options_hits[0]
|
||||
|
||||
core_intl_file = os.path.join(os.path.dirname(core_op_file.replace(BASE_PATH, ''))[1:],
|
||||
'libretro_core_options_intl.h')
|
||||
core_op_file = os.path.join(os.path.dirname(core_op_file.replace(BASE_PATH, ''))[1:],
|
||||
'libretro_core_options.h')
|
||||
minutes = r.randrange(0, 59, 5)
|
||||
hour = r.randrange(0, 23)
|
||||
|
||||
with open(PREP_WF, 'r') as wf_file:
|
||||
prep_txt = wf_file.read()
|
||||
|
||||
prep_txt = prep_txt.replace("<CORE_NAME>", CORE_NAME)
|
||||
prep_txt = prep_txt.replace("<PATH/TO>/libretro_core_options.h",
|
||||
core_op_file)
|
||||
with open(PREP_WF, 'w') as wf_file:
|
||||
wf_file.write(prep_txt)
|
||||
|
||||
|
||||
with open(TRANSLATE_WF, 'r') as wf_file:
|
||||
translate_txt = wf_file.read()
|
||||
|
||||
translate_txt = translate_txt.replace('<0-59>', f"{minutes}")
|
||||
translate_txt = translate_txt.replace('<0-23>', f"{hour}")
|
||||
translate_txt = translate_txt.replace('# Fridays at , UTC',
|
||||
f"# Fridays at {hour%12}:{minutes if minutes > 9 else '0' + str(minutes)} {'AM' if hour < 12 else 'PM'}, UTC")
|
||||
translate_txt = translate_txt.replace("<CORE_NAME>", CORE_NAME)
|
||||
translate_txt = translate_txt.replace('<PATH/TO>/libretro_core_options_intl.h',
|
||||
core_intl_file)
|
||||
with open(TRANSLATE_WF, 'w') as wf_file:
|
||||
wf_file.write(translate_txt)
|
|
@ -1,14 +1,14 @@
|
|||
import re
|
||||
|
||||
# 0: full struct; 1: up to & including first []; 2: content between first {}
|
||||
p_struct = re.compile(r'(struct\s*[a-zA-Z0-9_\s]+\[])\s*'
|
||||
r'(?:(?:\/\*(?:.|[\r\n])*?\*\/|\/\/.*[\r\n]+)\s*)*'
|
||||
# 0: full struct; 1: up to & including first []; 2 & 3: comments; 4: content between first {}
|
||||
p_struct = re.compile(r'(\bstruct\b\s*[a-zA-Z0-9_\s]+\[])\s*' # 1st capturing group
|
||||
r'(?:(?=(\/\*(?:.|[\r\n])*?\*\/|\/\/.*[\r\n]+))\2\s*)*' # 2nd capturing group
|
||||
r'=\s*' # =
|
||||
r'(?:(?:\/\*(?:.|[\r\n])*?\*\/|\/\/.*[\r\n]+)\s*)*'
|
||||
r'(?:(?=(\/\*(?:.|[\r\n])*?\*\/|\/\/.*[\r\n]+))\3\s*)*' # 3rd capturing group
|
||||
r'{((?:.|[\r\n])*?)\{\s*NULL,\s*NULL,\s*NULL\s*(?:.|[\r\n])*?},?(?:.|[\r\n])*?};') # captures full struct, its beginning and its content
|
||||
# 0: type name[]; 1: type; 2: name
|
||||
p_type_name = re.compile(r'(retro_core_option_[a-zA-Z0-9_]+)\s*'
|
||||
r'(option_cats([a-z_]{0,8})|option_defs([a-z_]{0,8}))\s*\[]')
|
||||
p_type_name = re.compile(r'(\bretro_core_option_[a-zA-Z0-9_]+)\s*'
|
||||
r'(\boption_cats([a-z_]{0,8})|\boption_defs([a-z_]*))\s*\[]')
|
||||
# 0: full option; 1: key; 2: description; 3: additional info; 4: key/value pairs
|
||||
p_option = re.compile(r'{\s*' # opening braces
|
||||
r'(?:(?:\/\*(?:.|[\r\n])*?\*\/|\/\/.*[\r\n]+|#.*[\r\n]+)\s*)*'
|
||||
|
@ -76,9 +76,9 @@ p_key_value = re.compile(r'{\s*' # opening braces
|
|||
|
||||
p_masked = re.compile(r'([A-Z_][A-Z0-9_]+)\s*(\"(?:"\s*"|\\\s*|.)*\")')
|
||||
|
||||
p_intl = re.compile(r'(struct retro_core_option_definition \*option_defs_intl\[RETRO_LANGUAGE_LAST]) = {'
|
||||
p_intl = re.compile(r'(\bstruct retro_core_option_definition \*option_defs_intl\[RETRO_LANGUAGE_LAST]) = {'
|
||||
r'((?:.|[\r\n])*?)};')
|
||||
p_set = re.compile(r'static INLINE void libretro_set_core_options\(retro_environment_t environ_cb\)'
|
||||
p_set = re.compile(r'\bstatic INLINE void libretro_set_core_options\(retro_environment_t environ_cb\)'
|
||||
r'(?:.|[\r\n])*?};?\s*#ifdef __cplusplus\s*}\s*#endif')
|
||||
|
||||
p_yaml = re.compile(r'"project_id": "[0-9]+".*\s*'
|
|
@ -0,0 +1,633 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""Core options text extractor
|
||||
|
||||
The purpose of this script is to set up & provide functions for automatic generation of 'libretro_core_options_intl.h'
|
||||
from 'libretro_core_options.h' using translations from Crowdin.
|
||||
|
||||
Both v1 and v2 structs are supported. It is, however, recommended to convert v1 files to v2 using the included
|
||||
'v1_to_v2_converter.py'.
|
||||
|
||||
Usage:
|
||||
python3 path/to/core_option_translation.py "path/to/where/libretro_core_options.h & libretro_core_options_intl.h/are" "core_name"
|
||||
|
||||
This script will:
|
||||
1.) create keywords for & extract the texts from libretro_core_options.h & save them into intl/<core_name>/_us.h
|
||||
2.) do the same for any translations already present in libretro_core_options_intl.h, saving them as intl/<core_name>/_<lang>.h
|
||||
"""
|
||||
import core_option_regex as cor
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import urllib.request as req
|
||||
import shutil
|
||||
|
||||
# LANG_CODE_TO_R_LANG = {'_ar': 'RETRO_LANGUAGE_ARABIC',
|
||||
# '_ast': 'RETRO_LANGUAGE_ASTURIAN',
|
||||
# '_chs': 'RETRO_LANGUAGE_CHINESE_SIMPLIFIED',
|
||||
# '_cht': 'RETRO_LANGUAGE_CHINESE_TRADITIONAL',
|
||||
# '_cs': 'RETRO_LANGUAGE_CZECH',
|
||||
# '_cy': 'RETRO_LANGUAGE_WELSH',
|
||||
# '_da': 'RETRO_LANGUAGE_DANISH',
|
||||
# '_de': 'RETRO_LANGUAGE_GERMAN',
|
||||
# '_el': 'RETRO_LANGUAGE_GREEK',
|
||||
# '_eo': 'RETRO_LANGUAGE_ESPERANTO',
|
||||
# '_es': 'RETRO_LANGUAGE_SPANISH',
|
||||
# '_fa': 'RETRO_LANGUAGE_PERSIAN',
|
||||
# '_fi': 'RETRO_LANGUAGE_FINNISH',
|
||||
# '_fr': 'RETRO_LANGUAGE_FRENCH',
|
||||
# '_gl': 'RETRO_LANGUAGE_GALICIAN',
|
||||
# '_he': 'RETRO_LANGUAGE_HEBREW',
|
||||
# '_hu': 'RETRO_LANGUAGE_HUNGARIAN',
|
||||
# '_id': 'RETRO_LANGUAGE_INDONESIAN',
|
||||
# '_it': 'RETRO_LANGUAGE_ITALIAN',
|
||||
# '_ja': 'RETRO_LANGUAGE_JAPANESE',
|
||||
# '_ko': 'RETRO_LANGUAGE_KOREAN',
|
||||
# '_nl': 'RETRO_LANGUAGE_DUTCH',
|
||||
# '_oc': 'RETRO_LANGUAGE_OCCITAN',
|
||||
# '_pl': 'RETRO_LANGUAGE_POLISH',
|
||||
# '_pt_br': 'RETRO_LANGUAGE_PORTUGUESE_BRAZIL',
|
||||
# '_pt_pt': 'RETRO_LANGUAGE_PORTUGUESE_PORTUGAL',
|
||||
# '_ru': 'RETRO_LANGUAGE_RUSSIAN',
|
||||
# '_sk': 'RETRO_LANGUAGE_SLOVAK',
|
||||
# '_sv': 'RETRO_LANGUAGE_SWEDISH',
|
||||
# '_tr': 'RETRO_LANGUAGE_TURKISH',
|
||||
# '_uk': 'RETRO_LANGUAGE_UKRAINIAN',
|
||||
# '_us': 'RETRO_LANGUAGE_ENGLISH',
|
||||
# '_vn': 'RETRO_LANGUAGE_VIETNAMESE'}
|
||||
|
||||
# these are handled by RetroArch directly - no need to include them in core translations
|
||||
ON_OFFS = {'"enabled"', '"disabled"', '"true"', '"false"', '"on"', '"off"'}
|
||||
|
||||
|
||||
def remove_special_chars(text: str, char_set=0, allow_non_ascii=False) -> str:
|
||||
"""Removes special characters from a text.
|
||||
|
||||
:param text: String to be cleaned.
|
||||
:param char_set: 0 -> remove all ASCII special chars except for '_' & 'space' (default)
|
||||
1 -> remove invalid chars from file names
|
||||
:param allow_non_ascii: False -> all non-ascii characters will be removed (default)
|
||||
True -> non-ascii characters will be passed through
|
||||
:return: Clean text.
|
||||
"""
|
||||
command_chars = [chr(unicode) for unicode in tuple(range(0, 32)) + (127,)]
|
||||
special_chars = ([chr(unicode) for unicode in tuple(range(33, 48)) + tuple(range(58, 65)) + tuple(range(91, 95))
|
||||
+ (96,) + tuple(range(123, 127))],
|
||||
('\\', '/', ':', '*', '?', '"', '<', '>', '|', '#', '%',
|
||||
'&', '{', '}', '$', '!', '¸', "'", '@', '+', '='))
|
||||
res = text if allow_non_ascii \
|
||||
else text.encode('ascii', errors='ignore').decode('unicode-escape')
|
||||
|
||||
for cm in command_chars:
|
||||
res = res.replace(cm, '_')
|
||||
for sp in special_chars[char_set]:
|
||||
res = res.replace(sp, '_')
|
||||
while res.startswith('_'):
|
||||
res = res[1:]
|
||||
while res.endswith('_'):
|
||||
res = res[:-1]
|
||||
return res
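# Illustrative example for remove_special_chars (hypothetical input, default char_set):
#   remove_special_chars('Video > Scaling')  ->  'Video _ Scaling'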
|
||||
|
||||
|
||||
def clean_file_name(file_name: str) -> str:
|
||||
"""Removes characters which might make file_name inappropriate for files on some OS.
|
||||
|
||||
:param file_name: File name to be cleaned.
|
||||
:return: The clean file name.
|
||||
"""
|
||||
file_name = remove_special_chars(file_name, 1)
|
||||
file_name = re.sub(r'__+', '_', file_name.replace(' ', '_'))
|
||||
return file_name
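# Illustrative example for clean_file_name (hypothetical input):
#   clean_file_name('beetle psx hw')  ->  'beetle_psx_hw'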
|
||||
|
||||
|
||||
def get_struct_type_name(decl: str) -> tuple:
|
||||
""" Returns relevant parts of the struct declaration:
|
||||
type, name of the struct and the language appendix, if present.
|
||||
:param decl: The struct declaration matched by cor.p_type_name.
|
||||
:return: Tuple, e.g.: ('retro_core_option_definition', 'option_defs_us', '_us')
|
||||
"""
|
||||
struct_match = cor.p_type_name.search(decl)
|
||||
if struct_match:
|
||||
if struct_match.group(3):
|
||||
struct_type_name = struct_match.group(1, 2, 3)
|
||||
return struct_type_name
|
||||
elif struct_match.group(4):
|
||||
struct_type_name = struct_match.group(1, 2, 4)
|
||||
return struct_type_name
|
||||
else:
|
||||
struct_type_name = struct_match.group(1, 2)
|
||||
return struct_type_name
|
||||
else:
|
||||
raise ValueError(f'No or incomplete struct declaration: {decl}!\n'
|
||||
'Please make sure all structs are complete, including the type and name declaration.')
|
||||
|
||||
|
||||
def is_viable_non_dupe(text: str, comparison) -> bool:
|
||||
"""text must be longer than 2 ('""'), not 'NULL' and not in comparison.
|
||||
|
||||
:param text: String to be tested.
|
||||
:param comparison: Dictionary or set to search for text in.
|
||||
:return: bool
|
||||
"""
|
||||
return 2 < len(text) and text != 'NULL' and text not in comparison
|
||||
|
||||
|
||||
def is_viable_value(text: str) -> bool:
|
||||
"""text must be longer than 2 ('""') and not 'NULL'.
|
||||
|
||||
:param text: String to be tested.
|
||||
:return: bool
|
||||
"""
|
||||
return 2 < len(text) and text != 'NULL'
|
||||
|
||||
|
||||
def create_non_dupe(base_name: str, opt_num: int, comparison) -> str:
|
||||
"""Makes sure base_name is not in comparison, and if it is it's renamed.
|
||||
|
||||
:param base_name: Name to check/make unique.
|
||||
:param opt_num: Number of the option base_name belongs to, used in making it unique.
|
||||
:param comparison: Dictionary or set to search for base_name in.
|
||||
:return: Unique name.
|
||||
"""
|
||||
h = base_name
|
||||
if h in comparison:
|
||||
n = 0
|
||||
h = h + '_O' + str(opt_num)
|
||||
h_end = len(h)
|
||||
while h in comparison:
|
||||
h = h[:h_end] + '_' + str(n)
|
||||
n += 1
|
||||
return h
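# Illustrative example for create_non_dupe (hypothetical values):
#   create_non_dupe('OPTION_VAL_AUTO', 3, {'OPTION_VAL_AUTO'})  ->  'OPTION_VAL_AUTO_O3'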
|
||||
|
||||
|
||||
def get_texts(text: str) -> dict:
|
||||
"""Extracts the strings, which are to be translated/are the translations,
|
||||
from text and creates macro names for them.
|
||||
|
||||
:param text: The string to be parsed.
|
||||
:return: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
|
||||
"""
|
||||
# all structs: group(0) full struct, group(1) beginning, groups(2, 3) comments, group(4) content
|
||||
structs = cor.p_struct.finditer(text)
|
||||
hash_n_string = {}
|
||||
just_string = {}
|
||||
for struct in structs:
|
||||
struct_declaration = struct.group(1)
|
||||
struct_type_name = get_struct_type_name(struct_declaration)
|
||||
if 3 > len(struct_type_name):
|
||||
lang = '_us'
|
||||
else:
|
||||
lang = struct_type_name[2]
|
||||
if lang not in just_string:
|
||||
hash_n_string[lang] = {}
|
||||
just_string[lang] = set()
|
||||
is_v2_definition = 'retro_core_option_v2_definition' == struct_type_name[0]
|
||||
pre_name = ''
|
||||
# info texts format
|
||||
p = cor.p_info
|
||||
if 'retro_core_option_v2_category' == struct_type_name[0]:
|
||||
# prepend category labels, as they can be the same as option labels
|
||||
pre_name = 'CATEGORY_'
|
||||
# categories have different info texts format
|
||||
p = cor.p_info_cat
|
||||
|
||||
struct_content = struct.group(4)
|
||||
# 0: full option; 1: key; 2: description; 3: additional info; 4: key/value pairs
|
||||
struct_options = cor.p_option.finditer(struct_content)
|
||||
for opt, option in enumerate(struct_options):
|
||||
# group 1: key
|
||||
if option.group(1):
|
||||
opt_name = pre_name + option.group(1)
|
||||
# no special chars allowed in key
|
||||
opt_name = remove_special_chars(opt_name).upper().replace(' ', '_')
|
||||
else:
|
||||
raise ValueError(f'No option name (key) found in struct {struct_type_name[1]} option {opt}!')
|
||||
|
||||
# group 2: description0
|
||||
if option.group(2):
|
||||
desc0 = option.group(2)
|
||||
if is_viable_non_dupe(desc0, just_string[lang]):
|
||||
just_string[lang].add(desc0)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL'), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = desc0
|
||||
else:
|
||||
raise ValueError(f'No label found in struct {struct_type_name[1]} option {option.group(1)}!')
|
||||
|
||||
# group 3: desc1, info0, info1, category
|
||||
if option.group(3):
|
||||
infos = option.group(3)
|
||||
option_info = p.finditer(infos)
|
||||
if is_v2_definition:
|
||||
desc1 = next(option_info).group(1)
|
||||
if is_viable_non_dupe(desc1, just_string[lang]):
|
||||
just_string[lang].add(desc1)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL_CAT'), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = desc1
|
||||
last = None
|
||||
m_h = None
|
||||
for j, info in enumerate(option_info):
|
||||
last = info.group(1)
|
||||
if is_viable_non_dupe(last, just_string[lang]):
|
||||
just_string[lang].add(last)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
|
||||
hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = last
|
||||
if last in just_string[lang]: # category key should not be translated
|
||||
hash_n_string[lang].pop(m_h)
|
||||
just_string[lang].remove(last)
|
||||
else:
|
||||
for j, info in enumerate(option_info):
|
||||
gr1 = info.group(1)
|
||||
if is_viable_non_dupe(gr1, just_string[lang]):
|
||||
just_string[lang].add(gr1)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
|
||||
hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = gr1
|
||||
else:
|
||||
raise ValueError(f'Too few arguments in struct {struct_type_name[1]} option {option.group(1)}!')
|
||||
|
||||
# group 4: key/value pairs
|
||||
if option.group(4):
|
||||
for j, kv_set in enumerate(cor.p_key_value.finditer(option.group(4))):
|
||||
set_key, set_value = kv_set.group(1, 2)
|
||||
if not is_viable_value(set_value):
|
||||
# use the key if value not available
|
||||
set_value = set_key
|
||||
if not is_viable_value(set_value):
|
||||
continue
|
||||
# re.fullmatch(r'(?:[+-][0-9]+)+', value[1:-1])
|
||||
|
||||
# add only if non-dupe, not translated by RetroArch directly & not purely numeric
|
||||
if set_value not in just_string[lang]\
|
||||
and set_value.lower() not in ON_OFFS\
|
||||
and not re.sub(r'[+-]', '', set_value[1:-1]).isdigit():
|
||||
clean_key = set_key[1:-1]
|
||||
clean_key = remove_special_chars(clean_key).upper().replace(' ', '_')
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f"OPTION_VAL_{clean_key}"), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = set_value
|
||||
just_string[lang].add(set_value)
|
||||
return hash_n_string
|
||||
|
||||
|
||||
def create_msg_hash(intl_dir_path: str, core_name: str, keyword_string_dict: dict) -> dict:
|
||||
"""Creates '<core_name>.h' files in 'intl/_<lang>/' containing the macro name & string combinations.
|
||||
|
||||
:param intl_dir_path: Path to the intl directory.
|
||||
:param core_name: Name of the core, used for the files' paths.
|
||||
:param keyword_string_dict: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
|
||||
:return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/<core_name>/_<lang>.h)', ... }.
|
||||
"""
|
||||
files = {}
|
||||
for localisation in keyword_string_dict:
|
||||
path = os.path.join(intl_dir_path, core_name) # intl/<core_name>/
|
||||
files[localisation] = os.path.join(path, localisation + '.h') # intl/<core_name>/_<lang>.h
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
with open(files[localisation], 'w', encoding='utf-8') as crowdin_file:
|
||||
out_text = ''
|
||||
for keyword in keyword_string_dict[localisation]:
|
||||
out_text = f'{out_text}{keyword} {keyword_string_dict[localisation][keyword]}\n'
|
||||
crowdin_file.write(out_text)
|
||||
return files
|
||||
|
||||
|
||||
def h2json(file_paths: dict) -> dict:
|
||||
"""Converts .h files pointed to by file_paths into .jsons.
|
||||
|
||||
:param file_paths: Dictionary of the form { '_<lang>': 'path/to/file (./intl/<core_name>/_<lang>.h)', ... }.
|
||||
:return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/<core_name>/_<lang>.json)', ... }.
|
||||
"""
|
||||
jsons = {}
|
||||
for file_lang in file_paths:
|
||||
if not os.path.isfile(file_paths[file_lang]):
|
||||
continue
|
||||
file_path = file_paths[file_lang]
|
||||
try:
|
||||
jsons[file_lang] = file_path[:file_path.rindex('.')] + '.json'
|
||||
except ValueError:
|
||||
print(f"File {file_path} has incorrect format! File ending missing?")
|
||||
continue
|
||||
|
||||
p = cor.p_masked
|
||||
|
||||
with open(file_paths[file_lang], 'r+', encoding='utf-8') as h_file:
|
||||
text = h_file.read()
|
||||
result = p.finditer(text)
|
||||
messages = {}
|
||||
for msg in result:
|
||||
key, val = msg.group(1, 2)
|
||||
if key not in messages:
|
||||
if key and val:
|
||||
# unescape & remove "\n"
|
||||
messages[key] = re.sub(r'"\s*(?:(?:/\*(?:.|[\r\n])*?\*/|//.*[\r\n]+)\s*)*"',
|
||||
'\\\n', val[1:-1].replace('\\\"', '"'))
|
||||
else:
|
||||
print(f"DUPLICATE KEY in {file_paths[file_lang]}: {key}")
|
||||
with open(jsons[file_lang], 'w', encoding='utf-8') as json_file:
|
||||
json.dump(messages, json_file, indent=2)
|
||||
|
||||
return jsons
|
||||
|
||||
|
||||
def json2h(intl_dir_path: str, file_list) -> None:
|
||||
"""Converts .json file in json_file_path into an .h ready to be included in C code.
|
||||
|
||||
:param intl_dir_path: Path to the intl/<core_name> directory.
|
||||
:param file_list: Iterator of os.DirEntry objects. Contains localisation files to convert.
|
||||
:return: None
|
||||
"""
|
||||
|
||||
p = cor.p_masked
|
||||
|
||||
def update(s_messages, s_template, s_source_messages, file_name):
|
||||
translation = ''
|
||||
template_messages = p.finditer(s_template)
|
||||
for tp_msg in template_messages:
|
||||
old_key = tp_msg.group(1)
|
||||
if old_key in s_messages and s_messages[old_key] != s_source_messages[old_key]:
|
||||
tl_msg_val = s_messages[old_key]
|
||||
tl_msg_val = tl_msg_val.replace('"', '\\\"').replace('\n', '') # escape
|
||||
translation = ''.join((translation, '#define ', old_key, file_name.upper(), f' "{tl_msg_val}"\n'))
|
||||
|
||||
else: # Remove English duplicates and non-translatable strings
|
||||
translation = ''.join((translation, '#define ', old_key, file_name.upper(), ' NULL\n'))
|
||||
return translation
|
||||
|
||||
us_h = os.path.join(intl_dir_path, '_us.h')
|
||||
us_json = os.path.join(intl_dir_path, '_us.json')
|
||||
|
||||
with open(us_h, 'r', encoding='utf-8') as template_file:
|
||||
template = template_file.read()
|
||||
with open(us_json, 'r+', encoding='utf-8') as source_json_file:
|
||||
source_messages = json.load(source_json_file)
|
||||
|
||||
for file in file_list:
|
||||
if file.name.lower().startswith('_us') \
|
||||
or file.name.lower().endswith('.h') \
|
||||
or file.is_dir():
|
||||
continue
|
||||
|
||||
with open(file.path, 'r+', encoding='utf-8') as json_file:
|
||||
messages = json.load(json_file)
|
||||
new_translation = update(messages, template, source_messages, os.path.splitext(file.name)[0])
|
||||
with open(os.path.splitext(file.path)[0] + '.h', 'w', encoding='utf-8') as h_file:
|
||||
h_file.seek(0)
|
||||
h_file.write(new_translation)
|
||||
h_file.truncate()
|
||||
return
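# json2h above writes one '#define' per macro into intl/<core_name>/_<lang>.h,
# e.g. hypothetical lines produced from a French '_fr.json':
#   #define MYCORE_OPTION_LABEL_FR "Texte traduit"
#   #define MYCORE_OTHER_LABEL_FR NULL
# where NULL marks strings that are untranslated or identical to the English source.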
|
||||
|
||||
|
||||
def get_crowdin_client(dir_path: str) -> str:
|
||||
"""Makes sure the Crowdin CLI client is present. If it isn't, it is fetched & extracted.
|
||||
|
||||
:return: The path to 'crowdin-cli.jar'.
|
||||
"""
|
||||
jar_name = 'crowdin-cli.jar'
|
||||
jar_path = os.path.join(dir_path, jar_name)
|
||||
|
||||
if not os.path.isfile(jar_path):
|
||||
print('Downloading crowdin-cli.jar')
|
||||
crowdin_cli_file = os.path.join(dir_path, 'crowdin-cli.zip')
|
||||
crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/crowdin-cli.zip'
|
||||
req.urlretrieve(crowdin_cli_url, crowdin_cli_file)
|
||||
import zipfile
|
||||
with zipfile.ZipFile(crowdin_cli_file, 'r') as zip_ref:
|
||||
jar_dir = zip_ref.namelist()[0]
|
||||
for file in zip_ref.namelist():
|
||||
if file.endswith(jar_name):
|
||||
jar_file = file
|
||||
break
|
||||
zip_ref.extract(jar_file)
|
||||
os.rename(jar_file, jar_path)
|
||||
os.remove(crowdin_cli_file)
|
||||
shutil.rmtree(jar_dir)
|
||||
return jar_path
|
||||
|
||||
|
||||
def create_intl_file(intl_file_path: str, localisations_path: str, text: str, file_path: str) -> None:
|
||||
"""Creates 'libretro_core_options_intl.h' from Crowdin translations.
|
||||
|
||||
:param intl_file_path: Path to 'libretro_core_options_intl.h'
|
||||
:param localisations_path: Path to the intl/<core_name> directory.
|
||||
:param text: Content of the 'libretro_core_options.h' being translated.
|
||||
:param file_path: Path to the '_us.h' file, containing the original English texts.
|
||||
:return: None
|
||||
"""
|
||||
msg_dict = {}
|
||||
lang_up = ''
|
||||
|
||||
def replace_pair(pair_match):
|
||||
"""Replaces a key-value-pair of an option with the macros corresponding to the language.
|
||||
|
||||
:param pair_match: The re match object representing the key-value-pair block.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
offset = pair_match.start(0)
|
||||
if pair_match.group(1): # key
|
||||
if pair_match.group(2) in msg_dict: # value
|
||||
val = msg_dict[pair_match.group(2)] + lang_up
|
||||
elif pair_match.group(1) in msg_dict: # use key if value not viable (e.g. NULL)
|
||||
val = msg_dict[pair_match.group(1)] + lang_up
|
||||
else:
|
||||
return pair_match.group(0)
|
||||
else:
|
||||
return pair_match.group(0)
|
||||
res = pair_match.group(0)[:pair_match.start(2) - offset] + val \
|
||||
+ pair_match.group(0)[pair_match.end(2) - offset:]
|
||||
return res
|
||||
|
||||
def replace_info(info_match):
|
||||
"""Replaces the 'additional strings' of an option with the macros corresponding to the language.
|
||||
|
||||
:param info_match: The re match object representing the 'additional strings' block.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
offset = info_match.start(0)
|
||||
if info_match.group(1) in msg_dict:
|
||||
res = info_match.group(0)[:info_match.start(1) - offset] + \
|
||||
msg_dict[info_match.group(1)] + lang_up + \
|
||||
info_match.group(0)[info_match.end(1) - offset:]
|
||||
return res
|
||||
else:
|
||||
return info_match.group(0)
|
||||
|
||||
def replace_option(option_match):
|
||||
"""Replaces strings within an option
|
||||
'{ "opt_key", "label", "additional strings", ..., { {"key", "value"}, ... }, ... }'
|
||||
within a struct with the macros corresponding to the language:
|
||||
'{ "opt_key", MACRO_LABEL, MACRO_STRINGS, ..., { {"key", MACRO_VALUE}, ... }, ... }'
|
||||
|
||||
:param option_match: The re match object representing the option.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
# label
|
||||
offset = option_match.start(0)
|
||||
if option_match.group(2):
|
||||
res = option_match.group(0)[:option_match.start(2) - offset] + msg_dict[option_match.group(2)] + lang_up
|
||||
else:
|
||||
return option_match.group(0)
|
||||
# additional block
|
||||
if option_match.group(3):
|
||||
res = res + option_match.group(0)[option_match.end(2) - offset:option_match.start(3) - offset]
|
||||
new_info = p.sub(replace_info, option_match.group(3))
|
||||
res = res + new_info
|
||||
else:
|
||||
return res + option_match.group(0)[option_match.end(2) - offset:]
|
||||
# key-value-pairs
|
||||
if option_match.group(4):
|
||||
res = res + option_match.group(0)[option_match.end(3) - offset:option_match.start(4) - offset]
|
||||
new_pairs = cor.p_key_value.sub(replace_pair, option_match.group(4))
|
||||
res = res + new_pairs + option_match.group(0)[option_match.end(4) - offset:]
|
||||
else:
|
||||
res = res + option_match.group(0)[option_match.end(3) - offset:]
|
||||
|
||||
return res
|
||||
|
||||
# ------------------------------------------------------------------------------------
|
||||
|
||||
with open(file_path, 'r+', encoding='utf-8') as template: # intl/<core_name>/_us.h
|
||||
masked_msgs = cor.p_masked.finditer(template.read())
|
||||
|
||||
for msg in masked_msgs:
|
||||
msg_dict[msg.group(2)] = msg.group(1)
|
||||
|
||||
# top of the file - in case there is no file to copy it from
|
||||
out_txt = "#ifndef LIBRETRO_CORE_OPTIONS_INTL_H__\n" \
|
||||
"#define LIBRETRO_CORE_OPTIONS_INTL_H__\n\n" \
|
||||
"#if defined(_MSC_VER) && (_MSC_VER >= 1500 && _MSC_VER < 1900)\n" \
|
||||
"/* https://support.microsoft.com/en-us/kb/980263 */\n" \
|
||||
'#pragma execution_character_set("utf-8")\n' \
|
||||
"#pragma warning(disable:4566)\n" \
|
||||
"#endif\n\n" \
|
||||
"#include <libretro.h>\n\n" \
|
||||
'#ifdef __cplusplus\n' \
|
||||
'extern "C" {\n' \
|
||||
'#endif\n'
|
||||
|
||||
if os.path.isfile(intl_file_path):
|
||||
# copy top of the file for re-use
|
||||
with open(intl_file_path, 'r', encoding='utf-8') as intl: # libretro_core_options_intl.h
|
||||
in_text = intl.read()
|
||||
# attempt 1: find the distinct comment header
|
||||
intl_start = re.search(re.escape('/*\n'
|
||||
' ********************************\n'
|
||||
' * Core Option Definitions\n'
|
||||
' ********************************\n'
|
||||
'*/\n'), in_text)
|
||||
if intl_start:
|
||||
out_txt = in_text[:intl_start.end(0)]
|
||||
else:
|
||||
# attempt 2: if no comment header is present, find the C++ extern "C" guard (it should always be there)
|
||||
intl_start = re.search(re.escape('#ifdef __cplusplus\n'
|
||||
'extern "C" {\n'
|
||||
'#endif\n'), in_text)
|
||||
if intl_start:
|
||||
out_txt = in_text[:intl_start.end(0)]
|
||||
# if all attempts fail, use default from above
|
||||
|
||||
# only write to file, if there is anything worthwhile to write!
|
||||
overwrite = False
|
||||
|
||||
# iterate through localisation files
|
||||
files = {}
|
||||
for file in os.scandir(localisations_path):
|
||||
files[file.name] = {'is_file': file.is_file(), 'path': file.path}
|
||||
|
||||
for file in sorted(files): # intl/<core_name>/_*
|
||||
if files[file]['is_file'] \
|
||||
and file.startswith('_') \
|
||||
and file.endswith('.h') \
|
||||
and not file.startswith('_us'):
|
||||
translation_path = files[file]['path']  # intl/<core_name>/_<lang>.h
|
||||
# all structs: group(0) full struct, group(1) beginning, groups(2, 3) comments, group(4) content
|
||||
struct_groups = cor.p_struct.finditer(text)
|
||||
lang_low = os.path.splitext(file)[0].lower()
|
||||
lang_up = lang_low.upper()
|
||||
# mark each language's section with a comment, for readability
|
||||
out_txt = out_txt + f'/* RETRO_LANGUAGE{lang_up} */\n\n' # /* RETRO_LANGUAGE_NM */
|
||||
|
||||
# copy adjusted translations (macros)
|
||||
with open(translation_path, 'r+', encoding='utf-8') as f_in:  # intl/<core_name>/_<lang>.h
|
||||
out_txt = out_txt + f_in.read() + '\n'
|
||||
# replace English texts with macros
|
||||
for construct in struct_groups:
|
||||
declaration = construct.group(1)
|
||||
struct_type_name = get_struct_type_name(declaration)
|
||||
if 3 > len(struct_type_name): # no language specifier
|
||||
new_decl = re.sub(re.escape(struct_type_name[1]), struct_type_name[1] + lang_low, declaration)
|
||||
else:
|
||||
if '_us' != struct_type_name[2]:
|
||||
# only use _us constructs - other languages present in the source file are not important
|
||||
continue
|
||||
new_decl = re.sub(re.escape(struct_type_name[2]), lang_low, declaration)
|
||||
|
||||
p = (cor.p_info_cat if 'retro_core_option_v2_category' == struct_type_name[0] else cor.p_info)
|
||||
offset_construct = construct.start(0)
|
||||
# append localised construct name and ' = {'
|
||||
start = construct.end(1) - offset_construct
|
||||
end = construct.start(4) - offset_construct
|
||||
out_txt = out_txt + new_decl + construct.group(0)[start:end]
|
||||
# insert macros
|
||||
content = construct.group(4)
|
||||
new_content = cor.p_option.sub(replace_option, content)
|
||||
start = construct.end(4) - offset_construct
|
||||
# append macro-filled content and close the construct
|
||||
out_txt = out_txt + new_content + construct.group(0)[start:] + '\n'
|
||||
|
||||
# for v2
|
||||
if 'retro_core_option_v2_definition' == struct_type_name[0]:
|
||||
out_txt = out_txt + f'struct retro_core_options_v2 options{lang_low}' \
|
||||
' = {\n' \
|
||||
f' option_cats{lang_low},\n' \
|
||||
f' option_defs{lang_low}\n' \
|
||||
'};\n\n'
|
||||
# if it got this far, we've got something to write
|
||||
overwrite = True
|
||||
|
||||
# only write to file, if there is anything worthwhile to write!
|
||||
if overwrite:
|
||||
with open(intl_file_path, 'w', encoding='utf-8') as intl:
|
||||
intl.write(out_txt + '\n#ifdef __cplusplus\n'
|
||||
'}\n#endif\n'
|
||||
'\n#endif')
|
||||
return
|
||||
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
if os.path.isfile(sys.argv[1]) or sys.argv[1].endswith('.h'):
|
||||
_temp = os.path.dirname(sys.argv[1])
|
||||
else:
|
||||
_temp = sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
CORE_NAME = clean_file_name(sys.argv[2])
|
||||
|
||||
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
H_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
INTL_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options_intl.h')
|
||||
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = get_texts(_main_text)
|
||||
_files = create_msg_hash(DIR_PATH, CORE_NAME, _hash_n_str)
|
||||
_source_jsons = h2json(_files)
|
||||
|
||||
print('Getting texts from libretro_core_options_intl.h')
|
||||
if os.path.isfile(INTL_FILE_PATH):
|
||||
with open(INTL_FILE_PATH, 'r+', encoding='utf-8') as _intl_file:
|
||||
_intl_text = _intl_file.read()
|
||||
_hash_n_str_intl = get_texts(_intl_text)
|
||||
_intl_files = create_msg_hash(DIR_PATH, CORE_NAME, _hash_n_str_intl)
|
||||
_intl_jsons = h2json(_intl_files)
|
||||
|
||||
print('\nAll done!')
|
|
@ -0,0 +1,13 @@
|
|||
"project_id": "380544"
|
||||
"api_token": "_secret_"
|
||||
"base_url": "https://api.crowdin.com"
|
||||
"preserve_hierarchy": true
|
||||
|
||||
"files":
|
||||
[
|
||||
{
|
||||
"source": "/_core_name_/_us.json",
|
||||
"dest": "/_core_name_/_core_name_.json",
|
||||
"translation": "/_core_name_/_%two_letters_code%.json",
|
||||
},
|
||||
]
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import core_option_translation as t
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
if t.os.path.isfile(t.sys.argv[1]) or t.sys.argv[1].endswith('.h'):
|
||||
_temp = t.os.path.dirname(t.sys.argv[1])
|
||||
else:
|
||||
_temp = t.sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = t.os.path.dirname(t.os.path.dirname(t.os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
CORE_NAME = t.clean_file_name(t.sys.argv[2])
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
H_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = t.get_texts(_main_text)
|
||||
_files = t.create_msg_hash(DIR_PATH, CORE_NAME, _hash_n_str)
|
||||
|
||||
_source_jsons = t.h2json(_files)
|
||||
|
||||
print('\nAll done!')
|
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.request
|
||||
import zipfile
|
||||
import core_option_translation as t
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check Crowdin API Token and core name
|
||||
try:
|
||||
API_KEY = sys.argv[1]
|
||||
CORE_NAME = t.clean_file_name(sys.argv[2])
|
||||
except IndexError as e:
|
||||
print('Please provide Crowdin API Token and core name!')
|
||||
raise e
|
||||
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
YAML_PATH = t.os.path.join(DIR_PATH, 'crowdin.yaml')
|
||||
|
||||
# Apply Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": "_secret_"',
|
||||
f'"api_token": "{API_KEY}"',
|
||||
crowdin_config, 1)
|
||||
crowdin_config = re.sub(r'/_core_name_',
|
||||
f'/{CORE_NAME}'
|
||||
, crowdin_config)
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
try:
|
||||
# Download Crowdin CLI
|
||||
jar_name = 'crowdin-cli.jar'
|
||||
jar_path = t.os.path.join(DIR_PATH, jar_name)
|
||||
crowdin_cli_file = 'crowdin-cli.zip'
|
||||
crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/' + crowdin_cli_file
|
||||
crowdin_cli_path = t.os.path.join(DIR_PATH, crowdin_cli_file)
|
||||
|
||||
if not os.path.isfile(t.os.path.join(DIR_PATH, jar_name)):
|
||||
print('download crowdin-cli.jar')
|
||||
urllib.request.urlretrieve(crowdin_cli_url, crowdin_cli_path)
|
||||
with zipfile.ZipFile(crowdin_cli_path, 'r') as zip_ref:
|
||||
jar_dir = t.os.path.join(DIR_PATH, zip_ref.namelist()[0])
|
||||
for file in zip_ref.namelist():
|
||||
if file.endswith(jar_name):
|
||||
jar_file = file
|
||||
break
|
||||
zip_ref.extract(jar_file, path=DIR_PATH)
|
||||
os.rename(t.os.path.join(DIR_PATH, jar_file), jar_path)
|
||||
os.remove(crowdin_cli_path)
|
||||
shutil.rmtree(jar_dir)
|
||||
|
||||
print('upload source *.json')
|
||||
subprocess.run(['java', '-jar', jar_path, 'upload', 'sources', '--config', YAML_PATH])
|
||||
|
||||
# Reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*?"',
|
||||
'"api_token": "_secret_"',
|
||||
crowdin_config, 1)
|
||||
|
||||
# TODO this is NOT safe!
|
||||
crowdin_config = re.sub(re.escape(f'/{CORE_NAME}'),
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
except Exception as e:
|
||||
# Try really hard to reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*?"',
|
||||
'"api_token": "_secret_"',
|
||||
crowdin_config, 1)
|
||||
|
||||
# TODO this is NOT safe!
|
||||
crowdin_config = re.sub(re.escape(f'/{CORE_NAME}'),
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
raise e
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import core_option_translation as t
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
if t.os.path.isfile(t.sys.argv[1]) or t.sys.argv[1].endswith('.h'):
|
||||
_temp = t.os.path.dirname(t.sys.argv[1])
|
||||
else:
|
||||
_temp = t.sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = t.os.path.dirname(t.os.path.dirname(t.os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
CORE_NAME = t.clean_file_name(t.sys.argv[2])
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
LOCALISATIONS_PATH = t.os.path.join(DIR_PATH, CORE_NAME)
|
||||
US_FILE_PATH = t.os.path.join(LOCALISATIONS_PATH, '_us.h')
|
||||
H_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
INTL_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options_intl.h')
|
||||
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = t.get_texts(_main_text)
|
||||
_files = t.create_msg_hash(DIR_PATH, CORE_NAME, _hash_n_str)
|
||||
_source_jsons = t.h2json(_files)
|
||||
|
||||
print('Converting translations *.json to *.h:')
|
||||
localisation_files = t.os.scandir(LOCALISATIONS_PATH)
|
||||
t.json2h(LOCALISATIONS_PATH, localisation_files)
|
||||
|
||||
print('Constructing libretro_core_options_intl.h')
|
||||
t.create_intl_file(INTL_FILE_PATH, LOCALISATIONS_PATH, _main_text, _files["_us"])
|
||||
|
||||
print('\nAll done!')
|
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.request
|
||||
import zipfile
|
||||
import core_option_translation as t
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check Crowdin API Token and core name
|
||||
try:
|
||||
API_KEY = sys.argv[1]
|
||||
CORE_NAME = t.clean_file_name(sys.argv[2])
|
||||
except IndexError as e:
|
||||
print('Please provide Crowdin API Token and core name!')
|
||||
raise e
|
||||
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
YAML_PATH = t.os.path.join(DIR_PATH, 'crowdin.yaml')
|
||||
|
||||
# Apply Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": "_secret_"',
|
||||
f'"api_token": "{API_KEY}"',
|
||||
crowdin_config, 1)
|
||||
crowdin_config = re.sub(r'/_core_name_',
|
||||
f'/{CORE_NAME}'
|
||||
, crowdin_config)
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
try:
|
||||
# Download Crowdin CLI
|
||||
jar_name = 'crowdin-cli.jar'
|
||||
jar_path = t.os.path.join(DIR_PATH, jar_name)
|
||||
crowdin_cli_file = 'crowdin-cli.zip'
|
||||
crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/' + crowdin_cli_file
|
||||
crowdin_cli_path = t.os.path.join(DIR_PATH, crowdin_cli_file)
|
||||
|
||||
if not os.path.isfile(t.os.path.join(DIR_PATH, jar_name)):
|
||||
print('download crowdin-cli.jar')
|
||||
urllib.request.urlretrieve(crowdin_cli_url, crowdin_cli_path)
|
||||
with zipfile.ZipFile(crowdin_cli_path, 'r') as zip_ref:
|
||||
jar_dir = t.os.path.join(DIR_PATH, zip_ref.namelist()[0])
|
||||
for file in zip_ref.namelist():
|
||||
if file.endswith(jar_name):
|
||||
jar_file = file
|
||||
break
|
||||
zip_ref.extract(jar_file, path=DIR_PATH)
|
||||
os.rename(t.os.path.join(DIR_PATH, jar_file), jar_path)
|
||||
os.remove(crowdin_cli_path)
|
||||
shutil.rmtree(jar_dir)
|
||||
|
||||
print('download translation *.json')
|
||||
subprocess.run(['java', '-jar', jar_path, 'download', '--config', YAML_PATH])
|
||||
|
||||
# Reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*?"',
|
||||
'"api_token": "_secret_"',
|
||||
crowdin_config, 1)
|
||||
|
||||
# TODO this is NOT safe!
|
||||
crowdin_config = re.sub(re.escape(f'/{CORE_NAME}'),
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
except Exception as e:
|
||||
# Try really hard to reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*?"',
|
||||
'"api_token": "_secret_"',
|
||||
crowdin_config, 1)
|
||||
|
||||
# TODO this is NOT safe!
|
||||
crowdin_config = re.sub(re.escape(f'/{CORE_NAME}'),
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
raise e
|
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
try:
|
||||
api_key = sys.argv[1]
|
||||
core_name = sys.argv[2]
|
||||
dir_path = sys.argv[3]
|
||||
except IndexError as e:
|
||||
print('Please provide path to libretro_core_options.h, Crowdin API Token and core name!')
|
||||
raise e
|
||||
|
||||
subprocess.run(['python3', 'intl/crowdin_prep.py', dir_path, core_name])
|
||||
subprocess.run(['python3', 'intl/crowdin_translation_download.py', api_key, core_name])
|
||||
subprocess.run(['python3', 'intl/crowdin_translate.py', dir_path, core_name])
|
|
@ -0,0 +1,102 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import urllib.request
|
||||
import zipfile
|
||||
import core_option_translation as t
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Check Crowdin API Token and core name
|
||||
try:
|
||||
API_KEY = sys.argv[1]
|
||||
CORE_NAME = t.clean_file_name(sys.argv[2])
|
||||
OPTIONS_PATH = t.clean_file_name(sys.argv[3])
|
||||
except IndexError as e:
|
||||
print('Please provide Crowdin API Token, core name and path to the core options file!')
|
||||
raise e
|
||||
|
||||
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
YAML_PATH = os.path.join(DIR_PATH, 'crowdin.yaml')
|
||||
|
||||
# Apply Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": "_secret_"',
|
||||
f'"api_token": "{API_KEY}"',
|
||||
crowdin_config, 1)
|
||||
crowdin_config = re.sub(r'/_core_name_(?=[/.])',
|
||||
f'/{CORE_NAME}'
|
||||
, crowdin_config)
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
try:
|
||||
jar_name = 'crowdin-cli.jar'
|
||||
jar_path = os.path.join(DIR_PATH, jar_name)
|
||||
crowdin_cli_file = 'crowdin-cli.zip'
|
||||
crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/' + crowdin_cli_file
|
||||
crowdin_cli_path = os.path.join(DIR_PATH, crowdin_cli_file)
|
||||
|
||||
# Download Crowdin CLI
|
||||
if not os.path.isfile(os.path.join(DIR_PATH, jar_name)):
|
||||
print('download crowdin-cli.jar')
|
||||
urllib.request.urlretrieve(crowdin_cli_url, crowdin_cli_path)
|
||||
with zipfile.ZipFile(crowdin_cli_path, 'r') as zip_ref:
|
||||
jar_dir = os.path.join(DIR_PATH, zip_ref.namelist()[0])
|
||||
for file in zip_ref.namelist():
|
||||
if file.endswith(jar_name):
|
||||
jar_file = file
|
||||
break
|
||||
zip_ref.extract(jar_file, path=DIR_PATH)
|
||||
os.rename(os.path.join(DIR_PATH, jar_file), jar_path)
|
||||
os.remove(crowdin_cli_path)
|
||||
shutil.rmtree(jar_dir)
|
||||
|
||||
# Create JSON data
|
||||
subprocess.run(['python3', 'intl/core_option_translation.py', OPTIONS_PATH, CORE_NAME])
|
||||
print('upload source & translations *.json')
|
||||
subprocess.run(['java', '-jar', jar_path, 'upload', 'sources', '--config', YAML_PATH])
|
||||
subprocess.run(['java', '-jar', jar_path, 'upload', 'translations', '--config', YAML_PATH])
|
||||
|
||||
print('wait for crowdin server to process data')
|
||||
time.sleep(10)
|
||||
|
||||
print('download translation *.json')
|
||||
subprocess.run(['java', '-jar', jar_path, 'download', '--config', YAML_PATH])
|
||||
|
||||
# Reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*"', '"api_token": "_secret_"', crowdin_config, 1)
|
||||
|
||||
# TODO This is technically not safe and could replace more than intended.
|
||||
crowdin_config = re.sub(r'/' + re.escape(CORE_NAME) + r'(?=[/.])',
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
|
||||
except Exception as e:
|
||||
# Try really hard to reset Crowdin API Key
|
||||
with open(YAML_PATH, 'r') as crowdin_config_file:
|
||||
crowdin_config = crowdin_config_file.read()
|
||||
crowdin_config = re.sub(r'"api_token": ".*?"',
|
||||
'"api_token": "_secret_"',
|
||||
crowdin_config, 1)
|
||||
|
||||
# TODO This is technically not safe and could replace more than intended.
|
||||
crowdin_config = re.sub(r'/' + re.escape(CORE_NAME) + r'(?=[/.])',
|
||||
'/_core_name_',
|
||||
crowdin_config)
|
||||
|
||||
with open(YAML_PATH, 'w') as crowdin_config_file:
|
||||
crowdin_config_file.write(crowdin_config)
|
||||
raise e
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
with open('intl/upload_workflow.py', 'r') as workflow:
|
||||
workflow_config = workflow.read()
|
||||
|
||||
workflow_config = workflow_config.replace(
|
||||
"subprocess.run(['python3', 'intl/core_option_translation.py', dir_path, core_name])",
|
||||
"subprocess.run(['python3', 'intl/crowdin_prep.py', dir_path, core_name])"
|
||||
)
|
||||
workflow_config = workflow_config.replace(
|
||||
"subprocess.run(['python3', 'intl/initial_sync.py', api_key, core_name])",
|
||||
"subprocess.run(['python3', 'intl/crowdin_source_upload.py', api_key, core_name])"
|
||||
)
|
||||
with open('intl/upload_workflow.py', 'w') as workflow:
|
||||
workflow.write(workflow_config)
|
||||
|
||||
|
||||
with open('intl/download_workflow.py', 'r') as workflow:
|
||||
workflow_config = workflow.read()
|
||||
|
||||
workflow_config = workflow_config.replace(
|
||||
"subprocess.run(['python3', 'intl/core_option_translation.py', dir_path, core_name])",
|
||||
"subprocess.run(['python3', 'intl/crowdin_prep.py', dir_path, core_name])"
|
||||
)
|
||||
workflow_config = workflow_config.replace(
|
||||
"subprocess.run(['python3', 'intl/initial_sync.py', api_key, core_name])",
|
||||
"subprocess.run(['python3', 'intl/crowdin_translation_download.py', api_key, core_name])"
|
||||
)
|
||||
with open('intl/download_workflow.py', 'w') as workflow:
|
||||
workflow.write(workflow_config)
|
|
@ -0,0 +1,15 @@
|
|||
#!/usr/bin/env python3
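# Runs the Crowdin preparation and source-upload helper scripts in sequence;
# expects the Crowdin API key, the core name and the path to
# libretro_core_options.h as command-line arguments.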
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
try:
|
||||
api_key = sys.argv[1]
|
||||
core_name = sys.argv[2]
|
||||
dir_path = sys.argv[3]
|
||||
except IndexError as e:
|
||||
print('Please provide the Crowdin API key, the core name and the path to libretro_core_options.h!')
|
||||
raise e
|
||||
|
||||
subprocess.run(['python3', 'intl/crowdin_prep.py', dir_path, core_name])
|
||||
subprocess.run(['python3', 'intl/crowdin_source_upload.py', api_key, core_name])
|
|
@ -0,0 +1,483 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""Core options v1 to v2 converter
|
||||
|
||||
Run this script as follows to convert 'libretro_core_options.h' & 'libretro_core_options_intl.h' to v2:
|
||||
python3 "/path/to/v1_to_v2_converter.py" "/path/to/where/libretro_core_options.h & Libretro_coreoptions_intl.h/are"
|
||||
|
||||
The original files will be preserved as *.v1
|
||||
"""
|
||||
import core_option_regex as cor
|
||||
import os
|
||||
import glob
|
||||
|
||||
|
||||
def create_v2_code_file(struct_text, file_name):
|
||||
def replace_option(option_match):
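# Pads a matched v1 option entry with the NULL placeholders the v2
# retro_core_option_v2_definition layout expects (categorised description,
# categorised sublabel and category key); entries without the matched info
# group are returned unchanged.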
|
||||
_offset = option_match.start(0)
|
||||
|
||||
if option_match.group(3):
|
||||
res = option_match.group(0)[:option_match.end(2) - _offset] + ',\n NULL' + \
|
||||
option_match.group(0)[option_match.end(2) - _offset:option_match.end(3) - _offset] + \
|
||||
'NULL,\n NULL,\n ' + option_match.group(0)[option_match.end(3) - _offset:]
|
||||
else:
|
||||
return option_match.group(0)
|
||||
|
||||
return res
|
||||
|
||||
comment_v1 = '/*\n' \
|
||||
' ********************************\n' \
|
||||
' * VERSION: 1.3\n' \
|
||||
' ********************************\n' \
|
||||
' *\n' \
|
||||
' * - 1.3: Move translations to libretro_core_options_intl.h\n' \
|
||||
' * - libretro_core_options_intl.h includes BOM and utf-8\n' \
|
||||
' * fix for MSVC 2010-2013\n' \
|
||||
' * - Added HAVE_NO_LANGEXTRA flag to disable translations\n' \
|
||||
' * on platforms/compilers without BOM support\n' \
|
||||
' * - 1.2: Use core options v1 interface when\n' \
|
||||
' * RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION is >= 1\n' \
|
||||
' * (previously required RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION == 1)\n' \
|
||||
' * - 1.1: Support generation of core options v0 retro_core_option_value\n' \
|
||||
' * arrays containing options with a single value\n' \
|
||||
' * - 1.0: First commit\n' \
|
||||
'*/\n'
|
||||
|
||||
comment_v2 = '/*\n' \
|
||||
' ********************************\n' \
|
||||
' * VERSION: 2.0\n' \
|
||||
' ********************************\n' \
|
||||
' *\n' \
|
||||
' * - 2.0: Add support for core options v2 interface\n' \
|
||||
' * - 1.3: Move translations to libretro_core_options_intl.h\n' \
|
||||
' * - libretro_core_options_intl.h includes BOM and utf-8\n' \
|
||||
' * fix for MSVC 2010-2013\n' \
|
||||
' * - Added HAVE_NO_LANGEXTRA flag to disable translations\n' \
|
||||
' * on platforms/compilers without BOM support\n' \
|
||||
' * - 1.2: Use core options v1 interface when\n' \
|
||||
' * RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION is >= 1\n' \
|
||||
' * (previously required RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION == 1)\n' \
|
||||
' * - 1.1: Support generation of core options v0 retro_core_option_value\n' \
|
||||
' * arrays containing options with a single value\n' \
|
||||
' * - 1.0: First commit\n' \
|
||||
'*/\n'
|
||||
|
||||
p_intl = cor.p_intl
|
||||
p_set = cor.p_set
|
||||
new_set = 'static INLINE void libretro_set_core_options(retro_environment_t environ_cb,\n' \
|
||||
' bool *categories_supported)\n' \
|
||||
'{\n' \
|
||||
' unsigned version = 0;\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' unsigned language = 0;\n' \
|
||||
'#endif\n' \
|
||||
'\n' \
|
||||
' if (!environ_cb || !categories_supported)\n' \
|
||||
' return;\n' \
|
||||
'\n' \
|
||||
' *categories_supported = false;\n' \
|
||||
'\n' \
|
||||
' if (!environ_cb(RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION, &version))\n' \
|
||||
' version = 0;\n' \
|
||||
'\n' \
|
||||
' if (version >= 2)\n' \
|
||||
' {\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' struct retro_core_options_v2_intl core_options_intl;\n' \
|
||||
'\n' \
|
||||
' core_options_intl.us = &options_us;\n' \
|
||||
' core_options_intl.local = NULL;\n' \
|
||||
'\n' \
|
||||
' if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&\n' \
|
||||
' (language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH))\n' \
|
||||
' core_options_intl.local = options_intl[language];\n' \
|
||||
'\n' \
|
||||
' *categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2_INTL,\n' \
|
||||
' &core_options_intl);\n' \
|
||||
'#else\n' \
|
||||
' *categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2,\n' \
|
||||
' &options_us);\n' \
|
||||
'#endif\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' {\n' \
|
||||
' size_t i, j;\n' \
|
||||
' size_t option_index = 0;\n' \
|
||||
' size_t num_options = 0;\n' \
|
||||
' struct retro_core_option_definition\n' \
|
||||
' *option_v1_defs_us = NULL;\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' size_t num_options_intl = 0;\n' \
|
||||
' struct retro_core_option_v2_definition\n' \
|
||||
' *option_defs_intl = NULL;\n' \
|
||||
' struct retro_core_option_definition\n' \
|
||||
' *option_v1_defs_intl = NULL;\n' \
|
||||
' struct retro_core_options_intl\n' \
|
||||
' core_options_v1_intl;\n' \
|
||||
'#endif\n' \
|
||||
' struct retro_variable *variables = NULL;\n' \
|
||||
' char **values_buf = NULL;\n' \
|
||||
'\n' \
|
||||
' /* Determine total number of options */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (option_defs_us[num_options].key)\n' \
|
||||
' num_options++;\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' if (version >= 1)\n' \
|
||||
' {\n' \
|
||||
' /* Allocate US array */\n' \
|
||||
' option_v1_defs_us = (struct retro_core_option_definition *)\n' \
|
||||
' calloc(num_options + 1, sizeof(struct retro_core_option_definition));\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_us array */\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' struct retro_core_option_v2_definition *option_def_us = &option_defs_us[i];\n' \
|
||||
' struct retro_core_option_value *option_values = option_def_us->values;\n' \
|
||||
' struct retro_core_option_definition *option_v1_def_us = &option_v1_defs_us[i];\n' \
|
||||
' struct retro_core_option_value *option_v1_values = option_v1_def_us->values;\n' \
|
||||
'\n' \
|
||||
' option_v1_def_us->key = option_def_us->key;\n' \
|
||||
' option_v1_def_us->desc = option_def_us->desc;\n' \
|
||||
' option_v1_def_us->info = option_def_us->info;\n' \
|
||||
' option_v1_def_us->default_value = option_def_us->default_value;\n' \
|
||||
'\n' \
|
||||
' /* Values must be copied individually... */\n' \
|
||||
' while (option_values->value)\n' \
|
||||
' {\n' \
|
||||
' option_v1_values->value = option_values->value;\n' \
|
||||
' option_v1_values->label = option_values->label;\n' \
|
||||
'\n' \
|
||||
' option_values++;\n' \
|
||||
' option_v1_values++;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&\n' \
|
||||
' (language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH) &&\n' \
|
||||
' options_intl[language])\n' \
|
||||
' option_defs_intl = options_intl[language]->definitions;\n' \
|
||||
'\n' \
|
||||
' if (option_defs_intl)\n' \
|
||||
' {\n' \
|
||||
' /* Determine number of intl options */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (option_defs_intl[num_options_intl].key)\n' \
|
||||
' num_options_intl++;\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Allocate intl array */\n' \
|
||||
' option_v1_defs_intl = (struct retro_core_option_definition *)\n' \
|
||||
' calloc(num_options_intl + 1, sizeof(struct retro_core_option_definition));\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_intl array */\n' \
|
||||
' for (i = 0; i < num_options_intl; i++)\n' \
|
||||
' {\n' \
|
||||
' struct retro_core_option_v2_definition *option_def_intl = &option_defs_intl[i];\n' \
|
||||
' struct retro_core_option_value *option_values = option_def_intl->values;\n' \
|
||||
' struct retro_core_option_definition *option_v1_def_intl = &option_v1_defs_intl[i];\n' \
|
||||
' struct retro_core_option_value *option_v1_values = option_v1_def_intl->values;\n' \
|
||||
'\n' \
|
||||
' option_v1_def_intl->key = option_def_intl->key;\n' \
|
||||
' option_v1_def_intl->desc = option_def_intl->desc;\n' \
|
||||
' option_v1_def_intl->info = option_def_intl->info;\n' \
|
||||
' option_v1_def_intl->default_value = option_def_intl->default_value;\n' \
|
||||
'\n' \
|
||||
' /* Values must be copied individually... */\n' \
|
||||
' while (option_values->value)\n' \
|
||||
' {\n' \
|
||||
' option_v1_values->value = option_values->value;\n' \
|
||||
' option_v1_values->label = option_values->label;\n' \
|
||||
'\n' \
|
||||
' option_values++;\n' \
|
||||
' option_v1_values++;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' core_options_v1_intl.us = option_v1_defs_us;\n' \
|
||||
' core_options_v1_intl.local = option_v1_defs_intl;\n' \
|
||||
'\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_INTL, &core_options_v1_intl);\n' \
|
||||
'#else\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS, option_v1_defs_us);\n' \
|
||||
'#endif\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' {\n' \
|
||||
' /* Allocate arrays */\n' \
|
||||
' variables = (struct retro_variable *)calloc(num_options + 1,\n' \
|
||||
' sizeof(struct retro_variable));\n' \
|
||||
' values_buf = (char **)calloc(num_options, sizeof(char *));\n' \
|
||||
'\n' \
|
||||
' if (!variables || !values_buf)\n' \
|
||||
' goto error;\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_us array */\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' const char *key = option_defs_us[i].key;\n' \
|
||||
' const char *desc = option_defs_us[i].desc;\n' \
|
||||
' const char *default_value = option_defs_us[i].default_value;\n' \
|
||||
' struct retro_core_option_value *values = option_defs_us[i].values;\n' \
|
||||
' size_t buf_len = 3;\n' \
|
||||
' size_t default_index = 0;\n' \
|
||||
'\n' \
|
||||
' values_buf[i] = NULL;\n' \
|
||||
'\n' \
|
||||
' if (desc)\n' \
|
||||
' {\n' \
|
||||
' size_t num_values = 0;\n' \
|
||||
'\n' \
|
||||
' /* Determine number of values */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (values[num_values].value)\n' \
|
||||
' {\n' \
|
||||
' /* Check if this is the default value */\n' \
|
||||
' if (default_value)\n' \
|
||||
' if (strcmp(values[num_values].value, default_value) == 0)\n' \
|
||||
' default_index = num_values;\n' \
|
||||
'\n' \
|
||||
' buf_len += strlen(values[num_values].value);\n' \
|
||||
' num_values++;\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Build values string */\n' \
|
||||
' if (num_values > 0)\n' \
|
||||
' {\n' \
|
||||
' buf_len += num_values - 1;\n' \
|
||||
' buf_len += strlen(desc);\n' \
|
||||
'\n' \
|
||||
' values_buf[i] = (char *)calloc(buf_len, sizeof(char));\n' \
|
||||
' if (!values_buf[i])\n' \
|
||||
' goto error;\n' \
|
||||
'\n' \
|
||||
' strcpy(values_buf[i], desc);\n' \
|
||||
' strcat(values_buf[i], "; ");\n' \
|
||||
'\n' \
|
||||
' /* Default value goes first */\n' \
|
||||
' strcat(values_buf[i], values[default_index].value);\n' \
|
||||
'\n' \
|
||||
' /* Add remaining values */\n' \
|
||||
' for (j = 0; j < num_values; j++)\n' \
|
||||
' {\n' \
|
||||
' if (j != default_index)\n' \
|
||||
' {\n' \
|
||||
' strcat(values_buf[i], "|");\n' \
|
||||
' strcat(values_buf[i], values[j].value);\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' variables[option_index].key = key;\n' \
|
||||
' variables[option_index].value = values_buf[i];\n' \
|
||||
' option_index++;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Set variables */\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_VARIABLES, variables);\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'error:\n' \
|
||||
' /* Clean up */\n' \
|
||||
'\n' \
|
||||
' if (option_v1_defs_us)\n' \
|
||||
' {\n' \
|
||||
' free(option_v1_defs_us);\n' \
|
||||
' option_v1_defs_us = NULL;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' if (option_v1_defs_intl)\n' \
|
||||
' {\n' \
|
||||
' free(option_v1_defs_intl);\n' \
|
||||
' option_v1_defs_intl = NULL;\n' \
|
||||
' }\n' \
|
||||
'#endif\n' \
|
||||
'\n' \
|
||||
' if (values_buf)\n' \
|
||||
' {\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' if (values_buf[i])\n' \
|
||||
' {\n' \
|
||||
' free(values_buf[i]);\n' \
|
||||
' values_buf[i] = NULL;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' free(values_buf);\n' \
|
||||
' values_buf = NULL;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' if (variables)\n' \
|
||||
' {\n' \
|
||||
' free(variables);\n' \
|
||||
' variables = NULL;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'}\n' \
|
||||
'\n' \
|
||||
'#ifdef __cplusplus\n' \
|
||||
'}\n' \
|
||||
'#endif'
|
||||
|
||||
struct_groups = cor.p_struct.finditer(struct_text)
|
||||
out_text = struct_text
|
||||
|
||||
for construct in struct_groups:
|
||||
repl_text = ''
|
||||
declaration = construct.group(1)
|
||||
struct_match = cor.p_type_name.search(declaration)
|
||||
if struct_match:
|
||||
if struct_match.group(3):
|
||||
struct_type_name_lang = struct_match.group(1, 2, 3)
|
||||
declaration_end = declaration[struct_match.end(1):]
|
||||
elif struct_match.group(4):
|
||||
struct_type_name_lang = struct_match.group(1, 2, 4)
|
||||
declaration_end = declaration[struct_match.end(1):]
|
||||
else:
|
||||
struct_type_name_lang = sum((struct_match.group(1, 2), ('_us',)), ())
|
||||
declaration_end = f'{declaration[struct_match.end(1):struct_match.end(2)]}_us' \
|
||||
f'{declaration[struct_match.end(2):]}'
|
||||
else:
|
||||
return -1
|
||||
|
||||
if 'retro_core_option_definition' == struct_type_name_lang[0]:
|
||||
import shutil
|
||||
shutil.copy(file_name, file_name + '.v1')
|
||||
new_declaration = f'\nstruct retro_core_option_v2_category option_cats{struct_type_name_lang[2]}[] = ' \
|
||||
'{\n { NULL, NULL, NULL },\n' \
|
||||
'};\n\n' \
|
||||
+ declaration[:struct_match.start(1)] + \
|
||||
'retro_core_option_v2_definition' \
|
||||
+ declaration_end
|
||||
offset = construct.start(0)
|
||||
repl_text = repl_text + cor.re.sub(cor.re.escape(declaration), new_declaration,
|
||||
construct.group(0)[:construct.start(2) - offset])
|
||||
content = construct.group(2)
|
||||
new_content = cor.p_option.sub(replace_option, content)
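# The substitution below swaps the v1 terminator entry for the v2 one and
# appends a retro_core_options_v2 wrapper struct that ties the generated
# category array to the converted option definitions.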
|
||||
|
||||
repl_text = repl_text + new_content + cor.re.sub(r'{\s*NULL,\s*NULL,\s*NULL,\s*{\{0}},\s*NULL\s*},\s*};',
|
||||
'{ NULL, NULL, NULL, NULL, NULL, NULL, {{0}}, NULL },\n};'
|
||||
'\n\nstruct retro_core_options_v2 options' +
|
||||
struct_type_name_lang[2] + ' = {\n'
|
||||
f' option_cats{struct_type_name_lang[2]},\n'
|
||||
f' option_defs{struct_type_name_lang[2]}\n'
|
||||
'};',
|
||||
construct.group(0)[construct.end(2) - offset:])
|
||||
out_text = out_text.replace(construct.group(0), repl_text)
|
||||
#out_text = cor.re.sub(cor.re.escape(construct.group(0)), repl_text, raw_out)
|
||||
else:
|
||||
return -2
|
||||
with open(file_name, 'w', encoding='utf-8') as code_file:
|
||||
out_text = cor.re.sub(cor.re.escape(comment_v1), comment_v2, out_text)
|
||||
intl = p_intl.search(out_text)
|
||||
if intl:
|
||||
new_intl = out_text[:intl.start(1)] \
|
||||
+ 'struct retro_core_options_v2 *options_intl[RETRO_LANGUAGE_LAST]' \
|
||||
+ out_text[intl.end(1):intl.start(2)] \
|
||||
+ '\n &options_us, /* RETRO_LANGUAGE_ENGLISH */\n' \
|
||||
' &options_ja, /* RETRO_LANGUAGE_JAPANESE */\n' \
|
||||
' &options_fr, /* RETRO_LANGUAGE_FRENCH */\n' \
|
||||
' &options_es, /* RETRO_LANGUAGE_SPANISH */\n' \
|
||||
' &options_de, /* RETRO_LANGUAGE_GERMAN */\n' \
|
||||
' &options_it, /* RETRO_LANGUAGE_ITALIAN */\n' \
|
||||
' &options_nl, /* RETRO_LANGUAGE_DUTCH */\n' \
|
||||
' &options_pt_br, /* RETRO_LANGUAGE_PORTUGUESE_BRAZIL */\n' \
|
||||
' &options_pt_pt, /* RETRO_LANGUAGE_PORTUGUESE_PORTUGAL */\n' \
|
||||
' &options_ru, /* RETRO_LANGUAGE_RUSSIAN */\n' \
|
||||
' &options_ko, /* RETRO_LANGUAGE_KOREAN */\n' \
|
||||
' &options_cht, /* RETRO_LANGUAGE_CHINESE_TRADITIONAL */\n' \
|
||||
' &options_chs, /* RETRO_LANGUAGE_CHINESE_SIMPLIFIED */\n' \
|
||||
' &options_eo, /* RETRO_LANGUAGE_ESPERANTO */\n' \
|
||||
' &options_pl, /* RETRO_LANGUAGE_POLISH */\n' \
|
||||
' &options_vn, /* RETRO_LANGUAGE_VIETNAMESE */\n' \
|
||||
' &options_ar, /* RETRO_LANGUAGE_ARABIC */\n' \
|
||||
' &options_el, /* RETRO_LANGUAGE_GREEK */\n' \
|
||||
' &options_tr, /* RETRO_LANGUAGE_TURKISH */\n' \
|
||||
' &options_sk, /* RETRO_LANGUAGE_SLOVAK */\n' \
|
||||
' &options_fa, /* RETRO_LANGUAGE_PERSIAN */\n' \
|
||||
' &options_he, /* RETRO_LANGUAGE_HEBREW */\n' \
|
||||
' &options_ast, /* RETRO_LANGUAGE_ASTURIAN */\n' \
|
||||
' &options_fi, /* RETRO_LANGUAGE_FINNISH */\n' \
|
||||
' &options_id, /* RETRO_LANGUAGE_INDONESIAN */\n' \
|
||||
' &options_sv, /* RETRO_LANGUAGE_SWEDISH */\n' \
|
||||
' &options_uk, /* RETRO_LANGUAGE_UKRAINIAN */\n' \
|
||||
' &options_cs, /* RETRO_LANGUAGE_CZECH */\n' \
|
||||
' &options_val, /* RETRO_LANGUAGE_CATALAN_VALENCIA */\n' \
|
||||
' &options_ca, /* RETRO_LANGUAGE_CATALAN */\n' \
|
||||
' &options_en, /* RETRO_LANGUAGE_BRITISH_ENGLISH */\n' \
|
||||
' &options_hu, /* RETRO_LANGUAGE_HUNGARIAN */\n' \
|
||||
+ out_text[intl.end(2):]
|
||||
out_text = p_set.sub(new_set, new_intl)
|
||||
else:
|
||||
out_text = p_set.sub(new_set, out_text)
|
||||
code_file.write(out_text)
|
||||
|
||||
return 1
|
||||
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.basename(DIR_PATH) != "intl":
|
||||
raise RuntimeError("Script is not in intl folder!")
|
||||
|
||||
BASE_PATH = os.path.dirname(DIR_PATH)
|
||||
CORE_OP_FILE = os.path.join(BASE_PATH, "**", "libretro_core_options.h")
|
||||
|
||||
core_options_hits = glob.glob(CORE_OP_FILE, recursive=True)
|
||||
|
||||
if len(core_options_hits) == 0:
|
||||
raise RuntimeError("libretro_core_options.h not found!")
|
||||
elif len(core_options_hits) > 1:
|
||||
print("More than one libretro_core_options.h file found:\n\n")
|
||||
for i, file in enumerate(core_options_hits):
|
||||
print(f"{i} {file}\n")
|
||||
|
||||
while True:
|
||||
user_choice = input("Please choose one ('q' will exit): ")
|
||||
if user_choice == 'q':
|
||||
exit(0)
|
||||
elif user_choice.isdigit():
|
||||
core_op_file = core_options_hits[int(user_choice)]
|
||||
break
|
||||
else:
|
||||
print("Please make a valid choice!\n\n")
|
||||
else:
|
||||
core_op_file = core_options_hits[0]
|
||||
|
||||
H_FILE_PATH = core_op_file
|
||||
INTL_FILE_PATH = core_op_file.replace("libretro_core_options.h", 'libretro_core_options_intl.h')
|
||||
for file in (H_FILE_PATH, INTL_FILE_PATH):
|
||||
if os.path.isfile(file):
|
||||
with open(file, 'r+', encoding='utf-8') as h_file:
|
||||
text = h_file.read()
|
||||
try:
|
||||
test = create_v2_code_file(text, file)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
test = -1
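# create_v2_code_file() returns 1 on success, -1 when a struct declaration
# could not be parsed (or an exception occurred above) and -2 when the file
# contains no v1 retro_core_option_definition structs (i.e. it is probably
# already v2).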
|
||||
if -1 > test:
|
||||
print('Your file looks like it is already v2. (' + file + ')')
|
||||
continue
|
||||
if 0 > test:
|
||||
print('An error occurred! Please make sure to use the complete v1 struct! (' + file + ')')
|
||||
continue
|
||||
else:
|
||||
print(file + ' not found.')
|
|
@ -13,9 +13,10 @@
|
|||
|
||||
/*
|
||||
********************************
|
||||
* VERSION: 1.3
|
||||
* VERSION: 2.0
|
||||
********************************
|
||||
*
|
||||
* - 2.0: Add support for core options v2 interface
|
||||
* - 1.3: Move translations to libretro_core_options_intl.h
|
||||
* - libretro_core_options_intl.h includes BOM and utf-8
|
||||
* fix for MSVC 2010-2013
|
||||
|
@ -48,11 +49,36 @@ extern "C" {
|
|||
* - Will be used as a fallback for any missing entries in
|
||||
* frontend language definition */
|
||||
|
||||
struct retro_core_option_definition option_defs_us[] = {
|
||||
struct retro_core_option_v2_category option_cats_us[] = {
|
||||
{
|
||||
"video", /* key (category name) */
|
||||
"Video", /* category description (label) */
|
||||
"Configure display options." /* category sublabel */
|
||||
},
|
||||
{
|
||||
"hacks",
|
||||
"Advanced",
|
||||
"Options affecting low-level emulation performance and accuracy."
|
||||
},
|
||||
{ NULL, NULL, NULL },
|
||||
};
|
||||
|
||||
struct retro_core_option_v2_definition option_defs_us[] = {
|
||||
{
|
||||
"mycore_region", /* key (option name) */
|
||||
"Console Region", /* description (label) */
|
||||
NULL, /* 'categorised' description (used instead of
|
||||
* 'description' if frontend has category
|
||||
* support; if NULL or empty, regular
|
||||
* description is always used) */
|
||||
"Specify which region the system is from.", /* sublabel */
|
||||
NULL, /* 'categorised' sublabel (used instead of
|
||||
* 'sublabel' if frontend has category
|
||||
* support; if NULL or empty, regular
|
||||
* sublabel is always used) */
|
||||
NULL, /* category key (must match an entry in
|
||||
* option_cats_us; if NULL or empty,
|
||||
* option is uncategorised) */
|
||||
{
|
||||
{ "auto", "Auto" }, /* value_1, value_1_label */
|
||||
{ "ntsc-j", "Japan" }, /* value_2, value_2_label */
|
||||
|
@ -64,8 +90,14 @@ struct retro_core_option_definition option_defs_us[] = {
|
|||
},
|
||||
{
|
||||
"mycore_video_scale",
|
||||
"Video Scale",
|
||||
"Video > Scale", /* description: here a 'Video >' prefix is used to
|
||||
* signify a category on frontends without explicit
|
||||
* category support */
|
||||
"Scale", /* 'categorised' description: will be displayed inside
|
||||
* the 'Video' submenu */
|
||||
"Set internal video scale factor.",
|
||||
NULL,
|
||||
"video", /* category key */
|
||||
{
|
||||
{ "1x", NULL }, /* If value itself is human-readable (e.g. a number) */
|
||||
{ "2x", NULL }, /* and can displayed directly, the value_label should */
|
||||
|
@ -77,8 +109,13 @@ struct retro_core_option_definition option_defs_us[] = {
|
|||
},
|
||||
{
|
||||
"mycore_overclock",
|
||||
"Advanced > Reduce Slowdown",
|
||||
"Reduce Slowdown",
|
||||
"Enable CPU overclock (unsafe).",
|
||||
"Enabling 'Advanced > Reduce Slowdown' will reduce accuracy.", /* sublabel */
|
||||
"Enabling 'Reduce Slowdown' will reduce accuracy.", /* 'categorised' sublabel:
|
||||
* will be displayed inside the 'Advanced' submenu; note that
|
||||
* 'Advanced > Reduce Slowdown' is replaced with 'Reduce Slowdown' */
|
||||
"hacks",
|
||||
{
|
||||
{ "enabled", NULL }, /* If value is equal to 'enabled' or 'disabled', */
|
||||
{ "disabled", NULL }, /* value_label should be set to NULL */
|
||||
|
@ -86,7 +123,12 @@ struct retro_core_option_definition option_defs_us[] = {
|
|||
},
|
||||
"disabled"
|
||||
},
|
||||
{ NULL, NULL, NULL, {{0}}, NULL },
|
||||
{ NULL, NULL, NULL, NULL, NULL, NULL, {{0}}, NULL },
|
||||
};
|
||||
|
||||
struct retro_core_options_v2 options_us = {
|
||||
option_cats_us,
|
||||
option_defs_us
|
||||
};
|
||||
|
||||
/*
|
||||
|
@ -96,35 +138,39 @@ struct retro_core_option_definition option_defs_us[] = {
|
|||
*/
|
||||
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
struct retro_core_option_definition *option_defs_intl[RETRO_LANGUAGE_LAST] = {
|
||||
option_defs_us, /* RETRO_LANGUAGE_ENGLISH */
|
||||
NULL, /* RETRO_LANGUAGE_JAPANESE */
|
||||
option_defs_fr, /* RETRO_LANGUAGE_FRENCH */
|
||||
NULL, /* RETRO_LANGUAGE_SPANISH */
|
||||
NULL, /* RETRO_LANGUAGE_GERMAN */
|
||||
NULL, /* RETRO_LANGUAGE_ITALIAN */
|
||||
NULL, /* RETRO_LANGUAGE_DUTCH */
|
||||
NULL, /* RETRO_LANGUAGE_PORTUGUESE_BRAZIL */
|
||||
NULL, /* RETRO_LANGUAGE_PORTUGUESE_PORTUGAL */
|
||||
NULL, /* RETRO_LANGUAGE_RUSSIAN */
|
||||
NULL, /* RETRO_LANGUAGE_KOREAN */
|
||||
NULL, /* RETRO_LANGUAGE_CHINESE_TRADITIONAL */
|
||||
NULL, /* RETRO_LANGUAGE_CHINESE_SIMPLIFIED */
|
||||
NULL, /* RETRO_LANGUAGE_ESPERANTO */
|
||||
NULL, /* RETRO_LANGUAGE_POLISH */
|
||||
NULL, /* RETRO_LANGUAGE_VIETNAMESE */
|
||||
NULL, /* RETRO_LANGUAGE_ARABIC */
|
||||
NULL, /* RETRO_LANGUAGE_GREEK */
|
||||
NULL, /* RETRO_LANGUAGE_TURKISH */
|
||||
NULL, /* RETRO_LANGUAGE_SLOVAK */
|
||||
NULL, /* RETRO_LANGUAGE_PERSIAN */
|
||||
NULL, /* RETRO_LANGUAGE_HEBREW */
|
||||
NULL, /* RETRO_LANGUAGE_ASTURIAN */
|
||||
NULL, /* RETRO_LANGUAGE_FINNISH */
|
||||
NULL, /* RETRO_LANGUAGE_INDONESIAN */
|
||||
NULL, /* RETRO_LANGUAGE_SWEDISH */
|
||||
NULL, /* RETRO_LANGUAGE_UKRAINIAN */
|
||||
NULL, /* RETRO_LANGUAGE_CZECH */
|
||||
struct retro_core_options_v2 *options_intl[RETRO_LANGUAGE_LAST] = {
|
||||
&options_us, /* RETRO_LANGUAGE_ENGLISH */
|
||||
&options_ja, /* RETRO_LANGUAGE_JAPANESE */
|
||||
&options_fr, /* RETRO_LANGUAGE_FRENCH */
|
||||
&options_es, /* RETRO_LANGUAGE_SPANISH */
|
||||
&options_de, /* RETRO_LANGUAGE_GERMAN */
|
||||
&options_it, /* RETRO_LANGUAGE_ITALIAN */
|
||||
&options_nl, /* RETRO_LANGUAGE_DUTCH */
|
||||
&options_pt_br, /* RETRO_LANGUAGE_PORTUGUESE_BRAZIL */
|
||||
&options_pt_pt, /* RETRO_LANGUAGE_PORTUGUESE_PORTUGAL */
|
||||
&options_ru, /* RETRO_LANGUAGE_RUSSIAN */
|
||||
&options_ko, /* RETRO_LANGUAGE_KOREAN */
|
||||
&options_cht, /* RETRO_LANGUAGE_CHINESE_TRADITIONAL */
|
||||
&options_chs, /* RETRO_LANGUAGE_CHINESE_SIMPLIFIED */
|
||||
&options_eo, /* RETRO_LANGUAGE_ESPERANTO */
|
||||
&options_pl, /* RETRO_LANGUAGE_POLISH */
|
||||
&options_vn, /* RETRO_LANGUAGE_VIETNAMESE */
|
||||
&options_ar, /* RETRO_LANGUAGE_ARABIC */
|
||||
&options_el, /* RETRO_LANGUAGE_GREEK */
|
||||
&options_tr, /* RETRO_LANGUAGE_TURKISH */
|
||||
&options_sk, /* RETRO_LANGUAGE_SLOVAK */
|
||||
&options_fa, /* RETRO_LANGUAGE_PERSIAN */
|
||||
&options_he, /* RETRO_LANGUAGE_HEBREW */
|
||||
&options_ast, /* RETRO_LANGUAGE_ASTURIAN */
|
||||
&options_fi, /* RETRO_LANGUAGE_FINNISH */
|
||||
&options_id, /* RETRO_LANGUAGE_INDONESIAN */
|
||||
&options_sv, /* RETRO_LANGUAGE_SWEDISH */
|
||||
&options_uk, /* RETRO_LANGUAGE_UKRAINIAN */
|
||||
&options_cs, /* RETRO_LANGUAGE_CZECH */
|
||||
&options_val, /* RETRO_LANGUAGE_CATALAN_VALENCIA */
|
||||
&options_ca, /* RETRO_LANGUAGE_CATALAN */
|
||||
&options_en, /* RETRO_LANGUAGE_BRITISH_ENGLISH */
|
||||
&options_hu, /* RETRO_LANGUAGE_HUNGARIAN */
|
||||
};
|
||||
#endif
|
||||
|
||||
|
@ -142,124 +188,250 @@ struct retro_core_option_definition *option_defs_intl[RETRO_LANGUAGE_LAST] = {
|
|||
* be as painless as possible for core devs)
|
||||
*/
|
||||
|
||||
static INLINE void libretro_set_core_options(retro_environment_t environ_cb)
|
||||
static INLINE void libretro_set_core_options(retro_environment_t environ_cb,
|
||||
bool *categories_supported)
|
||||
{
|
||||
unsigned version = 0;
|
||||
unsigned version = 0;
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
unsigned language = 0;
|
||||
#endif
|
||||
|
||||
if (!environ_cb)
|
||||
if (!environ_cb || !categories_supported)
|
||||
return;
|
||||
|
||||
if (environ_cb(RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION, &version) && (version >= 1))
|
||||
*categories_supported = false;
|
||||
|
||||
if (!environ_cb(RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION, &version))
|
||||
version = 0;
|
||||
|
||||
if (version >= 2)
|
||||
{
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
struct retro_core_options_intl core_options_intl;
|
||||
unsigned language = 0;
|
||||
struct retro_core_options_v2_intl core_options_intl;
|
||||
|
||||
core_options_intl.us = option_defs_us;
|
||||
core_options_intl.us = &options_us;
|
||||
core_options_intl.local = NULL;
|
||||
|
||||
if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&
|
||||
(language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH))
|
||||
core_options_intl.local = option_defs_intl[language];
|
||||
core_options_intl.local = options_intl[language];
|
||||
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_INTL, &core_options_intl);
|
||||
*categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2_INTL,
|
||||
&core_options_intl);
|
||||
#else
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS, &option_defs_us);
|
||||
*categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2,
|
||||
&options_us);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
size_t i;
|
||||
size_t i, j;
|
||||
size_t option_index = 0;
|
||||
size_t num_options = 0;
|
||||
struct retro_core_option_definition
|
||||
*option_v1_defs_us = NULL;
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
size_t num_options_intl = 0;
|
||||
struct retro_core_option_v2_definition
|
||||
*option_defs_intl = NULL;
|
||||
struct retro_core_option_definition
|
||||
*option_v1_defs_intl = NULL;
|
||||
struct retro_core_options_intl
|
||||
core_options_v1_intl;
|
||||
#endif
|
||||
struct retro_variable *variables = NULL;
|
||||
char **values_buf = NULL;
|
||||
|
||||
/* Determine number of options */
|
||||
for (;;)
|
||||
/* Determine total number of options */
|
||||
while (true)
|
||||
{
|
||||
if (!option_defs_us[num_options].key)
|
||||
if (option_defs_us[num_options].key)
|
||||
num_options++;
|
||||
else
|
||||
break;
|
||||
num_options++;
|
||||
}
|
||||
|
||||
/* Allocate arrays */
|
||||
variables = (struct retro_variable *)calloc(num_options + 1, sizeof(struct retro_variable));
|
||||
values_buf = (char **)calloc(num_options, sizeof(char *));
|
||||
|
||||
if (!variables || !values_buf)
|
||||
goto error;
|
||||
|
||||
/* Copy parameters from option_defs_us array */
|
||||
for (i = 0; i < num_options; i++)
|
||||
if (version >= 1)
|
||||
{
|
||||
const char *key = option_defs_us[i].key;
|
||||
const char *desc = option_defs_us[i].desc;
|
||||
const char *default_value = option_defs_us[i].default_value;
|
||||
struct retro_core_option_value *values = option_defs_us[i].values;
|
||||
size_t buf_len = 3;
|
||||
size_t default_index = 0;
|
||||
/* Allocate US array */
|
||||
option_v1_defs_us = (struct retro_core_option_definition *)
|
||||
calloc(num_options + 1, sizeof(struct retro_core_option_definition));
|
||||
|
||||
values_buf[i] = NULL;
|
||||
|
||||
if (desc)
|
||||
/* Copy parameters from option_defs_us array */
|
||||
for (i = 0; i < num_options; i++)
|
||||
{
|
||||
size_t num_values = 0;
|
||||
struct retro_core_option_v2_definition *option_def_us = &option_defs_us[i];
|
||||
struct retro_core_option_value *option_values = option_def_us->values;
|
||||
struct retro_core_option_definition *option_v1_def_us = &option_v1_defs_us[i];
|
||||
struct retro_core_option_value *option_v1_values = option_v1_def_us->values;
|
||||
|
||||
/* Determine number of values */
|
||||
for (;;)
|
||||
option_v1_def_us->key = option_def_us->key;
|
||||
option_v1_def_us->desc = option_def_us->desc;
|
||||
option_v1_def_us->info = option_def_us->info;
|
||||
option_v1_def_us->default_value = option_def_us->default_value;
|
||||
|
||||
/* Values must be copied individually... */
|
||||
while (option_values->value)
|
||||
{
|
||||
if (!values[num_values].value)
|
||||
option_v1_values->value = option_values->value;
|
||||
option_v1_values->label = option_values->label;
|
||||
|
||||
option_values++;
|
||||
option_v1_values++;
|
||||
}
|
||||
}
|
||||
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&
|
||||
(language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH) &&
|
||||
options_intl[language])
|
||||
option_defs_intl = options_intl[language]->definitions;
|
||||
|
||||
if (option_defs_intl)
|
||||
{
|
||||
/* Determine number of intl options */
|
||||
while (true)
|
||||
{
|
||||
if (option_defs_intl[num_options_intl].key)
|
||||
num_options_intl++;
|
||||
else
|
||||
break;
|
||||
|
||||
/* Check if this is the default value */
|
||||
if (default_value)
|
||||
if (strcmp(values[num_values].value, default_value) == 0)
|
||||
default_index = num_values;
|
||||
|
||||
buf_len += strlen(values[num_values].value);
|
||||
num_values++;
|
||||
}
|
||||
|
||||
/* Build values string */
|
||||
if (num_values > 0)
|
||||
/* Allocate intl array */
|
||||
option_v1_defs_intl = (struct retro_core_option_definition *)
|
||||
calloc(num_options_intl + 1, sizeof(struct retro_core_option_definition));
|
||||
|
||||
/* Copy parameters from option_defs_intl array */
|
||||
for (i = 0; i < num_options_intl; i++)
|
||||
{
|
||||
size_t j;
|
||||
struct retro_core_option_v2_definition *option_def_intl = &option_defs_intl[i];
|
||||
struct retro_core_option_value *option_values = option_def_intl->values;
|
||||
struct retro_core_option_definition *option_v1_def_intl = &option_v1_defs_intl[i];
|
||||
struct retro_core_option_value *option_v1_values = option_v1_def_intl->values;
|
||||
|
||||
buf_len += num_values - 1;
|
||||
buf_len += strlen(desc);
|
||||
option_v1_def_intl->key = option_def_intl->key;
|
||||
option_v1_def_intl->desc = option_def_intl->desc;
|
||||
option_v1_def_intl->info = option_def_intl->info;
|
||||
option_v1_def_intl->default_value = option_def_intl->default_value;
|
||||
|
||||
values_buf[i] = (char *)calloc(buf_len, sizeof(char));
|
||||
if (!values_buf[i])
|
||||
goto error;
|
||||
|
||||
strcpy(values_buf[i], desc);
|
||||
strcat(values_buf[i], "; ");
|
||||
|
||||
/* Default value goes first */
|
||||
strcat(values_buf[i], values[default_index].value);
|
||||
|
||||
/* Add remaining values */
|
||||
for (j = 0; j < num_values; j++)
|
||||
/* Values must be copied individually... */
|
||||
while (option_values->value)
|
||||
{
|
||||
if (j != default_index)
|
||||
{
|
||||
strcat(values_buf[i], "|");
|
||||
strcat(values_buf[i], values[j].value);
|
||||
}
|
||||
option_v1_values->value = option_values->value;
|
||||
option_v1_values->label = option_values->label;
|
||||
|
||||
option_values++;
|
||||
option_v1_values++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variables[i].key = key;
|
||||
variables[i].value = values_buf[i];
|
||||
core_options_v1_intl.us = option_v1_defs_us;
|
||||
core_options_v1_intl.local = option_v1_defs_intl;
|
||||
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_INTL, &core_options_v1_intl);
|
||||
#else
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS, option_v1_defs_us);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
/* Allocate arrays */
|
||||
variables = (struct retro_variable *)calloc(num_options + 1,
|
||||
sizeof(struct retro_variable));
|
||||
values_buf = (char **)calloc(num_options, sizeof(char *));
|
||||
|
||||
if (!variables || !values_buf)
|
||||
goto error;
|
||||
|
||||
/* Copy parameters from option_defs_us array */
|
||||
for (i = 0; i < num_options; i++)
|
||||
{
|
||||
const char *key = option_defs_us[i].key;
|
||||
const char *desc = option_defs_us[i].desc;
|
||||
const char *default_value = option_defs_us[i].default_value;
|
||||
struct retro_core_option_value *values = option_defs_us[i].values;
|
||||
size_t buf_len = 3;
|
||||
size_t default_index = 0;
|
||||
|
||||
values_buf[i] = NULL;
|
||||
|
||||
if (desc)
|
||||
{
|
||||
size_t num_values = 0;
|
||||
|
||||
/* Determine number of values */
|
||||
while (true)
|
||||
{
|
||||
if (values[num_values].value)
|
||||
{
|
||||
/* Check if this is the default value */
|
||||
if (default_value)
|
||||
if (strcmp(values[num_values].value, default_value) == 0)
|
||||
default_index = num_values;
|
||||
|
||||
buf_len += strlen(values[num_values].value);
|
||||
num_values++;
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
|
||||
/* Build values string */
|
||||
if (num_values > 0)
|
||||
{
|
||||
buf_len += num_values - 1;
|
||||
buf_len += strlen(desc);
|
||||
|
||||
values_buf[i] = (char *)calloc(buf_len, sizeof(char));
|
||||
if (!values_buf[i])
|
||||
goto error;
|
||||
|
||||
strcpy(values_buf[i], desc);
|
||||
strcat(values_buf[i], "; ");
|
||||
|
||||
/* Default value goes first */
|
||||
strcat(values_buf[i], values[default_index].value);
|
||||
|
||||
/* Add remaining values */
|
||||
for (j = 0; j < num_values; j++)
|
||||
{
|
||||
if (j != default_index)
|
||||
{
|
||||
strcat(values_buf[i], "|");
|
||||
strcat(values_buf[i], values[j].value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variables[option_index].key = key;
|
||||
variables[option_index].value = values_buf[i];
|
||||
option_index++;
|
||||
}
|
||||
|
||||
/* Set variables */
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_VARIABLES, variables);
|
||||
}
|
||||
|
||||
/* Set variables */
|
||||
environ_cb(RETRO_ENVIRONMENT_SET_VARIABLES, variables);
|
||||
|
||||
error:
|
||||
|
||||
/* Clean up */
|
||||
|
||||
if (option_v1_defs_us)
|
||||
{
|
||||
free(option_v1_defs_us);
|
||||
option_v1_defs_us = NULL;
|
||||
}
|
||||
|
||||
#ifndef HAVE_NO_LANGEXTRA
|
||||
if (option_v1_defs_intl)
|
||||
{
|
||||
free(option_v1_defs_intl);
|
||||
option_v1_defs_intl = NULL;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (values_buf)
|
||||
{
|
||||
for (i = 0; i < num_options; i++)
|
||||
|
|
|
@ -11,9 +11,10 @@
|
|||
|
||||
/*
|
||||
********************************
|
||||
* VERSION: 1.3
|
||||
* VERSION: 2.0
|
||||
********************************
|
||||
*
|
||||
* - 2.0: Add support for core options v2 interface
|
||||
* - 1.3: Move translations to libretro_core_options_intl.h
|
||||
* - libretro_core_options_intl.h includes BOM and utf-8
|
||||
* fix for MSVC 2010-2013
|
||||
|
@ -41,11 +42,29 @@ extern "C" {
|
|||
|
||||
/* RETRO_LANGUAGE_FRENCH */
|
||||
|
||||
struct retro_core_option_definition option_defs_fr[] = {
|
||||
struct retro_core_option_v2_category option_cats_fr[] = {
|
||||
{
|
||||
"video", /* key must match option_cats_us entry */
|
||||
"Vidéo", /* translated category description */
|
||||
"Configurez les options d'affichage." /* translated category sublabel */
|
||||
},
|
||||
{
|
||||
"hacks",
|
||||
"Avancée",
|
||||
"Options affectant les performances et la précision de l'émulation de bas niveau."
|
||||
},
|
||||
{ NULL, NULL, NULL },
|
||||
};
|
||||
|
||||
struct retro_core_option_v2_definition option_defs_fr[] = {
|
||||
{
|
||||
"mycore_region", /* key must match option_defs_us entry */
|
||||
"Région de la console", /* translated description */
|
||||
NULL,
|
||||
"Spécifiez la région d'origine du système.", /* translated sublabel */
|
||||
NULL,
|
||||
NULL, /* category key is taken from option_defs_us
|
||||
* -> can be set to NULL here */
|
||||
{
|
||||
{ "auto", "Auto" }, /* value must match option_defs_us entry */
|
||||
{ "ntsc-j", "Japon" }, /* > only value_label should be translated */
|
||||
|
@ -53,12 +72,16 @@ struct retro_core_option_definition option_defs_fr[] = {
|
|||
{ "pal", "L'Europe" },
|
||||
{ NULL, NULL },
|
||||
},
|
||||
NULL /* default_value is taken from option_defs_us -> can set to NULL here */
|
||||
NULL /* default_value is taken from option_defs_us
|
||||
* -> can be set to NULL here */
|
||||
},
|
||||
{
|
||||
"mycore_video_scale",
|
||||
"Échelle vidéo",
|
||||
"Vidéo > Échelle", /* translated description */
|
||||
"Échelle", /* translated 'categorised' description */
|
||||
"Définir le facteur d'échelle vidéo interne.",
|
||||
NULL,
|
||||
NULL,
|
||||
{
|
||||
{ NULL, NULL }, /* If value_labels do not require translation (e.g. numbers), values may be omitted */
|
||||
},
|
||||
|
@ -66,14 +89,23 @@ struct retro_core_option_definition option_defs_fr[] = {
|
|||
},
|
||||
{
|
||||
"mycore_overclock",
|
||||
"Avancé > Réduire le ralentissement",
|
||||
"Réduire le ralentissement",
|
||||
"Activer l'overclocking du processeur (non sécurisé).",
|
||||
"L'activation de « Avancé > Réduire le ralentissement » réduira la précision.", /* translated sublabel */
|
||||
"L'activation de « Réduire le ralentissement » réduira la précision.", /* translated 'categorised'
|
||||
* sublabel */
|
||||
NULL,
|
||||
{
|
||||
{ NULL, NULL }, /* 'enabled' and 'disabled' values should not be translated */
|
||||
},
|
||||
NULL
|
||||
},
|
||||
{ NULL, NULL, NULL, {{0}}, NULL },
|
||||
{ NULL, NULL, NULL, NULL, NULL, NULL, {{0}}, NULL },
|
||||
};
|
||||
|
||||
struct retro_core_options_v2 options_fr = {
|
||||
option_cats_fr,
|
||||
option_defs_fr
|
||||
};
|
||||
|
||||
/* RETRO_LANGUAGE_SPANISH */
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
# Recreate libretro_core_options_intl.h using translations from Crowdin
|
||||
|
||||
name: Crowdin Translation Integration
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'intl/*/*'
|
||||
|
||||
jobs:
|
||||
create_intl_file:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false # otherwise, the token used is the GITHUB_TOKEN, instead of your personal access token.
|
||||
fetch-depth: 0 # otherwise, there would be errors pushing refs to the destination repository.
|
||||
|
||||
- name: Create intl file
|
||||
shell: bash
|
||||
run: |
|
||||
python3 intl/crowdin_intl.py '<path/to/libretro_core_options.h directory>'
|
||||
|
||||
- name: Commit files
|
||||
run: |
|
||||
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git add <path/to/libretro_core_options_intl.h file>
|
||||
git commit -m "Recreate libretro_core_options_intl.h" -a
|
||||
|
||||
- name: GitHub Push
|
||||
uses: ad-m/github-push-action@v0.6.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: ${{ github.ref }}
|
|
@ -1,41 +0,0 @@
|
|||
# Prepare source for Crowdin sync
|
||||
|
||||
name: Crowdin Upload Preparation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- '<path/to/libretro_core_options.h file>'
|
||||
|
||||
jobs:
|
||||
prepare_source_file:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false # otherwise, the token used is the GITHUB_TOKEN, instead of your personal access token.
|
||||
fetch-depth: 0 # otherwise, there would be errors pushing refs to the destination repository.
|
||||
|
||||
- name: Crowdin Prep
|
||||
shell: bash
|
||||
run: |
|
||||
python3 intl/crowdin_prep.py '<path/to/libretro_core_options.h directory>'
|
||||
|
||||
- name: Commit files
|
||||
run: |
|
||||
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git add intl/*
|
||||
git commit -m "Recreate translation source text files" -a
|
||||
|
||||
- name: GitHub Push
|
||||
uses: ad-m/github-push-action@v0.6.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: ${{ github.ref }}
|
|
@ -1,3 +0,0 @@
|
|||
files:
|
||||
- source: /intl/_us/*.json
|
||||
translation: /intl/_%two_letters_code%/%original_file_name%
|
|
@ -1,47 +0,0 @@
|
|||
Place 'crowdin.yml' & the 'intl' and '.github' folder, including content, into the root of the repo.
|
||||
|
||||
In '.github/workflows' are two files: 'crowdin_intl.yml' & 'crowdin_prep.yml'
|
||||
In each of those are place holders, which need to be replaced as follows:
|
||||
|
||||
<path/to/libretro_core_options.h directory>
|
||||
-> replace with the path from the root of the repo to the directory containing
|
||||
'libretro_core_options.h' (it is assumed that 'libretro_core_options.h' &
|
||||
'libretro_core_options_intl.h' are in the same directory)
|
||||
|
||||
<path/to/libretro_core_options.h file>
|
||||
-> replace with the full path from the root of the repo to the 'libretro_core_options.h' file
|
||||
|
||||
<path/to/libretro_core_options_intl.h file>
|
||||
-> replace with the full path from the root of the repo to the 'libretro_core_options_intl.h' file
|
||||
|
||||
|
||||
From the root of the repo run (using bash):
|
||||
python3 intl/core_opt_translation.py '<path/to/libretro_core_options.h directory>'
|
||||
|
||||
(If python3 doesn't work, try just python)
|
||||
|
||||
Push changes to repo. Once merged, request Crowdin integration.
|
||||
|
||||
|
||||
Crowdin integration:
|
||||
|
||||
On the project page, go to the Applications tab. Choose GitHub.
|
||||
There are two options: connecting a GitHub account, which has write/commit permissions to the repo
|
||||
or providing a GitHub token, which will unlock these permissions.
|
||||
|
||||
Then add a repository, a new interface opens. Pick the repository as well as the branch, which you want to sync.
|
||||
On the right, Crowdin will display the default name of the repository it will use for creating PRs.
|
||||
Below, set the sync schedule and then save. With that the synchronisation should be set up.
|
||||
If there are still problems, you might need to manually modify the configuration (double click on the branch in the lower frame).
|
||||
|
||||
Here's what the file paths should look like (the '/' at the start is very important!):
|
||||
|
||||
Source files path:
|
||||
/intl/_us/*.json
|
||||
|
||||
Translated files path:
|
||||
/intl/_%two_letters_code%/%original_file_name%
|
||||
|
||||
|
||||
Once Crowdin successfully creates the PR & it has been merged, the automatically created branch can be deleted on GitHub.
|
||||
|
|
@ -1 +0,0 @@
|
|||
__pycache__
|
|
@ -1,609 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""Core options text extractor
|
||||
|
||||
The purpose of this script is to set up & provide functions for automatic generation of 'libretro_core_options_intl.h'
|
||||
from 'libretro_core_options.h' using translations from Crowdin.
|
||||
|
||||
Both v1 and v2 structs are supported. It is, however, recommended to convert v1 files to v2 using the included
|
||||
'v1_to_v2_converter.py'.
|
||||
|
||||
Usage:
|
||||
python3 path/to/core_opt_translation.py "path/to/where/libretro_core_options.h & libretro_core_options_intl.h/are"
|
||||
|
||||
This script will:
|
||||
1.) create key words for & extract the texts from libretro_core_options.h & save them into intl/_us/core_options.h
|
||||
2.) do the same for any present translations in libretro_core_options_intl.h, saving those in their respective folder
|
||||
"""
|
||||
import core_option_regex as cor
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import urllib.request as req
|
||||
import shutil
|
||||
|
||||
# for uploading translations to Crowdin, the Crowdin 'language id' is required
|
||||
LANG_CODE_TO_ID = {'_ar': 'ar',
|
||||
'_ast': 'ast',
|
||||
'_chs': 'zh-CN',
|
||||
'_cht': 'zh-TW',
|
||||
'_cs': 'cs',
|
||||
'_cy': 'cy',
|
||||
'_da': 'da',
|
||||
'_de': 'de',
|
||||
'_el': 'el',
|
||||
'_eo': 'eo',
|
||||
'_es': 'es-ES',
|
||||
'_fa': 'fa',
|
||||
'_fi': 'fi',
|
||||
'_fr': 'fr',
|
||||
'_gl': 'gl',
|
||||
'_he': 'he',
|
||||
'_hu': 'hu',
|
||||
'_id': 'id',
|
||||
'_it': 'it',
|
||||
'_ja': 'ja',
|
||||
'_ko': 'ko',
|
||||
'_nl': 'nl',
|
||||
'_pl': 'pl',
|
||||
'_pt_br': 'pt-BR',
|
||||
'_pt_pt': 'pt-PT',
|
||||
'_ru': 'ru',
|
||||
'_sk': 'sk',
|
||||
'_sv': 'sv-SE',
|
||||
'_tr': 'tr',
|
||||
'_uk': 'uk',
|
||||
'_vn': 'vi'}
|
||||
LANG_CODE_TO_R_LANG = {'_ar': 'RETRO_LANGUAGE_ARABIC',
|
||||
'_ast': 'RETRO_LANGUAGE_ASTURIAN',
|
||||
'_chs': 'RETRO_LANGUAGE_CHINESE_SIMPLIFIED',
|
||||
'_cht': 'RETRO_LANGUAGE_CHINESE_TRADITIONAL',
|
||||
'_cs': 'RETRO_LANGUAGE_CZECH',
|
||||
'_cy': 'RETRO_LANGUAGE_WELSH',
|
||||
'_da': 'RETRO_LANGUAGE_DANISH',
|
||||
'_de': 'RETRO_LANGUAGE_GERMAN',
|
||||
'_el': 'RETRO_LANGUAGE_GREEK',
|
||||
'_eo': 'RETRO_LANGUAGE_ESPERANTO',
|
||||
'_es': 'RETRO_LANGUAGE_SPANISH',
|
||||
'_fa': 'RETRO_LANGUAGE_PERSIAN',
|
||||
'_fi': 'RETRO_LANGUAGE_FINNISH',
|
||||
'_fr': 'RETRO_LANGUAGE_FRENCH',
|
||||
'_gl': 'RETRO_LANGUAGE_GALICIAN',
|
||||
'_he': 'RETRO_LANGUAGE_HEBREW',
|
||||
'_hu': 'RETRO_LANGUAGE_HUNGARIAN',
|
||||
'_id': 'RETRO_LANGUAGE_INDONESIAN',
|
||||
'_it': 'RETRO_LANGUAGE_ITALIAN',
|
||||
'_ja': 'RETRO_LANGUAGE_JAPANESE',
|
||||
'_ko': 'RETRO_LANGUAGE_KOREAN',
|
||||
'_nl': 'RETRO_LANGUAGE_DUTCH',
|
||||
'_pl': 'RETRO_LANGUAGE_POLISH',
|
||||
'_pt_br': 'RETRO_LANGUAGE_PORTUGUESE_BRAZIL',
|
||||
'_pt_pt': 'RETRO_LANGUAGE_PORTUGUESE_PORTUGAL',
|
||||
'_ru': 'RETRO_LANGUAGE_RUSSIAN',
|
||||
'_sk': 'RETRO_LANGUAGE_SLOVAK',
|
||||
'_sv': 'RETRO_LANGUAGE_SWEDISH',
|
||||
'_tr': 'RETRO_LANGUAGE_TURKISH',
|
||||
'_uk': 'RETRO_LANGUAGE_UKRAINIAN',
|
||||
'_us': 'RETRO_LANGUAGE_ENGLISH',
|
||||
'_vn': 'RETRO_LANGUAGE_VIETNAMESE'}
|
||||
|
||||
# these are handled by RetroArch directly - no need to include them in core translations
|
||||
ON_OFFS = {'"enabled"', '"disabled"', '"true"', '"false"', '"on"', '"off"'}
|
||||
|
||||
|
||||
def remove_special_chars(text: str, char_set=0) -> str:
|
||||
"""Removes special characters from a text.
|
||||
|
||||
:param text: String to be cleaned.
|
||||
:param char_set: 0 -> remove all ASCII special chars except for '_' & 'space';
|
||||
1 -> remove invalid chars from file names
|
||||
:return: Clean text.
|
||||
"""
|
||||
command_chars = [chr(unicode) for unicode in tuple(range(0, 32)) + (127,)]
|
||||
special_chars = ([chr(unicode) for unicode in tuple(range(33, 48)) + tuple(range(58, 65)) + tuple(range(91, 95))
|
||||
+ (96,) + tuple(range(123, 127))],
|
||||
('\\', '/', ':', '*', '?', '"', '<', '>', '|'))
|
||||
res = text
|
||||
for cm in command_chars:
|
||||
res = res.replace(cm, '_')
|
||||
for sp in special_chars[char_set]:
|
||||
res = res.replace(sp, '_')
|
||||
while res.startswith('_'):
|
||||
res = res[1:]
|
||||
while res.endswith('_'):
|
||||
res = res[:-1]
|
||||
return res
|
||||
|
||||
|
||||
def clean_file_name(file_name: str) -> str:
|
||||
"""Removes characters which might make file_name inappropriate for files on some OS.
|
||||
|
||||
:param file_name: File name to be cleaned.
|
||||
:return: The clean file name.
|
||||
"""
|
||||
file_name = remove_special_chars(file_name, 1)
|
||||
file_name = re.sub(r'__+', '_', file_name.replace(' ', '_'))
|
||||
return file_name
|
||||
|
||||
|
||||
def get_struct_type_name(decl: str) -> tuple:
|
||||
""" Returns relevant parts of the struct declaration:
|
||||
type, name of the struct and the language appendix, if present.
|
||||
:param decl: The struct declaration matched by cor.p_type_name.
|
||||
:return: Tuple, e.g.: ('retro_core_option_definition', 'option_defs_us', '_us')
|
||||
"""
|
||||
struct_match = cor.p_type_name.search(decl)
|
||||
if struct_match:
|
||||
if struct_match.group(3):
|
||||
struct_type_name = struct_match.group(1, 2, 3)
|
||||
return struct_type_name
|
||||
elif struct_match.group(4):
|
||||
struct_type_name = struct_match.group(1, 2, 4)
|
||||
return struct_type_name
|
||||
else:
|
||||
struct_type_name = struct_match.group(1, 2)
|
||||
return struct_type_name
|
||||
else:
|
||||
raise ValueError(f'No or incomplete struct declaration: {decl}!\n'
|
||||
'Please make sure all structs are complete, including the type and name declaration.')
|
||||
|
||||
|
||||
def is_viable_non_dupe(text: str, comparison) -> bool:
|
||||
"""text must be longer than 2 ('""'), not 'NULL' and not in comparison.
|
||||
|
||||
:param text: String to be tested.
|
||||
:param comparison: Dictionary or set to search for text in.
|
||||
:return: bool
|
||||
"""
|
||||
return 2 < len(text) and text != 'NULL' and text not in comparison
|
||||
|
||||
|
||||
def is_viable_value(text: str) -> bool:
|
||||
"""text must be longer than 2 ('""'), not 'NULL' and text.lower() not in
|
||||
{'"enabled"', '"disabled"', '"true"', '"false"', '"on"', '"off"'}.
|
||||
|
||||
:param text: String to be tested.
|
||||
:return: bool
|
||||
"""
|
||||
return 2 < len(text) and text != 'NULL' and text.lower() not in ON_OFFS
|
||||
|
||||
|
||||
def create_non_dupe(base_name: str, opt_num: int, comparison) -> str:
|
||||
"""Makes sure base_name is not in comparison, and if it is it's renamed.
|
||||
|
||||
:param base_name: Name to check/make unique.
|
||||
:param opt_num: Number of the option base_name belongs to, used in making it unique.
|
||||
:param comparison: Dictionary or set to search for base_name in.
|
||||
:return: Unique name.
|
||||
"""
|
||||
h = base_name
|
||||
if h in comparison:
|
||||
n = 0
|
||||
h = h + '_O' + str(opt_num)
|
||||
h_end = len(h)
|
||||
while h in comparison:
|
||||
h = h[:h_end] + '_' + str(n)
|
||||
n += 1
|
||||
return h
|
||||
|
||||
|
||||
def get_texts(text: str) -> dict:
|
||||
"""Extracts the strings, which are to be translated/are the translations,
|
||||
from text and creates macro names for them.
|
||||
|
||||
:param text: The string to be parsed.
|
||||
:return: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
|
||||
"""
|
||||
# all structs: group(0) full struct, group(1) beginning, group(2) content
|
||||
structs = cor.p_struct.finditer(text)
|
||||
hash_n_string = {}
|
||||
just_string = {}
|
||||
for struct in structs:
|
||||
struct_declaration = struct.group(1)
|
||||
struct_type_name = get_struct_type_name(struct_declaration)
|
||||
if 3 > len(struct_type_name):
|
||||
lang = '_us'
|
||||
else:
|
||||
lang = struct_type_name[2]
|
||||
if lang not in just_string:
|
||||
hash_n_string[lang] = {}
|
||||
just_string[lang] = set()
|
||||
|
||||
is_v2 = False
|
||||
pre_name = ''
|
||||
p = cor.p_info
|
||||
if 'retro_core_option_v2_definition' == struct_type_name[0]:
|
||||
is_v2 = True
|
||||
elif 'retro_core_option_v2_category' == struct_type_name[0]:
|
||||
pre_name = 'CATEGORY_'
|
||||
p = cor.p_info_cat
|
||||
|
||||
struct_content = struct.group(2)
|
||||
# 0: full option; 1: key; 2: description; 3: additional info; 4: key/value pairs
|
||||
struct_options = cor.p_option.finditer(struct_content)
|
||||
for opt, option in enumerate(struct_options):
|
||||
# group 1: key
|
||||
if option.group(1):
|
||||
opt_name = pre_name + option.group(1)
|
||||
# no special chars allowed in key
|
||||
opt_name = remove_special_chars(opt_name).upper().replace(' ', '_')
|
||||
else:
|
||||
raise ValueError(f'No option name (key) found in struct {struct_type_name[1]} option {opt}!')
|
||||
|
||||
# group 2: description0
|
||||
if option.group(2):
|
||||
desc0 = option.group(2)
|
||||
if is_viable_non_dupe(desc0, just_string[lang]):
|
||||
just_string[lang].add(desc0)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL'), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = desc0
|
||||
else:
|
||||
raise ValueError(f'No label found in struct {struct_type_name[1]} option {option.group(1)}!')
|
||||
|
||||
# group 3: desc1, info0, info1, category
|
||||
if option.group(3):
|
||||
infos = option.group(3)
|
||||
option_info = p.finditer(infos)
|
||||
if is_v2:
|
||||
desc1 = next(option_info).group(1)
|
||||
if is_viable_non_dupe(desc1, just_string[lang]):
|
||||
just_string[lang].add(desc1)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_LABEL_CAT'), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = desc1
|
||||
last = None
|
||||
m_h = None
|
||||
for j, info in enumerate(option_info):
|
||||
last = info.group(1)
|
||||
if is_viable_non_dupe(last, just_string[lang]):
|
||||
just_string[lang].add(last)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
|
||||
hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = last
|
||||
if last in just_string[lang]: # category key should not be translated
|
||||
hash_n_string[lang].pop(m_h)
|
||||
just_string[lang].remove(last)
|
||||
else:
|
||||
for j, info in enumerate(option_info):
|
||||
gr1 = info.group(1)
|
||||
if is_viable_non_dupe(gr1, just_string[lang]):
|
||||
just_string[lang].add(gr1)
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f'{opt_name}_INFO_{j}'), opt,
|
||||
hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = gr1
|
||||
else:
|
||||
raise ValueError(f'Too few arguments in struct {struct_type_name[1]} option {option.group(1)}!')
|
||||
|
||||
# group 4:
|
||||
if option.group(4):
|
||||
for j, kv_set in enumerate(cor.p_key_value.finditer(option.group(4))):
|
||||
set_key, set_value = kv_set.group(1, 2)
|
||||
if not is_viable_value(set_value):
|
||||
if not is_viable_value(set_key):
|
||||
continue
|
||||
set_value = set_key
|
||||
# re.fullmatch(r'(?:[+-][0-9]+)+', value[1:-1])
|
||||
if set_value not in just_string[lang] and not re.sub(r'[+-]', '', set_value[1:-1]).isdigit():
|
||||
clean_key = set_key.encode('ascii', errors='ignore').decode('unicode-escape')[1:-1]
|
||||
clean_key = remove_special_chars(clean_key).upper().replace(' ', '_')
|
||||
m_h = create_non_dupe(re.sub(r'__+', '_', f"OPTION_VAL_{clean_key}"), opt, hash_n_string[lang])
|
||||
hash_n_string[lang][m_h] = set_value
|
||||
just_string[lang].add(set_value)
|
||||
return hash_n_string
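# Illustrative sketch (hypothetical values, not part of the original script): for a
# core option with the key "mycore_region", the dictionary returned above would look
# roughly like
#
#   { '_us': { 'MYCORE_REGION_LABEL': '"Console Region"',
#              'OPTION_VAL_NTSC': '"NTSC"',
#              'OPTION_VAL_PAL': '"PAL"' } }
#
# Macro names are derived from the option key ('_LABEL', '_INFO_<n>', 'OPTION_VAL_*',
# with a 'CATEGORY_' prefix for category structs), and the stored strings keep their
# surrounding double quotes because they are copied verbatim from the C source.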
|
||||
|
||||
|
||||
def create_msg_hash(intl_dir_path: str, core_name: str, keyword_string_dict: dict) -> dict:
|
||||
"""Creates '<core_name>.h' files in 'intl/_<lang>/' containing the macro name & string combinations.
|
||||
|
||||
:param intl_dir_path: Path to the intl directory.
|
||||
:param core_name: Name of the core, used for naming the files.
|
||||
:param keyword_string_dict: Dictionary of the form { '_<lang>': { 'macro': 'string', ... }, ... }.
|
||||
:return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.h)', ... }.
|
||||
"""
|
||||
files = {}
|
||||
for localisation in keyword_string_dict:
|
||||
path = os.path.join(intl_dir_path, localisation) # intl/_<lang>
|
||||
files[localisation] = os.path.join(path, core_name + '.h') # intl/_<lang>/<core_name>.h
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
with open(files[localisation], 'w', encoding='utf-8') as crowdin_file:
|
||||
out_text = ''
|
||||
for keyword in keyword_string_dict[localisation]:
|
||||
out_text = f'{out_text}{keyword} {keyword_string_dict[localisation][keyword]}\n'
|
||||
crowdin_file.write(out_text)
|
||||
return files
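# Sketch of the files written above (hypothetical entries, illustration only):
# 'intl/_us/core_options.h' would then contain one 'MACRO "string"' pair per line, e.g.
#
#   MYCORE_REGION_LABEL "Console Region"
#   OPTION_VAL_NTSC "NTSC"
#
# This intermediate format is what h2json() below parses back into key/value pairs.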
|
||||
|
||||
|
||||
def h2json(file_paths: dict) -> dict:
|
||||
"""Converts .h files pointed to by file_paths into .jsons.
|
||||
|
||||
:param file_paths: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.h)', ... }.
|
||||
:return: Dictionary of the form { '_<lang>': 'path/to/file (./intl/_<lang>/<core_name>.json)', ... }.
|
||||
"""
|
||||
jsons = {}
|
||||
for file_lang in file_paths:
|
||||
jsons[file_lang] = file_paths[file_lang][:-2] + '.json'
|
||||
|
||||
p = cor.p_masked
|
||||
|
||||
with open(file_paths[file_lang], 'r+', encoding='utf-8') as h_file:
|
||||
text = h_file.read()
|
||||
result = p.finditer(text)
|
||||
messages = {}
|
||||
for msg in result:
|
||||
key, val = msg.group(1, 2)
|
||||
if key not in messages:
|
||||
if key and val:
|
||||
# unescape & remove "\n"
|
||||
messages[key] = re.sub(r'"\s*(?:(?:/\*(?:.|[\r\n])*?\*/|//.*[\r\n]+)\s*)*"',
|
||||
'\\\n', val[1:-1].replace('\\\"', '"'))
|
||||
else:
|
||||
print(f"DUPLICATE KEY in {file_paths[file_lang]}: {key}")
|
||||
with open(jsons[file_lang], 'w', encoding='utf-8') as json_file:
|
||||
json.dump(messages, json_file, indent=2)
|
||||
|
||||
return jsons
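# Sketch of the conversion performed above (hypothetical data): a line such as
#
#   MYCORE_REGION_LABEL "Console Region"
#
# in 'intl/_us/core_options.h' ends up as the JSON entry
#
#   "MYCORE_REGION_LABEL": "Console Region"
#
# in 'intl/_us/core_options.json' (surrounding quotes stripped, escaped quotes
# unescaped, adjacent C string literals merged).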
|
||||
|
||||
|
||||
def json2h(intl_dir_path: str, json_file_path: str, core_name: str) -> None:
|
||||
"""Converts .json file in json_file_path into an .h ready to be included in C code.
|
||||
|
||||
:param intl_dir_path: Path to the intl directory.
|
||||
:param json_file_path: Base path of translation .json.
|
||||
:param core_name: Name of the core, required for naming the files.
|
||||
:return: None
|
||||
"""
|
||||
h_filename = os.path.join(json_file_path, core_name + '.h')
|
||||
json_filename = os.path.join(json_file_path, core_name + '.json')
|
||||
file_lang = os.path.basename(json_file_path).upper()
|
||||
|
||||
if os.path.basename(json_file_path).lower() == '_us':
|
||||
print(' skipped')
|
||||
return
|
||||
|
||||
p = cor.p_masked
|
||||
|
||||
def update(s_messages, s_template, s_source_messages):
|
||||
translation = ''
|
||||
template_messages = p.finditer(s_template)
|
||||
for tp_msg in template_messages:
|
||||
old_key = tp_msg.group(1)
|
||||
if old_key in s_messages and s_messages[old_key] != s_source_messages[old_key]:
|
||||
tl_msg_val = s_messages[old_key]
|
||||
tl_msg_val = tl_msg_val.replace('"', '\\\"').replace('\n', '') # escape
|
||||
translation = ''.join((translation, '#define ', old_key, file_lang, f' "{tl_msg_val}"\n'))
|
||||
|
||||
else: # Remove English duplicates and non-translatable strings
|
||||
translation = ''.join((translation, '#define ', old_key, file_lang, ' NULL\n'))
|
||||
return translation
|
||||
|
||||
with open(os.path.join(intl_dir_path, '_us', core_name + '.h'), 'r', encoding='utf-8') as template_file:
|
||||
template = template_file.read()
|
||||
with open(os.path.join(intl_dir_path, '_us', core_name + '.json'), 'r+', encoding='utf-8') as source_json_file:
|
||||
source_messages = json.load(source_json_file)
|
||||
with open(json_filename, 'r+', encoding='utf-8') as json_file:
|
||||
messages = json.load(json_file)
|
||||
new_translation = update(messages, template, source_messages)
|
||||
with open(h_filename, 'w', encoding='utf-8') as h_file:
|
||||
h_file.seek(0)
|
||||
h_file.write(new_translation)
|
||||
h_file.truncate()
|
||||
return
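# Sketch of the header generated above (hypothetical French folder '_fr', names made up):
#
#   #define MYCORE_REGION_LABEL_FR "Région de la console"
#   #define OPTION_VAL_NTSC_FR NULL
#
# Strings that are missing from the translation or identical to the English source are
# emitted as NULL, so the frontend can fall back to the English text.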
|
||||
|
||||
|
||||
def get_crowdin_client(dir_path: str) -> str:
|
||||
"""Makes sure the Crowdin CLI client is present. If it isn't, it is fetched & extracted.
|
||||
|
||||
:param dir_path: Path to the directory in which 'crowdin-cli.jar' is expected (and downloaded to, if missing).
:return: The path to 'crowdin-cli.jar'.
|
||||
"""
|
||||
jar_name = 'crowdin-cli.jar'
|
||||
jar_path = os.path.join(dir_path, jar_name)
|
||||
|
||||
if not os.path.isfile(jar_path):
|
||||
print('Downloading crowdin-cli.jar')
|
||||
crowdin_cli_file = os.path.join(dir_path, 'crowdin-cli.zip')
|
||||
crowdin_cli_url = 'https://downloads.crowdin.com/cli/v3/crowdin-cli.zip'
|
||||
req.urlretrieve(crowdin_cli_url, crowdin_cli_file)
|
||||
import zipfile
|
||||
with zipfile.ZipFile(crowdin_cli_file, 'r') as zip_ref:
|
||||
jar_dir = zip_ref.namelist()[0]
|
||||
for file in zip_ref.namelist():
|
||||
if file.endswith(jar_name):
|
||||
jar_file = file
|
||||
break
|
||||
zip_ref.extract(jar_file)
|
||||
os.rename(jar_file, jar_path)
|
||||
os.remove(crowdin_cli_file)
|
||||
shutil.rmtree(jar_dir)
|
||||
return jar_path
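# Hypothetical usage sketch (illustration only, not part of the original module): the
# jar returned above would typically be driven via subprocess, assuming a suitable
# 'crowdin.yaml' configuration exists, e.g.:
#
#   import subprocess
#   jar = get_crowdin_client(DIR_PATH)
#   subprocess.run(['java', '-jar', jar, 'upload', 'sources'], check=True)
#
# The exact sub-commands depend on the Crowdin CLI version and project configuration.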
|
||||
|
||||
|
||||
def create_intl_file(intl_file_path: str, intl_dir_path: str, text: str, core_name: str, file_path: str) -> None:
|
||||
"""Creates 'libretro_core_options_intl.h' from Crowdin translations.
|
||||
|
||||
:param intl_file_path: Path to 'libretro_core_options_intl.h'
|
||||
:param intl_dir_path: Path to the intl directory.
|
||||
:param text: Content of the 'libretro_core_options.h' being translated.
|
||||
:param core_name: Name of the core. Needed to identify the files to pull the translations from.
|
||||
:param file_path: Path to the '<core name>_us.h' file, containing the original English texts.
|
||||
:return: None
|
||||
"""
|
||||
msg_dict = {}
|
||||
lang_up = ''
|
||||
|
||||
def replace_pair(pair_match):
|
||||
"""Replaces a key-value-pair of an option with the macros corresponding to the language.
|
||||
|
||||
:param pair_match: The re match object representing the key-value-pair block.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
offset = pair_match.start(0)
|
||||
if pair_match.group(1): # key
|
||||
if pair_match.group(2) in msg_dict: # value
|
||||
val = msg_dict[pair_match.group(2)] + lang_up
|
||||
elif pair_match.group(1) in msg_dict: # use key if value not viable (e.g. NULL)
|
||||
val = msg_dict[pair_match.group(1)] + lang_up
|
||||
else:
|
||||
return pair_match.group(0)
|
||||
else:
|
||||
return pair_match.group(0)
|
||||
res = pair_match.group(0)[:pair_match.start(2) - offset] + val \
|
||||
+ pair_match.group(0)[pair_match.end(2) - offset:]
|
||||
return res
|
||||
|
||||
def replace_info(info_match):
|
||||
"""Replaces the 'additional strings' of an option with the macros corresponding to the language.
|
||||
|
||||
:param info_match: The re match object representing the 'additional strings' block.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
offset = info_match.start(0)
|
||||
if info_match.group(1) in msg_dict:
|
||||
res = info_match.group(0)[:info_match.start(1) - offset] + \
|
||||
msg_dict[info_match.group(1)] + lang_up + \
|
||||
info_match.group(0)[info_match.end(1) - offset:]
|
||||
return res
|
||||
else:
|
||||
return info_match.group(0)
|
||||
|
||||
def replace_option(option_match):
|
||||
"""Replaces strings within an option
|
||||
'{ "opt_key", "label", "additional strings", ..., { {"key", "value"}, ... }, ... }'
|
||||
within a struct with the macros corresponding to the language:
|
||||
'{ "opt_key", MACRO_LABEL, MACRO_STRINGS, ..., { {"key", MACRO_VALUE}, ... }, ... }'
|
||||
|
||||
:param option_match: The re match object representing the option.
|
||||
:return: Replacement string.
|
||||
"""
|
||||
# label
|
||||
offset = option_match.start(0)
|
||||
if option_match.group(2):
|
||||
res = option_match.group(0)[:option_match.start(2) - offset] + msg_dict[option_match.group(2)] + lang_up
|
||||
else:
|
||||
return option_match.group(0)
|
||||
# additional block
|
||||
if option_match.group(3):
|
||||
res = res + option_match.group(0)[option_match.end(2) - offset:option_match.start(3) - offset]
|
||||
new_info = p.sub(replace_info, option_match.group(3))
|
||||
res = res + new_info
|
||||
else:
|
||||
return res + option_match.group(0)[option_match.end(2) - offset:]
|
||||
# key-value-pairs
|
||||
if option_match.group(4):
|
||||
res = res + option_match.group(0)[option_match.end(3) - offset:option_match.start(4) - offset]
|
||||
new_pairs = cor.p_key_value.sub(replace_pair, option_match.group(4))
|
||||
res = res + new_pairs + option_match.group(0)[option_match.end(4) - offset:]
|
||||
else:
|
||||
res = res + option_match.group(0)[option_match.end(3) - offset:]
|
||||
|
||||
return res
|
||||
|
||||
with open(file_path, 'r+', encoding='utf-8') as template: # intl/_us/<core_name>.h
|
||||
masked_msgs = cor.p_masked.finditer(template.read())
|
||||
for msg in masked_msgs:
|
||||
msg_dict[msg.group(2)] = msg.group(1)
|
||||
|
||||
with open(intl_file_path, 'r', encoding='utf-8') as intl: # libretro_core_options_intl.h
|
||||
in_text = intl.read()
|
||||
intl_start = re.search(re.escape('/*\n'
|
||||
' ********************************\n'
|
||||
' * Core Option Definitions\n'
|
||||
' ********************************\n'
|
||||
'*/\n'), in_text)
|
||||
if intl_start:
|
||||
out_txt = in_text[:intl_start.end(0)]
|
||||
else:
|
||||
intl_start = re.search(re.escape('#ifdef __cplusplus\n'
|
||||
'extern "C" {\n'
|
||||
'#endif\n'), in_text)
|
||||
out_txt = in_text[:intl_start.end(0)]
|
||||
|
||||
for folder in os.listdir(intl_dir_path): # intl/_*
|
||||
if os.path.isdir(os.path.join(intl_dir_path, folder)) and folder.startswith('_')\
|
||||
and folder != '_us' and folder != '__pycache__':
|
||||
translation_path = os.path.join(intl_dir_path, folder, core_name + '.h') # intl/_<lang>/<core_name>.h
|
||||
# all structs: group(0) full struct, group(1) beginning, group(2) content
|
||||
struct_groups = cor.p_struct.finditer(text)
|
||||
lang_up = folder.upper()
|
||||
lang_low = folder.lower()
|
||||
out_txt = out_txt + f'/* {LANG_CODE_TO_R_LANG[lang_low]} */\n\n' # /* RETRO_LANGUAGE_NAME */
|
||||
with open(translation_path, 'r+', encoding='utf-8') as f_in: # <core name>.h
|
||||
out_txt = out_txt + f_in.read() + '\n'
|
||||
for construct in struct_groups:
|
||||
declaration = construct.group(1)
|
||||
struct_type_name = get_struct_type_name(declaration)
|
||||
if 3 > len(struct_type_name): # no language specifier
|
||||
new_decl = re.sub(re.escape(struct_type_name[1]), struct_type_name[1] + lang_low, declaration)
|
||||
else:
|
||||
new_decl = re.sub(re.escape(struct_type_name[2]), lang_low, declaration)
|
||||
if '_us' != struct_type_name[2]:
|
||||
continue
|
||||
|
||||
p = cor.p_info
|
||||
if 'retro_core_option_v2_category' == struct_type_name[0]:
|
||||
p = cor.p_info_cat
|
||||
offset_construct = construct.start(0)
|
||||
start = construct.end(1) - offset_construct
|
||||
end = construct.start(2) - offset_construct
|
||||
out_txt = out_txt + new_decl + construct.group(0)[start:end]
|
||||
|
||||
content = construct.group(2)
|
||||
new_content = cor.p_option.sub(replace_option, content)
|
||||
|
||||
start = construct.end(2) - offset_construct
|
||||
out_txt = out_txt + new_content + construct.group(0)[start:] + '\n'
|
||||
|
||||
if 'retro_core_option_v2_definition' == struct_type_name[0]:
|
||||
out_txt = out_txt + f'struct retro_core_options_v2 options{lang_low}' \
|
||||
' = {\n' \
|
||||
f' option_cats{lang_low},\n' \
|
||||
f' option_defs{lang_low}\n' \
|
||||
'};\n\n'
|
||||
# shutil.rmtree(JOINER.join((intl_dir_path, folder)))
|
||||
|
||||
with open(intl_file_path, 'w', encoding='utf-8') as intl:
|
||||
intl.write(out_txt + '\n#ifdef __cplusplus\n'
|
||||
'}\n#endif\n'
|
||||
'\n#endif')
|
||||
return
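# Sketch of the file assembled above (hypothetical single language '_fr', names made up):
#
#   /* RETRO_LANGUAGE_FRENCH */
#
#   #define MYCORE_REGION_LABEL_FR "Région de la console"
#   ...
#   struct retro_core_option_v2_definition option_defs_fr[] = { ... };
#   struct retro_core_options_v2 options_fr = {
#      option_cats_fr,
#      option_defs_fr
#   };
#
# For every language folder the per-language macros come first, followed by copies of
# the '_us' structs with their string literals replaced by those macros.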
|
||||
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
#
|
||||
try:
|
||||
if os.path.isfile(sys.argv[1]):
|
||||
_temp = os.path.dirname(sys.argv[1])
|
||||
else:
|
||||
_temp = sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
H_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
INTL_FILE_PATH = os.path.join(TARGET_DIR_PATH, 'libretro_core_options_intl.h')
|
||||
|
||||
_core_name = 'core_options'
|
||||
try:
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = get_texts(_main_text)
|
||||
_files = create_msg_hash(DIR_PATH, _core_name, _hash_n_str)
|
||||
_source_jsons = h2json(_files)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
print('Getting texts from libretro_core_options_intl.h')
|
||||
with open(INTL_FILE_PATH, 'r+', encoding='utf-8') as _intl_file:
|
||||
_intl_text = _intl_file.read()
|
||||
_hash_n_str_intl = get_texts(_intl_text)
|
||||
_intl_files = create_msg_hash(DIR_PATH, _core_name, _hash_n_str_intl)
|
||||
_intl_jsons = h2json(_intl_files)
|
||||
|
||||
print('\nAll done!')
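# Hypothetical invocation from a core checkout (illustration only; the file is presumably
# saved as intl/core_opt_translation.py, matching the import used by the wrapper scripts):
#
#   python3 intl/core_opt_translation.py "/path/to/libretro_core_options.h"
#
# The argument may be the options header itself or its directory; without an argument the
# script falls back to the parent directory of the directory it lives in.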
|
|
@ -1,43 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import core_opt_translation as t
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
if t.os.path.isfile(t.sys.argv[1]):
|
||||
_temp = t.os.path.dirname(t.sys.argv[1])
|
||||
else:
|
||||
_temp = t.sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = t.os.path.dirname(t.os.path.dirname(t.os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
H_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
INTL_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options_intl.h')
|
||||
|
||||
_core_name = 'core_options'
|
||||
_core_name = t.clean_file_name(_core_name)
|
||||
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = t.get_texts(_main_text)
|
||||
_files = t.create_msg_hash(DIR_PATH, _core_name, _hash_n_str)
|
||||
|
||||
print('Converting translations *.json to *.h:')
|
||||
for _folder in t.os.listdir(DIR_PATH):
|
||||
if t.os.path.isdir(t.os.path.join(DIR_PATH, _folder))\
|
||||
and _folder.startswith('_')\
|
||||
and _folder != '__pycache__':
|
||||
print(_folder)
|
||||
t.json2h(DIR_PATH, t.os.path.join(DIR_PATH, _folder), _core_name)
|
||||
|
||||
print('Constructing libretro_core_options_intl.h')
|
||||
t.create_intl_file(INTL_FILE_PATH, DIR_PATH, _main_text, _core_name, _files['_us'])
|
||||
|
||||
print('\nAll done!')
|
|
@ -1,34 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import core_opt_translation as t
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
_core_name = 'core_options'
|
||||
|
||||
try:
|
||||
if t.os.path.isfile(t.sys.argv[1]):
|
||||
_temp = t.os.path.dirname(t.sys.argv[1])
|
||||
else:
|
||||
_temp = t.sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
TARGET_DIR_PATH = _temp
|
||||
except IndexError:
|
||||
TARGET_DIR_PATH = t.os.path.dirname(t.os.path.dirname(t.os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + TARGET_DIR_PATH)
|
||||
|
||||
DIR_PATH = t.os.path.dirname(t.os.path.realpath(__file__))
|
||||
H_FILE_PATH = t.os.path.join(TARGET_DIR_PATH, 'libretro_core_options.h')
|
||||
|
||||
_core_name = t.clean_file_name(_core_name)
|
||||
|
||||
print('Getting texts from libretro_core_options.h')
|
||||
with open(H_FILE_PATH, 'r+', encoding='utf-8') as _h_file:
|
||||
_main_text = _h_file.read()
|
||||
_hash_n_str = t.get_texts(_main_text)
|
||||
_files = t.create_msg_hash(DIR_PATH, _core_name, _hash_n_str)
|
||||
|
||||
_source_jsons = t.h2json(_files)
|
||||
|
||||
print('\nAll done!')
|
|
@ -1,459 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""Core options v1 to v2 converter
|
||||
|
||||
Run this script as follows to convert 'libretro_core_options.h' & 'libretro_core_options_intl.h' to v2:
|
||||
python3 "/path/to/v1_to_v2_converter.py" "/path/to/where/libretro_core_options.h & Libretro_coreoptions_intl.h/are"
|
||||
|
||||
The original files will be preserved as *.v1
|
||||
"""
|
||||
import core_option_regex as cor
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def create_v2_code_file(struct_text, file_name):
|
||||
def replace_option(option_match):
|
||||
_offset = option_match.start(0)
|
||||
|
||||
if option_match.group(3):
|
||||
res = option_match.group(0)[:option_match.end(2) - _offset] + ',\n NULL' + \
|
||||
option_match.group(0)[option_match.end(2) - _offset:option_match.end(3) - _offset] + \
|
||||
'NULL,\n NULL,\n ' + option_match.group(0)[option_match.end(3) - _offset:]
|
||||
else:
|
||||
return option_match.group(0)
|
||||
|
||||
return res
|
||||
|
||||
comment_v1 = '/*\n' \
|
||||
' ********************************\n' \
|
||||
' * VERSION: 1.3\n' \
|
||||
' ********************************\n' \
|
||||
' *\n' \
|
||||
' * - 1.3: Move translations to libretro_core_options_intl.h\n' \
|
||||
' * - libretro_core_options_intl.h includes BOM and utf-8\n' \
|
||||
' * fix for MSVC 2010-2013\n' \
|
||||
' * - Added HAVE_NO_LANGEXTRA flag to disable translations\n' \
|
||||
' * on platforms/compilers without BOM support\n' \
|
||||
' * - 1.2: Use core options v1 interface when\n' \
|
||||
' * RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION is >= 1\n' \
|
||||
' * (previously required RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION == 1)\n' \
|
||||
' * - 1.1: Support generation of core options v0 retro_core_option_value\n' \
|
||||
' * arrays containing options with a single value\n' \
|
||||
' * - 1.0: First commit\n' \
|
||||
'*/\n'
|
||||
|
||||
comment_v2 = '/*\n' \
|
||||
' ********************************\n' \
|
||||
' * VERSION: 2.0\n' \
|
||||
' ********************************\n' \
|
||||
' *\n' \
|
||||
' * - 2.0: Add support for core options v2 interface\n' \
|
||||
' * - 1.3: Move translations to libretro_core_options_intl.h\n' \
|
||||
' * - libretro_core_options_intl.h includes BOM and utf-8\n' \
|
||||
' * fix for MSVC 2010-2013\n' \
|
||||
' * - Added HAVE_NO_LANGEXTRA flag to disable translations\n' \
|
||||
' * on platforms/compilers without BOM support\n' \
|
||||
' * - 1.2: Use core options v1 interface when\n' \
|
||||
' * RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION is >= 1\n' \
|
||||
' * (previously required RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION == 1)\n' \
|
||||
' * - 1.1: Support generation of core options v0 retro_core_option_value\n' \
|
||||
' * arrays containing options with a single value\n' \
|
||||
' * - 1.0: First commit\n' \
|
||||
'*/\n'
|
||||
|
||||
p_intl = cor.p_intl
|
||||
p_set = cor.p_set
|
||||
new_set = 'static INLINE void libretro_set_core_options(retro_environment_t environ_cb,\n' \
|
||||
' bool *categories_supported)\n' \
|
||||
'{\n' \
|
||||
' unsigned version = 0;\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' unsigned language = 0;\n' \
|
||||
'#endif\n' \
|
||||
'\n' \
|
||||
' if (!environ_cb || !categories_supported)\n' \
|
||||
' return;\n' \
|
||||
'\n' \
|
||||
' *categories_supported = false;\n' \
|
||||
'\n' \
|
||||
' if (!environ_cb(RETRO_ENVIRONMENT_GET_CORE_OPTIONS_VERSION, &version))\n' \
|
||||
' version = 0;\n' \
|
||||
'\n' \
|
||||
' if (version >= 2)\n' \
|
||||
' {\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' struct retro_core_options_v2_intl core_options_intl;\n' \
|
||||
'\n' \
|
||||
' core_options_intl.us = &options_us;\n' \
|
||||
' core_options_intl.local = NULL;\n' \
|
||||
'\n' \
|
||||
' if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&\n' \
|
||||
' (language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH))\n' \
|
||||
' core_options_intl.local = options_intl[language];\n' \
|
||||
'\n' \
|
||||
' *categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2_INTL,\n' \
|
||||
' &core_options_intl);\n' \
|
||||
'#else\n' \
|
||||
' *categories_supported = environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_V2,\n' \
|
||||
' &options_us);\n' \
|
||||
'#endif\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' {\n' \
|
||||
' size_t i, j;\n' \
|
||||
' size_t option_index = 0;\n' \
|
||||
' size_t num_options = 0;\n' \
|
||||
' struct retro_core_option_definition\n' \
|
||||
' *option_v1_defs_us = NULL;\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' size_t num_options_intl = 0;\n' \
|
||||
' struct retro_core_option_v2_definition\n' \
|
||||
' *option_defs_intl = NULL;\n' \
|
||||
' struct retro_core_option_definition\n' \
|
||||
' *option_v1_defs_intl = NULL;\n' \
|
||||
' struct retro_core_options_intl\n' \
|
||||
' core_options_v1_intl;\n' \
|
||||
'#endif\n' \
|
||||
' struct retro_variable *variables = NULL;\n' \
|
||||
' char **values_buf = NULL;\n' \
|
||||
'\n' \
|
||||
' /* Determine total number of options */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (option_defs_us[num_options].key)\n' \
|
||||
' num_options++;\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' if (version >= 1)\n' \
|
||||
' {\n' \
|
||||
' /* Allocate US array */\n' \
|
||||
' option_v1_defs_us = (struct retro_core_option_definition *)\n' \
|
||||
' calloc(num_options + 1, sizeof(struct retro_core_option_definition));\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_us array */\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' struct retro_core_option_v2_definition *option_def_us = &option_defs_us[i];\n' \
|
||||
' struct retro_core_option_value *option_values = option_def_us->values;\n' \
|
||||
' struct retro_core_option_definition *option_v1_def_us = &option_v1_defs_us[i];\n' \
|
||||
' struct retro_core_option_value *option_v1_values = option_v1_def_us->values;\n' \
|
||||
'\n' \
|
||||
' option_v1_def_us->key = option_def_us->key;\n' \
|
||||
' option_v1_def_us->desc = option_def_us->desc;\n' \
|
||||
' option_v1_def_us->info = option_def_us->info;\n' \
|
||||
' option_v1_def_us->default_value = option_def_us->default_value;\n' \
|
||||
'\n' \
|
||||
' /* Values must be copied individually... */\n' \
|
||||
' while (option_values->value)\n' \
|
||||
' {\n' \
|
||||
' option_v1_values->value = option_values->value;\n' \
|
||||
' option_v1_values->label = option_values->label;\n' \
|
||||
'\n' \
|
||||
' option_values++;\n' \
|
||||
' option_v1_values++;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' if (environ_cb(RETRO_ENVIRONMENT_GET_LANGUAGE, &language) &&\n' \
|
||||
' (language < RETRO_LANGUAGE_LAST) && (language != RETRO_LANGUAGE_ENGLISH) &&\n' \
|
||||
' options_intl[language])\n' \
|
||||
' option_defs_intl = options_intl[language]->definitions;\n' \
|
||||
'\n' \
|
||||
' if (option_defs_intl)\n' \
|
||||
' {\n' \
|
||||
' /* Determine number of intl options */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (option_defs_intl[num_options_intl].key)\n' \
|
||||
' num_options_intl++;\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Allocate intl array */\n' \
|
||||
' option_v1_defs_intl = (struct retro_core_option_definition *)\n' \
|
||||
' calloc(num_options_intl + 1, sizeof(struct retro_core_option_definition));\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_intl array */\n' \
|
||||
' for (i = 0; i < num_options_intl; i++)\n' \
|
||||
' {\n' \
|
||||
' struct retro_core_option_v2_definition *option_def_intl = &option_defs_intl[i];\n' \
|
||||
' struct retro_core_option_value *option_values = option_def_intl->values;\n' \
|
||||
' struct retro_core_option_definition *option_v1_def_intl = &option_v1_defs_intl[i];\n' \
|
||||
' struct retro_core_option_value *option_v1_values = option_v1_def_intl->values;\n' \
|
||||
'\n' \
|
||||
' option_v1_def_intl->key = option_def_intl->key;\n' \
|
||||
' option_v1_def_intl->desc = option_def_intl->desc;\n' \
|
||||
' option_v1_def_intl->info = option_def_intl->info;\n' \
|
||||
' option_v1_def_intl->default_value = option_def_intl->default_value;\n' \
|
||||
'\n' \
|
||||
' /* Values must be copied individually... */\n' \
|
||||
' while (option_values->value)\n' \
|
||||
' {\n' \
|
||||
' option_v1_values->value = option_values->value;\n' \
|
||||
' option_v1_values->label = option_values->label;\n' \
|
||||
'\n' \
|
||||
' option_values++;\n' \
|
||||
' option_v1_values++;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' core_options_v1_intl.us = option_v1_defs_us;\n' \
|
||||
' core_options_v1_intl.local = option_v1_defs_intl;\n' \
|
||||
'\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS_INTL, &core_options_v1_intl);\n' \
|
||||
'#else\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_CORE_OPTIONS, option_v1_defs_us);\n' \
|
||||
'#endif\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' {\n' \
|
||||
' /* Allocate arrays */\n' \
|
||||
' variables = (struct retro_variable *)calloc(num_options + 1,\n' \
|
||||
' sizeof(struct retro_variable));\n' \
|
||||
' values_buf = (char **)calloc(num_options, sizeof(char *));\n' \
|
||||
'\n' \
|
||||
' if (!variables || !values_buf)\n' \
|
||||
' goto error;\n' \
|
||||
'\n' \
|
||||
' /* Copy parameters from option_defs_us array */\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' const char *key = option_defs_us[i].key;\n' \
|
||||
' const char *desc = option_defs_us[i].desc;\n' \
|
||||
' const char *default_value = option_defs_us[i].default_value;\n' \
|
||||
' struct retro_core_option_value *values = option_defs_us[i].values;\n' \
|
||||
' size_t buf_len = 3;\n' \
|
||||
' size_t default_index = 0;\n' \
|
||||
'\n' \
|
||||
' values_buf[i] = NULL;\n' \
|
||||
'\n' \
|
||||
' if (desc)\n' \
|
||||
' {\n' \
|
||||
' size_t num_values = 0;\n' \
|
||||
'\n' \
|
||||
' /* Determine number of values */\n' \
|
||||
' while (true)\n' \
|
||||
' {\n' \
|
||||
' if (values[num_values].value)\n' \
|
||||
' {\n' \
|
||||
' /* Check if this is the default value */\n' \
|
||||
' if (default_value)\n' \
|
||||
' if (strcmp(values[num_values].value, default_value) == 0)\n' \
|
||||
' default_index = num_values;\n' \
|
||||
'\n' \
|
||||
' buf_len += strlen(values[num_values].value);\n' \
|
||||
' num_values++;\n' \
|
||||
' }\n' \
|
||||
' else\n' \
|
||||
' break;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Build values string */\n' \
|
||||
' if (num_values > 0)\n' \
|
||||
' {\n' \
|
||||
' buf_len += num_values - 1;\n' \
|
||||
' buf_len += strlen(desc);\n' \
|
||||
'\n' \
|
||||
' values_buf[i] = (char *)calloc(buf_len, sizeof(char));\n' \
|
||||
' if (!values_buf[i])\n' \
|
||||
' goto error;\n' \
|
||||
'\n' \
|
||||
' strcpy(values_buf[i], desc);\n' \
|
||||
' strcat(values_buf[i], "; ");\n' \
|
||||
'\n' \
|
||||
' /* Default value goes first */\n' \
|
||||
' strcat(values_buf[i], values[default_index].value);\n' \
|
||||
'\n' \
|
||||
' /* Add remaining values */\n' \
|
||||
' for (j = 0; j < num_values; j++)\n' \
|
||||
' {\n' \
|
||||
' if (j != default_index)\n' \
|
||||
' {\n' \
|
||||
' strcat(values_buf[i], "|");\n' \
|
||||
' strcat(values_buf[i], values[j].value);\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' variables[option_index].key = key;\n' \
|
||||
' variables[option_index].value = values_buf[i];\n' \
|
||||
' option_index++;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' /* Set variables */\n' \
|
||||
' environ_cb(RETRO_ENVIRONMENT_SET_VARIABLES, variables);\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'error:\n' \
|
||||
' /* Clean up */\n' \
|
||||
'\n' \
|
||||
' if (option_v1_defs_us)\n' \
|
||||
' {\n' \
|
||||
' free(option_v1_defs_us);\n' \
|
||||
' option_v1_defs_us = NULL;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
'#ifndef HAVE_NO_LANGEXTRA\n' \
|
||||
' if (option_v1_defs_intl)\n' \
|
||||
' {\n' \
|
||||
' free(option_v1_defs_intl);\n' \
|
||||
' option_v1_defs_intl = NULL;\n' \
|
||||
' }\n' \
|
||||
'#endif\n' \
|
||||
'\n' \
|
||||
' if (values_buf)\n' \
|
||||
' {\n' \
|
||||
' for (i = 0; i < num_options; i++)\n' \
|
||||
' {\n' \
|
||||
' if (values_buf[i])\n' \
|
||||
' {\n' \
|
||||
' free(values_buf[i]);\n' \
|
||||
' values_buf[i] = NULL;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' free(values_buf);\n' \
|
||||
' values_buf = NULL;\n' \
|
||||
' }\n' \
|
||||
'\n' \
|
||||
' if (variables)\n' \
|
||||
' {\n' \
|
||||
' free(variables);\n' \
|
||||
' variables = NULL;\n' \
|
||||
' }\n' \
|
||||
' }\n' \
|
||||
'}\n' \
|
||||
'\n' \
|
||||
'#ifdef __cplusplus\n' \
|
||||
'}\n' \
|
||||
'#endif'
|
||||
|
||||
struct_groups = cor.p_struct.finditer(struct_text)
|
||||
out_text = struct_text
|
||||
|
||||
for construct in struct_groups:
|
||||
repl_text = ''
|
||||
declaration = construct.group(1)
|
||||
struct_match = cor.p_type_name.search(declaration)
|
||||
if struct_match:
|
||||
if struct_match.group(3):
|
||||
struct_type_name_lang = struct_match.group(1, 2, 3)
|
||||
declaration_end = declaration[struct_match.end(1):]
|
||||
elif struct_match.group(4):
|
||||
struct_type_name_lang = struct_match.group(1, 2, 4)
|
||||
declaration_end = declaration[struct_match.end(1):]
|
||||
else:
|
||||
struct_type_name_lang = sum((struct_match.group(1, 2), ('_us',)), ())
|
||||
declaration_end = f'{declaration[struct_match.end(1):struct_match.end(2)]}_us' \
|
||||
f'{declaration[struct_match.end(2):]}'
|
||||
else:
|
||||
return -1
|
||||
|
||||
if 'retro_core_option_definition' == struct_type_name_lang[0]:
|
||||
import shutil
|
||||
shutil.copy(file_name, file_name + '.v1')
|
||||
new_declaration = f'\nstruct retro_core_option_v2_category option_cats{struct_type_name_lang[2]}[] = ' \
|
||||
'{\n { NULL, NULL, NULL },\n' \
|
||||
'};\n\n' \
|
||||
+ declaration[:struct_match.start(1)] + \
|
||||
'retro_core_option_v2_definition' \
|
||||
+ declaration_end
|
||||
offset = construct.start(0)
|
||||
repl_text = repl_text + cor.re.sub(cor.re.escape(declaration), new_declaration,
|
||||
construct.group(0)[:construct.start(2) - offset])
|
||||
content = construct.group(2)
|
||||
new_content = cor.p_option.sub(replace_option, content)
|
||||
|
||||
repl_text = repl_text + new_content + cor.re.sub(r'{\s*NULL,\s*NULL,\s*NULL,\s*{\{0}},\s*NULL\s*},\s*};',
|
||||
'{ NULL, NULL, NULL, NULL, NULL, NULL, {{0}}, NULL },\n};'
|
||||
'\n\nstruct retro_core_options_v2 options' +
|
||||
struct_type_name_lang[2] + ' = {\n'
|
||||
f' option_cats{struct_type_name_lang[2]},\n'
|
||||
f' option_defs{struct_type_name_lang[2]}\n'
|
||||
'};',
|
||||
construct.group(0)[construct.end(2) - offset:])
|
||||
out_text = cor.re.sub(cor.re.escape(construct.group(0)), repl_text, out_text)
|
||||
else:
|
||||
return -2
|
||||
with open(file_name, 'w', encoding='utf-8') as code_file:
|
||||
out_text = cor.re.sub(cor.re.escape(comment_v1), comment_v2, out_text)
|
||||
intl = p_intl.search(out_text)
|
||||
if intl:
|
||||
new_intl = out_text[:intl.start(1)] \
|
||||
+ 'struct retro_core_options_v2 *options_intl[RETRO_LANGUAGE_LAST]' \
|
||||
+ out_text[intl.end(1):intl.start(2)] \
|
||||
+ '&options_us, /* RETRO_LANGUAGE_ENGLISH */' \
|
||||
' &options_ja, /* RETRO_LANGUAGE_JAPANESE */' \
|
||||
' &options_fr, /* RETRO_LANGUAGE_FRENCH */' \
|
||||
' &options_es, /* RETRO_LANGUAGE_SPANISH */' \
|
||||
' &options_de, /* RETRO_LANGUAGE_GERMAN */' \
|
||||
' &options_it, /* RETRO_LANGUAGE_ITALIAN */' \
|
||||
' &options_nl, /* RETRO_LANGUAGE_DUTCH */' \
|
||||
' &options_pt_br, /* RETRO_LANGUAGE_PORTUGUESE_BRAZIL */' \
|
||||
' &options_pt_pt, /* RETRO_LANGUAGE_PORTUGUESE_PORTUGAL */' \
|
||||
' &options_ru, /* RETRO_LANGUAGE_RUSSIAN */' \
|
||||
' &options_ko, /* RETRO_LANGUAGE_KOREAN */' \
|
||||
' &options_cht, /* RETRO_LANGUAGE_CHINESE_TRADITIONAL */' \
|
||||
' &options_chs, /* RETRO_LANGUAGE_CHINESE_SIMPLIFIED */' \
|
||||
' &options_eo, /* RETRO_LANGUAGE_ESPERANTO */' \
|
||||
' &options_pl, /* RETRO_LANGUAGE_POLISH */' \
|
||||
' &options_vn, /* RETRO_LANGUAGE_VIETNAMESE */' \
|
||||
' &options_ar, /* RETRO_LANGUAGE_ARABIC */' \
|
||||
' &options_el, /* RETRO_LANGUAGE_GREEK */' \
|
||||
' &options_tr, /* RETRO_LANGUAGE_TURKISH */' \
|
||||
' &options_sv, /* RETRO_LANGUAGE_SLOVAK */' \
|
||||
' &options_fa, /* RETRO_LANGUAGE_PERSIAN */' \
|
||||
' &options_he, /* RETRO_LANGUAGE_HEBREW */' \
|
||||
' &options_ast, /* RETRO_LANGUAGE_ASTURIAN */' \
|
||||
' &options_fi, /* RETRO_LANGUAGE_FINNISH */' \
|
||||
+ out_text[intl.end(2):]
|
||||
out_text = p_set.sub(new_set, new_intl)
|
||||
else:
|
||||
out_text = p_set.sub(new_set, out_text)
|
||||
code_file.write(out_text)
|
||||
|
||||
return 1
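# Return values of create_v2_code_file(), as implemented above:
#    1 -> the file was converted and rewritten in place (a backup is kept as '*.v1')
#   -1 -> no recognisable option struct declaration was found
#   -2 -> the struct is not 'retro_core_option_definition', i.e. the file already appears
#         to use the v2 interface
# The caller in the main block below maps these codes to user-facing messages.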
|
||||
|
||||
|
||||
# -------------------- MAIN -------------------- #
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
if os.path.isfile(sys.argv[1]):
|
||||
_temp = os.path.dirname(sys.argv[1])
|
||||
else:
|
||||
_temp = sys.argv[1]
|
||||
while _temp.endswith('/') or _temp.endswith('\\'):
|
||||
_temp = _temp[:-1]
|
||||
DIR_PATH = _temp
|
||||
except IndexError:
|
||||
DIR_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
print("No path provided, assuming parent directory:\n" + DIR_PATH)
|
||||
|
||||
H_FILE_PATH = os.path.join(DIR_PATH, 'libretro_core_options.h')
|
||||
INTL_FILE_PATH = os.path.join(DIR_PATH, 'libretro_core_options_intl.h')
|
||||
|
||||
for file in (H_FILE_PATH, INTL_FILE_PATH):
|
||||
if os.path.isfile(file):
|
||||
with open(file, 'r+', encoding='utf-8') as h_file:
|
||||
text = h_file.read()
|
||||
try:
|
||||
test = create_v2_code_file(text, file)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
test = -1
|
||||
if -1 > test:
|
||||
print('Your file looks like it is already v2? (' + file + ')')
|
||||
continue
|
||||
if 0 > test:
|
||||
print('An error occurred! Please make sure to use the complete v1 struct! (' + file + ')')
|
||||
continue
|
||||
else:
|
||||
print(file + ' not found.')
|