diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 016a33fb..679f7810 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -55,19 +55,55 @@ jobs: uv sync --no-group dev pipx install poethepoet - - name: Process po files + - name: Produce csv files run: | cd automation - poe process ../ + poe produce-csv ../ ../output - name: Upload csv artifact uses: actions/upload-artifact@v4 with: name: artifact_csv - path: translation_build + path: output + + produce-mo: + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - uses: actions/checkout@v5 + + - name: Checkout and update the translation-backup submodule + run: git submodule update --init --remote + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: 3.12 + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + version: latest + + - name: Install library and dependencies + run: | + cd automation + uv sync --no-group dev + pipx install poethepoet + + - name: Produce mo files + run: | + cd automation + poe produce-mo ../ ../output + + - name: Upload mo artifact + uses: actions/upload-artifact@v4 + with: + name: artifact_mo + path: output push-changes: - needs: produce-csv + needs: [produce-csv, produce-mo] runs-on: ubuntu-latest timeout-minutes: 60 steps: diff --git a/automation/automation/models.py b/automation/automation/models.py index 62b98340..e1e95b5b 100644 --- a/automation/automation/models.py +++ b/automation/automation/models.py @@ -21,3 +21,4 @@ class Config(BaseModel): class Context(BaseModel): config: Config working_directory: Path + destination_directory: Path diff --git a/automation/automation/process.py b/automation/automation/produce_csv_files.py similarity index 85% rename from automation/automation/process.py rename to automation/automation/produce_csv_files.py index d7453395..326fe6e0 100644 --- a/automation/automation/process.py +++ b/automation/automation/produce_csv_files.py @@ -15,21 +15,11 @@ from 
automation.load_config import load_config from automation.models import Context, LanguageInfo +from automation.utils import get_po_file_path alternative_encodings.register_all() -def get_po_file_path(*, working_directory: Path, project_name: str, resource_name: str, language_code: str) -> Path: - return ( - working_directory - / "translations-backup" - / "translations" - / project_name - / resource_name - / f"{language_code}.po" - ) - - def load_po_file(file_path: Path) -> list[tuple[str, str]]: with file_path.open(encoding="utf-8") as file: return simple_read_po(file) @@ -96,9 +86,8 @@ def process_objects( @logger.catch(reraise=True) -def process(language: LanguageInfo, context: Context) -> None: - translation_build_directory = context.working_directory / "translation_build" - csv_directory = translation_build_directory / "csv" / language.name +def process_language(language: LanguageInfo, context: Context) -> None: + csv_directory = context.destination_directory / "csv" / language.name csv_directory.mkdir(parents=True, exist_ok=True) hardcoded_csv_file_path = csv_directory / "dfint_dictionary.csv" csv_hardcoded_data = process_hardcoded( @@ -111,7 +100,7 @@ def process(language: LanguageInfo, context: Context) -> None: exclude = {first for first, _ in csv_hardcoded_data} - csv_with_objects_directory = translation_build_directory / "csv_with_objects" / language.name + csv_with_objects_directory = context.destination_directory / "csv_with_objects" / language.name csv_with_objects_directory.mkdir(parents=True, exist_ok=True) with_objects_csv_file_path = csv_with_objects_directory / "dfint_dictionary.csv" @@ -129,16 +118,16 @@ def process(language: LanguageInfo, context: Context) -> None: def process_all(context: Context) -> None: for language in context.config.languages: - process(language, context) + process_language(language, context) app = typer.Typer() @app.command() -def main(working_directory: Path) -> None: +def main(working_directory: Path, 
destination_directory: Path) -> None: config = load_config(working_directory / "config.yaml") - context = Context(config=config, working_directory=working_directory) + context = Context(config=config, working_directory=working_directory, destination_directory=destination_directory) process_all(context) diff --git a/automation/automation/produce_mo_files.py b/automation/automation/produce_mo_files.py new file mode 100644 index 00000000..66861bad --- /dev/null +++ b/automation/automation/produce_mo_files.py @@ -0,0 +1,64 @@ + +from pathlib import Path + +import typer +from df_translation_toolkit.convert import objects_po_to_mo, text_set_po_to_mo +from df_translation_toolkit.validation.validation_models import Diagnostics +from loguru import logger + +from automation.load_config import load_config +from automation.models import Context, LanguageInfo +from automation.utils import get_po_file_path + +conversion_functions = { + "objects": objects_po_to_mo.convert, + "text_set": text_set_po_to_mo.convert, +} + + +def process_resource(mo_directory: Path, language: LanguageInfo, resource: str, context: Context) -> None: + po_file_path = get_po_file_path( + working_directory=context.working_directory, + project_name=context.config.source.project, + resource_name=resource, + language_code=language.code, + ) + mo_file_path = mo_directory / f"{resource}.mo" + diagnostics = Diagnostics() + with po_file_path.open("rt", encoding="utf-8") as po_file, mo_file_path.open("wb") as mo_file: + conversion_functions[resource](po_file, mo_file, diagnostics) + + errors_file_path = mo_directory / f"{resource}_errors.txt" + if errors_file_path.exists(): + errors_file_path.unlink() + + if diagnostics.contains_problems(): + with errors_file_path.open("w", encoding="utf-8") as errors_file: + errors_file.write(str(diagnostics)) + + +@logger.catch(reraise=True) +def process_language(language: LanguageInfo, context: Context) -> None: + mo_directory = context.destination_directory / "mo" / 
language.name + mo_directory.mkdir(parents=True, exist_ok=True) + for resource in ("objects", "text_set"): + process_resource(mo_directory, language, resource, context) + + +def process_all(context: Context) -> None: + for language in context.config.languages: + process_language(language, context) + + +app = typer.Typer() + + +@app.command() +def main(working_directory: Path, destination_directory: Path) -> None: + config = load_config(working_directory / "config.yaml") + context = Context(config=config, working_directory=working_directory, destination_directory=destination_directory) + process_all(context) + + +if __name__ == "__main__": + app() diff --git a/automation/automation/utils.py b/automation/automation/utils.py new file mode 100644 index 00000000..65a48f13 --- /dev/null +++ b/automation/automation/utils.py @@ -0,0 +1,12 @@ +from pathlib import Path + + +def get_po_file_path(*, working_directory: Path, project_name: str, resource_name: str, language_code: str) -> Path: + return ( + working_directory + / "translations-backup" + / "translations" + / project_name + / resource_name + / f"{language_code}.po" + ) diff --git a/automation/pyproject.toml b/automation/pyproject.toml index 4cfc20dc..5cd37396 100644 --- a/automation/pyproject.toml +++ b/automation/pyproject.toml @@ -10,7 +10,7 @@ dependencies = [ "loguru>=0.7.3,<0.8", "pydantic>=2,<3", "strictyaml>=1.7.3,<2", - "df-translation-toolkit>=0.11.2,<0.12", + "df-translation-toolkit>=0.12.0,<0.13", "alternative-encodings>=0.3.1,<0.4", "tqdm>=4.67.1,<5", ] @@ -41,4 +41,5 @@ ignore = [ ] [tool.poe.tasks] -process.script = "automation.process:app" +produce-csv.script = "automation.produce_csv_files:app" +produce-mo.script = "automation.produce_mo_files:app" diff --git a/automation/uv.lock b/automation/uv.lock index b7640039..fc3bf97f 100644 --- a/automation/uv.lock +++ b/automation/uv.lock @@ -45,7 +45,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "alternative-encodings", specifier = 
">=0.3.1,<0.4", index = "https://dfint.github.io/pypi-index/" }, - { name = "df-translation-toolkit", specifier = ">=0.11.2,<0.12", index = "https://dfint.github.io/pypi-index/" }, + { name = "df-translation-toolkit", specifier = ">=0.12.0,<0.13", index = "https://dfint.github.io/pypi-index/" }, { name = "loguru", specifier = ">=0.7.3,<0.8" }, { name = "pydantic", specifier = ">=2,<3" }, { name = "strictyaml", specifier = ">=1.7.3,<2" }, @@ -203,7 +203,7 @@ wheels = [ [[package]] name = "df-translation-toolkit" -version = "0.11.2" +version = "0.12.0" source = { registry = "https://dfint.github.io/pypi-index/" } dependencies = [ { name = "babel" }, @@ -215,7 +215,7 @@ dependencies = [ { name = "unidecode" }, ] wheels = [ - { url = "https://github.com/dfint/df-translation-toolkit/releases/download/0.11.2/df_translation_toolkit-0.11.2-py3-none-any.whl" }, + { url = "https://github.com/dfint/df-translation-toolkit/releases/download/0.12.0/df_translation_toolkit-0.12.0-py3-none-any.whl" }, ] [[package]]