New data file translation infrastructure using Python
* replaced po4a and bash scripts with Python scripts using polib
* cleaned up CMake files

branch: coolant-mod
parent
85baaaf19d
commit
9c253a3510
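The commit message's key point is that all *.po/*.pot handling now goes through polib instead of po4a. As a quick orientation, here is a minimal, hedged sketch of the polib pattern the new scripts rely on (collect entries into a fresh catalog, merge it into the catalog stored in the repository, save); the file name is a placeholder, not from the commit:

# Minimal polib sketch of the new workflow (file name is a placeholder, not from the commit).
import polib

collected = polib.POFile(wrapwidth = 0)
collected.append(polib.POEntry(msgid = 'New Mission',          # example string found in a data file
                               comment = 'type: Title-text',
                               occurrences = [('scene.txt', 1)],
                               flags = ['no-wrap']))

stored = polib.pofile('po/translations.pot', wrapwidth = 0)    # catalog kept in the repository
stored.merge(collected)                                        # same merge step TemplateFile performs below
stored.save('po/translations.pot')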
@@ -1,5 +1,11 @@
cmake_minimum_required(VERSION 2.8)

find_package(PythonInterp 2.6)

if(NOT PYTHONINTERP_FOUND)
    message(WARNING "Python not found, help and level files will NOT be translated!")
endif()

if(NOT DEFINED COLOBOT_INSTALL_DATA_DIR)
    if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
        set(COLOBOT_INSTALL_DATA_DIR ${CMAKE_INSTALL_PREFIX}/data CACHE PATH "Colobot shared data directory")
@@ -1,28 +1,34 @@
cmake_minimum_required(VERSION 2.8)

include(../i18n-tools/CommonI18N.cmake)
include(../i18n-tools/HelpI18N.cmake)
include(../i18n-tools/I18NTools.cmake)

set(HELP_INSTALL_DATA_DIR ${COLOBOT_INSTALL_DATA_DIR}/help)

##
# Add help category directory
##
function(add_help_category help_category_dir install_dest_dir)
    file(GLOB help_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${help_category_dir}/E/*.txt)
    list(SORT help_files)
    if(PO4A AND EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${help_category_dir}/po/)
        generate_help_i18n(translated_help_dirs
            "${help_files}"
            ${help_category_dir}/po
            ${DATA_BINARY_DIR}/help-po/${help_category_dir})
    else()
        set(translated_help_dirs "")
function(add_help_category help_category_dir install_subdir)

    message(STATUS "Adding translation targets for help/${help_category_dir}")

    file(GLOB english_help_files ${help_category_dir}/E/*)
    install(FILES ${english_help_files} DESTINATION ${HELP_INSTALL_DATA_DIR}/E/${install_subdir})

    if(PYTHONINTERP_FOUND)
        set(work_dir ${DATA_BINARY_DIR}/help-po/${help_category_dir})
        generate_translations(translated_help_files
            "help"
            ${CMAKE_CURRENT_SOURCE_DIR}
            ${help_category_dir}
            ${help_category_dir}/po
            ${work_dir}
            "${install_subdir}")

        install_preserving_relative_paths("${translated_help_files}"
            ${work_dir}
            ${HELP_INSTALL_DATA_DIR})
    endif()
    install(DIRECTORY ${help_category_dir}/E/ DESTINATION ${HELP_INSTALL_DATA_DIR}/E/${install_dest_dir})
    foreach(translated_help_dir ${translated_help_dirs})
        install(DIRECTORY ${DATA_BINARY_DIR}/help-po/${help_category_dir}/${translated_help_dir}/ DESTINATION ${HELP_INSTALL_DATA_DIR}/${translated_help_dir}/${install_dest_dir})
    endforeach()

endfunction()
@@ -0,0 +1,2 @@
__pycache__/
*.pyc
@@ -1,22 +0,0 @@
##
# Common function used in other I18N CMake modules
##

##
# Get language code from *.po file name e.g. "de.po" -> "de"
##
function(get_language_code result_language_code po_file)
    get_filename_component(po_file_name ${po_file} NAME)
    string(REPLACE ".po" "" language_code ${po_file_name})
    set(${result_language_code} ${language_code} PARENT_SCOPE)
endfunction()

##
# Get language char from *.po file name e.g. "de.po" -> "D"
##
function(get_language_char result_language_char po_file)
    get_filename_component(po_file_name ${po_file} NAME)
    string(REGEX REPLACE ".\\.po" "" language_char ${po_file_name})
    string(TOUPPER ${language_char} language_char)
    set(${result_language_char} ${language_char} PARENT_SCOPE)
endfunction()
@@ -1,83 +0,0 @@
##
# Meta-infrastructure to allow po-based translation of Colobot help files
##

find_program(PO4A po4a)

if(NOT PO4A)
    message(WARNING "PO4A not found, help files will NOT be translated!")
endif()

##
# Generate translated help files in separate directories per language
##
function(generate_help_i18n
         result_generated_help_dirs # output variable to return names of directories with translated files
         source_help_files # input help files
         po_dir # directory with translations
         work_dir) # directory where to save generated files

    # generated config file for po4a
    set(po4a_cfg_file ${work_dir}/help_po4a.cfg)

    # get translations from po directory
    get_filename_component(abs_po_dir ${po_dir} ABSOLUTE)
    file(WRITE ${po4a_cfg_file} "[po_directory] ${abs_po_dir}\n")

    # prepare output directories
    set(output_help_subdirs "")
    file(GLOB po_files ${po_dir}/*.po)
    foreach(po_file ${po_files})
        get_language_char(language_char ${po_file})
        #set(language_help_subdir ${work_dir}/${language_char})
        list(APPEND output_help_subdirs ${language_char})
    endforeach()

    # add translation rules for help files
    foreach(source_help_file ${source_help_files})
        get_filename_component(abs_source_help_file ${source_help_file} ABSOLUTE)
        get_filename_component(help_file_name ${source_help_file} NAME)

        file(APPEND ${po4a_cfg_file} "\n[type:colobothelp] ${abs_source_help_file}")
        foreach(po_file ${po_files})
            # generated file for single language
            get_language_code(language_code ${po_file})
            get_language_char(language_char ${po_file})
            set(generated_help_file ${work_dir}/${language_char}/${help_file_name})
            file(APPEND ${po4a_cfg_file} " \\\n ${language_code}:${generated_help_file}")
        endforeach()
    endforeach()

    # dummy files to signal that scripts have finished running
    set(translation_signalfile ${work_dir}/translations)
    set(po_clean_signalfile ${work_dir}/po_clean)

    # script to run po4a and generate translated files
    add_custom_command(OUTPUT ${translation_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/run_po4a.sh
            ${po4a_cfg_file}
            ${translation_signalfile}
        DEPENDS ${po_files})

    file(GLOB pot_file ${po_dir}/*.pot)
    set(po_files ${po_files} ${pot_file})

    # script to do some cleanups in updated *.po and *.pot files
    string(REPLACE ";" ":" escaped_po_files "${po_files}")
    add_custom_command(OUTPUT ${po_clean_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/clean_po_files.sh
            ${escaped_po_files}
            ${translation_signalfile}
            ${po_clean_signalfile}
        DEPENDS ${translation_signalfile}
    )

    # generate some unique string for target name
    string(REGEX REPLACE "[/\\]" "_" target_suffix ${po_dir})

    # target to run both scripts
    add_custom_target(i18n_${target_suffix} ALL DEPENDS ${translation_signalfile} ${po_clean_signalfile})

    # return the translated files
    set(${result_generated_help_dirs} ${output_help_subdirs} PARENT_SCOPE)
endfunction()
@@ -0,0 +1,85 @@
##
# Meta-infrastructure to allow po-based translation of Colobot help files
##

##
# Generate translated files with Python script
##
function(generate_translations
         result_output_files # output variable to return file names of translated files
         type # type of files to process
         working_dir # working directory for the commands to run
         input_dir # directory with source files
         po_dir # directory with translations
         output_dir # directory where to save generated files
         output_subdir) # optional installation subdirectory

    if(output_subdir STREQUAL "")
        set(output_subdir_opt "")
    else()
        set(output_subdir_opt "--output_subdir")
    endif()

    # first command is used to get list of input and output files when running CMake to
    # execute appropriate CMake install commands and set up dependencies properly
    execute_process(COMMAND ${PYTHON_EXECUTABLE}
            ${DATA_SOURCE_DIR}/i18n-tools/scripts/process_translations.py
            --mode print_files
            --type ${type}
            --input_dir ${input_dir}
            --po_dir ${po_dir}
            --output_dir ${output_dir}
            ${output_subdir_opt} ${output_subdir}
        WORKING_DIRECTORY ${working_dir}
        OUTPUT_VARIABLE files_list)

    string(REGEX REPLACE "(.*)\n(.*)" "\\1" input_files "${files_list}")
    string(REGEX REPLACE "(.*)\n(.*)" "\\2" output_files "${files_list}")

    # return the list of output files to parent
    set(${result_output_files} ${output_files} PARENT_SCOPE)

    # dummy file to indicate success
    set(signal_file ${output_dir}/translation)

    # po files are also dependency
    file(GLOB po_files ${po_dir}/*)

    # actual command used to generate translations executed when building project
    add_custom_command(OUTPUT ${signal_file}
        COMMAND ${PYTHON_EXECUTABLE}
            ${DATA_SOURCE_DIR}/i18n-tools/scripts/process_translations.py
            --mode generate
            --type ${type}
            --input_dir ${input_dir}
            --po_dir ${po_dir}
            --output_dir ${output_dir}
            ${output_subdir_opt} ${output_subdir}
            --signal_file ${signal_file}
        WORKING_DIRECTORY ${working_dir}
        DEPENDS ${input_files} ${po_files})

    # generate some unique string for target name
    string(REGEX REPLACE "[/\\]" "_" target_suffix ${po_dir})

    # target to run the command
    add_custom_target(i18n_${target_suffix} ALL DEPENDS ${signal_file})

endfunction()

##
# Convenience function for installing generated files while keeping
# their relative paths in output directory
##
function(install_preserving_relative_paths
         output_files # list of output files
         output_dir # output directory
         destination_dir) # install destination directory

    foreach(output_file ${output_files})
        file(RELATIVE_PATH rel_output_file ${output_dir} ${output_file})
        get_filename_component(rel_output_file_dir ${rel_output_file} DIRECTORY)
        install(FILES ${output_file} DESTINATION ${destination_dir}/${rel_output_file_dir})
    endforeach()

endfunction()
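The execute_process call above expects process_translations.py to print two ';'-separated lists on two lines, inputs first and outputs second, which the two REGEX REPLACE commands then split into input_files and output_files. A sketch of that stdout contract, mirroring the print_files() helper added later in this commit (the paths are placeholders):

# Sketch of the stdout contract parsed by the REGEX REPLACE calls above (paths are placeholders).
import sys

input_files = ['levels/chapter001/scene.txt']
output_files = ['build/levels-po/chapter001/scene.txt']

sys.stdout.write(';'.join(input_files))   # first line -> input_files in CMake
sys.stdout.write('\n')
sys.stdout.write(';'.join(output_files))  # second line -> output_files in CMake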
@@ -1,193 +0,0 @@
##
# Meta-infrastructure to allow po-based translation of Colobot level files
##

find_program(PO4A po4a)

if(NOT PO4A)
    message(WARNING "PO4A not found, level files will NOT be translated!")
endif()

##
# Generate translated chaptertitle files using po4a
##
function(generate_chaptertitles_i18n
         result_translated_chaptertitle_files # output variable to return names of translated chaptertitle files
         source_chaptertitle_prefix_dir # prefix directory for chaptertitle files
         source_chaptertitle_files # input chaptertitle files relative to prefix dir
         po_dir # directory with translations (*.po, *.pot files)
         work_dir) # directory where to save generated files

    # generated dummy file for translation of "E", "D", "F", "P", etc. language letters
    # TODO find a better way to provide translations than this hack
    set(langchar_file ${work_dir}/chaptertitles_langchar.txt)
    file(WRITE ${langchar_file} "E")

    # generated config file for po4a
    set(po4a_cfg_file ${work_dir}/chaptertitles_po4a.cfg)

    # get translations from po directory
    get_filename_component(abs_po_dir ${po_dir} ABSOLUTE)
    file(WRITE ${po4a_cfg_file} "[po_directory] ${abs_po_dir}\n")

    # add content of dummy language file to translation
    file(APPEND ${po4a_cfg_file} "[type:text] ${langchar_file}")

    set(abs_source_chaptertitle_files "")
    set(translated_chaptertitle_files "")
    file(GLOB po_files ${po_dir}/*.po)

    foreach(source_chaptertitle_file ${source_chaptertitle_files})
        get_filename_component(abs_source_chaptertitle_file ${source_chaptertitle_prefix_dir}/${source_chaptertitle_file} ABSOLUTE)
        set(output_chaptertitle_file ${work_dir}/${source_chaptertitle_file})

        # translation rule for chaptertitle file
        file(APPEND ${po4a_cfg_file} "\n[type:colobotlevel] ${abs_source_chaptertitle_file}")

        foreach(po_file ${po_files})
            # generated file for single language
            get_language_code(language_code ${po_file})
            set(generated_language_file ${output_chaptertitle_file}.${language_code})
            file(APPEND ${po4a_cfg_file} " \\\n ${language_code}:${generated_language_file}")
        endforeach()

        list(APPEND abs_source_chaptertitle_files ${abs_source_chaptertitle_file})
        list(APPEND translated_chaptertitle_files ${output_chaptertitle_file})
    endforeach()

    # dummy files to signal that scripts have finished running
    set(translation_signalfile ${work_dir}/translations)
    set(po_clean_signalfile ${work_dir}/po_clean)

    # script to run po4a and consolidate the translations
    string(REPLACE ";" ":" escaped_abs_source_chaptertitle_files "${abs_source_chaptertitle_files}")
    string(REPLACE ";" ":" escaped_translated_chaptertitle_files "${translated_chaptertitle_files}")
    add_custom_command(OUTPUT ${translation_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/run_po4a.sh ${po4a_cfg_file}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/create_level_translations.sh
            ${escaped_abs_source_chaptertitle_files}
            ${escaped_translated_chaptertitle_files}
            ${translation_signalfile}
        DEPENDS ${po_files})

    file(GLOB pot_file ${po_dir}/*.pot)
    set(po_files ${po_files} ${pot_file})

    # script to do some cleanups in updated *.po and *.pot files
    string(REPLACE ";" ":" escaped_po_files "${po_files}")
    add_custom_command(OUTPUT ${po_clean_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/clean_po_files.sh
            ${escaped_po_files}
            ${translation_signalfile}
            ${po_clean_signalfile}
        DEPENDS ${translation_signalfile}
    )

    # generate some unique string for target name
    string(REGEX REPLACE "[/\\]" "_" target_suffix ${po_dir})

    # target to run both scripts
    add_custom_target(i18n_${target_suffix} ALL DEPENDS ${translation_signalfile} ${po_clean_signalfile})

    # return the translated files
    set(${result_translated_chaptertitle_files} ${translated_chaptertitle_files} PARENT_SCOPE)
endfunction()

##
# Generate translated level and help files using po4a
##
function(generate_level_i18n
         result_translated_level_file # output variable to return names of translated level files
         result_translated_help_files # output variable to return names of translated help files
         source_level_file # input scene.txt files
         source_help_files # input help files
         po_dir # directory with translations (*.po, *.pot files)
         work_dir) # directory where to save generated files

    # generated dummy file for translation of "E", "D", "F", "P", etc. language letters
    # TODO find a better way to provide translations than this hack
    set(langchar_file ${work_dir}/scene_langchar.txt)
    file(WRITE ${langchar_file} "E")

    # generated config file for po4a
    set(po4a_cfg_file ${work_dir}/scene_po4a.cfg)

    # get translations from po directory
    get_filename_component(abs_po_dir ${po_dir} ABSOLUTE)
    file(WRITE ${po4a_cfg_file} "[po_directory] ${abs_po_dir}\n")

    # add content of dummy language file to translation
    file(APPEND ${po4a_cfg_file} "[type:text] ${langchar_file}")

    # translation rule for scene file
    get_filename_component(abs_source_level_file ${source_level_file} ABSOLUTE)
    file(APPEND ${po4a_cfg_file} "\n[type:colobotlevel] ${abs_source_level_file}")

    get_filename_component(source_level_file_name ${source_level_file} NAME)
    set(output_level_file ${work_dir}/${source_level_file_name})

    file(GLOB po_files ${po_dir}/*.po)
    foreach(po_file ${po_files})
        get_language_code(language_code ${po_file})
        # generated file for single language
        set(generated_language_file ${output_level_file}.${language_code})
        file(APPEND ${po4a_cfg_file} " \\\n ${language_code}:${generated_language_file}")
    endforeach()

    # translation rules for help files
    set(output_help_dir ${work_dir}/help)
    set(translated_help_files "")

    foreach(source_help_file ${source_help_files})
        get_filename_component(help_file_name ${source_help_file} NAME)

        file(APPEND ${po4a_cfg_file} "\n[type:colobothelp] ${source_help_file}")
        foreach(po_file ${po_files})
            # generated file for single language
            get_language_code(language_code ${po_file})
            get_language_char(language_char ${po_file})
            string(REPLACE ".E." ".${language_char}." generated_help_file_name ${help_file_name})
            set(generated_help_file ${output_help_dir}/${generated_help_file_name})
            file(APPEND ${po4a_cfg_file} " \\\n ${language_code}:${generated_help_file}")

            list(APPEND translated_help_files ${generated_help_file})
        endforeach()
    endforeach()

    # dummy files to signal that scripts have finished running
    set(translation_signalfile ${work_dir}/translations)
    set(po_clean_signalfile ${work_dir}/po_clean)

    # script to run po4a and consolidate the translations
    add_custom_command(OUTPUT ${translation_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/run_po4a.sh ${po4a_cfg_file}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/create_level_translations.sh
            ${abs_source_level_file}
            ${output_level_file}
            ${translation_signalfile}
        DEPENDS ${po_files})

    file(GLOB pot_file ${po_dir}/*.pot)
    set(po_files ${po_files} ${pot_file})

    # script to do some cleanups in updated *.po and *.pot files
    string(REPLACE ";" ":" escaped_po_files "${po_files}")
    add_custom_command(OUTPUT ${po_clean_signalfile}
        COMMAND ${DATA_SOURCE_DIR}/i18n-tools/scripts/clean_po_files.sh
            ${escaped_po_files}
            ${translation_signalfile}
            ${po_clean_signalfile}
        DEPENDS ${translation_signalfile}
    )

    # generate some unique string for target name
    string(REGEX REPLACE "[/\\]" "_" target_suffix ${po_dir})

    # target to run both scripts
    add_custom_target(i18n_${target_suffix} ALL DEPENDS ${translation_signalfile} ${po_clean_signalfile})

    # return the translated files
    set(${result_translated_level_file} ${output_level_file} PARENT_SCOPE)
    set(${result_translated_help_files} ${translated_help_files} PARENT_SCOPE)
endfunction()
@@ -1,42 +0,0 @@
#!/bin/bash

##
# Script to do some cleaning up of merged/generated *.po and *.pot files
#
# It is basically a sed wrapper that does two things:
# - remove information about absolute filenames which were used to generate translations
# - remove modification date of file
#
# By doing these two things, it makes sure that *.po and *.pot files do not change
# compared to versions stored in repository when building the project
#
# The arguments are a colon-separated list of *.po or *.pot files and
# two dummy signal files used by build system that must be updated
##

# stop on errors
set -e

if [ $# -ne 3 ]; then
    echo "Invalid arguments!" >&2
    echo "Usage: $0 po_file1[:po_file2;...] translation_signalfile po_clean_signalfile" >&2
    exit 1
fi

PO_FILES="$1"
TRANSLATION_SIGNALFILE="$2"
PO_CLEAN_SIGNALFILE="$3"

IFS=':' read -a po_files_array <<< "$PO_FILES"

for po_file in "${po_files_array[@]}"; do
    # strip unnecessary part of file names
    sed -i -e 's|^#: .*data/\(.*\)$|#: \1|' "$po_file"
    # remove the creation date
    sed -i -e 's|^\("POT-Creation-Date:\).*$|\1 DATE\\n"|' "$po_file"
done

# update the dummy signal files to indicate success
# we also have to touch translation signalfile because it's supposed to be modified later than po files
touch "$TRANSLATION_SIGNALFILE"
touch "$PO_CLEAN_SIGNALFILE"
@@ -0,0 +1,229 @@
import errno
import io
import os
import polib

"""
Works like shell's "mkdir -p" and also behaves nicely if given None argument
"""
def nice_mkdir(path):
    if path is None:
        return

    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else: raise

"""
Works as os.path.join, but behaves nicely if given None argument
"""
def nice_path_join(*paths):
    for path in paths:
        if path is None:
            return None

    return os.path.join(*paths)

"""
Wrapper class over POFile, acting as translation template file

It actually holds two POFile instances:
 previous_catalog is the content of PO file read from disk
 current_catalog is created empty and filled with entries from input files

Once all processing is done, the content of previous_catalog is merged with current_catalog
and the result is saved to disk.
"""
class TemplateFile:
    def __init__(self, file_name):
        self.file_name = file_name
        self.dir_name = os.path.dirname(file_name)
        self.language = 'en'
        self.current_catalog = polib.POFile(wrapwidth = 0)
        if os.path.exists(file_name):
            self.previous_catalog = polib.pofile(file_name, wrapwidth = 0)
        else:
            self.previous_catalog = polib.POFile(wrapwidth = 0)

    """
    Wrapper over inserting template file entry
    If entry does not exist, it is created;
    otherwise it is modified to indicate multiple occurrences
    """
    def insert_entry(self, text, occurrence, type_comment):
        entry = self.current_catalog.find(text)
        relative_file_name = os.path.relpath(occurrence.file_name, self.dir_name)
        occurrence = (relative_file_name, occurrence.line_number)
        if entry:
            entry.comment = self._merge_comment(entry.comment, type_comment)
            if occurrence not in entry.occurrences:
                entry.occurrences.append(occurrence)
        else:
            comment = 'type: ' + type_comment
            new_entry = polib.POEntry(msgid = text,
                                      comment = comment,
                                      occurrences = [occurrence],
                                      flags = ['no-wrap'])

            self.current_catalog.append(new_entry)

    def _merge_comment(self, previous_comment, type_comment):
        new_comment = previous_comment

        previous_types = previous_comment.replace('type: ', '')
        previous_types_list = previous_types.split(', ')

        if type_comment not in previous_types_list:
            new_comment += ', ' + type_comment

        return new_comment

    """
    Merges previous_catalog with current_catalog and saves the result to disk
    """
    def merge_and_save(self):
        self.previous_catalog.merge(self.current_catalog)
        self.previous_catalog.save(self.file_name)

"""
Wrapper class over POFile, acting as language translation file
"""
class LanguageFile:
    def __init__(self, file_name):
        self.file_name = file_name
        # get language from file name e.g. "/foo/de.po" -> "de"
        (self.language, _) = os.path.splitext(os.path.basename(file_name))
        if os.path.exists(file_name):
            self.catalog = polib.pofile(file_name, wrapwidth = 0)
        else:
            self.catalog = polib.POFile(wrapwidth = 0)

    """
    Return single language character e.g. "de" -> "D"
    """
    def language_char(self):
        return self.language[0].upper()

    """
    Try to translate given text; if not found among translations,
    return the original
    """
    def translate(self, text):
        entry = self.catalog.find(text)
        if entry and entry.msgstr != '':
            return entry.msgstr
        return text

    """
    Merges entries with current_catalog from template file and saves the result to disk
    """
    def merge_and_save(self, template_file):
        self.catalog.merge(template_file.current_catalog)
        self.catalog.save(self.file_name)

"""
Locates the translation files in po_dir
"""
def find_translation_file_names(po_dir):
    pot_file_name = os.path.join(po_dir, 'translations.pot') # default
    po_file_names = []
    for file_name in os.listdir(po_dir):
        if file_name.endswith('.pot'):
            pot_file_name = os.path.join(po_dir, file_name)
        elif file_name.endswith('.po'):
            po_file_names.append(os.path.join(po_dir, file_name))

    return (pot_file_name, po_file_names)

"""
Creates template and language files by reading po_dir
"""
def create_template_and_language_files(po_dir):
    (pot_file_name, po_file_names) = find_translation_file_names(po_dir)

    template_file = TemplateFile(pot_file_name)
    language_files = []
    for po_file_name in po_file_names:
        language_files.append(LanguageFile(po_file_name))

    return (template_file, language_files)

"""
Structure representing occurrence of text
"""
class Occurrence:
    def __init__(self, file_name, line_number):
        self.file_name = file_name
        self.line_number = line_number

"""
Structure representing line read from input file
"""
class InputLine:
    def __init__(self, text, occurrence):
        self.text = text
        self.occurrence = occurrence


"""
Base class for single translation process,
translating one input file into one output file

It provides wrapper code for reading consecutive lines of text and saving the result
"""
class TranslationJob:
    def __init__(self, **kwargs):
        self._input_line_counter = 0
        self._input_file_name = kwargs['input_file']
        self._input_file = None

        self._output_file_name = kwargs['output_file']
        self._output_file = None

    """
    Launch translation process
    Actual processing is done in process_file() function which must be implemented by subclasses
    """
    def run(self):
        try:
            self._open_files()
            self.process_file()
        finally:
            self._close_files()

    def _open_files(self):
        self._input_file = io.open(self._input_file_name, 'r', encoding='utf-8')
        if self._output_file_name:
            self._output_file = io.open(self._output_file_name, 'w', encoding='utf-8')

    def _close_files(self):
        self._input_file.close()
        if self._output_file:
            self._output_file.close()

    """
    Return next line, occurrence pair from input file or None if at end of input
    """
    def read_input_line(self):
        line = self._input_file.readline()
        if line == '':
            return None

        self._input_line_counter += 1
        return InputLine(line.rstrip('\n'), Occurrence(self._input_file_name, self._input_line_counter))

    """
    Write line to output file, if present
    """
    def write_output_line(self, line):
        if self._output_file:
            self._output_file.write(line + '\n')

    def get_input_file_name(self):
        return self._input_file_name

    def get_output_file_name(self):
        return self._output_file_name
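To show how the pieces above fit together, here is a hypothetical TranslationJob subclass that copies its input unchanged while registering every non-empty line in the template catalog; the PassThroughJob name, the file names and the po/ directory are illustrative only, not part of the commit:

# Hypothetical usage sketch of the helpers above (names and paths are illustrative).
from common import TranslationJob, create_template_and_language_files

class PassThroughJob(TranslationJob):
    def __init__(self, **kwargs):
        TranslationJob.__init__(self, **kwargs)
        self.template_file = kwargs['template_file']

    def process_file(self):
        while True:
            line = self.read_input_line()
            if not line:
                break
            if line.text:
                # register the line in the template catalog
                self.template_file.insert_entry(line.text, line.occurrence, 'Plain text')
            # copy the input through unchanged
            self.write_output_line(line.text)

(template_file, language_files) = create_template_and_language_files('po')
job = PassThroughJob(input_file = 'input.txt',
                     output_file = 'output.txt',
                     template_file = template_file)
job.run()
template_file.merge_and_save()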
@@ -1,56 +0,0 @@
#!/bin/bash

##
# Script to consolidate multiple translated level files (scene.txt or chaptertitle.txt),
# generated previously by PO4A, into a single all-in-one output file
#
# It supports multiple pairs of source and output files and makes the assumption that
# each source file was processed by PO4A to yield output files named like $output_file.$language_code
#
# Basically, it is a simple sed wrapper that uses the source file and the translated files to copy-paste
# content into resulting output file
#
# The arguments are a list of source files as a colon-separated list, a list of output files also as a colon-separated list
# and a dummy signal file used by build system
##

# Stop on errors
set -e

if [ $# -ne 3 ]; then
    echo "Invalid arguments!" >&2
    echo "Usage: $0 source_file1[:source_file2:...] output_file1[:output_file2:...] translation_signalfile" >&2
    exit 1
fi

SOURCE_FILES="$1"
OUTPUT_FILES="$2"
TRANSLATION_SIGNALFILE="$3"

IFS=':' read -a source_files_array <<< "$SOURCE_FILES"
IFS=':' read -a output_files_array <<< "$OUTPUT_FILES"

for index in "${!source_files_array[@]}"; do
    source_file="${source_files_array[index]}"
    output_file="${output_files_array[index]}"

    # generate output file
    echo -n "" > "$output_file"

    # first, write original English headers
    sed -n '/^Title/p;/^Resume/p;/^ScriptName/p' "$source_file" >> "$output_file"

    # now, copy translated headers from translated files
    # (translated files are named output file + suffix with language code)
    for translated_file in $output_file.*; do
        sed -n '/^Title/p;/^Resume/p;/^ScriptName/p' "$translated_file" >> "$output_file"
    done
    echo "// End of level headers translations" >> "$output_file"
    echo "" >> "$output_file"

    # copy the rest of source file, excluding headers
    sed -e '/^Title/d;/^Resume/d;/^ScriptName/d' "$source_file" >> "$output_file"
done

# update the dummy signal file to indicate success
touch "$TRANSLATION_SIGNALFILE"
@@ -1,194 +0,0 @@
# Locale::Po4a::Colobothelp -- Convert Colobot help files
#
# This program is free software; you may redistribute it and/or modify it
# under the terms of GPLv3.
#

use Locale::Po4a::TransTractor qw(process new);
use Locale::Po4a::Common;
use Locale::Po4a::Text;

package Locale::Po4a::Colobothelp;

use 5.006;
use strict;
use warnings;

require Exporter;

use vars qw(@ISA @EXPORT $AUTOLOAD);
@ISA = qw(Locale::Po4a::TransTractor);
@EXPORT = qw();

my @comments = ();

sub initialize {}

sub parse {
    my $self = shift;
    my ($line,$ref);
    my $paragraph="";
    my $wrapped_mode = 1;
    my $s_mode = 0;
    my $expect_header = 1;
    my $end_of_paragraph = 0;
    ($line,$ref)=$self->shiftline();
    while (defined($line)) {
        chomp($line);
        $self->{ref}="$ref";
        ($paragraph,$wrapped_mode,$s_mode,$expect_header,$end_of_paragraph) = parse_colobothelp($self,$line,$ref,$paragraph,$wrapped_mode,$s_mode,$expect_header,$end_of_paragraph);
        if ($end_of_paragraph) {
            do_paragraph($self,offlink($paragraph),$wrapped_mode);
            $paragraph="";
            $wrapped_mode = 1;
            $end_of_paragraph = 0;
        }
        ($line,$ref)=$self->shiftline();
    }
    if (length $paragraph) {
        $paragraph =~ s/\n$//;
        do_paragraph($self,$paragraph,$wrapped_mode);
        $self->pushline("\n");
    }
}

sub parse_colobothelp {
    my ($self,$line,$ref,$paragraph,$wrapped_mode,$s_mode,$expect_header,$end_of_paragraph) = @_;

    if (($s_mode == 1) and ($line !~ /^\\s;/)) {
        # Process the end of \s; blocks
        $s_mode = 0;
        # substr removes the last superfluous \n
        my $s_block = onlink($self->translate(substr(offlink($paragraph),0,-1),$ref,"\\s; block (usually verbatim code)"));
        $s_block =~ s/(\n|^)/$1\\s;/g;
        $self->pushline($s_block."\n");
        $paragraph="";
        $wrapped_mode = 0;
    }

    if ( $line =~ /^\s*$/
         or $line =~ m/^\\[nctr];$/) {
        # Break paragraphs on lines containing only spaces or any of \n; \c; \t; \r; (alone)

        # Drop the latest EOL to avoid having it in the translation
        my $dropped_eol = ($paragraph =~ s/\n$//);
        do_paragraph($self,$paragraph,$wrapped_mode);
        $self->pushline("\n") if $dropped_eol; # Therefore only add it back if it was removed
        $paragraph="";
        $wrapped_mode = 0;
        $self->pushline($line."\n");
    } elsif ($line =~ s/^(\\s;)//) {
        # Lines starting with \s; are special (yellow-background, usually code-block)
        # Break paragraph before them
        if($s_mode == 0) {
            $s_mode = 1;
            my $dropped_eol = ($paragraph =~ s/\n$//);
            do_paragraph($self,$paragraph,$wrapped_mode);
            $self->pushline("\n") if $dropped_eol; # Therefore only add it back if it was removed
            $paragraph="";
            $wrapped_mode = 0;
        }
        $paragraph .= $line."\n";
    } elsif ($line =~ s/^(\\[bt];)//) {
        # Break paragraphs on \b; or \t; headers
        do_paragraph($self,$paragraph,$wrapped_mode);
        $paragraph="";
        $wrapped_mode = 1;

        $self->pushline($1.onlink($self->translate(offlink($line),$ref,"$1 header")."\n"));
    } elsif ($line =~ /^\\image (.*) (\d*) (\d*);$/) {
        # Discard lines with \image name lx ly; tags
        do_paragraph($self,$paragraph,$wrapped_mode);
        $paragraph="";
        $wrapped_mode = 1;

        $self->pushline("\\image ".$self->translate($1,$ref,'Image filename')." $2 $3;\n");
    } elsif ( $line =~ /^=+$/
              or $line =~ /^_+$/
              or $line =~ /^-+$/) {
        $wrapped_mode = 0;
        $paragraph .= $line."\n";
        do_paragraph($self,$paragraph,$wrapped_mode);
        $paragraph="";
        $wrapped_mode = 1;
    } elsif ($line =~ s/^(\s*)([0-9]\)|[o-])(\s*)//) {
        # Break paragraphs on lines starting with either number + parenthesis or any of o- + space
        do_paragraph($self,$paragraph,$wrapped_mode);
        $paragraph="";
        $wrapped_mode = 1;

        $self->pushline("$1$2$3".onlink($self->translate(offlink($line),$ref,"Bullet: '$2'")."\n"));
    } else {
        # All paragraphs are non-wrap paragraphs by default
        $wrapped_mode = 0;
        undef $self->{bullet};
        undef $self->{indent};
        $paragraph .= $line."\n";
    }
    return ($paragraph,$wrapped_mode,$s_mode,$expect_header,$end_of_paragraph);
}

sub offlink {
    my ($paragraph) = @_;
    # Replace \button $id; as pseudo xHTML <button $id/> tags
    $paragraph =~ s#\\(button|key) ([^;]*?);#<$1 $2/>#g;
    # Put \const;Code\norm; sequences into pseudo-HTML <format const> tags
    $paragraph =~ s#\\(const|type|token|key);([^\\;]*?)\\norm;#<format $1>$2</format>#g;
    # Transform CBot links \l;text\u target; into pseudo-HTML <a target>text</a>
    $paragraph =~ s#\\l;(.*?)\\u ([^;]*?);#<a $2>$1</a>#g;
    # Cleanup pseudo-html targets separated by \\ to have a single character |
    $paragraph =~ s#<a (.*?)\\(.*?)>#<a $1|$2>#g;
    # Replace remnants of \const; \type; \token or \norm; as pseudo xHTML <const/> tags
    $paragraph =~ s#\\(const|type|token|norm|key);#<$1/>#g;
    # Put \c;Code\n; sequences into pseudo-HTML <code> tags
    $paragraph =~ s#\\c;([^\\;]*?)\\n;#<code>$1</code>#g;
    # Replace remnants of \s; \c; \b; or \n; as pseudo xHTML <s/> tags
    $paragraph =~ s#\\([scbn]);#<$1/>#g;
    return ($paragraph);
}

sub onlink {
    my ($paragraph) = @_;
    # Invert the replace of remnants of \s; \c; \b; or \n; as pseudo xHTML <s/> tags
    $paragraph =~ s#<([scbn])/>#\\$1;#g;
    # Invert the put of \c;Code\n; sequences into pseudo-HTML <code> tags
    $paragraph =~ s#<code>([^\\;]*?)</code>#\\c;$1\\n;#g;
    # Invert the replace of remnants of \const; \type; \token or \norm; as pseudo xHTML <const/> tags
    $paragraph =~ s#<(const|type|token|norm|key)/>#\\$1;#g;
    # Invert the cleanup of pseudo-html targets separated by \\ to have a single character |
    $paragraph =~ s#<a (.*?)\|(.*?)>#<a $1\\$2>#g;
    # Invert the transform of CBot links \l;text\u target; into pseudo-HTML <a target>text</a>
    $paragraph =~ s#<a (.*?)>(.*?)</a>#\\l;$2\\u $1;#g;
    # Invert the put of \const;Code\norm; sequences into pseudo-HTML <format const> tags
    $paragraph =~ s#<format (const|type|token|key)>([^\\;]*?)</format>#\\$1;$2\\norm;#g;
    # Invert the replace of \button $id; as pseudo xHTML <button $id/> tags
    $paragraph =~ s#<(button|key) ([^;]*?)/>#\\$1 $2;#g;
    return ($paragraph);
}

sub do_paragraph {
    my ($self, $paragraph, $wrap) = (shift, shift, shift);
    my $type = shift || $self->{type} || "Plain text";
    return if ($paragraph eq "");

    my $end = "";
    if ($wrap) {
        $paragraph =~ s/^(.*?)(\n*)$/$1/s;
        $end = $2 || "";
    }
    my $t = onlink($self->translate(offlink($paragraph),
                                    $self->{ref},
                                    $type,
                                    "wrap" => $wrap));
    if (defined $self->{bullet}) {
        my $bullet = $self->{bullet};
        my $indent1 = $self->{indent};
        my $indent2 = $indent1.(' ' x length($bullet));
        $t =~ s/^/$indent1$bullet/s;
        $t =~ s/\n(.)/\n$indent2$1/sg;
    }
    $self->pushline( $t.$end );
}

1;
__END__
@@ -1,86 +0,0 @@
# Locale::Po4a::ColobotLevels -- Convert Colobot levels
#
# This program is free software; you may redistribute it and/or modify it
# under the terms of GPLv3.
#

use Locale::Po4a::TransTractor qw(process new);
use Locale::Po4a::Common;

package Locale::Po4a::Colobotlevel;

use 5.006;
use strict;
use warnings;

require Exporter;

use vars qw(@ISA @EXPORT $AUTOLOAD);
@ISA = qw(Locale::Po4a::TransTractor);
@EXPORT = qw();

my $debug=0;

sub initialize {}

sub parse {
    my $self=shift;
    my ($line,$line_source);
    my $language_char;

    LINE:
    ($line,$line_source)=$self->shiftline();

    while (defined($line)) {
        chomp($line);

        if ($line =~ /^(Title|Resume|ScriptName)/) {
            # Text before the first dot
            $line =~ m/(^[^"\r\n]*)\./;
            my $type = $1;

            # One char just after the .
            $line =~ m/\.(.)/;
            my $E = $1;
            if (not $language_char) {
                # Take this one-char only once
                $language_char = $self->translate($E, '', 'One-char language identifier');
            }

            # The text between .E and first quote
            $line =~ m/\.$E([^\r\n"]*?)(text|resume)="([^\r\n"]*?)"([^\r\n"]*)((text|resume)="([^\r\n"]*?)"([^\r\n"]*))?$/;
            my $spacing_1 = $1;
            my $subtype_1 = $2;
            my $quoted_1 = $3;
            my $spacing_2 = $4;
            my $secondpart = $5;
            my $subtype_2 = $6;
            my $quoted_2 = $7;
            my $spacing_3 = $8;

            my $par_1 = $self->translate($quoted_1, $line_source, $type."-".$subtype_1);
            $par_1 =~ s/^\D*\d*://;
            if ($secondpart) {
                my $par_2 = $self->translate($quoted_2, $line_source, $type."-".$subtype_2);
                $par_2 =~ s/^\D*\d*://;

                # This is awkward, but works
                $spacing_2 = $spacing_2.$subtype_2.'="'.$par_2.'"'.$spacing_3;
            }
            $par_1 =~ s/\n/\\n/g;
            $spacing_2 =~ s/\n/\\n/g;

            # Now push the result
            $self->pushline($type.'.'.$language_char.$spacing_1.$subtype_1.'="'.$par_1.'"'.$spacing_2."\n");
        }
        else {
            $self->pushline("$line\n");
        }
        # Reinit the loop
        ($line,$line_source)=$self->shiftline();
    }
}

1;
__END__
File diff suppressed because it is too large
@@ -0,0 +1,113 @@
#!/usr/bin/env python

import argparse
import os
import sys

from common import create_template_and_language_files, nice_mkdir
from translate_help import create_help_translation_jobs
from translate_level import create_level_translation_jobs
from translate_chaptertitles import create_chaptertitles_translation_jobs

def parse_args():
    parser = argparse.ArgumentParser(description = 'Generate translations of Colobot data files')

    parser.add_argument('--mode',
                        choices = ['generate', 'print_files'],
                        required = True,
                        help = 'Mode of operation: run generation process or only print input and output files')
    parser.add_argument('--type',
                        choices = ['help', 'level', 'chaptertitles'],
                        required = True,
                        help = 'Type of translation: help file or level file')
    parser.add_argument('--input_dir',
                        required = True,
                        help = 'Input file(s) or directory to translate')
    parser.add_argument('--po_dir',
                        required = True,
                        help = 'Translations directory (with *.pot and *.po files)')
    parser.add_argument('--output_dir',
                        help = 'Output directory for translated files')
    parser.add_argument('--output_subdir',
                        help = 'Install subdirectory (only for help files)')
    parser.add_argument('--signal_file',
                        help = 'Signal file to indicate successful operation')

    return parser.parse_args()

def preprocess_args(args):
    if not os.path.isdir(args.input_dir):
        sys.stderr.write('Expected existing input directory!\n')
        sys.exit(1)

    if not os.path.isdir(args.po_dir):
        sys.stderr.write('Expected existing translations directory!\n')
        sys.exit(1)

    if args.output_dir:
        nice_mkdir(args.output_dir)

def create_translation_jobs(args, template_file, language_files):
    translation_jobs = []

    if args.type == 'help':
        translation_jobs = create_help_translation_jobs(args.input_dir,
                                                        args.output_dir,
                                                        args.output_subdir,
                                                        template_file,
                                                        language_files)
    elif args.type == 'level':
        translation_jobs = create_level_translation_jobs(args.input_dir,
                                                         args.output_dir,
                                                         template_file,
                                                         language_files)
    elif args.type == 'chaptertitles':
        translation_jobs = create_chaptertitles_translation_jobs(args.input_dir,
                                                                 args.output_dir,
                                                                 template_file,
                                                                 language_files)

    return translation_jobs

def print_files(translation_jobs):
    input_files = []
    output_files = []
    for translation_job in translation_jobs:
        input_files.append(translation_job.get_input_file_name())
        output_files.append(translation_job.get_output_file_name())

    sys.stdout.write(';'.join(input_files))
    sys.stdout.write('\n')
    sys.stdout.write(';'.join(output_files))

def generate_translations(translation_jobs, template_file, language_files):
    for translation_job in translation_jobs:
        translation_job.run()

    template_file.merge_and_save()
    for language_file in language_files:
        language_file.merge_and_save(template_file)

def save_signalfile(signal_file_name):
    if signal_file_name:
        nice_mkdir(os.path.dirname(signal_file_name))
        with open(signal_file_name, 'w') as signal_file:
            signal_file.close()

def main():
    args = parse_args()
    preprocess_args(args)

    (template_file, language_files) = create_template_and_language_files(args.po_dir)
    translation_jobs = create_translation_jobs(args, template_file, language_files)

    if args.mode == 'print_files':
        print_files(translation_jobs)

    elif args.mode == 'generate':
        generate_translations(translation_jobs, template_file, language_files)

        save_signalfile(args.signal_file)

if __name__ == '__main__':
    main()
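The build system calls this script twice per data category: once with --mode print_files at configure time and once with --mode generate at build time. A hypothetical direct invocation of the generate step, for illustration only (all paths are placeholders):

# Hypothetical direct invocation of process_translations.py (paths are placeholders).
import subprocess
import sys

subprocess.check_call([sys.executable, 'i18n-tools/scripts/process_translations.py',
                       '--mode', 'generate',
                       '--type', 'help',
                       '--input_dir', 'help/cbot',
                       '--po_dir', 'help/cbot/po',
                       '--output_dir', 'build/help-po/help/cbot',
                       '--output_subdir', 'cbot',
                       '--signal_file', 'build/help-po/help/cbot/translation'])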
@@ -1,36 +0,0 @@
#!/bin/bash

##
# Script to execute PO4A with proper environment and command-line options
#
# The arguments are the config file, which is assumed to be already present, and
# an optional dummy signal file which is used by the build system
##

# stop on errors
set -e

if [ $# -ne 1 -a $# -ne 2 ]; then
    echo "Invalid arguments!" >&2
    echo "Usage: $0 po4a_config_file [po4a_signalfile]" >&2
    exit 1
fi

PO4A_CONFIG_FILE="$1"
PO4A_SIGNALFILE="$2"

# get the directory where the script is in
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# run po4a
if [ -n "$VERBOSE" ]; then
    verbosity="-v"
else
    verbosity="-q"
fi
PERL5LIB="${SCRIPT_DIR}/perllib${PERL5LIB+:}$PERL5LIB" po4a -k0 $verbosity -f --msgmerge-opt=--no-wrap --master-charset=UTF-8 --localized-charset=UTF-8 "$PO4A_CONFIG_FILE"

# if applicable, touch dummy signal file to indicate success
if [ -n "$PO4A_SIGNALFILE" ]; then
    touch "$PO4A_SIGNALFILE"
fi
@@ -0,0 +1,42 @@
import os

from translate_level import LevelTranslationJob
from common import nice_mkdir, nice_path_join

"""
Create jobs for chaptertitle file translation

Assumes that input_dir has structure like so:
${input_dir}/dir1/chaptertitle.txt
...
${input_dir}/dirN/chaptertitle.txt

The output files will be saved in:
${output_dir}/dir1/chaptertitle.txt
...
${output_dir}/dirN/chaptertitle.txt

The actual translation is done using the same jobs as level files
"""
def create_chaptertitles_translation_jobs(input_dir, output_dir, template_file, language_files):
    translation_jobs = []

    for subdirectory in sorted(os.listdir(input_dir)):
        input_subdirectory = os.path.join(input_dir, subdirectory)
        if not os.path.isdir(input_subdirectory):
            continue

        input_file = os.path.join(input_subdirectory, 'chaptertitle.txt')
        if not os.path.isfile(input_file):
            continue

        output_subdirectory = nice_path_join(output_dir, subdirectory)
        nice_mkdir(output_subdirectory)

        translation_jobs.append(LevelTranslationJob(
            input_file = input_file,
            output_file = nice_path_join(output_subdirectory, 'chaptertitle.txt'),
            template_file = template_file,
            language_files = language_files))

    return translation_jobs
@ -0,0 +1,229 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
from common import TranslationJob, nice_mkdir, nice_path_join
|
||||
|
||||
"""
|
||||
Types of input lines
|
||||
"""
|
||||
TYPE_WHITESPACE = 1 # whitespace only
|
||||
TYPE_HEADER = 2 # header (beginning with \b or \t)
|
||||
TYPE_BULLET = 3 # bullet point
|
||||
TYPE_IMAGE = 4 # image (beginning with \image)
|
||||
TYPE_CODE = 5 # code (beginning with \s;)
|
||||
TYPE_PLAIN = 6 # plain text
|
||||
|
||||
class HelpTranslationJob(TranslationJob):
|
||||
def __init__(self, **kwargs):
|
||||
TranslationJob.__init__(self, **kwargs)
|
||||
self.template_file = kwargs['template_file']
|
||||
self.language_file = kwargs['language_file']
|
||||
self.line_buffer = None
|
||||
|
||||
def process_file(self):
|
||||
while True:
|
||||
(paragraph, paragraph_type) = self.read_input_paragraph()
|
||||
if not paragraph:
|
||||
break
|
||||
|
||||
if paragraph_type == TYPE_WHITESPACE:
|
||||
self.process_whitespace(paragraph[0])
|
||||
elif paragraph_type == TYPE_HEADER:
|
||||
self.process_header(paragraph[0])
|
||||
elif paragraph_type == TYPE_BULLET:
|
||||
self.process_bullet(paragraph[0])
|
||||
elif paragraph_type == TYPE_IMAGE:
|
||||
self.process_image(paragraph[0])
|
||||
elif paragraph_type == TYPE_CODE:
|
||||
self.process_code(paragraph)
|
||||
elif paragraph_type == TYPE_PLAIN:
|
||||
self.process_plain(paragraph)
|
||||
|
||||
"""
|
||||
Read one or more lines of input with same line type and return the list as paragraph
|
||||
Exception is types which are processed as single lines, giving only paragraph with one line
|
||||
"""
|
||||
def read_input_paragraph(self):
|
||||
paragraph = None
|
||||
paragraph_type = None
|
||||
while True:
|
||||
line = None
|
||||
line_type = None
|
||||
if self.line_buffer:
|
||||
(line, line_type) = self.line_buffer
|
||||
self.line_buffer = None
|
||||
else:
|
||||
line = self.read_input_line()
|
||||
if line:
|
||||
line_type = self.check_line_type(line.text)
|
||||
|
||||
if not line:
|
||||
break
|
||||
|
||||
if not paragraph_type:
|
||||
paragraph_type = line_type
|
||||
|
||||
if paragraph_type == line_type:
|
||||
if not paragraph:
|
||||
paragraph = []
|
||||
paragraph.append(line)
|
||||
else:
|
||||
self.line_buffer = (line, line_type)
|
||||
break
|
||||
|
||||
if line_type in [TYPE_WHITESPACE, TYPE_HEADER, TYPE_BULLET, TYPE_IMAGE]:
|
||||
break
|
||||
|
||||
return (paragraph, paragraph_type)
|
||||
|
||||
def check_line_type(self, line):
|
||||
if re.match(r'^\s*$', line) or re.match(r'^\\[nctr];$', line):
|
||||
return TYPE_WHITESPACE
|
||||
elif re.match(r'^\\[bt];', line):
|
||||
return TYPE_HEADER
|
||||
elif re.match(r'^\s*([0-9]\)|[o-])', line):
|
||||
return TYPE_BULLET
|
||||
elif re.match(r'^\\image.*;$', line):
|
||||
return TYPE_IMAGE
|
||||
elif re.match(r'^\\s;', line):
|
||||
return TYPE_CODE
|
||||
else:
|
||||
return TYPE_PLAIN
|
||||
|
||||
def process_whitespace(self, line):
|
||||
self.write_output_line(line.text)
|
||||
|
||||
def process_header(self, line):
|
||||
match = re.match(r'^(\\[bt];)(.*)', line.text)
|
||||
header_type = match.group(1)
|
||||
header_text = match.group(2)
|
||||
translated_header_text = self.translate_text(header_text, line.occurrence, header_type + ' header')
|
||||
self.write_output_line(header_type + translated_header_text)
|
||||
|
||||
def process_bullet(self, line):
|
||||
match = re.match(r'^(\s*)([0-9]\)|[o-])(\s*)(.*)', line.text)
|
||||
spacing_before_bullet = match.group(1)
|
||||
bullet_point = match.group(2)
|
||||
spacing_after_bullet = match.group(3)
|
||||
text = match.group(4)
|
||||
translated_text = self.translate_text(
|
||||
text, line.occurrence, "Bullet: '{0}'".format(bullet_point))
|
||||
self.write_output_line(spacing_before_bullet + bullet_point + spacing_after_bullet + translated_text)
|
||||
|
||||
def process_image(self, line):
|
||||
match = re.match(r'^(\\image )(.*)( \d* \d*;)$', line.text)
|
||||
image_command = match.group(1)
|
||||
image_source = match.group(2)
|
||||
image_coords = match.group(3)
|
||||
translated_image_source = self.translate_text(image_source, line.occurrence, 'Image filename')
|
||||
self.write_output_line(image_command + translated_image_source + image_coords)
|
||||
|
||||
def process_code(self, paragraph):
|
||||
text_lines = []
|
||||
for line in paragraph:
|
||||
match = re.match(r'^\\s;(.*)', line.text)
|
||||
code_line = match.group(1)
|
||||
text_lines.append(code_line)
|
||||
|
||||
joined_text_lines = '\n'.join(text_lines)
|
||||
translated_text_lines = self.translate_text(joined_text_lines, paragraph[0].occurrence, 'Source code')
|
||||
for line in translated_text_lines.split('\n'):
|
||||
self.write_output_line(r'\s;' + line)
|
||||
|
||||
def process_plain(self, paragraph):
|
||||
text_lines = []
|
||||
for line in paragraph:
|
||||
text_lines.append(line.text)
|
||||
|
||||
joined_text_lines = '\n'.join(text_lines)
|
||||
translated_text_lines = self.translate_text(joined_text_lines, paragraph[0].occurrence, 'Plain text')
|
||||
for line in translated_text_lines.split('\n'):
|
||||
self.write_output_line(line)
|
||||
|
||||
def translate_text(self, text, occurrence, type_comment):
|
||||
converted_text = convert_escape_syntax_to_tag_syntax(text)
|
||||
self.template_file.insert_entry(converted_text, occurrence, type_comment)
|
||||
|
||||
if not self.language_file:
|
||||
return text
|
||||
|
||||
translated_text = self.language_file.translate(converted_text)
|
||||
return convert_tag_syntax_to_escape_syntax(translated_text)
|
||||
|
||||
def convert_escape_syntax_to_tag_syntax(text):
|
||||
# Replace \button $id; as pseudo xHTML <button $id/> tags
|
||||
text = re.sub(r'\\(button|key) ([^;]*?);', r'<\1 \2/>', text)
|
||||
# Put \const;Code\norm; sequences into pseudo-HTML <format const> tags
|
||||
text = re.sub(r'\\(const|type|token|key);([^\\;]*?)\\norm;', r'<format \1>\2</format>', text)
|
||||
# Transform CBot links \l;text\u target; into pseudo-HTML <a target>text</a>
|
||||
text = re.sub(r'\\l;(.*?)\\u (.*?);', r'<a \2>\1</a>', text)
|
||||
# Cleanup pseudo-html targets separated by \\ to have a single character |
|
||||
text = re.sub(r'<a (.*?)\\(.*?)>', r'<a \1|\2>', text)
|
||||
# Replace remnants of \const; \type; \token, \norm; or \key; as pseudo xHTML <const/> tags
|
||||
text = re.sub(r'\\(const|type|token|norm|key);', r'<\1/>', text)
|
||||
# Put \c;Code\n; sequences into pseudo-HTML <code> tags
|
||||
text = re.sub(r'\\c;([^\\;]*?)\\n;', r'<code>\1</code>', text)
|
||||
# Replace remnants of \s; \c; \b; or \n; as pseudo xHTML <s/> tags
|
||||
text = re.sub(r'\\([scbn]);', r'<\1/>', text)
|
||||
return text
|
||||
|
||||
def convert_tag_syntax_to_escape_syntax(text):
|
||||
# Invert the replace remnants of \s; \c; \b; or \n; as pseudo xHTML <s/> tags
|
||||
text = re.sub(r'<([scbn])/>', r'\\\1;', text)
|
||||
# Invert the put of \c;Code\n; sequences into pseudo-HTML <code> tags
|
||||
text = re.sub(r'<code>([^\\;]*?)</code>', r'\\c;\1\\n;', text)
|
||||
# Invert the replace remnants of \const; \type; \token or \norm; as pseudo xHTML <const/> tags
|
||||
text = re.sub(r'<(const|type|token|norm|key)/>', r'\\\1;', text)
|
||||
# Invert the cleanup of pseudo-html targets separated by \\ to have a single character |
|
||||
text = re.sub(r'<a (.*?)\|(.*?)>', r'<a \1\\\2>', text)
|
||||
# Invert the transform of CBot links \l;text\u target; into pseudo-HTML <a target>text</a>
|
||||
text = re.sub(r'<a (.*?)>(.*?)</a>', r'\\l;\2\\u \1;', text)
|
||||
# Invert the put \const;Code\norm; sequences into pseudo-HTML <format const> tags
|
||||
text = re.sub(r'<format (const|type|token|key)>([^\\;]*?)</format>', r'\\\1;\2\\norm;', text)
|
||||
# Invert the replace of \button $id; as pseudo xHTML <button $id/> tags
|
||||
text = re.sub(r'<(button|key) (.*?)/>', r'\\\1 \2;', text)
|
||||
return text
|
||||
|
||||
"""
|
||||
Create jobs for help translation
|
||||
|
||||
Assumes that input_dir has structure like so:
|
||||
${input_dir}/E/help_file1.txt
|
||||
...
|
||||
${input_dir}/E/help_fileN.txt
|
||||
|
||||
The output files will be saved in:
|
||||
${output_dir}/${language_char1}/${install_subdir}/help_file1.txt
|
||||
...
|
||||
${output_dir}/${language_charM}/${install_subdir}/help_fileN.txt
|
||||
"""
|
||||
def create_help_translation_jobs(input_dir, output_dir, install_subdir, template_file, language_files):
|
||||
translation_jobs = []
|
||||
|
||||
e_dir = os.path.join(input_dir, 'E')
|
||||
input_files = sorted(os.listdir(e_dir))
|
||||
|
||||
if not install_subdir:
|
||||
install_subdir = ''
|
||||
|
||||
language_files_list = []
|
||||
if len(language_files) > 0:
|
||||
language_files_list = language_files
|
||||
else:
|
||||
# We need at least one dummy language file to create any jobs
|
||||
language_files_list = [None]
|
||||
|
||||
for language_file in language_files_list:
|
||||
output_translation_dir = None
|
||||
if language_file:
|
||||
output_translation_dir = nice_path_join(output_dir, language_file.language_char(), install_subdir)
|
||||
nice_mkdir(output_translation_dir)
|
||||
|
||||
for input_file in input_files:
|
||||
translation_jobs.append(HelpTranslationJob(
|
||||
input_file = os.path.join(e_dir, input_file),
|
||||
output_file = nice_path_join(output_translation_dir, input_file),
|
||||
template_file = template_file,
|
||||
language_file = language_file))
|
||||
|
||||
return translation_jobs
|
|
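
# Worked example (an illustration only; file and language names are hypothetical):
# for input_dir 'help/cbot', install_subdir 'cbot' and a German catalogue
# 'de.po' (language char 'D'), a job is created that turns
#     help/cbot/E/move.txt
# into
#     ${output_dir}/D/cbot/move.txt
# With no *.po files at all, one dummy job per input file is still created, so
# the English texts can still be collected into the translation template.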
@@ -0,0 +1,98 @@
import os
import re

from translate_help import HelpTranslationJob
from common import TranslationJob, nice_mkdir, nice_path_join

class LevelTranslationJob(TranslationJob):
    def __init__(self, **kwargs):
        TranslationJob.__init__(self, **kwargs)
        self.template_file = kwargs['template_file']
        self.language_files = kwargs['language_files']
        self.line_buffer = None

    def process_file(self):
        while True:
            line = self.read_input_line()
            if not line:
                break

            # English version is always written
            self.write_output_line(line.text)

            match = re.match(r'^(Title|Resume|ScriptName)\.E(.*)', line.text)
            if match:
                for language_file in self.language_files:
                    self.add_translated_line(match, line.occurrence, language_file)

    def add_translated_line(self, command_match, occurrence, language_file):
        command = command_match.group(1)
        arguments = command_match.group(2)

        translated_arguments = arguments
        for attribute_match in re.finditer('(text|resume)="([^"]*)"', arguments):
            whole_attribute_match = attribute_match.group(0)
            attribute = attribute_match.group(1)
            text = attribute_match.group(2)

            self.template_file.insert_entry(text, occurrence, command + '-' + attribute)

            translated_arguments = translated_arguments.replace(
                whole_attribute_match,
                u'{0}="{1}"'.format(attribute, language_file.translate(text)))

        self.write_output_line(u'{0}.{1}{2}'.format(
            command,
            language_file.language_char(),
            translated_arguments))

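# Worked example (an illustration only; the translated strings are made up):
# given the scene.txt line
#     Title.E text="Equipment"
# and language files for D and F, process_file() writes the English line
# unchanged and then one translated copy per language file, e.g.
#     Title.D text="Ausrüstung"
#     Title.F text="Equipement"
# The English text is also recorded in the template file via insert_entry().
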
"""
|
||||
Create jobs for chapter translation
|
||||
|
||||
Assumes that input_dir has structure like so:
|
||||
${input_dir}/scene.txt
|
||||
${input_dir}/help/help_file1.txt
|
||||
...
|
||||
${input_dir}/help/help_fileN.txt
|
||||
|
||||
The output files will be saved in:
|
||||
${output_dir}/scene.txt
|
||||
${output_dir}/help/help_file1.${language_char1}.txt
|
||||
...
|
||||
${output_dir}/help/help_fileN.${language_charM}.txt
|
||||
"""
|
||||
def create_level_translation_jobs(input_dir, output_dir, template_file, language_files):
|
||||
translation_jobs = []
|
||||
|
||||
input_file = os.path.join(input_dir, 'scene.txt')
|
||||
translation_jobs.append(LevelTranslationJob(
|
||||
input_file = input_file,
|
||||
output_file = nice_path_join(output_dir, 'scene.txt'),
|
||||
template_file = template_file,
|
||||
language_files = language_files))
|
||||
|
||||
input_help_dir = os.path.join(input_dir, 'help')
|
||||
if os.path.isdir(input_help_dir):
|
||||
output_help_dir = nice_path_join(output_dir, 'help')
|
||||
nice_mkdir(output_help_dir)
|
||||
|
||||
language_files_list = []
|
||||
if len(language_files) > 0:
|
||||
language_files_list = language_files
|
||||
else:
|
||||
# We need at least one dummy language file to create any jobs
|
||||
language_files_list = [None]
|
||||
|
||||
for language_file in language_files_list:
|
||||
for help_file in sorted(os.listdir(input_help_dir)):
|
||||
if language_file:
|
||||
translated_help_file = help_file.replace('.E.txt', '.{0}.txt'.format(language_file.language_char()))
|
||||
|
||||
translation_jobs.append(HelpTranslationJob(
|
||||
input_file = os.path.join(input_help_dir, help_file),
|
||||
output_file = nice_path_join(output_help_dir, translated_help_file),
|
||||
template_file = template_file,
|
||||
language_file = language_file))
|
||||
|
||||
return translation_jobs
|
|
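
# Worked example (an illustration only; file names are hypothetical): for a
# level directory containing scene.txt and help/instructions.E.txt, and a
# French catalogue (language char 'F'), the jobs produce
#     ${output_dir}/scene.txt               (English plus all translated lines)
#     ${output_dir}/help/instructions.F.txt
# The English help files themselves are installed directly by the CMake rules,
# not by these jobs.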
@@ -1,7 +1,6 @@
cmake_minimum_required(VERSION 2.8)

include(../i18n-tools/CommonI18N.cmake)
include(../i18n-tools/LevelsI18N.cmake)
include(../i18n-tools/I18NTools.cmake)

set(LEVEL_INSTALL_DATA_DIR ${COLOBOT_INSTALL_DATA_DIR}/levels)

@@ -9,63 +8,65 @@ set(LEVEL_INSTALL_DATA_DIR ${COLOBOT_INSTALL_DATA_DIR}/levels)
# Add level category directory with all chapters inside
##
function(add_level_category level_category_dir)
    file(GLOB chaptertitle_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}/${level_category_dir} ${level_category_dir}/chapter*/chaptertitle.txt)
    list(SORT chaptertitle_files)
    if(PO4A AND EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${level_category_dir}/po/)
        generate_chaptertitles_i18n(translated_chaptertitle_files
            ${level_category_dir}
            "${chaptertitle_files}"
            ${level_category_dir}/po
            ${DATA_BINARY_DIR}/levels-po/${level_category_dir})
    else()
        file(GLOB translated_chaptertitle_files ${level_category_dir}/chapter*/chaptertitle.txt)

    message(STATUS "Adding translation targets for level/${level_category_dir}")

    # Without Python, just install all files as they are
    if(NOT PYTHONINTERP_FOUND)
        install(DIRECTORY ${level_category_dir} DESTINATION ${LEVEL_INSTALL_DATA_DIR})
        return()
    endif()

    file(GLOB chapter_dirs RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${level_category_dir}/chapter*)
    list(SORT chapter_dirs)
    list(LENGTH chapter_dirs chapter_dirs_count)
    math(EXPR iterate_range "${chapter_dirs_count} - 1")
    foreach(index RANGE ${iterate_range})
        list(GET chapter_dirs ${index} chapter_dir)
        list(GET translated_chaptertitle_files ${index} translated_chaptertitle_file)
        install(FILES ${translated_chaptertitle_file} DESTINATION ${LEVEL_INSTALL_DATA_DIR}/${chapter_dir})
    endforeach()
    add_chaptertitles(${level_category_dir})

    file(GLOB chapter_dirs RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${level_category_dir}/chapter*)
    foreach(chapter_dir ${chapter_dirs})
        add_chapter(${chapter_dir})
    endforeach()
endfunction()

##
# Add chapter directory with all levels inside
##
function(add_chapter chapter_dir)
    file(GLOB level_dirs RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${chapter_dir}/level*)
    file(GLOB level_dirs RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${level_category_dir}/chapter*/level*)
    foreach(level_dir ${level_dirs})
        add_level(${level_dir})
    endforeach()

endfunction()

##
# Add chaptertitles
##
function(add_chaptertitles level_category_dir)

    set(work_dir ${DATA_BINARY_DIR}/levels-po/${level_category_dir})
    generate_translations(translated_chaptertitle_files
        "chaptertitles"
        ${CMAKE_CURRENT_SOURCE_DIR}
        ${level_category_dir}
        ${level_category_dir}/po
        ${work_dir}
        "")

    install_preserving_relative_paths("${translated_chaptertitle_files}"
        ${work_dir}
        ${LEVEL_INSTALL_DATA_DIR}/${level_category_dir})

endfunction()

##
# Add level directory
##
function(add_level level_dir)
    file(GLOB original_help_files ${level_dir}/help/*.txt)
    list(SORT original_help_files)
    if(PO4A AND EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${level_dir}/po/)
        generate_level_i18n(translated_level_file
            translated_help_files
            ${level_dir}/scene.txt
            "${original_help_files}"
            ${level_dir}/po
            ${DATA_BINARY_DIR}/levels-po/${level_dir})
    else()
        set(translated_level_file ${level_dir}/scene.txt)
    endif()
    install(FILES ${translated_level_file} DESTINATION ${LEVEL_INSTALL_DATA_DIR}/${level_dir})
    install(FILES ${original_help_files} DESTINATION ${LEVEL_INSTALL_DATA_DIR}/${level_dir}/help)
    install(FILES ${translated_help_files} DESTINATION ${LEVEL_INSTALL_DATA_DIR}/${level_dir}/help)

    set(work_dir ${DATA_BINARY_DIR}/levels-po/${level_dir})
    generate_translations(translated_level_files
        "level"
        ${CMAKE_CURRENT_SOURCE_DIR}
        ${level_dir}
        ${level_dir}/po
        ${work_dir}
        "")

    file(GLOB english_help_files ${level_dir}/help/*)
    install(FILES ${english_help_files} DESTINATION ${LEVEL_INSTALL_DATA_DIR}/${level_dir}/help)

    install_preserving_relative_paths("${translated_level_files}"
        ${work_dir}
        ${LEVEL_INSTALL_DATA_DIR}/${level_dir})

endfunction()
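
# Usage sketch (illustrative; "exercises" stands in for a real category name):
#     add_level_category(exercises)
# installs the English files for every chapter and level in that category and,
# when the Python interpreter was found, also adds the matching translation
# targets defined by the functions above.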