# Top-level request generator for the ICU data build.
# Collects requests from the per-category generate_* helpers below and from
# generate_tree for each resource-bundle tree, then appends the icudata list
# request. Returns (build_dirs, requests).
def generate(config, glob, common_vars):
requests = []
- pkg_exclusions = set()
if len(glob("misc/*")) == 0:
print("Error: Cannot find data directory; please specify --glob_dir", file=sys.stderr)
# NOTE(review): this diff hunk is truncated — the error-exit path and the
# head of the build_dirs list definition are missing between the print()
# above and the stray "]" below. Recover them from the full file before
# applying further edits here.
"{TMP_DIR}/brkitr"
]
+ requests += generate_cnvalias(config, glob, common_vars)
+ requests += generate_confusables(config, glob, common_vars)
+ requests += generate_conversion_mappings(config, glob, common_vars)
+ requests += generate_brkitr_brk(config, glob, common_vars)
+ requests += generate_stringprep(config, glob, common_vars)
+ requests += generate_brkitr_dictionaries(config, glob, common_vars)
+ requests += generate_normalization(config, glob, common_vars)
+ requests += generate_coll_ucadata(config, glob, common_vars)
+ requests += generate_unames(config, glob, common_vars)
+ requests += generate_misc(config, glob, common_vars)
+ requests += generate_curr_supplemental(config, glob, common_vars)
+ requests += generate_translit(config, glob, common_vars)
+
+ # FIXME: Clean this up (duplicated logic)
+ # These recompute the same output lists as generate_brkitr_brk and
+ # generate_brkitr_dictionaries, solely to pass them as dep_files to the
+ # brkitr generate_tree call below.
+ brkitr_brk_files = []
+ input_files = [InFile(filename) for filename in glob("brkitr/rules/*.txt")]
+ output_files = [OutFile("brkitr/%s.brk" % v.filename[13:-4]) for v in input_files]
+ brkitr_brk_files += output_files
+ dict_files = []
+ input_files = [InFile(filename) for filename in glob("brkitr/dictionaries/*.txt")]
+ output_files = [OutFile("brkitr/%s.dict" % v.filename[20:-4]) for v in input_files]
+ dict_files += output_files
+
+ # Res Tree Files
+ # (input dirname, output dirname, resfiles.mk path, mk version var, mk source var, use pool file, dep files)
+ requests += generate_tree(config, glob, common_vars,
+ "locales",
+ None,
+ "resfiles.mk",
+ "GENRB_CLDR_VERSION",
+ "GENRB_SOURCE",
+ True,
+ [])
+
+ requests += generate_tree(config, glob, common_vars,
+ "curr",
+ "curr",
+ "resfiles.mk",
+ "CURR_CLDR_VERSION",
+ "CURR_SOURCE",
+ True,
+ [])
+
+ requests += generate_tree(config, glob, common_vars,
+ "lang",
+ "lang",
+ "resfiles.mk",
+ "LANG_CLDR_VERSION",
+ "LANG_SOURCE",
+ True,
+ [])
+
+ requests += generate_tree(config, glob, common_vars,
+ "region",
+ "region",
+ "resfiles.mk",
+ "REGION_CLDR_VERSION",
+ "REGION_SOURCE",
+ True,
+ [])
+
+ requests += generate_tree(config, glob, common_vars,
+ "zone",
+ "zone",
+ "resfiles.mk",
+ "ZONE_CLDR_VERSION",
+ "ZONE_SOURCE",
+ True,
+ [])
+
+ requests += generate_tree(config, glob, common_vars,
+ "unit",
+ "unit",
+ "resfiles.mk",
+ "UNIT_CLDR_VERSION",
+ "UNIT_SOURCE",
+ True,
+ [])
+
+ # TODO: We should not need timezoneTypes.res to build collation resource bundles.
+ # TODO: Maybe keyTypeData.res should be baked into the common library.
+ requests += generate_tree(config, glob, common_vars,
+ "coll",
+ "coll",
+ "colfiles.mk",
+ "COLLATION_CLDR_VERSION",
+ "COLLATION_SOURCE",
+ False,
+ [OutFile("coll/ucadata.icu"), OutFile("timezoneTypes.res"), OutFile("keyTypeData.res")])
+
+ requests += generate_tree(config, glob, common_vars,
+ "brkitr",
+ "brkitr",
+ "brkfiles.mk",
+ "BRK_RES_CLDR_VERSION",
+ "BRK_RES_SOURCE",
+ False,
+ brkitr_brk_files + dict_files)
+
+ requests += generate_tree(config, glob, common_vars,
+ "rbnf",
+ "rbnf",
+ "rbnffiles.mk",
+ "RBNF_CLDR_VERSION",
+ "RBNF_SOURCE",
+ False,
+ [])
+
+ # Meta-request expanded later by flatten_requests into the icudata.lst
+ # PrintFileRequest plus a VariableRequest.
+ requests += [
+ ListRequest(
+ name = "icudata_list",
+ variable_name = "icudata_all_output_files",
+ output_file = TmpFile("icudata.lst"),
+ include_tmp = False
+ )
+ ]
+
+ # NOTE(review): build_dirs is defined in the truncated hunk above.
+ return (build_dirs, requests)
+
+
def generate_cnvalias(config, glob, common_vars):
    """Build the request for the UConv name alias table (cnvalias.icu)."""
    # Single gencnval run: mappings/convrtrs.txt -> cnvalias.icu.
    return [
        SingleExecutionRequest(
            name="cnvalias",
            category="cnvalias",
            dep_files=[],
            input_files=[InFile("mappings/convrtrs.txt")],
            output_files=[OutFile("cnvalias.icu")],
            tool=IcuTool("gencnval"),
            args="-s {IN_DIR} -d {OUT_DIR} "
                "{INPUT_FILES[0]}",
            format_with={}
        )
    ]
def generate_confusables(config, glob, common_vars):
    """Build the request for the spoof-checker data file (confusables.cfu)."""
    source_txt = InFile("unidata/confusables.txt")
    whole_script_txt = InFile("unidata/confusablesWholeScript.txt")
    # cnvalias.icu is declared as a dependency rather than an input; gencfu
    # finds it via -i {OUT_DIR}.
    return [
        SingleExecutionRequest(
            name="confusables",
            category="confusables",
            dep_files=[OutFile("cnvalias.icu")],
            input_files=[source_txt, whole_script_txt],
            output_files=[OutFile("confusables.cfu")],
            tool=IcuTool("gencfu"),
            args="-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILES[0]} -w {IN_DIR}/{INPUT_FILES[1]} "
                "-o {OUTPUT_FILES[0]}",
            format_with={}
        )
    ]
def generate_conversion_mappings(config, glob, common_vars):
    """Build the requests for the UConv conversion tables (*.cnv)."""
    ucm_files = [InFile(filename) for filename in glob("mappings/*.ucm")]
    # "mappings/x.ucm" -> "x.cnv" (9-char dir prefix, 4-char ".ucm" suffix).
    cnv_files = [OutFile("%s.cnv" % f.filename[9:-4]) for f in ucm_files]
    # TODO: handle BUILD_SPECIAL_CNV_FILES? Means to add --ignore-siso-check flag to makeconv
    return [
        RepeatedOrSingleExecutionRequest(
            name="conversion_mappings",
            category="conversion_mappings",
            dep_files=[],
            input_files=ucm_files,
            output_files=cnv_files,
            tool=IcuTool("makeconv"),
            args="-s {IN_DIR} -d {OUT_DIR} -c {INPUT_FILE_PLACEHOLDER}",
            format_with={},
            repeat_with={
                # One filename per repeat; joins with spaces when flattened
                # into a single invocation.
                "INPUT_FILE_PLACEHOLDER": utils.SpaceSeparatedList(f.filename for f in ucm_files)
            }
        )
    ]
def generate_brkitr_brk(config, glob, common_vars):
    """Build the requests for compiled break-iterator rules (brkitr/*.brk)."""
    rule_files = [InFile(filename) for filename in glob("brkitr/rules/*.txt")]
    # "brkitr/rules/x.txt" -> "brkitr/x.brk" (13-char prefix, ".txt" suffix).
    brk_files = [OutFile("brkitr/%s.brk" % f.filename[13:-4]) for f in rule_files]
    return [
        RepeatedExecutionRequest(
            name="brkitr_brk",
            category="brkitr_rules",
            # genbrk locates cnvalias.icu through -i {OUT_DIR}.
            dep_files=[OutFile("cnvalias.icu")],
            input_files=rule_files,
            output_files=brk_files,
            tool=IcuTool("genbrk"),
            args="-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILE} "
                "-o {OUTPUT_FILE}",
            format_with={},
            repeat_with={}
        )
    ]
def generate_stringprep(config, glob, common_vars):
    """Build the requests for StringPrep profiles (*.spp)."""
    txt_files = [InFile(filename) for filename in glob("sprep/*.txt")]
    # "sprep/x.txt" -> bundle name "x" (6-char prefix, ".txt" suffix).
    bundle_names = [f.filename[6:-4] for f in txt_files]
    spp_files = [OutFile("%s.spp" % name) for name in bundle_names]
    return [
        RepeatedExecutionRequest(
            name="stringprep",
            category="stringprep",
            dep_files=[],
            input_files=txt_files,
            output_files=spp_files,
            tool=IcuTool("gensprep"),
            args="-s {IN_DIR}/sprep -d {OUT_DIR} -i {OUT_DIR} "
                "-b {BUNDLE_NAME} -m {IN_DIR}/unidata -u 3.2.0 {BUNDLE_NAME}.txt",
            format_with={},
            repeat_with={
                "BUNDLE_NAME": bundle_names
            }
        )
    ]
def generate_brkitr_dictionaries(config, glob, common_vars):
    """Build the requests for break-iterator dictionaries (brkitr/*.dict)."""
    txt_files = [InFile(filename) for filename in glob("brkitr/dictionaries/*.txt")]
    # "brkitr/dictionaries/x.txt" -> "brkitr/x.dict" (20-char prefix).
    dict_files = [OutFile("brkitr/%s.dict" % f.filename[20:-4]) for f in txt_files]
    # Per-dictionary gendict flags. A dictionary file with no entry here
    # raises KeyError — presumably deliberate so new files are not silently
    # built with the wrong flags; confirm before changing to .get().
    extra_options_map = {
        "brkitr/dictionaries/burmesedict.txt": "--bytes --transform offset-0x1000",
        "brkitr/dictionaries/cjdict.txt": "--uchars",
        "brkitr/dictionaries/khmerdict.txt": "--bytes --transform offset-0x1780",
        "brkitr/dictionaries/laodict.txt": "--bytes --transform offset-0x0e80",
        "brkitr/dictionaries/thaidict.txt": "--bytes --transform offset-0x0e00"
    }
    extra_options = [extra_options_map[f.filename] for f in txt_files]
    return [
        RepeatedExecutionRequest(
            name="dictionaries",
            category="brkitr_dictionaries",
            dep_files=[],
            input_files=txt_files,
            output_files=dict_files,
            tool=IcuTool("gendict"),
            args="-i {OUT_DIR} "
                "-c {EXTRA_OPTIONS} "
                "{IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with={},
            repeat_with={
                "EXTRA_OPTIONS": extra_options
            }
        )
    ]
def generate_normalization(config, glob, common_vars):
    """Build the requests that copy normalization data files (*.nrm)."""
    nrm_files = [InFile(filename) for filename in glob("in/*.nrm")]
    nrm_files.remove(InFile("in/nfc.nrm"))  # nfc.nrm is pre-compiled into C++
    # "in/x.nrm" -> "x.nrm" (strip the 3-char "in/" prefix).
    out_files = [OutFile(f.filename[3:]) for f in nrm_files]
    return [
        RepeatedExecutionRequest(
            name="normalization",
            category="normalization",
            dep_files=[],
            input_files=nrm_files,
            output_files=out_files,
            tool=IcuTool("icupkg"),
            # -t converts the prebuilt file to the target data format.
            args="-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with={},
            repeat_with={}
        )
    ]
def generate_coll_ucadata(config, glob, common_vars):
    """Build the request that copies the collation data file (coll/ucadata.icu)."""
    # Pick the Han-ordering variant selected by the build configuration
    # (config.coll_han_type is read as an attribute here).
    source = InFile("in/coll/ucadata-%s.icu" % config.coll_han_type)
    return [
        SingleExecutionRequest(
            name="coll_ucadata",
            category="coll_ucadata",
            dep_files=[],
            input_files=[source],
            output_files=[OutFile("coll/ucadata.icu")],
            tool=IcuTool("icupkg"),
            args="-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with={}
        )
    ]
def generate_unames(config, glob, common_vars):
    """Build the request that copies the Unicode character names data (unames.icu)."""
    return [
        SingleExecutionRequest(
            name="unames",
            category="unames",
            dep_files=[],
            input_files=[InFile("in/unames.icu")],
            output_files=[OutFile("unames.icu")],
            tool=IcuTool("icupkg"),
            # -t converts the prebuilt file to the target data format.
            args="-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with={}
        )
    ]
def generate_misc(config, glob, common_vars):
    """Build the requests for the miscellaneous resource files (*.res)."""
    txt_files = [InFile(filename) for filename in glob("misc/*.txt")]
    basenames = [f.filename[5:] for f in txt_files]  # strip "misc/"
    res_files = [OutFile("%s.res" % b[:-4]) for b in basenames]
    return [
        RepeatedExecutionRequest(
            name="misc_res",
            category="misc",
            dep_files=[],
            input_files=txt_files,
            output_files=res_files,
            tool=IcuTool("genrb"),
            args="-s {IN_DIR}/misc -d {OUT_DIR} -i {OUT_DIR} "
                "-k -q "
                "{INPUT_BASENAME}",
            format_with={},
            repeat_with={
                "INPUT_BASENAME": basenames
            }
        )
    ]
+
+
def generate_curr_supplemental(config, glob, common_vars):
    """Build the request for the currency supplemental resource bundle.

    curr/supplementalData.txt is excluded from the curr tree in
    generate_tree and compiled here on its own.
    """
    basename = "supplementalData.txt"
    return [
        SingleExecutionRequest(
            name="curr_supplemental_res",
            category="curr_supplemental",
            dep_files=[],
            input_files=[InFile("curr/supplementalData.txt")],
            output_files=[OutFile("curr/supplementalData.res")],
            tool=IcuTool("genrb"),
            args="-s {IN_DIR}/curr -d {OUT_DIR}/curr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with={
                "INPUT_BASENAME": basename
            }
        )
    ]
+
+
def generate_translit(config, glob, common_vars):
    """Build the requests for the transliteration resource bundles.

    Only a fixed subset of the translit sources is compiled here.
    """
    source_files = [
        InFile("translit/root.txt"),
        InFile("translit/en.txt"),
        InFile("translit/el.txt")
    ]
    basenames = [f.filename[9:] for f in source_files]  # strip "translit/"
    res_files = [OutFile("translit/%s.res" % b[:-4]) for b in basenames]
    return [
        RepeatedOrSingleExecutionRequest(
            name="translit_res",
            category="translit",
            dep_files=[],
            input_files=source_files,
            output_files=res_files,
            tool=IcuTool("genrb"),
            args="-s {IN_DIR}/translit -d {OUT_DIR}/translit -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with={},
            repeat_with={
                "INPUT_BASENAME": utils.SpaceSeparatedList(basenames)
            }
        )
    ]
+
+
def generate_tree(
        config,
        glob,
        common_vars,
        sub_dir,
        out_sub_dir,
        resfile_name,
        version_var,
        source_var,
        use_pool_bundle,
        dep_files):
    """Build all requests for one resource-bundle tree (locales, curr, coll, ...).

    Emits, in order: an optional pool-bundle write, the res-file compilation
    for every source txt in the tree, the index txt meta-request, and the
    index res compilation. Returns the list of requests.
    """
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""

    # TODO: Clean this up for curr
    input_files = [InFile(filename) for filename in glob("%s/*.txt" % sub_dir)]
    if sub_dir == "curr":
        # supplementalData.txt is compiled by generate_curr_supplemental.
        input_files.remove(InFile("curr/supplementalData.txt"))
    input_basenames = [f.filename[len(sub_dir)+1:] for f in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, b[:-4]))
        for b in input_basenames
    ]

    # Generate Pool Bundle
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX=out_prefix,
            **common_vars
        )
        requests += [
            SingleExecutionRequest(
                name="%s_pool_write" % sub_dir,
                category=category,
                dep_files=dep_files,
                input_files=input_files,
                output_files=input_pool_files,
                tool=IcuTool("genrb"),
                args="-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                    "--writePoolBundle -k "
                    "{INPUT_BASENAMES_SPACED}",
                format_with={
                    "IN_SUB_DIR": sub_dir,
                    "OUT_PREFIX": out_prefix,
                    "INPUT_BASENAMES_SPACED": utils.SpaceSeparatedList(input_basenames)
                }
            ),
        ]
    else:
        input_pool_files = []
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name="%s_res" % sub_dir,
            category=category,
            # The pool bundle (when present) must exist before genrb can
            # reference it via --usePoolBundle.
            dep_files=dep_files + input_pool_files,
            input_files=input_files,
            output_files=output_files,
            tool=IcuTool("genrb"),
            args="-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "{EXTRA_OPTION} -k "
                "{INPUT_BASENAME}",
            format_with={
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with={
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

    # Generate index txt file
    # TODO: Change .mk files to .py files so they can be loaded directly.
    # Alternatively, figure out a way to not require reading this file altogether.
    # Right now, it is required for the index list file.
    # Reading these files as .py will be required for Bazel.
    mk_values = parse_makefile("{GLOB_DIR}/{IN_SUB_DIR}/{RESFILE_NAME}".format(
        IN_SUB_DIR=sub_dir,
        RESFILE_NAME=resfile_name,
        **common_vars
    ))
    # Only the locales tree records a CLDR version in its index.
    cldr_version = mk_values[version_var] if version_var and sub_dir == "locales" else None
    index_input_files = [
        InFile("%s/%s" % (sub_dir, basename))
        for basename in mk_values[source_var].split()
    ]
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR=sub_dir,
        **common_vars
    ))
    requests += [
        IndexTxtRequest(
            name="%s_index_txt" % sub_dir,
            category=category,
            input_files=index_input_files,
            output_file=index_file_txt,
            cldr_version=cldr_version
        )
    ]

    # Generate index res file
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX=out_prefix,
        **common_vars
    ))
    requests += [
        SingleExecutionRequest(
            name="%s_index_res" % sub_dir,
            category=category,
            dep_files=[],
            input_files=[index_file_txt],
            output_files=[index_res_file],
            tool=IcuTool("genrb"),
            args="-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "-k "
                "{INDEX_NAME}.txt",
            format_with={
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            }
        )
    ]

    return requests
)
def flatten_requests(raw_requests, config, common_vars):
    """Post-processes "meta" requests into normal requests.

    Affected classes:
    - RepeatedOrSingleExecutionRequest becomes either
      RepeatedExecutionRequest or SingleExecutionRequest
    - ListRequest becomes PrintFileRequest and VariableRequest
    - IndexTxtRequest becomes PrintFileRequest
    """
    result = []
    for request in raw_requests:
        if isinstance(request, RepeatedOrSingleExecutionRequest):
            # Parallel builds fan the request out; serial builds collapse it
            # into one invocation, folding deps into inputs and the repeat
            # variables into format_with.
            if config.max_parallel:
                result.append(RepeatedExecutionRequest(
                    name=request.name,
                    category=request.category,
                    dep_files=request.dep_files,
                    input_files=request.input_files,
                    output_files=request.output_files,
                    tool=request.tool,
                    args=request.args,
                    format_with=request.format_with,
                    repeat_with=request.repeat_with
                ))
            else:
                result.append(SingleExecutionRequest(
                    name=request.name,
                    category=request.category,
                    input_files=request.dep_files + request.input_files,
                    output_files=request.output_files,
                    tool=request.tool,
                    args=request.args,
                    format_with=concat_dicts(request.format_with, request.repeat_with)
                ))
        elif isinstance(request, ListRequest):
            list_files = list(sorted(get_all_output_files(raw_requests)))
            if request.include_tmp:
                variable_files = list(sorted(get_all_output_files(raw_requests, include_tmp=True)))
            else:
                # Always include the list file itself
                variable_files = list_files + [request.output_file]
            result += [
                PrintFileRequest(
                    name=request.name,
                    output_file=request.output_file,
                    content="\n".join(file.filename for file in list_files)
                ),
                VariableRequest(
                    name=request.variable_name,
                    input_files=variable_files
                )
            ]
        elif isinstance(request, IndexTxtRequest):
            result += [
                PrintFileRequest(
                    name=request.name,
                    output_file=request.output_file,
                    content=generate_index_file(request.input_files, request.cldr_version, common_vars)
                )
            ]
        else:
            # Already a concrete request; pass it through unchanged.
            result.append(request)
    return result
# Builds the textual content of a res_index-style bundle listing the given
# locale files (and, for the locales tree, the CLDR version).
-def generate_index_file(locales, cldr_version, common_vars):
+def generate_index_file(input_files, cldr_version, common_vars):
+    # Locale IDs derived from "dir/name.txt" basenames (strip dir and ".txt").
+    locales = [f.filename[f.filename.rfind("/")+1:-4] for f in input_files]
formatted_version = " CLDRVersion { \"%s\" }\n" % cldr_version if cldr_version else ""
formatted_locales = "\n".join([" %s {\"\"}" % v for v in locales])
# TODO: CLDRVersion is required only in the base file
# NOTE(review): this definition is truncated in this view — the body that
# assembles and returns the index-file string is missing after the TODO,
# and the ")" below is a stray fragment from the garbled hunk.
)
def get_input_files(request):
    """Return every file the given request reads, dependencies included."""
    # The three execution-style requests all expose dep_files + input_files.
    execution_requests = (
        SingleExecutionRequest,
        RepeatedExecutionRequest,
        RepeatedOrSingleExecutionRequest
    )
    if isinstance(request, execution_requests):
        return request.dep_files + request.input_files
    if isinstance(request, PrintFileRequest):
        return []
    if isinstance(request, CopyRequest):
        return [request.input_file]
    if isinstance(request, VariableRequest):
        return []
    if isinstance(request, ListRequest):
        return []
    if isinstance(request, IndexTxtRequest):
        return request.input_files
    # Unknown request type: fail loudly.
    assert False
+
+
def get_output_files(request):
    """Return every file the given request produces."""
    # The three execution-style requests all expose output_files.
    execution_requests = (
        SingleExecutionRequest,
        RepeatedExecutionRequest,
        RepeatedOrSingleExecutionRequest
    )
    if isinstance(request, execution_requests):
        return request.output_files
    if isinstance(request, PrintFileRequest):
        return [request.output_file]
    if isinstance(request, CopyRequest):
        return [request.output_file]
    if isinstance(request, VariableRequest):
        return []
    if isinstance(request, ListRequest):
        return [request.output_file]
    if isinstance(request, IndexTxtRequest):
        return [request.output_file]
    # Unknown request type: fail loudly.
    assert False
+
+
def get_all_output_files(requests, include_tmp=False):
    """Collect the distinct output files across all requests.

    NOTE(review): include_tmp is accepted but not referenced in the visible
    body — confirm whether TmpFile filtering is expected to happen here.
    """
    collected = []
    for request in requests:
        collected += get_output_files(request)
    # Filter for unique values. NOTE: Cannot use set() on the files directly
    # because the same filename as OutFile and TmpFile must stay distinct,
    # and by default they evaluate as equal — so key on (type, file).
    return [f for _, f in set((type(f), f) for f in collected)]
+
+
class SpaceSeparatedList(list):
    """A list that joins itself with spaces when converted to a string."""

    def __str__(self):
        # Coerce each element so non-string entries (e.g. pathlike or
        # numeric values) don't raise TypeError inside str.join.
        return " ".join(str(item) for item in self)