# Copyright (C) 2018 and later: Unicode, Inc. and others.
# License & terms of use: http://www.unicode.org/copyright.html

# Python 2/3 Compatibility (ICU-20299)
# TODO(ICU-20301): Remove this.
from __future__ import print_function

from icutools.databuilder import *
from icutools.databuilder import utils
from icutools.databuilder.request_types import *

import os
import sys


def generate(config, io, common_vars):
    requests = []

    # By default, exclude collation data that mimics the order of some large legacy charsets.
    # In the "subtractive" strategy we do this by inserting a resourceFilter.
    # Later rules from an explicit filter file may override this default behavior.
    # (In the "additive" strategy this is unnecessary.)
    if config.strategy == "subtractive":
        filters = config.filters_json_data.setdefault("resourceFilters", [])
        omit_charset_collations = {
            "categories": [
                "coll_tree"
            ],
            "rules": [
                "-/collations/big5han",
                "-/collations/gb2312han"
            ]
        }
        filters.insert(0, omit_charset_collations)

    if len(io.glob("misc/*")) == 0:
        print("Error: Cannot find data directory; please specify --src_dir", file=sys.stderr)
        sys.exit(1)

    requests += generate_cnvalias(config, io, common_vars)
    requests += generate_ulayout(config, io, common_vars)
    requests += generate_uemoji(config, io, common_vars)
    requests += generate_confusables(config, io, common_vars)
    requests += generate_conversion_mappings(config, io, common_vars)
    requests += generate_brkitr_brk(config, io, common_vars)
    requests += generate_brkitr_lstm(config, io, common_vars)
    requests += generate_brkitr_adaboost(config, io, common_vars)
    requests += generate_stringprep(config, io, common_vars)
    requests += generate_brkitr_dictionaries(config, io, common_vars)
    requests += generate_normalization(config, io, common_vars)
    requests += generate_coll_ucadata(config, io, common_vars)
    requests += generate_full_unicore_data(config, io, common_vars)
    requests += generate_unames(config, io, common_vars)
    requests += generate_misc(config, io, common_vars)
    requests += generate_curr_supplemental(config, io, common_vars)
    requests += generate_zone_supplemental(config, io, common_vars)
    requests += generate_translit(config, io, common_vars)

    # Res Tree Files
    # (input dirname, output dirname, use pool bundle, dep targets)
    requests += generate_tree(config, io, common_vars,
        "locales",
        None,
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "curr",
        "curr",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "lang",
        "lang",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "region",
        "region",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "zone",
        "zone",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "unit",
        "unit",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "coll",
        "coll",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        # Depends on timezoneTypes.res and keyTypeData.res.
        # TODO: We should not need this dependency to build collation.
        # TODO: Bake keyTypeData.res into the common library?
        [DepTarget("coll_ucadata"), DepTarget("misc_res"), InFile("unidata/UCARules.txt")])

    requests += generate_tree(config, io, common_vars,
        "brkitr",
        "brkitr",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [DepTarget("brkitr_brk"), DepTarget("dictionaries")])

    requests += generate_tree(config, io, common_vars,
        "rbnf",
        "rbnf",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [])

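    # ListRequest: emit icudata.lst, the list of every data file produced by
    # the requests above (include_tmp = False leaves TMP_DIR intermediates out).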
    requests += [
        ListRequest(
            name = "icudata_list",
            variable_name = "icudata_all_output_files",
            output_file = TmpFile("icudata.lst"),
            include_tmp = False
        )
    ]

    return requests


def generate_cnvalias(config, io, common_vars):
    # UConv Name Aliases
    input_file = InFile("mappings/convrtrs.txt")
    output_file = OutFile("cnvalias.icu")
    return [
        SingleExecutionRequest(
            name = "cnvalias",
            category = "cnvalias",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("gencnval"),
            args = "-s {IN_DIR} -d {OUT_DIR} "
                "{INPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_confusables(config, io, common_vars):
    # CONFUSABLES
    txt1 = InFile("unidata/confusables.txt")
    txt2 = InFile("unidata/confusablesWholeScript.txt")
    cfu = OutFile("confusables.cfu")
    return [
        SingleExecutionRequest(
            name = "confusables",
            category = "confusables",
            dep_targets = [DepTarget("cnvalias")],
            input_files = [txt1, txt2],
            output_files = [cfu],
            tool = IcuTool("gencfu"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILES[0]} -w {IN_DIR}/{INPUT_FILES[1]} "
                "-o {OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_conversion_mappings(config, io, common_vars):
    # UConv Conversion Table Files
    input_files = [InFile(filename) for filename in io.glob("mappings/*.ucm")]
    output_files = [OutFile("%s.cnv" % v.filename[9:-4]) for v in input_files]
    # TODO: handle BUILD_SPECIAL_CNV_FILES? That would mean adding the --ignore-siso-check flag to makeconv.
    return [
        RepeatedOrSingleExecutionRequest(
            name = "conversion_mappings",
            category = "conversion_mappings",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("makeconv"),
            # BEGIN android-changed
            # args = "-s {IN_DIR} -d {OUT_DIR} -c {INPUT_FILE_PLACEHOLDER}",
            args = "-s {IN_DIR} -d {OUT_DIR} -c --small {INPUT_FILE_PLACEHOLDER}",
            # END android-changed
            format_with = {},
            repeat_with = {
                "INPUT_FILE_PLACEHOLDER": utils.SpaceSeparatedList(file.filename for file in input_files)
            }
        )
    ]


def generate_brkitr_brk(config, io, common_vars):
    # BRK Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/rules/*.txt")]
    output_files = [OutFile("brkitr/%s.brk" % v.filename[13:-4]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "brkitr_brk",
            category = "brkitr_rules",
            dep_targets =
                [DepTarget("cnvalias"),
                    DepTarget("ulayout"), DepTarget("uemoji"), DepTarget("lstm_res"), DepTarget("adaboost_res")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genbrk"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILE} "
                "-o {OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_stringprep(config, io, common_vars):
    # SPP FILES
    input_files = [InFile(filename) for filename in io.glob("sprep/*.txt")]
    output_files = [OutFile("%s.spp" % v.filename[6:-4]) for v in input_files]
    bundle_names = [v.filename[6:-4] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "stringprep",
            category = "stringprep",
            dep_targets = [InFile("unidata/NormalizationCorrections.txt")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gensprep"),
            args = "-s {IN_DIR}/sprep -d {OUT_DIR} -i {OUT_DIR} "
                "-b {BUNDLE_NAME} -m {IN_DIR}/unidata -u 3.2.0 {BUNDLE_NAME}.txt",
            format_with = {},
            repeat_with = {
                "BUNDLE_NAME": bundle_names
            }
        )
    ]


def generate_brkitr_dictionaries(config, io, common_vars):
    # Dict Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/dictionaries/*.txt")]
    output_files = [OutFile("brkitr/%s.dict" % v.filename[20:-4]) for v in input_files]
    extra_options_map = {
        "brkitr/dictionaries/burmesedict.txt": "--bytes --transform offset-0x1000",
        "brkitr/dictionaries/cjdict.txt": "--uchars",
        "brkitr/dictionaries/khmerdict.txt": "--bytes --transform offset-0x1780",
        "brkitr/dictionaries/laodict.txt": "--bytes --transform offset-0x0e80",
        "brkitr/dictionaries/thaidict.txt": "--bytes --transform offset-0x0e00"
    }
    extra_optionses = [extra_options_map[v.filename] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "dictionaries",
            category = "brkitr_dictionaries",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gendict"),
            args = "-i {OUT_DIR} "
                "-c {EXTRA_OPTIONS} "
                "{IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {
                "EXTRA_OPTIONS": extra_optionses
            }
        )
    ]


def generate_normalization(config, io, common_vars):
    # NRM Files
    input_files = [InFile(filename) for filename in io.glob("in/*.nrm")]
    # nfc.nrm is pre-compiled into C++; see generate_full_unicore_data
    input_files.remove(InFile("in/nfc.nrm"))
    output_files = [OutFile(v.filename[3:]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "normalization",
            category = "normalization",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_coll_ucadata(config, io, common_vars):
    # Collation Dependency File (ucadata.icu)
    input_file = InFile("in/coll/ucadata-%s.icu" % config.coll_han_type)
    output_file = OutFile("coll/ucadata.icu")
    return [
        SingleExecutionRequest(
            name = "coll_ucadata",
            category = "coll_ucadata",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_full_unicore_data(config, io, common_vars):
    # The core Unicode properties files (pnames.icu, uprops.icu, ucase.icu, ubidi.icu)
    # are hardcoded in the common DLL and therefore not included in the data package any more.
    # They are not built by default but need to be built for ICU4J data,
    # both in the .jar and in the .dat file (if ICU4J uses the .dat file).
    # See ICU-4497.
    if not config.include_uni_core_data:
        return []

    basenames = [
        "pnames.icu",
        "uprops.icu",
        "ucase.icu",
        "ubidi.icu",
        "nfc.nrm"
    ]
    input_files = [InFile("in/%s" % bn) for bn in basenames]
    output_files = [OutFile(bn) for bn in basenames]
    return [
        RepeatedExecutionRequest(
            name = "unicore",
            category = "unicore",
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}"
        )
    ]


def generate_unames(config, io, common_vars):
    # Unicode Character Names
    input_file = InFile("in/unames.icu")
    output_file = OutFile("unames.icu")
    return [
        SingleExecutionRequest(
            name = "unames",
            category = "unames",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_ulayout(config, io, common_vars):
    # Unicode text layout properties
    basename = "ulayout"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_uemoji(config, io, common_vars):
    # Unicode emoji properties
    basename = "uemoji"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_misc(config, io, common_vars):
    # Misc Data Res Files
    input_files = [InFile(filename) for filename in io.glob("misc/*.txt")]
    input_basenames = [v.filename[5:] for v in input_files]
    output_files = [OutFile("%s.res" % v[:-4]) for v in input_basenames]
    return [
        RepeatedExecutionRequest(
            name = "misc_res",
            category = "misc",
            dep_targets = [DepTarget("cnvalias")], # ICU-21175
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/misc -d {OUT_DIR} -i {OUT_DIR} "
                "-k -q "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": input_basenames
            }
        )
    ]


def generate_curr_supplemental(config, io, common_vars):
    # Currency Supplemental Res File
    input_file = InFile("curr/supplementalData.txt")
    input_basename = "supplementalData.txt"
    output_file = OutFile("curr/supplementalData.res")
    return [
        SingleExecutionRequest(
            name = "curr_supplemental_res",
            category = "curr_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/curr -d {OUT_DIR}/curr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_zone_supplemental(config, io, common_vars):
    # tzdbNames Res File
    input_file = InFile("zone/tzdbNames.txt")
    input_basename = "tzdbNames.txt"
    output_file = OutFile("zone/tzdbNames.res")
    return [
        SingleExecutionRequest(
            name = "zone_supplemental_res",
            category = "zone_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/zone -d {OUT_DIR}/zone -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_translit(config, io, common_vars):
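    # Transliteration rule resource files. Only root, en, and el are compiled
    # into .res bundles; the remaining translit/*.txt sources are tracked as
    # dependencies of those bundles.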
    input_files = [
        InFile("translit/root.txt"),
        InFile("translit/en.txt"),
        InFile("translit/el.txt")
    ]
    dep_files = set(InFile(filename) for filename in io.glob("translit/*.txt"))
    dep_files -= set(input_files)
    dep_files = list(sorted(dep_files))
    input_basenames = [v.filename[9:] for v in input_files]
    output_files = [
        OutFile("translit/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "translit_res",
            category = "translit",
            dep_targets = dep_files,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/translit -d {OUT_DIR}/translit -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_brkitr_lstm(config, io, common_vars):
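    # LSTM break-iterator model data: each brkitr/lstm/*.txt source is compiled
    # into a brkitr/*.res bundle with genrb.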
    input_files = [InFile(filename) for filename in io.glob("brkitr/lstm/*.txt")]
    input_basenames = [v.filename[12:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "lstm_res",
            category = "brkitr_lstm",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/lstm -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

def generate_brkitr_adaboost(config, io, common_vars):
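    # AdaBoost break-iterator model data: each brkitr/adaboost/*.txt source is
    # compiled into a brkitr/*.res bundle with genrb.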
    input_files = [InFile(filename) for filename in io.glob("brkitr/adaboost/*.txt")]
    input_basenames = [v.filename[16:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "adaboost_res",
            category = "brkitr_adaboost",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/adaboost -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

def generate_tree(
        config,
        io,
        common_vars,
        sub_dir,
        out_sub_dir,
        use_pool_bundle,
        dep_targets):
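    # Build one resource-bundle tree (locales, curr, lang, region, zone, unit,
    # coll, brkitr, or rbnf): an optional shared pool bundle, one .res file per
    # input .txt compiled with genrb, and the res_index for the tree.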
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
    input_files = [InFile(filename) for filename in io.glob("%s/*.txt" % sub_dir)]
    if sub_dir == "curr":
        input_files.remove(InFile("curr/supplementalData.txt"))
    if sub_dir == "zone":
        input_files.remove(InFile("zone/tzdbNames.txt"))
    input_basenames = [v.filename[len(sub_dir)+1:] for v in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, v[:-4]))
        for v in input_basenames
    ]

    # Generate Pool Bundle
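    # genrb --writePoolBundle writes a pool.res of shared key strings for this
    # tree; the per-bundle invocations below then pass --usePoolBundle so their
    # bundles can reference keys from that shared pool.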
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        pool_target_name = "%s_pool_write" % sub_dir
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX = out_prefix,
            **common_vars
        )
        requests += [
            SingleExecutionRequest(
                name = pool_target_name,
                category = category,
                dep_targets = dep_targets,
                input_files = input_files,
                output_files = input_pool_files,
                tool = IcuTool("genrb"),
                args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                    "--writePoolBundle -k "
                    "{INPUT_BASENAMES_SPACED}",
                format_with = {
                    "IN_SUB_DIR": sub_dir,
                    "OUT_PREFIX": out_prefix,
                    "INPUT_BASENAMES_SPACED": utils.SpaceSeparatedList(input_basenames)
                }
            ),
        ]
        dep_targets = dep_targets + [DepTarget(pool_target_name)]
    else:
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name = "%s_res" % sub_dir,
            category = category,
            dep_targets = dep_targets,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            # BEGIN android-changed
            args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} " +
                ("--omitCollationRules " if sub_dir == "coll" else "") +
                "{EXTRA_OPTION} -k "
                "{INPUT_BASENAME}",
            # END android-changed
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

    # Generate res_index file
    # Exclude the deprecated locale variants and root; see ICU-20628. This
    # could be data-driven, but we do not want to perform I/O in this script
    # (for example, we do not want to read from an XML file).
    excluded_locales = set([
        "ja_JP_TRADITIONAL",
        "th_TH_TRADITIONAL",
        "de_",
        "de__PHONEBOOK",
        "es_",
        "es__TRADITIONAL",
        "root",
    ])
    # Put alias locales in a separate structure; see ICU-20627
    dependency_data = io.read_locale_deps(sub_dir)
    if "aliases" in dependency_data:
        alias_locales = set(dependency_data["aliases"].keys())
    else:
        alias_locales = set()
    alias_files = []
    installed_files = []
    for f in input_files:
        file_stem = IndexRequest.locale_file_stem(f)
        if file_stem in excluded_locales:
            continue
        destination = alias_files if file_stem in alias_locales else installed_files
        destination.append(f)
    cldr_version = dependency_data["cldrVersion"] if sub_dir == "locales" else None
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR = sub_dir,
        **common_vars
    ))
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX = out_prefix,
        **common_vars
    ))
    index_file_target_name = "%s_index_txt" % sub_dir
    requests += [
        IndexRequest(
            name = index_file_target_name,
            category = category,
            installed_files = installed_files,
            alias_files = alias_files,
            txt_file = index_file_txt,
            output_file = index_res_file,
            cldr_version = cldr_version,
            args = "-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "-k "
                "{INDEX_NAME}.txt",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            }
        )
    ]

    return requests