# Copyright (C) 2018 and later: Unicode, Inc. and others.
# License & terms of use: http://www.unicode.org/copyright.html

# Python 2/3 Compatibility (ICU-20299)
# TODO(ICU-20301): Remove this.
from __future__ import print_function

from icutools.databuilder import *
from icutools.databuilder import utils
from icutools.databuilder.request_types import *

import os
import sys


def generate(config, io, common_vars):
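    # Entry point called by the icutools.databuilder framework; returns the
    # full list of data build requests for this ICU data directory.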
    requests = []

    # By default, exclude collation data that mimics the order of some large legacy charsets.
    # We do this in the "subtractive" strategy by inserting a resourceFilter.
    # Later rules from an explicit filter file may override this default behavior.
    # (In the "additive" strategy this is unnecessary.)
    if config.strategy == "subtractive":
        filters = config.filters_json_data.setdefault("resourceFilters", [])
        omit_charset_collations = {
            "categories": [
                "coll_tree"
            ],
            "rules": [
                "-/collations/big5han",
                "-/collations/gb2312han"
            ]
        }
        filters.insert(0, omit_charset_collations)

    if len(io.glob("misc/*")) == 0:
        print("Error: Cannot find data directory; please specify --src_dir", file=sys.stderr)
        sys.exit(1)

    requests += generate_cnvalias(config, io, common_vars)
    requests += generate_ulayout(config, io, common_vars)
    requests += generate_uemoji(config, io, common_vars)
    requests += generate_confusables(config, io, common_vars)
    requests += generate_conversion_mappings(config, io, common_vars)
    requests += generate_brkitr_brk(config, io, common_vars)
    requests += generate_brkitr_lstm(config, io, common_vars)
    requests += generate_brkitr_adaboost(config, io, common_vars)
    requests += generate_stringprep(config, io, common_vars)
    requests += generate_brkitr_dictionaries(config, io, common_vars)
    requests += generate_normalization(config, io, common_vars)
    requests += generate_coll_ucadata(config, io, common_vars)
    requests += generate_full_unicore_data(config, io, common_vars)
    requests += generate_unames(config, io, common_vars)
    requests += generate_misc(config, io, common_vars)
    requests += generate_curr_supplemental(config, io, common_vars)
    requests += generate_zone_supplemental(config, io, common_vars)
    requests += generate_translit(config, io, common_vars)

    # Res Tree Files
    # (input dirname, output dirname, use pool file, dep files)
    requests += generate_tree(config, io, common_vars,
        "locales",
        None,
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "curr",
        "curr",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "lang",
        "lang",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "region",
        "region",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "zone",
        "zone",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "unit",
        "unit",
        config.use_pool_bundle,
        [])

    requests += generate_tree(config, io, common_vars,
        "coll",
        "coll",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        # Depends on timezoneTypes.res and keyTypeData.res.
        # TODO: We should not need this dependency to build collation.
        # TODO: Bake keyTypeData.res into the common library?
        [DepTarget("coll_ucadata"), DepTarget("misc_res"), InFile("unidata/UCARules.txt")])

    requests += generate_tree(config, io, common_vars,
        "brkitr",
        "brkitr",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [DepTarget("brkitr_brk"), DepTarget("dictionaries")])

    requests += generate_tree(config, io, common_vars,
        "rbnf",
        "rbnf",
        # Never use pool bundle for coll, brkitr, or rbnf
        False,
        [])

    requests += [
        ListRequest(
            name = "icudata_list",
            variable_name = "icudata_all_output_files",
            output_file = TmpFile("icudata.lst"),
            include_tmp = False
        )
    ]

    return requests


def generate_cnvalias(config, io, common_vars):
    # UConv Name Aliases
    input_file = InFile("mappings/convrtrs.txt")
    output_file = OutFile("cnvalias.icu")
    return [
        SingleExecutionRequest(
            name = "cnvalias",
            category = "cnvalias",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("gencnval"),
            args = "-s {IN_DIR} -d {OUT_DIR} "
                "{INPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_confusables(config, io, common_vars):
    # CONFUSABLES
    txt1 = InFile("unidata/confusables.txt")
    txt2 = InFile("unidata/confusablesWholeScript.txt")
    cfu = OutFile("confusables.cfu")
    return [
        SingleExecutionRequest(
            name = "confusables",
            category = "confusables",
            dep_targets = [DepTarget("cnvalias")],
            input_files = [txt1, txt2],
            output_files = [cfu],
            tool = IcuTool("gencfu"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILES[0]} -w {IN_DIR}/{INPUT_FILES[1]} "
                "-o {OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_conversion_mappings(config, io, common_vars):
    # UConv Conversion Table Files
    input_files = [InFile(filename) for filename in io.glob("mappings/*.ucm")]
    output_files = [OutFile("%s.cnv" % v.filename[9:-4]) for v in input_files]
    # TODO: handle BUILD_SPECIAL_CNV_FILES? Means to add --ignore-siso-check flag to makeconv
    return [
        RepeatedOrSingleExecutionRequest(
            name = "conversion_mappings",
            category = "conversion_mappings",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("makeconv"),
            args = "-s {IN_DIR} -d {OUT_DIR} -c {INPUT_FILE_PLACEHOLDER}",
            format_with = {},
            repeat_with = {
                "INPUT_FILE_PLACEHOLDER": utils.SpaceSeparatedList(file.filename for file in input_files)
            }
        )
    ]


def generate_brkitr_brk(config, io, common_vars):
    # BRK Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/rules/*.txt")]
    output_files = [OutFile("brkitr/%s.brk" % v.filename[13:-4]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "brkitr_brk",
            category = "brkitr_rules",
            dep_targets =
                [DepTarget("cnvalias"),
                    DepTarget("ulayout"), DepTarget("uemoji"), DepTarget("lstm_res"), DepTarget("adaboost_res")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genbrk"),
            args = "-d {OUT_DIR} -i {OUT_DIR} "
                "-c -r {IN_DIR}/{INPUT_FILE} "
                "-o {OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_stringprep(config, io, common_vars):
    # SPP FILES
    input_files = [InFile(filename) for filename in io.glob("sprep/*.txt")]
    output_files = [OutFile("%s.spp" % v.filename[6:-4]) for v in input_files]
    bundle_names = [v.filename[6:-4] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "stringprep",
            category = "stringprep",
            dep_targets = [InFile("unidata/NormalizationCorrections.txt")],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gensprep"),
            args = "-s {IN_DIR}/sprep -d {OUT_DIR} -i {OUT_DIR} "
                "-b {BUNDLE_NAME} -m {IN_DIR}/unidata -u 3.2.0 {BUNDLE_NAME}.txt",
            format_with = {},
            repeat_with = {
                "BUNDLE_NAME": bundle_names
            }
        )
    ]


def generate_brkitr_dictionaries(config, io, common_vars):
    # Dict Files
    input_files = [InFile(filename) for filename in io.glob("brkitr/dictionaries/*.txt")]
    output_files = [OutFile("brkitr/%s.dict" % v.filename[20:-4]) for v in input_files]
    extra_options_map = {
        "brkitr/dictionaries/burmesedict.txt": "--bytes --transform offset-0x1000",
        "brkitr/dictionaries/cjdict.txt": "--uchars",
        "brkitr/dictionaries/khmerdict.txt": "--bytes --transform offset-0x1780",
        "brkitr/dictionaries/laodict.txt": "--bytes --transform offset-0x0e80",
        "brkitr/dictionaries/thaidict.txt": "--bytes --transform offset-0x0e00"
    }
    extra_optionses = [extra_options_map[v.filename] for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "dictionaries",
            category = "brkitr_dictionaries",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("gendict"),
            args = "-i {OUT_DIR} "
                "-c {EXTRA_OPTIONS} "
                "{IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {
                "EXTRA_OPTIONS": extra_optionses
            }
        )
    ]


def generate_normalization(config, io, common_vars):
    # NRM Files
    input_files = [InFile(filename) for filename in io.glob("in/*.nrm")]
    # nfc.nrm is pre-compiled into C++; see generate_full_unicore_data
    input_files.remove(InFile("in/nfc.nrm"))
    output_files = [OutFile(v.filename[3:]) for v in input_files]
    return [
        RepeatedExecutionRequest(
            name = "normalization",
            category = "normalization",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}",
            format_with = {},
            repeat_with = {}
        )
    ]


def generate_coll_ucadata(config, io, common_vars):
    # Collation Dependency File (ucadata.icu)
    input_file = InFile("in/coll/ucadata-%s.icu" % config.coll_han_type)
    output_file = OutFile("coll/ucadata.icu")
    return [
        SingleExecutionRequest(
            name = "coll_ucadata",
            category = "coll_ucadata",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_full_unicore_data(config, io, common_vars):
    # The core Unicode properties files (pnames.icu, uprops.icu, ucase.icu, ubidi.icu)
    # are hardcoded in the common DLL and therefore not included in the data package any more.
    # They are not built by default but need to be built for ICU4J data,
    # both in the .jar and in the .dat file (if ICU4J uses the .dat file).
    # See ICU-4497.
    if not config.include_uni_core_data:
        return []

    basenames = [
        "pnames.icu",
        "uprops.icu",
        "ucase.icu",
        "ubidi.icu",
        "nfc.nrm"
    ]
    input_files = [InFile("in/%s" % bn) for bn in basenames]
    output_files = [OutFile(bn) for bn in basenames]
    return [
        RepeatedExecutionRequest(
            name = "unicore",
            category = "unicore",
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILE} {OUT_DIR}/{OUTPUT_FILE}"
        )
    ]


def generate_unames(config, io, common_vars):
    # Unicode Character Names
    input_file = InFile("in/unames.icu")
    output_file = OutFile("unames.icu")
    return [
        SingleExecutionRequest(
            name = "unames",
            category = "unames",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_ulayout(config, io, common_vars):
    # Unicode text layout properties
    basename = "ulayout"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_uemoji(config, io, common_vars):
    # Unicode emoji properties
    basename = "uemoji"
    input_file = InFile("in/%s.icu" % basename)
    output_file = OutFile("%s.icu" % basename)
    return [
        SingleExecutionRequest(
            name = basename,
            category = basename,
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("icupkg"),
            args = "-t{ICUDATA_CHAR} {IN_DIR}/{INPUT_FILES[0]} {OUT_DIR}/{OUTPUT_FILES[0]}",
            format_with = {}
        )
    ]


def generate_misc(config, io, common_vars):
    # Misc Data Res Files
    input_files = [InFile(filename) for filename in io.glob("misc/*.txt")]
    input_basenames = [v.filename[5:] for v in input_files]
    output_files = [OutFile("%s.res" % v[:-4]) for v in input_basenames]
    return [
        RepeatedExecutionRequest(
            name = "misc_res",
            category = "misc",
            dep_targets = [DepTarget("cnvalias")], # ICU-21175
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/misc -d {OUT_DIR} -i {OUT_DIR} "
                "-k -q "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": input_basenames
            }
        )
    ]


def generate_curr_supplemental(config, io, common_vars):
    # Currency Supplemental Res File
    input_file = InFile("curr/supplementalData.txt")
    input_basename = "supplementalData.txt"
    output_file = OutFile("curr/supplementalData.res")
    return [
        SingleExecutionRequest(
            name = "curr_supplemental_res",
            category = "curr_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/curr -d {OUT_DIR}/curr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_zone_supplemental(config, io, common_vars):
    # tzdbNames Res File
    input_file = InFile("zone/tzdbNames.txt")
    input_basename = "tzdbNames.txt"
    output_file = OutFile("zone/tzdbNames.res")
    return [
        SingleExecutionRequest(
            name = "zone_supplemental_res",
            category = "zone_supplemental",
            dep_targets = [],
            input_files = [input_file],
            output_files = [output_file],
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/zone -d {OUT_DIR}/zone -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {
                "INPUT_BASENAME": input_basename
            }
        )
    ]


def generate_translit(config, io, common_vars):
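    # Transliteration Rule Res Files
    # Only root, en, and el are built into the data package; the remaining
    # translit/*.txt files are declared as dependencies of this request.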
    input_files = [
        InFile("translit/root.txt"),
        InFile("translit/en.txt"),
        InFile("translit/el.txt")
    ]
    dep_files = set(InFile(filename) for filename in io.glob("translit/*.txt"))
    dep_files -= set(input_files)
    dep_files = list(sorted(dep_files))
    input_basenames = [v.filename[9:] for v in input_files]
    output_files = [
        OutFile("translit/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "translit_res",
            category = "translit",
            dep_targets = dep_files,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/translit -d {OUT_DIR}/translit -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_brkitr_lstm(config, io, common_vars):
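    # LSTM Break Engine Model Res Files (brkitr/lstm/*.txt)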
    input_files = [InFile(filename) for filename in io.glob("brkitr/lstm/*.txt")]
    input_basenames = [v.filename[12:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "lstm_res",
            category = "brkitr_lstm",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/lstm -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_brkitr_adaboost(config, io, common_vars):
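    # AdaBoost Break Engine Model Res Files (brkitr/adaboost/*.txt)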
    input_files = [InFile(filename) for filename in io.glob("brkitr/adaboost/*.txt")]
    input_basenames = [v.filename[16:] for v in input_files]
    output_files = [
        OutFile("brkitr/%s.res" % v[:-4])
        for v in input_basenames
    ]
    return [
        RepeatedOrSingleExecutionRequest(
            name = "adaboost_res",
            category = "brkitr_adaboost",
            dep_targets = [],
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/brkitr/adaboost -d {OUT_DIR}/brkitr -i {OUT_DIR} "
                "-k "
                "{INPUT_BASENAME}",
            format_with = {},
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]


def generate_tree(
        config,
        io,
        common_vars,
        sub_dir,
        out_sub_dir,
        use_pool_bundle,
        dep_targets):
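    # Builds one res file tree (e.g., locales, curr, lang, region, zone, unit,
    # coll, brkitr, rbnf): an optional pool bundle, the individual res files,
    # and the res_index file.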
    requests = []
    category = "%s_tree" % sub_dir
    out_prefix = "%s/" % out_sub_dir if out_sub_dir else ""
    input_files = [InFile(filename) for filename in io.glob("%s/*.txt" % sub_dir)]
    if sub_dir == "curr":
        input_files.remove(InFile("curr/supplementalData.txt"))
    if sub_dir == "zone":
        input_files.remove(InFile("zone/tzdbNames.txt"))
    input_basenames = [v.filename[len(sub_dir)+1:] for v in input_files]
    output_files = [
        OutFile("%s%s.res" % (out_prefix, v[:-4]))
        for v in input_basenames
    ]

    # Generate Pool Bundle
    if use_pool_bundle:
        input_pool_files = [OutFile("%spool.res" % out_prefix)]
        pool_target_name = "%s_pool_write" % sub_dir
        use_pool_bundle_option = "--usePoolBundle {OUT_DIR}/{OUT_PREFIX}".format(
            OUT_PREFIX = out_prefix,
            **common_vars
        )
        requests += [
            SingleExecutionRequest(
                name = pool_target_name,
                category = category,
                dep_targets = dep_targets,
                input_files = input_files,
                output_files = input_pool_files,
                tool = IcuTool("genrb"),
                args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                    "--writePoolBundle -k "
                    "{INPUT_BASENAMES_SPACED}",
                format_with = {
                    "IN_SUB_DIR": sub_dir,
                    "OUT_PREFIX": out_prefix,
                    "INPUT_BASENAMES_SPACED": utils.SpaceSeparatedList(input_basenames)
                }
            ),
        ]
        dep_targets = dep_targets + [DepTarget(pool_target_name)]
    else:
        use_pool_bundle_option = ""

    # Generate Res File Tree
    requests += [
        RepeatedOrSingleExecutionRequest(
            name = "%s_res" % sub_dir,
            category = category,
            dep_targets = dep_targets,
            input_files = input_files,
            output_files = output_files,
            tool = IcuTool("genrb"),
            args = "-s {IN_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "{EXTRA_OPTION} -k "
                "{INPUT_BASENAME}",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix,
                "EXTRA_OPTION": use_pool_bundle_option
            },
            repeat_with = {
                "INPUT_BASENAME": utils.SpaceSeparatedList(input_basenames)
            }
        )
    ]

    # Generate res_index file
    # Exclude the deprecated locale variants and root; see ICU-20628. This
    # could be data-driven, but we do not want to perform I/O in this script
    # (for example, we do not want to read from an XML file).
    excluded_locales = set([
        "ja_JP_TRADITIONAL",
        "th_TH_TRADITIONAL",
        "de_",
        "de__PHONEBOOK",
        "es_",
        "es__TRADITIONAL",
        "root",
    ])
    # Put alias locales in a separate structure; see ICU-20627
    dependency_data = io.read_locale_deps(sub_dir)
    if "aliases" in dependency_data:
        alias_locales = set(dependency_data["aliases"].keys())
    else:
        alias_locales = set()
    alias_files = []
    installed_files = []
    for f in input_files:
        file_stem = IndexRequest.locale_file_stem(f)
        if file_stem in excluded_locales:
            continue
        destination = alias_files if file_stem in alias_locales else installed_files
        destination.append(f)
    cldr_version = dependency_data["cldrVersion"] if sub_dir == "locales" else None
    index_file_txt = TmpFile("{IN_SUB_DIR}/{INDEX_NAME}.txt".format(
        IN_SUB_DIR = sub_dir,
        **common_vars
    ))
    index_res_file = OutFile("{OUT_PREFIX}{INDEX_NAME}.res".format(
        OUT_PREFIX = out_prefix,
        **common_vars
    ))
    index_file_target_name = "%s_index_txt" % sub_dir
    requests += [
        IndexRequest(
            name = index_file_target_name,
            category = category,
            installed_files = installed_files,
            alias_files = alias_files,
            txt_file = index_file_txt,
            output_file = index_res_file,
            cldr_version = cldr_version,
            args = "-s {TMP_DIR}/{IN_SUB_DIR} -d {OUT_DIR}/{OUT_PREFIX} -i {OUT_DIR} "
                "-k "
                "{INDEX_NAME}.txt",
            format_with = {
                "IN_SUB_DIR": sub_dir,
                "OUT_PREFIX": out_prefix
            }
        )
    ]

    return requests