#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# usage: action_maketokenizer.py OUTPUTS -- INPUTS
#
# Multiple INPUTS may be listed. The sections are separated by -- arguments.
#
# OUTPUTS must contain a single item: a path to tokenizer.cpp.
#
# INPUTS must contain exactly two items. The first item must be the path to
# maketokenizer. The second item must be the path to tokenizer.flex.

import os
import subprocess
import sys

def SplitArgsIntoSections(args):
    """Split args into sections delimited by '--' tokens.

    Returns a list of lists of arguments. A trailing '--' produces a
    final empty section (there is "nothing" after the last delimiter);
    an empty args list produces no sections at all.
    """
    if not args:
        return []

    sections = [[]]
    for token in args:
        if token == '--':
            # A delimiter: start accumulating a fresh section.
            sections.append([])
        else:
            sections[-1].append(token)
    return sections
def main(args):
    """Generate tokenizer.cpp by piping flex output through maketokenizer.

    args: argv-style list. args[1:] must be "OUTPUTS -- INPUTS", where
    OUTPUTS is a single output path (tokenizer.cpp) and INPUTS is exactly
    [path-to-maketokenizer, path-to-tokenizer.flex].

    Returns 0 on success. Raises AssertionError on malformed arguments or
    if either subprocess exits nonzero.
    """
    sections = SplitArgsIntoSections(args[1:])
    assert len(sections) == 2
    (outputs, inputs) = sections

    assert len(outputs) == 1
    output = outputs[0]

    assert len(inputs) == 2
    maketokenizer = inputs[0]
    flexInput = inputs[1]

    # Equivalent of: flex -t tokenizer.flex | perl maketokenizer > output.
    # check_call is new in 2.5, so simulate its behavior with wait and
    # assert. Use a with-statement so the output file is closed even if a
    # subprocess fails (the original leaked the handle).
    with open(output, 'wb') as outfile:
        p1 = subprocess.Popen(['flex', '-t', flexInput],
                              stdout=subprocess.PIPE)
        p2 = subprocess.Popen(['perl', maketokenizer],
                              stdin=p1.stdout, stdout=outfile)
        # Close the parent's copy of the pipe's read end so flex receives
        # SIGPIPE (rather than blocking) if perl exits early.
        p1.stdout.close()

        r1 = p1.wait()
        r2 = p2.wait()
    assert r1 == 0
    assert r2 == 0

    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv))