# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import List, Union


def tokenize_parameters(buffer: bytes) -> List[bytes]:
    """Split input parameters into tokens.

    Removes space characters outside of double quote blocks, per
    T-rec-V-25 - 5.2.1 Command line general format: "Space characters
    (IA5 2/0) are ignored [..], unless they are embedded in numeric or
    string constants".

    Args:
        buffer: Raw parameter bytes, e.g. b'1,"a b",(2,3)'.

    Returns:
        The non-empty tokens: string-constant contents (surrounding
        quotes stripped), plain constants, and the separator tokens
        b',', b'(' and b')'.

    Raises:
        ValueError: If the input is invalid — a quote or open
            parenthesis follows a regular character, or a quote block
            is never closed.
    """
    tokens = []
    in_quotes = False
    token = bytearray()
    for b in buffer:
        char = bytearray([b])

        if in_quotes:
            # Inside a string constant everything (spaces included) is kept.
            token.extend(char)
            if char == b'"':
                in_quotes = False
                # Emit the constant without its surrounding quotes.
                tokens.append(token[1:-1])
                token = bytearray()
        elif char == b' ':
            pass  # Spaces outside of quotes are ignored.
        elif char == b',' or char == b')':
            # Separator flushes the pending token, then is a token itself.
            tokens.append(token)
            tokens.append(char)
            token = bytearray()
        elif char == b'(':
            if len(token) > 0:
                raise ValueError("open_paren following regular character")
            tokens.append(char)
        elif char == b'"':
            if len(token) > 0:
                raise ValueError("quote following regular character")
            in_quotes = True
            token.extend(char)
        else:
            token.extend(char)

    # A quote block left open at end of input is invalid; previously this
    # was silently accepted and leaked the opening quote into the token.
    if in_quotes:
        raise ValueError("unterminated quote")

    tokens.append(token)
    return [bytes(token) for token in tokens if len(token) > 0]
58
59
def parse_parameters(buffer: bytes) -> List[Union[bytes, list]]:
    """Parse the parameters using the comma and parenthesis separators.

    Commas separate sibling parameters; a parenthesized group becomes a
    nested list. Raises ValueError in case of invalid input string.
    """
    # stack[0] collects the top-level parameters; each open parenthesis
    # pushes a fresh list for the nested group being built.
    stack: List[list] = [[]]
    # Value completed by the next separator (a bytes token, or a finished
    # sublist after a close parenthesis).
    pending: Union[bytes, list] = b''

    for token in tokenize_parameters(buffer):
        if token == b'(':
            stack.append([])
        elif token == b',':
            # Comma terminates the pending value at the current level.
            stack[-1].append(pending)
            pending = b''
        elif token == b')':
            if len(stack) < 2:
                raise ValueError("close_paren without matching open_paren")
            # Close the group: the finished sublist becomes the pending value.
            stack[-1].append(pending)
            pending = stack.pop()
        else:
            pending = token

    stack[-1].append(pending)
    if len(stack) > 1:
        raise ValueError("missing close_paren")
    return stack[0]
86