# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Configuration of parameters for strategy-searching algorithm in auto_parallel"""

import threading
from mindspore._c_expression import CostModelContext
from mindspore._checkparam import args_type_check

__all__ = ["get_algo_parameters", "reset_algo_parameters", "set_algo_parameters"]

23
24class _AlgoParameterConfig:
25    """
26    _AlgoParameterConfig is the configuration of setting parameters used in th algorithm.
27
28    Note:
29        Creating a config through instantiating _AlgoParameterConfig object is not recommended.
30        Use algo_parameter_config() to get the configuration since _AlgoParameterConfig is singleton.
31    """
32    _instance = None
33    _instance_lock = threading.Lock()
34
35    def __init__(self):
36        self._config_handle = CostModelContext.get_instance()
37
38    def check_config_handle(self):
39        """
40        Check config handle.
41
42        Raises:
43            ValueError: If the config handle is none.
44        """
45        if self._config_handle is None:
46            raise ValueError("Config handle is none!!!")
47
48    def set_fully_use_devices(self, not_fully):
49        """
50        Set the flag of whether ONLY generating strategies that fully use all available devices.
51        Default: True
52
53        Args:
54            not_fully (bool): The flag.
55        """
56        self.check_config_handle()
57        self._config_handle.set_fully_use_devices(not_fully)
58
59    def get_fully_use_devices(self):
60        """
61        Get the flag of whether ONLY generating strategies that fully use all available devices.
62
63        Return:
64            The flag.
65        """
66        self.check_config_handle()
67        return self._config_handle.get_fully_use_devices()
68
69    def set_elementwise_op_strategy_follow(self, element_strategy_follow):
70        """
71        Set the flag of whether the elementwise operator has the same strategies as its subsequent operators.
72        Default: False
73
74        Args:
75            element_strategy_follow (bool): The flag.
76        """
77        self.check_config_handle()
78        self._config_handle.set_elementwise_op_strategy_follow(element_strategy_follow)
79
80    def get_elementwise_op_strategy_follow(self):
81        """
82        Get the flag of whether the elementwise operator has the same strategies as its subsequent operators.
83
84        Returns:
85            The flag.
86        """
87        self.check_config_handle()
88        return self._config_handle.get_elementwise_op_strategy_follow()
89
90    def set_tensor_slice_align_enable(self, align_enable):
91        """
92        Set the flag of whether to check the shape of tensor slice of MatMul.
93        Default: False
94
95        Args:
96            align_enable (bool): The flag.
97        """
98        self.check_config_handle()
99        self._config_handle.set_tensor_slice_align_enable(align_enable)
100
101    def get_tensor_slice_align_enable(self):
102        """
103        Get the flag of whether to check the shape of tensor slice of MatMul.
104
105        Returns:
106            The flag.
107        """
108        self.check_config_handle()
109        return self._config_handle.get_tensor_slice_align_enable()
110
111    def set_tensor_slice_align_size(self, align_size):
112        """
113        Set tensor slice align size.
114
115        Args:
116            align_size (int): The minimum tensor slice shape.
117
118        Raises:
119            ValueError: If align_size is not in [1, 1024].
120        """
121        self.check_config_handle()
122        if align_size < 1 or align_size > 1024:
123            raise ValueError('Align_size must be in [1, 1024], but got {}'.format(align_size))
124        self._config_handle.set_tensor_slice_align_size(align_size)
125
126    def get_tensor_slice_align_size(self):
127        """
128        Get the tensor slice align size.
129
130        Returns:
131            The size.
132        """
133        self.check_config_handle()
134        return self._config_handle.get_tensor_slice_align_size()
135
136    def set_dp_algo_enable_approxi(self, enable_flag):
137        """
138        Set the flag of whether to enable the approximation in the DP algorithms.
139        Default: False.
140
141        Args:
142            enable_flag (bool): The flag.
143        """
144        self.check_config_handle()
145        self._config_handle.set_dp_algo_enable_approxi(enable_flag)
146
147    def get_dp_algo_enable_approxi(self):
148        """
149        Get the flag of whether to enable the approximation in the DP algorithms.
150
151        Returns:
152            The flag.
153        """
154        self.check_config_handle()
155        return self._config_handle.get_dp_algo_enable_approxi()
156
157    def set_dp_algo_approxi_epsilon(self, epsilon):
158        """
159        Set the epsilon value used in the approximation DP algorithm.
160        Default: 0.1.
161
162        Args:
163            epsilon (float): The epsilon value, should in the range dp_(0, 1].
164        """
165        self.check_config_handle()
166        self._config_handle.set_dp_algo_approxi_epsilon(epsilon)
167
168    def get_dp_algo_approxi_epsilon(self):
169        """
170        Get the epsilon value used in the approximation DP algorithm.
171
172        Returns:
173            The epsilon value.
174        """
175        self.check_config_handle()
176        return self._config_handle.get_dp_algo_approxi_epsilon()
177
178    def reset_algo_parameters(self):
179        """
180        Reset algorithm parameter attributes.
181        """
182        self.check_config_handle()
183        self._config_handle.reset_algo_parameters()
184
185
# Module-level cache holding the lazily created configuration singleton.
_g_algo_parameter_config = None


def _algo_parameter_config():
    """
    Return the process-wide _AlgoParameterConfig instance.

    The instance is created lazily on first call and cached in the module
    global _g_algo_parameter_config thereafter.

    Returns:
        The global _AlgoParameterConfig object.
    """
    global _g_algo_parameter_config
    if _g_algo_parameter_config is not None:
        return _g_algo_parameter_config
    _g_algo_parameter_config = _AlgoParameterConfig()
    return _g_algo_parameter_config
200
201
# Dispatch table: maps each keyword accepted by set_algo_parameters() to the
# corresponding setter method on the global _AlgoParameterConfig singleton.
# Note the singleton is created here, at import time, by _algo_parameter_config().
set_algo_parameters_config_func_map = {
    "fully_use_devices": _algo_parameter_config().set_fully_use_devices,
    "elementwise_op_strategy_follow": _algo_parameter_config().set_elementwise_op_strategy_follow,
    "tensor_slice_align_enable": _algo_parameter_config().set_tensor_slice_align_enable,
    "tensor_slice_align_size": _algo_parameter_config().set_tensor_slice_align_size,
    "enable_algo_approxi": _algo_parameter_config().set_dp_algo_enable_approxi,
    "algo_approxi_epsilon": _algo_parameter_config().set_dp_algo_approxi_epsilon}
209
210
# Dispatch table: maps each keyword accepted by get_algo_parameters() to the
# corresponding getter method on the global _AlgoParameterConfig singleton.
get_algo_parameters_config_func_map = {
    "fully_use_devices": _algo_parameter_config().get_fully_use_devices,
    "elementwise_op_strategy_follow": _algo_parameter_config().get_elementwise_op_strategy_follow,
    "tensor_slice_align_enable": _algo_parameter_config().get_tensor_slice_align_enable,
    "tensor_slice_align_size": _algo_parameter_config().get_tensor_slice_align_size,
    "enable_algo_approxi": _algo_parameter_config().get_dp_algo_enable_approxi,
    "algo_approxi_epsilon": _algo_parameter_config().get_dp_algo_approxi_epsilon}
218
219
@args_type_check(tensor_slice_align_enable=bool, tensor_slice_align_size=int,
                 fully_use_devices=bool, elementwise_op_strategy_follow=bool,
                 enable_algo_approxi=bool, algo_approxi_epsilon=float)
def set_algo_parameters(**kwargs):
    """
    Set parameters in the algorithm for parallel strategy searching. See a typical use in
    mindspore/tests/ut/python/parallel/test_auto_parallel_resnet.py.

    Note:
        The attribute name is required. This interface works ONLY in AUTO_PARALLEL mode.

    Args:
        fully_use_devices (bool): Whether to search ONLY strategies that occupy all available
            devices. Default: True. For example with 8 devices available, if set true, strategy
            (4, 1) will not be included in ReLU's candidate strategies, because strategy (4, 1)
            only utilizes 4 devices.
        elementwise_op_strategy_follow (bool): Whether an elementwise operator takes the same
            strategies as its subsequent operators. Default: False. For the example of ReLU
            followed by Add, where ReLU is the elementwise operator, if this flag is set true,
            the searched strategies of these two operators are guaranteed to be consistent,
            e.g., ReLU's strategy (8, 1) and Add's strategy ((8, 1), (8, 1)).
        enable_algo_approxi (bool): Whether to enable the approximation in the algorithms.
            Default: False. Searching parallel strategies for a large DNN model has a huge
            solution space, so the algorithm may take fairly long. Setting this flag true makes
            an approximation that discards some candidate strategies and shrinks the space.
        algo_approxi_epsilon (float): The epsilon value used in the approximation algorithm, in
            range (0, 1]. Default: 0.1. It describes the extent of the approximation: if an
            operator has S candidate strategies and `enable_algo_approxi` is true, then
            min{S, 1/epsilon} strategies remain.
        tensor_slice_align_enable (bool): Whether to check the shape of the tensor slices of
            MatMul. Default: False. Due to properties of some hardware, only MatMul kernels with
            large shapes show advantages; enabling this check prevents irregular slice shapes.
        tensor_slice_align_size (int): The minimum tensor slice shape of MatMul, must be in
            [1, 1024]. Default: 16. If `tensor_slice_align_enable` is set true, the slice size of
            the last dimension of MatMul tensors should be a multiple of this value.

    Raises:
        ValueError: If context keyword is not recognized.
    """
    for key, value in kwargs.items():
        setter = set_algo_parameters_config_func_map.get(key)
        if setter is None:
            raise ValueError("Set context keyword %s is not recognized!" % key)
        setter(value)
261
262
def get_algo_parameters(attr_key):
    """
    Get the algorithm parameter config attributes.

    Note:
        The attribute name is required. This interface works ONLY in AUTO_PARALLEL mode.

    Args:
        attr_key (str): The key of the attribute; one of "fully_use_devices",
            "elementwise_op_strategy_follow", "enable_algo_approxi", "algo_approxi_epsilon",
            "tensor_slice_align_enable", "tensor_slice_align_size".

    Returns:
        The current value of the requested attribute.

    Raises:
        ValueError: If context keyword is not recognized.
    """
    getter = get_algo_parameters_config_func_map.get(attr_key)
    if getter is None:
        raise ValueError("Get context keyword %s is not recognized!" % attr_key)
    return getter()
285
286
def reset_algo_parameters():
    """Reset the algorithm parameter attributes.

    Note:
        This interface works ONLY in AUTO_PARALLEL mode.

    After reset, the attributes take the following values:
    --fully_use_devices: True.
    --elementwise_op_strategy_follow: False.
    --enable_algo_approxi: False.
    --algo_approxi_epsilon: 0.1.
    --tensor_slice_align_enable: False.
    --tensor_slice_align_size: 16.
    """
    config = _algo_parameter_config()
    config.reset_algo_parameters()
302