1 /**
2  * Copyright 2021-2022 Huawei Technologies Co., Ltd
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef MINDSPORE_MINDSPORE_CCSRC_DEBUG_DATA_DUMP_DUMP_UTILS_H_
18 #define MINDSPORE_MINDSPORE_CCSRC_DEBUG_DATA_DUMP_DUMP_UTILS_H_
19 
20 #include <map>
21 #include <vector>
22 #include <string>
23 #include <memory>
24 
25 #include "include/backend/kernel_graph.h"
26 #include "include/backend/device_address.h"
27 
// Convenience aliases for the backend device-memory abstraction used by the
// dump helpers below: a DeviceTensor is a device::DeviceAddress (a block of
// device memory plus its metadata), shared-owned via DeviceTensorPtr.
using DeviceTensor = mindspore::device::DeviceAddress;
using DeviceTensorPtr = std::shared_ptr<DeviceTensor>;
30 
31 namespace mindspore {
// Output indices used when dumping Parameter and ValueNode (constant) nodes.
// Presumably both node kinds expose exactly one output, hence index 0 —
// confirm against the dump implementation in the .cc file.
constexpr size_t kParameterOutputIndex = 0;
constexpr size_t kValueNodeOutputIndex = 0;
34 
/*
 * Feature group: Dump.
 * Target device group: Ascend, GPU and CPU.
 * Runtime category: Old runtime, MindRT.
 * Description: Generate dir path to dump data. It will be in these formats:
 * 1) tensor/statistic: /dump_path/rank_{rank_id}/{net_name}/{graph_id}/{iter_num}.
 * 2) constant data: /dump_path/rank_{rank_id}/{net_name}/{graph_id}/constants/.
 * Parameters:
 *   graph_id - id of the kernel graph being dumped; becomes the {graph_id}
 *              path component.
 *   rank_id  - device rank; becomes the rank_{rank_id} path component
 *              (defaults to 0 for single-device runs).
 *   is_cst   - when true, produce the constants/ directory (format 2);
 *              otherwise the per-iteration directory (format 1).
 * Returns: the generated directory path as a string.
 */
std::string GenerateDumpPath(uint32_t graph_id, uint32_t rank_id = 0, bool is_cst = false);
44 
45 void GetFileKernelName(NotNull<std::string *> kernel_name);
46 
/*
 * Feature group: Dump.
 * Target device group: Ascend, GPU and CPU.
 * Runtime category: Old runtime, MindRT.
 * Description: Get the actual tensor shape for dumping based on trans_flag option in configuration json file.
 * Parameters:
 *   node       - the node whose output shape is queried.
 *   index      - which output of the node to query.
 *   int_shapes - out-param; filled with the resulting shape.
 *   trans_flag - when true, use the (host-)translated shape rather than the
 *                device-format shape, per the dump configuration.
 */
void GetDumpIntShape(const AnfNodePtr &node, size_t index, NotNull<ShapeVector *> const int_shapes,
                     bool trans_flag = false);
55 
/*
 * Feature group: Dump.
 * Description: Fetches the device tensor backing `node` along with its shape
 * and host/device type ids, for use by the dump writers.
 * Parameters:
 *   node        - the node (by the surrounding constants, presumably a
 *                 Parameter or ValueNode) whose backing tensor is wanted.
 *   int_shapes  - out-param; filled with the tensor shape.
 *   host_type   - out-param; filled with the host-side TypeId.
 *   device_type - out-param; filled with the device-side TypeId.
 * Returns: shared pointer to the device tensor. NOTE(review): whether this can
 * be null on failure is defined in the .cc — confirm before dereferencing.
 * NOTE(review): the const-qualified by-value return inhibits move and buys
 * callers nothing (C++ Core Guidelines F.49); candidate cleanup, but the
 * signature must stay in sync with the definition in the .cc file.
 */
const DeviceTensorPtr GetParameterInfo(const AnfNodePtr &node, NotNull<ShapeVector *> const int_shapes,
                                       NotNull<TypeId *> const host_type, NotNull<TypeId *> const device_type);
58 
/*
 * Feature group: Dump.
 * Target device group: Ascend, CPU.
 * Runtime category: Old runtime, MindRT.
 * Description: Dump the data in memory into file path.
 * Parameters:
 *   file_path  - destination file path (see GenerateDumpPath for the layout).
 *   addr       - device address holding the data to dump.
 *   int_shapes - shape of the tensor being dumped.
 *   type       - element TypeId of the tensor.
 *   trans_flag - when true, dump in host (translated) format rather than the
 *                raw device format, mirroring GetDumpIntShape.
 */
void DumpMemToFile(const std::string &file_path, const device::DeviceAddress &addr, const ShapeVector &int_shapes,
                   const TypeId &type, bool trans_flag = false);
67 
/*
 * Feature group: Dump.
 * Target device group: Ascend, GPU, CPU.
 * Runtime category: Old runtime, MindRT.
 * Description: Dump string content into file path. Current purpose is to save operator overflow information in json
 * file in ascend a+m dump mode.
 * Parameters:
 *   file_name - destination file path.
 *   dump_str  - string content written to the file.
 * NOTE(review): BACKEND_EXPORT is not defined in this header — presumably it
 * is provided transitively by one of the include/backend headers above;
 * confirm rather than adding a new include blindly.
 */
BACKEND_EXPORT void DumpToFile(const std::string &file_name, const std::string &dump_str);
76 }  // namespace mindspore
77 
78 #endif  // MINDSPORE_MINDSPORE_CCSRC_DEBUG_DATA_DUMP_DUMP_UTILS_H_
79