/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime/device/ascend/ge_runtime/task/profiler_task.h"
#include <limits>  // std::numeric_limits, used to bound-check the log id below
#include "runtime/device/ascend/ge_runtime/task/task_factory.h"

namespace mindspore::ge::model_runner {
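// Constructor: select the stream this profiler task will run on, using the stream id
// recorded in the task info to index into the model context's stream list.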
ProfilerTask::ProfilerTask(const ModelContext &model_context, const std::shared_ptr<ProfilerTraceTaskInfo> &task_info)
    : TaskRepeater<ProfilerTraceTaskInfo>(model_context, task_info), task_info_(task_info), stream_(nullptr) {
  MS_EXCEPTION_IF_NULL(task_info);
  auto stream_list = model_context.stream_list();
  uint32_t stream_id = task_info->stream_id();
  MS_LOG(INFO) << "Stream list size: " << stream_list.size() << ", stream id: " << stream_id;
  if (stream_id >= stream_list.size()) {
    MS_LOG(EXCEPTION) << "Index: " << task_info->stream_id() << " >= stream_list.size(): " << stream_list.size();
  }
  stream_ = stream_list[stream_id];
}

ProfilerTask::~ProfilerTask() {}

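// Distribute: query the runtime model id, then submit a profiler trace record to the
// task's stream via rtProfilerTraceEx, bounds-checking the log id against uint16_t.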
void ProfilerTask::Distribute() {
  MS_LOG(INFO) << "ProfilerTask Distribute start.";
  MS_LOG(INFO) << "log id = " << task_info_->log_id() << ", notify = " << task_info_->notify()
               << ", flat = " << task_info_->flat();
  // Look up the runtime model id of the model this task belongs to.
  uint32_t model_id = 0;
  rtError_t rt_model_ret = rtModelGetId(model_handle_, &model_id);
  if (rt_model_ret != RT_ERROR_NONE) {
    MS_LOG(EXCEPTION) << "Call rt api rtModelGetId failed, ret: " << rt_model_ret;
  }
  uint64_t new_model_id = static_cast<uint64_t>(model_id);
  uint64_t first_id = 0;
  // rtProfilerTraceEx takes the log id as uint16_t, so reject values that would be truncated.
  if (task_info_->log_id() > static_cast<size_t>(std::numeric_limits<uint16_t>::max())) {
    MS_LOG(EXCEPTION) << "Invalid log id " << task_info_->log_id() << " over max uint16_t.";
  }
  rtError_t rt_ret = rtProfilerTraceEx(first_id, new_model_id, static_cast<uint16_t>(task_info_->log_id()), stream_);
  if (rt_ret != RT_ERROR_NONE) {
    MS_LOG(EXCEPTION) << "Call rt api rtProfilerTraceEx failed, ret: " << rt_ret;
  }
  MS_LOG(INFO) << "ProfilerTask Distribute end.";
}

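// Register this task type with the task factory (task_factory.h), so that
// PROFILER_TRACE task infos are dispatched to ProfilerTask.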
REGISTER_TASK(TaskInfoType::PROFILER_TRACE, ProfilerTask, ProfilerTraceTaskInfo);
}  // namespace mindspore::ge::model_runner