/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "absl/strings/str_cat.h"
#include "tensorflow/stream_executor/platform/default/dso_loader.h"
#include "tensorflow/stream_executor/platform/logging.h"
#include "tensorflow/stream_executor/platform/port.h"

namespace stream_executor {
namespace internal {
namespace DsoLoader {

// Attempts to dlopen every CUDA library used at runtime (cudart, cuBLAS,
// cuBLASLt, cuFFT, cuRAND, cuSOLVER, cuSPARSE, cuDNN); returns an INTERNAL
// error if any of them fails to load.
port::Status TryDlopenCUDALibraries() {
  namespace CachedLoader = ::stream_executor::internal::CachedDsoLoader;
  auto cudart_status = CachedLoader::GetCudaRuntimeDsoHandle();
  auto cublas_status = CachedLoader::GetCublasDsoHandle();
  auto cublaslt_status = CachedLoader::GetCublasLtDsoHandle();
  auto cufft_status = CachedLoader::GetCufftDsoHandle();
  auto curand_status = CachedLoader::GetCurandDsoHandle();
  auto cusolver_status = CachedLoader::GetCusolverDsoHandle();
  auto cusparse_status = CachedLoader::GetCusparseDsoHandle();
  auto cudnn_status = CachedLoader::GetCudnnDsoHandle();

  if (!cudart_status.status().ok() || !cublas_status.status().ok() ||
      !cufft_status.status().ok() || !curand_status.status().ok() ||
      !cusolver_status.status().ok() || !cusparse_status.status().ok() ||
      !cudnn_status.status().ok() || !cublaslt_status.status().ok()) {
    return port::Status(port::error::INTERNAL,
                        absl::StrCat("Cannot dlopen all CUDA libraries."));
  } else {
    return port::Status::OK();
  }
}

// Attempts to dlopen the ROCm libraries (rocBLAS, MIOpen, rocFFT, rocRAND).
port::Status TryDlopenROCmLibraries() {
  auto rocblas_status = GetRocblasDsoHandle();
  auto miopen_status = GetMiopenDsoHandle();
  auto rocfft_status = GetRocfftDsoHandle();
  auto rocrand_status = GetRocrandDsoHandle();
  if (!rocblas_status.status().ok() || !miopen_status.status().ok() ||
      !rocfft_status.status().ok() || !rocrand_status.status().ok()) {
    return port::Status(port::error::INTERNAL,
                        absl::StrCat("Cannot dlopen all ROCm libraries."));
  } else {
    return port::Status::OK();
  }
}

// Dispatches to the CUDA or ROCm check depending on how TensorFlow was built;
// a CPU-only build skips the check entirely.
port::Status MaybeTryDlopenGPULibraries() {
#if GOOGLE_CUDA
  return TryDlopenCUDALibraries();
#elif TENSORFLOW_USE_ROCM
  return TryDlopenROCmLibraries();
#else
  LOG(INFO) << "Not built with GPU enabled. Skip GPU library dlopen check.";
  return port::Status::OK();
#endif
}

// Attempts to dlopen the TensorRT libraries (nvinfer and the nvinfer plugin).
port::Status TryDlopenTensorRTLibraries() {
  auto nvinfer_status = GetNvInferDsoHandle();
  auto nvinferplugin_status = GetNvInferPluginDsoHandle();
  if (!nvinfer_status.status().ok() || !nvinferplugin_status.status().ok()) {
    return port::Status(port::error::INTERNAL,
                        absl::StrCat("Cannot dlopen all TensorRT libraries."));
  } else {
    return port::Status::OK();
  }
}

}  // namespace DsoLoader
}  // namespace internal
}  // namespace stream_executor
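
// Usage sketch (illustrative only, not part of this file): a caller that wants
// to verify GPU library availability at startup might invoke
// MaybeTryDlopenGPULibraries() and log the outcome, e.g.
//
//   auto status =
//       stream_executor::internal::DsoLoader::MaybeTryDlopenGPULibraries();
//   if (!status.ok()) {
//     LOG(WARNING) << "GPU libraries could not be loaded: " << status;
//   }
//
// The exact call site is an assumption; this file only provides the checks.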