/external/tensorflow/tensorflow/compiler/xla/service/gpu/ |
D | while_thunk.cc | 50 Status WhileThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::WhileThunk
D | while_thunk.cc | 61 TF_RETURN_IF_ERROR(condition_thunk_sequence_->ExecuteOnStream( in ExecuteOnStream()
D | while_thunk.cc | 86 TF_RETURN_IF_ERROR(body_thunk_sequence_->ExecuteOnStream(buffer_allocations, in ExecuteOnStream()
|
D | memset_thunk.cc | 24 Status MemzeroThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::MemzeroThunk
D | memset_thunk.cc | 33 Status Memset32BitValueThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::Memset32BitValueThunk
|
D | sequential_thunk.cc | 36 Status SequentialThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::SequentialThunk
D | sequential_thunk.cc | 42 thunk->ExecuteOnStream(buffer_allocations, stream, profiler)); in ExecuteOnStream()
|
D | cudnn_batchnorm_thunk.h | 63 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
D | cudnn_batchnorm_thunk.h | 94 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
D | cudnn_batchnorm_thunk.h | 128 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | for_thunk.cc | 43 Status ForThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::ForThunk
D | for_thunk.cc | 52 TF_RETURN_IF_ERROR(body_thunk_sequence_->ExecuteOnStream(buffer_allocations, in ExecuteOnStream()
|
D | memset_thunk.h | 39 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
D | memset_thunk.h | 56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | copy_thunk.h | 43 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
D | copy_thunk.h | 67 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | copy_thunk.cc | 33 Status HostToDeviceCopyThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::HostToDeviceCopyThunk
D | copy_thunk.cc | 52 Status DeviceToDeviceCopyThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::DeviceToDeviceCopyThunk
|
D | conditional_thunk.cc | 59 Status ConditionalThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::ConditionalThunk
D | conditional_thunk.cc | 91 TF_RETURN_IF_ERROR(branch_thunks_[branch_index]->ExecuteOnStream( in ExecuteOnStream()
|
D | cudnn_batchnorm_thunk.cc | 101 Status CudnnBatchNormForwardInferenceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormForwardInferenceThunk
D | cudnn_batchnorm_thunk.cc | 163 Status CudnnBatchNormForwardTrainingThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormForwardTrainingThunk
D | cudnn_batchnorm_thunk.cc | 247 Status CudnnBatchNormBackwardThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormBackwardThunk
|
D | outfeed_thunk.h | 41 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | infeed_thunk.h | 43 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | nccl_all_reduce_thunk.h | 48 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | for_thunk.h | 42 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | sequential_thunk.h | 44 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | tuple_thunk.h | 48 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | while_thunk.h | 51 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | convolution_thunk.h | 56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | nccl_all_reduce_thunk.cc | 314 Status NcclAllReduceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::NcclAllReduceThunk
D | nccl_all_reduce_thunk.cc | 334 Status NcclAllReduceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::NcclAllReduceThunk
|
D | conditional_thunk.h | 56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
D | tuple_thunk.cc | 25 Status TupleThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::TupleThunk
|
D | cholesky_thunk.h | 54 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
|
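The rows above all repeat one pattern: every GPU thunk overrides ExecuteOnStream(const BufferAllocations& ...), and the control-flow thunks (SequentialThunk, WhileThunk, ForThunk, ConditionalThunk) forward the same arguments to their nested thunk sequences, propagating failures with TF_RETURN_IF_ERROR. The sketch below is hypothetical and not part of the tree; the (buffer_allocations, stream, profiler) parameter list is inferred from the call at sequential_thunk.cc:42, and the header paths are assumptions.

// Hypothetical helper mirroring the delegation pattern in
// SequentialThunk::ExecuteOnStream (sequential_thunk.cc:36/42). The stream
// and profiler parameter types are assumptions inferred from that call site.
#include <memory>
#include <vector>

#include "tensorflow/compiler/xla/service/gpu/buffer_allocations.h"
#include "tensorflow/compiler/xla/service/gpu/hlo_execution_profiler.h"
#include "tensorflow/compiler/xla/service/gpu/thunk.h"
#include "tensorflow/core/lib/core/errors.h"

namespace xla {
namespace gpu {

// Runs each thunk in order on the given stream, stopping at the first error.
Status RunThunkSequence(const std::vector<std::unique_ptr<Thunk>>& thunks,
                        const BufferAllocations& buffer_allocations,
                        se::Stream* stream, HloExecutionProfiler* profiler) {
  for (const std::unique_ptr<Thunk>& thunk : thunks) {
    // Each thunk enqueues its work on `stream` and reports failure through
    // the returned Status, which TF_RETURN_IF_ERROR propagates to the caller.
    TF_RETURN_IF_ERROR(
        thunk->ExecuteOnStream(buffer_allocations, stream, profiler));
  }
  return Status::OK();
}

}  // namespace gpu
}  // namespace xla

WhileThunk applies the same delegation twice per iteration, once for the condition sequence (while_thunk.cc:61) and once for the body (while_thunk.cc:86), which is what lets arbitrary thunk graphs nest inside control flow.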
/external/tensorflow/tensorflow/compiler/xla/service/ |
D | executable.cc | 42 ExecuteOnStream(&run_options[0], arguments[0], in ExecuteOnStreams()
D | executable.cc | 84 ExecuteOnStream(run_options, arguments, profile_ptr.get()); in ExecuteOnStreamWrapper()
|
D | executable.h | 82 virtual StatusOr<ScopedShapedBuffer> ExecuteOnStream(
D | executable.h | 102 virtual StatusOr<ExecutionOutput> ExecuteOnStream( in ExecuteOnStream() function
|
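One layer up, Executable::ExecuteOnStream (executable.h:82) is the backend-neutral virtual that concrete executables override, and executable.cc:84 shows ExecuteOnStreamWrapper invoking it with an optional profile. The call site below is hypothetical; the run-options and argument types are assumptions, and only the member name and the StatusOr<ScopedShapedBuffer> result come from the declaration listed above.

// Hypothetical call site, not part of the tree. ServiceExecutableRunOptions
// and absl::Span<const ShapedBuffer* const> are assumed parameter types;
// the declaration at executable.h:82 is truncated after the opening paren.
#include "absl/types/span.h"
#include "tensorflow/compiler/xla/service/executable.h"
#include "tensorflow/compiler/xla/service/shaped_buffer.h"

namespace xla {

StatusOr<ScopedShapedBuffer> RunExecutableOnce(
    Executable* executable, const ServiceExecutableRunOptions* run_options,
    absl::Span<const ShapedBuffer* const> arguments) {
  // Passing a null HloExecutionProfile skips per-HLO profiling;
  // ExecuteOnStreamWrapper (executable.cc:84) is the variant that supplies one.
  return executable->ExecuteOnStream(run_options, arguments,
                                     /*hlo_execution_profile=*/nullptr);
}

}  // namespace xla

The second overload at executable.h:102 returns StatusOr<ExecutionOutput> instead, while the interpreter backend below overrides the ScopedShapedBuffer form at interpreter/executable.h:49.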
/external/tensorflow/tensorflow/compiler/xla/service/interpreter/ |
D | executable.h | 49 StatusOr<ScopedShapedBuffer> ExecuteOnStream(
|