Searched refs:ExecuteOnStream (Results 1 – 25 of 47) sorted by relevance

/external/tensorflow/tensorflow/compiler/xla/service/gpu/
while_thunk.cc 50 Status WhileThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::WhileThunk
61 TF_RETURN_IF_ERROR(condition_thunk_sequence_->ExecuteOnStream( in ExecuteOnStream()
86 TF_RETURN_IF_ERROR(body_thunk_sequence_->ExecuteOnStream(buffer_allocations, in ExecuteOnStream()
memset_thunk.cc 24 Status MemzeroThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::MemzeroThunk
33 Status Memset32BitValueThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::Memset32BitValueThunk
sequential_thunk.cc 36 Status SequentialThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::SequentialThunk
42 thunk->ExecuteOnStream(buffer_allocations, stream, profiler)); in ExecuteOnStream()
cudnn_batchnorm_thunk.h 63 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
94 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
128 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
for_thunk.cc 43 Status ForThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::ForThunk
52 TF_RETURN_IF_ERROR(body_thunk_sequence_->ExecuteOnStream(buffer_allocations, in ExecuteOnStream()
memset_thunk.h 39 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
copy_thunk.h 43 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
67 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
copy_thunk.cc 33 Status HostToDeviceCopyThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::HostToDeviceCopyThunk
52 Status DeviceToDeviceCopyThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::DeviceToDeviceCopyThunk
conditional_thunk.cc 59 Status ConditionalThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::ConditionalThunk
91 TF_RETURN_IF_ERROR(branch_thunks_[branch_index]->ExecuteOnStream( in ExecuteOnStream()
cudnn_batchnorm_thunk.cc 101 Status CudnnBatchNormForwardInferenceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormForwardInferenceThunk
163 Status CudnnBatchNormForwardTrainingThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormForwardTrainingThunk
247 Status CudnnBatchNormBackwardThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::CudnnBatchNormBackwardThunk
outfeed_thunk.h 41 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
infeed_thunk.h 43 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
nccl_all_reduce_thunk.h 48 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
for_thunk.h 42 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
sequential_thunk.h 44 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
tuple_thunk.h 48 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
while_thunk.h 51 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
convolution_thunk.h 56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
nccl_all_reduce_thunk.cc 314 Status NcclAllReduceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::NcclAllReduceThunk
334 Status NcclAllReduceThunk::ExecuteOnStream( in ExecuteOnStream() function in xla::gpu::NcclAllReduceThunk
conditional_thunk.h 56 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
tuple_thunk.cc 25 Status TupleThunk::ExecuteOnStream(const BufferAllocations& buffer_allocations, in ExecuteOnStream() function in xla::gpu::TupleThunk
cholesky_thunk.h 54 Status ExecuteOnStream(const BufferAllocations& buffer_allocations,
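Taken together, the GPU hits above trace a single virtual-dispatch pattern: each thunk class overrides Thunk::ExecuteOnStream with the argument list (const BufferAllocations&, se::Stream*, HloExecutionProfiler*), and container thunks such as SequentialThunk, ForThunk and WhileThunk forward the same three arguments to their nested thunks. The sketch below illustrates that pattern; ExampleSequentialThunk and its thunks_ member are hypothetical, and the parameter types are inferred from the signatures visible in these results rather than quoted from the headers.

// Hypothetical sketch, not a file from the tree above.
Status ExampleSequentialThunk::ExecuteOnStream(
    const BufferAllocations& buffer_allocations, se::Stream* stream,
    HloExecutionProfiler* profiler) {
  // Run each nested thunk on the same stream with the same buffer
  // allocations; the first non-OK Status aborts the sequence, mirroring
  // sequential_thunk.cc line 42 above.
  for (const std::unique_ptr<Thunk>& thunk : thunks_) {
    TF_RETURN_IF_ERROR(
        thunk->ExecuteOnStream(buffer_allocations, stream, profiler));
  }
  return Status::OK();
}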
/external/tensorflow/tensorflow/compiler/xla/service/
executable.cc 42 ExecuteOnStream(&run_options[0], arguments[0], in ExecuteOnStreams()
84 ExecuteOnStream(run_options, arguments, profile_ptr.get()); in ExecuteOnStreamWrapper()
executable.h 82 virtual StatusOr<ScopedShapedBuffer> ExecuteOnStream(
102 virtual StatusOr<ExecutionOutput> ExecuteOnStream( in ExecuteOnStream() function
/external/tensorflow/tensorflow/compiler/xla/service/interpreter/
executable.h 49 StatusOr<ScopedShapedBuffer> ExecuteOnStream(
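At the service layer, executable.h declares ExecuteOnStream as a virtual method on Executable returning StatusOr<ScopedShapedBuffer> (with an ExecutionOutput overload), executable.cc's ExecuteOnStreams and ExecuteOnStreamWrapper call it with run options, arguments and an optional profile, and backends such as the interpreter override it. A hedged caller-side sketch follows; RunOnce is hypothetical, and the ServiceExecutableRunOptions, ShapedBuffer and HloExecutionProfile parameter types are assumptions based on the calls visible in the hits, not quotes from executable.h.

// Hypothetical helper, modelled on Executable::ExecuteOnStreamWrapper above.
StatusOr<ScopedShapedBuffer> RunOnce(
    Executable* executable, const ServiceExecutableRunOptions* run_options,
    absl::Span<const ShapedBuffer* const> arguments) {
  // Passing a null profile skips per-HLO profiling; ExecuteOnStreamWrapper
  // supplies a real profile object only when profiling is requested.
  return executable->ExecuteOnStream(run_options, arguments,
                                     /*hlo_execution_profile=*/nullptr);
}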
