
Search results for refs:memory_allocator (results 1 – 25 of 58, sorted by relevance)


/external/tensorflow/tensorflow/compiler/xla/python/
shared_device_buffer_test.cc
34 client->backend().memory_allocator(), 0, nullptr)); in TEST()
37 EXPECT_EQ(buffer->allocator(), client->backend().memory_allocator()); in TEST()
51 client->backend().memory_allocator(), 0, nullptr)); in TEST()
55 client->backend().memory_allocator(), 0, nullptr)); in TEST()
60 client->backend().memory_allocator(), 0, nullptr)); in TEST()
66 EXPECT_EQ(tuple_buffer->allocator(), client->backend().memory_allocator()); in TEST()
81 client->backend().memory_allocator(), 0, nullptr)); in TEST()
85 client->backend().memory_allocator(), 0, nullptr)); in TEST()
90 client->backend().memory_allocator(), 0, in TEST()
95 client->backend().memory_allocator(), 0, nullptr)); in TEST()
[all …]
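
A note on the pattern: every hit in shared_device_buffer_test.cc creates a buffer against the client backend's memory_allocator() and then checks that the buffer reports the same allocator back. A minimal sketch of that round trip, assuming this tree's XLA headers; the MakeArray signature and the transfer-manager argument are inferred from the snippets (the recurring "0, nullptr" is device ordinal 0 and a null definition event), not checked against the header:

// Sketch only: mirrors the test pattern above; header paths approximate.
#include "tensorflow/compiler/xla/client/client_library.h"
#include "tensorflow/compiler/xla/python/shared_device_buffer.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/test.h"

TEST(SharedDeviceBufferSketch, AllocatorRoundTrip) {
  xla::LocalClient* client = xla::ClientLibrary::LocalClientOrDie();
  xla::Shape shape = xla::ShapeUtil::MakeShape(xla::F32, {2, 3});
  TF_ASSERT_OK_AND_ASSIGN(
      auto buffer,
      xla::SharedDeviceBuffer::MakeArray(
          shape, client->backend().transfer_manager(),
          client->backend().memory_allocator(), /*device_ordinal=*/0,
          /*definition_event=*/nullptr));
  // The buffer remembers which allocator owns its device memory.
  EXPECT_EQ(buffer->allocator(), client->backend().memory_allocator());
}
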
/external/tensorflow/tensorflow/compiler/xla/service/gpu/
buffer_allocations.h
53 se::DeviceMemoryAllocator* memory_allocator);
65 se::DeviceMemoryAllocator* memory_allocator() const { in memory_allocator() function
89 se::DeviceMemoryAllocator* memory_allocator, in BufferAllocations() argument
93 memory_allocator_(memory_allocator), in BufferAllocations()
buffer_allocations.cc
42 se::DeviceMemoryAllocator* memory_allocator) { in Build() argument
45 num_buffers, device_ordinal, memory_allocator, buffer_assignment)); in Build()
82 buffer, memory_allocator->Allocate(device_ordinal, buffer_size)); in Build()
fft_thunk.cc
32 int device_ordinal, se::DeviceMemoryAllocator* memory_allocator) in FftScratchAllocator() argument
33 : device_ordinal_(device_ordinal), memory_allocator_(memory_allocator) {} in FftScratchAllocator()
119 buffer_allocations.memory_allocator()); in ExecuteOnStream()
gpu_executable.cc
306 se::DeviceMemoryAllocator* const memory_allocator = run_options->allocator(); in ExecuteAsyncOnStream() local
309 !memory_allocator->AllowsAsynchronousDeallocation(); in ExecuteAsyncOnStream()
366 assignment_.get(), executor->device_ordinal(), memory_allocator)); in ExecuteAsyncOnStream()
376 memory_allocator, device_ordinal); in ExecuteAsyncOnStream()
cudnn_fused_conv_rewriter_test.cc
41 backend().default_stream_executor(), backend().memory_allocator()) in GetOptimizedHlo()
337 backend().default_stream_executor(), backend().memory_allocator()) in TEST_F()
fft_thunk.h
41 se::DeviceMemoryAllocator* memory_allocator);
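
Read together, the gpu_executable.cc and buffer_allocations hits trace one flow: ExecuteAsyncOnStream takes the allocator from the run options, uses AllowsAsynchronousDeallocation() to decide whether the host must block until the stream drains, and passes the allocator to BufferAllocations::Builder::Build, which calls Allocate(device_ordinal, buffer_size) per buffer. A hedged sketch of that flow; the argument order follows the line-366 hit, but the wrapper function and the Build return type are assumptions:

// Sketch only; the individual calls are taken from the hits above.
#include "tensorflow/compiler/xla/service/gpu/buffer_allocations.h"

xla::StatusOr<std::unique_ptr<xla::gpu::BufferAllocations>>
BuildAllocationsSketch(const xla::ServiceExecutableRunOptions* run_options,
                       const xla::BufferAssignment* assignment,
                       int device_ordinal) {
  // Line 306: the allocator comes from the run options, not the backend.
  se::DeviceMemoryAllocator* const memory_allocator =
      run_options->allocator();
  // Line 309: allocators that cannot free asynchronously force the host
  // to block until execution finishes before buffers may be released.
  const bool block_host_until_done =
      !memory_allocator->AllowsAsynchronousDeallocation();
  (void)block_host_until_done;
  // Lines 42-82: Build() walks the assignment, calling
  // memory_allocator->Allocate(device_ordinal, buffer_size) per buffer.
  xla::gpu::BufferAllocations::Builder builder;
  return builder.Build(assignment, device_ordinal, memory_allocator);
}
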
/external/v8/src/heap/
read-only-spaces.cc
135 ->memory_allocator() in CreateReadOnlySpace()
243 heap->read_only_space()->TearDown(heap->memory_allocator()); in ReinstallReadOnlySpace()
285 void SharedReadOnlySpace::TearDown(MemoryAllocator* memory_allocator) { in TearDown() argument
294 void ReadOnlySpace::TearDown(MemoryAllocator* memory_allocator) { in TearDown() argument
296 memory_allocator->FreeReadOnlyPage(chunk); in TearDown()
321 void ReadOnlySpace::SetPermissionsForPages(MemoryAllocator* memory_allocator, in SetPermissionsForPages() argument
327 memory_allocator->page_allocator(NOT_EXECUTABLE); in SetPermissionsForPages()
377 auto* memory_allocator = heap()->memory_allocator(); in Seal() local
383 memory_allocator->UnregisterMemory(p); in Seal()
391 SetPermissionsForPages(memory_allocator, PageAllocator::kRead); in Seal()
[all …]
new-spaces.cc
60 heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>( in EnsureCurrentCapacity()
71 heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>( in EnsureCurrentCapacity()
115 heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>( in Commit()
138 heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>(chunk); in Uncommit()
143 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); in Uncommit()
171 heap()->memory_allocator()->AllocatePage<MemoryAllocator::kPooled>( in GrowTo()
194 heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>(last); in RewindPages()
209 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); in ShrinkTo()
large-spaces.cc
109 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page); in TearDown()
185 LargePage* page = heap()->memory_allocator()->AllocateLargePage( in AllocateLargePage()
304 heap()->memory_allocator()->PartialFreeMemory( in FreeUnmarkedObjects()
312 heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>( in FreeUnmarkedObjects()
509 heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>(page); in FreeDeadObjects()
521 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); in FreeDeadObjects()
read-only-spaces.h
178 V8_EXPORT_PRIVATE virtual void TearDown(MemoryAllocator* memory_allocator);
235 void SetPermissionsForPages(MemoryAllocator* memory_allocator,
288 void TearDown(MemoryAllocator* memory_allocator) override;
paged-spaces.cc
96 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(chunk); in TearDown()
306 return heap()->memory_allocator()->AllocatePage(AreaSize(), this, in AllocatePage()
463 heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>(page); in ReleasePage()
469 CHECK(heap()->memory_allocator()->IsMemoryChunkExecutable(page)); in SetReadable()
477 CHECK(heap()->memory_allocator()->IsMemoryChunkExecutable(page)); in SetReadAndExecutable()
485 CHECK(heap()->memory_allocator()->IsMemoryChunkExecutable(page)); in SetReadAndWritable()
memory-chunk.cc
21 MemoryAllocator* memory_allocator = heap_->memory_allocator(); in DiscardUnusedMemory() local
23 memory_allocator->page_allocator(executable()); in DiscardUnusedMemory()
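
Across new-spaces.cc, large-spaces.cc, paged-spaces.cc, and read-only-spaces.cc the shape is identical: spaces never call the OS directly. Pages are obtained from heap()->memory_allocator(), freeing selects a mode (kFull for immediate release, kPooledAndQueue for reuse, kPreFreeAndQueue to defer), and deferred chunks are drained later via unmapper()->FreeQueuedChunks(). The toy model below is self-contained and illustrative only; it is not V8 code and its names are not V8's API:

#include <cstddef>
#include <vector>

// Toy model of the allocate / free-by-mode / drain-queue contract above.
struct Chunk { std::size_t size; };

class MemoryAllocatorModel {
 public:
  enum FreeMode { kFull, kPooledAndQueue, kPreFreeAndQueue };

  Chunk* AllocatePage(std::size_t size) {
    // Prefer a pooled page, as the kPooled allocations in the hits do.
    if (!pool_.empty()) {
      Chunk* chunk = pool_.back();
      pool_.pop_back();
      return chunk;
    }
    return new Chunk{size};
  }

  void Free(FreeMode mode, Chunk* chunk) {
    switch (mode) {
      case kFull:            delete chunk; break;             // release now
      case kPooledAndQueue:  pool_.push_back(chunk); break;   // keep for reuse
      case kPreFreeAndQueue: queued_.push_back(chunk); break; // defer
    }
  }

  // Counterpart of unmapper()->FreeQueuedChunks() in the hits.
  void FreeQueuedChunks() {
    for (Chunk* chunk : queued_) delete chunk;
    queued_.clear();
  }

 private:
  std::vector<Chunk*> pool_;
  std::vector<Chunk*> queued_;
};
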
/external/libchrome/base/metrics/
persistent_histogram_allocator_unittest.cc
40 allocator_ = GlobalHistogramAllocator::Get()->memory_allocator(); in CreatePersistentHistogramAllocator()
170 GlobalHistogramAllocator::Get()->memory_allocator()->Name()); in TEST_F()
176 GlobalHistogramAllocator::Get()->memory_allocator()->Name()); in TEST_F()
182 GlobalHistogramAllocator::Get()->memory_allocator()->Name()); in TEST_F()
261 const_cast<void*>(new_allocator->memory_allocator()->data()), in TEST_F()
262 new_allocator->memory_allocator()->size(), 0, 0, "", false)); in TEST_F()
308 const_cast<void*>(new_allocator->memory_allocator()->data()), in TEST_F()
309 new_allocator->memory_allocator()->size(), 0, 0, "", false)); in TEST_F()
persistent_histogram_allocator.cc
259 : allocator_(allocator), memory_iter_(allocator->memory_allocator()) {} in Iterator()
914 PersistentMemoryAllocator* memory_allocator = in ReleaseForTesting() local
915 histogram_allocator->memory_allocator(); in ReleaseForTesting()
920 PersistentMemoryAllocator::Iterator iter(memory_allocator); in ReleaseForTesting()
964 memory_allocator()->SetMemoryState(PersistentMemoryAllocator::MEMORY_DELETED); in DeletePersistentLocation()
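
The libchrome hits show a two-layer design: PersistentHistogramAllocator (and its global variant) wraps a lower-level PersistentMemoryAllocator, exposed through memory_allocator(), and it is that lower layer that naming, iteration, and memory-state changes operate on. A hedged fragment of that usage, assuming the base/metrics headers; the wrapper function is hypothetical:

// Sketch only; each call below appears in the hits above.
#include "base/metrics/persistent_histogram_allocator.h"
#include "base/metrics/persistent_memory_allocator.h"

void InspectGlobalAllocatorSketch() {
  base::GlobalHistogramAllocator* histogram_allocator =
      base::GlobalHistogramAllocator::Get();
  if (!histogram_allocator)
    return;  // No process-wide allocator was ever installed.

  // The underlying persistent-memory layer does the real work.
  base::PersistentMemoryAllocator* memory_allocator =
      histogram_allocator->memory_allocator();

  // Lines 170-182: the allocator's name is stored with the memory.
  auto name = memory_allocator->Name();
  (void)name;

  // Line 920: records are walked via an Iterator over the persistent
  // memory, not via the histogram layer.
  base::PersistentMemoryAllocator::Iterator iter(memory_allocator);

  // Line 964: DeletePersistentLocation() retires the backing memory by
  // flipping its state rather than freeing it.
  memory_allocator->SetMemoryState(
      base::PersistentMemoryAllocator::MEMORY_DELETED);
}
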
/external/tensorflow/tensorflow/compiler/xla/tests/
buffer_donation_test.cc
73 auto memory_allocator = in RunAndCheck() local
78 run_options.set_allocator(memory_allocator.get()); in RunAndCheck()
85 argument_literal.shape(), memory_allocator.get(), in RunAndCheck()
96 memory_allocator.get()); in RunAndCheck()
/external/tensorflow/tensorflow/compiler/xrt/
xrt_state.cc
96 shape, on_device_shape, backend->memory_allocator(), device_ordinal); in AllocateScopedShapedBuffer()
188 device_ordinal, backend->memory_allocator(), in CreateAndTransfer()
191 ->InitializeFromShapedBuffer(shaped_buffer, backend->memory_allocator(), in CreateAndTransfer()
211 device_ordinal, backend->memory_allocator(), in CreateUninitialized()
214 ->InitializeFromShapedBuffer(shaped_buffer, backend->memory_allocator(), in CreateUninitialized()
224 auto allocator = backend->memory_allocator(); in CreateFromBuffer()
318 InitializeFromShapedBuffer(shaped_buffer, backend->memory_allocator(), in SwapIn()
459 auto allocator = backend->memory_allocator(); in MakeTuple()
/external/tensorflow/tensorflow/compiler/xla/service/cpu/
cpu_executable.cc
82 se::DeviceMemoryAllocator* memory_allocator, int device_ordinal, in CreateBufferTable() argument
122 TF_ASSIGN_OR_RETURN(owning_buffers[i], memory_allocator->Allocate( in CreateBufferTable()
313 se::DeviceMemoryAllocator* memory_allocator = run_options->allocator(); in ExecuteAsyncOnStream() local
319 CreateBufferTable(memory_allocator, stream->parent()->device_ordinal(), in ExecuteAsyncOnStream()
cpu_executable.h
105 CreateBufferTable(se::DeviceMemoryAllocator* memory_allocator,
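
On the CPU side, CreateBufferTable plays the role that BufferAllocations::Builder plays on the GPU: it takes the run options' allocator (line 313) plus a device ordinal and fills a table of owning buffers by calling Allocate(device_ordinal, size) per allocation (line 122). A hedged sketch of that loop; the buffer-size vector and control flow are simplified, and header paths are approximate:

// Sketch only; the Allocate call matches the line-122 hit.
#include <cstdint>
#include <vector>
#include "tensorflow/stream_executor/device_memory_allocator.h"

xla::Status CreateBufferTableSketch(
    se::DeviceMemoryAllocator* memory_allocator, int device_ordinal,
    const std::vector<int64_t>& buffer_sizes,
    std::vector<se::OwningDeviceMemory>* owning_buffers) {
  owning_buffers->resize(buffer_sizes.size());
  for (size_t i = 0; i < buffer_sizes.size(); ++i) {
    // Each table slot is backed by the caller-provided allocator.
    TF_ASSIGN_OR_RETURN(
        (*owning_buffers)[i],
        memory_allocator->Allocate(device_ordinal, buffer_sizes[i]));
  }
  return xla::Status::OK();
}
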
/external/tensorflow/tensorflow/compiler/jit/
xla_tensor.cc
55 client->backend().memory_allocator(), in AllocateShapedBuffer()
63 client->backend().memory_allocator()->Allocate( in AllocateShapedBuffer()
xla_compile_on_demand_op.cc
61 client, client->backend().memory_allocator(), in Run()
78 run_options.set_allocator(client->backend().memory_allocator()); in Run()
/external/tensorflow/tensorflow/compiler/xla/service/
hlo_runner.cc
102 literal.shape(), backend().memory_allocator(), in TransferLiteralToDevice()
312 argument->shape(), backend().memory_allocator(), device)); in ExecuteReplicated()
438 backend().memory_allocator())); in CreateExecutable()
443 backend().memory_allocator()); in CreateExecutable()
452 run_options.set_allocator(backend().memory_allocator()); in GetServiceRunOptionsForDevice()
/external/tensorflow/tensorflow/compiler/xla/service/gpu/tests/
gpu_convolution_regression_test.cc
31 backend().default_stream_executor(), backend().memory_allocator()); in CheckForHloText()
/external/tensorflow/tensorflow/stream_executor/gpu/
redzone_allocator.h
45 RedzoneAllocator(Stream* stream, DeviceMemoryAllocator* memory_allocator,
/external/tensorflow/tensorflow/compiler/xla/client/
local_client.cc
158 run_options.set_allocator(backend_->memory_allocator()); in RunHelper()
370 allocator = backend().memory_allocator(); in LiteralToShapedBuffer()
423 shape, backend().memory_allocator(), device_ordinal)); in TransferToLocalServer()
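
local_client.cc also shows the default-allocator convention used throughout these results: callers may supply their own se::DeviceMemoryAllocator, and anything left unset falls back to backend().memory_allocator(), both when populating run options (line 158) and when staging literals into device buffers (lines 370 and 423). A hedged sketch of that fallback; the helper is hypothetical and the AllocateScopedShapedBuffer call is inferred from the line-423 and xrt_state.cc hits:

// Sketch only; fallback and allocation pattern from the hits above.
#include <utility>
#include "tensorflow/compiler/xla/client/local_client.h"

xla::StatusOr<xla::ScopedShapedBuffer> StageLiteralSketch(
    xla::LocalClient* client, const xla::Literal& literal,
    int device_ordinal, se::DeviceMemoryAllocator* allocator) {
  if (allocator == nullptr) {
    // Line 370: an unset allocator defaults to the backend's.
    allocator = client->backend().memory_allocator();
  }
  // Line 423: the transfer manager carves device buffers for the
  // literal's shape out of that allocator.
  TF_ASSIGN_OR_RETURN(
      xla::ScopedShapedBuffer buffer,
      client->backend().transfer_manager()->AllocateScopedShapedBuffer(
          literal.shape(), allocator, device_ordinal));
  return std::move(buffer);
}
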
