// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/thread_heap_usage_tracker.h"

#include <map>

#include "base/allocator/allocator_shim.h"
#include "base/allocator/features.h"
#include "testing/gtest/include/gtest/gtest.h"

#if defined(OS_MACOSX)
#include "base/allocator/allocator_interception_mac.h"
#endif

namespace base {
namespace debug {

namespace {

class TestingThreadHeapUsageTracker : public ThreadHeapUsageTracker {
 public:
  using ThreadHeapUsageTracker::DisableHeapTrackingForTesting;
  using ThreadHeapUsageTracker::EnsureTLSInitialized;
  using ThreadHeapUsageTracker::GetDispatchForTesting;
};

// A fixture class that allows testing the AllocatorDispatch associated with
// the ThreadHeapUsageTracker class in isolation against a mocked underlying
// heap implementation.
class ThreadHeapUsageTrackerTest : public testing::Test {
 public:
  using AllocatorDispatch = base::allocator::AllocatorDispatch;

  static const size_t kAllocationPadding;
  enum SizeFunctionKind {
    EXACT_SIZE_FUNCTION,
    PADDING_SIZE_FUNCTION,
    ZERO_SIZE_FUNCTION,
  };

  ThreadHeapUsageTrackerTest() : size_function_kind_(EXACT_SIZE_FUNCTION) {
    EXPECT_EQ(nullptr, g_self);
    g_self = this;
  }

  ~ThreadHeapUsageTrackerTest() override {
    EXPECT_EQ(this, g_self);
    g_self = nullptr;
  }

  void set_size_function_kind(SizeFunctionKind kind) {
    size_function_kind_ = kind;
  }
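
  // SetUp() below splices g_mock_dispatch in underneath the dispatch under
  // test, so that heap calls flow through the tracker's shim and then into
  // the mocked heap rather than the real allocator.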
  void SetUp() override {
    TestingThreadHeapUsageTracker::EnsureTLSInitialized();

    dispatch_under_test_ =
        TestingThreadHeapUsageTracker::GetDispatchForTesting();
    ASSERT_EQ(nullptr, dispatch_under_test_->next);

    dispatch_under_test_->next = &g_mock_dispatch;
  }

  void TearDown() override {
    ASSERT_EQ(&g_mock_dispatch, dispatch_under_test_->next);

    dispatch_under_test_->next = nullptr;
  }
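
  // The Mock* helpers below invoke the dispatch under test directly,
  // simulating the calls the allocator shim would deliver for malloc, calloc,
  // aligned alloc, realloc, free and size estimation.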
  void* MockMalloc(size_t size) {
    return dispatch_under_test_->alloc_function(dispatch_under_test_, size,
                                                nullptr);
  }

  void* MockCalloc(size_t n, size_t size) {
    return dispatch_under_test_->alloc_zero_initialized_function(
        dispatch_under_test_, n, size, nullptr);
  }

  void* MockAllocAligned(size_t alignment, size_t size) {
    return dispatch_under_test_->alloc_aligned_function(
        dispatch_under_test_, alignment, size, nullptr);
  }

  void* MockRealloc(void* address, size_t size) {
    return dispatch_under_test_->realloc_function(dispatch_under_test_, address,
                                                  size, nullptr);
  }

  void MockFree(void* address) {
    dispatch_under_test_->free_function(dispatch_under_test_, address, nullptr);
  }

  size_t MockGetSizeEstimate(void* address) {
    return dispatch_under_test_->get_size_estimate_function(
        dispatch_under_test_, address, nullptr);
  }

 private:
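  // Bookkeeping for the mocked heap: tracks each live allocation's requested
  // size so GetSizeEstimate() can emulate the various size functions an
  // underlying heap might provide.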
  void RecordAlloc(void* address, size_t size) {
    if (address != nullptr)
      allocation_size_map_[address] = size;
  }

  void DeleteAlloc(void* address) {
    if (address != nullptr)
      EXPECT_EQ(1U, allocation_size_map_.erase(address));
  }

  size_t GetSizeEstimate(void* address) {
    auto it = allocation_size_map_.find(address);
    if (it == allocation_size_map_.end())
      return 0;

    size_t ret = it->second;
    switch (size_function_kind_) {
      case EXACT_SIZE_FUNCTION:
        break;
      case PADDING_SIZE_FUNCTION:
        ret += kAllocationPadding;
        break;
      case ZERO_SIZE_FUNCTION:
        ret = 0;
        break;
    }

    return ret;
  }
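
  // The OnXxxFn functions below implement the mocked underlying heap. They
  // forward to the real malloc/calloc/realloc/free while recording the
  // allocation sizes through g_self.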
  static void* OnAllocFn(const AllocatorDispatch* self,
                         size_t size,
                         void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnAllocZeroInitializedFn(const AllocatorDispatch* self,
                                        size_t n,
                                        size_t size,
                                        void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    void* ret = calloc(n, size);
    g_self->RecordAlloc(ret, n * size);
    return ret;
  }

  static void* OnAllocAlignedFn(const AllocatorDispatch* self,
                                size_t alignment,
                                size_t size,
                                void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    // This is a cheat as it doesn't return aligned allocations. This has the
    // advantage of working for all platforms for this test.
    void* ret = malloc(size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void* OnReallocFn(const AllocatorDispatch* self,
                           void* address,
                           size_t size,
                           void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    void* ret = realloc(address, size);
    g_self->RecordAlloc(ret, size);
    return ret;
  }

  static void OnFreeFn(const AllocatorDispatch* self,
                       void* address,
                       void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    g_self->DeleteAlloc(address);
    free(address);
  }

  static size_t OnGetSizeEstimateFn(const AllocatorDispatch* self,
                                    void* address,
                                    void* context) {
    EXPECT_EQ(&g_mock_dispatch, self);

    return g_self->GetSizeEstimate(address);
  }

  using AllocationSizeMap = std::map<void*, size_t>;

  SizeFunctionKind size_function_kind_;
  AllocationSizeMap allocation_size_map_;
  AllocatorDispatch* dispatch_under_test_;

  static base::allocator::AllocatorDispatch g_mock_dispatch;
  static ThreadHeapUsageTrackerTest* g_self;
};

const size_t ThreadHeapUsageTrackerTest::kAllocationPadding = 23;

ThreadHeapUsageTrackerTest* ThreadHeapUsageTrackerTest::g_self = nullptr;

base::allocator::AllocatorDispatch ThreadHeapUsageTrackerTest::g_mock_dispatch =
    {
        &ThreadHeapUsageTrackerTest::OnAllocFn,  // alloc_function
        &ThreadHeapUsageTrackerTest::
            OnAllocZeroInitializedFn,  // alloc_zero_initialized_function
        &ThreadHeapUsageTrackerTest::
            OnAllocAlignedFn,  // alloc_aligned_function
        &ThreadHeapUsageTrackerTest::OnReallocFn,  // realloc_function
        &ThreadHeapUsageTrackerTest::OnFreeFn,     // free_function
        &ThreadHeapUsageTrackerTest::
            OnGetSizeEstimateFn,  // get_size_estimate_function
        nullptr,                  // batch_malloc_function
        nullptr,                  // batch_free_function
        nullptr,                  // free_definite_size_function
        nullptr,                  // next
};

}  // namespace
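
// The tests below exercise the tracker exclusively through the dispatch under
// test, with the mocked heap above standing in for the real allocator.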

TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithExactSizeFunction) {
  set_size_function_kind(EXACT_SIZE_FUNCTION);

  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  ThreadHeapUsage u1 = ThreadHeapUsageTracker::GetUsageSnapshot();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  usage_tracker.Stop(false);
  ThreadHeapUsage u2 = usage_tracker.usage();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize, u2.free_bytes);
  EXPECT_EQ(kAllocSize, u2.max_allocated_bytes);
}

TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithPaddingSizeFunction) {
  set_size_function_kind(PADDING_SIZE_FUNCTION);

  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  ThreadHeapUsage u1 = ThreadHeapUsageTracker::GetUsageSnapshot();

  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  usage_tracker.Stop(false);
  ThreadHeapUsage u2 = usage_tracker.usage();

  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.alloc_bytes);
  EXPECT_EQ(kAllocationPadding, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.free_bytes);
  EXPECT_EQ(kAllocSize + kAllocationPadding, u2.max_allocated_bytes);
}

TEST_F(ThreadHeapUsageTrackerTest, SimpleUsageWithZeroSizeFunction) {
  set_size_function_kind(ZERO_SIZE_FUNCTION);

  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  ThreadHeapUsage u1 = ThreadHeapUsageTracker::GetUsageSnapshot();
  EXPECT_EQ(0U, u1.alloc_ops);
  EXPECT_EQ(0U, u1.alloc_bytes);
  EXPECT_EQ(0U, u1.alloc_overhead_bytes);
  EXPECT_EQ(0U, u1.free_ops);
  EXPECT_EQ(0U, u1.free_bytes);
  EXPECT_EQ(0U, u1.max_allocated_bytes);

  const size_t kAllocSize = 1029U;
  void* ptr = MockMalloc(kAllocSize);
  MockFree(ptr);

  usage_tracker.Stop(false);
  ThreadHeapUsage u2 = usage_tracker.usage();

  // With a get-size function that returns zero, there's no way to get the
  // size of an allocation that's being freed; hence the shim can tally
  // neither the freed bytes nor the high-watermark allocated bytes.
  EXPECT_EQ(1U, u2.alloc_ops);
  EXPECT_EQ(kAllocSize, u2.alloc_bytes);
  EXPECT_EQ(0U, u2.alloc_overhead_bytes);
  EXPECT_EQ(1U, u2.free_ops);
  EXPECT_EQ(0U, u2.free_bytes);
  EXPECT_EQ(0U, u2.max_allocated_bytes);
}

TEST_F(ThreadHeapUsageTrackerTest, ReallocCorrectlyTallied) {
  const size_t kAllocSize = 237U;

  {
    ThreadHeapUsageTracker usage_tracker;
    usage_tracker.Start();

    // Reallocating nullptr should count as a single alloc.
    void* ptr = MockRealloc(nullptr, kAllocSize);
    ThreadHeapUsage usage = ThreadHeapUsageTracker::GetUsageSnapshot();
    EXPECT_EQ(1U, usage.alloc_ops);
    EXPECT_EQ(kAllocSize, usage.alloc_bytes);
    EXPECT_EQ(0U, usage.alloc_overhead_bytes);
    EXPECT_EQ(0U, usage.free_ops);
    EXPECT_EQ(0U, usage.free_bytes);
    EXPECT_EQ(kAllocSize, usage.max_allocated_bytes);

    // Reallocating a valid pointer to a zero size should count as a single
    // free.
    ptr = MockRealloc(ptr, 0U);

    usage_tracker.Stop(false);
    EXPECT_EQ(1U, usage_tracker.usage().alloc_ops);
    EXPECT_EQ(kAllocSize, usage_tracker.usage().alloc_bytes);
    EXPECT_EQ(0U, usage_tracker.usage().alloc_overhead_bytes);
    EXPECT_EQ(1U, usage_tracker.usage().free_ops);
    EXPECT_EQ(kAllocSize, usage_tracker.usage().free_bytes);
    EXPECT_EQ(kAllocSize, usage_tracker.usage().max_allocated_bytes);

    // Realloc to zero size may or may not return a nullptr - make sure to
    // free the zero-size alloc in the latter case.
    if (ptr != nullptr)
      MockFree(ptr);
  }

  {
    ThreadHeapUsageTracker usage_tracker;
    usage_tracker.Start();

    void* ptr = MockMalloc(kAllocSize);
    ThreadHeapUsage usage = ThreadHeapUsageTracker::GetUsageSnapshot();
    EXPECT_EQ(1U, usage.alloc_ops);

    // Now try reallocating a valid pointer to a larger size; this should
    // count as one free and one alloc.
    const size_t kLargerAllocSize = kAllocSize + 928U;
    ptr = MockRealloc(ptr, kLargerAllocSize);

    usage_tracker.Stop(false);
    EXPECT_EQ(2U, usage_tracker.usage().alloc_ops);
    EXPECT_EQ(kAllocSize + kLargerAllocSize, usage_tracker.usage().alloc_bytes);
    EXPECT_EQ(0U, usage_tracker.usage().alloc_overhead_bytes);
    EXPECT_EQ(1U, usage_tracker.usage().free_ops);
    EXPECT_EQ(kAllocSize, usage_tracker.usage().free_bytes);
    EXPECT_EQ(kLargerAllocSize, usage_tracker.usage().max_allocated_bytes);

    MockFree(ptr);
  }
}

TEST_F(ThreadHeapUsageTrackerTest, NestedMaxWorks) {
  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  EXPECT_EQ(kOuterAllocSize,
            ThreadHeapUsageTracker::GetUsageSnapshot().max_allocated_bytes);

  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    const size_t kInnerAllocSize = 673U;
    ptr = MockMalloc(kInnerAllocSize);
    MockFree(ptr);

    inner_usage_tracker.Stop(false);

    EXPECT_EQ(kInnerAllocSize, inner_usage_tracker.usage().max_allocated_bytes);
  }

  // The greater, outer allocation size should have been restored.
  EXPECT_EQ(kOuterAllocSize,
            ThreadHeapUsageTracker::GetUsageSnapshot().max_allocated_bytes);

  const size_t kLargerInnerAllocSize = kOuterAllocSize + 673U;
  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    inner_usage_tracker.Stop(false);
    EXPECT_EQ(kLargerInnerAllocSize,
              inner_usage_tracker.usage().max_allocated_bytes);
  }

  // The greater, inner allocation size should have been preserved.
  EXPECT_EQ(kLargerInnerAllocSize,
            ThreadHeapUsageTracker::GetUsageSnapshot().max_allocated_bytes);

  // Now try the case with an outstanding net alloc size when entering the
  // inner scope.
  void* outer_ptr = MockMalloc(kOuterAllocSize);
  EXPECT_EQ(kLargerInnerAllocSize,
            ThreadHeapUsageTracker::GetUsageSnapshot().max_allocated_bytes);
  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    ptr = MockMalloc(kLargerInnerAllocSize);
    MockFree(ptr);

    inner_usage_tracker.Stop(false);
    EXPECT_EQ(kLargerInnerAllocSize,
              inner_usage_tracker.usage().max_allocated_bytes);
  }

  // While the inner scope saw only the inner net outstanding allocation size,
  // the outer scope saw both outstanding at the same time.
  EXPECT_EQ(kOuterAllocSize + kLargerInnerAllocSize,
            ThreadHeapUsageTracker::GetUsageSnapshot().max_allocated_bytes);

  MockFree(outer_ptr);

  // Test a net-negative scope.
  ptr = MockMalloc(kLargerInnerAllocSize);
  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    MockFree(ptr);

    const size_t kInnerAllocSize = 1;
    ptr = MockMalloc(kInnerAllocSize);

    inner_usage_tracker.Stop(false);
    // Since the scope is still net-negative, the max is clamped at zero.
    EXPECT_EQ(0U, inner_usage_tracker.usage().max_allocated_bytes);
  }

  MockFree(ptr);
}

TEST_F(ThreadHeapUsageTrackerTest, NoStopImpliesInclusive) {
  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  ThreadHeapUsage usage = ThreadHeapUsageTracker::GetUsageSnapshot();
  EXPECT_EQ(kOuterAllocSize, usage.max_allocated_bytes);

  const size_t kInnerLargerAllocSize = kOuterAllocSize + 673U;

  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    // Make a larger allocation than the outer scope.
    ptr = MockMalloc(kInnerLargerAllocSize);
    MockFree(ptr);

    // inner_usage_tracker goes out of scope without a Stop().
  }

  ThreadHeapUsage current = ThreadHeapUsageTracker::GetUsageSnapshot();
  EXPECT_EQ(usage.alloc_ops + 1, current.alloc_ops);
  EXPECT_EQ(usage.alloc_bytes + kInnerLargerAllocSize, current.alloc_bytes);
  EXPECT_EQ(usage.free_ops + 1, current.free_ops);
  EXPECT_EQ(usage.free_bytes + kInnerLargerAllocSize, current.free_bytes);
  EXPECT_EQ(kInnerLargerAllocSize, current.max_allocated_bytes);
}

TEST_F(ThreadHeapUsageTrackerTest, ExclusiveScopesWork) {
  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  const size_t kOuterAllocSize = 1029U;
  void* ptr = MockMalloc(kOuterAllocSize);
  MockFree(ptr);

  ThreadHeapUsage usage = ThreadHeapUsageTracker::GetUsageSnapshot();
  EXPECT_EQ(kOuterAllocSize, usage.max_allocated_bytes);

  {
    ThreadHeapUsageTracker inner_usage_tracker;
    inner_usage_tracker.Start();

    // Make a larger allocation than the outer scope.
    ptr = MockMalloc(kOuterAllocSize + 673U);
    MockFree(ptr);

    // This tracker is exclusive; all activity should be private to this
    // scope.
    inner_usage_tracker.Stop(true);
  }

  ThreadHeapUsage current = ThreadHeapUsageTracker::GetUsageSnapshot();
  EXPECT_EQ(usage.alloc_ops, current.alloc_ops);
  EXPECT_EQ(usage.alloc_bytes, current.alloc_bytes);
  EXPECT_EQ(usage.alloc_overhead_bytes, current.alloc_overhead_bytes);
  EXPECT_EQ(usage.free_ops, current.free_ops);
  EXPECT_EQ(usage.free_bytes, current.free_bytes);
  EXPECT_EQ(usage.max_allocated_bytes, current.max_allocated_bytes);
}

TEST_F(ThreadHeapUsageTrackerTest, AllShimFunctionsAreProvided) {
  const size_t kAllocSize = 100;
  void* alloc = MockMalloc(kAllocSize);
  size_t estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockCalloc(kAllocSize, 1);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);

  alloc = MockAllocAligned(1, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);

  alloc = MockRealloc(alloc, kAllocSize);
  estimate = MockGetSizeEstimate(alloc);
  ASSERT_TRUE(estimate == 0 || estimate >= kAllocSize);
  MockFree(alloc);
}

#if BUILDFLAG(USE_ALLOCATOR_SHIM)
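// End-to-end test that exercises the tracker against the real allocator shim.
// On macOS the shim is not installed by default, so it's set up in SetUp()
// and the malloc zone interception is undone again in TearDown().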
class ThreadHeapUsageShimTest : public testing::Test {
#if defined(OS_MACOSX)
  void SetUp() override { allocator::InitializeAllocatorShim(); }
  void TearDown() override { allocator::UninterceptMallocZonesForTesting(); }
#endif
};

TEST_F(ThreadHeapUsageShimTest, HooksIntoMallocWhenShimAvailable) {
  ASSERT_FALSE(ThreadHeapUsageTracker::IsHeapTrackingEnabled());

  ThreadHeapUsageTracker::EnableHeapTracking();

  ASSERT_TRUE(ThreadHeapUsageTracker::IsHeapTrackingEnabled());

  const size_t kAllocSize = 9993;
  // This test verifies that the scoped heap data is affected by malloc &
  // free only when the shim is available.
  ThreadHeapUsageTracker usage_tracker;
  usage_tracker.Start();

  ThreadHeapUsage u1 = ThreadHeapUsageTracker::GetUsageSnapshot();
  void* ptr = malloc(kAllocSize);
  // Prevent the compiler from optimizing out the malloc/free pair.
  ASSERT_NE(nullptr, ptr);

  ThreadHeapUsage u2 = ThreadHeapUsageTracker::GetUsageSnapshot();
  free(ptr);

  usage_tracker.Stop(false);
  ThreadHeapUsage u3 = usage_tracker.usage();

  // Verify that at least one allocation operation was recorded, and that the
  // operation counts grow monotonically across snapshots.
  EXPECT_LE(0U, u1.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u2.alloc_ops);
  EXPECT_LE(u1.alloc_ops + 1, u3.alloc_ops);

  // Verify that at least the bytes above were recorded.
  EXPECT_LE(u1.alloc_bytes + kAllocSize, u2.alloc_bytes);

  // Verify that at least the one free operation above was recorded.
  EXPECT_LE(u2.free_ops + 1, u3.free_ops);

  TestingThreadHeapUsageTracker::DisableHeapTrackingForTesting();

  ASSERT_FALSE(ThreadHeapUsageTracker::IsHeapTrackingEnabled());
}
#endif  // BUILDFLAG(USE_ALLOCATOR_SHIM)

}  // namespace debug
}  // namespace base