@@ -33,6 +33,13 @@ using namespace folly;
33
33
34
34
// Compile-time check: the trait reports SysArena's deallocate as trivial
// (presumably a no-op, so allocator adaptors may skip per-object frees —
// confirm against folly/Arena.h).
static_assert (AllocatorHasTrivialDeallocate<SysArena>::value, " " );
35
35
36
+ void * alloc (SysArena& arena, size_t size) {
37
+ void * const mem = arena.allocate (size);
38
+ // Fill with garbage to detect heap corruption.
39
+ memset (mem, 0xff , size);
40
+ return mem;
41
+ }
42
+
36
43
TEST (Arena, SizeSanity) {
37
44
std::set<size_t *> allocatedItems;
38
45
@@ -42,7 +49,7 @@ TEST(Arena, SizeSanity) {
42
49
EXPECT_EQ (arena.totalSize (), minimum_size);
43
50
44
51
// Insert a single small element to get a new block
45
- size_t * ptr = static_cast <size_t *>(arena. allocate ( sizeof (long )));
52
+ size_t * ptr = static_cast <size_t *>(alloc (arena, sizeof (long )));
46
53
allocatedItems.insert (ptr);
47
54
minimum_size += requestedBlockSize;
48
55
maximum_size += goodMallocSize (requestedBlockSize + SysArena::kBlockOverhead );
@@ -52,7 +59,7 @@ TEST(Arena, SizeSanity) {
52
59
<< maximum_size;
53
60
54
61
// Insert a larger element, size should be the same
55
- ptr = static_cast <size_t *>(arena. allocate ( requestedBlockSize / 2 ));
62
+ ptr = static_cast <size_t *>(alloc (arena, requestedBlockSize / 2 ));
56
63
allocatedItems.insert (ptr);
57
64
EXPECT_GE (arena.totalSize (), minimum_size);
58
65
EXPECT_LE (arena.totalSize (), maximum_size);
@@ -61,7 +68,7 @@ TEST(Arena, SizeSanity) {
61
68
62
69
// Insert 10 full block sizes to get 10 new blocks
63
70
for (int i = 0 ; i < 10 ; i++) {
64
- ptr = static_cast <size_t *>(arena. allocate ( requestedBlockSize));
71
+ ptr = static_cast <size_t *>(alloc (arena, requestedBlockSize));
65
72
allocatedItems.insert (ptr);
66
73
}
67
74
minimum_size += 10 * requestedBlockSize;
@@ -73,7 +80,7 @@ TEST(Arena, SizeSanity) {
73
80
<< maximum_size;
74
81
75
82
// Insert something huge
76
- ptr = static_cast <size_t *>(arena. allocate ( 10 * requestedBlockSize));
83
+ ptr = static_cast <size_t *>(alloc (arena, 10 * requestedBlockSize));
77
84
allocatedItems.insert (ptr);
78
85
minimum_size += 10 * requestedBlockSize;
79
86
maximum_size +=
@@ -109,30 +116,30 @@ TEST(Arena, BytesUsedSanity) {
109
116
EXPECT_EQ (arena.bytesUsed (), bytesUsed);
110
117
111
118
// Insert 2 small chunks
112
- arena. allocate ( smallChunkSize);
113
- arena. allocate ( smallChunkSize);
119
+ alloc (arena, smallChunkSize);
120
+ alloc (arena, smallChunkSize);
114
121
bytesUsed += 2 * smallChunkSize;
115
122
EXPECT_EQ (arena.bytesUsed (), bytesUsed);
116
123
EXPECT_GE (arena.totalSize (), blockSize);
117
124
EXPECT_LE (arena.totalSize (), 2 * blockSize);
118
125
119
126
// Insert big chunk, should still fit in one block
120
- arena. allocate ( bigChunkSize);
127
+ alloc (arena, bigChunkSize);
121
128
bytesUsed += bigChunkSize;
122
129
EXPECT_EQ (arena.bytesUsed (), bytesUsed);
123
130
EXPECT_GE (arena.totalSize (), blockSize);
124
131
EXPECT_LE (arena.totalSize (), 2 * blockSize);
125
132
126
133
// Insert big chunk once more, should trigger new block allocation
127
- arena. allocate ( bigChunkSize);
134
+ alloc (arena, bigChunkSize);
128
135
bytesUsed += bigChunkSize;
129
136
EXPECT_EQ (arena.bytesUsed (), bytesUsed);
130
137
EXPECT_GE (arena.totalSize (), 2 * blockSize);
131
138
EXPECT_LE (arena.totalSize (), 3 * blockSize);
132
139
133
140
// Test that bytesUsed() accounts for alignment
134
141
static const size_t tinyChunkSize = 7 ;
135
- arena. allocate ( tinyChunkSize);
142
+ alloc (arena, tinyChunkSize);
136
143
EXPECT_GE (arena.bytesUsed (), bytesUsed + tinyChunkSize);
137
144
size_t delta = arena.bytesUsed () - bytesUsed;
138
145
EXPECT_EQ (delta & (delta - 1 ), 0 );
@@ -195,7 +202,7 @@ TEST(Arena, FallbackSysArenaDoesFallbackToHeap) {
195
202
SysArena arena0;
196
203
FallBackIntAlloc f_no_init;
197
204
FallBackIntAlloc f_do_init (arena0);
198
- arena0. allocate ( 1 ); // First allocation to prime the arena
205
+ alloc (arena0, 1 ); // First allocation to prime the arena
199
206
200
207
std::vector<int , FallBackIntAlloc> vec_arg_empty__fallback;
201
208
std::vector<int , FallBackIntAlloc> vec_arg_noinit_fallback (f_no_init);
@@ -239,19 +246,19 @@ TEST(Arena, SizeLimit) {
239
246
240
247
SysArena arena (requestedBlockSize, maxSize);
241
248
242
- void * a = arena. allocate ( sizeof (size_t ));
249
+ void * a = alloc (arena, sizeof (size_t ));
243
250
EXPECT_TRUE (a != nullptr );
244
- EXPECT_THROW (arena. allocate ( maxSize + 1 ), std::bad_alloc);
251
+ EXPECT_THROW (alloc (arena, maxSize + 1 ), std::bad_alloc);
245
252
}
246
253
247
254
TEST (Arena, ExtremeSize) {
248
255
static const size_t requestedBlockSize = sizeof (size_t );
249
256
250
257
SysArena arena (requestedBlockSize);
251
258
252
- void * a = arena. allocate ( sizeof (size_t ));
259
+ void * a = alloc (arena, sizeof (size_t ));
253
260
EXPECT_TRUE (a != nullptr );
254
- EXPECT_THROW (arena. allocate ( SIZE_MAX - 2 ), std::bad_alloc);
261
+ EXPECT_THROW (alloc (arena, SIZE_MAX - 2 ), std::bad_alloc);
255
262
}
256
263
257
264
TEST (Arena, MaxAlign) {
@@ -263,19 +270,30 @@ TEST(Arena, MaxAlign) {
263
270
SysArena arena (blockSize, SysArena::kNoSizeLimit , maxAlign);
264
271
265
272
for (int i = 0 ; i < 100 ; i++) {
266
- void * ptr = arena. allocate ( Random::rand32 (100 ));
273
+ void * ptr = alloc (arena, Random::rand32 (100 ));
267
274
EXPECT_EQ (reinterpret_cast <uintptr_t >(ptr) & (maxAlign - 1 ), 0 );
268
275
}
269
276
270
277
// Reusing blocks also respects alignment
271
278
arena.clear ();
272
279
for (int i = 0 ; i < 100 ; i++) {
273
- void * ptr = arena. allocate ( Random::rand32 (100 ));
280
+ void * ptr = alloc (arena, Random::rand32 (100 ));
274
281
EXPECT_EQ (reinterpret_cast <uintptr_t >(ptr) & (maxAlign - 1 ), 0 );
275
282
}
276
283
}
277
284
}
278
285
286
+ // This used to cause heap corruption due to incorrect allocation size.
287
+ TEST (Arena, AllocFullBlock) {
288
+ static const size_t blockSize = 128 ;
289
+
290
+ for (const size_t maxAlign : {4 , 8 , 16 , 32 , 64 }) {
291
+ SCOPED_TRACE (maxAlign);
292
+ SysArena arena (blockSize, SysArena::kNoSizeLimit , maxAlign);
293
+ alloc (arena, blockSize);
294
+ }
295
+ }
296
+
279
297
TEST (Arena, Clear) {
280
298
static const size_t blockSize = 1024 ;
281
299
SysArena arena (blockSize);
@@ -288,7 +306,7 @@ TEST(Arena, Clear) {
288
306
289
307
std::vector<void *> addresses;
290
308
for (auto s : sizes) {
291
- addresses.push_back (arena. allocate ( s));
309
+ addresses.push_back (alloc (arena, s));
292
310
}
293
311
294
312
const size_t totalSize = arena.totalSize ();
@@ -298,7 +316,7 @@ TEST(Arena, Clear) {
298
316
299
317
int j = 0 ;
300
318
for (auto s : sizes) {
301
- auto addr = arena. allocate ( s);
319
+ auto addr = alloc (arena, s);
302
320
if (s <= blockSize) {
303
321
EXPECT_EQ (addr, addresses[j]);
304
322
}
@@ -317,7 +335,7 @@ TEST(Arena, ClearAfterLarge) {
317
335
constexpr size_t mult = 10 ;
318
336
SysArena arena (blockSize);
319
337
EXPECT_EQ (0 , arena.bytesUsed ());
320
- arena. allocate ( blockSize * mult);
338
+ alloc (arena, blockSize * mult);
321
339
EXPECT_EQ (blockSize * mult, arena.bytesUsed ());
322
340
arena.clear ();
323
341
EXPECT_EQ (0 , arena.bytesUsed ());
@@ -330,8 +348,8 @@ TEST(Arena, Merge) {
330
348
SysArena arena1 (blockSize);
331
349
SysArena arena2 (blockSize);
332
350
333
- arena1. allocate ( 16 );
334
- arena2. allocate ( 32 );
351
+ alloc (arena1, 16 );
352
+ alloc (arena2, 32 );
335
353
336
354
EXPECT_EQ (blockAllocSize + sizeof (SysArena), arena1.totalSize ());
337
355
EXPECT_EQ (blockAllocSize + sizeof (SysArena), arena2.totalSize ());
0 commit comments