Skip to content

Commit 498594b

Browse files
committed
Import the source code of mimalloc
This commit imports mimalloc's source code as per 114d05c (bump to version 1.1, 2019-10-07), as fetched from the v1.1.0 tag at https://github.com/microsoft/mimalloc.

The .c files are taken from the src/ subdirectory, and the .h files from the include/ subdirectory. We will subsequently modify the source code to accommodate building within Git's context.

Since we plan on using only the `mi_*()` family of functions, we skip the C++-specific source code, the POSIX-compliant wrapper functions, and the code that supports auto-magic overriding of the `malloc()` function (mimalloc-new-delete.h, alloc-posix.c, mimalloc-override.h, alloc-override.c, alloc-override-osx.c, alloc-override-win.c and static.c).

Signed-off-by: Johannes Schindelin <[email protected]>
1 parent 8bd3034 commit 498594b

15 files changed

+7510
-0
lines changed

compat/mimalloc/alloc-aligned.c

Lines changed: 204 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,204 @@
1+
/* ----------------------------------------------------------------------------
2+
Copyright (c) 2018, Microsoft Research, Daan Leijen
3+
This is free software; you can redistribute it and/or modify it under the
4+
terms of the MIT license. A copy of the license can be found in the file
5+
"LICENSE" at the root of this distribution.
6+
-----------------------------------------------------------------------------*/
7+
8+
#include "mimalloc.h"
9+
#include "mimalloc-internal.h"
10+
11+
#include <string.h> // memset, memcpy
12+
13+
// ------------------------------------------------------
14+
// Aligned Allocation
15+
// ------------------------------------------------------
16+
17+
// Allocate `size` bytes from `heap` such that `(uintptr_t)result + offset`
// is a multiple of `alignment` (a power of two). If `zero` is set, the
// block is zero-initialized. Returns NULL on failure or invalid arguments.
static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero) mi_attr_noexcept {
  // note: we don't require `size > offset`, we just guarantee that
  // the address at offset is aligned regardless of the allocated size.
  mi_assert(alignment > 0 && alignment % sizeof(void*) == 0);
  if (mi_unlikely(size > PTRDIFF_MAX)) return NULL;               // we don't allocate more than PTRDIFF_MAX (see <https://sourceware.org/ml/libc-announce/2019/msg00001.html>)
  if (mi_unlikely(alignment==0 || !_mi_is_power_of_two(alignment))) return NULL; // require power-of-two (see <https://en.cppreference.com/w/c/memory/aligned_alloc>)
  const uintptr_t align_mask = alignment-1;  // for any x, `(x & align_mask) == (x % alignment)`

  // try if there is a small block available with just the right alignment
  if (mi_likely(size <= MI_SMALL_SIZE_MAX)) {
    mi_page_t* page = _mi_heap_get_free_small_page(heap,size);
    // `page->free` may be NULL; the offset-adjusted head must already be aligned
    const bool is_aligned = (((uintptr_t)page->free+offset) & align_mask)==0;
    if (mi_likely(page->free != NULL && is_aligned))
    {
      #if MI_STAT>1
      mi_heap_stat_increase( heap, malloc, size);
      #endif
      void* p = _mi_page_malloc(heap,page,size); // TODO: inline _mi_page_malloc
      mi_assert_internal(p != NULL);
      mi_assert_internal(((uintptr_t)p + offset) % alignment == 0);
      if (zero) _mi_block_zero_init(page,p,size);
      return p;
    }
  }

  // use regular allocation if it is guaranteed to fit the alignment constraints:
  // blocks up to MI_MEDIUM_OBJ_SIZE_MAX whose size is a multiple of the alignment
  // are naturally aligned to at least that alignment.
  if (offset==0 && alignment<=size && size<=MI_MEDIUM_OBJ_SIZE_MAX && (size&align_mask)==0) {
    void* p = _mi_heap_malloc_zero(heap, size, zero);
    mi_assert_internal(p == NULL || ((uintptr_t)p % alignment) == 0);
    return p;
  }

  // otherwise over-allocate so we can align within the block.
  // guard against `size + alignment - 1` wrapping around `size_t`: `size` was
  // only checked against PTRDIFF_MAX and `alignment` can be any power of two,
  // so the sum can overflow and allocate a too-small block.
  if (mi_unlikely(size > SIZE_MAX - (alignment - 1))) return NULL;
  void* p = _mi_heap_malloc_zero(heap, size + alignment - 1, zero);
  if (p == NULL) return NULL;

  // .. and align within the allocation
  uintptr_t adjust = alignment - (((uintptr_t)p + offset) & align_mask);
  mi_assert_internal(adjust % sizeof(uintptr_t) == 0);
  void* aligned_p = (adjust == alignment ? p : (void*)((uintptr_t)p + adjust));
  // remember the block was shifted so `mi_free(aligned_p)` can recover `p`
  if (aligned_p != p) mi_page_set_has_aligned(_mi_ptr_page(p), true);
  mi_assert_internal(((uintptr_t)aligned_p + offset) % alignment == 0);
  mi_assert_internal( p == _mi_page_ptr_unalign(_mi_ptr_segment(aligned_p),_mi_ptr_page(aligned_p),aligned_p) );
  return aligned_p;
}
62+
63+
64+
// Aligned-at-offset allocation from an explicit heap (no zeroing).
void* mi_heap_malloc_aligned_at(mi_heap_t* heap, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  const bool zero = false;
  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, zero);
}
67+
68+
// Aligned allocation from an explicit heap; alignment applies to the block start.
void* mi_heap_malloc_aligned(mi_heap_t* heap, size_t size, size_t alignment) mi_attr_noexcept {
  return mi_heap_malloc_aligned_at(heap, size, alignment, /*offset*/ 0);
}
71+
72+
// Zero-initialized aligned-at-offset allocation from an explicit heap.
void* mi_heap_zalloc_aligned_at(mi_heap_t* heap, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  const bool zero = true;
  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, zero);
}
75+
76+
// Zero-initialized aligned allocation from an explicit heap.
void* mi_heap_zalloc_aligned(mi_heap_t* heap, size_t size, size_t alignment) mi_attr_noexcept {
  return mi_heap_zalloc_aligned_at(heap, size, alignment, /*offset*/ 0);
}
79+
80+
// calloc-style aligned-at-offset allocation: `count * size` zeroed bytes,
// or NULL when the element count overflows.
void* mi_heap_calloc_aligned_at(mi_heap_t* heap, size_t count, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  size_t total;
  if (!mi_mul_overflow(count, size, &total)) {
    return mi_heap_zalloc_aligned_at(heap, total, alignment, offset);
  }
  return NULL; // count * size does not fit in size_t
}
85+
86+
// calloc-style aligned allocation from an explicit heap.
void* mi_heap_calloc_aligned(mi_heap_t* heap, size_t count, size_t size, size_t alignment) mi_attr_noexcept {
  return mi_heap_calloc_aligned_at(heap, count, size, alignment, /*offset*/ 0);
}
89+
90+
// Default-heap variant of mi_heap_malloc_aligned_at.
void* mi_malloc_aligned_at(size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_malloc_aligned_at(heap, size, alignment, offset);
}
93+
94+
// Default-heap variant of mi_heap_malloc_aligned.
void* mi_malloc_aligned(size_t size, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_malloc_aligned(heap, size, alignment);
}
97+
98+
// Default-heap variant of mi_heap_zalloc_aligned_at.
void* mi_zalloc_aligned_at(size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_zalloc_aligned_at(heap, size, alignment, offset);
}
101+
102+
// Default-heap variant of mi_heap_zalloc_aligned.
void* mi_zalloc_aligned(size_t size, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_zalloc_aligned(heap, size, alignment);
}
105+
106+
// Default-heap variant of mi_heap_calloc_aligned_at.
void* mi_calloc_aligned_at(size_t count, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_calloc_aligned_at(heap, count, size, alignment, offset);
}
109+
110+
// Default-heap variant of mi_heap_calloc_aligned.
void* mi_calloc_aligned(size_t count, size_t size, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_calloc_aligned(heap, count, size, alignment);
}
113+
114+
115+
// Reallocate `p` to `newsize` bytes such that `(uintptr_t)result + offset`
// is a multiple of `alignment`; if `zero` is set, any grown portion is
// zero-initialized. On success of a move the old block is freed; on
// allocation failure NULL is returned and `p` remains valid (realloc contract).
static void* mi_heap_realloc_zero_aligned_at(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, size_t offset, bool zero) mi_attr_noexcept {
  mi_assert(alignment > 0);
  // word-sized (or smaller) alignment is always satisfied by the regular allocator
  if (alignment <= sizeof(uintptr_t)) return _mi_heap_realloc_zero(heap,p,newsize,zero);
  // NULL behaves like a fresh aligned allocation
  if (p == NULL) return mi_heap_malloc_zero_aligned_at(heap,newsize,alignment,offset,zero);
  size_t size = mi_usable_size(p);
  // keep the block in place only if it still fits, wastes at most ~50%
  // of the usable size, and already satisfies the alignment-at-offset
  if (newsize <= size && newsize >= (size - (size / 2))
      && (((uintptr_t)p + offset) % alignment) == 0) {
    return p; // reallocation still fits, is aligned and not more than 50% waste
  }
  else {
    // allocate a new aligned block and move the contents
    void* newp = mi_heap_malloc_aligned_at(heap,newsize,alignment,offset);
    if (newp != NULL) {
      if (zero && newsize > size) {
        // zero the bytes beyond the old content before the copy below
        const mi_page_t* page = _mi_ptr_page(newp);
        if (page->flags.is_zero) {
          // already zero initialized
          mi_assert_expensive(mi_mem_is_zero(newp,newsize));
        }
        else {
          // also set last word in the previous allocation to zero to ensure any padding is zero-initialized
          size_t start = (size >= sizeof(intptr_t) ? size - sizeof(intptr_t) : 0);
          memset((uint8_t*)newp + start, 0, newsize - start);
        }
      }
      // copy min(newsize, size) bytes of old content (overwrites the
      // overlapping tail-word zeroing above with the real old data)
      memcpy(newp, p, (newsize > size ? size : newsize));
      mi_free(p); // only free if successful
    }
    return newp;
  }
}
145+
146+
// Aligned reallocation that preserves the offset the previous block
// happened to have within its alignment (p may be NULL).
static void* mi_heap_realloc_zero_aligned(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, bool zero) mi_attr_noexcept {
  mi_assert(alignment > 0);
  if (alignment > sizeof(uintptr_t)) {
    // carry over the previous allocation's offset within the alignment (0 for NULL)
    const size_t offset = ((uintptr_t)p % alignment);
    return mi_heap_realloc_zero_aligned_at(heap, p, newsize, alignment, offset, zero);
  }
  // small alignments need no special handling
  return _mi_heap_realloc_zero(heap, p, newsize, zero);
}
152+
153+
// Aligned-at-offset reallocation from an explicit heap (no zeroing).
void* mi_heap_realloc_aligned_at(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, size_t offset) mi_attr_noexcept {
  const bool zero = false;
  return mi_heap_realloc_zero_aligned_at(heap, p, newsize, alignment, offset, zero);
}
156+
157+
// Aligned reallocation from an explicit heap (no zeroing).
void* mi_heap_realloc_aligned(mi_heap_t* heap, void* p, size_t newsize, size_t alignment) mi_attr_noexcept {
  const bool zero = false;
  return mi_heap_realloc_zero_aligned(heap, p, newsize, alignment, zero);
}
160+
161+
// Aligned-at-offset reallocation; grown bytes are zero-initialized.
void* mi_heap_rezalloc_aligned_at(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, size_t offset) mi_attr_noexcept {
  const bool zero = true;
  return mi_heap_realloc_zero_aligned_at(heap, p, newsize, alignment, offset, zero);
}
164+
165+
// Aligned reallocation; grown bytes are zero-initialized.
void* mi_heap_rezalloc_aligned(mi_heap_t* heap, void* p, size_t newsize, size_t alignment) mi_attr_noexcept {
  const bool zero = true;
  return mi_heap_realloc_zero_aligned(heap, p, newsize, alignment, zero);
}
168+
169+
// recalloc-style aligned-at-offset reallocation to `newcount * size` bytes;
// NULL when the element count overflows.
void* mi_heap_recalloc_aligned_at(mi_heap_t* heap, void* p, size_t newcount, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  size_t total;
  if (!mi_mul_overflow(newcount, size, &total)) {
    return mi_heap_rezalloc_aligned_at(heap, p, total, alignment, offset);
  }
  return NULL; // newcount * size does not fit in size_t
}
174+
175+
// recalloc-style aligned reallocation to `newcount * size` bytes;
// NULL when the element count overflows.
void* mi_heap_recalloc_aligned(mi_heap_t* heap, void* p, size_t newcount, size_t size, size_t alignment) mi_attr_noexcept {
  size_t total;
  if (!mi_mul_overflow(newcount, size, &total)) {
    return mi_heap_rezalloc_aligned(heap, p, total, alignment);
  }
  return NULL; // newcount * size does not fit in size_t
}
180+
181+
// Default-heap variant of mi_heap_realloc_aligned_at.
void* mi_realloc_aligned_at(void* p, size_t newsize, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_realloc_aligned_at(heap, p, newsize, alignment, offset);
}
184+
185+
// Default-heap variant of mi_heap_realloc_aligned.
void* mi_realloc_aligned(void* p, size_t newsize, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_realloc_aligned(heap, p, newsize, alignment);
}
188+
189+
// Default-heap variant of mi_heap_rezalloc_aligned_at.
void* mi_rezalloc_aligned_at(void* p, size_t newsize, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_rezalloc_aligned_at(heap, p, newsize, alignment, offset);
}
192+
193+
// Default-heap variant of mi_heap_rezalloc_aligned.
void* mi_rezalloc_aligned(void* p, size_t newsize, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_rezalloc_aligned(heap, p, newsize, alignment);
}
196+
197+
// Default-heap variant of mi_heap_recalloc_aligned_at.
void* mi_recalloc_aligned_at(void* p, size_t newcount, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_recalloc_aligned_at(heap, p, newcount, size, alignment, offset);
}
200+
201+
// Default-heap variant of mi_heap_recalloc_aligned.
void* mi_recalloc_aligned(void* p, size_t newcount, size_t size, size_t alignment) mi_attr_noexcept {
  mi_heap_t* heap = mi_get_default_heap();
  return mi_heap_recalloc_aligned(heap, p, newcount, size, alignment);
}
204+

0 commit comments

Comments
 (0)