Commit

Merge pull request #172 from microsoft/callback-fixes
Callback fixes
mjp41 authored Apr 14, 2020
2 parents cbaf0f7 + 60005c8 commit 2b92574
Showing 5 changed files with 90 additions and 22 deletions.
8 changes: 0 additions & 8 deletions CMakeLists.txt
@@ -33,14 +33,6 @@ macro(warnings_high)
add_compile_options(-Wsign-conversion)
endif ()
add_compile_options(-Wall -Wextra -Werror -Wundef)
- # There are a few places with subtle reasons for array access being correct
- # GCC's warnings are too aggressive and incorrectly assume the code is wrong.
- # Disabling only in Release is so the ASSUME can be mapped to assert and check
- # at runtime in debug. This ensures we are covering the cases of concern with
- # debug checks, but not incurring runtime penalties in release.
- if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Release"))
- add_compile_options(-Wno-array-bounds)
- endif ()
endif()
endmacro()

23 changes: 14 additions & 9 deletions src/mem/alloc.h
@@ -1131,18 +1131,21 @@ namespace snmalloc
stats().sizeclass_alloc(sizeclass);
return small_alloc_new_free_list<zero_mem, allow_reserve>(sizeclass);
}
- return small_alloc_first_alloc<zero_mem, allow_reserve>(size);
+ return small_alloc_first_alloc<zero_mem, allow_reserve>(sizeclass, size);
}

/**
* Called on first allocation to set up the thread local allocator,
* then directs the allocation request to the newly created allocator.
*/
template<ZeroMem zero_mem, AllowReserve allow_reserve>
- SNMALLOC_SLOW_PATH void* small_alloc_first_alloc(size_t size)
+ SNMALLOC_SLOW_PATH void*
+ small_alloc_first_alloc(sizeclass_t sizeclass, size_t size)
{
- return InitThreadAllocator([size](void* alloc) {
- return reinterpret_cast<Allocator*>(alloc)->alloc(size);
+ return InitThreadAllocator([sizeclass, size](void* alloc) {
+ return reinterpret_cast<Allocator*>(alloc)
+ ->template small_alloc_inner<zero_mem, allow_reserve>(
+ sizeclass, size);
});
}

@@ -1320,8 +1323,9 @@ namespace snmalloc
{
if (NeedsInitialisation(this))
{
- return InitThreadAllocator([size](void* alloc) {
- return reinterpret_cast<Allocator*>(alloc)->alloc(size);
+ return InitThreadAllocator([size, rsize, sizeclass](void* alloc) {
+ return reinterpret_cast<Allocator*>(alloc)
+ ->medium_alloc<zero_mem, allow_reserve>(sizeclass, rsize, size);
});
}
slab = reinterpret_cast<Mediumslab*>(
@@ -1394,7 +1398,8 @@ namespace snmalloc
if (NeedsInitialisation(this))
{
return InitThreadAllocator([size](void* alloc) {
- return reinterpret_cast<Allocator*>(alloc)->alloc(size);
+ return reinterpret_cast<Allocator*>(alloc)
+ ->large_alloc<zero_mem, allow_reserve>(size);
});
}

@@ -1420,8 +1425,8 @@

if (NeedsInitialisation(this))
{
- InitThreadAllocator([p](void* alloc) {
- reinterpret_cast<Allocator*>(alloc)->dealloc(p);
+ InitThreadAllocator([p, size](void* alloc) {
+ reinterpret_cast<Allocator*>(alloc)->large_dealloc(p, size);
return nullptr;
});
return;
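Note on the alloc.h changes above: each of these slow paths fires when the calling thread has no allocator yet. Previously the callbacks replayed the request through the generic alloc(size), which dropped the already-computed sizeclass (and rounded size) and the zero_mem/allow_reserve template arguments; with the fix, the lambdas capture that state and forward it to the specific routine (small_alloc_inner, medium_alloc, large_alloc, large_dealloc). The following is a self-contained sketch of that capture-and-replay pattern, not snmalloc's real code: init_thread_allocator, the Allocator stub and its small_alloc_inner are simplified stand-ins.

#include <cstddef>
#include <cstdio>

// Simplified stand-ins, not the real snmalloc declarations.
enum class ZeroMem { NoZero, YesZero };

struct Allocator
{
  template<ZeroMem zero_mem>
  void* small_alloc_inner(std::size_t sizeclass, std::size_t size)
  {
    // A real allocator would service the request here; the sketch only
    // shows that the full request context arrives intact.
    std::printf("sizeclass=%zu size=%zu zero=%d\n",
                sizeclass, size, zero_mem == ZeroMem::YesZero);
    return nullptr;
  }
};

// Stand-in for InitThreadAllocator: set up the thread-local allocator, then
// hand it to the callback so the original request can be replayed against it.
template<typename Callback>
void* init_thread_allocator(Callback cb)
{
  static thread_local Allocator a;
  return cb(&a);
}

template<ZeroMem zero_mem>
void* first_alloc(std::size_t sizeclass, std::size_t size)
{
  // The essence of the fix: capture sizeclass as well as size, and keep the
  // template argument, so nothing about the request is lost in the replay.
  return init_thread_allocator([sizeclass, size](void* alloc) {
    return static_cast<Allocator*>(alloc)
      ->small_alloc_inner<zero_mem>(sizeclass, size);
  });
}

int main()
{
  first_alloc<ZeroMem::YesZero>(3, 48);
}

The same idea applies to the medium, large, and large-dealloc callbacks in the hunks above; only the captured state and the forwarded routine differ.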
3 changes: 2 additions & 1 deletion src/mem/largealloc.h
@@ -366,7 +366,8 @@ namespace snmalloc
p = memory_provider.template reserve<false>(large_class);
if (p == nullptr)
return nullptr;
- memory_provider.template notify_using<zero_mem>(p, size);
+ memory_provider.template notify_using<zero_mem>(
+ p, bits::align_up(size, OS_PAGE_SIZE));
}
else
{
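The largealloc.h change makes the amount passed to notify_using a whole number of pages rather than the raw requested size; as the new test in memory.cc notes, some PALs have special page-aligned paths for zeroing large allocations. For reference, a minimal sketch of the usual power-of-two round-up that bits::align_up is assumed to perform (the real snmalloc helper may differ in details):

#include <cstddef>

// Round size up to the next multiple of alignment; alignment must be a
// power of two. Assumed to mirror what bits::align_up computes.
constexpr std::size_t align_up(std::size_t size, std::size_t alignment)
{
  return (size + alignment - 1) & ~(alignment - 1);
}

int main()
{
  constexpr std::size_t page = 4096; // a typical OS_PAGE_SIZE
  static_assert(align_up(4089, page) == 4096, "partial page rounds up");
  static_assert(align_up(4096, page) == 4096, "already aligned: unchanged");
  static_assert(align_up(4097, page) == 8192, "spills into a second page");
  return 0;
}

With that rounding, a request a few bytes short of a page boundary, like the one in test_calloc_large_bug below, reports usage up to the end of its final page.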
63 changes: 59 additions & 4 deletions src/test/func/first_operation/first_operation.cc
@@ -70,6 +70,52 @@ void alloc4(size_t size)
a->dealloc(r);
}

+ void check_calloc(void* p, size_t size)
+ {
+ if (p != nullptr)
+ {
+ for (size_t i = 0; i < size; i++)
+ {
+ if (((uint8_t*)p)[i] != 0)
+ abort();
+ // ((uint8_t*)p)[i] = 0x5a;
+ }
+ }
+ }
+
+ void calloc1(size_t size)
+ {
+ void* r =
+ snmalloc::ThreadAlloc::get_noncachable()->alloc<snmalloc::ZeroMem::YesZero>(
+ size);
+ check_calloc(r, size);
+ snmalloc::ThreadAlloc::get_noncachable()->dealloc(r);
+ }
+
+ void calloc2(size_t size)
+ {
+ auto a = snmalloc::ThreadAlloc::get_noncachable();
+ void* r = a->alloc<snmalloc::ZeroMem::YesZero>(size);
+ check_calloc(r, size);
+ a->dealloc(r);
+ }
+
+ void calloc3(size_t size)
+ {
+ auto a = snmalloc::ThreadAlloc::get_noncachable();
+ void* r = a->alloc<snmalloc::ZeroMem::YesZero>(size);
+ check_calloc(r, size);
+ a->dealloc(r, size);
+ }
+
+ void calloc4(size_t size)
+ {
+ auto a = snmalloc::ThreadAlloc::get();
+ void* r = a->alloc<snmalloc::ZeroMem::YesZero>(size);
+ check_calloc(r, size);
+ a->dealloc(r);
+ }
+
void dealloc1(void* p, size_t)
{
snmalloc::ThreadAlloc::get_noncachable()->dealloc(p);
@@ -97,16 +143,21 @@ void f(size_t size)
auto t3 = std::thread(alloc3, size);
auto t4 = std::thread(alloc4, size);

+ auto t5 = std::thread(calloc1, size);
+ auto t6 = std::thread(calloc2, size);
+ auto t7 = std::thread(calloc3, size);
+ auto t8 = std::thread(calloc4, size);
+
auto a = snmalloc::current_alloc_pool()->acquire();
auto p1 = a->alloc(size);
auto p2 = a->alloc(size);
auto p3 = a->alloc(size);
auto p4 = a->alloc(size);

- auto t5 = std::thread(dealloc1, p1, size);
- auto t6 = std::thread(dealloc2, p2, size);
- auto t7 = std::thread(dealloc3, p3, size);
- auto t8 = std::thread(dealloc4, p4, size);
+ auto t9 = std::thread(dealloc1, p1, size);
+ auto t10 = std::thread(dealloc2, p2, size);
+ auto t11 = std::thread(dealloc3, p3, size);
+ auto t12 = std::thread(dealloc4, p4, size);

t1.join();
t2.join();
@@ -116,6 +167,10 @@ void f(size_t size)
t6.join();
t7.join();
t8.join();
+ t9.join();
+ t10.join();
+ t11.join();
+ t12.join();
snmalloc::current_alloc_pool()->release(a);
snmalloc::current_alloc_pool()->debug_in_use(0);
printf(".");
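The new calloc1-calloc4 helpers mirror the existing alloc1-alloc4 ones: each runs on its own thread so that the zero-initialised allocation (or, for the dealloc helpers, the deallocation) is the very first allocator operation that thread performs, which is exactly the InitThreadAllocator path fixed above. A reduced, standalone sketch of that structure; the snmalloc calls are replaced by calloc/free stand-ins purely so the sketch compiles on its own:

#include <cstdint>
#include <cstdlib>
#include <thread>

// Stand-in for a zero-initialised allocation. In the real test this is
// ThreadAlloc::get_noncachable()->alloc<ZeroMem::YesZero>(size).
void* zeroed_alloc(std::size_t size)
{
  return std::calloc(size, 1);
}

// Mirrors check_calloc: every byte of a zeroed allocation must read as 0.
void check_zeroed(void* p, std::size_t size)
{
  if (p == nullptr)
    return;
  for (std::size_t i = 0; i < size; i++)
    if (static_cast<std::uint8_t*>(p)[i] != 0)
      std::abort();
}

// In the real test, the body of this function is the thread's first snmalloc
// operation, so it goes through the lazy-initialisation callback rather than
// an already-set-up thread-local allocator.
void first_op_is_calloc(std::size_t size)
{
  void* r = zeroed_alloc(size);
  check_zeroed(r, size);
  std::free(r);
}

int main()
{
  for (std::size_t size = 16; size <= (1 << 16); size <<= 1)
  {
    std::thread t(first_op_is_calloc, size);
    t.join();
  }
}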
15 changes: 15 additions & 0 deletions src/test/func/memory/memory.cc
@@ -296,6 +296,20 @@ void test_calloc_16M()
alloc->dealloc(p1);
}

+ void test_calloc_large_bug()
+ {
+ auto alloc = ThreadAlloc::get();
+ // Perform large calloc, to check for correct zeroing from PAL.
+ // Some PALS have special paths for PAGE aligned zeroing of large
+ // allocations. This is a large allocation that is intentionally
+ // not a multiple of page size.
+ const size_t size = (SUPERSLAB_SIZE << 3) - 7;
+
+ void* p1 = alloc->alloc<YesZero>(size);
+ SNMALLOC_ASSERT(Alloc::alloc_size(Alloc::external_pointer(p1)) >= size);
+ alloc->dealloc(p1);
+ }
+
int main(int argc, char** argv)
{
setup();
@@ -309,6 +323,7 @@ int main(int argc, char** argv)
UNUSED(argv);
#endif

+ test_calloc_large_bug();
test_external_pointer_dealloc_bug();
test_external_pointer_large();
test_alloc_dealloc_64k();
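test_calloc_large_bug picks a size that is deliberately a few bytes short of a power-of-two boundary, so the amount a PAL is asked to zero is not page-aligned unless notify_using rounds it up as in the largealloc.h change. A small illustration with assumed constants (a 4 KiB page and a 16 MiB SUPERSLAB_SIZE are placeholders here, not values guaranteed by snmalloc's configuration):

#include <cstddef>
#include <cstdio>

int main()
{
  // Placeholder values for illustration; the real constants come from the
  // snmalloc build configuration and the PAL.
  const std::size_t SUPERSLAB_SIZE = std::size_t(1) << 24; // assumed 16 MiB
  const std::size_t OS_PAGE_SIZE = 4096;                   // assumed 4 KiB

  // The test's request: intentionally not a multiple of the page size.
  const std::size_t size = (SUPERSLAB_SIZE << 3) - 7;
  const std::size_t rounded = (size + OS_PAGE_SIZE - 1) & ~(OS_PAGE_SIZE - 1);

  // notify_using must be given the rounded amount so the final, partially
  // used page is still covered by the PAL's zeroing.
  std::printf("requested %zu bytes, page-aligned %zu bytes\n", size, rounded);
}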
