#include "Python.h"
#include "pycore_code.h"
#include "pycore_object.h"
#include "pycore_obmalloc.h"
#include "pycore_pyerrors.h"
#include "pycore_pymem.h"
#include "pycore_pystate.h"
#include "pycore_obmalloc_init.h"
#include <stdlib.h>
#include <stdbool.h>
#ifdef WITH_MIMALLOC
static void _PyMem_mi_page_clear_qsbr(mi_page_t *page);
static bool _PyMem_mi_page_is_safe_to_free(mi_page_t *page);
static bool _PyMem_mi_page_maybe_free(mi_page_t *page, mi_page_queue_t *pq, bool force);
static void _PyMem_mi_page_reclaimed(mi_page_t *page);
static void _PyMem_mi_heap_collect_qsbr(mi_heap_t *heap);
# include "pycore_mimalloc.h"
# include "mimalloc/static.c"
# include "mimalloc/internal.h"
#endif
#if defined(Py_GIL_DISABLED) && !defined(WITH_MIMALLOC)
# error "Py_GIL_DISABLED requires WITH_MIMALLOC"
#endif
#undef uint
#define uint …
extern void _PyMem_DumpTraceback(int fd, const void *ptr);
static void _PyObject_DebugDumpAddress(const void *p);
static void _PyMem_DebugCheckAddress(const char *func, char api_id, const void *p);
static void set_up_debug_hooks_domain_unlocked(PyMemAllocatorDomain domain);
static void set_up_debug_hooks_unlocked(void);
static void get_allocator_unlocked(PyMemAllocatorDomain, PyMemAllocatorEx *);
static void set_allocator_unlocked(PyMemAllocatorDomain, PyMemAllocatorEx *);
void *
_PyMem_RawMalloc(void *Py_UNUSED(ctx), size_t size)
{ … }
void *
_PyMem_RawCalloc(void *Py_UNUSED(ctx), size_t nelem, size_t elsize)
{ … }
void *
_PyMem_RawRealloc(void *Py_UNUSED(ctx), void *ptr, size_t size)
{ … }
void
_PyMem_RawFree(void *Py_UNUSED(ctx), void *ptr)
{ … }
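/* Note on the raw-domain hooks above: they wrap the C library allocator
   directly, and the PyMem_Raw* contract requires that a zero-byte request
   still return a unique, freeable pointer.  A minimal sketch of a conforming
   malloc hook, assuming only <stdlib.h> (raw_malloc_sketch is illustrative,
   not a CPython name):

       static void *
       raw_malloc_sketch(void *Py_UNUSED(ctx), size_t size)
       {
           if (size == 0) {
               size = 1;   // malloc(0) may legally return NULL
           }
           return malloc(size);
       }
*/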
#ifdef WITH_MIMALLOC
static void
_PyMem_mi_page_clear_qsbr(mi_page_t *page)
{ … }
static bool
_PyMem_mi_page_is_safe_to_free(mi_page_t *page)
{ … }
static bool
_PyMem_mi_page_maybe_free(mi_page_t *page, mi_page_queue_t *pq, bool force)
{ … }
static void
_PyMem_mi_page_reclaimed(mi_page_t *page)
{ … }
static void
_PyMem_mi_heap_collect_qsbr(mi_heap_t *heap)
{ … }
void *
_PyMem_MiMalloc(void *ctx, size_t size)
{ … }
void *
_PyMem_MiCalloc(void *ctx, size_t nelem, size_t elsize)
{ … }
void *
_PyMem_MiRealloc(void *ctx, void *ptr, size_t size)
{ … }
void
_PyMem_MiFree(void *ctx, void *ptr)
{ … }
void *
_PyObject_MiMalloc(void *ctx, size_t nbytes)
{ … }
void *
_PyObject_MiCalloc(void *ctx, size_t nelem, size_t elsize)
{ … }
void *
_PyObject_MiRealloc(void *ctx, void *ptr, size_t nbytes)
{ … }
void
_PyObject_MiFree(void *ctx, void *ptr)
{ … }
#endif
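/* The _PyMem_Mi* and _PyObject_Mi* hooks above back the "mem" and "object"
   domains with per-thread mimalloc heaps in the free-threaded build.  The
   static _PyMem_mi_page_* helpers tie page reuse to QSBR: a page emptied by
   one thread is only recycled or returned to the OS once every thread has
   passed a quiescent state, so lock-free readers can never observe memory
   that has already been handed back to the allocator. */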
#define MALLOC_ALLOC …
#ifdef WITH_MIMALLOC
#define MIMALLOC_ALLOC …
#define MIMALLOC_OBJALLOC …
#endif
#if defined(WITH_PYMALLOC)
void* _PyObject_Malloc(void *ctx, size_t size);
void* _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize);
void _PyObject_Free(void *ctx, void *p);
void* _PyObject_Realloc(void *ctx, void *ptr, size_t size);
#define PYMALLOC_ALLOC …
#endif
#if defined(Py_GIL_DISABLED)
#define PYRAW_ALLOC …
#define PYMEM_ALLOC …
#define PYOBJ_ALLOC …
#elif defined(WITH_PYMALLOC)
#define PYRAW_ALLOC …
#define PYMEM_ALLOC …
#define PYOBJ_ALLOC …
#else
#define PYRAW_ALLOC …
#define PYMEM_ALLOC …
#define PYOBJ_ALLOC …
#endif
void* _PyMem_DebugRawMalloc(void *ctx, size_t size);
void* _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize);
void* _PyMem_DebugRawRealloc(void *ctx, void *ptr, size_t size);
void _PyMem_DebugRawFree(void *ctx, void *ptr);
void* _PyMem_DebugMalloc(void *ctx, size_t size);
void* _PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize);
void* _PyMem_DebugRealloc(void *ctx, void *ptr, size_t size);
void _PyMem_DebugFree(void *ctx, void *p);
#define PYDBGRAW_ALLOC …
#define PYDBGMEM_ALLOC …
#define PYDBGOBJ_ALLOC …
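/* The _PyMem_Debug* wrappers declared above surround every block with size
   and pattern bytes so overruns, underruns and frees through the wrong
   domain are reported instead of silently corrupting the heap.  A hedged
   usage sketch for an embedder, using only the public API (equivalent in
   effect to running with PYTHONMALLOC=debug):

       #include <Python.h>

       int
       main(void)
       {
           PyMem_SetupDebugHooks();   // install debug wrappers on all domains
           Py_Initialize();
           // ... run application code ...
           Py_Finalize();
           return 0;
       }
*/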
#ifdef WITH_PYMALLOC
# ifdef MS_WINDOWS
# include <windows.h>
# elif defined(HAVE_MMAP)
# include <sys/mman.h>
# ifdef MAP_ANONYMOUS
#define ARENAS_USE_MMAP
# endif
# endif
#endif
void *
_PyMem_ArenaAlloc(void *Py_UNUSED(ctx), size_t size)
{ … }
void
_PyMem_ArenaFree(void *Py_UNUSED(ctx), void *ptr,
#if defined(ARENAS_USE_MMAP)
size_t size
#else
size_t Py_UNUSED(size)
#endif
)
{ … }
#if defined(__has_feature)
# if __has_feature(address_sanitizer)
#define _Py_NO_SANITIZE_ADDRESS …
# endif
# if __has_feature(thread_sanitizer)
#define _Py_NO_SANITIZE_THREAD …
# endif
# if __has_feature(memory_sanitizer)
#define _Py_NO_SANITIZE_MEMORY …
# endif
#elif defined(__GNUC__)
# if defined(__SANITIZE_ADDRESS__)
#define _Py_NO_SANITIZE_ADDRESS …
# endif
# if __GNUC__ > 5 || (__GNUC__ == 5 && __GNUC_MINOR__ >= 1)
#define _Py_NO_SANITIZE_THREAD …
# endif
#endif
#ifndef _Py_NO_SANITIZE_ADDRESS
#define _Py_NO_SANITIZE_ADDRESS
#endif
#ifndef _Py_NO_SANITIZE_THREAD
#define _Py_NO_SANITIZE_THREAD
#endif
#ifndef _Py_NO_SANITIZE_MEMORY
#define _Py_NO_SANITIZE_MEMORY
#endif
#define ALLOCATORS_MUTEX …
#define _PyMem_Raw …
#define _PyMem …
#define _PyObject …
#define _PyMem_Debug …
#define _PyObject_Arena …
static int
set_default_allocator_unlocked(PyMemAllocatorDomain domain, int debug,
PyMemAllocatorEx *old_alloc)
{ … }
#ifdef Py_DEBUG
static const int pydebug = 1;
#else
static const int pydebug = …;
#endif
int
_PyMem_SetDefaultAllocator(PyMemAllocatorDomain domain,
PyMemAllocatorEx *old_alloc)
{ … }
int
_PyMem_GetAllocatorName(const char *name, PyMemAllocatorName *allocator)
{ … }
static int
set_up_allocators_unlocked(PyMemAllocatorName allocator)
{ … }
int
_PyMem_SetupAllocators(PyMemAllocatorName allocator)
{ … }
static int
pymemallocator_eq(PyMemAllocatorEx *a, PyMemAllocatorEx *b)
{ … }
static const char*
get_current_allocator_name_unlocked(void)
{ … }
const char*
_PyMem_GetCurrentAllocatorName(void)
{ … }
int
_PyMem_DebugEnabled(void)
{ … }
#ifdef WITH_PYMALLOC
static int
_PyMem_PymallocEnabled(void)
{ … }
#ifdef WITH_MIMALLOC
static int
_PyMem_MimallocEnabled(void)
{ … }
#endif
#endif
static void
set_up_debug_hooks_domain_unlocked(PyMemAllocatorDomain domain)
{ … }
static void
set_up_debug_hooks_unlocked(void)
{ … }
void
PyMem_SetupDebugHooks(void)
{ … }
static void
get_allocator_unlocked(PyMemAllocatorDomain domain, PyMemAllocatorEx *allocator)
{ … }
static void
set_allocator_unlocked(PyMemAllocatorDomain domain, PyMemAllocatorEx *allocator)
{ … }
void
PyMem_GetAllocator(PyMemAllocatorDomain domain, PyMemAllocatorEx *allocator)
{ … }
void
PyMem_SetAllocator(PyMemAllocatorDomain domain, PyMemAllocatorEx *allocator)
{ … }
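/* PyMem_GetAllocator()/PyMem_SetAllocator() let an embedder interpose on any
   domain.  A hedged sketch of a pass-through hook that counts raw-domain
   allocations (orig_raw, raw_mallocs and the function names are illustrative,
   not CPython names; a real hook would want an atomic counter because the
   raw domain may be called concurrently without the GIL):

       static PyMemAllocatorEx orig_raw;
       static size_t raw_mallocs;

       static void *
       counting_raw_malloc(void *ctx, size_t size)
       {
           raw_mallocs++;
           return orig_raw.malloc(ctx, size);   // ctx is orig_raw.ctx
       }

       static void
       install_counting_hook(void)
       {
           PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &orig_raw);
           PyMemAllocatorEx alloc = orig_raw;   // keep ctx, calloc, realloc, free
           alloc.malloc = counting_raw_malloc;
           PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc);
       }
*/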
void
PyObject_GetArenaAllocator(PyObjectArenaAllocator *allocator)
{ … }
void
PyObject_SetArenaAllocator(PyObjectArenaAllocator *allocator)
{ … }
void *
_PyObject_VirtualAlloc(size_t size)
{ … }
void
_PyObject_VirtualFree(void *obj, size_t size)
{ … }
void *
PyMem_RawMalloc(size_t size)
{ … }
void *
PyMem_RawCalloc(size_t nelem, size_t elsize)
{ … }
void*
PyMem_RawRealloc(void *ptr, size_t new_size)
{ … }
void
PyMem_RawFree(void *ptr)
{ … }
void *
PyMem_Malloc(size_t size)
{ … }
void *
PyMem_Calloc(size_t nelem, size_t elsize)
{ … }
void *
PyMem_Realloc(void *ptr, size_t new_size)
{ … }
void
PyMem_Free(void *ptr)
{ … }
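/* Contract reminder for the families above: PyMem_Raw* must be thread-safe
   and may be called without an attached thread state, while PyMem_* and
   PyObject_* require the GIL (or an attached thread state in the
   free-threaded build).  A block must also be released by the family that
   allocated it: memory from PyMem_Malloc() goes back through PyMem_Free(),
   never through free() or PyObject_Free(). */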
wchar_t*
_PyMem_RawWcsdup(const wchar_t *str)
{ … }
char *
_PyMem_RawStrdup(const char *str)
{ … }
char *
_PyMem_Strdup(const char *str)
{ … }
#define WORK_ITEMS_PER_CHUNK …
struct _mem_work_item { … };
struct _mem_work_chunk { … };
static void
free_work_item(uintptr_t ptr, delayed_dealloc_cb cb, void *state)
{ … }
static void
free_delayed(uintptr_t ptr)
{ … }
void
_PyMem_FreeDelayed(void *ptr)
{ … }
#ifdef Py_GIL_DISABLED
void
_PyObject_XDecRefDelayed(PyObject *ptr)
{
assert(!((uintptr_t)ptr & 0x01));
if (ptr != NULL) {
free_delayed(((uintptr_t)ptr)|0x01);
}
}
#endif
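/* Delayed freeing: in the free-threaded build, memory that lock-free readers
   may still be traversing is not released immediately.  free_delayed() queues
   the pointer on a per-thread work list, and the block is only freed once
   QSBR reports that every thread has since passed a quiescent state.
   _PyObject_XDecRefDelayed() reuses the same queue for objects by setting the
   low pointer bit (objects are at least 2-byte aligned, so bit 0 is free);
   when the queue is drained, tagged entries are decref'ed instead of being
   passed back to the allocator. */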
static struct _mem_work_chunk *
work_queue_first(struct llist_node *head)
{ … }
static void
process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr,
bool keep_empty, delayed_dealloc_cb cb, void *state)
{ … }
static void
process_interp_queue(struct _Py_mem_interp_free_queue *queue,
struct _qsbr_thread_state *qsbr, delayed_dealloc_cb cb,
void *state)
{ … }
void
_PyMem_ProcessDelayed(PyThreadState *tstate)
{ … }
void
_PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, delayed_dealloc_cb cb, void *state)
{ … }
void
_PyMem_AbandonDelayed(PyThreadState *tstate)
{ … }
void
_PyMem_FiniDelayed(PyInterpreterState *interp)
{ … }
void *
PyObject_Malloc(size_t size)
{ … }
void *
PyObject_Calloc(size_t nelem, size_t elsize)
{ … }
void *
PyObject_Realloc(void *ptr, size_t new_size)
{ … }
void
PyObject_Free(void *ptr)
{ … }
#if defined(__GNUC__) && (__GNUC__ > 2) && defined(__OPTIMIZE__)
#define UNLIKELY(value) …
#define LIKELY(value) …
#else
#define UNLIKELY(value) …
#define LIKELY(value) …
#endif
#ifdef WITH_PYMALLOC
#ifdef WITH_VALGRIND
#include <valgrind/valgrind.h>
static int running_on_valgrind = -1;
#endif
typedef struct _obmalloc_state OMState;

/* obmalloc state for the main interpreter, shared by sub-interpreters that
   do not have their own obmalloc state. */
static struct _obmalloc_state obmalloc_state_main;
static bool obmalloc_state_initialized;
static inline int
has_own_state(PyInterpreterState *interp)
{ … }
static inline OMState *
get_state(void)
{ … }
#define usedpools …
#define allarenas …
#define maxarenas …
#define unused_arena_objects …
#define usable_arenas …
#define nfp2lasta …
#define narenas_currently_allocated …
#define ntimes_arena_allocated …
#define narenas_highwater …
#define raw_allocated_blocks …
#ifdef WITH_MIMALLOC
static bool count_blocks(
const mi_heap_t* heap, const mi_heap_area_t* area,
void* block, size_t block_size, void* allocated_blocks)
{ … }
static Py_ssize_t
get_mimalloc_allocated_blocks(PyInterpreterState *interp)
{ … }
#endif
Py_ssize_t
_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *interp)
{ … }
static void free_obmalloc_arenas(PyInterpreterState *interp);
void
_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *interp)
{ … }
static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *);
static Py_ssize_t last_final_leaks = …;
void
_Py_FinalizeAllocatedBlocks(_PyRuntimeState *runtime)
{ … }
static Py_ssize_t
get_num_global_allocated_blocks(_PyRuntimeState *runtime)
{ … }
Py_ssize_t
_Py_GetGlobalAllocatedBlocks(void)
{ … }
#if WITH_PYMALLOC_RADIX_TREE
#define arena_map_root …
#ifdef USE_INTERIOR_NODES
#define arena_map_mid_count …
#define arena_map_bot_count …
#endif
static inline Py_ALWAYS_INLINE arena_map_bot_t *
arena_map_get(OMState *state, pymem_block *p, int create)
{ … }
static int
arena_map_mark_used(OMState *state, uintptr_t arena_base, int is_used)
{ … }
static int
arena_map_is_used(OMState *state, pymem_block *p)
{ … }
#endif
static struct arena_object*
new_arena(OMState *state)
{ … }
#if WITH_PYMALLOC_RADIX_TREE
static bool
address_in_range(OMState *state, void *p, poolp Py_UNUSED(pool))
{ … }
#else
/* Return true if and only if P is an address that was allocated by pymalloc.
   POOL must be the pool address associated with P, i.e. POOL = POOL_ADDR(P).
   The read below may race with other threads mutating unrelated memory,
   hence the volatile access and the sanitizer exclusions on the function. */
static bool _Py_NO_SANITIZE_ADDRESS
_Py_NO_SANITIZE_THREAD
_Py_NO_SANITIZE_MEMORY
address_in_range(OMState *state, void *p, poolp pool)
{
    // Read the arena index once; if P was not allocated by pymalloc this can
    // be arbitrary garbage, so every use below must tolerate a bogus value.
    uint arenaindex = *((volatile uint *)&pool->arenaindex);
    return arenaindex < maxarenas &&
        (uintptr_t)p - allarenas[arenaindex].address < ARENA_SIZE &&
        allarenas[arenaindex].address != 0;
}
#endif
static void
pymalloc_pool_extend(poolp pool, uint size)
{ … }
static void*
allocate_from_new_pool(OMState *state, uint size)
{ … }
static inline void*
pymalloc_alloc(OMState *state, void *Py_UNUSED(ctx), size_t nbytes)
{ … }
void *
_PyObject_Malloc(void *ctx, size_t nbytes)
{ … }
void *
_PyObject_Calloc(void *ctx, size_t nelem, size_t elsize)
{ … }
static void
insert_to_usedpool(OMState *state, poolp pool)
{ … }
static void
insert_to_freepool(OMState *state, poolp pool)
{ … }
static inline int
pymalloc_free(OMState *state, void *Py_UNUSED(ctx), void *p)
{ … }
void
_PyObject_Free(void *ctx, void *p)
{ … }
static int
pymalloc_realloc(OMState *state, void *ctx,
void **newptr_p, void *p, size_t nbytes)
{ … }
void *
_PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
{ … }
#else
Py_ssize_t
_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
{
return 0;
}
Py_ssize_t
_Py_GetGlobalAllocatedBlocks(void)
{
return 0;
}
void
_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
{
return;
}
void
_Py_FinalizeAllocatedBlocks(_PyRuntimeState *Py_UNUSED(runtime))
{
return;
}
#endif
#ifdef PYMEM_DEBUG_SERIALNO
static size_t serialno = 0;
static void
bumpserialno(void)
{
++serialno;
}
#endif
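/* When PYMEM_DEBUG_SERIALNO is defined, every debug allocation records the
   current value of 'serialno' next to the block.  If
   _PyObject_DebugDumpAddress() later reports corruption, the stored id tells
   you roughly when the block was obtained, which can be matched against a
   conditional breakpoint on bumpserialno(). */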
#define SST …
#ifdef PYMEM_DEBUG_SERIALNO
#define PYMEM_DEBUG_EXTRA_BYTES …
#else
#define PYMEM_DEBUG_EXTRA_BYTES …
#endif
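/* Sketch of the layout the debug wrappers build around each user block, with
   S = SST = sizeof(size_t) (see _PyMem_DebugRawAlloc/_PyMem_DebugCheckAddress):

       p[-2*S : -S]   requested size, stored big-endian
       p[-S]          one character identifying the API domain ('r', 'm', 'o')
       p[-S+1 : 0]    forbidden bytes
       p[0 : nbytes]  user data, filled with a "clean" pattern on allocation
       p[nbytes : nbytes+S]        forbidden bytes
       p[nbytes+S : nbytes+2*S]    serial number (only with PYMEM_DEBUG_SERIALNO)

   The forbidden bytes are re-checked on every free and realloc, so writing
   even a single byte past either end of the block is detected. */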
static size_t
read_size_t(const void *p)
{ … }
static void
write_size_t(void *p, size_t n)
{ … }
static void
fill_mem_debug(debug_alloc_api_t *api, void *data, int c, size_t nbytes,
bool is_alloc)
{ … }
static void *
_PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes)
{ … }
void *
_PyMem_DebugRawMalloc(void *ctx, size_t nbytes)
{ … }
void *
_PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize)
{ … }
void
_PyMem_DebugRawFree(void *ctx, void *p)
{ … }
void *
_PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes)
{ … }
static inline void
_PyMem_DebugCheckGIL(const char *func)
{ … }
void *
_PyMem_DebugMalloc(void *ctx, size_t nbytes)
{ … }
void *
_PyMem_DebugCalloc(void *ctx, size_t nelem, size_t elsize)
{ … }
void
_PyMem_DebugFree(void *ctx, void *ptr)
{ … }
void *
_PyMem_DebugRealloc(void *ctx, void *ptr, size_t nbytes)
{ … }
static void
_PyMem_DebugCheckAddress(const char *func, char api, const void *p)
{ … }
static void
_PyObject_DebugDumpAddress(const void *p)
{ … }
static size_t
printone(FILE *out, const char* msg, size_t value)
{ … }
void
_PyDebugAllocatorStats(FILE *out,
const char *block_name, int num_blocks, size_t sizeof_block)
{ … }
bool _PyMem_obmalloc_state_on_heap(PyInterpreterState *interp)
{ … }
#ifdef WITH_PYMALLOC
static void
init_obmalloc_pools(PyInterpreterState *interp)
{ … }
#endif
int _PyMem_init_obmalloc(PyInterpreterState *interp)
{ … }
#ifdef WITH_PYMALLOC
static void
free_obmalloc_arenas(PyInterpreterState *interp)
{ … }
#ifdef Py_DEBUG
/* Debug helper: return 1 if 'target' appears in the pool list starting at
   'list'.  Pool lists may be NULL-terminated or circular, so iteration stops
   on either NULL or on wrapping back to the original head. */
static int
pool_is_in_list(const poolp target, poolp list)
{
    poolp origlist = list;
    assert(target != NULL);
    if (list == NULL)
        return 0;
    do {
        if (target == list)
            return 1;
        list = list->nextpool;
    } while (list != NULL && list != origlist);
    return 0;
}
#endif
#ifdef WITH_MIMALLOC
struct _alloc_stats { … };
static bool _collect_alloc_stats(
const mi_heap_t* heap, const mi_heap_area_t* area,
void* block, size_t block_size, void* arg)
{ … }
static void
py_mimalloc_print_stats(FILE *out)
{ … }
#endif
static void
pymalloc_print_stats(FILE *out)
{ … }
int
_PyObject_DebugMallocStats(FILE *out)
{ … }
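/* _PyObject_DebugMallocStats() (plus py_mimalloc_print_stats() on
   mimalloc-backed builds) produces the report behind sys._debugmallocstats():
   per-size-class pool and block usage, arena counts and high-water marks.
   From Python code:

       >>> import sys
       >>> sys._debugmallocstats()    # writes the report to stderr
*/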
#endif