DECLARE_GLOBAL_DATA_PTR;
+#ifdef MCHECK_HEAP_PROTECTION
+ #define STATIC_IF_MCHECK static
+#else
+ #define STATIC_IF_MCHECK
+ #define mALLOc_impl mALLOc
+ #define fREe_impl fREe
+ #define rEALLOc_impl rEALLOc
+ #define mEMALIGn_impl mEMALIGn
+ #define cALLOc_impl cALLOc
+#endif
+
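This macro block is the pivot of the whole patch: with MCHECK_HEAP_PROTECTION defined, the real allocator entry points are renamed to static *_impl functions so that checking wrappers can be layered in front of them; without it, the *_impl names alias straight back to the public symbols and the file compiles exactly as before. Below is a minimal sketch of the kind of wrapper this renaming enables, using a made-up guard header and magic value; the real mcheck code's layout and hook names will differ, and panic() stands in for U-Boot's fatal-error path.

    #include <stddef.h>
    #include <stdint.h>

    void *mALLOc_impl(size_t bytes);    /* the renamed real allocator */
    void fREe_impl(void *mem);
    void panic(const char *fmt, ...);   /* U-Boot's fatal-error report */

    /* Hypothetical guard header prepended to every allocation. */
    struct mcheck_hdr {
    	size_t size;
    	uint32_t magic;
    };
    #define MCHECK_MAGIC 0xfeedbeefu

    void *mALLOc(size_t bytes)
    {
    	struct mcheck_hdr *hdr = mALLOc_impl(sizeof(*hdr) + bytes);

    	if (!hdr)
    		return NULL;
    	hdr->size = bytes;
    	hdr->magic = MCHECK_MAGIC;	/* canary just below the user region */
    	return hdr + 1;
    }

    void fREe(void *mem)
    {
    	struct mcheck_hdr *hdr;

    	if (!mem)
    		return;
    	hdr = (struct mcheck_hdr *)mem - 1;
    	if (hdr->magic != MCHECK_MAGIC)
    		panic("mcheck: corrupted block at %p\n", mem);
    	hdr->magic = 0;			/* poison against double-free */
    	fREe_impl(hdr);
    }

A real wrapper also has to keep the returned pointer MALLOC_ALIGNMENT-aligned and teach rEALLOc, mEMALIGn and cALLOc about the header, which is why the patch renames all five entry points rather than just malloc and free.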
/*
Emulation of sbrk for WIN32
All code within the ifdef WIN32 is untested by me.
*/
+STATIC_IF_MCHECK
#if __STD_C
-Void_t* mALLOc(size_t bytes)
+Void_t* mALLOc_impl(size_t bytes)
#else
-Void_t* mALLOc(bytes) size_t bytes;
+Void_t* mALLOc_impl(bytes) size_t bytes;
#endif
{
mchunkptr victim; /* inspected/selected chunk */
*/
+STATIC_IF_MCHECK
#if __STD_C
-void fREe(Void_t* mem)
+void fREe_impl(Void_t* mem)
#else
-void fREe(mem) Void_t* mem;
+void fREe_impl(mem) Void_t* mem;
#endif
{
mchunkptr p; /* chunk corresponding to mem */
*/
+STATIC_IF_MCHECK
#if __STD_C
-Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
+Void_t* rEALLOc_impl(Void_t* oldmem, size_t bytes)
#else
-Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
+Void_t* rEALLOc_impl(oldmem, bytes) Void_t* oldmem; size_t bytes;
#endif
{
INTERNAL_SIZE_T nb; /* padded request size */
#ifdef REALLOC_ZERO_BYTES_FREES
if (!bytes) {
- fREe(oldmem);
+ fREe_impl(oldmem);
return NULL;
}
#endif
if ((long)bytes < 0) return NULL;
/* realloc of null is supposed to be same as malloc */
- if (oldmem == NULL) return mALLOc(bytes);
+ if (oldmem == NULL) return mALLOc_impl(bytes);
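Together, the two early returns above pin down rEALLOc's edge-case contract: a NULL old pointer degenerates to plain allocation, and a zero size (when REALLOC_ZERO_BYTES_FREES is configured) degenerates to a free. A short caller-side illustration, assuming only the public entry point from this file and that Void_t is plain void, as on any ANSI build:

    void *rEALLOc(void *oldmem, size_t bytes);

    void realloc_edge_cases(void)
    {
    	/* NULL old pointer: behaves exactly like mALLOc(64) */
    	void *p = rEALLOc(NULL, 64);

    	/*
    	 * Zero size: with REALLOC_ZERO_BYTES_FREES this frees p and
    	 * returns NULL; without it a minimum-size chunk is kept alive.
    	 */
    	p = rEALLOc(p, 0);
    }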
#if CONFIG_IS_ENABLED(SYS_MALLOC_F)
if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
/* Note the extra SIZE_SZ overhead. */
if(oldsize - SIZE_SZ >= nb) return oldmem; /* do nothing */
/* Must alloc, copy, free. */
- newmem = mALLOc(bytes);
+ newmem = mALLOc_impl(bytes);
if (!newmem)
return NULL; /* propagate failure */
MALLOC_COPY(newmem, oldmem, oldsize - 2*SIZE_SZ);
/* Must allocate */
- newmem = mALLOc (bytes);
+ newmem = mALLOc_impl (bytes);
if (newmem == NULL) /* propagate failure */
return NULL;
/* Otherwise copy, free, and exit */
MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
- fREe(oldmem);
+ fREe_impl(oldmem);
return newmem;
} else {
VALGRIND_RESIZEINPLACE_BLOCK(oldmem, 0, bytes, SIZE_SZ);
set_inuse_bit_at_offset(remainder, remainder_size);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(remainder), remainder_size, SIZE_SZ,
false);
- fREe(chunk2mem(remainder)); /* let free() deal with it */
+ fREe_impl(chunk2mem(remainder)); /* let free() deal with it */
}
else
{
*/
+STATIC_IF_MCHECK
#if __STD_C
-Void_t* mEMALIGn(size_t alignment, size_t bytes)
+Void_t* mEMALIGn_impl(size_t alignment, size_t bytes)
#else
-Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
+Void_t* mEMALIGn_impl(alignment, bytes) size_t alignment; size_t bytes;
#endif
{
INTERNAL_SIZE_T nb; /* padded request size */
/* If need less alignment than we give anyway, just relay to malloc */
- if (alignment <= MALLOC_ALIGNMENT) return mALLOc(bytes);
+ if (alignment <= MALLOC_ALIGNMENT) return mALLOc_impl(bytes);
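The early-out above means the padding dance below only runs when the caller asks for stricter alignment than mALLOc already guarantees via MALLOC_ALIGNMENT (commonly 8 or 16 bytes, depending on the target). A quick illustration, assuming MALLOC_ALIGNMENT is 8:

    #include <stddef.h>

    void *mEMALIGn(size_t alignment, size_t bytes);

    void memalign_examples(void)
    {
    	void *a = mEMALIGn(4, 100);	/* 4 <= 8: relays straight to mALLOc */
    	void *b = mEMALIGn(64, 100);	/* 64 > 8: takes the padded path below */

    	(void)a;
    	(void)b;
    }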
/* Otherwise, ensure that it is at least a minimum chunk size */
/* Call malloc with worst case padding to hit alignment. */
nb = request2size(bytes);
- m = (char*)(mALLOc(nb + alignment + MINSIZE));
+ m = (char*)(mALLOc_impl(nb + alignment + MINSIZE));
/*
 * The attempt to over-allocate (with a size large enough to guarantee the
 * alignment can be reached) failed.
 */
/*
 * Use bytes not nb, since mALLOc internally calls request2size too, and
 * each call increases the size to allocate, to account for the header.
 */
- m = (char*)(mALLOc(bytes));
+ m = (char*)(mALLOc_impl(bytes));
/* Aligned -> return it */
if ((((unsigned long)(m)) % alignment) == 0)
return m;
/*
 * Otherwise, try again, requesting enough extra space to be able to
 * acquire alignment.
 */
- fREe(m);
+ fREe_impl(m);
/* Add in extra bytes to match misalignment of unexpanded allocation */
extra = alignment - (((unsigned long)(m)) % alignment);
- m = (char*)(mALLOc(bytes + extra));
+ m = (char*)(mALLOc_impl(bytes + extra));
/*
 * m might not be the same as before. Validate that the previous value of
 * extra still works for the current value of m.
 */
if (m) {
extra2 = alignment - (((unsigned long)(m)) % alignment);
if (extra2 > extra) {
- fREe(m);
+ fREe_impl(m);
m = NULL;
}
}
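The extra/extra2 arithmetic is easiest to check with concrete numbers, so here is a standalone rehearsal of it with invented addresses (the allocator, of course, chooses the real ones):

    #include <stdio.h>

    int main(void)
    {
    	unsigned long alignment = 64;

    	/* First exact-size attempt: suppose it lands at 0x1018. */
    	unsigned long m1 = 0x1018;				/* m1 % 64 == 24 */
    	unsigned long extra = alignment - (m1 % alignment);	/* 40 */

    	/* Retry with bytes + extra: suppose it lands at 0x2008. */
    	unsigned long m2 = 0x2008;				/* m2 % 64 == 8 */
    	unsigned long extra2 = alignment - (m2 % alignment);	/* 56 */

    	/*
    	 * extra2 > extra: 40 padding bytes cannot reach a 64-byte
    	 * boundary from m2, so mEMALIGn gives up (m = NULL) rather
    	 * than hand back a block too small for the user's request.
    	 */
    	printf("extra=%lu extra2=%lu give_up=%d\n",
    	       extra, extra2, extra2 > extra);
    	return 0;
    }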
set_head(newp, newsize | PREV_INUSE);
set_inuse_bit_at_offset(newp, newsize);
set_head_size(p, leadsize);
- fREe(chunk2mem(p));
+ fREe_impl(chunk2mem(p));
p = newp;
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(p), bytes, SIZE_SZ, false);
set_head_size(p, nb);
VALGRIND_MALLOCLIKE_BLOCK(chunk2mem(remainder), remainder_size, SIZE_SZ,
false);
- fREe(chunk2mem(remainder));
+ fREe_impl(chunk2mem(remainder));
}
check_inuse_chunk(p);
*/
+STATIC_IF_MCHECK
#if __STD_C
-Void_t* cALLOc(size_t n, size_t elem_size)
+Void_t* cALLOc_impl(size_t n, size_t elem_size)
#else
-Void_t* cALLOc(n, elem_size) size_t n; size_t elem_size;
+Void_t* cALLOc_impl(n, elem_size) size_t n; size_t elem_size;
#endif
{
mchunkptr p;
INTERNAL_SIZE_T oldtopsize = chunksize(top);
#endif
#endif
- Void_t* mem = mALLOc (sz);
+ Void_t* mem = mALLOc_impl (sz);
if ((long)n < 0) return NULL;
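cALLOc itself is just "multiply, allocate, zero" (the oldtopsize bookkeeping above lets it skip the memset when MORECORE_CLEARS guarantees fresh top-chunk memory is already zeroed). Note that the historic (long)n < 0 test only rejects counts with the top bit set; it does not catch n * elem_size wrapping around. A hedged sketch of the stricter guard a caller might add on top, not something this file implements:

    #include <stddef.h>
    #include <stdint.h>

    void *cALLOc(size_t n, size_t elem_size);	/* public entry point */

    void *calloc_checked(size_t n, size_t elem_size)
    {
    	/* reject any n * elem_size that would overflow size_t */
    	if (elem_size && n > SIZE_MAX / elem_size)
    		return NULL;
    	return cALLOc(n, elem_size);
    }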