#if MEM_LIBC_MALLOC || MEM_USE_POOLS

/** mem_init is not used when using pools instead of a heap or using
 * C library malloc(). */
void
mem_init(void)
{
}

/** mem_trim is not used when using pools instead of a heap or using
 * C library malloc(): we can't free part of a pool element and the stack
 * does not support mem_trim() returning a different pointer. */
void *
mem_trim(void *mem, mem_size_t size)
{
  LWIP_UNUSED_ARG(size);
  return mem;
}
#endif /* MEM_LIBC_MALLOC || MEM_USE_POOLS */

#if MEM_LIBC_MALLOC
/* lwIP heap implemented using C library malloc() */

/* in case C library malloc() needs extra protection,
 * allow these defines to be overridden. */
#ifndef mem_clib_free
#define mem_clib_free free
#endif
#ifndef mem_clib_malloc
#define mem_clib_malloc malloc
#endif
#ifndef mem_clib_calloc
#define mem_clib_calloc calloc
#endif

#if LWIP_STATS && MEM_STATS
#define MEM_LIBC_STATSHELPER_SIZE   LWIP_MEM_ALIGN_SIZE(sizeof(mem_size_t))
#else
#define MEM_LIBC_STATSHELPER_SIZE   0
#endif

/** Allocate a block of memory with a minimum of 'size' bytes. */
void *
mem_malloc(mem_size_t size)
{
  void *ret = mem_clib_malloc(size + MEM_LIBC_STATSHELPER_SIZE);
  if (ret == NULL) {
    MEM_STATS_INC(err);
  } else {
    LWIP_ASSERT("malloc() must return aligned memory", LWIP_MEM_ALIGN(ret) == ret);
#if LWIP_STATS && MEM_STATS
    *(mem_size_t *)ret = size;
    ret = (u8_t *)ret + MEM_LIBC_STATSHELPER_SIZE;
    MEM_STATS_INC_USED(used, size);
#endif /* LWIP_STATS && MEM_STATS */
  }
  return ret;
}

/** Put memory back on the heap. */
void
mem_free(void *rmem)
{
  LWIP_ASSERT("rmem != NULL", (rmem != NULL));
  LWIP_ASSERT("rmem == MEM_ALIGN(rmem)", (rmem == LWIP_MEM_ALIGN(rmem)));
#if LWIP_STATS && MEM_STATS
  rmem = (u8_t *)rmem - MEM_LIBC_STATSHELPER_SIZE;
  MEM_STATS_DEC_USED(used, *(mem_size_t *)rmem);
#endif /* LWIP_STATS && MEM_STATS */
  mem_clib_free(rmem);
}
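
/* Standalone sketch (illustrative, not lwIP code) of the size-prefix trick
 * used above: over-allocate by a small header, record the size in front of
 * the payload, and step back by the same offset on free. Assumes <stdlib.h>
 * and <string.h> are available; 'prefixed_malloc'/'prefixed_free' are
 * hypothetical names. lwIP additionally rounds the header up to
 * LWIP_MEM_ALIGN_SIZE(sizeof(mem_size_t)) so the payload stays aligned. */
static void *
prefixed_malloc(size_t size)
{
  unsigned char *raw = (unsigned char *)malloc(sizeof(size_t) + size);
  if (raw == NULL) {
    return NULL;
  }
  memcpy(raw, &size, sizeof(size_t)); /* bookkeeping header before the payload */
  return raw + sizeof(size_t);        /* caller only ever sees the payload */
}

static void
prefixed_free(void *p)
{
  if (p != NULL) {
    free((unsigned char *)p - sizeof(size_t)); /* undo the header offset */
  }
}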
#elif MEM_USE_POOLS
/* lwIP heap implemented with different sized pools */

/**
 * Allocate memory: determine the smallest pool that is big enough
 * to contain an element of 'size' and get an element from that pool.
 *
 * @param size the size in bytes of the memory needed
 * @return a pointer to the allocated memory or NULL if the pool is empty
 */
void *
mem_malloc(mem_size_t size)
{
  void *ret;
  struct memp_malloc_helper *element = NULL;
  memp_t poolnr;
  mem_size_t required_size = size + LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper));

  for (poolnr = MEMP_POOL_FIRST; poolnr <= MEMP_POOL_LAST; poolnr = (memp_t)(poolnr + 1)) {
    /* is this pool big enough to hold an element of the required size
       plus a struct memp_malloc_helper that saves the pool this element came from? */
    if (required_size <= memp_pools[poolnr]->size) {
      element = (struct memp_malloc_helper *)memp_malloc(poolnr);
      if (element == NULL) {
        /* No need to DEBUGF or ASSERT: this error is already handled in memp.c */
#if MEM_USE_POOLS_TRY_BIGGER_POOL
        /* Try a bigger pool if this one is empty! */
        if (poolnr < MEMP_POOL_LAST) {
          continue;
        }
#endif /* MEM_USE_POOLS_TRY_BIGGER_POOL */
        MEM_STATS_INC(err);
        return NULL;
      }
      break;
    }
  }
  if (poolnr > MEMP_POOL_LAST) {
    LWIP_ASSERT("mem_malloc(): no pool is that big!", 0);
    MEM_STATS_INC(err);
    return NULL;
  }

  /* save the pool number this element came from */
  element->poolnr = poolnr;
  /* and return a pointer to the memory directly after the struct memp_malloc_helper */
  ret = (u8_t *)element + LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper));

#if MEMP_OVERFLOW_CHECK || (LWIP_STATS && MEM_STATS)
  /* truncating to u16_t is safe because struct memp_malloc_helper::size is u16_t */
  element->size = (u16_t)size;
  MEM_STATS_INC_USED(used, element->size);
#endif /* MEMP_OVERFLOW_CHECK || (LWIP_STATS && MEM_STATS) */
#if MEMP_OVERFLOW_CHECK
  /* initialize unused memory (diff between requested size and selected pool's size) */
  memset((u8_t *)ret + size, 0xcd, memp_pools[poolnr]->size - size);
#endif /* MEMP_OVERFLOW_CHECK */
  return ret;
}
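
/* Selection example (hypothetical pool sizes): with pools of 256, 512 and
 * 1024 bytes and an aligned helper size of 8, a request for 250 bytes needs
 * required_size = 250 + 8 = 258 and therefore comes from the 512-byte pool:
 *   258 <= 256 ?  no  -> try the next pool
 *   258 <= 512 ?  yes -> memp_malloc() from this pool */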
/**
 * Free memory previously allocated by mem_malloc. Loads the pool number
 * and calls memp_free with that pool number to put the element back into
 * its pool.
 *
 * @param rmem the memory element to free
 */
void
mem_free(void *rmem)
{
  struct memp_malloc_helper *hmem;

  LWIP_ASSERT("rmem != NULL", (rmem != NULL));
  LWIP_ASSERT("rmem == MEM_ALIGN(rmem)", (rmem == LWIP_MEM_ALIGN(rmem)));

  /* get the original struct memp_malloc_helper */
  /* cast through void* to get rid of alignment warnings */
  hmem = (struct memp_malloc_helper *)(void *)((u8_t *)rmem - LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper)));

  LWIP_ASSERT("hmem != NULL", (hmem != NULL));
  LWIP_ASSERT("hmem == MEM_ALIGN(hmem)", (hmem == LWIP_MEM_ALIGN(hmem)));
  LWIP_ASSERT("hmem->poolnr < MEMP_MAX", (hmem->poolnr < MEMP_MAX));

  MEM_STATS_DEC_USED(used, hmem->size);
#if MEMP_OVERFLOW_CHECK
  {
    u16_t i;
    LWIP_ASSERT("MEM_USE_POOLS: invalid chunk size",
                hmem->size <= memp_pools[hmem->poolnr]->size);
    /* check that unused memory remained untouched (diff between requested
       size and selected pool's size) */
    for (i = hmem->size; i < memp_pools[hmem->poolnr]->size; i++) {
      u8_t data = *((u8_t *)rmem + i);
      LWIP_ASSERT("MEM_USE_POOLS: mem overflow detected", data == 0xcd);
    }
  }
#endif /* MEMP_OVERFLOW_CHECK */

  /* and put it in the pool we saved earlier */
  memp_free(hmem->poolnr, hmem);
}
#else /* MEM_USE_POOLS */
/* lwIP replacement for your libc malloc() */

#define MIN_SIZE_ALIGNED     LWIP_MEM_ALIGN_SIZE(MIN_SIZE)
#define SIZEOF_STRUCT_MEM    LWIP_MEM_ALIGN_SIZE(sizeof(struct mem))
#define MEM_SIZE_ALIGNED     LWIP_MEM_ALIGN_SIZE(MEM_SIZE)

/** If you want to relocate the heap to external memory, simply define
 * LWIP_RAM_HEAP_POINTER as a void-pointer to that location.
 * If so, make sure the memory at that location is big enough. */
#ifndef LWIP_RAM_HEAP_POINTER
/** the heap: we need one struct mem at the end and some room for alignment */
LWIP_DECLARE_MEMORY_ALIGNED(ram_heap, MEM_SIZE_ALIGNED + (2U * SIZEOF_STRUCT_MEM));
#define LWIP_RAM_HEAP_POINTER ram_heap
#endif /* LWIP_RAM_HEAP_POINTER */

/** pointer to the heap (ram_heap): for alignment, ram is now a pointer instead of an array */
static u8_t *ram;
/** the last entry, always unused! */
static struct mem *ram_end;
/** pointer to the lowest free block, this is used for faster search */
static struct mem *lfree;

/** concurrent access protection */
static sys_mutex_t mem_mutex;
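
/* Worked example (assuming MEM_ALIGNMENT == 4 and the default
 * LWIP_MEM_ALIGN_SIZE(size) = (((size) + MEM_ALIGNMENT - 1U) & ~(MEM_ALIGNMENT - 1U))):
 *   LWIP_MEM_ALIGN_SIZE(1)  -> 4
 *   LWIP_MEM_ALIGN_SIZE(12) -> 12
 * so with a 16-bit mem_size_t, sizeof(struct mem) (next + prev + used) is
 * typically 6 bytes and SIZEOF_STRUCT_MEM rounds up to 8. */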
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* indicates whether a mem_free() has occurred and the scan in mem_malloc()
 * has to be restarted */
static volatile u8_t mem_free_count;

/* Allow mem_free from other (e.g. interrupt) context */
#define LWIP_MEM_FREE_DECL_PROTECT()  SYS_ARCH_DECL_PROTECT(lev_free)
#define LWIP_MEM_FREE_PROTECT()       SYS_ARCH_PROTECT(lev_free)
#define LWIP_MEM_FREE_UNPROTECT()     SYS_ARCH_UNPROTECT(lev_free)
#define LWIP_MEM_ALLOC_DECL_PROTECT() SYS_ARCH_DECL_PROTECT(lev_alloc)
#define LWIP_MEM_ALLOC_PROTECT()      SYS_ARCH_PROTECT(lev_alloc)
#define LWIP_MEM_ALLOC_UNPROTECT()    SYS_ARCH_UNPROTECT(lev_alloc)

#else /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */

/* Protect the heap only by using a mutex */
#define LWIP_MEM_FREE_DECL_PROTECT()
#define LWIP_MEM_FREE_PROTECT()    sys_mutex_lock(&mem_mutex)
#define LWIP_MEM_FREE_UNPROTECT()  sys_mutex_unlock(&mem_mutex)
/* mem_malloc is protected using the mutex AND LWIP_MEM_ALLOC_PROTECT */
#define LWIP_MEM_ALLOC_DECL_PROTECT()
#define LWIP_MEM_ALLOC_PROTECT()
#define LWIP_MEM_ALLOC_UNPROTECT()

#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */

/**
 * "Plug holes" by combining adjacent empty struct mems.
 * After this function is through, there should not exist
 * one empty struct mem pointing to another empty struct mem.
 *
 * @param mem this points to a struct mem which just has been freed
 * @internal this function is only called by mem_free() and mem_trim()
 *
 * This assumes access to the heap is protected by the calling function
 * already.
 */
static void
plug_holes(struct mem *mem)
{
  struct mem *nmem;
  struct mem *pmem;

  LWIP_ASSERT("plug_holes: mem >= ram", (u8_t *)mem >= ram);
  LWIP_ASSERT("plug_holes: mem < ram_end", (u8_t *)mem < (u8_t *)ram_end);
  LWIP_ASSERT("plug_holes: mem->used == 0", mem->used == 0);

  /* plug hole forward */
  LWIP_ASSERT("plug_holes: mem->next <= MEM_SIZE_ALIGNED", mem->next <= MEM_SIZE_ALIGNED);

  nmem = (struct mem *)(void *)&ram[mem->next];
  if (mem != nmem && nmem->used == 0 && (u8_t *)nmem != (u8_t *)ram_end) {
    /* if mem->next is unused and not end of ram, combine mem and mem->next */
    if (lfree == nmem) {
      lfree = mem;
    }
    mem->next = nmem->next;
    ((struct mem *)(void *)&ram[nmem->next])->prev = (mem_size_t)((u8_t *)mem - ram);
  }

  /* plug hole backward */
  pmem = (struct mem *)(void *)&ram[mem->prev];
  if (pmem != mem && pmem->used == 0) {
    /* if mem->prev is unused, combine mem and mem->prev */
    if (lfree == mem) {
      lfree = pmem;
    }
    pmem->next = mem->next;
    ((struct mem *)(void *)&ram[mem->next])->prev = (mem_size_t)((u8_t *)pmem - ram);
  }
}
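
/* Illustrative before/after picture (hypothetical offsets) for
 * plug_holes(B) when B was just freed and C is already free:
 *
 *   before: [A used][B free][C free][D used]
 *   after : [A used][B free...........][D used]   (B->next = C->next)
 *
 * The backward pass would likewise merge B into A if A were free, moving
 * lfree down to the surviving block where necessary. */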
/**
 * Zero the heap and initialize start, end and lowest-free pointer.
 */
void
mem_init(void)
{
  struct mem *mem;

  LWIP_ASSERT("Sanity check alignment",
              (SIZEOF_STRUCT_MEM & (MEM_ALIGNMENT - 1)) == 0);

  /* align the heap */
  ram = (u8_t *)LWIP_MEM_ALIGN(LWIP_RAM_HEAP_POINTER);
  /* initialize the start of the heap */
  mem = (struct mem *)(void *)ram;
  mem->next = MEM_SIZE_ALIGNED;
  mem->prev = 0;
  mem->used = 0;
  /* initialize the end of the heap */
  ram_end = (struct mem *)(void *)&ram[MEM_SIZE_ALIGNED];
  ram_end->used = 1;
  ram_end->next = MEM_SIZE_ALIGNED;
  ram_end->prev = MEM_SIZE_ALIGNED;

  /* initialize the lowest-free pointer to the start of the heap */
  lfree = (struct mem *)(void *)ram;

  MEM_STATS_AVAIL(avail, MEM_SIZE_ALIGNED);

  if (sys_mutex_new(&mem_mutex) != ERR_OK) {
    LWIP_ASSERT("failed to create mem_mutex", 0);
  }
}
/**
 * Put a struct mem back on the heap.
 *
 * @param rmem is the data portion of a struct mem as returned by a previous
 *             call to mem_malloc()
 */
void
mem_free(void *rmem)
{
  struct mem *mem;
  LWIP_MEM_FREE_DECL_PROTECT();

  if (rmem == NULL) {
    LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_TRACE | LWIP_DBG_LEVEL_SERIOUS,
                ("mem_free(p == NULL) was called.\n"));
    return;
  }
  LWIP_ASSERT("mem_free: sanity check alignment",
              (((mem_ptr_t)rmem) & (MEM_ALIGNMENT - 1)) == 0);

  LWIP_ASSERT("mem_free: legal memory", (u8_t *)rmem >= (u8_t *)ram &&
              (u8_t *)rmem < (u8_t *)ram_end);

  if ((u8_t *)rmem < (u8_t *)ram || (u8_t *)rmem >= (u8_t *)ram_end) {
    SYS_ARCH_DECL_PROTECT(lev);
    LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_free: illegal memory\n"));
    /* protect mem stats from concurrent access */
    SYS_ARCH_PROTECT(lev);
    MEM_STATS_INC(illegal);
    SYS_ARCH_UNPROTECT(lev);
    return;
  }
  /* protect the heap from concurrent access */
  LWIP_MEM_FREE_PROTECT();
  /* Get the corresponding struct mem ... */
  /* cast through void* to get rid of alignment warnings */
  mem = (struct mem *)(void *)((u8_t *)rmem - SIZEOF_STRUCT_MEM);
  /* ... which has to be in a used state ... */
  LWIP_ASSERT("mem_free: mem->used", mem->used);
  /* ... and is now unused. */
  mem->used = 0;

  if (mem < lfree) {
    /* the newly freed struct is now the lowest */
    lfree = mem;
  }

  MEM_STATS_DEC_USED(used, mem->next - (mem_size_t)(((u8_t *)mem - ram)));

  /* finally, see if prev or next are free also */
  plug_holes(mem);
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
  mem_free_count = 1;
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
  LWIP_MEM_FREE_UNPROTECT();
}
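
/* Usage sketch (illustrative; 'mem_free_usage_sketch' is a hypothetical
 * name): freeing NULL is tolerated and only logs a debug message, while
 * pointers outside [ram, ram_end) are rejected and counted as 'illegal'. */
static void
mem_free_usage_sketch(void)
{
  void *p = mem_malloc(64);
  if (p != NULL) {
    mem_free(p);   /* returns the block and coalesces free neighbours */
  }
  mem_free(NULL);  /* no-op apart from the debug output */
}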
/**
 * Shrink memory returned by mem_malloc().
 *
 * @param rmem pointer to memory allocated by mem_malloc that is to be shrunk
 * @param newsize required size after shrinking (needs to be smaller than or
 *                equal to the previous size)
 * @return for compatibility reasons: is always == rmem, at the moment
 *         or NULL if newsize is > old size, in which case rmem is NOT touched
 *         or freed!
 */
void *
mem_trim(void *rmem, mem_size_t newsize)
{
  mem_size_t size;
  mem_size_t ptr, ptr2;
  struct mem *mem, *mem2;
  /* use the FREE_PROTECT here: it protects with sem OR SYS_ARCH_PROTECT */
  LWIP_MEM_FREE_DECL_PROTECT();

  /* Expand the size of the allocated memory region so that we can
     adjust for alignment. */
  newsize = LWIP_MEM_ALIGN_SIZE(newsize);

  if (newsize < MIN_SIZE_ALIGNED) {
    /* every data block must be at least MIN_SIZE_ALIGNED long */
    newsize = MIN_SIZE_ALIGNED;
  }

  if (newsize > MEM_SIZE_ALIGNED) {
    return NULL;
  }

  LWIP_ASSERT("mem_trim: legal memory", (u8_t *)rmem >= (u8_t *)ram &&
              (u8_t *)rmem < (u8_t *)ram_end);

  if ((u8_t *)rmem < (u8_t *)ram || (u8_t *)rmem >= (u8_t *)ram_end) {
    SYS_ARCH_DECL_PROTECT(lev);
    LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_trim: illegal memory\n"));
    /* protect mem stats from concurrent access */
    SYS_ARCH_PROTECT(lev);
    MEM_STATS_INC(illegal);
    SYS_ARCH_UNPROTECT(lev);
    return rmem;
  }
  /* Get the corresponding struct mem ... */
  /* cast through void* to get rid of alignment warnings */
  mem = (struct mem *)(void *)((u8_t *)rmem - SIZEOF_STRUCT_MEM);
  /* ... and its offset pointer */
  ptr = (mem_size_t)((u8_t *)mem - ram);

  size = mem->next - ptr - SIZEOF_STRUCT_MEM;
  LWIP_ASSERT("mem_trim can only shrink memory", newsize <= size);
  if (newsize > size) {
    /* not supported */
    return NULL;
  }
  if (newsize == size) {
    /* No change in size, simply return */
    return rmem;
  }

  /* protect the heap from concurrent access */
  LWIP_MEM_FREE_PROTECT();

  mem2 = (struct mem *)(void *)&ram[mem->next];
  if (mem2->used == 0) {
    /* The next struct is unused, we can simply move it a little */
    mem_size_t next;
    /* remember the old next pointer */
    next = mem2->next;
    /* create new struct mem which is moved directly after the shrunk mem */
    ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize;
    if (lfree == mem2) {
      lfree = (struct mem *)(void *)&ram[ptr2];
    }
    mem2 = (struct mem *)(void *)&ram[ptr2];
    mem2->used = 0;
    /* restore the next pointer */
    mem2->next = next;
    /* link it back to mem */
    mem2->prev = ptr;
    /* link mem to it */
    mem->next = ptr2;
    /* last thing to restore linked list: as we have moved mem2,
     * let 'mem2->next->prev' point to mem2 again, but only if mem2->next is
     * not the end of the heap */
    if (mem2->next != MEM_SIZE_ALIGNED) {
      ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
    }
    MEM_STATS_DEC_USED(used, (size - newsize));
    /* no need to plug holes, we've already done that */
  } else if (newsize + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED <= size) {
    /* Next struct is used but there's room for another struct mem with
     * at least MIN_SIZE_ALIGNED of data.
     * Old size ('size') must be big enough to contain at least 'newsize'
     * plus a struct mem ('SIZEOF_STRUCT_MEM') with some data
     * ('MIN_SIZE_ALIGNED'). */
    ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize;
    mem2 = (struct mem *)(void *)&ram[ptr2];
    if (mem2 < lfree) {
      lfree = mem2;
    }
    mem2->used = 0;
    mem2->next = mem->next;
    mem2->prev = ptr;
    mem->next = ptr2;
    if (mem2->next != MEM_SIZE_ALIGNED) {
      ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
    }
    MEM_STATS_DEC_USED(used, (size - newsize));
    /* the original mem->next is used, so no need to plug holes! */
  }
  /* else: the next struct mem is used and the gap is too small to hold
   * another struct mem, so the remaining space stays part of this block */
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
  mem_free_count = 1;
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
  LWIP_MEM_FREE_UNPROTECT();
  return rmem;
}
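
/* Usage sketch (illustrative; hypothetical function name): mem_trim() only
 * shrinks, keeps the pointer stable, and gives the cut-off tail back to the
 * heap; asking for a larger size returns NULL and leaves the block alone. */
static void
mem_trim_usage_sketch(void)
{
  void *p = mem_malloc(256);
  if (p != NULL) {
    p = mem_trim(p, 100); /* same pointer, tail returned to the heap */
    mem_free(p);
  }
}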
/**
 * Allocate a block of memory with a minimum of 'size' bytes.
 *
 * @param size the minimum size of the requested block in bytes.
 * @return pointer to allocated memory or NULL if no free memory was found.
 *
 * Note that the returned value will always be aligned (as defined by
 * MEM_ALIGNMENT).
 */
void *
mem_malloc(mem_size_t size)
{
  mem_size_t ptr, ptr2;
  struct mem *mem, *mem2;
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
  u8_t local_mem_free_count = 0;
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
  LWIP_MEM_ALLOC_DECL_PROTECT();

  if (size == 0) {
    return NULL;
  }

  /* Expand the size of the allocated memory region so that we can
     adjust for alignment. */
  size = LWIP_MEM_ALIGN_SIZE(size);

  if (size < MIN_SIZE_ALIGNED) {
    /* every data block must be at least MIN_SIZE_ALIGNED long */
    size = MIN_SIZE_ALIGNED;
  }

  if (size > MEM_SIZE_ALIGNED) {
    return NULL;
  }

  /* protect the heap from concurrent access */
  sys_mutex_lock(&mem_mutex);
  LWIP_MEM_ALLOC_PROTECT();
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
  /* run as long as a mem_free disturbed mem_malloc or mem_trim */
  do {
    local_mem_free_count = 0;
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */

    /* Scan through the heap searching for a free block that is big enough,
     * beginning with the lowest free block. */
    for (ptr = (mem_size_t)((u8_t *)lfree - ram); ptr < MEM_SIZE_ALIGNED - size;
         ptr = ((struct mem *)(void *)&ram[ptr])->next) {
      mem = (struct mem *)(void *)&ram[ptr];
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
      mem_free_count = 0;
      LWIP_MEM_ALLOC_UNPROTECT();
      /* allow mem_free or mem_trim to run */
      LWIP_MEM_ALLOC_PROTECT();
      if (mem_free_count != 0) {
        /* If mem_free or mem_trim have run, we have to restart since they
           could have altered our current struct mem. */
        local_mem_free_count = 1;
        break;
      }
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */

      if ((!mem->used) &&
          (mem->next - (ptr + SIZEOF_STRUCT_MEM)) >= size) {
        /* mem is not used and at least perfect fit is possible:
         * mem->next - (ptr + SIZEOF_STRUCT_MEM) gives us the 'user data size' of mem */

        if (mem->next - (ptr + SIZEOF_STRUCT_MEM) >= (size + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED)) {
          /* split large block: create an empty remainder that is big enough
           * to hold another struct mem plus MIN_SIZE_ALIGNED bytes of data */
          ptr2 = ptr + SIZEOF_STRUCT_MEM + size;
          /* create mem2 struct */
          mem2 = (struct mem *)(void *)&ram[ptr2];
          mem2->used = 0;
          mem2->next = mem->next;
          mem2->prev = ptr;
          /* and insert it between mem and mem->next */
          mem->next = ptr2;
          mem->used = 1;

          if (mem2->next != MEM_SIZE_ALIGNED) {
            ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
          }
          MEM_STATS_INC_USED(used, (size + SIZEOF_STRUCT_MEM));
        } else {
          /* near fit or exact fit: do not split, no mem2 creation.
           * mem->next is always used at this point: otherwise there would be
           * two unused structs in a row and plug_holes would have merged them */
          mem->used = 1;
          MEM_STATS_INC_USED(used, mem->next - (mem_size_t)((u8_t *)mem - ram));
        }
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
mem_malloc_adjust_lfree:
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
        if (mem == lfree) {
          struct mem *cur = lfree;
          /* Find next free block after mem and update lowest free pointer */
          while (cur->used && cur != ram_end) {
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
            mem_free_count = 0;
            LWIP_MEM_ALLOC_UNPROTECT();
            /* prevent high interrupt latency... */
            LWIP_MEM_ALLOC_PROTECT();
            if (mem_free_count != 0) {
              /* If mem_free or mem_trim have run, we have to restart since
                 they could have altered our current struct mem or lfree. */
              goto mem_malloc_adjust_lfree;
            }
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
            cur = (struct mem *)(void *)&ram[cur->next];
          }
          lfree = cur;
          LWIP_ASSERT("mem_malloc: !lfree->used", ((lfree == ram_end) || (!lfree->used)));
        }
        LWIP_MEM_ALLOC_UNPROTECT();
        sys_mutex_unlock(&mem_mutex);
        LWIP_ASSERT("mem_malloc: allocated memory not above ram_end.",
                    (mem_ptr_t)mem + SIZEOF_STRUCT_MEM + size <= (mem_ptr_t)ram_end);
        LWIP_ASSERT("mem_malloc: allocated memory properly aligned.",
                    ((mem_ptr_t)mem + SIZEOF_STRUCT_MEM) % MEM_ALIGNMENT == 0);
        LWIP_ASSERT("mem_malloc: sanity check alignment",
                    (((mem_ptr_t)mem) & (MEM_ALIGNMENT - 1)) == 0);

        return (u8_t *)mem + SIZEOF_STRUCT_MEM;
      }
    }
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
    /* if we got interrupted by a mem_free, try again */
  } while (local_mem_free_count != 0);
#endif /* LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT */
  LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SERIOUS,
              ("mem_malloc: could not allocate %"S16_F" bytes\n", (s16_t)size));
  MEM_STATS_INC(err);
  LWIP_MEM_ALLOC_UNPROTECT();
  sys_mutex_unlock(&mem_mutex);
  return NULL;
}
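
/* Usage sketch (illustrative; hypothetical function name): requests are
 * rounded up to an aligned size of at least MIN_SIZE_ALIGNED, and the
 * returned pointer is always MEM_ALIGNMENT-aligned. */
static void
mem_malloc_usage_sketch(void)
{
  void *p = mem_malloc(10); /* internally rounded up to at least MIN_SIZE_ALIGNED */
  if (p != NULL) {
    LWIP_ASSERT("mem_malloc result aligned",
                (((mem_ptr_t)p) & (MEM_ALIGNMENT - 1)) == 0);
    mem_free(p);
  }
}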
#endif /* MEM_USE_POOLS */

#if MEM_LIBC_MALLOC && (!LWIP_STATS || !MEM_STATS)
void *
mem_calloc(mem_size_t count, mem_size_t size)
{
  return mem_clib_calloc(count, size);
}
#else /* MEM_LIBC_MALLOC && (!LWIP_STATS || !MEM_STATS) */
/**
 * Contiguously allocates enough space for count objects that are size bytes
 * of memory each and returns a pointer to the allocated memory.
 *
 * The allocated memory is filled with bytes of value zero.
 *
 * @param count number of objects to allocate
 * @param size size of the objects to allocate
 * @return pointer to allocated memory / NULL pointer if there is an error
 */
void *
mem_calloc(mem_size_t count, mem_size_t size)
{
  void *p;

  /* allocate 'count' objects of size 'size' */
  p = mem_malloc(count * size);
  if (p) {
    /* zero the memory */
    memset(p, 0, (size_t)count * (size_t)size);
  }
  return p;
}
#endif /* MEM_LIBC_MALLOC && (!LWIP_STATS || !MEM_STATS) */
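
/* Defensive wrapper (illustrative sketch; 'calloc_checked' is a
 * hypothetical name): the product count * size above is evaluated in
 * mem_size_t and can wrap for large arguments, so untrusted inputs may
 * warrant an explicit overflow check before calling mem_calloc(). */
static void *
calloc_checked(mem_size_t count, mem_size_t size)
{
  if ((size != 0) && (count > (mem_size_t)(~(mem_size_t)0) / size)) {
    return NULL; /* count * size would not fit in mem_size_t */
  }
  return mem_calloc(count, size);
}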