81 struct memp_malloc_helper *element;
83 mem_size_t required_size = size + sizeof(struct memp_malloc_helper);
85 for (poolnr = MEMP_POOL_FIRST; poolnr <= MEMP_POOL_LAST; poolnr = (memp_t)(poolnr + 1)) {
86 #if MEM_USE_POOLS_TRY_BIGGER_POOL
91 if (required_size <= memp_sizes[poolnr]) {
95 if (poolnr > MEMP_POOL_LAST) {
96 LWIP_ASSERT("mem_malloc(): no pool is that big!", 0);
99 element = (struct memp_malloc_helper*)memp_malloc(poolnr);
100 if (element == NULL) {
103 #if MEM_USE_POOLS_TRY_BIGGER_POOL
105 if (poolnr < MEMP_POOL_LAST) {
114 element->poolnr = poolnr;
131 struct memp_malloc_helper *hmem = (struct memp_malloc_helper*)rmem;
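
/* Sketch of the pool-backed allocation scheme the fragments above come from
 * (MEM_USE_POOLS): a struct memp_malloc_helper is hidden in front of the
 * returned payload so that the free path can find the right pool again.  The
 * pool API and sizes below are hypothetical stand-ins for memp_malloc()/
 * memp_free(); only the header-in-front idea is taken from the listing. */
#include <stddef.h>
#include <stdlib.h>

struct helper {             /* plays the role of struct memp_malloc_helper */
  size_t poolnr;            /* which pool this block was taken from */
};

#define NUM_POOLS 2
static const size_t pool_payload[NUM_POOLS] = { 64, 256 };

static void *pool_alloc(size_t poolnr) { return malloc(pool_payload[poolnr]); }
static void  pool_free(size_t poolnr, void *p) { (void)poolnr; free(p); }

void *pooled_malloc(size_t size)
{
  size_t required_size = size + sizeof(struct helper);
  for (size_t poolnr = 0; poolnr < NUM_POOLS; poolnr++) {
    if (required_size <= pool_payload[poolnr]) {   /* smallest fitting pool */
      struct helper *element = (struct helper *)pool_alloc(poolnr);
      if (element == NULL) {
        return NULL;        /* (the real code may retry the next bigger pool) */
      }
      element->poolnr = poolnr;          /* remembered for pooled_free() */
      return (void *)(element + 1);
    }
  }
  return NULL;              /* "no pool is that big" */
}

void pooled_free(void *rmem)
{
  struct helper *hmem = (struct helper *)rmem - 1;  /* step back to the header */
  pool_free(hmem->poolnr, hmem);
}
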
171 #define MIN_SIZE_ALIGNED LWIP_MEM_ALIGN_SIZE(MIN_SIZE)
172 #define SIZEOF_STRUCT_MEM LWIP_MEM_ALIGN_SIZE(sizeof(struct mem))
173 #define MEM_SIZE_ALIGNED LWIP_MEM_ALIGN_SIZE(MEM_SIZE)
179 #ifndef LWIP_RAM_HEAP_POINTER
182 #define LWIP_RAM_HEAP_POINTER ram_heap
188 static struct mem *ram_end;
190 static struct mem *lfree;
195 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
197 static volatile u8_t mem_free_count;
200 #define LWIP_MEM_FREE_DECL_PROTECT() SYS_ARCH_DECL_PROTECT(lev_free)
201 #define LWIP_MEM_FREE_PROTECT() SYS_ARCH_PROTECT(lev_free)
202 #define LWIP_MEM_FREE_UNPROTECT() SYS_ARCH_UNPROTECT(lev_free)
203 #define LWIP_MEM_ALLOC_DECL_PROTECT() SYS_ARCH_DECL_PROTECT(lev_alloc)
204 #define LWIP_MEM_ALLOC_PROTECT() SYS_ARCH_PROTECT(lev_alloc)
205 #define LWIP_MEM_ALLOC_UNPROTECT() SYS_ARCH_UNPROTECT(lev_alloc)
210 #define LWIP_MEM_FREE_DECL_PROTECT()
211 #define LWIP_MEM_FREE_PROTECT() sys_mutex_lock(&mem_mutex)
212 #define LWIP_MEM_FREE_UNPROTECT() sys_mutex_unlock(&mem_mutex)
214 #define LWIP_MEM_ALLOC_DECL_PROTECT()
215 #define LWIP_MEM_ALLOC_PROTECT()
216 #define LWIP_MEM_ALLOC_UNPROTECT()
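
/* Usage sketch for the two protection configurations defined above: with
 * LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT the macros expand to
 * SYS_ARCH_PROTECT() pairs, otherwise the allocation path is covered by
 * mem_mutex and the LWIP_MEM_ALLOC_* macros are empty.  The no-op definitions
 * and the empty function body here exist only so the sketch compiles
 * stand-alone; they are not the real expansions. */
#include <stddef.h>

#define LWIP_MEM_ALLOC_DECL_PROTECT()
#define LWIP_MEM_ALLOC_PROTECT()
#define LWIP_MEM_ALLOC_UNPROTECT()

typedef unsigned short mem_size_t;   /* stand-in for the lwIP typedef */

void *protected_malloc_sketch(mem_size_t size)
{
  void *result = NULL;
  LWIP_MEM_ALLOC_DECL_PROTECT();     /* declares a SYS_ARCH level, or nothing */
  LWIP_MEM_ALLOC_PROTECT();          /* enter the critical section */
  /* ... walk the free list and split a block here ... */
  (void)size;
  LWIP_MEM_ALLOC_UNPROTECT();        /* leave the critical section */
  return result;
}
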
233 plug_holes(struct mem *mem)
245 nmem = (struct mem *)(void *)&ram[mem->next];
246 if (mem != nmem && nmem->used == 0 && (u8_t *)nmem != (u8_t *)ram_end) {
256 pmem = (struct mem *)(void *)&ram[mem->prev];
257 if (pmem != mem && pmem->used == 0) {
281 mem = (struct mem *)(void *)ram;
292 lfree = (struct mem *)(void *)ram;
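
/* Sketch of the heap layout behind the (struct mem *)(void *)&ram[...] casts
 * above: next and prev in struct mem are byte offsets into the ram[] heap
 * array, not pointers, so a neighbour is reached by indexing ram[] and
 * casting.  The field names match the listing; heap size, alignment, the
 * ram_end check and the lfree bookkeeping are simplified assumptions. */
typedef unsigned short mem_size_t;

struct mem {
  mem_size_t next;        /* offset of the next block header inside ram[] */
  mem_size_t prev;        /* offset of the previous block header */
  unsigned char used;     /* 1 while allocated, 0 when free */
};

#define HEAP_BYTES 1024
static unsigned char ram[HEAP_BYTES];

static struct mem *mem_at(mem_size_t offset)
{
  return (struct mem *)(void *)&ram[offset];
}

/* Merge a free block with a free successor, as plug_holes() does above. */
static void coalesce_with_next(struct mem *mem)
{
  struct mem *nmem = mem_at(mem->next);
  if (mem != nmem && nmem->used == 0) {
    mem->next = nmem->next;                                 /* absorb nmem */
    mem_at(nmem->next)->prev = (mem_size_t)((unsigned char *)mem - ram);
  }
}
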
349 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
405 LWIP_ASSERT("mem_trim can only shrink memory", newsize <= size);
406 if (newsize > size) {
410 if (newsize == size) {
418 mem2 = (struct mem *)(void *)&ram[mem->next];
419 if (mem2->used == 0) {
425 ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize;
427 lfree = (struct mem *)(void *)&ram[ptr2];
429 mem2 = (struct mem *)(void *)&ram[ptr2];
441 ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
453 ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize;
454 mem2 = (struct mem *)(void *)&ram[ptr2];
463 ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
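
/* Sketch of the split performed in the mem_trim() fragments above: after
 * shrinking the block at offset ptr to newsize payload bytes, a fresh free
 * header is written at ptr2 = ptr + SIZEOF_STRUCT_MEM + newsize and linked
 * between the shrunk block and its old successor (the back-link is what lines
 * 441/463 update).  Layout as in the previous sketch; alignment, the
 * end-of-heap check and the lfree update are left out. */
typedef unsigned short mem_size_t;

struct mem { mem_size_t next, prev; unsigned char used; };

static unsigned char ram[1024];

static struct mem *mem_at(mem_size_t offset)
{
  return (struct mem *)(void *)&ram[offset];
}

static void split_after_trim(mem_size_t ptr, mem_size_t newsize)
{
  struct mem *mem  = mem_at(ptr);
  mem_size_t  ptr2 = (mem_size_t)(ptr + sizeof(struct mem) + newsize);
  struct mem *mem2 = mem_at(ptr2);

  mem2->used = 0;
  mem2->next = mem->next;            /* new free block inherits the successor */
  mem2->prev = ptr;
  mem->next  = ptr2;                 /* shrunk block now points at the split */

  mem_at(mem2->next)->prev = ptr2;   /* fix the successor's back-link */
}
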
474 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
495 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
496 u8_t local_mem_free_count = 0;
520 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
523 local_mem_free_count = 0;
530 ptr = ((struct mem *)(void *)&ram[ptr])->next) {
531 mem = (struct mem *)(void *)&ram[ptr];
532 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
537 if (mem_free_count != 0) {
538 local_mem_free_count = mem_free_count;
561 mem2 = (struct mem *)(void *)&ram[ptr2];
570 ((struct mem *)(void *)&ram[mem2->next])->prev = ptr2;
587 while (lfree->used && lfree != ram_end) {
591 lfree = (struct mem *)(void *)&ram[lfree->next];
593 LWIP_ASSERT("mem_malloc: !lfree->used", ((lfree == ram_end) || (!lfree->used)));
597 LWIP_ASSERT("mem_malloc: allocated memory not above ram_end.",
599 LWIP_ASSERT("mem_malloc: allocated memory properly aligned.",
607 #if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
609 } while (local_mem_free_count != 0);
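
/* Sketch of the retry pattern behind the do { ... } while
 * (local_mem_free_count != 0) loop above: with
 * LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT, a mem_free() running from another
 * context bumps mem_free_count, and the free-list scan starts over instead of
 * working on a heap that changed underneath it.  The scan body here is a
 * placeholder and the bookkeeping is simplified. */
#include <stddef.h>

static volatile unsigned char mem_free_count;    /* bumped by the other context */

static void *scan_free_list(unsigned int size)
{
  (void)size;
  return NULL;         /* placeholder: no suitable block found */
}

void *retrying_malloc(unsigned int size)
{
  unsigned char local_mem_free_count;
  void *result;

  do {
    local_mem_free_count = 0;
    mem_free_count = 0;                          /* arm the "heap changed" flag */
    result = scan_free_list(size);
    if (result == NULL && mem_free_count != 0) {
      local_mem_free_count = mem_free_count;     /* a free happened: try again */
    }
  } while (local_mem_free_count != 0);

  return result;
}
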
637 memset(p, 0, count * size);
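
/* Sketch of the mem_calloc() shape the memset() on line 637 belongs to:
 * allocate count * size bytes, then zero them.  malloc() stands in for
 * mem_malloc(); the overflow guard is an addition for the sketch and is not
 * shown in the listing. */
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

void *calloc_sketch(size_t count, size_t size)
{
  if (size != 0 && count > (size_t)-1 / size) {
    return NULL;                      /* count * size would overflow */
  }
  void *p = malloc(count * size);     /* mem_malloc() in the real code */
  if (p != NULL) {
    memset(p, 0, count * size);       /* zero the whole block, as above */
  }
  return p;
}
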