Lines matching references to identifier p (source line number, matching line, and referencing context, as reported by the cross-referencer):

1009 #define chunk2mem(p)   ((void*)((char*)(p) + 2*SIZE_SZ))  argument
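The 2*SIZE_SZ offset skips the two header words (prev_size and size) that precede user memory. A minimal sketch of the chunk layout these macros assume, with mem2chunk (the inverse, from the same source) and a hypothetical round-trip check; later sketches in this listing reuse these definitions:

    #include <assert.h>
    #include <stddef.h>

    typedef size_t INTERNAL_SIZE_T;      /* assumption: the default type    */
    #define SIZE_SZ (sizeof(INTERNAL_SIZE_T))

    struct malloc_chunk {
      INTERNAL_SIZE_T prev_size;         /* size of previous chunk, if free */
      INTERNAL_SIZE_T size;              /* chunk size plus low status bits */
      struct malloc_chunk *fd;           /* free-list links; overlaid by    */
      struct malloc_chunk *bk;           /* user data while chunk is in use */
    };
    typedef struct malloc_chunk* mchunkptr;

    #define chunk2mem(p)   ((void*)((char*)(p) + 2*SIZE_SZ))
    #define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))

    int main(void) {
      struct malloc_chunk c = {0};
      void *mem = chunk2mem(&c);         /* user pointer starts at c.fd */
      assert(mem2chunk(mem) == &c);      /* inverse recovers the header */
      return 0;
    }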
1046 #define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) )) argument
1050 #define prev_chunk(p)\ argument
1051 ((mchunkptr)( ((char*)(p)) - ((p)->prev_size) ))
1056 #define chunk_at_offset(p, s) ((mchunkptr)(((char*)(p)) + (s))) argument
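next_chunk strips the PREV_INUSE status bit before stepping, since the low bits of size are flags rather than length; prev_chunk is only meaningful when the chunk below is free, because only then does prev_size hold a valid boundary tag. A hypothetical walk, reusing the definitions above:

    #define PREV_INUSE 0x1

    static void walk_demo(void) {
      static _Alignas(2 * sizeof(size_t)) char arena[64]; /* fake heap     */
      mchunkptr a = (mchunkptr)arena;
      a->size = 32 | PREV_INUSE;         /* 32-byte chunk; flag bit set    */
      mchunkptr b = next_chunk(a);       /* flag masked off: lands at +32  */
      b->prev_size = 32;                 /* boundary tag pointing back     */
      assert(prev_chunk(b) == a);
      assert(chunk_at_offset(a, 32) == b);
    }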
1067 #define inuse(p)\ argument
1068 ((((mchunkptr)(((char*)(p))+((p)->size & ~PREV_INUSE)))->size) & PREV_INUSE)
1072 #define prev_inuse(p) ((p)->size & PREV_INUSE) argument
1076 #define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED) argument
1080 #define set_inuse(p)\ argument
1081 ((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size |= PREV_INUSE
1083 #define clear_inuse(p)\ argument
1084 ((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size &= ~(PREV_INUSE)
1088 #define inuse_bit_at_offset(p, s)\ argument
1089 (((mchunkptr)(((char*)(p)) + (s)))->size & PREV_INUSE)
1091 #define set_inuse_bit_at_offset(p, s)\ argument
1092 (((mchunkptr)(((char*)(p)) + (s)))->size |= PREV_INUSE)
1094 #define clear_inuse_bit_at_offset(p, s)\ argument
1095 (((mchunkptr)(((char*)(p)) + (s)))->size &= ~(PREV_INUSE))
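The subtlety behind this group: a chunk's own in-use status is stored as the PREV_INUSE bit of the following chunk's size word, which is why inuse, set_inuse, and clear_inuse all walk forward by p->size before touching the bit. A hypothetical continuation of the walk_demo arena, using the macro definitions from the listing (b == next_chunk(a)):

    #define IS_MMAPPED 0x2

    static void inuse_demo(mchunkptr a, mchunkptr b) {
      set_inuse(a);                      /* writes PREV_INUSE into b->size */
      assert(inuse(a));                  /* ...and inuse(a) reads it back  */
      assert(prev_inuse(b));             /* same bit, viewed from b        */
      clear_inuse(a);
      assert(!inuse(a));
    }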
1106 #define chunksize(p) ((p)->size & ~(SIZE_BITS)) argument
1110 #define set_head_size(p, s) ((p)->size = (((p)->size & PREV_INUSE) | (s))) argument
1114 #define set_head(p, s) ((p)->size = (s)) argument
1118 #define set_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_size = (s)) argument
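set_head writes the size-plus-flags word at the front of a chunk; set_foot writes the plain size into the last word, which is physically the next chunk's prev_size field: the boundary tag that makes backward coalescing O(1). A hypothetical check, assuming SIZE_BITS is PREV_INUSE|IS_MMAPPED in this version:

    #define SIZE_BITS (PREV_INUSE | IS_MMAPPED)

    static void boundary_tag_demo(mchunkptr a) {
      set_head(a, 32 | PREV_INUSE);      /* header: size plus flag        */
      set_foot(a, 32);                   /* next->prev_size = 32          */
      assert(chunksize(a) == 32);        /* flag bits masked off          */
      assert(chunk_at_offset(a, 32)->prev_size == 32);
    }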
1374 static void do_check_chunk(mchunkptr p) in do_check_chunk() argument
1376 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE; in do_check_chunk()
1379 assert(!chunk_is_mmapped(p)); in do_check_chunk()
1382 assert((char*)p >= sbrk_base); in do_check_chunk()
1383 if (p != top) in do_check_chunk()
1384 assert((char*)p + sz <= (char*)top); in do_check_chunk()
1386 assert((char*)p + sz <= sbrk_base + sbrked_mem); in do_check_chunk()
1391 static void do_check_free_chunk(mchunkptr p) in do_check_free_chunk() argument
1393 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE; in do_check_free_chunk()
1394 mchunkptr next = chunk_at_offset(p, sz); in do_check_free_chunk()
1396 do_check_chunk(p); in do_check_free_chunk()
1399 assert(!inuse(p)); in do_check_free_chunk()
1405 assert(aligned_OK(chunk2mem(p))); in do_check_free_chunk()
1409 assert(prev_inuse(p)); in do_check_free_chunk()
1413 assert(p->fd->bk == p); in do_check_free_chunk()
1414 assert(p->bk->fd == p); in do_check_free_chunk()
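The pair of asserts at 1413-1414 is the bin invariant: bins are circular doubly linked lists, so a free chunk must be reachable back from both of its neighbours. A standalone illustration with a hypothetical one-chunk bin:

    static void list_invariant_demo(void) {
      struct malloc_chunk b = {0}, x = {0};  /* b: bin header, x: free chunk  */
      b.fd = &x; b.bk = &x;                  /* circular list of one element  */
      x.fd = &b; x.bk = &b;
      mchunkptr p = &x;
      assert(p->fd->bk == p);                /* forward neighbour points back */
      assert(p->bk->fd == p);                /* backward neighbour points on  */
    }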
1420 static void do_check_inuse_chunk(mchunkptr p) in do_check_inuse_chunk() argument
1422 mchunkptr next = next_chunk(p); in do_check_inuse_chunk()
1423 do_check_chunk(p); in do_check_inuse_chunk()
1426 assert(inuse(p)); in do_check_inuse_chunk()
1432 if (!prev_inuse(p)) in do_check_inuse_chunk()
1434 mchunkptr prv = prev_chunk(p); in do_check_inuse_chunk()
1435 assert(next_chunk(prv) == p); in do_check_inuse_chunk()
1448 static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s) in do_check_malloced_chunk() argument
1450 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE; in do_check_malloced_chunk()
1453 do_check_inuse_chunk(p); in do_check_malloced_chunk()
1462 assert(aligned_OK(chunk2mem(p))); in do_check_malloced_chunk()
1466 assert(prev_inuse(p)); in do_check_malloced_chunk()
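do_check_inuse_chunk (1432-1435) verifies the complementary adjacency invariant: when the chunk below is free, following its boundary tag down and its size back up must land exactly on p; do_check_malloced_chunk then layers request-size and alignment checks on top. The same check as a sketch:

    static void adjacency_demo(mchunkptr p) {
      if (!prev_inuse(p)) {              /* chunk below is free           */
        mchunkptr prv = prev_chunk(p);   /* down via the boundary tag     */
        assert(next_chunk(prv) == p);    /* up via its size: must agree   */
      }
    }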
1567 mchunkptr p; in mmap_chunk() local
1581 p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE, in mmap_chunk()
1589 p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE, fd, 0); in mmap_chunk()
1592 if(p == (mchunkptr)-1) return 0; in mmap_chunk()
1598 assert(aligned_OK(chunk2mem(p))); in mmap_chunk()
1604 p->prev_size = 0; in mmap_chunk()
1605 set_head(p, size|IS_MMAPPED); in mmap_chunk()
1612 return p; in mmap_chunk()
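What the mmap path at 1567-1612 amounts to: round the request up to a whole number of pages, map anonymous read/write memory (the two mmap calls in the listing are the MAP_ANONYMOUS and /dev/zero variants), and brand the chunk IS_MMAPPED so fREe later unmaps it instead of binning it. A hedged sketch; the exact rounding expression is an assumption:

    #include <sys/mman.h>

    static mchunkptr mmap_chunk_sketch(size_t size, size_t pagesz) {
      size = (size + SIZE_SZ + pagesz - 1) & ~(pagesz - 1); /* page-align  */
      mchunkptr p = (mchunkptr)mmap(0, size, PROT_READ | PROT_WRITE,
                                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (p == (mchunkptr)MAP_FAILED) return 0;
      p->prev_size = 0;                  /* chunk starts at mapping base   */
      set_head(p, size | IS_MMAPPED);    /* flag tells fREe to munmap      */
      return p;
    }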
1623 STATIC void munmap_chunk(mchunkptr p) in munmap_chunk() argument
1625 INTERNAL_SIZE_T size = chunksize(p); in munmap_chunk()
1628 assert (chunk_is_mmapped(p)); in munmap_chunk()
1629 assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem)); in munmap_chunk()
1631 assert(((p->prev_size + size) & (malloc_getpagesize-1)) == 0); in munmap_chunk()
1634 mmapped_mem -= (size + p->prev_size); in munmap_chunk()
1636 ret = munmap((char *)p - p->prev_size, size + p->prev_size); in munmap_chunk()
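The assert at 1631 documents the arithmetic: for an mmapped chunk, prev_size records how far into the mapping the header sits (nonzero after mEMALIGn trims the front), so offset plus size must be an exact page multiple, and the munmap call rewinds by that offset to release the whole original mapping. Restated as a sketch:

    static void munmap_chunk_sketch(mchunkptr p, size_t pagesz) {
      INTERNAL_SIZE_T size = chunksize(p);
      assert(((p->prev_size + size) & (pagesz - 1)) == 0); /* whole pages  */
      int ret = munmap((char *)p - p->prev_size,           /* mapping base */
                       size + p->prev_size);               /* full length  */
      assert(ret == 0);                  /* mapping must have been valid   */
    }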
1652 static mchunkptr mremap_chunk(mchunkptr p, size_t new_size) in mremap_chunk() argument
1655 INTERNAL_SIZE_T offset = p->prev_size; in mremap_chunk()
1656 INTERNAL_SIZE_T size = chunksize(p); in mremap_chunk()
1659 assert (chunk_is_mmapped(p)); in mremap_chunk()
1660 assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem)); in mremap_chunk()
1667 cp = (char *)mremap((char *)p - offset, size + offset, new_size, 1); in mremap_chunk()
1671 p = (mchunkptr)(cp + offset); in mremap_chunk()
1673 assert(aligned_OK(chunk2mem(p))); in mremap_chunk()
1675 assert((p->prev_size == offset)); in mremap_chunk()
1676 set_head(p, (new_size - offset)|IS_MMAPPED); in mremap_chunk()
1684 return p; in mremap_chunk()
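The resize path at 1652-1684 grows or shrinks the whole mapping, including the offset bytes in front of the chunk; the literal 1 passed as the last mremap argument at 1667 is Linux's MREMAP_MAYMOVE, so the kernel may relocate the region and the chunk pointer must be recomputed from the new base. Sketch; the new_size rounding is an assumption:

    #define _GNU_SOURCE                  /* mremap is Linux-specific       */
    #include <sys/mman.h>

    static mchunkptr mremap_chunk_sketch(mchunkptr p, size_t new_size,
                                         size_t pagesz) {
      INTERNAL_SIZE_T offset = p->prev_size;
      INTERNAL_SIZE_T size = chunksize(p);
      new_size = (new_size + offset + SIZE_SZ + pagesz - 1) & ~(pagesz - 1);
      char *cp = (char *)mremap((char *)p - offset, size + offset,
                                new_size, MREMAP_MAYMOVE);
      if (cp == (char *)MAP_FAILED) return 0;
      p = (mchunkptr)(cp + offset);      /* header keeps its offset        */
      assert(p->prev_size == offset);    /* offset moved with the data     */
      set_head(p, (new_size - offset) | IS_MMAPPED);
      return p;
    }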
2194 mchunkptr p; /* chunk corresponding to mem */ in fREe()
2210 p = mem2chunk(mem); in fREe()
2211 hd = p->size; in fREe()
2216 munmap_chunk(p); in fREe()
2222 check_inuse_chunk(p); in fREe()
2225 next = chunk_at_offset(p, sz); in fREe()
2234 prevsz = p->prev_size; in fREe()
2235 p = chunk_at_offset(p, -prevsz); in fREe()
2237 unlink(p, bck, fwd); in fREe()
2240 set_head(p, sz | PREV_INUSE); in fREe()
2241 top = p; in fREe()
2254 prevsz = p->prev_size; in fREe()
2255 p = chunk_at_offset(p, -prevsz); in fREe()
2258 if (p->fd == last_remainder) /* keep as last_remainder */ in fREe()
2261 unlink(p, bck, fwd); in fREe()
2271 link_last_remainder(p); in fREe()
2278 set_head(p, sz | PREV_INUSE); in fREe()
2279 set_foot(p, sz); in fREe()
2281 frontlink(p, sz, idx, bck, fwd); in fREe()
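The two prevsz / chunk_at_offset(p, -prevsz) pairs above (2234-2235 and 2254-2255) are backward coalescing: a clear PREV_INUSE bit means the chunk below is free, so p slides back over it by its boundary tag, the sizes are folded together, and unlink takes the old neighbour off its bin before the merged chunk is rewritten with set_head/set_foot. A hypothetical re-enactment of the arithmetic, bin bookkeeping omitted:

    static void coalesce_backward_demo(void) {
      static _Alignas(2 * sizeof(size_t)) char arena[96];
      mchunkptr prev = (mchunkptr)arena;
      set_head(prev, 32 | PREV_INUSE);        /* free 32-byte chunk below   */
      mchunkptr p = chunk_at_offset(prev, 32);
      p->prev_size = 32;                      /* its boundary tag           */
      set_head(p, 32);                        /* PREV_INUSE clear           */

      INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
      if (!prev_inuse(p)) {                   /* chunk below is free        */
        INTERNAL_SIZE_T prevsz = p->prev_size;
        p = chunk_at_offset(p, -(long)prevsz);/* rewind over it             */
        sz += prevsz;                         /* merged size                */
        /* real code: unlink(p, bck, fwd) removes it from its bin here */
      }
      set_head(p, sz | PREV_INUSE);           /* merged header              */
      set_foot(p, sz);                        /* merged boundary tag        */
      assert(p == prev && chunksize(p) == 64);
    }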
2592 mchunkptr p; /* corresponding chunk */ in mEMALIGn() local
2625 p = mem2chunk(m); in mEMALIGn()
2630 if(chunk_is_mmapped(p)) in mEMALIGn()
2633 return chunk2mem(p); /* nothing more to do */ in mEMALIGn()
2649 if ((long)(brk - (char*)(p)) < (long)MINSIZE) brk = brk + alignment; in mEMALIGn()
2652 leadsize = brk - (char*)(p); in mEMALIGn()
2653 newsize = chunksize(p) - leadsize; in mEMALIGn()
2656 if(chunk_is_mmapped(p)) in mEMALIGn()
2658 newp->prev_size = p->prev_size + leadsize; in mEMALIGn()
2669 set_head_size(p, leadsize); in mEMALIGn()
2670 __malloc_free(chunk2mem(p)); in mEMALIGn()
2671 p = newp; in mEMALIGn()
2673 assert (newsize >= nb && (((unsigned long)(chunk2mem(p))) % alignment) == 0); in mEMALIGn()
2678 remainder_size = long_sub_size_t(chunksize(p), nb); in mEMALIGn()
2682 remainder = chunk_at_offset(p, nb); in mEMALIGn()
2684 set_head_size(p, nb); in mEMALIGn()
2688 check_inuse_chunk(p); in mEMALIGn()
2690 return chunk2mem(p); in mEMALIGn()
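The core of mEMALIGn at 2649-2653: malloc a padded block, round the user address up to the (power-of-two) alignment, back up 2*SIZE_SZ to place the new header, and if the skipped lead-in is smaller than MINSIZE, advance by one more alignment unit so it can stand alone as a chunk; the lead-in is then shrunk with set_head_size and returned to the free lists. A sketch of just the rounding step, MINSIZE handling elided:

    static mchunkptr align_chunk_sketch(void *m, size_t alignment) {
      /* assumes alignment is a power of two >= 2*SIZE_SZ */
      char *brk = (char *)mem2chunk((void *)
          (((size_t)m + alignment - 1) & ~(alignment - 1)));
      mchunkptr newp = (mchunkptr)brk;
      assert(((size_t)chunk2mem(newp)) % alignment == 0);
      return newp;
    }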
2745 mchunkptr p; in cALLOc() local
2780 p = mem2chunk(mem); in cALLOc()
2786 if (chunk_is_mmapped(p)) in cALLOc()
2795 csz = chunksize(p); in cALLOc()
2798 if (p == oldtop && csz > oldtopsize) in cALLOc()
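The oldtop comparison at 2798 is a zero-fill shortcut: when the new chunk was carved from the old top chunk and reaches past its former end, the excess came straight from the kernel and is already zeroed, so only the first oldtopsize bytes need clearing. Hedged sketch; the csz - SIZE_SZ usable-length expression is an assumption consistent with malloc_usable_size below:

    #include <string.h>

    static void calloc_zero_sketch(void *mem, mchunkptr p,
                                   mchunkptr oldtop,
                                   INTERNAL_SIZE_T oldtopsize) {
      INTERNAL_SIZE_T csz = chunksize(p);
      if (p == oldtop && csz > oldtopsize)
        csz = oldtopsize;              /* beyond old top: kernel-zeroed      */
      memset(mem, 0, csz - SIZE_SZ);   /* clear only the possibly dirty part */
    }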
2937 mchunkptr p; in malloc_usable_size() local
2942 p = mem2chunk(mem); in malloc_usable_size()
2943 if(!chunk_is_mmapped(p)) in malloc_usable_size()
2945 if (!inuse(p)) return 0; in malloc_usable_size()
2948 check_inuse_chunk(p); in malloc_usable_size()
2951 return chunksize(p) - SIZE_SZ; in malloc_usable_size()
2953 return chunksize(p) - 2*SIZE_SZ; in malloc_usable_size()
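The two return expressions encode where the overhead lives: an ordinary in-use chunk lends its user the successor's prev_size word (that field is only needed while the chunk is free), so the net overhead is a single SIZE_SZ; an mmapped chunk has no successor to borrow from and pays both header words. As a sketch:

    static size_t usable_size_sketch(mchunkptr p) {
      if (!chunk_is_mmapped(p))
        return chunksize(p) - SIZE_SZ;   /* successor's prev_size is ours */
      return chunksize(p) - 2*SIZE_SZ;   /* mmapped: full header overhead */
    }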
2970 mchunkptr p; in malloc_update_mallinfo() local
2981 for (p = last(b); p != b; p = p->bk) in malloc_update_mallinfo()
2984 check_free_chunk(p); in malloc_update_mallinfo()
2985 for (q = next_chunk(p); in malloc_update_mallinfo()
2990 avail += chunksize(p); in malloc_update_mallinfo()
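The statistics walk at 2981-2990 relies on each bin being a circular list headed by b: starting at last(b) and following bk pointers visits every free chunk exactly once, checking it and summing chunksize(p) into avail. A hypothetical helper with the same shape; last(b) is assumed to be (b)->bk, as in dlmalloc:

    #define last(b) ((b)->bk)

    static INTERNAL_SIZE_T bin_free_bytes(mchunkptr b) {
      INTERNAL_SIZE_T avail = 0;
      mchunkptr p;
      for (p = last(b); p != b; p = p->bk)  /* circular: stop at header */
        avail += chunksize(p);
      return avail;
    }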