heap.c 77.1 KB
Newer Older
1 2 3 4 5
/*
 * Win32 heap functions
 *
 * Copyright 1996 Alexandre Julliard
 * Copyright 1998 Ulrich Weigand
6 7 8 9 10 11 12 13 14 15 16 17 18
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
19
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
20 21 22
 */

#include "config.h"
23
#include "wine/port.h"
24 25 26

#include <assert.h>
#include <stdlib.h>
27
#include <stdarg.h>
28 29
#include <stdio.h>
#include <string.h>
30 31
#ifdef HAVE_VALGRIND_MEMCHECK_H
#include <valgrind/memcheck.h>
32 33
#else
#define RUNNING_ON_VALGRIND 0
34
#endif
35

36 37
#define NONAMELESSUNION
#define NONAMELESSSTRUCT
38
#include "ntstatus.h"
39
#define WIN32_NO_STATUS
40
#include "windef.h"
41
#include "winnt.h"
42
#include "winternl.h"
43 44 45
#include "wine/list.h"
#include "wine/debug.h"
#include "wine/server.h"
46

47
WINE_DEFAULT_DEBUG_CHANNEL(heap);
48

Juan Lang's avatar
Juan Lang committed
49
/* Note: the heap data structures are loosely based on what Pietrek describes in his
50 51
 * book 'Windows 95 System Programming Secrets', with some adaptations for
 * better compatibility with NT.
52 53 54 55 56
 */

/* Header prepended to every allocated (in-use) block; the user data
 * follows immediately after this structure. */
typedef struct tagARENA_INUSE
{
    DWORD  size;                    /* Block size; must be the first field */
    DWORD  magic : 24;              /* Magic number */
    DWORD  unused_bytes : 8;        /* Number of bytes in the block not used by user data (max value is HEAP_MIN_DATA_SIZE+HEAP_MIN_SHRINK_SIZE) */
} ARENA_INUSE;

/* Header of a free block; larger than ARENA_INUSE because it also links
 * the block into its size-class free list. */
typedef struct tagARENA_FREE
{
    DWORD                 size;     /* Block size; must be the first field */
    DWORD                 magic;    /* Magic number */
    struct list           entry;    /* Entry in free list */
} ARENA_FREE;

/* Header of a large block allocated directly from virtual memory,
 * outside of any sub-heap. */
typedef struct
{
    struct list           entry;      /* entry in heap large blocks list */
    SIZE_T                data_size;  /* size of user data */
    SIZE_T                block_size; /* total size of virtual memory block */
    DWORD                 pad[2];     /* padding to ensure 16-byte alignment of data */
    DWORD                 size;       /* fields for compatibility with normal arenas */
    DWORD                 magic;      /* these must remain at the end of the structure */
} ARENA_LARGE;

78 79 80
#define ARENA_FLAG_FREE        0x00000001  /* flags OR'ed with arena size */
#define ARENA_FLAG_PREV_FREE   0x00000002
#define ARENA_SIZE_MASK        (~3)
81 82 83 84
#define ARENA_LARGE_SIZE       0xfedcba90  /* magic value for 'size' field in large blocks */

/* Value for arena 'magic' field */
#define ARENA_INUSE_MAGIC      0x455355
85
#define ARENA_PENDING_MAGIC    0xbedead
86 87
#define ARENA_FREE_MAGIC       0x45455246
#define ARENA_LARGE_MAGIC      0x6752614c
88 89

#define ARENA_INUSE_FILLER     0x55
90
#define ARENA_TAIL_FILLER      0xab
91
#define ARENA_FREE_FILLER      0xfeeefeee
92

93 94
/* everything is aligned on 8 byte boundaries (16 for Win64) */
#define ALIGNMENT              (2*sizeof(void*))
95
#define LARGE_ALIGNMENT        16  /* large blocks have stricter alignment */
96
#define ARENA_OFFSET           (ALIGNMENT - sizeof(ARENA_INUSE))
97

98 99
C_ASSERT( sizeof(ARENA_LARGE) % LARGE_ALIGNMENT == 0 );

100
#define ROUND_SIZE(size)       ((((size) + ALIGNMENT - 1) & ~(ALIGNMENT-1)) + ARENA_OFFSET)
101

102 103 104
#define QUIET                  1           /* Suppress messages  */
#define NOISY                  0           /* Report all errors  */

105
/* minimum data size (without arenas) of an allocated block */
106
/* make sure that it's larger than a free list entry */
107
#define HEAP_MIN_DATA_SIZE    ROUND_SIZE(2 * sizeof(struct list))
108 109
/* minimum size that must remain to shrink an allocated block */
#define HEAP_MIN_SHRINK_SIZE  (HEAP_MIN_DATA_SIZE+sizeof(ARENA_FREE))
110 111
/* minimum size to start allocating large blocks */
#define HEAP_MIN_LARGE_BLOCK_SIZE  0x7f000
112
/* extra size to add at the end of block for tail checking */
113 114
#define HEAP_TAIL_EXTRA_SIZE(flags) \
    ((flags & HEAP_TAIL_CHECKING_ENABLED) || RUNNING_ON_VALGRIND ? ALIGNMENT : 0)
115

116
/* Max size of the blocks on the free lists */
117
static const SIZE_T HEAP_freeListSizes[] =
118
{
119
    0x10, 0x20, 0x30, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x1000, ~0UL
120
};
121
#define HEAP_NB_FREE_LISTS  (sizeof(HEAP_freeListSizes)/sizeof(HEAP_freeListSizes[0]))
122

123
/* Sentinel arena heading each free list; the union pads the entry so the
 * lists can be stored as a contiguous, properly aligned array. */
typedef union
{
    ARENA_FREE  arena;
    void       *alignment[4];
} FREE_LIST_ENTRY;

struct tagHEAP;

typedef struct tagSUBHEAP
{
    void               *base;       /* Base address of the sub-heap memory block */
    SIZE_T              size;       /* Size of the whole sub-heap */
    SIZE_T              min_commit; /* Minimum committed size */
    SIZE_T              commitSize; /* Committed size of the sub-heap */
    struct list         entry;      /* Entry in sub-heap list */
    struct tagHEAP     *heap;       /* Main heap structure */
    DWORD               headerSize; /* Size of the heap header */
    DWORD               magic;      /* Magic number */
} SUBHEAP;

#define SUBHEAP_MAGIC    ((DWORD)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))

typedef struct tagHEAP
{
    /* NOTE(review): unknown1/unknown2 presumably pad the layout for binary
     * compatibility with the native heap structure — confirm before reuse. */
    DWORD_PTR        unknown1[2];
    DWORD            unknown2;
    DWORD            flags;         /* Heap flags */
    DWORD            force_flags;   /* Forced heap flags for debugging */
    SUBHEAP          subheap;       /* First sub-heap */
    struct list      entry;         /* Entry in process heap list */
    struct list      subheap_list;  /* Sub-heap list */
    struct list      large_list;    /* Large blocks list */
    SIZE_T           grow_size;     /* Size of next subheap for growing heap */
    DWORD            magic;         /* Magic number */
    DWORD            pending_pos;   /* Position in pending free requests ring */
    ARENA_INUSE    **pending_free;  /* Ring buffer for pending free requests */
    RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
    FREE_LIST_ENTRY *freeList;      /* Free lists */
} HEAP;

#define HEAP_MAGIC       ((DWORD)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))

#define HEAP_DEF_SIZE        0x110000   /* Default heap size = 1Mb + 64Kb */
#define COMMIT_MASK          0xffff  /* bitmask for commit/decommit granularity */
167
#define MAX_FREE_PENDING     1024    /* max number of free requests to delay */
168

169 170 171 172 173 174
/* some undocumented flags (names are made up) */
#define HEAP_PAGE_ALLOCS      0x01000000
#define HEAP_VALIDATE         0x10000000
#define HEAP_VALIDATE_ALL     0x20000000
#define HEAP_VALIDATE_PARAMS  0x40000000

175
static HEAP *processHeap;  /* main process heap */
176 177 178

static BOOL HEAP_IsRealArena( HEAP *heapPtr, DWORD flags, LPCVOID block, BOOL quiet );

179
/* mark a block of memory as free for debugging purposes */
180
static inline void mark_block_free( void *ptr, SIZE_T size, DWORD flags )
181
{
182 183 184 185 186
    if (flags & HEAP_FREE_CHECKING_ENABLED)
    {
        SIZE_T i;
        for (i = 0; i < size / sizeof(DWORD); i++) ((DWORD *)ptr)[i] = ARENA_FREE_FILLER;
    }
187 188 189
#if defined(VALGRIND_MAKE_MEM_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_NOACCESS( ptr, size ));
#elif defined( VALGRIND_MAKE_NOACCESS)
190 191 192 193 194
    VALGRIND_DISCARD( VALGRIND_MAKE_NOACCESS( ptr, size ));
#endif
}

/* mark a block of memory as initialized for debugging purposes */
/* no-op unless built with Valgrind headers; tells the tool the range is defined */
static inline void mark_block_initialized( void *ptr, SIZE_T size )
{
#if defined(VALGRIND_MAKE_MEM_DEFINED)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_DEFINED( ptr, size ));
#elif defined(VALGRIND_MAKE_READABLE)
    /* older Valgrind versions used a different macro name */
    VALGRIND_DISCARD( VALGRIND_MAKE_READABLE( ptr, size ));
#endif
}

/* mark a block of memory as uninitialized for debugging purposes */
/* no-op unless built with Valgrind headers; range becomes writable but undefined */
static inline void mark_block_uninitialized( void *ptr, SIZE_T size )
{
#if defined(VALGRIND_MAKE_MEM_UNDEFINED)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_UNDEFINED( ptr, size ));
#elif defined(VALGRIND_MAKE_WRITABLE)
    /* older Valgrind versions used a different macro name */
    VALGRIND_DISCARD( VALGRIND_MAKE_WRITABLE( ptr, size ));
#endif
}

/* mark a block of memory as a tail block */
/* The tail is the unused space between the end of the user data and the end
 * of the arena; with tail checking enabled it is filled with a known byte so
 * buffer overruns can be detected later. */
static inline void mark_block_tail( void *ptr, SIZE_T size, DWORD flags )
{
    if (flags & HEAP_TAIL_CHECKING_ENABLED)
    {
        /* make the range writable for the memset, then fill it */
        mark_block_uninitialized( ptr, size );
        memset( ptr, ARENA_TAIL_FILLER, size );
    }
#if defined(VALGRIND_MAKE_MEM_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_NOACCESS( ptr, size ));
#elif defined( VALGRIND_MAKE_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_NOACCESS( ptr, size ));
#endif
}

229 230
/* Initialize the contents of a newly created block: zero it for
 * HEAP_ZERO_MEMORY, otherwise optionally fill it with the in-use filler,
 * then mark the tail area after the user data. */
static inline void initialize_block( void *ptr, SIZE_T size, SIZE_T unused, DWORD flags )
{
    char *data = ptr;

    if (!(flags & HEAP_ZERO_MEMORY))
    {
        mark_block_uninitialized( data, size );
        if (flags & HEAP_FREE_CHECKING_ENABLED)
        {
            memset( data, ARENA_INUSE_FILLER, size );
            /* the filler must still read as undefined under Valgrind */
            mark_block_uninitialized( data, size );
        }
    }
    else
    {
        mark_block_initialized( data, size );
        memset( data, 0, size );
    }

    mark_block_tail( data + size, unused, flags );
}
249

250 251 252 253 254 255 256 257 258
/* notify that a new block of memory has been allocated for debugging purposes */
static inline void notify_alloc( void *ptr, SIZE_T size, BOOL init )
{
#ifdef VALGRIND_MALLOCLIKE_BLOCK
    /* redzone size 0; 'init' tells Valgrind whether the contents are defined */
    VALGRIND_MALLOCLIKE_BLOCK( ptr, size, 0, init );
#endif
}

/* notify that a block of memory has been freed for debugging purposes */
static inline void notify_free( void const *ptr )
{
#ifdef VALGRIND_FREELIKE_BLOCK
    VALGRIND_FREELIKE_BLOCK( ptr, 0 );
#endif
}

266 267 268 269 270 271 272
/* notify that a block of memory has been resized in place for debugging purposes */
static inline void notify_realloc( void const *ptr, SIZE_T size_old, SIZE_T size_new )
{
#ifdef VALGRIND_RESIZEINPLACE_BLOCK
    VALGRIND_RESIZEINPLACE_BLOCK( ptr, size_old, size_new, 0 );
#endif
}

273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290
/* Notify Valgrind that every block still allocated in 'subheap' is being
 * freed.  Walks the arena chain from the first block after the sub-heap
 * header to the end of the sub-heap; a no-op when not under Valgrind. */
static void subheap_notify_free_all(SUBHEAP const *subheap)
{
#ifdef VALGRIND_FREELIKE_BLOCK
    char const *ptr = (char const *)subheap->base + subheap->headerSize;

    if (!RUNNING_ON_VALGRIND) return;

    while (ptr < (char const *)subheap->base + subheap->size)
    {
        if (*(const DWORD *)ptr & ARENA_FLAG_FREE)
        {
            ARENA_FREE const *pArena = (ARENA_FREE const *)ptr;
            if (pArena->magic!=ARENA_FREE_MAGIC) ERR("bad free_magic @%p\n", pArena);
            ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
        }
        else
        {
            ARENA_INUSE const *pArena = (ARENA_INUSE const *)ptr;
            /* pending arenas were already notified when queued for delayed free */
            if (pArena->magic == ARENA_INUSE_MAGIC) notify_free(pArena + 1);
            else if (pArena->magic != ARENA_PENDING_MAGIC) ERR("bad inuse_magic @%p\n", pArena);
            ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
        }
    }
#endif
}

299 300 301 302 303 304 305 306 307 308 309
/* Locate the free list whose size class covers a block of the given size.
 * 'size' is the size of the whole block including the arena header; blocks
 * larger than every class land in the last (unbounded) list. */
static inline unsigned int get_freelist_index( SIZE_T size )
{
    unsigned int idx = 0;
    SIZE_T data_size = size - sizeof(ARENA_FREE);

    while (idx < HEAP_NB_FREE_LISTS - 1 && data_size > HEAP_freeListSizes[idx]) idx++;
    return idx;
}

310 311 312 313 314 315
/* Memory protection to use for a heap's virtual memory, depending on
 * whether the heap was created with execute rights. */
static inline ULONG get_protection_type( DWORD flags )
{
    if (flags & HEAP_CREATE_ENABLE_EXECUTE) return PAGE_EXECUTE_READWRITE;
    return PAGE_READWRITE;
}

316 317 318 319
/* Debug info for the process heap critical section; the first fields are
 * filled in once the process heap is created. */
static RTL_CRITICAL_SECTION_DEBUG process_heap_critsect_debug =
{
    0, 0, NULL,  /* will be set later */
    { &process_heap_critsect_debug.ProcessLocksList, &process_heap_critsect_debug.ProcessLocksList },
      0, 0, { (DWORD_PTR)(__FILE__ ": main process heap section") }
};


324 325 326
/***********************************************************************
 *           HEAP_Dump
 *
 * Print the complete layout of a heap to the debug output: the free
 * lists, then every arena of every sub-heap, followed by usage totals.
 */
static void HEAP_Dump( HEAP *heap )
{
    unsigned int i;
    SUBHEAP *subheap;
    char *ptr;

    DPRINTF( "Heap: %p\n", heap );
    DPRINTF( "Next: %p  Sub-heaps:", LIST_ENTRY( heap->entry.next, HEAP, entry ) );
    LIST_FOR_EACH_ENTRY( subheap, &heap->subheap_list, SUBHEAP, entry ) DPRINTF( " %p", subheap );

    DPRINTF( "\nFree lists:\n Block   Stat   Size    Id\n" );
    for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
        DPRINTF( "%p free %08lx prev=%p next=%p\n",
                 &heap->freeList[i].arena, HEAP_freeListSizes[i],
                 LIST_ENTRY( heap->freeList[i].arena.entry.prev, ARENA_FREE, entry ),
                 LIST_ENTRY( heap->freeList[i].arena.entry.next, ARENA_FREE, entry ));

    LIST_FOR_EACH_ENTRY( subheap, &heap->subheap_list, SUBHEAP, entry )
    {
        SIZE_T freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
        DPRINTF( "\n\nSub-heap %p: base=%p size=%08lx committed=%08lx\n",
                 subheap, subheap->base, subheap->size, subheap->commitSize );

        DPRINTF( "\n Block    Arena   Stat   Size    Id\n" );
        ptr = (char *)subheap->base + subheap->headerSize;
        while (ptr < (char *)subheap->base + subheap->size)
        {
            if (*(DWORD *)ptr & ARENA_FLAG_FREE)
            {
                ARENA_FREE *pArena = (ARENA_FREE *)ptr;
                DPRINTF( "%p %08x free %08x prev=%p next=%p\n",
                         pArena, pArena->magic,
                         pArena->size & ARENA_SIZE_MASK,
                         LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry ),
                         LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry ) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_FREE);
                freeSize += pArena->size & ARENA_SIZE_MASK;
            }
            else if (*(DWORD *)ptr & ARENA_FLAG_PREV_FREE)
            {
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                /* the pointer stored just before an arena with PREV_FREE set
                 * points back to the preceding free arena */
                DPRINTF( "%p %08x Used %08x back=%p\n",
                        pArena, pArena->magic, pArena->size & ARENA_SIZE_MASK, *((ARENA_FREE **)pArena - 1) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
            else
            {
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                /* in-use arena, or one sitting in the delayed-free ring */
                DPRINTF( "%p %08x %s %08x\n",
                         pArena, pArena->magic, pArena->magic == ARENA_INUSE_MAGIC ? "used" : "pend",
                         pArena->size & ARENA_SIZE_MASK );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
        }
        DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
	      subheap->size, subheap->commitSize, freeSize, usedSize,
	      arenaSize, (arenaSize * 100) / subheap->size );
    }
}


393
/***********************************************************************
 *           HEAP_DumpEntry
 *
 * Trace the contents of a PROCESS_HEAP_ENTRY (as filled in by heap
 * walking) to the debug output, decoding the known wFlags bits.
 */
static void HEAP_DumpEntry( LPPROCESS_HEAP_ENTRY entry )
{
    WORD rem_flags;
    TRACE( "Dumping entry %p\n", entry );
    TRACE( "lpData\t\t: %p\n", entry->lpData );
    TRACE( "cbData\t\t: %08x\n", entry->cbData);
    TRACE( "cbOverhead\t: %08x\n", entry->cbOverhead);
    TRACE( "iRegionIndex\t: %08x\n", entry->iRegionIndex);
    TRACE( "WFlags\t\t: ");
    if (entry->wFlags & PROCESS_HEAP_REGION)
        TRACE( "PROCESS_HEAP_REGION ");
    if (entry->wFlags & PROCESS_HEAP_UNCOMMITTED_RANGE)
        TRACE( "PROCESS_HEAP_UNCOMMITTED_RANGE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_BUSY)
        TRACE( "PROCESS_HEAP_ENTRY_BUSY ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE)
        TRACE( "PROCESS_HEAP_ENTRY_MOVEABLE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_DDESHARE)
        TRACE( "PROCESS_HEAP_ENTRY_DDESHARE ");
    /* report any flag bits we do not know about */
    rem_flags = entry->wFlags &
        ~(PROCESS_HEAP_REGION | PROCESS_HEAP_UNCOMMITTED_RANGE |
          PROCESS_HEAP_ENTRY_BUSY | PROCESS_HEAP_ENTRY_MOVEABLE|
          PROCESS_HEAP_ENTRY_DDESHARE);
    if (rem_flags)
        TRACE( "Unknown %08x", rem_flags);
    TRACE( "\n");
    if ((entry->wFlags & PROCESS_HEAP_ENTRY_BUSY )
        && (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE))
    {
        /* Treat as block */
        TRACE( "BLOCK->hMem\t\t:%p\n", entry->u.Block.hMem);
    }
    if (entry->wFlags & PROCESS_HEAP_REGION)
    {
        TRACE( "Region.dwCommittedSize\t:%08x\n",entry->u.Region.dwCommittedSize);
        TRACE( "Region.dwUnCommittedSize\t:%08x\n",entry->u.Region.dwUnCommittedSize);
        TRACE( "Region.lpFirstBlock\t:%p\n",entry->u.Region.lpFirstBlock);
        TRACE( "Region.lpLastBlock\t:%p\n",entry->u.Region.lpLastBlock);
    }
}

434 435 436 437 438 439 440 441 442
/***********************************************************************
 *           HEAP_GetPtr
 *
 * Convert a heap handle to a HEAP pointer, checking the magic number
 * and, when HEAP_VALIDATE_ALL is set, the consistency of all arenas.
 *
 * RETURNS
 *	Pointer to the heap
 *	NULL: Failure
 */
static HEAP *HEAP_GetPtr(
             HANDLE heap /* [in] Handle to the heap */
) {
    HEAP *heapPtr = heap;

    if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
    {
        ERR("Invalid heap %p!\n", heap );
        return NULL;
    }
    if (!(heapPtr->flags & HEAP_VALIDATE_ALL)) return heapPtr;
    if (HEAP_IsRealArena( heapPtr, 0, NULL, NOISY )) return heapPtr;

    /* validation failed: dump the heap when tracing, then fail */
    if (TRACE_ON(heap))
    {
        HEAP_Dump( heapPtr );
        assert( FALSE );
    }
    return NULL;
}


/***********************************************************************
 *           HEAP_InsertFreeBlock
 *
 * Insert a free block into the free list.
 */
467
static inline void HEAP_InsertFreeBlock( HEAP *heap, ARENA_FREE *pArena, BOOL last )
468
{
469
    FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( pArena->size + sizeof(*pArena) );
470 471 472 473 474
    if (last)
    {
        /* insert at end of free list, i.e. before the next free list entry */
        pEntry++;
        if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS]) pEntry = heap->freeList;
475
        list_add_before( &pEntry->arena.entry, &pArena->entry );
476 477 478 479
    }
    else
    {
        /* insert at head of free list */
480
        list_add_after( &pEntry->arena.entry, &pArena->entry );
481 482
    }
    pArena->size |= ARENA_FLAG_FREE;
483 484 485 486 487 488 489 490 491 492 493 494
}


/***********************************************************************
 *           HEAP_FindSubHeap
 * Find the sub-heap containing a given address.
 *
 * RETURNS
 *	Pointer: Success
 *	NULL: Failure
 */
static SUBHEAP *HEAP_FindSubHeap(
Eric Pouech's avatar
Eric Pouech committed
495
                const HEAP *heap, /* [in] Heap pointer */
496 497 498 499
                LPCVOID ptr ) /* [in] Address */
{
    SUBHEAP *sub;
    LIST_FOR_EACH_ENTRY( sub, &heap->subheap_list, SUBHEAP, entry )
500
        if ((ptr >= sub->base) &&
501
            ((const char *)ptr < (const char *)sub->base + sub->size - sizeof(ARENA_INUSE)))
502
            return sub;
503 504 505 506 507 508 509
    return NULL;
}


/***********************************************************************
 *           HEAP_Commit
 *
510
 * Make sure the heap storage is committed for a given size in the specified arena.
511
 */
512
static inline BOOL HEAP_Commit( SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T data_size )
513
{
514
    void *ptr = (char *)(pArena + 1) + data_size + sizeof(ARENA_FREE);
515
    SIZE_T size = (char *)ptr - (char *)subheap->base;
516 517 518
    size = (size + COMMIT_MASK) & ~COMMIT_MASK;
    if (size > subheap->size) size = subheap->size;
    if (size <= subheap->commitSize) return TRUE;
519
    size -= subheap->commitSize;
520
    ptr = (char *)subheap->base + subheap->commitSize;
521
    if (NtAllocateVirtualMemory( NtCurrentProcess(), &ptr, 0,
522
                                 &size, MEM_COMMIT, get_protection_type( subheap->heap->flags ) ))
523
    {
524 525
        WARN("Could not commit %08lx bytes at %p for heap %p\n",
                 size, ptr, subheap->heap );
526 527
        return FALSE;
    }
528
    subheap->commitSize += size;
529 530 531 532 533 534 535 536 537 538 539
    return TRUE;
}


/***********************************************************************
 *           HEAP_Decommit
 *
 * If possible, decommit the heap storage from (including) 'ptr'.
 */
static inline BOOL HEAP_Decommit( SUBHEAP *subheap, void *ptr )
{
    char *base = subheap->base;
    SIZE_T keep = (char *)ptr - base;
    SIZE_T release;
    void *addr;

    /* round to next block and add one full block */
    keep = ((keep + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
    keep = max( keep, subheap->min_commit );
    if (keep >= subheap->commitSize) return TRUE;

    release = subheap->commitSize - keep;
    addr = base + keep;
    if (NtFreeVirtualMemory( NtCurrentProcess(), &addr, &release, MEM_DECOMMIT ))
    {
        WARN("Could not decommit %08lx bytes at %p for heap %p\n",
             release, base + keep, subheap->heap );
        return FALSE;
    }
    subheap->commitSize -= release;
    return TRUE;
}


/***********************************************************************
 *           HEAP_CreateFreeBlock
 *
 * Create a free block at a specified address. 'size' is the size of the
 * whole block, including the new arena.  Merges with a following free
 * block and updates the next block's PREV_FREE back-pointer.
 */
static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, SIZE_T size )
{
    ARENA_FREE *pFree;
    char *pEnd;
    BOOL last;
    DWORD flags = subheap->heap->flags;

    /* Create a free arena */
    mark_block_uninitialized( ptr, sizeof(ARENA_FREE) );
    pFree = ptr;
    pFree->magic = ARENA_FREE_MAGIC;

    /* If debugging, erase the freed block content */

    pEnd = (char *)ptr + size;
    if (pEnd > (char *)subheap->base + subheap->commitSize)
        pEnd = (char *)subheap->base + subheap->commitSize;  /* never touch uncommitted pages */
    if (pEnd > (char *)(pFree + 1)) mark_block_free( pFree + 1, pEnd - (char *)(pFree + 1), flags );

    /* Check if next block is free also */

    if (((char *)ptr + size < (char *)subheap->base + subheap->size) &&
        (*(DWORD *)((char *)ptr + size) & ARENA_FLAG_FREE))
    {
        /* Remove the next arena from the free list */
        ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
        list_remove( &pNext->entry );
        /* merge: absorb the next block, header included */
        size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
        mark_block_free( pNext, sizeof(ARENA_FREE), flags );
    }

    /* Set the next block PREV_FREE flag and pointer */

    last = ((char *)ptr + size >= (char *)subheap->base + subheap->size);
    if (!last)
    {
        DWORD *pNext = (DWORD *)((char *)ptr + size);
        *pNext |= ARENA_FLAG_PREV_FREE;
        /* the pointer just before the next arena points back to this free block */
        mark_block_initialized( (ARENA_FREE **)pNext - 1, sizeof( ARENA_FREE * ) );
        *((ARENA_FREE **)pNext - 1) = pFree;
    }

    /* Last, insert the new block into the free list */

    pFree->size = size - sizeof(*pFree);
    HEAP_InsertFreeBlock( subheap->heap, pFree, last );
}


/***********************************************************************
 *           HEAP_MakeInUseBlockFree
 *
 * Turn an in-use block into a free block. Can also decommit the end of
 * the heap, and possibly even free the sub-heap altogether.
 */
static void HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena )
{
    HEAP *heap = subheap->heap;
    ARENA_FREE *pFree;
    SIZE_T size;

    if (heap->pending_free)
    {
        /* Delayed-free mode: queue this arena in the ring buffer and
         * actually free the arena that falls out of the ring, if any. */
        ARENA_INUSE *prev = heap->pending_free[heap->pending_pos];
        heap->pending_free[heap->pending_pos] = pArena;
        heap->pending_pos = (heap->pending_pos + 1) % MAX_FREE_PENDING;
        pArena->magic = ARENA_PENDING_MAGIC;
        mark_block_free( pArena + 1, pArena->size & ARENA_SIZE_MASK, heap->flags );
        if (!prev) return;
        /* from here on, work on the evicted arena instead */
        pArena = prev;
        subheap = HEAP_FindSubHeap( heap, pArena );
    }

    /* Check if we can merge with previous block */

    size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
    if (pArena->size & ARENA_FLAG_PREV_FREE)
    {
        pFree = *((ARENA_FREE **)pArena - 1);
        size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
        /* Remove it from the free list */
        list_remove( &pFree->entry );
    }
    else pFree = (ARENA_FREE *)pArena;

    /* Create a free block */

    HEAP_CreateFreeBlock( subheap, pFree, size );
    size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
    if ((char *)pFree + size < (char *)subheap->base + subheap->size)
        return;  /* Not the last block, so nothing more to do */

    /* Free the whole sub-heap if it's empty and not the original one */

    if (((char *)pFree == (char *)subheap->base + subheap->headerSize) &&
        (subheap != &subheap->heap->subheap))
    {
        void *addr = subheap->base;

        size = 0;  /* size 0 with MEM_RELEASE frees the whole mapping */
        /* Remove the free block from the list */
        list_remove( &pFree->entry );
        /* Remove the subheap from the list */
        list_remove( &subheap->entry );
        /* Free the memory */
        subheap->magic = 0;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
        return;
    }

    /* Decommit the end of the heap */

    if (!(subheap->heap->flags & HEAP_SHARED)) HEAP_Decommit( subheap, pFree + 1 );
}


/***********************************************************************
 *           HEAP_ShrinkBlock
 *
 * Shrink an in-use block.
 */
689
static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T size)
690
{
691
    if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_SHRINK_SIZE)
692 693 694 695 696 697 698 699 700 701
    {
        HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
                              (pArena->size & ARENA_SIZE_MASK) - size );
	/* assign size plus previous arena flags */
        pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
    }
    else
    {
        /* Turn off PREV_FREE flag in next block */
        char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
702
        if (pNext < (char *)subheap->base + subheap->size)
703 704 705 706
            *(DWORD *)pNext &= ~ARENA_FLAG_PREV_FREE;
    }
}

707 708 709 710 711 712 713

/***********************************************************************
 *           allocate_large_block
 *
 * Allocate a block big enough to get its own virtual memory area,
 * outside of any sub-heap, and link it into the heap's large block list.
 */
static void *allocate_large_block( HEAP *heap, DWORD flags, SIZE_T size )
{
    ARENA_LARGE *arena;
    SIZE_T block_size = sizeof(*arena) + ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE(flags);
    LPVOID address = NULL;

    if (block_size < size) return NULL;  /* overflow */
    /* zero_bits = 5: NOTE(review) presumably constrains where the block may
     * be mapped — confirm against NtAllocateVirtualMemory semantics */
    if (NtAllocateVirtualMemory( NtCurrentProcess(), &address, 5,
                                 &block_size, MEM_COMMIT, get_protection_type( flags ) ))
    {
        WARN("Could not allocate block for %08lx bytes\n", size );
        return NULL;
    }
    arena = address;
    arena->data_size = size;
    arena->block_size = block_size;
    arena->size = ARENA_LARGE_SIZE;    /* sentinel values so the block can be */
    arena->magic = ARENA_LARGE_MAGIC;  /* told apart from normal arenas */
    mark_block_tail( (char *)(arena + 1) + size, block_size - sizeof(*arena) - size, flags );
    list_add_tail( &heap->large_list, &arena->entry );
    notify_alloc( arena + 1, size, flags & HEAP_ZERO_MEMORY );
    return arena + 1;
}


/***********************************************************************
 *           free_large_block
 *
 * Release a large block: unlink it from the heap's large block list and
 * return its whole virtual memory area to the system.
 */
static void free_large_block( HEAP *heap, DWORD flags, void *ptr )
{
    ARENA_LARGE *arena = (ARENA_LARGE *)ptr - 1;
    SIZE_T release_size = 0;  /* 0 with MEM_RELEASE frees the entire region */
    LPVOID base = arena;

    list_remove( &arena->entry );
    NtFreeVirtualMemory( NtCurrentProcess(), &base, &release_size, MEM_RELEASE );
}


/***********************************************************************
 *           realloc_large_block
 *
 * Resize a large block.  Resizes in place when the existing virtual
 * memory area is big enough; otherwise allocates a new large block and
 * copies, unless HEAP_REALLOC_IN_PLACE_ONLY forbids moving.
 */
static void *realloc_large_block( HEAP *heap, DWORD flags, void *ptr, SIZE_T size )
{
    ARENA_LARGE *arena = (ARENA_LARGE *)ptr - 1;
    void *new_ptr;

    if (arena->block_size - sizeof(*arena) >= size)
    {
        SIZE_T unused = arena->block_size - sizeof(*arena) - size;

        /* FIXME: we could remap zero-pages instead */
#ifdef VALGRIND_RESIZEINPLACE_BLOCK
        if (RUNNING_ON_VALGRIND)
            notify_realloc( arena + 1, arena->data_size, size );
        else
#endif
        /* growing: initialize the newly exposed bytes; shrinking: just
         * re-mark the (now larger) tail area */
        if (size > arena->data_size)
            initialize_block( (char *)ptr + arena->data_size, size - arena->data_size, unused, flags );
        else
            mark_block_tail( (char *)ptr + size, unused, flags );
        arena->data_size = size;
        return ptr;
    }
    if (flags & HEAP_REALLOC_IN_PLACE_ONLY) return NULL;
    if (!(new_ptr = allocate_large_block( heap, flags, size )))
    {
        WARN("Could not allocate block for %08lx bytes\n", size );
        return NULL;
    }
    memcpy( new_ptr, ptr, arena->data_size );
    free_large_block( heap, flags, ptr );
    notify_free( ptr );
    return new_ptr;
}


/***********************************************************************
 *           find_large_block
 *
 * Look up the large-block arena whose user data starts at 'ptr', or
 * return NULL if 'ptr' is not a large block of this heap.
 */
static ARENA_LARGE *find_large_block( HEAP *heap, const void *ptr )
{
    ARENA_LARGE *arena;

    LIST_FOR_EACH_ENTRY( arena, &heap->large_list, ARENA_LARGE, entry )
    {
        if ((const void *)(arena + 1) == ptr) return arena;
    }
    return NULL;
}


/***********************************************************************
 *           validate_large_arena
 *
 * Sanity-check a large arena header: alignment, magic/size markers,
 * consistency of data_size vs. block_size, and (when tail checking is
 * enabled) the filler bytes after the user data.  Logs with ERR when
 * 'quiet' is NOISY, with WARN otherwise.  Returns FALSE on corruption.
 */
static BOOL validate_large_arena( HEAP *heap, const ARENA_LARGE *arena, BOOL quiet )
{
    DWORD flags = heap->flags;

    /* large arenas are allocated directly with NtAllocateVirtualMemory,
     * so they must start on a page boundary */
    if ((ULONG_PTR)arena % getpagesize())
    {
        if (quiet == NOISY)
        {
            ERR( "Heap %p: invalid large arena pointer %p\n", heap, arena );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        else if (WARN_ON(heap))
        {
            WARN( "Heap %p: unaligned arena pointer %p\n", heap, arena );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        return FALSE;
    }
    if (arena->size != ARENA_LARGE_SIZE || arena->magic != ARENA_LARGE_MAGIC)
    {
        if (quiet == NOISY)
        {
            ERR( "Heap %p: invalid large arena %p values %x/%x\n",
                 heap, arena, arena->size, arena->magic );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        else if (WARN_ON(heap))
        {
            WARN( "Heap %p: invalid large arena %p values %x/%x\n",
                  heap, arena, arena->size, arena->magic );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        return FALSE;
    }
    if (arena->data_size > arena->block_size - sizeof(*arena))
    {
        ERR( "Heap %p: invalid large arena %p size %lx/%lx\n",
             heap, arena, arena->data_size, arena->block_size );
        return FALSE;
    }
    if (flags & HEAP_TAIL_CHECKING_ENABLED)
    {
        /* the slack between data_size and block_size must still hold the
         * tail filler pattern written at allocation time */
        SIZE_T i, unused = arena->block_size - sizeof(*arena) - arena->data_size;
        const unsigned char *data = (const unsigned char *)(arena + 1) + arena->data_size;

        for (i = 0; i < unused; i++)
        {
            if (data[i] == ARENA_TAIL_FILLER) continue;
            ERR("Heap %p: block %p tail overwritten at %p (byte %lu/%lu == 0x%02x)\n",
                heap, arena + 1, data + i, i, unused, data[i] );
            return FALSE;
        }
    }
    return TRUE;
}


862
/***********************************************************************
863
 *           HEAP_CreateSubHeap
864
 */
865 866
static SUBHEAP *HEAP_CreateSubHeap( HEAP *heap, LPVOID address, DWORD flags,
                                    SIZE_T commitSize, SIZE_T totalSize )
867
{
868
    SUBHEAP *subheap;
869
    FREE_LIST_ENTRY *pEntry;
870
    unsigned int i;
871

872
    if (!address)
873
    {
874
        if (!commitSize) commitSize = COMMIT_MASK + 1;
875
        totalSize = min( totalSize, 0xffff0000 );  /* don't allow a heap larger than 4Gb */
876 877
        if (totalSize < commitSize) totalSize = commitSize;
        if (flags & HEAP_SHARED) commitSize = totalSize;  /* always commit everything in a shared heap */
878
        commitSize = min( totalSize, (commitSize + COMMIT_MASK) & ~COMMIT_MASK );
879 880

        /* allocate the memory block */
881
        if (NtAllocateVirtualMemory( NtCurrentProcess(), &address, 5, &totalSize,
882 883 884 885 886 887 888 889 890 891 892
                                     MEM_RESERVE, get_protection_type( flags ) ))
        {
            WARN("Could not allocate %08lx bytes\n", totalSize );
            return NULL;
        }
        if (NtAllocateVirtualMemory( NtCurrentProcess(), &address, 0,
                                     &commitSize, MEM_COMMIT, get_protection_type( flags ) ))
        {
            WARN("Could not commit %08lx bytes for sub-heap %p\n", commitSize, address );
            return NULL;
        }
893 894
    }

895
    if (heap)
896 897 898
    {
        /* If this is a secondary subheap, insert it into list */

899
        subheap = address;
900 901 902
        subheap->base       = address;
        subheap->heap       = heap;
        subheap->size       = totalSize;
903
        subheap->min_commit = 0x10000;
904 905
        subheap->commitSize = commitSize;
        subheap->magic      = SUBHEAP_MAGIC;
906
        subheap->headerSize = ROUND_SIZE( sizeof(SUBHEAP) );
907
        list_add_head( &heap->subheap_list, &subheap->entry );
908 909 910 911 912
    }
    else
    {
        /* If this is a primary subheap, initialize main heap */

913
        heap = address;
914 915
        heap->flags         = flags;
        heap->magic         = HEAP_MAGIC;
916
        heap->grow_size     = max( HEAP_DEF_SIZE, totalSize );
917
        list_init( &heap->subheap_list );
918
        list_init( &heap->large_list );
919

920 921 922 923
        subheap = &heap->subheap;
        subheap->base       = address;
        subheap->heap       = heap;
        subheap->size       = totalSize;
924
        subheap->min_commit = commitSize;
925 926 927
        subheap->commitSize = commitSize;
        subheap->magic      = SUBHEAP_MAGIC;
        subheap->headerSize = ROUND_SIZE( sizeof(HEAP) );
928
        list_add_head( &heap->subheap_list, &subheap->entry );
929

930 931
        /* Build the free lists */

932 933
        heap->freeList = (FREE_LIST_ENTRY *)((char *)heap + subheap->headerSize);
        subheap->headerSize += HEAP_NB_FREE_LISTS * sizeof(FREE_LIST_ENTRY);
934
        list_init( &heap->freeList[0].arena.entry );
935 936 937 938
        for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
        {
            pEntry->arena.size = 0 | ARENA_FLAG_FREE;
            pEntry->arena.magic = ARENA_FREE_MAGIC;
939
            if (i) list_add_after( &pEntry[-1].arena.entry, &pEntry->arena.entry );
940 941 942 943
        }

        /* Initialize critical section */

944 945 946 947 948 949 950 951 952 953
        if (!processHeap)  /* do it by hand to avoid memory allocations */
        {
            heap->critSection.DebugInfo      = &process_heap_critsect_debug;
            heap->critSection.LockCount      = -1;
            heap->critSection.RecursionCount = 0;
            heap->critSection.OwningThread   = 0;
            heap->critSection.LockSemaphore  = 0;
            heap->critSection.SpinCount      = 0;
            process_heap_critsect_debug.CriticalSection = &heap->critSection;
        }
954 955 956 957 958
        else
        {
            RtlInitializeCriticalSection( &heap->critSection );
            heap->critSection.DebugInfo->Spare[0] = (DWORD_PTR)(__FILE__ ": HEAP.critSection");
        }
959

960
        if (flags & HEAP_SHARED)
961 962 963 964 965
        {
            /* let's assume that only one thread at a time will try to do this */
            HANDLE sem = heap->critSection.LockSemaphore;
            if (!sem) NtCreateSemaphore( &sem, SEMAPHORE_ALL_ACCESS, NULL, 0, 1 );

966
            NtDuplicateObject( NtCurrentProcess(), sem, NtCurrentProcess(), &sem, 0, 0,
967 968
                               DUP_HANDLE_MAKE_GLOBAL | DUP_HANDLE_SAME_ACCESS | DUP_HANDLE_CLOSE_SOURCE );
            heap->critSection.LockSemaphore = sem;
969 970
            RtlFreeHeap( processHeap, 0, heap->critSection.DebugInfo );
            heap->critSection.DebugInfo = NULL;
971
        }
972 973 974 975
    }

    /* Create the first free block */

976
    HEAP_CreateFreeBlock( subheap, (LPBYTE)subheap->base + subheap->headerSize,
977 978
                          subheap->size - subheap->headerSize );

979
    return subheap;
980 981 982 983 984 985 986 987 988
}


/***********************************************************************
 *           HEAP_FindFreeBlock
 *
 * Find a free block at least as large as the requested size, and make sure
 * the requested size is committed.
 */
989
static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, SIZE_T size,
990 991 992
                                       SUBHEAP **ppSubHeap )
{
    SUBHEAP *subheap;
993
    struct list *ptr;
994
    SIZE_T total_size;
995
    FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( size + sizeof(ARENA_INUSE) );
996 997 998

    /* Find a suitable free list, and in it find a block large enough */

999 1000
    ptr = &pEntry->arena.entry;
    while ((ptr = list_next( &heap->freeList[0].arena.entry, ptr )))
1001
    {
1002
        ARENA_FREE *pArena = LIST_ENTRY( ptr, ARENA_FREE, entry );
1003
        SIZE_T arena_size = (pArena->size & ARENA_SIZE_MASK) +
1004 1005 1006 1007
                            sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
        if (arena_size >= size)
        {
            subheap = HEAP_FindSubHeap( heap, pArena );
1008
            if (!HEAP_Commit( subheap, (ARENA_INUSE *)pArena, size )) return NULL;
1009 1010 1011 1012 1013 1014 1015 1016 1017
            *ppSubHeap = subheap;
            return pArena;
        }
    }

    /* If no block was found, attempt to grow the heap */

    if (!(heap->flags & HEAP_GROWABLE))
    {
1018
        WARN("Not enough space in heap %p for %08lx bytes\n", heap, size );
1019 1020 1021 1022 1023
        return NULL;
    }
    /* make sure that we have a big enough size *committed* to fit another
     * last free arena in !
     * So just one heap struct, one first free arena which will eventually
1024
     * get used, and a second free arena that might get assigned all remaining
1025
     * free space in HEAP_ShrinkBlock() */
1026 1027 1028
    total_size = size + ROUND_SIZE(sizeof(SUBHEAP)) + sizeof(ARENA_INUSE) + sizeof(ARENA_FREE);
    if (total_size < size) return NULL;  /* overflow */

1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040
    if ((subheap = HEAP_CreateSubHeap( heap, NULL, heap->flags, total_size,
                                       max( heap->grow_size, total_size ) )))
    {
        if (heap->grow_size < 128 * 1024 * 1024) heap->grow_size *= 2;
    }
    else while (!subheap)  /* shrink the grow size again if we are running out of space */
    {
        if (heap->grow_size <= total_size || heap->grow_size <= 4 * 1024 * 1024) return NULL;
        heap->grow_size /= 2;
        subheap = HEAP_CreateSubHeap( heap, NULL, heap->flags, total_size,
                                      max( heap->grow_size, total_size ) );
    }
1041

1042
    TRACE("created new sub-heap %p of %08lx bytes for heap %p\n",
1043
          subheap, subheap->size, heap );
1044 1045

    *ppSubHeap = subheap;
1046
    return (ARENA_FREE *)((char *)subheap->base + subheap->headerSize);
1047 1048 1049 1050 1051 1052 1053 1054
}


/***********************************************************************
 *           HEAP_IsValidArenaPtr
 *
 * Check that the pointer is inside the range possible for arenas.
 */
1055
static BOOL HEAP_IsValidArenaPtr( const HEAP *heap, const ARENA_FREE *ptr )
1056
{
1057
    unsigned int i;
Eric Pouech's avatar
Eric Pouech committed
1058
    const SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
1059
    if (!subheap) return FALSE;
1060
    if ((const char *)ptr >= (const char *)subheap->base + subheap->headerSize) return TRUE;
1061 1062
    if (subheap != &heap->subheap) return FALSE;
    for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
1063
        if (ptr == &heap->freeList[i].arena) return TRUE;
1064 1065 1066 1067 1068 1069 1070 1071 1072
    return FALSE;
}


/***********************************************************************
 *           HEAP_ValidateFreeArena
 *
 * Validate a free arena: alignment, magic, size flags, free-list links
 * (prev/next must be valid free arenas), the following block's
 * PREV_FREE flag and back pointer, and optionally the free filler
 * pattern.  Returns FALSE and logs an ERR on any inconsistency.
 */
static BOOL HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
{
    DWORD flags = subheap->heap->flags;
    SIZE_T size;
    ARENA_FREE *prev, *next;
    char *heapEnd = (char *)subheap->base + subheap->size;

    /* Check for unaligned pointers */
    if ((ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET)
    {
        ERR("Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
        return FALSE;
    }

    /* Check magic number */
    if (pArena->magic != ARENA_FREE_MAGIC)
    {
        ERR("Heap %p: invalid free arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
        return FALSE;
    }
    /* Check size flags */
    if (!(pArena->size & ARENA_FLAG_FREE) ||
        (pArena->size & ARENA_FLAG_PREV_FREE))
    {
        ERR("Heap %p: bad flags %08x for free arena %p\n",
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check arena size */
    size = pArena->size & ARENA_SIZE_MASK;
    if ((char *)(pArena + 1) + size > heapEnd)
    {
        ERR("Heap %p: bad size %08lx for free arena %p\n", subheap->heap, size, pArena );
        return FALSE;
    }
    /* Check that next pointer is valid */
    next = LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, next ))
    {
        ERR("Heap %p: bad next ptr %p for arena %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that next arena is free */
    if (!(next->size & ARENA_FLAG_FREE) || (next->magic != ARENA_FREE_MAGIC))
    {
        ERR("Heap %p: next arena %p invalid for %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that prev pointer is valid */
    prev = LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, prev ))
    {
        ERR("Heap %p: bad prev ptr %p for arena %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that prev arena is free */
    if (!(prev->size & ARENA_FLAG_FREE) || (prev->magic != ARENA_FREE_MAGIC))
    {
        /* this often means that the prev arena got overwritten
         * by a memory write before that prev arena */
        ERR("Heap %p: prev arena %p invalid for %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that next block has PREV_FREE flag */
    if ((char *)(pArena + 1) + size < heapEnd)
    {
        if (!(*(DWORD *)((char *)(pArena + 1) + size) & ARENA_FLAG_PREV_FREE))
        {
            ERR("Heap %p: free arena %p next block has no PREV_FREE flag\n",
                subheap->heap, pArena );
            return FALSE;
        }
        /* Check next block back pointer */
        if (*((ARENA_FREE **)((char *)(pArena + 1) + size) - 1) != pArena)
        {
            ERR("Heap %p: arena %p has wrong back ptr %p\n",
                subheap->heap, pArena,
                *((ARENA_FREE **)((char *)(pArena+1) + size) - 1));
            return FALSE;
        }
    }
    if (flags & HEAP_FREE_CHECKING_ENABLED)
    {
        /* free data must still contain the ARENA_FREE_FILLER pattern,
         * up to the committed end of the sub-heap at most */
        DWORD *ptr = (DWORD *)(pArena + 1);
        char *end = (char *)(pArena + 1) + size;

        if (end >= heapEnd) end = (char *)subheap->base + subheap->commitSize;
        else end -= sizeof(ARENA_FREE *);  /* skip the back pointer stored at the end */
        while (ptr < (DWORD *)end)
        {
            if (*ptr != ARENA_FREE_FILLER)
            {
                ERR("Heap %p: free block %p overwritten at %p by %08x\n",
                    subheap->heap, (ARENA_INUSE *)pArena + 1, ptr, *ptr );
                return FALSE;
            }
            ptr++;
        }
    }
    return TRUE;
}


/***********************************************************************
 *           HEAP_ValidateInUseArena
 */
Eric Pouech's avatar
Eric Pouech committed
1181
static BOOL HEAP_ValidateInUseArena( const SUBHEAP *subheap, const ARENA_INUSE *pArena, BOOL quiet )
1182
{
1183 1184
    SIZE_T size;
    DWORD i, flags = subheap->heap->flags;
1185
    const char *heapEnd = (const char *)subheap->base + subheap->size;
1186 1187

    /* Check for unaligned pointers */
1188
    if ((ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET)
1189 1190 1191
    {
        if ( quiet == NOISY )
        {
1192
            ERR( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
1193 1194 1195 1196 1197
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        else if ( WARN_ON(heap) )
        {
1198
            WARN( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
1199 1200 1201 1202 1203 1204 1205
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }

    /* Check magic number */
1206
    if (pArena->magic != ARENA_INUSE_MAGIC && pArena->magic != ARENA_PENDING_MAGIC)
1207 1208
    {
        if (quiet == NOISY) {
1209
            ERR("Heap %p: invalid in-use arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
1210 1211 1212
            if (TRACE_ON(heap))
               HEAP_Dump( subheap->heap );
        }  else if (WARN_ON(heap)) {
1213
            WARN("Heap %p: invalid in-use arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
1214 1215 1216 1217 1218 1219
            if (TRACE_ON(heap))
               HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }
    /* Check size flags */
1220
    if (pArena->size & ARENA_FLAG_FREE)
1221
    {
1222
        ERR("Heap %p: bad flags %08x for in-use arena %p\n",
1223
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
1224
        return FALSE;
1225 1226
    }
    /* Check arena size */
1227 1228 1229
    size = pArena->size & ARENA_SIZE_MASK;
    if ((const char *)(pArena + 1) + size > heapEnd ||
        (const char *)(pArena + 1) + size < (const char *)(pArena + 1))
1230
    {
1231
        ERR("Heap %p: bad size %08lx for in-use arena %p\n", subheap->heap, size, pArena );
1232 1233 1234
        return FALSE;
    }
    /* Check next arena PREV_FREE flag */
1235 1236
    if (((const char *)(pArena + 1) + size < heapEnd) &&
        (*(const DWORD *)((const char *)(pArena + 1) + size) & ARENA_FLAG_PREV_FREE))
1237
    {
1238 1239
        ERR("Heap %p: in-use arena %p next block %p has PREV_FREE flag %x\n",
            subheap->heap, pArena, (const char *)(pArena + 1) + size,*(const DWORD *)((const char *)(pArena + 1) + size) );
1240 1241 1242 1243 1244
        return FALSE;
    }
    /* Check prev free arena */
    if (pArena->size & ARENA_FLAG_PREV_FREE)
    {
Eric Pouech's avatar
Eric Pouech committed
1245
        const ARENA_FREE *pPrev = *((const ARENA_FREE * const*)pArena - 1);
1246 1247 1248
        /* Check prev pointer */
        if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
        {
1249 1250
            ERR("Heap %p: bad back ptr %p for arena %p\n",
                subheap->heap, pPrev, pArena );
1251 1252 1253 1254 1255
            return FALSE;
        }
        /* Check that prev arena is free */
        if (!(pPrev->size & ARENA_FLAG_FREE) ||
            (pPrev->magic != ARENA_FREE_MAGIC))
1256
        {
1257 1258
            ERR("Heap %p: prev arena %p invalid for in-use %p\n",
                subheap->heap, pPrev, pArena );
1259 1260 1261
            return FALSE;
        }
        /* Check that prev arena is really the previous block */
Eric Pouech's avatar
Eric Pouech committed
1262
        if ((const char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (const char *)pArena)
1263
        {
1264 1265
            ERR("Heap %p: prev arena %p is not prev for in-use %p\n",
                subheap->heap, pPrev, pArena );
1266 1267 1268
            return FALSE;
        }
    }
1269 1270 1271 1272 1273 1274 1275
    /* Check unused size */
    if (pArena->unused_bytes > size)
    {
        ERR("Heap %p: invalid unused size %08x/%08lx\n", subheap->heap, pArena->unused_bytes, size );
        return FALSE;
    }
    /* Check unused bytes */
1276 1277
    if (pArena->magic == ARENA_PENDING_MAGIC)
    {
1278 1279
        const DWORD *ptr = (const DWORD *)(pArena + 1);
        const DWORD *end = (const DWORD *)((const char *)ptr + size);
1280 1281 1282 1283 1284 1285

        while (ptr < end)
        {
            if (*ptr != ARENA_FREE_FILLER)
            {
                ERR("Heap %p: free block %p overwritten at %p by %08x\n",
1286
                    subheap->heap, (const ARENA_INUSE *)pArena + 1, ptr, *ptr );
1287 1288 1289 1290 1291 1292 1293
                if (!*ptr) { HEAP_Dump( subheap->heap ); DbgBreakPoint(); }
                return FALSE;
            }
            ptr++;
        }
    }
    else if (flags & HEAP_TAIL_CHECKING_ENABLED)
1294 1295 1296 1297 1298 1299 1300 1301 1302 1303 1304
    {
        const unsigned char *data = (const unsigned char *)(pArena + 1) + size - pArena->unused_bytes;

        for (i = 0; i < pArena->unused_bytes; i++)
        {
            if (data[i] == ARENA_TAIL_FILLER) continue;
            ERR("Heap %p: block %p tail overwritten at %p (byte %u/%u == 0x%02x)\n",
                subheap->heap, pArena + 1, data + i, i, pArena->unused_bytes, data[i] );
            return FALSE;
        }
    }
1305 1306 1307 1308 1309 1310 1311 1312 1313 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323 1324
    return TRUE;
}


/***********************************************************************
 *           HEAP_IsRealArena  [Internal]
 * Validates a block is a valid arena.
 *
 * If 'block' is given, only that block (normal or large) is checked;
 * otherwise every arena in every sub-heap plus all large blocks are
 * walked.  Serializes on the heap critical section unless
 * HEAP_NO_SERIALIZE is in effect.
 *
 * RETURNS
 *	TRUE: Success
 *	FALSE: Failure
 */
static BOOL HEAP_IsRealArena( HEAP *heapPtr,   /* [in] ptr to the heap */
              DWORD flags,   /* [in] Bit flags that control access during operation */
              LPCVOID block, /* [in] Optional pointer to memory block to validate */
              BOOL quiet )   /* [in] Flag - if true, HEAP_ValidateInUseArena
                              *             does not complain    */
{
    SUBHEAP *subheap;
    BOOL ret = TRUE;
    const ARENA_LARGE *large_arena;

    flags &= HEAP_NO_SERIALIZE;
    flags |= heapPtr->flags;
    /* calling HeapLock may result in infinite recursion, so do the critsect directly */
    if (!(flags & HEAP_NO_SERIALIZE))
        RtlEnterCriticalSection( &heapPtr->critSection );

    if (block)  /* only check this single memory block */
    {
        const ARENA_INUSE *arena = (const ARENA_INUSE *)block - 1;

        if (!(subheap = HEAP_FindSubHeap( heapPtr, arena )) ||
            ((const char *)arena < (char *)subheap->base + subheap->headerSize))
        {
            /* not in any sub-heap: it may still be a large block */
            if (!(large_arena = find_large_block( heapPtr, block )))
            {
                if (quiet == NOISY)
                    ERR("Heap %p: block %p is not inside heap\n", heapPtr, block );
                else if (WARN_ON(heap))
                    WARN("Heap %p: block %p is not inside heap\n", heapPtr, block );
                ret = FALSE;
            }
            else
                ret = validate_large_arena( heapPtr, large_arena, quiet );
        } else
            ret = HEAP_ValidateInUseArena( subheap, arena, quiet );

        if (!(flags & HEAP_NO_SERIALIZE))
            RtlLeaveCriticalSection( &heapPtr->critSection );
        return ret;
    }

    LIST_FOR_EACH_ENTRY( subheap, &heapPtr->subheap_list, SUBHEAP, entry )
    {
        char *ptr = (char *)subheap->base + subheap->headerSize;
        while (ptr < (char *)subheap->base + subheap->size)
        {
            if (*(DWORD *)ptr & ARENA_FLAG_FREE)
            {
                if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr )) {
                    ret = FALSE;
                    break;
                }
                ptr += sizeof(ARENA_FREE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
            }
            else
            {
                if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY )) {
                    ret = FALSE;
                    break;
                }
                ptr += sizeof(ARENA_INUSE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
            }
        }
        if (!ret) break;
    }

    LIST_FOR_EACH_ENTRY( large_arena, &heapPtr->large_list, ARENA_LARGE, entry )
        if (!(ret = validate_large_arena( heapPtr, large_arena, quiet ))) break;

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    return ret;
}


1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 1406 1407 1408 1409 1410 1411 1412 1413 1414
/***********************************************************************
 *           validate_block_pointer
 *
 * Minimum validation needed to catch bad parameters in heap functions.
 */
static BOOL validate_block_pointer( HEAP *heap, SUBHEAP **ret_subheap, const ARENA_INUSE *arena )
{
    SUBHEAP *subheap;
    BOOL ret = FALSE;

    if (!(*ret_subheap = subheap = HEAP_FindSubHeap( heap, arena )))
    {
        ARENA_LARGE *large_arena = find_large_block( heap, arena + 1 );

        if (!large_arena)
        {
            WARN( "Heap %p: pointer %p is not inside heap\n", heap, arena + 1 );
            return FALSE;
        }
        if ((heap->flags & HEAP_VALIDATE) && !validate_large_arena( heap, large_arena, QUIET ))
            return FALSE;
        return TRUE;
    }

1415
    if ((const char *)arena < (char *)subheap->base + subheap->headerSize)
1416 1417 1418 1419 1420
        WARN( "Heap %p: pointer %p is inside subheap %p header\n", subheap->heap, arena + 1, subheap );
    else if (subheap->heap->flags & HEAP_VALIDATE)  /* do the full validation */
        ret = HEAP_ValidateInUseArena( subheap, arena, QUIET );
    else if ((ULONG_PTR)arena % ALIGNMENT != ARENA_OFFSET)
        WARN( "Heap %p: unaligned arena pointer %p\n", subheap->heap, arena );
1421 1422
    else if (arena->magic == ARENA_PENDING_MAGIC)
        WARN( "Heap %p: block %p used after free\n", subheap->heap, arena + 1 );
1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438
    else if (arena->magic != ARENA_INUSE_MAGIC)
        WARN( "Heap %p: invalid in-use arena magic %08x for %p\n", subheap->heap, arena->magic, arena );
    else if (arena->size & ARENA_FLAG_FREE)
        ERR( "Heap %p: bad flags %08x for in-use arena %p\n",
             subheap->heap, arena->size & ~ARENA_SIZE_MASK, arena );
    else if ((const char *)(arena + 1) + (arena->size & ARENA_SIZE_MASK) > (const char *)subheap->base + subheap->size ||
             (const char *)(arena + 1) + (arena->size & ARENA_SIZE_MASK) < (const char *)(arena + 1))
        ERR( "Heap %p: bad size %08x for in-use arena %p\n",
             subheap->heap, arena->size & ARENA_SIZE_MASK, arena );
    else
        ret = TRUE;

    return ret;
}


1439 1440 1441 1442 1443 1444 1445 1446 1447
/***********************************************************************
 *           heap_set_debug_flags
 *
 * Apply the process-wide heap debug settings (NtGlobalFlags, plus the
 * "heap" debug channel) to an existing heap, and retroactively fix up
 * the tails / free areas of blocks that were allocated before the
 * checking flags were turned on.
 */
void heap_set_debug_flags( HANDLE handle )
{
    HEAP *heap = HEAP_GetPtr( handle );
    ULONG global_flags = RtlGetNtGlobalFlags();
    ULONG flags = 0;

    /* the heap debug channel implies full validation */
    if (TRACE_ON(heap)) global_flags |= FLG_HEAP_VALIDATE_ALL;
    if (WARN_ON(heap)) global_flags |= FLG_HEAP_VALIDATE_PARAMETERS;

    /* translate the FLG_* global flags into per-heap HEAP_* flags */
    if (global_flags & FLG_HEAP_ENABLE_TAIL_CHECK) flags |= HEAP_TAIL_CHECKING_ENABLED;
    if (global_flags & FLG_HEAP_ENABLE_FREE_CHECK) flags |= HEAP_FREE_CHECKING_ENABLED;
    if (global_flags & FLG_HEAP_DISABLE_COALESCING) flags |= HEAP_DISABLE_COALESCE_ON_FREE;
    if (global_flags & FLG_HEAP_PAGE_ALLOCS) flags |= HEAP_PAGE_ALLOCS | HEAP_GROWABLE;

    if (global_flags & FLG_HEAP_VALIDATE_PARAMETERS)
        flags |= HEAP_VALIDATE | HEAP_VALIDATE_PARAMS |
                 HEAP_TAIL_CHECKING_ENABLED | HEAP_FREE_CHECKING_ENABLED;
    if (global_flags & FLG_HEAP_VALIDATE_ALL)
        flags |= HEAP_VALIDATE | HEAP_VALIDATE_ALL |
                 HEAP_TAIL_CHECKING_ENABLED | HEAP_FREE_CHECKING_ENABLED;

    if (RUNNING_ON_VALGRIND) flags = 0; /* no sense in validating since Valgrind catches accesses */

    heap->flags |= flags;
    /* force_flags are merged into caller flags on every call; validation
     * and coalescing behavior are checked from heap->flags instead */
    heap->force_flags |= flags & ~(HEAP_VALIDATE | HEAP_DISABLE_COALESCE_ON_FREE);

    if (flags & (HEAP_FREE_CHECKING_ENABLED | HEAP_TAIL_CHECKING_ENABLED))  /* fix existing blocks */
    {
        SUBHEAP *subheap;
        ARENA_LARGE *large;

        /* walk every committed arena of every subheap and stamp the
         * free-fill / tail-fill patterns expected by the new flags */
        LIST_FOR_EACH_ENTRY( subheap, &heap->subheap_list, SUBHEAP, entry )
        {
            char *ptr = (char *)subheap->base + subheap->headerSize;
            char *end = (char *)subheap->base + subheap->commitSize;
            while (ptr < end)
            {
                ARENA_INUSE *arena = (ARENA_INUSE *)ptr;
                SIZE_T size = arena->size & ARENA_SIZE_MASK;
                if (arena->size & ARENA_FLAG_FREE)
                {
                    SIZE_T count = size;

                    ptr += sizeof(ARENA_FREE) + size;
                    /* the last DWORD of a free block holds the next arena's
                     * size field, so don't overwrite it unless this free
                     * block extends to the end of the committed area */
                    if (ptr >= end) count = end - (char *)((ARENA_FREE *)arena + 1);
                    else count -= sizeof(ARENA_FREE *);
                    mark_block_free( (ARENA_FREE *)arena + 1, count, flags );
                }
                else
                {
                    /* pending-free blocks are filled like free blocks;
                     * live blocks only get their unused tail marked */
                    if (arena->magic == ARENA_PENDING_MAGIC)
                        mark_block_free( arena + 1, size, flags );
                    else
                        mark_block_tail( (char *)(arena + 1) + size - arena->unused_bytes,
                                         arena->unused_bytes, flags );
                    ptr += sizeof(ARENA_INUSE) + size;
                }
            }
        }

        /* large blocks live outside the subheaps; fix their tails too */
        LIST_FOR_EACH_ENTRY( large, &heap->large_list, ARENA_LARGE, entry )
            mark_block_tail( (char *)(large + 1) + large->data_size,
                             large->block_size - sizeof(*large) - large->data_size, flags );
    }

    /* delayed-free queue: keeps freed blocks around so use-after-free
     * can be detected (always useful under Valgrind) */
    if ((heap->flags & HEAP_GROWABLE) && !heap->pending_free &&
        ((flags & HEAP_FREE_CHECKING_ENABLED) || RUNNING_ON_VALGRIND))
    {
        void *ptr = NULL;
        SIZE_T size = MAX_FREE_PENDING * sizeof(*heap->pending_free);

        if (!NtAllocateVirtualMemory( NtCurrentProcess(), &ptr, 4, &size, MEM_COMMIT, PAGE_READWRITE ))
        {
            heap->pending_free = ptr;
            heap->pending_pos = 0;
        }
    }
}


1522 1523
/***********************************************************************
 *           RtlCreateHeap   (NTDLL.@)
Jon Griffiths's avatar
Jon Griffiths committed
1524 1525 1526 1527 1528 1529 1530 1531 1532 1533 1534 1535 1536 1537
 *
 * Create a new Heap.
 *
 * PARAMS
 *  flags      [I] HEAP_ flags from "winnt.h"
 *  addr       [I] Desired base address
 *  totalSize  [I] Total size of the heap, or 0 for a growable heap
 *  commitSize [I] Amount of heap space to commit
 *  unknown    [I] Not yet understood
 *  definition [I] Heap definition
 *
 * RETURNS
 *  Success: A HANDLE to the newly created heap.
 *  Failure: a NULL HANDLE.
1538
 */
1539
HANDLE WINAPI RtlCreateHeap( ULONG flags, PVOID addr, SIZE_T totalSize, SIZE_T commitSize,
1540 1541 1542 1543 1544 1545 1546 1547 1548 1549 1550 1551 1552 1553
                             PVOID unknown, PRTL_HEAP_DEFINITION definition )
{
    SUBHEAP *subheap;

    /* Allocate the heap block */

    if (!totalSize)
    {
        totalSize = HEAP_DEF_SIZE;
        flags |= HEAP_GROWABLE;
    }

    if (!(subheap = HEAP_CreateSubHeap( NULL, addr, flags, commitSize, totalSize ))) return 0;

1554 1555
    heap_set_debug_flags( subheap->heap );

1556 1557 1558 1559
    /* link it into the per-process heap list */
    if (processHeap)
    {
        HEAP *heapPtr = subheap->heap;
1560 1561 1562 1563
        RtlEnterCriticalSection( &processHeap->critSection );
        list_add_head( &processHeap->entry, &heapPtr->entry );
        RtlLeaveCriticalSection( &processHeap->critSection );
    }
1564
    else if (!addr)
1565 1566 1567
    {
        processHeap = subheap->heap;  /* assume the first heap we create is the process main heap */
        list_init( &processHeap->entry );
1568
    }
1569

1570
    return subheap->heap;
1571 1572 1573 1574 1575
}


/***********************************************************************
 *           RtlDestroyHeap   (NTDLL.@)
Jon Griffiths's avatar
Jon Griffiths committed
1576 1577 1578 1579 1580 1581 1582 1583 1584
 *
 * Destroy a Heap created with RtlCreateHeap().
 *
 * PARAMS
 *  heap [I] Heap to destroy.
 *
 * RETURNS
 *  Success: A NULL HANDLE, if heap is NULL or it was destroyed
 *  Failure: The Heap handle, if heap is the process heap.
1585 1586 1587 1588
 */
HANDLE WINAPI RtlDestroyHeap( HANDLE heap )
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
1589
    SUBHEAP *subheap, *next;
1590
    ARENA_LARGE *arena, *arena_next;
1591 1592
    SIZE_T size;
    void *addr;
1593

1594
    TRACE("%p\n", heap );
1595 1596 1597
    if (!heapPtr) return heap;

    if (heap == processHeap) return heap; /* cannot delete the main process heap */
1598 1599 1600 1601 1602

    /* remove it from the per-process list */
    RtlEnterCriticalSection( &processHeap->critSection );
    list_remove( &heapPtr->entry );
    RtlLeaveCriticalSection( &processHeap->critSection );
1603

1604
    heapPtr->critSection.DebugInfo->Spare[0] = 0;
1605
    RtlDeleteCriticalSection( &heapPtr->critSection );
1606

1607 1608 1609 1610 1611 1612 1613
    LIST_FOR_EACH_ENTRY_SAFE( arena, arena_next, &heapPtr->large_list, ARENA_LARGE, entry )
    {
        list_remove( &arena->entry );
        size = 0;
        addr = arena;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    }
1614
    LIST_FOR_EACH_ENTRY_SAFE( subheap, next, &heapPtr->subheap_list, SUBHEAP, entry )
1615
    {
1616
        if (subheap == &heapPtr->subheap) continue;  /* do this one last */
1617
        subheap_notify_free_all(subheap);
1618 1619 1620
        list_remove( &subheap->entry );
        size = 0;
        addr = subheap->base;
1621
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
1622
    }
1623
    subheap_notify_free_all(&heapPtr->subheap);
1624 1625 1626 1627 1628 1629
    if (heapPtr->pending_free)
    {
        size = 0;
        addr = heapPtr->pending_free;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    }
1630 1631 1632
    size = 0;
    addr = heapPtr->subheap.base;
    NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
1633 1634 1635 1636 1637 1638 1639
    return 0;
}


/***********************************************************************
 *           RtlAllocateHeap   (NTDLL.@)
 *
Jon Griffiths's avatar
Jon Griffiths committed
1640 1641 1642 1643 1644 1645 1646 1647 1648 1649 1650 1651 1652
 * Allocate a memory block from a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to allocate block from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  size  [I] Size of the memory block to allocate
 *
 * RETURNS
 *  Success: A pointer to the newly allocated block
 *  Failure: NULL.
 *
 * NOTES
 *  This call does not SetLastError().
1653
 */
1654
PVOID WINAPI RtlAllocateHeap( HANDLE heap, ULONG flags, SIZE_T size )
1655 1656 1657 1658 1659
{
    ARENA_FREE *pArena;
    ARENA_INUSE *pInUse;
    SUBHEAP *subheap;
    HEAP *heapPtr = HEAP_GetPtr( heap );
1660
    SIZE_T rounded_size;
1661 1662 1663 1664 1665 1666

    /* Validate the parameters */

    if (!heapPtr) return NULL;
    flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
    flags |= heapPtr->flags;
1667
    rounded_size = ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE( flags );
1668 1669 1670 1671 1672
    if (rounded_size < size)  /* overflow */
    {
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }
1673
    if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;
1674 1675

    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1676 1677 1678 1679 1680 1681 1682 1683 1684 1685

    if (rounded_size >= HEAP_MIN_LARGE_BLOCK_SIZE && (flags & HEAP_GROWABLE))
    {
        void *ret = allocate_large_block( heap, flags, size );
        if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
        if (!ret && (flags & HEAP_GENERATE_EXCEPTIONS)) RtlRaiseStatus( STATUS_NO_MEMORY );
        TRACE("(%p,%08x,%08lx): returning %p\n", heap, flags, size, ret );
        return ret;
    }

1686 1687
    /* Locate a suitable free block */

1688
    if (!(pArena = HEAP_FindFreeBlock( heapPtr, rounded_size, &subheap )))
1689
    {
1690
        TRACE("(%p,%08x,%08lx): returning NULL\n",
1691 1692 1693 1694 1695 1696 1697 1698
                  heap, flags, size  );
        if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }

    /* Remove the arena from the free list */

1699
    list_remove( &pArena->entry );
1700 1701 1702 1703 1704 1705 1706 1707 1708 1709 1710 1711

    /* Build the in-use arena */

    pInUse = (ARENA_INUSE *)pArena;

    /* in-use arena is smaller than free arena,
     * so we have to add the difference to the size */
    pInUse->size  = (pInUse->size & ~ARENA_FLAG_FREE) + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
    pInUse->magic = ARENA_INUSE_MAGIC;

    /* Shrink the block */

1712 1713
    HEAP_ShrinkBlock( subheap, pInUse, rounded_size );
    pInUse->unused_bytes = (pInUse->size & ARENA_SIZE_MASK) - size;
1714

1715
    notify_alloc( pInUse + 1, size, flags & HEAP_ZERO_MEMORY );
1716
    initialize_block( pInUse + 1, size, pInUse->unused_bytes, flags );
1717 1718 1719

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );

1720
    TRACE("(%p,%08x,%08lx): returning %p\n", heap, flags, size, pInUse + 1 );
1721
    return pInUse + 1;
1722 1723 1724 1725 1726
}


/***********************************************************************
 *           RtlFreeHeap   (NTDLL.@)
 *
 * Free a memory block allocated with RtlAllocateHeap().
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to free
 *
 * RETURNS
 *  Success: TRUE, if ptr is NULL or was freed successfully.
 *  Failure: FALSE.
 */
BOOLEAN WINAPI RtlFreeHeap( HANDLE heap, ULONG flags, PVOID ptr )
{
    ARENA_INUSE *block;
    SUBHEAP *sub;
    HEAP *heapPtr;
    BOOLEAN valid;

    /* Validate the parameters */

    if (!ptr) return TRUE;  /* freeing a NULL ptr isn't an error in Win2k */

    if (!(heapPtr = HEAP_GetPtr( heap )))
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
        return FALSE;
    }

    flags = heapPtr->flags | (flags & HEAP_NO_SERIALIZE);
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    /* Inform valgrind we are trying to free memory, so it can throw up an error message */
    notify_free( ptr );

    /* Some sanity checks */
    block = (ARENA_INUSE *)ptr - 1;
    valid = validate_block_pointer( heapPtr, &sub, block );
    if (valid)
    {
        /* a NULL subheap means the pointer is a large block mapping */
        if (sub) HEAP_MakeInUseBlockFree( sub, block );
        else free_large_block( heapPtr, flags, ptr );
    }

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );

    if (valid)
    {
        TRACE("(%p,%08x,%p): returning TRUE\n", heap, flags, ptr );
        return TRUE;
    }
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
    TRACE("(%p,%08x,%p): returning FALSE\n", heap, flags, ptr );
    return FALSE;
}


/***********************************************************************
 *           RtlReAllocateHeap   (NTDLL.@)
 *
 * Change the size of a memory block allocated with RtlAllocateHeap().
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to resize
 *  size  [I] Size of the memory block to allocate
 *
 * RETURNS
 *  Success: A pointer to the resized block (which may be different).
 *  Failure: NULL.
 */
PVOID WINAPI RtlReAllocateHeap( HANDLE heap, ULONG flags, PVOID ptr, SIZE_T size )
{
    ARENA_INUSE *pArena;
    HEAP *heapPtr;
    SUBHEAP *subheap;
    SIZE_T oldBlockSize, oldActualSize, rounded_size;
    void *ret;

    if (!ptr) return NULL;
    if (!(heapPtr = HEAP_GetPtr( heap )))
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
        return NULL;
    }

    /* Validate the parameters */

    flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
             HEAP_REALLOC_IN_PLACE_ONLY;
    flags |= heapPtr->flags;
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    rounded_size = ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE(flags);
    if (rounded_size < size) goto oom;  /* overflow */
    if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;

    pArena = (ARENA_INUSE *)ptr - 1;
    if (!validate_block_pointer( heapPtr, &subheap, pArena )) goto error;
    /* a NULL subheap means ptr is a separately mapped large block */
    if (!subheap)
    {
        if (!(ret = realloc_large_block( heapPtr, flags, ptr, size ))) goto oom;
        goto done;
    }

    /* Check if we need to grow the block */

    /* oldBlockSize: rounded capacity; oldActualSize: bytes the caller asked for */
    oldBlockSize = (pArena->size & ARENA_SIZE_MASK);
    oldActualSize = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
    if (rounded_size > oldBlockSize)
    {
        char *pNext = (char *)(pArena + 1) + oldBlockSize;

        /* growing past the large-block threshold: move to a separate mapping */
        if (rounded_size >= HEAP_MIN_LARGE_BLOCK_SIZE && (flags & HEAP_GROWABLE))
        {
            if (flags & HEAP_REALLOC_IN_PLACE_ONLY) goto oom;
            if (!(ret = allocate_large_block( heapPtr, flags, size ))) goto oom;
            memcpy( ret, pArena + 1, oldActualSize );
            notify_free( pArena + 1 );
            HEAP_MakeInUseBlockFree( subheap, pArena );
            goto done;
        }
        if ((pNext < (char *)subheap->base + subheap->size) &&
            (*(DWORD *)pNext & ARENA_FLAG_FREE) &&
            (oldBlockSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= rounded_size))
        {
            /* The next block is free and large enough */
            ARENA_FREE *pFree = (ARENA_FREE *)pNext;
            list_remove( &pFree->entry );
            /* absorb the free neighbour (header included) into this arena */
            pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
            if (!HEAP_Commit( subheap, pArena, rounded_size )) goto oom;
            notify_realloc( pArena + 1, oldActualSize, size );
            /* give back whatever part of the merged block we don't need */
            HEAP_ShrinkBlock( subheap, pArena, rounded_size );
        }
        else  /* Do it the hard way */
        {
            ARENA_FREE *pNew;
            ARENA_INUSE *pInUse;
            SUBHEAP *newsubheap;

            if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
                !(pNew = HEAP_FindFreeBlock( heapPtr, rounded_size, &newsubheap )))
                goto oom;

            /* Build the in-use arena */

            list_remove( &pNew->entry );
            pInUse = (ARENA_INUSE *)pNew;
            /* in-use header is smaller than a free header, so the
             * difference is added back to the usable size */
            pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
                           + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
            pInUse->magic = ARENA_INUSE_MAGIC;
            HEAP_ShrinkBlock( newsubheap, pInUse, rounded_size );

            mark_block_initialized( pInUse + 1, oldActualSize );
            notify_alloc( pInUse + 1, size, FALSE );
            memcpy( pInUse + 1, pArena + 1, oldActualSize );

            /* Free the previous block */

            notify_free( pArena + 1 );
            HEAP_MakeInUseBlockFree( subheap, pArena );
            subheap = newsubheap;
            pArena  = pInUse;
        }
    }
    else
    {
        /* shrinking (or fits already): just trim the existing arena */
        HEAP_ShrinkBlock( subheap, pArena, rounded_size );
        notify_realloc( pArena + 1, oldActualSize, size );
    }

    pArena->unused_bytes = (pArena->size & ARENA_SIZE_MASK) - size;

    /* Clear the extra bytes if needed */

    if (size > oldActualSize)
        initialize_block( (char *)(pArena + 1) + oldActualSize, size - oldActualSize,
                          pArena->unused_bytes, flags );
    else
        mark_block_tail( (char *)(pArena + 1) + size, pArena->unused_bytes, flags );

    /* Return the new arena */

    ret = pArena + 1;
done:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    TRACE("(%p,%08x,%p,%08lx): returning %p\n", heap, flags, ptr, size, ret );
    return ret;

oom:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
    TRACE("(%p,%08x,%p,%08lx): returning NULL\n", heap, flags, ptr, size );
    return NULL;

error:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
    TRACE("(%p,%08x,%p,%08lx): returning NULL\n", heap, flags, ptr, size );
    return NULL;
}


/***********************************************************************
 *           RtlCompactHeap   (NTDLL.@)
 *
 * Compact the free space in a Heap.
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *
 * RETURNS
 *  The number of bytes compacted.
 *
 * NOTES
 *  This function is a harmless stub.
 */
ULONG WINAPI RtlCompactHeap( HANDLE heap, ULONG flags )
{
    static BOOL reported;

    /* complain only once to avoid flooding the log */
    if (!reported)
    {
        reported = TRUE;
        FIXME( "(%p, 0x%x) stub\n", heap, flags );
    }
    return 0;
}


/***********************************************************************
 *           RtlLockHeap   (NTDLL.@)
 *
 * Lock a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to lock
 *
 * RETURNS
 *  Success: TRUE. The Heap is locked.
 *  Failure: FALSE, if heap is invalid.
 */
BOOLEAN WINAPI RtlLockHeap( HANDLE heap )
{
    HEAP *heapPtr;

    if (!(heapPtr = HEAP_GetPtr( heap ))) return FALSE;
    RtlEnterCriticalSection( &heapPtr->critSection );
    return TRUE;
}


/***********************************************************************
 *           RtlUnlockHeap   (NTDLL.@)
 *
 * Unlock a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to unlock
 *
 * RETURNS
 *  Success: TRUE. The Heap is unlocked.
 *  Failure: FALSE, if heap is invalid.
 */
BOOLEAN WINAPI RtlUnlockHeap( HANDLE heap )
{
    HEAP *heapPtr;

    if (!(heapPtr = HEAP_GetPtr( heap ))) return FALSE;
    RtlLeaveCriticalSection( &heapPtr->critSection );
    return TRUE;
}


/***********************************************************************
 *           RtlSizeHeap   (NTDLL.@)
Jon Griffiths's avatar
Jon Griffiths committed
1999 2000 2001 2002 2003 2004 2005 2006 2007 2008 2009 2010 2011 2012
 *
 * Get the actual size of a memory block allocated from a Heap.
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to get the size of
 *
 * RETURNS
 *  Success: The size of the block.
 *  Failure: -1, heap or ptr are invalid.
 *
 * NOTES
 *  The size may be bigger than what was passed to RtlAllocateHeap().
2013
 */
2014
SIZE_T WINAPI RtlSizeHeap( HANDLE heap, ULONG flags, const void *ptr )
2015
{
2016
    SIZE_T ret;
2017 2018
    const ARENA_INUSE *pArena;
    SUBHEAP *subheap;
2019 2020 2021 2022
    HEAP *heapPtr = HEAP_GetPtr( heap );

    if (!heapPtr)
    {
2023
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
2024
        return ~0UL;
2025 2026 2027 2028
    }
    flags &= HEAP_NO_SERIALIZE;
    flags |= heapPtr->flags;
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
2029 2030 2031

    pArena = (const ARENA_INUSE *)ptr - 1;
    if (!validate_block_pointer( heapPtr, &subheap, pArena ))
2032
    {
2033
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
2034
        ret = ~0UL;
2035
    }
2036 2037 2038 2039 2040
    else if (!subheap)
    {
        const ARENA_LARGE *large_arena = (const ARENA_LARGE *)ptr - 1;
        ret = large_arena->data_size;
    }
2041 2042
    else
    {
2043
        ret = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
2044 2045 2046
    }
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );

2047
    TRACE("(%p,%08x,%p): returning %08lx\n", heap, flags, ptr, ret );
2048 2049 2050 2051 2052 2053
    return ret;
}


/***********************************************************************
 *           RtlValidateHeap   (NTDLL.@)
Jon Griffiths's avatar
Jon Griffiths committed
2054
 *
2055
 * Determine if a block is a valid allocation from a heap.
Jon Griffiths's avatar
Jon Griffiths committed
2056 2057 2058 2059 2060 2061 2062 2063 2064
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to check
 *
 * RETURNS
 *  Success: TRUE. The block was allocated from heap.
 *  Failure: FALSE, if heap is invalid or ptr was not allocated from it.
2065
 */
Jon Griffiths's avatar
Jon Griffiths committed
2066
BOOLEAN WINAPI RtlValidateHeap( HANDLE heap, ULONG flags, LPCVOID ptr )
2067 2068 2069
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    if (!heapPtr) return FALSE;
Jon Griffiths's avatar
Jon Griffiths committed
2070
    return HEAP_IsRealArena( heapPtr, flags, ptr, QUIET );
2071 2072 2073 2074 2075 2076
}


/***********************************************************************
 *           RtlWalkHeap    (NTDLL.@)
 *
 * Enumerate the blocks of a heap one entry at a time; on each call the
 * cursor is the entry->lpData returned by the previous call.
 *
 * FIXME
 *  The PROCESS_HEAP_ENTRY flag values seem different between this
 *  function and HeapWalk(). To be checked.
 */
NTSTATUS WINAPI RtlWalkHeap( HANDLE heap, PVOID entry_ptr )
{
    LPPROCESS_HEAP_ENTRY entry = entry_ptr; /* FIXME */
    HEAP *heapPtr = HEAP_GetPtr(heap);
    SUBHEAP *sub, *currentheap = NULL;
    NTSTATUS ret;
    char *ptr;
    int region_index = 0;

    if (!heapPtr || !entry) return STATUS_INVALID_PARAMETER;

    if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    /* FIXME: enumerate large blocks too */

    /* set ptr to the next arena to be examined */

    if (!entry->lpData) /* first call (init) ? */
    {
        TRACE("begin walking of heap %p.\n", heap);
        currentheap = &heapPtr->subheap;
        ptr = (char*)currentheap->base + currentheap->headerSize;
    }
    else
    {
        /* find the subheap containing the previous entry */
        ptr = entry->lpData;
        LIST_FOR_EACH_ENTRY( sub, &heapPtr->subheap_list, SUBHEAP, entry )
        {
            if ((ptr >= (char *)sub->base) &&
                (ptr < (char *)sub->base + sub->size))
            {
                currentheap = sub;
                break;
            }
            region_index++;
        }
        if (currentheap == NULL)
        {
            ERR("no matching subheap found, shouldn't happen !\n");
            ret = STATUS_NO_MORE_ENTRIES;
            goto HW_end;
        }

        /* advance past the previous entry; use its arena header when it
         * is still intact, otherwise fall back on the reported size */
        if (((ARENA_INUSE *)ptr - 1)->magic == ARENA_INUSE_MAGIC ||
            ((ARENA_INUSE *)ptr - 1)->magic == ARENA_PENDING_MAGIC)
        {
            ARENA_INUSE *pArena = (ARENA_INUSE *)ptr - 1;
            ptr += pArena->size & ARENA_SIZE_MASK;
        }
        else if (((ARENA_FREE *)ptr - 1)->magic == ARENA_FREE_MAGIC)
        {
            ARENA_FREE *pArena = (ARENA_FREE *)ptr - 1;
            ptr += pArena->size & ARENA_SIZE_MASK;
        }
        else
            ptr += entry->cbData; /* point to next arena */

        if (ptr > (char *)currentheap->base + currentheap->size - 1)
        {   /* proceed with next subheap */
            struct list *next = list_next( &heapPtr->subheap_list, &currentheap->entry );
            if (!next)
            {  /* successfully finished */
                TRACE("end reached.\n");
                ret = STATUS_NO_MORE_ENTRIES;
                goto HW_end;
            }
            currentheap = LIST_ENTRY( next, SUBHEAP, entry );
            ptr = (char *)currentheap->base + currentheap->headerSize;
        }
    }

    /* fill in the PROCESS_HEAP_ENTRY for the arena at ptr */
    entry->wFlags = 0;
    if (*(DWORD *)ptr & ARENA_FLAG_FREE)
    {
        ARENA_FREE *pArena = (ARENA_FREE *)ptr;

        /*TRACE("free, magic: %04x\n", pArena->magic);*/

        entry->lpData = pArena + 1;
        entry->cbData = pArena->size & ARENA_SIZE_MASK;
        entry->cbOverhead = sizeof(ARENA_FREE);
        entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
    }
    else
    {
        ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;

        /*TRACE("busy, magic: %04x\n", pArena->magic);*/

        entry->lpData = pArena + 1;
        entry->cbData = pArena->size & ARENA_SIZE_MASK;
        entry->cbOverhead = sizeof(ARENA_INUSE);
        /* pending (delayed-free) blocks are reported as uncommitted */
        entry->wFlags = (pArena->magic == ARENA_PENDING_MAGIC) ?
                        PROCESS_HEAP_UNCOMMITTED_RANGE : PROCESS_HEAP_ENTRY_BUSY;
        /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
        and PROCESS_HEAP_ENTRY_DDESHARE yet */
    }

    entry->iRegionIndex = region_index;

    /* first element of heap ? */
    if (ptr == (char *)currentheap->base + currentheap->headerSize)
    {
        entry->wFlags |= PROCESS_HEAP_REGION;
        entry->u.Region.dwCommittedSize = currentheap->commitSize;
        entry->u.Region.dwUnCommittedSize =
                currentheap->size - currentheap->commitSize;
        entry->u.Region.lpFirstBlock = /* first valid block */
                (char *)currentheap->base + currentheap->headerSize;
        entry->u.Region.lpLastBlock  = /* first invalid block */
                (char *)currentheap->base + currentheap->size;
    }
    ret = STATUS_SUCCESS;
    if (TRACE_ON(heap)) HEAP_DumpEntry(entry);

HW_end:
    if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    return ret;
}


/***********************************************************************
 *           RtlGetProcessHeaps    (NTDLL.@)
 *
 * Get the Heaps belonging to the current process.
 *
 * PARAMS
 *  count [I] size of heaps
 *  heaps [O] Destination array for heap HANDLE's
 *
 * RETURNS
 *  Success: The number of Heaps allocated by the process.
 *  Failure: 0.
 */
ULONG WINAPI RtlGetProcessHeaps( ULONG count, HANDLE *heaps )
{
    struct list *item;
    ULONG total;

    RtlEnterCriticalSection( &processHeap->critSection );

    /* the main heap plus every heap linked off it */
    total = 1;
    LIST_FOR_EACH( item, &processHeap->entry ) total++;

    /* fill the array only if it can hold all of them */
    if (count >= total)
    {
        HANDLE *out = heaps;

        *out++ = processHeap;
        LIST_FOR_EACH( item, &processHeap->entry )
            *out++ = LIST_ENTRY( item, HEAP, entry );
    }
    RtlLeaveCriticalSection( &processHeap->critSection );
    return total;
}
2231 2232 2233 2234 2235 2236 2237 2238 2239 2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250 2251 2252 2253

/***********************************************************************
 *           RtlQueryHeapInformation    (NTDLL.@)
 *
 * Only HeapCompatibilityInformation is implemented; it always reports
 * a standard (non-LFH) heap.
 */
NTSTATUS WINAPI RtlQueryHeapInformation( HANDLE heap, HEAP_INFORMATION_CLASS info_class,
                                         PVOID info, SIZE_T size_in, PSIZE_T size_out)
{
    if (info_class != HeapCompatibilityInformation)
    {
        FIXME("Unknown heap information class %u\n", info_class);
        return STATUS_INVALID_INFO_CLASS;
    }

    /* report the required buffer size even on failure */
    if (size_out) *size_out = sizeof(ULONG);
    if (size_in < sizeof(ULONG)) return STATUS_BUFFER_TOO_SMALL;

    *(ULONG *)info = 0; /* standard heap */
    return STATUS_SUCCESS;
}