/* -----------------------------------------------------------------------------
   (c) The University of Glasgow 2001

   Arena allocation.  Arenas provide fast memory allocation at the
   expense of fine-grained recycling of storage: memory may only be
   returned to the system by freeing the entire arena; it isn't
   possible to return individual objects within an arena.

   Do not assume that sequentially allocated objects will be adjacent
   in memory.

   Quirks: this allocator makes use of the RTS block allocator.  If
   the current block doesn't have enough room for the requested
   object, then a new block is allocated.  This means that allocating
   large objects will tend to result in wasted space at the end of
   each block.  In the worst case, half of the allocated space is
   wasted.  This allocator is therefore best suited to situations in
   which most allocations are small.
   -------------------------------------------------------------------------- */

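/* A minimal usage sketch (illustrative only; 'Foo' stands in for whatever
   the caller actually stores, and error handling is omitted):

       Arena *a = newArena();
       Foo *x = arenaAlloc(a, sizeof(Foo));   // fast bump allocation
       Foo *y = arenaAlloc(a, sizeof(Foo));   // not necessarily adjacent to x
       ...
       arenaFree(a);                          // releases every block at once
*/
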
#include "PosixSource.h"
#include "Rts.h"

#include "RtsUtils.h"
#include "Arena.h"

// Each arena struct is allocated using malloc().
struct _Arena {
    bdescr *current;
    StgWord *free;              // ptr to next free byte in current block
    StgWord *lim;               // limit (== last free byte + 1)
};

// We like to keep track of how many blocks we've allocated for
// Storage.c:memInventory().
static long arena_blocks = 0;

// Begin a new arena
Arena *
newArena( void )
{
    Arena *arena;

    arena = stgMallocBytes(sizeof(Arena), "newArena");
    arena->current = allocBlock_lock();
    arena->current->link = NULL;
    arena->free = arena->current->start;
    arena->lim = arena->current->start + BLOCK_SIZE_W;
    arena_blocks++;

    return arena;
}

// The minimum alignment of an allocated block.
#define MIN_ALIGN 8

/* 'n' is assumed to be a power of 2 */
#define ROUNDUP(x,n)  (((x)+((n)-1))&(~((n)-1)))
#define B_TO_W(x)     ((x) / sizeof(W_))
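
// For example, ROUNDUP(13,8) == (13+7) & ~7 == 16; B_TO_W then converts
// the rounded-up byte count into a count of machine words.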

// Allocate some memory in an arena
void *
arenaAlloc( Arena *arena, size_t size )
{
    void *p;
    nat size_w;
    nat req_blocks;
    bdescr *bd;

    // round up to nearest alignment chunk.
    size = ROUNDUP(size,MIN_ALIGN);

    // size of allocated block in words.
    size_w = B_TO_W(size);

    if ( arena->free + size_w < arena->lim ) {
        // enough room in the current block...
        p = arena->free;
        arena->free += size_w;
        return p;
    } else {
        // allocate a fresh block...
        req_blocks = (lnat)BLOCK_ROUND_UP(size) / BLOCK_SIZE;
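        // i.e. req_blocks = ceiling(size / BLOCK_SIZE), so a request
        // larger than a single block gets a contiguous group of blocks.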
        bd = allocGroup_lock(req_blocks);
        arena_blocks += req_blocks;

        bd->gen_no = 0;
        bd->gen = NULL;
        bd->dest_no = 0;
        bd->flags = 0;
        bd->free = bd->start;
        bd->link = arena->current;
        arena->current = bd;
        arena->free = bd->free + size_w;
        arena->lim = bd->free + bd->blocks * BLOCK_SIZE_W;
        return bd->start;
    }
}

// Free an entire arena
void
arenaFree( Arena *arena )
{
    bdescr *bd, *next;

    for (bd = arena->current; bd != NULL; bd = next) {
        next = bd->link;
        arena_blocks -= bd->blocks;
        ASSERT(arena_blocks >= 0);
        freeGroup_lock(bd);
    }
    stgFree(arena);
}

unsigned long
arenaBlocks( void )
{
    return arena_blocks;
}