1 /* $Id: mal.c,v 1.5 1994/06/24 11:55:29 ceriel Exp $ */
3 * (c) copyright 1987 by the Vrije Universiteit, Amsterdam, The Netherlands.
4 * See the copyright notice in the ACK home directory, in the file "Copyright".
14 /* Malloc space is traversed by N doubly-linked lists of chunks, each
15 containing a couple of house-keeping data addressed as a
16 'mallink' and a piece of useful space, called the block.
17 The N lists are accessed through their starting pointers in
18 free_list[]. Free_list[n] points to a list of chunks between
19 2**(n+LOG_MIN_SIZE) and 2**(n+LOG_MIN_SIZE+1)-1, which means
20 that the smallest chunk is 2**LOG_MIN_SIZE (== MIN_SIZE).
25 #define SBRK sys_break
28 #define ILL_BREAK (void *)(-1) /* funny failure value */
30 extern void *SBRK(int incr);
33 private do_free(mallink *ml), sell_out(void);
34 privatedata mallink *store[MAX_STORE];
/* malloc(n): return an aligned block of at least n usable bytes, or
   NULL on failure.
   NOTE(review): the numbered lines below are an elided sample of the
   original source -- lines are missing between them, so these comments
   describe only what is visible here; verify against the full file. */
38 malloc(register size_t n)
39 {check_mallinks("malloc entry");{
41 	register int min_class;
/* Round the request up to the alignment unit, never below MIN_SIZE. */
46 	if (n < MIN_SIZE) n = align(MIN_SIZE); else n = align(n);
/* Small request: try the per-size store (a cache of recently freed
   small chunks, indexed by size class) before touching the free lists. */
48 	if (n <= MAX_STORE*MIN_SIZE) {
49 		/* look in the store first */
50 		register mallink **stp = &store[(n >> LOG_MIN_SIZE) - 1];
/* Store hit (the non-empty test is elided in this view): pop the
   head chunk ml off its store list and hand it out. */
53 			*stp = log_next_of(ml);
55 			check_mallinks("malloc fast exit");
56 			assert(! in_store(ml));
57 			return block_of_mallink(ml);
62 	check_work_empty("malloc, entry");
64 	/* Acquire a chunk of at least size n if at all possible;
68 	/* Inline substitution of "smallest".
/* Compute min_class: the smallest free-list class whose every chunk
   is guaranteed to hold n bytes (loop body elided). */
70 		register size_t n1 = n;
72 		assert(n1 < (1L << LOG_MAX_SIZE));
78 		} while (n1 >= MIN_SIZE);
/* Class out of range: request larger than this allocator ever serves. */
81 	if (min_class >= MAX_FLIST)
82 		return NULL;	/* we don't deal in blocks that big */
83 	ml = first_present(min_class);
/* No free chunk of a safe class: grow the arena with SBRK, rounding
   the grab up to a multiple of GRABSIZE. */
87 #define GRABSIZE		4096	/* Power of 2 */
89 			((MIN_SIZE<<min_class)+ mallink_size() + GRABSIZE - 1) &
93 		/* first align SBRK() */
96 			SBRK((int) (align((size_type) p) - (size_type) p));
99 		/* SBRK takes an int; sorry ... */
/* The big grab failed; retry with the bare minimum: n plus the
   chunk-header overhead (guarding against int overflow of req). */
105 		if (p == ILL_BREAK) {
106 			req = n + mallink_size();
107 			if ((int) req >= 0) p = SBRK((int)req);
/* The system refuses to give us more memory at all: liquidate the
   store back to the free lists (the flush call is elided) and retry. */
109 		if (p == ILL_BREAK) {
110 			/* Now this is bad. The system will not give us
111 			   more memory. We can only liquidate our store
116 			ml = first_present(min_class);
117 			if (ml == MAL_NULL) {
119 				/* In this emergency we try to locate a suitable
120 				   chunk in the free_list just below the safe
121 				   one; some of these chunks may fit the job.
/* Last resort: scan the next-lower class for a chunk that happens
   to be big enough for this particular n. */
123 				ml = search_free_list(min_class - 1, n);
124 				if (!ml)	/* really out of space */
126 				started_working_on(ml);
127 				unlink_free_chunk(ml);
128 				check_mallinks("suitable_chunk, forced");
131 			else started_working_on(ml);
/* SBRK succeeded: p is aligned; carve a fresh chunk of req bytes. */
135 		assert((size_type)p == align((size_type)p));
136 		ml = create_chunk(p, req);
138 		check_mallinks("suitable_chunk, extended");
140 	else started_working_on(ml);
142 	/* we have a chunk */
145 	check_mallinks("suitable_chunk, removed");
/* Chunk is large enough to split: the trailing remainder goes back
   to a free list (the split call itself is elided in this view). */
147 	if (n + MIN_SIZE <= size_of(ml)) {
150 	stopped_working_on(ml);
151 	check_mallinks("malloc exit");
152 	check_work_empty("malloc exit");
154 	assert(! in_store(ml));
156 	return block_of_mallink(ml);
/* free(addr) -- body only; the function header line is elided from
   this view.  Small chunks are pushed back on the store cache; larger
   ones are handed to do_free() (that call is also elided). */
161 {check_mallinks("free entry");{
162 	register mallink *ml;
/* free(NULL): nothing to do (the test itself is elided). */
165 		check_mallinks("free(0) very fast exit");
169 	ml = mallink_of_block(addr);
/* Chunk already free or sitting in the store: silently ignore the
   double free rather than corrupting the lists. */
172 	if (free_of(ml) || in_store(ml))
173 		return;			/* user frees free block */
/* Small chunk: push it on the store list for its size class. */
174 	if (size_of(ml) <= MAX_STORE*MIN_SIZE) {
175 		/* return to store */
176 		mallink **stp = &store[(size_of(ml) >> LOG_MIN_SIZE) - 1];
178 		set_log_next(ml, *stp);
182 		check_mallinks("free fast exit");
186 	check_mallinks("free exit");
/* do_free(ml): return chunk ml to the free lists, coalescing with the
   physically adjacent chunk on either side when that neighbour is
   free.  (Several lines are elided between the numbered lines.) */
191 do_free(register mallink *ml)
196 	if (free_of(ml)) return;
198 	started_working_on(ml);
/* Merge forward: absorb the physically next chunk if it is free. */
201 	if (! last_mallink(ml)) {
202 		register mallink *next = phys_next_of(ml);
204 		if (free_of(next)) coalesce_forw(ml, next);
/* Merge backward: let the physically previous free chunk absorb ml. */
207 	if (! first_mallink(ml)) {
208 		register mallink *prev = phys_prev_of(ml);
211 			coalesce_backw(ml, prev);
216 	stopped_working_on(ml);
217 	check_work_empty("free");
219 /* Compile-time checks on param.h */
/* NOTE(review): these 'case' labels live inside a switch whose header
   is elided from this view.  The trick: if a compile-time condition
   makes two case expressions evaluate to the same constant, the
   compiler rejects the duplicate case -- a pre-C11 static assert. */
221 	case MIN_SIZE < OFF_SET * sizeof(mallink): break;
223 	/* If this statement does not compile due to duplicate case
224 	   entry, the minimum size block cannot hold the links for
225 	   the free blocks. Either raise LOG_MIN_SIZE or switch
230 	case sizeof(void *) != sizeof(size_type): break;
232 	/* If this statement does not compile due to duplicate
233 	   case entry, size_type is not defined correctly.
234 	   Redefine and compile again.
/* realloc(addr, n): resize the block at addr to at least n bytes,
   growing in place via the physically next free chunk when possible,
   otherwise moving the data to a fresh allocation.  Returns the
   (possibly new) block address, or NULL when no space is available.
   NOTE(review): lines are elided between the numbered lines below;
   comments describe only the visible code. */
240 realloc(void *addr, register size_t n)
241 {check_mallinks("realloc entry");{
242 	register mallink *ml, *ph_next;
243 	register size_type size;
246 		/* Behave like most Unix realloc's when handed a
255 	ml = mallink_of_block(addr);
/* Round the request up to the alignment unit, never below MIN_SIZE. */
256 	if (n < MIN_SIZE) n = align(MIN_SIZE); else n = align(n);
/* If the chunk is cached in the store, walk its store list and unhook
   it so it can be resized (the enclosing test is elided). */
259 		register mallink *stp = store[(size_of(ml) >> LOG_MIN_SIZE) - 1];
260 		mallink *stp1 = NULL;
263 			stp = log_next_of(stp);
265 		stp = log_next_of(stp);
266 		if (! stp1) store[(size_of(ml) >> LOG_MIN_SIZE) - 1] = stp;
267 		else set_log_next(stp1, stp);
/* Chunk was on a regular free list: the user realloc'ed a freed
   block; reclaim it instead of failing. */
273 		unlink_free_chunk(ml);
274 		set_free(ml, 0);	/* user reallocs free block */
276 	started_working_on(ml);
278 	if (	/* we can simplify the problem by adding the next chunk: */
281 		(ph_next = phys_next_of(ml), free_of(ph_next)) &&
282 		n <= size + mallink_size() + size_of(ph_next)
284 		/* add in the physically next chunk */
285 		unlink_free_chunk(ph_next);
286 		combine_chunks(ml, ph_next);
288 		check_mallinks("realloc, combining");
/* Still too small even after absorbing the neighbour: allocate a new
   block, copy the old contents byte by byte, and return the new
   address (the free of the old block is elided in this view). */
290 	if (n > size) {		/* this didn't help */
292 		register char *l1, *l2 = addr;
294 		stopped_working_on(ml);
295 		if (!(new = l1 = malloc(n))) return NULL;	/* no way */
296 		while (size--) *l1++ = *l2++;
298 		check_work_empty("mv_realloc");
300 		assert(! in_store(mallink_of_block(new)));
304 	/* it helped, but maybe too well */
/* Grown chunk is oversized: split off the remainder (split elided). */
306 	if (n + MIN_SIZE <= size_of(ml)) {
309 	stopped_working_on(ml);
310 	check_mallinks("realloc exit");
311 	check_work_empty("realloc");
313 	assert(! in_store(ml));
/* calloc(nmemb, size): allocate an array of nmemb elements of the
   given size, zero-filled.  Returns NULL when either count is zero,
   when nmemb*size would overflow size_t, or when the rounded total
   exceeds the allocator's maximum.  (Lines are elided between the
   numbered lines; the declarations of n, l1, l2 are not visible.) */
319 calloc(size_t nmemb, size_t size)
320 {check_mallinks("calloc entry");{
324 	if (size == 0) return NULL;
325 	if (nmemb == 0) return NULL;
327 	/* Check for overflow on the multiplication. The peephole-optimizer
328 	 * will eliminate all but one of the possibilities.
/* sizeof is a compile-time constant, so exactly one branch survives. */
330 	if (sizeof(size_t) == sizeof(int)) {
331 		if (UINT_MAX / size < nmemb) return NULL;
332 	} else if (sizeof(size_t) == sizeof(long)) {
333 		if (ULONG_MAX / size < nmemb) return NULL;
334 	} else return NULL;	/* can't happen, can it ? */
/* Round the total up to the alignment unit, never below MIN_SIZE. */
337 	if (n < MIN_SIZE) n = align(MIN_SIZE); else n = align(n);
338 	if (n >= (1L << LOG_MAX_SIZE)) return NULL;
/* Zero the block one long at a time; n is long-aligned by align().
   NOTE(review): a NULL check on l1 is not visible here -- presumably
   guarded in the elided original; confirm against the full file. */
339 	l1 = (long *) malloc(n);
340 	l2 = l1 + (n / sizeof(long));	/* n is at least long aligned */
341 	while ( l2 != l1 ) *--l2 = 0;
342 	check_mallinks("calloc exit");
343 	check_work_empty("calloc exit");
346 /* Auxiliary routines */
351 /* Frees all blocks in the store.
353 register mallink **stp;
355 for (stp = &store[0]; stp < &store[MAX_STORE]; stp++) {
356 register mallink *ml = *stp;
359 *stp = log_next_of(ml);
371 m_assert(const char *fn, int ln)
377 write(2, ": malloc assert failed in line ", 31);
378 ch = (ln / 100) + '0'; write(2, &ch, 1); ln %= 100;
379 ch = (ln / 10) + '0'; write(2, &ch, 1); ln %= 10;
380 ch = (ln / 1) + '0'; write(2, &ch, 1);