#include <common.h>

#if defined(CONFIG_UNIT_TEST)
#define DEBUG
#endif

#include <malloc.h>
#include <asm/io.h>

#ifdef DEBUG
#if __STD_C
static void malloc_update_mallinfo (void);
void malloc_stats (void);
#else
static void malloc_update_mallinfo ();
void malloc_stats();
#endif
#endif

DECLARE_GLOBAL_DATA_PTR;

#ifdef WIN32

/*
 * Emulation of sbrk() for WIN32: memory is reserved in large regions with
 * VirtualAlloc() and committed on demand; wsbrk() below provides the
 * MORECORE interface on top of that.
 */

#define AlignPage(add) (((add) + (malloc_getpagesize-1)) & \
~(malloc_getpagesize-1))
#define AlignPage64K(add) (((add) + (0x10000 - 1)) & ~(0x10000 - 1))

#define RESERVED_SIZE (1024*1024*64)
#define NEXT_SIZE (2048*1024)
#define TOP_MEMORY ((unsigned long)2*1024*1024*1024)

struct GmListElement;
typedef struct GmListElement GmListElement;

/* List of reserved regions, so they can be released in gcleanup(). */
struct GmListElement
{
  GmListElement* next;
  void* base;
};

static GmListElement* head = 0;
static unsigned int gNextAddress = 0;
static unsigned int gAddressBase = 0;
static unsigned int gAllocatedSize = 0;

static
GmListElement* makeGmListElement (void* bas)
{
  GmListElement* this;
  this = (GmListElement*)(void*)LocalAlloc (0, sizeof (GmListElement));
  assert (this);
  if (this)
  {
    this->base = bas;
    this->next = head;
    head = this;
  }
  return this;
}

void gcleanup ()
{
  BOOL rval;
  assert ( (head == NULL) || (head->base == (void*)gAddressBase));
  if (gAddressBase && (gNextAddress - gAddressBase))
  {
    rval = VirtualFree ((void*)gAddressBase,
			gNextAddress - gAddressBase,
			MEM_DECOMMIT);
    assert (rval);
  }
  while (head)
  {
    GmListElement* next = head->next;
    rval = VirtualFree (head->base, 0, MEM_RELEASE);
    assert (rval);
    LocalFree (head);
    head = next;
  }
}

static
void* findRegion (void* start_address, unsigned long size)
{
  MEMORY_BASIC_INFORMATION info;
  if (size >= TOP_MEMORY) return NULL;

  while ((unsigned long)start_address + size < TOP_MEMORY)
  {
    VirtualQuery (start_address, &info, sizeof (info));
    if ((info.State == MEM_FREE) && (info.RegionSize >= size))
      return start_address;
    else
    {
      /* The requested region is not free; skip to the end of the
	 current region and round up to the next 64K boundary. */
      start_address = (char*)info.BaseAddress + info.RegionSize;

      start_address =
	(void *) AlignPage64K((unsigned long) start_address);
    }
  }
  return NULL;

}

void* wsbrk (long size)
{
  void* tmp;
  if (size > 0)
  {
    if (gAddressBase == 0)
    {
      gAllocatedSize = max (RESERVED_SIZE, AlignPage (size));
      gNextAddress = gAddressBase =
	(unsigned int)VirtualAlloc (NULL, gAllocatedSize,
				    MEM_RESERVE, PAGE_NOACCESS);
    } else if (AlignPage (gNextAddress + size) > (gAddressBase +
gAllocatedSize))
    {
      long new_size = max (NEXT_SIZE, AlignPage (size));
      void* new_address = (void*)(gAddressBase+gAllocatedSize);
      do
      {
	new_address = findRegion (new_address, new_size);

	if (!new_address)
	  return (void*)-1;

	gAddressBase = gNextAddress =
	  (unsigned int)VirtualAlloc (new_address, new_size,
				      MEM_RESERVE, PAGE_NOACCESS);
      }
      while (gAddressBase == 0);

      assert (new_address == (void*)gAddressBase);

      gAllocatedSize = new_size;

      if (!makeGmListElement ((void*)gAddressBase))
	return (void*)-1;
    }
    if ((size + gNextAddress) > AlignPage (gNextAddress))
    {
      void* res;
      res = VirtualAlloc ((void*)AlignPage (gNextAddress),
			  (size + gNextAddress -
			   AlignPage (gNextAddress)),
			  MEM_COMMIT, PAGE_READWRITE);
      if (!res)
	return (void*)-1;
    }
    tmp = (void*)gNextAddress;
    gNextAddress = (unsigned int)tmp + size;
    return tmp;
  }
  else if (size < 0)
  {
    /* Shrinking: decommit pages above the new break. */
    unsigned int alignedGoal = AlignPage (gNextAddress + size);

    if (alignedGoal >= gAddressBase)
    {
      VirtualFree ((void*)alignedGoal, gNextAddress - alignedGoal,
		   MEM_DECOMMIT);
      gNextAddress = gNextAddress + size;
      return (void*)gNextAddress;
    }
    else
    {
      VirtualFree ((void*)gAddressBase, gNextAddress - gAddressBase,
		   MEM_DECOMMIT);
      gNextAddress = gAddressBase;
      return (void*)-1;
    }
  }
  else
  {
    return (void*)gNextAddress;
  }
}

#endif

/* Chunk header used for boundary tags; fd/bk are meaningful only while the
   chunk is on a free list. */
struct malloc_chunk
{
  INTERNAL_SIZE_T prev_size; /* Size of previous chunk (if free). */
  INTERNAL_SIZE_T size;      /* Size in bytes, including overhead. */
  struct malloc_chunk* fd;   /* double links -- used only if free. */
  struct malloc_chunk* bk;
} __attribute__((__may_alias__)) ;

typedef struct malloc_chunk* mchunkptr;
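
/*
 * Rough picture of the boundary-tag layout implied by the structure above
 * (illustrative sketch only, not an authoritative diagram from the original
 * sources):
 *
 *   | prev_size | size+flags | user data ............ | next chunk ...
 *                            ^
 *                            chunk2mem()/mem2chunk() (defined further
 *                            below) convert between the chunk header and
 *                            the pointer handed back to callers; fd/bk
 *                            overlay the user-data area of free chunks.
 */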
/* Sizes, alignment, and conversion between chunk and user pointers. */
#define SIZE_SZ                (sizeof(INTERNAL_SIZE_T))
#define MALLOC_ALIGNMENT       (SIZE_SZ + SIZE_SZ)
#define MALLOC_ALIGN_MASK      (MALLOC_ALIGNMENT - 1)
#define MINSIZE                (sizeof(struct malloc_chunk))

/* conversion from malloc headers to user pointers, and back */
#define chunk2mem(p)   ((Void_t*)((char*)(p) + 2*SIZE_SZ))
#define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))

/* pad a request up to a usable chunk size, never below MINSIZE */
#define request2size(req) \
 (((long)((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) < \
  (long)(MINSIZE + MALLOC_ALIGN_MASK)) ? MINSIZE : \
   (((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) & ~(MALLOC_ALIGN_MASK)))

/* Check if m has acceptable alignment */
#define aligned_OK(m)    (((unsigned long)((m)) & (MALLOC_ALIGN_MASK)) == 0)
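
/*
 * Worked example (illustrative only, assuming a 32-bit build where
 * INTERNAL_SIZE_T is 4 bytes, so SIZE_SZ == 4, MALLOC_ALIGNMENT == 8 and
 * MINSIZE == sizeof(struct malloc_chunk) == 16):
 *
 *   request2size(1)  -> (1 + 4 + 7) = 12 < MINSIZE + 7, so MINSIZE = 16
 *   request2size(13) -> (13 + 4 + 7) & ~7 = 24
 *
 * i.e. every request pays SIZE_SZ bytes of header overhead and is rounded
 * up to the allocator's alignment.
 */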

/* Physical chunk operations and the flag bits stored in the size field. */

/* size field is or'ed with PREV_INUSE when previous adjacent chunk in use */
#define PREV_INUSE 0x1

/* size field is or'ed with IS_MMAPPED if the chunk was obtained with mmap() */
#define IS_MMAPPED 0x2

/* Bits to mask off when extracting size */
#define SIZE_BITS (PREV_INUSE|IS_MMAPPED)

/* Ptr to next physical malloc_chunk. */
#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) ))

/* Ptr to previous physical malloc_chunk */
#define prev_chunk(p)\
   ((mchunkptr)( ((char*)(p)) - ((p)->prev_size) ))

/* Treat space at ptr + offset as a chunk */
#define chunk_at_offset(p, s)  ((mchunkptr)(((char*)(p)) + (s)))

/* extract p's inuse bit (stored in the chunk that follows it) */
#define inuse(p)\
((((mchunkptr)(((char*)(p))+((p)->size & ~PREV_INUSE)))->size) & PREV_INUSE)

/* extract inuse bit of previous chunk */
#define prev_inuse(p)  ((p)->size & PREV_INUSE)

/* check for mmap()'ed chunk */
#define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)

/* set/clear chunk as in use without otherwise disturbing */
#define set_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size |= PREV_INUSE

#define clear_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size &= ~(PREV_INUSE)

/* check/set/clear inuse bits in known places */
#define inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size & PREV_INUSE)

#define set_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size |= PREV_INUSE)

#define clear_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size &= ~(PREV_INUSE))

/* Get size, ignoring use bits */
#define chunksize(p)          ((p)->size & ~(SIZE_BITS))

/* Set size at head, without disturbing its use bit */
#define set_head_size(p, s)   ((p)->size = (((p)->size & PREV_INUSE) | (s)))

/* Set size/use ignoring previous bits in header */
#define set_head(p, s)        ((p)->size = (s))

/* Set size at footer (only when chunk is not in use) */
#define set_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_size = (s))
/*
 * Bins: NAV doubly linked free lists kept in av_[]. Slot av_[1] doubles as
 * the binblocks bitmap, bin 1 holds the last split-off remainder, and 'top'
 * initially points at a harmless self-linked bin until the heap is extended.
 */
#define NAV             128   /* number of bins */

typedef struct malloc_chunk* mbinptr;

/* access macros */
#define bin_at(i)      ((mbinptr)((char*)&(av_[2*(i) + 2]) - 2*SIZE_SZ))
#define next_bin(b)    ((mbinptr)((char*)(b) + 2 * sizeof(mbinptr)))
#define prev_bin(b)    ((mbinptr)((char*)(b) - 2 * sizeof(mbinptr)))

#define top            (av_[2])          /* The topmost chunk */
#define last_remainder (bin_at(1))       /* remainder from last split */

#define initial_top    ((mchunkptr)(bin_at(0)))

/* Helper macro to initialize bins: each bin starts out self-linked. */
#define IAV(i)  bin_at(i), bin_at(i)

static mbinptr av_[NAV * 2 + 2] = {
 NULL, NULL,
 IAV(0),   IAV(1),   IAV(2),   IAV(3),   IAV(4),   IAV(5),   IAV(6),   IAV(7),
 IAV(8),   IAV(9),   IAV(10),  IAV(11),  IAV(12),  IAV(13),  IAV(14),  IAV(15),
 IAV(16),  IAV(17),  IAV(18),  IAV(19),  IAV(20),  IAV(21),  IAV(22),  IAV(23),
 IAV(24),  IAV(25),  IAV(26),  IAV(27),  IAV(28),  IAV(29),  IAV(30),  IAV(31),
 IAV(32),  IAV(33),  IAV(34),  IAV(35),  IAV(36),  IAV(37),  IAV(38),  IAV(39),
 IAV(40),  IAV(41),  IAV(42),  IAV(43),  IAV(44),  IAV(45),  IAV(46),  IAV(47),
 IAV(48),  IAV(49),  IAV(50),  IAV(51),  IAV(52),  IAV(53),  IAV(54),  IAV(55),
 IAV(56),  IAV(57),  IAV(58),  IAV(59),  IAV(60),  IAV(61),  IAV(62),  IAV(63),
 IAV(64),  IAV(65),  IAV(66),  IAV(67),  IAV(68),  IAV(69),  IAV(70),  IAV(71),
 IAV(72),  IAV(73),  IAV(74),  IAV(75),  IAV(76),  IAV(77),  IAV(78),  IAV(79),
 IAV(80),  IAV(81),  IAV(82),  IAV(83),  IAV(84),  IAV(85),  IAV(86),  IAV(87),
 IAV(88),  IAV(89),  IAV(90),  IAV(91),  IAV(92),  IAV(93),  IAV(94),  IAV(95),
 IAV(96),  IAV(97),  IAV(98),  IAV(99),  IAV(100), IAV(101), IAV(102), IAV(103),
 IAV(104), IAV(105), IAV(106), IAV(107), IAV(108), IAV(109), IAV(110), IAV(111),
 IAV(112), IAV(113), IAV(114), IAV(115), IAV(116), IAV(117), IAV(118), IAV(119),
 IAV(120), IAV(121), IAV(122), IAV(123), IAV(124), IAV(125), IAV(126), IAV(127)
};

#ifdef CONFIG_NEEDS_MANUAL_RELOC
/* Fix up the self-referencing bin pointers after U-Boot relocates itself. */
static void malloc_bin_reloc(void)
{
	mbinptr *p = &av_[2];
	size_t i;

	for (i = 2; i < ARRAY_SIZE(av_); ++i, ++p)
		*p = (mbinptr)((ulong)*p + gd->reloc_off);
}
#else
static inline void malloc_bin_reloc(void) {}
#endif

ulong mem_malloc_start = 0;
ulong mem_malloc_end = 0;
ulong mem_malloc_brk = 0;

/*
 * U-Boot's replacement for sbrk(): move the break pointer within the region
 * handed to mem_malloc_init() below, failing with MORECORE_FAILURE if the
 * new break would leave that region.
 */
void *sbrk(ptrdiff_t increment)
{
	ulong old = mem_malloc_brk;
	ulong new = old + increment;

	/*
	 * When memory is given back, clear it out, since MORECORE_CLEARS
	 * is assumed by calloc().
	 */
	if (increment < 0)
		memset((void *)new, 0, -increment);

	if ((new < mem_malloc_start) || (new > mem_malloc_end))
		return (void *)MORECORE_FAILURE;

	mem_malloc_brk = new;

	return (void *)old;
}

/* Record [start, start + size) as the malloc arena and reset the break. */
void mem_malloc_init(ulong start, ulong size)
{
	mem_malloc_start = start;
	mem_malloc_end = start + size;
	mem_malloc_brk = start;

	debug("using memory %#lx-%#lx for malloc()\n", mem_malloc_start,
	      mem_malloc_end);
#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
	memset((void *)mem_malloc_start, 0x0, size);
#endif
	malloc_bin_reloc();
}
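
/*
 * Illustrative call sequence only; the address, size and surrounding board
 * code are hypothetical and not taken from this file. A board would normally
 * hand a region to the allocator once, before the first malloc() call:
 *
 *	mem_malloc_init(CONFIG_SYS_SDRAM_BASE + 0x100000, 0x400000);
 *	void *p = malloc(64);	// now served from that region via sbrk()
 */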

/* access the head and tail of a bin's free list */
#define first(b) ((b)->fd)
#define last(b)  ((b)->bk)

/*
 * Map a chunk size to a bin index: small sizes map linearly in 8-byte steps,
 * larger sizes map roughly logarithmically.
 */
#define bin_index(sz) \
(((((unsigned long)(sz)) >> 9) == 0) ? (((unsigned long)(sz)) >> 3): \
 ((((unsigned long)(sz)) >> 9) <= 4) ? 56 + (((unsigned long)(sz)) >> 6): \
 ((((unsigned long)(sz)) >> 9) <= 20) ? 91 + (((unsigned long)(sz)) >> 9): \
 ((((unsigned long)(sz)) >> 9) <= 84) ? 110 + (((unsigned long)(sz)) >> 12): \
 ((((unsigned long)(sz)) >> 9) <= 340) ? 119 + (((unsigned long)(sz)) >> 15): \
 ((((unsigned long)(sz)) >> 9) <= 1364) ? 124 + (((unsigned long)(sz)) >> 18): \
 126)
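
/*
 * Worked examples, derived from the macro above (illustrative only):
 * a 24-byte chunk has 24 >> 9 == 0, so it lands in small bin 24 >> 3 = 3;
 * a 600-byte chunk has 600 >> 9 == 1 (<= 4), giving bin 56 + (600 >> 6) = 65;
 * anything with size >> 9 above 1364 falls into the catch-all bin 126.
 */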

/* Small bins and the binblocks bitmap used to skip groups of empty bins. */

#define MAX_SMALLBIN         63
#define MAX_SMALLBIN_SIZE   512
#define SMALLBIN_WIDTH        8

#define smallbin_index(sz)  (((unsigned long)(sz)) >> 3)

/* Requests are 'small' if both the bin and the next one hold small chunks */
#define is_small_request(nb) (nb < MAX_SMALLBIN_SIZE - SMALLBIN_WIDTH)

/* bins are grouped in blocks of BINBLOCKWIDTH so empty groups can be skipped */
#define BINBLOCKWIDTH     4   /* bins per block */

#define binblocks_r     ((INTERNAL_SIZE_T)av_[1]) /* bitvector of nonempty blocks */
#define binblocks_w     (av_[1])

/* bin <-> block macros */
#define idx2binblock(ix)    ((unsigned)1 << (ix / BINBLOCKWIDTH))
#define mark_binblock(ii)   (binblocks_w = (mbinptr)(binblocks_r | idx2binblock(ii)))
#define clear_binblock(ii)  (binblocks_w = (mbinptr)(binblocks_r & ~(idx2binblock(ii))))

/* Tunable parameters */
static unsigned long trim_threshold   = DEFAULT_TRIM_THRESHOLD;
static unsigned long top_pad          = DEFAULT_TOP_PAD;
static unsigned int  n_mmaps_max      = DEFAULT_MMAP_MAX;
static unsigned long mmap_threshold   = DEFAULT_MMAP_THRESHOLD;

/* The first value returned from sbrk */
static char* sbrk_base = (char*)(-1);

/* The maximum memory obtained from system via sbrk */
static unsigned long max_sbrked_mem = 0;

/* The maximum via either sbrk or mmap */
static unsigned long max_total_mem = 0;

/* internal working copy of mallinfo */
static struct mallinfo current_mallinfo = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

/* The total memory obtained from system via sbrk */
#define sbrked_mem  (current_mallinfo.arena)

/* Tracking mmaps */
#ifdef DEBUG
static unsigned int n_mmaps = 0;
#endif
static unsigned long mmapped_mem = 0;
#if HAVE_MMAP
static unsigned int max_n_mmaps = 0;
static unsigned long max_mmapped_mem = 0;
#endif

#ifdef DEBUG

/* Debugging support: consistency checks used when DEBUG is defined. */

#if __STD_C
static void do_check_chunk(mchunkptr p)
#else
static void do_check_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;

  /* No checkable chunk is mmapped */
  assert(!chunk_is_mmapped(p));

  /* Check for legal address */
  assert((char*)p >= sbrk_base);
  if (p != top)
    assert((char*)p + sz <= (char*)top);
  else
    assert((char*)p + sz <= sbrk_base + sbrked_mem);

}

#if __STD_C
static void do_check_free_chunk(mchunkptr p)
#else
static void do_check_free_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
  mchunkptr next = chunk_at_offset(p, sz);

  do_check_chunk(p);

  /* Check whether it claims to be free */
  assert(!inuse(p));

  /* Unless a special marker, must have OK fields */
  if ((long)sz >= (long)MINSIZE)
  {
    assert((sz & MALLOC_ALIGN_MASK) == 0);
    assert(aligned_OK(chunk2mem(p)));
    /* ... matching footer field */
    assert(next->prev_size == sz);
    /* ... and is fully consolidated */
    assert(prev_inuse(p));
    assert (next == top || inuse(next));

    /* ... and has minimally sane links */
    assert(p->fd->bk == p);
    assert(p->bk->fd == p);
  }
  else /* markers are always of size SIZE_SZ */
    assert(sz == SIZE_SZ);
}

#if __STD_C
static void do_check_inuse_chunk(mchunkptr p)
#else
static void do_check_inuse_chunk(p) mchunkptr p;
#endif
{
  mchunkptr next = next_chunk(p);
  do_check_chunk(p);

  /* Check whether it claims to be in use */
  assert(inuse(p));

  /* ... and is surrounded by OK chunks */
  if (!prev_inuse(p))
  {
    mchunkptr prv = prev_chunk(p);
    assert(next_chunk(prv) == p);
    do_check_free_chunk(prv);
  }
  if (next == top)
  {
    assert(prev_inuse(next));
    assert(chunksize(next) >= MINSIZE);
  }
  else if (!inuse(next))
    do_check_free_chunk(next);

}

#if __STD_C
static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
#else
static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
  long room = sz - s;

  do_check_inuse_chunk(p);

  /* Legal size ... */
  assert((long)sz >= (long)MINSIZE);
  assert((sz & MALLOC_ALIGN_MASK) == 0);
  assert(room >= 0);
  assert(room < (long)MINSIZE);

  /* ... and alignment */
  assert(aligned_OK(chunk2mem(p)));

  /* ... and was allocated at the front of an available chunk */
  assert(prev_inuse(p));

}

#define check_free_chunk(P)  do_check_free_chunk(P)
#define check_inuse_chunk(P) do_check_inuse_chunk(P)
#define check_chunk(P) do_check_chunk(P)
#define check_malloced_chunk(P,N) do_check_malloced_chunk(P,N)
#else
#define check_free_chunk(P)
#define check_inuse_chunk(P)
#define check_chunk(P)
#define check_malloced_chunk(P,N)
#endif

/*
 * frontlink() places a free chunk into the proper bin (size-sorted for the
 * larger bins), unlink() takes a chunk off its list, and the last_remainder
 * helpers manage the special remainder bin.
 */

#define frontlink(P, S, IDX, BK, FD) \
{ \
  if (S < MAX_SMALLBIN_SIZE) \
  { \
    IDX = smallbin_index(S); \
    mark_binblock(IDX); \
    BK = bin_at(IDX); \
    FD = BK->fd; \
    P->bk = BK; \
    P->fd = FD; \
    FD->bk = BK->fd = P; \
  } \
  else \
  { \
    IDX = bin_index(S); \
    BK = bin_at(IDX); \
    FD = BK->fd; \
    if (FD == BK) mark_binblock(IDX); \
    else \
    { \
      while (FD != BK && S < chunksize(FD)) FD = FD->fd; \
      BK = FD->bk; \
    } \
    P->bk = BK; \
    P->fd = FD; \
    FD->bk = BK->fd = P; \
  } \
}

/* take a chunk off a list */
#define unlink(P, BK, FD) \
{ \
  BK = P->bk; \
  FD = P->fd; \
  FD->bk = BK; \
  BK->fd = FD; \
} \

/* Place P as the last remainder */
#define link_last_remainder(P) \
{ \
  last_remainder->fd = last_remainder->bk = P; \
  P->fd = P->bk = last_remainder; \
}

/* Clear the last_remainder bin */
#define clear_last_remainder \
  (last_remainder->fd = last_remainder->bk = last_remainder)

#if HAVE_MMAP

/* Routines dealing with mmap(); only compiled in when HAVE_MMAP is set. */

#if __STD_C
static mchunkptr mmap_chunk(size_t size)
#else
static mchunkptr mmap_chunk(size) size_t size;
#endif
{
  size_t page_mask = malloc_getpagesize - 1;
  mchunkptr p;

#ifndef MAP_ANONYMOUS
  static int fd = -1;
#endif

  if(n_mmaps >= n_mmaps_max) return 0; /* too many regions */

  /* mmapped chunks carry one extra SIZE_SZ of overhead, since there is no
     following chunk whose prev_size field could be borrowed. */
  size = (size + SIZE_SZ + page_mask) & ~page_mask;

#ifdef MAP_ANONYMOUS
  p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE,
		      MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
#else /* !MAP_ANONYMOUS */
  if (fd < 0)
  {
    fd = open("/dev/zero", O_RDWR);
    if(fd < 0) return 0;
  }
  p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE, fd, 0);
#endif

  if(p == (mchunkptr)-1) return 0;

  n_mmaps++;
  if (n_mmaps > max_n_mmaps) max_n_mmaps = n_mmaps;

  /* The returned mapping must satisfy the allocator's alignment. */
  assert(aligned_OK(chunk2mem(p)));

  /* The offset to the start of the mmapped region is stored in prev_size;
     it is normally zero but can change in memalign(). */
  p->prev_size = 0;
  set_head(p, size|IS_MMAPPED);

  mmapped_mem += size;
  if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
    max_mmapped_mem = mmapped_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;
  return p;
}

#if __STD_C
static void munmap_chunk(mchunkptr p)
#else
static void munmap_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T size = chunksize(p);
  int ret;

  assert (chunk_is_mmapped(p));
  assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
  assert((n_mmaps > 0));
  assert(((p->prev_size + size) & (malloc_getpagesize-1)) == 0);

  n_mmaps--;
  mmapped_mem -= (size + p->prev_size);

  ret = munmap((char *)p - p->prev_size, size + p->prev_size);

  /* munmap returns non-zero on failure */
  assert(ret == 0);
}

#if HAVE_MREMAP

#if __STD_C
static mchunkptr mremap_chunk(mchunkptr p, size_t new_size)
#else
static mchunkptr mremap_chunk(p, new_size) mchunkptr p; size_t new_size;
#endif
{
  size_t page_mask = malloc_getpagesize - 1;
  INTERNAL_SIZE_T offset = p->prev_size;
  INTERNAL_SIZE_T size = chunksize(p);
  char *cp;

  assert (chunk_is_mmapped(p));
  assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
  assert((n_mmaps > 0));
  assert(((size + offset) & (malloc_getpagesize-1)) == 0);

  /* Note the extra SIZE_SZ overhead, as in mmap_chunk(). */
  new_size = (new_size + offset + SIZE_SZ + page_mask) & ~page_mask;

  cp = (char *)mremap((char *)p - offset, size + offset, new_size, 1);

  if (cp == (char *)-1) return 0;

  p = (mchunkptr)(cp + offset);

  assert(aligned_OK(chunk2mem(p)));

  assert((p->prev_size == offset));
  set_head(p, (new_size - offset)|IS_MMAPPED);

  mmapped_mem -= size + offset;
  mmapped_mem += new_size;
  if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
    max_mmapped_mem = mmapped_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;
  return p;
}

#endif /* HAVE_MREMAP */

#endif /* HAVE_MMAP */

/*
 * Extend the top chunk: get at least nb more bytes from the system via
 * MORECORE (sbrk), splice the new space onto 'top', and turn any unusable
 * gap left at the old top into a freeable chunk or fencepost markers.
 */

#if __STD_C
static void malloc_extend_top(INTERNAL_SIZE_T nb)
#else
static void malloc_extend_top(nb) INTERNAL_SIZE_T nb;
#endif
{
  char*     brk;              /* return value from sbrk */
  INTERNAL_SIZE_T front_misalign; /* unusable bytes at front of sbrked space */
  INTERNAL_SIZE_T correction; /* bytes for 2nd sbrk call */
  char*     new_brk;          /* return of 2nd sbrk call */
  INTERNAL_SIZE_T top_size;   /* new size of top chunk */

  mchunkptr old_top     = top;  /* Record state of old top */
  INTERNAL_SIZE_T old_top_size = chunksize(old_top);
  char*     old_end      = (char*)(chunk_at_offset(old_top, old_top_size));

  /* Pad request with top_pad plus minimal overhead */
  INTERNAL_SIZE_T    sbrk_size     = nb + top_pad + MINSIZE;
  unsigned long pagesz    = malloc_getpagesize;

  /* If not the first time through, round to preserve page boundary */
  if (sbrk_base != (char*)(-1))
    sbrk_size = (sbrk_size + (pagesz - 1)) & ~(pagesz - 1);

  brk = (char*)(MORECORE (sbrk_size));

  /* Fail if sbrk failed or if a foreign sbrk call shrank the space */
  if (brk == (char*)(MORECORE_FAILURE) ||
      (brk < old_end && old_top != initial_top))
    return;

  sbrked_mem += sbrk_size;

  if (brk == old_end) /* can just add bytes to current top */
  {
    top_size = sbrk_size + old_top_size;
    set_head(top, top_size | PREV_INUSE);
  }
  else
  {
    if (sbrk_base == (char*)(-1))  /* First time through. Record base */
      sbrk_base = brk;
    else  /* Someone else called sbrk().  Count those bytes as sbrked_mem. */
      sbrked_mem += brk - (char*)old_end;

    /* Guarantee alignment of the first chunk carved out of the new space */
    front_misalign = (unsigned long)chunk2mem(brk) & MALLOC_ALIGN_MASK;
    if (front_misalign > 0)
    {
      correction = (MALLOC_ALIGNMENT) - front_misalign;
      brk += correction;
    }
    else
      correction = 0;

    /* Guarantee the next brk will be at a page boundary */
    correction += ((((unsigned long)(brk + sbrk_size))+(pagesz-1)) &
		   ~(pagesz - 1)) - ((unsigned long)(brk + sbrk_size));

    /* Allocate correction */
    new_brk = (char*)(MORECORE (correction));
    if (new_brk == (char*)(MORECORE_FAILURE)) return;

    sbrked_mem += correction;

    top = (mchunkptr)brk;
    top_size = new_brk - brk + correction;
    set_head(top, top_size | PREV_INUSE);

    if (old_top != initial_top)
    {
      /*
       * There must have been an intervening foreign sbrk call. A double
       * fencepost is necessary to prevent consolidation with the space
       * lying between the old top and the new one.
       */
      if (old_top_size < MINSIZE)  /* not enough room even for fenceposts */
      {
	set_head(top, PREV_INUSE); /* will force null return from malloc */
	return;
      }

      /* Shrink old_top to insert fenceposts, keeping size a multiple of
	 MALLOC_ALIGNMENT. */
      old_top_size = (old_top_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;
      set_head_size(old_top, old_top_size);
      chunk_at_offset(old_top, old_top_size          )->size =
	SIZE_SZ|PREV_INUSE;
      chunk_at_offset(old_top, old_top_size + SIZE_SZ)->size =
	SIZE_SZ|PREV_INUSE;
      /* If possible, release the rest. */
      if (old_top_size >= MINSIZE)
	fREe(chunk2mem(old_top));
    }
  }

  if ((unsigned long)sbrked_mem > (unsigned long)max_sbrked_mem)
    max_sbrked_mem = sbrked_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;

  /* We always land on a page boundary */
  assert(((unsigned long)((char*)top + top_size) & (pagesz - 1)) == 0);
}

/*
 * malloc: try, in order, an exact-fit bin, the last split-off remainder,
 * a best-fit scan of the larger bins, and finally the top chunk (extending
 * it, or using mmap for very large requests, when it is too small).
 */

#if __STD_C
Void_t* mALLOc(size_t bytes)
#else
Void_t* mALLOc(bytes) size_t bytes;
#endif
{
  mchunkptr victim;                  /* inspected/selected chunk */
  INTERNAL_SIZE_T victim_size;       /* its size */
  int       idx;                     /* index for bin traversal */
  mbinptr   bin;                     /* associated bin */
  mchunkptr remainder;               /* remainder from a split */
  long      remainder_size;          /* its size */
  int       remainder_index;         /* its bin index */
  unsigned long block;               /* block traverser bit */
  int       startidx;                /* first bin of a traversed block */
  mchunkptr fwd;                     /* misc temp for linking */
  mchunkptr bck;                     /* misc temp for linking */
  mbinptr q;                         /* misc temp */

  INTERNAL_SIZE_T nb;

#if CONFIG_VAL(SYS_MALLOC_F_LEN)
  if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT))
    return malloc_simple(bytes);
#endif

  /* check if mem_malloc_init() was run */
  if ((mem_malloc_start == 0) && (mem_malloc_end == 0)) {
    /* No - this means malloc is not usable yet */
    return NULL;
  }

  if ((long)bytes < 0) return NULL;

  nb = request2size(bytes);  /* padded request size */

  /* Check for exact match in a bin */

  if (is_small_request(nb))  /* Faster version for small requests */
  {
    idx = smallbin_index(nb);

    /* No traversal or size check necessary for small bins.  */

    q = bin_at(idx);
    victim = last(q);

    /* Also scan the next one, since it would have a remainder < MINSIZE */
    if (victim == q)
    {
      q = next_bin(q);
      victim = last(q);
    }
    if (victim != q)
    {
      victim_size = chunksize(victim);
      unlink(victim, bck, fwd);
      set_inuse_bit_at_offset(victim, victim_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    idx += 2; /* Set for bin scan below. We've already scanned 2 bins. */

  }
  else
  {
    idx = bin_index(nb);
    bin = bin_at(idx);

    for (victim = last(bin); victim != bin; victim = victim->bk)
    {
      victim_size = chunksize(victim);
      remainder_size = victim_size - nb;

      if (remainder_size >= (long)MINSIZE) /* too big */
      {
	--idx; /* adjust to rescan below after checking last remainder */
	break;
      }

      else if (remainder_size >= 0) /* exact fit */
      {
	unlink(victim, bck, fwd);
	set_inuse_bit_at_offset(victim, victim_size);
	check_malloced_chunk(victim, nb);
	return chunk2mem(victim);
      }
    }

    ++idx;

  }

  /* Try to use the last split-off remainder */

  if ( (victim = last_remainder->fd) != last_remainder)
  {
    victim_size = chunksize(victim);
    remainder_size = victim_size - nb;

    if (remainder_size >= (long)MINSIZE) /* re-split */
    {
      remainder = chunk_at_offset(victim, nb);
      set_head(victim, nb | PREV_INUSE);
      link_last_remainder(remainder);
      set_head(remainder, remainder_size | PREV_INUSE);
      set_foot(remainder, remainder_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    clear_last_remainder;

    if (remainder_size >= 0)  /* exhaust */
    {
      set_inuse_bit_at_offset(victim, victim_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    /* Else place in bin */

    frontlink(victim, victim_size, remainder_index, bck, fwd);
  }

  /*
     If there are any possibly nonempty big-enough blocks,
     search for best fitting chunk by scanning bins in blockwidth units.
  */

  if ( (block = idx2binblock(idx)) <= binblocks_r)
  {

    /* Get to the first marked block */

    if ( (block & binblocks_r) == 0)
    {
      /* force to an even block boundary */
      idx = (idx & ~(BINBLOCKWIDTH - 1)) + BINBLOCKWIDTH;
      block <<= 1;
      while ((block & binblocks_r) == 0)
      {
	idx += BINBLOCKWIDTH;
	block <<= 1;
      }
    }

    /* For each possibly nonempty block ... */
    for (;;)
    {
      startidx = idx;          /* (track incomplete blocks) */
      q = bin = bin_at(idx);

      /* For each bin in this block ... */
      do
      {
	/* Find and use first big enough chunk ... */

	for (victim = last(bin); victim != bin; victim = victim->bk)
	{
	  victim_size = chunksize(victim);
	  remainder_size = victim_size - nb;

	  if (remainder_size >= (long)MINSIZE) /* split */
	  {
	    remainder = chunk_at_offset(victim, nb);
	    set_head(victim, nb | PREV_INUSE);
	    unlink(victim, bck, fwd);
	    link_last_remainder(remainder);
	    set_head(remainder, remainder_size | PREV_INUSE);
	    set_foot(remainder, remainder_size);
	    check_malloced_chunk(victim, nb);
	    return chunk2mem(victim);
	  }

	  else if (remainder_size >= 0)  /* take */
	  {
	    set_inuse_bit_at_offset(victim, victim_size);
	    unlink(victim, bck, fwd);
	    check_malloced_chunk(victim, nb);
	    return chunk2mem(victim);
	  }

	}

	bin = next_bin(bin);

      } while ((++idx & (BINBLOCKWIDTH - 1)) != 0);

      /* Clear out the block bit. */

      do   /* Possibly backtrack to try to clear a partial block */
      {
	if ((startidx & (BINBLOCKWIDTH - 1)) == 0)
	{
	  av_[1] = (mbinptr)(binblocks_r & ~block);
	  break;
	}
	--startidx;
	q = prev_bin(q);
      } while (first(q) == q);

      /* Get to the next possibly nonempty block */

      if ( (block <<= 1) <= binblocks_r && (block != 0) )
      {
	while ((block & binblocks_r) == 0)
	{
	  idx += BINBLOCKWIDTH;
	  block <<= 1;
	}
      }
      else
	break;
    }
  }

  /* Try to use top chunk */

  /* Require that there be a remainder, ensuring top always exists  */
  if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
  {

#if HAVE_MMAP
    /* If big and would otherwise need to extend, try to use mmap instead */
    if ((unsigned long)nb >= (unsigned long)mmap_threshold &&
	(victim = mmap_chunk(nb)))
      return chunk2mem(victim);
#endif

    /* Try to extend */
    malloc_extend_top(nb);
    if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
      return NULL; /* propagate failure */
  }

  victim = top;
  set_head(victim, nb | PREV_INUSE);
  top = chunk_at_offset(victim, nb);
  set_head(top, remainder_size | PREV_INUSE);
  check_malloced_chunk(victim, nb);
  return chunk2mem(victim);

}

/*
 * free: consolidate the chunk with free neighbours, either merging it into
 * the top chunk (and trimming if the top grows past trim_threshold) or
 * placing the consolidated chunk back into a bin.
 */

#if __STD_C
void fREe(Void_t* mem)
#else
void fREe(mem) Void_t* mem;
#endif
{
  mchunkptr p;         /* chunk corresponding to mem */
  INTERNAL_SIZE_T hd;  /* its head field */
  INTERNAL_SIZE_T sz;  /* its size */
  int       idx;       /* its bin index */
  mchunkptr next;      /* next contiguous chunk */
  INTERNAL_SIZE_T nextsz; /* its size */
  INTERNAL_SIZE_T prevsz; /* size of previous contiguous chunk */
  mchunkptr bck;       /* misc temp for linking */
  mchunkptr fwd;       /* misc temp for linking */
  int       islr;      /* track whether merging with last_remainder */

#if CONFIG_VAL(SYS_MALLOC_F_LEN)
  /* free() is a no-op - all the memory will be freed on relocation */
  if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT))
    return;
#endif

  if (mem == NULL)                              /* free(0) has no effect */
    return;

  p = mem2chunk(mem);
  hd = p->size;

#if HAVE_MMAP
  if (hd & IS_MMAPPED)                          /* release mmapped memory. */
  {
    munmap_chunk(p);
    return;
  }
#endif

  check_inuse_chunk(p);

  sz = hd & ~PREV_INUSE;
  next = chunk_at_offset(p, sz);
  nextsz = chunksize(next);

  if (next == top)                            /* merge with top */
  {
    sz += nextsz;

    if (!(hd & PREV_INUSE))                    /* consolidate backward */
    {
      prevsz = p->prev_size;
      p = chunk_at_offset(p, -((long) prevsz));
      sz += prevsz;
      unlink(p, bck, fwd);
    }

    set_head(p, sz | PREV_INUSE);
    top = p;
    if ((unsigned long)(sz) >= (unsigned long)trim_threshold)
      malloc_trim(top_pad);
    return;
  }

  set_head(next, nextsz);                    /* clear inuse bit */

  islr = 0;

  if (!(hd & PREV_INUSE))                    /* consolidate backward */
  {
    prevsz = p->prev_size;
    p = chunk_at_offset(p, -((long) prevsz));
    sz += prevsz;

    if (p->fd == last_remainder)             /* keep as last_remainder */
      islr = 1;
    else
      unlink(p, bck, fwd);
  }

  if (!(inuse_bit_at_offset(next, nextsz)))   /* consolidate forward */
  {
    sz += nextsz;

    if (!islr && next->fd == last_remainder)  /* re-insert last_remainder */
    {
      islr = 1;
      link_last_remainder(p);
    }
    else
      unlink(next, bck, fwd);
  }

  set_head(p, sz | PREV_INUSE);
  set_foot(p, sz);
  if (!islr)
    frontlink(p, sz, idx, bck, fwd);
}

/*
 * realloc: try to expand the chunk in place (into the top chunk, the next
 * free chunk, or by shifting back into a preceding free chunk) before
 * falling back to malloc-copy-free; oversized results are split and the
 * remainder is freed.
 */

#if __STD_C
Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
#else
Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
#endif
{
  INTERNAL_SIZE_T    nb;      /* padded request size */

  mchunkptr oldp;             /* chunk corresponding to oldmem */
  INTERNAL_SIZE_T    oldsize; /* its size */

  mchunkptr newp;             /* chunk to return */
  INTERNAL_SIZE_T    newsize; /* its size */
  Void_t*   newmem;           /* corresponding user mem */

  mchunkptr next;             /* next contiguous chunk after oldp */
  INTERNAL_SIZE_T  nextsize;  /* its size */

  mchunkptr prev;             /* previous contiguous chunk */
  INTERNAL_SIZE_T  prevsize;  /* its size */

  mchunkptr remainder;        /* holds split off extra space from newp */
  INTERNAL_SIZE_T  remainder_size; /* its size */

  mchunkptr bck;              /* misc temp for linking */
  mchunkptr fwd;              /* misc temp for linking */

#ifdef REALLOC_ZERO_BYTES_FREES
  if (!bytes) {
    fREe(oldmem);
    return NULL;
  }
#endif

  if ((long)bytes < 0) return NULL;

  /* realloc of null is supposed to be same as malloc */
  if (oldmem == NULL) return mALLOc(bytes);

#if CONFIG_VAL(SYS_MALLOC_F_LEN)
  if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
    /* This is harder to support and should not be needed */
    panic("pre-reloc realloc() is not supported");
  }
#endif

  newp    = oldp    = mem2chunk(oldmem);
  newsize = oldsize = chunksize(oldp);

  nb = request2size(bytes);

#if HAVE_MMAP
  if (chunk_is_mmapped(oldp))
  {
#if HAVE_MREMAP
    newp = mremap_chunk(oldp, nb);
    if(newp) return chunk2mem(newp);
#endif
    /* Note the extra SIZE_SZ overhead. */
    if(oldsize - SIZE_SZ >= nb) return oldmem; /* do nothing */
    /* Must alloc, copy, free. */
    newmem = mALLOc(bytes);
    if (!newmem)
      return NULL; /* propagate failure */
    MALLOC_COPY(newmem, oldmem, oldsize - 2*SIZE_SZ);
    munmap_chunk(oldp);
    return newmem;
  }
#endif

  check_inuse_chunk(oldp);

  if ((long)(oldsize) < (long)(nb))
  {

    /* Try expanding forward */

    next = chunk_at_offset(oldp, oldsize);
    if (next == top || !inuse(next))
    {
      nextsize = chunksize(next);

      /* Forward into top only if a remainder */
      if (next == top)
      {
	if ((long)(nextsize + newsize) >= (long)(nb + MINSIZE))
	{
	  newsize += nextsize;
	  top = chunk_at_offset(oldp, nb);
	  set_head(top, (newsize - nb) | PREV_INUSE);
	  set_head_size(oldp, nb);
	  return chunk2mem(oldp);
	}
      }

      /* Forward into next chunk */
      else if (((long)(nextsize + newsize) >= (long)(nb)))
      {
	unlink(next, bck, fwd);
	newsize  += nextsize;
	goto split;
      }
    }
    else
    {
      next = NULL;
      nextsize = 0;
    }

    /* Try shifting backwards. */

    if (!prev_inuse(oldp))
    {
      prev = prev_chunk(oldp);
      prevsize = chunksize(prev);

      /* try forward + backward first to save a later consolidation */

      if (next != NULL)
      {
	/* into top */
	if (next == top)
	{
	  if ((long)(nextsize + prevsize + newsize) >= (long)(nb + MINSIZE))
	  {
	    unlink(prev, bck, fwd);
	    newp = prev;
	    newsize += prevsize + nextsize;
	    newmem = chunk2mem(newp);
	    MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	    top = chunk_at_offset(newp, nb);
	    set_head(top, (newsize - nb) | PREV_INUSE);
	    set_head_size(newp, nb);
	    return newmem;
	  }
	}

	/* into next chunk */
	else if (((long)(nextsize + prevsize + newsize) >= (long)(nb)))
	{
	  unlink(next, bck, fwd);
	  unlink(prev, bck, fwd);
	  newp = prev;
	  newsize += nextsize + prevsize;
	  newmem = chunk2mem(newp);
	  MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	  goto split;
	}
      }

      /* backward only */
      if (prev != NULL && (long)(prevsize + newsize) >= (long)nb)
      {
	unlink(prev, bck, fwd);
	newp = prev;
	newsize += prevsize;
	newmem = chunk2mem(newp);
	MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	goto split;
      }
    }

    /* Must allocate */

    newmem = mALLOc (bytes);

    if (newmem == NULL)  /* propagate failure */
      return NULL;

    /* Avoid copy if newp is next chunk after oldp. */
    /* (This can only happen when new chunk is sbrk'ed.) */

    if ( (newp = mem2chunk(newmem)) == next_chunk(oldp))
    {
      newsize += chunksize(newp);
      newp = oldp;
      goto split;
    }

    /* Otherwise copy, free, and exit */
    MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
    fREe(oldmem);
    return newmem;
  }

 split:  /* split off extra room in old or expanded chunk */

  if (newsize - nb >= MINSIZE) /* split off remainder */
  {
    remainder = chunk_at_offset(newp, nb);
    remainder_size = newsize - nb;
    set_head_size(newp, nb);
    set_head(remainder, remainder_size | PREV_INUSE);
    set_inuse_bit_at_offset(remainder, remainder_size);
    fREe(chunk2mem(remainder)); /* let free() deal with it */
  }
  else
  {
    set_head_size(newp, newsize);
    set_inuse_bit_at_offset(newp, newsize);
  }

  check_inuse_chunk(newp);
  return chunk2mem(newp);
}

/*
 * memalign: over-allocate, then carve out an aligned chunk, giving back any
 * leading and trailing slack as separate free chunks.
 */

#if __STD_C
Void_t* mEMALIGn(size_t alignment, size_t bytes)
#else
Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
#endif
{
  INTERNAL_SIZE_T    nb;      /* padded  request size */
  char*     m;                /* memory returned by malloc call */
  mchunkptr p;                /* corresponding chunk */
  char*     brk;              /* alignment point within p */
  mchunkptr newp;             /* chunk to return */
  INTERNAL_SIZE_T  newsize;   /* its size */
  INTERNAL_SIZE_T  leadsize;  /* leading space befor alignment point */
  mchunkptr remainder;        /* spare room at end to split off */
  long      remainder_size;   /* its size */

  if ((long)bytes < 0) return NULL;

  /* If need less alignment than we give anyway, just relay to malloc */

  if (alignment <= MALLOC_ALIGNMENT) return mALLOc(bytes);

  /* Otherwise, ensure that it is at least a minimum chunk size */

  if (alignment <  MINSIZE) alignment = MINSIZE;

  /* Call malloc with worst case padding to hit alignment. */

  nb = request2size(bytes);
  m  = (char*)(mALLOc(nb + alignment + MINSIZE));

  /*
   * The attempt to over-allocate (with a size large enough to guarantee the
   * ability to find an aligned region within allocated memory) failed.
   * Try again, this time only allocating exactly the size the user wants;
   * if the allocation happens to be aligned (or can be made so with one
   * retry), use it.
   */
  if (m == NULL) {
    size_t extra, extra2;
    /*
     * Use bytes not nb, since mALLOc internally calls request2size too, and
     * each call increases the size to allocate, to account for the header.
     */
    m  = (char*)(mALLOc(bytes));
    /* Aligned -> return it */
    if ((((unsigned long)(m)) % alignment) == 0)
      return m;
    /*
     * Otherwise, try again, requesting enough extra space to be able to
     * acquire alignment.
     */
    fREe(m);
    /* Add in extra bytes to match misalignment of unexpanded allocation */
    extra = alignment - (((unsigned long)(m)) % alignment);
    m  = (char*)(mALLOc(bytes + extra));
    /*
     * m might not be the same as before. Validate that the previous value of
     * extra still works for the current value of m.
     * If (!m), extra2=alignment so
     */
    if (m) {
      extra2 = alignment - (((unsigned long)(m)) % alignment);
      if (extra2 > extra) {
	fREe(m);
	m = NULL;
      }
    }
    /* Fall through to original NULL check and chunk splitting logic */
  }

  if (m == NULL) return NULL; /* propagate failure */

  p = mem2chunk(m);

  if ((((unsigned long)(m)) % alignment) == 0) /* aligned */
  {
#if HAVE_MMAP
    if(chunk_is_mmapped(p))
      return chunk2mem(p); /* nothing more to do */
#endif
  }
  else /* misaligned */
  {
    /*
      Find an aligned spot inside chunk.
      Since we need to give back leading space in a chunk of at
      least MINSIZE, if the first calculation places us at
      a spot with less than MINSIZE leader, we can move to the
      next aligned spot -- we've allocated enough total room so that
      this is always possible.
    */

    brk = (char*)mem2chunk(((unsigned long)(m + alignment - 1)) & -((signed) alignment));
    if ((long)(brk - (char*)(p)) < MINSIZE) brk = brk + alignment;

    newp = (mchunkptr)brk;
    leadsize = brk - (char*)(p);
    newsize = chunksize(p) - leadsize;

#if HAVE_MMAP
    if(chunk_is_mmapped(p))
    {
      newp->prev_size = p->prev_size + leadsize;
      set_head(newp, newsize|IS_MMAPPED);
      return chunk2mem(newp);
    }
#endif

    /* give back leader, use the rest */

    set_head(newp, newsize | PREV_INUSE);
    set_inuse_bit_at_offset(newp, newsize);
    set_head_size(p, leadsize);
    fREe(chunk2mem(p));
    p = newp;

    assert (newsize >= nb && (((unsigned long)(chunk2mem(p))) % alignment) == 0);
  }

  /* Also give back spare room at the end */

  remainder_size = chunksize(p) - nb;

  if (remainder_size >= (long)MINSIZE)
  {
    remainder = chunk_at_offset(p, nb);
    set_head(remainder, remainder_size | PREV_INUSE);
    set_head_size(p, nb);
    fREe(chunk2mem(remainder));
  }

  check_inuse_chunk(p);
  return chunk2mem(p);

}

/* valloc: page-aligned allocation, just a memalign on the page size. */

#if __STD_C
Void_t* vALLOc(size_t bytes)
#else
Void_t* vALLOc(bytes) size_t bytes;
#endif
{
  return mEMALIGn (malloc_getpagesize, bytes);
}

/* pvalloc: like valloc, but the size is rounded up to a whole page. */

#if __STD_C
Void_t* pvALLOc(size_t bytes)
#else
Void_t* pvALLOc(bytes) size_t bytes;
#endif
{
  size_t pagesize = malloc_getpagesize;
  return mEMALIGn (pagesize, (bytes + pagesize - 1) & ~(pagesize - 1));
}

/* calloc: allocate n * elem_size bytes and clear them, skipping the clear
   for memory that is already known to be zero (fresh sbrk or mmap). */

#if __STD_C
Void_t* cALLOc(size_t n, size_t elem_size)
#else
Void_t* cALLOc(n, elem_size) size_t n; size_t elem_size;
#endif
{
  mchunkptr p;
  INTERNAL_SIZE_T csz;

  INTERNAL_SIZE_T sz = n * elem_size;

  /* check if expand_top called, in which case don't need to clear */
#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
#if MORECORE_CLEARS
  mchunkptr oldtop = top;
  INTERNAL_SIZE_T oldtopsize = chunksize(top);
#endif
#endif
  Void_t* mem = mALLOc (sz);

  if ((long)n < 0) return NULL;

  if (mem == NULL)
    return NULL;
  else
  {
#if CONFIG_VAL(SYS_MALLOC_F_LEN)
    if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
      MALLOC_ZERO(mem, sz);
      return mem;
    }
#endif
    p = mem2chunk(mem);

    /* Two optional cases in which clearing not necessary */

#if HAVE_MMAP
    if (chunk_is_mmapped(p)) return mem;
#endif

    csz = chunksize(p);

#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
#if MORECORE_CLEARS
    if (p == oldtop && csz > oldtopsize)
    {
      /* clear only the bytes from non-freshly-sbrked memory */
      csz = oldtopsize;
    }
#endif
#endif

    MALLOC_ZERO(mem, csz - SIZE_SZ);
    return mem;
  }
}

/* cfree just calls free. It is needed/defined on some systems that pair it
   with calloc, but not when a shared libc already provides it. */

#if !defined(INTERNAL_LINUX_C_LIB) || !defined(__ELF__)
#if __STD_C
void cfree(Void_t *mem)
#else
void cfree(mem) Void_t *mem;
#endif
{
  fREe(mem);
}
#endif

/*
 * malloc_trim: give back as much of the top chunk as possible to the system
 * via a negative MORECORE call, keeping 'pad' bytes (plus MINSIZE) in hand.
 * Returns 1 if memory was released, 0 otherwise.
 */

#if __STD_C
int malloc_trim(size_t pad)
#else
int malloc_trim(pad) size_t pad;
#endif
{
  long  top_size;        /* Amount of top-most memory */
  long  extra;           /* Amount to release */
  char* current_brk;     /* address returned by pre-check sbrk call */
  char* new_brk;         /* address returned by negative sbrk call */

  unsigned long pagesz = malloc_getpagesize;

  top_size = chunksize(top);
  extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;

  if (extra < (long)pagesz)  /* Not enough memory to release */
    return 0;

  else
  {
    /* Test to make sure no one else called sbrk */
    current_brk = (char*)(MORECORE (0));
    if (current_brk != (char*)(top) + top_size)
      return 0;     /* Apparently we don't own memory; must fail */

    else
    {
      new_brk = (char*)(MORECORE (-extra));

      if (new_brk == (char*)(MORECORE_FAILURE)) /* sbrk failed? */
      {
	/* Try to figure out what we have */
	current_brk = (char*)(MORECORE (0));
	top_size = current_brk - (char*)top;
	if (top_size >= (long)MINSIZE) /* if not, we are very very dead! */
	{
	  sbrked_mem = current_brk - sbrk_base;
	  set_head(top, top_size | PREV_INUSE);
	}
	check_chunk(top);
	return 0;
      }

      else
      {
	/* Success. Adjust top accordingly. */
	set_head(top, (top_size - extra) | PREV_INUSE);
	sbrked_mem -= extra;
	check_chunk(top);
	return 1;
      }
    }
  }
}

/* malloc_usable_size: number of bytes the caller may actually use in a
   block returned by malloc (0 for a freed or NULL pointer). */

#if __STD_C
size_t malloc_usable_size(Void_t* mem)
#else
size_t malloc_usable_size(mem) Void_t* mem;
#endif
{
  mchunkptr p;
  if (mem == NULL)
    return 0;
  else
  {
    p = mem2chunk(mem);
    if(!chunk_is_mmapped(p))
    {
      if (!inuse(p)) return 0;
      check_inuse_chunk(p);
      return chunksize(p) - SIZE_SZ;
    }
    return chunksize(p) - 2*SIZE_SZ;
  }
}

#ifdef DEBUG
/* Walk all bins and refresh the current_mallinfo statistics. */
static void malloc_update_mallinfo()
{
  int i;
  mbinptr b;
  mchunkptr p;
#ifdef DEBUG
  mchunkptr q;
#endif

  INTERNAL_SIZE_T avail = chunksize(top);
  int   navail = ((long)(avail) >= (long)MINSIZE)? 1 : 0;

  for (i = 1; i < NAV; ++i)
  {
    b = bin_at(i);
    for (p = last(b); p != b; p = p->bk)
    {
#ifdef DEBUG
      check_free_chunk(p);
      for (q = next_chunk(p);
	   q < top && inuse(q) && (long)(chunksize(q)) >= (long)MINSIZE;
	   q = next_chunk(q))
	check_inuse_chunk(q);
#endif
      avail += chunksize(p);
      navail++;
    }
  }

  current_mallinfo.ordblks = navail;
  current_mallinfo.uordblks = sbrked_mem - avail;
  current_mallinfo.fordblks = avail;
  current_mallinfo.hblks = n_mmaps;
  current_mallinfo.hblkhd = mmapped_mem;
  current_mallinfo.keepcost = chunksize(top);

}
#endif	/* DEBUG */

#ifdef DEBUG
void malloc_stats()
{
  malloc_update_mallinfo();
  printf("max system bytes = %10u\n",
	  (unsigned int)(max_total_mem));
  printf("system bytes     = %10u\n",
	  (unsigned int)(sbrked_mem + mmapped_mem));
  printf("in use bytes     = %10u\n",
	  (unsigned int)(current_mallinfo.uordblks + mmapped_mem));
#if HAVE_MMAP
  printf("max mmap regions = %10u\n",
	  (unsigned int)max_n_mmaps);
#endif
}
#endif	/* DEBUG */

#ifdef DEBUG
struct mallinfo mALLINFo()
{
  malloc_update_mallinfo();
  return current_mallinfo;
}
#endif	/* DEBUG */

/* mallopt: adjust the run-time tunables; returns 1 on success, 0 for an
   unrecognised parameter (or for M_MMAP_MAX without mmap support). */

#if __STD_C
int mALLOPt(int param_number, int value)
#else
int mALLOPt(param_number, value) int param_number; int value;
#endif
{
  switch(param_number)
  {
    case M_TRIM_THRESHOLD:
      trim_threshold = value; return 1;
    case M_TOP_PAD:
      top_pad = value; return 1;
    case M_MMAP_THRESHOLD:
      mmap_threshold = value; return 1;
    case M_MMAP_MAX:
#if HAVE_MMAP
      n_mmaps_max = value; return 1;
#else
      if (value != 0) return 0; else  n_mmaps_max = value; return 1;
#endif

    default:
      return 0;
  }
}

/* Set up the small pre-relocation malloc arena described by gd->malloc_base. */
int initf_malloc(void)
{
#if CONFIG_VAL(SYS_MALLOC_F_LEN)
	assert(gd->malloc_base);	/* Set up by crt0.S */
	gd->malloc_limit = CONFIG_VAL(SYS_MALLOC_F_LEN);
	gd->malloc_ptr = 0;
#endif

	return 0;
}
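
/*
 * Minimal usage sketch (hypothetical values, not configuration taken from
 * this file): the tunables above can be adjusted at run time through
 * mALLOPt(), e.g.
 *
 *	mALLOPt(M_TRIM_THRESHOLD, 256 * 1024);
 *	mALLOPt(M_TOP_PAD, 4096);
 *
 * Both calls return 1 on success and 0 for an unrecognised parameter.
 */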