#include <common.h>
#include <log.h>
#include <asm/global_data.h>

#if CONFIG_IS_ENABLED(UNIT_TEST)
#define DEBUG
#endif

#include <malloc.h>
#include <asm/io.h>

#ifdef DEBUG
#if __STD_C
static void malloc_update_mallinfo (void);
void malloc_stats (void);
#else
static void malloc_update_mallinfo ();
void malloc_stats();
#endif
#endif

DECLARE_GLOBAL_DATA_PTR;
42#ifdef WIN32
43
44#define AlignPage(add) (((add) + (malloc_getpagesize-1)) & \
45~(malloc_getpagesize-1))
46#define AlignPage64K(add) (((add) + (0x10000 - 1)) & ~(0x10000 - 1))
47
48
49#define RESERVED_SIZE (1024*1024*64)
50#define NEXT_SIZE (2048*1024)
51#define TOP_MEMORY ((unsigned long)2*1024*1024*1024)
52
53struct GmListElement;
54typedef struct GmListElement GmListElement;
55
56struct GmListElement
57{
58 GmListElement* next;
59 void* base;
60};
61
62static GmListElement* head = 0;
63static unsigned int gNextAddress = 0;
64static unsigned int gAddressBase = 0;
65static unsigned int gAllocatedSize = 0;
66
67static
68GmListElement* makeGmListElement (void* bas)
69{
70 GmListElement* this;
71 this = (GmListElement*)(void*)LocalAlloc (0, sizeof (GmListElement));
72 assert (this);
73 if (this)
74 {
75 this->base = bas;
76 this->next = head;
77 head = this;
78 }
79 return this;
80}
81
82void gcleanup ()
83{
84 BOOL rval;
85 assert ( (head == NULL) || (head->base == (void*)gAddressBase));
86 if (gAddressBase && (gNextAddress - gAddressBase))
87 {
88 rval = VirtualFree ((void*)gAddressBase,
89 gNextAddress - gAddressBase,
90 MEM_DECOMMIT);
91 assert (rval);
92 }
93 while (head)
94 {
95 GmListElement* next = head->next;
96 rval = VirtualFree (head->base, 0, MEM_RELEASE);
97 assert (rval);
98 LocalFree (head);
99 head = next;
100 }
101}
102
103static
104void* findRegion (void* start_address, unsigned long size)
105{
106 MEMORY_BASIC_INFORMATION info;
107 if (size >= TOP_MEMORY) return NULL;
108
109 while ((unsigned long)start_address + size < TOP_MEMORY)
110 {
111 VirtualQuery (start_address, &info, sizeof (info));
112 if ((info.State == MEM_FREE) && (info.RegionSize >= size))
113 return start_address;
114 else
115 {
116
117
118
119
120
121 start_address = (char*)info.BaseAddress + info.RegionSize;
122
123
124
125
126
127
128
129
130
131
132
133
134
135 start_address =
136 (void *) AlignPage64K((unsigned long) start_address);
137 }
138 }
139 return NULL;
140
141}
142
143
144void* wsbrk (long size)
145{
146 void* tmp;
147 if (size > 0)
148 {
149 if (gAddressBase == 0)
150 {
151 gAllocatedSize = max (RESERVED_SIZE, AlignPage (size));
152 gNextAddress = gAddressBase =
153 (unsigned int)VirtualAlloc (NULL, gAllocatedSize,
154 MEM_RESERVE, PAGE_NOACCESS);
155 } else if (AlignPage (gNextAddress + size) > (gAddressBase +
156gAllocatedSize))
157 {
158 long new_size = max (NEXT_SIZE, AlignPage (size));
159 void* new_address = (void*)(gAddressBase+gAllocatedSize);
160 do
161 {
162 new_address = findRegion (new_address, new_size);
163
164 if (!new_address)
165 return (void*)-1;
166
167 gAddressBase = gNextAddress =
168 (unsigned int)VirtualAlloc (new_address, new_size,
169 MEM_RESERVE, PAGE_NOACCESS);
170
171
172
173 }
174 while (gAddressBase == 0);
175
176 assert (new_address == (void*)gAddressBase);
177
178 gAllocatedSize = new_size;
179
180 if (!makeGmListElement ((void*)gAddressBase))
181 return (void*)-1;
182 }
183 if ((size + gNextAddress) > AlignPage (gNextAddress))
184 {
185 void* res;
186 res = VirtualAlloc ((void*)AlignPage (gNextAddress),
187 (size + gNextAddress -
188 AlignPage (gNextAddress)),
189 MEM_COMMIT, PAGE_READWRITE);
190 if (!res)
191 return (void*)-1;
192 }
193 tmp = (void*)gNextAddress;
194 gNextAddress = (unsigned int)tmp + size;
195 return tmp;
196 }
197 else if (size < 0)
198 {
199 unsigned int alignedGoal = AlignPage (gNextAddress + size);
200
201 if (alignedGoal >= gAddressBase)
202 {
203 VirtualFree ((void*)alignedGoal, gNextAddress - alignedGoal,
204 MEM_DECOMMIT);
205 gNextAddress = gNextAddress + size;
206 return (void*)gNextAddress;
207 }
208 else
209 {
210 VirtualFree ((void*)gAddressBase, gNextAddress - gAddressBase,
211 MEM_DECOMMIT);
212 gNextAddress = gAddressBase;
213 return (void*)-1;
214 }
215 }
216 else
217 {
218 return (void*)gNextAddress;
219 }
220}
221
222#endif
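
/*
   Allocated and free chunks both carry boundary tags: `size' holds the
   chunk size in bytes (including overhead) with its low bits used as
   status flags, and `prev_size' holds the size of the previous chunk
   when that chunk is free.  The fd/bk pointers are only meaningful for
   free chunks, where they link the chunk into its bin.
*/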
struct malloc_chunk
{
  INTERNAL_SIZE_T prev_size; /* Size of previous chunk (if free). */
  INTERNAL_SIZE_T size;      /* Size in bytes, including overhead. */
  struct malloc_chunk* fd;   /* double links -- used only if free. */
  struct malloc_chunk* bk;
} __attribute__((__may_alias__)) ;

typedef struct malloc_chunk* mchunkptr;

/* sizes, alignments */

#define SIZE_SZ                (sizeof(INTERNAL_SIZE_T))
#define MALLOC_ALIGNMENT       (SIZE_SZ + SIZE_SZ)
#define MALLOC_ALIGN_MASK      (MALLOC_ALIGNMENT - 1)
#define MINSIZE                (sizeof(struct malloc_chunk))

/* conversion from malloc headers to user pointers, and back */

#define chunk2mem(p)   ((Void_t*)((char*)(p) + 2*SIZE_SZ))
#define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))

/* pad request bytes into a usable size */

#define request2size(req) \
 (((long)((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) < \
  (long)(MINSIZE + MALLOC_ALIGN_MASK)) ? MINSIZE : \
   (((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) & ~(MALLOC_ALIGN_MASK)))

/* Check if m has acceptable alignment */

#define aligned_OK(m)    (((unsigned long)((m)) & (MALLOC_ALIGN_MASK)) == 0)
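
/*
  Physical chunk operations.  The low-order bits of a chunk's `size'
  field encode its status (PREV_INUSE, IS_MMAPPED), so they must be
  masked off before the size is used as a byte count.
*/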

/* size field is or'ed with PREV_INUSE when previous adjacent chunk in use */

#define PREV_INUSE 0x1

/* size field is or'ed with IS_MMAPPED if the chunk was obtained with mmap() */

#define IS_MMAPPED 0x2

/* Bits to mask off when extracting size */

#define SIZE_BITS (PREV_INUSE|IS_MMAPPED)

/* Ptr to next physical malloc_chunk. */

#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) ))

/* Ptr to previous physical malloc_chunk */

#define prev_chunk(p)\
   ((mchunkptr)( ((char*)(p)) - ((p)->prev_size) ))

/* Treat space at ptr + offset as a chunk */

#define chunk_at_offset(p, s)  ((mchunkptr)(((char*)(p)) + (s)))

/* extract p's inuse bit */

#define inuse(p)\
((((mchunkptr)(((char*)(p))+((p)->size & ~PREV_INUSE)))->size) & PREV_INUSE)

/* extract inuse bit of previous chunk */

#define prev_inuse(p)  ((p)->size & PREV_INUSE)

/* check for mmap()'ed chunk */

#define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)

/* set/clear chunk as in use without otherwise disturbing */

#define set_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size |= PREV_INUSE

#define clear_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size &= ~(PREV_INUSE)

/* check/set/clear inuse bits in known places */

#define inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size & PREV_INUSE)

#define set_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size |= PREV_INUSE)

#define clear_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size &= ~(PREV_INUSE))

/* Get size, ignoring use bits */

#define chunksize(p)          ((p)->size & ~(SIZE_BITS))

/* Set size at head, without disturbing its use bit */

#define set_head_size(p, s)   ((p)->size = (((p)->size & PREV_INUSE) | (s)))

/* Set size/use ignoring previous bits in header */

#define set_head(p, s)        ((p)->size = (s))

/* Set size at footer (only when chunk is not in use) */

#define set_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_size = (s))
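
/*
   Bins.

   The bins are an array of pairs of pointers serving as the heads of
   (initially empty) doubly-linked lists of free chunks, laid out so
   that each pair can be treated as the fd/bk fields of a malloc_chunk.
   Bins for sizes below 512 bytes hold exactly one size each (spaced 8
   bytes apart); larger bins hold ranges of sizes and are kept ordered
   by decreasing chunk size.
*/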
#define NAV             128   /* number of bins */

typedef struct malloc_chunk* mbinptr;

/* access macros */

#define bin_at(i)      ((mbinptr)((char*)&(av_[2*(i) + 2]) - 2*SIZE_SZ))
#define next_bin(b)    ((mbinptr)((char*)(b) + 2 * sizeof(mbinptr)))
#define prev_bin(b)    ((mbinptr)((char*)(b) - 2 * sizeof(mbinptr)))

/*
   The first 2 bins are never indexed. The corresponding av_ cells are
   instead used for bookkeeping (binblocks, top, last_remainder).  This
   is not to save space, but to simplify indexing and avoid some
   initialization tests.
*/

#define top            (av_[2])          /* The topmost chunk */
#define last_remainder (bin_at(1))       /* remainder from last split */

/*
   Because top initially points to its own bin with initial
   zero size, thus forcing extension on the first malloc request,
   we avoid having any special code in malloc to check whether
   it even exists yet. But we still need to in malloc_extend_top.
*/

#define initial_top    ((mchunkptr)(bin_at(0)))

/* Helper macro to initialize bins */

#define IAV(i)  bin_at(i), bin_at(i)
static mbinptr av_[NAV * 2 + 2] = {
 NULL, NULL,
 IAV(0),   IAV(1),   IAV(2),   IAV(3),   IAV(4),   IAV(5),   IAV(6),   IAV(7),
 IAV(8),   IAV(9),   IAV(10),  IAV(11),  IAV(12),  IAV(13),  IAV(14),  IAV(15),
 IAV(16),  IAV(17),  IAV(18),  IAV(19),  IAV(20),  IAV(21),  IAV(22),  IAV(23),
 IAV(24),  IAV(25),  IAV(26),  IAV(27),  IAV(28),  IAV(29),  IAV(30),  IAV(31),
 IAV(32),  IAV(33),  IAV(34),  IAV(35),  IAV(36),  IAV(37),  IAV(38),  IAV(39),
 IAV(40),  IAV(41),  IAV(42),  IAV(43),  IAV(44),  IAV(45),  IAV(46),  IAV(47),
 IAV(48),  IAV(49),  IAV(50),  IAV(51),  IAV(52),  IAV(53),  IAV(54),  IAV(55),
 IAV(56),  IAV(57),  IAV(58),  IAV(59),  IAV(60),  IAV(61),  IAV(62),  IAV(63),
 IAV(64),  IAV(65),  IAV(66),  IAV(67),  IAV(68),  IAV(69),  IAV(70),  IAV(71),
 IAV(72),  IAV(73),  IAV(74),  IAV(75),  IAV(76),  IAV(77),  IAV(78),  IAV(79),
 IAV(80),  IAV(81),  IAV(82),  IAV(83),  IAV(84),  IAV(85),  IAV(86),  IAV(87),
 IAV(88),  IAV(89),  IAV(90),  IAV(91),  IAV(92),  IAV(93),  IAV(94),  IAV(95),
 IAV(96),  IAV(97),  IAV(98),  IAV(99),  IAV(100), IAV(101), IAV(102), IAV(103),
 IAV(104), IAV(105), IAV(106), IAV(107), IAV(108), IAV(109), IAV(110), IAV(111),
 IAV(112), IAV(113), IAV(114), IAV(115), IAV(116), IAV(117), IAV(118), IAV(119),
 IAV(120), IAV(121), IAV(122), IAV(123), IAV(124), IAV(125), IAV(126), IAV(127)
};

#ifdef CONFIG_NEEDS_MANUAL_RELOC
static void malloc_bin_reloc(void)
{
	mbinptr *p = &av_[2];
	size_t i;

	for (i = 2; i < ARRAY_SIZE(av_); ++i, ++p)
		*p = (mbinptr)((ulong)*p + gd->reloc_off);
}
#else
static inline void malloc_bin_reloc(void) {}
#endif

#ifdef CONFIG_SYS_MALLOC_DEFAULT_TO_INIT
static void malloc_init(void);
#endif

ulong mem_malloc_start = 0;
ulong mem_malloc_end = 0;
ulong mem_malloc_brk = 0;

void *sbrk(ptrdiff_t increment)
{
	ulong old = mem_malloc_brk;
	ulong new = old + increment;

	/*
	 * if we are giving memory back make sure we clear it out since
	 * we set MORECORE_CLEARS to 1
	 */
	if (increment < 0)
		memset((void *)new, 0, -increment);

	if ((new < mem_malloc_start) || (new > mem_malloc_end))
		return (void *)MORECORE_FAILURE;

	mem_malloc_brk = new;

	return (void *)old;
}

void mem_malloc_init(ulong start, ulong size)
{
	mem_malloc_start = start;
	mem_malloc_end = start + size;
	mem_malloc_brk = start;

#ifdef CONFIG_SYS_MALLOC_DEFAULT_TO_INIT
	malloc_init();
#endif

	debug("using memory %#lx-%#lx for malloc()\n", mem_malloc_start,
	      mem_malloc_end);
#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
	memset((void *)mem_malloc_start, 0x0, size);
#endif
	malloc_bin_reloc();
}

/* field-extraction macros */

#define first(b) ((b)->fd)
#define last(b)  ((b)->bk)

/*
  Indexing into bins: bins for sizes below 512 bytes each hold a single
  size (8 bytes apart); above that, bins are approximately
  logarithmically spaced.
*/

#define bin_index(sz)                                                          \
(((((unsigned long)(sz)) >> 9) ==    0) ?       (((unsigned long)(sz)) >>  3): \
 ((((unsigned long)(sz)) >> 9) <=    4) ?  56 + (((unsigned long)(sz)) >>  6): \
 ((((unsigned long)(sz)) >> 9) <=   20) ?  91 + (((unsigned long)(sz)) >>  9): \
 ((((unsigned long)(sz)) >> 9) <=   84) ? 110 + (((unsigned long)(sz)) >> 12): \
 ((((unsigned long)(sz)) >> 9) <=  340) ? 119 + (((unsigned long)(sz)) >> 15): \
 ((((unsigned long)(sz)) >> 9) <= 1364) ? 124 + (((unsigned long)(sz)) >> 18): \
					  126)

/*
  bins for chunks < 512 are all spaced 8 bytes apart, and hold
  identically sized chunks. This is exploited in malloc.
*/

#define MAX_SMALLBIN         63
#define MAX_SMALLBIN_SIZE   512
#define SMALLBIN_WIDTH        8

#define smallbin_index(sz)  (((unsigned long)(sz)) >> 3)

/*
   Requests are `small' if both the corresponding and the next bin are small
*/

#define is_small_request(nb) (nb < MAX_SMALLBIN_SIZE - SMALLBIN_WIDTH)

/*
  To help compensate for the large number of bins, a one-level index
  structure is used for bin-by-bin searching.  `binblocks' is a
  one-word bitvector recording whether groups of BINBLOCKWIDTH bins
  have any (possibly) non-empty bins, so they can be skipped over
  all at once during traversals.
*/

#define BINBLOCKWIDTH     4   /* bins per block */

#define binblocks_r     ((INTERNAL_SIZE_T)av_[1]) /* bitvector of nonempty blocks */
#define binblocks_w     (av_[1])

/* bin<->block macros */

#define idx2binblock(ix)      ((unsigned)1 << (ix / BINBLOCKWIDTH))
#define mark_binblock(ii)     (binblocks_w = (mbinptr)(binblocks_r | idx2binblock(ii)))
#define clear_binblock(ii)    (binblocks_w = (mbinptr)(binblocks_r & ~(idx2binblock(ii))))
/* Tunable parameters, adjustable via mallopt() */

static unsigned long trim_threshold   = DEFAULT_TRIM_THRESHOLD;
static unsigned long top_pad          = DEFAULT_TOP_PAD;
static unsigned int  n_mmaps_max      = DEFAULT_MMAP_MAX;
static unsigned long mmap_threshold   = DEFAULT_MMAP_THRESHOLD;

/* The first value returned from sbrk */
static char* sbrk_base = (char*)(-1);

/* The maximum memory obtained from system via sbrk */
static unsigned long max_sbrked_mem = 0;

/* The maximum via either sbrk or mmap */
static unsigned long max_total_mem = 0;

/* internal working copy of mallinfo */
static struct mallinfo current_mallinfo = {  0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

/* The total memory obtained from system via sbrk */
#define sbrked_mem  (current_mallinfo.arena)

/* Tracking mmaps */

#ifdef DEBUG
static unsigned int n_mmaps = 0;
#endif
static unsigned long mmapped_mem = 0;
#if HAVE_MMAP
static unsigned int max_n_mmaps = 0;
static unsigned long max_mmapped_mem = 0;
#endif

#ifdef CONFIG_SYS_MALLOC_DEFAULT_TO_INIT
static void malloc_init(void)
{
	int i, j;

	debug("bins (av_ array) are at %p\n", (void *)av_);

	av_[0] = NULL; av_[1] = NULL;
	for (i = 2, j = 2; i < NAV * 2 + 2; i += 2, j++) {
		av_[i] = bin_at(j - 2);
		av_[i + 1] = bin_at(j - 2);

		/* Just print the first few bins so that
		 * we can see one exemplary initialization. */
		if (i < 10)
			debug("av_[%d]=%lx av_[%d]=%lx\n",
			      i, (ulong)av_[i],
			      i + 1, (ulong)av_[i + 1]);
	}

	/* Init the static bookkeeping as well */
	sbrk_base = (char *)(-1);
	max_sbrked_mem = 0;
	max_total_mem = 0;
#ifdef DEBUG
	memset((void *)&current_mallinfo, 0, sizeof(struct mallinfo));
#endif
}
#endif
767#ifdef DEBUG
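/*
  Debugging support: when DEBUG is set, the do_check_*() routines below
  perform consistency checks on chunks and are wired in through the
  check_*() macros further down; otherwise those macros expand to nothing.
*/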
778#if __STD_C
779static void do_check_chunk(mchunkptr p)
780#else
781static void do_check_chunk(p) mchunkptr p;
782#endif
783{
784 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
785
786
787 assert(!chunk_is_mmapped(p));
788
789
790 assert((char*)p >= sbrk_base);
791 if (p != top)
792 assert((char*)p + sz <= (char*)top);
793 else
794 assert((char*)p + sz <= sbrk_base + sbrked_mem);
795
796}
797
798
799#if __STD_C
800static void do_check_free_chunk(mchunkptr p)
801#else
802static void do_check_free_chunk(p) mchunkptr p;
803#endif
804{
805 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
806 mchunkptr next = chunk_at_offset(p, sz);
807
808 do_check_chunk(p);
809
810
811 assert(!inuse(p));
812
813
814 if ((long)sz >= (long)MINSIZE)
815 {
816 assert((sz & MALLOC_ALIGN_MASK) == 0);
817 assert(aligned_OK(chunk2mem(p)));
818
819 assert(next->prev_size == sz);
820
821 assert(prev_inuse(p));
822 assert (next == top || inuse(next));
823
824
825 assert(p->fd->bk == p);
826 assert(p->bk->fd == p);
827 }
828 else
829 assert(sz == SIZE_SZ);
830}
831
832#if __STD_C
833static void do_check_inuse_chunk(mchunkptr p)
834#else
835static void do_check_inuse_chunk(p) mchunkptr p;
836#endif
837{
838 mchunkptr next = next_chunk(p);
839 do_check_chunk(p);
840
841
842 assert(inuse(p));
843
844
845
846
847
848 if (!prev_inuse(p))
849 {
850 mchunkptr prv = prev_chunk(p);
851 assert(next_chunk(prv) == p);
852 do_check_free_chunk(prv);
853 }
854 if (next == top)
855 {
856 assert(prev_inuse(next));
857 assert(chunksize(next) >= MINSIZE);
858 }
859 else if (!inuse(next))
860 do_check_free_chunk(next);
861
862}
863
864#if __STD_C
865static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
866#else
867static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
868#endif
869{
870 INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
871 long room = sz - s;
872
873 do_check_inuse_chunk(p);
874
875
876 assert((long)sz >= (long)MINSIZE);
877 assert((sz & MALLOC_ALIGN_MASK) == 0);
878 assert(room >= 0);
879 assert(room < (long)MINSIZE);
880
881
882 assert(aligned_OK(chunk2mem(p)));
883
884
885
886 assert(prev_inuse(p));
887
888}
889
890
891#define check_free_chunk(P) do_check_free_chunk(P)
892#define check_inuse_chunk(P) do_check_inuse_chunk(P)
893#define check_chunk(P) do_check_chunk(P)
894#define check_malloced_chunk(P,N) do_check_malloced_chunk(P,N)
895#else
896#define check_free_chunk(P)
897#define check_inuse_chunk(P)
898#define check_chunk(P)
899#define check_malloced_chunk(P,N)
900#endif
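
/*
  Macro-based internal utilities: frontlink() links a free chunk into
  its size bin (keeping the larger bins ordered by decreasing chunk
  size), unlink() removes a chunk from its bin, and the last_remainder
  macros track the chunk most recently split off by malloc.
*/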
920#define frontlink(P, S, IDX, BK, FD) \
921{ \
922 if (S < MAX_SMALLBIN_SIZE) \
923 { \
924 IDX = smallbin_index(S); \
925 mark_binblock(IDX); \
926 BK = bin_at(IDX); \
927 FD = BK->fd; \
928 P->bk = BK; \
929 P->fd = FD; \
930 FD->bk = BK->fd = P; \
931 } \
932 else \
933 { \
934 IDX = bin_index(S); \
935 BK = bin_at(IDX); \
936 FD = BK->fd; \
937 if (FD == BK) mark_binblock(IDX); \
938 else \
939 { \
940 while (FD != BK && S < chunksize(FD)) FD = FD->fd; \
941 BK = FD->bk; \
942 } \
943 P->bk = BK; \
944 P->fd = FD; \
945 FD->bk = BK->fd = P; \
946 } \
947}
948
949
950
951
952#define unlink(P, BK, FD) \
953{ \
954 BK = P->bk; \
955 FD = P->fd; \
956 FD->bk = BK; \
957 BK->fd = FD; \
958} \
959
960
961
962#define link_last_remainder(P) \
963{ \
964 last_remainder->fd = last_remainder->bk = P; \
965 P->fd = P->bk = last_remainder; \
966}
967
968
969
970#define clear_last_remainder \
971 (last_remainder->fd = last_remainder->bk = last_remainder)
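
/* Routines dealing with mmap(); only compiled when HAVE_MMAP is nonzero. */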
979#if HAVE_MMAP
980
981#if __STD_C
982static mchunkptr mmap_chunk(size_t size)
983#else
984static mchunkptr mmap_chunk(size) size_t size;
985#endif
986{
987 size_t page_mask = malloc_getpagesize - 1;
988 mchunkptr p;
989
990#ifndef MAP_ANONYMOUS
991 static int fd = -1;
992#endif
993
994 if(n_mmaps >= n_mmaps_max) return 0;
995
996
997
998
999 size = (size + SIZE_SZ + page_mask) & ~page_mask;
1000
1001#ifdef MAP_ANONYMOUS
1002 p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE,
1003 MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
1004#else
1005 if (fd < 0)
1006 {
1007 fd = open("/dev/zero", O_RDWR);
1008 if(fd < 0) return 0;
1009 }
1010 p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE, fd, 0);
1011#endif
1012
1013 if(p == (mchunkptr)-1) return 0;
1014
1015 n_mmaps++;
1016 if (n_mmaps > max_n_mmaps) max_n_mmaps = n_mmaps;
1017
1018
1019 assert(aligned_OK(chunk2mem(p)));
1020
1021
1022
1023
1024
1025 p->prev_size = 0;
1026 set_head(p, size|IS_MMAPPED);
1027
1028 mmapped_mem += size;
1029 if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
1030 max_mmapped_mem = mmapped_mem;
1031 if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
1032 max_total_mem = mmapped_mem + sbrked_mem;
1033 return p;
1034}
1035
1036#if __STD_C
1037static void munmap_chunk(mchunkptr p)
1038#else
1039static void munmap_chunk(p) mchunkptr p;
1040#endif
1041{
1042 INTERNAL_SIZE_T size = chunksize(p);
1043 int ret;
1044
1045 assert (chunk_is_mmapped(p));
1046 assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
1047 assert((n_mmaps > 0));
1048 assert(((p->prev_size + size) & (malloc_getpagesize-1)) == 0);
1049
1050 n_mmaps--;
1051 mmapped_mem -= (size + p->prev_size);
1052
1053 ret = munmap((char *)p - p->prev_size, size + p->prev_size);
1054
1055
1056 assert(ret == 0);
1057}
1058
1059#if HAVE_MREMAP
1060
1061#if __STD_C
1062static mchunkptr mremap_chunk(mchunkptr p, size_t new_size)
1063#else
1064static mchunkptr mremap_chunk(p, new_size) mchunkptr p; size_t new_size;
1065#endif
1066{
1067 size_t page_mask = malloc_getpagesize - 1;
1068 INTERNAL_SIZE_T offset = p->prev_size;
1069 INTERNAL_SIZE_T size = chunksize(p);
1070 char *cp;
1071
1072 assert (chunk_is_mmapped(p));
1073 assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
1074 assert((n_mmaps > 0));
1075 assert(((size + offset) & (malloc_getpagesize-1)) == 0);
1076
1077
1078 new_size = (new_size + offset + SIZE_SZ + page_mask) & ~page_mask;
1079
1080 cp = (char *)mremap((char *)p - offset, size + offset, new_size, 1);
1081
1082 if (cp == (char *)-1) return 0;
1083
1084 p = (mchunkptr)(cp + offset);
1085
1086 assert(aligned_OK(chunk2mem(p)));
1087
1088 assert((p->prev_size == offset));
1089 set_head(p, (new_size - offset)|IS_MMAPPED);
1090
1091 mmapped_mem -= size + offset;
1092 mmapped_mem += new_size;
1093 if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
1094 max_mmapped_mem = mmapped_mem;
1095 if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
1096 max_total_mem = mmapped_mem + sbrked_mem;
1097 return p;
1098}
1099
1100#endif
1101
1102#endif
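
/*
  Extend the top-most chunk by obtaining memory from system.
  Main interface to sbrk (but see also malloc_trim).
*/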
1109#if __STD_C
1110static void malloc_extend_top(INTERNAL_SIZE_T nb)
1111#else
1112static void malloc_extend_top(nb) INTERNAL_SIZE_T nb;
1113#endif
1114{
1115 char* brk;
1116 INTERNAL_SIZE_T front_misalign;
1117 INTERNAL_SIZE_T correction;
1118 char* new_brk;
1119 INTERNAL_SIZE_T top_size;
1120
1121 mchunkptr old_top = top;
1122 INTERNAL_SIZE_T old_top_size = chunksize(old_top);
1123 char* old_end = (char*)(chunk_at_offset(old_top, old_top_size));
1124
1125
1126
1127 INTERNAL_SIZE_T sbrk_size = nb + top_pad + MINSIZE;
1128 unsigned long pagesz = malloc_getpagesize;
1129
1130
1131
1132
1133
1134 if (sbrk_base != (char*)(-1))
1135 sbrk_size = (sbrk_size + (pagesz - 1)) & ~(pagesz - 1);
1136
1137 brk = (char*)(MORECORE (sbrk_size));
1138
1139
1140 if (brk == (char*)(MORECORE_FAILURE) ||
1141 (brk < old_end && old_top != initial_top))
1142 return;
1143
1144 sbrked_mem += sbrk_size;
1145
1146 if (brk == old_end)
1147 {
1148 top_size = sbrk_size + old_top_size;
1149 set_head(top, top_size | PREV_INUSE);
1150 }
1151 else
1152 {
1153 if (sbrk_base == (char*)(-1))
1154 sbrk_base = brk;
1155 else
1156 sbrked_mem += brk - (char*)old_end;
1157
1158
1159 front_misalign = (unsigned long)chunk2mem(brk) & MALLOC_ALIGN_MASK;
1160 if (front_misalign > 0)
1161 {
1162 correction = (MALLOC_ALIGNMENT) - front_misalign;
1163 brk += correction;
1164 }
1165 else
1166 correction = 0;
1167
1168
1169
1170 correction += ((((unsigned long)(brk + sbrk_size))+(pagesz-1)) &
1171 ~(pagesz - 1)) - ((unsigned long)(brk + sbrk_size));
1172
1173
1174 new_brk = (char*)(MORECORE (correction));
1175 if (new_brk == (char*)(MORECORE_FAILURE)) return;
1176
1177 sbrked_mem += correction;
1178
1179 top = (mchunkptr)brk;
1180 top_size = new_brk - brk + correction;
1181 set_head(top, top_size | PREV_INUSE);
1182
1183 if (old_top != initial_top)
1184 {
1185
1186
1187
1188
1189
1190 if (old_top_size < MINSIZE)
1191 {
1192 set_head(top, PREV_INUSE);
1193 return;
1194 }
1195
1196
1197 old_top_size = (old_top_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;
1198 set_head_size(old_top, old_top_size);
1199 chunk_at_offset(old_top, old_top_size )->size =
1200 SIZE_SZ|PREV_INUSE;
1201 chunk_at_offset(old_top, old_top_size + SIZE_SZ)->size =
1202 SIZE_SZ|PREV_INUSE;
1203
1204 if (old_top_size >= MINSIZE)
1205 fREe(chunk2mem(old_top));
1206 }
1207 }
1208
1209 if ((unsigned long)sbrked_mem > (unsigned long)max_sbrked_mem)
1210 max_sbrked_mem = sbrked_mem;
1211 if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
1212 max_total_mem = mmapped_mem + sbrked_mem;
1213
1214
1215 assert(((unsigned long)((char*)top + top_size) & (pagesz - 1)) == 0);
1216}
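
/*
  malloc algorithm, roughly:

    For small requests, take a chunk from the bin holding exactly that
    size, or from the next bin up.  For larger requests, scan the
    request's own bin for a fit that leaves no usable remainder.
    Failing that, try to split the `last_remainder' chunk, then sweep
    the larger bins (guided by the binblocks bitvector), splitting the
    first chunk found that is big enough and queueing the remainder as
    the new last_remainder.  As a last resort, carve the request from
    the `top' chunk, extending it via malloc_extend_top() (or using
    mmap() for very large requests when HAVE_MMAP is enabled).
*/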
1283#if __STD_C
1284Void_t* mALLOc(size_t bytes)
1285#else
1286Void_t* mALLOc(bytes) size_t bytes;
1287#endif
1288{
1289 mchunkptr victim;
1290 INTERNAL_SIZE_T victim_size;
1291 int idx;
1292 mbinptr bin;
1293 mchunkptr remainder;
1294 long remainder_size;
1295 int remainder_index;
1296 unsigned long block;
1297 int startidx;
1298 mchunkptr fwd;
1299 mchunkptr bck;
1300 mbinptr q;
1301
1302 INTERNAL_SIZE_T nb;
1303
1304#if CONFIG_VAL(SYS_MALLOC_F_LEN)
1305 if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT))
1306 return malloc_simple(bytes);
1307#endif
1308
1309
1310 if ((mem_malloc_start == 0) && (mem_malloc_end == 0)) {
1311
1312 return NULL;
1313 }
1314
1315 if ((long)bytes < 0) return NULL;
1316
1317 nb = request2size(bytes);
1318
1319
1320
1321 if (is_small_request(nb))
1322 {
1323 idx = smallbin_index(nb);
1324
1325
1326
1327 q = bin_at(idx);
1328 victim = last(q);
1329
1330
1331 if (victim == q)
1332 {
1333 q = next_bin(q);
1334 victim = last(q);
1335 }
1336 if (victim != q)
1337 {
1338 victim_size = chunksize(victim);
1339 unlink(victim, bck, fwd);
1340 set_inuse_bit_at_offset(victim, victim_size);
1341 check_malloced_chunk(victim, nb);
1342 return chunk2mem(victim);
1343 }
1344
1345 idx += 2;
1346
1347 }
1348 else
1349 {
1350 idx = bin_index(nb);
1351 bin = bin_at(idx);
1352
1353 for (victim = last(bin); victim != bin; victim = victim->bk)
1354 {
1355 victim_size = chunksize(victim);
1356 remainder_size = victim_size - nb;
1357
1358 if (remainder_size >= (long)MINSIZE)
1359 {
1360 --idx;
1361 break;
1362 }
1363
1364 else if (remainder_size >= 0)
1365 {
1366 unlink(victim, bck, fwd);
1367 set_inuse_bit_at_offset(victim, victim_size);
1368 check_malloced_chunk(victim, nb);
1369 return chunk2mem(victim);
1370 }
1371 }
1372
1373 ++idx;
1374
1375 }
1376
1377
1378
1379 if ( (victim = last_remainder->fd) != last_remainder)
1380 {
1381 victim_size = chunksize(victim);
1382 remainder_size = victim_size - nb;
1383
1384 if (remainder_size >= (long)MINSIZE)
1385 {
1386 remainder = chunk_at_offset(victim, nb);
1387 set_head(victim, nb | PREV_INUSE);
1388 link_last_remainder(remainder);
1389 set_head(remainder, remainder_size | PREV_INUSE);
1390 set_foot(remainder, remainder_size);
1391 check_malloced_chunk(victim, nb);
1392 return chunk2mem(victim);
1393 }
1394
1395 clear_last_remainder;
1396
1397 if (remainder_size >= 0)
1398 {
1399 set_inuse_bit_at_offset(victim, victim_size);
1400 check_malloced_chunk(victim, nb);
1401 return chunk2mem(victim);
1402 }
1403
1404
1405
1406 frontlink(victim, victim_size, remainder_index, bck, fwd);
1407 }
1408
1409
1410
1411
1412
1413
1414 if ( (block = idx2binblock(idx)) <= binblocks_r)
1415 {
1416
1417
1418
1419 if ( (block & binblocks_r) == 0)
1420 {
1421
1422 idx = (idx & ~(BINBLOCKWIDTH - 1)) + BINBLOCKWIDTH;
1423 block <<= 1;
1424 while ((block & binblocks_r) == 0)
1425 {
1426 idx += BINBLOCKWIDTH;
1427 block <<= 1;
1428 }
1429 }
1430
1431
1432 for (;;)
1433 {
1434 startidx = idx;
1435 q = bin = bin_at(idx);
1436
1437
1438 do
1439 {
1440
1441
1442 for (victim = last(bin); victim != bin; victim = victim->bk)
1443 {
1444 victim_size = chunksize(victim);
1445 remainder_size = victim_size - nb;
1446
1447 if (remainder_size >= (long)MINSIZE)
1448 {
1449 remainder = chunk_at_offset(victim, nb);
1450 set_head(victim, nb | PREV_INUSE);
1451 unlink(victim, bck, fwd);
1452 link_last_remainder(remainder);
1453 set_head(remainder, remainder_size | PREV_INUSE);
1454 set_foot(remainder, remainder_size);
1455 check_malloced_chunk(victim, nb);
1456 return chunk2mem(victim);
1457 }
1458
1459 else if (remainder_size >= 0)
1460 {
1461 set_inuse_bit_at_offset(victim, victim_size);
1462 unlink(victim, bck, fwd);
1463 check_malloced_chunk(victim, nb);
1464 return chunk2mem(victim);
1465 }
1466
1467 }
1468
1469 bin = next_bin(bin);
1470
1471 } while ((++idx & (BINBLOCKWIDTH - 1)) != 0);
1472
1473
1474
1475 do
1476 {
1477 if ((startidx & (BINBLOCKWIDTH - 1)) == 0)
1478 {
1479 av_[1] = (mbinptr)(binblocks_r & ~block);
1480 break;
1481 }
1482 --startidx;
1483 q = prev_bin(q);
1484 } while (first(q) == q);
1485
1486
1487
1488 if ( (block <<= 1) <= binblocks_r && (block != 0) )
1489 {
1490 while ((block & binblocks_r) == 0)
1491 {
1492 idx += BINBLOCKWIDTH;
1493 block <<= 1;
1494 }
1495 }
1496 else
1497 break;
1498 }
1499 }
1500
1501
1502
1503
1504
1505 if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
1506 {
1507
1508#if HAVE_MMAP
1509
1510 if ((unsigned long)nb >= (unsigned long)mmap_threshold &&
1511 (victim = mmap_chunk(nb)))
1512 return chunk2mem(victim);
1513#endif
1514
1515
1516 malloc_extend_top(nb);
1517 if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
1518 return NULL;
1519 }
1520
1521 victim = top;
1522 set_head(victim, nb | PREV_INUSE);
1523 top = chunk_at_offset(victim, nb);
1524 set_head(top, remainder_size | PREV_INUSE);
1525 check_malloced_chunk(victim, nb);
1526 return chunk2mem(victim);
1527
1528}
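
/*
  free() algorithm:

    Mmapped chunks are returned directly via munmap_chunk().  Otherwise
    the chunk is coalesced with its neighbours if they are free; if the
    result borders the `top' chunk it is merged into top (and top is
    trimmed back to the system once it exceeds trim_threshold),
    otherwise the consolidated chunk is placed back into a bin via
    frontlink().
*/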
1555#if __STD_C
1556void fREe(Void_t* mem)
1557#else
1558void fREe(mem) Void_t* mem;
1559#endif
1560{
1561 mchunkptr p;
1562 INTERNAL_SIZE_T hd;
1563 INTERNAL_SIZE_T sz;
1564 int idx;
1565 mchunkptr next;
1566 INTERNAL_SIZE_T nextsz;
1567 INTERNAL_SIZE_T prevsz;
1568 mchunkptr bck;
1569 mchunkptr fwd;
1570 int islr;
1571
1572#if CONFIG_VAL(SYS_MALLOC_F_LEN)
1573
1574 if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT))
1575 return;
1576#endif
1577
1578 if (mem == NULL)
1579 return;
1580
1581 p = mem2chunk(mem);
1582 hd = p->size;
1583
1584#if HAVE_MMAP
1585 if (hd & IS_MMAPPED)
1586 {
1587 munmap_chunk(p);
1588 return;
1589 }
1590#endif
1591
1592 check_inuse_chunk(p);
1593
1594 sz = hd & ~PREV_INUSE;
1595 next = chunk_at_offset(p, sz);
1596 nextsz = chunksize(next);
1597
1598 if (next == top)
1599 {
1600 sz += nextsz;
1601
1602 if (!(hd & PREV_INUSE))
1603 {
1604 prevsz = p->prev_size;
1605 p = chunk_at_offset(p, -((long) prevsz));
1606 sz += prevsz;
1607 unlink(p, bck, fwd);
1608 }
1609
1610 set_head(p, sz | PREV_INUSE);
1611 top = p;
1612 if ((unsigned long)(sz) >= (unsigned long)trim_threshold)
1613 malloc_trim(top_pad);
1614 return;
1615 }
1616
1617 set_head(next, nextsz);
1618
1619 islr = 0;
1620
1621 if (!(hd & PREV_INUSE))
1622 {
1623 prevsz = p->prev_size;
1624 p = chunk_at_offset(p, -((long) prevsz));
1625 sz += prevsz;
1626
1627 if (p->fd == last_remainder)
1628 islr = 1;
1629 else
1630 unlink(p, bck, fwd);
1631 }
1632
1633 if (!(inuse_bit_at_offset(next, nextsz)))
1634 {
1635 sz += nextsz;
1636
1637 if (!islr && next->fd == last_remainder)
1638 {
1639 islr = 1;
1640 link_last_remainder(p);
1641 }
1642 else
1643 unlink(next, bck, fwd);
1644 }
1645
1646
1647 set_head(p, sz | PREV_INUSE);
1648 set_foot(p, sz);
1649 if (!islr)
1650 frontlink(p, sz, idx, bck, fwd);
1651}
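
/*
  realloc() algorithm:

    If the chunk can be grown in place by absorbing the following free
    chunk, the preceding free chunk, or space from `top', do so and
    split off any unused tail.  Mmapped chunks are handled via mremap()
    when available.  Otherwise fall back to malloc-copy-free.  Note that
    realloc(NULL, n) behaves like malloc(n), and shrinking requests
    simply split the existing chunk.
*/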
1693#if __STD_C
1694Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
1695#else
1696Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
1697#endif
1698{
1699 INTERNAL_SIZE_T nb;
1700
1701 mchunkptr oldp;
1702 INTERNAL_SIZE_T oldsize;
1703
1704 mchunkptr newp;
1705 INTERNAL_SIZE_T newsize;
1706 Void_t* newmem;
1707
1708 mchunkptr next;
1709 INTERNAL_SIZE_T nextsize;
1710
1711 mchunkptr prev;
1712 INTERNAL_SIZE_T prevsize;
1713
1714 mchunkptr remainder;
1715 INTERNAL_SIZE_T remainder_size;
1716
1717 mchunkptr bck;
1718 mchunkptr fwd;
1719
1720#ifdef REALLOC_ZERO_BYTES_FREES
1721 if (!bytes) {
1722 fREe(oldmem);
1723 return NULL;
1724 }
1725#endif
1726
1727 if ((long)bytes < 0) return NULL;
1728
1729
1730 if (oldmem == NULL) return mALLOc(bytes);
1731
1732#if CONFIG_VAL(SYS_MALLOC_F_LEN)
1733 if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
1734
1735 panic("pre-reloc realloc() is not supported");
1736 }
1737#endif
1738
1739 newp = oldp = mem2chunk(oldmem);
1740 newsize = oldsize = chunksize(oldp);
1741
1742
1743 nb = request2size(bytes);
1744
1745#if HAVE_MMAP
1746 if (chunk_is_mmapped(oldp))
1747 {
1748#if HAVE_MREMAP
1749 newp = mremap_chunk(oldp, nb);
1750 if(newp) return chunk2mem(newp);
1751#endif
1752
1753 if(oldsize - SIZE_SZ >= nb) return oldmem;
1754
1755 newmem = mALLOc(bytes);
1756 if (!newmem)
1757 return NULL;
1758 MALLOC_COPY(newmem, oldmem, oldsize - 2*SIZE_SZ);
1759 munmap_chunk(oldp);
1760 return newmem;
1761 }
1762#endif
1763
1764 check_inuse_chunk(oldp);
1765
1766 if ((long)(oldsize) < (long)(nb))
1767 {
1768
1769
1770
1771 next = chunk_at_offset(oldp, oldsize);
1772 if (next == top || !inuse(next))
1773 {
1774 nextsize = chunksize(next);
1775
1776
1777 if (next == top)
1778 {
1779 if ((long)(nextsize + newsize) >= (long)(nb + MINSIZE))
1780 {
1781 newsize += nextsize;
1782 top = chunk_at_offset(oldp, nb);
1783 set_head(top, (newsize - nb) | PREV_INUSE);
1784 set_head_size(oldp, nb);
1785 return chunk2mem(oldp);
1786 }
1787 }
1788
1789
1790 else if (((long)(nextsize + newsize) >= (long)(nb)))
1791 {
1792 unlink(next, bck, fwd);
1793 newsize += nextsize;
1794 goto split;
1795 }
1796 }
1797 else
1798 {
1799 next = NULL;
1800 nextsize = 0;
1801 }
1802
1803
1804
1805 if (!prev_inuse(oldp))
1806 {
1807 prev = prev_chunk(oldp);
1808 prevsize = chunksize(prev);
1809
1810
1811
1812 if (next != NULL)
1813 {
1814
1815 if (next == top)
1816 {
1817 if ((long)(nextsize + prevsize + newsize) >= (long)(nb + MINSIZE))
1818 {
1819 unlink(prev, bck, fwd);
1820 newp = prev;
1821 newsize += prevsize + nextsize;
1822 newmem = chunk2mem(newp);
1823 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
1824 top = chunk_at_offset(newp, nb);
1825 set_head(top, (newsize - nb) | PREV_INUSE);
1826 set_head_size(newp, nb);
1827 return newmem;
1828 }
1829 }
1830
1831
1832 else if (((long)(nextsize + prevsize + newsize) >= (long)(nb)))
1833 {
1834 unlink(next, bck, fwd);
1835 unlink(prev, bck, fwd);
1836 newp = prev;
1837 newsize += nextsize + prevsize;
1838 newmem = chunk2mem(newp);
1839 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
1840 goto split;
1841 }
1842 }
1843
1844
1845 if (prev != NULL && (long)(prevsize + newsize) >= (long)nb)
1846 {
1847 unlink(prev, bck, fwd);
1848 newp = prev;
1849 newsize += prevsize;
1850 newmem = chunk2mem(newp);
1851 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
1852 goto split;
1853 }
1854 }
1855
1856
1857
1858 newmem = mALLOc (bytes);
1859
1860 if (newmem == NULL)
1861 return NULL;
1862
1863
1864
1865
1866 if ( (newp = mem2chunk(newmem)) == next_chunk(oldp))
1867 {
1868 newsize += chunksize(newp);
1869 newp = oldp;
1870 goto split;
1871 }
1872
1873
1874 MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
1875 fREe(oldmem);
1876 return newmem;
1877 }
1878
1879
1880 split:
1881
1882 if (newsize - nb >= MINSIZE)
1883 {
1884 remainder = chunk_at_offset(newp, nb);
1885 remainder_size = newsize - nb;
1886 set_head_size(newp, nb);
1887 set_head(remainder, remainder_size | PREV_INUSE);
1888 set_inuse_bit_at_offset(remainder, remainder_size);
1889 fREe(chunk2mem(remainder));
1890 }
1891 else
1892 {
1893 set_head_size(newp, newsize);
1894 set_inuse_bit_at_offset(newp, newsize);
1895 }
1896
1897 check_inuse_chunk(newp);
1898 return chunk2mem(newp);
1899}
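
/*
  memalign() algorithm:

    Over-allocate with malloc(nb + alignment + MINSIZE), locate an
    aligned spot inside the returned chunk, and give back the leading
    and trailing slack.  If the over-allocation itself fails, retry
    with plain malloc() calls sized so that an aligned address can
    still be obtained.  Alignments of MALLOC_ALIGNMENT or less are
    satisfied by plain malloc().
*/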
1923#if __STD_C
1924Void_t* mEMALIGn(size_t alignment, size_t bytes)
1925#else
1926Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
1927#endif
1928{
1929 INTERNAL_SIZE_T nb;
1930 char* m;
1931 mchunkptr p;
1932 char* brk;
1933 mchunkptr newp;
1934 INTERNAL_SIZE_T newsize;
1935 INTERNAL_SIZE_T leadsize;
1936 mchunkptr remainder;
1937 long remainder_size;
1938
1939 if ((long)bytes < 0) return NULL;
1940
1941#if CONFIG_VAL(SYS_MALLOC_F_LEN)
1942 if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
1943 return memalign_simple(alignment, bytes);
1944 }
1945#endif
1946
1947
1948
1949 if (alignment <= MALLOC_ALIGNMENT) return mALLOc(bytes);
1950
1951
1952
1953 if (alignment < MINSIZE) alignment = MINSIZE;
1954
1955
1956
1957 nb = request2size(bytes);
1958 m = (char*)(mALLOc(nb + alignment + MINSIZE));
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968 if (m == NULL) {
1969 size_t extra, extra2;
1970
1971
1972
1973
1974 m = (char*)(mALLOc(bytes));
1975
1976 if ((((unsigned long)(m)) % alignment) == 0)
1977 return m;
1978
1979
1980
1981
1982 fREe(m);
1983
1984 extra = alignment - (((unsigned long)(m)) % alignment);
1985 m = (char*)(mALLOc(bytes + extra));
1986
1987
1988
1989
1990
1991 if (m) {
1992 extra2 = alignment - (((unsigned long)(m)) % alignment);
1993 if (extra2 > extra) {
1994 fREe(m);
1995 m = NULL;
1996 }
1997 }
1998
1999 }
2000
2001 if (m == NULL) return NULL;
2002
2003 p = mem2chunk(m);
2004
2005 if ((((unsigned long)(m)) % alignment) == 0)
2006 {
2007#if HAVE_MMAP
2008 if(chunk_is_mmapped(p))
2009 return chunk2mem(p);
2010#endif
2011 }
2012 else
2013 {
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023 brk = (char*)mem2chunk(((unsigned long)(m + alignment - 1)) & -((signed) alignment));
2024 if ((long)(brk - (char*)(p)) < MINSIZE) brk = brk + alignment;
2025
2026 newp = (mchunkptr)brk;
2027 leadsize = brk - (char*)(p);
2028 newsize = chunksize(p) - leadsize;
2029
2030#if HAVE_MMAP
2031 if(chunk_is_mmapped(p))
2032 {
2033 newp->prev_size = p->prev_size + leadsize;
2034 set_head(newp, newsize|IS_MMAPPED);
2035 return chunk2mem(newp);
2036 }
2037#endif
2038
2039
2040
2041 set_head(newp, newsize | PREV_INUSE);
2042 set_inuse_bit_at_offset(newp, newsize);
2043 set_head_size(p, leadsize);
2044 fREe(chunk2mem(p));
2045 p = newp;
2046
2047 assert (newsize >= nb && (((unsigned long)(chunk2mem(p))) % alignment) == 0);
2048 }
2049
2050
2051
2052 remainder_size = chunksize(p) - nb;
2053
2054 if (remainder_size >= (long)MINSIZE)
2055 {
2056 remainder = chunk_at_offset(p, nb);
2057 set_head(remainder, remainder_size | PREV_INUSE);
2058 set_head_size(p, nb);
2059 fREe(chunk2mem(remainder));
2060 }
2061
2062 check_inuse_chunk(p);
2063 return chunk2mem(p);
2064
2065}
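
/* valloc just invokes memalign with alignment argument equal
   to the page size of the system (or as near to this as can
   be figured out from all the includes/defines above.) */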
2076#if __STD_C
2077Void_t* vALLOc(size_t bytes)
2078#else
2079Void_t* vALLOc(bytes) size_t bytes;
2080#endif
2081{
2082 return mEMALIGn (malloc_getpagesize, bytes);
2083}
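
/* pvalloc rounds the request up to a whole number of pages and
   returns a page-aligned region. */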
2091#if __STD_C
2092Void_t* pvALLOc(size_t bytes)
2093#else
2094Void_t* pvALLOc(bytes) size_t bytes;
2095#endif
2096{
2097 size_t pagesize = malloc_getpagesize;
2098 return mEMALIGn (pagesize, (bytes + pagesize - 1) & ~(pagesize - 1));
2099}
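
/* calloc calls malloc, then zeroes out the allocated chunk.  Chunks
   obtained via mmap() are returned as-is, since mmap'd memory is
   already cleared. */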
2107#if __STD_C
2108Void_t* cALLOc(size_t n, size_t elem_size)
2109#else
2110Void_t* cALLOc(n, elem_size) size_t n; size_t elem_size;
2111#endif
2112{
2113 mchunkptr p;
2114 INTERNAL_SIZE_T csz;
2115
2116 INTERNAL_SIZE_T sz = n * elem_size;
2117
2118
2119
2120#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
2121#if MORECORE_CLEARS
2122 mchunkptr oldtop = top;
2123 INTERNAL_SIZE_T oldtopsize = chunksize(top);
2124#endif
2125#endif
2126 Void_t* mem = mALLOc (sz);
2127
2128 if ((long)n < 0) return NULL;
2129
2130 if (mem == NULL)
2131 return NULL;
2132 else
2133 {
2134#if CONFIG_VAL(SYS_MALLOC_F_LEN)
2135 if (!(gd->flags & GD_FLG_FULL_MALLOC_INIT)) {
2136 memset(mem, 0, sz);
2137 return mem;
2138 }
2139#endif
2140 p = mem2chunk(mem);
2141
2142
2143
2144
2145#if HAVE_MMAP
2146 if (chunk_is_mmapped(p)) return mem;
2147#endif
2148
2149 csz = chunksize(p);
2150
2151#ifdef CONFIG_SYS_MALLOC_CLEAR_ON_INIT
2152#if MORECORE_CLEARS
2153 if (p == oldtop && csz > oldtopsize)
2154 {
2155
2156 csz = oldtopsize;
2157 }
2158#endif
2159#endif
2160
2161 MALLOC_ZERO(mem, csz - SIZE_SZ);
2162 return mem;
2163 }
2164}
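
/* cfree just calls free. It is needed/defined on some systems
   that pair it with calloc, presumably for odd historical reasons. */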
2173#if !defined(INTERNAL_LINUX_C_LIB) || !defined(__ELF__)
2174#if __STD_C
2175void cfree(Void_t *mem)
2176#else
2177void cfree(mem) Void_t *mem;
2178#endif
2179{
2180 fREe(mem);
2181}
2182#endif
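
/*
  malloc_trim() gives memory back to the system (via negative arguments
  to sbrk) if there is unused memory at the `high' end of the malloc
  pool.  It is called automatically by free() when the top chunk exceeds
  trim_threshold.  Returns 1 if it actually released any memory, else 0.
*/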
2209#if __STD_C
2210int malloc_trim(size_t pad)
2211#else
2212int malloc_trim(pad) size_t pad;
2213#endif
2214{
2215 long top_size;
2216 long extra;
2217 char* current_brk;
2218 char* new_brk;
2219
2220 unsigned long pagesz = malloc_getpagesize;
2221
2222 top_size = chunksize(top);
2223 extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;
2224
2225 if (extra < (long)pagesz)
2226 return 0;
2227
2228 else
2229 {
2230
2231 current_brk = (char*)(MORECORE (0));
2232 if (current_brk != (char*)(top) + top_size)
2233 return 0;
2234
2235 else
2236 {
2237 new_brk = (char*)(MORECORE (-extra));
2238
2239 if (new_brk == (char*)(MORECORE_FAILURE))
2240 {
2241
2242 current_brk = (char*)(MORECORE (0));
2243 top_size = current_brk - (char*)top;
2244 if (top_size >= (long)MINSIZE)
2245 {
2246 sbrked_mem = current_brk - sbrk_base;
2247 set_head(top, top_size | PREV_INUSE);
2248 }
2249 check_chunk(top);
2250 return 0;
2251 }
2252
2253 else
2254 {
2255
2256 set_head(top, (top_size - extra) | PREV_INUSE);
2257 sbrked_mem -= extra;
2258 check_chunk(top);
2259 return 1;
2260 }
2261 }
2262 }
2263}
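
/*
  malloc_usable_size() returns the number of bytes you can actually use
  in an allocated chunk, which may be more than you requested (although
  often not).  It returns 0 if the argument does not point at a chunk
  currently in use.
*/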
2278#if __STD_C
2279size_t malloc_usable_size(Void_t* mem)
2280#else
2281size_t malloc_usable_size(mem) Void_t* mem;
2282#endif
2283{
2284 mchunkptr p;
2285 if (mem == NULL)
2286 return 0;
2287 else
2288 {
2289 p = mem2chunk(mem);
2290 if(!chunk_is_mmapped(p))
2291 {
2292 if (!inuse(p)) return 0;
2293 check_inuse_chunk(p);
2294 return chunksize(p) - SIZE_SZ;
2295 }
2296 return chunksize(p) - 2*SIZE_SZ;
2297 }
2298}
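
/* Utility to update current_mallinfo for malloc_stats() and mALLINFo()
   by walking every bin and summing the free chunks it finds. */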
2305#ifdef DEBUG
2306static void malloc_update_mallinfo()
2307{
2308 int i;
2309 mbinptr b;
2310 mchunkptr p;
2311#ifdef DEBUG
2312 mchunkptr q;
2313#endif
2314
2315 INTERNAL_SIZE_T avail = chunksize(top);
2316 int navail = ((long)(avail) >= (long)MINSIZE)? 1 : 0;
2317
2318 for (i = 1; i < NAV; ++i)
2319 {
2320 b = bin_at(i);
2321 for (p = last(b); p != b; p = p->bk)
2322 {
2323#ifdef DEBUG
2324 check_free_chunk(p);
2325 for (q = next_chunk(p);
2326 q < top && inuse(q) && (long)(chunksize(q)) >= (long)MINSIZE;
2327 q = next_chunk(q))
2328 check_inuse_chunk(q);
2329#endif
2330 avail += chunksize(p);
2331 navail++;
2332 }
2333 }
2334
2335 current_mallinfo.ordblks = navail;
2336 current_mallinfo.uordblks = sbrked_mem - avail;
2337 current_mallinfo.fordblks = avail;
2338 current_mallinfo.hblks = n_mmaps;
2339 current_mallinfo.hblkhd = mmapped_mem;
2340 current_mallinfo.keepcost = chunksize(top);
2341
2342}
2343#endif
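
/* malloc_stats() prints the maximum and current amount of space obtained
   from the system and the number of bytes currently allocated but not
   yet freed.  Only available when DEBUG is defined. */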
2362#ifdef DEBUG
2363void malloc_stats()
2364{
2365 malloc_update_mallinfo();
2366 printf("max system bytes = %10u\n",
2367 (unsigned int)(max_total_mem));
2368 printf("system bytes = %10u\n",
2369 (unsigned int)(sbrked_mem + mmapped_mem));
2370 printf("in use bytes = %10u\n",
2371 (unsigned int)(current_mallinfo.uordblks + mmapped_mem));
2372#if HAVE_MMAP
2373 printf("max mmap regions = %10u\n",
2374 (unsigned int)max_n_mmaps);
2375#endif
2376}
2377#endif
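
/* mallinfo() returns a copy of the internal bookkeeping structure. */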
2383#ifdef DEBUG
2384struct mallinfo mALLINFo()
2385{
2386 malloc_update_mallinfo();
2387 return current_mallinfo;
2388}
2389#endif
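
/*
  mallopt() adjusts the tunable parameters: M_TRIM_THRESHOLD, M_TOP_PAD,
  M_MMAP_THRESHOLD and M_MMAP_MAX.  Returns 1 on success, 0 for an
  unrecognized parameter (or for a nonzero M_MMAP_MAX when mmap support
  is compiled out).
*/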
2407#if __STD_C
2408int mALLOPt(int param_number, int value)
2409#else
2410int mALLOPt(param_number, value) int param_number; int value;
2411#endif
2412{
2413 switch(param_number)
2414 {
2415 case M_TRIM_THRESHOLD:
2416 trim_threshold = value; return 1;
2417 case M_TOP_PAD:
2418 top_pad = value; return 1;
2419 case M_MMAP_THRESHOLD:
2420 mmap_threshold = value; return 1;
2421 case M_MMAP_MAX:
2422#if HAVE_MMAP
2423 n_mmaps_max = value; return 1;
2424#else
2425 if (value != 0) return 0; else n_mmaps_max = value; return 1;
2426#endif
2427
2428 default:
2429 return 0;
2430 }
2431}
2432
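
/* Set up the simple pre-relocation heap used by malloc_simple(),
   bounded by gd->malloc_base and CONFIG_VAL(SYS_MALLOC_F_LEN). */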
2433int initf_malloc(void)
2434{
2435#if CONFIG_VAL(SYS_MALLOC_F_LEN)
2436 assert(gd->malloc_base);
2437 gd->malloc_limit = CONFIG_VAL(SYS_MALLOC_F_LEN);
2438 gd->malloc_ptr = 0;
2439#endif
2440
2441 return 0;
2442}