1#include <common.h>
2
3#if 0
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227#ifndef __STD_C
228#ifdef __STDC__
229#define __STD_C 1
230#else
231#if __cplusplus
232#define __STD_C 1
233#else
234#define __STD_C 0
235#endif
236#endif
237#endif
238
239#ifndef Void_t
240#if (__STD_C || defined(WIN32))
241#define Void_t void
242#else
243#define Void_t char
244#endif
245#endif
246
247#if __STD_C
248#include <stddef.h>
249#else
250#include <sys/types.h>
251#endif
252
253#ifdef __cplusplus
254extern "C" {
255#endif
256
257#include <stdio.h>
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298#ifndef INTERNAL_SIZE_T
299#define INTERNAL_SIZE_T size_t
300#endif
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319#ifdef WIN32
320#define MORECORE wsbrk
321#define HAVE_MMAP 0
322
323#define LACKS_UNISTD_H
324#define LACKS_SYS_PARAM_H
325
326
327
328
329
330
331
332
333
334#define WIN32_LEAN_AND_MEAN
335#include <windows.h>
336#endif
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352#define HAVE_MEMCPY
353
354#ifndef USE_MEMCPY
355#ifdef HAVE_MEMCPY
356#define USE_MEMCPY 1
357#else
358#define USE_MEMCPY 0
359#endif
360#endif
361
362#if (__STD_C || defined(HAVE_MEMCPY))
363
364#if __STD_C
365void* memset(void*, int, size_t);
366void* memcpy(void*, const void*, size_t);
367#else
368#ifdef WIN32
369
370
371#else
372Void_t* memset();
373Void_t* memcpy();
374#endif
375#endif
376#endif
377
378#if USE_MEMCPY
379
380
381
382
383
/* MALLOC_ZERO(charp, nbytes):
   Clear nbytes starting at charp.  nbytes is always a multiple of
   sizeof(INTERNAL_SIZE_T) here (chunk sizes are word-aligned).  Small
   requests (<= 9 words) are cleared by an unrolled inline sequence;
   anything larger falls through to memset(). */
#define MALLOC_ZERO(charp, nbytes) \
do { \
 INTERNAL_SIZE_T mzsz = (nbytes); \
 if(mzsz <= 9*sizeof(mzsz)) { \
 INTERNAL_SIZE_T* mz = (INTERNAL_SIZE_T*) (charp); \
 if(mzsz >= 5*sizeof(mzsz)) { *mz++ = 0; \
 *mz++ = 0; \
 if(mzsz >= 7*sizeof(mzsz)) { *mz++ = 0; \
 *mz++ = 0; \
 if(mzsz >= 9*sizeof(mzsz)) { *mz++ = 0; \
 *mz++ = 0; }}} \
 *mz++ = 0; \
 *mz++ = 0; \
 *mz = 0; \
 } else memset((charp), 0, mzsz); \
} while(0)

/* MALLOC_COPY(dest, src, nbytes):
   Word-wise copy of nbytes from src to dest, mirroring MALLOC_ZERO:
   unrolled for up to 9 words, memcpy() otherwise.  The regions never
   overlap in the callers (realloc copies to a distinct chunk). */
#define MALLOC_COPY(dest,src,nbytes) \
do { \
 INTERNAL_SIZE_T mcsz = (nbytes); \
 if(mcsz <= 9*sizeof(mcsz)) { \
 INTERNAL_SIZE_T* mcsrc = (INTERNAL_SIZE_T*) (src); \
 INTERNAL_SIZE_T* mcdst = (INTERNAL_SIZE_T*) (dest); \
 if(mcsz >= 5*sizeof(mcsz)) { *mcdst++ = *mcsrc++; \
 *mcdst++ = *mcsrc++; \
 if(mcsz >= 7*sizeof(mcsz)) { *mcdst++ = *mcsrc++; \
 *mcdst++ = *mcsrc++; \
 if(mcsz >= 9*sizeof(mcsz)) { *mcdst++ = *mcsrc++; \
 *mcdst++ = *mcsrc++; }}} \
 *mcdst++ = *mcsrc++; \
 *mcdst++ = *mcsrc++; \
 *mcdst = *mcsrc ; \
 } else memcpy(dest, src, mcsz); \
} while(0)
418
419#else
420
421
422
/* Fallback versions used when memcpy/memset are unavailable:
   hand-rolled word loops using Duff's device (the switch jumps into
   the middle of the unrolled for-loop to handle the remainder
   mctmp = nwords % 8, then mcn full 8-word iterations follow). */
#define MALLOC_ZERO(charp, nbytes) \
do { \
 INTERNAL_SIZE_T* mzp = (INTERNAL_SIZE_T*)(charp); \
 long mctmp = (nbytes)/sizeof(INTERNAL_SIZE_T), mcn; \
 if (mctmp < 8) mcn = 0; else { mcn = (mctmp-1)/8; mctmp %= 8; } \
 switch (mctmp) { \
 case 0: for(;;) { *mzp++ = 0; \
 case 7: *mzp++ = 0; \
 case 6: *mzp++ = 0; \
 case 5: *mzp++ = 0; \
 case 4: *mzp++ = 0; \
 case 3: *mzp++ = 0; \
 case 2: *mzp++ = 0; \
 case 1: *mzp++ = 0; if(mcn <= 0) break; mcn--; } \
 } \
} while(0)

/* Word-wise copy, same Duff's-device structure as MALLOC_ZERO above. */
#define MALLOC_COPY(dest,src,nbytes) \
do { \
 INTERNAL_SIZE_T* mcsrc = (INTERNAL_SIZE_T*) src; \
 INTERNAL_SIZE_T* mcdst = (INTERNAL_SIZE_T*) dest; \
 long mctmp = (nbytes)/sizeof(INTERNAL_SIZE_T), mcn; \
 if (mctmp < 8) mcn = 0; else { mcn = (mctmp-1)/8; mctmp %= 8; } \
 switch (mctmp) { \
 case 0: for(;;) { *mcdst++ = *mcsrc++; \
 case 7: *mcdst++ = *mcsrc++; \
 case 6: *mcdst++ = *mcsrc++; \
 case 5: *mcdst++ = *mcsrc++; \
 case 4: *mcdst++ = *mcsrc++; \
 case 3: *mcdst++ = *mcsrc++; \
 case 2: *mcdst++ = *mcsrc++; \
 case 1: *mcdst++ = *mcsrc++; if(mcn <= 0) break; mcn--; } \
 } \
} while(0)
457
458#endif
459
460
461
462
463
464
465
466
467#ifndef HAVE_MMAP
468#define HAVE_MMAP 1
469#endif
470
471
472
473
474
475
476
477#ifndef HAVE_MREMAP
478#ifdef INTERNAL_LINUX_C_LIB
479#define HAVE_MREMAP 1
480#else
481#define HAVE_MREMAP 0
482#endif
483#endif
484
485#if HAVE_MMAP
486
487#include <unistd.h>
488#include <fcntl.h>
489#include <sys/mman.h>
490
491#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
492#define MAP_ANONYMOUS MAP_ANON
493#endif
494
495#endif
496
497
498
499
500
501
502
503
504
505#ifndef LACKS_UNISTD_H
506# include <unistd.h>
507#endif
508
509#ifndef malloc_getpagesize
510# ifdef _SC_PAGESIZE
511# ifndef _SC_PAGE_SIZE
512# define _SC_PAGE_SIZE _SC_PAGESIZE
513# endif
514# endif
515# ifdef _SC_PAGE_SIZE
516# define malloc_getpagesize sysconf(_SC_PAGE_SIZE)
517# else
518# if defined(BSD) || defined(DGUX) || defined(HAVE_GETPAGESIZE)
519 extern size_t getpagesize();
520# define malloc_getpagesize getpagesize()
521# else
522# ifdef WIN32
523# define malloc_getpagesize (4096)
524# else
525# ifndef LACKS_SYS_PARAM_H
526# include <sys/param.h>
527# endif
528# ifdef EXEC_PAGESIZE
529# define malloc_getpagesize EXEC_PAGESIZE
530# else
531# ifdef NBPG
532# ifndef CLSIZE
533# define malloc_getpagesize NBPG
534# else
535# define malloc_getpagesize (NBPG * CLSIZE)
536# endif
537# else
538# ifdef NBPC
539# define malloc_getpagesize NBPC
540# else
541# ifdef PAGESIZE
542# define malloc_getpagesize PAGESIZE
543# else
544# define malloc_getpagesize (4096)
545# endif
546# endif
547# endif
548# endif
549# endif
550# endif
551# endif
552#endif
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582#if HAVE_USR_INCLUDE_MALLOC_H
583#include "/usr/include/malloc.h"
584#else
585
586
587
588struct mallinfo {
589 int arena;
590 int ordblks;
591 int smblks;
592 int hblks;
593 int hblkhd;
594 int usmblks;
595 int fsmblks;
596 int uordblks;
597 int fordblks;
598 int keepcost;
599};
600
601
602
603#define M_MXFAST 1
604#define M_NLBLKS 2
605#define M_GRAIN 3
606#define M_KEEP 4
607
608#endif
609
610
611
612#define M_TRIM_THRESHOLD -1
613#define M_TOP_PAD -2
614#define M_MMAP_THRESHOLD -3
615#define M_MMAP_MAX -4
616
617
618#ifndef DEFAULT_TRIM_THRESHOLD
619#define DEFAULT_TRIM_THRESHOLD (128 * 1024)
620#endif
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672#ifndef DEFAULT_TOP_PAD
673#define DEFAULT_TOP_PAD (0)
674#endif
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705#ifndef DEFAULT_MMAP_THRESHOLD
706#define DEFAULT_MMAP_THRESHOLD (128 * 1024)
707#endif
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748#ifndef DEFAULT_MMAP_MAX
749#if HAVE_MMAP
750#define DEFAULT_MMAP_MAX (64)
751#else
752#define DEFAULT_MMAP_MAX (0)
753#endif
754#endif
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802#ifdef INTERNAL_LINUX_C_LIB
803
804#if __STD_C
805
806Void_t * __default_morecore_init (ptrdiff_t);
807Void_t *(*__morecore)(ptrdiff_t) = __default_morecore_init;
808
809#else
810
811Void_t * __default_morecore_init ();
812Void_t *(*__morecore)() = __default_morecore_init;
813
814#endif
815
816#define MORECORE (*__morecore)
817#define MORECORE_FAILURE 0
818#define MORECORE_CLEARS 1
819
820#else
821
822#if __STD_C
823extern Void_t* sbrk(ptrdiff_t);
824#else
825extern Void_t* sbrk();
826#endif
827
828#ifndef MORECORE
829#define MORECORE sbrk
830#endif
831
832#ifndef MORECORE_FAILURE
833#define MORECORE_FAILURE -1
834#endif
835
836#ifndef MORECORE_CLEARS
837#define MORECORE_CLEARS 1
838#endif
839
840#endif
841
842#if defined(INTERNAL_LINUX_C_LIB) && defined(__ELF__)
843
844#define cALLOc __libc_calloc
845#define fREe __libc_free
846#define mALLOc __libc_malloc
847#define mEMALIGn __libc_memalign
848#define rEALLOc __libc_realloc
849#define vALLOc __libc_valloc
850#define pvALLOc __libc_pvalloc
851#define mALLINFo __libc_mallinfo
852#define mALLOPt __libc_mallopt
853
854#pragma weak calloc = __libc_calloc
855#pragma weak free = __libc_free
856#pragma weak cfree = __libc_free
857#pragma weak malloc = __libc_malloc
858#pragma weak memalign = __libc_memalign
859#pragma weak realloc = __libc_realloc
860#pragma weak valloc = __libc_valloc
861#pragma weak pvalloc = __libc_pvalloc
862#pragma weak mallinfo = __libc_mallinfo
863#pragma weak mallopt = __libc_mallopt
864
865#else
866
867#ifdef USE_DL_PREFIX
868#define cALLOc dlcalloc
869#define fREe dlfree
870#define mALLOc dlmalloc
871#define mEMALIGn dlmemalign
872#define rEALLOc dlrealloc
873#define vALLOc dlvalloc
874#define pvALLOc dlpvalloc
875#define mALLINFo dlmallinfo
876#define mALLOPt dlmallopt
877#else
878#define cALLOc calloc
879#define fREe free
880#define mALLOc malloc
881#define mEMALIGn memalign
882#define rEALLOc realloc
883#define vALLOc valloc
884#define pvALLOc pvalloc
885#define mALLINFo mallinfo
886#define mALLOPt mallopt
887#endif
888
889#endif
890
891
892
893#if __STD_C
894
895Void_t* mALLOc(size_t);
896void fREe(Void_t*);
897Void_t* rEALLOc(Void_t*, size_t);
898Void_t* mEMALIGn(size_t, size_t);
899Void_t* vALLOc(size_t);
900Void_t* pvALLOc(size_t);
901Void_t* cALLOc(size_t, size_t);
902void cfree(Void_t*);
903int malloc_trim(size_t);
904size_t malloc_usable_size(Void_t*);
905void malloc_stats();
906int mALLOPt(int, int);
907struct mallinfo mALLINFo(void);
908#else
909Void_t* mALLOc();
910void fREe();
911Void_t* rEALLOc();
912Void_t* mEMALIGn();
913Void_t* vALLOc();
914Void_t* pvALLOc();
915Void_t* cALLOc();
916void cfree();
917int malloc_trim();
918size_t malloc_usable_size();
919void malloc_stats();
920int mALLOPt();
921struct mallinfo mALLINFo();
922#endif
923
924
925#ifdef __cplusplus
926};
927#endif
928
929
930#endif
931
932#include <malloc.h>
933#ifdef DEBUG
934#if __STD_C
935static void malloc_update_mallinfo (void);
936void malloc_stats (void);
937#else
938static void malloc_update_mallinfo ();
939void malloc_stats();
940#endif
941#endif
942
943DECLARE_GLOBAL_DATA_PTR;
944
945
946
947
948
949
950
951
952
953#ifdef WIN32
954
955#define AlignPage(add) (((add) + (malloc_getpagesize-1)) & \
956~(malloc_getpagesize-1))
957#define AlignPage64K(add) (((add) + (0x10000 - 1)) & ~(0x10000 - 1))
958
959
960#define RESERVED_SIZE (1024*1024*64)
961#define NEXT_SIZE (2048*1024)
962#define TOP_MEMORY ((unsigned long)2*1024*1024*1024)
963
/* Bookkeeping for the Win32 sbrk emulation below: a LIFO list of every
   region reserved with VirtualAlloc, so gcleanup() can release them. */
struct GmListElement;
typedef struct GmListElement GmListElement;

struct GmListElement
{
	GmListElement* next;	/* next (earlier-reserved) region */
	void* base;		/* base address returned by VirtualAlloc */
};

static GmListElement* head = 0;		/* list of all reserved regions */
static unsigned int gNextAddress = 0;	/* current break within the region */
static unsigned int gAddressBase = 0;	/* base of the current region */
static unsigned int gAllocatedSize = 0;	/* reserved size of current region */
977
978static
979GmListElement* makeGmListElement (void* bas)
980{
981 GmListElement* this;
982 this = (GmListElement*)(void*)LocalAlloc (0, sizeof (GmListElement));
983 assert (this);
984 if (this)
985 {
986 this->base = bas;
987 this->next = head;
988 head = this;
989 }
990 return this;
991}
992
/* Release everything the Win32 sbrk emulation grabbed: decommit the
 * pages handed out from the current region, then MEM_RELEASE every
 * reserved region recorded on the `head` list, freeing the list nodes
 * as we go. */
void gcleanup ()
{
	BOOL rval;
	/* The most recent reservation is expected at the list head. */
	assert ( (head == NULL) || (head->base == (void*)gAddressBase));
	if (gAddressBase && (gNextAddress - gAddressBase))
	{
		rval = VirtualFree ((void*)gAddressBase,
							gNextAddress - gAddressBase,
							MEM_DECOMMIT);
	assert (rval);
	}
	while (head)
	{
		GmListElement* next = head->next;
		rval = VirtualFree (head->base, 0, MEM_RELEASE);
		assert (rval);
		LocalFree (head);
		head = next;
	}
}
1013
/* Scan the address space upward from start_address for a free region
 * of at least `size` bytes, using VirtualQuery.  Returns a candidate
 * base address, or NULL if nothing below TOP_MEMORY fits. */
static
void* findRegion (void* start_address, unsigned long size)
{
	MEMORY_BASIC_INFORMATION info;
	if (size >= TOP_MEMORY) return NULL;

	while ((unsigned long)start_address + size < TOP_MEMORY)
	{
		VirtualQuery (start_address, &info, sizeof (info));
		if ((info.State == MEM_FREE) && (info.RegionSize >= size))
			return start_address;
		else
		{
			/* This region doesn't fit: skip past it... */
			start_address = (char*)info.BaseAddress + info.RegionSize;

			/* ...and round up to the next 64K boundary.
			 * VirtualAlloc rounds the requested address DOWN to a
			 * 64K boundary; without this step it could round back
			 * into the region we just rejected, fail, and send the
			 * caller into an infinite retry loop. */
			start_address =
				(void *) AlignPage64K((unsigned long) start_address);
		}
	}
	return NULL;

}
1053
1054
/* sbrk() emulation for WIN32, used as MORECORE.
 *
 * size > 0: grow the break by committing pages from the currently
 *   reserved region, reserving a fresh region (found via findRegion)
 *   when the current one is exhausted.  Returns the OLD break, or
 *   (void*)-1 on failure, matching sbrk semantics.
 * size < 0: shrink the break, decommitting whole pages above it.
 * size == 0: return the current break.
 */
void* wsbrk (long size)
{
	void* tmp;
	if (size > 0)
	{
		if (gAddressBase == 0)
		{
			/* First call: reserve (not commit) a big arena. */
			gAllocatedSize = max (RESERVED_SIZE, AlignPage (size));
			gNextAddress = gAddressBase =
				(unsigned int)VirtualAlloc (NULL, gAllocatedSize,
											MEM_RESERVE, PAGE_NOACCESS);
		} else if (AlignPage (gNextAddress + size) > (gAddressBase +
gAllocatedSize))
		{
			/* Current reservation exhausted: reserve another region,
			 * starting the search just past the old one. */
			long new_size = max (NEXT_SIZE, AlignPage (size));
			void* new_address = (void*)(gAddressBase+gAllocatedSize);
			do
			{
				new_address = findRegion (new_address, new_size);

				if (new_address == 0)
					return (void*)-1;

				gAddressBase = gNextAddress =
					(unsigned int)VirtualAlloc (new_address, new_size,
												MEM_RESERVE, PAGE_NOACCESS);
				/* Retry if another thread snagged the region between
				 * findRegion() and VirtualAlloc(). */
			}
			while (gAddressBase == 0);

			assert (new_address == (void*)gAddressBase);

			gAllocatedSize = new_size;

			/* Remember the region so gcleanup() can release it. */
			if (!makeGmListElement ((void*)gAddressBase))
				return (void*)-1;
		}
		/* Commit any not-yet-committed pages the request spans. */
		if ((size + gNextAddress) > AlignPage (gNextAddress))
		{
			void* res;
			res = VirtualAlloc ((void*)AlignPage (gNextAddress),
								(size + gNextAddress -
								 AlignPage (gNextAddress)),
								MEM_COMMIT, PAGE_READWRITE);
			if (res == 0)
				return (void*)-1;
		}
		tmp = (void*)gNextAddress;
		gNextAddress = (unsigned int)tmp + size;
		return tmp;
	}
	else if (size < 0)
	{
		unsigned int alignedGoal = AlignPage (gNextAddress + size);
		/* Decommit whole pages above the new break. */
		if (alignedGoal >= gAddressBase)
		{
			VirtualFree ((void*)alignedGoal, gNextAddress - alignedGoal,
						 MEM_DECOMMIT);
			gNextAddress = gNextAddress + size;
			return (void*)gNextAddress;
		}
		else
		{
			/* Goal below the region base: decommit the whole region
			 * and report failure. */
			VirtualFree ((void*)gAddressBase, gNextAddress - gAddressBase,
						 MEM_DECOMMIT);
			gNextAddress = gAddressBase;
			return (void*)-1;
		}
	}
	else
	{
		return (void*)gNextAddress;
	}
}
1132
1133#endif
1134
1135
1136
1137
1138
1139
1140
1141
/* The header every chunk carries.  For an in-use chunk only `size` is
   meaningful (the user data overlays fd/bk and the NEXT chunk's
   prev_size); for a free chunk all four fields are live. */
struct malloc_chunk
{
  INTERNAL_SIZE_T prev_size; /* Size of previous chunk (if free). */
  INTERNAL_SIZE_T size;      /* Size in bytes, including overhead. */
  struct malloc_chunk* fd;   /* double links -- used only if free. */
  struct malloc_chunk* bk;
} __attribute__((__may_alias__)) ;

typedef struct malloc_chunk* mchunkptr;
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
/* Sizes and alignments.  User pointers are aligned to two words. */
#define SIZE_SZ (sizeof(INTERNAL_SIZE_T))
#define MALLOC_ALIGNMENT (SIZE_SZ + SIZE_SZ)
#define MALLOC_ALIGN_MASK (MALLOC_ALIGNMENT - 1)
#define MINSIZE (sizeof(struct malloc_chunk))

/* Conversion between a chunk header and the user pointer malloc hands
   out (user data starts after the two size words). */
#define chunk2mem(p) ((Void_t*)((char*)(p) + 2*SIZE_SZ))
#define mem2chunk(mem) ((mchunkptr)((char*)(mem) - 2*SIZE_SZ))

/* Pad a user request up to a usable chunk size: add one word of
   overhead, round up to the alignment, and never go below MINSIZE. */
#define request2size(req) \
 (((long)((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) < \
  (long)(MINSIZE + MALLOC_ALIGN_MASK)) ? MINSIZE : \
   (((req) + (SIZE_SZ + MALLOC_ALIGN_MASK)) & ~(MALLOC_ALIGN_MASK)))

/* Check that a user pointer is properly aligned. */
#define aligned_OK(m) (((unsigned long)((m)) & (MALLOC_ALIGN_MASK)) == 0)
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
/* The two low bits of `size` are flag bits (sizes are always at least
   8-byte multiples, so the bits are free). */

/* Set when the chunk BELOW this one is in use. */
#define PREV_INUSE 0x1

/* Set when the chunk was obtained with mmap(). */
#define IS_MMAPPED 0x2

/* Mask to strip both flag bits from a size field. */
#define SIZE_BITS (PREV_INUSE|IS_MMAPPED)

/* Physically adjacent next chunk. */
#define next_chunk(p) ((mchunkptr)( ((char*)(p)) + ((p)->size & ~PREV_INUSE) ))

/* Physically adjacent previous chunk (valid only if that chunk is
   free, since only then is prev_size maintained). */
#define prev_chunk(p)\
 ((mchunkptr)( ((char*)(p)) - ((p)->prev_size) ))

/* Chunk located s bytes after p. */
#define chunk_at_offset(p, s) ((mchunkptr)(((char*)(p)) + (s)))

/* A chunk's own in-use status is stored in the NEXT chunk's
   PREV_INUSE bit. */
#define inuse(p)\
((((mchunkptr)(((char*)(p))+((p)->size & ~PREV_INUSE)))->size) & PREV_INUSE)

/* In-use status of the chunk below p. */
#define prev_inuse(p) ((p)->size & PREV_INUSE)

#define chunk_is_mmapped(p) ((p)->size & IS_MMAPPED)

/* Mark/clear p's in-use bit (which lives in the next chunk). */
#define set_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size |= PREV_INUSE

#define clear_inuse(p)\
((mchunkptr)(((char*)(p)) + ((p)->size & ~PREV_INUSE)))->size &= ~(PREV_INUSE)

/* Same operations, but for the chunk at a known byte offset s. */
#define inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size & PREV_INUSE)

#define set_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size |= PREV_INUSE)

#define clear_inuse_bit_at_offset(p, s)\
 (((mchunkptr)(((char*)(p)) + (s)))->size &= ~(PREV_INUSE))

/* Size with both flag bits stripped. */
#define chunksize(p) ((p)->size & ~(SIZE_BITS))

/* Set size while preserving the PREV_INUSE bit. */
#define set_head_size(p, s) ((p)->size = (((p)->size & PREV_INUSE) | (s)))

/* Set size and flag bits together. */
#define set_head(p, s) ((p)->size = (s))

/* Record this chunk's size in the next chunk's prev_size (done only
   when the chunk is free). */
#define set_foot(p, s) (((mchunkptr)((char*)(p) + (s)))->prev_size = (s))
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
/* 128 doubly-linked free-list bins, sorted by size. */
#define NAV 128

typedef struct malloc_chunk* mbinptr;

/* av_ stores only the fd/bk pair of each bin header; bin_at() backs up
   2*SIZE_SZ so the pair can be treated as a malloc_chunk whose
   prev_size/size fields are never touched. */
#define bin_at(i) ((mbinptr)((char*)&(av_[2*(i) + 2]) - 2*SIZE_SZ))
#define next_bin(b) ((mbinptr)((char*)(b) + 2 * sizeof(mbinptr)))
#define prev_bin(b) ((mbinptr)((char*)(b) - 2 * sizeof(mbinptr)))

/* The topmost chunk (borders the end of available memory) lives in
   av_[2]; bin 1 holds the remainder from the most recent split. */
#define top (av_[2])
#define last_remainder (bin_at(1))

/* Sentinel `top` value used until the first real top chunk exists;
   aliasing bin 0 avoids any special-case NULL checks. */
#define initial_top ((mchunkptr)(bin_at(0)))

/* Initialize a bin as an empty circular list pointing at itself. */
#define IAV(i) bin_at(i), bin_at(i)

static mbinptr av_[NAV * 2 + 2] = {
 NULL, NULL,
 IAV(0), IAV(1), IAV(2), IAV(3), IAV(4), IAV(5), IAV(6), IAV(7),
 IAV(8), IAV(9), IAV(10), IAV(11), IAV(12), IAV(13), IAV(14), IAV(15)
};
1466
/* The bin array: slots 0-1 are unused padding so bin_at() works,
   then NAV bins, each initialized as an empty self-referential list.
   (When CONFIG_NEEDS_MANUAL_RELOC is set, these link-time addresses
   are fixed up post-relocation by malloc_bin_reloc() below.) */
static mbinptr av_[NAV * 2 + 2] = {
 NULL, NULL,
 IAV(0), IAV(1), IAV(2), IAV(3), IAV(4), IAV(5), IAV(6), IAV(7),
 IAV(8), IAV(9), IAV(10), IAV(11), IAV(12), IAV(13), IAV(14), IAV(15),
 IAV(16), IAV(17), IAV(18), IAV(19), IAV(20), IAV(21), IAV(22), IAV(23),
 IAV(24), IAV(25), IAV(26), IAV(27), IAV(28), IAV(29), IAV(30), IAV(31),
 IAV(32), IAV(33), IAV(34), IAV(35), IAV(36), IAV(37), IAV(38), IAV(39),
 IAV(40), IAV(41), IAV(42), IAV(43), IAV(44), IAV(45), IAV(46), IAV(47),
 IAV(48), IAV(49), IAV(50), IAV(51), IAV(52), IAV(53), IAV(54), IAV(55),
 IAV(56), IAV(57), IAV(58), IAV(59), IAV(60), IAV(61), IAV(62), IAV(63),
 IAV(64), IAV(65), IAV(66), IAV(67), IAV(68), IAV(69), IAV(70), IAV(71),
 IAV(72), IAV(73), IAV(74), IAV(75), IAV(76), IAV(77), IAV(78), IAV(79),
 IAV(80), IAV(81), IAV(82), IAV(83), IAV(84), IAV(85), IAV(86), IAV(87),
 IAV(88), IAV(89), IAV(90), IAV(91), IAV(92), IAV(93), IAV(94), IAV(95),
 IAV(96), IAV(97), IAV(98), IAV(99), IAV(100), IAV(101), IAV(102), IAV(103),
 IAV(104), IAV(105), IAV(106), IAV(107), IAV(108), IAV(109), IAV(110), IAV(111),
 IAV(112), IAV(113), IAV(114), IAV(115), IAV(116), IAV(117), IAV(118), IAV(119),
 IAV(120), IAV(121), IAV(122), IAV(123), IAV(124), IAV(125), IAV(126), IAV(127)
};
1486
#ifdef CONFIG_NEEDS_MANUAL_RELOC
/* The av_[] initializers are link-time addresses; after U-Boot
 * relocates itself to RAM every bin pointer must be shifted by
 * gd->reloc_off or the circular bin lists would point at the old
 * image.  Slots 0/1 (NULL, NULL) are skipped. */
static void malloc_bin_reloc(void)
{
	mbinptr *p = &av_[2];
	size_t i;

	for (i = 2; i < ARRAY_SIZE(av_); ++i, ++p)
		*p = (mbinptr)((ulong)*p + gd->reloc_off);
}
#else
static inline void malloc_bin_reloc(void) {}
#endif
1499
ulong mem_malloc_start = 0;	/* lowest address of the malloc arena */
ulong mem_malloc_end = 0;	/* one past the highest arena address */
ulong mem_malloc_brk = 0;	/* current program break within the arena */
1503
1504void *sbrk(ptrdiff_t increment)
1505{
1506 ulong old = mem_malloc_brk;
1507 ulong new = old + increment;
1508
1509
1510
1511
1512
1513 if (increment < 0)
1514 memset((void *)new, 0, -increment);
1515
1516 if ((new < mem_malloc_start) || (new > mem_malloc_end))
1517 return (void *)MORECORE_FAILURE;
1518
1519 mem_malloc_brk = new;
1520
1521 return (void *)old;
1522}
1523
1524void mem_malloc_init(ulong start, ulong size)
1525{
1526 mem_malloc_start = start;
1527 mem_malloc_end = start + size;
1528 mem_malloc_brk = start;
1529
1530 memset((void *)mem_malloc_start, 0, size);
1531
1532 malloc_bin_reloc();
1533}
1534
1535
1536
/* First/last chunk in a bin (bins are circular doubly-linked lists). */
#define first(b) ((b)->fd)
#define last(b) ((b)->bk)

/* Map a chunk size to its bin index.  Bins below 512 bytes are spaced
   8 bytes apart (exact-fit); larger bins are approximately
   logarithmically spaced. */
#define bin_index(sz) \
(((((unsigned long)(sz)) >> 9) == 0) ? (((unsigned long)(sz)) >> 3): \
 ((((unsigned long)(sz)) >> 9) <= 4) ? 56 + (((unsigned long)(sz)) >> 6): \
 ((((unsigned long)(sz)) >> 9) <= 20) ? 91 + (((unsigned long)(sz)) >> 9): \
 ((((unsigned long)(sz)) >> 9) <= 84) ? 110 + (((unsigned long)(sz)) >> 12): \
 ((((unsigned long)(sz)) >> 9) <= 340) ? 119 + (((unsigned long)(sz)) >> 15): \
 ((((unsigned long)(sz)) >> 9) <= 1364) ? 124 + (((unsigned long)(sz)) >> 18): \
 126)

/* Small bins hold one size each, so lookups need no search. */
#define MAX_SMALLBIN 63
#define MAX_SMALLBIN_SIZE 512
#define SMALLBIN_WIDTH 8

#define smallbin_index(sz) (((unsigned long)(sz)) >> 3)

/* Requests that will land in an exact-fit small bin. */
#define is_small_request(nb) (nb < MAX_SMALLBIN_SIZE - SMALLBIN_WIDTH)

/* av_[1] doubles as a bitvector ("binblocks") marking which groups of
   BINBLOCKWIDTH bins might be non-empty, so the best-fit scan can skip
   empty regions in bulk. */
#define BINBLOCKWIDTH 4

#define binblocks_r ((INTERNAL_SIZE_T)av_[1])
#define binblocks_w (av_[1])

/* Bit for the block containing bin index ix. */
#define idx2binblock(ix) ((unsigned)1 << (ix / BINBLOCKWIDTH))
#define mark_binblock(ii) (binblocks_w = (mbinptr)(binblocks_r | idx2binblock(ii)))
#define clear_binblock(ii) (binblocks_w = (mbinptr)(binblocks_r & ~(idx2binblock(ii))))
1591
1592
1593
1594
1595
1596
1597
1598
1599
/* Tunables (settable via mallopt). */
static unsigned long trim_threshold = DEFAULT_TRIM_THRESHOLD;
static unsigned long top_pad = DEFAULT_TOP_PAD;
static unsigned int n_mmaps_max = DEFAULT_MMAP_MAX;
static unsigned long mmap_threshold = DEFAULT_MMAP_THRESHOLD;

/* First address handed out by MORECORE; (char*)-1 until initialized. */
static char* sbrk_base = (char*)(-1);

/* High-water mark of memory obtained via MORECORE. */
static unsigned long max_sbrked_mem = 0;

/* High-water mark of sbrked + mmapped memory. */
static unsigned long max_total_mem = 0;

/* Internal statistics, reported via mallinfo(). */
static struct mallinfo current_mallinfo = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

/* Total bytes currently obtained from the system via MORECORE. */
#define sbrked_mem (current_mallinfo.arena)

/* mmap tracking (only used when HAVE_MMAP / DEBUG are enabled). */
#ifdef DEBUG
static unsigned int n_mmaps = 0;
#endif
static unsigned long mmapped_mem = 0;
#if HAVE_MMAP
static unsigned int max_n_mmaps = 0;
static unsigned long max_mmapped_mem = 0;
#endif
1630
1631
1632
1633
1634
1635
1636
1637#ifdef DEBUG
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
/* Heap-consistency checkers, compiled only under DEBUG.  Each asserts
   structural invariants; a failing assert indicates corruption or an
   allocator bug.  The no-op macros at the bottom compile the checks
   away in normal builds. */

#if __STD_C
static void do_check_chunk(mchunkptr p)
#else
static void do_check_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;

  /* No checkable chunk is mmapped. */
  assert(!chunk_is_mmapped(p));

  /* Chunk must lie inside the sbrk'd region, below top. */
  assert((char*)p >= sbrk_base);
  if (p != top)
    assert((char*)p + sz <= (char*)top);
  else
    assert((char*)p + sz <= sbrk_base + sbrked_mem);

}

/* Extra invariants that hold only for free chunks. */
#if __STD_C
static void do_check_free_chunk(mchunkptr p)
#else
static void do_check_free_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
  mchunkptr next = chunk_at_offset(p, sz);

  do_check_chunk(p);

  /* Chunk must claim to be free. */
  assert(!inuse(p));

  /* Unless a fencepost-sized remnant, it must be well-formed. */
  if ((long)sz >= (long)MINSIZE)
  {
    assert((sz & MALLOC_ALIGN_MASK) == 0);
    assert(aligned_OK(chunk2mem(p)));
    /* ... matching footer field */
    assert(next->prev_size == sz);
    /* ... and is fully consolidated (no free neighbors). */
    assert(prev_inuse(p));
    assert (next == top || inuse(next));

    /* ... and has minimally sane bin links. */
    assert(p->fd->bk == p);
    assert(p->bk->fd == p);
  }
  else /* markers are always of size SIZE_SZ */
    assert(sz == SIZE_SZ);
}

/* Invariants for a chunk that is in use. */
#if __STD_C
static void do_check_inuse_chunk(mchunkptr p)
#else
static void do_check_inuse_chunk(p) mchunkptr p;
#endif
{
  mchunkptr next = next_chunk(p);
  do_check_chunk(p);

  /* Chunk must claim to be in use. */
  assert(inuse(p));

  /* Neighbors, if free, must be consistent free chunks
     (a fully consolidated heap has no adjacent free pairs). */
  if (!prev_inuse(p))
  {
    mchunkptr prv = prev_chunk(p);
    assert(next_chunk(prv) == p);
    do_check_free_chunk(prv);
  }
  if (next == top)
  {
    assert(prev_inuse(next));
    assert(chunksize(next) >= MINSIZE);
  }
  else if (!inuse(next))
    do_check_free_chunk(next);

}

/* Invariants for a chunk just returned by the allocator for a
   request of s bytes. */
#if __STD_C
static void do_check_malloced_chunk(mchunkptr p, INTERNAL_SIZE_T s)
#else
static void do_check_malloced_chunk(p, s) mchunkptr p; INTERNAL_SIZE_T s;
#endif
{
  INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
  long room = sz - s;

  do_check_inuse_chunk(p);

  /* Legal size: big enough for the request, but with less than
     MINSIZE of slack (otherwise it should have been split). */
  assert((long)sz >= (long)MINSIZE);
  assert((sz & MALLOC_ALIGN_MASK) == 0);
  assert(room >= 0);
  assert(room < (long)MINSIZE);

  /* ... and alignment. */
  assert(aligned_OK(chunk2mem(p)));

  /* ... and was allocated at the front of a fully consolidated run. */
  assert(prev_inuse(p));

}


#define check_free_chunk(P) do_check_free_chunk(P)
#define check_inuse_chunk(P) do_check_inuse_chunk(P)
#define check_chunk(P) do_check_chunk(P)
#define check_malloced_chunk(P,N) do_check_malloced_chunk(P,N)
#else
#define check_free_chunk(P)
#define check_inuse_chunk(P)
#define check_chunk(P)
#define check_malloced_chunk(P,N)
#endif
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
/* frontlink(P, S, IDX, BK, FD):
   Put free chunk P of size S into its bin.  Small bins hold one size
   each, so P goes at the front unconditionally; large bins are kept
   sorted by decreasing size, so the list is walked to find the
   insertion point.  IDX/BK/FD are caller-supplied scratch variables
   (the macro assigns them).  Also marks the bin's binblock bit. */
#define frontlink(P, S, IDX, BK, FD) \
{ \
 if (S < MAX_SMALLBIN_SIZE) \
 { \
 IDX = smallbin_index(S); \
 mark_binblock(IDX); \
 BK = bin_at(IDX); \
 FD = BK->fd; \
 P->bk = BK; \
 P->fd = FD; \
 FD->bk = BK->fd = P; \
 } \
 else \
 { \
 IDX = bin_index(S); \
 BK = bin_at(IDX); \
 FD = BK->fd; \
 if (FD == BK) mark_binblock(IDX); \
 else \
 { \
 while (FD != BK && S < chunksize(FD)) FD = FD->fd; \
 BK = FD->bk; \
 } \
 P->bk = BK; \
 P->fd = FD; \
 FD->bk = BK->fd = P; \
 } \
}
1818
1819
1820
1821
/* unlink(P, BK, FD): take chunk P out of its bin list.
   BK/FD are caller-supplied scratch variables. */
#define unlink(P, BK, FD) \
{ \
 BK = P->bk; \
 FD = P->fd; \
 FD->bk = BK; \
 BK->fd = FD; \
} \

/* Make P the sole member of the last_remainder bin (the leftover from
   the most recent split, preferred for the next small request). */
#define link_last_remainder(P) \
{ \
 last_remainder->fd = last_remainder->bk = P; \
 P->fd = P->bk = last_remainder; \
}

/* Empty the last_remainder bin (point it back at itself). */
#define clear_last_remainder \
 (last_remainder->fd = last_remainder->bk = last_remainder)
1842
1843
1844
1845
1846
1847
1848
1849#if HAVE_MMAP
1850
/* Obtain a chunk of at least `size` user bytes directly from mmap().
 * Returns the chunk (marked IS_MMAPPED, with the page-rounded size in
 * its header) or 0 on failure / when n_mmaps_max is reached.
 * NOTE(review): in this file n_mmaps is declared only under
 * #ifdef DEBUG, so this HAVE_MMAP path appears compilable only in
 * DEBUG builds -- confirm before enabling HAVE_MMAP. */
#if __STD_C
static mchunkptr mmap_chunk(size_t size)
#else
static mchunkptr mmap_chunk(size) size_t size;
#endif
{
  size_t page_mask = malloc_getpagesize - 1;
  mchunkptr p;

#ifndef MAP_ANONYMOUS
  static int fd = -1;
#endif

  if(n_mmaps >= n_mmaps_max) return 0; /* too many regions */

  /* For mmapped chunks, the overhead is one SIZE_SZ unit larger, because
   * there is no following chunk whose prev_size field could be used.
   */
  size = (size + SIZE_SZ + page_mask) & ~page_mask;

#ifdef MAP_ANONYMOUS
  p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE,
		      MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
#else /* !MAP_ANONYMOUS */
  /* Fall back to mapping /dev/zero; the fd is opened once and cached. */
  if (fd < 0)
  {
    fd = open("/dev/zero", O_RDWR);
    if(fd < 0) return 0;
  }
  p = (mchunkptr)mmap(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE, fd, 0);
#endif

  if(p == (mchunkptr)-1) return 0;

  n_mmaps++;
  if (n_mmaps > max_n_mmaps) max_n_mmaps = n_mmaps;

  /* We demand that eight bytes into a page must be 8-byte aligned. */
  assert(aligned_OK(chunk2mem(p)));

  /* The offset to the start of the mmapped region is stored
   * in the prev_size field of the chunk; normally it is zero,
   * but that can be changed in memalign().
   */
  p->prev_size = 0;
  set_head(p, size|IS_MMAPPED);

  mmapped_mem += size;
  if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
    max_mmapped_mem = mmapped_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;
  return p;
}
1905
/* Return an mmapped chunk to the system.  The prev_size field holds
 * the offset from the start of the mapped region to the chunk header
 * (nonzero only for memalign'd chunks), so the full mapping is
 * [p - prev_size, p + size). */
#if __STD_C
static void munmap_chunk(mchunkptr p)
#else
static void munmap_chunk(p) mchunkptr p;
#endif
{
  INTERNAL_SIZE_T size = chunksize(p);
  int ret;

  assert (chunk_is_mmapped(p));
  assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
  assert((n_mmaps > 0));
  assert(((p->prev_size + size) & (malloc_getpagesize-1)) == 0);

  n_mmaps--;
  mmapped_mem -= (size + p->prev_size);

  ret = munmap((char *)p - p->prev_size, size + p->prev_size);

  /* munmap returns non-zero on failure */
  assert(ret == 0);
}
1928
#if HAVE_MREMAP

/* Resize an mmapped chunk in place (or let the kernel move it --
 * MREMAP_MAYMOVE is passed).  Returns the possibly-relocated chunk,
 * or 0 if mremap failed.  `offset` (p->prev_size) is the memalign
 * offset from the start of the mapping. */
#if __STD_C
static mchunkptr mremap_chunk(mchunkptr p, size_t new_size)
#else
static mchunkptr mremap_chunk(p, new_size) mchunkptr p; size_t new_size;
#endif
{
  size_t page_mask = malloc_getpagesize - 1;
  INTERNAL_SIZE_T offset = p->prev_size;
  INTERNAL_SIZE_T size = chunksize(p);
  char *cp;

  assert (chunk_is_mmapped(p));
  assert(! ((char*)p >= sbrk_base && (char*)p < sbrk_base + sbrked_mem));
  assert((n_mmaps > 0));
  assert(((size + offset) & (malloc_getpagesize-1)) == 0);

  /* Note the extra SIZE_SZ overhead as in mmap_chunk(). */
  new_size = (new_size + offset + SIZE_SZ + page_mask) & ~page_mask;

  cp = (char *)mremap((char *)p - offset, size + offset, new_size, 1);

  if (cp == (char *)-1) return 0;

  p = (mchunkptr)(cp + offset);

  assert(aligned_OK(chunk2mem(p)));

  assert((p->prev_size == offset));
  set_head(p, (new_size - offset)|IS_MMAPPED);

  mmapped_mem -= size + offset;
  mmapped_mem += new_size;
  if ((unsigned long)mmapped_mem > (unsigned long)max_mmapped_mem)
    max_mmapped_mem = mmapped_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;
  return p;
}

#endif
1971
1972#endif
1973
1974
1975
1976
1977
1978
1979
1980
1981
/* Grow the top chunk by at least nb bytes via MORECORE, page-aligning
 * the new break and stitching the old top into the heap when the new
 * core is not contiguous with it.  On success `top` is updated; on
 * MORECORE failure the function simply returns and the caller's
 * allocation fails. */
#if __STD_C
static void malloc_extend_top(INTERNAL_SIZE_T nb)
#else
static void malloc_extend_top(nb) INTERNAL_SIZE_T nb;
#endif
{
  char* brk;                   /* return value from MORECORE */
  INTERNAL_SIZE_T front_misalign; /* bytes needed to align new space */
  INTERNAL_SIZE_T correction;  /* bytes for alignment + 2nd MORECORE call */
  char* new_brk;               /* return from 2nd MORECORE call */
  INTERNAL_SIZE_T top_size;    /* new size of top chunk */

  mchunkptr old_top = top;
  INTERNAL_SIZE_T old_top_size = chunksize(old_top);
  char* old_end = (char*)(chunk_at_offset(old_top, old_top_size));

  /* Pad request with top_pad plus room for a fencepost. */
  INTERNAL_SIZE_T sbrk_size = nb + top_pad + MINSIZE;
  unsigned long pagesz = malloc_getpagesize;

  /* If not the first time through, round to preserve page boundary.
     (Otherwise, we need to correct anyway.) */
  if (sbrk_base != (char*)(-1))
    sbrk_size = (sbrk_size + (pagesz - 1)) & ~(pagesz - 1);

  brk = (char*)(MORECORE (sbrk_size));

  /* Fail if sbrk failed or if a foreign sbrk stole the memory. */
  if (brk == (char*)(MORECORE_FAILURE) ||
      (brk < old_end && old_top != initial_top))
    return;

  sbrked_mem += sbrk_size;

  if (brk == old_end) /* can just add bytes to current top */
  {
    top_size = sbrk_size + old_top_size;
    set_head(top, top_size | PREV_INUSE);
  }
  else
  {
    if (sbrk_base == (char*)(-1))  /* First time through. Record base. */
      sbrk_base = brk;
    else  /* Someone else called sbrk(); count the foreign bytes too. */
      sbrked_mem += brk - (char*)old_end;

    /* Guarantee alignment of the first chunk made from this space. */
    front_misalign = (unsigned long)chunk2mem(brk) & MALLOC_ALIGN_MASK;
    if (front_misalign > 0)
    {
      correction = (MALLOC_ALIGNMENT) - front_misalign;
      brk += correction;
    }
    else
      correction = 0;

    /* Guarantee the next brk will be at a page boundary. */
    correction += ((((unsigned long)(brk + sbrk_size))+(pagesz-1)) &
		   ~(pagesz - 1)) - ((unsigned long)(brk + sbrk_size));

    /* Allocate correction */
    new_brk = (char*)(MORECORE (correction));
    if (new_brk == (char*)(MORECORE_FAILURE)) return;

    sbrked_mem += correction;

    top = (mchunkptr)brk;
    top_size = new_brk - brk + correction;
    set_head(top, top_size | PREV_INUSE);

    if (old_top != initial_top)
    {
      /* There must have been an intervening foreign sbrk call.
	 A double fencepost is necessary to prevent consolidation with
	 space below the new top. */
      if (old_top_size < MINSIZE)  /* nothing to recycle */
      {
	set_head(top, PREV_INUSE);
	return;
      }

      /* Shrink old_top to insert fenceposts, keeping size a
	 multiple of MALLOC_ALIGNMENT. */
      old_top_size = (old_top_size - 3*SIZE_SZ) & ~MALLOC_ALIGN_MASK;
      set_head_size(old_top, old_top_size);
      chunk_at_offset(old_top, old_top_size          )->size =
	SIZE_SZ|PREV_INUSE;
      chunk_at_offset(old_top, old_top_size + SIZE_SZ)->size =
	SIZE_SZ|PREV_INUSE;
      /* If possible, release the rest of the old top. */
      if (old_top_size >= MINSIZE)
	fREe(chunk2mem(old_top));
    }
  }

  if ((unsigned long)sbrked_mem > (unsigned long)max_sbrked_mem)
    max_sbrked_mem = sbrked_mem;
  if ((unsigned long)(mmapped_mem + sbrked_mem) > (unsigned long)max_total_mem)
    max_total_mem = mmapped_mem + sbrked_mem;

  /* We always land on a page boundary. */
  assert(((unsigned long)((char*)top + top_size) & (pagesz - 1)) == 0);
}
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
#if __STD_C
Void_t* mALLOc(size_t bytes)
#else
Void_t* mALLOc(bytes) size_t bytes;
#endif
{
  /*
    Allocate a chunk of at least `bytes` usable bytes.  Strategy, in
    order: exact-fit small bin; scan of the request's own bin; the
    last-split remainder; best-fit search of larger bins guided by the
    binblocks bitmap; and finally splitting the top chunk (extending it
    via sbrk/mmap if needed).  Returns NULL on failure.
  */
  mchunkptr victim;           /* chunk being examined / allocated */
  INTERNAL_SIZE_T victim_size; /* its size */
  int idx;                    /* bin index currently being scanned */
  mbinptr bin;                /* corresponding bin */
  mchunkptr remainder;        /* leftover piece after a split */
  long remainder_size;        /* its size (may go negative in tests) */
  int remainder_index;        /* scratch bin index for frontlink */
  unsigned long block;        /* bit in binblocks for current block */
  int startidx;               /* first bin index of the scanned block */
  mchunkptr fwd;              /* scratch for unlink/frontlink macros */
  mchunkptr bck;              /* scratch for unlink/frontlink macros */
  mbinptr q;                  /* scratch bin pointer */

  INTERNAL_SIZE_T nb;         /* padded, aligned request size */

  /* U-Boot: refuse to allocate before the heap region is configured. */
  if ((mem_malloc_start == 0) && (mem_malloc_end == 0)) {
    /* No memory pool available yet. */
    return NULL;
  }

  /* Reject absurdly large requests (negative when viewed as long). */
  if ((long)bytes < 0) return NULL;

  nb = request2size(bytes);  /* add overhead, round to alignment */

  /* ---- Small requests: exact-fit bins ---- */

  if (is_small_request(nb))
  {
    idx = smallbin_index(nb);

    /* Any chunk in a small bin is an acceptable fit; also try the
       next larger bin before giving up on a direct hit. */

    q = bin_at(idx);
    victim = last(q);

    /* Bin empty? Peek at the next one. */
    if (victim == q)
    {
      q = next_bin(q);
      victim = last(q);
    }
    if (victim != q)
    {
      victim_size = chunksize(victim);
      unlink(victim, bck, fwd);
      set_inuse_bit_at_offset(victim, victim_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    idx += 2; /* both bins checked above; resume search after them */

  }
  else
  {
    /* ---- Larger requests: scan the request's own bin ---- */
    idx = bin_index(nb);
    bin = bin_at(idx);

    /* Walk from smallest (last) to largest within the bin. */
    for (victim = last(bin); victim != bin; victim = victim->bk)
    {
      victim_size = chunksize(victim);
      remainder_size = victim_size - nb;

      if (remainder_size >= (long)MINSIZE)
      {
	/* Too big to use without splitting; defer to the general
	   best-fit scan (which starts at idx after the -- below). */
	--idx;
	break;
      }

      else if (remainder_size >= 0)
      {
	/* Exact enough fit: take it whole. */
	unlink(victim, bck, fwd);
	set_inuse_bit_at_offset(victim, victim_size);
	check_malloced_chunk(victim, nb);
	return chunk2mem(victim);
      }
    }

    ++idx;

  }

  /* ---- Try the remainder from the most recent split ---- */

  if ( (victim = last_remainder->fd) != last_remainder)
  {
    victim_size = chunksize(victim);
    remainder_size = victim_size - nb;

    if (remainder_size >= (long)MINSIZE)
    {
      /* Split it, keeping the tail as the new last remainder.  This
	 promotes locality for sequences of small requests. */
      remainder = chunk_at_offset(victim, nb);
      set_head(victim, nb | PREV_INUSE);
      link_last_remainder(remainder);
      set_head(remainder, remainder_size | PREV_INUSE);
      set_foot(remainder, remainder_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    clear_last_remainder;

    if (remainder_size >= 0)
    {
      /* Fits exactly (within MINSIZE slack): use it whole. */
      set_inuse_bit_at_offset(victim, victim_size);
      check_malloced_chunk(victim, nb);
      return chunk2mem(victim);
    }

    /* Too small for this request: file it into its proper bin
       (frontlink computes remainder_index itself). */
    frontlink(victim, victim_size, remainder_index, bck, fwd);
  }

  /* ---- Best-fit search of larger bins via the binblocks bitmap ---- */

  if ( (block = idx2binblock(idx)) <= binblocks_r)
  {

    /* If the block holding idx has no marked bins, advance to the
       next block that does. */

    if ( (block & binblocks_r) == 0)
    {
      /* Skip to the start of the next block. */
      idx = (idx & ~(BINBLOCKWIDTH - 1)) + BINBLOCKWIDTH;
      block <<= 1;
      while ((block & binblocks_r) == 0)
      {
	idx += BINBLOCKWIDTH;
	block <<= 1;
      }
    }

    /* Outer loop: one iteration per candidate block of bins. */
    for (;;)
    {
      startidx = idx;          /* remember for the clean-up pass below */
      q = bin = bin_at(idx);

      /* Scan every bin in this block, smallest first. */
      do
      {
	/* Any chunk here is >= nb if the bin index is right; still
	   verify and split as needed. */
	for (victim = last(bin); victim != bin; victim = victim->bk)
	{
	  victim_size = chunksize(victim);
	  remainder_size = victim_size - nb;

	  if (remainder_size >= (long)MINSIZE)
	  {
	    /* Split; tail becomes the new last remainder. */
	    remainder = chunk_at_offset(victim, nb);
	    set_head(victim, nb | PREV_INUSE);
	    unlink(victim, bck, fwd);
	    link_last_remainder(remainder);
	    set_head(remainder, remainder_size | PREV_INUSE);
	    set_foot(remainder, remainder_size);
	    check_malloced_chunk(victim, nb);
	    return chunk2mem(victim);
	  }

	  else if (remainder_size >= 0)
	  {
	    /* Exact fit. */
	    set_inuse_bit_at_offset(victim, victim_size);
	    unlink(victim, bck, fwd);
	    check_malloced_chunk(victim, nb);
	    return chunk2mem(victim);
	  }

	}

	bin = next_bin(bin);

      } while ((++idx & (BINBLOCKWIDTH - 1)) != 0);

      /* Every bin in the block was empty: clear its binblocks bit,
	 but only if no earlier bin in the block is occupied. */

      do
      {
	if ((startidx & (BINBLOCKWIDTH - 1)) == 0)
	{
	  /* Reached the block boundary with all bins empty. */
	  av_[1] = (mbinptr)(binblocks_r & ~block);
	  break;
	}
	--startidx;
	q = prev_bin(q);
      } while (first(q) == q);

      /* Advance to the next marked block, if any. */

      if ( (block <<= 1) <= binblocks_r && (block != 0) )
      {
	while ((block & binblocks_r) == 0)
	{
	  idx += BINBLOCKWIDTH;
	  block <<= 1;
	}
      }
      else
	break;
    }
  }

  /* ---- Fall back to carving from the top chunk ---- */

  /* Require MINSIZE slack so top always remains a valid chunk. */
  if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
  {

#if HAVE_MMAP
    /* Large requests go straight to mmap when top can't serve them. */
    if ((unsigned long)nb >= (unsigned long)mmap_threshold &&
	(victim = mmap_chunk(nb)) != 0)
      return chunk2mem(victim);
#endif

    /* Grow top via MORECORE and re-check. */
    malloc_extend_top(nb);
    if ( (remainder_size = chunksize(top) - nb) < (long)MINSIZE)
      return NULL; /* out of memory */
  }

  victim = top;
  set_head(victim, nb | PREV_INUSE);
  top = chunk_at_offset(victim, nb);
  set_head(top, remainder_size | PREV_INUSE);
  check_malloced_chunk(victim, nb);
  return chunk2mem(victim);

}
2397
2398
2399
2400
2401
2402
2403
2404
2405
2406
2407
2408
2409
2410
2411
2412
2413
2414
2415
2416
2417
2418
2419
2420
2421
2422
#if __STD_C
void fREe(Void_t* mem)
#else
void fREe(mem) Void_t* mem;
#endif
{
  /*
    Return a chunk to the allocator, coalescing it with free neighbors.
    A chunk that merges into top may trigger malloc_trim; otherwise the
    consolidated chunk is placed into its size-class bin (or becomes
    the last_remainder if it absorbs it).  free(NULL) is a no-op.
  */
  mchunkptr p;          /* chunk corresponding to mem */
  INTERNAL_SIZE_T hd;   /* its head field (size + flag bits) */
  INTERNAL_SIZE_T sz;   /* its size, growing as neighbors are merged */
  int idx;              /* scratch bin index for frontlink */
  mchunkptr next;       /* next physical chunk */
  INTERNAL_SIZE_T nextsz; /* its size */
  INTERNAL_SIZE_T prevsz; /* size of previous chunk, if merging back */
  mchunkptr bck;        /* scratch for unlink/frontlink macros */
  mchunkptr fwd;        /* scratch for unlink/frontlink macros */
  int islr;             /* nonzero if p now plays the last_remainder role */

  if (mem == NULL)
    return;

  p = mem2chunk(mem);
  hd = p->size;

#if HAVE_MMAP
  /* mmapped chunks are returned straight to the kernel. */
  if (hd & IS_MMAPPED)
  {
    munmap_chunk(p);
    return;
  }
#endif

  check_inuse_chunk(p);

  sz = hd & ~PREV_INUSE;
  next = chunk_at_offset(p, sz);
  nextsz = chunksize(next);

  if (next == top)
  {
    /* Merge into top (and with the previous chunk if that is free). */
    sz += nextsz;

    if (!(hd & PREV_INUSE))
    {
      prevsz = p->prev_size;
      p = chunk_at_offset(p, -((long) prevsz));
      sz += prevsz;
      unlink(p, bck, fwd);
    }

    set_head(p, sz | PREV_INUSE);
    top = p;
    /* Give memory back to the system once top grows large enough. */
    if ((unsigned long)(sz) >= (unsigned long)trim_threshold)
      malloc_trim(top_pad);
    return;
  }

  /* Clear the in-use marker that lived in next's head. */
  set_head(next, nextsz);

  islr = 0;

  /* Coalesce backward with a free predecessor. */
  if (!(hd & PREV_INUSE))
  {
    prevsz = p->prev_size;
    p = chunk_at_offset(p, -((long) prevsz));
    sz += prevsz;

    /* If the predecessor is the last_remainder, keep that status
       for the merged chunk instead of unlinking it from a bin. */
    if (p->fd == last_remainder)
      islr = 1;
    else
      unlink(p, bck, fwd);
  }

  /* Coalesce forward with a free successor. */
  if (!(inuse_bit_at_offset(next, nextsz)))
  {
    sz += nextsz;

    if (!islr && next->fd == last_remainder)
    {
      /* Successor was the last_remainder: the merged chunk takes
	 over that role. */
      islr = 1;
      link_last_remainder(p);
    }
    else
      unlink(next, bck, fwd);
  }

  /* Write the consolidated chunk's boundaries and bin it. */
  set_head(p, sz | PREV_INUSE);
  set_foot(p, sz);
  if (!islr)
    frontlink(p, sz, idx, bck, fwd);
}
2514
2515
2516
2517
2518
2519
2520
2521
2522
2523
2524
2525
2526
2527
2528
2529
2530
2531
2532
2533
2534
2535
2536
2537
2538
2539
2540
2541
2542
2543
2544
2545
2546
2547
2548
2549
2550
2551
2552
2553
2554
#if __STD_C
Void_t* rEALLOc(Void_t* oldmem, size_t bytes)
#else
Void_t* rEALLOc(oldmem, bytes) Void_t* oldmem; size_t bytes;
#endif
{
  /*
    Resize the allocation at oldmem to at least `bytes` usable bytes.
    Tries, in order: in-place (chunk already big enough), expanding
    forward into top or a free successor, expanding backward into a
    free predecessor (with a copy), and finally malloc+copy+free.
    realloc(NULL, n) behaves as malloc(n).  Returns NULL on failure,
    leaving the original block intact.
  */
  INTERNAL_SIZE_T nb;       /* padded request size */

  mchunkptr oldp;           /* chunk for oldmem */
  INTERNAL_SIZE_T oldsize;  /* its size */

  mchunkptr newp;           /* chunk to return */
  INTERNAL_SIZE_T newsize;  /* its size */
  Void_t* newmem;           /* its user pointer */

  mchunkptr next;           /* next physical chunk after oldp */
  INTERNAL_SIZE_T nextsize; /* its size */

  mchunkptr prev;           /* previous physical chunk before oldp */
  INTERNAL_SIZE_T prevsize; /* its size */

  mchunkptr remainder;      /* leftover piece after a split */
  INTERNAL_SIZE_T remainder_size; /* its size */

  mchunkptr bck;            /* scratch for unlink macro */
  mchunkptr fwd;            /* scratch for unlink macro */

#ifdef REALLOC_ZERO_BYTES_FREES
  if (bytes == 0) { fREe(oldmem); return 0; }
#endif

  if ((long)bytes < 0) return NULL;

  /* realloc of NULL is plain malloc. */
  if (oldmem == NULL) return mALLOc(bytes);

  newp = oldp = mem2chunk(oldmem);
  newsize = oldsize = chunksize(oldp);

  nb = request2size(bytes);

#if HAVE_MMAP
  if (chunk_is_mmapped(oldp))
  {
#if HAVE_MREMAP
    newp = mremap_chunk(oldp, nb);
    if(newp) return chunk2mem(newp);
#endif
    /* mmapped chunks can't grow in place without mremap. */
    if(oldsize - SIZE_SZ >= nb) return oldmem;

    /* Must alloc, copy, then unmap the old region. */
    newmem = mALLOc(bytes);
    if (newmem == 0) return 0;
    MALLOC_COPY(newmem, oldmem, oldsize - 2*SIZE_SZ);
    munmap_chunk(oldp);
    return newmem;
  }
#endif

  check_inuse_chunk(oldp);

  if ((long)(oldsize) < (long)(nb))
  {
    /* The chunk must grow.  First try to expand forward into the
       next physical chunk if it is top or free. */

    next = chunk_at_offset(oldp, oldsize);
    if (next == top || !inuse(next))
    {
      nextsize = chunksize(next);

      /* Forward into top: absorb just enough and push top up. */
      if (next == top)
      {
	if ((long)(nextsize + newsize) >= (long)(nb + MINSIZE))
	{
	  newsize += nextsize;
	  top = chunk_at_offset(oldp, nb);
	  set_head(top, (newsize - nb) | PREV_INUSE);
	  set_head_size(oldp, nb);
	  return chunk2mem(oldp);
	}
      }

      /* Forward into a free chunk: merge it, then split below. */
      else if (((long)(nextsize + newsize) >= (long)(nb)))
      {
	unlink(next, bck, fwd);
	newsize += nextsize;
	goto split;
      }
    }
    else
    {
      next = NULL;
      nextsize = 0;
    }

    /* Try expanding backward into a free predecessor (needs a copy
       since the user data shifts down). */

    if (!prev_inuse(oldp))
    {
      prev = prev_chunk(oldp);
      prevsize = chunksize(prev);

      /* Try combining with both neighbors at once. */

      if (next != NULL)
      {
	/* prev + oldp + top */
	if (next == top)
	{
	  if ((long)(nextsize + prevsize + newsize) >= (long)(nb + MINSIZE))
	  {
	    unlink(prev, bck, fwd);
	    newp = prev;
	    newsize += prevsize + nextsize;
	    newmem = chunk2mem(newp);
	    MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	    top = chunk_at_offset(newp, nb);
	    set_head(top, (newsize - nb) | PREV_INUSE);
	    set_head_size(newp, nb);
	    return newmem;
	  }
	}

	/* prev + oldp + next */
	else if (((long)(nextsize + prevsize + newsize) >= (long)(nb)))
	{
	  unlink(next, bck, fwd);
	  unlink(prev, bck, fwd);
	  newp = prev;
	  newsize += nextsize + prevsize;
	  newmem = chunk2mem(newp);
	  MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	  goto split;
	}
      }

      /* prev + oldp alone */
      if (prev != NULL && (long)(prevsize + newsize) >= (long)nb)
      {
	unlink(prev, bck, fwd);
	newp = prev;
	newsize += prevsize;
	newmem = chunk2mem(newp);
	MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
	goto split;
      }
    }

    /* No in-place option: allocate fresh, copy, free the old chunk. */

    newmem = mALLOc (bytes);

    if (newmem == NULL)
      return NULL;

    /* Lucky case: malloc returned the chunk immediately after oldp
       (possible because free neighbors were binned above), so the two
       are contiguous and can be fused without a copy. */
    if ( (newp = mem2chunk(newmem)) == next_chunk(oldp))
    {
      newsize += chunksize(newp);
      newp = oldp;
      goto split;
    }

    /* Ordinary path: copy the old payload and release it. */
    MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
    fREe(oldmem);
    return newmem;
  }

  /* Shrink (or post-merge trim): split off any usable tail. */
 split:

  if (newsize - nb >= MINSIZE)
  {
    remainder = chunk_at_offset(newp, nb);
    remainder_size = newsize - nb;
    set_head_size(newp, nb);
    set_head(remainder, remainder_size | PREV_INUSE);
    set_inuse_bit_at_offset(remainder, remainder_size);
    fREe(chunk2mem(remainder)); /* let free() bin/coalesce the tail */
  }
  else
  {
    set_head_size(newp, newsize);
    set_inuse_bit_at_offset(newp, newsize);
  }

  check_inuse_chunk(newp);
  return chunk2mem(newp);
}
2751
2752
2753
2754
2755
2756
2757
2758
2759
2760
2761
2762
2763
2764
2765
2766
2767
2768
2769
2770
2771
2772
2773
#if __STD_C
Void_t* mEMALIGn(size_t alignment, size_t bytes)
#else
Void_t* mEMALIGn(alignment, bytes) size_t alignment; size_t bytes;
#endif
{
  /*
    Allocate `bytes` usable bytes whose user pointer is a multiple of
    `alignment`.  Over-allocates by alignment + MINSIZE, then carves an
    aligned chunk out of the result, freeing the lead and tail slack.
    NOTE(review): assumes alignment is a power of two (the rounding
    mask arithmetic below relies on it) — callers should ensure this.
  */
  INTERNAL_SIZE_T nb;       /* padded request size */
  char* m;                  /* user pointer from the oversized malloc */
  mchunkptr p;              /* its chunk */
  char* brk;                /* aligned position inside the chunk */
  mchunkptr newp;           /* chunk starting at brk */
  INTERNAL_SIZE_T newsize;  /* its size */
  INTERNAL_SIZE_T leadsize; /* slack before the aligned chunk */
  mchunkptr remainder;      /* slack after the aligned chunk */
  long remainder_size;      /* its size */

  if ((long)bytes < 0) return NULL;

  /* Plain malloc already guarantees MALLOC_ALIGNMENT. */

  if (alignment <= MALLOC_ALIGNMENT) return mALLOc(bytes);

  /* An alignment below MINSIZE couldn't host a lead chunk. */

  if (alignment < MINSIZE) alignment = MINSIZE;

  /* Over-allocate so an aligned chunk of nb bytes must fit inside. */

  nb = request2size(bytes);
  m = (char*)(mALLOc(nb + alignment + MINSIZE));

  if (m == NULL) return NULL;

  p = mem2chunk(m);

  if ((((unsigned long)(m)) % alignment) == 0)
  {
    /* Already aligned by luck; nothing to carve. */
#if HAVE_MMAP
    if(chunk_is_mmapped(p))
      return chunk2mem(p);
#endif
  }
  else
  {
    /* Find the first aligned user address at least MINSIZE past the
       chunk start, so the skipped lead is itself a valid chunk that
       can be freed. */

    brk = (char*)mem2chunk(((unsigned long)(m + alignment - 1)) & -((signed) alignment));
    if ((long)(brk - (char*)(p)) < MINSIZE) brk = brk + alignment;

    newp = (mchunkptr)brk;
    leadsize = brk - (char*)(p);
    newsize = chunksize(p) - leadsize;

#if HAVE_MMAP
    if(chunk_is_mmapped(p))
    {
      /* For mmapped chunks just shift the bookkeeping; the lead is
	 reclaimed when the whole region is munmapped. */
      newp->prev_size = p->prev_size + leadsize;
      set_head(newp, newsize|IS_MMAPPED);
      return chunk2mem(newp);
    }
#endif

    /* Give back the lead slack as an ordinary free chunk. */

    set_head(newp, newsize | PREV_INUSE);
    set_inuse_bit_at_offset(newp, newsize);
    set_head_size(p, leadsize);
    fREe(chunk2mem(p));
    p = newp;

    assert (newsize >= nb && (((unsigned long)(chunk2mem(p))) % alignment) == 0);
  }

  /* Give back any tail slack too. */

  remainder_size = chunksize(p) - nb;

  if (remainder_size >= (long)MINSIZE)
  {
    remainder = chunk_at_offset(p, nb);
    set_head(remainder, remainder_size | PREV_INUSE);
    set_head_size(p, nb);
    fREe(chunk2mem(remainder));
  }

  check_inuse_chunk(p);
  return chunk2mem(p);

}
2870
2871
2872
2873
2874
2875
2876
2877
2878
2879
2880#if __STD_C
2881Void_t* vALLOc(size_t bytes)
2882#else
2883Void_t* vALLOc(bytes) size_t bytes;
2884#endif
2885{
2886 return mEMALIGn (malloc_getpagesize, bytes);
2887}
2888
2889
2890
2891
2892
2893
2894
2895#if __STD_C
2896Void_t* pvALLOc(size_t bytes)
2897#else
2898Void_t* pvALLOc(bytes) size_t bytes;
2899#endif
2900{
2901 size_t pagesize = malloc_getpagesize;
2902 return mEMALIGn (pagesize, (bytes + pagesize - 1) & ~(pagesize - 1));
2903}
2904
2905
2906
2907
2908
2909
2910
2911#if __STD_C
2912Void_t* cALLOc(size_t n, size_t elem_size)
2913#else
2914Void_t* cALLOc(n, elem_size) size_t n; size_t elem_size;
2915#endif
2916{
2917 mchunkptr p;
2918 INTERNAL_SIZE_T csz;
2919
2920 INTERNAL_SIZE_T sz = n * elem_size;
2921
2922
2923
2924#if MORECORE_CLEARS
2925 mchunkptr oldtop = top;
2926 INTERNAL_SIZE_T oldtopsize = chunksize(top);
2927#endif
2928 Void_t* mem = mALLOc (sz);
2929
2930 if ((long)n < 0) return NULL;
2931
2932 if (mem == NULL)
2933 return NULL;
2934 else
2935 {
2936 p = mem2chunk(mem);
2937
2938
2939
2940
2941#if HAVE_MMAP
2942 if (chunk_is_mmapped(p)) return mem;
2943#endif
2944
2945 csz = chunksize(p);
2946
2947#if MORECORE_CLEARS
2948 if (p == oldtop && csz > oldtopsize)
2949 {
2950
2951 csz = oldtopsize;
2952 }
2953#endif
2954
2955 MALLOC_ZERO(mem, csz - SIZE_SZ);
2956 return mem;
2957 }
2958}
2959
2960
2961
2962
2963
2964
2965
2966
#if !defined(INTERNAL_LINUX_C_LIB) || !defined(__ELF__)
/* Historical alias for free(); some old code calls cfree() on memory
   obtained from calloc().  Simply forwards to fREe. */
#if __STD_C
void cfree(Void_t *mem)
#else
void cfree(mem) Void_t *mem;
#endif
{
  fREe(mem);
}
#endif
2977
2978
2979
2980
2981
2982
2983
2984
2985
2986
2987
2988
2989
2990
2991
2992
2993
2994
2995
2996
2997
2998
2999
3000
3001
3002
#if __STD_C
int malloc_trim(size_t pad)
#else
int malloc_trim(pad) size_t pad;
#endif
{
  /*
    Give back excess memory at the end of the heap to the system,
    keeping at least `pad` bytes (plus MINSIZE and page rounding) in
    the top chunk.  Returns 1 if memory was released, 0 otherwise.
    Only works when nothing else has moved the break since our last
    MORECORE call.
  */
  long top_size;        /* current size of top */
  long extra;           /* whole pages releasable from top */
  char* current_brk;    /* current break, to detect foreign sbrks */
  char* new_brk;        /* break after the release attempt */

  unsigned long pagesz = malloc_getpagesize;

  top_size = chunksize(top);
  /* Pages we can give back while keeping pad + MINSIZE in top. */
  extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;

  if (extra < (long)pagesz)
    return 0; /* not even one page to release */

  else
  {
    /* Refuse if someone else moved the break past our top. */
    current_brk = (char*)(MORECORE (0));
    if (current_brk != (char*)(top) + top_size)
      return 0;

    else
    {
      new_brk = (char*)(MORECORE (-extra));

      if (new_brk == (char*)(MORECORE_FAILURE))
      {
	/* Release failed mid-way; resynchronize our idea of top with
	   whatever the break actually is now. */
	current_brk = (char*)(MORECORE (0));
	top_size = current_brk - (char*)top;
	if (top_size >= (long)MINSIZE)
	{
	  sbrked_mem = current_brk - sbrk_base;
	  set_head(top, top_size | PREV_INUSE);
	}
	check_chunk(top);
	return 0;
      }

      else
      {
	/* Success: shrink top accordingly. */
	set_head(top, (top_size - extra) | PREV_INUSE);
	sbrked_mem -= extra;
	check_chunk(top);
	return 1;
      }
    }
  }
}
3058
3059
3060
3061
3062
3063
3064
3065
3066
3067
3068
3069
3070
3071
3072#if __STD_C
3073size_t malloc_usable_size(Void_t* mem)
3074#else
3075size_t malloc_usable_size(mem) Void_t* mem;
3076#endif
3077{
3078 mchunkptr p;
3079 if (mem == NULL)
3080 return 0;
3081 else
3082 {
3083 p = mem2chunk(mem);
3084 if(!chunk_is_mmapped(p))
3085 {
3086 if (!inuse(p)) return 0;
3087 check_inuse_chunk(p);
3088 return chunksize(p) - SIZE_SZ;
3089 }
3090 return chunksize(p) - 2*SIZE_SZ;
3091 }
3092}
3093
3094
3095
3096
3097
3098
#ifdef DEBUG
/* Walk every bin (and, in DEBUG builds, the chunks between free
   chunks) to refresh the global current_mallinfo statistics record. */
static void malloc_update_mallinfo()
{
  int i;          /* bin index */
  mbinptr b;      /* bin being walked */
  mchunkptr p;    /* free chunk within the bin */
#ifdef DEBUG
  mchunkptr q;    /* in-use chunk scanned for consistency checking */
#endif

  /* Start from top, which is always available but never binned. */
  INTERNAL_SIZE_T avail = chunksize(top);
  int navail = ((long)(avail) >= (long)MINSIZE)? 1 : 0;

  for (i = 1; i < NAV; ++i)
  {
    b = bin_at(i);
    for (p = last(b); p != b; p = p->bk)
    {
#ifdef DEBUG
      check_free_chunk(p);
      /* Also sanity-check the in-use chunks that follow p. */
      for (q = next_chunk(p);
	   q < top && inuse(q) && (long)(chunksize(q)) >= (long)MINSIZE;
	   q = next_chunk(q))
	check_inuse_chunk(q);
#endif
      avail += chunksize(p);
      navail++;
    }
  }

  current_mallinfo.ordblks = navail;
  current_mallinfo.uordblks = sbrked_mem - avail;
  current_mallinfo.fordblks = avail;
  current_mallinfo.hblks = n_mmaps;
  current_mallinfo.hblkhd = mmapped_mem;
  current_mallinfo.keepcost = chunksize(top);

}
#endif
3138
3139
3140
3141
3142
3143
3144
3145
3146
3147
3148
3149
3150
3151
3152
3153
3154
3155
#ifdef DEBUG
/* Print a summary of allocator usage (peak and current system bytes,
   bytes in use, and — with mmap — peak region count) to stdout. */
void malloc_stats()
{
  malloc_update_mallinfo();
  printf("max system bytes = %10u\n",
	  (unsigned int)(max_total_mem));
  printf("system bytes = %10u\n",
	  (unsigned int)(sbrked_mem + mmapped_mem));
  printf("in use bytes = %10u\n",
	  (unsigned int)(current_mallinfo.uordblks + mmapped_mem));
#if HAVE_MMAP
  printf("max mmap regions = %10u\n",
	  (unsigned int)max_n_mmaps);
#endif
}
#endif
3172
3173
3174
3175
3176
#ifdef DEBUG
/* Refresh the allocator statistics, then hand back a copy of the
   global mallinfo record. */
struct mallinfo mALLINFo()
{
  struct mallinfo info;

  malloc_update_mallinfo();
  info = current_mallinfo;
  return info;
}
#endif
3184
3185
3186
3187
3188
3189
3190
3191
3192
3193
3194
3195
3196
3197
3198
3199
3200
#if __STD_C
int mALLOPt(int param_number, int value)
#else
int mALLOPt(param_number, value) int param_number; int value;
#endif
{
  /*
    mallopt: tune an allocator parameter at runtime.  Returns 1 on
    success, 0 for an unrecognized parameter (or for M_MMAP_MAX != 0
    in builds compiled without mmap support).
  */
  switch(param_number)
  {
    case M_TRIM_THRESHOLD:
      trim_threshold = value; return 1;
    case M_TOP_PAD:
      top_pad = value; return 1;
    case M_MMAP_THRESHOLD:
      mmap_threshold = value; return 1;
    case M_MMAP_MAX:
#if HAVE_MMAP
      n_mmaps_max = value; return 1;
#else
      /* Without mmap, only "disable mmap" (value 0) is acceptable. */
      if (value != 0) return 0; else n_mmaps_max = value; return 1;
#endif

    default:
      return 0;
  }
}
3226
3227
3228
3229
3230
3231
3232
3233
3234
3235
3236
3237
3238
3239
3240
3241
3242
3243
3244
3245
3246
3247
3248
3249
3250
3251
3252
3253
3254
3255
3256
3257
3258
3259
3260
3261
3262
3263
3264
3265
3266
3267
3268
3269
3270
3271
3272
3273
3274
3275
3276
3277
3278
3279
3280
3281
3282
3283
3284
3285
3286
3287
3288
3289
3290
3291
3292
3293
3294
3295
3296
3297
3298
3299
3300
3301
3302
3303
3304
3305
3306
3307
3308
3309
3310
3311
3312
3313
3314
3315
3316
3317
3318
3319
3320
3321
3322
3323
3324
3325
3326
3327
3328
3329
3330
3331
3332
3333