module vibe.internal.utilallocator;

public import stdx.allocator : allocatorObject, CAllocatorImpl, dispose,
	   expandArray, IAllocator, make, makeArray, shrinkArray, theAllocator;
public import stdx.allocator.mallocator;
public import stdx.allocator.building_blocks.affix_allocator;

// NOTE: this needs to be used instead of theAllocator due to Phobos issue 17564
@property IAllocator vibeThreadAllocator()
@safe nothrow @nogc {
	import stdx.allocator.gc_allocator;
	// `static` variables are thread-local in D, so each thread lazily gets
	// its own allocator instance
	static IAllocator s_threadAllocator;
	if (!s_threadAllocator)
		s_threadAllocator = () @trusted { return allocatorObject(GCAllocator.instance); } ();
	return s_threadAllocator;
}
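
// Usage sketch (illustrative example, not part of the original module):
// objects allocated through vibeThreadAllocator are constructed and destroyed
// with the stdx.allocator make/dispose helpers re-exported above.
unittest {
	static struct S { int x; }
	auto alloc = vibeThreadAllocator;
	auto s = alloc.make!S(42);
	assert(s.x == 42);
	alloc.dispose(s);
}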

/** Simple region ("arena") allocator that carves allocations out of a
	linked list of pools obtained from a base allocator.

	Freeing individual allocations is not supported; all memory is released
	at once via `deallocateAll`. With `leak = true`, the pool memory itself
	is never returned to the base allocator.
*/
final class RegionListAllocator(Allocator, bool leak = false) : IAllocator {
	import vibe.internal.memory_legacy : AllocSize, alignedSize;
	import std.algorithm.comparison : min, max;
	import std.conv : emplace;
	import std.typecons : Ternary;

	static struct Pool { Pool* next; void[] data; void[] remaining; }

	private {
		Allocator m_baseAllocator;
		Pool* m_freePools; // pools that still have unallocated space
		Pool* m_fullPools; // exhausted pools
		size_t m_poolSize;
	}

	this(size_t pool_size, Allocator base) @safe nothrow
	{
		m_poolSize = pool_size;
		m_baseAllocator = base;
	}

	~this()
	{
		deallocateAll();
	}

	override @property uint alignment() const { return 0x10; }

	/// Total number of bytes currently held in pools, used or not.
	@property size_t totalSize()
	@safe nothrow @nogc {
		size_t amt = 0;
		for (auto p = m_fullPools; p; p = p.next)
			amt += p.data.length;
		for (auto p = m_freePools; p; p = p.next)
			amt += p.data.length;
		return amt;
	}

	/// Number of bytes actually handed out to allocations.
	@property size_t allocatedSize()
	@safe nothrow @nogc {
		size_t amt = 0;
		for (auto p = m_fullPools; p; p = p.next)
			amt += p.data.length;
		for (auto p = m_freePools; p; p = p.next)
			amt += p.data.length - p.remaining.length;
		return amt;
	}

	override void[] allocate(size_t sz, TypeInfo ti = null)
	{
		auto aligned_sz = alignedSize(sz);

		// first-fit search through the pools that still have free space
		Pool* pprev = null;
		Pool* p = m_freePools;
		while (p && p.remaining.length < aligned_sz) {
			pprev = p;
			p = p.next;
		}

		if (!p) {
			// no pool fits - get a fresh one from the base allocator
			auto pmem = m_baseAllocator.allocate(AllocSize!Pool);

			p = emplace!Pool(cast(Pool*)pmem.ptr);
			p.data = m_baseAllocator.allocate(max(aligned_sz, m_poolSize));
			p.remaining = p.data;
			p.next = m_freePools;
			m_freePools = p;
			pprev = null;
		}

		auto ret = p.remaining[0 .. aligned_sz];
		p.remaining = p.remaining[aligned_sz .. $];
		if (!p.remaining.length) {
			// pool exhausted - move it to the full list
			if (pprev) {
				pprev.next = p.next;
			} else {
				m_freePools = p.next;
			}
			p.next = m_fullPools;
			m_fullPools = p;
		}

		return ret[0 .. sz];
	}

	// Aligned allocation and whole-region allocation are not supported.
	override void[] alignedAllocate(size_t n, uint a) { return null; }
	override bool alignedReallocate(ref void[] b, size_t size, uint alignment) { return false; }
	override void[] allocateAll() { return null; }
	override @property Ternary empty() const { return m_fullPools !is null ? Ternary.no : Ternary.yes; }
	override size_t goodAllocSize(size_t s) { return alignedSize(s); }

	// The parameter constness of these interface methods differs between
	// stdx.allocator versions, so pick whichever signature matches.
	import std.traits : Parameters;
	static if (is(Parameters!(IAllocator.resolveInternalPointer)[0] == const(void*))) {
		override Ternary resolveInternalPointer(const void* p, ref void[] result) { return Ternary.unknown; }
	} else {
		override Ternary resolveInternalPointer(void* p, ref void[] result) { return Ternary.unknown; }
	}
	static if (is(Parameters!(IAllocator.owns)[0] == const(void[]))) {
		override Ternary owns(const void[] b) { return Ternary.unknown; }
	} else {
		override Ternary owns(void[] b) { return Ternary.unknown; }
	}

	override bool reallocate(ref void[] arr, size_t newsize)
	{
		return expand(arr, newsize);
	}

	// Grows (or shrinks) the array. Growing may move the data to a new block,
	// unless the array is the last allocation in its pool, in which case the
	// pool's remaining space is consumed in place.
	override bool expand(ref void[] arr, size_t newsize)
	{
		auto aligned_sz = alignedSize(arr.length);
		auto aligned_newsz = alignedSize(newsize);

		if (aligned_newsz <= aligned_sz) {
			arr = arr[0 .. newsize]; // TODO: back up remaining
			return true;
		}

		auto pool = m_freePools;
		bool last_in_pool = pool && arr.ptr + aligned_sz == pool.remaining.ptr;
		if (last_in_pool && pool.remaining.length + aligned_sz >= aligned_newsz) {
			// extend in place by eating into the pool's remaining space
			pool.remaining = pool.remaining[aligned_newsz - aligned_sz .. $];
			arr = arr.ptr[0 .. aligned_newsz];
			assert(arr.ptr + arr.length == pool.remaining.ptr, "Last block does not align with the remaining space!?");
			arr = arr[0 .. newsize];
		} else {
			// relocate: allocate a new block and copy the old contents over
			auto ret = allocate(newsize);
			assert(ret.ptr >= arr.ptr + aligned_sz || ret.ptr + ret.length <= arr.ptr, "New block overlaps old one!?");
			ret[0 .. min(arr.length, newsize)] = arr[0 .. min(arr.length, newsize)];
			arr = ret;
		}
		return true;
	}

	// Individual deallocations are not supported; use deallocateAll.
	override bool deallocate(void[] mem)
	{
		return false;
	}

	override bool deallocateAll()
	{
		// move all full pools back into the free pools list
		for (Pool* p = m_fullPools, pnext; p; p = pnext) {
			pnext = p.next;
			p.next = m_freePools;
			m_freePools = p;
		}
		m_fullPools = null;

		// reset the usable space of every pool
		for (Pool* p = m_freePools; p; p = p.next)
			p.remaining = p.data;

		// return the pool memory to the base allocator, unless leaking is requested
		Pool* pnext;
		for (auto p = m_freePools; p; p = pnext) {
			pnext = p.next;
			static if (!leak) {
				m_baseAllocator.deallocate(p.data);
				m_baseAllocator.deallocate((cast(void*)p)[0 .. AllocSize!Pool]);
			}
		}
		m_freePools = null;

		return true;
	}
}
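
// Usage sketch (illustrative example, not part of the original module): a
// region list allocator backed by Mallocator hands out blocks from its pools
// and reclaims everything at once via deallocateAll.
unittest {
	import stdx.allocator.mallocator : Mallocator;

	auto alloc = new RegionListAllocator!(shared(Mallocator))(1024, Mallocator.instance);
	auto buf = alloc.allocate(100);
	assert(buf.length == 100);
	assert(alloc.allocatedSize >= 100);

	// growing the most recent allocation extends it within its pool
	assert(alloc.expand(buf, 200));
	assert(buf.length == 200);

	alloc.deallocateAll();
	assert(alloc.allocatedSize == 0);
}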