1 module vibe.internal.utilallocator;
2 
3 public import std.experimental.allocator : allocatorObject, CAllocatorImpl, dispose,
4 	   expandArray, IAllocator, make, makeArray, shrinkArray, theAllocator;
5 public import std.experimental.allocator.mallocator;
6 public import std.experimental.allocator.building_blocks.affix_allocator;
7 
// NOTE: this needs to be used instead of theAllocator due to Phobos issue 17564
/// Returns a lazily constructed, thread-local `IAllocator` backed by the GC.
@property IAllocator vibeThreadAllocator()
@safe nothrow @nogc {
	import std.experimental.allocator.gc_allocator;
	// `static` in a function is thread-local storage in D, so each thread
	// gets (and caches) its own allocator object instance.
	static IAllocator tls_allocator;
	if (tls_allocator is null) {
		// allocatorObject is not @nogc/@safe itself; the trusted lambda
		// confines the escape from the function's attribute constraints.
		tls_allocator = () @trusted {
			return allocatorObject(GCAllocator.instance);
		} ();
	}
	return tls_allocator;
}
17 
/** Region-style allocator that carves allocations out of a linked list of
	pools obtained from a base allocator.

	Individual `deallocate` calls are no-ops; memory is only reclaimed in
	bulk via `deallocateAll` (or the destructor). When `leak` is `true`,
	`deallocateAll` resets the bookkeeping but never returns the pool
	memory to the base allocator.
*/
final class RegionListAllocator(Allocator, bool leak = false) : IAllocator {
	import vibe.internal.memory_legacy : AllocSize, alignedSize;
	import std.algorithm.comparison : min, max;
	import std.conv : emplace;

	static if (__VERSION__ < 2072)
		import std.experimental.allocator.common : Ternary;
	else
		import std.typecons : Ternary;

	// One pool: `data` is the full buffer, `remaining` is the unused tail of it.
	static struct Pool { Pool* next; void[] data; void[] remaining; }
	private {
		Allocator m_baseAllocator; // source of pool buffers and Pool structs
		Pool* m_freePools;         // pools with remaining capacity
		Pool* m_fullPools;         // exhausted pools
		size_t m_poolSize;         // minimum size for newly created pools
	}

	/** Params:
			pool_size = minimum size of each newly allocated pool buffer
			base = allocator used to obtain pool memory
	*/
	this(size_t pool_size, Allocator base) @safe nothrow
	{
		m_poolSize = pool_size;
		m_baseAllocator = base;
	}

	~this()
	{
		deallocateAll();
	}

	/// All allocations are aligned to 16 bytes (see `alignedSize`).
	override @property uint alignment() const { return 0x10; }

	/// Total bytes of pool memory held, used or not.
	@property size_t totalSize()
	@safe nothrow @nogc {
		size_t amt = 0;
		for (auto p = m_fullPools; p; p = p.next)
			amt += p.data.length;
		for (auto p = m_freePools; p; p = p.next)
			amt += p.data.length;
		return amt;
	}

	/// Bytes currently handed out (full pools count entirely as allocated).
	@property size_t allocatedSize()
	@safe nothrow @nogc {
		size_t amt = 0;
		for (auto p = m_fullPools; p; p = p.next)
			amt += p.data.length;
		for (auto p = m_freePools; p; p = p.next)
			amt += p.data.length - p.remaining.length;
		return amt;
	}

	/// Serves `sz` bytes from the first free pool that fits, creating a new
	/// pool of at least `m_poolSize` bytes when none does.
	override void[] allocate(size_t sz, TypeInfo ti = null)
	{
		auto aligned_sz = alignedSize(sz);

		// find the first free pool that can hold the request
		Pool* pprev = null;
		Pool* p = cast(Pool*)m_freePools;
		while( p && p.remaining.length < aligned_sz ){
			pprev = p;
			p = p.next;
		}

		if( !p ){
			// no fitting pool - allocate a new one and push it on the free list
			auto pmem = m_baseAllocator.allocate(AllocSize!Pool);

			p = emplace!Pool(cast(Pool*)pmem.ptr);
			p.data = m_baseAllocator.allocate(max(aligned_sz, m_poolSize));
			p.remaining = p.data;
			p.next = cast(Pool*)m_freePools;
			m_freePools = p;
			pprev = null; // p is now the list head
		}

		auto ret = p.remaining[0 .. aligned_sz];
		p.remaining = p.remaining[aligned_sz .. $];
		if( !p.remaining.length ){
			// pool exhausted - move it from the free list to the full list
			if( pprev ){
				pprev.next = p.next;
			} else {
				m_freePools = p.next;
			}
			p.next = cast(Pool*)m_fullPools;
			m_fullPools = p;
		}

		return ret[0 .. sz];
	}

	/// Aligned/whole-region allocation is not supported by this allocator.
	override void[] alignedAllocate(size_t n, uint a) { return null; }
	override bool alignedReallocate(ref void[] b, size_t size, uint alignment) { return false; }
	override void[] allocateAll() { return null; }
	// NOTE(review): only full pools are consulted here, so a partially used
	// free pool still reports "empty" - presumably intentional; verify callers.
	override @property Ternary empty() const { return m_fullPools !is null ? Ternary.no : Ternary.yes; }
	override size_t goodAllocSize(size_t s) { return alignedSize(s); }

	// The parameter constness of resolveInternalPointer changed across Phobos
	// versions; pick the overload that matches the installed IAllocator.
	import std.traits : Parameters;
	static if (is(Parameters!(IAllocator.resolveInternalPointer)[0] == const(void*))) {
		override Ternary resolveInternalPointer(const void* p, ref void[] result) { return Ternary.unknown; }
	} else {
		override Ternary resolveInternalPointer(void* p, ref void[] result) { return Ternary.unknown; }
	}
	override Ternary owns(void[] b) { return Ternary.unknown; }


	/// Reallocation is implemented in terms of `expand` (shrinking is
	/// handled there as well); old memory is never reclaimed individually.
	override bool reallocate(ref void[] arr, size_t newsize)
	{
		return expand(arr, newsize);
	}

	/// Grows `arr` in place when it is the most recent allocation of the
	/// head free pool; otherwise allocates a fresh block and copies.
	override bool expand(ref void[] arr, size_t newsize)
	{
		auto aligned_sz = alignedSize(arr.length);
		auto aligned_newsz = alignedSize(newsize);

		if (aligned_newsz <= aligned_sz) {
			arr = arr[0 .. newsize]; // TODO: back up remaining
			return true;
		}

		// in-place growth is only possible if arr ends exactly where the
		// head pool's remaining space begins
		auto pool = m_freePools;
		bool last_in_pool = pool && arr.ptr+aligned_sz == pool.remaining.ptr;
		if (last_in_pool && pool.remaining.length+aligned_sz >= aligned_newsz) {
			pool.remaining = pool.remaining[aligned_newsz-aligned_sz .. $];
			arr = arr.ptr[0 .. aligned_newsz];
			assert(arr.ptr+arr.length == pool.remaining.ptr, "Last block does not align with the remaining space!?");
			arr = arr[0 .. newsize];
		} else {
			auto ret = allocate(newsize);
			assert(ret.ptr >= arr.ptr+aligned_sz || ret.ptr+ret.length <= arr.ptr, "New block overlaps old one!?");
			ret[0 .. min(arr.length, newsize)] = arr[0 .. min(arr.length, newsize)];
			arr = ret;
		}
		return true;
	}

	/// Individual deallocation is a no-op; memory is reclaimed in bulk only.
	override bool deallocate(void[] mem)
	{
		return false;
	}

	/// Releases (or, with `leak == true`, merely resets) all pools.
	override bool deallocateAll()
	{
		// put all full Pools into the free pools list
		for (Pool* p = cast(Pool*)m_fullPools, pnext; p; p = pnext) {
			pnext = p.next;
			p.next = cast(Pool*)m_freePools;
			m_freePools = cast(Pool*)p;
		}
		// FIX: the full list was merged into the free list above; without this
		// reset m_fullPools keeps dangling pointers into pools that are freed
		// below, corrupting empty()/totalSize/allocatedSize afterwards.
		m_fullPools = null;

		// reset the remaining space of all pools
		for (Pool* p = cast(Pool*)m_freePools; p; p = p.next)
			p.remaining = p.data;

		// release the pool buffers and the Pool structs themselves
		Pool* pnext;
		for (auto p = cast(Pool*)m_freePools; p; p = pnext) {
			pnext = p.next;
			static if (!leak) {
				m_baseAllocator.deallocate(p.data);
				m_baseAllocator.deallocate((cast(void*)p)[0 .. AllocSize!Pool]);
			}
		}
		m_freePools = null;

		return true;
	}
}