File: clang/lib/CodeGen/CGCoroutine.cpp
Warning: line 95, column 21: Called C++ object pointer is null
1 | //===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines ------------===// | |||
2 | // | |||
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. | |||
4 | // See https://llvm.org/LICENSE.txt for license information. | |||
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception | |||
6 | // | |||
7 | //===----------------------------------------------------------------------===// | |||
8 | // | |||
9 | // This contains code dealing with C++ code generation of coroutines. | |||
10 | // | |||
11 | //===----------------------------------------------------------------------===// | |||
12 | ||||
13 | #include "CGCleanup.h" | |||
14 | #include "CodeGenFunction.h" | |||
15 | #include "llvm/ADT/ScopeExit.h" | |||
16 | #include "clang/AST/StmtCXX.h" | |||
17 | #include "clang/AST/StmtVisitor.h" | |||
18 | ||||
19 | using namespace clang; | |||
20 | using namespace CodeGen; | |||
21 | ||||
22 | using llvm::Value; | |||
23 | using llvm::BasicBlock; | |||
24 | ||||
25 | namespace { | |||
26 | enum class AwaitKind { Init, Normal, Yield, Final }; | |||
27 | static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield", | |||
28 | "final"}; | |||
29 | } | |||
30 | ||||
31 | struct clang::CodeGen::CGCoroData { | |||
32 | // What is the current await expression kind and how many | |||
33 | // await/yield expressions were encountered so far. | |||
34 | // These are used to generate pretty labels for await expressions in LLVM IR. | |||
35 | AwaitKind CurrentAwaitKind = AwaitKind::Init; | |||
36 | unsigned AwaitNum = 0; | |||
37 | unsigned YieldNum = 0; | |||
38 | ||||
39 | // How many co_return statements are in the coroutine. Used to decide whether | |||
40 | // we need to add co_return; equivalent at the end of the user authored body. | |||
41 | unsigned CoreturnCount = 0; | |||
42 | ||||
43 | // A branch to this block is emitted when coroutine needs to suspend. | |||
44 | llvm::BasicBlock *SuspendBB = nullptr; | |||
45 | ||||
46 | // The promise type's 'unhandled_exception' handler, if it defines one. | |||
47 | Stmt *ExceptionHandler = nullptr; | |||
48 | ||||
49 | // A temporary i1 alloca that stores whether 'await_resume' threw an | |||
50 | // exception. If it did, 'true' is stored in this variable, and the coroutine | |||
51 | // body must be skipped. If the promise type does not define an exception | |||
52 | // handler, this is null. | |||
53 | llvm::Value *ResumeEHVar = nullptr; | |||
54 | ||||
55 | // Stores the jump destination just before the coroutine memory is freed. | |||
56 | // This is the destination that every suspend point jumps to for the cleanup | |||
57 | // branch. | |||
58 | CodeGenFunction::JumpDest CleanupJD; | |||
59 | ||||
60 | // Stores the jump destination just before the final suspend. The co_return | |||
61 | // statements jumps to this point after calling return_xxx promise member. | |||
62 | CodeGenFunction::JumpDest FinalJD; | |||
63 | ||||
64 | // Stores the llvm.coro.id emitted in the function so that we can supply it | |||
65 | // as the first argument to coro.begin, coro.alloc and coro.free intrinsics. | |||
66 | // Note: llvm.coro.id returns a token that cannot be directly expressed in a | |||
67 | // builtin. | |||
68 | llvm::CallInst *CoroId = nullptr; | |||
69 | ||||
70 | // Stores the llvm.coro.begin emitted in the function so that we can replace | |||
71 | // all coro.frame intrinsics with direct SSA value of coro.begin that returns | |||
72 | // the address of the coroutine frame of the current coroutine. | |||
73 | llvm::CallInst *CoroBegin = nullptr; | |||
74 | ||||
75 | // Stores the last emitted coro.free for the deallocate expressions, we use it | |||
76 | // to wrap dealloc code with if(auto mem = coro.free) dealloc(mem). | |||
77 | llvm::CallInst *LastCoroFree = nullptr; | |||
78 | ||||
79 | // If coro.id came from the builtin, remember the expression to give better | |||
80 | // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by | |||
81 | // EmitCoroutineBody. | |||
82 | CallExpr const *CoroIdExpr = nullptr; | |||
83 | }; | |||
84 | ||||
85 | // Defining these here allows to keep CGCoroData private to this file. | |||
86 | clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {} | |||
87 | CodeGenFunction::CGCoroInfo::~CGCoroInfo() {} | |||
88 | ||||
89 | static void createCoroData(CodeGenFunction &CGF, | |||
90 | CodeGenFunction::CGCoroInfo &CurCoro, | |||
91 | llvm::CallInst *CoroId, | |||
92 | CallExpr const *CoroIdExpr = nullptr) { | |||
93 | if (CurCoro.Data) { | |||
94 | if (CurCoro.Data->CoroIdExpr) | |||
95 | CGF.CGM.Error(CoroIdExpr->getBeginLoc(), | |||
96 | "only one __builtin_coro_id can be used in a function"); | |||
97 | else if (CoroIdExpr) | |||
98 | CGF.CGM.Error(CoroIdExpr->getBeginLoc(), | |||
99 | "__builtin_coro_id shall not be used in a C++ coroutine"); | |||
100 | else | |||
101 | llvm_unreachable("EmitCoroutineBodyStatement called twice?"); | |||
102 | ||||
103 | return; | |||
104 | } | |||
105 | ||||
106 | CurCoro.Data = std::unique_ptr<CGCoroData>(new CGCoroData); | |||
107 | CurCoro.Data->CoroId = CoroId; | |||
108 | CurCoro.Data->CoroIdExpr = CoroIdExpr; | |||
109 | } | |||
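
The analyzer's path goes through the function above: when createCoroData() is reached from EmitCoroutineBody (the call at line 557), the CoroIdExpr parameter keeps its defaulted nullptr, yet the branch taken at line 94 only tests the stored CurCoro.Data->CoroIdExpr before dereferencing the incoming pointer at line 95. Below is a minimal, self-contained sketch of that shape; Expr, CoroData and error are hypothetical stand-ins, not the real Clang types.

#include <cstdio>
#include <memory>

// Hypothetical stand-ins for clang::Expr, CGCoroData and CGM.Error().
struct Expr {
  int Loc = 0;
  int getBeginLoc() const { return Loc; }
};

struct CoroData {
  const Expr *CoroIdExpr = nullptr; // non-null only if coro.id came from the builtin
};

static void error(int Loc, const char *Msg) {
  std::printf("error at %d: %s\n", Loc, Msg);
}

// Same shape as createCoroData(): the branch tests the *stored* expression,
// but the diagnostic location is read from the *incoming* parameter.
static void createCoroData(std::unique_ptr<CoroData> &Data,
                           const Expr *CoroIdExpr = nullptr) {
  if (Data) {
    if (Data->CoroIdExpr)
      // If the caller passed the defaulted nullptr, this dereference is the
      // "Called C++ object pointer is null" path reported at line 95.
      error(CoroIdExpr->getBeginLoc(),
            "only one __builtin_coro_id can be used in a function");
    return;
  }
  Data = std::make_unique<CoroData>();
  Data->CoroIdExpr = CoroIdExpr;
}

int main() {
  std::unique_ptr<CoroData> Data;
  Expr BuiltinUse;
  createCoroData(Data, &BuiltinUse); // first call records the builtin expression
  // createCoroData(Data);           // a second call with the defaulted nullptr would
                                     // take the flagged branch and dereference null
}

One plausible hardening, if the path is considered reachable, would be to take the diagnostic location from the stored CurCoro.Data->CoroIdExpr in that branch; the sketch above only illustrates why the checker fires.
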
110 | ||||
111 | // Synthesize a pretty name for a suspend point. | |||
112 | static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) { | |||
113 | unsigned No = 0; | |||
114 | switch (Kind) { | |||
115 | case AwaitKind::Init: | |||
116 | case AwaitKind::Final: | |||
117 | break; | |||
118 | case AwaitKind::Normal: | |||
119 | No = ++Coro.AwaitNum; | |||
120 | break; | |||
121 | case AwaitKind::Yield: | |||
122 | No = ++Coro.YieldNum; | |||
123 | break; | |||
124 | } | |||
125 | SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]); | |||
126 | if (No > 1) { | |||
127 | Twine(No).toVector(Prefix); | |||
128 | } | |||
129 | return Prefix; | |||
130 | } | |||
131 | ||||
132 | static bool memberCallExpressionCanThrow(const Expr *E) { | |||
133 | if (const auto *CE = dyn_cast<CXXMemberCallExpr>(E)) | |||
134 | if (const auto *Proto = | |||
135 | CE->getMethodDecl()->getType()->getAs<FunctionProtoType>()) | |||
136 | if (isNoexceptExceptionSpec(Proto->getExceptionSpecType()) && | |||
137 | Proto->canThrow() == CT_Cannot) | |||
138 | return false; | |||
139 | return true; | |||
140 | } | |||
141 | ||||
142 | // Emit suspend expression which roughly looks like: | |||
143 | // | |||
144 | // auto && x = CommonExpr(); | |||
145 | // if (!x.await_ready()) { | |||
146 | // llvm_coro_save(); | |||
147 | // x.await_suspend(...); (*) | |||
148 | // llvm_coro_suspend(); (**) | |||
149 | // } | |||
150 | // x.await_resume(); | |||
151 | // | |||
152 | // where the result of the entire expression is the result of x.await_resume() | |||
153 | // | |||
154 | // (*) If x.await_suspend return type is bool, it allows to veto a suspend: | |||
155 | // if (x.await_suspend(...)) | |||
156 | // llvm_coro_suspend(); | |||
157 | // | |||
158 | // (**) llvm_coro_suspend() encodes three possible continuations as | |||
159 | // a switch instruction: | |||
160 | // | |||
161 | // %where-to = call i8 @llvm.coro.suspend(...) | |||
162 | // switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend | |||
163 | // i8 0, label %yield.ready ; go here when resumed | |||
164 | // i8 1, label %yield.cleanup ; go here when destroyed | |||
165 | // ] | |||
166 | // | |||
167 | // See llvm's docs/Coroutines.rst for more details. | |||
168 | // | |||
169 | namespace { | |||
170 | struct LValueOrRValue { | |||
171 | LValue LV; | |||
172 | RValue RV; | |||
173 | }; | |||
174 | } | |||
175 | static LValueOrRValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro, | |||
176 | CoroutineSuspendExpr const &S, | |||
177 | AwaitKind Kind, AggValueSlot aggSlot, | |||
178 | bool ignoreResult, bool forLValue) { | |||
179 | auto *E = S.getCommonExpr(); | |||
180 | ||||
181 | auto Binder = | |||
182 | CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E); | |||
183 | auto UnbindOnExit = llvm::make_scope_exit([&] { Binder.unbind(CGF); }); | |||
184 | ||||
185 | auto Prefix = buildSuspendPrefixStr(Coro, Kind); | |||
186 | BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready")); | |||
187 | BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend")); | |||
188 | BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup")); | |||
189 | ||||
190 | // If expression is ready, no need to suspend. | |||
191 | CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0); | |||
192 | ||||
193 | // Otherwise, emit suspend logic. | |||
194 | CGF.EmitBlock(SuspendBlock); | |||
195 | ||||
196 | auto &Builder = CGF.Builder; | |||
197 | llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save); | |||
198 | auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy); | |||
199 | auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr}); | |||
200 | ||||
201 | auto *SuspendRet = CGF.EmitScalarExpr(S.getSuspendExpr()); | |||
202 | if (SuspendRet != nullptr && SuspendRet->getType()->isIntegerTy(1)) { | |||
203 | // Veto suspension if requested by bool returning await_suspend. | |||
204 | BasicBlock *RealSuspendBlock = | |||
205 | CGF.createBasicBlock(Prefix + Twine(".suspend.bool")); | |||
206 | CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock); | |||
207 | CGF.EmitBlock(RealSuspendBlock); | |||
208 | } | |||
209 | ||||
210 | // Emit the suspend point. | |||
211 | const bool IsFinalSuspend = (Kind == AwaitKind::Final); | |||
212 | llvm::Function *CoroSuspend = | |||
213 | CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend); | |||
214 | auto *SuspendResult = Builder.CreateCall( | |||
215 | CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)}); | |||
216 | ||||
217 | // Create a switch capturing three possible continuations. | |||
218 | auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2); | |||
219 | Switch->addCase(Builder.getInt8(0), ReadyBlock); | |||
220 | Switch->addCase(Builder.getInt8(1), CleanupBlock); | |||
221 | ||||
222 | // Emit cleanup for this suspend point. | |||
223 | CGF.EmitBlock(CleanupBlock); | |||
224 | CGF.EmitBranchThroughCleanup(Coro.CleanupJD); | |||
225 | ||||
226 | // Emit await_resume expression. | |||
227 | CGF.EmitBlock(ReadyBlock); | |||
228 | ||||
229 | // Exception handling requires additional IR. If the 'await_resume' function | |||
230 | // is marked as 'noexcept', we avoid generating this additional IR. | |||
231 | CXXTryStmt *TryStmt = nullptr; | |||
232 | if (Coro.ExceptionHandler && Kind == AwaitKind::Init && | |||
233 | memberCallExpressionCanThrow(S.getResumeExpr())) { | |||
234 | Coro.ResumeEHVar = | |||
235 | CGF.CreateTempAlloca(Builder.getInt1Ty(), Prefix + Twine("resume.eh")); | |||
236 | Builder.CreateFlagStore(true, Coro.ResumeEHVar); | |||
237 | ||||
238 | auto Loc = S.getResumeExpr()->getExprLoc(); | |||
239 | auto *Catch = new (CGF.getContext()) | |||
240 | CXXCatchStmt(Loc, /*exDecl=*/nullptr, Coro.ExceptionHandler); | |||
241 | auto *TryBody = | |||
242 | CompoundStmt::Create(CGF.getContext(), S.getResumeExpr(), Loc, Loc); | |||
243 | TryStmt = CXXTryStmt::Create(CGF.getContext(), Loc, TryBody, Catch); | |||
244 | CGF.EnterCXXTryStmt(*TryStmt); | |||
245 | } | |||
246 | ||||
247 | LValueOrRValue Res; | |||
248 | if (forLValue) | |||
249 | Res.LV = CGF.EmitLValue(S.getResumeExpr()); | |||
250 | else | |||
251 | Res.RV = CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult); | |||
252 | ||||
253 | if (TryStmt) { | |||
254 | Builder.CreateFlagStore(false, Coro.ResumeEHVar); | |||
255 | CGF.ExitCXXTryStmt(*TryStmt); | |||
256 | } | |||
257 | ||||
258 | return Res; | |||
259 | } | |||
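
For context, the ".suspend.bool" handling above mirrors the user-facing awaitable protocol: a bool-returning await_suspend that evaluates to false vetoes the suspension, and control falls straight through to await_resume. A short illustrative coroutine follows; the Task and MaybeSuspend names are made up for the sketch and are not part of this file.

#include <coroutine>

struct Task {
  struct promise_type {
    Task get_return_object() { return {}; }
    std::suspend_never initial_suspend() noexcept { return {}; }
    std::suspend_never final_suspend() noexcept { return {}; }
    void return_void() {}
    void unhandled_exception() {}
  };
};

// Awaitable whose await_suspend returns bool: returning false "vetoes" the
// suspension, which is the conditional branch emitted before llvm.coro.suspend.
struct MaybeSuspend {
  bool really_suspend;
  bool await_ready() const noexcept { return false; }
  bool await_suspend(std::coroutine_handle<>) const noexcept {
    return really_suspend;
  }
  void await_resume() const noexcept {}
};

Task demo() {
  // await_suspend returns false, so the generated "await.suspend.bool" branch
  // jumps back to the .ready block and the coroutine keeps running.
  co_await MaybeSuspend{/*really_suspend=*/false};
}

Compiled as C++20, the co_await above reaches the bool check and resumes immediately because await_suspend returned false.
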
260 | ||||
261 | RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E, | |||
262 | AggValueSlot aggSlot, | |||
263 | bool ignoreResult) { | |||
264 | return emitSuspendExpression(*this, *CurCoro.Data, E, | |||
265 | CurCoro.Data->CurrentAwaitKind, aggSlot, | |||
266 | ignoreResult, /*forLValue*/false).RV; | |||
267 | } | |||
268 | RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E, | |||
269 | AggValueSlot aggSlot, | |||
270 | bool ignoreResult) { | |||
271 | return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield, | |||
272 | aggSlot, ignoreResult, /*forLValue*/false).RV; | |||
273 | } | |||
274 | ||||
275 | void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) { | |||
276 | ++CurCoro.Data->CoreturnCount; | |||
277 | const Expr *RV = S.getOperand(); | |||
278 | if (RV && RV->getType()->isVoidType() && !isa<InitListExpr>(RV)) { | |||
279 | // Make sure to evaluate the non initlist expression of a co_return | |||
280 | // with a void expression for side effects. | |||
281 | RunCleanupsScope cleanupScope(*this); | |||
282 | EmitIgnoredExpr(RV); | |||
283 | } | |||
284 | EmitStmt(S.getPromiseCall()); | |||
285 | EmitBranchThroughCleanup(CurCoro.Data->FinalJD); | |||
286 | } | |||
287 | ||||
288 | ||||
289 | #ifndef NDEBUG | |||
290 | static QualType getCoroutineSuspendExprReturnType(const ASTContext &Ctx, | |||
291 | const CoroutineSuspendExpr *E) { | |||
292 | const auto *RE = E->getResumeExpr(); | |||
293 | // Is it possible for RE to be a CXXBindTemporaryExpr wrapping | |||
294 | // a MemberCallExpr? | |||
295 | assert(isa<CallExpr>(RE) && "unexpected suspend expression type"); | |||
296 | return cast<CallExpr>(RE)->getCallReturnType(Ctx); | |||
297 | } | |||
298 | #endif | |||
299 | ||||
300 | LValue | |||
301 | CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr *E) { | |||
302 | assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() && | |||
303 | "Can't have a scalar return unless the return type is a " | |||
304 | "reference type!"); | |||
305 | return emitSuspendExpression(*this, *CurCoro.Data, *E, | |||
306 | CurCoro.Data->CurrentAwaitKind, AggValueSlot::ignored(), | |||
307 | /*ignoreResult*/false, /*forLValue*/true).LV; | |||
308 | } | |||
309 | ||||
310 | LValue | |||
311 | CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr *E) { | |||
312 | assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() && | |||
313 | "Can't have a scalar return unless the return type is a " | |||
314 | "reference type!"); | |||
315 | return emitSuspendExpression(*this, *CurCoro.Data, *E, | |||
316 | AwaitKind::Yield, AggValueSlot::ignored(), | |||
317 | /*ignoreResult*/false, /*forLValue*/true).LV; | |||
318 | } | |||
319 | ||||
320 | // Hunts for the parameter reference in the parameter copy/move declaration. | |||
321 | namespace { | |||
322 | struct GetParamRef : public StmtVisitor<GetParamRef> { | |||
323 | public: | |||
324 | DeclRefExpr *Expr = nullptr; | |||
325 | GetParamRef() {} | |||
326 | void VisitDeclRefExpr(DeclRefExpr *E) { | |||
327 | assert(Expr == nullptr && "multilple declref in param move"); | |||
328 | Expr = E; | |||
329 | } | |||
330 | void VisitStmt(Stmt *S) { | |||
331 | for (auto *C : S->children()) { | |||
332 | if (C) | |||
333 | Visit(C); | |||
334 | } | |||
335 | } | |||
336 | }; | |||
337 | } | |||
338 | ||||
339 | // This class replaces references to parameters to their copies by changing | |||
340 | // the addresses in CGF.LocalDeclMap and restoring back the original values in | |||
341 | // its destructor. | |||
342 | ||||
343 | namespace { | |||
344 | struct ParamReferenceReplacerRAII { | |||
345 | CodeGenFunction::DeclMapTy SavedLocals; | |||
346 | CodeGenFunction::DeclMapTy& LocalDeclMap; | |||
347 | ||||
348 | ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap) | |||
349 | : LocalDeclMap(LocalDeclMap) {} | |||
350 | ||||
351 | void addCopy(DeclStmt const *PM) { | |||
352 | // Figure out what param it refers to. | |||
353 | ||||
354 | assert(PM->isSingleDecl()); | |||
355 | VarDecl const*VD = static_cast<VarDecl const*>(PM->getSingleDecl()); | |||
356 | Expr const *InitExpr = VD->getInit(); | |||
357 | GetParamRef Visitor; | |||
358 | Visitor.Visit(const_cast<Expr*>(InitExpr)); | |||
359 | assert(Visitor.Expr); | |||
360 | DeclRefExpr *DREOrig = Visitor.Expr; | |||
361 | auto *PD = DREOrig->getDecl(); | |||
362 | ||||
363 | auto it = LocalDeclMap.find(PD); | |||
364 | assert(it != LocalDeclMap.end() && "parameter is not found"); | |||
365 | SavedLocals.insert({ PD, it->second }); | |||
366 | ||||
367 | auto copyIt = LocalDeclMap.find(VD); | |||
368 | assert(copyIt != LocalDeclMap.end() && "parameter copy is not found"); | |||
369 | it->second = copyIt->getSecond(); | |||
370 | } | |||
371 | ||||
372 | ~ParamReferenceReplacerRAII() { | |||
373 | for (auto&& SavedLocal : SavedLocals) { | |||
374 | LocalDeclMap.insert({SavedLocal.first, SavedLocal.second}); | |||
375 | } | |||
376 | } | |||
377 | }; | |||
378 | } | |||
379 | ||||
380 | // For WinEH exception representation backend needs to know what funclet coro.end | |||
381 | // belongs to. That information is passed in a funclet bundle. | |||
382 | static SmallVector<llvm::OperandBundleDef, 1> | |||
383 | getBundlesForCoroEnd(CodeGenFunction &CGF) { | |||
384 | SmallVector<llvm::OperandBundleDef, 1> BundleList; | |||
385 | ||||
386 | if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad) | |||
387 | BundleList.emplace_back("funclet", EHPad); | |||
388 | ||||
389 | return BundleList; | |||
390 | } | |||
391 | ||||
392 | namespace { | |||
393 | // We will insert coro.end to cut any of the destructors for objects that | |||
394 | // do not need to be destroyed once the coroutine is resumed. | |||
395 | // See llvm/docs/Coroutines.rst for more details about coro.end. | |||
396 | struct CallCoroEnd final : public EHScopeStack::Cleanup { | |||
397 | void Emit(CodeGenFunction &CGF, Flags flags) override { | |||
398 | auto &CGM = CGF.CGM; | |||
399 | auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy); | |||
400 | llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end); | |||
401 | // See if we have a funclet bundle to associate coro.end with. (WinEH) | |||
402 | auto Bundles = getBundlesForCoroEnd(CGF); | |||
403 | auto *CoroEnd = CGF.Builder.CreateCall( | |||
404 | CoroEndFn, {NullPtr, CGF.Builder.getTrue()}, Bundles); | |||
405 | if (Bundles.empty()) { | |||
406 | // Otherwise, (landingpad model), create a conditional branch that leads | |||
407 | // either to a cleanup block or a block with EH resume instruction. | |||
408 | auto *ResumeBB = CGF.getEHResumeBlock(/*isCleanup=*/true); | |||
409 | auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont"); | |||
410 | CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB); | |||
411 | CGF.EmitBlock(CleanupContBB); | |||
412 | } | |||
413 | } | |||
414 | }; | |||
415 | } | |||
416 | ||||
417 | namespace { | |||
418 | // Make sure to call coro.delete on scope exit. | |||
419 | struct CallCoroDelete final : public EHScopeStack::Cleanup { | |||
420 | Stmt *Deallocate; | |||
421 | ||||
422 | // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;" | |||
423 | ||||
424 | // Note: That deallocation will be emitted twice: once for a normal exit and | |||
425 | // once for exceptional exit. This usage is safe because Deallocate does not | |||
426 | // contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr() | |||
427 | // builds a single call to a deallocation function which is safe to emit | |||
428 | // multiple times. | |||
429 | void Emit(CodeGenFunction &CGF, Flags) override { | |||
430 | // Remember the current point, as we are going to emit deallocation code | |||
431 | // first to get to coro.free instruction that is an argument to a delete | |||
432 | // call. | |||
433 | BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock(); | |||
434 | ||||
435 | auto *FreeBB = CGF.createBasicBlock("coro.free"); | |||
436 | CGF.EmitBlock(FreeBB); | |||
437 | CGF.EmitStmt(Deallocate); | |||
438 | ||||
439 | auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free"); | |||
440 | CGF.EmitBlock(AfterFreeBB); | |||
441 | ||||
442 | // We should have captured coro.free from the emission of deallocate. | |||
443 | auto *CoroFree = CGF.CurCoro.Data->LastCoroFree; | |||
444 | if (!CoroFree) { | |||
445 | CGF.CGM.Error(Deallocate->getBeginLoc(), | |||
446 | "Deallocation expressoin does not refer to coro.free"); | |||
447 | return; | |||
448 | } | |||
449 | ||||
450 | // Get back to the block we were originally and move coro.free there. | |||
451 | auto *InsertPt = SaveInsertBlock->getTerminator(); | |||
452 | CoroFree->moveBefore(InsertPt); | |||
453 | CGF.Builder.SetInsertPoint(InsertPt); | |||
454 | ||||
455 | // Add if (auto *mem = coro.free) Deallocate; | |||
456 | auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy); | |||
457 | auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr); | |||
458 | CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB); | |||
459 | ||||
460 | // No longer need old terminator. | |||
461 | InsertPt->eraseFromParent(); | |||
462 | CGF.Builder.SetInsertPoint(AfterFreeBB); | |||
463 | } | |||
464 | explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {} | |||
465 | }; | |||
466 | } | |||
467 | ||||
468 | namespace { | |||
469 | struct GetReturnObjectManager { | |||
470 | CodeGenFunction &CGF; | |||
471 | CGBuilderTy &Builder; | |||
472 | const CoroutineBodyStmt &S; | |||
473 | ||||
474 | Address GroActiveFlag; | |||
475 | CodeGenFunction::AutoVarEmission GroEmission; | |||
476 | ||||
477 | GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S) | |||
478 | : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()), | |||
479 | GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {} | |||
480 | ||||
481 | // The gro variable has to outlive coroutine frame and coroutine promise, but, | |||
482 | // it can only be initialized after coroutine promise was created, thus, we | |||
483 | // split its emission in two parts. EmitGroAlloca emits an alloca and sets up | |||
484 | // cleanups. Later when coroutine promise is available we initialize the gro | |||
485 | // and sets the flag that the cleanup is now active. | |||
486 | ||||
487 | void EmitGroAlloca() { | |||
488 | auto *GroDeclStmt = dyn_cast<DeclStmt>(S.getResultDecl()); | |||
489 | if (!GroDeclStmt) { | |||
490 | // If get_return_object returns void, no need to do an alloca. | |||
491 | return; | |||
492 | } | |||
493 | ||||
494 | auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl()); | |||
495 | ||||
496 | // Set GRO flag that it is not initialized yet | |||
497 | GroActiveFlag = | |||
498 | CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(), "gro.active"); | |||
499 | Builder.CreateStore(Builder.getFalse(), GroActiveFlag); | |||
500 | ||||
501 | GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl); | |||
502 | ||||
503 | // Remember the top of EHStack before emitting the cleanup. | |||
504 | auto old_top = CGF.EHStack.stable_begin(); | |||
505 | CGF.EmitAutoVarCleanups(GroEmission); | |||
506 | auto top = CGF.EHStack.stable_begin(); | |||
507 | ||||
508 | // Make the cleanup conditional on gro.active | |||
509 | for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top); | |||
510 | b != e; b++) { | |||
511 | if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) { | |||
512 | assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?"); | |||
513 | Cleanup->setActiveFlag(GroActiveFlag); | |||
514 | Cleanup->setTestFlagInEHCleanup(); | |||
515 | Cleanup->setTestFlagInNormalCleanup(); | |||
516 | } | |||
517 | } | |||
518 | } | |||
519 | ||||
520 | void EmitGroInit() { | |||
521 | if (!GroActiveFlag.isValid()) { | |||
522 | // No Gro variable was allocated. Simply emit the call to | |||
523 | // get_return_object. | |||
524 | CGF.EmitStmt(S.getResultDecl()); | |||
525 | return; | |||
526 | } | |||
527 | ||||
528 | CGF.EmitAutoVarInit(GroEmission); | |||
529 | Builder.CreateStore(Builder.getTrue(), GroActiveFlag); | |||
530 | } | |||
531 | }; | |||
532 | } | |||
533 | ||||
534 | static void emitBodyAndFallthrough(CodeGenFunction &CGF, | |||
535 | const CoroutineBodyStmt &S, Stmt *Body) { | |||
536 | CGF.EmitStmt(Body); | |||
537 | const bool CanFallthrough = CGF.Builder.GetInsertBlock(); | |||
538 | if (CanFallthrough) | |||
539 | if (Stmt *OnFallthrough = S.getFallthroughHandler()) | |||
540 | CGF.EmitStmt(OnFallthrough); | |||
541 | } | |||
542 | ||||
543 | void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) { | |||
544 | auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy()); | |||
545 | auto &TI = CGM.getContext().getTargetInfo(); | |||
546 | unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth(); | |||
547 | ||||
548 | auto *EntryBB = Builder.GetInsertBlock(); | |||
549 | auto *AllocBB = createBasicBlock("coro.alloc"); | |||
550 | auto *InitBB = createBasicBlock("coro.init"); | |||
551 | auto *FinalBB = createBasicBlock("coro.final"); | |||
552 | auto *RetBB = createBasicBlock("coro.ret"); | |||
553 | ||||
554 | auto *CoroId = Builder.CreateCall( | |||
555 | CGM.getIntrinsic(llvm::Intrinsic::coro_id), | |||
556 | {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr}); | |||
557 | createCoroData(*this, CurCoro, CoroId); | |||
558 | CurCoro.Data->SuspendBB = RetBB; | |||
559 | ||||
560 | // Backend is allowed to elide memory allocations, to help it, emit | |||
561 | // auto mem = coro.alloc() ? 0 : ... allocation code ...; | |||
562 | auto *CoroAlloc = Builder.CreateCall( | |||
563 | CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId}); | |||
564 | ||||
565 | Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB); | |||
566 | ||||
567 | EmitBlock(AllocBB); | |||
568 | auto *AllocateCall = EmitScalarExpr(S.getAllocate()); | |||
569 | auto *AllocOrInvokeContBB = Builder.GetInsertBlock(); | |||
570 | ||||
571 | // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided. | |||
572 | if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) { | |||
573 | auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure"); | |||
574 | ||||
575 | // See if allocation was successful. | |||
576 | auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy); | |||
577 | auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr); | |||
578 | Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB); | |||
579 | ||||
580 | // If not, return OnAllocFailure object. | |||
581 | EmitBlock(RetOnFailureBB); | |||
582 | EmitStmt(RetOnAllocFailure); | |||
583 | } | |||
584 | else { | |||
585 | Builder.CreateBr(InitBB); | |||
586 | } | |||
587 | ||||
588 | EmitBlock(InitBB); | |||
589 | ||||
590 | // Pass the result of the allocation to coro.begin. | |||
591 | auto *Phi = Builder.CreatePHI(VoidPtrTy, 2); | |||
592 | Phi->addIncoming(NullPtr, EntryBB); | |||
593 | Phi->addIncoming(AllocateCall, AllocOrInvokeContBB); | |||
594 | auto *CoroBegin = Builder.CreateCall( | |||
595 | CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi}); | |||
596 | CurCoro.Data->CoroBegin = CoroBegin; | |||
597 | ||||
598 | GetReturnObjectManager GroManager(*this, S); | |||
599 | GroManager.EmitGroAlloca(); | |||
600 | ||||
601 | CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB); | |||
602 | { | |||
603 | ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap); | |||
604 | CodeGenFunction::RunCleanupsScope ResumeScope(*this); | |||
605 | EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate()); | |||
606 | ||||
607 | // Create parameter copies. We do it before creating a promise, since an | |||
608 | // evolution of coroutine TS may allow promise constructor to observe | |||
609 | // parameter copies. | |||
610 | for (auto *PM : S.getParamMoves()) { | |||
611 | EmitStmt(PM); | |||
612 | ParamReplacer.addCopy(cast<DeclStmt>(PM)); | |||
613 | // TODO: if(CoroParam(...)) need to surround ctor and dtor | |||
614 | // for the copy, so that llvm can elide it if the copy is | |||
615 | // not needed. | |||
616 | } | |||
617 | ||||
618 | EmitStmt(S.getPromiseDeclStmt()); | |||
619 | ||||
620 | Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl()); | |||
621 | auto *PromiseAddrVoidPtr = | |||
622 | new llvm::BitCastInst(PromiseAddr.getPointer(), VoidPtrTy, "", CoroId); | |||
623 | // Update CoroId to refer to the promise. We could not do it earlier because | |||
624 | // promise local variable was not emitted yet. | |||
625 | CoroId->setArgOperand(1, PromiseAddrVoidPtr); | |||
626 | ||||
627 | // Now we have the promise, initialize the GRO | |||
628 | GroManager.EmitGroInit(); | |||
629 | ||||
630 | EHStack.pushCleanup<CallCoroEnd>(EHCleanup); | |||
631 | ||||
632 | CurCoro.Data->CurrentAwaitKind = AwaitKind::Init; | |||
633 | CurCoro.Data->ExceptionHandler = S.getExceptionHandler(); | |||
634 | EmitStmt(S.getInitSuspendStmt()); | |||
635 | CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB); | |||
636 | ||||
637 | CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal; | |||
638 | ||||
639 | if (CurCoro.Data->ExceptionHandler) { | |||
640 | // If we generated IR to record whether an exception was thrown from | |||
641 | // 'await_resume', then use that IR to determine whether the coroutine | |||
642 | // body should be skipped. | |||
643 | // If we didn't generate the IR (perhaps because 'await_resume' was marked | |||
644 | // as 'noexcept'), then we skip this check. | |||
645 | BasicBlock *ContBB = nullptr; | |||
646 | if (CurCoro.Data->ResumeEHVar) { | |||
647 | BasicBlock *BodyBB = createBasicBlock("coro.resumed.body"); | |||
648 | ContBB = createBasicBlock("coro.resumed.cont"); | |||
649 | Value *SkipBody = Builder.CreateFlagLoad(CurCoro.Data->ResumeEHVar, | |||
650 | "coro.resumed.eh"); | |||
651 | Builder.CreateCondBr(SkipBody, ContBB, BodyBB); | |||
652 | EmitBlock(BodyBB); | |||
653 | } | |||
654 | ||||
655 | auto Loc = S.getBeginLoc(); | |||
656 | CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr, | |||
657 | CurCoro.Data->ExceptionHandler); | |||
658 | auto *TryStmt = | |||
659 | CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch); | |||
660 | ||||
661 | EnterCXXTryStmt(*TryStmt); | |||
662 | emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock()); | |||
663 | ExitCXXTryStmt(*TryStmt); | |||
664 | ||||
665 | if (ContBB) | |||
666 | EmitBlock(ContBB); | |||
667 | } | |||
668 | else { | |||
669 | emitBodyAndFallthrough(*this, S, S.getBody()); | |||
670 | } | |||
671 | ||||
672 | // See if we need to generate final suspend. | |||
673 | const bool CanFallthrough = Builder.GetInsertBlock(); | |||
674 | const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0; | |||
675 | if (CanFallthrough || HasCoreturns) { | |||
676 | EmitBlock(FinalBB); | |||
677 | CurCoro.Data->CurrentAwaitKind = AwaitKind::Final; | |||
678 | EmitStmt(S.getFinalSuspendStmt()); | |||
679 | } else { | |||
680 | // We don't need FinalBB. Emit it to make sure the block is deleted. | |||
681 | EmitBlock(FinalBB, /*IsFinished=*/true); | |||
682 | } | |||
683 | } | |||
684 | ||||
685 | EmitBlock(RetBB); | |||
686 | // Emit coro.end before getReturnStmt (and parameter destructors), since | |||
687 | // resume and destroy parts of the coroutine should not include them. | |||
688 | llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end); | |||
689 | Builder.CreateCall(CoroEnd, {NullPtr, Builder.getFalse()}); | |||
690 | ||||
691 | if (Stmt *Ret = S.getReturnStmt()) | |||
692 | EmitStmt(Ret); | |||
693 | } | |||
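
The allocation-failure branch above (the coro.ret.on.failure block) is the codegen side of the optional static get_return_object_on_allocation_failure() member on the promise type: when the promise declares it, the allocator result is compared against null and, on failure, the coroutine returns that object instead of ever starting its body. A hedged sketch of a promise that opts in; the FailableTask name is made up.

#include <coroutine>

struct FailableTask {
  struct promise_type {
    FailableTask get_return_object() { return {}; }
    // The presence of this static member is what produces the
    // "coro.ret.on.failure" path: allocation failure shows up as a null
    // pointer (non-throwing allocation) and this object is returned instead.
    static FailableTask get_return_object_on_allocation_failure() noexcept {
      return {};
    }
    std::suspend_never initial_suspend() noexcept { return {}; }
    std::suspend_never final_suspend() noexcept { return {}; }
    void return_void() {}
    void unhandled_exception() {}
  };
};

FailableTask work() { co_return; }
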
694 | ||||
695 | // Emit coroutine intrinsic and patch up arguments of the token type. | |||
696 | RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E, | |||
697 | unsigned int IID) { | |||
698 | SmallVector<llvm::Value *, 8> Args; | |||
699 | switch (IID) { | |||
700 | default: | |||
701 | break; | |||
702 | // The coro.frame builtin is replaced with an SSA value of the coro.begin | |||
703 | // intrinsic. | |||
704 | case llvm::Intrinsic::coro_frame: { | |||
705 | if (CurCoro.Data && CurCoro.Data->CoroBegin) { | |||
706 | return RValue::get(CurCoro.Data->CoroBegin); | |||
707 | } | |||
708 | CGM.Error(E->getBeginLoc(), "this builtin expect that __builtin_coro_begin " | |||
709 | "has been used earlier in this function"); | |||
710 | auto NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy()); | |||
711 | return RValue::get(NullPtr); | |||
712 | } | |||
713 | // The following three intrinsics take a token parameter referring to a token | |||
714 | // returned by earlier call to @llvm.coro.id. Since we cannot represent it in | |||
715 | // builtins, we patch it up here. | |||
716 | case llvm::Intrinsic::coro_alloc: | |||
717 | case llvm::Intrinsic::coro_begin: | |||
718 | case llvm::Intrinsic::coro_free: { | |||
719 | if (CurCoro.Data && CurCoro.Data->CoroId) { | |||
720 | Args.push_back(CurCoro.Data->CoroId); | |||
721 | break; | |||
722 | } | |||
723 | CGM.Error(E->getBeginLoc(), "this builtin expect that __builtin_coro_id has" | |||
724 | " been used earlier in this function"); | |||
725 | // Fallthrough to the next case to add TokenNone as the first argument. | |||
726 | LLVM_FALLTHROUGH; | |||
727 | } | |||
728 | // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first | |||
729 | // argument. | |||
730 | case llvm::Intrinsic::coro_suspend: | |||
731 | Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext())); | |||
732 | break; | |||
733 | } | |||
734 | for (const Expr *Arg : E->arguments()) | |||
735 | Args.push_back(EmitScalarExpr(Arg)); | |||
736 | ||||
737 | llvm::Function *F = CGM.getIntrinsic(IID); | |||
738 | llvm::CallInst *Call = Builder.CreateCall(F, Args); | |||
739 | ||||
740 | // Note: The following code is to enable to emit coro.id and coro.begin by | |||
741 | // hand to experiment with coroutines in C. | |||
742 | // If we see @llvm.coro.id remember it in the CoroData. We will update | |||
743 | // coro.alloc, coro.begin and coro.free intrinsics to refer to it. | |||
744 | if (IID == llvm::Intrinsic::coro_id) { | |||
745 | createCoroData(*this, CurCoro, Call, E); | |||
746 | } | |||
747 | else if (IID == llvm::Intrinsic::coro_begin) { | |||
748 | if (CurCoro.Data) | |||
749 | CurCoro.Data->CoroBegin = Call; | |||
750 | } | |||
751 | else if (IID == llvm::Intrinsic::coro_free) { | |||
752 | // Remember the last coro_free as we need it to build the conditional | |||
753 | // deletion of the coroutine frame. | |||
754 | if (CurCoro.Data) | |||
755 | CurCoro.Data->LastCoroFree = Call; | |||
756 | } | |||
757 | return RValue::get(Call); | |||
758 | } |
1 | // unique_ptr implementation -*- C++ -*- |
2 | |
3 | // Copyright (C) 2008-2016 Free Software Foundation, Inc. |
4 | // |
5 | // This file is part of the GNU ISO C++ Library. This library is free |
6 | // software; you can redistribute it and/or modify it under the |
7 | // terms of the GNU General Public License as published by the |
8 | // Free Software Foundation; either version 3, or (at your option) |
9 | // any later version. |
10 | |
11 | // This library is distributed in the hope that it will be useful, |
12 | // but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
14 | // GNU General Public License for more details. |
15 | |
16 | // Under Section 7 of GPL version 3, you are granted additional |
17 | // permissions described in the GCC Runtime Library Exception, version |
18 | // 3.1, as published by the Free Software Foundation. |
19 | |
20 | // You should have received a copy of the GNU General Public License and |
21 | // a copy of the GCC Runtime Library Exception along with this program; |
22 | // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
23 | // <http://www.gnu.org/licenses/>. |
24 | |
25 | /** @file bits/unique_ptr.h |
26 | * This is an internal header file, included by other library headers. |
27 | * Do not attempt to use it directly. @headername{memory} |
28 | */ |
29 | |
30 | #ifndef _UNIQUE_PTR_H
31 | #define _UNIQUE_PTR_H 1
32 | |
33 | #include <bits/c++config.h> |
34 | #include <debug/assertions.h> |
35 | #include <type_traits> |
36 | #include <utility> |
37 | #include <tuple> |
38 | |
39 | namespace std _GLIBCXX_VISIBILITY(default)
40 | { |
41 | _GLIBCXX_BEGIN_NAMESPACE_VERSION |
42 | |
43 | /** |
44 | * @addtogroup pointer_abstractions |
45 | * @{ |
46 | */ |
47 | |
48 | #if _GLIBCXX_USE_DEPRECATED
49 | template<typename> class auto_ptr; |
50 | #endif |
51 | |
52 | /// Primary template of default_delete, used by unique_ptr |
53 | template<typename _Tp> |
54 | struct default_delete |
55 | { |
56 | /// Default constructor |
57 | constexpr default_delete() noexcept = default; |
58 | |
59 | /** @brief Converting constructor. |
60 | * |
61 | * Allows conversion from a deleter for arrays of another type, @p _Up, |
62 | * only if @p _Up* is convertible to @p _Tp*. |
63 | */ |
64 | template<typename _Up, typename = typename |
65 | enable_if<is_convertible<_Up*, _Tp*>::value>::type> |
66 | default_delete(const default_delete<_Up>&) noexcept { } |
67 | |
68 | /// Calls @c delete @p __ptr |
69 | void |
70 | operator()(_Tp* __ptr) const |
71 | { |
72 | static_assert(!is_void<_Tp>::value, |
73 | "can't delete pointer to incomplete type"); |
74 | static_assert(sizeof(_Tp)>0, |
75 | "can't delete pointer to incomplete type"); |
76 | delete __ptr; |
77 | } |
78 | }; |
79 | |
80 | // _GLIBCXX_RESOLVE_LIB_DEFECTS |
81 | // DR 740 - omit specialization for array objects with a compile time length |
82 | /// Specialization for arrays, default_delete. |
83 | template<typename _Tp> |
84 | struct default_delete<_Tp[]> |
85 | { |
86 | public: |
87 | /// Default constructor |
88 | constexpr default_delete() noexcept = default; |
89 | |
90 | /** @brief Converting constructor. |
91 | * |
92 | * Allows conversion from a deleter for arrays of another type, such as |
93 | * a const-qualified version of @p _Tp. |
94 | * |
95 | * Conversions from types derived from @c _Tp are not allowed because |
96 | * it is unsafe to @c delete[] an array of derived types through a |
97 | * pointer to the base type. |
98 | */ |
99 | template<typename _Up, typename = typename |
100 | enable_if<is_convertible<_Up(*)[], _Tp(*)[]>::value>::type> |
101 | default_delete(const default_delete<_Up[]>&) noexcept { } |
102 | |
103 | /// Calls @c delete[] @p __ptr |
104 | template<typename _Up> |
105 | typename enable_if<is_convertible<_Up(*)[], _Tp(*)[]>::value>::type |
106 | operator()(_Up* __ptr) const |
107 | { |
108 | static_assert(sizeof(_Tp)>0, |
109 | "can't delete pointer to incomplete type"); |
110 | delete [] __ptr; |
111 | } |
112 | }; |
113 | |
114 | /// 20.7.1.2 unique_ptr for single objects. |
115 | template <typename _Tp, typename _Dp = default_delete<_Tp> > |
116 | class unique_ptr |
117 | { |
118 | // use SFINAE to determine whether _Del::pointer exists |
119 | class _Pointer |
120 | { |
121 | template<typename _Up> |
122 | static typename _Up::pointer __test(typename _Up::pointer*); |
123 | |
124 | template<typename _Up> |
125 | static _Tp* __test(...); |
126 | |
127 | typedef typename remove_reference<_Dp>::type _Del; |
128 | |
129 | public: |
130 | typedef decltype(__test<_Del>(0)) type; |
131 | }; |
132 | |
133 | typedef std::tuple<typename _Pointer::type, _Dp> __tuple_type; |
134 | __tuple_type _M_t; |
135 | |
136 | public: |
137 | typedef typename _Pointer::type pointer; |
138 | typedef _Tp element_type; |
139 | typedef _Dp deleter_type; |
140 | |
141 | |
142 | // helper template for detecting a safe conversion from another |
143 | // unique_ptr |
144 | template<typename _Up, typename _Ep> |
145 | using __safe_conversion_up = __and_< |
146 | is_convertible<typename unique_ptr<_Up, _Ep>::pointer, pointer>, |
147 | __not_<is_array<_Up>>, |
148 | __or_<__and_<is_reference<deleter_type>, |
149 | is_same<deleter_type, _Ep>>, |
150 | __and_<__not_<is_reference<deleter_type>>, |
151 | is_convertible<_Ep, deleter_type>> |
152 | > |
153 | >; |
154 | |
155 | // Constructors. |
156 | |
157 | /// Default constructor, creates a unique_ptr that owns nothing. |
158 | constexpr unique_ptr() noexcept |
159 | : _M_t() |
160 | { static_assert(!is_pointer<deleter_type>::value, |
161 | "constructed with null function pointer deleter"); } |
162 | |
163 | /** Takes ownership of a pointer. |
164 | * |
165 | * @param __p A pointer to an object of @c element_type |
166 | * |
167 | * The deleter will be value-initialized. |
168 | */ |
169 | explicit |
170 | unique_ptr(pointer __p) noexcept |
171 | : _M_t() |
172 | { |
173 | std::get<0>(_M_t) = __p; |
174 | static_assert(!is_pointer<deleter_type>::value, |
175 | "constructed with null function pointer deleter"); |
176 | } |
177 | |
178 | /** Takes ownership of a pointer. |
179 | * |
180 | * @param __p A pointer to an object of @c element_type |
181 | * @param __d A reference to a deleter. |
182 | * |
183 | * The deleter will be initialized with @p __d |
184 | */ |
185 | unique_ptr(pointer __p, |
186 | typename conditional<is_reference<deleter_type>::value, |
187 | deleter_type, const deleter_type&>::type __d) noexcept |
188 | : _M_t(__p, __d) { } |
189 | |
190 | /** Takes ownership of a pointer. |
191 | * |
192 | * @param __p A pointer to an object of @c element_type |
193 | * @param __d An rvalue reference to a deleter. |
194 | * |
195 | * The deleter will be initialized with @p std::move(__d) |
196 | */ |
197 | unique_ptr(pointer __p, |
198 | typename remove_reference<deleter_type>::type&& __d) noexcept |
199 | : _M_t(std::move(__p), std::move(__d)) |
200 | { static_assert(!std::is_reference<deleter_type>::value, |
201 | "rvalue deleter bound to reference"); } |
202 | |
203 | /// Creates a unique_ptr that owns nothing. |
204 | constexpr unique_ptr(nullptr_t) noexcept : unique_ptr() { } |
205 | |
206 | // Move constructors. |
207 | |
208 | /// Move constructor. |
209 | unique_ptr(unique_ptr&& __u) noexcept |
210 | : _M_t(__u.release(), std::forward<deleter_type>(__u.get_deleter())) { } |
211 | |
212 | /** @brief Converting constructor from another type |
213 | * |
214 | * Requires that the pointer owned by @p __u is convertible to the |
215 | * type of pointer owned by this object, @p __u does not own an array, |
216 | * and @p __u has a compatible deleter type. |
217 | */ |
218 | template<typename _Up, typename _Ep, typename = _Require< |
219 | __safe_conversion_up<_Up, _Ep>, |
220 | typename conditional<is_reference<_Dp>::value, |
221 | is_same<_Ep, _Dp>, |
222 | is_convertible<_Ep, _Dp>>::type>> |
223 | unique_ptr(unique_ptr<_Up, _Ep>&& __u) noexcept |
224 | : _M_t(__u.release(), std::forward<_Ep>(__u.get_deleter())) |
225 | { } |
226 | |
227 | #if _GLIBCXX_USE_DEPRECATED
228 | /// Converting constructor from @c auto_ptr |
229 | template<typename _Up, typename = _Require< |
230 | is_convertible<_Up*, _Tp*>, is_same<_Dp, default_delete<_Tp>>>> |
231 | unique_ptr(auto_ptr<_Up>&& __u) noexcept; |
232 | #endif |
233 | |
234 | /// Destructor, invokes the deleter if the stored pointer is not null. |
235 | ~unique_ptr() noexcept |
236 | { |
237 | auto& __ptr = std::get<0>(_M_t); |
238 | if (__ptr != nullptr) |
239 | get_deleter()(__ptr); |
240 | __ptr = pointer(); |
241 | } |
242 | |
243 | // Assignment. |
244 | |
245 | /** @brief Move assignment operator. |
246 | * |
247 | * @param __u The object to transfer ownership from. |
248 | * |
249 | * Invokes the deleter first if this object owns a pointer. |
250 | */ |
251 | unique_ptr& |
252 | operator=(unique_ptr&& __u) noexcept |
253 | { |
254 | reset(__u.release()); |
255 | get_deleter() = std::forward<deleter_type>(__u.get_deleter()); |
256 | return *this; |
257 | } |
258 | |
259 | /** @brief Assignment from another type. |
260 | * |
261 | * @param __u The object to transfer ownership from, which owns a |
262 | * convertible pointer to a non-array object. |
263 | * |
264 | * Invokes the deleter first if this object owns a pointer. |
265 | */ |
266 | template<typename _Up, typename _Ep> |
267 | typename enable_if< __and_< |
268 | __safe_conversion_up<_Up, _Ep>, |
269 | is_assignable<deleter_type&, _Ep&&> |
270 | >::value, |
271 | unique_ptr&>::type |
272 | operator=(unique_ptr<_Up, _Ep>&& __u) noexcept |
273 | { |
274 | reset(__u.release()); |
275 | get_deleter() = std::forward<_Ep>(__u.get_deleter()); |
276 | return *this; |
277 | } |
278 | |
279 | /// Reset the %unique_ptr to empty, invoking the deleter if necessary. |
280 | unique_ptr& |
281 | operator=(nullptr_t) noexcept |
282 | { |
283 | reset(); |
284 | return *this; |
285 | } |
286 | |
287 | // Observers. |
288 | |
289 | /// Dereference the stored pointer. |
290 | typename add_lvalue_reference<element_type>::type |
291 | operator*() const |
292 | { |
293 | __glibcxx_assert(get() != pointer()); |
294 | return *get(); |
295 | } |
296 | |
297 | /// Return the stored pointer. |
298 | pointer |
299 | operator->() const noexcept |
300 | { |
301 | _GLIBCXX_DEBUG_PEDASSERT(get() != pointer()); |
302 | return get(); |
303 | } |
304 | |
305 | /// Return the stored pointer. |
306 | pointer |
307 | get() const noexcept |
308 | { return std::get<0>(_M_t); } |
309 | |
310 | /// Return a reference to the stored deleter. |
311 | deleter_type& |
312 | get_deleter() noexcept |
313 | { return std::get<1>(_M_t); } |
314 | |
315 | /// Return a reference to the stored deleter. |
316 | const deleter_type& |
317 | get_deleter() const noexcept |
318 | { return std::get<1>(_M_t); } |
319 | |
320 | /// Return @c true if the stored pointer is not null. |
321 | explicit operator bool() const noexcept |
322 | { return get() == pointer() ? false : true; } |
323 | |
324 | // Modifiers. |
325 | |
326 | /// Release ownership of any stored pointer. |
327 | pointer |
328 | release() noexcept |
329 | { |
330 | pointer __p = get(); |
331 | std::get<0>(_M_t) = pointer(); |
332 | return __p; |
333 | } |
334 | |
335 | /** @brief Replace the stored pointer. |
336 | * |
337 | * @param __p The new pointer to store. |
338 | * |
339 | * The deleter will be invoked if a pointer is already owned. |
340 | */ |
341 | void |
342 | reset(pointer __p = pointer()) noexcept |
343 | { |
344 | using std::swap; |
345 | swap(std::get<0>(_M_t), __p); |
346 | if (__p != pointer()) |
347 | get_deleter()(__p); |
348 | } |
349 | |
350 | /// Exchange the pointer and deleter with another object. |
351 | void |
352 | swap(unique_ptr& __u) noexcept |
353 | { |
354 | using std::swap; |
355 | swap(_M_t, __u._M_t); |
356 | } |
357 | |
358 | // Disable copy from lvalue. |
359 | unique_ptr(const unique_ptr&) = delete; |
360 | unique_ptr& operator=(const unique_ptr&) = delete; |
361 | }; |
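
As a quick illustration of the observers and modifiers documented above (get, release, reset, get_deleter, operator bool), which is the interface CGCoroutine.cpp relies on for CurCoro.Data, here is a tiny standalone usage sketch:

#include <cassert>
#include <memory>

int main() {
  std::unique_ptr<int> p(new int(42)); // takes ownership, deleter value-initialized
  assert(p);                           // operator bool: stored pointer is non-null
  assert(*p == 42);                    // operator* dereferences the stored pointer
  int *raw = p.release();              // gives up ownership without deleting
  assert(!p && raw != nullptr);
  p.reset(raw);                        // takes ownership again
  p.reset();                           // deletes the object, p becomes empty
  assert(p.get() == nullptr);
}
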
362 | |
363 | /// 20.7.1.3 unique_ptr for array objects with a runtime length |
364 | // [unique.ptr.runtime] |
365 | // _GLIBCXX_RESOLVE_LIB_DEFECTS |
366 | // DR 740 - omit specialization for array objects with a compile time length |
367 | template<typename _Tp, typename _Dp> |
368 | class unique_ptr<_Tp[], _Dp> |
369 | { |
370 | // use SFINAE to determine whether _Del::pointer exists |
371 | class _Pointer |
372 | { |
373 | template<typename _Up> |
374 | static typename _Up::pointer __test(typename _Up::pointer*); |
375 | |
376 | template<typename _Up> |
377 | static _Tp* __test(...); |
378 | |
379 | typedef typename remove_reference<_Dp>::type _Del; |
380 | |
381 | public: |
382 | typedef decltype(__test<_Del>(0)) type; |
383 | }; |
384 | |
385 | typedef std::tuple<typename _Pointer::type, _Dp> __tuple_type; |
386 | __tuple_type _M_t; |
387 | |
388 | template<typename _Up> |
389 | using __remove_cv = typename remove_cv<_Up>::type; |
390 | |
391 | // like is_base_of<_Tp, _Up> but false if unqualified types are the same |
392 | template<typename _Up> |
393 | using __is_derived_Tp |
394 | = __and_< is_base_of<_Tp, _Up>, |
395 | __not_<is_same<__remove_cv<_Tp>, __remove_cv<_Up>>> >; |
396 | |
397 | |
398 | public: |
399 | typedef typename _Pointer::type pointer; |
400 | typedef _Tp element_type; |
401 | typedef _Dp deleter_type; |
402 | |
403 | // helper template for detecting a safe conversion from another |
404 | // unique_ptr |
405 | template<typename _Up, typename _Ep, |
406 | typename _Up_up = unique_ptr<_Up, _Ep>, |
407 | typename _Up_element_type = typename _Up_up::element_type> |
408 | using __safe_conversion_up = __and_< |
409 | is_array<_Up>, |
410 | is_same<pointer, element_type*>, |
411 | is_same<typename _Up_up::pointer, _Up_element_type*>, |
412 | is_convertible<_Up_element_type(*)[], element_type(*)[]>, |
413 | __or_<__and_<is_reference<deleter_type>, is_same<deleter_type, _Ep>>, |
414 | __and_<__not_<is_reference<deleter_type>>, |
415 | is_convertible<_Ep, deleter_type>>> |
416 | >; |
417 | |
418 | // helper template for detecting a safe conversion from a raw pointer |
419 | template<typename _Up> |
420 | using __safe_conversion_raw = __and_< |
421 | __or_<__or_<is_same<_Up, pointer>, |
422 | is_same<_Up, nullptr_t>>, |
423 | __and_<is_pointer<_Up>, |
424 | is_same<pointer, element_type*>, |
425 | is_convertible< |
426 | typename remove_pointer<_Up>::type(*)[], |
427 | element_type(*)[]> |
428 | > |
429 | > |
430 | >; |
431 | |
432 | // Constructors. |
433 | |
434 | /// Default constructor, creates a unique_ptr that owns nothing. |
435 | constexpr unique_ptr() noexcept |
436 | : _M_t() |
437 | { static_assert(!std::is_pointer<deleter_type>::value, |
438 | "constructed with null function pointer deleter"); } |
439 | |
440 | /** Takes ownership of a pointer. |
441 | * |
442 | * @param __p A pointer to an array of a type safely convertible |
443 | * to an array of @c element_type |
444 | * |
445 | * The deleter will be value-initialized. |
446 | */ |
447 | template<typename _Up, |
448 | typename = typename enable_if< |
449 | __safe_conversion_raw<_Up>::value, bool>::type> |
450 | explicit |
451 | unique_ptr(_Up __p) noexcept |
452 | : _M_t(__p, deleter_type()) |
453 | { static_assert(!is_pointer<deleter_type>::value, |
454 | "constructed with null function pointer deleter"); } |
455 | |
456 | /** Takes ownership of a pointer. |
457 | * |
458 | * @param __p A pointer to an array of a type safely convertible |
459 | * to an array of @c element_type |
460 | * @param __d A reference to a deleter. |
461 | * |
462 | * The deleter will be initialized with @p __d |
463 | */ |
464 | template<typename _Up, |
465 | typename = typename enable_if< |
466 | __safe_conversion_raw<_Up>::value, bool>::type> |
467 | unique_ptr(_Up __p, |
468 | typename conditional<is_reference<deleter_type>::value, |
469 | deleter_type, const deleter_type&>::type __d) noexcept |
470 | : _M_t(__p, __d) { } |
471 | |
472 | /** Takes ownership of a pointer. |
473 | * |
474 | * @param __p A pointer to an array of a type safely convertible |
475 | * to an array of @c element_type |
476 | * @param __d A reference to a deleter. |
477 | * |
478 | * The deleter will be initialized with @p std::move(__d) |
479 | */ |
480 | template<typename _Up, |
481 | typename = typename enable_if< |
482 | __safe_conversion_raw<_Up>::value, bool>::type> |
483 | unique_ptr(_Up __p, typename |
484 | remove_reference<deleter_type>::type&& __d) noexcept |
485 | : _M_t(std::move(__p), std::move(__d)) |
486 | { static_assert(!is_reference<deleter_type>::value, |
487 | "rvalue deleter bound to reference"); } |
488 | |
489 | /// Move constructor. |
490 | unique_ptr(unique_ptr&& __u) noexcept |
491 | : _M_t(__u.release(), std::forward<deleter_type>(__u.get_deleter())) { } |
492 | |
493 | /// Creates a unique_ptr that owns nothing. |
494 | constexpr unique_ptr(nullptr_t) noexcept : unique_ptr() { } |
495 | |
496 | template<typename _Up, typename _Ep, |
497 | typename = _Require<__safe_conversion_up<_Up, _Ep>>> |
498 | unique_ptr(unique_ptr<_Up, _Ep>&& __u) noexcept |
499 | : _M_t(__u.release(), std::forward<_Ep>(__u.get_deleter())) |
500 | { } |
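This converting move constructor accepts another array unique_ptr only when __safe_conversion_up holds, e.g. when the element type merely gains cv-qualifiers. A minimal sketch, assuming user code that includes <memory> and <utility>:

    #include <memory>
    #include <utility>

    void converting_ctor_sketch()
    {
      std::unique_ptr<int[]> mutable_buf(new int[4]());
      // int[] -> const int[]: only a qualification conversion, so the overload participates.
      std::unique_ptr<const int[]> read_only(std::move(mutable_buf));   // mutable_buf is now empty
    }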
501 | |
502 | /// Destructor, invokes the deleter if the stored pointer is not null. |
503 | ~unique_ptr() |
504 | { |
505 | auto& __ptr = std::get<0>(_M_t); |
506 | if (__ptr != nullptr) |
507 | get_deleter()(__ptr); |
508 | __ptr = pointer(); |
509 | } |
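The destructor calls the deleter only for a non-null pointer; a default-constructed or released unique_ptr destructs without invoking it. A minimal sketch with a counting deleter (all names illustrative), assuming user code that includes <memory>:

    #include <memory>

    static int arrays_deleted = 0;

    struct CountingDeleter
    {
      void operator()(int* p) const { ++arrays_deleted; delete[] p; }
    };

    void destructor_sketch()
    {
      {
        std::unique_ptr<int[], CountingDeleter> owned(new int[4]);
        std::unique_ptr<int[], CountingDeleter> empty;
      }                  // only 'owned' held a non-null pointer: arrays_deleted == 1
    }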
510 | |
511 | // Assignment. |
512 | |
513 | /** @brief Move assignment operator. |
514 | * |
515 | * @param __u The object to transfer ownership from. |
516 | * |
517 | * Invokes the deleter first if this object owns a pointer. |
518 | */ |
519 | unique_ptr& |
520 | operator=(unique_ptr&& __u) noexcept |
521 | { |
522 | reset(__u.release()); |
523 | get_deleter() = std::forward<deleter_type>(__u.get_deleter()); |
524 | return *this; |
525 | } |
526 | |
527 | /** @brief Assignment from another type. |
528 | * |
529 | * @param __u The object to transfer ownership from, which owns a |
530 | * convertible pointer to an array object. |
531 | * |
532 | * Invokes the deleter first if this object owns a pointer. |
533 | */ |
534 | template<typename _Up, typename _Ep> |
535 | typename |
536 | enable_if<__and_<__safe_conversion_up<_Up, _Ep>, |
537 | is_assignable<deleter_type&, _Ep&&> |
538 | >::value, |
539 | unique_ptr&>::type |
540 | operator=(unique_ptr<_Up, _Ep>&& __u) noexcept |
541 | { |
542 | reset(__u.release()); |
543 | get_deleter() = std::forward<_Ep>(__u.get_deleter()); |
544 | return *this; |
545 | } |
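Both move-assignment operators hand the incoming pointer to reset() first (deleting any currently owned array) and then forward the incoming deleter. A minimal sketch, assuming user code that includes <memory> and <utility>:

    #include <memory>
    #include <utility>

    void assignment_sketch()
    {
      std::unique_ptr<int[]> a(new int[4]());
      std::unique_ptr<int[]> b(new int[8]());

      a = std::move(b);            // a's old 4-element array is deleted; b becomes empty

      std::unique_ptr<const int[]> c;
      c = std::move(a);            // converting assignment: int[] -> const int[]

      c = nullptr;                 // equivalent to c.reset()
    }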
546 | |
547 | /// Reset the %unique_ptr to empty, invoking the deleter if necessary. |
548 | unique_ptr& |
549 | operator=(nullptr_t) noexcept |
550 | { |
551 | reset(); |
552 | return *this; |
553 | } |
554 | |
555 | // Observers. |
556 | |
557 | /// Access an element of the owned array. |
558 | typename std::add_lvalue_reference<element_type>::type |
559 | operator[](size_t __i) const |
560 | { |
561 | __glibcxx_assert(get() != pointer()); |
562 | return get()[__i]; |
563 | } |
564 | |
565 | /// Return the stored pointer. |
566 | pointer |
567 | get() const noexcept |
568 | { return std::get<0>(_M_t); } |
569 | |
570 | /// Return a reference to the stored deleter. |
571 | deleter_type& |
572 | get_deleter() noexcept |
573 | { return std::get<1>(_M_t); } |
574 | |
575 | /// Return a reference to the stored deleter. |
576 | const deleter_type& |
577 | get_deleter() const noexcept |
578 | { return std::get<1>(_M_t); } |
579 | |
580 | /// Return @c true if the stored pointer is not null. |
581 | explicit operator bool() const noexcept |
582 | { return get() == pointer() ? false : true; } |
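The observers above never affect ownership: operator[] and get() expose the stored array pointer, get_deleter() exposes the stored deleter, and operator bool reports whether a pointer is owned. A minimal sketch, assuming user code that includes <memory> and <cassert>:

    #include <cassert>
    #include <memory>

    void observers_sketch()
    {
      std::unique_ptr<int[]> buf(new int[3]());
      assert(buf);                           // operator bool: a non-null pointer is stored
      buf[1] = 42;                           // operator[]: element access on the owned array
      assert(buf.get()[1] == 42);            // get(): raw pointer, ownership is not released
      static_cast<void>(buf.get_deleter());  // get_deleter(): reference to default_delete<int[]>
    }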
583 | |
584 | // Modifiers. |
585 | |
586 | /// Release ownership of any stored pointer. |
587 | pointer |
588 | release() noexcept |
589 | { |
590 | pointer __p = get(); |
591 | std::get<0>(_M_t) = pointer(); |
592 | return __p; |
593 | } |
594 | |
595 | /** @brief Replace the stored pointer. |
596 | * |
597 | * @param __p The new pointer to store. |
598 | * |
599 | * The deleter will be invoked if a pointer is already owned. |
600 | */ |
601 | template <typename _Up, |
602 | typename = _Require< |
603 | __or_<is_same<_Up, pointer>, |
604 | __and_<is_same<pointer, element_type*>, |
605 | is_pointer<_Up>, |
606 | is_convertible< |
607 | typename remove_pointer<_Up>::type(*)[], |
608 | element_type(*)[] |
609 | > |
610 | > |
611 | > |
612 | >> |
613 | void |
614 | reset(_Up __p) noexcept |
615 | { |
616 | pointer __ptr = __p; |
617 | using std::swap; |
618 | swap(std::get<0>(_M_t), __ptr); |
619 | if (__ptr != nullptr) |
620 | get_deleter()(__ptr); |
621 | } |
622 | |
623 | void reset(nullptr_t = nullptr) noexcept |
624 | { |
625 | reset(pointer()); |
626 | } |
627 | |
628 | /// Exchange the pointer and deleter with another object. |
629 | void |
630 | swap(unique_ptr& __u) noexcept |
631 | { |
632 | using std::swap; |
633 | swap(_M_t, __u._M_t); |
634 | } |
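release() is the only modifier that hands responsibility back to the caller; reset() stores the new pointer before invoking the deleter on the old one; swap() exchanges both pointer and deleter. A minimal sketch, assuming user code that includes <memory> and <cassert>:

    #include <cassert>
    #include <memory>

    void modifiers_sketch()
    {
      std::unique_ptr<int[]> a(new int[2]());
      std::unique_ptr<int[]> b(new int[4]());

      int* raw = a.release();   // a gives up ownership without deleting
      assert(!a);
      delete[] raw;             // now the caller's responsibility

      b.reset(new int[8]());    // stores the new pointer, then deletes the old 4-element array
      b.reset();                // reset(nullptr_t): deletes the array and leaves b empty

      a.swap(b);                // exchanges pointers and deleters (both empty here)
    }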
635 | |
636 | // Disable copy from lvalue. |
637 | unique_ptr(const unique_ptr&) = delete; |
638 | unique_ptr& operator=(const unique_ptr&) = delete; |
639 | }; |
640 | |
641 | template<typename _Tp, typename _Dp> |
642 | inline void |
643 | swap(unique_ptr<_Tp, _Dp>& __x, |
644 | unique_ptr<_Tp, _Dp>& __y) noexcept |
645 | { __x.swap(__y); } |
646 | |
647 | template<typename _Tp, typename _Dp, |
648 | typename _Up, typename _Ep> |
649 | inline bool |
650 | operator==(const unique_ptr<_Tp, _Dp>& __x, |
651 | const unique_ptr<_Up, _Ep>& __y) |
652 | { return __x.get() == __y.get(); } |
653 | |
654 | template<typename _Tp, typename _Dp> |
655 | inline bool |
656 | operator==(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) noexcept |
657 | { return !__x; } |
658 | |
659 | template<typename _Tp, typename _Dp> |
660 | inline bool |
661 | operator==(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) noexcept |
662 | { return !__x; } |
663 | |
664 | template<typename _Tp, typename _Dp, |
665 | typename _Up, typename _Ep> |
666 | inline bool |
667 | operator!=(const unique_ptr<_Tp, _Dp>& __x, |
668 | const unique_ptr<_Up, _Ep>& __y) |
669 | { return __x.get() != __y.get(); } |
670 | |
671 | template<typename _Tp, typename _Dp> |
672 | inline bool |
673 | operator!=(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) noexcept |
674 | { return (bool)__x; } |
675 | |
676 | template<typename _Tp, typename _Dp> |
677 | inline bool |
678 | operator!=(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) noexcept |
679 | { return (bool)__x; } |
680 | |
681 | template<typename _Tp, typename _Dp, |
682 | typename _Up, typename _Ep> |
683 | inline bool |
684 | operator<(const unique_ptr<_Tp, _Dp>& __x, |
685 | const unique_ptr<_Up, _Ep>& __y) |
686 | { |
687 | typedef typename |
688 | std::common_type<typename unique_ptr<_Tp, _Dp>::pointer, |
689 | typename unique_ptr<_Up, _Ep>::pointer>::type _CT; |
690 | return std::less<_CT>()(__x.get(), __y.get()); |
691 | } |
692 | |
693 | template<typename _Tp, typename _Dp> |
694 | inline bool |
695 | operator<(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) |
696 | { return std::less<typename unique_ptr<_Tp, _Dp>::pointer>()(__x.get(), |
697 | nullptr); } |
698 | |
699 | template<typename _Tp, typename _Dp> |
700 | inline bool |
701 | operator<(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) |
702 | { return std::less<typename unique_ptr<_Tp, _Dp>::pointer>()(nullptr, |
703 | __x.get()); } |
704 | |
705 | template<typename _Tp, typename _Dp, |
706 | typename _Up, typename _Ep> |
707 | inline bool |
708 | operator<=(const unique_ptr<_Tp, _Dp>& __x, |
709 | const unique_ptr<_Up, _Ep>& __y) |
710 | { return !(__y < __x); } |
711 | |
712 | template<typename _Tp, typename _Dp> |
713 | inline bool |
714 | operator<=(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) |
715 | { return !(nullptr < __x); } |
716 | |
717 | template<typename _Tp, typename _Dp> |
718 | inline bool |
719 | operator<=(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) |
720 | { return !(__x < nullptr); } |
721 | |
722 | template<typename _Tp, typename _Dp, |
723 | typename _Up, typename _Ep> |
724 | inline bool |
725 | operator>(const unique_ptr<_Tp, _Dp>& __x, |
726 | const unique_ptr<_Up, _Ep>& __y) |
727 | { return (__y < __x); } |
728 | |
729 | template<typename _Tp, typename _Dp> |
730 | inline bool |
731 | operator>(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) |
732 | { return std::less<typename unique_ptr<_Tp, _Dp>::pointer>()(nullptr, |
733 | __x.get()); } |
734 | |
735 | template<typename _Tp, typename _Dp> |
736 | inline bool |
737 | operator>(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) |
738 | { return std::less<typename unique_ptr<_Tp, _Dp>::pointer>()(__x.get(), |
739 | nullptr); } |
740 | |
741 | template<typename _Tp, typename _Dp, |
742 | typename _Up, typename _Ep> |
743 | inline bool |
744 | operator>=(const unique_ptr<_Tp, _Dp>& __x, |
745 | const unique_ptr<_Up, _Ep>& __y) |
746 | { return !(__x < __y); } |
747 | |
748 | template<typename _Tp, typename _Dp> |
749 | inline bool |
750 | operator>=(const unique_ptr<_Tp, _Dp>& __x, nullptr_t) |
751 | { return !(__x < nullptr); } |
752 | |
753 | template<typename _Tp, typename _Dp> |
754 | inline bool |
755 | operator>=(nullptr_t, const unique_ptr<_Tp, _Dp>& __x) |
756 | { return !(nullptr < __x); } |
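Equality compares the stored pointers directly, while the relational operators go through std::less on the common pointer type, which yields a total order even for pointers into unrelated arrays. A minimal sketch, assuming user code that includes <memory>:

    #include <memory>

    bool comparisons_sketch(const std::unique_ptr<int[]>& a,
                            const std::unique_ptr<int[]>& b)
    {
      return (a == nullptr) || (a != b) || (a < b) || (b >= a);
    }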
757 | |
758 | /// std::hash specialization for unique_ptr. |
759 | template<typename _Tp, typename _Dp> |
760 | struct hash<unique_ptr<_Tp, _Dp>> |
761 | : public __hash_base<size_t, unique_ptr<_Tp, _Dp>> |
762 | { |
763 | size_t |
764 | operator()(const unique_ptr<_Tp, _Dp>& __u) const noexcept |
765 | { |
766 | typedef unique_ptr<_Tp, _Dp> _UP; |
767 | return std::hash<typename _UP::pointer>()(__u.get()); |
768 | } |
769 | }; |
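The hash of a unique_ptr is simply the hash of its stored pointer. A minimal sketch, assuming user code that includes <memory>, <functional>, and <cstddef>:

    #include <cstddef>
    #include <functional>
    #include <memory>

    std::size_t hash_sketch(const std::unique_ptr<int[]>& p)
    {
      // Same value as std::hash<int*>()(p.get()).
      return std::hash<std::unique_ptr<int[]>>()(p);
    }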
770 | |
771 | #if __cplusplus > 201103L |
772 | |
773 | #define __cpp_lib_make_unique 201304 |
774 | |
775 | template<typename _Tp> |
776 | struct _MakeUniq |
777 | { typedef unique_ptr<_Tp> __single_object; }; |
778 | |
779 | template<typename _Tp> |
780 | struct _MakeUniq<_Tp[]> |
781 | { typedef unique_ptr<_Tp[]> __array; }; |
782 | |
783 | template<typename _Tp, size_t _Bound> |
784 | struct _MakeUniq<_Tp[_Bound]> |
785 | { struct __invalid_type { }; }; |
786 | |
787 | /// std::make_unique for single objects |
788 | template<typename _Tp, typename... _Args> |
789 | inline typename _MakeUniq<_Tp>::__single_object |
790 | make_unique(_Args&&... __args) |
791 | { return unique_ptr<_Tp>(new _Tp(std::forward<_Args>(__args)...)); } |
792 | |
793 | /// std::make_unique for arrays of unknown bound |
794 | template<typename _Tp> |
795 | inline typename _MakeUniq<_Tp>::__array |
796 | make_unique(size_t __num) |
797 | { return unique_ptr<_Tp>(new remove_extent_t<_Tp>[__num]()); } |
798 | |
799 | /// Disable std::make_unique for arrays of known bound |
800 | template<typename _Tp, typename... _Args> |
801 | inline typename _MakeUniq<_Tp>::__invalid_type |
802 | make_unique(_Args&&...) = delete; |
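The _MakeUniq dispatcher above selects the single-object overload for non-array types, the size-taking overload for arrays of unknown bound, and makes arrays of known bound ill-formed. A minimal sketch, assuming C++14 user code that includes <memory>:

    #include <memory>

    void make_unique_sketch()
    {
      auto single = std::make_unique<int>(42);    // unique_ptr<int>, argument forwarded to int's initializer
      auto array  = std::make_unique<int[]>(8);   // unique_ptr<int[]>, 8 value-initialized elements
      // auto bad = std::make_unique<int[8]>();   // ill-formed: deleted for arrays of known bound
      static_cast<void>(single);
      static_cast<void>(array);
    }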
803 | #endif |
804 | |
805 | // @} group pointer_abstractions |
806 | |
807 | _GLIBCXX_END_NAMESPACE_VERSION |
808 | } // namespace |
809 | |
810 | #endif /* _UNIQUE_PTR_H */ |