clang 3.7.0
CGCleanup.cpp
1 //===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file contains code dealing with the IR generation for cleanups
11 // and related information.
12 //
13 // A "cleanup" is a piece of code which needs to be executed whenever
14 // control transfers out of a particular scope. This can be
15 // conditionalized to occur only on exceptional control flow, only on
16 // normal control flow, or both.
17 //
18 //===----------------------------------------------------------------------===//
19 
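For orientation, a minimal illustrative example (not part of this file) of source code that needs a cleanup: the destructor of 's' must run on every path that leaves the scope, whether control exits by normal fallthrough or by exception unwinding.

  #include <stdexcept>
  #include <string>
  void example(bool fail) {
    std::string s = "needs cleanup";       // ~std::string() is the cleanup
    if (fail)
      throw std::runtime_error("unwind");  // exceptional exit: EH cleanup
  }                                        // normal exit: normal cleanup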
20 #include "CGCleanup.h"
21 #include "CodeGenFunction.h"
22 
23 using namespace clang;
24 using namespace CodeGen;
25 
26 bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
27  if (rv.isScalar())
28  return DominatingLLVMValue::needsSaving(rv.getScalarVal());
29  if (rv.isAggregate())
30  return DominatingLLVMValue::needsSaving(rv.getAggregateAddr());
31  return true;
32 }
33 
34 DominatingValue<RValue>::saved_type
35 DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
36  if (rv.isScalar()) {
37  llvm::Value *V = rv.getScalarVal();
38 
39  // These automatically dominate and don't need to be saved.
40  if (!DominatingLLVMValue::needsSaving(V))
41  return saved_type(V, ScalarLiteral);
42 
43  // Everything else needs an alloca.
44  llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
45  CGF.Builder.CreateStore(V, addr);
46  return saved_type(addr, ScalarAddress);
47  }
48 
49  if (rv.isComplex()) {
50  CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
51  llvm::Type *ComplexTy =
52  llvm::StructType::get(V.first->getType(), V.second->getType(),
53  (void*) nullptr);
54  llvm::Value *addr = CGF.CreateTempAlloca(ComplexTy, "saved-complex");
55  CGF.Builder.CreateStore(V.first,
56  CGF.Builder.CreateStructGEP(ComplexTy, addr, 0));
57  CGF.Builder.CreateStore(V.second,
58  CGF.Builder.CreateStructGEP(ComplexTy, addr, 1));
59  return saved_type(addr, ComplexAddress);
60  }
61 
62  assert(rv.isAggregate());
63  llvm::Value *V = rv.getAggregateAddr(); // TODO: volatile?
64  if (!DominatingLLVMValue::needsSaving(V))
65  return saved_type(V, AggregateLiteral);
66 
67  llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
68  CGF.Builder.CreateStore(V, addr);
69  return saved_type(addr, AggregateAddress);
70 }
71 
72 /// Given a saved r-value produced by save(), perform the code
73 /// necessary to restore it to usability at the current insertion
74 /// point.
75 RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
76  switch (K) {
77  case ScalarLiteral:
78  return RValue::get(Value);
79  case ScalarAddress:
80  return RValue::get(CGF.Builder.CreateLoad(Value));
81  case AggregateLiteral:
82  return RValue::getAggregate(Value);
83  case AggregateAddress:
84  return RValue::getAggregate(CGF.Builder.CreateLoad(Value));
85  case ComplexAddress: {
86  llvm::Value *real =
87  CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(nullptr, Value, 0));
88  llvm::Value *imag =
89  CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(nullptr, Value, 1));
90  return RValue::getComplex(real, imag);
91  }
92  }
93 
94  llvm_unreachable("bad saved r-value kind");
95 }
96 
97 /// Push an entry of the given size onto this protected-scope stack.
98 char *EHScopeStack::allocate(size_t Size) {
99  if (!StartOfBuffer) {
100  unsigned Capacity = 1024;
101  while (Capacity < Size) Capacity *= 2;
102  StartOfBuffer = new char[Capacity];
103  StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
104  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
105  unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
106  unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
107 
108  unsigned NewCapacity = CurrentCapacity;
109  do {
110  NewCapacity *= 2;
111  } while (NewCapacity < UsedCapacity + Size);
112 
113  char *NewStartOfBuffer = new char[NewCapacity];
114  char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
115  char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
116  memcpy(NewStartOfData, StartOfData, UsedCapacity);
117  delete [] StartOfBuffer;
118  StartOfBuffer = NewStartOfBuffer;
119  EndOfBuffer = NewEndOfBuffer;
120  StartOfData = NewStartOfData;
121  }
122 
123  assert(StartOfBuffer + Size <= StartOfData);
124  StartOfData -= Size;
125  return StartOfData;
126 }
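The scope data grows downward from the end of the buffer, so StartOfData always marks the innermost (most recently pushed) scope and the free space sits between StartOfBuffer and StartOfData. As a stand-alone sketch of the growth policy above (hypothetical helper, not part of this file; both branches of allocate() reach the same capacity because reallocation only happens when the current capacity is already too small):

  #include <cstddef>
  // Capacity that allocate() ends up with: start at 1024 bytes and double
  // until the bytes already in use plus the new entry fit.
  static size_t grownCapacity(size_t CurrentCapacity, size_t UsedCapacity,
                              size_t Size) {
    size_t NewCapacity = CurrentCapacity ? CurrentCapacity : 1024;
    while (NewCapacity < UsedCapacity + Size)
      NewCapacity *= 2;
    return NewCapacity;
  }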
127 
128 bool EHScopeStack::containsOnlyLifetimeMarkers(
129  EHScopeStack::stable_iterator Old) const {
130  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
131  EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
132  if (!cleanup || !cleanup->isLifetimeMarker())
133  return false;
134  }
135 
136  return true;
137 }
138 
139 EHScopeStack::stable_iterator
140 EHScopeStack::getInnermostActiveNormalCleanup() const {
141  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
142  si != se; ) {
143  EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
144  if (cleanup.isActive()) return si;
145  si = cleanup.getEnclosingNormalCleanup();
146  }
147  return stable_end();
148 }
149 
150 EHScopeStack::stable_iterator EHScopeStack::getInnermostActiveEHScope() const {
151  for (stable_iterator si = getInnermostEHScope(), se = stable_end();
152  si != se; ) {
153  // Skip over inactive cleanups.
154  EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*find(si));
155  if (cleanup && !cleanup->isActive()) {
156  si = cleanup->getEnclosingEHScope();
157  continue;
158  }
159 
160  // All other scopes are always active.
161  return si;
162  }
163 
164  return stable_end();
165 }
166 
167 
168 void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
169  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
170  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
171  bool IsNormalCleanup = Kind & NormalCleanup;
172  bool IsEHCleanup = Kind & EHCleanup;
173  bool IsActive = !(Kind & InactiveCleanup);
174  EHCleanupScope *Scope =
175  new (Buffer) EHCleanupScope(IsNormalCleanup,
176  IsEHCleanup,
177  IsActive,
178  Size,
179  BranchFixups.size(),
180  InnermostNormalCleanup,
181  InnermostEHScope);
182  if (IsNormalCleanup)
183  InnermostNormalCleanup = stable_begin();
184  if (IsEHCleanup)
185  InnermostEHScope = stable_begin();
186 
187  return Scope->getCleanupBuffer();
188 }
189 
190 void EHScopeStack::popCleanup() {
191  assert(!empty() && "popping exception stack when not empty");
192 
193  assert(isa<EHCleanupScope>(*begin()));
194  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
195  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
196  InnermostEHScope = Cleanup.getEnclosingEHScope();
197  StartOfData += Cleanup.getAllocatedSize();
198 
199  // Destroy the cleanup.
200  Cleanup.Destroy();
201 
202  // Check whether we can shrink the branch-fixups stack.
203  if (!BranchFixups.empty()) {
204  // If we no longer have any normal cleanups, all the fixups are
205  // complete.
206  if (!hasNormalCleanups())
207  BranchFixups.clear();
208 
209  // Otherwise we can still trim out unnecessary nulls.
210  else
211  popNullFixups();
212  }
213 }
214 
215 EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
216  assert(getInnermostEHScope() == stable_end());
217  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
218  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
219  InnermostEHScope = stable_begin();
220  return filter;
221 }
222 
223 void EHScopeStack::popFilter() {
224  assert(!empty() && "popping exception stack when not empty");
225 
226  EHFilterScope &filter = cast<EHFilterScope>(*begin());
227  StartOfData += EHFilterScope::getSizeForNumFilters(filter.getNumFilters());
228 
229  InnermostEHScope = filter.getEnclosingEHScope();
230 }
231 
232 EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
233  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
234  EHCatchScope *scope =
235  new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
236  InnermostEHScope = stable_begin();
237  return scope;
238 }
239 
240 void EHScopeStack::pushTerminate() {
241  char *Buffer = allocate(EHTerminateScope::getSize());
242  new (Buffer) EHTerminateScope(InnermostEHScope);
243  InnermostEHScope = stable_begin();
244 }
245 
246 /// Remove any 'null' fixups on the stack. However, we can't pop more
247 /// fixups than the fixup depth on the innermost normal cleanup, or
248 /// else fixups that we try to add to that cleanup will end up in the
249 /// wrong place. We *could* try to shrink fixup depths, but that's
250 /// actually a lot of work for little benefit.
251 void EHScopeStack::popNullFixups() {
252  // We expect this to only be called when there's still an innermost
253  // normal cleanup; otherwise there really shouldn't be any fixups.
254  assert(hasNormalCleanups());
255 
256  EHScopeStack::iterator it = find(InnermostNormalCleanup);
257  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
258  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
259 
260  while (BranchFixups.size() > MinSize &&
261  BranchFixups.back().Destination == nullptr)
262  BranchFixups.pop_back();
263 }
264 
265 void CodeGenFunction::initFullExprCleanup() {
266  // Create a variable to decide whether the cleanup needs to be run.
267  llvm::AllocaInst *active
268  = CreateTempAlloca(Builder.getInt1Ty(), "cleanup.cond");
269 
270  // Initialize it to false at a site that's guaranteed to be run
271  // before each evaluation.
272  setBeforeOutermostConditional(Builder.getFalse(), active);
273 
274  // Initialize it to true at the current location.
275  Builder.CreateStore(Builder.getTrue(), active);
276 
277  // Set that as the active flag in the cleanup.
278  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
279  assert(!cleanup.getActiveFlag() && "cleanup already has active flag?");
280  cleanup.setActiveFlag(active);
281 
282  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
283  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
284 }
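For context, a hypothetical source-level case that needs such an active flag (not from this file): in a conditional expression only one arm's temporary is constructed, so whether its destructor cleanup must run is known only at run time, which is exactly what the flag set up above records.

  #include <string>
  void take(const std::string &) {}
  void example(bool cond) {
    // Only one of the two temporaries is created; each one's destructor
    // cleanup is guarded by a flag that starts false and is stored true
    // only when that arm is actually evaluated.
    take(cond ? std::string("a") : std::string("b"));
  }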
285 
286 void EHScopeStack::Cleanup::anchor() {}
287 
288 /// All the branch fixups on the EH stack have propagated out past the
289 /// outermost normal cleanup; resolve them all by adding cases to the
290 /// given switch instruction.
291 static void ResolveAllBranchFixups(CodeGenFunction &CGF,
292  llvm::SwitchInst *Switch,
293  llvm::BasicBlock *CleanupEntry) {
294  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;
295 
296  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
297  // Skip this fixup if its destination isn't set.
298  BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
299  if (Fixup.Destination == nullptr) continue;
300 
301  // If there isn't an OptimisticBranchBlock, then InitialBranch is
302  // still pointing directly to its destination; forward it to the
303  // appropriate cleanup entry. This is required in the specific
304  // case of
305  // { std::string s; goto lbl; }
306  // lbl:
307  // i.e. where there's an unresolved fixup inside a single cleanup
308  // entry which we're currently popping.
309  if (Fixup.OptimisticBranchBlock == nullptr) {
310  new llvm::StoreInst(CGF.Builder.getInt32(Fixup.DestinationIndex),
311  CGF.getNormalCleanupDestSlot(),
312  Fixup.InitialBranch);
313  Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
314  }
315 
316  // Don't add this case to the switch statement twice.
317  if (!CasesAdded.insert(Fixup.Destination).second)
318  continue;
319 
320  Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
321  Fixup.Destination);
322  }
323 
324  CGF.EHStack.clearFixups();
325 }
326 
327 /// Transitions the terminator of the given exit-block of a cleanup to
328 /// be a cleanup switch.
329 static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
330  llvm::BasicBlock *Block) {
331  // If it's a branch, turn it into a switch whose default
332  // destination is its original target.
333  llvm::TerminatorInst *Term = Block->getTerminator();
334  assert(Term && "can't transition block without terminator");
335 
336  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
337  assert(Br->isUnconditional());
338  llvm::LoadInst *Load =
339  new llvm::LoadInst(CGF.getNormalCleanupDestSlot(), "cleanup.dest", Term);
340  llvm::SwitchInst *Switch =
341  llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
342  Br->eraseFromParent();
343  return Switch;
344  } else {
345  return cast<llvm::SwitchInst>(Term);
346  }
347 }
348 
349 void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
350  assert(Block && "resolving a null target block");
351  if (!EHStack.getNumBranchFixups()) return;
352 
353  assert(EHStack.hasNormalCleanups() &&
354  "branch fixups exist with no normal cleanups on stack");
355 
356  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
357  bool ResolvedAny = false;
358 
359  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
360  // Skip this fixup if its destination doesn't match.
361  BranchFixup &Fixup = EHStack.getBranchFixup(I);
362  if (Fixup.Destination != Block) continue;
363 
364  Fixup.Destination = nullptr;
365  ResolvedAny = true;
366 
367  // If it doesn't have an optimistic branch block, InitialBranch is
368  // already pointing to the right place.
369  llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
370  if (!BranchBB)
371  continue;
372 
373  // Don't process the same optimistic branch block twice.
374  if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
375  continue;
376 
377  llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);
378 
379  // Add a case to the switch.
380  Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
381  }
382 
383  if (ResolvedAny)
384  EHStack.popNullFixups();
385 }
386 
387 /// Pops cleanup blocks until the given savepoint is reached.
388 void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
389  assert(Old.isValid());
390 
391  while (EHStack.stable_begin() != Old) {
392  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
393 
394  // As long as Old strictly encloses the scope's enclosing normal
395  // cleanup, we're going to emit another normal cleanup which
396  // fallthrough can propagate through.
397  bool FallThroughIsBranchThrough =
398  Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());
399 
400  PopCleanupBlock(FallThroughIsBranchThrough);
401  }
402 }
403 
404 /// Pops cleanup blocks until the given savepoint is reached, then add the
405 /// cleanups from the given savepoint in the lifetime-extended cleanups stack.
406 void
407 CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old,
408  size_t OldLifetimeExtendedSize) {
409  PopCleanupBlocks(Old);
410 
411  // Move our deferred cleanups onto the EH stack.
412  for (size_t I = OldLifetimeExtendedSize,
413  E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
414  // Alignment should be guaranteed by the vptrs in the individual cleanups.
415  assert((I % llvm::alignOf<LifetimeExtendedCleanupHeader>() == 0) &&
416  "misaligned cleanup stack entry");
417 
418  LifetimeExtendedCleanupHeader &Header =
419  reinterpret_cast<LifetimeExtendedCleanupHeader&>(
420  LifetimeExtendedCleanupStack[I]);
421  I += sizeof(Header);
422 
423  EHStack.pushCopyOfCleanup(Header.getKind(),
424  &LifetimeExtendedCleanupStack[I],
425  Header.getSize());
426  I += Header.getSize();
427  }
428  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
429 }
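A rough, hypothetical example of the lifetime-extended case handled above (not from this file): binding a temporary to a reference extends the temporary's lifetime to the reference's scope, so its destructor cleanup is parked on LifetimeExtendedCleanupStack and only transferred to the EH stack here, when the enclosing scope is popped.

  #include <string>
  void use(const std::string &) {}
  void example() {
    // The temporary outlives its full-expression; its cleanup is deferred
    // until 'ref' goes out of scope.
    const std::string &ref = std::string("extended");
    use(ref);
  }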
430 
431 static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
432  EHCleanupScope &Scope) {
433  assert(Scope.isNormalCleanup());
434  llvm::BasicBlock *Entry = Scope.getNormalBlock();
435  if (!Entry) {
436  Entry = CGF.createBasicBlock("cleanup");
437  Scope.setNormalBlock(Entry);
438  }
439  return Entry;
440 }
441 
442 /// Attempts to reduce a cleanup's entry block to a fallthrough. This
443 /// is basically llvm::MergeBlockIntoPredecessor, except
444 /// simplified/optimized for the tighter constraints on cleanup blocks.
445 ///
446 /// Returns the new block, whatever it is.
447 static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
448  llvm::BasicBlock *Entry) {
449  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
450  if (!Pred) return Entry;
451 
452  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
453  if (!Br || Br->isConditional()) return Entry;
454  assert(Br->getSuccessor(0) == Entry);
455 
456  // If we were previously inserting at the end of the cleanup entry
457  // block, we'll need to continue inserting at the end of the
458  // predecessor.
459  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
460  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());
461 
462  // Kill the branch.
463  Br->eraseFromParent();
464 
465  // Replace all uses of the entry with the predecessor, in case there
466  // are phis in the cleanup.
467  Entry->replaceAllUsesWith(Pred);
468 
469  // Merge the blocks.
470  Pred->getInstList().splice(Pred->end(), Entry->getInstList());
471 
472  // Kill the entry block.
473  Entry->eraseFromParent();
474 
475  if (WasInsertBlock)
476  CGF.Builder.SetInsertPoint(Pred);
477 
478  return Pred;
479 }
480 
481 static void EmitCleanup(CodeGenFunction &CGF,
482  EHScopeStack::Cleanup *Fn,
483  EHScopeStack::Cleanup::Flags flags,
484  llvm::Value *ActiveFlag) {
485  // Itanium EH cleanups occur within a terminate scope. Microsoft SEH doesn't
486  // have this behavior, and the Microsoft C++ runtime will call terminate for
487  // us if the cleanup throws.
488  bool PushedTerminate = false;
489  if (flags.isForEHCleanup() && !CGF.getTarget().getCXXABI().isMicrosoft()) {
490  CGF.EHStack.pushTerminate();
491  PushedTerminate = true;
492  }
493 
494  // If there's an active flag, load it and skip the cleanup if it's
495  // false.
496  llvm::BasicBlock *ContBB = nullptr;
497  if (ActiveFlag) {
498  ContBB = CGF.createBasicBlock("cleanup.done");
499  llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
500  llvm::Value *IsActive
501  = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
502  CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
503  CGF.EmitBlock(CleanupBB);
504  }
505 
506  // Ask the cleanup to emit itself.
507  Fn->Emit(CGF, flags);
508  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
509 
510  // Emit the continuation block if there was an active flag.
511  if (ActiveFlag)
512  CGF.EmitBlock(ContBB);
513 
514  // Leave the terminate scope.
515  if (PushedTerminate)
516  CGF.EHStack.popTerminate();
517 }
518 
519 static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
520  llvm::BasicBlock *From,
521  llvm::BasicBlock *To) {
522  // Exit is the exit block of a cleanup, so it always terminates in
523  // an unconditional branch or a switch.
524  llvm::TerminatorInst *Term = Exit->getTerminator();
525 
526  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
527  assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
528  Br->setSuccessor(0, To);
529  } else {
530  llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
531  for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
532  if (Switch->getSuccessor(I) == From)
533  Switch->setSuccessor(I, To);
534  }
535 }
536 
537 /// We don't need a normal entry block for the given cleanup.
538 /// Optimistic fixup branches can cause these blocks to come into
539 /// existence anyway; if so, destroy it.
540 ///
541 /// The validity of this transformation is very much specific to the
542 /// exact ways in which we form branches to cleanup entries.
543 static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
544  EHCleanupScope &scope) {
545  llvm::BasicBlock *entry = scope.getNormalBlock();
546  if (!entry) return;
547 
548  // Replace all the uses with unreachable.
549  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
550  for (llvm::BasicBlock::use_iterator
551  i = entry->use_begin(), e = entry->use_end(); i != e; ) {
552  llvm::Use &use = *i;
553  ++i;
554 
555  use.set(unreachableBB);
556 
557  // The only uses should be fixup switches.
558  llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
559  if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
560  // Replace the switch with a branch.
561  llvm::BranchInst::Create(si->case_begin().getCaseSuccessor(), si);
562 
563  // The switch operand is a load from the cleanup-dest alloca.
564  llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
565 
566  // Destroy the switch.
567  si->eraseFromParent();
568 
569  // Destroy the load.
570  assert(condition->getOperand(0) == CGF.NormalCleanupDest);
571  assert(condition->use_empty());
572  condition->eraseFromParent();
573  }
574  }
575 
576  assert(entry->use_empty());
577  delete entry;
578 }
579 
580 /// Pops a cleanup block. If the block includes a normal cleanup, the
581 /// current insertion point is threaded through the cleanup, as are
582 /// any branch fixups on the cleanup.
583 void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
584  assert(!EHStack.empty() && "cleanup stack is empty!");
585  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
586  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
587  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
588 
589  // Remember activation information.
590  bool IsActive = Scope.isActive();
591  llvm::Value *NormalActiveFlag =
592  Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag() : nullptr;
593  llvm::Value *EHActiveFlag =
594  Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag() : nullptr;
595 
596  // Check whether we need an EH cleanup. This is only true if we've
597  // generated a lazy EH cleanup block.
598  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
599  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
600  bool RequiresEHCleanup = (EHEntry != nullptr);
601  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();
602 
603  // Check the three conditions which might require a normal cleanup:
604 
605  // - whether there are branch fix-ups through this cleanup
606  unsigned FixupDepth = Scope.getFixupDepth();
607  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
608 
609  // - whether there are branch-throughs or branch-afters
610  bool HasExistingBranches = Scope.hasBranches();
611 
612  // - whether there's a fallthrough
613  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
614  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
615 
616  // Branch-through fall-throughs leave the insertion point set to the
617  // end of the last cleanup, which points to the current scope. The
618  // rest of IR gen doesn't need to worry about this; it only happens
619  // during the execution of PopCleanupBlocks().
620  bool HasPrebranchedFallthrough =
621  (FallthroughSource && FallthroughSource->getTerminator());
622 
623  // If this is a normal cleanup, then having a prebranched
624  // fallthrough implies that the fallthrough source unconditionally
625  // jumps here.
626  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
627  (Scope.getNormalBlock() &&
628  FallthroughSource->getTerminator()->getSuccessor(0)
629  == Scope.getNormalBlock()));
630 
631  bool RequiresNormalCleanup = false;
632  if (Scope.isNormalCleanup() &&
633  (HasFixups || HasExistingBranches || HasFallthrough)) {
634  RequiresNormalCleanup = true;
635  }
636 
637  // If we have a prebranched fallthrough into an inactive normal
638  // cleanup, rewrite it so that it leads to the appropriate place.
639  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
640  llvm::BasicBlock *prebranchDest;
641 
642  // If the prebranch is semantically branching through the next
643  // cleanup, just forward it to the next block, leaving the
644  // insertion point in the prebranched block.
645  if (FallthroughIsBranchThrough) {
646  EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
647  prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));
648 
649  // Otherwise, we need to make a new block. If the normal cleanup
650  // isn't being used at all, we could actually reuse the normal
651  // entry block, but this is simpler, and it avoids conflicts with
652  // dead optimistic fixup branches.
653  } else {
654  prebranchDest = createBasicBlock("forwarded-prebranch");
655  EmitBlock(prebranchDest);
656  }
657 
658  llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
659  assert(normalEntry && !normalEntry->use_empty());
660 
661  ForwardPrebranchedFallthrough(FallthroughSource,
662  normalEntry, prebranchDest);
663  }
664 
665  // If we don't need the cleanup at all, we're done.
666  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
667  destroyOptimisticNormalEntry(*this, Scope);
668  EHStack.popCleanup(); // safe because there are no fixups
669  assert(EHStack.getNumBranchFixups() == 0 ||
670  EHStack.hasNormalCleanups());
671  return;
672  }
673 
674  // Copy the cleanup emission data out. Note that SmallVector
675  // guarantees maximal alignment for its buffer regardless of its
676  // type parameter.
677  SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
678  CleanupBuffer.reserve(Scope.getCleanupSize());
679  memcpy(CleanupBuffer.data(),
680  Scope.getCleanupBuffer(), Scope.getCleanupSize());
681  CleanupBuffer.set_size(Scope.getCleanupSize());
682  EHScopeStack::Cleanup *Fn =
683  reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());
684 
685  EHScopeStack::Cleanup::Flags cleanupFlags;
686  if (Scope.isNormalCleanup())
687  cleanupFlags.setIsNormalCleanupKind();
688  if (Scope.isEHCleanup())
689  cleanupFlags.setIsEHCleanupKind();
690 
691  if (!RequiresNormalCleanup) {
692  destroyOptimisticNormalEntry(*this, Scope);
693  EHStack.popCleanup();
694  } else {
695  // If we have a fallthrough and no other need for the cleanup,
696  // emit it directly.
697  if (HasFallthrough && !HasPrebranchedFallthrough &&
698  !HasFixups && !HasExistingBranches) {
699 
700  destroyOptimisticNormalEntry(*this, Scope);
701  EHStack.popCleanup();
702 
703  EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
704 
705  // Otherwise, the best approach is to thread everything through
706  // the cleanup block and then try to clean up after ourselves.
707  } else {
708  // Force the entry block to exist.
709  llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);
710 
711  // I. Set up the fallthrough edge in.
712 
713  CGBuilderTy::InsertPoint savedInactiveFallthroughIP;
714 
715  // If there's a fallthrough, we need to store the cleanup
716  // destination index. For fall-throughs this is always zero.
717  if (HasFallthrough) {
718  if (!HasPrebranchedFallthrough)
719  Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());
720 
721  // Otherwise, save and clear the IP if we don't have fallthrough
722  // because the cleanup is inactive.
723  } else if (FallthroughSource) {
724  assert(!IsActive && "source without fallthrough for active cleanup");
725  savedInactiveFallthroughIP = Builder.saveAndClearIP();
726  }
727 
728  // II. Emit the entry block. This implicitly branches to it if
729  // we have fallthrough. All the fixups and existing branches
730  // should already be branched to it.
731  EmitBlock(NormalEntry);
732 
733  // III. Figure out where we're going and build the cleanup
734  // epilogue.
735 
736  bool HasEnclosingCleanups =
737  (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
738 
739  // Compute the branch-through dest if we need it:
740  // - if there are branch-throughs threaded through the scope
741  // - if fall-through is a branch-through
742  // - if there are fixups that will be optimistically forwarded
743  // to the enclosing cleanup
744  llvm::BasicBlock *BranchThroughDest = nullptr;
745  if (Scope.hasBranchThroughs() ||
746  (FallthroughSource && FallthroughIsBranchThrough) ||
747  (HasFixups && HasEnclosingCleanups)) {
748  assert(HasEnclosingCleanups);
749  EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
750  BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
751  }
752 
753  llvm::BasicBlock *FallthroughDest = nullptr;
754  SmallVector<llvm::Instruction*, 2> InstsToAppend;
755 
756  // If there's exactly one branch-after and no other threads,
757  // we can route it without a switch.
758  if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
759  Scope.getNumBranchAfters() == 1) {
760  assert(!BranchThroughDest || !IsActive);
761 
762  // Clean up the possibly dead store to the cleanup dest slot.
763  llvm::Instruction *NormalCleanupDestSlot =
764  cast<llvm::Instruction>(getNormalCleanupDestSlot());
765  if (NormalCleanupDestSlot->hasOneUse()) {
766  NormalCleanupDestSlot->user_back()->eraseFromParent();
767  NormalCleanupDestSlot->eraseFromParent();
768  NormalCleanupDest = nullptr;
769  }
770 
771  llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
772  InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));
773 
774  // Build a switch-out if we need it:
775  // - if there are branch-afters threaded through the scope
776  // - if fall-through is a branch-after
777  // - if there are fixups that have nowhere left to go and
778  // so must be immediately resolved
779  } else if (Scope.getNumBranchAfters() ||
780  (HasFallthrough && !FallthroughIsBranchThrough) ||
781  (HasFixups && !HasEnclosingCleanups)) {
782 
783  llvm::BasicBlock *Default =
784  (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
785 
786  // TODO: base this on the number of branch-afters and fixups
787  const unsigned SwitchCapacity = 10;
788 
789  llvm::LoadInst *Load =
790  new llvm::LoadInst(getNormalCleanupDestSlot(), "cleanup.dest");
791  llvm::SwitchInst *Switch =
792  llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
793 
794  InstsToAppend.push_back(Load);
795  InstsToAppend.push_back(Switch);
796 
797  // Branch-after fallthrough.
798  if (FallthroughSource && !FallthroughIsBranchThrough) {
799  FallthroughDest = createBasicBlock("cleanup.cont");
800  if (HasFallthrough)
801  Switch->addCase(Builder.getInt32(0), FallthroughDest);
802  }
803 
804  for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
805  Switch->addCase(Scope.getBranchAfterIndex(I),
806  Scope.getBranchAfterBlock(I));
807  }
808 
809  // If there aren't any enclosing cleanups, we can resolve all
810  // the fixups now.
811  if (HasFixups && !HasEnclosingCleanups)
812  ResolveAllBranchFixups(*this, Switch, NormalEntry);
813  } else {
814  // We should always have a branch-through destination in this case.
815  assert(BranchThroughDest);
816  InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
817  }
818 
819  // IV. Pop the cleanup and emit it.
820  EHStack.popCleanup();
821  assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
822 
823  EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
824 
825  // Append the prepared cleanup prologue from above.
826  llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
827  for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
828  NormalExit->getInstList().push_back(InstsToAppend[I]);
829 
830  // Optimistically hope that any fixups will continue falling through.
831  for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
832  I < E; ++I) {
833  BranchFixup &Fixup = EHStack.getBranchFixup(I);
834  if (!Fixup.Destination) continue;
835  if (!Fixup.OptimisticBranchBlock) {
836  new llvm::StoreInst(Builder.getInt32(Fixup.DestinationIndex),
837  getNormalCleanupDestSlot(),
838  Fixup.InitialBranch);
839  Fixup.InitialBranch->setSuccessor(0, NormalEntry);
840  }
841  Fixup.OptimisticBranchBlock = NormalExit;
842  }
843 
844  // V. Set up the fallthrough edge out.
845 
846  // Case 1: a fallthrough source exists but doesn't branch to the
847  // cleanup because the cleanup is inactive.
848  if (!HasFallthrough && FallthroughSource) {
849  // Prebranched fallthrough was forwarded earlier.
850  // Non-prebranched fallthrough doesn't need to be forwarded.
851  // Either way, all we need to do is restore the IP we cleared before.
852  assert(!IsActive);
853  Builder.restoreIP(savedInactiveFallthroughIP);
854 
855  // Case 2: a fallthrough source exists and should branch to the
856  // cleanup, but we're not supposed to branch through to the next
857  // cleanup.
858  } else if (HasFallthrough && FallthroughDest) {
859  assert(!FallthroughIsBranchThrough);
860  EmitBlock(FallthroughDest);
861 
862  // Case 3: a fallthrough source exists and should branch to the
863  // cleanup and then through to the next.
864  } else if (HasFallthrough) {
865  // Everything is already set up for this.
866 
867  // Case 4: no fallthrough source exists.
868  } else {
869  Builder.ClearInsertionPoint();
870  }
871 
872  // VI. Assorted cleaning.
873 
874  // Check whether we can merge NormalEntry into a single predecessor.
875  // This might invalidate (non-IR) pointers to NormalEntry.
876  llvm::BasicBlock *NewNormalEntry =
877  SimplifyCleanupEntry(*this, NormalEntry);
878 
879  // If it did invalidate those pointers, and NormalEntry was the same
880  // as NormalExit, go back and patch up the fixups.
881  if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
882  for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
883  I < E; ++I)
884  EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
885  }
886  }
887 
888  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
889 
890  // Emit the EH cleanup if required.
891  if (RequiresEHCleanup) {
892  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
893 
894  EmitBlock(EHEntry);
895 
896  // We only actually emit the cleanup code if the cleanup is either
897  // active or was used before it was deactivated.
898  if (EHActiveFlag || IsActive) {
899 
900  cleanupFlags.setIsForEHCleanup();
901  EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
902  }
903 
904  Builder.CreateBr(getEHDispatchBlock(EHParent));
905 
906  Builder.restoreIP(SavedIP);
907 
908  SimplifyCleanupEntry(*this, EHEntry);
909  }
910 }
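As a rough source-level illustration of the branch-after / branch-through distinction threaded above (hypothetical example, not from this file): a goto that leaves only the innermost scope with a destructor is a branch-after of that cleanup, while a goto that leaves several such scopes branches through the inner cleanup and becomes a branch-after of the outer one.

  #include <string>
  void example(bool b) {
    {
      std::string outer;       // outer cleanup
      {
        std::string inner;     // inner cleanup
        if (b)
          goto past_inner;     // branch-after of the 'inner' cleanup
        goto past_outer;       // branch-through 'inner', branch-after 'outer'
      }
    past_inner:
      ;
    }
  past_outer:
    return;
  }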
911 
912 /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
913 /// specified destination obviously has no cleanups to run. 'false' is always
914 /// a conservatively correct answer for this method.
915 bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
916  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
917  && "stale jump destination");
918 
919  // Calculate the innermost active normal cleanup.
920  EHScopeStack::stable_iterator TopCleanup =
921  EHStack.getInnermostActiveNormalCleanup();
922 
923  // If we're not in an active normal cleanup scope, or if the
924  // destination scope is within the innermost active normal cleanup
925  // scope, we don't need to worry about fixups.
926  if (TopCleanup == EHStack.stable_end() ||
927  TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
928  return true;
929 
930  // Otherwise, we might need some cleanups.
931  return false;
932 }
933 
934 
935 /// Terminate the current block by emitting a branch which might leave
936 /// the current cleanup-protected scope. The target scope may not yet
937 /// be known, in which case this will require a fixup.
938 ///
939 /// As a side-effect, this method clears the insertion point.
940 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
941  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
942  && "stale jump destination");
943 
944  if (!HaveInsertPoint())
945  return;
946 
947  // Create the branch.
948  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
949 
950  // Calculate the innermost active normal cleanup.
951  EHScopeStack::stable_iterator
952  TopCleanup = EHStack.getInnermostActiveNormalCleanup();
953 
954  // If we're not in an active normal cleanup scope, or if the
955  // destination scope is within the innermost active normal cleanup
956  // scope, we don't need to worry about fixups.
957  if (TopCleanup == EHStack.stable_end() ||
958  TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
959  Builder.ClearInsertionPoint();
960  return;
961  }
962 
963  // If we can't resolve the destination cleanup scope, just add this
964  // to the current cleanup scope as a branch fixup.
965  if (!Dest.getScopeDepth().isValid()) {
966  BranchFixup &Fixup = EHStack.addBranchFixup();
967  Fixup.Destination = Dest.getBlock();
968  Fixup.DestinationIndex = Dest.getDestIndex();
969  Fixup.InitialBranch = BI;
970  Fixup.OptimisticBranchBlock = nullptr;
971 
972  Builder.ClearInsertionPoint();
973  return;
974  }
975 
976  // Otherwise, thread through all the normal cleanups in scope.
977 
978  // Store the index at the start.
979  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
980  new llvm::StoreInst(Index, getNormalCleanupDestSlot(), BI);
981 
982  // Adjust BI to point to the first cleanup block.
983  {
984  EHCleanupScope &Scope =
985  cast<EHCleanupScope>(*EHStack.find(TopCleanup));
986  BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
987  }
988 
989  // Add this destination to all the scopes involved.
990  EHScopeStack::stable_iterator I = TopCleanup;
991  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
992  if (E.strictlyEncloses(I)) {
993  while (true) {
994  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
995  assert(Scope.isNormalCleanup());
996  I = Scope.getEnclosingNormalCleanup();
997 
998  // If this is the last cleanup we're propagating through, tell it
999  // that there's a resolved jump moving through it.
1000  if (!E.strictlyEncloses(I)) {
1001  Scope.addBranchAfter(Index, Dest.getBlock());
1002  break;
1003  }
1004 
1005  // Otherwise, tell the scope that there's a jump propagating
1006  // through it. If this isn't new information, all the rest of
1007  // the work has been done before.
1008  if (!Scope.addBranchThrough(Dest.getBlock()))
1009  break;
1010  }
1011  }
1012 
1013  Builder.ClearInsertionPoint();
1014 }
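Tying the pieces together with a hypothetical example (not from this file): for the goto below, the code above stores the destination's index into the cleanup destination slot and redirects the branch to the entry of the cleanup for 's'; the switch built in PopCleanupBlock then dispatches on that index after the destructor runs. If the label's scope depth were not yet known, the branch would instead be recorded as a BranchFixup and resolved later.

  #include <string>
  void example(bool b) {
    {
      std::string s("local");
      if (b)
        goto out;              // must run ~std::string for 's' on the way
    }
  out:
    return;
  }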
1015 
1016 static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
1017  EHScopeStack::stable_iterator C) {
1018  // If we needed a normal block for any reason, that counts.
1019  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
1020  return true;
1021 
1022  // Check whether any enclosed cleanups were needed.
1023  for (EHScopeStack::stable_iterator
1024  I = EHStack.getInnermostNormalCleanup();
1025  I != C; ) {
1026  assert(C.strictlyEncloses(I));
1027  EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
1028  if (S.getNormalBlock()) return true;
1029  I = S.getEnclosingNormalCleanup();
1030  }
1031 
1032  return false;
1033 }
1034 
1035 static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
1036  EHScopeStack::stable_iterator cleanup) {
1037  // If we needed an EH block for any reason, that counts.
1038  if (EHStack.find(cleanup)->hasEHBranches())
1039  return true;
1040 
1041  // Check whether any enclosed cleanups were needed.
1042  for (EHScopeStack::stable_iterator
1043  i = EHStack.getInnermostEHScope(); i != cleanup; ) {
1044  assert(cleanup.strictlyEncloses(i));
1045 
1046  EHScope &scope = *EHStack.find(i);
1047  if (scope.hasEHBranches())
1048  return true;
1049 
1050  i = scope.getEnclosingEHScope();
1051  }
1052 
1053  return false;
1054 }
1055 
1056 enum ForActivation_t {
1057  ForActivation,
1058  ForDeactivation
1059 };
1060 
1061 /// The given cleanup block is changing activation state. Configure a
1062 /// cleanup variable if necessary.
1063 ///
1064 /// It would be good if we had some way of determining if there were
1065 /// extra uses *after* the change-over point.
1066 static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
1067  EHScopeStack::stable_iterator C,
1068  ForActivation_t kind,
1069  llvm::Instruction *dominatingIP) {
1070  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
1071 
1072  // We always need the flag if we're activating the cleanup in a
1073  // conditional context, because we have to assume that the current
1074  // location doesn't necessarily dominate the cleanup's code.
1075  bool isActivatedInConditional =
1076  (kind == ForActivation && CGF.isInConditionalBranch());
1077 
1078  bool needFlag = false;
1079 
1080  // Calculate whether the cleanup was used:
1081 
1082  // - as a normal cleanup
1083  if (Scope.isNormalCleanup() &&
1084  (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
1085  Scope.setTestFlagInNormalCleanup();
1086  needFlag = true;
1087  }
1088 
1089  // - as an EH cleanup
1090  if (Scope.isEHCleanup() &&
1091  (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
1092  Scope.setTestFlagInEHCleanup();
1093  needFlag = true;
1094  }
1095 
1096  // If it hasn't yet been used as either, we're done.
1097  if (!needFlag) return;
1098 
1099  llvm::AllocaInst *var = Scope.getActiveFlag();
1100  if (!var) {
1101  var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
1102  Scope.setActiveFlag(var);
1103 
1104  assert(dominatingIP && "no existing variable and no dominating IP!");
1105 
1106  // Initialize to true or false depending on whether it was
1107  // active up to this point.
1108  llvm::Value *value = CGF.Builder.getInt1(kind == ForDeactivation);
1109 
1110  // If we're in a conditional block, ignore the dominating IP and
1111  // use the outermost conditional branch.
1112  if (CGF.isInConditionalBranch()) {
1113  CGF.setBeforeOutermostConditional(value, var);
1114  } else {
1115  new llvm::StoreInst(value, var, dominatingIP);
1116  }
1117  }
1118 
1119  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
1120 }
1121 
1122 /// Activate a cleanup that was created in an inactivated state.
1123 void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
1124  llvm::Instruction *dominatingIP) {
1125  assert(C != EHStack.stable_end() && "activating bottom of stack?");
1126  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1127  assert(!Scope.isActive() && "double activation");
1128 
1129  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
1130 
1131  Scope.setActive(true);
1132 }
1133 
1134 /// Deactivate a cleanup that was created in an active state.
1135 void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
1136  llvm::Instruction *dominatingIP) {
1137  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
1138  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1139  assert(Scope.isActive() && "double deactivation");
1140 
1141  // If it's the top of the stack, just pop it.
1142  if (C == EHStack.stable_begin()) {
1143  // If it's a normal cleanup, we need to pretend that the
1144  // fallthrough is unreachable.
1145  CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1146  PopCleanupBlock();
1147  Builder.restoreIP(SavedIP);
1148  return;
1149  }
1150 
1151  // Otherwise, follow the general case.
1152  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
1153 
1154  Scope.setActive(false);
1155 }
1156 
1157 llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() {
1158  if (!NormalCleanupDest)
1159  NormalCleanupDest =
1160  CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
1161  return NormalCleanupDest;
1162 }
1163 
1164 /// Emits all the code to cause the given temporary to be cleaned up.
1165 void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
1166  QualType TempType,
1167  llvm::Value *Ptr) {
1168  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
1169  /*useEHCleanup*/ true);
1170 }