#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;
using namespace llvm;
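
// In CodeGenModule::getBuiltinLibFunction: prefer the mangled name when the
// declaration carries an explicit asm label.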
  if (FD->hasAttr<AsmLabelAttr>())
    Name = getMangledName(D);

  llvm::FunctionType *Ty =
      cast<llvm::FunctionType>(getTypes().ConvertType(FD->getType()));

  return GetOrCreateLLVMFunction(Name, Ty, D, /*ForVTable=*/false);
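
// The atomic lowering below operates on integers; EmitToInt/EmitFromInt move
// pointer operands into and out of the integer width the instruction needs.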
static Value *EmitToInt(CodeGenFunction &CGF, llvm::Value *V,
                        QualType T, llvm::IntegerType *IntType) {
  if (V->getType()->isPointerTy())
    return CGF.Builder.CreatePtrToInt(V, IntType);

  assert(V->getType() == IntType);
  return V;
}

static Value *EmitFromInt(CodeGenFunction &CGF, llvm::Value *V,
                          QualType T, llvm::Type *ResultType) {
  if (ResultType->isPointerTy())
    return CGF.Builder.CreateIntToPtr(V, ResultType);

  assert(V->getType() == ResultType);
  return V;
}
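
// Shared lowering for the two-operand __sync atomics: bitcast the destination
// pointer to a pointer-to-integer of the operand width, emit a sequentially
// consistent atomicrmw, and convert the result back to the source type.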
static Value *MakeBinaryAtomicValue(CodeGenFunction &CGF,
                                    llvm::AtomicRMWInst::BinOp Kind,
                                    const CallExpr *E) {
  QualType T = E->getType();
  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType = llvm::IntegerType::get(
      CGF.getLLVMContext(), CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);

  llvm::Value *Result = CGF.Builder.CreateAtomicRMW(
      Kind, Args[0], Args[1], llvm::SequentiallyConsistent);
  return EmitFromInt(CGF, Result, T, ValueType);
}

// In the __builtin_nontemporal_store helper, the destination is recast to a
// pointer to the stored value's own type before the store is emitted:
static Value *EmitNontemporalStore(CodeGenFunction &CGF, const CallExpr *E) {
  Value *Val = CGF.EmitScalarExpr(E->getArg(0));
  Value *Address = CGF.EmitScalarExpr(E->getArg(1));

  Val = CGF.EmitToMemory(Val, E->getArg(0)->getType());
  Value *BC = CGF.Builder.CreateBitCast(
      Address, llvm::PointerType::getUnqual(Val->getType()), "cast");
  LValue LV = CGF.MakeNaturalAlignAddrLValue(BC, E->getArg(0)->getType());
  LV.setNontemporal(true);
  CGF.EmitStoreOfScalar(Val, LV, false);
  return nullptr;
}
static RValue EmitBinaryAtomic(CodeGenFunction &CGF,
                               llvm::AtomicRMWInst::BinOp Kind,
                               const CallExpr *E) {
  return RValue::get(MakeBinaryAtomicValue(CGF, Kind, E));
}

static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF,
                                   llvm::AtomicRMWInst::BinOp Kind,
                                   const CallExpr *E,
                                   Instruction::BinaryOps Op,
                                   bool Invert = false) {
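  // The *_and_fetch builtins return the post-operation value, so the
  // operation is re-applied to the atomicrmw result below (and bitwise
  // inverted for nand).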
  QualType T = E->getType();
  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType = llvm::IntegerType::get(
      CGF.getLLVMContext(), CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);

  llvm::Value *Result = CGF.Builder.CreateAtomicRMW(
      Kind, Args[0], Args[1], llvm::SequentiallyConsistent);
  Result = CGF.Builder.CreateBinOp(Op, Result, Args[1]);
  if (Invert)
    Result = CGF.Builder.CreateBinOp(llvm::Instruction::Xor, Result,
                                     llvm::ConstantInt::get(IntType, -1));
  Result = EmitFromInt(CGF, Result, T, ValueType);
  return RValue::get(Result);
}
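
// Utility to insert an atomic cmpxchg instruction. Used by both
// __sync_val_compare_and_swap (returns the old value) and
// __sync_bool_compare_and_swap (returns the success flag).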
static Value *MakeAtomicCmpXchgValue(CodeGenFunction &CGF, const CallExpr *E,
                                     bool ReturnBool) {
  QualType T = ReturnBool ? E->getArg(1)->getType() : E->getType();
  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType = llvm::IntegerType::get(
      CGF.getLLVMContext(), CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[3];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  Args[2] = EmitToInt(CGF, CGF.EmitScalarExpr(E->getArg(2)), T, IntType);

  Value *Pair = CGF.Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
                                                llvm::SequentiallyConsistent,
                                                llvm::SequentiallyConsistent);
  if (ReturnBool)
    return CGF.Builder.CreateZExt(CGF.Builder.CreateExtractValue(Pair, 1),
                                  CGF.ConvertType(E->getType()));
  return EmitFromInt(CGF, CGF.Builder.CreateExtractValue(Pair, 0), T,
                     ValueType);
}
// Emit a simple mangled intrinsic that has one argument and a return type
// matching the argument type (used for the fabs builtins).
static Value *EmitFAbs(CodeGenFunction &CGF, Value *V) {
  Value *F = CGF.CGM.getIntrinsic(Intrinsic::fabs, V->getType());
  llvm::CallInst *Call = CGF.Builder.CreateCall(F, V);
  Call->setDoesNotAccessMemory();
  return Call;
}
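
// Test the sign bit of a floating-point value by bitcasting it to an integer
// of equal width; PPC double-double first isolates the high doubleword,
// which carries the overall sign.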
static Value *EmitSignBit(CodeGenFunction &CGF, Value *V) {
  LLVMContext &C = CGF.CGM.getLLVMContext();

  llvm::Type *Ty = V->getType();
  int Width = Ty->getPrimitiveSizeInBits();
  llvm::Type *IntTy = llvm::IntegerType::get(C, Width);
  V = CGF.Builder.CreateBitCast(V, IntTy);
  if (Ty->isPPC_FP128Ty()) {
    // For double-double the sign lives in the higher-order double: halve the
    // width, shift the high bits down, and truncate.
    Width >>= 1;
    Value *ShiftCst = llvm::ConstantInt::get(IntTy, Width);
    V = CGF.Builder.CreateLShr(V, ShiftCst);
    IntTy = llvm::IntegerType::get(C, Width);
    V = CGF.Builder.CreateTrunc(V, IntTy);
  }
  Value *Zero = llvm::Constant::getNullValue(IntTy);
  return CGF.Builder.CreateICmpSLT(V, Zero);
}
  assert(X->getType() == Y->getType() &&
         "Arguments must be the same type. (Did you forget to make sure both "
         "arguments have the same integer width?)");

  Carry = CGF.Builder.CreateExtractValue(Tmp, 1);
  return CGF.Builder.CreateExtractValue(Tmp, 0);
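
// A small record of an integer type's width and signedness, used to choose
// the encompassing type for __builtin_{add,sub,mul}_overflow.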
struct WidthAndSignedness {
  unsigned Width;
  bool Signed;
};

static WidthAndSignedness
getIntegerWidthAndSignedness(const clang::ASTContext &context,
                             const clang::QualType Type) {
  assert(Type->isIntegerType() && "Given type is not an integer.");
  return {context.getIntWidth(Type), Type->isSignedIntegerType()};
}

// Return the smallest integer type that can represent every type in the
// list: signed if any input is signed, and one bit wider than any unsigned
// input that has to fit in a signed result.
static struct WidthAndSignedness
EncompassingIntegerType(ArrayRef<struct WidthAndSignedness> Types) {
  assert(Types.size() > 0 && "Empty list of types.");

  bool Signed = false;
  for (const auto &Type : Types)
    Signed |= Type.Signed;

  unsigned Width = 0;
  for (const auto &Type : Types) {
    unsigned MinWidth = Type.Width + (Signed && !Type.Signed);
    if (Width < MinWidth)
      Width = MinWidth;
  }

  return {Width, Signed};
}
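
// Lower va_start/va_end: bitcast the va_list pointer to i8* and call the
// corresponding LLVM intrinsic.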
llvm::Value *CodeGenFunction::EmitVAStartEnd(llvm::Value *ArgValue,
                                             bool IsStart) {
  llvm::Type *DestType = Int8PtrTy;
  if (ArgValue->getType() != DestType)
    ArgValue =
        Builder.CreateBitCast(ArgValue, DestType, ArgValue->getName().data());

  Intrinsic::ID inst = IsStart ? Intrinsic::vastart : Intrinsic::vaend;
  return Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue);
}
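
// For __builtin_object_size, types 0/1 (maximum size) and 2/3 (minimum size)
// only differ in whether the closest enclosing subobject is considered, so a
// result computed for one type can sometimes stand in for the other.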
static bool areBOSTypesCompatible(int From, int To) {
  return From == To || (From == 0 && To == 1) || (From == 3 && To == 2);
}

static llvm::Value *
getDefaultBuiltinObjectSizeResult(unsigned Type, llvm::IntegerType *ResType) {
  return ConstantInt::get(ResType, (Type & 2) ? 0 : -1, /*isSigned=*/true);
}

llvm::Value *
CodeGenFunction::evaluateOrEmitBuiltinObjectSize(const Expr *E, unsigned Type,
                                                 llvm::IntegerType *ResType) {
  uint64_t ObjectSize;
  if (!E->tryEvaluateObjectSize(ObjectSize, getContext(), Type))
    return emitBuiltinObjectSize(E, Type, ResType);
  return ConstantInt::get(ResType, ObjectSize, /*isSigned=*/true);
}
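
// When the size cannot be folded, either load it from a pass_object_size
// parameter or fall back to the llvm.objectsize intrinsic.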
llvm::Value *
CodeGenFunction::emitBuiltinObjectSize(const Expr *E, unsigned Type,
                                       llvm::IntegerType *ResType) {
  // If the expression names a parameter annotated with pass_object_size, use
  // the size that was passed alongside it.
  if (auto *D = dyn_cast<DeclRefExpr>(E->IgnoreParenImpCasts())) {
    auto *Param = dyn_cast<ParmVarDecl>(D->getDecl());
    auto *PS = D->getDecl()->getAttr<PassObjectSizeAttr>();
    if (Param != nullptr && PS != nullptr &&
        areBOSTypesCompatible(PS->getType(), Type)) {
      auto Iter = SizeArguments.find(Param);
      assert(Iter != SizeArguments.end());

      auto DIter = LocalDeclMap.find(Iter->second);
      assert(DIter != LocalDeclMap.end());

      return EmitLoadOfScalar(DIter->second, /*isVolatile=*/false,
                              getContext().getSizeType(), E->getLocStart());
    }
  }

  // LLVM only supports types 0 and 2; bit 1 of Type selects min vs. max.
  auto *CI = ConstantInt::get(Builder.getInt1Ty(), (Type & 2) >> 1);
  llvm::Type *Tys[] = {ResType, Builder.getInt8PtrTy(0)};
  Value *F = CGM.getIntrinsic(Intrinsic::objectsize, Tys);
  return Builder.CreateCall(F, {EmitScalarExpr(E), CI});
}
RValue CodeGenFunction::EmitBuiltinExpr(const FunctionDecl *FD,
                                        unsigned BuiltinID, const CallExpr *E,
                                        ReturnValueSlot ReturnValue) {
  // See if we can constant fold this builtin. If so, don't emit it at all.
  Expr::EvalResult Result;
  if (E->EvaluateAsRValue(Result, CGM.getContext()) &&
      !Result.hasSideEffects()) {
    if (Result.Val.isInt())
      return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
                                                Result.Val.getInt()));
    if (Result.Val.isFloat())
      return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
                                               Result.Val.getFloat()));
  }

  switch (BuiltinID) {
  default: break;  // Handle intrinsics and libm functions below.
  case Builtin::BI__builtin___CFStringMakeConstantString:
  case Builtin::BI__builtin___NSStringMakeConstantString:
    return RValue::get(CGM.EmitConstantExpr(E, E->getType(), nullptr));
  case Builtin::BI__builtin_stdarg_start:
  case Builtin::BI__builtin_va_start:
  case Builtin::BI__va_start:
  case Builtin::BI__builtin_va_end:
    return RValue::get(
        EmitVAStartEnd(BuiltinID == Builtin::BI__va_start
                           ? EmitScalarExpr(E->getArg(0))
                           : EmitVAListRef(E->getArg(0)).getPointer(),
                       BuiltinID != Builtin::BI__builtin_va_end));
  case Builtin::BI__builtin_va_copy: {
    Value *DstPtr = EmitVAListRef(E->getArg(0)).getPointer();
    Value *SrcPtr = EmitVAListRef(E->getArg(1)).getPointer();

    llvm::Type *Type = Int8PtrTy;

    DstPtr = Builder.CreateBitCast(DstPtr, Type);
    SrcPtr = Builder.CreateBitCast(SrcPtr, Type);
    return RValue::get(Builder.CreateCall(CGM.getIntrinsic(Intrinsic::vacopy),
                                          {DstPtr, SrcPtr}));
  }
  case Builtin::BI__builtin_abs:
  case Builtin::BI__builtin_labs:
  case Builtin::BI__builtin_llabs: {
    // abs(x) is lowered as x >= 0 ? x : -x.
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    Value *NegOp = Builder.CreateNeg(ArgValue, "neg");
    Value *CmpResult = Builder.CreateICmpSGE(
        ArgValue, llvm::Constant::getNullValue(ArgValue->getType()),
        "abscond");
    Value *Result = Builder.CreateSelect(CmpResult, ArgValue, NegOp, "abs");

    return RValue::get(Result);
  }
  case Builtin::BI__builtin_fabs:
  case Builtin::BI__builtin_fabsf:
  case Builtin::BI__builtin_fabsl: {
    return RValue::get(EmitFAbs(*this, EmitScalarExpr(E->getArg(0))));
  }
  case Builtin::BI__builtin_fmod:
  case Builtin::BI__builtin_fmodf:
  case Builtin::BI__builtin_fmodl: {
    Value *Arg1 = EmitScalarExpr(E->getArg(0));
    Value *Arg2 = EmitScalarExpr(E->getArg(1));
    Value *Result = Builder.CreateFRem(Arg1, Arg2, "fmod");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_conj:
  case Builtin::BI__builtin_conjf:
  case Builtin::BI__builtin_conjl: {
    ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
    Value *Real = ComplexVal.first;
    Value *Imag = ComplexVal.second;
    Value *Zero =
        Imag->getType()->isFPOrFPVectorTy()
            ? llvm::ConstantFP::getZeroValueForNegation(Imag->getType())
            : llvm::Constant::getNullValue(Imag->getType());

    Imag = Builder.CreateFSub(Zero, Imag, "sub");
    return RValue::getComplex(std::make_pair(Real, Imag));
  }
  case Builtin::BI__builtin_creal:
  case Builtin::BI__builtin_crealf:
  case Builtin::BI__builtin_creall:
  case Builtin::BIcreal:
  case Builtin::BIcrealf:
  case Builtin::BIcreall: {
    ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
    return RValue::get(ComplexVal.first);
  }
  case Builtin::BI__builtin_cimag:
  case Builtin::BI__builtin_cimagf:
  case Builtin::BI__builtin_cimagl:
  case Builtin::BIcimag:
  case Builtin::BIcimagf:
  case Builtin::BIcimagl: {
    ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
    return RValue::get(ComplexVal.second);
  }
  case Builtin::BI__builtin_ctzs:
  case Builtin::BI__builtin_ctz:
  case Builtin::BI__builtin_ctzl:
  case Builtin::BI__builtin_ctzll: {
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);

    llvm::Type *ResultType = ConvertType(E->getType());
    Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
    Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
    if (Result->getType() != ResultType)
      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned=*/true,
                                     "cast");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_clzs:
  case Builtin::BI__builtin_clz:
  case Builtin::BI__builtin_clzl:
  case Builtin::BI__builtin_clzll: {
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);

    llvm::Type *ResultType = ConvertType(E->getType());
    Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
    Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
    if (Result->getType() != ResultType)
      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned=*/true,
                                     "cast");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_ffs:
  case Builtin::BI__builtin_ffsl:
  case Builtin::BI__builtin_ffsll: {
    // ffs(x) -> x ? cttz(x) + 1 : 0
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);

    llvm::Type *ResultType = ConvertType(E->getType());
    Value *Tmp =
        Builder.CreateAdd(Builder.CreateCall(F, {ArgValue, Builder.getTrue()}),
                          llvm::ConstantInt::get(ArgType, 1));
    Value *Zero = llvm::Constant::getNullValue(ArgType);
    Value *IsZero = Builder.CreateICmpEQ(ArgValue, Zero, "iszero");
    Value *Result = Builder.CreateSelect(IsZero, Zero, Tmp, "ffs");
    if (Result->getType() != ResultType)
      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned=*/true,
                                     "cast");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_parity:
  case Builtin::BI__builtin_parityl:
  case Builtin::BI__builtin_parityll: {
    // parity(x) -> ctpop(x) & 1
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);

    llvm::Type *ResultType = ConvertType(E->getType());
    Value *Tmp = Builder.CreateCall(F, ArgValue);
    Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
    if (Result->getType() != ResultType)
      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned=*/true,
                                     "cast");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_popcount:
  case Builtin::BI__builtin_popcountl:
  case Builtin::BI__builtin_popcountll: {
    Value *ArgValue = EmitScalarExpr(E->getArg(0));

    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);

    llvm::Type *ResultType = ConvertType(E->getType());
    Value *Result = Builder.CreateCall(F, ArgValue);
    if (Result->getType() != ResultType)
      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned=*/true,
                                     "cast");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_unpredictable: {
    // Always return the argument of __builtin_unpredictable. LLVM does not
    // handle this builtin directly; metadata for it is attached to the
    // branches and switches that use it.
    return RValue::get(EmitScalarExpr(E->getArg(0)));
  }
  case Builtin::BI__builtin_expect: {
    Value *ArgValue = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgType = ArgValue->getType();

    Value *ExpectedValue = EmitScalarExpr(E->getArg(1));
    // Don't generate llvm.expect on -O0 as the backend won't use it for
    // anything. We still IRGen ExpectedValue because it could have
    // side-effects.
    if (CGM.getCodeGenOpts().OptimizationLevel == 0)
      return RValue::get(ArgValue);

    Value *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
    Value *Result =
        Builder.CreateCall(FnExpect, {ArgValue, ExpectedValue}, "expval");
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_assume_aligned: {
    Value *PtrValue = EmitScalarExpr(E->getArg(0));
    Value *OffsetValue =
        (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) : nullptr;

    Value *AlignmentValue = EmitScalarExpr(E->getArg(1));
    ConstantInt *AlignmentCI = cast<ConstantInt>(AlignmentValue);
    unsigned Alignment = (unsigned)AlignmentCI->getZExtValue();

    EmitAlignmentAssumption(PtrValue, Alignment, OffsetValue);
    return RValue::get(PtrValue);
  }
  case Builtin::BI__assume:
  case Builtin::BI__builtin_assume: {
    if (E->getArg(0)->HasSideEffects(getContext()))
      return RValue::get(nullptr);

    Value *ArgValue = EmitScalarExpr(E->getArg(0));
    Value *FnAssume = CGM.getIntrinsic(Intrinsic::assume);
    return RValue::get(Builder.CreateCall(FnAssume, ArgValue));
  }
  case Builtin::BI__builtin_bswap16:
  case Builtin::BI__builtin_bswap32:
  case Builtin::BI__builtin_bswap64: {
    Value *ArgValue = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgType = ArgValue->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::bswap, ArgType);
    return RValue::get(Builder.CreateCall(F, ArgValue));
  }
  case Builtin::BI__builtin_object_size: {
    unsigned Type =
        E->getArg(1)->EvaluateKnownConstInt(getContext()).getZExtValue();
    auto *ResType = cast<llvm::IntegerType>(ConvertType(E->getType()));

    // Pass this on to the optimizer so it can handle more complex cases.
    return RValue::get(evaluateOrEmitBuiltinObjectSize(E, Type, ResType));
  }
  case Builtin::BI__builtin_prefetch: {
    Value *Locality, *RW, *Address = EmitScalarExpr(E->getArg(0));
    // FIXME: Technically these constants should of type 'int', yes?
    RW = (E->getNumArgs() > 1) ? EmitScalarExpr(E->getArg(1)) :
      llvm::ConstantInt::get(Int32Ty, 0);
    Locality = (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) :
      llvm::ConstantInt::get(Int32Ty, 3);
    Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
    Value *F = CGM.getIntrinsic(Intrinsic::prefetch);
    return RValue::get(Builder.CreateCall(F, {Address, RW, Locality, Data}));
  }
  case Builtin::BI__builtin_readcyclecounter: {
    Value *F = CGM.getIntrinsic(Intrinsic::readcyclecounter);
    return RValue::get(Builder.CreateCall(F));
  }
  case Builtin::BI__builtin___clear_cache: {
    Value *Begin = EmitScalarExpr(E->getArg(0));
    Value *End = EmitScalarExpr(E->getArg(1));
    Value *F = CGM.getIntrinsic(Intrinsic::clear_cache);
    return RValue::get(Builder.CreateCall(F, {Begin, End}));
  }
  case Builtin::BI__builtin_trap:
    return RValue::get(EmitTrapCall(Intrinsic::trap));
  case Builtin::BI__debugbreak:
    return RValue::get(EmitTrapCall(Intrinsic::debugtrap));
  case Builtin::BI__builtin_unreachable: {
    if (SanOpts.has(SanitizerKind::Unreachable)) {
      SanitizerScope SanScope(this);
      EmitCheck(std::make_pair(static_cast<llvm::Value *>(Builder.getFalse()),
                               SanitizerKind::Unreachable),
                "builtin_unreachable", EmitCheckSourceLocation(E->getExprLoc()),
                None);
    } else
      Builder.CreateUnreachable();

    // We do need to preserve an insertion point.
    EmitBlock(createBasicBlock("unreachable.cont"));

    return RValue::get(nullptr);
  }
  case Builtin::BI__builtin_powi:
  case Builtin::BI__builtin_powif:
  case Builtin::BI__builtin_powil: {
    Value *Base = EmitScalarExpr(E->getArg(0));
    Value *Exponent = EmitScalarExpr(E->getArg(1));
    llvm::Type *ArgType = Base->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::powi, ArgType);
    return RValue::get(Builder.CreateCall(F, {Base, Exponent}));
  }
  case Builtin::BI__builtin_isgreater:
  case Builtin::BI__builtin_isgreaterequal:
  case Builtin::BI__builtin_isless:
  case Builtin::BI__builtin_islessequal:
  case Builtin::BI__builtin_islessgreater:
  case Builtin::BI__builtin_isunordered: {
    // Ordered comparisons: we know the arguments to these are matching scalar
    // floating point values.
    Value *LHS = EmitScalarExpr(E->getArg(0));
    Value *RHS = EmitScalarExpr(E->getArg(1));

    switch (BuiltinID) {
    default: llvm_unreachable("Unknown ordered comparison");
    case Builtin::BI__builtin_isgreater:
      LHS = Builder.CreateFCmpOGT(LHS, RHS, "cmp");
      break;
    case Builtin::BI__builtin_isgreaterequal:
      LHS = Builder.CreateFCmpOGE(LHS, RHS, "cmp");
      break;
    case Builtin::BI__builtin_isless:
      LHS = Builder.CreateFCmpOLT(LHS, RHS, "cmp");
      break;
    case Builtin::BI__builtin_islessequal:
      LHS = Builder.CreateFCmpOLE(LHS, RHS, "cmp");
      break;
    case Builtin::BI__builtin_islessgreater:
      LHS = Builder.CreateFCmpONE(LHS, RHS, "cmp");
      break;
    case Builtin::BI__builtin_isunordered:
      LHS = Builder.CreateFCmpUNO(LHS, RHS, "cmp");
      break;
    }
    // ZExt bool to int type.
    return RValue::get(Builder.CreateZExt(LHS, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_isnan: {
    Value *V = EmitScalarExpr(E->getArg(0));
    V = Builder.CreateFCmpUNO(V, V, "cmp");
    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_isinf: {
    // isinf(x) --> fabs(x) == infinity
    Value *V = EmitScalarExpr(E->getArg(0));
    V = EmitFAbs(*this, V);

    V = Builder.CreateFCmpOEQ(V, ConstantFP::getInfinity(V->getType()),
                              "isinf");
    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_isinf_sign: {
    // isinf_sign(x) -> fabs(x) == infinity ? (signbit(x) ? -1 : 1) : 0
    Value *Arg = EmitScalarExpr(E->getArg(0));
    Value *AbsArg = EmitFAbs(*this, Arg);
    Value *IsInf = Builder.CreateFCmpOEQ(
        AbsArg, ConstantFP::getInfinity(Arg->getType()), "isinf");
    Value *IsNeg = EmitSignBit(*this, Arg);

    llvm::Type *IntTy = ConvertType(E->getType());
    Value *Zero = Constant::getNullValue(IntTy);
    Value *One = ConstantInt::get(IntTy, 1);
    Value *NegativeOne = ConstantInt::get(IntTy, -1);
    Value *SignResult = Builder.CreateSelect(IsNeg, NegativeOne, One);
    Value *Result = Builder.CreateSelect(IsInf, SignResult, Zero);
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_isnormal: {
    // isnormal(x) --> x == x && fabs(x) < infinity && fabs(x) >= float_min
    Value *V = EmitScalarExpr(E->getArg(0));
    Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");

    Value *Abs = EmitFAbs(*this, V);
    Value *IsLessThanInf =
        Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(V->getType()),
                              "isinf");
    APFloat Smallest = APFloat::getSmallestNormalized(
        getContext().getFloatTypeSemantics(E->getArg(0)->getType()));
    Value *IsNormal =
        Builder.CreateFCmpUGE(Abs, ConstantFP::get(V->getContext(), Smallest),
                              "isnormal");
    V = Builder.CreateAnd(Eq, IsLessThanInf, "and");
    V = Builder.CreateAnd(V, IsNormal, "and");
    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_isfinite: {
    // isfinite(x) --> x == x && fabs(x) != infinity
    Value *V = EmitScalarExpr(E->getArg(0));
    Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");

    Value *Abs = EmitFAbs(*this, V);
    Value *IsNotInf =
        Builder.CreateFCmpUNE(Abs, ConstantFP::getInfinity(V->getType()),
                              "isinf");

    V = Builder.CreateAnd(Eq, IsNotInf, "and");
    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_fpclassify: {
    Value *V = EmitScalarExpr(E->getArg(5));
    llvm::Type *Ty = ConvertType(E->getArg(5)->getType());

    // Create the result PHI up front.
    BasicBlock *Begin = Builder.GetInsertBlock();
    BasicBlock *End = createBasicBlock("fpclassify_end", this->CurFn);
    Builder.SetInsertPoint(End);
    PHINode *Result =
        Builder.CreatePHI(ConvertType(E->getArg(0)->getType()), 4,
                          "fpclassify_result");

    // if (V == 0) return FP_ZERO
    Builder.SetInsertPoint(Begin);
    Value *IsZero = Builder.CreateFCmpOEQ(V, Constant::getNullValue(Ty),
                                          "iszero");
    Value *ZeroLiteral = EmitScalarExpr(E->getArg(4));
    BasicBlock *NotZero = createBasicBlock("fpclassify_not_zero", this->CurFn);
    Builder.CreateCondBr(IsZero, End, NotZero);
    Result->addIncoming(ZeroLiteral, Begin);

    // if (V != V) return FP_NAN
    Builder.SetInsertPoint(NotZero);
    Value *IsNan = Builder.CreateFCmpUNO(V, V, "cmp");
    Value *NanLiteral = EmitScalarExpr(E->getArg(0));
    BasicBlock *NotNan = createBasicBlock("fpclassify_not_nan", this->CurFn);
    Builder.CreateCondBr(IsNan, End, NotNan);
    Result->addIncoming(NanLiteral, NotZero);

    // if (fabs(V) == infinity) return FP_INFINITY
    Builder.SetInsertPoint(NotNan);
    Value *VAbs = EmitFAbs(*this, V);
    Value *IsInf =
        Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(V->getType()),
                              "isinf");
    Value *InfLiteral = EmitScalarExpr(E->getArg(1));
    BasicBlock *NotInf = createBasicBlock("fpclassify_not_inf", this->CurFn);
    Builder.CreateCondBr(IsInf, End, NotInf);
    Result->addIncoming(InfLiteral, NotNan);

    // if (fabs(V) >= MIN_NORMAL) return FP_NORMAL else FP_SUBNORMAL
    Builder.SetInsertPoint(NotInf);
    APFloat Smallest = APFloat::getSmallestNormalized(
        getContext().getFloatTypeSemantics(E->getArg(5)->getType()));
    Value *IsNormal =
        Builder.CreateFCmpUGE(VAbs, ConstantFP::get(V->getContext(), Smallest),
                              "isnormal");
    Value *NormalResult =
        Builder.CreateSelect(IsNormal, EmitScalarExpr(E->getArg(2)),
                             EmitScalarExpr(E->getArg(3)));
    Builder.CreateBr(End);
    Result->addIncoming(NormalResult, NotInf);

    // return Result
    Builder.SetInsertPoint(End);
    return RValue::get(Result);
  }
  case Builtin::BIalloca:
  case Builtin::BI_alloca:
  case Builtin::BI__builtin_alloca: {
    Value *Size = EmitScalarExpr(E->getArg(0));
    return RValue::get(Builder.CreateAlloca(Builder.getInt8Ty(), Size));
  }
  case Builtin::BIbzero:
  case Builtin::BI__builtin_bzero: {
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Value *SizeVal = EmitScalarExpr(E->getArg(1));
    EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
                        E->getArg(0)->getExprLoc(), FD, 0);
    Builder.CreateMemSet(Dest, Builder.getInt8(0), SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BImemcpy:
  case Builtin::BI__builtin_memcpy: {
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Address Src = EmitPointerWithAlignment(E->getArg(1));
    Value *SizeVal = EmitScalarExpr(E->getArg(2));
    EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
                        E->getArg(0)->getExprLoc(), FD, 0);
    EmitNonNullArgCheck(RValue::get(Src.getPointer()), E->getArg(1)->getType(),
                        E->getArg(1)->getExprLoc(), FD, 1);
    Builder.CreateMemCpy(Dest, Src, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BI__builtin___memcpy_chk: {
    // fold __builtin_memcpy_chk(x, y, cst1, cst2) to memcpy iff cst1<=cst2.
    llvm::APSInt Size, DstSize;
    if (!E->getArg(2)->EvaluateAsInt(Size, getContext()) ||
        !E->getArg(3)->EvaluateAsInt(DstSize, getContext()))
      break;
    if (Size.ugt(DstSize))
      break;
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Address Src = EmitPointerWithAlignment(E->getArg(1));
    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    Builder.CreateMemCpy(Dest, Src, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BI__builtin_objc_memmove_collectable: {
    Address DestAddr = EmitPointerWithAlignment(E->getArg(0));
    Address SrcAddr = EmitPointerWithAlignment(E->getArg(1));
    Value *SizeVal = EmitScalarExpr(E->getArg(2));
    CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this,
                                                  DestAddr, SrcAddr, SizeVal);
    return RValue::get(DestAddr.getPointer());
  }
  case Builtin::BI__builtin___memmove_chk: {
    // fold __builtin_memmove_chk(x, y, cst1, cst2) to memmove iff cst1<=cst2.
    llvm::APSInt Size, DstSize;
    if (!E->getArg(2)->EvaluateAsInt(Size, getContext()) ||
        !E->getArg(3)->EvaluateAsInt(DstSize, getContext()))
      break;
    if (Size.ugt(DstSize))
      break;
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Address Src = EmitPointerWithAlignment(E->getArg(1));
    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    Builder.CreateMemMove(Dest, Src, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BImemmove:
  case Builtin::BI__builtin_memmove: {
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Address Src = EmitPointerWithAlignment(E->getArg(1));
    Value *SizeVal = EmitScalarExpr(E->getArg(2));
    EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
                        E->getArg(0)->getExprLoc(), FD, 0);
    EmitNonNullArgCheck(RValue::get(Src.getPointer()), E->getArg(1)->getType(),
                        E->getArg(1)->getExprLoc(), FD, 1);
    Builder.CreateMemMove(Dest, Src, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BImemset:
  case Builtin::BI__builtin_memset: {
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
                                         Builder.getInt8Ty());
    Value *SizeVal = EmitScalarExpr(E->getArg(2));
    EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
                        E->getArg(0)->getExprLoc(), FD, 0);
    Builder.CreateMemSet(Dest, ByteVal, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BI__builtin___memset_chk: {
    // fold __builtin_memset_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
    llvm::APSInt Size, DstSize;
    if (!E->getArg(2)->EvaluateAsInt(Size, getContext()) ||
        !E->getArg(3)->EvaluateAsInt(DstSize, getContext()))
      break;
    if (Size.ugt(DstSize))
      break;
    Address Dest = EmitPointerWithAlignment(E->getArg(0));
    Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
                                         Builder.getInt8Ty());
    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    Builder.CreateMemSet(Dest, ByteVal, SizeVal, false);
    return RValue::get(Dest.getPointer());
  }
  case Builtin::BI__builtin_dwarf_cfa: {
    // The offset in bytes from the first argument to the CFA.
    int32_t Offset = 0;

    Value *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
    return RValue::get(Builder.CreateCall(F,
                                      llvm::ConstantInt::get(Int32Ty, Offset)));
  }
  case Builtin::BI__builtin_return_address: {
    Value *Depth =
        CGM.EmitConstantExpr(E->getArg(0), getContext().UnsignedIntTy, this);
    Value *F = CGM.getIntrinsic(Intrinsic::returnaddress);
    return RValue::get(Builder.CreateCall(F, Depth));
  }
  case Builtin::BI__builtin_frame_address: {
    Value *Depth =
        CGM.EmitConstantExpr(E->getArg(0), getContext().UnsignedIntTy, this);
    Value *F = CGM.getIntrinsic(Intrinsic::frameaddress);
    return RValue::get(Builder.CreateCall(F, Depth));
  }
  case Builtin::BI__builtin_extract_return_addr: {
    Value *Address = EmitScalarExpr(E->getArg(0));
    Value *Result = getTargetHooks().decodeReturnAddress(*this, Address);
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_frob_return_addr: {
    Value *Address = EmitScalarExpr(E->getArg(0));
    Value *Result = getTargetHooks().encodeReturnAddress(*this, Address);
    return RValue::get(Result);
  }
  case Builtin::BI__builtin_dwarf_sp_column: {
    llvm::IntegerType *Ty =
        cast<llvm::IntegerType>(ConvertType(E->getType()));
    int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
    if (Column == -1) {
      CGM.ErrorUnsupported(E, "__builtin_dwarf_sp_column");
      return RValue::get(llvm::UndefValue::get(Ty));
    }
    return RValue::get(llvm::ConstantInt::get(Ty, Column, true));
  }
  case Builtin::BI__builtin_init_dwarf_reg_size_table: {
    Value *Address = EmitScalarExpr(E->getArg(0));
    if (getTargetHooks().initDwarfEHRegSizeTable(*this, Address))
      CGM.ErrorUnsupported(E, "__builtin_init_dwarf_reg_size_table");
    return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_eh_return: {
    Value *Int = EmitScalarExpr(E->getArg(0));
    Value *Ptr = EmitScalarExpr(E->getArg(1));

    llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
    assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
           "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
    Value *F = CGM.getIntrinsic(IntTy->getBitWidth() == 32
                                    ? Intrinsic::eh_return_i32
                                    : Intrinsic::eh_return_i64);
    Builder.CreateCall(F, {Int, Ptr});
    Builder.CreateUnreachable();

    // We do need to preserve an insertion point.
    EmitBlock(createBasicBlock("builtin_eh_return.cont"));

    return RValue::get(nullptr);
  }
  case Builtin::BI__builtin_unwind_init: {
    Value *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
    return RValue::get(Builder.CreateCall(F));
  }
  case Builtin::BI__builtin_extend_pointer: {
    // Extends a pointer to the size of an intptr_t.
    Value *Ptr = EmitScalarExpr(E->getArg(0));
    Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy, "extend.cast");

    // If that's 64 bits, we're done.
    if (IntPtrTy->getBitWidth() == 64)
      return RValue::get(Result);

    // Otherwise, ask the codegen data what to do.
    if (getTargetHooks().extendPointerWithSExt())
      return RValue::get(Builder.CreateSExt(Result, Int64Ty, "extend.sext"));
    else
      return RValue::get(Builder.CreateZExt(Result, Int64Ty, "extend.zext"));
  }
  case Builtin::BI__builtin_setjmp: {
    // Buffer is a void**.
    Address Buf = EmitPointerWithAlignment(E->getArg(0));

    // Store the frame pointer to the setjmp buffer.
    Value *FrameAddr =
        Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
                           ConstantInt::get(Int32Ty, 0));
    Builder.CreateStore(FrameAddr, Buf);

    // Store the stack pointer to the setjmp buffer.
    Value *StackAddr =
        Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
    Address StackSaveSlot =
        Builder.CreateConstInBoundsGEP(Buf, 2, getPointerSize());
    Builder.CreateStore(StackAddr, StackSaveSlot);

    // Call LLVM's EH setjmp, which is lightweight.
    Value *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
    Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
    return RValue::get(Builder.CreateCall(F, Buf.getPointer()));
  }
  case Builtin::BI__builtin_longjmp: {
    Value *Buf = EmitScalarExpr(E->getArg(0));
    Buf = Builder.CreateBitCast(Buf, Int8PtrTy);

    // Call LLVM's EH longjmp, which is lightweight.
    Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);

    // longjmp doesn't return; mark this as unreachable.
    Builder.CreateUnreachable();

    // We do need to preserve an insertion point.
    EmitBlock(createBasicBlock("longjmp.cont"));

    return RValue::get(nullptr);
  }
  case Builtin::BI__sync_fetch_and_add:
  case Builtin::BI__sync_fetch_and_sub:
  case Builtin::BI__sync_fetch_and_or:
  case Builtin::BI__sync_fetch_and_and:
  case Builtin::BI__sync_fetch_and_xor:
  case Builtin::BI__sync_fetch_and_nand:
  case Builtin::BI__sync_add_and_fetch:
  case Builtin::BI__sync_sub_and_fetch:
  case Builtin::BI__sync_and_and_fetch:
  case Builtin::BI__sync_or_and_fetch:
  case Builtin::BI__sync_xor_and_fetch:
  case Builtin::BI__sync_nand_and_fetch:
  case Builtin::BI__sync_val_compare_and_swap:
  case Builtin::BI__sync_bool_compare_and_swap:
  case Builtin::BI__sync_lock_test_and_set:
  case Builtin::BI__sync_lock_release:
  case Builtin::BI__sync_swap:
    llvm_unreachable("Shouldn't make it through sema");
  case Builtin::BI__sync_fetch_and_add_1:
  case Builtin::BI__sync_fetch_and_add_2:
  case Builtin::BI__sync_fetch_and_add_4:
  case Builtin::BI__sync_fetch_and_add_8:
  case Builtin::BI__sync_fetch_and_add_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Add, E);
  case Builtin::BI__sync_fetch_and_sub_1:
  case Builtin::BI__sync_fetch_and_sub_2:
  case Builtin::BI__sync_fetch_and_sub_4:
  case Builtin::BI__sync_fetch_and_sub_8:
  case Builtin::BI__sync_fetch_and_sub_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Sub, E);
  case Builtin::BI__sync_fetch_and_or_1:
  case Builtin::BI__sync_fetch_and_or_2:
  case Builtin::BI__sync_fetch_and_or_4:
  case Builtin::BI__sync_fetch_and_or_8:
  case Builtin::BI__sync_fetch_and_or_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Or, E);
  case Builtin::BI__sync_fetch_and_and_1:
  case Builtin::BI__sync_fetch_and_and_2:
  case Builtin::BI__sync_fetch_and_and_4:
  case Builtin::BI__sync_fetch_and_and_8:
  case Builtin::BI__sync_fetch_and_and_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::And, E);
  case Builtin::BI__sync_fetch_and_xor_1:
  case Builtin::BI__sync_fetch_and_xor_2:
  case Builtin::BI__sync_fetch_and_xor_4:
  case Builtin::BI__sync_fetch_and_xor_8:
  case Builtin::BI__sync_fetch_and_xor_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xor, E);
  case Builtin::BI__sync_fetch_and_nand_1:
  case Builtin::BI__sync_fetch_and_nand_2:
  case Builtin::BI__sync_fetch_and_nand_4:
  case Builtin::BI__sync_fetch_and_nand_8:
  case Builtin::BI__sync_fetch_and_nand_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Nand, E);

  case Builtin::BI__sync_fetch_and_min:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Min, E);
  case Builtin::BI__sync_fetch_and_max:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Max, E);
  case Builtin::BI__sync_fetch_and_umin:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMin, E);
  case Builtin::BI__sync_fetch_and_umax:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMax, E);

  case Builtin::BI__sync_add_and_fetch_1:
  case Builtin::BI__sync_add_and_fetch_2:
  case Builtin::BI__sync_add_and_fetch_4:
  case Builtin::BI__sync_add_and_fetch_8:
  case Builtin::BI__sync_add_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Add, E,
                                llvm::Instruction::Add);
  case Builtin::BI__sync_sub_and_fetch_1:
  case Builtin::BI__sync_sub_and_fetch_2:
  case Builtin::BI__sync_sub_and_fetch_4:
  case Builtin::BI__sync_sub_and_fetch_8:
  case Builtin::BI__sync_sub_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Sub, E,
                                llvm::Instruction::Sub);
  case Builtin::BI__sync_and_and_fetch_1:
  case Builtin::BI__sync_and_and_fetch_2:
  case Builtin::BI__sync_and_and_fetch_4:
  case Builtin::BI__sync_and_and_fetch_8:
  case Builtin::BI__sync_and_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::And, E,
                                llvm::Instruction::And);
  case Builtin::BI__sync_or_and_fetch_1:
  case Builtin::BI__sync_or_and_fetch_2:
  case Builtin::BI__sync_or_and_fetch_4:
  case Builtin::BI__sync_or_and_fetch_8:
  case Builtin::BI__sync_or_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Or, E,
                                llvm::Instruction::Or);
  case Builtin::BI__sync_xor_and_fetch_1:
  case Builtin::BI__sync_xor_and_fetch_2:
  case Builtin::BI__sync_xor_and_fetch_4:
  case Builtin::BI__sync_xor_and_fetch_8:
  case Builtin::BI__sync_xor_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Xor, E,
                                llvm::Instruction::Xor);
  case Builtin::BI__sync_nand_and_fetch_1:
  case Builtin::BI__sync_nand_and_fetch_2:
  case Builtin::BI__sync_nand_and_fetch_4:
  case Builtin::BI__sync_nand_and_fetch_8:
  case Builtin::BI__sync_nand_and_fetch_16:
    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Nand, E,
                                llvm::Instruction::And, /*Invert=*/true);

  case Builtin::BI__sync_val_compare_and_swap_1:
  case Builtin::BI__sync_val_compare_and_swap_2:
  case Builtin::BI__sync_val_compare_and_swap_4:
  case Builtin::BI__sync_val_compare_and_swap_8:
  case Builtin::BI__sync_val_compare_and_swap_16:
    return RValue::get(MakeAtomicCmpXchgValue(*this, E, false));

  case Builtin::BI__sync_bool_compare_and_swap_1:
  case Builtin::BI__sync_bool_compare_and_swap_2:
  case Builtin::BI__sync_bool_compare_and_swap_4:
  case Builtin::BI__sync_bool_compare_and_swap_8:
  case Builtin::BI__sync_bool_compare_and_swap_16:
    return RValue::get(MakeAtomicCmpXchgValue(*this, E, true));

  case Builtin::BI__sync_swap_1:
  case Builtin::BI__sync_swap_2:
  case Builtin::BI__sync_swap_4:
  case Builtin::BI__sync_swap_8:
  case Builtin::BI__sync_swap_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);

  case Builtin::BI__sync_lock_test_and_set_1:
  case Builtin::BI__sync_lock_test_and_set_2:
  case Builtin::BI__sync_lock_test_and_set_4:
  case Builtin::BI__sync_lock_test_and_set_8:
  case Builtin::BI__sync_lock_test_and_set_16:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);

  case Builtin::BI__sync_lock_release_1:
  case Builtin::BI__sync_lock_release_2:
  case Builtin::BI__sync_lock_release_4:
  case Builtin::BI__sync_lock_release_8:
  case Builtin::BI__sync_lock_release_16: {
    Value *Ptr = EmitScalarExpr(E->getArg(0));
    QualType ElTy = E->getArg(0)->getType()->getPointeeType();
    CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
    llvm::Type *ITy = llvm::IntegerType::get(getLLVMContext(),
                                             StoreSize.getQuantity() * 8);
    Ptr = Builder.CreateBitCast(Ptr, ITy->getPointerTo());
    llvm::StoreInst *Store =
        Builder.CreateAlignedStore(llvm::Constant::getNullValue(ITy), Ptr,
                                   StoreSize);
    Store->setAtomic(llvm::Release);
    return RValue::get(nullptr);
  }
  case Builtin::BI__sync_synchronize: {
    // Assume this corresponds to a C++11-style sequentially consistent fence:
    // it is only usable for synchronization, not device I/O.
    Builder.CreateFence(llvm::SequentiallyConsistent);
    return RValue::get(nullptr);
  }
  case Builtin::BI__builtin_nontemporal_load:
    return RValue::get(EmitNontemporalLoad(*this, E));
  case Builtin::BI__builtin_nontemporal_store:
    return RValue::get(EmitNontemporalStore(*this, E));
  case Builtin::BI__c11_atomic_is_lock_free:
  case Builtin::BI__atomic_is_lock_free: {
    // Call "bool __atomic_is_lock_free(size_t size, void *ptr)". For the
    // __c11 form there is no pointer argument, so a null pointer is passed.
    const char *LibCallName = "__atomic_is_lock_free";
    CallArgList Args;
    Args.add(RValue::get(EmitScalarExpr(E->getArg(0))),
             getContext().getSizeType());
    if (BuiltinID == Builtin::BI__atomic_is_lock_free)
      Args.add(RValue::get(EmitScalarExpr(E->getArg(1))),
               getContext().VoidPtrTy);
    else
      Args.add(RValue::get(llvm::Constant::getNullValue(VoidPtrTy)),
               getContext().VoidPtrTy);
    const CGFunctionInfo &FuncInfo =
        CGM.getTypes().arrangeFreeFunctionCall(E->getType(), Args,
                                               FunctionType::ExtInfo(),
                                               RequiredArgs::All);
    llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FuncInfo);
    llvm::Constant *Func = CGM.CreateRuntimeFunction(FTy, LibCallName);
    return EmitCall(FuncInfo, Func, ReturnValue, Args);
  }
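
  // __atomic_test_and_set is lowered to an exchange of the constant 1. When
  // the memory order is not a compile-time constant, a switch over the five
  // permitted orderings selects the atomicrmw to execute.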
  case Builtin::BI__atomic_test_and_set: {
    // Look at the argument type to determine whether this is a volatile
    // operation. The parameter type is always volatile.
    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
    bool Volatile =
        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();

    Value *Ptr = EmitScalarExpr(E->getArg(0));
    unsigned AddrSpace = Ptr->getType()->getPointerAddressSpace();
    Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
    Value *NewVal = Builder.getInt8(1);
    Value *Order = EmitScalarExpr(E->getArg(1));
    if (isa<llvm::ConstantInt>(Order)) {
      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
      AtomicRMWInst *Result = nullptr;
      switch (ord) {
      case 0:  // memory_order_relaxed
      default: // invalid order
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr,
                                         NewVal, llvm::Monotonic);
        break;
      case 1: // memory_order_consume
      case 2: // memory_order_acquire
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr,
                                         NewVal, llvm::Acquire);
        break;
      case 3: // memory_order_release
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr,
                                         NewVal, llvm::Release);
        break;
      case 4: // memory_order_acq_rel
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr,
                                         NewVal, llvm::AcquireRelease);
        break;
      case 5: // memory_order_seq_cst
        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr,
                                         NewVal, llvm::SequentiallyConsistent);
        break;
      }
      Result->setVolatile(Volatile);
      return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
    }

    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);

    llvm::BasicBlock *BBs[5] = {
      createBasicBlock("monotonic", CurFn),
      createBasicBlock("acquire", CurFn),
      createBasicBlock("release", CurFn),
      createBasicBlock("acqrel", CurFn),
      createBasicBlock("seqcst", CurFn)
    };
    llvm::AtomicOrdering Orders[5] = {
      llvm::Monotonic, llvm::Acquire, llvm::Release,
      llvm::AcquireRelease, llvm::SequentiallyConsistent
    };

    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);

    Builder.SetInsertPoint(ContBB);
    PHINode *Result = Builder.CreatePHI(Int8Ty, 5, "was_set");

    for (unsigned i = 0; i < 5; ++i) {
      Builder.SetInsertPoint(BBs[i]);
      AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
                                                   Ptr, NewVal, Orders[i]);
      RMW->setVolatile(Volatile);
      Result->addIncoming(RMW, BBs[i]);
      Builder.CreateBr(ContBB);
    }

    SI->addCase(Builder.getInt32(0), BBs[0]);
    SI->addCase(Builder.getInt32(1), BBs[1]);
    SI->addCase(Builder.getInt32(2), BBs[1]);
    SI->addCase(Builder.getInt32(3), BBs[2]);
    SI->addCase(Builder.getInt32(4), BBs[3]);
    SI->addCase(Builder.getInt32(5), BBs[4]);

    Builder.SetInsertPoint(ContBB);
    return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
  }
  case Builtin::BI__atomic_clear: {
    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
    bool Volatile =
        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();

    Address Ptr = EmitPointerWithAlignment(E->getArg(0));
    unsigned AddrSpace = Ptr.getPointer()->getType()->getPointerAddressSpace();
    Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
    Value *NewVal = Builder.getInt8(0);
    Value *Order = EmitScalarExpr(E->getArg(1));
    if (isa<llvm::ConstantInt>(Order)) {
      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
      switch (ord) {
      case 0:  // memory_order_relaxed
      default: // invalid order
        Store->setOrdering(llvm::Monotonic);
        break;
      case 3: // memory_order_release
        Store->setOrdering(llvm::Release);
        break;
      case 5: // memory_order_seq_cst
        Store->setOrdering(llvm::SequentiallyConsistent);
        break;
      }
      return RValue::get(nullptr);
    }

    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);

    llvm::BasicBlock *BBs[3] = {
      createBasicBlock("monotonic", CurFn),
      createBasicBlock("release", CurFn),
      createBasicBlock("seqcst", CurFn)
    };
    llvm::AtomicOrdering Orders[3] = {
      llvm::Monotonic, llvm::Release, llvm::SequentiallyConsistent
    };

    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);

    for (unsigned i = 0; i < 3; ++i) {
      Builder.SetInsertPoint(BBs[i]);
      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
      Store->setOrdering(Orders[i]);
      Builder.CreateBr(ContBB);
    }

    SI->addCase(Builder.getInt32(0), BBs[0]);
    SI->addCase(Builder.getInt32(3), BBs[1]);
    SI->addCase(Builder.getInt32(5), BBs[2]);

    Builder.SetInsertPoint(ContBB);
    return RValue::get(nullptr);
  }
  case Builtin::BI__atomic_thread_fence:
  case Builtin::BI__atomic_signal_fence:
  case Builtin::BI__c11_atomic_thread_fence:
  case Builtin::BI__c11_atomic_signal_fence: {
    llvm::SynchronizationScope Scope;
    if (BuiltinID == Builtin::BI__atomic_signal_fence ||
        BuiltinID == Builtin::BI__c11_atomic_signal_fence)
      Scope = llvm::SingleThread;
    else
      Scope = llvm::CrossThread;
    Value *Order = EmitScalarExpr(E->getArg(0));
    if (isa<llvm::ConstantInt>(Order)) {
      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
      switch (ord) {
      case 0:  // memory_order_relaxed
      default: // invalid order
        break;
      case 1: // memory_order_consume
      case 2: // memory_order_acquire
        Builder.CreateFence(llvm::Acquire, Scope);
        break;
      case 3: // memory_order_release
        Builder.CreateFence(llvm::Release, Scope);
        break;
      case 4: // memory_order_acq_rel
        Builder.CreateFence(llvm::AcquireRelease, Scope);
        break;
      case 5: // memory_order_seq_cst
        Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
        break;
      }
      return RValue::get(nullptr);
    }

    llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
    AcquireBB = createBasicBlock("acquire", CurFn);
    ReleaseBB = createBasicBlock("release", CurFn);
    AcqRelBB = createBasicBlock("acqrel", CurFn);
    SeqCstBB = createBasicBlock("seqcst", CurFn);
    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);

    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);

    Builder.SetInsertPoint(AcquireBB);
    Builder.CreateFence(llvm::Acquire, Scope);
    Builder.CreateBr(ContBB);
    SI->addCase(Builder.getInt32(1), AcquireBB);
    SI->addCase(Builder.getInt32(2), AcquireBB);

    Builder.SetInsertPoint(ReleaseBB);
    Builder.CreateFence(llvm::Release, Scope);
    Builder.CreateBr(ContBB);
    SI->addCase(Builder.getInt32(3), ReleaseBB);

    Builder.SetInsertPoint(AcqRelBB);
    Builder.CreateFence(llvm::AcquireRelease, Scope);
    Builder.CreateBr(ContBB);
    SI->addCase(Builder.getInt32(4), AcqRelBB);

    Builder.SetInsertPoint(SeqCstBB);
    Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
    Builder.CreateBr(ContBB);
    SI->addCase(Builder.getInt32(5), SeqCstBB);

    Builder.SetInsertPoint(ContBB);
    return RValue::get(nullptr);
  }
  case Builtin::BIsqrt:
  case Builtin::BIsqrtf:
  case Builtin::BIsqrtl: {
    // Transform a call to sqrt* into a @llvm.sqrt.* intrinsic call, but only
    // when errno is known to be disabled: the intrinsic handles negative
    // inputs differently from the library function.
    if (!FD->hasAttr<ConstAttr>())
      break;
    if (!(CGM.getCodeGenOpts().UnsafeFPMath ||
          CGM.getCodeGenOpts().NoNaNsFPMath))
      break;
    Value *Arg0 = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgType = Arg0->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::sqrt, ArgType);
    return RValue::get(Builder.CreateCall(F, Arg0));
  }
  case Builtin::BI__builtin_pow:
  case Builtin::BI__builtin_powf:
  case Builtin::BI__builtin_powl:
  case Builtin::BIpow:
  case Builtin::BIpowf:
  case Builtin::BIpowl: {
    // Transform a call to pow* into a @llvm.pow.* intrinsic call.
    if (!FD->hasAttr<ConstAttr>())
      break;
    Value *Base = EmitScalarExpr(E->getArg(0));
    Value *Exponent = EmitScalarExpr(E->getArg(1));
    llvm::Type *ArgType = Base->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::pow, ArgType);
    return RValue::get(Builder.CreateCall(F, {Base, Exponent}));
  }
  case Builtin::BIfma:
  case Builtin::BIfmaf:
  case Builtin::BIfmal:
  case Builtin::BI__builtin_fma:
  case Builtin::BI__builtin_fmaf:
  case Builtin::BI__builtin_fmal: {
    // Rewrite fma to the @llvm.fma intrinsic.
    Value *FirstArg = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgType = FirstArg->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::fma, ArgType);
    return RValue::get(
        Builder.CreateCall(F, {FirstArg, EmitScalarExpr(E->getArg(1)),
                               EmitScalarExpr(E->getArg(2))}));
  }
  case Builtin::BI__builtin_signbit:
  case Builtin::BI__builtin_signbitf:
  case Builtin::BI__builtin_signbitl: {
    return RValue::get(
        Builder.CreateZExt(EmitSignBit(*this, EmitScalarExpr(E->getArg(0))),
                           ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_annotation: {
    llvm::Value *AnnVal = EmitScalarExpr(E->getArg(0));
    llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
                                      AnnVal->getType());

    // Get the annotation string, go through casts. Sema requires this to be a
    // non-wide string literal, potentially casted, so the cast<> is safe.
    const Expr *AnnotationStrExpr = E->getArg(1)->IgnoreParenCasts();
    StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
    return RValue::get(EmitAnnotationCall(F, AnnVal, Str, E->getExprLoc()));
  }
  case Builtin::BI__builtin_addcb:
  case Builtin::BI__builtin_addcs:
  case Builtin::BI__builtin_addc:
  case Builtin::BI__builtin_addcl:
  case Builtin::BI__builtin_addcll:
  case Builtin::BI__builtin_subcb:
  case Builtin::BI__builtin_subcs:
  case Builtin::BI__builtin_subc:
  case Builtin::BI__builtin_subcl:
  case Builtin::BI__builtin_subcll: {
    // These lower to two chained *.with.overflow intrinsics: add/sub x and y,
    // then add/sub the incoming carry, and OR the two carry-outs.

    // Scalarize our inputs.
    llvm::Value *X = EmitScalarExpr(E->getArg(0));
    llvm::Value *Y = EmitScalarExpr(E->getArg(1));
    llvm::Value *Carryin = EmitScalarExpr(E->getArg(2));
    Address CarryOutPtr = EmitPointerWithAlignment(E->getArg(3));

    // Decide if we are lowering to a uadd.with.overflow or usub.with.overflow.
    llvm::Intrinsic::ID IntrinsicId;
    switch (BuiltinID) {
    default: llvm_unreachable("Unknown multiprecision builtin id.");
    case Builtin::BI__builtin_addcb:
    case Builtin::BI__builtin_addcs:
    case Builtin::BI__builtin_addc:
    case Builtin::BI__builtin_addcl:
    case Builtin::BI__builtin_addcll:
      IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
      break;
    case Builtin::BI__builtin_subcb:
    case Builtin::BI__builtin_subcs:
    case Builtin::BI__builtin_subc:
    case Builtin::BI__builtin_subcl:
    case Builtin::BI__builtin_subcll:
      IntrinsicId = llvm::Intrinsic::usub_with_overflow;
      break;
    }

    // Construct our resulting LLVM IR expression.
    llvm::Value *Carry1;
    llvm::Value *Sum1 = EmitOverflowIntrinsic(*this, IntrinsicId, X, Y, Carry1);
    llvm::Value *Carry2;
    llvm::Value *Sum2 = EmitOverflowIntrinsic(*this, IntrinsicId,
                                              Sum1, Carryin, Carry2);
    llvm::Value *CarryOut = Builder.CreateZExt(Builder.CreateOr(Carry1, Carry2),
                                               X->getType());
    Builder.CreateStore(CarryOut, CarryOutPtr);
    return RValue::get(Sum2);
  }
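
  // Unlike the fixed-type variants further below, __builtin_*_overflow accept
  // mixed operand types: both operands are widened to a type encompassing the
  // operands and the result, the checked operation is performed there, and
  // the value is truncated back with an additional overflow check.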
  case Builtin::BI__builtin_add_overflow:
  case Builtin::BI__builtin_sub_overflow:
  case Builtin::BI__builtin_mul_overflow: {
    const clang::Expr *LeftArg = E->getArg(0);
    const clang::Expr *RightArg = E->getArg(1);
    const clang::Expr *ResultArg = E->getArg(2);

    clang::QualType ResultQTy =
        ResultArg->getType()->castAs<PointerType>()->getPointeeType();

    WidthAndSignedness LeftInfo =
        getIntegerWidthAndSignedness(CGM.getContext(), LeftArg->getType());
    WidthAndSignedness RightInfo =
        getIntegerWidthAndSignedness(CGM.getContext(), RightArg->getType());
    WidthAndSignedness ResultInfo =
        getIntegerWidthAndSignedness(CGM.getContext(), ResultQTy);
    WidthAndSignedness EncompassingInfo =
        EncompassingIntegerType({LeftInfo, RightInfo, ResultInfo});

    llvm::Type *EncompassingLLVMTy =
        llvm::IntegerType::get(CGM.getLLVMContext(), EncompassingInfo.Width);

    llvm::Type *ResultLLVMTy = CGM.getTypes().ConvertType(ResultQTy);

    llvm::Intrinsic::ID IntrinsicId;
    switch (BuiltinID) {
    default:
      llvm_unreachable("Unknown overflow builtin id.");
    case Builtin::BI__builtin_add_overflow:
      IntrinsicId = EncompassingInfo.Signed
                        ? llvm::Intrinsic::sadd_with_overflow
                        : llvm::Intrinsic::uadd_with_overflow;
      break;
    case Builtin::BI__builtin_sub_overflow:
      IntrinsicId = EncompassingInfo.Signed
                        ? llvm::Intrinsic::ssub_with_overflow
                        : llvm::Intrinsic::usub_with_overflow;
      break;
    case Builtin::BI__builtin_mul_overflow:
      IntrinsicId = EncompassingInfo.Signed
                        ? llvm::Intrinsic::smul_with_overflow
                        : llvm::Intrinsic::umul_with_overflow;
      break;
    }

    llvm::Value *Left = EmitScalarExpr(LeftArg);
    llvm::Value *Right = EmitScalarExpr(RightArg);
    Address ResultPtr = EmitPointerWithAlignment(ResultArg);

    // Extend each operand to the encompassing type.
    Left = Builder.CreateIntCast(Left, EncompassingLLVMTy, LeftInfo.Signed);
    Right = Builder.CreateIntCast(Right, EncompassingLLVMTy, RightInfo.Signed);

    // Perform the operation on the extended values.
    llvm::Value *Overflow, *Result;
    Result = EmitOverflowIntrinsic(*this, IntrinsicId, Left, Right, Overflow);

    if (EncompassingInfo.Width > ResultInfo.Width) {
      // The result is wider than needed: truncate it, then extend it back and
      // compare against the untruncated result to detect truncation overflow.
      llvm::Value *ResultTrunc = Builder.CreateTrunc(Result, ResultLLVMTy);
      llvm::Value *ResultTruncExt = Builder.CreateIntCast(
          ResultTrunc, EncompassingLLVMTy, ResultInfo.Signed);
      llvm::Value *TruncationOverflow =
          Builder.CreateICmpNE(Result, ResultTruncExt);

      Overflow = Builder.CreateOr(Overflow, TruncationOverflow);
      Result = ResultTrunc;
    }

    // Finally, store the result using the pointer.
    bool isVolatile =
        ResultArg->getType()->getPointeeType().isVolatileQualified();
    Builder.CreateStore(EmitToMemory(Result, ResultQTy), ResultPtr, isVolatile);

    return RValue::get(Overflow);
  }
  case Builtin::BI__builtin_uadd_overflow:
  case Builtin::BI__builtin_uaddl_overflow:
  case Builtin::BI__builtin_uaddll_overflow:
  case Builtin::BI__builtin_usub_overflow:
  case Builtin::BI__builtin_usubl_overflow:
  case Builtin::BI__builtin_usubll_overflow:
  case Builtin::BI__builtin_umul_overflow:
  case Builtin::BI__builtin_umull_overflow:
  case Builtin::BI__builtin_umulll_overflow:
  case Builtin::BI__builtin_sadd_overflow:
  case Builtin::BI__builtin_saddl_overflow:
  case Builtin::BI__builtin_saddll_overflow:
  case Builtin::BI__builtin_ssub_overflow:
  case Builtin::BI__builtin_ssubl_overflow:
  case Builtin::BI__builtin_ssubll_overflow:
  case Builtin::BI__builtin_smul_overflow:
  case Builtin::BI__builtin_smull_overflow:
  case Builtin::BI__builtin_smulll_overflow: {
    // These builtins translate directly to the relevant llvm IR node.

    // Scalarize our inputs.
    llvm::Value *X = EmitScalarExpr(E->getArg(0));
    llvm::Value *Y = EmitScalarExpr(E->getArg(1));
    Address SumOutPtr = EmitPointerWithAlignment(E->getArg(2));

    // Decide which of the overflow intrinsics we are lowering to.
    llvm::Intrinsic::ID IntrinsicId;
    switch (BuiltinID) {
    default: llvm_unreachable("Unknown overflow builtin id.");
    case Builtin::BI__builtin_uadd_overflow:
    case Builtin::BI__builtin_uaddl_overflow:
    case Builtin::BI__builtin_uaddll_overflow:
      IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
      break;
    case Builtin::BI__builtin_usub_overflow:
    case Builtin::BI__builtin_usubl_overflow:
    case Builtin::BI__builtin_usubll_overflow:
      IntrinsicId = llvm::Intrinsic::usub_with_overflow;
      break;
    case Builtin::BI__builtin_umul_overflow:
    case Builtin::BI__builtin_umull_overflow:
    case Builtin::BI__builtin_umulll_overflow:
      IntrinsicId = llvm::Intrinsic::umul_with_overflow;
      break;
    case Builtin::BI__builtin_sadd_overflow:
    case Builtin::BI__builtin_saddl_overflow:
    case Builtin::BI__builtin_saddll_overflow:
      IntrinsicId = llvm::Intrinsic::sadd_with_overflow;
      break;
    case Builtin::BI__builtin_ssub_overflow:
    case Builtin::BI__builtin_ssubl_overflow:
    case Builtin::BI__builtin_ssubll_overflow:
      IntrinsicId = llvm::Intrinsic::ssub_with_overflow;
      break;
    case Builtin::BI__builtin_smul_overflow:
    case Builtin::BI__builtin_smull_overflow:
    case Builtin::BI__builtin_smulll_overflow:
      IntrinsicId = llvm::Intrinsic::smul_with_overflow;
      break;
    }

    llvm::Value *Carry;
    llvm::Value *Sum = EmitOverflowIntrinsic(*this, IntrinsicId, X, Y, Carry);
    Builder.CreateStore(Sum, SumOutPtr);

    return RValue::get(Carry);
  }
  case Builtin::BI__builtin_addressof:
    return RValue::get(EmitLValue(E->getArg(0)).getPointer());
  case Builtin::BI__builtin_operator_new:
    return EmitBuiltinNewDeleteCall(FD->getType()->castAs<FunctionProtoType>(),
                                    E->getArg(0), false);
  case Builtin::BI__builtin_operator_delete:
    return EmitBuiltinNewDeleteCall(FD->getType()->castAs<FunctionProtoType>(),
                                    E->getArg(0), true);
  case Builtin::BI__noop:
    // __noop always evaluates to an integer literal zero.
    return RValue::get(ConstantInt::get(IntTy, 0));
  case Builtin::BI__builtin_call_with_static_chain: {
    const CallExpr *Call = cast<CallExpr>(E->getArg(0));
    const Expr *Chain = E->getArg(1);
    return EmitCall(Call->getCallee()->getType(),
                    EmitScalarExpr(Call->getCallee()), Call, ReturnValue,
                    Call->getCalleeDecl(), EmitScalarExpr(Chain));
  }
  case Builtin::BI_InterlockedExchange:
  case Builtin::BI_InterlockedExchangePointer:
    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
  case Builtin::BI_InterlockedCompareExchangePointer: {
    llvm::Type *RTy;
    llvm::IntegerType *IntType =
        IntegerType::get(getLLVMContext(),
                         getContext().getTypeSize(E->getType()));
    llvm::Type *IntPtrType = IntType->getPointerTo();

    llvm::Value *Destination =
        Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)), IntPtrType);

    llvm::Value *Exchange = EmitScalarExpr(E->getArg(1));
    RTy = Exchange->getType();
    Exchange = Builder.CreatePtrToInt(Exchange, IntType);

    llvm::Value *Comparand =
        Builder.CreatePtrToInt(EmitScalarExpr(E->getArg(2)), IntType);

    auto Result = Builder.CreateAtomicCmpXchg(Destination, Comparand, Exchange,
                                              SequentiallyConsistent,
                                              SequentiallyConsistent);
    Result->setVolatile(true);

    return RValue::get(Builder.CreateIntToPtr(Builder.CreateExtractValue(Result,
                                                                         0),
                                              RTy));
  }
  case Builtin::BI_InterlockedCompareExchange: {
    AtomicCmpXchgInst *CXI = Builder.CreateAtomicCmpXchg(
        EmitScalarExpr(E->getArg(0)),
        EmitScalarExpr(E->getArg(2)),
        EmitScalarExpr(E->getArg(1)),
        SequentiallyConsistent,
        SequentiallyConsistent);
    CXI->setVolatile(true);
    return RValue::get(Builder.CreateExtractValue(CXI, 0));
  }
  case Builtin::BI_InterlockedIncrement: {
    AtomicRMWInst *RMWI = Builder.CreateAtomicRMW(
        llvm::AtomicRMWInst::Add,
        EmitScalarExpr(E->getArg(0)),
        ConstantInt::get(Int32Ty, 1),
        llvm::SequentiallyConsistent);
    RMWI->setVolatile(true);
    return RValue::get(Builder.CreateAdd(RMWI, ConstantInt::get(Int32Ty, 1)));
  }
  case Builtin::BI_InterlockedDecrement: {
    AtomicRMWInst *RMWI = Builder.CreateAtomicRMW(
        llvm::AtomicRMWInst::Sub,
        EmitScalarExpr(E->getArg(0)),
        ConstantInt::get(Int32Ty, 1),
        llvm::SequentiallyConsistent);
    RMWI->setVolatile(true);
    return RValue::get(Builder.CreateSub(RMWI, ConstantInt::get(Int32Ty, 1)));
  }
  case Builtin::BI_InterlockedExchangeAdd: {
    AtomicRMWInst *RMWI = Builder.CreateAtomicRMW(
        llvm::AtomicRMWInst::Add,
        EmitScalarExpr(E->getArg(0)),
        EmitScalarExpr(E->getArg(1)),
        llvm::SequentiallyConsistent);
    RMWI->setVolatile(true);
    return RValue::get(RMWI);
  }
  case Builtin::BI__readfsdword: {
    Value *IntToPtr =
        Builder.CreateIntToPtr(EmitScalarExpr(E->getArg(0)),
                               llvm::PointerType::get(CGM.Int32Ty, 257));
    LoadInst *Load =
        Builder.CreateAlignedLoad(IntToPtr, /*Align=*/4, /*isVolatile=*/true);
    return RValue::get(Load);
  }
  case Builtin::BI__exception_code:
  case Builtin::BI_exception_code:
    return RValue::get(EmitSEHExceptionCode());
  case Builtin::BI__exception_info:
  case Builtin::BI_exception_info:
    return RValue::get(EmitSEHExceptionInfo());
  case Builtin::BI__abnormal_termination:
  case Builtin::BI_abnormal_termination:
    return RValue::get(EmitSEHAbnormalTermination());
  case Builtin::BI_setjmpex: {
    if (getTarget().getTriple().isOSMSVCRT()) {
      llvm::Type *ArgTypes[] = {Int8PtrTy, Int8PtrTy};
      llvm::AttributeSet ReturnsTwiceAttr =
          AttributeSet::get(getLLVMContext(), llvm::AttributeSet::FunctionIndex,
                            llvm::Attribute::ReturnsTwice);
      llvm::Constant *SetJmpEx = CGM.CreateRuntimeFunction(
          llvm::FunctionType::get(IntTy, ArgTypes, /*isVarArg=*/false),
          "_setjmpex", ReturnsTwiceAttr);
      llvm::Value *Buf = Builder.CreateBitOrPointerCast(
          EmitScalarExpr(E->getArg(0)), Int8PtrTy);
      llvm::Value *FrameAddr =
          Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
                             ConstantInt::get(Int32Ty, 0));
      llvm::Value *Args[] = {Buf, FrameAddr};
      llvm::CallSite CS = EmitRuntimeCallOrInvoke(SetJmpEx, Args);
      CS.setAttributes(ReturnsTwiceAttr);
      return RValue::get(CS.getInstruction());
    }
    break;
  }
  case Builtin::BI_setjmp: {
    if (getTarget().getTriple().isOSMSVCRT()) {
      llvm::AttributeSet ReturnsTwiceAttr =
          AttributeSet::get(getLLVMContext(), llvm::AttributeSet::FunctionIndex,
                            llvm::Attribute::ReturnsTwice);
      llvm::Value *Buf = Builder.CreateBitOrPointerCast(
          EmitScalarExpr(E->getArg(0)), Int8PtrTy);
      llvm::CallSite CS;
      if (getTarget().getTriple().getArch() == llvm::Triple::x86) {
        llvm::Type *ArgTypes[] = {Int8PtrTy, IntTy};
        llvm::Constant *SetJmp3 = CGM.CreateRuntimeFunction(
            llvm::FunctionType::get(IntTy, ArgTypes, /*isVarArg=*/true),
            "_setjmp3", ReturnsTwiceAttr);
        llvm::Value *Count = ConstantInt::get(IntTy, 0);
        llvm::Value *Args[] = {Buf, Count};
        CS = EmitRuntimeCallOrInvoke(SetJmp3, Args);
      } else {
        llvm::Type *ArgTypes[] = {Int8PtrTy, Int8PtrTy};
        llvm::Constant *SetJmp = CGM.CreateRuntimeFunction(
            llvm::FunctionType::get(IntTy, ArgTypes, /*isVarArg=*/false),
            "_setjmp", ReturnsTwiceAttr);
        llvm::Value *FrameAddr =
            Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
                               ConstantInt::get(Int32Ty, 0));
        llvm::Value *Args[] = {Buf, FrameAddr};
        CS = EmitRuntimeCallOrInvoke(SetJmp, Args);
      }
      CS.setAttributes(ReturnsTwiceAttr);
      return RValue::get(CS.getInstruction());
    }
    break;
  }
  case Builtin::BI__GetExceptionInfo: {
    if (llvm::GlobalVariable *GV =
            CGM.getCXXABI().getThrowInfo(FD->getParamDecl(0)->getType()))
      return RValue::get(llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy));
    break;
  }
  }

  // If this is an alias for a lib function (e.g. __builtin_sin), emit the
  // call using the normal call path, but using the unmangled version of the
  // function name.
  if (getContext().BuiltinInfo.isLibFunction(BuiltinID))
    return emitLibraryCall(*this, FD, E,
                           CGM.getBuiltinLibFunction(FD, BuiltinID));

  // If this is a predefined lib function (e.g. malloc), emit the call using
  // exactly the normal call path.
  if (getContext().BuiltinInfo.isPredefinedLibFunction(BuiltinID))
    return emitLibraryCall(*this, FD, E, EmitScalarExpr(E->getCallee()));

  // Check that a call to a target specific builtin has the correct target
  // features.
  checkTargetFeatures(E, FD);

  // See if we have a target specific intrinsic.
  const char *Name = getContext().BuiltinInfo.getName(BuiltinID);
  Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
  if (const char *Prefix =
          llvm::Triple::getArchTypePrefix(getTarget().getTriple().getArch())) {
    IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix, Name);
    // Fall back to the MS builtin mapping when the GCC one has no match.
    if (IntrinsicID == Intrinsic::not_intrinsic)
      IntrinsicID = Intrinsic::getIntrinsicForMSBuiltin(Prefix, Name);
  }

  if (IntrinsicID != Intrinsic::not_intrinsic) {
    SmallVector<Value *, 16> Args;

    // Find out if any arguments are required to be integer constant
    // expressions.
    unsigned ICEArguments = 0;
    ASTContext::GetBuiltinTypeError Error;
    getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
    assert(Error == ASTContext::GE_None && "Should not codegen an error");

    Function *F = CGM.getIntrinsic(IntrinsicID);
    llvm::FunctionType *FTy = F->getFunctionType();

    for (unsigned i = 0, e = E->getNumArgs(); i != e; ++i) {
      Value *ArgValue;
      // If this is a normal argument, just emit it as a scalar.
      if ((ICEArguments & (1 << i)) == 0) {
        ArgValue = EmitScalarExpr(E->getArg(i));
      } else {
        // If this is required to be a constant, constant fold it so that we
        // know that the generated intrinsic gets a ConstantInt.
        llvm::APSInt Result;
        bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
        assert(IsConst && "Constant arg isn't actually constant?");
        (void)IsConst;
        ArgValue = llvm::ConstantInt::get(getLLVMContext(), Result);
      }

      // If the intrinsic arg type is different from the builtin arg type
      // we need to do a bit cast.
      llvm::Type *PTy = FTy->getParamType(i);
      if (PTy != ArgValue->getType()) {
        assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
               "Must be able to losslessly bit cast to param");
        ArgValue = Builder.CreateBitCast(ArgValue, PTy);
      }

      Args.push_back(ArgValue);
    }

    Value *V = Builder.CreateCall(F, Args);
    QualType BuiltinRetType = E->getType();

    llvm::Type *RetTy = VoidTy;
    if (!BuiltinRetType->isVoidType())
      RetTy = ConvertType(BuiltinRetType);

    if (RetTy != V->getType()) {
      assert(V->getType()->canLosslesslyBitCastTo(RetTy) &&
             "Must be able to losslessly bit cast result type");
      V = Builder.CreateBitCast(V, RetTy);
    }

    return RValue::get(V);
  }

  // See if we have a target specific builtin that needs to be lowered.
  if (Value *V = EmitTargetBuiltinExpr(BuiltinID, E))
    return RValue::get(V);

  ErrorUnsupported(E, "builtin function");

  // Unknown builtin, for now just dump it out and return undef.
  return GetUndefRValue(E->getType());
}
static Value *EmitTargetArchBuiltinExpr(CodeGenFunction *CGF,
                                        unsigned BuiltinID, const CallExpr *E,
                                        llvm::Triple::ArchType Arch) {
  switch (Arch) {
  case llvm::Triple::arm:
  case llvm::Triple::armeb:
  case llvm::Triple::thumb:
  case llvm::Triple::thumbeb:
    return CGF->EmitARMBuiltinExpr(BuiltinID, E);
  case llvm::Triple::aarch64:
  case llvm::Triple::aarch64_be:
    return CGF->EmitAArch64BuiltinExpr(BuiltinID, E);
  case llvm::Triple::x86:
  case llvm::Triple::x86_64:
    return CGF->EmitX86BuiltinExpr(BuiltinID, E);
  case llvm::Triple::ppc:
  case llvm::Triple::ppc64:
  case llvm::Triple::ppc64le:
    return CGF->EmitPPCBuiltinExpr(BuiltinID, E);
  case llvm::Triple::r600:
  case llvm::Triple::amdgcn:
    return CGF->EmitAMDGPUBuiltinExpr(BuiltinID, E);
  case llvm::Triple::systemz:
    return CGF->EmitSystemZBuiltinExpr(BuiltinID, E);
  case llvm::Triple::nvptx:
  case llvm::Triple::nvptx64:
    return CGF->EmitNVPTXBuiltinExpr(BuiltinID, E);
  case llvm::Triple::wasm32:
  case llvm::Triple::wasm64:
    return CGF->EmitWebAssemblyBuiltinExpr(BuiltinID, E);
  default:
    return nullptr;
  }
}
Value *CodeGenFunction::EmitTargetBuiltinExpr(unsigned BuiltinID,
                                              const CallExpr *E) {
  if (getContext().BuiltinInfo.isAuxBuiltinID(BuiltinID)) {
    assert(getContext().getAuxTargetInfo() && "Missing aux target info");
    return EmitTargetArchBuiltinExpr(
        this, getContext().BuiltinInfo.getAuxBuiltinID(BuiltinID), E,
        getContext().getAuxTargetInfo()->getTriple().getArch());
  }

  return EmitTargetArchBuiltinExpr(this, BuiltinID, E,
                                   getTarget().getTriple().getArch());
}
static llvm::VectorType *GetNeonType(CodeGenFunction *CGF,
                                     NeonTypeFlags TypeFlags,
                                     bool V1Ty = false) {
  int IsQuad = TypeFlags.isQuad();
  switch (TypeFlags.getEltType()) {
  case NeonTypeFlags::Int8:
  case NeonTypeFlags::Poly8:
    return llvm::VectorType::get(CGF->Int8Ty, V1Ty ? 1 : (8 << IsQuad));
  case NeonTypeFlags::Int16:
  case NeonTypeFlags::Poly16:
  case NeonTypeFlags::Float16:
    return llvm::VectorType::get(CGF->Int16Ty, V1Ty ? 1 : (4 << IsQuad));
  case NeonTypeFlags::Int32:
    return llvm::VectorType::get(CGF->Int32Ty, V1Ty ? 1 : (2 << IsQuad));
  case NeonTypeFlags::Int64:
  case NeonTypeFlags::Poly64:
    return llvm::VectorType::get(CGF->Int64Ty, V1Ty ? 1 : (1 << IsQuad));
  case NeonTypeFlags::Poly128:
    // Poly128 is represented as v16i8 so existing patterns can match it.
    return llvm::VectorType::get(CGF->Int8Ty, 16);
  case NeonTypeFlags::Float32:
    return llvm::VectorType::get(CGF->FloatTy, V1Ty ? 1 : (2 << IsQuad));
  case NeonTypeFlags::Float64:
    return llvm::VectorType::get(CGF->DoubleTy, V1Ty ? 1 : (1 << IsQuad));
  }
  llvm_unreachable("Unknown vector element type!");
}

static llvm::VectorType *GetFloatNeonType(CodeGenFunction *CGF,
                                          NeonTypeFlags IntTypeFlags) {
  int IsQuad = IntTypeFlags.isQuad();
  switch (IntTypeFlags.getEltType()) {
  case NeonTypeFlags::Int32:
    return llvm::VectorType::get(CGF->FloatTy, (2 << IsQuad));
  case NeonTypeFlags::Int64:
    return llvm::VectorType::get(CGF->DoubleTy, (1 << IsQuad));
  default:
    llvm_unreachable("Type can't be converted to floating-point!");
  }
}
Value *CodeGenFunction::EmitNeonSplat(Value *V, Constant *C) {
  unsigned nElts = cast<llvm::VectorType>(V->getType())->getNumElements();
  Value *SV = llvm::ConstantVector::getSplat(nElts, C);
  return Builder.CreateShuffleVector(V, V, SV, "lane");
}

Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value *> &Ops,
                                     const char *name,
                                     unsigned shift, bool rightshift) {
  unsigned j = 0;
  for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
       ai != ae; ++ai, ++j)
    if (shift > 0 && shift == j)
      Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift);
    else
      Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name);

  return Builder.CreateCall(F, Ops, name);
}

Value *CodeGenFunction::EmitNeonShiftVector(Value *V, llvm::Type *Ty,
                                            bool neg) {
  int SV = cast<ConstantInt>(V)->getSExtValue();
  return ConstantInt::get(Ty, neg ? -SV : SV);
}

// Right-shift a vector by a constant.
Value *CodeGenFunction::EmitNeonRShiftImm(Value *Vec, Value *Shift,
                                          llvm::Type *Ty, bool usgn,
                                          const char *name) {
  llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);

  int ShiftAmt = cast<ConstantInt>(Shift)->getSExtValue();
  int EltSize = VTy->getScalarSizeInBits();

  Vec = Builder.CreateBitCast(Vec, Ty);

  // lshr/ashr are undefined when the shift amount is equal to the vector
  // element size.
  if (ShiftAmt == EltSize) {
    if (usgn) {
      // Right-shifting an unsigned value by its size yields 0.
      return llvm::ConstantAggregateZero::get(VTy);
    } else {
      // Right-shifting a signed value by its size is equivalent to a shift
      // of size-1.
      --ShiftAmt;
      Shift = ConstantInt::get(VTy->getElementType(), ShiftAmt);
    }
  }

  if (usgn)
    return Builder.CreateLShr(Vec, Shift, name);
  else
    return Builder.CreateAShr(Vec, Shift, name);
}
struct NeonIntrinsicInfo {
  const char *NameHint;
  unsigned BuiltinID;
  unsigned LLVMIntrinsic;
  unsigned AltLLVMIntrinsic;
  unsigned TypeModifier;

  bool operator<(unsigned RHSBuiltinID) const {
    return BuiltinID < RHSBuiltinID;
  }
  bool operator<(const NeonIntrinsicInfo &TE) const {
    return BuiltinID < TE.BuiltinID;
  }
};

#define NEONMAP0(NameBase) \
  { #NameBase, NEON::BI__builtin_neon_ ## NameBase, 0, 0, 0 }

#define NEONMAP1(NameBase, LLVMIntrinsic, TypeModifier) \
  { #NameBase, NEON:: BI__builtin_neon_ ## NameBase, \
    Intrinsic::LLVMIntrinsic, 0, TypeModifier }

#define NEONMAP2(NameBase, LLVMIntrinsic, AltLLVMIntrinsic, TypeModifier) \
  { #NameBase, NEON:: BI__builtin_neon_ ## NameBase, \
    Intrinsic::LLVMIntrinsic, Intrinsic::AltLLVMIntrinsic, \
    TypeModifier }
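
// For example, NEONMAP1(vabs_v, arm_neon_vabs, 0) expands to
//   { "vabs_v", NEON::BI__builtin_neon_vabs_v, Intrinsic::arm_neon_vabs, 0, 0 },
// pairing one NEON builtin with the LLVM intrinsic(s) that implement it plus
// a mask of type-modifier flags.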
2269 NEONMAP1(vabs_v, arm_neon_vabs, 0),
2270 NEONMAP1(vabsq_v, arm_neon_vabs, 0),
2272 NEONMAP1(vaesdq_v, arm_neon_aesd, 0),
2273 NEONMAP1(vaeseq_v, arm_neon_aese, 0),
2274 NEONMAP1(vaesimcq_v, arm_neon_aesimc, 0),
2275 NEONMAP1(vaesmcq_v, arm_neon_aesmc, 0),
2278 NEONMAP1(vcage_v, arm_neon_vacge, 0),
2279 NEONMAP1(vcageq_v, arm_neon_vacge, 0),
2280 NEONMAP1(vcagt_v, arm_neon_vacgt, 0),
2281 NEONMAP1(vcagtq_v, arm_neon_vacgt, 0),
2282 NEONMAP1(vcale_v, arm_neon_vacge, 0),
2283 NEONMAP1(vcaleq_v, arm_neon_vacge, 0),
2284 NEONMAP1(vcalt_v, arm_neon_vacgt, 0),
2285 NEONMAP1(vcaltq_v, arm_neon_vacgt, 0),
2292 NEONMAP1(vcvt_f16_f32, arm_neon_vcvtfp2hf, 0),
2293 NEONMAP1(vcvt_f32_f16, arm_neon_vcvthf2fp, 0),
2295 NEONMAP2(vcvt_n_f32_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
2296 NEONMAP1(vcvt_n_s32_v, arm_neon_vcvtfp2fxs, 0),
2297 NEONMAP1(vcvt_n_s64_v, arm_neon_vcvtfp2fxs, 0),
2298 NEONMAP1(vcvt_n_u32_v, arm_neon_vcvtfp2fxu, 0),
2299 NEONMAP1(vcvt_n_u64_v, arm_neon_vcvtfp2fxu, 0),
2304 NEONMAP1(vcvta_s32_v, arm_neon_vcvtas, 0),
2305 NEONMAP1(vcvta_s64_v, arm_neon_vcvtas, 0),
2306 NEONMAP1(vcvta_u32_v, arm_neon_vcvtau, 0),
2307 NEONMAP1(vcvta_u64_v, arm_neon_vcvtau, 0),
2308 NEONMAP1(vcvtaq_s32_v, arm_neon_vcvtas, 0),
2309 NEONMAP1(vcvtaq_s64_v, arm_neon_vcvtas, 0),
  NEONMAP1(vcvtaq_u32_v, arm_neon_vcvtau, 0),
  NEONMAP1(vcvtaq_u64_v, arm_neon_vcvtau, 0),
  NEONMAP1(vcvtm_s32_v, arm_neon_vcvtms, 0),
  NEONMAP1(vcvtm_s64_v, arm_neon_vcvtms, 0),
  NEONMAP1(vcvtm_u32_v, arm_neon_vcvtmu, 0),
  NEONMAP1(vcvtm_u64_v, arm_neon_vcvtmu, 0),
  NEONMAP1(vcvtmq_s32_v, arm_neon_vcvtms, 0),
  NEONMAP1(vcvtmq_s64_v, arm_neon_vcvtms, 0),
  NEONMAP1(vcvtmq_u32_v, arm_neon_vcvtmu, 0),
  NEONMAP1(vcvtmq_u64_v, arm_neon_vcvtmu, 0),
  NEONMAP1(vcvtn_s32_v, arm_neon_vcvtns, 0),
  NEONMAP1(vcvtn_s64_v, arm_neon_vcvtns, 0),
  NEONMAP1(vcvtn_u32_v, arm_neon_vcvtnu, 0),
  NEONMAP1(vcvtn_u64_v, arm_neon_vcvtnu, 0),
  NEONMAP1(vcvtnq_s32_v, arm_neon_vcvtns, 0),
  NEONMAP1(vcvtnq_s64_v, arm_neon_vcvtns, 0),
  NEONMAP1(vcvtnq_u32_v, arm_neon_vcvtnu, 0),
  NEONMAP1(vcvtnq_u64_v, arm_neon_vcvtnu, 0),
  NEONMAP1(vcvtp_s32_v, arm_neon_vcvtps, 0),
  NEONMAP1(vcvtp_s64_v, arm_neon_vcvtps, 0),
  NEONMAP1(vcvtp_u32_v, arm_neon_vcvtpu, 0),
  NEONMAP1(vcvtp_u64_v, arm_neon_vcvtpu, 0),
  NEONMAP1(vcvtpq_s32_v, arm_neon_vcvtps, 0),
  NEONMAP1(vcvtpq_s64_v, arm_neon_vcvtps, 0),
  NEONMAP1(vcvtpq_u32_v, arm_neon_vcvtpu, 0),
  NEONMAP1(vcvtpq_u64_v, arm_neon_vcvtpu, 0),
  NEONMAP2(vcvtq_n_f32_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
  NEONMAP1(vcvtq_n_s32_v, arm_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvtq_n_s64_v, arm_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvtq_n_u32_v, arm_neon_vcvtfp2fxu, 0),
  NEONMAP1(vcvtq_n_u64_v, arm_neon_vcvtfp2fxu, 0),
  NEONMAP1(vld1_v, arm_neon_vld1, 0),
  NEONMAP1(vld1q_v, arm_neon_vld1, 0),
  NEONMAP1(vld2_lane_v, arm_neon_vld2lane, 0),
  NEONMAP1(vld2_v, arm_neon_vld2, 0),
  NEONMAP1(vld2q_lane_v, arm_neon_vld2lane, 0),
  NEONMAP1(vld2q_v, arm_neon_vld2, 0),
  NEONMAP1(vld3_lane_v, arm_neon_vld3lane, 0),
  NEONMAP1(vld3_v, arm_neon_vld3, 0),
  NEONMAP1(vld3q_lane_v, arm_neon_vld3lane, 0),
  NEONMAP1(vld3q_v, arm_neon_vld3, 0),
  NEONMAP1(vld4_lane_v, arm_neon_vld4lane, 0),
  NEONMAP1(vld4_v, arm_neon_vld4, 0),
  NEONMAP1(vld4q_lane_v, arm_neon_vld4lane, 0),
  NEONMAP1(vld4q_v, arm_neon_vld4, 0),
  NEONMAP2(vqdmlal_v, arm_neon_vqdmull, arm_neon_vqadds, 0),
  NEONMAP2(vqdmlsl_v, arm_neon_vqdmull, arm_neon_vqsubs, 0),
  NEONMAP1(vqshlu_n_v, arm_neon_vqshiftsu, 0),
  NEONMAP1(vqshluq_n_v, arm_neon_vqshiftsu, 0),
  NEONMAP2(vrecpe_v, arm_neon_vrecpe, arm_neon_vrecpe, 0),
  NEONMAP2(vrecpeq_v, arm_neon_vrecpe, arm_neon_vrecpe, 0),
  NEONMAP2(vrsqrte_v, arm_neon_vrsqrte, arm_neon_vrsqrte, 0),
  NEONMAP2(vrsqrteq_v, arm_neon_vrsqrte, arm_neon_vrsqrte, 0),
  NEONMAP1(vsha1su0q_v, arm_neon_sha1su0, 0),
  NEONMAP1(vsha1su1q_v, arm_neon_sha1su1, 0),
  NEONMAP1(vsha256h2q_v, arm_neon_sha256h2, 0),
  NEONMAP1(vsha256hq_v, arm_neon_sha256h, 0),
  NEONMAP1(vsha256su0q_v, arm_neon_sha256su0, 0),
  NEONMAP1(vsha256su1q_v, arm_neon_sha256su1, 0),
  NEONMAP1(vst1_v, arm_neon_vst1, 0),
  NEONMAP1(vst1q_v, arm_neon_vst1, 0),
  NEONMAP1(vst2_lane_v, arm_neon_vst2lane, 0),
  NEONMAP1(vst2_v, arm_neon_vst2, 0),
  NEONMAP1(vst2q_lane_v, arm_neon_vst2lane, 0),
  NEONMAP1(vst2q_v, arm_neon_vst2, 0),
  NEONMAP1(vst3_lane_v, arm_neon_vst3lane, 0),
  NEONMAP1(vst3_v, arm_neon_vst3, 0),
  NEONMAP1(vst3q_lane_v, arm_neon_vst3lane, 0),
  NEONMAP1(vst3q_v, arm_neon_vst3, 0),
  NEONMAP1(vst4_lane_v, arm_neon_vst4lane, 0),
  NEONMAP1(vst4_v, arm_neon_vst4, 0),
  NEONMAP1(vst4q_lane_v, arm_neon_vst4lane, 0),
  NEONMAP1(vst4q_v, arm_neon_vst4, 0),
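
// AArch64 SIMD intrinsic map: the same NEONMAP scheme as the ARM table above,
// but the vector builtins map to aarch64_neon_* / aarch64_crypto_* intrinsics.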
  NEONMAP1(vabs_v, aarch64_neon_abs, 0),
  NEONMAP1(vabsq_v, aarch64_neon_abs, 0),
  NEONMAP1(vaesdq_v, aarch64_crypto_aesd, 0),
  NEONMAP1(vaeseq_v, aarch64_crypto_aese, 0),
  NEONMAP1(vaesimcq_v, aarch64_crypto_aesimc, 0),
  NEONMAP1(vaesmcq_v, aarch64_crypto_aesmc, 0),
  NEONMAP1(vcage_v, aarch64_neon_facge, 0),
  NEONMAP1(vcageq_v, aarch64_neon_facge, 0),
  NEONMAP1(vcagt_v, aarch64_neon_facgt, 0),
  NEONMAP1(vcagtq_v, aarch64_neon_facgt, 0),
  NEONMAP1(vcale_v, aarch64_neon_facge, 0),
  NEONMAP1(vcaleq_v, aarch64_neon_facge, 0),
  NEONMAP1(vcalt_v, aarch64_neon_facgt, 0),
  NEONMAP1(vcaltq_v, aarch64_neon_facgt, 0),
  NEONMAP1(vcvt_f16_f32, aarch64_neon_vcvtfp2hf, 0),
  NEONMAP1(vcvt_f32_f16, aarch64_neon_vcvthf2fp, 0),
  NEONMAP2(vcvt_n_f32_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
  NEONMAP2(vcvt_n_f64_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
  NEONMAP1(vcvt_n_s32_v, aarch64_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvt_n_s64_v, aarch64_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvt_n_u32_v, aarch64_neon_vcvtfp2fxu, 0),
  NEONMAP1(vcvt_n_u64_v, aarch64_neon_vcvtfp2fxu, 0),
  NEONMAP2(vcvtq_n_f32_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
  NEONMAP2(vcvtq_n_f64_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
  NEONMAP1(vcvtq_n_s32_v, aarch64_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvtq_n_s64_v, aarch64_neon_vcvtfp2fxs, 0),
  NEONMAP1(vcvtq_n_u32_v, aarch64_neon_vcvtfp2fxu, 0),
  NEONMAP1(vcvtq_n_u64_v, aarch64_neon_vcvtfp2fxu, 0),
  NEONMAP2(vqdmlal_v, aarch64_neon_sqdmull, aarch64_neon_sqadd, 0),
  NEONMAP2(vqdmlsl_v, aarch64_neon_sqdmull, aarch64_neon_sqsub, 0),
  NEONMAP1(vqshlu_n_v, aarch64_neon_sqshlu, 0),
  NEONMAP1(vqshluq_n_v, aarch64_neon_sqshlu, 0),
  NEONMAP2(vrecpe_v, aarch64_neon_frecpe, aarch64_neon_urecpe, 0),
  NEONMAP2(vrecpeq_v, aarch64_neon_frecpe, aarch64_neon_urecpe, 0),
  NEONMAP2(vrsqrte_v, aarch64_neon_frsqrte, aarch64_neon_ursqrte, 0),
  NEONMAP2(vrsqrteq_v, aarch64_neon_frsqrte, aarch64_neon_ursqrte, 0),
  NEONMAP1(vsha1su0q_v, aarch64_crypto_sha1su0, 0),
  NEONMAP1(vsha1su1q_v, aarch64_crypto_sha1su1, 0),
  NEONMAP1(vsha256h2q_v, aarch64_crypto_sha256h2, 0),
  NEONMAP1(vsha256hq_v, aarch64_crypto_sha256h, 0),
  NEONMAP1(vsha256su0q_v, aarch64_crypto_sha256su0, 0),
  NEONMAP1(vsha256su1q_v, aarch64_crypto_sha256su1, 0),
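
// Scalar (SISD) AArch64 builtins: these produce scalar results and map to
// aarch64_sisd_* intrinsics or to scalar variants of the NEON intrinsics.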
  NEONMAP1(vcvtxd_f32_f64, aarch64_sisd_fcvtxn, 0),
  NEONMAP1(vmull_p64, aarch64_neon_pmull64, 0),
  NEONMAP1(vqdmulls_s32, aarch64_neon_sqdmulls_scalar, 0),
  NEONMAP1(vsha1cq_u32, aarch64_crypto_sha1c, 0),
  NEONMAP1(vsha1h_u32, aarch64_crypto_sha1h, 0),
  NEONMAP1(vsha1mq_u32, aarch64_crypto_sha1m, 0),
  NEONMAP1(vsha1pq_u32, aarch64_crypto_sha1p, 0),
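
// Look up a NEON builtin in one of the sorted intrinsic maps above. The
// sortedness is asserted once per map (in asserts builds), after which
// std::lower_bound gives O(log n) lookup by BuiltinID.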
static const NeonIntrinsicInfo *
findNeonIntrinsicInMap(ArrayRef<NeonIntrinsicInfo> IntrinsicMap,
                       unsigned BuiltinID, bool &MapProvenSorted) {
#ifndef NDEBUG
  if (!MapProvenSorted) {
    assert(std::is_sorted(std::begin(IntrinsicMap), std::end(IntrinsicMap)));
    MapProvenSorted = true;
  }
#endif

  const NeonIntrinsicInfo *Builtin =
      std::lower_bound(IntrinsicMap.begin(), IntrinsicMap.end(), BuiltinID);

  if (Builtin != IntrinsicMap.end() && Builtin->BuiltinID == BuiltinID)
    return Builtin;

  return nullptr;
}
  // Vectorize the return and argument types as requested by the modifier
  // flags; the element count is derived from the requested total vector width.
  if (Modifier & VectorizeRetType)
    Ty = llvm::VectorType::get(
        Ty, VectorSize ? VectorSize / Ty->getPrimitiveSizeInBits() : 1);

  if (Modifier & VectorizeArgTypes) {
    int Elts = VectorSize ? VectorSize / ArgType->getPrimitiveSizeInBits() : 1;
    ArgType = llvm::VectorType::get(ArgType, Elts);
  }

  if (Modifier & (Add1ArgType | Add2ArgTypes))
    Tys.push_back(ArgType);

  if (Modifier & Add2ArgTypes)
    Tys.push_back(ArgType);
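
// Emit a scalar ("SISD") NEON builtin via the corresponding vector intrinsic:
// scalar operands are inserted into lane 0 of an undef vector, the intrinsic
// is called, and the scalar result is read back out of lane 0.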
static Value *EmitCommonNeonSISDBuiltinExpr(CodeGenFunction &CGF,
                                            const NeonIntrinsicInfo &SISDInfo,
                                            SmallVectorImpl<Value *> &Ops,
                                            const CallExpr *E) {
  unsigned BuiltinID = SISDInfo.BuiltinID;
  unsigned int Int = SISDInfo.LLVMIntrinsic;
  unsigned Modifier = SISDInfo.TypeModifier;
  const char *s = SISDInfo.NameHint;

  switch (BuiltinID) {
  default: break;
  case NEON::BI__builtin_neon_vcled_s64:
  case NEON::BI__builtin_neon_vcled_u64:
  case NEON::BI__builtin_neon_vcles_f32:
  case NEON::BI__builtin_neon_vcled_f64:
  case NEON::BI__builtin_neon_vcltd_s64:
  case NEON::BI__builtin_neon_vcltd_u64:
  case NEON::BI__builtin_neon_vclts_f32:
  case NEON::BI__builtin_neon_vcltd_f64:
  case NEON::BI__builtin_neon_vcales_f32:
  case NEON::BI__builtin_neon_vcaled_f64:
  case NEON::BI__builtin_neon_vcalts_f32:
  case NEON::BI__builtin_neon_vcaltd_f64:
    // Only one direction of comparisons actually exists: the "le"/"lt" forms
    // are the "ge"/"gt" intrinsics with swapped operands.
    std::swap(Ops[0], Ops[1]);
    break;
  }

  assert(Int && "Generic code assumes a valid intrinsic");

  // Determine the type(s) of this overloaded AArch64 intrinsic.
  const Expr *Arg = E->getArg(0);
  llvm::Type *ArgTy = CGF.ConvertType(Arg->getType());
  Function *F = CGF.LookupNeonLLVMIntrinsic(Int, Modifier, ArgTy, E);

  int j = 0;
  ConstantInt *C0 = ConstantInt::get(CGF.SizeTy, 0);
  for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
       ai != ae; ++ai, ++j) {
    llvm::Type *ArgTy = ai->getType();
    if (Ops[j]->getType()->getPrimitiveSizeInBits() ==
        ArgTy->getPrimitiveSizeInBits())
      continue;

    assert(ArgTy->isVectorTy() && !Ops[j]->getType()->isVectorTy());
    // A scalar operand headed for a vector parameter is truncated (or
    // bitcast) to the element type and inserted into lane 0.
    Ops[j] =
        CGF.Builder.CreateTruncOrBitCast(Ops[j], ArgTy->getVectorElementType());
    Ops[j] =
        CGF.Builder.CreateInsertElement(UndefValue::get(ArgTy), Ops[j], C0);
  }

  Value *Result = CGF.EmitNeonCall(F, Ops, s);
  llvm::Type *ResultType = CGF.ConvertType(E->getType());
  // If the intrinsic produced a wider vector than the builtin's result type,
  // the scalar answer lives in lane 0.
  if (ResultType->getPrimitiveSizeInBits() <
      Result->getType()->getPrimitiveSizeInBits())
    return CGF.Builder.CreateExtractElement(Result, C0);

  return Result;
}
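
// Shared emission path for NEON builtins that behave identically on ARM and
// AArch64. The table entry supplies the LLVM intrinsic (plus an alternate for
// unsigned or integer variants) and a type-modifier mask; builtins needing
// custom lowering are handled in the switch below.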
Value *CodeGenFunction::EmitCommonNeonBuiltinExpr(
    unsigned BuiltinID, unsigned LLVMIntrinsic, unsigned AltLLVMIntrinsic,
    const char *NameHint, unsigned Modifier, const CallExpr *E,
    SmallVectorImpl<llvm::Value *> &Ops, Address PtrOp0, Address PtrOp1) {
  // Get the last argument, which specifies the vector type.
  llvm::APSInt NeonTypeConst;
  const Expr *Arg = E->getArg(E->getNumArgs() - 1);
  if (!Arg->isIntegerConstantExpr(NeonTypeConst, getContext()))
    return nullptr;

  // Determine the type of this overloaded NEON intrinsic.
  NeonTypeFlags Type(NeonTypeConst.getZExtValue());
  bool Usgn = Type.isUnsigned();
  bool Quad = Type.isQuad();

  llvm::VectorType *VTy = GetNeonType(this, Type);
  llvm::Type *Ty = VTy;
  if (!Ty)
    return nullptr;

  auto getAlignmentValue32 = [&](Address addr) -> Value* {
    return Builder.getInt32(addr.getAlignment().getQuantity());
  };

  unsigned Int = LLVMIntrinsic;
  if ((Modifier & UnsignedAlts) && !Usgn)
    Int = AltLLVMIntrinsic;
  switch (BuiltinID) {
  default: break;
  case NEON::BI__builtin_neon_vabs_v:
  case NEON::BI__builtin_neon_vabsq_v:
    // Floating-point absolute value lowers to the generic fabs intrinsic.
    if (VTy->getElementType()->isFloatingPointTy())
      return EmitNeonCall(CGM.getIntrinsic(Intrinsic::fabs, Ty), Ops, "vabs");
    break;
  case NEON::BI__builtin_neon_vaddhn_v: {
    llvm::VectorType *SrcTy =
        llvm::VectorType::getExtendedElementVectorType(VTy);

    // vaddhn: add in the wide type, shift the sum right by half the element
    // width, then truncate to the narrow type.
    Ops[0] = Builder.CreateAdd(Ops[0], Ops[1], "vaddhn");

    Constant *ShiftAmt =
        ConstantInt::get(SrcTy, SrcTy->getScalarSizeInBits() / 2);
    Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vaddhn");

    return Builder.CreateTrunc(Ops[0], VTy, "vaddhn");
  }
  case NEON::BI__builtin_neon_vcale_v:
  case NEON::BI__builtin_neon_vcaleq_v:
  case NEON::BI__builtin_neon_vcalt_v:
  case NEON::BI__builtin_neon_vcaltq_v:
    // Only one direction of absolute compares exists in hardware; the "le"
    // and "lt" forms are the "ge"/"gt" intrinsics with swapped operands.
    std::swap(Ops[0], Ops[1]);
    // fall through
  case NEON::BI__builtin_neon_vcage_v:
  case NEON::BI__builtin_neon_vcageq_v:
  case NEON::BI__builtin_neon_vcagt_v:
  case NEON::BI__builtin_neon_vcagtq_v: {
    llvm::Type *VecFlt = llvm::VectorType::get(
        VTy->getElementType()->isFloatTy() ? FloatTy : DoubleTy,
        VTy->getNumElements());
    llvm::Type *Tys[] = { VTy, VecFlt };
    Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
    return EmitNeonCall(F, Ops, NameHint);
  }
  case NEON::BI__builtin_neon_vclz_v:
  case NEON::BI__builtin_neon_vclzq_v:
    // The generic ctlz intrinsic takes a second argument saying whether a
    // zero input is undefined; on ARM it is well-defined.
    Ops.push_back(Builder.getInt1(getTarget().isCLZForZeroUndef()));
    break;
  case NEON::BI__builtin_neon_vcvt_f32_v:
  case NEON::BI__builtin_neon_vcvtq_f32_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, Quad));
    return Usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
                : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
  case NEON::BI__builtin_neon_vcvt_n_f32_v:
  case NEON::BI__builtin_neon_vcvt_n_f64_v:
  case NEON::BI__builtin_neon_vcvtq_n_f32_v:
  case NEON::BI__builtin_neon_vcvtq_n_f64_v: {
    llvm::Type *Tys[2] = { GetFloatNeonType(this, Type), Ty };
    // Fixed-point to floating-point: pick the unsigned or signed flavour.
    Int = Usgn ? LLVMIntrinsic : AltLLVMIntrinsic;
    Function *F = CGM.getIntrinsic(Int, Tys);
    return EmitNeonCall(F, Ops, "vcvt_n");
  }
  case NEON::BI__builtin_neon_vcvt_n_s32_v:
  case NEON::BI__builtin_neon_vcvt_n_u32_v:
  case NEON::BI__builtin_neon_vcvt_n_s64_v:
  case NEON::BI__builtin_neon_vcvt_n_u64_v:
  case NEON::BI__builtin_neon_vcvtq_n_s32_v:
  case NEON::BI__builtin_neon_vcvtq_n_u32_v:
  case NEON::BI__builtin_neon_vcvtq_n_s64_v:
  case NEON::BI__builtin_neon_vcvtq_n_u64_v: {
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
    return EmitNeonCall(F, Ops, "vcvt_n");
  }
  case NEON::BI__builtin_neon_vcvt_s32_v:
  case NEON::BI__builtin_neon_vcvt_u32_v:
  case NEON::BI__builtin_neon_vcvt_s64_v:
  case NEON::BI__builtin_neon_vcvt_u64_v:
  case NEON::BI__builtin_neon_vcvtq_s32_v:
  case NEON::BI__builtin_neon_vcvtq_u32_v:
  case NEON::BI__builtin_neon_vcvtq_s64_v:
  case NEON::BI__builtin_neon_vcvtq_u64_v: {
    Ops[0] = Builder.CreateBitCast(Ops[0], GetFloatNeonType(this, Type));
    return Usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt")
                : Builder.CreateFPToSI(Ops[0], Ty, "vcvt");
  }
  case NEON::BI__builtin_neon_vcvta_s32_v:
  case NEON::BI__builtin_neon_vcvta_s64_v:
  case NEON::BI__builtin_neon_vcvta_u32_v:
  case NEON::BI__builtin_neon_vcvta_u64_v:
  case NEON::BI__builtin_neon_vcvtaq_s32_v:
  case NEON::BI__builtin_neon_vcvtaq_s64_v:
  case NEON::BI__builtin_neon_vcvtaq_u32_v:
  case NEON::BI__builtin_neon_vcvtaq_u64_v:
  case NEON::BI__builtin_neon_vcvtn_s32_v:
  case NEON::BI__builtin_neon_vcvtn_s64_v:
  case NEON::BI__builtin_neon_vcvtn_u32_v:
  case NEON::BI__builtin_neon_vcvtn_u64_v:
  case NEON::BI__builtin_neon_vcvtnq_s32_v:
  case NEON::BI__builtin_neon_vcvtnq_s64_v:
  case NEON::BI__builtin_neon_vcvtnq_u32_v:
  case NEON::BI__builtin_neon_vcvtnq_u64_v:
  case NEON::BI__builtin_neon_vcvtp_s32_v:
  case NEON::BI__builtin_neon_vcvtp_s64_v:
  case NEON::BI__builtin_neon_vcvtp_u32_v:
  case NEON::BI__builtin_neon_vcvtp_u64_v:
  case NEON::BI__builtin_neon_vcvtpq_s32_v:
  case NEON::BI__builtin_neon_vcvtpq_s64_v:
  case NEON::BI__builtin_neon_vcvtpq_u32_v:
  case NEON::BI__builtin_neon_vcvtpq_u64_v:
  case NEON::BI__builtin_neon_vcvtm_s32_v:
  case NEON::BI__builtin_neon_vcvtm_s64_v:
  case NEON::BI__builtin_neon_vcvtm_u32_v:
  case NEON::BI__builtin_neon_vcvtm_u64_v:
  case NEON::BI__builtin_neon_vcvtmq_s32_v:
  case NEON::BI__builtin_neon_vcvtmq_s64_v:
  case NEON::BI__builtin_neon_vcvtmq_u32_v:
  case NEON::BI__builtin_neon_vcvtmq_u64_v: {
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, NameHint);
  }
  case NEON::BI__builtin_neon_vext_v:
  case NEON::BI__builtin_neon_vextq_v: {
    int CV = cast<ConstantInt>(Ops[2])->getSExtValue();
    SmallVector<Constant*, 16> Indices;
    for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
      Indices.push_back(ConstantInt::get(Int32Ty, i + CV));

    // vext concatenates the two sources and extracts a window starting at
    // lane CV, which lowers to a single shufflevector.
    Value *SV = llvm::ConstantVector::get(Indices);
    return Builder.CreateShuffleVector(Ops[0], Ops[1], SV, "vext");
  }
  case NEON::BI__builtin_neon_vfma_v:
  case NEON::BI__builtin_neon_vfmaq_v: {
    Function *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
    // The NEON builtin puts the accumulator first, unlike the LLVM fma.
    return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
  }
  case NEON::BI__builtin_neon_vld1_v:
  case NEON::BI__builtin_neon_vld1q_v: {
    llvm::Type *Tys[] = {Ty, Int8PtrTy};
    Ops.push_back(getAlignmentValue32(PtrOp0));
    return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, "vld1");
  }
  case NEON::BI__builtin_neon_vld2_v:
  case NEON::BI__builtin_neon_vld2q_v:
  case NEON::BI__builtin_neon_vld3_v:
  case NEON::BI__builtin_neon_vld3q_v:
  case NEON::BI__builtin_neon_vld4_v:
  case NEON::BI__builtin_neon_vld4q_v: {
    llvm::Type *Tys[] = {Ty, Int8PtrTy};
    Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
    Value *Align = getAlignmentValue32(PtrOp1);
    Ops[1] = Builder.CreateCall(F, {Ops[1], Align}, NameHint);
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld1_dup_v:
  case NEON::BI__builtin_neon_vld1q_dup_v: {
    // Load one element and splat it: insert into lane 0 and shuffle.
    Value *V = UndefValue::get(Ty);
    Ty = llvm::PointerType::getUnqual(VTy->getElementType());
    PtrOp0 = Builder.CreateBitCast(PtrOp0, Ty);
    LoadInst *Ld = Builder.CreateLoad(PtrOp0);
    llvm::Constant *CI = ConstantInt::get(SizeTy, 0);
    Ops[0] = Builder.CreateInsertElement(V, Ld, CI);
    return EmitNeonSplat(Ops[0], CI);
  }
  case NEON::BI__builtin_neon_vld2_lane_v:
  case NEON::BI__builtin_neon_vld2q_lane_v:
  case NEON::BI__builtin_neon_vld3_lane_v:
  case NEON::BI__builtin_neon_vld3q_lane_v:
  case NEON::BI__builtin_neon_vld4_lane_v:
  case NEON::BI__builtin_neon_vld4q_lane_v: {
    llvm::Type *Tys[] = {Ty, Int8PtrTy};
    Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
    for (unsigned I = 2; I < Ops.size() - 1; ++I)
      Ops[I] = Builder.CreateBitCast(Ops[I], Ty);
    Ops.push_back(getAlignmentValue32(PtrOp1));
    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), NameHint);
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vmovl_v: {
    llvm::Type *DTy = llvm::VectorType::getTruncatedElementVectorType(VTy);
    Ops[0] = Builder.CreateBitCast(Ops[0], DTy);
    if (Usgn)
      return Builder.CreateZExt(Ops[0], Ty, "vmovl");
    return Builder.CreateSExt(Ops[0], Ty, "vmovl");
  }
  case NEON::BI__builtin_neon_vmovn_v: {
    llvm::Type *QTy = llvm::VectorType::getExtendedElementVectorType(VTy);
    Ops[0] = Builder.CreateBitCast(Ops[0], QTy);
    return Builder.CreateTrunc(Ops[0], Ty, "vmovn");
  }
  case NEON::BI__builtin_neon_vmull_v:
    // The integer vmull forms could be expressed as a plain mul of extended
    // operands, but the polynomial variant has no generic IR form, so all
    // three go through the target intrinsics.
    Int = Usgn ? Intrinsic::arm_neon_vmullu : Intrinsic::arm_neon_vmulls;
    Int = Type.isPoly() ? (unsigned)Intrinsic::arm_neon_vmullp : Int;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
  case NEON::BI__builtin_neon_vpadal_v:
  case NEON::BI__builtin_neon_vpadalq_v: {
    // The source operand type has twice as many elements of half the size.
    unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
    llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
    llvm::Type *NarrowTy =
        llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
    llvm::Type *Tys[2] = { Ty, NarrowTy };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, NameHint);
  }
  case NEON::BI__builtin_neon_vpaddl_v:
  case NEON::BI__builtin_neon_vpaddlq_v: {
    // The source operand type has twice as many elements of half the size.
    unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
    llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
    llvm::Type *NarrowTy =
        llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
    llvm::Type *Tys[2] = { Ty, NarrowTy };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl");
  }
  case NEON::BI__builtin_neon_vqdmlal_v:
  case NEON::BI__builtin_neon_vqdmlsl_v: {
    // Saturating multiply-accumulate: the doubling multiply first, then the
    // saturating add or subtract of the accumulator.
    SmallVector<Value *, 2> MulOps(Ops.begin() + 1, Ops.end());
    Ops[1] =
        EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Ty), MulOps, "vqdmlal");
    Ops.resize(2);
    return EmitNeonCall(CGM.getIntrinsic(AltLLVMIntrinsic, Ty), Ops, NameHint);
  }
  case NEON::BI__builtin_neon_vqshl_n_v:
  case NEON::BI__builtin_neon_vqshlq_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n", 1, false);
  case NEON::BI__builtin_neon_vqshlu_n_v:
  case NEON::BI__builtin_neon_vqshluq_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshlu_n", 1, false);
  case NEON::BI__builtin_neon_vrecpe_v:
  case NEON::BI__builtin_neon_vrecpeq_v:
  case NEON::BI__builtin_neon_vrsqrte_v:
  case NEON::BI__builtin_neon_vrsqrteq_v:
    // Floating-point inputs pick the FP estimate; integer inputs the integer
    // estimate.
    Int = Ty->isFPOrFPVectorTy() ? LLVMIntrinsic : AltLLVMIntrinsic;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, NameHint);
  case NEON::BI__builtin_neon_vrshr_n_v:
  case NEON::BI__builtin_neon_vrshrq_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n", 1, true);
  case NEON::BI__builtin_neon_vshl_n_v:
  case NEON::BI__builtin_neon_vshlq_n_v:
    Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
    return Builder.CreateShl(Builder.CreateBitCast(Ops[0], Ty), Ops[1],
                             "vshl_n");
  case NEON::BI__builtin_neon_vshll_n_v: {
    llvm::Type *SrcTy = llvm::VectorType::getTruncatedElementVectorType(VTy);
    Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
    if (Usgn)
      Ops[0] = Builder.CreateZExt(Ops[0], VTy);
    else
      Ops[0] = Builder.CreateSExt(Ops[0], VTy);
    Ops[1] = EmitNeonShiftVector(Ops[1], VTy, false);
    return Builder.CreateShl(Ops[0], Ops[1], "vshll_n");
  }
  case NEON::BI__builtin_neon_vshrn_n_v: {
    llvm::Type *SrcTy = llvm::VectorType::getExtendedElementVectorType(VTy);
    Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
    Ops[1] = EmitNeonShiftVector(Ops[1], SrcTy, false);
    if (Usgn)
      Ops[0] = Builder.CreateLShr(Ops[0], Ops[1]);
    else
      Ops[0] = Builder.CreateAShr(Ops[0], Ops[1]);
    return Builder.CreateTrunc(Ops[0], Ty, "vshrn_n");
  }
  case NEON::BI__builtin_neon_vshr_n_v:
  case NEON::BI__builtin_neon_vshrq_n_v:
    return EmitNeonRShiftImm(Ops[0], Ops[1], Ty, Usgn, "vshr_n");
  case NEON::BI__builtin_neon_vst1_v:
  case NEON::BI__builtin_neon_vst1q_v:
  case NEON::BI__builtin_neon_vst2_v:
  case NEON::BI__builtin_neon_vst2q_v:
  case NEON::BI__builtin_neon_vst3_v:
  case NEON::BI__builtin_neon_vst3q_v:
  case NEON::BI__builtin_neon_vst4_v:
  case NEON::BI__builtin_neon_vst4q_v:
  case NEON::BI__builtin_neon_vst2_lane_v:
  case NEON::BI__builtin_neon_vst2q_lane_v:
  case NEON::BI__builtin_neon_vst3_lane_v:
  case NEON::BI__builtin_neon_vst3q_lane_v:
  case NEON::BI__builtin_neon_vst4_lane_v:
  case NEON::BI__builtin_neon_vst4q_lane_v: {
    llvm::Type *Tys[] = {Int8PtrTy, Ty};
    Ops.push_back(getAlignmentValue32(PtrOp0));
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "");
  }
  case NEON::BI__builtin_neon_vsubhn_v: {
    llvm::VectorType *SrcTy =
        llvm::VectorType::getExtendedElementVectorType(VTy);

    // vsubhn: subtract in the wide type, shift the difference right by half
    // the element width, then truncate to the narrow type.
    Ops[0] = Builder.CreateSub(Ops[0], Ops[1], "vsubhn");

    Constant *ShiftAmt =
        ConstantInt::get(SrcTy, SrcTy->getScalarSizeInBits() / 2);
    Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vsubhn");

    return Builder.CreateTrunc(Ops[0], VTy, "vsubhn");
  }
  case NEON::BI__builtin_neon_vtrn_v:
  case NEON::BI__builtin_neon_vtrnq_v: {
    // Transpose: two shuffles, each half-result stored through Ops[0].
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
        Indices.push_back(Builder.getInt32(i+vi));
        Indices.push_back(Builder.getInt32(i+e+vi));
      }
      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  case NEON::BI__builtin_neon_vtst_v:
  case NEON::BI__builtin_neon_vtstq_v: {
    // vtst: (a & b) != 0, sign-extended to all-ones/all-zeros lanes.
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
    Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
                                ConstantAggregateZero::get(Ty));
    return Builder.CreateSExt(Ops[0], Ty, "vtst");
  }
  case NEON::BI__builtin_neon_vuzp_v:
  case NEON::BI__builtin_neon_vuzpq_v: {
    // Unzip: even-indexed elements to one half, odd-indexed to the other.
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
        Indices.push_back(ConstantInt::get(Int32Ty, 2*i+vi));

      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  case NEON::BI__builtin_neon_vzip_v:
  case NEON::BI__builtin_neon_vzipq_v: {
    // Zip: interleave the lanes of the two sources.
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
        Indices.push_back(ConstantInt::get(Int32Ty, (i + vi*e) >> 1));
        Indices.push_back(ConstantInt::get(Int32Ty, ((i + vi*e) >> 1)+e));
      }
      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  }

  assert(Int && "Expected valid intrinsic number");
}
Value *CodeGenFunction::EmitAArch64CompareBuiltinExpr(
    Value *Op, llvm::Type *Ty, const CmpInst::Predicate Fp,
    const CmpInst::Predicate Ip, const Twine &Name) {
  llvm::Type *OTy = Op->getType();

  // TableGen gives the same builtin call for e.g. vceqz_f32 and vceqz_s32,
  // so recover the real operand type from a preceding bitcast if present.
  if (BitCastInst *BI = dyn_cast<BitCastInst>(Op))
    OTy = BI->getOperand(0)->getType();

  Op = Builder.CreateBitCast(Op, OTy);
  if (OTy->getScalarType()->isFloatingPointTy()) {
    Op = Builder.CreateFCmp(Fp, Op, Constant::getNullValue(OTy));
  } else {
    Op = Builder.CreateICmp(Ip, Op, Constant::getNullValue(OTy));
  }
  return Builder.CreateSExt(Op, Ty, Name);
}
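
// Helper for the AArch64 vtbl/vtbx family: pairs of 64-bit table vectors are
// concatenated with shufflevector into the 128-bit tables the TBL/TBX
// instructions expect; an odd trailing half-table is padded with zeros.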
  SmallVector<Value *, 2> TblOps;
  if (ExtOp)
    TblOps.push_back(ExtOp);

  // Build a vector containing sequential numbers like (0, 1, 2, ..., 15).
  SmallVector<Constant*, 16> Indices;
  llvm::VectorType *TblTy = cast<llvm::VectorType>(Ops[0]->getType());
  for (unsigned i = 0, e = TblTy->getNumElements(); i != e; ++i) {
    Indices.push_back(ConstantInt::get(CGF.Int32Ty, 2*i));
    Indices.push_back(ConstantInt::get(CGF.Int32Ty, 2*i+1));
  }
  Value *SV = llvm::ConstantVector::get(Indices);

  int PairPos = 0, End = Ops.size() - 1;
  while (PairPos < End) {
    TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos],
                                                     Ops[PairPos+1], SV, Name));
    PairPos += 2;
  }

  // If there is an odd number of 64-bit tables, fill the high half of the
  // last 128-bit table with zeros.
  if (PairPos == End) {
    Value *ZeroTbl = ConstantAggregateZero::get(TblTy);
    TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos],
                                                     ZeroTbl, SV, Name));
  }

  TblOps.push_back(IndexOp);
  Function *TblF = CGF.CGM.getIntrinsic(IntID, ResTy);
  return CGF.EmitNeonCall(TblF, TblOps, Name);
}
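
// Map the ARM hint builtins (nop/yield/wfe/wfi/sev/sevl) onto the immediate
// operand of the llvm.arm.hint intrinsic.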
Value *CodeGenFunction::GetValueForARMHint(unsigned BuiltinID) {
  unsigned Value;
  switch (BuiltinID) {
  default:
    return nullptr;
  case ARM::BI__builtin_arm_nop:
    Value = 0;
    break;
  case ARM::BI__builtin_arm_yield:
  case ARM::BI__yield:
    Value = 1;
    break;
  case ARM::BI__builtin_arm_wfe:
    Value = 2;
    break;
  case ARM::BI__builtin_arm_wfi:
    Value = 3;
    break;
  case ARM::BI__builtin_arm_sev:
    Value = 4;
    break;
  case ARM::BI__builtin_arm_sevl:
    Value = 5;
    break;
  }

  return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_hint),
                            llvm::ConstantInt::get(Int32Ty, Value));
}
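
// Shared helper for __builtin_arm_rsr*/wsr*: the system-register name is
// passed to llvm.read_register / llvm.write_register as metadata, with
// zext/trunc/pointer casts reconciling a 32-bit value with a 64-bit register.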
  // The read/write register intrinsics only support 32- and 64-bit operations.
  assert((RegisterType->isIntegerTy(32) || RegisterType->isIntegerTy(64))
          && "Unsupported size for register.");

  CodeGen::CGBuilderTy &Builder = CGF.Builder;
  CodeGen::CodeGenModule &CGM = CGF.CGM;
  LLVMContext &Context = CGM.getLLVMContext();

  const Expr *SysRegStrExpr = E->getArg(0)->IgnoreParenCasts();
  StringRef SysReg = cast<StringLiteral>(SysRegStrExpr)->getString();

  llvm::Metadata *Ops[] = { llvm::MDString::get(Context, SysReg) };
  llvm::MDNode *RegName = llvm::MDNode::get(Context, Ops);
  llvm::Value *Metadata = llvm::MetadataAsValue::get(Context, RegName);

  llvm::Type *Types[] = { RegisterType };

  bool MixedTypes = RegisterType->isIntegerTy(64) && ValueType->isIntegerTy(32);
  assert(!(RegisterType->isIntegerTy(32) && ValueType->isIntegerTy(64))
            && "Can't fit 64-bit value in 32-bit register");

  if (IsRead) {
    llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::read_register, Types);
    llvm::Value *Call = Builder.CreateCall(F, Metadata);

    if (MixedTypes)
      // Read into a 64-bit register, then truncate the result to 32 bits.
      return Builder.CreateTrunc(Call, ValueType);

    if (ValueType->isPointerTy())
      // We have an i32/i64 result (Call) but want to return a VoidPtrTy (i8*).
      return Builder.CreateIntToPtr(Call, ValueType);

    return Call;
  }

  llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::write_register, Types);
  llvm::Value *ArgValue = CGF.EmitScalarExpr(E->getArg(1));
  if (MixedTypes) {
    // Extend the 32-bit write value to 64 bits to pass to write_register.
    ArgValue = Builder.CreateZExt(ArgValue, RegisterType);
    return Builder.CreateCall(F, { Metadata, ArgValue });
  }

  if (ValueType->isPointerTy()) {
    // We have a VoidPtrTy ArgValue but need to pass an i32/i64.
    ArgValue = Builder.CreatePtrToInt(ArgValue, RegisterType);
    return Builder.CreateCall(F, { Metadata, ArgValue });
  }

  return Builder.CreateCall(F, { Metadata, ArgValue });
}
/// Return true if BuiltinID is an overloaded Neon intrinsic with an extra
/// argument that specifies the vector type.
static bool HasExtraNeonArgument(unsigned BuiltinID) {
  switch (BuiltinID) {
  default: break;
  case NEON::BI__builtin_neon_vget_lane_i8:
  case NEON::BI__builtin_neon_vget_lane_i16:
  case NEON::BI__builtin_neon_vget_lane_i32:
  case NEON::BI__builtin_neon_vget_lane_i64:
  case NEON::BI__builtin_neon_vget_lane_f32:
  case NEON::BI__builtin_neon_vgetq_lane_i8:
  case NEON::BI__builtin_neon_vgetq_lane_i16:
  case NEON::BI__builtin_neon_vgetq_lane_i32:
  case NEON::BI__builtin_neon_vgetq_lane_i64:
  case NEON::BI__builtin_neon_vgetq_lane_f32:
  case NEON::BI__builtin_neon_vset_lane_i8:
  case NEON::BI__builtin_neon_vset_lane_i16:
  case NEON::BI__builtin_neon_vset_lane_i32:
  case NEON::BI__builtin_neon_vset_lane_i64:
  case NEON::BI__builtin_neon_vset_lane_f32:
  case NEON::BI__builtin_neon_vsetq_lane_i8:
  case NEON::BI__builtin_neon_vsetq_lane_i16:
  case NEON::BI__builtin_neon_vsetq_lane_i32:
  case NEON::BI__builtin_neon_vsetq_lane_i64:
  case NEON::BI__builtin_neon_vsetq_lane_f32:
  case NEON::BI__builtin_neon_vsha1h_u32:
  case NEON::BI__builtin_neon_vsha1cq_u32:
  case NEON::BI__builtin_neon_vsha1pq_u32:
  case NEON::BI__builtin_neon_vsha1mq_u32:
  case ARM::BI_MoveToCoprocessor:
  case ARM::BI_MoveToCoprocessor2:
    return false;
  }
  return true;
}
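
// Entry point for ARM builtins: hints and assorted special cases are handled
// first; NEON builtins fall through to the shared table-driven path.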
Value *CodeGenFunction::EmitARMBuiltinExpr(unsigned BuiltinID,
                                           const CallExpr *E) {
  if (auto Hint = GetValueForARMHint(BuiltinID))
    return Hint;

  if (BuiltinID == ARM::BI__emit) {
    bool IsThumb = getTarget().getTriple().getArch() == llvm::Triple::thumb;
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(VoidTy, /*Variadic=*/false);

    APSInt Value;
    if (!E->getArg(0)->EvaluateAsInt(Value, CGM.getContext()))
      llvm_unreachable("Sema will ensure that the parameter is constant");

    uint64_t ZExtValue = Value.zextOrTrunc(IsThumb ? 16 : 32).getZExtValue();

    llvm::InlineAsm *Emit =
        IsThumb ? InlineAsm::get(FTy, ".inst.n 0x" + utohexstr(ZExtValue), "",
                                 /*SideEffects=*/true)
                : InlineAsm::get(FTy, ".inst 0x" + utohexstr(ZExtValue), "",
                                 /*SideEffects=*/true);

    return Builder.CreateCall(Emit);
  }
  if (BuiltinID == ARM::BI__builtin_arm_dbg) {
    Value *Option = EmitScalarExpr(E->getArg(0));
    return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_dbg), Option);
  }

  if (BuiltinID == ARM::BI__builtin_arm_prefetch) {
    Value *Address = EmitScalarExpr(E->getArg(0));
    Value *RW      = EmitScalarExpr(E->getArg(1));
    Value *IsData  = EmitScalarExpr(E->getArg(2));

    // Locality is not supported on the ARM target; always use 3 (most local).
    Value *Locality = llvm::ConstantInt::get(Int32Ty, 3);

    Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
    return Builder.CreateCall(F, {Address, RW, Locality, IsData});
  }
  if (BuiltinID == ARM::BI__builtin_arm_rbit) {
    return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_rbit),
                              EmitScalarExpr(E->getArg(0)), "rbit");
  }

  if (BuiltinID == ARM::BI__clear_cache) {
    assert(E->getNumArgs() == 2 && "__clear_cache takes 2 arguments");
    const FunctionDecl *FD = E->getDirectCallee();
    Value *Ops[2];
    for (unsigned i = 0; i < 2; i++)
      Ops[i] = EmitScalarExpr(E->getArg(i));
    llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
    llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
    StringRef Name = FD->getName();
    return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
  }
  if (BuiltinID == ARM::BI__builtin_arm_ldrexd ||
      ((BuiltinID == ARM::BI__builtin_arm_ldrex ||
        BuiltinID == ARM::BI__builtin_arm_ldaex) &&
       getContext().getTypeSize(E->getType()) == 64) ||
      BuiltinID == ARM::BI__ldrexd) {
    Function *F;

    switch (BuiltinID) {
    default: llvm_unreachable("unexpected builtin");
    case ARM::BI__builtin_arm_ldaex:
      F = CGM.getIntrinsic(Intrinsic::arm_ldaexd);
      break;
    case ARM::BI__builtin_arm_ldrexd:
    case ARM::BI__builtin_arm_ldrex:
    case ARM::BI__ldrexd:
      F = CGM.getIntrinsic(Intrinsic::arm_ldrexd);
      break;
    }

    Value *LdPtr = EmitScalarExpr(E->getArg(0));
    Value *Val = Builder.CreateCall(F, Builder.CreateBitCast(LdPtr, Int8PtrTy),
                                    "ldrexd");

    // Combine the two 32-bit halves into a single 64-bit result.
    Value *Val0 = Builder.CreateExtractValue(Val, 1);
    Value *Val1 = Builder.CreateExtractValue(Val, 0);
    Val0 = Builder.CreateZExt(Val0, Int64Ty);
    Val1 = Builder.CreateZExt(Val1, Int64Ty);

    Value *ShiftCst = llvm::ConstantInt::get(Int64Ty, 32);
    Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
    Val = Builder.CreateOr(Val, Val1);
    return Builder.CreateBitCast(Val, ConvertType(E->getType()));
  }

  if (BuiltinID == ARM::BI__builtin_arm_ldrex ||
      BuiltinID == ARM::BI__builtin_arm_ldaex) {
    Value *LoadAddr = EmitScalarExpr(E->getArg(0));

    QualType Ty = E->getType();
    llvm::Type *RealResTy = ConvertType(Ty);
    llvm::Type *IntResTy = llvm::IntegerType::get(
        getLLVMContext(), getContext().getTypeSize(Ty));
    LoadAddr = Builder.CreateBitCast(LoadAddr, IntResTy->getPointerTo());

    Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_ldaex
                                       ? Intrinsic::arm_ldaex
                                       : Intrinsic::arm_ldrex,
                                   LoadAddr->getType());
    Value *Val = Builder.CreateCall(F, LoadAddr, "ldrex");

    if (RealResTy->isPointerTy())
      return Builder.CreateIntToPtr(Val, RealResTy);

    Val = Builder.CreateTruncOrBitCast(Val, IntResTy);
    return Builder.CreateBitCast(Val, RealResTy);
  }

  if (BuiltinID == ARM::BI__builtin_arm_strexd ||
      ((BuiltinID == ARM::BI__builtin_arm_stlex ||
        BuiltinID == ARM::BI__builtin_arm_strex) &&
       getContext().getTypeSize(E->getArg(0)->getType()) == 64)) {
    Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_stlex
                                       ? Intrinsic::arm_stlexd
                                       : Intrinsic::arm_strexd);

    // Split the 64-bit store value into its two halves via a memory
    // round-trip through a { i32, i32 } temporary.
    llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, nullptr);
    Address Tmp = CreateMemTemp(E->getArg(0)->getType());
    Value *Val = EmitScalarExpr(E->getArg(0));
    Builder.CreateStore(Val, Tmp);

    Address LdPtr = Builder.CreateBitCast(Tmp,
                                          llvm::PointerType::getUnqual(STy));
    Val = Builder.CreateLoad(LdPtr);

    Value *Arg0 = Builder.CreateExtractValue(Val, 0);
    Value *Arg1 = Builder.CreateExtractValue(Val, 1);
    Value *StPtr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)),
                                         Int8PtrTy);
    return Builder.CreateCall(F, {Arg0, Arg1, StPtr}, "strexd");
  }

  if (BuiltinID == ARM::BI__builtin_arm_strex ||
      BuiltinID == ARM::BI__builtin_arm_stlex) {
    Value *StoreVal = EmitScalarExpr(E->getArg(0));
    Value *StoreAddr = EmitScalarExpr(E->getArg(1));

    QualType Ty = E->getArg(0)->getType();
    llvm::Type *StoreTy = llvm::IntegerType::get(
        getLLVMContext(), getContext().getTypeSize(Ty));
    StoreAddr = Builder.CreateBitCast(StoreAddr, StoreTy->getPointerTo());

    if (StoreVal->getType()->isPointerTy())
      StoreVal = Builder.CreatePtrToInt(StoreVal, Int32Ty);
    else {
      StoreVal = Builder.CreateBitCast(StoreVal, StoreTy);
      StoreVal = Builder.CreateZExtOrBitCast(StoreVal, Int32Ty);
    }

    Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_stlex
                                       ? Intrinsic::arm_stlex
                                       : Intrinsic::arm_strex,
                                   StoreAddr->getType());
    return Builder.CreateCall(F, {StoreVal, StoreAddr}, "strex");
  }

  if (BuiltinID == ARM::BI__builtin_arm_clrex) {
    Function *F = CGM.getIntrinsic(Intrinsic::arm_clrex);
    return Builder.CreateCall(F);
  }
  // CRC32
  Intrinsic::ID CRCIntrinsicID = Intrinsic::not_intrinsic;
  switch (BuiltinID) {
  case ARM::BI__builtin_arm_crc32b:
    CRCIntrinsicID = Intrinsic::arm_crc32b; break;
  case ARM::BI__builtin_arm_crc32cb:
    CRCIntrinsicID = Intrinsic::arm_crc32cb; break;
  case ARM::BI__builtin_arm_crc32h:
    CRCIntrinsicID = Intrinsic::arm_crc32h; break;
  case ARM::BI__builtin_arm_crc32ch:
    CRCIntrinsicID = Intrinsic::arm_crc32ch; break;
  case ARM::BI__builtin_arm_crc32w:
  case ARM::BI__builtin_arm_crc32d:
    CRCIntrinsicID = Intrinsic::arm_crc32w; break;
  case ARM::BI__builtin_arm_crc32cw:
  case ARM::BI__builtin_arm_crc32cd:
    CRCIntrinsicID = Intrinsic::arm_crc32cw; break;
  }

  if (CRCIntrinsicID != Intrinsic::not_intrinsic) {
    Value *Arg0 = EmitScalarExpr(E->getArg(0));
    Value *Arg1 = EmitScalarExpr(E->getArg(1));

    // The crc32{c,}d intrinsics are implemented as two calls to crc32{c,}w:
    // the first operates on the low 32 bits, the second on the high ones.
    if (BuiltinID == ARM::BI__builtin_arm_crc32d ||
        BuiltinID == ARM::BI__builtin_arm_crc32cd) {
      Value *C1 = llvm::ConstantInt::get(Int64Ty, 32);
      Value *Arg1a = Builder.CreateTruncOrBitCast(Arg1, Int32Ty);
      Value *Arg1b = Builder.CreateLShr(Arg1, C1);
      Arg1b = Builder.CreateTruncOrBitCast(Arg1b, Int32Ty);

      Function *F = CGM.getIntrinsic(CRCIntrinsicID);
      Value *Res = Builder.CreateCall(F, {Arg0, Arg1a});
      return Builder.CreateCall(F, {Res, Arg1b});
    }

    Function *F = CGM.getIntrinsic(CRCIntrinsicID);
    return Builder.CreateCall(F, {Arg0, Arg1});
  }
  if (BuiltinID == ARM::BI__builtin_arm_rsr ||
      BuiltinID == ARM::BI__builtin_arm_rsr64 ||
      BuiltinID == ARM::BI__builtin_arm_rsrp ||
      BuiltinID == ARM::BI__builtin_arm_wsr ||
      BuiltinID == ARM::BI__builtin_arm_wsr64 ||
      BuiltinID == ARM::BI__builtin_arm_wsrp) {

    bool IsRead = BuiltinID == ARM::BI__builtin_arm_rsr ||
                  BuiltinID == ARM::BI__builtin_arm_rsr64 ||
                  BuiltinID == ARM::BI__builtin_arm_rsrp;

    bool IsPointerBuiltin = BuiltinID == ARM::BI__builtin_arm_rsrp ||
                            BuiltinID == ARM::BI__builtin_arm_wsrp;

    bool Is64Bit = BuiltinID == ARM::BI__builtin_arm_rsr64 ||
                   BuiltinID == ARM::BI__builtin_arm_wsr64;

    llvm::Type *ValueType;
    llvm::Type *RegisterType;
    if (IsPointerBuiltin) {
      ValueType = VoidPtrTy;
      RegisterType = Int32Ty;
    } else if (Is64Bit) {
      ValueType = RegisterType = Int64Ty;
    } else {
      ValueType = RegisterType = Int32Ty;
    }

    return EmitSpecialRegisterBuiltin(*this, E, RegisterType, ValueType,
                                      IsRead);
  }
  // Find out if any arguments are required to be integer constant
  // expressions.
  unsigned ICEArguments = 0;
  ASTContext::GetBuiltinTypeError Error;
  getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);

  auto getAlignmentValue32 = [&](Address addr) -> Value* {
    return Builder.getInt32(addr.getAlignment().getQuantity());
  };

  for (unsigned i = 0, e = NumArgs; i != e; i++) {
    if (i == 0) {
      switch (BuiltinID) {
      case NEON::BI__builtin_neon_vld1_v:
      case NEON::BI__builtin_neon_vld1q_v:
      case NEON::BI__builtin_neon_vld1q_lane_v:
      case NEON::BI__builtin_neon_vld1_lane_v:
      case NEON::BI__builtin_neon_vld1_dup_v:
      case NEON::BI__builtin_neon_vld1q_dup_v:
      case NEON::BI__builtin_neon_vst1_v:
      case NEON::BI__builtin_neon_vst1q_v:
      case NEON::BI__builtin_neon_vst1q_lane_v:
      case NEON::BI__builtin_neon_vst1_lane_v:
      case NEON::BI__builtin_neon_vst2_v:
      case NEON::BI__builtin_neon_vst2q_v:
      case NEON::BI__builtin_neon_vst2_lane_v:
      case NEON::BI__builtin_neon_vst2q_lane_v:
      case NEON::BI__builtin_neon_vst3_v:
      case NEON::BI__builtin_neon_vst3q_v:
      case NEON::BI__builtin_neon_vst3_lane_v:
      case NEON::BI__builtin_neon_vst3q_lane_v:
      case NEON::BI__builtin_neon_vst4_v:
      case NEON::BI__builtin_neon_vst4q_v:
      case NEON::BI__builtin_neon_vst4_lane_v:
      case NEON::BI__builtin_neon_vst4q_lane_v:
        // Get the alignment for the argument in addition to the value;
        // we'll use it later.
        PtrOp0 = EmitPointerWithAlignment(E->getArg(0));
        Ops.push_back(PtrOp0.getPointer());
        continue;
      }
    }
    if (i == 1) {
      switch (BuiltinID) {
      case NEON::BI__builtin_neon_vld2_v:
      case NEON::BI__builtin_neon_vld2q_v:
      case NEON::BI__builtin_neon_vld3_v:
      case NEON::BI__builtin_neon_vld3q_v:
      case NEON::BI__builtin_neon_vld4_v:
      case NEON::BI__builtin_neon_vld4q_v:
      case NEON::BI__builtin_neon_vld2_lane_v:
      case NEON::BI__builtin_neon_vld2q_lane_v:
      case NEON::BI__builtin_neon_vld3_lane_v:
      case NEON::BI__builtin_neon_vld3q_lane_v:
      case NEON::BI__builtin_neon_vld4_lane_v:
      case NEON::BI__builtin_neon_vld4q_lane_v:
      case NEON::BI__builtin_neon_vld2_dup_v:
      case NEON::BI__builtin_neon_vld3_dup_v:
      case NEON::BI__builtin_neon_vld4_dup_v:
        // Get the alignment for the argument in addition to the value;
        // we'll use it later.
        PtrOp1 = EmitPointerWithAlignment(E->getArg(1));
        Ops.push_back(PtrOp1.getPointer());
        continue;
      }
    }

    if ((ICEArguments & (1 << i)) == 0) {
      Ops.push_back(EmitScalarExpr(E->getArg(i)));
    } else {
      // If this is required to be a constant, constant fold it so that we
      // know that the generated intrinsic gets a ConstantInt.
      llvm::APSInt Result;
      bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
      assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
      Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
    }
  }
  switch (BuiltinID) {
  default: break;

  case NEON::BI__builtin_neon_vget_lane_i8:
  case NEON::BI__builtin_neon_vget_lane_i16:
  case NEON::BI__builtin_neon_vget_lane_i32:
  case NEON::BI__builtin_neon_vget_lane_i64:
  case NEON::BI__builtin_neon_vget_lane_f32:
  case NEON::BI__builtin_neon_vgetq_lane_i8:
  case NEON::BI__builtin_neon_vgetq_lane_i16:
  case NEON::BI__builtin_neon_vgetq_lane_i32:
  case NEON::BI__builtin_neon_vgetq_lane_i64:
  case NEON::BI__builtin_neon_vgetq_lane_f32:
    return Builder.CreateExtractElement(Ops[0], Ops[1], "vget_lane");

  case NEON::BI__builtin_neon_vset_lane_i8:
  case NEON::BI__builtin_neon_vset_lane_i16:
  case NEON::BI__builtin_neon_vset_lane_i32:
  case NEON::BI__builtin_neon_vset_lane_i64:
  case NEON::BI__builtin_neon_vset_lane_f32:
  case NEON::BI__builtin_neon_vsetq_lane_i8:
  case NEON::BI__builtin_neon_vsetq_lane_i16:
  case NEON::BI__builtin_neon_vsetq_lane_i32:
  case NEON::BI__builtin_neon_vsetq_lane_i64:
  case NEON::BI__builtin_neon_vsetq_lane_f32:
    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");

  case NEON::BI__builtin_neon_vsha1h_u32:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1h), Ops,
                        "vsha1h");
  case NEON::BI__builtin_neon_vsha1cq_u32:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1c), Ops,
                        "vsha1c");
  case NEON::BI__builtin_neon_vsha1pq_u32:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1p), Ops,
                        "vsha1p");
  case NEON::BI__builtin_neon_vsha1mq_u32:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1m), Ops,
                        "vsha1m");

  // The ARM _MoveToCoprocessor builtins put the input register value as
  // the first argument, but the LLVM intrinsic expects it as the third one.
  case ARM::BI_MoveToCoprocessor:
  case ARM::BI_MoveToCoprocessor2: {
    Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI_MoveToCoprocessor ?
                                   Intrinsic::arm_mcr : Intrinsic::arm_mcr2);
    return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0],
                                  Ops[3], Ops[4], Ops[5]});
  }
  }
  // Get the last argument, which specifies the vector type.
  assert(HasExtraArg);
  llvm::APSInt Result;
  const Expr *Arg = E->getArg(E->getNumArgs()-1);
  if (!Arg->isIntegerConstantExpr(Result, getContext()))
    return nullptr;

  if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f ||
      BuiltinID == ARM::BI__builtin_arm_vcvtr_d) {
    // Determine the overloaded type of this builtin.
    llvm::Type *Ty;
    if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f)
      Ty = FloatTy;
    else
      Ty = DoubleTy;

    // Determine whether this is an unsigned conversion or not.
    bool usgn = Result.getZExtValue() == 1;
    unsigned Int = usgn ? Intrinsic::arm_vcvtru : Intrinsic::arm_vcvtr;

    // Call the appropriate intrinsic.
    Function *F = CGM.getIntrinsic(Int, Ty);
    return Builder.CreateCall(F, Ops, "vcvtr");
  }
  // Determine the type of this overloaded NEON intrinsic.
  NeonTypeFlags Type(Result.getZExtValue());
  bool usgn = Type.isUnsigned();
  bool rightShift = false;

  llvm::VectorType *VTy = GetNeonType(this, Type);
  llvm::Type *Ty = VTy;
  if (!Ty)
    return nullptr;

  // Many NEON builtins have identical semantics and uses in ARM and
  // AArch64. Emit them in a single function.
  auto IntrinsicMap = makeArrayRef(ARMSIMDIntrinsicMap);
  const NeonIntrinsicInfo *Builtin = findNeonIntrinsicInMap(
      IntrinsicMap, BuiltinID, NEONSIMDIntrinsicsProvenSorted);
  if (Builtin)
    return EmitCommonNeonBuiltinExpr(
        Builtin->BuiltinID, Builtin->LLVMIntrinsic, Builtin->AltLLVMIntrinsic,
        Builtin->NameHint, Builtin->TypeModifier, E, Ops, PtrOp0, PtrOp1);
  unsigned Int;
  switch (BuiltinID) {
  default: return nullptr;
  case NEON::BI__builtin_neon_vld1q_lane_v:
    // Handle 64-bit integer elements as a special case. Use shuffles of
    // one-element vectors to avoid poor code for i64 in the backend.
    if (VTy->getElementType()->isIntegerTy(64)) {
      // Extract the other lane.
      Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
      uint32_t Lane = cast<ConstantInt>(Ops[2])->getZExtValue();
      Value *SV = llvm::ConstantVector::get(ConstantInt::get(Int32Ty, 1-Lane));
      Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV);
      // Load the value as a one-element vector.
      Ty = llvm::VectorType::get(VTy->getElementType(), 1);
      llvm::Type *Tys[] = {Ty, Int8PtrTy};
      Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld1, Tys);
      Value *Align = getAlignmentValue32(PtrOp0);
      Value *Ld = Builder.CreateCall(F, {Ops[0], Align});
      // Combine them.
      uint32_t Indices[] = {1 - Lane, Lane};
      SV = llvm::ConstantDataVector::get(getLLVMContext(), Indices);
      return Builder.CreateShuffleVector(Ops[1], Ld, SV, "vld1q_lane");
    }
    // fall through
  case NEON::BI__builtin_neon_vld1_lane_v: {
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    PtrOp0 = Builder.CreateElementBitCast(PtrOp0, VTy->getElementType());
    Value *Ld = Builder.CreateLoad(PtrOp0);
    return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane");
  }
  case NEON::BI__builtin_neon_vld2_dup_v:
  case NEON::BI__builtin_neon_vld3_dup_v:
  case NEON::BI__builtin_neon_vld4_dup_v: {
    // Handle 64-bit elements as a special case to avoid poor code.
    if (VTy->getElementType()->getPrimitiveSizeInBits() == 64) {
      switch (BuiltinID) {
      case NEON::BI__builtin_neon_vld2_dup_v:
        Int = Intrinsic::arm_neon_vld2;
        break;
      case NEON::BI__builtin_neon_vld3_dup_v:
        Int = Intrinsic::arm_neon_vld3;
        break;
      case NEON::BI__builtin_neon_vld4_dup_v:
        Int = Intrinsic::arm_neon_vld4;
        break;
      default: llvm_unreachable("unknown vld_dup intrinsic?");
      }
      llvm::Type *Tys[] = {Ty, Int8PtrTy};
      Function *F = CGM.getIntrinsic(Int, Tys);
      llvm::Value *Align = getAlignmentValue32(PtrOp1);
      Ops[1] = Builder.CreateCall(F, {Ops[1], Align}, "vld_dup");
      Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
      Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
      return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
    }
    switch (BuiltinID) {
    case NEON::BI__builtin_neon_vld2_dup_v:
      Int = Intrinsic::arm_neon_vld2lane;
      break;
    case NEON::BI__builtin_neon_vld3_dup_v:
      Int = Intrinsic::arm_neon_vld3lane;
      break;
    case NEON::BI__builtin_neon_vld4_dup_v:
      Int = Intrinsic::arm_neon_vld4lane;
      break;
    default: llvm_unreachable("unknown vld_dup intrinsic?");
    }
    llvm::Type *Tys[] = {Ty, Int8PtrTy};
    Function *F = CGM.getIntrinsic(Int, Tys);
    llvm::StructType *STy = cast<llvm::StructType>(F->getReturnType());

    SmallVector<Value*, 6> Args;
    Args.push_back(Ops[1]);
    Args.append(STy->getNumElements(), UndefValue::get(Ty));

    llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
    Args.push_back(CI);
    Args.push_back(getAlignmentValue32(PtrOp1));

    Ops[1] = Builder.CreateCall(F, Args, "vld_dup");
    // Splat lane 0 to all elements in each vector of the result.
    for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
      Value *Val = Builder.CreateExtractValue(Ops[1], i);
      Value *Elt = Builder.CreateBitCast(Val, Ty);
      Elt = EmitNeonSplat(Elt, CI);
      Elt = Builder.CreateBitCast(Elt, Val->getType());
      Ops[1] = Builder.CreateInsertValue(Ops[1], Elt, i);
    }
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vqrshrn_n_v:
    Int =
      usgn ? Intrinsic::arm_neon_vqrshiftnu : Intrinsic::arm_neon_vqrshiftns;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n",
                        1, true);
  case NEON::BI__builtin_neon_vqrshrun_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrshiftnsu, Ty),
                        Ops, "vqrshrun_n", 1, true);
  case NEON::BI__builtin_neon_vqshrn_n_v:
    Int = usgn ? Intrinsic::arm_neon_vqshiftnu : Intrinsic::arm_neon_vqshiftns;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n",
                        1, true);
  case NEON::BI__builtin_neon_vqshrun_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftnsu, Ty),
                        Ops, "vqshrun_n", 1, true);
  case NEON::BI__builtin_neon_vrecpe_v:
  case NEON::BI__builtin_neon_vrecpeq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecpe, Ty),
                        Ops, "vrecpe");
  case NEON::BI__builtin_neon_vrshrn_n_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrshiftn, Ty),
                        Ops, "vrshrn_n", 1, true);
  case NEON::BI__builtin_neon_vrsra_n_v:
  case NEON::BI__builtin_neon_vrsraq_n_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true);
    Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
    Ops[1] = Builder.CreateCall(CGM.getIntrinsic(Int, Ty), {Ops[1], Ops[2]});
    return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n");
  case NEON::BI__builtin_neon_vsri_n_v:
  case NEON::BI__builtin_neon_vsriq_n_v:
    rightShift = true;
    // fall through
  case NEON::BI__builtin_neon_vsli_n_v:
  case NEON::BI__builtin_neon_vsliq_n_v:
    Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift);
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftins, Ty),
                        Ops, "vsli_n");
  case NEON::BI__builtin_neon_vsra_n_v:
  case NEON::BI__builtin_neon_vsraq_n_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n");
    return Builder.CreateAdd(Ops[0], Ops[1]);
  case NEON::BI__builtin_neon_vst1q_lane_v:
    // Handle 64-bit integer elements as a special case. Use a shuffle to get
    // a one-element vector and avoid poor code for i64 in the backend.
    if (VTy->getElementType()->isIntegerTy(64)) {
      Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
      Value *SV = llvm::ConstantVector::get(cast<llvm::Constant>(Ops[2]));
      Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV);
      Ops[2] = getAlignmentValue32(PtrOp0);
      llvm::Type *Tys[] = {Int8PtrTy, Ops[1]->getType()};
      return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst1,
                                                 Tys), Ops);
    }
    // fall through
  case NEON::BI__builtin_neon_vst1_lane_v: {
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    return Builder.CreateStore(Ops[1], Builder.CreateBitCast(PtrOp0, Ty));
  }
  case NEON::BI__builtin_neon_vtbl1_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl1), Ops,
                        "vtbl1");
  case NEON::BI__builtin_neon_vtbl2_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl2), Ops,
                        "vtbl2");
  case NEON::BI__builtin_neon_vtbl3_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl3), Ops,
                        "vtbl3");
  case NEON::BI__builtin_neon_vtbl4_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl4), Ops,
                        "vtbl4");
  case NEON::BI__builtin_neon_vtbx1_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx1), Ops,
                        "vtbx1");
  case NEON::BI__builtin_neon_vtbx2_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx2), Ops,
                        "vtbx2");
  case NEON::BI__builtin_neon_vtbx3_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx3), Ops,
                        "vtbx3");
  case NEON::BI__builtin_neon_vtbx4_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx4), Ops,
                        "vtbx4");
  }
}
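
// AArch64 table-lookup builtins (vtbl/vtbx and the q-register vqtbl/vqtbx
// forms). The D-register variants are emulated on top of the 128-bit TBL/TBX
// intrinsics via packTBLDVectorList above.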
static Value *EmitAArch64TblBuiltinExpr(CodeGenFunction &CGF,
                                        unsigned BuiltinID, const CallExpr *E,
                                        SmallVectorImpl<Value *> &Ops) {
  unsigned int Int = 0;
  const char *s = nullptr;

  switch (BuiltinID) {
  default:
    return nullptr;
  case NEON::BI__builtin_neon_vtbl1_v:
  case NEON::BI__builtin_neon_vqtbl1_v:
  case NEON::BI__builtin_neon_vqtbl1q_v:
  case NEON::BI__builtin_neon_vtbl2_v:
  case NEON::BI__builtin_neon_vqtbl2_v:
  case NEON::BI__builtin_neon_vqtbl2q_v:
  case NEON::BI__builtin_neon_vtbl3_v:
  case NEON::BI__builtin_neon_vqtbl3_v:
  case NEON::BI__builtin_neon_vqtbl3q_v:
  case NEON::BI__builtin_neon_vtbl4_v:
  case NEON::BI__builtin_neon_vqtbl4_v:
  case NEON::BI__builtin_neon_vqtbl4q_v:
    break;
  case NEON::BI__builtin_neon_vtbx1_v:
  case NEON::BI__builtin_neon_vqtbx1_v:
  case NEON::BI__builtin_neon_vqtbx1q_v:
  case NEON::BI__builtin_neon_vtbx2_v:
  case NEON::BI__builtin_neon_vqtbx2_v:
  case NEON::BI__builtin_neon_vqtbx2q_v:
  case NEON::BI__builtin_neon_vtbx3_v:
  case NEON::BI__builtin_neon_vqtbx3_v:
  case NEON::BI__builtin_neon_vqtbx3q_v:
  case NEON::BI__builtin_neon_vtbx4_v:
  case NEON::BI__builtin_neon_vqtbx4_v:
  case NEON::BI__builtin_neon_vqtbx4q_v:
    break;
  }
  // Get the last argument, which specifies the vector type.
  llvm::APSInt Result;

  switch (BuiltinID) {
  case NEON::BI__builtin_neon_vtbl1_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 1), nullptr,
                              Ops[1], Ty, Intrinsic::aarch64_neon_tbl1,
                              "vtbl1");
  }
  case NEON::BI__builtin_neon_vtbl2_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 2), nullptr,
                              Ops[2], Ty, Intrinsic::aarch64_neon_tbl1,
                              "vtbl1");
  }
  case NEON::BI__builtin_neon_vtbl3_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 3), nullptr,
                              Ops[3], Ty, Intrinsic::aarch64_neon_tbl2,
                              "vtbl2");
  }
  case NEON::BI__builtin_neon_vtbl4_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 4), nullptr,
                              Ops[4], Ty, Intrinsic::aarch64_neon_tbl2,
                              "vtbl2");
  }
  case NEON::BI__builtin_neon_vtbx1_v: {
    Value *TblRes =
        packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 1), nullptr, Ops[2],
                           Ty, Intrinsic::aarch64_neon_tbl1, "vtbl1");

    // vtbx leaves out-of-range lanes unchanged: select between the original
    // input and the table result based on whether the index is >= 8.
    llvm::Constant *EightV = ConstantInt::get(Ty, 8);
    Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[2], EightV);
    CmpRes = Builder.CreateSExt(CmpRes, Ty);

    Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]);
    Value *EltsFromTbl = Builder.CreateAnd(Builder.CreateNot(CmpRes), TblRes);
    return Builder.CreateOr(EltsFromInput, EltsFromTbl, "vtbx");
  }
  case NEON::BI__builtin_neon_vtbx2_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 2), Ops[0],
                              Ops[3], Ty, Intrinsic::aarch64_neon_tbx1,
                              "vtbx1");
  }
  case NEON::BI__builtin_neon_vtbx3_v: {
    Value *TblRes =
        packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 3), nullptr, Ops[4],
                           Ty, Intrinsic::aarch64_neon_tbl2, "vtbl2");

    llvm::Constant *TwentyFourV = ConstantInt::get(Ty, 24);
    Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[4],
                                       TwentyFourV);
    CmpRes = Builder.CreateSExt(CmpRes, Ty);

    Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]);
    Value *EltsFromTbl = Builder.CreateAnd(Builder.CreateNot(CmpRes), TblRes);
    return Builder.CreateOr(EltsFromInput, EltsFromTbl, "vtbx");
  }
  case NEON::BI__builtin_neon_vtbx4_v: {
    return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 4), Ops[0],
                              Ops[5], Ty, Intrinsic::aarch64_neon_tbx2,
                              "vtbx2");
  }
  case NEON::BI__builtin_neon_vqtbl1_v:
  case NEON::BI__builtin_neon_vqtbl1q_v:
    Int = Intrinsic::aarch64_neon_tbl1; s = "vtbl1"; break;
  case NEON::BI__builtin_neon_vqtbl2_v:
  case NEON::BI__builtin_neon_vqtbl2q_v:
    Int = Intrinsic::aarch64_neon_tbl2; s = "vtbl2"; break;
  case NEON::BI__builtin_neon_vqtbl3_v:
  case NEON::BI__builtin_neon_vqtbl3q_v:
    Int = Intrinsic::aarch64_neon_tbl3; s = "vtbl3"; break;
  case NEON::BI__builtin_neon_vqtbl4_v:
  case NEON::BI__builtin_neon_vqtbl4q_v:
    Int = Intrinsic::aarch64_neon_tbl4; s = "vtbl4"; break;
  case NEON::BI__builtin_neon_vqtbx1_v:
  case NEON::BI__builtin_neon_vqtbx1q_v:
    Int = Intrinsic::aarch64_neon_tbx1; s = "vtbx1"; break;
  case NEON::BI__builtin_neon_vqtbx2_v:
  case NEON::BI__builtin_neon_vqtbx2q_v:
    Int = Intrinsic::aarch64_neon_tbx2; s = "vtbx2"; break;
  case NEON::BI__builtin_neon_vqtbx3_v:
  case NEON::BI__builtin_neon_vqtbx3q_v:
    Int = Intrinsic::aarch64_neon_tbx3; s = "vtbx3"; break;
  case NEON::BI__builtin_neon_vqtbx4_v:
  case NEON::BI__builtin_neon_vqtbx4q_v:
    Int = Intrinsic::aarch64_neon_tbx4; s = "vtbx4"; break;
  }

  if (!Int)
    return nullptr;

  Function *F = CGF.CGM.getIntrinsic(Int, Ty);
  return CGF.EmitNeonCall(F, Ops, s);
}
Value *CodeGenFunction::vectorWrapScalar16(Value *Op) {
  // Widen the scalar to a vector by inserting it into lane 0 of an undef
  // vector, so scalar builtins can reuse the vector intrinsics.
  llvm::Type *VTy = llvm::VectorType::get(Int16Ty, 4);
  Op = Builder.CreateBitCast(Op, Int16Ty);
  Value *V = UndefValue::get(VTy);
  llvm::Constant *CI = ConstantInt::get(SizeTy, 0);
  Op = Builder.CreateInsertElement(V, Op, CI);
  return Op;
}
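
// Entry point for AArch64 builtins; mirrors the ARM path above.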
Value *CodeGenFunction::EmitAArch64BuiltinExpr(unsigned BuiltinID,
                                               const CallExpr *E) {
  unsigned HintID = static_cast<unsigned>(-1);
  switch (BuiltinID) {
  default: break;
  case AArch64::BI__builtin_arm_nop:
    HintID = 0;
    break;
  case AArch64::BI__builtin_arm_yield:
    HintID = 1;
    break;
  case AArch64::BI__builtin_arm_wfe:
    HintID = 2;
    break;
  case AArch64::BI__builtin_arm_wfi:
    HintID = 3;
    break;
  case AArch64::BI__builtin_arm_sev:
    HintID = 4;
    break;
  case AArch64::BI__builtin_arm_sevl:
    HintID = 5;
    break;
  }

  if (HintID != static_cast<unsigned>(-1)) {
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_hint);
    return Builder.CreateCall(F, llvm::ConstantInt::get(Int32Ty, HintID));
  }
  if (BuiltinID == AArch64::BI__builtin_arm_prefetch) {
    Value *Address         = EmitScalarExpr(E->getArg(0));
    Value *RW              = EmitScalarExpr(E->getArg(1));
    Value *CacheLevel      = EmitScalarExpr(E->getArg(2));
    Value *RetentionPolicy = EmitScalarExpr(E->getArg(3));
    Value *IsData          = EmitScalarExpr(E->getArg(4));

    Value *Locality = nullptr;
    if (cast<llvm::ConstantInt>(RetentionPolicy)->isZero()) {
      // Temporal fetch: convert the cache level to a locality value.
      Locality = llvm::ConstantInt::get(Int32Ty,
          -cast<llvm::ConstantInt>(CacheLevel)->getValue() + 3);
    } else {
      // Streaming fetch.
      Locality = llvm::ConstantInt::get(Int32Ty, 0);
    }

    Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
    return Builder.CreateCall(F, {Address, RW, Locality, IsData});
  }

  if (BuiltinID == AArch64::BI__builtin_arm_rbit) {
    assert((getContext().getTypeSize(E->getType()) == 32) &&
           "rbit of unusual size!");
    llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
    return Builder.CreateCall(
        CGM.getIntrinsic(Intrinsic::aarch64_rbit, Arg->getType()), Arg,
        "rbit");
  }
  if (BuiltinID == AArch64::BI__builtin_arm_rbit64) {
    assert((getContext().getTypeSize(E->getType()) == 64) &&
           "rbit of unusual size!");
    llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
    return Builder.CreateCall(
        CGM.getIntrinsic(Intrinsic::aarch64_rbit, Arg->getType()), Arg,
        "rbit");
  }

  if (BuiltinID == AArch64::BI__clear_cache) {
    assert(E->getNumArgs() == 2 && "__clear_cache takes 2 arguments");
    const FunctionDecl *FD = E->getDirectCallee();
    Value *Ops[2];
    for (unsigned i = 0; i < 2; i++)
      Ops[i] = EmitScalarExpr(E->getArg(i));
    llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
    llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
    StringRef Name = FD->getName();
    return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
  }
  if ((BuiltinID == AArch64::BI__builtin_arm_ldrex ||
       BuiltinID == AArch64::BI__builtin_arm_ldaex) &&
      getContext().getTypeSize(E->getType()) == 128) {
    Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_ldaex
                                       ? Intrinsic::aarch64_ldaxp
                                       : Intrinsic::aarch64_ldxp);

    Value *LdPtr = EmitScalarExpr(E->getArg(0));
    Value *Val = Builder.CreateCall(F, Builder.CreateBitCast(LdPtr, Int8PtrTy),
                                    "ldxp");

    // Combine the two 64-bit halves into a single 128-bit result.
    Value *Val0 = Builder.CreateExtractValue(Val, 1);
    Value *Val1 = Builder.CreateExtractValue(Val, 0);
    llvm::Type *Int128Ty = llvm::IntegerType::get(getLLVMContext(), 128);
    Val0 = Builder.CreateZExt(Val0, Int128Ty);
    Val1 = Builder.CreateZExt(Val1, Int128Ty);

    Value *ShiftCst = llvm::ConstantInt::get(Int128Ty, 64);
    Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
    Val = Builder.CreateOr(Val, Val1);
    return Builder.CreateBitCast(Val, ConvertType(E->getType()));
  } else if (BuiltinID == AArch64::BI__builtin_arm_ldrex ||
             BuiltinID == AArch64::BI__builtin_arm_ldaex) {
    Value *LoadAddr = EmitScalarExpr(E->getArg(0));

    QualType Ty = E->getType();
    llvm::Type *RealResTy = ConvertType(Ty);
    llvm::Type *IntResTy = llvm::IntegerType::get(
        getLLVMContext(), getContext().getTypeSize(Ty));
    LoadAddr = Builder.CreateBitCast(LoadAddr, IntResTy->getPointerTo());

    Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_ldaex
                                       ? Intrinsic::aarch64_ldaxr
                                       : Intrinsic::aarch64_ldxr,
                                   LoadAddr->getType());
    Value *Val = Builder.CreateCall(F, LoadAddr, "ldxr");

    if (RealResTy->isPointerTy())
      return Builder.CreateIntToPtr(Val, RealResTy);

    Val = Builder.CreateTruncOrBitCast(Val, IntResTy);
    return Builder.CreateBitCast(Val, RealResTy);
  }

  if ((BuiltinID == AArch64::BI__builtin_arm_strex ||
       BuiltinID == AArch64::BI__builtin_arm_stlex) &&
      getContext().getTypeSize(E->getArg(0)->getType()) == 128) {
    Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_stlex
                                       ? Intrinsic::aarch64_stlxp
                                       : Intrinsic::aarch64_stxp);

    // Split the 128-bit store value into its two halves via a memory
    // round-trip through a { i64, i64 } temporary.
    llvm::Type *STy = llvm::StructType::get(Int64Ty, Int64Ty, nullptr);
    Address Tmp = CreateMemTemp(E->getArg(0)->getType());
    EmitAnyExprToMem(E->getArg(0), Tmp, Qualifiers(), /*init*/ true);

    Tmp = Builder.CreateBitCast(Tmp, llvm::PointerType::getUnqual(STy));
    llvm::Value *Val = Builder.CreateLoad(Tmp);

    Value *Arg0 = Builder.CreateExtractValue(Val, 0);
    Value *Arg1 = Builder.CreateExtractValue(Val, 1);
    Value *StPtr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)),
                                         Int8PtrTy);
    return Builder.CreateCall(F, {Arg0, Arg1, StPtr}, "stxp");
  }

  if (BuiltinID == AArch64::BI__builtin_arm_strex ||
      BuiltinID == AArch64::BI__builtin_arm_stlex) {
    Value *StoreVal = EmitScalarExpr(E->getArg(0));
    Value *StoreAddr = EmitScalarExpr(E->getArg(1));

    QualType Ty = E->getArg(0)->getType();
    llvm::Type *StoreTy = llvm::IntegerType::get(
        getLLVMContext(), getContext().getTypeSize(Ty));
    StoreAddr = Builder.CreateBitCast(StoreAddr, StoreTy->getPointerTo());

    if (StoreVal->getType()->isPointerTy())
      StoreVal = Builder.CreatePtrToInt(StoreVal, Int64Ty);
    else {
      StoreVal = Builder.CreateBitCast(StoreVal, StoreTy);
      StoreVal = Builder.CreateZExtOrBitCast(StoreVal, Int64Ty);
    }

    Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_stlex
                                       ? Intrinsic::aarch64_stlxr
                                       : Intrinsic::aarch64_stxr,
                                   StoreAddr->getType());
    return Builder.CreateCall(F, {StoreVal, StoreAddr}, "stxr");
  }

  if (BuiltinID == AArch64::BI__builtin_arm_clrex) {
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_clrex);
    return Builder.CreateCall(F);
  }
  if (BuiltinID == AArch64::BI__builtin_thread_pointer) {
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_thread_pointer);
    return Builder.CreateCall(F);
  }

  // CRC32
  Intrinsic::ID CRCIntrinsicID = Intrinsic::not_intrinsic;
  switch (BuiltinID) {
  case AArch64::BI__builtin_arm_crc32b:
    CRCIntrinsicID = Intrinsic::aarch64_crc32b; break;
  case AArch64::BI__builtin_arm_crc32cb:
    CRCIntrinsicID = Intrinsic::aarch64_crc32cb; break;
  case AArch64::BI__builtin_arm_crc32h:
    CRCIntrinsicID = Intrinsic::aarch64_crc32h; break;
  case AArch64::BI__builtin_arm_crc32ch:
    CRCIntrinsicID = Intrinsic::aarch64_crc32ch; break;
  case AArch64::BI__builtin_arm_crc32w:
    CRCIntrinsicID = Intrinsic::aarch64_crc32w; break;
  case AArch64::BI__builtin_arm_crc32cw:
    CRCIntrinsicID = Intrinsic::aarch64_crc32cw; break;
  case AArch64::BI__builtin_arm_crc32d:
    CRCIntrinsicID = Intrinsic::aarch64_crc32x; break;
  case AArch64::BI__builtin_arm_crc32cd:
    CRCIntrinsicID = Intrinsic::aarch64_crc32cx; break;
  }

  if (CRCIntrinsicID != Intrinsic::not_intrinsic) {
    Value *Arg0 = EmitScalarExpr(E->getArg(0));
    Value *Arg1 = EmitScalarExpr(E->getArg(1));
    Function *F = CGM.getIntrinsic(CRCIntrinsicID);

    llvm::Type *DataTy = F->getFunctionType()->getParamType(1);
    Arg1 = Builder.CreateZExtOrBitCast(Arg1, DataTy);

    return Builder.CreateCall(F, {Arg0, Arg1});
  }
  if (BuiltinID == AArch64::BI__builtin_arm_rsr ||
      BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
      BuiltinID == AArch64::BI__builtin_arm_rsrp ||
      BuiltinID == AArch64::BI__builtin_arm_wsr ||
      BuiltinID == AArch64::BI__builtin_arm_wsr64 ||
      BuiltinID == AArch64::BI__builtin_arm_wsrp) {

    bool IsRead = BuiltinID == AArch64::BI__builtin_arm_rsr ||
                  BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
                  BuiltinID == AArch64::BI__builtin_arm_rsrp;

    bool IsPointerBuiltin = BuiltinID == AArch64::BI__builtin_arm_rsrp ||
                            BuiltinID == AArch64::BI__builtin_arm_wsrp;

    bool Is64Bit = BuiltinID != AArch64::BI__builtin_arm_rsr &&
                   BuiltinID != AArch64::BI__builtin_arm_wsr;

    llvm::Type *ValueType;
    llvm::Type *RegisterType = Int64Ty;
    if (IsPointerBuiltin) {
      ValueType = VoidPtrTy;
    } else if (Is64Bit) {
      ValueType = Int64Ty;
    } else {
      ValueType = Int32Ty;
    }

    return EmitSpecialRegisterBuiltin(*this, E, RegisterType, ValueType,
                                      IsRead);
  }

  // Find out if any arguments are required to be integer constant
  // expressions.
  unsigned ICEArguments = 0;
  ASTContext::GetBuiltinTypeError Error;
  getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
  assert(Error == ASTContext::GE_None && "Should not codegen an error");

  llvm::SmallVector<Value*, 4> Ops;
  for (unsigned i = 0, e = E->getNumArgs() - 1; i != e; i++) {
    if ((ICEArguments & (1 << i)) == 0) {
      Ops.push_back(EmitScalarExpr(E->getArg(i)));
    } else {
      // If this is required to be a constant, constant fold it so that we
      // know that the generated intrinsic gets a ConstantInt.
      llvm::APSInt Result;
      bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
      assert(IsConst && "Constant arg isn't actually constant?");
      (void)IsConst;
      Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
    }
  }

  auto SISDMap = makeArrayRef(AArch64SISDIntrinsicMap);
  const NeonIntrinsicInfo *Builtin = findNeonIntrinsicInMap(
      SISDMap, BuiltinID, AArch64SISDIntrinsicsProvenSorted);

  if (Builtin) {
    Value *Result = EmitCommonNeonSISDBuiltinExpr(*this, *Builtin, Ops, E);
    assert(Result && "SISD intrinsic should have been handled");
    return Result;
  }

  llvm::APSInt Result;
  const Expr *Arg = E->getArg(E->getNumArgs() - 1);
  NeonTypeFlags Type(0);
  if (Arg->isIntegerConstantExpr(Result, getContext()))
    // Determine the type of this overloaded NEON intrinsic.
    Type = NeonTypeFlags(Result.getZExtValue());

  bool usgn = Type.isUnsigned();
  bool quad = Type.isQuad();
  // Handle non-overloaded intrinsics first.
  switch (BuiltinID) {
  default: break;
  case NEON::BI__builtin_neon_vldrq_p128: {
    llvm::Type *Int128PTy = llvm::Type::getIntNPtrTy(getLLVMContext(), 128);
    Value *Ptr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)),
                                       Int128PTy);
    return Builder.CreateDefaultAlignedLoad(Ptr);
  }
  case NEON::BI__builtin_neon_vstrq_p128: {
    llvm::Type *Int128PTy = llvm::Type::getIntNPtrTy(getLLVMContext(), 128);
    Value *Ptr = Builder.CreateBitCast(Ops[0], Int128PTy);
    return Builder.CreateDefaultAlignedStore(EmitScalarExpr(E->getArg(1)),
                                             Ptr);
  }
  case NEON::BI__builtin_neon_vcvts_u32_f32:
  case NEON::BI__builtin_neon_vcvtd_u64_f64:
    usgn = true;
    // FALL THROUGH
  case NEON::BI__builtin_neon_vcvts_s32_f32:
  case NEON::BI__builtin_neon_vcvtd_s64_f64: {
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64;
    llvm::Type *InTy = Is64 ? Int64Ty : Int32Ty;
    llvm::Type *FTy = Is64 ? DoubleTy : FloatTy;
    Ops[0] = Builder.CreateBitCast(Ops[0], FTy);
    if (usgn)
      return Builder.CreateFPToUI(Ops[0], InTy);
    return Builder.CreateFPToSI(Ops[0], InTy);
  }
  case NEON::BI__builtin_neon_vcvts_f32_u32:
  case NEON::BI__builtin_neon_vcvtd_f64_u64:
    usgn = true;
    // FALL THROUGH
  case NEON::BI__builtin_neon_vcvts_f32_s32:
  case NEON::BI__builtin_neon_vcvtd_f64_s64: {
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64;
    llvm::Type *InTy = Is64 ? Int64Ty : Int32Ty;
    llvm::Type *FTy = Is64 ? DoubleTy : FloatTy;
    Ops[0] = Builder.CreateBitCast(Ops[0], InTy);
    if (usgn)
      return Builder.CreateUIToFP(Ops[0], FTy);
    return Builder.CreateSIToFP(Ops[0], FTy);
  }
  case NEON::BI__builtin_neon_vpaddd_s64: {
    llvm::Type *Ty = llvm::VectorType::get(Int64Ty, 2);
    Value *Vec = EmitScalarExpr(E->getArg(0));
    // Pairwise addition of a v2i64 is just a scalar add of the two lanes.
    Vec = Builder.CreateBitCast(Vec, Ty, "v2i64");
    llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
    llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
    Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
    Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
    return Builder.CreateAdd(Op0, Op1, "vpaddd");
  }
  case NEON::BI__builtin_neon_vpaddd_f64: {
    llvm::Type *Ty = llvm::VectorType::get(DoubleTy, 2);
    Value *Vec = EmitScalarExpr(E->getArg(0));
    Vec = Builder.CreateBitCast(Vec, Ty, "v2f64");
    llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
    llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
    Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
    Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
    return Builder.CreateFAdd(Op0, Op1, "vpaddd");
  }
  case NEON::BI__builtin_neon_vpadds_f32: {
    llvm::Type *Ty = llvm::VectorType::get(FloatTy, 2);
    Value *Vec = EmitScalarExpr(E->getArg(0));
    Vec = Builder.CreateBitCast(Vec, Ty, "v2f32");
    llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
    llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
    Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
    Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
    return Builder.CreateFAdd(Op0, Op1, "vpaddd");
  }
  case NEON::BI__builtin_neon_vceqzd_s64:
  case NEON::BI__builtin_neon_vceqzd_f64:
  case NEON::BI__builtin_neon_vceqzs_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitAArch64CompareBuiltinExpr(
        Ops[0], ConvertType(E->getCallReturnType(getContext())),
        ICmpInst::FCMP_OEQ, ICmpInst::ICMP_EQ, "vceqz");
  case NEON::BI__builtin_neon_vcgezd_s64:
  case NEON::BI__builtin_neon_vcgezd_f64:
  case NEON::BI__builtin_neon_vcgezs_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitAArch64CompareBuiltinExpr(
        Ops[0], ConvertType(E->getCallReturnType(getContext())),
        ICmpInst::FCMP_OGE, ICmpInst::ICMP_SGE, "vcgez");
  case NEON::BI__builtin_neon_vclezd_s64:
  case NEON::BI__builtin_neon_vclezd_f64:
  case NEON::BI__builtin_neon_vclezs_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitAArch64CompareBuiltinExpr(
        Ops[0], ConvertType(E->getCallReturnType(getContext())),
        ICmpInst::FCMP_OLE, ICmpInst::ICMP_SLE, "vclez");
  case NEON::BI__builtin_neon_vcgtzd_s64:
  case NEON::BI__builtin_neon_vcgtzd_f64:
  case NEON::BI__builtin_neon_vcgtzs_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitAArch64CompareBuiltinExpr(
        Ops[0], ConvertType(E->getCallReturnType(getContext())),
        ICmpInst::FCMP_OGT, ICmpInst::ICMP_SGT, "vcgtz");
  case NEON::BI__builtin_neon_vcltzd_s64:
  case NEON::BI__builtin_neon_vcltzd_f64:
  case NEON::BI__builtin_neon_vcltzs_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitAArch64CompareBuiltinExpr(
        Ops[0], ConvertType(E->getCallReturnType(getContext())),
        ICmpInst::FCMP_OLT, ICmpInst::ICMP_SLT, "vcltz");

  case NEON::BI__builtin_neon_vceqzd_u64: {
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
    Ops[0] =
        Builder.CreateICmpEQ(Ops[0], llvm::Constant::getNullValue(Int64Ty));
    return Builder.CreateSExt(Ops[0], Int64Ty, "vceqzd");
  }
  case NEON::BI__builtin_neon_vceqd_f64:
  case NEON::BI__builtin_neon_vcled_f64:
  case NEON::BI__builtin_neon_vcltd_f64:
  case NEON::BI__builtin_neon_vcged_f64:
  case NEON::BI__builtin_neon_vcgtd_f64: {
    llvm::CmpInst::Predicate P;
    switch (BuiltinID) {
    default: llvm_unreachable("missing builtin ID in switch!");
    case NEON::BI__builtin_neon_vceqd_f64: P = llvm::FCmpInst::FCMP_OEQ; break;
    case NEON::BI__builtin_neon_vcled_f64: P = llvm::FCmpInst::FCMP_OLE; break;
    case NEON::BI__builtin_neon_vcltd_f64: P = llvm::FCmpInst::FCMP_OLT; break;
    case NEON::BI__builtin_neon_vcged_f64: P = llvm::FCmpInst::FCMP_OGE; break;
    case NEON::BI__builtin_neon_vcgtd_f64: P = llvm::FCmpInst::FCMP_OGT; break;
    }
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
    Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy);
    Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]);
    return Builder.CreateSExt(Ops[0], Int64Ty, "vcmpd");
  }
  case NEON::BI__builtin_neon_vceqs_f32:
  case NEON::BI__builtin_neon_vcles_f32:
  case NEON::BI__builtin_neon_vclts_f32:
  case NEON::BI__builtin_neon_vcges_f32:
  case NEON::BI__builtin_neon_vcgts_f32: {
    llvm::CmpInst::Predicate P;
    switch (BuiltinID) {
    default: llvm_unreachable("missing builtin ID in switch!");
    case NEON::BI__builtin_neon_vceqs_f32: P = llvm::FCmpInst::FCMP_OEQ; break;
    case NEON::BI__builtin_neon_vcles_f32: P = llvm::FCmpInst::FCMP_OLE; break;
    case NEON::BI__builtin_neon_vclts_f32: P = llvm::FCmpInst::FCMP_OLT; break;
    case NEON::BI__builtin_neon_vcges_f32: P = llvm::FCmpInst::FCMP_OGE; break;
    case NEON::BI__builtin_neon_vcgts_f32: P = llvm::FCmpInst::FCMP_OGT; break;
    }
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy);
    Ops[1] = Builder.CreateBitCast(Ops[1], FloatTy);
    Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]);
    return Builder.CreateSExt(Ops[0], Int32Ty, "vcmpd");
  }
  case NEON::BI__builtin_neon_vceqd_s64:
  case NEON::BI__builtin_neon_vceqd_u64:
  case NEON::BI__builtin_neon_vcgtd_s64:
  case NEON::BI__builtin_neon_vcgtd_u64:
  case NEON::BI__builtin_neon_vcltd_s64:
  case NEON::BI__builtin_neon_vcltd_u64:
  case NEON::BI__builtin_neon_vcged_u64:
  case NEON::BI__builtin_neon_vcged_s64:
  case NEON::BI__builtin_neon_vcled_u64:
  case NEON::BI__builtin_neon_vcled_s64: {
    llvm::CmpInst::Predicate P;
    switch (BuiltinID) {
    default: llvm_unreachable("missing builtin ID in switch!");
    case NEON::BI__builtin_neon_vceqd_s64:
    case NEON::BI__builtin_neon_vceqd_u64: P = llvm::ICmpInst::ICMP_EQ; break;
    case NEON::BI__builtin_neon_vcgtd_s64: P = llvm::ICmpInst::ICMP_SGT; break;
    case NEON::BI__builtin_neon_vcgtd_u64: P = llvm::ICmpInst::ICMP_UGT; break;
    case NEON::BI__builtin_neon_vcltd_s64: P = llvm::ICmpInst::ICMP_SLT; break;
    case NEON::BI__builtin_neon_vcltd_u64: P = llvm::ICmpInst::ICMP_ULT; break;
    case NEON::BI__builtin_neon_vcged_u64: P = llvm::ICmpInst::ICMP_UGE; break;
    case NEON::BI__builtin_neon_vcged_s64: P = llvm::ICmpInst::ICMP_SGE; break;
    case NEON::BI__builtin_neon_vcled_u64: P = llvm::ICmpInst::ICMP_ULE; break;
    case NEON::BI__builtin_neon_vcled_s64: P = llvm::ICmpInst::ICMP_SLE; break;
    }
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
    Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
    Ops[0] = Builder.CreateICmp(P, Ops[0], Ops[1]);
    return Builder.CreateSExt(Ops[0], Int64Ty, "vceqd");
  }
  case NEON::BI__builtin_neon_vtstd_s64:
  case NEON::BI__builtin_neon_vtstd_u64: {
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
    Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
    Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
    Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
                                llvm::Constant::getNullValue(Int64Ty));
    return Builder.CreateSExt(Ops[0], Int64Ty, "vtstd");
  }
  case NEON::BI__builtin_neon_vset_lane_i8:
  case NEON::BI__builtin_neon_vset_lane_i16:
  case NEON::BI__builtin_neon_vset_lane_i32:
  case NEON::BI__builtin_neon_vset_lane_i64:
  case NEON::BI__builtin_neon_vset_lane_f32:
  case NEON::BI__builtin_neon_vsetq_lane_i8:
  case NEON::BI__builtin_neon_vsetq_lane_i16:
  case NEON::BI__builtin_neon_vsetq_lane_i32:
  case NEON::BI__builtin_neon_vsetq_lane_i64:
  case NEON::BI__builtin_neon_vsetq_lane_f32:
    Ops.push_back(EmitScalarExpr(E->getArg(2)));
    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
  case NEON::BI__builtin_neon_vset_lane_f64:
    // The vector type needs a cast for the v1f64 variant.
    Ops[1] = Builder.CreateBitCast(Ops[1],
                                   llvm::VectorType::get(DoubleTy, 1));
    Ops.push_back(EmitScalarExpr(E->getArg(2)));
    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
  case NEON::BI__builtin_neon_vsetq_lane_f64:
    // The vector type needs a cast for the v2f64 variant.
    Ops[1] = Builder.CreateBitCast(Ops[1],
                                   llvm::VectorType::get(DoubleTy, 2));
    Ops.push_back(EmitScalarExpr(E->getArg(2)));
    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
  case NEON::BI__builtin_neon_vget_lane_i8:
  case NEON::BI__builtin_neon_vdupb_lane_i8:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int8Ty, 8));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vgetq_lane_i8:
  case NEON::BI__builtin_neon_vdupb_laneq_i8:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int8Ty, 16));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vget_lane_i16:
  case NEON::BI__builtin_neon_vduph_lane_i16:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int16Ty, 4));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vgetq_lane_i16:
  case NEON::BI__builtin_neon_vduph_laneq_i16:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int16Ty, 8));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vget_lane_i32:
  case NEON::BI__builtin_neon_vdups_lane_i32:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 2));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vdups_lane_f32:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(FloatTy, 2));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vdups_lane");
  case NEON::BI__builtin_neon_vgetq_lane_i32:
  case NEON::BI__builtin_neon_vdups_laneq_i32:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 4));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vget_lane_i64:
  case NEON::BI__builtin_neon_vdupd_lane_i64:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 1));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vdupd_lane_f64:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(DoubleTy, 1));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vdupd_lane");
  case NEON::BI__builtin_neon_vgetq_lane_i64:
  case NEON::BI__builtin_neon_vdupd_laneq_i64:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 2));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vget_lane_f32:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(FloatTy, 2));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vget_lane_f64:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(DoubleTy, 1));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vget_lane");
  case NEON::BI__builtin_neon_vgetq_lane_f32:
  case NEON::BI__builtin_neon_vdups_laneq_f32:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(FloatTy, 4));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vgetq_lane_f64:
  case NEON::BI__builtin_neon_vdupd_laneq_f64:
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::VectorType::get(DoubleTy, 2));
    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
                                        "vgetq_lane");
  case NEON::BI__builtin_neon_vaddd_s64:
  case NEON::BI__builtin_neon_vaddd_u64:
    return Builder.CreateAdd(Ops[0], EmitScalarExpr(E->getArg(1)), "vaddd");
  case NEON::BI__builtin_neon_vsubd_s64:
  case NEON::BI__builtin_neon_vsubd_u64:
    return Builder.CreateSub(Ops[0], EmitScalarExpr(E->getArg(1)), "vsubd");
  case NEON::BI__builtin_neon_vqdmlalh_s16:
  case NEON::BI__builtin_neon_vqdmlslh_s16: {
    // The doubling multiply is done on widened scalars, then the scalar
    // product in lane 0 is saturating-added to (or subtracted from) the
    // accumulator.
    SmallVector<Value *, 2> ProductOps;
    ProductOps.push_back(vectorWrapScalar16(Ops[1]));
    ProductOps.push_back(vectorWrapScalar16(EmitScalarExpr(E->getArg(2))));
    llvm::Type *VTy = llvm::VectorType::get(Int32Ty, 4);
    Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull,
                                           VTy),
                          ProductOps, "vqdmlXl");
    Constant *CI = ConstantInt::get(SizeTy, 0);
    Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0");

    unsigned AccumInt = BuiltinID == NEON::BI__builtin_neon_vqdmlalh_s16
                                   ? Intrinsic::aarch64_neon_sqadd
                                   : Intrinsic::aarch64_neon_sqsub;
    return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int32Ty), Ops, "vqdmlXl");
  }
  case NEON::BI__builtin_neon_vqshlud_n_s64: {
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty);
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqshlu,
                                         Int64Ty),
                        Ops, "vqshlu_n");
  }
  case NEON::BI__builtin_neon_vqshld_n_u64:
  case NEON::BI__builtin_neon_vqshld_n_s64: {
    unsigned Int = BuiltinID == NEON::BI__builtin_neon_vqshld_n_u64
                                   ? Intrinsic::aarch64_neon_uqshl
                                   : Intrinsic::aarch64_neon_sqshl;
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty);
    return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vqshl_n");
  }
  case NEON::BI__builtin_neon_vrshrd_n_u64:
  case NEON::BI__builtin_neon_vrshrd_n_s64: {
    unsigned Int = BuiltinID == NEON::BI__builtin_neon_vrshrd_n_u64
                                   ? Intrinsic::aarch64_neon_urshl
                                   : Intrinsic::aarch64_neon_srshl;
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    // A negative shift amount in the rounding-shift intrinsics means a
    // right shift.
    int SV = cast<ConstantInt>(Ops[1])->getSExtValue();
    Ops[1] = ConstantInt::get(Int64Ty, -SV);
    return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vrshr_n");
  }
  case NEON::BI__builtin_neon_vrsrad_n_u64:
  case NEON::BI__builtin_neon_vrsrad_n_s64: {
    unsigned Int = BuiltinID == NEON::BI__builtin_neon_vrsrad_n_u64
                                   ? Intrinsic::aarch64_neon_urshl
                                   : Intrinsic::aarch64_neon_srshl;
    Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
    Ops.push_back(Builder.CreateNeg(EmitScalarExpr(E->getArg(2))));
    Ops[1] = Builder.CreateCall(CGM.getIntrinsic(Int, Int64Ty),
                                {Ops[1], Builder.CreateSExt(Ops[2], Int64Ty)});
    return Builder.CreateAdd(Ops[0], Builder.CreateBitCast(Ops[1], Int64Ty));
  }
  case NEON::BI__builtin_neon_vshld_n_s64:
  case NEON::BI__builtin_neon_vshld_n_u64: {
    llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
    return Builder.CreateShl(
        Ops[0], ConstantInt::get(Int64Ty, Amt->getZExtValue()), "shld_n");
  }
  case NEON::BI__builtin_neon_vshrd_n_s64: {
    llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
    return Builder.CreateAShr(
        Ops[0], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63),
                                                   Amt->getZExtValue())),
        "shrd_n");
  }
  case NEON::BI__builtin_neon_vshrd_n_u64: {
    llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
    uint64_t ShiftAmt = Amt->getZExtValue();
    // Right-shifting an unsigned value by its full width yields 0.
    if (ShiftAmt == 64)
      return ConstantInt::get(Int64Ty, 0);
    return Builder.CreateLShr(Ops[0], ConstantInt::get(Int64Ty, ShiftAmt),
                              "shrd_n");
  }
  case NEON::BI__builtin_neon_vsrad_n_s64: {
    llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(2)));
    Ops[1] = Builder.CreateAShr(
        Ops[1], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63),
                                                   Amt->getZExtValue())),
        "shrd_n");
    return Builder.CreateAdd(Ops[0], Ops[1]);
  }
  case NEON::BI__builtin_neon_vsrad_n_u64: {
    llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(2)));
    uint64_t ShiftAmt = Amt->getZExtValue();
    // Right-shifting an unsigned value by its full width yields 0, and since
    // Op + 0 == Op, return Ops[0] unchanged.
    if (ShiftAmt == 64)
      return Ops[0];
    Ops[1] = Builder.CreateLShr(Ops[1], ConstantInt::get(Int64Ty, ShiftAmt),
                                "shrd_n");
    return Builder.CreateAdd(Ops[0], Ops[1]);
  }
  case NEON::BI__builtin_neon_vqdmlalh_lane_s16:
  case NEON::BI__builtin_neon_vqdmlalh_laneq_s16:
  case NEON::BI__builtin_neon_vqdmlslh_lane_s16:
  case NEON::BI__builtin_neon_vqdmlslh_laneq_s16: {
    Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)),
                                          "lane");
    SmallVector<Value *, 2> ProductOps;
    ProductOps.push_back(vectorWrapScalar16(Ops[1]));
    ProductOps.push_back(vectorWrapScalar16(Ops[2]));
    llvm::Type *VTy = llvm::VectorType::get(Int32Ty, 4);
    Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull,
                                           VTy),
                          ProductOps, "vqdmlXl");
    Constant *CI = ConstantInt::get(SizeTy, 0);
    Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0");
    Ops.pop_back();

    unsigned AccInt = (BuiltinID == NEON::BI__builtin_neon_vqdmlalh_lane_s16 ||
                       BuiltinID == NEON::BI__builtin_neon_vqdmlalh_laneq_s16)
                          ? Intrinsic::aarch64_neon_sqadd
                          : Intrinsic::aarch64_neon_sqsub;
    return EmitNeonCall(CGM.getIntrinsic(AccInt, Int32Ty), Ops, "vqdmlXl");
  }
  case NEON::BI__builtin_neon_vqdmlals_s32:
  case NEON::BI__builtin_neon_vqdmlsls_s32: {
    SmallVector<Value *, 2> ProductOps;
    ProductOps.push_back(Ops[1]);
    ProductOps.push_back(EmitScalarExpr(E->getArg(2)));
    Ops[1] =
        EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmulls_scalar),
                     ProductOps, "vqdmlXl");

    unsigned AccumInt = BuiltinID == NEON::BI__builtin_neon_vqdmlals_s32
                                   ? Intrinsic::aarch64_neon_sqadd
                                   : Intrinsic::aarch64_neon_sqsub;
    return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int64Ty), Ops, "vqdmlXl");
  }
  case NEON::BI__builtin_neon_vqdmlals_lane_s32:
  case NEON::BI__builtin_neon_vqdmlals_laneq_s32:
  case NEON::BI__builtin_neon_vqdmlsls_lane_s32:
  case NEON::BI__builtin_neon_vqdmlsls_laneq_s32: {
    Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)),
                                          "lane");
    SmallVector<Value *, 2> ProductOps;
    ProductOps.push_back(Ops[1]);
    ProductOps.push_back(Ops[2]);
    Ops[1] =
        EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmulls_scalar),
                     ProductOps, "vqdmlXl");
    Ops.pop_back();

    unsigned AccInt = (BuiltinID == NEON::BI__builtin_neon_vqdmlals_lane_s32 ||
                       BuiltinID == NEON::BI__builtin_neon_vqdmlals_laneq_s32)
                          ? Intrinsic::aarch64_neon_sqadd
                          : Intrinsic::aarch64_neon_sqsub;
    return EmitNeonCall(CGM.getIntrinsic(AccInt, Int64Ty), Ops, "vqdmlXl");
  }
  }
  Builtin = findNeonIntrinsicInMap(AArch64SIMDIntrinsicMap, BuiltinID,
                                   AArch64SIMDIntrinsicsProvenSorted);
  if (Builtin)
    return EmitCommonNeonBuiltinExpr(
        Builtin->BuiltinID, Builtin->LLVMIntrinsic, Builtin->AltLLVMIntrinsic,
        Builtin->NameHint, Builtin->TypeModifier, E, Ops,
        /*never use addresses*/ Address::invalid(), Address::invalid());
  if (Value *V = EmitAArch64TblBuiltinExpr(*this, BuiltinID, E, Ops))
    return V;

  unsigned Int;
  switch (BuiltinID) {
  default: return nullptr;
  case NEON::BI__builtin_neon_vbsl_v:
  case NEON::BI__builtin_neon_vbslq_v: {
    llvm::Type *BitTy = llvm::VectorType::getInteger(VTy);
    Ops[0] = Builder.CreateBitCast(Ops[0], BitTy, "vbsl");
    Ops[1] = Builder.CreateBitCast(Ops[1], BitTy, "vbsl");
    Ops[2] = Builder.CreateBitCast(Ops[2], BitTy, "vbsl");

    // Bitwise select: (Ops[1] & mask) | (Ops[2] & ~mask).
    Ops[1] = Builder.CreateAnd(Ops[0], Ops[1], "vbsl");
    Ops[2] = Builder.CreateAnd(Builder.CreateNot(Ops[0]), Ops[2], "vbsl");
    Ops[0] = Builder.CreateOr(Ops[1], Ops[2], "vbsl");
    return Builder.CreateBitCast(Ops[0], Ty);
  }
  case NEON::BI__builtin_neon_vfma_lane_v:
  case NEON::BI__builtin_neon_vfmaq_lane_v: { // Only used for FP types
    // The ARM builtins (and instructions) have the addend as the first
    // operand, but the 'fma' intrinsics have it last. Swap it around here.
    Value *Addend = Ops[0];
    Value *Multiplicand = Ops[1];
    Value *LaneSource = Ops[2];
    Ops[0] = Multiplicand;
    Ops[1] = LaneSource;
    Ops[2] = Addend;

    // Now adjust things to handle the lane access.
    llvm::Type *SourceTy = BuiltinID == NEON::BI__builtin_neon_vfmaq_lane_v ?
      llvm::VectorType::get(VTy->getElementType(), VTy->getNumElements() / 2) :
      VTy;
    llvm::Constant *cst = cast<Constant>(Ops[3]);
    Value *SV = llvm::ConstantVector::getSplat(VTy->getNumElements(), cst);
    Ops[1] = Builder.CreateBitCast(Ops[1], SourceTy);
    Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV, "lane");

    Ops.pop_back();
    Value *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
    return EmitNeonCall(cast<llvm::Function>(F), Ops, "fmla");
  }
  case NEON::BI__builtin_neon_vfma_laneq_v: {
    llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
    // v1f64 fma should be mapped to Neon scalar f64 fma.
    if (VTy && VTy->getElementType() == DoubleTy) {
      Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
      Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy);
      llvm::Type *VTy = GetNeonType(this,
        NeonTypeFlags(NeonTypeFlags::Float64, false, true));
      Ops[2] = Builder.CreateBitCast(Ops[2], VTy);
      Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract");
      Value *F = CGM.getIntrinsic(Intrinsic::fma, DoubleTy);
      Value *Result = Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
      return Builder.CreateBitCast(Result, Ty);
    }
    Value *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);

    llvm::Type *STy = llvm::VectorType::get(VTy->getElementType(),
                                            VTy->getNumElements() * 2);
    Ops[2] = Builder.CreateBitCast(Ops[2], STy);
    Value *SV = llvm::ConstantVector::getSplat(VTy->getNumElements(),
                                               cast<ConstantInt>(Ops[3]));
    Ops[2] = Builder.CreateShuffleVector(Ops[2], Ops[2], SV, "lane");

    return Builder.CreateCall(F, {Ops[2], Ops[1], Ops[0]});
  }
  case NEON::BI__builtin_neon_vfmaq_laneq_v: {
    Value *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
    Ops[2] = EmitNeonSplat(Ops[2], cast<ConstantInt>(Ops[3]));
    return Builder.CreateCall(F, {Ops[2], Ops[1], Ops[0]});
  }
  case NEON::BI__builtin_neon_vfmas_lane_f32:
  case NEON::BI__builtin_neon_vfmas_laneq_f32:
  case NEON::BI__builtin_neon_vfmad_lane_f64:
  case NEON::BI__builtin_neon_vfmad_laneq_f64: {
    Ops.push_back(EmitScalarExpr(E->getArg(3)));
    llvm::Type *Ty = ConvertType(E->getCallReturnType(getContext()));
    Value *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
    Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract");
    return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
  }
  case NEON::BI__builtin_neon_vfms_v:
  case NEON::BI__builtin_neon_vfmsq_v: { // Only used for FP types
    // The ARM builtins (and instructions) have the addend as the first
    // operand, but the 'fma' intrinsics have it last. Swap it around here.
    Value *Subtrahend = Ops[0];
    Value *Multiplicand = Ops[2];
    Ops[0] = Multiplicand;
    Ops[2] = Subtrahend;
    Ops[1] = Builder.CreateFNeg(Ops[1]);
    Int = Intrinsic::fma;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "fmls");
  }
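  // There is no separate fused-multiply-subtract intrinsic: since
  // fma(-x, y, z) == z - x*y, negating one multiplicand and reusing llvm.fma
  // yields the subtract form exactly.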
  case NEON::BI__builtin_neon_vmull_v:
    // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
    Int = usgn ? Intrinsic::aarch64_neon_umull : Intrinsic::aarch64_neon_smull;
    if (Type.isPoly()) Int = Intrinsic::aarch64_neon_pmull;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
  case NEON::BI__builtin_neon_vmax_v:
  case NEON::BI__builtin_neon_vmaxq_v:
    Int = usgn ? Intrinsic::aarch64_neon_umax : Intrinsic::aarch64_neon_smax;
    if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmax;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax");
  case NEON::BI__builtin_neon_vmin_v:
  case NEON::BI__builtin_neon_vminq_v:
    Int = usgn ? Intrinsic::aarch64_neon_umin : Intrinsic::aarch64_neon_smin;
    if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmin;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin");
  case NEON::BI__builtin_neon_vabd_v:
  case NEON::BI__builtin_neon_vabdq_v:
    Int = usgn ? Intrinsic::aarch64_neon_uabd : Intrinsic::aarch64_neon_sabd;
    if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fabd;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd");
  case NEON::BI__builtin_neon_vpadal_v:
  case NEON::BI__builtin_neon_vpadalq_v: {
    unsigned ArgElts = VTy->getNumElements();
    llvm::IntegerType *EltTy = cast<IntegerType>(VTy->getElementType());
    unsigned BitWidth = EltTy->getBitWidth();
    llvm::Type *ArgTy = llvm::VectorType::get(
        llvm::IntegerType::get(getLLVMContext(), BitWidth / 2), 2 * ArgElts);
    llvm::Type *Tys[2] = { VTy, ArgTy };
    Int = usgn ? Intrinsic::aarch64_neon_uaddlp : Intrinsic::aarch64_neon_saddlp;
    SmallVector<llvm::Value*, 1> TmpOps;
    TmpOps.push_back(Ops[1]);
    Function *F = CGM.getIntrinsic(Int, Tys);
    llvm::Value *tmp = EmitNeonCall(F, TmpOps, "vpadal");
    llvm::Value *addend = Builder.CreateBitCast(Ops[0], tmp->getType());
    return Builder.CreateAdd(tmp, addend);
  }
  case NEON::BI__builtin_neon_vpmin_v:
  case NEON::BI__builtin_neon_vpminq_v:
    Int = usgn ? Intrinsic::aarch64_neon_uminp : Intrinsic::aarch64_neon_sminp;
    if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fminp;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin");
  case NEON::BI__builtin_neon_vpmax_v:
  case NEON::BI__builtin_neon_vpmaxq_v:
    Int = usgn ? Intrinsic::aarch64_neon_umaxp : Intrinsic::aarch64_neon_smaxp;
    if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmaxp;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax");
  case NEON::BI__builtin_neon_vminnm_v:
  case NEON::BI__builtin_neon_vminnmq_v:
    Int = Intrinsic::aarch64_neon_fminnm;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vminnm");
  case NEON::BI__builtin_neon_vmaxnm_v:
  case NEON::BI__builtin_neon_vmaxnmq_v:
    Int = Intrinsic::aarch64_neon_fmaxnm;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmaxnm");
  case NEON::BI__builtin_neon_vrecpss_f32: {
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_frecps, FloatTy),
                        Ops, "vrecps");
  }
  case NEON::BI__builtin_neon_vrecpsd_f64: {
    Ops.push_back(EmitScalarExpr(E->getArg(1)));
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_frecps, DoubleTy),
                        Ops, "vrecps");
  }
  case NEON::BI__builtin_neon_vqshrun_n_v:
    Int = Intrinsic::aarch64_neon_sqshrun;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrun_n");
  case NEON::BI__builtin_neon_vqrshrun_n_v:
    Int = Intrinsic::aarch64_neon_sqrshrun;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrun_n");
  case NEON::BI__builtin_neon_vqshrn_n_v:
    Int = usgn ? Intrinsic::aarch64_neon_uqshrn : Intrinsic::aarch64_neon_sqshrn;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n");
  case NEON::BI__builtin_neon_vrshrn_n_v:
    Int = Intrinsic::aarch64_neon_rshrn;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshrn_n");
  case NEON::BI__builtin_neon_vqrshrn_n_v:
    Int = usgn ? Intrinsic::aarch64_neon_uqrshrn : Intrinsic::aarch64_neon_sqrshrn;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n");
  case NEON::BI__builtin_neon_vrnda_v:
  case NEON::BI__builtin_neon_vrndaq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::round, Ty), Ops, "vrnda");
  case NEON::BI__builtin_neon_vrndi_v:
  case NEON::BI__builtin_neon_vrndiq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::nearbyint, Ty), Ops, "vrndi");
  case NEON::BI__builtin_neon_vrndm_v:
  case NEON::BI__builtin_neon_vrndmq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::floor, Ty), Ops, "vrndm");
  case NEON::BI__builtin_neon_vrndn_v:
  case NEON::BI__builtin_neon_vrndnq_v:
    Int = Intrinsic::aarch64_neon_frintn;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndn");
  case NEON::BI__builtin_neon_vrndp_v:
  case NEON::BI__builtin_neon_vrndpq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::ceil, Ty), Ops, "vrndp");
  case NEON::BI__builtin_neon_vrndx_v:
  case NEON::BI__builtin_neon_vrndxq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::rint, Ty), Ops, "vrndx");
  case NEON::BI__builtin_neon_vrnd_v:
  case NEON::BI__builtin_neon_vrndq_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::trunc, Ty), Ops, "vrndz");
  case NEON::BI__builtin_neon_vceqz_v:
  case NEON::BI__builtin_neon_vceqzq_v:
    return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OEQ,
                                         ICmpInst::ICMP_EQ, "vceqz");
  case NEON::BI__builtin_neon_vcgez_v:
  case NEON::BI__builtin_neon_vcgezq_v:
    return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGE,
                                         ICmpInst::ICMP_SGE, "vcgez");
  case NEON::BI__builtin_neon_vclez_v:
  case NEON::BI__builtin_neon_vclezq_v:
    return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLE,
                                         ICmpInst::ICMP_SLE, "vclez");
  case NEON::BI__builtin_neon_vcgtz_v:
  case NEON::BI__builtin_neon_vcgtzq_v:
    return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGT,
                                         ICmpInst::ICMP_SGT, "vcgtz");
  case NEON::BI__builtin_neon_vcltz_v:
  case NEON::BI__builtin_neon_vcltzq_v:
    return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLT,
                                         ICmpInst::ICMP_SLT, "vcltz");
  case NEON::BI__builtin_neon_vcvt_f64_v:
  case NEON::BI__builtin_neon_vcvtq_f64_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float64, false, quad));
    return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
                : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
  case NEON::BI__builtin_neon_vcvt_f64_f32: {
    assert(Type.getEltType() == NeonTypeFlags::Float64 && quad &&
           "unexpected vcvt_f64_f32 builtin");
    NeonTypeFlags SrcFlag = NeonTypeFlags(NeonTypeFlags::Float32, false, false);
    Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag));
    return Builder.CreateFPExt(Ops[0], Ty, "vcvt");
  }
  case NEON::BI__builtin_neon_vcvt_f32_f64: {
    assert(Type.getEltType() == NeonTypeFlags::Float32 &&
           "unexpected vcvt_f32_f64 builtin");
    NeonTypeFlags SrcFlag = NeonTypeFlags(NeonTypeFlags::Float64, false, true);
    Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag));
    return Builder.CreateFPTrunc(Ops[0], Ty, "vcvt");
  }
  case NEON::BI__builtin_neon_vcvt_s32_v:
  case NEON::BI__builtin_neon_vcvt_u32_v:
  case NEON::BI__builtin_neon_vcvt_s64_v:
  case NEON::BI__builtin_neon_vcvt_u64_v:
  case NEON::BI__builtin_neon_vcvtq_s32_v:
  case NEON::BI__builtin_neon_vcvtq_u32_v:
  case NEON::BI__builtin_neon_vcvtq_s64_v:
  case NEON::BI__builtin_neon_vcvtq_u64_v: {
    Ops[0] = Builder.CreateBitCast(Ops[0], GetFloatNeonType(this, Type));
    if (usgn)
      return Builder.CreateFPToUI(Ops[0], Ty);
    return Builder.CreateFPToSI(Ops[0], Ty);
  }
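  // These conversions use plain fptoui/fptosi instructions (round toward
  // zero); the explicitly-rounded variants below (vcvta/vcvtm/vcvtn/vcvtp)
  // must go through dedicated aarch64.neon.fcvt* intrinsics instead.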
  case NEON::BI__builtin_neon_vcvta_s32_v:
  case NEON::BI__builtin_neon_vcvtaq_s32_v:
  case NEON::BI__builtin_neon_vcvta_u32_v:
  case NEON::BI__builtin_neon_vcvtaq_u32_v:
  case NEON::BI__builtin_neon_vcvta_s64_v:
  case NEON::BI__builtin_neon_vcvtaq_s64_v:
  case NEON::BI__builtin_neon_vcvta_u64_v:
  case NEON::BI__builtin_neon_vcvtaq_u64_v: {
    Int = usgn ? Intrinsic::aarch64_neon_fcvtau : Intrinsic::aarch64_neon_fcvtas;
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvta");
  }
  case NEON::BI__builtin_neon_vcvtm_s32_v:
  case NEON::BI__builtin_neon_vcvtmq_s32_v:
  case NEON::BI__builtin_neon_vcvtm_u32_v:
  case NEON::BI__builtin_neon_vcvtmq_u32_v:
  case NEON::BI__builtin_neon_vcvtm_s64_v:
  case NEON::BI__builtin_neon_vcvtmq_s64_v:
  case NEON::BI__builtin_neon_vcvtm_u64_v:
  case NEON::BI__builtin_neon_vcvtmq_u64_v: {
    Int = usgn ? Intrinsic::aarch64_neon_fcvtmu : Intrinsic::aarch64_neon_fcvtms;
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtm");
  }
  case NEON::BI__builtin_neon_vcvtn_s32_v:
  case NEON::BI__builtin_neon_vcvtnq_s32_v:
  case NEON::BI__builtin_neon_vcvtn_u32_v:
  case NEON::BI__builtin_neon_vcvtnq_u32_v:
  case NEON::BI__builtin_neon_vcvtn_s64_v:
  case NEON::BI__builtin_neon_vcvtnq_s64_v:
  case NEON::BI__builtin_neon_vcvtn_u64_v:
  case NEON::BI__builtin_neon_vcvtnq_u64_v: {
    Int = usgn ? Intrinsic::aarch64_neon_fcvtnu : Intrinsic::aarch64_neon_fcvtns;
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtn");
  }
  case NEON::BI__builtin_neon_vcvtp_s32_v:
  case NEON::BI__builtin_neon_vcvtpq_s32_v:
  case NEON::BI__builtin_neon_vcvtp_u32_v:
  case NEON::BI__builtin_neon_vcvtpq_u32_v:
  case NEON::BI__builtin_neon_vcvtp_s64_v:
  case NEON::BI__builtin_neon_vcvtpq_s64_v:
  case NEON::BI__builtin_neon_vcvtp_u64_v:
  case NEON::BI__builtin_neon_vcvtpq_u64_v: {
    Int = usgn ? Intrinsic::aarch64_neon_fcvtpu : Intrinsic::aarch64_neon_fcvtps;
    llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtp");
  }
  case NEON::BI__builtin_neon_vmulx_v:
  case NEON::BI__builtin_neon_vmulxq_v: {
    Int = Intrinsic::aarch64_neon_fmulx;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmulx");
  }
  case NEON::BI__builtin_neon_vmul_lane_v:
  case NEON::BI__builtin_neon_vmul_laneq_v: {
    // v1f64 vmul_lane should be mapped to Neon scalar mul lane.
    bool Quad = false;
    if (BuiltinID == NEON::BI__builtin_neon_vmul_laneq_v)
      Quad = true;
    Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
    llvm::Type *VTy = GetNeonType(this,
      NeonTypeFlags(NeonTypeFlags::Float64, false, Quad));
    Ops[1] = Builder.CreateBitCast(Ops[1], VTy);
    Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2], "extract");
    Value *Result = Builder.CreateFMul(Ops[0], Ops[1]);
    return Builder.CreateBitCast(Result, Ty);
  }
  case NEON::BI__builtin_neon_vnegd_s64:
    return Builder.CreateNeg(EmitScalarExpr(E->getArg(0)), "vnegd");
  case NEON::BI__builtin_neon_vpmaxnm_v:
  case NEON::BI__builtin_neon_vpmaxnmq_v: {
    Int = Intrinsic::aarch64_neon_fmaxnmp;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmaxnm");
  }
  case NEON::BI__builtin_neon_vpminnm_v:
  case NEON::BI__builtin_neon_vpminnmq_v: {
    Int = Intrinsic::aarch64_neon_fminnmp;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpminnm");
  }
  case NEON::BI__builtin_neon_vsqrt_v:
  case NEON::BI__builtin_neon_vsqrtq_v: {
    Int = Intrinsic::sqrt;
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqrt");
  }
  case NEON::BI__builtin_neon_vrbit_v:
  case NEON::BI__builtin_neon_vrbitq_v: {
    Int = Intrinsic::aarch64_neon_rbit;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrbit");
  }
  case NEON::BI__builtin_neon_vaddv_u8:
    // FIXME: These are handled by the AArch64 scalar code.
    usgn = true;
    // FALLTHROUGH
  case NEON::BI__builtin_neon_vaddv_s8: {
    Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vaddv_u16:
    usgn = true;
    // FALLTHROUGH
  case NEON::BI__builtin_neon_vaddv_s16: {
    Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vaddvq_u8:
    usgn = true;
    // FALLTHROUGH
  case NEON::BI__builtin_neon_vaddvq_s8: {
    Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vaddvq_u16:
    usgn = true;
    // FALLTHROUGH
  case NEON::BI__builtin_neon_vaddvq_s16: {
    Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vmaxv_u8: {
    Int = Intrinsic::aarch64_neon_umaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vmaxv_u16: {
    Int = Intrinsic::aarch64_neon_umaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vmaxvq_u8: {
    Int = Intrinsic::aarch64_neon_umaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vmaxvq_u16: {
    Int = Intrinsic::aarch64_neon_umaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vmaxv_s8: {
    Int = Intrinsic::aarch64_neon_smaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vmaxv_s16: {
    Int = Intrinsic::aarch64_neon_smaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vmaxvq_s8: {
    Int = Intrinsic::aarch64_neon_smaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vmaxvq_s16: {
    Int = Intrinsic::aarch64_neon_smaxv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vminv_u8: {
    Int = Intrinsic::aarch64_neon_uminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vminv_u16: {
    Int = Intrinsic::aarch64_neon_uminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vminvq_u8: {
    Int = Intrinsic::aarch64_neon_uminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vminvq_u16: {
    Int = Intrinsic::aarch64_neon_uminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vminv_s8: {
    Int = Intrinsic::aarch64_neon_sminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vminv_s16: {
    Int = Intrinsic::aarch64_neon_sminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vminvq_s8: {
    Int = Intrinsic::aarch64_neon_sminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int8Ty);
  }
  case NEON::BI__builtin_neon_vminvq_s16: {
    Int = Intrinsic::aarch64_neon_sminv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vmul_n_f64: {
    Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
    Value *RHS = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)), DoubleTy);
    return Builder.CreateFMul(Ops[0], RHS);
  }
  case NEON::BI__builtin_neon_vaddlv_u8: {
    Int = Intrinsic::aarch64_neon_uaddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vaddlv_u16: {
    Int = Intrinsic::aarch64_neon_uaddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
  }
  case NEON::BI__builtin_neon_vaddlvq_u8: {
    Int = Intrinsic::aarch64_neon_uaddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vaddlvq_u16: {
    Int = Intrinsic::aarch64_neon_uaddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
  }
  case NEON::BI__builtin_neon_vaddlv_s8: {
    Int = Intrinsic::aarch64_neon_saddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vaddlv_s16: {
    Int = Intrinsic::aarch64_neon_saddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 4);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
  }
  case NEON::BI__builtin_neon_vaddlvq_s8: {
    Int = Intrinsic::aarch64_neon_saddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int8Ty, 16);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
    return Builder.CreateTrunc(Ops[0], Int16Ty);
  }
  case NEON::BI__builtin_neon_vaddlvq_s16: {
    Int = Intrinsic::aarch64_neon_saddlv;
    Ty = Int32Ty;
    VTy = llvm::VectorType::get(Int16Ty, 8);
    llvm::Type *Tys[2] = { Ty, VTy };
    Ops.push_back(EmitScalarExpr(E->getArg(0)));
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
  }
  case NEON::BI__builtin_neon_vsri_n_v:
  case NEON::BI__builtin_neon_vsriq_n_v: {
    Int = Intrinsic::aarch64_neon_vsri;
    llvm::Function *Intrin = CGM.getIntrinsic(Int, Ty);
    return EmitNeonCall(Intrin, Ops, "vsri_n");
  }
  case NEON::BI__builtin_neon_vsli_n_v:
  case NEON::BI__builtin_neon_vsliq_n_v: {
    Int = Intrinsic::aarch64_neon_vsli;
    llvm::Function *Intrin = CGM.getIntrinsic(Int, Ty);
    return EmitNeonCall(Intrin, Ops, "vsli_n");
  }
  case NEON::BI__builtin_neon_vsra_n_v:
  case NEON::BI__builtin_neon_vsraq_n_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n");
    return Builder.CreateAdd(Ops[0], Ops[1]);
  case NEON::BI__builtin_neon_vrsra_n_v:
  case NEON::BI__builtin_neon_vrsraq_n_v: {
    Int = usgn ? Intrinsic::aarch64_neon_urshl : Intrinsic::aarch64_neon_srshl;
    SmallVector<llvm::Value*, 2> TmpOps;
    TmpOps.push_back(Ops[1]);
    TmpOps.push_back(Ops[2]);
    Function *F = CGM.getIntrinsic(Int, Ty);
    llvm::Value *tmp = EmitNeonCall(F, TmpOps, "vrshr_n", 1, true);
    Ops[0] = Builder.CreateBitCast(Ops[0], VTy);
    return Builder.CreateAdd(Ops[0], tmp);
  }
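  // vrsra has no dedicated intrinsic here: the rounding right shift is
  // emitted via the urshl/srshl intrinsic (EmitNeonCall negates the immediate
  // to make it a right shift), and the accumulate is an ordinary vector add.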
  case NEON::BI__builtin_neon_vld1_x2_v:
  case NEON::BI__builtin_neon_vld1q_x2_v:
  case NEON::BI__builtin_neon_vld1_x3_v:
  case NEON::BI__builtin_neon_vld1q_x3_v:
  case NEON::BI__builtin_neon_vld1_x4_v:
  case NEON::BI__builtin_neon_vld1q_x4_v: {
    llvm::Type *PTy = llvm::PointerType::getUnqual(VTy->getVectorElementType());
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    switch (BuiltinID) {
    case NEON::BI__builtin_neon_vld1_x2_v:
    case NEON::BI__builtin_neon_vld1q_x2_v:
      Int = Intrinsic::aarch64_neon_ld1x2;
      break;
    case NEON::BI__builtin_neon_vld1_x3_v:
    case NEON::BI__builtin_neon_vld1q_x3_v:
      Int = Intrinsic::aarch64_neon_ld1x3;
      break;
    case NEON::BI__builtin_neon_vld1_x4_v:
    case NEON::BI__builtin_neon_vld1q_x4_v:
      Int = Intrinsic::aarch64_neon_ld1x4;
      break;
    }
    Function *F = CGM.getIntrinsic(Int, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld1xN");
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vst1_x2_v:
  case NEON::BI__builtin_neon_vst1q_x2_v:
  case NEON::BI__builtin_neon_vst1_x3_v:
  case NEON::BI__builtin_neon_vst1q_x3_v:
  case NEON::BI__builtin_neon_vst1_x4_v:
  case NEON::BI__builtin_neon_vst1q_x4_v: {
    llvm::Type *PTy = llvm::PointerType::getUnqual(VTy->getVectorElementType());
    llvm::Type *Tys[2] = { VTy, PTy };
    switch (BuiltinID) {
    case NEON::BI__builtin_neon_vst1_x2_v:
    case NEON::BI__builtin_neon_vst1q_x2_v:
      Int = Intrinsic::aarch64_neon_st1x2;
      break;
    case NEON::BI__builtin_neon_vst1_x3_v:
    case NEON::BI__builtin_neon_vst1q_x3_v:
      Int = Intrinsic::aarch64_neon_st1x3;
      break;
    case NEON::BI__builtin_neon_vst1_x4_v:
    case NEON::BI__builtin_neon_vst1q_x4_v:
      Int = Intrinsic::aarch64_neon_st1x4;
      break;
    }
    std::rotate(Ops.begin(), Ops.begin() + 1, Ops.end());
    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "");
  }
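  // The std::rotate above moves the pointer argument from the front of Ops to
  // the back, since the st1xN intrinsics take the data operands first and the
  // address last, opposite to the builtin's signature.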
  case NEON::BI__builtin_neon_vld1_v:
  case NEON::BI__builtin_neon_vld1q_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy));
    return Builder.CreateDefaultAlignedLoad(Ops[0]);
  case NEON::BI__builtin_neon_vst1_v:
  case NEON::BI__builtin_neon_vst1q_v:
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy));
    Ops[1] = Builder.CreateBitCast(Ops[1], VTy);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  case NEON::BI__builtin_neon_vld1_lane_v:
  case NEON::BI__builtin_neon_vld1q_lane_v:
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ty = llvm::PointerType::getUnqual(VTy->getElementType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[0] = Builder.CreateDefaultAlignedLoad(Ops[0]);
    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vld1_lane");
  case NEON::BI__builtin_neon_vld1_dup_v:
  case NEON::BI__builtin_neon_vld1q_dup_v: {
    Value *V = UndefValue::get(Ty);
    Ty = llvm::PointerType::getUnqual(VTy->getElementType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    Ops[0] = Builder.CreateDefaultAlignedLoad(Ops[0]);
    llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
    Ops[0] = Builder.CreateInsertElement(V, Ops[0], CI);
    return EmitNeonSplat(Ops[0], CI);
  }
  case NEON::BI__builtin_neon_vst1_lane_v:
  case NEON::BI__builtin_neon_vst1q_lane_v:
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    return Builder.CreateDefaultAlignedStore(Ops[1],
                                             Builder.CreateBitCast(Ops[0], Ty));
  case NEON::BI__builtin_neon_vld2_v:
  case NEON::BI__builtin_neon_vld2q_v: {
    llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld2");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld3_v:
  case NEON::BI__builtin_neon_vld3q_v: {
    llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld3");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld4_v:
  case NEON::BI__builtin_neon_vld4q_v: {
    llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld4");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld2_dup_v:
  case NEON::BI__builtin_neon_vld2q_dup_v: {
    llvm::Type *PTy =
      llvm::PointerType::getUnqual(VTy->getElementType());
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2r, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld2");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld3_dup_v:
  case NEON::BI__builtin_neon_vld3q_dup_v: {
    llvm::Type *PTy =
      llvm::PointerType::getUnqual(VTy->getElementType());
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3r, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld3");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld4_dup_v:
  case NEON::BI__builtin_neon_vld4q_dup_v: {
    llvm::Type *PTy =
      llvm::PointerType::getUnqual(VTy->getElementType());
    Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
    llvm::Type *Tys[2] = { VTy, PTy };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4r, Tys);
    Ops[1] = Builder.CreateCall(F, Ops[1], "vld4");
    Ops[0] = Builder.CreateBitCast(Ops[0],
                llvm::PointerType::getUnqual(Ops[1]->getType()));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld2_lane_v:
  case NEON::BI__builtin_neon_vld2q_lane_v: {
    llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2lane, Tys);
    Ops.push_back(Ops[1]);
    Ops.erase(Ops.begin()+1);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Ops[3] = Builder.CreateZExt(Ops[3], Int64Ty);
    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane");
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld3_lane_v:
  case NEON::BI__builtin_neon_vld3q_lane_v: {
    llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3lane, Tys);
    Ops.push_back(Ops[1]);
    Ops.erase(Ops.begin()+1);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
    Ops[4] = Builder.CreateZExt(Ops[4], Int64Ty);
    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vld4_lane_v:
  case NEON::BI__builtin_neon_vld4q_lane_v: {
    llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
    Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4lane, Tys);
    Ops.push_back(Ops[1]);
    Ops.erase(Ops.begin()+1);
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
    Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
    Ops[5] = Builder.CreateZExt(Ops[5], Int64Ty);
    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld4_lane");
    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case NEON::BI__builtin_neon_vst2_v:
  case NEON::BI__builtin_neon_vst2q_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    llvm::Type *Tys[2] = { VTy, Ops[2]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st2, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vst2_lane_v:
  case NEON::BI__builtin_neon_vst2q_lane_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    Ops[2] = Builder.CreateZExt(Ops[2], Int64Ty);
    llvm::Type *Tys[2] = { VTy, Ops[3]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st2lane, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vst3_v:
  case NEON::BI__builtin_neon_vst3q_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    llvm::Type *Tys[2] = { VTy, Ops[3]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st3, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vst3_lane_v:
  case NEON::BI__builtin_neon_vst3q_lane_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    Ops[3] = Builder.CreateZExt(Ops[3], Int64Ty);
    llvm::Type *Tys[2] = { VTy, Ops[4]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st3lane, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vst4_v:
  case NEON::BI__builtin_neon_vst4q_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    llvm::Type *Tys[2] = { VTy, Ops[4]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st4, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vst4_lane_v:
  case NEON::BI__builtin_neon_vst4q_lane_v: {
    Ops.push_back(Ops[0]);
    Ops.erase(Ops.begin());
    Ops[4] = Builder.CreateZExt(Ops[4], Int64Ty);
    llvm::Type *Tys[2] = { VTy, Ops[5]->getType() };
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st4lane, Tys),
                        Ops, "");
  }
  case NEON::BI__builtin_neon_vtrn_v:
  case NEON::BI__builtin_neon_vtrnq_v: {
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
        Indices.push_back(ConstantInt::get(Int32Ty, i+vi));
        Indices.push_back(ConstantInt::get(Int32Ty, i+e+vi));
      }
      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  case NEON::BI__builtin_neon_vuzp_v:
  case NEON::BI__builtin_neon_vuzpq_v: {
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
        Indices.push_back(ConstantInt::get(Int32Ty, 2*i+vi));

      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  case NEON::BI__builtin_neon_vzip_v:
  case NEON::BI__builtin_neon_vzipq_v: {
    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
    Value *SV = nullptr;

    for (unsigned vi = 0; vi != 2; ++vi) {
      SmallVector<Constant*, 16> Indices;
      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
        Indices.push_back(ConstantInt::get(Int32Ty, (i + vi*e) >> 1));
        Indices.push_back(ConstantInt::get(Int32Ty, ((i + vi*e) >> 1)+e));
      }
      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
      SV = llvm::ConstantVector::get(Indices);
      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip");
      SV = Builder.CreateDefaultAlignedStore(SV, Addr);
    }
    return SV;
  }
  case NEON::BI__builtin_neon_vqtbl1q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl1, Ty),
                        Ops, "vtbl1");
  case NEON::BI__builtin_neon_vqtbl2q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl2, Ty),
                        Ops, "vtbl2");
  case NEON::BI__builtin_neon_vqtbl3q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl3, Ty),
                        Ops, "vtbl3");
  case NEON::BI__builtin_neon_vqtbl4q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl4, Ty),
                        Ops, "vtbl4");
  case NEON::BI__builtin_neon_vqtbx1q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx1, Ty),
                        Ops, "vtbx1");
  case NEON::BI__builtin_neon_vqtbx2q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx2, Ty),
                        Ops, "vtbx2");
  case NEON::BI__builtin_neon_vqtbx3q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx3, Ty),
                        Ops, "vtbx3");
  case NEON::BI__builtin_neon_vqtbx4q_v:
    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx4, Ty),
                        Ops, "vtbx4");
  case NEON::BI__builtin_neon_vsqadd_v:
  case NEON::BI__builtin_neon_vsqaddq_v: {
    Int = Intrinsic::aarch64_neon_usqadd;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqadd");
  }
  case NEON::BI__builtin_neon_vuqadd_v:
  case NEON::BI__builtin_neon_vuqaddq_v: {
    Int = Intrinsic::aarch64_neon_suqadd;
    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vuqadd");
  }
  }
}
llvm::Value *CodeGenFunction::BuildVector(ArrayRef<llvm::Value*> Ops) {
  assert((Ops.size() & (Ops.size() - 1)) == 0 &&
         "Not a power-of-two sized vector!");
  bool AllConstants = true;
  for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i)
    AllConstants &= isa<Constant>(Ops[i]);

  // If this is a constant vector, create a ConstantVector.
  if (AllConstants) {
    SmallVector<llvm::Constant*, 16> CstOps;
    for (unsigned i = 0, e = Ops.size(); i != e; ++i)
      CstOps.push_back(cast<Constant>(Ops[i]));
    return llvm::ConstantVector::get(CstOps);
  }

  // Otherwise, insertelement the values to build the vector.
  Value *Result =
    llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size()));

  for (unsigned i = 0, e = Ops.size(); i != e; ++i)
    Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i));

  return Result;
}
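// BuildVector backs the ia32 vec_init builtins below: an all-constant operand
// list folds directly into a ConstantVector, anything else becomes a chain of
// insertelement instructions starting from undef.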
Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
                                           const CallExpr *E) {
  if (BuiltinID == X86::BI__builtin_ms_va_start ||
      BuiltinID == X86::BI__builtin_ms_va_end)
    return EmitVAStartEnd(EmitMSVAListRef(E->getArg(0)).getPointer(),
                          BuiltinID == X86::BI__builtin_ms_va_start);
  if (BuiltinID == X86::BI__builtin_ms_va_copy) {
    // Lower this manually: a __builtin_ms_va_list is just a char*, so copy
    // that one pointer-sized value from source to destination.
    llvm::Type *BPP = Int8PtrPtrTy;

    Address DestAddr = EmitMSVAListRef(E->getArg(0));
    Address SrcAddr = EmitMSVAListRef(E->getArg(1));
    DestAddr = Address(Builder.CreateBitCast(DestAddr.getPointer(), BPP, "cp"),
                       DestAddr.getAlignment());
    SrcAddr = Address(Builder.CreateBitCast(SrcAddr.getPointer(), BPP, "ap"),
                      SrcAddr.getAlignment());

    Value *ArgPtr = Builder.CreateLoad(SrcAddr, "ap.val");
    return Builder.CreateStore(ArgPtr, DestAddr);
  }
  SmallVector<Value*, 4> Ops;

  // Find out if any arguments are required to be integer constant expressions.
  unsigned ICEArguments = 0;
  ASTContext::GetBuiltinTypeError Error;
  getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
  assert(Error == ASTContext::GE_None && "Should not codegen an error");

  for (unsigned i = 0, e = E->getNumArgs(); i != e; i++) {
    // If this is a normal argument, just emit it as a scalar.
    if ((ICEArguments & (1 << i)) == 0) {
      Ops.push_back(EmitScalarExpr(E->getArg(i)));
      continue;
    }

    // If this is required to be a constant, constant fold it so that we know
    // that the generated intrinsic gets a ConstantInt.
    llvm::APSInt Result;
    bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
    assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
    Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
  }
  switch (BuiltinID) {
  default: return nullptr;
  case X86::BI__builtin_cpu_supports: {
    const Expr *FeatureExpr = E->getArg(0)->IgnoreParenCasts();
    StringRef FeatureStr = cast<StringLiteral>(FeatureExpr)->getString();

    // Matching the enum in libgcc/config/i386/cpuinfo.h.
    enum X86Features {
      CMOV = 0, MMX, POPCNT, SSE, SSE2, SSE3, SSSE3, SSE4_1, SSE4_2,
      AVX, AVX2, SSE4_A, FMA4, XOP, FMA, AVX512F, BMI, BMI2, MAX
    };

    X86Features Feature = StringSwitch<X86Features>(FeatureStr)
        .Case("cmov", X86Features::CMOV)
        .Case("mmx", X86Features::MMX)
        .Case("popcnt", X86Features::POPCNT)
        .Case("sse", X86Features::SSE)
        .Case("sse2", X86Features::SSE2)
        .Case("sse3", X86Features::SSE3)
        .Case("sse4.1", X86Features::SSE4_1)
        .Case("sse4.2", X86Features::SSE4_2)
        .Case("avx", X86Features::AVX)
        .Case("avx2", X86Features::AVX2)
        .Case("sse4a", X86Features::SSE4_A)
        .Case("fma4", X86Features::FMA4)
        .Case("xop", X86Features::XOP)
        .Case("fma", X86Features::FMA)
        .Case("avx512f", X86Features::AVX512F)
        .Case("bmi", X86Features::BMI)
        .Case("bmi2", X86Features::BMI2)
        .Default(X86Features::MAX);
    assert(Feature != X86Features::MAX && "Invalid feature!");

    // Matching the struct layout from the compiler-rt/libgcc structure that
    // is filled in:
    //   unsigned int __cpu_vendor;
    //   unsigned int __cpu_type;
    //   unsigned int __cpu_subtype;
    //   unsigned int __cpu_features[1];
    llvm::Type *STy = llvm::StructType::get(
        Int32Ty, Int32Ty, Int32Ty, llvm::ArrayType::get(Int32Ty, 1), nullptr);

    // Grab the global __cpu_model.
    llvm::Constant *CpuModel = CGM.CreateRuntimeVariable(STy, "__cpu_model");

    // Grab the first (0th) element from the field __cpu_features off of the
    // global in the struct STy.
    Value *Idxs[] = {
      ConstantInt::get(Int32Ty, 0),
      ConstantInt::get(Int32Ty, 3),
      ConstantInt::get(Int32Ty, 0)
    };
    Value *CpuFeatures = Builder.CreateGEP(STy, CpuModel, Idxs);
    Value *Features = Builder.CreateAlignedLoad(CpuFeatures,
                                                CharUnits::fromQuantity(4));

    // Check the value of the bit corresponding to the feature requested.
    Value *Bitset = Builder.CreateAnd(
        Features, llvm::ConstantInt::get(Int32Ty, 1 << Feature));
    return Builder.CreateICmpNE(Bitset, llvm::ConstantInt::get(Int32Ty, 0));
  }
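  // Usage sketch (illustrative, not code from this file):
  //   if (__builtin_cpu_supports("sse4.2")) { ... }
  // compiles to one load of __cpu_model.__cpu_features[0], an AND with
  // (1 << SSE4_2), and a compare against zero.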
  case X86::BI_mm_prefetch: {
    Value *Address = Ops[0];
    Value *RW = ConstantInt::get(Int32Ty, 0);
    Value *Locality = Ops[1];
    Value *Data = ConstantInt::get(Int32Ty, 1);
    Value *F = CGM.getIntrinsic(Intrinsic::prefetch);
    return Builder.CreateCall(F, {Address, RW, Locality, Data});
  }
  case X86::BI__builtin_ia32_undef128:
  case X86::BI__builtin_ia32_undef256:
  case X86::BI__builtin_ia32_undef512:
    return UndefValue::get(ConvertType(E->getType()));
  case X86::BI__builtin_ia32_vec_init_v8qi:
  case X86::BI__builtin_ia32_vec_init_v4hi:
  case X86::BI__builtin_ia32_vec_init_v2si:
    return Builder.CreateBitCast(BuildVector(Ops),
                                 llvm::Type::getX86_MMXTy(getLLVMContext()));
  case X86::BI__builtin_ia32_vec_ext_v2si:
    return Builder.CreateExtractElement(Ops[0],
                                  llvm::ConstantInt::get(Ops[1]->getType(), 0));
  case X86::BI__builtin_ia32_ldmxcsr: {
    Address Tmp = CreateMemTemp(E->getArg(0)->getType());
    Builder.CreateStore(Ops[0], Tmp);
    return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_ldmxcsr),
                          Builder.CreateBitCast(Tmp.getPointer(), Int8PtrTy));
  }
  case X86::BI__builtin_ia32_stmxcsr: {
    Address Tmp = CreateMemTemp(E->getType());
    Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_stmxcsr),
                       Builder.CreateBitCast(Tmp.getPointer(), Int8PtrTy));
    return Builder.CreateLoad(Tmp, "stmxcsr");
  }
  case X86::BI__builtin_ia32_xsave:
  case X86::BI__builtin_ia32_xsave64:
  case X86::BI__builtin_ia32_xrstor:
  case X86::BI__builtin_ia32_xrstor64:
  case X86::BI__builtin_ia32_xsaveopt:
  case X86::BI__builtin_ia32_xsaveopt64:
  case X86::BI__builtin_ia32_xrstors:
  case X86::BI__builtin_ia32_xrstors64:
  case X86::BI__builtin_ia32_xsavec:
  case X86::BI__builtin_ia32_xsavec64:
  case X86::BI__builtin_ia32_xsaves:
  case X86::BI__builtin_ia32_xsaves64: {
    Intrinsic::ID ID;
#define INTRINSIC_X86_XSAVE_ID(NAME) \
    case X86::BI__builtin_ia32_##NAME: \
      ID = Intrinsic::x86_##NAME; \
      break
    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported intrinsic!");
    INTRINSIC_X86_XSAVE_ID(xsave);
    INTRINSIC_X86_XSAVE_ID(xsave64);
    INTRINSIC_X86_XSAVE_ID(xrstor);
    INTRINSIC_X86_XSAVE_ID(xrstor64);
    INTRINSIC_X86_XSAVE_ID(xsaveopt);
    INTRINSIC_X86_XSAVE_ID(xsaveopt64);
    INTRINSIC_X86_XSAVE_ID(xrstors);
    INTRINSIC_X86_XSAVE_ID(xrstors64);
    INTRINSIC_X86_XSAVE_ID(xsavec);
    INTRINSIC_X86_XSAVE_ID(xsavec64);
    INTRINSIC_X86_XSAVE_ID(xsaves);
    INTRINSIC_X86_XSAVE_ID(xsaves64);
    }
#undef INTRINSIC_X86_XSAVE_ID
    // Split the 64-bit mask into the EDX:EAX halves the intrinsic expects.
    Value *Mhi = Builder.CreateTrunc(
        Builder.CreateLShr(Ops[1], ConstantInt::get(Int64Ty, 32)), Int32Ty);
    Value *Mlo = Builder.CreateTrunc(Ops[1], Int32Ty);
    Ops[1] = Mhi;
    Ops.push_back(Mlo);
    return Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
  }
  case X86::BI__builtin_ia32_storehps:
  case X86::BI__builtin_ia32_storelps: {
    llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
    Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast");
    // Extract element 0 (storelps) or 1 (storehps).
    unsigned Index = BuiltinID == X86::BI__builtin_ia32_storelps ? 0 : 1;
    llvm::Value *Idx = llvm::ConstantInt::get(SizeTy, Index);
    Ops[1] = Builder.CreateExtractElement(Ops[1], Idx, "extract");
    // Cast the pointer to i64* and store.
    Ops[0] = Builder.CreateBitCast(Ops[0],
                                   llvm::PointerType::getUnqual(Int64Ty));
    return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
  }
  case X86::BI__builtin_ia32_palignr128:
  case X86::BI__builtin_ia32_palignr256: {
    unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();

    unsigned NumElts =
      cast<llvm::VectorType>(Ops[0]->getType())->getNumElements();
    assert(NumElts % 16 == 0);
    unsigned NumLanes = NumElts / 16;
    unsigned NumLaneElts = NumElts / NumLanes;

    // If palignr is shifting the pair of vectors more than the size of two
    // lanes, emit zero.
    if (ShiftVal >= (2 * NumLaneElts))
      return llvm::Constant::getNullValue(ConvertType(E->getType()));

    // If palignr is shifting the pair of input vectors more than one lane,
    // but less than two lanes, convert to shifting in zeroes.
    if (ShiftVal > NumLaneElts) {
      ShiftVal -= NumLaneElts;
      Ops[1] = Ops[0];
      Ops[0] = llvm::Constant::getNullValue(Ops[0]->getType());
    }

    uint32_t Indices[32];
    // 256-bit palignr operates on 128-bit lanes so we need to handle that.
    for (unsigned l = 0; l != NumElts; l += NumLaneElts) {
      for (unsigned i = 0; i != NumLaneElts; ++i) {
        unsigned Idx = ShiftVal + i;
        if (Idx >= NumLaneElts)
          Idx += NumElts - NumLaneElts; // End of lane, switch operand.
        Indices[l + i] = Idx + l;
      }
    }

    Value *SV = llvm::ConstantDataVector::get(getLLVMContext(),
                                              makeArrayRef(Indices, NumElts));
    return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
  }
  case X86::BI__builtin_ia32_pslldqi256: {
    // Shift amount is in bits so divide by 8.
    unsigned shiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() >> 3;

    // If pslldq is shifting the vector more than 15 bytes, emit zero.
    if (shiftVal >= 16)
      return llvm::Constant::getNullValue(ConvertType(E->getType()));

    uint32_t Indices[32];
    // 256-bit pslldq operates on 128-bit lanes so we need to handle that.
    for (unsigned l = 0; l != 32; l += 16) {
      for (unsigned i = 0; i != 16; ++i) {
        unsigned Idx = 32 + i - shiftVal;
        if (Idx < 32) Idx -= 16; // End of lane, switch operand.
        Indices[l + i] = Idx + l;
      }
    }

    llvm::Type *VecTy = llvm::VectorType::get(Int8Ty, 32);
    Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
    Value *Zero = llvm::Constant::getNullValue(VecTy);

    Value *SV = llvm::ConstantDataVector::get(getLLVMContext(), Indices);
    SV = Builder.CreateShuffleVector(Zero, Ops[0], SV, "pslldq");
    llvm::Type *ResultType = ConvertType(E->getType());
    return Builder.CreateBitCast(SV, ResultType, "cast");
  }
  case X86::BI__builtin_ia32_psrldqi256: {
    // Shift amount is in bits so divide by 8.
    unsigned shiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() >> 3;

    // If psrldq is shifting the vector more than 15 bytes, emit zero.
    if (shiftVal >= 16)
      return llvm::Constant::getNullValue(ConvertType(E->getType()));

    uint32_t Indices[32];
    // 256-bit psrldq operates on 128-bit lanes so we need to handle that.
    for (unsigned l = 0; l != 32; l += 16) {
      for (unsigned i = 0; i != 16; ++i) {
        unsigned Idx = i + shiftVal;
        if (Idx >= 16) Idx += 16; // End of lane, switch operand.
        Indices[l + i] = Idx + l;
      }
    }

    llvm::Type *VecTy = llvm::VectorType::get(Int8Ty, 32);
    Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
    Value *Zero = llvm::Constant::getNullValue(VecTy);

    Value *SV = llvm::ConstantDataVector::get(getLLVMContext(), Indices);
    SV = Builder.CreateShuffleVector(Ops[0], Zero, SV, "psrldq");
    llvm::Type *ResultType = ConvertType(E->getType());
    return Builder.CreateBitCast(SV, ResultType, "cast");
  }
  case X86::BI__builtin_ia32_movntps:
  case X86::BI__builtin_ia32_movntps256:
  case X86::BI__builtin_ia32_movntpd:
  case X86::BI__builtin_ia32_movntpd256:
  case X86::BI__builtin_ia32_movntdq:
  case X86::BI__builtin_ia32_movntdq256:
  case X86::BI__builtin_ia32_movnti:
  case X86::BI__builtin_ia32_movnti64: {
    llvm::MDNode *Node = llvm::MDNode::get(
        getLLVMContext(), llvm::ConstantAsMetadata::get(Builder.getInt32(1)));

    // Convert the type of the pointer to a pointer to the stored type.
    Value *BC = Builder.CreateBitCast(Ops[0],
                                llvm::PointerType::getUnqual(Ops[1]->getType()),
                                      "cast");
    StoreInst *SI = Builder.CreateDefaultAlignedStore(Ops[1], BC);
    SI->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);

    // If the operand is an integer, we can't assume alignment; otherwise,
    // assume natural alignment.
    QualType ArgTy = E->getArg(1)->getType();
    unsigned Align;
    if (ArgTy->isIntegerType())
      Align = 1;
    else
      Align = getContext().getTypeSizeInChars(ArgTy).getQuantity();
    SI->setAlignment(Align);
    return SI;
  }
  // 3DNow!
  case X86::BI__builtin_ia32_pswapdsf:
  case X86::BI__builtin_ia32_pswapdsi: {
    llvm::Type *MMXTy = llvm::Type::getX86_MMXTy(getLLVMContext());
    Ops[0] = Builder.CreateBitCast(Ops[0], MMXTy, "cast");
    llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_3dnowa_pswapd);
    return Builder.CreateCall(F, Ops, "pswapd");
  }
  case X86::BI__builtin_ia32_rdrand16_step:
  case X86::BI__builtin_ia32_rdrand32_step:
  case X86::BI__builtin_ia32_rdrand64_step:
  case X86::BI__builtin_ia32_rdseed16_step:
  case X86::BI__builtin_ia32_rdseed32_step:
  case X86::BI__builtin_ia32_rdseed64_step: {
    Intrinsic::ID ID;
    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported intrinsic!");
    case X86::BI__builtin_ia32_rdrand16_step:
      ID = Intrinsic::x86_rdrand_16;
      break;
    case X86::BI__builtin_ia32_rdrand32_step:
      ID = Intrinsic::x86_rdrand_32;
      break;
    case X86::BI__builtin_ia32_rdrand64_step:
      ID = Intrinsic::x86_rdrand_64;
      break;
    case X86::BI__builtin_ia32_rdseed16_step:
      ID = Intrinsic::x86_rdseed_16;
      break;
    case X86::BI__builtin_ia32_rdseed32_step:
      ID = Intrinsic::x86_rdseed_32;
      break;
    case X86::BI__builtin_ia32_rdseed64_step:
      ID = Intrinsic::x86_rdseed_64;
      break;
    }

    Value *Call = Builder.CreateCall(CGM.getIntrinsic(ID));
    Builder.CreateDefaultAlignedStore(Builder.CreateExtractValue(Call, 0),
                                      Ops[0]);
    return Builder.CreateExtractValue(Call, 1);
  }
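  // The *_step builtins mirror the hardware protocol: the intrinsic returns a
  // {value, success-flag} pair, the random value is stored through the out
  // pointer, and the flag is returned for the caller to test.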
  // SSE comparison intrinsics
  case X86::BI__builtin_ia32_cmpeqps:
  case X86::BI__builtin_ia32_cmpltps:
  case X86::BI__builtin_ia32_cmpleps:
  case X86::BI__builtin_ia32_cmpunordps:
  case X86::BI__builtin_ia32_cmpneqps:
  case X86::BI__builtin_ia32_cmpnltps:
  case X86::BI__builtin_ia32_cmpnleps:
  case X86::BI__builtin_ia32_cmpordps:
  case X86::BI__builtin_ia32_cmpeqss:
  case X86::BI__builtin_ia32_cmpltss:
  case X86::BI__builtin_ia32_cmpless:
  case X86::BI__builtin_ia32_cmpunordss:
  case X86::BI__builtin_ia32_cmpneqss:
  case X86::BI__builtin_ia32_cmpnltss:
  case X86::BI__builtin_ia32_cmpnless:
  case X86::BI__builtin_ia32_cmpordss:
  case X86::BI__builtin_ia32_cmpeqpd:
  case X86::BI__builtin_ia32_cmpltpd:
  case X86::BI__builtin_ia32_cmplepd:
  case X86::BI__builtin_ia32_cmpunordpd:
  case X86::BI__builtin_ia32_cmpneqpd:
  case X86::BI__builtin_ia32_cmpnltpd:
  case X86::BI__builtin_ia32_cmpnlepd:
  case X86::BI__builtin_ia32_cmpordpd:
  case X86::BI__builtin_ia32_cmpeqsd:
  case X86::BI__builtin_ia32_cmpltsd:
  case X86::BI__builtin_ia32_cmplesd:
  case X86::BI__builtin_ia32_cmpunordsd:
  case X86::BI__builtin_ia32_cmpneqsd:
  case X86::BI__builtin_ia32_cmpnltsd:
  case X86::BI__builtin_ia32_cmpnlesd:
  case X86::BI__builtin_ia32_cmpordsd: {
    // These builtins exist so that the comparison immediate can be bounds
    // checked by clang. Choose the immediate for the intrinsic call.
    unsigned Imm;
    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported intrinsic!");
    case X86::BI__builtin_ia32_cmpeqps:
    case X86::BI__builtin_ia32_cmpeqss:
    case X86::BI__builtin_ia32_cmpeqpd:
    case X86::BI__builtin_ia32_cmpeqsd:
      Imm = 0;
      break;
    case X86::BI__builtin_ia32_cmpltps:
    case X86::BI__builtin_ia32_cmpltss:
    case X86::BI__builtin_ia32_cmpltpd:
    case X86::BI__builtin_ia32_cmpltsd:
      Imm = 1;
      break;
    case X86::BI__builtin_ia32_cmpleps:
    case X86::BI__builtin_ia32_cmpless:
    case X86::BI__builtin_ia32_cmplepd:
    case X86::BI__builtin_ia32_cmplesd:
      Imm = 2;
      break;
    case X86::BI__builtin_ia32_cmpunordps:
    case X86::BI__builtin_ia32_cmpunordss:
    case X86::BI__builtin_ia32_cmpunordpd:
    case X86::BI__builtin_ia32_cmpunordsd:
      Imm = 3;
      break;
    case X86::BI__builtin_ia32_cmpneqps:
    case X86::BI__builtin_ia32_cmpneqss:
    case X86::BI__builtin_ia32_cmpneqpd:
    case X86::BI__builtin_ia32_cmpneqsd:
      Imm = 4;
      break;
    case X86::BI__builtin_ia32_cmpnltps:
    case X86::BI__builtin_ia32_cmpnltss:
    case X86::BI__builtin_ia32_cmpnltpd:
    case X86::BI__builtin_ia32_cmpnltsd:
      Imm = 5;
      break;
    case X86::BI__builtin_ia32_cmpnleps:
    case X86::BI__builtin_ia32_cmpnless:
    case X86::BI__builtin_ia32_cmpnlepd:
    case X86::BI__builtin_ia32_cmpnlesd:
      Imm = 6;
      break;
    case X86::BI__builtin_ia32_cmpordps:
    case X86::BI__builtin_ia32_cmpordss:
    case X86::BI__builtin_ia32_cmpordpd:
    case X86::BI__builtin_ia32_cmpordsd:
      Imm = 7;
      break;
    }
    // Choose the intrinsic ID.
    const char *name;
    Intrinsic::ID ID;
    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported intrinsic!");
    case X86::BI__builtin_ia32_cmpeqps:
    case X86::BI__builtin_ia32_cmpltps:
    case X86::BI__builtin_ia32_cmpleps:
    case X86::BI__builtin_ia32_cmpunordps:
    case X86::BI__builtin_ia32_cmpneqps:
    case X86::BI__builtin_ia32_cmpnltps:
    case X86::BI__builtin_ia32_cmpnleps:
    case X86::BI__builtin_ia32_cmpordps:
      name = "cmpps";
      ID = Intrinsic::x86_sse_cmp_ps;
      break;
    case X86::BI__builtin_ia32_cmpeqss:
    case X86::BI__builtin_ia32_cmpltss:
    case X86::BI__builtin_ia32_cmpless:
    case X86::BI__builtin_ia32_cmpunordss:
    case X86::BI__builtin_ia32_cmpneqss:
    case X86::BI__builtin_ia32_cmpnltss:
    case X86::BI__builtin_ia32_cmpnless:
    case X86::BI__builtin_ia32_cmpordss:
      name = "cmpss";
      ID = Intrinsic::x86_sse_cmp_ss;
      break;
    case X86::BI__builtin_ia32_cmpeqpd:
    case X86::BI__builtin_ia32_cmpltpd:
    case X86::BI__builtin_ia32_cmplepd:
    case X86::BI__builtin_ia32_cmpunordpd:
    case X86::BI__builtin_ia32_cmpneqpd:
    case X86::BI__builtin_ia32_cmpnltpd:
    case X86::BI__builtin_ia32_cmpnlepd:
    case X86::BI__builtin_ia32_cmpordpd:
      name = "cmppd";
      ID = Intrinsic::x86_sse2_cmp_pd;
      break;
    case X86::BI__builtin_ia32_cmpeqsd:
    case X86::BI__builtin_ia32_cmpltsd:
    case X86::BI__builtin_ia32_cmplesd:
    case X86::BI__builtin_ia32_cmpunordsd:
    case X86::BI__builtin_ia32_cmpneqsd:
    case X86::BI__builtin_ia32_cmpnltsd:
    case X86::BI__builtin_ia32_cmpnlesd:
    case X86::BI__builtin_ia32_cmpordsd:
      name = "cmpsd";
      ID = Intrinsic::x86_sse2_cmp_sd;
      break;
    }

    Ops.push_back(llvm::ConstantInt::get(Int8Ty, Imm));
    llvm::Function *F = CGM.getIntrinsic(ID);
    return Builder.CreateCall(F, Ops, name);
  }
  }
}
Value *CodeGenFunction::EmitPPCBuiltinExpr(unsigned BuiltinID,
                                           const CallExpr *E) {
  SmallVector<Value*, 4> Ops;

  for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
    Ops.push_back(EmitScalarExpr(E->getArg(i)));

  Intrinsic::ID ID = Intrinsic::not_intrinsic;

  switch (BuiltinID) {
  default: return nullptr;
  // __builtin_ppc_get_timebase is GCC 4.8+'s PowerPC-specific name for what
  // we call __builtin_readcyclecounter.
  case PPC::BI__builtin_ppc_get_timebase:
    return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::readcyclecounter));

  // vec_ld, vec_lvsl, vec_lvsr
  case PPC::BI__builtin_altivec_lvx:
  case PPC::BI__builtin_altivec_lvxl:
  case PPC::BI__builtin_altivec_lvebx:
  case PPC::BI__builtin_altivec_lvehx:
  case PPC::BI__builtin_altivec_lvewx:
  case PPC::BI__builtin_altivec_lvsl:
  case PPC::BI__builtin_altivec_lvsr:
  case PPC::BI__builtin_vsx_lxvd2x:
  case PPC::BI__builtin_vsx_lxvw4x:
  {
    Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);

    Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]);
    Ops.pop_back();

    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported ld/lvsl/lvsr intrinsic!");
    case PPC::BI__builtin_altivec_lvx:
      ID = Intrinsic::ppc_altivec_lvx;
      break;
    case PPC::BI__builtin_altivec_lvxl:
      ID = Intrinsic::ppc_altivec_lvxl;
      break;
    case PPC::BI__builtin_altivec_lvebx:
      ID = Intrinsic::ppc_altivec_lvebx;
      break;
    case PPC::BI__builtin_altivec_lvehx:
      ID = Intrinsic::ppc_altivec_lvehx;
      break;
    case PPC::BI__builtin_altivec_lvewx:
      ID = Intrinsic::ppc_altivec_lvewx;
      break;
    case PPC::BI__builtin_altivec_lvsl:
      ID = Intrinsic::ppc_altivec_lvsl;
      break;
    case PPC::BI__builtin_altivec_lvsr:
      ID = Intrinsic::ppc_altivec_lvsr;
      break;
    case PPC::BI__builtin_vsx_lxvd2x:
      ID = Intrinsic::ppc_vsx_lxvd2x;
      break;
    case PPC::BI__builtin_vsx_lxvw4x:
      ID = Intrinsic::ppc_vsx_lxvw4x;
      break;
    }
    llvm::Function *F = CGM.getIntrinsic(ID);
    return Builder.CreateCall(F, Ops, "");
  }
  // vec_st, vec_xst
  case PPC::BI__builtin_altivec_stvx:
  case PPC::BI__builtin_altivec_stvxl:
  case PPC::BI__builtin_altivec_stvebx:
  case PPC::BI__builtin_altivec_stvehx:
  case PPC::BI__builtin_altivec_stvewx:
  case PPC::BI__builtin_vsx_stxvd2x:
  case PPC::BI__builtin_vsx_stxvw4x:
  {
    Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy);

    Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]);
    Ops.pop_back();

    switch (BuiltinID) {
    default: llvm_unreachable("Unsupported st intrinsic!");
    case PPC::BI__builtin_altivec_stvx:
      ID = Intrinsic::ppc_altivec_stvx;
      break;
    case PPC::BI__builtin_altivec_stvxl:
      ID = Intrinsic::ppc_altivec_stvxl;
      break;
    case PPC::BI__builtin_altivec_stvebx:
      ID = Intrinsic::ppc_altivec_stvebx;
      break;
    case PPC::BI__builtin_altivec_stvehx:
      ID = Intrinsic::ppc_altivec_stvehx;
      break;
    case PPC::BI__builtin_altivec_stvewx:
      ID = Intrinsic::ppc_altivec_stvewx;
      break;
    case PPC::BI__builtin_vsx_stxvd2x:
      ID = Intrinsic::ppc_vsx_stxvd2x;
      break;
    case PPC::BI__builtin_vsx_stxvw4x:
      ID = Intrinsic::ppc_vsx_stxvw4x;
      break;
    }
    llvm::Function *F = CGM.getIntrinsic(ID);
    return Builder.CreateCall(F, Ops, "");
  }
  // Square root
  case PPC::BI__builtin_vsx_xvsqrtsp:
  case PPC::BI__builtin_vsx_xvsqrtdp: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    ID = Intrinsic::sqrt;
    llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
    return Builder.CreateCall(F, X);
  }
  // Count leading zeros
  case PPC::BI__builtin_altivec_vclzb:
  case PPC::BI__builtin_altivec_vclzh:
  case PPC::BI__builtin_altivec_vclzw:
  case PPC::BI__builtin_altivec_vclzd: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
    Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ResultType);
    return Builder.CreateCall(F, {X, Undef});
  }
  // Copy sign
  case PPC::BI__builtin_vsx_xvcpsgnsp:
  case PPC::BI__builtin_vsx_xvcpsgndp: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Y = EmitScalarExpr(E->getArg(1));
    ID = Intrinsic::copysign;
    llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
    return Builder.CreateCall(F, {X, Y});
  }
  // Rounding/truncation
  case PPC::BI__builtin_vsx_xvrspip:
  case PPC::BI__builtin_vsx_xvrdpip:
  case PPC::BI__builtin_vsx_xvrdpim:
  case PPC::BI__builtin_vsx_xvrspim:
  case PPC::BI__builtin_vsx_xvrdpi:
  case PPC::BI__builtin_vsx_xvrspi:
  case PPC::BI__builtin_vsx_xvrdpic:
  case PPC::BI__builtin_vsx_xvrspic:
  case PPC::BI__builtin_vsx_xvrdpiz:
  case PPC::BI__builtin_vsx_xvrspiz: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    if (BuiltinID == PPC::BI__builtin_vsx_xvrdpim ||
        BuiltinID == PPC::BI__builtin_vsx_xvrspim)
      ID = Intrinsic::floor;
    else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpi ||
             BuiltinID == PPC::BI__builtin_vsx_xvrspi)
      ID = Intrinsic::round;
    else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpic ||
             BuiltinID == PPC::BI__builtin_vsx_xvrspic)
      ID = Intrinsic::nearbyint;
    else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpip ||
             BuiltinID == PPC::BI__builtin_vsx_xvrspip)
      ID = Intrinsic::ceil;
    else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpiz ||
             BuiltinID == PPC::BI__builtin_vsx_xvrspiz)
      ID = Intrinsic::trunc;
    llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
    return Builder.CreateCall(F, X);
  }
  // FMA variations
  case PPC::BI__builtin_vsx_xvmaddadp:
  case PPC::BI__builtin_vsx_xvmaddasp:
  case PPC::BI__builtin_vsx_xvnmaddadp:
  case PPC::BI__builtin_vsx_xvnmaddasp:
  case PPC::BI__builtin_vsx_xvmsubadp:
  case PPC::BI__builtin_vsx_xvmsubasp:
  case PPC::BI__builtin_vsx_xvnmsubadp:
  case PPC::BI__builtin_vsx_xvnmsubasp: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Y = EmitScalarExpr(E->getArg(1));
    Value *Z = EmitScalarExpr(E->getArg(2));
    Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
    llvm::Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
    switch (BuiltinID) {
    case PPC::BI__builtin_vsx_xvmaddadp:
    case PPC::BI__builtin_vsx_xvmaddasp:
      return Builder.CreateCall(F, {X, Y, Z});
    case PPC::BI__builtin_vsx_xvnmaddadp:
    case PPC::BI__builtin_vsx_xvnmaddasp:
      return Builder.CreateFSub(Zero,
                                Builder.CreateCall(F, {X, Y, Z}), "sub");
    case PPC::BI__builtin_vsx_xvmsubadp:
    case PPC::BI__builtin_vsx_xvmsubasp:
      return Builder.CreateCall(F,
                                {X, Y, Builder.CreateFSub(Zero, Z, "sub")});
    case PPC::BI__builtin_vsx_xvnmsubadp:
    case PPC::BI__builtin_vsx_xvnmsubasp: {
      Value *FsubRes =
        Builder.CreateCall(F, {X, Y, Builder.CreateFSub(Zero, Z, "sub")});
      return Builder.CreateFSub(Zero, FsubRes, "sub");
    }
    }
    llvm_unreachable("Unknown FMA operation");
  }
  }
}
// Emit an intrinsic with one overloaded floating-point operand.
static Value *emitUnaryFPBuiltin(CodeGenFunction &CGF, const CallExpr *E,
                                 unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  Value *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, Src0);
}

// Emit an intrinsic with three overloaded operands of the result type.
static Value *emitTernaryFPBuiltin(CodeGenFunction &CGF, const CallExpr *E,
                                   unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  llvm::Value *Src1 = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Value *Src2 = CGF.EmitScalarExpr(E->getArg(2));
  Value *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, {Src0, Src1, Src2});
}

// Emit an intrinsic that takes a floating-point operand and an integer.
static Value *emitFPIntBuiltin(CodeGenFunction &CGF, const CallExpr *E,
                               unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  llvm::Value *Src1 = CGF.EmitScalarExpr(E->getArg(1));
  Value *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, {Src0, Src1});
}
Value *CodeGenFunction::EmitAMDGPUBuiltinExpr(unsigned BuiltinID,
                                              const CallExpr *E) {
  switch (BuiltinID) {
  case AMDGPU::BI__builtin_amdgpu_div_scale:
  case AMDGPU::BI__builtin_amdgpu_div_scalef: {
    // Translate from the intrinsic's struct return to the builtin's out
    // argument.
    Address FlagOutPtr = EmitPointerWithAlignment(E->getArg(3));

    llvm::Value *X = EmitScalarExpr(E->getArg(0));
    llvm::Value *Y = EmitScalarExpr(E->getArg(1));
    llvm::Value *Z = EmitScalarExpr(E->getArg(2));

    llvm::Value *Callee = CGM.getIntrinsic(Intrinsic::AMDGPU_div_scale,
                                           X->getType());
    llvm::Value *Tmp = Builder.CreateCall(Callee, {X, Y, Z});

    llvm::Value *Result = Builder.CreateExtractValue(Tmp, 0);
    llvm::Value *Flag = Builder.CreateExtractValue(Tmp, 1);

    llvm::Type *RealFlagType
      = FlagOutPtr.getPointer()->getType()->getPointerElementType();

    llvm::Value *FlagExt = Builder.CreateZExt(Flag, RealFlagType);
    Builder.CreateStore(FlagExt, FlagOutPtr);
    return Result;
  }
  case AMDGPU::BI__builtin_amdgpu_div_fmas:
  case AMDGPU::BI__builtin_amdgpu_div_fmasf: {
    llvm::Value *Src0 = EmitScalarExpr(E->getArg(0));
    llvm::Value *Src1 = EmitScalarExpr(E->getArg(1));
    llvm::Value *Src2 = EmitScalarExpr(E->getArg(2));
    llvm::Value *Src3 = EmitScalarExpr(E->getArg(3));

    llvm::Value *F = CGM.getIntrinsic(Intrinsic::AMDGPU_div_fmas,
                                      Src0->getType());
    llvm::Value *Src3ToBool = Builder.CreateIsNotNull(Src3);
    return Builder.CreateCall(F, {Src0, Src1, Src2, Src3ToBool});
  }
  case AMDGPU::BI__builtin_amdgpu_div_fixup:
  case AMDGPU::BI__builtin_amdgpu_div_fixupf:
    return emitTernaryFPBuiltin(*this, E, Intrinsic::AMDGPU_div_fixup);
  case AMDGPU::BI__builtin_amdgpu_trig_preop:
  case AMDGPU::BI__builtin_amdgpu_trig_preopf:
    return emitFPIntBuiltin(*this, E, Intrinsic::AMDGPU_trig_preop);
  case AMDGPU::BI__builtin_amdgpu_rcp:
  case AMDGPU::BI__builtin_amdgpu_rcpf:
    return emitUnaryFPBuiltin(*this, E, Intrinsic::AMDGPU_rcp);
  case AMDGPU::BI__builtin_amdgpu_rsq:
  case AMDGPU::BI__builtin_amdgpu_rsqf:
    return emitUnaryFPBuiltin(*this, E, Intrinsic::AMDGPU_rsq);
  case AMDGPU::BI__builtin_amdgpu_rsq_clamped:
  case AMDGPU::BI__builtin_amdgpu_rsq_clampedf:
    return emitUnaryFPBuiltin(*this, E, Intrinsic::AMDGPU_rsq_clamped);
  case AMDGPU::BI__builtin_amdgpu_ldexp:
  case AMDGPU::BI__builtin_amdgpu_ldexpf:
    return emitFPIntBuiltin(*this, E, Intrinsic::AMDGPU_ldexp);
  case AMDGPU::BI__builtin_amdgpu_class:
  case AMDGPU::BI__builtin_amdgpu_classf:
    return emitFPIntBuiltin(*this, E, Intrinsic::AMDGPU_class);
  default:
    return nullptr;
  }
}
/// Handle a SystemZ function in which the final argument is a pointer to an
/// int that receives the post-instruction CC value. At the LLVM level this
/// is represented as a function that returns a {result, cc} pair.
static Value *EmitSystemZIntrinsicWithCC(CodeGenFunction &CGF,
                                         unsigned IntrinsicID,
                                         const CallExpr *E) {
  unsigned NumArgs = E->getNumArgs() - 1;
  SmallVector<Value *, 8> Args(NumArgs);
  for (unsigned I = 0; I < NumArgs; ++I)
    Args[I] = CGF.EmitScalarExpr(E->getArg(I));
  Address CCPtr = CGF.EmitPointerWithAlignment(E->getArg(NumArgs));
  Value *F = CGF.CGM.getIntrinsic(IntrinsicID);
  Value *Call = CGF.Builder.CreateCall(F, Args);
  Value *CC = CGF.Builder.CreateExtractValue(Call, 1);
  CGF.Builder.CreateStore(CC, CCPtr);
  return CGF.Builder.CreateExtractValue(Call, 0);
}
Value *CodeGenFunction::EmitSystemZBuiltinExpr(unsigned BuiltinID,
                                               const CallExpr *E) {
  switch (BuiltinID) {
  case SystemZ::BI__builtin_tbegin: {
    Value *TDB = EmitScalarExpr(E->getArg(0));
    Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff0c);
    Value *F = CGM.getIntrinsic(Intrinsic::s390_tbegin);
    return Builder.CreateCall(F, {TDB, Control});
  }
  case SystemZ::BI__builtin_tbegin_nofloat: {
    Value *TDB = EmitScalarExpr(E->getArg(0));
    Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff0c);
    Value *F = CGM.getIntrinsic(Intrinsic::s390_tbegin_nofloat);
    return Builder.CreateCall(F, {TDB, Control});
  }
  case SystemZ::BI__builtin_tbeginc: {
    Value *TDB = llvm::ConstantPointerNull::get(Int8PtrTy);
    Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff08);
    Value *F = CGM.getIntrinsic(Intrinsic::s390_tbeginc);
    return Builder.CreateCall(F, {TDB, Control});
  }
  case SystemZ::BI__builtin_tabort: {
    Value *Data = EmitScalarExpr(E->getArg(0));
    Value *F = CGM.getIntrinsic(Intrinsic::s390_tabort);
    return Builder.CreateCall(F, Builder.CreateSExt(Data, Int64Ty, "tabort"));
  }
  case SystemZ::BI__builtin_non_tx_store: {
    Value *Address = EmitScalarExpr(E->getArg(0));
    Value *Data = EmitScalarExpr(E->getArg(1));
    Value *F = CGM.getIntrinsic(Intrinsic::s390_ntstg);
    return Builder.CreateCall(F, {Data, Address});
  }
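  // The Control operands (0xff0c for tbegin, 0xff08 for tbeginc) encode the
  // TBEGIN general-register save mask plus control flags; the exact bit
  // layout is defined by the z/Architecture transactional-execution facility.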
  // Vector builtins. Note that most vector builtins are mapped automatically
  // to target-specific LLVM intrinsics; the ones handled specially here can
  // be represented via standard LLVM IR.
  case SystemZ::BI__builtin_s390_vpopctb:
  case SystemZ::BI__builtin_s390_vpopcth:
  case SystemZ::BI__builtin_s390_vpopctf:
  case SystemZ::BI__builtin_s390_vpopctg: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ResultType);
    return Builder.CreateCall(F, X);
  }
  case SystemZ::BI__builtin_s390_vclzb:
  case SystemZ::BI__builtin_s390_vclzh:
  case SystemZ::BI__builtin_s390_vclzf:
  case SystemZ::BI__builtin_s390_vclzg: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
    Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ResultType);
    return Builder.CreateCall(F, {X, Undef});
  }
  case SystemZ::BI__builtin_s390_vctzb:
  case SystemZ::BI__builtin_s390_vctzh:
  case SystemZ::BI__builtin_s390_vctzf:
  case SystemZ::BI__builtin_s390_vctzg: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
    Function *F = CGM.getIntrinsic(Intrinsic::cttz, ResultType);
    return Builder.CreateCall(F, {X, Undef});
  }
7046 return Builder.CreateCall(F, X);
7048 case SystemZ::BI__builtin_s390_vfmadb: {
7054 return Builder.CreateCall(F, {
X, Y, Z});
7056 case SystemZ::BI__builtin_s390_vfmsdb: {
7061 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
7063 return Builder.CreateCall(F, {
X, Y,
Builder.CreateFSub(Zero, Z,
"sub")});
7065 case SystemZ::BI__builtin_s390_vflpdb: {
7069 return Builder.CreateCall(F, X);
7071 case SystemZ::BI__builtin_s390_vflndb: {
7074 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
7076 return Builder.CreateFSub(Zero,
Builder.CreateCall(F, X),
"sub");
  case SystemZ::BI__builtin_s390_vfidb: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *X = EmitScalarExpr(E->getArg(0));
    // Constant-fold the M4 and M5 mask arguments.
    llvm::APSInt M4, M5;
    bool IsConstM4 = E->getArg(1)->isIntegerConstantExpr(M4, getContext());
    bool IsConstM5 = E->getArg(2)->isIntegerConstantExpr(M5, getContext());
    assert(IsConstM4 && IsConstM5 && "Constant arg isn't actually constant?");
    (void)IsConstM4; (void)IsConstM5;
    // Check whether this instance can be represented via a standard LLVM
    // intrinsic. We only support some combinations of M4 and M5.
    Intrinsic::ID ID = Intrinsic::not_intrinsic;
    switch (M4.getZExtValue()) {
    default: break;
    case 0:  // IEEE-inexact exception allowed
      switch (M5.getZExtValue()) {
      default: break;
      case 0: ID = Intrinsic::rint; break;
      }
      break;
    case 4:  // IEEE-inexact exception suppressed
      switch (M5.getZExtValue()) {
      default: break;
      case 0: ID = Intrinsic::nearbyint; break;
      case 1: ID = Intrinsic::round; break;
      case 5: ID = Intrinsic::trunc; break;
      case 6: ID = Intrinsic::ceil; break;
      case 7: ID = Intrinsic::floor; break;
      }
      break;
    }
    if (ID != Intrinsic::not_intrinsic) {
      Function *F = CGM.getIntrinsic(ID, ResultType);
      return Builder.CreateCall(F, X);
    }
    Function *F = CGM.getIntrinsic(Intrinsic::s390_vfidb);
    Value *M4Value = llvm::ConstantInt::get(getLLVMContext(), M4);
    Value *M5Value = llvm::ConstantInt::get(getLLVMContext(), M5);
    return Builder.CreateCall(F, {X, M4Value, M5Value});
  }
#define INTRINSIC_WITH_CC(NAME) \
  case SystemZ::BI__builtin_##NAME: \
    return EmitSystemZIntrinsicWithCC(*this, Intrinsic::NAME, E)

  // One INTRINSIC_WITH_CC(s390_*) case per CC-producing vector builtin is
  // instantiated here in the full file.
#undef INTRINSIC_WITH_CC

  default:
    return nullptr;
  }
}
Value *CodeGenFunction::EmitNVPTXBuiltinExpr(unsigned BuiltinID,
                                             const CallExpr *E) {
  switch (BuiltinID) {
  case NVPTX::BI__nvvm_atom_add_gen_i:
  case NVPTX::BI__nvvm_atom_add_gen_l:
  case NVPTX::BI__nvvm_atom_add_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Add, E);

  case NVPTX::BI__nvvm_atom_sub_gen_i:
  case NVPTX::BI__nvvm_atom_sub_gen_l:
  case NVPTX::BI__nvvm_atom_sub_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Sub, E);

  case NVPTX::BI__nvvm_atom_and_gen_i:
  case NVPTX::BI__nvvm_atom_and_gen_l:
  case NVPTX::BI__nvvm_atom_and_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::And, E);

  case NVPTX::BI__nvvm_atom_or_gen_i:
  case NVPTX::BI__nvvm_atom_or_gen_l:
  case NVPTX::BI__nvvm_atom_or_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Or, E);

  case NVPTX::BI__nvvm_atom_xor_gen_i:
  case NVPTX::BI__nvvm_atom_xor_gen_l:
  case NVPTX::BI__nvvm_atom_xor_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Xor, E);

  case NVPTX::BI__nvvm_atom_xchg_gen_i:
  case NVPTX::BI__nvvm_atom_xchg_gen_l:
  case NVPTX::BI__nvvm_atom_xchg_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Xchg, E);

  case NVPTX::BI__nvvm_atom_max_gen_i:
  case NVPTX::BI__nvvm_atom_max_gen_l:
  case NVPTX::BI__nvvm_atom_max_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Max, E);

  case NVPTX::BI__nvvm_atom_max_gen_ui:
  case NVPTX::BI__nvvm_atom_max_gen_ul:
  case NVPTX::BI__nvvm_atom_max_gen_ull:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::UMax, E);

  case NVPTX::BI__nvvm_atom_min_gen_i:
  case NVPTX::BI__nvvm_atom_min_gen_l:
  case NVPTX::BI__nvvm_atom_min_gen_ll:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Min, E);

  case NVPTX::BI__nvvm_atom_min_gen_ui:
  case NVPTX::BI__nvvm_atom_min_gen_ul:
  case NVPTX::BI__nvvm_atom_min_gen_ull:
    return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::UMin, E);

  case NVPTX::BI__nvvm_atom_cas_gen_i:
  case NVPTX::BI__nvvm_atom_cas_gen_l:
  case NVPTX::BI__nvvm_atom_cas_gen_ll:
    // __nvvm_atom_cas_gen_* should return the old value rather than the
    // success flag.
    return MakeAtomicCmpXchgValue(*this, E, /*ReturnBool=*/false);

  case NVPTX::BI__nvvm_atom_add_gen_f: {
    Value *Ptr = EmitScalarExpr(E->getArg(0));
    Value *Val = EmitScalarExpr(E->getArg(1));
    // atomicrmw only deals with integer arguments, so we need to use
    // LLVM's nvvm_atomic_load_add_f32 intrinsic for that.
    Value *FnALAF32 =
        CGM.getIntrinsic(Intrinsic::nvvm_atomic_load_add_f32, Ptr->getType());
    return Builder.CreateCall(FnALAF32, {Ptr, Val});
  }
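  // atomicrmw has no floating-point add at this point, so the f32 case cannot
  // reuse MakeBinaryAtomicValue like the integer builtins above and is
  // lowered through the NVVM load-add intrinsic instead.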
  default:
    return nullptr;
  }
}

Value *CodeGenFunction::EmitWebAssemblyBuiltinExpr(unsigned BuiltinID,
                                                   const CallExpr *E) {
  switch (BuiltinID) {
  case WebAssembly::BI__builtin_wasm_memory_size: {
    llvm::Type *ResultType = ConvertType(E->getType());
    Value *Callee = CGM.getIntrinsic(Intrinsic::wasm_memory_size, ResultType);
    return Builder.CreateCall(Callee);
  }
  case WebAssembly::BI__builtin_wasm_grow_memory: {
    Value *X = EmitScalarExpr(E->getArg(0));
    Value *Callee = CGM.getIntrinsic(Intrinsic::wasm_grow_memory, X->getType());
    return Builder.CreateCall(Callee, X);
  }
  default:
    return nullptr;
  }
}
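// Both wasm builtins map one-to-one onto intrinsics overloaded on the
// page-count type, so the same lowering serves wasm32 and wasm64.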
llvm::PointerType * Int8PtrPtrTy
ReturnValueSlot - Contains the address where the return value of a function can be stored...
Defines the clang::ASTContext interface.
llvm::StoreInst * CreateDefaultAlignedStore(llvm::Value *Val, llvm::Value *Addr, bool IsVolatile=false)
static Value * emitFPIntBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID)
FunctionDecl - An instance of this class is created to represent a function declaration or definition...
llvm::Value * EmitAArch64CompareBuiltinExpr(llvm::Value *Op, llvm::Type *Ty, const llvm::CmpInst::Predicate Fp, const llvm::CmpInst::Predicate Ip, const llvm::Twine &Name="")
PointerType - C99 6.7.5.1 - Pointer Declarators.
A (possibly-)qualified type.
#define fma(__x, __y, __z)
CodeGenTypes & getTypes()
static WidthAndSignedness getIntegerWidthAndSignedness(const clang::ASTContext &context, const clang::QualType Type)
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
llvm::Module & getModule() const
static struct WidthAndSignedness EncompassingIntegerType(ArrayRef< struct WidthAndSignedness > Types)
llvm::Value * EmitNVPTXBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
llvm::LLVMContext & getLLVMContext()
const TargetInfo & getTarget() const
static const Builtin::Info BuiltinInfo[]
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
llvm::LoadInst * CreateDefaultAlignedLoad(llvm::Value *Addr, const llvm::Twine &Name="")
llvm::Type * FloatTy
float, double
static Value * EmitToInt(CodeGenFunction &CGF, llvm::Value *V, QualType T, llvm::IntegerType *IntType)
Emit the conversions required to turn the given value into an integer of the given size...
const void * Store
Store - This opaque type encapsulates an immutable mapping from locations to values.
The base class of the type hierarchy.
bool isBooleanType() const
#define NEONMAP1(NameBase, LLVMIntrinsic, TypeModifier)
llvm::IntegerType * Int8Ty
i8, i16, i32, and i64
bool HasSideEffects(const ASTContext &Ctx, bool IncludePossibleEffects=true) const
HasSideEffects - This routine returns true for all those expressions which have any effect other than...
static RValue emitLibraryCall(CodeGenFunction &CGF, const FunctionDecl *Fn, const CallExpr *E, llvm::Value *calleeValue)
llvm::Type * getElementType() const
Return the type of the values stored in this address.
const Expr * getCallee() const
static Value * EmitSystemZIntrinsicWithCC(CodeGenFunction &CGF, unsigned IntrinsicID, const CallExpr *E)
Handle a SystemZ function in which the final argument is a pointer to an int that receives the post-i...
uint64_t getTypeSize(QualType T) const
Return the size of the specified (complete) type T, in bits.
llvm::Value * EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
ParmVarDecl - Represents a parameter to a function.
static llvm::Value * getTypeSize(CodeGenFunction &CGF, QualType Ty)
static bool HasExtraNeonArgument(unsigned BuiltinID)
Return true if BuiltinID is an overloaded Neon intrinsic with an extra argument that specifies the ve...
The collection of all-type qualifiers we support.
Expr * IgnoreImpCasts() LLVM_READONLY
IgnoreImpCasts - Skip past any implicit casts which might surround this expression.
llvm::Value * EmitCommonNeonBuiltinExpr(unsigned BuiltinID, unsigned LLVMIntrinsic, unsigned AltLLVMIntrinsic, const char *NameHint, unsigned Modifier, const CallExpr *E, SmallVectorImpl< llvm::Value * > &Ops, Address PtrOp0, Address PtrOp1)
class LLVM_ALIGNAS(8) DependentTemplateSpecializationType const IdentifierInfo * Name
Represents a template specialization type whose template cannot be resolved, e.g. ...
CodeGenFunction - This class organizes the per-function state that is used while generating LLVM code...
llvm::Type * ConvertType(QualType T)
ConvertType - Convert type T into a llvm::Type.
static llvm::VectorType * GetNeonType(CodeGenFunction *CGF, NeonTypeFlags TypeFlags, bool V1Ty=false)
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
llvm::IntegerType * Int64Ty
llvm::IntegerType * SizeTy
static bool NEONSIMDIntrinsicsProvenSorted
RValue EmitCall(const CGFunctionInfo &FnInfo, llvm::Value *Callee, ReturnValueSlot ReturnValue, const CallArgList &Args, CGCalleeInfo CalleeInfo=CGCalleeInfo(), llvm::Instruction **callOrInvoke=nullptr)
EmitCall - Generate a call of the given function, expecting the given result type, and using the given argument list which specifies both the LLVM arguments and the types they were derived from.
static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E, Instruction::BinaryOps Op, bool Invert=false)
Utility to insert an atomic instruction based Instrinsic::ID and the expression node, where the return value is the result of the operation.
bool hasSameUnqualifiedType(QualType T1, QualType T2) const
Determine whether the given types are equivalent after cvr-qualifiers have been removed.
static const NeonIntrinsicInfo AArch64SIMDIntrinsicMap[]
static Value * MakeBinaryAtomicValue(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E)
Utility to insert an atomic instruction based on Instrinsic::ID and the expression node...
llvm::Value * BuildVector(ArrayRef< llvm::Value * > Ops)
Address CreateElementBitCast(Address Addr, llvm::Type *Ty, const llvm::Twine &Name="")
Cast the element type of the given address to a different type, preserving information like the align...
CharUnits - This is an opaque type for sizes expressed in character units.
static const NeonIntrinsicInfo AArch64SISDIntrinsicMap[]
APValue Val
Val - This is the value the expression can be folded to.
#define INTRINSIC_WITH_CC(NAME)
QualType GetBuiltinType(unsigned ID, GetBuiltinTypeError &Error, unsigned *IntegerConstantArgs=nullptr) const
Return the type for the specified builtin.
llvm::PointerType * VoidPtrTy
static bool AArch64SISDIntrinsicsProvenSorted
static Value * emitUnaryFPBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID)
Expr * IgnoreParenCasts() LLVM_READONLY
IgnoreParenCasts - Ignore parentheses and casts.
Scope - A scope is a transient data structure that is used while parsing the program.
llvm::Constant * CreateRuntimeVariable(llvm::Type *Ty, StringRef Name)
Create a new runtime global variable with the specified type and name.
CharUnits getTypeSizeInChars(QualType T) const
Return the size of the specified (complete) type T, in characters.
RValue EmitBuiltinExpr(const FunctionDecl *FD, unsigned BuiltinID, const CallExpr *E, ReturnValueSlot ReturnValue)
detail::InMemoryDirectory::const_iterator I
std::pair< llvm::Value *, llvm::Value * > ComplexPairTy
Represents a prototype with parameter type info, e.g.
llvm::CallInst * EmitNounwindRuntimeCall(llvm::Value *callee, const Twine &name="")
#define NEONMAP0(NameBase)
void EmitAnyExprToMem(const Expr *E, Address Location, Qualifiers Quals, bool IsInitializer)
EmitAnyExprToMem - Emits the code necessary to evaluate an arbitrary expression into the given memory...
bool EvaluateAsRValue(EvalResult &Result, const ASTContext &Ctx) const
EvaluateAsRValue - Return true if this is a constant which we can fold to an rvalue using any crazy t...
RValue - This trivial value class is used to represent the result of an expression that is evaluated...
ID
Defines the set of possible language-specific address spaces.
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee...
llvm::Value * EmitVAStartEnd(llvm::Value *ArgValue, bool IsStart)
Emits a call to an LLVM variable-argument intrinsic, either llvm.va_start or llvm.va_end.
llvm::Value * getPointer() const
#define copysign(__x, __y)
Expr - This represents one expression.
StringRef getName() const
Return the actual identifier string.
const char * getName(unsigned ID) const
Return the identifier name for the specified builtin, e.g.
static Value * emitTernaryFPBuiltin(CodeGenFunction &CGF, const CallExpr *E, unsigned IntrinsicID)
void EmitStoreOfScalar(llvm::Value *Value, Address Addr, bool Volatile, QualType Ty, AlignmentSource AlignSource=AlignmentSource::Type, llvm::MDNode *TBAAInfo=nullptr, bool isInit=false, QualType TBAABaseTy=QualType(), uint64_t TBAAOffset=0, bool isNontemporal=false)
EmitStoreOfScalar - Store a scalar value to an address, taking care to appropriately convert from the...
#define INTRINSIC_X86_XSAVE_ID(NAME)
ASTContext & getContext() const
llvm::Value * EmitToMemory(llvm::Value *Value, QualType Ty)
EmitToMemory - Change a scalar value from its value representation to its in-memory representation...
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
void add(RValue rvalue, QualType type, bool needscopy=false)
static Value * EmitFromInt(CodeGenFunction &CGF, llvm::Value *V, QualType T, llvm::Type *ResultType)
static SVal getValue(SVal val, SValBuilder &svalBuilder)
llvm::LLVMContext & getLLVMContext()
llvm::IntegerType * Int32Ty
Address EmitPointerWithAlignment(const Expr *Addr, AlignmentSource *Source=nullptr)
EmitPointerWithAlignment - Given an expression with a pointer type, emit the value and compute our be...
static const NeonIntrinsicInfo ARMSIMDIntrinsicMap[]
bool EvaluateAsInt(llvm::APSInt &Result, const ASTContext &Ctx, SideEffectsKind AllowSideEffects=SE_NoSideEffects) const
EvaluateAsInt - Return true if this is a constant which we can fold and convert to an integer...
LValue MakeNaturalAlignAddrLValue(llvm::Value *V, QualType T)
static bool areBOSTypesCompatible(int From, int To)
Checks if using the result of __builtin_object_size(p, From) in place of __builtin_object_size(p, To) is correct.
llvm::Function * getIntrinsic(unsigned IID, ArrayRef< llvm::Type * > Tys=None)
class LLVM_ALIGNAS(8) TemplateSpecializationType unsigned NumArgs
Represents a type template specialization; the template must be a class template, a type alias templa...
static const NeonIntrinsicInfo * findNeonIntrinsicInMap(ArrayRef< NeonIntrinsicInfo > IntrinsicMap, unsigned BuiltinID, bool &MapProvenSorted)
The result type of a method or function.
static Value * EmitNontemporalLoad(CodeGenFunction &CGF, const CallExpr *E)
llvm::Value * EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E)
GlobalDecl - represents a global declaration.
llvm::Value * EmitAMDGPUBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
The l-value was considered opaque, so the alignment was determined from a type.
llvm::Value * EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
static llvm::Value * EmitOverflowIntrinsic(CodeGenFunction &CGF, const llvm::Intrinsic::ID IntrinsicID, llvm::Value *X, llvm::Value *Y, llvm::Value *&Carry)
Emit a call to llvm.
Enumerates target-specific builtins in their own namespaces within namespace clang.
Address CreateBitCast(Address Addr, llvm::Type *Ty, const llvm::Twine &Name="")
TypeInfo getTypeInfo(const Type *T) const
Get the size and alignment of the specified complete type in bits.
llvm::Constant * CreateRuntimeFunction(llvm::FunctionType *Ty, StringRef Name, llvm::AttributeSet ExtraAttrs=llvm::AttributeSet())
Create a new runtime function with the specified type and name.
static Value * packTBLDVectorList(CodeGenFunction &CGF, ArrayRef< Value * > Ops, Value *ExtOp, Value *IndexOp, llvm::Type *ResTy, unsigned IntID, const char *Name)
llvm::Value * EmitFromMemory(llvm::Value *Value, QualType Ty)
EmitFromMemory - Change a scalar value from its memory representation to its value representation...
bool hasSideEffects() const
llvm::Value * EmitLoadOfScalar(Address Addr, bool Volatile, QualType Ty, SourceLocation Loc, AlignmentSource AlignSource=AlignmentSource::Type, llvm::MDNode *TBAAInfo=nullptr, QualType TBAABaseTy=QualType(), uint64_t TBAAOffset=0, bool isNontemporal=false)
EmitLoadOfScalar - Load a scalar value from an address, taking care to appropriately convert from the memory representation to the LLVM value representation.
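For example, a hypothetical load of an int lvalue (Addr and IntQTy are assumed to be in scope):
llvm::Value *V = CGF.EmitLoadOfScalar(Addr, /*Volatile=*/false, IntQTy,
                                      E->getExprLoc());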
static llvm::VectorType * GetFloatNeonType(CodeGenFunction *CGF, NeonTypeFlags IntTypeFlags)
static Value * EmitNontemporalStore(CodeGenFunction &CGF, const CallExpr *E)
llvm::IntegerType * Int16Ty
llvm::APSInt EvaluateKnownConstInt(const ASTContext &Ctx, SmallVectorImpl< PartialDiagnosticAt > *Diag=nullptr) const
EvaluateKnownConstInt - Call EvaluateAsRValue and return the folded integer.
llvm::Value * EmitAArch64BuiltinExpr(unsigned BuiltinID, const CallExpr *E)
static Value * MakeAtomicCmpXchgValue(CodeGenFunction &CGF, const CallExpr *E, bool ReturnBool)
Utility to insert an atomic cmpxchg instruction.
bool isIntegerConstantExpr(llvm::APSInt &Result, const ASTContext &Ctx, SourceLocation *Loc=nullptr, bool isEvaluated=true) const
isIntegerConstantExpr - Return true if this expression is a valid integer constant expression...
llvm::Value * EmitNeonCall(llvm::Function *F, SmallVectorImpl< llvm::Value * > &O, const char *name, unsigned shift=0, bool rightshift=false)
EltType getEltType() const
const T * castAs() const
Member-template castAs<specific type>.
bool isVolatileQualified() const
Determine whether this type is volatile-qualified.
Address EmitMSVAListRef(const Expr *E)
Emit a "reference" to a __builtin_ms_va_list; this is always the value of the expression, because a __builtin_ms_va_list is a pointer to a char.
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic expression.
static Value * EmitFAbs(CodeGenFunction &CGF, Value *V)
EmitFAbs - Emit a call to @llvm.fabs().
llvm::Value * getBuiltinLibFunction(const FunctionDecl *FD, unsigned BuiltinID)
Given a builtin id for a function like "__builtin_fabsf", return a Function* for "fabsf".
llvm::Value * EmitScalarExpr(const Expr *E, bool IgnoreResultAssign=false)
EmitScalarExpr - Emit the computation of the specified expression of LLVM scalar type, returning the result.
QualType getCallReturnType(const ASTContext &Ctx) const
getCallReturnType - Get the return type of the call expr.
CGFunctionInfo - Class to encapsulate the information about a function definition.
llvm::Value * EmitNeonRShiftImm(llvm::Value *Vec, llvm::Value *Amt, llvm::Type *Ty, bool usgn, const char *name)
CharUnits getAlignment() const
Return the alignment of this pointer.
This class organizes the cross-function state that is used while generating LLVM code.
#define NEONMAP2(NameBase, LLVMIntrinsic, AltLLVMIntrinsic, TypeModifier)
bool tryEvaluateObjectSize(uint64_t &Result, ASTContext &Ctx, unsigned Type) const
If the current Expr is a pointer, this will try to statically determine the number of bytes available where the pointer points.
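To make the semantics concrete, here is what such static evaluation can typically prove at a call site:
char buf[16];
struct S { char a[4]; char b[4]; } s;
size_t n0 = __builtin_object_size(buf, 0); // 16: max bytes in the whole object
size_t n1 = __builtin_object_size(s.a, 1); // 4: bytes in the closest subobject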
static RValue getComplex(llvm::Value *V1, llvm::Value *V2)
EvalResult is a struct with detailed info about an evaluated expression.
Address CreateMemTemp(QualType T, const Twine &Name="tmp")
CreateMemTemp - Create a temporary memory object of the given type, with appropriate alignment...
FunctionDecl * getDirectCallee()
If the callee is a FunctionDecl, return it. Otherwise return 0.
llvm::Value * EmitWebAssemblyBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
llvm::LoadInst * CreateAlignedLoad(llvm::Value *Addr, CharUnits Align, const llvm::Twine &Name="")
const TargetInfo * getAuxTargetInfo() const
llvm::LoadInst * CreateLoad(Address Addr, const llvm::Twine &Name="")
static Value * EmitTargetArchBuiltinExpr(CodeGenFunction *CGF, unsigned BuiltinID, const CallExpr *E, llvm::Triple::ArchType Arch)
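A plausible shape for this per-architecture dispatch, abridged (the real switch covers more triple variants):
switch (Arch) {
case llvm::Triple::arm:
case llvm::Triple::thumb:
  return CGF->EmitARMBuiltinExpr(BuiltinID, E);
case llvm::Triple::aarch64:
  return CGF->EmitAArch64BuiltinExpr(BuiltinID, E);
case llvm::Triple::x86:
case llvm::Triple::x86_64:
  return CGF->EmitX86BuiltinExpr(BuiltinID, E);
default:
  return nullptr;
}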
const llvm::Triple & getTriple() const
Returns the target triple of the primary target.
llvm::StoreInst * CreateStore(llvm::Value *Val, Address Addr, bool IsVolatile=false)
unsigned getNumArgs() const
getNumArgs - Return the number of actual arguments to this call.
Flags to identify the types for overloaded Neon builtins.
Expr * IgnoreParenImpCasts() LLVM_READONLY
IgnoreParenImpCasts - Ignore parentheses and implicit casts.
llvm::Value * EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx)
static Value * EmitCommonNeonSISDBuiltinExpr(CodeGenFunction &CGF, const NeonIntrinsicInfo &SISDInfo, SmallVectorImpl< Value * > &Ops, const CallExpr *E)
static Value * EmitAArch64TblBuiltinExpr(CodeGenFunction &CGF, unsigned BuiltinID, const CallExpr *E, SmallVectorImpl< Value * > &Ops)
bool isLibFunction(unsigned ID) const
Return true if this is a builtin for a libc/libm function, with a "__builtin_" prefix (e.g. __builtin_abs).
llvm::Value * vectorWrapScalar16(llvm::Value *Op)
static llvm::Value * getDefaultBuiltinObjectSizeResult(unsigned Type, llvm::IntegerType *ResType)
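A minimal sketch of the usual fallback rule, assuming the standard type encoding (defaultBOSResultSketch is an illustrative name): types 0 and 1 request an upper bound, so the unknown answer is (size_t)-1; types 2 and 3 request a lower bound, so it is 0.
static llvm::Value *defaultBOSResultSketch(unsigned Type,
                                           llvm::IntegerType *ResType) {
  // Bit 1 set => minimum-bytes query, so "unknown" is 0; otherwise the
  // query wants a maximum, so "unknown" is -1.
  return llvm::ConstantInt::get(ResType, (Type & 2) ? 0 : -1,
                                /*isSigned=*/true);
}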
llvm::Value * EmitNeonShiftVector(llvm::Value *V, llvm::Type *Ty, bool negateForRightShift)
llvm::PointerType * Int8PtrTy
void setNontemporal(bool Value)
llvm::Function * LookupNeonLLVMIntrinsic(unsigned IntrinsicID, unsigned Modifier, llvm::Type *ArgTy, const CallExpr *E)
llvm::Type * ConvertType(QualType T)
Builtin::Context & BuiltinInfo
Defines the clang::TargetInfo interface.
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
static Value * EmitSignBit(CodeGenFunction &CGF, Value *V)
Emit the computation of the sign bit for a floating point value.
static RValue get(llvm::Value *V)
LValue - This represents an lvalue reference.
bool isSignedIntegerType() const
Return true if this is an integer type that is signed, according to C99 6.2.5p4 [char, signed char, short, int, long..], or an enum decl which has a signed representation.
CallArgList - Type for representing both the value and type of arguments in a call.
static RValue EmitBinaryAtomic(CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E)
bool isIntegerType() const
isIntegerType() does not include complex integers (a GCC extension).
static bool AArch64SIMDIntrinsicsProvenSorted
static Value * EmitSpecialRegisterBuiltin(CodeGenFunction &CGF, const CallExpr *E, llvm::Type *RegisterType, llvm::Type *ValueType, bool IsRead)
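The read path plausibly reduces to naming the register in metadata and calling llvm.read_register; a sketch under that assumption (readRegisterSketch and RegName are illustrative):
static Value *readRegisterSketch(CodeGenFunction &CGF, StringRef RegName,
                                 llvm::Type *RegisterType) {
  llvm::LLVMContext &Ctx = CGF.getLLVMContext();
  // llvm.read_register identifies the register via an MDString operand.
  llvm::Metadata *Ops[] = {llvm::MDString::get(Ctx, RegName)};
  llvm::Value *Reg =
      llvm::MetadataAsValue::get(Ctx, llvm::MDNode::get(Ctx, Ops));
  llvm::Function *F =
      CGF.CGM.getIntrinsic(llvm::Intrinsic::read_register, RegisterType);
  return CGF.Builder.CreateCall(F, Reg);
}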
llvm::Value * EmitSystemZBuiltinExpr(unsigned BuiltinID, const CallExpr *E)
bool isPointerType() const