References
include/llvm/CodeGen/GlobalISel/LegalizerInfo.h 112 return Opcode == RHS.Opcode && Idx == RHS.Idx && Type == RHS.Type;
176 return Type0 == Other.Type0 && Type1 == Other.Type1 &&
184 return Type0 == Other.Type0 && Type1 == Other.Type1 &&
864 return VecTy.isVector() && VecTy.getElementType() == EltTy &&
882 return VecTy.isVector() && VecTy.getElementType() == EltTy &&
899 assert(MinTy.getElementType() == MaxTy.getElementType() &&
include/llvm/CodeGen/GlobalISel/MIPatternMatch.h 329 return MRI.getType(Reg) == Ty;
include/llvm/Support/LowLevelTypeImpl.h 180 bool operator!=(const LLT &RHS) const { return !(*this == RHS); }
296 return LHS == RHS;
lib/CodeGen/GlobalISel/CombinerHelper.cpp 81 if (DstTy.isValid() && SrcTy.isValid() && DstTy == SrcTy)
138 assert(MRI.getType(Undef->getOperand(0).getReg()) ==
294 if (CurrentUse.Ty == TyForCandidate) {
474 if (Preferred.Ty == UseDstTy) {
831 if (Ty == LLT()) {
915 assert(ExtType == Ty && "Vector memset value type not supported yet");
lib/CodeGen/GlobalISel/IRTranslator.cpp 1024 if (getLLTForType(*U.getOperand(0)->getType(), *DL) ==
lib/CodeGen/GlobalISel/LegalityPredicates.cpp 19 [=](const LegalityQuery &Query) { return Query.Types[TypeIdx] == Type; };
lib/CodeGen/GlobalISel/LegalizerHelper.cpp 177 assert(OrigTy.getElementType() == TargetTy.getElementType());
184 assert(OrigTy.getElementType() == TargetTy);
1183 assert(MRI.getType(SrcReg) == LLT::scalar(PartSize));
1187 Register NextResult = I + 1 == NumOps && WideTy == DstTy ? DstReg :
2771 if (DstTy == NarrowTy)
2775 if (DstTy == GCDTy) {
2845 if (DstTy == WidenedDstTy)
3475 } else if (SrcStart == OpStart && NarrowTy == MRI.getType(OpReg)) {
3543 } else if (DstStart == OpStart && NarrowTy == MRI.getType(OpReg)) {
3797 assert(MRI.getType(Src) == S64 && MRI.getType(Dst) == S32);
3855 if (DstTy == LLT::scalar(32)) {
3880 if (DstTy == S32) {
3998 if (Src0Ty == Src1Ty) {
4081 SrcTy.getElementType() == DstTy) {
4207 (SrcTy.isVector() && DstTy == SrcTy.getElementType()))) {
4240 (DstTy.isVector() && DstTy.getElementType() == InsertTy))) {
lib/CodeGen/GlobalISel/LegalizerInfo.cpp 148 return NewTy.getScalarType() == OldTy.getElementType();
lib/CodeGen/GlobalISel/MachineIRBuilder.cpp 213 assert((Res == Op0 && Res == Op1) && "type mismatch");
219 assert((Res == Op0) && "type mismatch");
226 Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
252 Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
470 assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
494 if (SrcTy == DstTy)
757 assert(OldValResTy == CmpValTy && "type mismatch");
758 assert(OldValResTy == NewValTy && "type mismatch");
783 assert(OldValResTy == CmpValTy && "type mismatch");
784 assert(OldValResTy == NewValTy && "type mismatch");
807 assert(OldValResTy == ValTy && "type mismatch");
942 assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
1034 assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
1052 return Op.getLLTTy(*getMRI()) ==
1066 return Op.getLLTTy(*getMRI()) ==
1090 assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
1100 assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
1117 return Op.getLLTTy(*getMRI()) ==
1134 return Op.getLLTTy(*getMRI()) ==
1150 Op.getLLTTy(*getMRI()) ==
1163 assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
1164 (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
1167 assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
lib/Target/AArch64/AArch64ISelLowering.cpp 1170 Ty == LLT::vector(2, 64);
lib/Target/AArch64/AArch64InstructionSelector.cpp 837 if (Ty == LLT::scalar(32))
839 else if (Ty == LLT::scalar(64) || Ty == LLT::pointer(0, 64))
1021 if (Ty == LLT::vector(2, 64)) {
1023 } else if (Ty == LLT::vector(4, 32)) {
1025 } else if (Ty == LLT::vector(2, 32)) {
1057 if (Ty == LLT::vector(2, 64)) {
1061 } else if (Ty == LLT::vector(4, 32)) {
1065 } else if (Ty == LLT::vector(2, 32)) {
1301 if (Ty == LLT::scalar(64)) {
1977 } else if (Opcode == TargetOpcode::G_TRUNC && DstTy == LLT::scalar(32) &&
1978 SrcTy == LLT::scalar(64)) {
1993 if (DstTy == LLT::vector(4, 16) && SrcTy == LLT::vector(4, 32)) {
2721 if (DstTy == LLT::scalar(128)) {
3242 if (CmpTy == LLT::scalar(32)) {
3245 } else if (CmpTy == LLT::scalar(64) || CmpTy.isPointer()) {
lib/Target/AArch64/AArch64LegalizerInfo.cpp 73 if (EltTy == s64)
161 return Ty.isVector() && Ty.getElementType() == s16 &&
169 return Query.Types[0] == s16 && !ST.hasFullFP16();
209 if (Ty1 == p0)
331 [=](const LegalityQuery &Query) { return Query.Types[1] == v2s16; },
334 [=](const LegalityQuery &Query) { return Query.Types[1] == v2p0; }, 0,
359 if (SrcTy == s1)
549 return VecTy == v2s16 || VecTy == v4s16 || VecTy == v8s16 ||
550 VecTy == v4s32 || VecTy == v2s64 || VecTy == v2s32;
557 return VecTy == v2s32 || VecTy == v4s32 || VecTy == v2s64;
590 if (DstTy == Ty)
610 return Query.Types[0] == p0 && Query.Types[1] == s64;
lib/Target/AArch64/AArch64RegisterBankInfo.cpp 771 if (SrcTy.isVector() || SrcTy == LLT::scalar(128) ||
lib/Target/AMDGPU/AMDGPUCallLowering.cpp 501 assert(LLTy.getElementType() == PartLLT.getElementType());
523 if (DstEltTy == PartLLT) {
lib/Target/AMDGPU/AMDGPUGenRegisterBankInfo.def 228 if (SizeTy == LLT::vector(8, 32))
230 else if (SizeTy == LLT::vector(16, 32))
232 else if (SizeTy == LLT::vector(4, 64))
234 else if (SizeTy == LLT::vector(8, 64))
lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp 151 return Query.Types[TypeIdx].getElementType() == Type;
1352 assert(MRI.getType(Src) == S64);
1398 assert(MRI.getType(Src) == S64);
1445 assert(MRI.getType(Src) == S64 && MRI.getType(Dst) == S64);
1506 assert(EltTy == MRI.getType(Dst));
1536 assert(EltTy == MRI.getType(Ins));
1723 if (Ty == LLT::scalar(32) && !ST.hasFP32Denormals())
1725 if (Ty == LLT::scalar(16) && !ST.hasFP16Denormals())
1870 if (DstTy == S16)
1893 if (!MF.getTarget().Options.UnsafeFPMath && ResTy == S64)
1896 if (!Unsafe && ResTy == S32 && ST.hasFP32Denormals())
2061 assert(StoreVT.isVector() && StoreVT.getElementType() == S16);
2088 if (Ty == LLT::scalar(8) || Ty == S16) {
2095 if (Ty.getElementType() == S16 && Ty.getNumElements() <= 4) {
2101 return Ty.getElementType() == S32 && Ty.getNumElements() <= 4;
2104 return Ty == S32;
lib/Target/AMDGPU/AMDGPURegisterBankInfo.cpp 68 if (MRI.getType(Reg) == LLT::scalar(1))
1478 if (Ty == S16) {
2368 if (DstTy == LLT::vector(2, 16)) {
lib/Target/ARM/ARMInstructionSelector.cpp 504 return MRI.getType(LHSReg) == MRI.getType(RHSReg) &&
lib/Target/ARM/ARMLegalizerInfo.cpp 397 assert(MRI.getType(MI.getOperand(2).getReg()) ==
lib/Target/Mips/MipsLegalizerInfo.cpp 47 if (Val == Query.Types[N])
282 if (DstTy == s64)
lib/Target/Mips/MipsRegisterBankInfo.cpp 637 assert(MRI.getType(Dest) == LLT::scalar(32) && "Unexpected operand type.");
lib/Target/X86/X86InstructionSelector.cpp 402 if (Ty == LLT::scalar(8)) {
405 } else if (Ty == LLT::scalar(16)) {
408 } else if (Ty == LLT::scalar(32) || Ty == LLT::pointer(0, 32)) {
418 } else if (Ty == LLT::scalar(64) || Ty == LLT::pointer(0, 64)) {
550 if (Ty == LLT::pointer(0, 64))
552 else if (Ty == LLT::pointer(0, 32))
781 assert(!(SrcTy == LLT::scalar(8) && DstTy == LLT::scalar(32)) &&
783 assert(!(SrcTy == LLT::scalar(16) && DstTy == LLT::scalar(32)) &&
801 return El.DstTy == DstTy && El.SrcTy == SrcTy;
849 if (DstTy == LLT::scalar(8))
851 else if (DstTy == LLT::scalar(16))
853 else if (DstTy == LLT::scalar(32))
855 else if (DstTy == LLT::scalar(64))
1531 assert(RegTy == MRI.getType(Op1Reg) && RegTy == MRI.getType(Op2Reg) &&
unittests/CodeGen/LowLevelTypeTest.cpp 39 EXPECT_TRUE(Ty == Ty);
79 EXPECT_TRUE(VTy == VTy);
194 EXPECT_TRUE(Ty == Ty);
196 EXPECT_TRUE(VTy == VTy);
usr/include/c++/7.4.0/bits/predefined_ops.h 241 { return *__it == _M_value; }
usr/include/c++/7.4.0/bits/stl_pair.h 449 { return __x.first == __y.first && __x.second == __y.second; }
usr/include/c++/7.4.0/tuple 1372 return bool(std::get<__i>(__t) == std::get<__i>(__u))
utils/TableGen/GlobalISelEmitter.cpp 178 bool operator==(const LLTCodeGen &B) const { return Ty == B.Ty; }
utils/unittest/googletest/include/gtest/gtest.h 1392 if (lhs == rhs) {