diff --git a/llvm/lib/Target/PowerPC/PPC.td b/llvm/lib/Target/PowerPC/PPC.td
--- a/llvm/lib/Target/PowerPC/PPC.td
+++ b/llvm/lib/Target/PowerPC/PPC.td
@@ -203,6 +203,10 @@
   SubtargetFeature<"fuse-logical", "HasLogicalFusion", "true",
                    "Target supports Logical Operations fusion",
                    [FeatureFusion]>;
+def FeatureBack2BackFusion:
+  SubtargetFeature<"fuse-back2back", "HasBack2BackFusion", "true",
+                   "Target supports general back to back fusion",
+                   [FeatureFusion]>;
 def FeatureUnalignedFloats :
   SubtargetFeature<"allow-unaligned-fp-access", "AllowsUnalignedFPAccess",
                    "true", "CPU does not trap on unaligned FP access">;
diff --git a/llvm/lib/Target/PowerPC/PPCBack2BackFusion.def b/llvm/lib/Target/PowerPC/PPCBack2BackFusion.def
new file mode 100644
--- /dev/null
+++ b/llvm/lib/Target/PowerPC/PPCBack2BackFusion.def
@@ -0,0 +1,1042 @@
+// Automatically generated file, do not edit!
+//
+// This file defines instruction list for general back2back fusion.
+//===----------------------------------------------------------------------===//
+FUSION_FEATURE(GeneralBack2Back, hasBack2BackFusion, -1,
+               FUSION_OP_SET(ADD4,
+                             ADD4O,
+                             ADD4TLS,
+                             ADD4_rec,
+                             ADD8,
+                             ADD8O,
+                             ADD8TLS,
+                             ADD8TLS_,
+                             ADD8_rec,
+                             ADDE,
+                             ADDE8,
+                             ADDE8O,
+                             ADDEO,
+                             ADDEX,
+                             ADDEX8,
+                             ADDI,
+                             ADDI8,
+                             ADDIC,
+                             ADDIC8,
+                             ADDIS,
+                             ADDIS8,
+                             ADDISdtprelHA32,
+                             ADDIStocHA,
+                             ADDIStocHA8,
+                             ADDIdtprelL32,
+                             ADDItlsldLADDR32,
+                             ADDItocL,
+                             ADDME,
+                             ADDME8,
+                             ADDME8O,
+                             ADDMEO,
+                             ADDZE,
+                             ADDZE8,
+                             ADDZE8O,
+                             ADDZEO,
+                             AND,
+                             AND8,
+                             AND8_rec,
+                             ANDC,
+                             ANDC8,
+                             ANDC8_rec,
+                             ANDC_rec,
+                             ANDI8_rec,
+                             ANDIS8_rec,
+                             ANDIS_rec,
+                             ANDI_rec,
+                             AND_rec,
+                             CMPB,
+                             CMPB8,
+                             CNTLZD,
+                             CNTLZD_rec,
+                             CNTLZW,
+                             CNTLZW8,
+                             CNTLZW8_rec,
+                             CNTLZW_rec,
+                             CNTTZD,
+                             CNTTZD_rec,
+                             CNTTZW,
+                             CNTTZW8,
+                             CNTTZW8_rec,
+                             CNTTZW_rec,
+                             EQV,
+                             EQV8,
+                             EQV8_rec,
+                             EQV_rec,
+                             EXTSB,
+                             EXTSB8,
+                             EXTSB8_32_64,
+                             EXTSB8_rec,
+                             EXTSB_rec,
+                             EXTSH,
+                             EXTSH8,
+                             EXTSH8_32_64,
+                             EXTSH8_rec,
+                             EXTSH_rec,
+                             EXTSW,
+                             EXTSWSLI,
+                             EXTSWSLI_32_64,
+                             EXTSWSLI_32_64_rec,
+                             EXTSWSLI_rec,
+                             EXTSW_32,
+                             EXTSW_32_64,
+                             EXTSW_32_64_rec,
+                             EXTSW_rec,
+                             FABSD,
+                             FABSS,
+                             FCPSGND,
+                             FCPSGNS,
+                             FMR,
+                             FNABSD,
+                             FNABSS,
+                             FNEGD,
+                             FNEGS,
+                             ISEL,
+                             ISEL8,
+                             LI,
+                             LI8,
+                             LIS,
+                             LIS8,
+                             MFCTR,
+                             MFCTR8,
+                             MFLR,
+                             MFLR8,
+                             MFOCRF,
+                             MFOCRF8,
+                             MFVRD,
+                             MFVRWZ,
+                             MFVSRD,
+                             MFVSRWZ,
+                             MTVRD,
+                             MTVRWA,
+                             MTVRWZ,
+                             MTVSRBM,
+                             MTVSRBMI,
+                             MTVSRD,
+                             MTVSRDM,
+                             MTVSRHM,
+                             MTVSRQM,
+                             MTVSRWA,
+                             MTVSRWM,
+                             MTVSRWZ,
+                             NAND,
+                             NAND8,
+                             NAND8_rec,
+                             NAND_rec,
+                             NEG,
+                             NEG8,
+                             NEG8O,
+                             NEG8_rec,
+                             NEGO,
+                             NEG_rec,
+                             NOP,
+                             NOP_GT_PWR6,
+                             NOP_GT_PWR7,
+                             NOR,
+                             NOR8,
+                             NOR8_rec,
+                             NOR_rec,
+                             OR,
+                             OR8,
+                             OR8_rec,
+                             ORC,
+                             ORC8,
+                             ORC8_rec,
+                             ORC_rec,
+                             ORI,
+                             ORI8,
+                             ORIS,
+                             ORIS8,
+                             OR_rec,
+                             POPCNTB,
+                             POPCNTB8,
+                             POPCNTD,
+                             POPCNTW,
+                             RLDCL,
+                             RLDCL_rec,
+                             RLDCR,
+                             RLDCR_rec,
+                             RLDIC,
+                             RLDICL,
+                             RLDICL_32,
+                             RLDICL_32_64,
+                             RLDICL_32_rec,
+                             RLDICL_rec,
+                             RLDICR,
+                             RLDICR_32,
+                             RLDICR_rec,
+                             RLDIC_rec,
+                             RLDIMI,
+                             RLDIMI_rec,
+                             RLWIMI,
+                             RLWIMI8,
+                             RLWIMI8_rec,
+                             RLWIMI_rec,
+                             RLWINM,
+                             RLWINM8,
+                             RLWINM8_rec,
+                             RLWINM_rec,
+                             RLWNM,
+                             RLWNM8,
+                             RLWNM8_rec,
+                             RLWNM_rec,
+                             SETB,
+                             SETB8,
+                             SETBC,
+                             SETBC8,
+                             SETBCR,
+                             SETBCR8,
+                             SETNBC,
+                             SETNBC8,
+                             SETNBCR,
+                             SETNBCR8,
+                             SLD,
+                             SLD_rec,
+                             SLW,
+                             SLW8,
+                             SLW8_rec,
+                             SLW_rec,
+                             SRAD,
+                             SRADI,
+                             SRADI_32,
+                             SRAW,
+                             SRAWI,
+                             SRD,
+                             SRD_rec,
+                             SRW,
+                             SRW8,
+                             SRW8_rec,
+                             SRW_rec,
+                             SUBF,
+                             SUBF8,
+                             SUBF8O,
+                             SUBF8_rec,
+                             SUBFE,
+                             SUBFE8,
+                             SUBFE8O,
+                             SUBFEO,
+                             SUBFIC,
+                             SUBFIC8,
+                             SUBFME,
+                             SUBFME8,
+                             SUBFME8O,
+                             SUBFMEO,
+                             SUBFO,
+                             SUBFZE,
+                             SUBFZE8,
+                             SUBFZE8O,
+                             SUBFZEO,
+                             SUBF_rec,
+                             VABSDUB,
+                             VABSDUH,
+                             VABSDUW,
+                             VADDCUW,
+                             VADDSBS,
+                             VADDSHS,
+                             VADDSWS,
+                             VADDUBM,
+                             VADDUBS,
+                             VADDUDM,
+                             VADDUHM,
+                             VADDUHS,
+                             VADDUWM,
+                             VADDUWS,
+                             VAND,
+                             VANDC,
+                             VAVGSB,
+                             VAVGSH,
+                             VAVGSW,
+                             VAVGUB,
+                             VAVGUH,
+                             VAVGUW,
+                             VCLZB,
+                             VCLZD,
+                             VCLZH,
+                             VCLZW,
+                             VCMPBFP,
+                             VCMPBFP_rec,
+                             VCMPEQFP,
+                             VCMPEQFP_rec,
+                             VCMPEQUB,
+                             VCMPEQUB_rec,
+                             VCMPEQUD,
+                             VCMPEQUD_rec,
+                             VCMPEQUH,
+                             VCMPEQUH_rec,
+                             VCMPEQUQ,
+                             VCMPEQUQ_rec,
+                             VCMPEQUW,
+                             VCMPEQUW_rec,
+                             VCMPGEFP,
+                             VCMPGEFP_rec,
+                             VCMPGTFP,
+                             VCMPGTFP_rec,
+                             VCMPGTSB,
+                             VCMPGTSB_rec,
+                             VCMPGTSD,
+                             VCMPGTSD_rec,
+                             VCMPGTSH,
+                             VCMPGTSH_rec,
+                             VCMPGTSQ,
+                             VCMPGTSQ_rec,
+                             VCMPGTSW,
+                             VCMPGTSW_rec,
+                             VCMPGTUB,
+                             VCMPGTUB_rec,
+                             VCMPGTUD,
+                             VCMPGTUD_rec,
+                             VCMPGTUH,
+                             VCMPGTUH_rec,
+                             VCMPGTUQ,
+                             VCMPGTUQ_rec,
+                             VCMPGTUW,
+                             VCMPGTUW_rec,
+                             VCMPNEB,
+                             VCMPNEB_rec,
+                             VCMPNEH,
+                             VCMPNEH_rec,
+                             VCMPNEW,
+                             VCMPNEW_rec,
+                             VCMPNEZB,
+                             VCMPNEZB_rec,
+                             VCMPNEZH,
+                             VCMPNEZH_rec,
+                             VCMPNEZW,
+                             VCMPNEZW_rec,
+                             VCNTMBB,
+                             VCNTMBD,
+                             VCNTMBH,
+                             VCNTMBW,
+                             VCTZB,
+                             VCTZD,
+                             VCTZH,
+                             VCTZW,
+                             VEQV,
+                             VEXPANDBM,
+                             VEXPANDDM,
+                             VEXPANDHM,
+                             VEXPANDQM,
+                             VEXPANDWM,
+                             VEXTRACTBM,
+                             VEXTRACTDM,
+                             VEXTRACTHM,
+                             VEXTRACTQM,
+                             VEXTRACTWM,
+                             VEXTSB2D,
+                             VEXTSB2Ds,
+                             VEXTSB2W,
+                             VEXTSB2Ws,
+                             VEXTSD2Q,
+                             VEXTSH2D,
+                             VEXTSH2Ds,
+                             VEXTSH2W,
+                             VEXTSH2Ws,
+                             VEXTSW2D,
+                             VEXTSW2Ds,
+                             VMAXFP,
+                             VMAXSB,
+                             VMAXSD,
+                             VMAXSH,
+                             VMAXSW,
+                             VMAXUB,
+                             VMAXUD,
+                             VMAXUH,
+                             VMAXUW,
+                             VMINFP,
+                             VMINSB,
+                             VMINSD,
+                             VMINSH,
+                             VMINSW,
+                             VMINUB,
+                             VMINUD,
+                             VMINUH,
+                             VMINUW,
+                             VMRGEW,
+                             VMRGOW,
+                             VNAND,
+                             VNEGD,
+                             VNEGW,
+                             VNOR,
+                             VOR,
+                             VORC,
+                             VPOPCNTB,
+                             VPOPCNTD,
+                             VPOPCNTH,
+                             VPOPCNTW,
+                             VPRTYBD,
+                             VPRTYBW,
+                             VRLB,
+                             VRLD,
+                             VRLDMI,
+                             VRLDNM,
+                             VRLH,
+                             VRLW,
+                             VRLWMI,
+                             VRLWNM,
+                             VSEL,
+                             VSHASIGMAD,
+                             VSHASIGMAW,
+                             VSLB,
+                             VSLD,
+                             VSLH,
+                             VSLW,
+                             VSRAB,
+                             VSRAD,
+                             VSRAH,
+                             VSRAW,
+                             VSRB,
+                             VSRD,
+                             VSRH,
+                             VSRW,
+                             VSUBCUW,
+                             VSUBSBS,
+                             VSUBSHS,
+                             VSUBSWS,
+                             VSUBUBM,
+                             VSUBUBS,
+                             VSUBUDM,
+                             VSUBUHM,
+                             VSUBUHS,
+                             VSUBUWM,
+                             VSUBUWS,
+                             VXOR,
+                             V_SET0,
+                             V_SET0B,
+                             V_SET0H,
+                             XOR,
+                             XOR8,
+                             XOR8_rec,
+                             XORI,
+                             XORI8,
+                             XORIS,
+                             XORIS8,
+                             XOR_rec,
+                             XSABSDP,
+                             XSABSQP,
+                             XSCMPEQDP,
+                             XSCMPGEDP,
+                             XSCMPGTDP,
+                             XSCPSGNDP,
+                             XSCPSGNQP,
+                             XSCVHPDP,
+                             XSCVSPDPN,
+                             XSIEXPDP,
+                             XSIEXPQP,
+                             XSMAXCDP,
+                             XSMAXDP,
+                             XSMAXJDP,
+                             XSMINCDP,
+                             XSMINDP,
+                             XSMINJDP,
+                             XSNABSDP,
+                             XSNABSQP,
+                             XSNEGDP,
+                             XSNEGQP,
+                             XSXEXPDP,
+                             XSXEXPQP,
+                             XSXSIGDP,
+                             XVABSDP,
+                             XVABSSP,
+                             XVCMPEQDP,
+                             XVCMPEQDP_rec,
+                             XVCMPEQSP,
+                             XVCMPEQSP_rec,
+                             XVCMPGEDP,
+                             XVCMPGEDP_rec,
+                             XVCMPGESP,
+                             XVCMPGESP_rec,
+                             XVCMPGTDP,
+                             XVCMPGTDP_rec,
+                             XVCMPGTSP,
+                             XVCMPGTSP_rec,
+                             XVCPSGNDP,
+                             XVCPSGNSP,
+                             XVCVHPSP,
+                             XVIEXPDP,
+                             XVIEXPSP,
+                             XVMAXDP,
+                             XVMAXSP,
+                             XVMINDP,
+                             XVMINSP,
+                             XVNABSDP,
+                             XVNABSSP,
+                             XVNEGDP,
+                             XVNEGSP,
+                             XVTSTDCDP,
+                             XVTSTDCSP,
+                             XVXEXPDP,
+                             XVXEXPSP,
+                             XVXSIGDP,
+                             XVXSIGSP,
+                             XXLAND,
+                             XXLANDC,
+                             XXLEQV,
+                             XXLEQVOnes,
+                             XXLNAND,
+                             XXLNOR,
+                             XXLOR,
+                             XXLORC,
+                             XXLORf,
+                             XXLXOR,
+                             XXLXORdpz,
+                             XXLXORspz,
+                             XXLXORz,
+                             XXSEL),
+               FUSION_OP_SET(ADD4,
+                             ADD4O,
+                             ADD4TLS,
+                             ADD4_rec,
+                             ADD8,
+                             ADD8O,
+                             ADD8TLS,
+                             ADD8TLS_,
+                             ADD8_rec,
+                             ADDE,
+                             ADDE8,
+                             ADDE8O,
+                             ADDEO,
+                             ADDEX,
+                             ADDEX8,
+                             ADDI,
+                             ADDI8,
+                             ADDIC,
+                             ADDIC8,
+                             ADDIS,
+                             ADDIS8,
+                             ADDISdtprelHA32,
+                             ADDIStocHA,
+                             ADDIStocHA8,
+                             ADDIdtprelL32,
+                             ADDItlsldLADDR32,
+                             ADDItocL,
+                             ADDME,
+                             ADDME8,
+                             ADDME8O,
+                             ADDMEO,
+                             ADDZE,
+                             ADDZE8,
+                             ADDZE8O,
+                             ADDZEO,
+                             AND,
+                             AND8,
+                             AND8_rec,
+                             ANDC,
+                             ANDC8,
+                             ANDC8_rec,
+                             ANDC_rec,
+                             ANDI8_rec,
+                             ANDIS8_rec,
+                             ANDIS_rec,
+                             ANDI_rec,
+                             AND_rec,
+                             CMPB,
+                             CMPB8,
+                             CMPD,
+                             CMPDI,
+                             CMPEQB,
+                             CMPLD,
+                             CMPLDI,
+                             CMPLW,
+                             CMPLWI,
+                             CMPRB,
+                             CMPRB8,
+                             CMPW,
+                             CMPWI,
+                             CNTLZD,
+                             CNTLZD_rec,
+                             CNTLZW,
+                             CNTLZW8,
+                             CNTLZW8_rec,
+                             CNTLZW_rec,
+                             CNTTZD,
+                             CNTTZD_rec,
+                             CNTTZW,
+                             CNTTZW8,
+                             CNTTZW8_rec,
+                             CNTTZW_rec,
+                             CR6SET,
+                             CR6UNSET,
+                             CRAND,
+                             CRANDC,
+                             CREQV,
+                             CRNAND,
+                             CRNOR,
+                             CROR,
+                             CRORC,
+                             CRSET,
+                             CRUNSET,
+                             CRXOR,
+                             DSS,
+                             DSSALL,
+                             DST,
+                             DST64,
+                             DSTST,
+                             DSTST64,
+                             DSTSTT,
+                             DSTSTT64,
+                             DSTT,
+                             DSTT64,
+                             EQV,
+                             EQV8,
+                             EQV8_rec,
+                             EQV_rec,
+                             EXTSB,
+                             EXTSB8,
+                             EXTSB8_32_64,
+                             EXTSB8_rec,
+                             EXTSB_rec,
+                             EXTSH,
+                             EXTSH8,
+                             EXTSH8_32_64,
+                             EXTSH8_rec,
+                             EXTSH_rec,
+                             EXTSW,
+                             EXTSWSLI,
+                             EXTSWSLI_32_64,
+                             EXTSWSLI_32_64_rec,
+                             EXTSWSLI_rec,
+                             EXTSW_32,
+                             EXTSW_32_64,
+                             EXTSW_32_64_rec,
+                             EXTSW_rec,
+                             FABSD,
+                             FABSS,
+                             FCMPOD,
+                             FCMPOS,
+                             FCMPUD,
+                             FCMPUS,
+                             FCPSGND,
+                             FCPSGNS,
+                             FMR,
+                             FNABSD,
+                             FNABSS,
+                             FNEGD,
+                             FNEGS,
+                             FTDIV,
+                             FTSQRT,
+                             ISEL,
+                             ISEL8,
+                             LI,
+                             LI8,
+                             LIS,
+                             LIS8,
+                             MCRF,
+                             MCRXRX,
+                             MFCTR,
+                             MFCTR8,
+                             MFLR,
+                             MFLR8,
+                             MFOCRF,
+                             MFOCRF8,
+                             MFVRD,
+                             MFVRWZ,
+                             MFVSRD,
+                             MFVSRWZ,
+                             MTCTR,
+                             MTCTR8,
+                             MTCTR8loop,
+                             MTCTRloop,
+                             MTLR,
+                             MTLR8,
+                             MTOCRF,
+                             MTOCRF8,
+                             MTVRD,
+                             MTVRWA,
+                             MTVRWZ,
+                             MTVSRBM,
+                             MTVSRBMI,
+                             MTVSRD,
+                             MTVSRDM,
+                             MTVSRHM,
+                             MTVSRQM,
+                             MTVSRWA,
+                             MTVSRWM,
+                             MTVSRWZ,
+                             NAND,
+                             NAND8,
+                             NAND8_rec,
+                             NAND_rec,
+                             NEG,
+                             NEG8,
+                             NEG8O,
+                             NEG8_rec,
+                             NEGO,
+                             NEG_rec,
+                             NOP,
+                             NOP_GT_PWR6,
+                             NOP_GT_PWR7,
+                             NOR,
+                             NOR8,
+                             NOR8_rec,
+                             NOR_rec,
+                             OR,
+                             OR8,
+                             OR8_rec,
+                             ORC,
+                             ORC8,
+                             ORC8_rec,
+                             ORC_rec,
+                             ORI,
+                             ORI8,
+                             ORIS,
+                             ORIS8,
+                             OR_rec,
+                             POPCNTB,
+                             POPCNTB8,
+                             POPCNTD,
+                             POPCNTW,
+                             RLDCL,
+                             RLDCL_rec,
+                             RLDCR,
+                             RLDCR_rec,
+                             RLDIC,
+                             RLDICL,
+                             RLDICL_32,
+                             RLDICL_32_64,
+                             RLDICL_32_rec,
+                             RLDICL_rec,
+                             RLDICR,
+                             RLDICR_32,
+                             RLDICR_rec,
+                             RLDIC_rec,
+                             RLDIMI,
+                             RLDIMI_rec,
+                             RLWIMI,
+                             RLWIMI8,
+                             RLWIMI8_rec,
+                             RLWIMI_rec,
+                             RLWINM,
+                             RLWINM8,
+                             RLWINM8_rec,
+                             RLWINM_rec,
+                             RLWNM,
+                             RLWNM8,
+                             RLWNM8_rec,
+                             RLWNM_rec,
+                             SETB,
+                             SETB8,
+                             SETBC,
+                             SETBC8,
+                             SETBCR,
+                             SETBCR8,
+                             SETNBC,
+                             SETNBC8,
+                             SETNBCR,
+                             SETNBCR8,
+                             SLD,
+                             SLD_rec,
+                             SLW,
+                             SLW8,
+                             SLW8_rec,
+                             SLW_rec,
+                             SRAD,
+                             SRADI,
+                             SRADI_32,
+                             SRAW,
+                             SRAWI,
+                             SRD,
+                             SRD_rec,
+                             SRW,
+                             SRW8,
+                             SRW8_rec,
+                             SRW_rec,
+                             SUBF,
+                             SUBF8,
+                             SUBF8O,
+                             SUBF8_rec,
+                             SUBFE,
+                             SUBFE8,
+                             SUBFE8O,
+                             SUBFEO,
+                             SUBFIC,
+                             SUBFIC8,
+                             SUBFME,
+                             SUBFME8,
+                             SUBFME8O,
+                             SUBFMEO,
+                             SUBFO,
+                             SUBFZE,
+                             SUBFZE8,
+                             SUBFZE8O,
+                             SUBFZEO,
+                             SUBF_rec,
+                             TD,
+                             TDI,
+                             TRAP,
+                             TW,
+                             TWI,
+                             VABSDUB,
+                             VABSDUH,
+                             VABSDUW,
+                             VADDCUW,
+                             VADDSBS,
+                             VADDSHS,
+                             VADDSWS,
+                             VADDUBM,
+                             VADDUBS,
+                             VADDUDM,
+                             VADDUHM,
+                             VADDUHS,
+                             VADDUWM,
+                             VADDUWS,
+                             VAND,
+                             VANDC,
+                             VAVGSB,
+                             VAVGSH,
+                             VAVGSW,
+                             VAVGUB,
+                             VAVGUH,
+                             VAVGUW,
+                             VCLZB,
+                             VCLZD,
+                             VCLZH,
+                             VCLZW,
+                             VCMPBFP,
+                             VCMPBFP_rec,
+                             VCMPEQFP,
+                             VCMPEQFP_rec,
+                             VCMPEQUB,
+                             VCMPEQUB_rec,
+                             VCMPEQUD,
+                             VCMPEQUD_rec,
+                             VCMPEQUH,
+                             VCMPEQUH_rec,
+                             VCMPEQUQ,
+                             VCMPEQUQ_rec,
+                             VCMPEQUW,
+                             VCMPEQUW_rec,
+                             VCMPGEFP,
+                             VCMPGEFP_rec,
+                             VCMPGTFP,
+                             VCMPGTFP_rec,
+                             VCMPGTSB,
+                             VCMPGTSB_rec,
+                             VCMPGTSD,
+                             VCMPGTSD_rec,
+                             VCMPGTSH,
+                             VCMPGTSH_rec,
+                             VCMPGTSQ,
+                             VCMPGTSQ_rec,
+                             VCMPGTSW,
+                             VCMPGTSW_rec,
+                             VCMPGTUB,
+                             VCMPGTUB_rec,
+                             VCMPGTUD,
+                             VCMPGTUD_rec,
+                             VCMPGTUH,
+                             VCMPGTUH_rec,
+                             VCMPGTUQ,
+                             VCMPGTUQ_rec,
+                             VCMPGTUW,
+                             VCMPGTUW_rec,
+                             VCMPNEB,
+                             VCMPNEB_rec,
+                             VCMPNEH,
+                             VCMPNEH_rec,
+                             VCMPNEW,
+                             VCMPNEW_rec,
+                             VCMPNEZB,
+                             VCMPNEZB_rec,
+                             VCMPNEZH,
+                             VCMPNEZH_rec,
+                             VCMPNEZW,
+                             VCMPNEZW_rec,
+                             VCMPSQ,
+                             VCMPUQ,
+                             VCNTMBB,
+                             VCNTMBD,
+                             VCNTMBH,
+                             VCNTMBW,
+                             VCTZB,
+                             VCTZD,
+                             VCTZH,
+                             VCTZW,
+                             VEQV,
+                             VEXPANDBM,
+                             VEXPANDDM,
+                             VEXPANDHM,
+                             VEXPANDQM,
+                             VEXPANDWM,
+                             VEXTRACTBM,
+                             VEXTRACTDM,
+                             VEXTRACTHM,
+                             VEXTRACTQM,
+                             VEXTRACTWM,
+                             VEXTSB2D,
+                             VEXTSB2Ds,
+                             VEXTSB2W,
+                             VEXTSB2Ws,
+                             VEXTSD2Q,
+                             VEXTSH2D,
+                             VEXTSH2Ds,
+                             VEXTSH2W,
+                             VEXTSH2Ws,
+                             VEXTSW2D,
+                             VEXTSW2Ds,
+                             VMAXFP,
+                             VMAXSB,
+                             VMAXSD,
+                             VMAXSH,
+                             VMAXSW,
+                             VMAXUB,
+                             VMAXUD,
+                             VMAXUH,
+                             VMAXUW,
+                             VMINFP,
+                             VMINSB,
+                             VMINSD,
+                             VMINSH,
+                             VMINSW,
+                             VMINUB,
+                             VMINUD,
+                             VMINUH,
+                             VMINUW,
+                             VMRGEW,
+                             VMRGOW,
+                             VNAND,
+                             VNEGD,
+                             VNEGW,
+                             VNOR,
+                             VOR,
+                             VORC,
+                             VPOPCNTB,
+                             VPOPCNTD,
+                             VPOPCNTH,
+                             VPOPCNTW,
+                             VPRTYBD,
+                             VPRTYBW,
+                             VRLB,
+                             VRLD,
+                             VRLDMI,
+                             VRLDNM,
+                             VRLH,
+                             VRLW,
+                             VRLWMI,
+                             VRLWNM,
+                             VSEL,
+                             VSHASIGMAD,
+                             VSHASIGMAW,
+                             VSLB,
+                             VSLD,
+                             VSLH,
+                             VSLW,
+                             VSRAB,
+                             VSRAD,
+                             VSRAH,
+                             VSRAW,
+                             VSRB,
+                             VSRD,
+                             VSRH,
+                             VSRW,
+                             VSUBCUW,
+                             VSUBSBS,
+                             VSUBSHS,
+                             VSUBSWS,
+                             VSUBUBM,
+                             VSUBUBS,
+                             VSUBUDM,
+                             VSUBUHM,
+                             VSUBUHS,
+                             VSUBUWM,
+                             VSUBUWS,
+                             VXOR,
+                             V_SET0,
+                             V_SET0B,
+                             V_SET0H,
+                             WAIT,
+                             XOR,
+                             XOR8,
+                             XOR8_rec,
+                             XORI,
+                             XORI8,
+                             XORIS,
+                             XORIS8,
+                             XOR_rec,
+                             XSABSDP,
+                             XSABSQP,
+                             XSCMPEQDP,
+                             XSCMPEXPDP,
+                             XSCMPGEDP,
+                             XSCMPGTDP,
+                             XSCMPODP,
+                             XSCMPUDP,
+                             XSCPSGNDP,
+                             XSCPSGNQP,
+                             XSCVHPDP,
+                             XSCVSPDPN,
+                             XSIEXPDP,
+                             XSIEXPQP,
+                             XSMAXCDP,
+                             XSMAXDP,
+                             XSMAXJDP,
+                             XSMINCDP,
+                             XSMINDP,
+                             XSMINJDP,
+                             XSNABSDP,
+                             XSNABSQP,
+                             XSNEGDP,
+                             XSNEGQP,
+                             XSTDIVDP,
+                             XSTSQRTDP,
+                             XSTSTDCDP,
+                             XSTSTDCSP,
+                             XSXEXPDP,
+                             XSXEXPQP,
+                             XSXSIGDP,
+                             XVABSDP,
+                             XVABSSP,
+                             XVCMPEQDP,
+                             XVCMPEQDP_rec,
+                             XVCMPEQSP,
+                             XVCMPEQSP_rec,
+                             XVCMPGEDP,
+                             XVCMPGEDP_rec,
+                             XVCMPGESP,
+                             XVCMPGESP_rec,
+                             XVCMPGTDP,
+                             XVCMPGTDP_rec,
+                             XVCMPGTSP,
+                             XVCMPGTSP_rec,
+                             XVCPSGNDP,
+                             XVCPSGNSP,
+                             XVCVHPSP,
+                             XVIEXPDP,
+                             XVIEXPSP,
+                             XVMAXDP,
+                             XVMAXSP,
+                             XVMINDP,
+                             XVMINSP,
+                             XVNABSDP,
+                             XVNABSSP,
+                             XVNEGDP,
+                             XVNEGSP,
+                             XVTDIVDP,
+                             XVTDIVSP,
+                             XVTLSBB,
+                             XVTSQRTDP,
+                             XVTSQRTSP,
+                             XVTSTDCDP,
+                             XVTSTDCSP,
+                             XVXEXPDP,
+                             XVXEXPSP,
+                             XVXSIGDP,
+                             XVXSIGSP,
+                             XXLAND,
+                             XXLANDC,
+                             XXLEQV,
+                             XXLEQVOnes,
+                             XXLNAND,
+                             XXLNOR,
+                             XXLOR,
+                             XXLORC,
+                             XXLORf,
+                             XXLXOR,
+                             XXLXORdpz,
+                             XXLXORspz,
+                             XXLXORz,
+                             XXSEL))
diff --git a/llvm/lib/Target/PowerPC/PPCMacroFusion.def b/llvm/lib/Target/PowerPC/PPCMacroFusion.def
--- a/llvm/lib/Target/PowerPC/PPCMacroFusion.def
+++ b/llvm/lib/Target/PowerPC/PPCMacroFusion.def
@@ -78,5 +78,7 @@
 FUSION_FEATURE(SldiAdd, hasArithAddFusion, -1, FUSION_OP_SET(RLDICR, RLDICR_32),
                FUSION_OP_SET(ADD4, ADD8, SUBF, SUBF8))
 
+#include "PPCBack2BackFusion.def"
+
 #undef FUSION_FEATURE
 #undef FUSION_OP_SET
diff --git a/llvm/lib/Target/PowerPC/PPCSubtarget.h b/llvm/lib/Target/PowerPC/PPCSubtarget.h
--- a/llvm/lib/Target/PowerPC/PPCSubtarget.h
+++ b/llvm/lib/Target/PowerPC/PPCSubtarget.h
@@ -151,6 +151,7 @@
   bool HasAddLogicalFusion;
   bool HasLogicalAddFusion;
   bool HasLogicalFusion;
+  bool HasBack2BackFusion;
   bool IsISA2_06;
   bool IsISA2_07;
   bool IsISA3_0;
@@ -340,6 +341,7 @@
   bool hasAddLogicalFusion() const { return HasAddLogicalFusion; }
   bool hasLogicalAddFusion() const { return HasLogicalAddFusion; }
   bool hasLogicalFusion() const { return HasLogicalFusion; }
+  bool hasBack2BackFusion() const { return HasBack2BackFusion; }
   bool needsSwapsForVSXMemOps() const {
     return hasVSX() && isLittleEndian() && !hasP9Vector();
   }
diff --git a/llvm/lib/Target/PowerPC/PPCSubtarget.cpp b/llvm/lib/Target/PowerPC/PPCSubtarget.cpp
--- a/llvm/lib/Target/PowerPC/PPCSubtarget.cpp
+++ b/llvm/lib/Target/PowerPC/PPCSubtarget.cpp
@@ -131,6 +131,7 @@
   HasAddLogicalFusion = false;
   HasLogicalAddFusion = false;
   HasLogicalFusion = false;
+  HasBack2BackFusion = false;
   IsISA2_06 = false;
   IsISA2_07 = false;
   IsISA3_0 = false;
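For readers unfamiliar with the .def mechanism: PPCBack2BackFusion.def (pulled in through the new #include in PPCMacroFusion.def) follows the usual X-macro pattern — the including translation unit defines FUSION_FEATURE and FUSION_OP_SET before the include and #undefs them afterwards, as the tail of PPCMacroFusion.def shows. The sketch below is illustrative only: a toy consumer under assumed names (FusionEntry, FusionEntries), not the actual PPCMacroFusion.cpp implementation, and it deliberately discards the two opcode sets.

```cpp
// Minimal X-macro consumer sketch for the new .def file (hypothetical code,
// not PPCMacroFusion.cpp). It keeps only the per-feature metadata.
#include <cstdio>

struct FusionEntry {
  const char *Name;      // fusion kind, e.g. "GeneralBack2Back"
  const char *Predicate; // PPCSubtarget query guarding this fusion pair
  int DepOpIdx;          // dependent-operand index (-1 in these entries)
};

#define FUSION_OP_SET(...)                  // drop the opcode lists here
#define FUSION_FEATURE(NAME, CHECK, DEP_OP_IDX, OPSET1, OPSET2)                \
  {#NAME, #CHECK, DEP_OP_IDX},

static const FusionEntry FusionEntries[] = {
#include "PPCBack2BackFusion.def"
};

#undef FUSION_FEATURE
#undef FUSION_OP_SET

int main() {
  for (const FusionEntry &E : FusionEntries)
    std::printf("%s guarded by PPCSubtarget::%s (depOpIdx=%d)\n", E.Name,
                E.Predicate, E.DepOpIdx);
  return 0;
}
```

The feature itself is surfaced as the subtarget attribute string "fuse-back2back" (see the PPC.td hunk above), so it can be toggled through the usual subtarget-feature plumbing without any further plumbing in this patch.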