// Convert an absolute-addressing opcode to the equivalent GP-relative
// instruction in the absence of the corresponding immediate extender.
switch (MCI.getOpcode()) {
- case Hexagon::S2_storerbabs:
+ case Hexagon::PS_storerbabs:
opcode = Hexagon::S2_storerbgp;
break;
- case Hexagon::S2_storerhabs:
+ case Hexagon::PS_storerhabs:
opcode = Hexagon::S2_storerhgp;
break;
- case Hexagon::S2_storerfabs:
+ case Hexagon::PS_storerfabs:
opcode = Hexagon::S2_storerfgp;
break;
- case Hexagon::S2_storeriabs:
+ case Hexagon::PS_storeriabs:
opcode = Hexagon::S2_storerigp;
break;
- case Hexagon::S2_storerbnewabs:
+ case Hexagon::PS_storerbnewabs:
opcode = Hexagon::S2_storerbnewgp;
break;
- case Hexagon::S2_storerhnewabs:
+ case Hexagon::PS_storerhnewabs:
opcode = Hexagon::S2_storerhnewgp;
break;
- case Hexagon::S2_storerinewabs:
+ case Hexagon::PS_storerinewabs:
opcode = Hexagon::S2_storerinewgp;
break;
- case Hexagon::S2_storerdabs:
+ case Hexagon::PS_storerdabs:
opcode = Hexagon::S2_storerdgp;
break;
- case Hexagon::L4_loadrb_abs:
+ case Hexagon::PS_loadrbabs:
opcode = Hexagon::L2_loadrbgp;
break;
- case Hexagon::L4_loadrub_abs:
+ case Hexagon::PS_loadrubabs:
opcode = Hexagon::L2_loadrubgp;
break;
- case Hexagon::L4_loadrh_abs:
+ case Hexagon::PS_loadrhabs:
opcode = Hexagon::L2_loadrhgp;
break;
- case Hexagon::L4_loadruh_abs:
+ case Hexagon::PS_loadruhabs:
opcode = Hexagon::L2_loadruhgp;
break;
- case Hexagon::L4_loadri_abs:
+ case Hexagon::PS_loadriabs:
opcode = Hexagon::L2_loadrigp;
break;
- case Hexagon::L4_loadrd_abs:
+ case Hexagon::PS_loadrdabs:
opcode = Hexagon::L2_loadrdgp;
break;
default:
// HexagonII::INST_ICLASS_LD
// HexagonII::INST_ICLASS_LD_ST_2
-static unsigned int LoadStoreOpcodeData[][2] = {{L4_loadrd_abs, 0x49c00000},
- {L4_loadri_abs, 0x49800000},
- {L4_loadruh_abs, 0x49600000},
- {L4_loadrh_abs, 0x49400000},
- {L4_loadrub_abs, 0x49200000},
- {L4_loadrb_abs, 0x49000000},
- {S2_storerdabs, 0x48c00000},
- {S2_storerinewabs, 0x48a01000},
- {S2_storerhnewabs, 0x48a00800},
- {S2_storerbnewabs, 0x48a00000},
- {S2_storeriabs, 0x48800000},
- {S2_storerfabs, 0x48600000},
- {S2_storerhabs, 0x48400000},
- {S2_storerbabs, 0x48000000}};
+static unsigned int LoadStoreOpcodeData[][2] = {{PS_loadrdabs, 0x49c00000},
+ {PS_loadriabs, 0x49800000},
+ {PS_loadruhabs, 0x49600000},
+ {PS_loadrhabs, 0x49400000},
+ {PS_loadrubabs, 0x49200000},
+ {PS_loadrbabs, 0x49000000},
+ {PS_storerdabs, 0x48c00000},
+ {PS_storerinewabs, 0x48a01000},
+ {PS_storerhnewabs, 0x48a00800},
+ {PS_storerbnewabs, 0x48a00000},
+ {PS_storeriabs, 0x48800000},
+ {PS_storerfabs, 0x48600000},
+ {PS_storerhabs, 0x48400000},
+ {PS_storerbabs, 0x48000000}};
static const size_t NumCondS = array_lengthof(StoreConditionalOpcodeData);
static const size_t NumLS = array_lengthof(LoadStoreOpcodeData);
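The table above pairs each absolute-addressing pseudo opcode with the base bits of its instruction encoding. A minimal lookup sketch, assuming a linear scan is acceptable for a table of this size (the helper name and return convention are assumptions, not code from this patch):

// Hypothetical helper: return the base encoding for an absolute
// load/store pseudo opcode, or 0 if the opcode is not in the table.
static unsigned getAbsoluteEncoding(unsigned Opcode) {
  for (size_t I = 0; I != NumLS; ++I)
    if (LoadStoreOpcodeData[I][0] == Opcode)
      return LoadStoreOpcodeData[I][1];
  return 0;
}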
break;
// op: g16_2
- case (Hexagon::L4_loadri_abs):
+ case Hexagon::PS_loadriabs:
++shift;
// op: g16_1
- case Hexagon::L4_loadrh_abs:
- case Hexagon::L4_loadruh_abs:
+ case Hexagon::PS_loadrhabs:
+ case Hexagon::PS_loadruhabs:
++shift;
// op: g16_0
- case Hexagon::L4_loadrb_abs:
- case Hexagon::L4_loadrub_abs: {
+ case Hexagon::PS_loadrbabs:
+ case Hexagon::PS_loadrubabs: {
// op: Rd
Value |= insn & UINT64_C(31);
DecodeIntRegsRegisterClass(MI, Value, 0, 0);
break;
}
- case Hexagon::L4_loadrd_abs: {
+ case Hexagon::PS_loadrdabs: {
Value = insn & UINT64_C(31);
DecodeDoubleRegsRegisterClass(MI, Value, 0, 0);
Value = (insn >> 11) & UINT64_C(49152);
break;
}
- case Hexagon::S2_storerdabs: {
+ case Hexagon::PS_storerdabs: {
// op: g16_3
Value = (insn >> 11) & UINT64_C(49152);
Value |= (insn >> 7) & UINT64_C(15872);
}
// op: g16_2
- case Hexagon::S2_storerinewabs:
+ case Hexagon::PS_storerinewabs:
++shift;
// op: g16_1
- case Hexagon::S2_storerhnewabs:
+ case Hexagon::PS_storerhnewabs:
++shift;
// op: g16_0
- case Hexagon::S2_storerbnewabs: {
+ case Hexagon::PS_storerbnewabs: {
Value = (insn >> 11) & UINT64_C(49152);
Value |= (insn >> 7) & UINT64_C(15872);
Value |= (insn >> 5) & UINT64_C(256);
}
// op: g16_2
- case Hexagon::S2_storeriabs:
+ case Hexagon::PS_storeriabs:
++shift;
// op: g16_1
- case Hexagon::S2_storerhabs:
- case Hexagon::S2_storerfabs:
+ case Hexagon::PS_storerhabs:
+ case Hexagon::PS_storerfabs:
++shift;
// op: g16_0
- case Hexagon::S2_storerbabs: {
+ case Hexagon::PS_storerbabs: {
Value = (insn >> 11) & UINT64_C(49152);
Value |= (insn >> 7) & UINT64_C(15872);
Value |= (insn >> 5) & UINT64_C(256);
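In the decoder cases above, the fall-through ++shift counters record the access size while Value reassembles the scattered g16 offset bits. A sketch of the final step, assuming the reassembled field is scaled by shift and attached as the global-offset immediate (names taken from the surrounding code; the exact operand handling in the real decoder may differ):

// Sketch: scale the reassembled g16 field by the access size and add
// it to the MCInst as the global-offset immediate operand.
MI.addOperand(MCOperand::createImm(Value << shift));
break;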
Operand ImmOp, bits<2> MajOp, bit isHalf = 0> {
let CextOpcode = CextOp, BaseOpcode = CextOp#_abs in {
let opExtendable = 0, isPredicable = 1 in
- def S2_#NAME#abs : T_StoreAbs <mnemonic, RC, ImmOp, MajOp, isHalf>;
+ def PS_#NAME#abs : T_StoreAbs <mnemonic, RC, ImmOp, MajOp, isHalf>;
// Predicated
def S4_p#NAME#t_abs : T_StoreAbs_Pred<mnemonic, RC, MajOp, isHalf, 0, 0>;
bits<2> MajOp> {
let CextOpcode = CextOp, BaseOpcode = CextOp#_abs in {
let opExtendable = 0, isPredicable = 1 in
- def S2_#NAME#newabs : T_StoreAbs_NV <mnemonic, ImmOp, MajOp>;
+ def PS_#NAME#newabs : T_StoreAbs_NV <mnemonic, ImmOp, MajOp>;
// Predicated
def S4_p#NAME#newt_abs : T_StoreAbs_NV_Pred <mnemonic, MajOp, 0, 0>;
(MI Addr:$addr, (ValueMod Value:$val))>;
let AddedComplexity = 30 in {
- def: Storea_pat<truncstorei8, I32, addrga, S2_storerbabs>;
- def: Storea_pat<truncstorei16, I32, addrga, S2_storerhabs>;
- def: Storea_pat<store, I32, addrga, S2_storeriabs>;
- def: Storea_pat<store, I64, addrga, S2_storerdabs>;
+ def: Storea_pat<truncstorei8, I32, addrga, PS_storerbabs>;
+ def: Storea_pat<truncstorei16, I32, addrga, PS_storerhabs>;
+ def: Storea_pat<store, I32, addrga, PS_storeriabs>;
+ def: Storea_pat<store, I64, addrga, PS_storerdabs>;
- def: Stoream_pat<truncstorei8, I64, addrga, LoReg, S2_storerbabs>;
- def: Stoream_pat<truncstorei16, I64, addrga, LoReg, S2_storerhabs>;
- def: Stoream_pat<truncstorei32, I64, addrga, LoReg, S2_storeriabs>;
+ def: Stoream_pat<truncstorei8, I64, addrga, LoReg, PS_storerbabs>;
+ def: Stoream_pat<truncstorei16, I64, addrga, LoReg, PS_storerhabs>;
+ def: Stoream_pat<truncstorei32, I64, addrga, LoReg, PS_storeriabs>;
}
def: Storea_pat<SwapSt<atomic_store_8>, I32, addrgp, S2_storerbgp>;
Operand ImmOp, bits<3> MajOp> {
let CextOpcode = CextOp, BaseOpcode = CextOp#_abs in {
let opExtendable = 1, isPredicable = 1 in
- def L4_#NAME#_abs: T_LoadAbs <mnemonic, RC, ImmOp, MajOp>;
+ def PS_#NAME#abs: T_LoadAbs <mnemonic, RC, ImmOp, MajOp>;
// Predicated
defm L4_p#NAME#t : LD_Abs_Pred<mnemonic, RC, MajOp, 0>;
(VT (MI tglobaladdr:$absaddr))>;
let AddedComplexity = 30 in {
- def: LoadAbs_pats <load, L4_loadri_abs>;
- def: LoadAbs_pats <zextloadi1, L4_loadrub_abs>;
- def: LoadAbs_pats <sextloadi8, L4_loadrb_abs>;
- def: LoadAbs_pats <extloadi8, L4_loadrub_abs>;
- def: LoadAbs_pats <zextloadi8, L4_loadrub_abs>;
- def: LoadAbs_pats <sextloadi16, L4_loadrh_abs>;
- def: LoadAbs_pats <extloadi16, L4_loadruh_abs>;
- def: LoadAbs_pats <zextloadi16, L4_loadruh_abs>;
- def: LoadAbs_pats <load, L4_loadrd_abs, i64>;
+ def: LoadAbs_pats <load, PS_loadriabs>;
+ def: LoadAbs_pats <zextloadi1, PS_loadrubabs>;
+ def: LoadAbs_pats <sextloadi8, PS_loadrbabs>;
+ def: LoadAbs_pats <extloadi8, PS_loadrubabs>;
+ def: LoadAbs_pats <zextloadi8, PS_loadrubabs>;
+ def: LoadAbs_pats <sextloadi16, PS_loadrhabs>;
+ def: LoadAbs_pats <extloadi16, PS_loadruhabs>;
+ def: LoadAbs_pats <zextloadi16, PS_loadruhabs>;
+ def: LoadAbs_pats <load, PS_loadrdabs, i64>;
}
let AddedComplexity = 30 in
def: Pat<(i64 (zextloadi1 (HexagonCONST32 tglobaladdr:$absaddr))),
- (Zext64 (L4_loadrub_abs tglobaladdr:$absaddr))>;
+ (Zext64 (PS_loadrubabs tglobaladdr:$absaddr))>;
//===----------------------------------------------------------------------===//
// multiclass for load instructions with GP-relative addressing mode.
def: Loada_pat<atomic_load_64, i64, addrgp, L2_loadrdgp>;
// Map from Pd = load(globaladdress) -> Rd = memb(globaladdress), Pd = Rd
-def: Loadam_pat<load, i1, addrga, I32toI1, L4_loadrub_abs>;
+def: Loadam_pat<load, i1, addrga, I32toI1, PS_loadrubabs>;
def: Loadam_pat<load, i1, addrgp, I32toI1, L2_loadrubgp>;
-def: Stoream_pat<store, I1, addrga, I1toI32, S2_storerbabs>;
+def: Stoream_pat<store, I1, addrga, I1toI32, PS_storerbabs>;
def: Stoream_pat<store, I1, addrgp, I1toI32, S2_storerbgp>;
// Map from load(globaladdress) -> mem[u][bhwd](#foo)
def: Pat<(HexagonCONST32_GP tglobaladdr:$Rs), (A2_tfrsi s16Ext:$Rs)>;
let AddedComplexity = 30 in {
- def: Storea_pat<truncstorei8, I32, u32ImmPred, S2_storerbabs>;
- def: Storea_pat<truncstorei16, I32, u32ImmPred, S2_storerhabs>;
- def: Storea_pat<store, I32, u32ImmPred, S2_storeriabs>;
+ def: Storea_pat<truncstorei8, I32, u32ImmPred, PS_storerbabs>;
+ def: Storea_pat<truncstorei16, I32, u32ImmPred, PS_storerhabs>;
+ def: Storea_pat<store, I32, u32ImmPred, PS_storeriabs>;
}
let AddedComplexity = 30 in {
- def: Loada_pat<load, i32, u32ImmPred, L4_loadri_abs>;
- def: Loada_pat<sextloadi8, i32, u32ImmPred, L4_loadrb_abs>;
- def: Loada_pat<zextloadi8, i32, u32ImmPred, L4_loadrub_abs>;
- def: Loada_pat<sextloadi16, i32, u32ImmPred, L4_loadrh_abs>;
- def: Loada_pat<zextloadi16, i32, u32ImmPred, L4_loadruh_abs>;
+ def: Loada_pat<load, i32, u32ImmPred, PS_loadriabs>;
+ def: Loada_pat<sextloadi8, i32, u32ImmPred, PS_loadrbabs>;
+ def: Loada_pat<zextloadi8, i32, u32ImmPred, PS_loadrubabs>;
+ def: Loada_pat<sextloadi16, i32, u32ImmPred, PS_loadrhabs>;
+ def: Loada_pat<zextloadi16, i32, u32ImmPred, PS_loadruhabs>;
}
// Indexed store word - global address.
// Load from a global address that has only one use in the current basic block.
let AddedComplexity = 100 in {
- def: Loada_pat<extloadi8, i32, addrga, L4_loadrub_abs>;
- def: Loada_pat<sextloadi8, i32, addrga, L4_loadrb_abs>;
- def: Loada_pat<zextloadi8, i32, addrga, L4_loadrub_abs>;
+ def: Loada_pat<extloadi8, i32, addrga, PS_loadrubabs>;
+ def: Loada_pat<sextloadi8, i32, addrga, PS_loadrbabs>;
+ def: Loada_pat<zextloadi8, i32, addrga, PS_loadrubabs>;
- def: Loada_pat<extloadi16, i32, addrga, L4_loadruh_abs>;
- def: Loada_pat<sextloadi16, i32, addrga, L4_loadrh_abs>;
- def: Loada_pat<zextloadi16, i32, addrga, L4_loadruh_abs>;
+ def: Loada_pat<extloadi16, i32, addrga, PS_loadruhabs>;
+ def: Loada_pat<sextloadi16, i32, addrga, PS_loadrhabs>;
+ def: Loada_pat<zextloadi16, i32, addrga, PS_loadruhabs>;
- def: Loada_pat<load, i32, addrga, L4_loadri_abs>;
- def: Loada_pat<load, i64, addrga, L4_loadrd_abs>;
+ def: Loada_pat<load, i32, addrga, PS_loadriabs>;
+ def: Loada_pat<load, i64, addrga, PS_loadrdabs>;
}
// Store to a global address that has only one use in the current basic block.
let AddedComplexity = 100 in {
- def: Storea_pat<truncstorei8, I32, addrga, S2_storerbabs>;
- def: Storea_pat<truncstorei16, I32, addrga, S2_storerhabs>;
- def: Storea_pat<store, I32, addrga, S2_storeriabs>;
- def: Storea_pat<store, I64, addrga, S2_storerdabs>;
+ def: Storea_pat<truncstorei8, I32, addrga, PS_storerbabs>;
+ def: Storea_pat<truncstorei16, I32, addrga, PS_storerhabs>;
+ def: Storea_pat<store, I32, addrga, PS_storeriabs>;
+ def: Storea_pat<store, I64, addrga, PS_storerdabs>;
- def: Stoream_pat<truncstorei32, I64, addrga, LoReg, S2_storeriabs>;
+ def: Stoream_pat<truncstorei32, I64, addrga, LoReg, PS_storeriabs>;
}
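The addrga patterns above fire when the global address has a single use in the block, so the address computation folds directly into the memory operation. A hypothetical C fragment of the kind of code they cover (g is an assumed global, not taken from the source):

// Hypothetical example: each function touches a global exactly once,
// so the global address folds into the memory instruction itself.
extern int g;
int  get(void)   { return g; }   // absolute/GP-relative load of g
void set(int v)  { g = v; }      // absolute/GP-relative store to g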
// i8/i16/i32 -> i64 loads
// We need a complexity of 120 here to override preceding handling of
// zextload.
let AddedComplexity = 120 in {
- def: Loadam_pat<extloadi8, i64, addrga, Zext64, L4_loadrub_abs>;
- def: Loadam_pat<sextloadi8, i64, addrga, Sext64, L4_loadrb_abs>;
- def: Loadam_pat<zextloadi8, i64, addrga, Zext64, L4_loadrub_abs>;
+ def: Loadam_pat<extloadi8, i64, addrga, Zext64, PS_loadrubabs>;
+ def: Loadam_pat<sextloadi8, i64, addrga, Sext64, PS_loadrbabs>;
+ def: Loadam_pat<zextloadi8, i64, addrga, Zext64, PS_loadrubabs>;
- def: Loadam_pat<extloadi16, i64, addrga, Zext64, L4_loadruh_abs>;
- def: Loadam_pat<sextloadi16, i64, addrga, Sext64, L4_loadrh_abs>;
- def: Loadam_pat<zextloadi16, i64, addrga, Zext64, L4_loadruh_abs>;
+ def: Loadam_pat<extloadi16, i64, addrga, Zext64, PS_loadruhabs>;
+ def: Loadam_pat<sextloadi16, i64, addrga, Sext64, PS_loadrhabs>;
+ def: Loadam_pat<zextloadi16, i64, addrga, Zext64, PS_loadruhabs>;
- def: Loadam_pat<extloadi32, i64, addrga, Zext64, L4_loadri_abs>;
- def: Loadam_pat<sextloadi32, i64, addrga, Sext64, L4_loadri_abs>;
- def: Loadam_pat<zextloadi32, i64, addrga, Zext64, L4_loadri_abs>;
+ def: Loadam_pat<extloadi32, i64, addrga, Zext64, PS_loadriabs>;
+ def: Loadam_pat<sextloadi32, i64, addrga, Sext64, PS_loadriabs>;
+ def: Loadam_pat<zextloadi32, i64, addrga, Zext64, PS_loadriabs>;
}
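These widening patterns let an i8/i16/i32 global be loaded and extended to i64 in a single step, which is why they must outrank the plain zextload handling. A hypothetical C snippet of the loads they select for (the globals are assumptions for illustration):

// Hypothetical example: each global is widened to 64 bits on load,
// matching the zero-extending Loadam_pat entries above.
extern unsigned char  g8;
extern unsigned short g16;
extern unsigned int   g32;
unsigned long long widen(void) {
  return (unsigned long long)g8 + (unsigned long long)g16 +
         (unsigned long long)g32;
}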
let AddedComplexity = 100 in {
- def: Loada_pat<extloadi8, i32, addrgp, L4_loadrub_abs>;
- def: Loada_pat<sextloadi8, i32, addrgp, L4_loadrb_abs>;
- def: Loada_pat<zextloadi8, i32, addrgp, L4_loadrub_abs>;
+ def: Loada_pat<extloadi8, i32, addrgp, PS_loadrubabs>;
+ def: Loada_pat<sextloadi8, i32, addrgp, PS_loadrbabs>;
+ def: Loada_pat<zextloadi8, i32, addrgp, PS_loadrubabs>;
- def: Loada_pat<extloadi16, i32, addrgp, L4_loadruh_abs>;
- def: Loada_pat<sextloadi16, i32, addrgp, L4_loadrh_abs>;
- def: Loada_pat<zextloadi16, i32, addrgp, L4_loadruh_abs>;
+ def: Loada_pat<extloadi16, i32, addrgp, PS_loadruhabs>;
+ def: Loada_pat<sextloadi16, i32, addrgp, PS_loadrhabs>;
+ def: Loada_pat<zextloadi16, i32, addrgp, PS_loadruhabs>;
- def: Loada_pat<load, i32, addrgp, L4_loadri_abs>;
- def: Loada_pat<load, i64, addrgp, L4_loadrd_abs>;
+ def: Loada_pat<load, i32, addrgp, PS_loadriabs>;
+ def: Loada_pat<load, i64, addrgp, PS_loadrdabs>;
}
let AddedComplexity = 100 in {
- def: Storea_pat<truncstorei8, I32, addrgp, S2_storerbabs>;
- def: Storea_pat<truncstorei16, I32, addrgp, S2_storerhabs>;
- def: Storea_pat<store, I32, addrgp, S2_storeriabs>;
- def: Storea_pat<store, I64, addrgp, S2_storerdabs>;
+ def: Storea_pat<truncstorei8, I32, addrgp, PS_storerbabs>;
+ def: Storea_pat<truncstorei16, I32, addrgp, PS_storerhabs>;
+ def: Storea_pat<store, I32, addrgp, PS_storeriabs>;
+ def: Storea_pat<store, I64, addrgp, PS_storerdabs>;
}
-def: Loada_pat<atomic_load_8, i32, addrgp, L4_loadrub_abs>;
-def: Loada_pat<atomic_load_16, i32, addrgp, L4_loadruh_abs>;
-def: Loada_pat<atomic_load_32, i32, addrgp, L4_loadri_abs>;
-def: Loada_pat<atomic_load_64, i64, addrgp, L4_loadrd_abs>;
+def: Loada_pat<atomic_load_8, i32, addrgp, PS_loadrubabs>;
+def: Loada_pat<atomic_load_16, i32, addrgp, PS_loadruhabs>;
+def: Loada_pat<atomic_load_32, i32, addrgp, PS_loadriabs>;
+def: Loada_pat<atomic_load_64, i64, addrgp, PS_loadrdabs>;
-def: Storea_pat<SwapSt<atomic_store_8>, I32, addrgp, S2_storerbabs>;
-def: Storea_pat<SwapSt<atomic_store_16>, I32, addrgp, S2_storerhabs>;
-def: Storea_pat<SwapSt<atomic_store_32>, I32, addrgp, S2_storeriabs>;
-def: Storea_pat<SwapSt<atomic_store_64>, I64, addrgp, S2_storerdabs>;
+def: Storea_pat<SwapSt<atomic_store_8>, I32, addrgp, PS_storerbabs>;
+def: Storea_pat<SwapSt<atomic_store_16>, I32, addrgp, PS_storerhabs>;
+def: Storea_pat<SwapSt<atomic_store_32>, I32, addrgp, PS_storeriabs>;
+def: Storea_pat<SwapSt<atomic_store_64>, I64, addrgp, PS_storerdabs>;
let Constraints = "@earlyclobber $dst" in
def Insert4 : PseudoM<(outs DoubleRegs:$dst), (ins IntRegs:$a, IntRegs:$b,