Get rid of #ifdef for JIT::Assembler::generateCJumpOnCompare

...by moving the body into the 8-byte register operations class.

Change-Id: I386c1af711935f08f48cb65adb2f1f4fec64322d
Reviewed-by: Lars Knoll <lars.knoll@qt.io>

parent 4d15332948
commit 9500615569
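The change replaces a preprocessor guard with a template specialization keyed on register size: the caller always compiles and delegates, and only the 8-byte specialization carries a real body. A minimal sketch of that pattern, with invented names (not the actual qv4assembler code):

// Sketch only: simplified stand-ins for Assembler / RegisterSizeDependentAssembler.
#include <cassert>
#include <cstdio>

template <int RegisterSize>
struct RegisterSizeDependentOps;

template <>
struct RegisterSizeDependentOps<4>
{
    // 32-bit value encoding never takes this path; assert loudly instead of
    // #ifdef-ing the member function away.
    static void generateCJumpOnCompare(long, long)
    {
        assert(!"unimplemented generateCJumpOnCompare with a 64-bit immediate on 32-bit");
    }
};

template <>
struct RegisterSizeDependentOps<8>
{
    // Only the 8-byte register specialization emits the 64-bit compare-and-branch.
    static void generateCJumpOnCompare(long left, long right)
    {
        std::printf("emit 64-bit compare of %ld and %ld, then branch\n", left, right);
    }
};

template <int RegisterSize>
struct Assembler
{
    // The wrapper is always defined; the specialization decides what happens.
    void generateCJumpOnCompare(long left, long right)
    {
        RegisterSizeDependentOps<RegisterSize>::generateCJumpOnCompare(left, right);
    }
};

int main()
{
    Assembler<8>().generateCJumpOnCompare(1, 2);   // real 64-bit path
    // Assembler<4>().generateCJumpOnCompare(1, 2); // would hit the assert
    return 0;
}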
@@ -209,7 +209,6 @@ void Assembler<TargetConfiguration>::generateCJumpOnNonZero(RegisterID reg, IR::
     generateCJumpOnCompare(RelationalCondition::NotEqual, reg, TrustedImm32(0), currentBlock, trueBlock, falseBlock);
 }
 
-#ifdef QV4_USE_64_BIT_VALUE_ENCODING
 template <typename TargetConfiguration>
 void Assembler<TargetConfiguration>::generateCJumpOnCompare(RelationalCondition cond,
                                                             RegisterID left,
@@ -218,16 +217,8 @@ void Assembler<TargetConfiguration>::generateCJumpOnCompare(RelationalCondition
                                                             IR::BasicBlock *trueBlock,
                                                             IR::BasicBlock *falseBlock)
 {
-    if (trueBlock == _nextBlock) {
-        Jump target = branch64(invert(cond), left, right);
-        addPatch(falseBlock, target);
-    } else {
-        Jump target = branch64(cond, left, right);
-        addPatch(trueBlock, target);
-        jumpToBlock(currentBlock, falseBlock);
-    }
+    RegisterSizeDependentOps::generateCJumpOnCompare(this, cond, left, right, _nextBlock, currentBlock, trueBlock, falseBlock);
 }
-#endif
 
 template <typename TargetConfiguration>
 void Assembler<TargetConfiguration>::generateCJumpOnCompare(RelationalCondition cond,
@@ -114,36 +114,56 @@ struct AssemblerTargetConfiguration
     // More things coming here in the future, such as Target OS
 };
 
-template <typename MacroAssembler, typename TargetPlatform, int RegisterSize>
+template <typename JITAssembler, typename MacroAssembler, typename TargetPlatform, int RegisterSize>
 struct RegisterSizeDependentAssembler
 {
 };
 
-template <typename MacroAssembler, typename TargetPlatform>
-struct RegisterSizeDependentAssembler<MacroAssembler, TargetPlatform, 4>
+template <typename JITAssembler, typename MacroAssembler, typename TargetPlatform>
+struct RegisterSizeDependentAssembler<JITAssembler, MacroAssembler, TargetPlatform, 4>
 {
-    using FPRegisterID = typename MacroAssembler::FPRegisterID;
-    using Address = typename MacroAssembler::Address;
+    using RegisterID = typename JITAssembler::RegisterID;
+    using FPRegisterID = typename JITAssembler::FPRegisterID;
+    using RelationalCondition = typename JITAssembler::RelationalCondition;
+    using Address = typename JITAssembler::Address;
+    using TrustedImm64 = typename JITAssembler::TrustedImm64;
 
-    static void loadDouble(MacroAssembler *as, Address addr, FPRegisterID dest)
+    static void loadDouble(JITAssembler *as, Address addr, FPRegisterID dest)
     {
-        as->loadDouble(addr, dest);
+        as->MacroAssembler::loadDouble(addr, dest);
     }
 
-    static void storeDouble(MacroAssembler *as, FPRegisterID source, Address addr)
+    static void storeDouble(JITAssembler *as, FPRegisterID source, Address addr)
     {
-        as->storeDouble(source, addr);
+        as->MacroAssembler::storeDouble(source, addr);
     }
+
+    static void generateCJumpOnCompare(JITAssembler *as,
+                                       RelationalCondition cond,
+                                       RegisterID,
+                                       TrustedImm64,
+                                       IR::BasicBlock *,
+                                       IR::BasicBlock *,
+                                       IR::BasicBlock *,
+                                       IR::BasicBlock *)
+    {
+        Q_UNUSED(as);
+        Q_UNUSED(cond);
+        Q_ASSERT(!"unimplemented generateCJumpOnCompare with TrustedImm64 for 32-bit");
+    }
 };
 
-template <typename MacroAssembler, typename TargetPlatform>
-struct RegisterSizeDependentAssembler<MacroAssembler, TargetPlatform, 8>
+template <typename JITAssembler, typename MacroAssembler, typename TargetPlatform>
+struct RegisterSizeDependentAssembler<JITAssembler, MacroAssembler, TargetPlatform, 8>
 {
-    using FPRegisterID = typename MacroAssembler::FPRegisterID;
-    using Address = typename MacroAssembler::Address;
-    using TrustedImm64 = typename MacroAssembler::TrustedImm64;
+    using RegisterID = typename JITAssembler::RegisterID;
+    using FPRegisterID = typename JITAssembler::FPRegisterID;
+    using Address = typename JITAssembler::Address;
+    using TrustedImm64 = typename JITAssembler::TrustedImm64;
+    using RelationalCondition = typename JITAssembler::RelationalCondition;
+    using Jump = typename JITAssembler::Jump;
 
-    static void loadDouble(MacroAssembler *as, Address addr, FPRegisterID dest)
+    static void loadDouble(JITAssembler *as, Address addr, FPRegisterID dest)
     {
         as->load64(addr, TargetPlatform::ReturnValueRegister);
         as->move(TrustedImm64(QV4::Value::NaNEncodeMask), TargetPlatform::ScratchRegister);
@@ -151,13 +171,32 @@ struct RegisterSizeDependentAssembler<MacroAssembler, TargetPlatform, 8>
         as->move64ToDouble(TargetPlatform::ReturnValueRegister, dest);
     }
 
-    static void storeDouble(MacroAssembler *as, FPRegisterID source, Address addr)
+    static void storeDouble(JITAssembler *as, FPRegisterID source, Address addr)
     {
         as->moveDoubleTo64(source, TargetPlatform::ReturnValueRegister);
         as->move(TrustedImm64(QV4::Value::NaNEncodeMask), TargetPlatform::ScratchRegister);
         as->xor64(TargetPlatform::ScratchRegister, TargetPlatform::ReturnValueRegister);
         as->store64(TargetPlatform::ReturnValueRegister, addr);
     }
+
+    static void generateCJumpOnCompare(JITAssembler *as,
+                                       RelationalCondition cond,
+                                       RegisterID left,
+                                       TrustedImm64 right,
+                                       IR::BasicBlock *nextBlock,
+                                       IR::BasicBlock *currentBlock,
+                                       IR::BasicBlock *trueBlock,
+                                       IR::BasicBlock *falseBlock)
+    {
+        if (trueBlock == nextBlock) {
+            Jump target = as->branch64(as->invert(cond), left, right);
+            as->addPatch(falseBlock, target);
+        } else {
+            Jump target = as->branch64(cond, left, right);
+            as->addPatch(trueBlock, target);
+            as->jumpToBlock(currentBlock, falseBlock);
+        }
+    }
 };
 
 template <typename TargetConfiguration>
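Two further moves are visible in the hunk above: the trait now takes the JIT assembler type itself as a template parameter, so its static helpers can call back into assembler-level helpers such as addPatch and jumpToBlock, and calls that must reach the raw base-class instruction are explicitly qualified (as->MacroAssembler::loadDouble). A small sketch of that shape, again with invented names rather than the real classes:

// Sketch only: shows the qualified base-class call used to avoid re-entering
// a same-named JIT-level wrapper.
#include <cstdio>

struct MacroAssembler
{
    void loadDouble() { std::printf("raw macro-assembler loadDouble\n"); }
};

template <typename JITAssembler>
struct RegisterSizeDependentOps
{
    static void loadDouble(JITAssembler *as)
    {
        // Qualified call: go straight to the base-class instruction emitter,
        // not back into JITAssembler::loadDouble (which would recurse).
        as->MacroAssembler::loadDouble();
    }
};

struct JITAssembler : public MacroAssembler
{
    // JIT-level entry point that defers to the register-size-dependent helper.
    void loadDouble() { RegisterSizeDependentOps<JITAssembler>::loadDouble(this); }
};

int main()
{
    JITAssembler as;
    as.loadDouble(); // prints the base-class message exactly once
    return 0;
}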
@@ -189,7 +228,6 @@ public:
     using MacroAssembler::xor64;
     using MacroAssembler::store64;
     using MacroAssembler::load64;
-    using MacroAssembler::branch64;
 #endif
     using MacroAssembler::add32;
     using MacroAssembler::and32;
@@ -231,7 +269,7 @@ public:
     using JITTargetPlatform::platformEnterStandardStackFrame;
     using JITTargetPlatform::platformLeaveStandardStackFrame;
 
-    using RegisterSizeDependentOps = RegisterSizeDependentAssembler<MacroAssembler, JITTargetPlatform, RegisterSize>;
+    using RegisterSizeDependentOps = RegisterSizeDependentAssembler<Assembler<TargetConfiguration>, MacroAssembler, JITTargetPlatform, RegisterSize>;
 
     struct LookupCall {
         Address addr;
@@ -456,11 +494,11 @@ public:
     void addPatch(DataLabelPtr patch, IR::BasicBlock *target);
     void generateCJumpOnNonZero(RegisterID reg, IR::BasicBlock *currentBlock,
                                 IR::BasicBlock *trueBlock, IR::BasicBlock *falseBlock);
-#ifdef QV4_USE_64_BIT_VALUE_ENCODING
+
     void generateCJumpOnCompare(RelationalCondition cond, RegisterID left, TrustedImm64 right,
                                 IR::BasicBlock *currentBlock, IR::BasicBlock *trueBlock,
                                 IR::BasicBlock *falseBlock);
-#endif
+
     void generateCJumpOnCompare(RelationalCondition cond, RegisterID left, TrustedImm32 right,
                                 IR::BasicBlock *currentBlock, IR::BasicBlock *trueBlock,
                                 IR::BasicBlock *falseBlock);