Diffstat (limited to 'compiler/optimizing/code_generator_riscv64.cc')
-rw-r--r--  compiler/optimizing/code_generator_riscv64.cc  24
1 file changed, 7 insertions(+), 17 deletions(-)
diff --git a/compiler/optimizing/code_generator_riscv64.cc b/compiler/optimizing/code_generator_riscv64.cc
index 108c948345..0c0b8a9f14 100644
--- a/compiler/optimizing/code_generator_riscv64.cc
+++ b/compiler/optimizing/code_generator_riscv64.cc
@@ -266,8 +266,8 @@ void LocationsBuilderRISCV64::HandleInvoke(HInvoke* instruction) {
class CompileOptimizedSlowPathRISCV64 : public SlowPathCodeRISCV64 {
public:
- CompileOptimizedSlowPathRISCV64(HSuspendCheck* suspend_check, XRegister base, int32_t imm12)
- : SlowPathCodeRISCV64(suspend_check),
+ CompileOptimizedSlowPathRISCV64(XRegister base, int32_t imm12)
+ : SlowPathCodeRISCV64(/*instruction=*/ nullptr),
base_(base),
imm12_(imm12) {}
@@ -280,18 +280,10 @@ class CompileOptimizedSlowPathRISCV64 : public SlowPathCodeRISCV64 {
XRegister counter = srs.AllocateXRegister();
__ LoadConst32(counter, ProfilingInfo::GetOptimizeThreshold());
__ Sh(counter, base_, imm12_);
- if (instruction_ != nullptr) {
- // Only saves live vector regs for SIMD.
- SaveLiveRegisters(codegen, instruction_->GetLocations());
- }
__ Loadd(RA, TR, entrypoint_offset);
// Note: we don't record the call here (and therefore don't generate a stack
// map), as the entrypoint should never be suspended.
__ Jalr(RA);
- if (instruction_ != nullptr) {
- // Only restores live vector regs for SIMD.
- RestoreLiveRegisters(codegen, instruction_->GetLocations());
- }
__ J(GetExitLabel());
}
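
After this hunk, the slow path body reduces to two steps: store the optimize threshold back into the method's 16-bit hotness counter, then call the JIT entrypoint without saving/restoring live registers and without recording a stack map. Below is a minimal standalone C++ sketch of that behavior, not the generated code itself; kOptimizeThreshold and CompileOptimizedEntrypoint are hypothetical stand-ins for ProfilingInfo::GetOptimizeThreshold() and the real entrypoint loaded from the thread register.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for ProfilingInfo::GetOptimizeThreshold().
constexpr uint16_t kOptimizeThreshold = 0xffff;

// Hypothetical stand-in for the entrypoint reached via
// __ Loadd(RA, TR, entrypoint_offset); __ Jalr(RA).
void CompileOptimizedEntrypoint() {
  std::puts("request optimized compilation");  // no stack map is recorded
}

// Models EmitNativeCode(): `counter` plays the role of the halfword at
// base_ + imm12_ that __ Sh() writes back.
void CompileOptimizedSlowPath(uint16_t* counter) {
  *counter = kOptimizeThreshold;  // __ LoadConst32(...); __ Sh(counter, base_, imm12_)
  CompileOptimizedEntrypoint();
}

int main() {
  uint16_t hotness = 0;  // the fast path branched here because it hit zero
  CompileOptimizedSlowPath(&hotness);
  return hotness == kOptimizeThreshold ? 0 : 1;
}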
@@ -2017,7 +2009,7 @@ void InstructionCodeGeneratorRISCV64::HandleGoto(HInstruction* instruction,
HLoopInformation* info = block->GetLoopInformation();
if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
- codegen_->MaybeIncrementHotness(info->GetSuspendCheck(), /*is_frame_entry=*/ false);
+ codegen_->MaybeIncrementHotness(/*is_frame_entry=*/ false);
GenerateSuspendCheck(info->GetSuspendCheck(), successor);
return; // `GenerateSuspendCheck()` emitted the jump.
}
@@ -5702,8 +5694,7 @@ CodeGeneratorRISCV64::CodeGeneratorRISCV64(HGraph* graph,
AddAllocatedRegister(Location::RegisterLocation(RA));
}
-void CodeGeneratorRISCV64::MaybeIncrementHotness(HSuspendCheck* suspend_check,
- bool is_frame_entry) {
+void CodeGeneratorRISCV64::MaybeIncrementHotness(bool is_frame_entry) {
if (GetCompilerOptions().CountHotnessInCompiledCode()) {
ScratchRegisterScope srs(GetAssembler());
XRegister method = is_frame_entry ? kArtMethodRegister : srs.AllocateXRegister();
@@ -5735,7 +5726,7 @@ void CodeGeneratorRISCV64::MaybeIncrementHotness(HSuspendCheck* suspend_check,
XRegister tmp = RA;
__ LoadConst64(tmp, base_address);
SlowPathCodeRISCV64* slow_path =
- new (GetScopedAllocator()) CompileOptimizedSlowPathRISCV64(suspend_check, tmp, imm12);
+ new (GetScopedAllocator()) CompileOptimizedSlowPathRISCV64(tmp, imm12);
AddSlowPath(slow_path);
__ Lhu(counter, tmp, imm12);
__ Beqz(counter, slow_path->GetEntryLabel()); // Can clobber `TMP` if taken.
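
The fast path emitted here loads the halfword counter and branches to the slow path only when it is already zero; otherwise the code following this hunk decrements it and stores it back. The same sequence runs at frame entry and, per the HandleGoto hunk above, on loop back edges that have a suspend check. A standalone sketch of that dispatch, assuming a decrement-to-zero policy and a plain function pointer in place of the slow-path label:

#include <cstdint>

// Models the code emitted by MaybeIncrementHotness(): load the 16-bit
// counter, take the slow path when it is zero, else decrement and store.
void MaybeIncrementHotness(uint16_t* counter, void (*slow_path)(uint16_t*)) {
  uint16_t value = *counter;  // __ Lhu(counter, tmp, imm12)
  if (value == 0) {           // __ Beqz(counter, slow_path->GetEntryLabel())
    slow_path(counter);       // resets the counter to the optimize threshold
    return;
  }
  *counter = value - 1;       // assumed decrement-and-store on the fast path
}

int main() {
  uint16_t hotness = 2;
  auto reset = [](uint16_t* c) { *c = 0xffff; };  // stand-in slow path
  MaybeIncrementHotness(&hotness, reset);  // 2 -> 1
  MaybeIncrementHotness(&hotness, reset);  // 1 -> 0
  MaybeIncrementHotness(&hotness, reset);  // 0: takes the slow path
  return hotness == 0xffff ? 0 : 1;
}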
@@ -5880,7 +5871,7 @@ void CodeGeneratorRISCV64::GenerateFrameEntry() {
__ Storew(Zero, SP, GetStackOffsetOfShouldDeoptimizeFlag());
}
}
- MaybeIncrementHotness(/* suspend_check= */ nullptr, /*is_frame_entry=*/ true);
+ MaybeIncrementHotness(/*is_frame_entry=*/ true);
}
void CodeGeneratorRISCV64::GenerateFrameExit() {
@@ -6743,8 +6734,7 @@ void CodeGeneratorRISCV64::MaybeGenerateInlineCacheCheck(HInstruction* instructi
if (ProfilingInfoBuilder::IsInlineCacheUseful(instruction->AsInvoke(), this)) {
ProfilingInfo* info = GetGraph()->GetProfilingInfo();
DCHECK(info != nullptr);
- InlineCache* cache = ProfilingInfoBuilder::GetInlineCache(
- info, GetCompilerOptions(), instruction->AsInvoke());
+ InlineCache* cache = ProfilingInfoBuilder::GetInlineCache(info, instruction->AsInvoke());
if (cache != nullptr) {
uint64_t address = reinterpret_cast64<uint64_t>(cache);
Riscv64Label done;
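
The hunk ends mid-sequence: after computing the InlineCache address, the generated code (past this excerpt) compares the receiver's class against the cache and branches to `done` on a hit, otherwise taking an update path. A rough standalone model of that check follows; the cache layout, kCacheSize, and UpdateInlineCache are assumptions standing in for the runtime's actual InlineCache and update entrypoint.

#include <cstddef>

struct Class {};

// Hypothetical layout; the real InlineCache lives in ProfilingInfo.
struct InlineCache {
  static constexpr size_t kCacheSize = 5;
  Class* classes[kCacheSize] = {};
};

// Stand-in for the update entrypoint taken on a miss.
void UpdateInlineCache(InlineCache* cache, Class* cls) {
  for (Class*& slot : cache->classes) {
    if (slot == nullptr || slot == cls) {
      slot = cls;
      return;
    }
  }
  // Cache full: treated as megamorphic; details omitted.
}

// Models the emitted check: the fast path only tests the first entry
// (branching to the `done` label declared in the hunk); a miss updates.
void InlineCacheCheck(InlineCache* cache, Class* receiver_class) {
  if (cache->classes[0] != receiver_class) {
    UpdateInlineCache(cache, receiver_class);
  }
  // done:
}

int main() {
  InlineCache cache;
  Class a, b;
  InlineCacheCheck(&cache, &a);  // miss: records `a`
  InlineCacheCheck(&cache, &a);  // hit on the first entry
  InlineCacheCheck(&cache, &b);  // miss: falls through to the updater
}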