summary refs log tree commit diff
path: root/compiler/optimizing/code_generator_x86_64.cc
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/optimizing/code_generator_x86_64.cc')
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc  24
1 files changed, 7 insertions, 17 deletions
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 05b1de9f0a..9d010190f7 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1043,8 +1043,8 @@ class MethodEntryExitHooksSlowPathX86_64 : public SlowPathCode {
class CompileOptimizedSlowPathX86_64 : public SlowPathCode {
public:
- CompileOptimizedSlowPathX86_64(HSuspendCheck* suspend_check, uint64_t counter_address)
- : SlowPathCode(suspend_check),
+ explicit CompileOptimizedSlowPathX86_64(uint64_t counter_address)
+ : SlowPathCode(/* instruction= */ nullptr),
counter_address_(counter_address) {}
void EmitNativeCode(CodeGenerator* codegen) override {
@@ -1052,16 +1052,8 @@ class CompileOptimizedSlowPathX86_64 : public SlowPathCode {
__ Bind(GetEntryLabel());
__ movq(CpuRegister(TMP), Immediate(counter_address_));
__ movw(Address(CpuRegister(TMP), 0), Immediate(ProfilingInfo::GetOptimizeThreshold()));
- if (instruction_ != nullptr) {
- // Only saves full width XMM for SIMD.
- SaveLiveRegisters(codegen, instruction_->GetLocations());
- }
x86_64_codegen->GenerateInvokeRuntime(
GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());
- if (instruction_ != nullptr) {
- // Only restores full width XMM for SIMD.
- RestoreLiveRegisters(codegen, instruction_->GetLocations());
- }
__ jmp(GetExitLabel());
}
@@ -1771,7 +1763,7 @@ void InstructionCodeGeneratorX86_64::VisitMethodExitHook(HMethodExitHook* instru
GenerateMethodEntryExitHook(instruction);
}
-void CodeGeneratorX86_64::MaybeIncrementHotness(HSuspendCheck* suspend_check, bool is_frame_entry) {
+void CodeGeneratorX86_64::MaybeIncrementHotness(bool is_frame_entry) {
if (GetCompilerOptions().CountHotnessInCompiledCode()) {
NearLabel overflow;
Register method = kMethodRegisterArgument;
@@ -1794,8 +1786,7 @@ void CodeGeneratorX86_64::MaybeIncrementHotness(HSuspendCheck* suspend_check, bo
CHECK(!HasEmptyFrame());
uint64_t address = reinterpret_cast64<uint64_t>(info) +
ProfilingInfo::BaselineHotnessCountOffset().Int32Value();
- SlowPathCode* slow_path =
- new (GetScopedAllocator()) CompileOptimizedSlowPathX86_64(suspend_check, address);
+ SlowPathCode* slow_path = new (GetScopedAllocator()) CompileOptimizedSlowPathX86_64(address);
AddSlowPath(slow_path);
// Note: if the address was in the 32bit range, we could use
// Address::Absolute and avoid this movq.
@@ -1900,7 +1891,7 @@ void CodeGeneratorX86_64::GenerateFrameEntry() {
}
}
- MaybeIncrementHotness(/* suspend_check= */ nullptr, /* is_frame_entry= */ true);
+ MaybeIncrementHotness(/* is_frame_entry= */ true);
}
void CodeGeneratorX86_64::GenerateFrameExit() {
@@ -2087,7 +2078,7 @@ void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock*
HLoopInformation* info = block->GetLoopInformation();
if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
- codegen_->MaybeIncrementHotness(info->GetSuspendCheck(), /* is_frame_entry= */ false);
+ codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
GenerateSuspendCheck(info->GetSuspendCheck(), successor);
return;
}
@@ -3150,8 +3141,7 @@ void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instructio
if (ProfilingInfoBuilder::IsInlineCacheUseful(instruction->AsInvoke(), this)) {
ProfilingInfo* info = GetGraph()->GetProfilingInfo();
DCHECK(info != nullptr);
- InlineCache* cache = ProfilingInfoBuilder::GetInlineCache(
- info, GetCompilerOptions(), instruction->AsInvoke());
+ InlineCache* cache = ProfilingInfoBuilder::GetInlineCache(info, instruction->AsInvoke());
if (cache != nullptr) {
uint64_t address = reinterpret_cast64<uint64_t>(cache);
NearLabel done;