author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>   2023-12-15 00:20:44 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>   2023-12-15 00:20:44 +0000
commit     625c3bf0500721d11f2611cdce187228dd6665c7 (patch)
tree       cdb4f64a3800e00d8176162ccb9dc56367c5fa72
parent     e5ea745193d7bf227c0e6564d93196737d9733ae (diff)
parent     0062f43dd2d74dca5bffcfc8f3eae1a82e827c9b (diff)
download   art-android14-qpr2-s3-release.tar.gz
Change-Id: I4bc992f0ab92d4c72f0d347ef58a33f3cc86511c
-rw-r--r--   runtime/class_linker.cc         8
-rw-r--r--   runtime/jit/jit.cc             88
-rw-r--r--   runtime/jit/jit.h               3
-rw-r--r--   runtime/jit/jit_code_cache.cc  78
-rw-r--r--   runtime/jit/jit_code_cache.h   22
-rw-r--r--   runtime/runtime.cc              3
-rw-r--r--   runtime/thread_pool.cc          5
-rw-r--r--   runtime/thread_pool.h           3
-rw-r--r--   test/knownfailures.json         3
9 files changed, 33 insertions(+), 180 deletions(-)
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 838d6c1a80..151abeb3db 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2405,6 +2405,10 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
// enabling tracing requires the mutator lock, there are no race conditions here.
const bool tracing_enabled = Trace::IsTracingEnabled();
Thread* const self = Thread::Current();
+ // For simplicity, if there is JIT activity, we'll trace all class loaders.
+ // This prevents class unloading while a method is being compiled or is going
+ // to be compiled.
+ const bool is_jit_active = jit::Jit::IsActive(self);
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
if (gUseReadBarrier) {
// We do not track new roots for CC.
@@ -2435,8 +2439,8 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
// these objects.
UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
boot_class_table_->VisitRoots(root_visitor);
- // If tracing is enabled, then mark all the class loaders to prevent unloading.
- if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
+ // If tracing is enabled or jit is active, mark all the class loaders to prevent unloading.
+ if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled || is_jit_active) {
for (const ClassLoaderData& data : class_loaders_) {
GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
root.VisitRoot(visitor, RootInfo(kRootVMInternal));
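The class_linker.cc hunks above fold JIT activity into the same condition that already pins class loaders for tracing: while any JIT compilation is in flight or queued, every class loader is reported as a GC root, so no class can be unloaded out from under a method that is being (or is about to be) compiled. Below is a minimal, self-contained C++ sketch of that pattern; the types (ClassLoaderEntry, RootVisitor, the flag value) are simplified stand-ins for illustration, not the real ART declarations.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Simplified stand-ins for the ART types involved; the real code uses GcRoot,
// RootInfo, and mutex-guarded class-loader lists.
struct ClassLoaderEntry {
  std::string name;
};

enum VisitRootFlags : uint32_t {
  kVisitRootFlagClassLoader = 1u << 0,
};

class RootVisitor {
 public:
  virtual ~RootVisitor() = default;
  virtual void VisitRoot(const ClassLoaderEntry& loader) = 0;
};

// Mirrors the shape of ClassLinker::VisitClassRoots after this change: class
// loaders are reported as roots (and therefore kept alive) when the caller
// asks for them, when tracing is enabled, or when the JIT is active.
void VisitClassLoaderRoots(const std::vector<ClassLoaderEntry>& loaders,
                           RootVisitor* visitor,
                           uint32_t flags,
                           bool tracing_enabled,
                           bool is_jit_active) {
  if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled || is_jit_active) {
    for (const ClassLoaderEntry& loader : loaders) {
      visitor->VisitRoot(loader);  // Marking the loader as a root blocks unloading.
    }
  }
}

class PrintingVisitor : public RootVisitor {
 public:
  void VisitRoot(const ClassLoaderEntry& loader) override {
    std::cout << "pinned class loader: " << loader.name << '\n';
  }
};

int main() {
  std::vector<ClassLoaderEntry> loaders = {{"boot"}, {"app"}};
  PrintingVisitor visitor;
  // With the JIT active, every loader is pinned even though no flag or
  // tracing requested it.
  VisitClassLoaderRoots(loaders, &visitor, /*flags=*/0,
                        /*tracing_enabled=*/false, /*is_jit_active=*/true);
  return 0;
}
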
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index f67ffddcfc..c20329855f 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -229,10 +229,6 @@ bool Jit::CompileMethodInternal(ArtMethod* method,
Thread* self,
CompilationKind compilation_kind,
bool prejit) {
- if (kIsDebugBuild) {
- MutexLock mu(self, *Locks::jit_lock_);
- CHECK(GetCodeCache()->IsMethodBeingCompiled(method, compilation_kind));
- }
DCHECK(Runtime::Current()->UseJitCompilation());
DCHECK(!method->IsRuntimeMethod());
@@ -728,51 +724,6 @@ void Jit::NotifyZygoteCompilationDone() {
child_mapping_methods.Reset();
}
-class ScopedCompilation {
- public:
- ScopedCompilation(ScopedCompilation&& other) noexcept :
- jit_(other.jit_),
- method_(other.method_),
- compilation_kind_(other.compilation_kind_),
- owns_compilation_(other.owns_compilation_) {
- other.owns_compilation_ = false;
- }
-
- ScopedCompilation(Jit* jit, ArtMethod* method, CompilationKind compilation_kind)
- : jit_(jit),
- method_(method),
- compilation_kind_(compilation_kind),
- owns_compilation_(true) {
- MutexLock mu(Thread::Current(), *Locks::jit_lock_);
- // We don't want to enqueue any new tasks when thread pool has stopped. This simplifies
- // the implementation of redefinition feature in jvmti.
- if (jit_->GetThreadPool() == nullptr ||
- !jit_->GetThreadPool()->HasStarted(Thread::Current()) ||
- jit_->GetCodeCache()->IsMethodBeingCompiled(method_, compilation_kind_)) {
- owns_compilation_ = false;
- return;
- }
- jit_->GetCodeCache()->AddMethodBeingCompiled(method_, compilation_kind_);
- }
-
- bool OwnsCompilation() const {
- return owns_compilation_;
- }
-
- ~ScopedCompilation() {
- if (owns_compilation_) {
- MutexLock mu(Thread::Current(), *Locks::jit_lock_);
- jit_->GetCodeCache()->RemoveMethodBeingCompiled(method_, compilation_kind_);
- }
- }
-
- private:
- Jit* const jit_;
- ArtMethod* const method_;
- const CompilationKind compilation_kind_;
- bool owns_compilation_;
-};
-
class JitCompileTask final : public Task {
public:
enum class TaskKind {
@@ -782,16 +733,10 @@ class JitCompileTask final : public Task {
JitCompileTask(ArtMethod* method,
TaskKind task_kind,
- CompilationKind compilation_kind,
- ScopedCompilation&& sc)
+ CompilationKind compilation_kind)
: method_(method),
kind_(task_kind),
- compilation_kind_(compilation_kind),
- scoped_compilation_(std::move(sc)) {
- DCHECK(scoped_compilation_.OwnsCompilation());
- // NOLINTNEXTLINE - OwnsCompilation is still valid after move constructor
- DCHECK(!sc.OwnsCompilation());
- }
+ compilation_kind_(compilation_kind) {}
void Run(Thread* self) override {
{
@@ -819,7 +764,6 @@ class JitCompileTask final : public Task {
ArtMethod* const method_;
const TaskKind kind_;
const CompilationKind compilation_kind_;
- ScopedCompilation scoped_compilation_;
DISALLOW_IMPLICIT_CONSTRUCTORS(JitCompileTask);
};
@@ -1314,15 +1258,12 @@ void Jit::AddCompileTask(Thread* self,
ArtMethod* method,
CompilationKind compilation_kind,
bool precompile) {
- ScopedCompilation sc(this, method, compilation_kind);
- if (!sc.OwnsCompilation()) {
- return;
- }
JitCompileTask::TaskKind task_kind = precompile
? JitCompileTask::TaskKind::kPreCompile
: JitCompileTask::TaskKind::kCompile;
- thread_pool_->AddTask(
- self, new JitCompileTask(method, task_kind, compilation_kind, std::move(sc)));
+ if (thread_pool_ != nullptr && thread_pool_->HasStarted(self)) {
+ thread_pool_->AddTask(self, new JitCompileTask(method, task_kind, compilation_kind));
+ }
}
bool Jit::CompileMethodFromProfile(Thread* self,
@@ -1357,15 +1298,11 @@ bool Jit::CompileMethodFromProfile(Thread* self,
VLOG(jit) << "JIT Zygote processing method " << ArtMethod::PrettyMethod(method)
<< " from profile";
method->SetPreCompiled();
- ScopedCompilation sc(this, method, compilation_kind);
- if (!sc.OwnsCompilation()) {
- return false;
- }
if (!add_to_queue) {
CompileMethodInternal(method, self, compilation_kind, /* prejit= */ true);
} else {
Task* task = new JitCompileTask(
- method, JitCompileTask::TaskKind::kPreCompile, compilation_kind, std::move(sc));
+ method, JitCompileTask::TaskKind::kPreCompile, compilation_kind);
if (compile_after_boot) {
AddPostBootTask(self, task);
} else {
@@ -1816,11 +1753,6 @@ bool Jit::CompileMethod(ArtMethod* method,
Thread* self,
CompilationKind compilation_kind,
bool prejit) {
- ScopedCompilation sc(this, method, compilation_kind);
- // TODO: all current users of this method expect us to wait if it is being compiled.
- if (!sc.OwnsCompilation()) {
- return false;
- }
// Fake being in a runtime thread so that class-load behavior will be the same as normal jit.
ScopedSetRuntimeThread ssrt(self);
// TODO(ngeoffray): For JIT at first use, use kPreCompile. Currently we don't due to
@@ -1828,5 +1760,13 @@ bool Jit::CompileMethod(ArtMethod* method,
return CompileMethodInternal(method, self, compilation_kind, prejit);
}
+bool Jit::IsActive(Thread* self) {
+ Jit* jit = Runtime::Current()->GetJit();
+ if (jit == nullptr || jit->GetThreadPool() == nullptr) {
+ return false;
+ }
+ return jit->GetThreadPool()->IsActive(self);
+}
+
} // namespace jit
} // namespace art
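The jit.cc hunks replace the per-method ScopedCompilation bookkeeping with two much simpler checks: AddCompileTask now only requires that the thread pool exists and has started, and the new static Jit::IsActive() is a null-safe delegation to ThreadPool::IsActive(). A heavily simplified sketch of that shape, using hypothetical stand-in classes (FakeThreadPool, FakeJit) rather than the real ART types:

#include <functional>
#include <iostream>
#include <memory>
#include <queue>

// Hypothetical, single-threaded stand-in for ART's ThreadPool; the real class
// owns worker threads and guards its queue with task_queue_lock_.
class FakeThreadPool {
 public:
  void Start() { started_ = true; }
  bool HasStarted() const { return started_; }
  bool IsActive() const { return !tasks_.empty(); }
  void AddTask(std::function<void()> task) { tasks_.push(std::move(task)); }

 private:
  bool started_ = false;
  std::queue<std::function<void()>> tasks_;
};

// Mirrors the shape of the patched jit.cc: enqueueing a compile task no longer
// records "method being compiled" state, it only checks that the pool exists
// and has started; IsActive() simply asks the pool.
class FakeJit {
 public:
  explicit FakeJit(std::unique_ptr<FakeThreadPool> pool) : pool_(std::move(pool)) {}

  FakeThreadPool* GetThreadPool() { return pool_.get(); }

  void AddCompileTask(std::function<void()> compile_task) {
    if (pool_ != nullptr && pool_->HasStarted()) {
      pool_->AddTask(std::move(compile_task));
    }
    // Otherwise the request is dropped, as in the patched code path.
  }

  bool IsActive() const {
    if (pool_ == nullptr) {
      return false;
    }
    return pool_->IsActive();
  }

 private:
  std::unique_ptr<FakeThreadPool> pool_;
};

int main() {
  FakeJit jit(std::make_unique<FakeThreadPool>());
  jit.AddCompileTask([] {});                               // Dropped: pool not started.
  std::cout << std::boolalpha << jit.IsActive() << '\n';   // false
  jit.GetThreadPool()->Start();
  jit.AddCompileTask([] {});                               // Queued: the JIT counts as active.
  std::cout << jit.IsActive() << '\n';                     // true
  return 0;
}
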
diff --git a/runtime/jit/jit.h b/runtime/jit/jit.h
index d5fcd35b3d..28bd99dda4 100644
--- a/runtime/jit/jit.h
+++ b/runtime/jit/jit.h
@@ -242,6 +242,9 @@ class Jit {
// Create JIT itself.
static std::unique_ptr<Jit> Create(JitCodeCache* code_cache, JitOptions* options);
+ // Return whether any JIT thread is busy compiling, or the task queue is not empty.
+ static bool IsActive(Thread* self);
+
bool CompileMethod(ArtMethod* method, Thread* self, CompilationKind compilation_kind, bool prejit)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 4f59930806..6a7820b207 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -1197,62 +1197,9 @@ void JitCodeCache::SetGarbageCollectCode(bool value) {
garbage_collect_code_ = value;
}
-void JitCodeCache::RemoveMethodBeingCompiled(ArtMethod* method, CompilationKind kind) {
- ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
- DCHECK(IsMethodBeingCompiled(method, kind));
- switch (kind) {
- case CompilationKind::kOsr:
- current_osr_compilations_.erase(method);
- break;
- case CompilationKind::kBaseline:
- current_baseline_compilations_.erase(method);
- break;
- case CompilationKind::kOptimized:
- current_optimized_compilations_.erase(method);
- break;
- }
-}
-
-void JitCodeCache::AddMethodBeingCompiled(ArtMethod* method, CompilationKind kind) {
- ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
- DCHECK(!IsMethodBeingCompiled(method, kind));
- switch (kind) {
- case CompilationKind::kOsr:
- current_osr_compilations_.insert(method);
- break;
- case CompilationKind::kBaseline:
- current_baseline_compilations_.insert(method);
- break;
- case CompilationKind::kOptimized:
- current_optimized_compilations_.insert(method);
- break;
- }
-}
-
-bool JitCodeCache::IsMethodBeingCompiled(ArtMethod* method, CompilationKind kind) {
- ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
- switch (kind) {
- case CompilationKind::kOsr:
- return ContainsElement(current_osr_compilations_, method);
- case CompilationKind::kBaseline:
- return ContainsElement(current_baseline_compilations_, method);
- case CompilationKind::kOptimized:
- return ContainsElement(current_optimized_compilations_, method);
- }
-}
-
-bool JitCodeCache::IsMethodBeingCompiled(ArtMethod* method) {
- ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
- return ContainsElement(current_optimized_compilations_, method) ||
- ContainsElement(current_osr_compilations_, method) ||
- ContainsElement(current_baseline_compilations_, method);
-}
-
ProfilingInfo* JitCodeCache::GetProfilingInfo(ArtMethod* method, Thread* self) {
ScopedDebugDisallowReadBarriers sddrb(self);
MutexLock mu(self, *Locks::jit_lock_);
- DCHECK(IsMethodBeingCompiled(method))
- << "GetProfilingInfo should only be called when the method is being compiled";
auto it = profiling_infos_.find(method);
if (it == profiling_infos_.end()) {
return nullptr;
@@ -1572,35 +1519,10 @@ bool JitCodeCache::IsOsrCompiled(ArtMethod* method) {
return osr_code_map_.find(method) != osr_code_map_.end();
}
-void JitCodeCache::VisitRoots(RootVisitor* visitor) {
- if (Runtime::Current()->GetHeap()->IsPerformingUffdCompaction()) {
- // In case of userfaultfd compaction, ArtMethods are updated concurrently
- // via linear-alloc.
- return;
- }
- MutexLock mu(Thread::Current(), *Locks::jit_lock_);
- UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
- for (ArtMethod* method : current_optimized_compilations_) {
- method->VisitRoots(root_visitor, kRuntimePointerSize);
- }
- for (ArtMethod* method : current_baseline_compilations_) {
- method->VisitRoots(root_visitor, kRuntimePointerSize);
- }
- for (ArtMethod* method : current_osr_compilations_) {
- method->VisitRoots(root_visitor, kRuntimePointerSize);
- }
-}
-
bool JitCodeCache::NotifyCompilationOf(ArtMethod* method,
Thread* self,
CompilationKind compilation_kind,
bool prejit) {
- if (kIsDebugBuild) {
- MutexLock mu(self, *Locks::jit_lock_);
- // Note: the compilation kind may have been adjusted after what was passed initially.
- // We really just want to check that the method is indeed being compiled.
- CHECK(IsMethodBeingCompiled(method));
- }
const void* existing_entry_point = method->GetEntryPointFromQuickCompiledCode();
if (compilation_kind != CompilationKind::kOsr && ContainsPc(existing_entry_point)) {
OatQuickMethodHeader* method_header =
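For context on what the jit_code_cache.cc hunks delete: the code cache used to keep one set of in-flight methods per compilation kind (baseline, optimized, OSR), exposed through Add/Remove/IsMethodBeingCompiled and visited as GC roots. The sketch below is a simplified illustration of that removed bookkeeping, using placeholder types rather than ArtMethod and Locks::jit_lock_, purely to show the state that the class-loader pinning now replaces.

#include <cassert>
#include <set>
#include <string>

// Placeholder for ArtMethod*; the real sets were guarded by Locks::jit_lock_.
struct Method {
  std::string name;
  bool operator<(const Method& other) const { return name < other.name; }
};

enum class CompilationKind { kBaseline, kOptimized, kOsr };

// Simplified sketch of the per-kind tracking removed by this change.
class InFlightCompilations {
 public:
  void Add(const Method& m, CompilationKind kind) {
    assert(!Contains(m, kind));  // Mirrors the DCHECK in AddMethodBeingCompiled.
    SetFor(kind).insert(m);
  }

  void Remove(const Method& m, CompilationKind kind) {
    assert(Contains(m, kind));   // Mirrors the DCHECK in RemoveMethodBeingCompiled.
    SetFor(kind).erase(m);
  }

  bool Contains(const Method& m, CompilationKind kind) {
    return SetFor(kind).count(m) != 0;
  }

  bool ContainsAnyKind(const Method& m) {
    return Contains(m, CompilationKind::kBaseline) ||
           Contains(m, CompilationKind::kOptimized) ||
           Contains(m, CompilationKind::kOsr);
  }

 private:
  std::set<Method>& SetFor(CompilationKind kind) {
    switch (kind) {
      case CompilationKind::kBaseline:  return baseline_;
      case CompilationKind::kOptimized: return optimized_;
      case CompilationKind::kOsr:       return osr_;
    }
    return baseline_;  // Unreachable; silences "no return" warnings.
  }

  std::set<Method> baseline_;
  std::set<Method> optimized_;
  std::set<Method> osr_;
};

int main() {
  InFlightCompilations tracker;
  tracker.Add({"Foo.bar"}, CompilationKind::kOptimized);
  assert(tracker.ContainsAnyKind({"Foo.bar"}));
  tracker.Remove({"Foo.bar"}, CompilationKind::kOptimized);
  assert(!tracker.ContainsAnyKind({"Foo.bar"}));
  return 0;
}
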
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 82510fbe3c..eb605947a1 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -422,20 +422,6 @@ class JitCodeCache {
Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_);
- void VisitRoots(RootVisitor* visitor);
-
- // Return whether `method` is being compiled with the given mode.
- bool IsMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
- REQUIRES(Locks::jit_lock_);
-
- // Remove `method` from the list of methods meing compiled with the given mode.
- void RemoveMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
- REQUIRES(Locks::jit_lock_);
-
- // Record that `method` is being compiled with the given mode.
- void AddMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
- REQUIRES(Locks::jit_lock_);
-
private:
JitCodeCache();
@@ -521,9 +507,6 @@ class JitCodeCache {
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- // Return whether `method` is being compiled in any mode.
- bool IsMethodBeingCompiled(ArtMethod* method) REQUIRES(Locks::jit_lock_);
-
class JniStubKey;
class JniStubData;
@@ -569,11 +552,6 @@ class JitCodeCache {
// ProfilingInfo objects we have allocated.
SafeMap<ArtMethod*, ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);
- // Methods we are currently compiling, one set for each kind of compilation.
- std::set<ArtMethod*> current_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);
- std::set<ArtMethod*> current_osr_compilations_ GUARDED_BY(Locks::jit_lock_);
- std::set<ArtMethod*> current_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);
-
// Methods that the zygote has compiled and can be shared across processes
// forked from the zygote.
ZygoteMap zygote_map_;
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index f79351ce93..a1d260740b 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -2614,9 +2614,6 @@ void Runtime::VisitConcurrentRoots(RootVisitor* visitor, VisitRootFlags flags) {
}
jni_id_manager_->VisitRoots(visitor);
heap_->VisitAllocationRecords(visitor);
- if (jit_ != nullptr) {
- jit_->GetCodeCache()->VisitRoots(visitor);
- }
if ((flags & kVisitRootFlagNewRoots) == 0) {
// Guaranteed to have no new roots in the constant roots.
VisitConstantRoots(visitor);
diff --git a/runtime/thread_pool.cc b/runtime/thread_pool.cc
index de9ccbaba5..fdfed7402c 100644
--- a/runtime/thread_pool.cc
+++ b/runtime/thread_pool.cc
@@ -304,6 +304,11 @@ Task* ThreadPool::TryGetTaskLocked() {
return nullptr;
}
+bool ThreadPool::IsActive(Thread* self) {
+ MutexLock mu(self, task_queue_lock_);
+ return waiting_count_ != GetThreadCount() || HasOutstandingTasks();
+}
+
void ThreadPool::Wait(Thread* self, bool do_work, bool may_hold_locks) {
if (do_work) {
CHECK(!create_peers_);
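ThreadPool::IsActive(), added above, treats the pool as active whenever some worker is not waiting for work or the task queue is non-empty, with both values read under task_queue_lock_. A minimal sketch of just that predicate, modelling only the counters it consults (PoolState, MarkWaiting, MarkWorking are illustrative names, not the ART API):

#include <cstddef>
#include <deque>
#include <functional>
#include <iostream>
#include <mutex>

// Minimal sketch of the state ThreadPool::IsActive() consults. The real pool
// owns worker threads and condition variables; here we only model the counters
// the predicate reads under the queue lock.
class PoolState {
 public:
  explicit PoolState(size_t thread_count) : thread_count_(thread_count) {}

  // A worker announces it is idle, or busy again after having been idle.
  void MarkWaiting() { std::lock_guard<std::mutex> lock(mutex_); ++waiting_count_; }
  void MarkWorking() { std::lock_guard<std::mutex> lock(mutex_); --waiting_count_; }

  void AddTask(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push_back(std::move(task));
  }

  // Same condition as the new ThreadPool::IsActive(): the pool is active if
  // some worker is not waiting, or if tasks are still queued.
  bool IsActive() {
    std::lock_guard<std::mutex> lock(mutex_);
    return waiting_count_ != thread_count_ || !tasks_.empty();
  }

 private:
  std::mutex mutex_;
  const size_t thread_count_;
  size_t waiting_count_ = 0;  // Workers currently blocked waiting for work.
  std::deque<std::function<void()>> tasks_;
};

int main() {
  PoolState pool(/*thread_count=*/2);
  pool.MarkWaiting();
  pool.MarkWaiting();                                       // Both workers idle, queue empty,
  std::cout << std::boolalpha << pool.IsActive() << '\n';   // so the pool is inactive: false.
  pool.AddTask([] {});                                      // A queued task makes it active.
  std::cout << pool.IsActive() << '\n';                     // true
  return 0;
}
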
diff --git a/runtime/thread_pool.h b/runtime/thread_pool.h
index 5c75733517..7a3acecfa1 100644
--- a/runtime/thread_pool.h
+++ b/runtime/thread_pool.h
@@ -133,6 +133,9 @@ class ThreadPool {
// Remove all tasks in the queue.
void RemoveAllTasks(Thread* self) REQUIRES(!task_queue_lock_);
+ // Return whether any thread in the pool is busy, or the task queue is not empty.
+ bool IsActive(Thread* self) REQUIRES(!task_queue_lock_);
+
// Create a named thread pool with the given number of threads.
//
// If create_peers is true, all worker threads will have a Java peer object. Note that if the
diff --git a/test/knownfailures.json b/test/knownfailures.json
index c858c70ddc..659e719e51 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -577,7 +577,8 @@
"817-hiddenapi",
"944-transform-classloaders",
"999-redefine-hiddenapi",
- "2038-hiddenapi-jvmti-ext"
+ "2038-hiddenapi-jvmti-ext",
+ "2270-mh-internal-hiddenapi-use"
],
"description": [
"Tests that use custom class loaders or other features not supported ",