diff options
34 files changed, 489 insertions, 213 deletions
diff --git a/Android.bp b/Android.bp index 72b87214e4..afb1341785 100644 --- a/Android.bp +++ b/Android.bp @@ -133,3 +133,19 @@ aidl_interface { frozen: true, } + +latest_av_audio_types_aidl = "av-audio-types-aidl-V1" + +cc_defaults { + name: "latest_av_audio_types_aidl_ndk_shared", + shared_libs: [ + latest_av_audio_types_aidl + "-ndk", + ], +} + +cc_defaults { + name: "latest_av_audio_types_aidl_ndk_static", + static_libs: [ + latest_av_audio_types_aidl + "-ndk", + ], +} diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING index d38859693a..1a637ac389 100644 --- a/media/TEST_MAPPING +++ b/media/TEST_MAPPING @@ -47,8 +47,13 @@ ], // Postsubmit tests for TV devices "tv-postsubmit": [ - { - "name": "DecoderRenderTest" - } + { + "name": "CtsMediaDecoderTestCases", + "options": [ + { + "include-filter": "android.media.decoder.cts.DecoderRenderTest" + } + ] + } ] } diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp index 1c2a0fb7d8..dbbabfeb26 100644 --- a/media/codec2/hal/client/GraphicsTracker.cpp +++ b/media/codec2/hal/client/GraphicsTracker.cpp @@ -173,7 +173,7 @@ void GraphicsTracker::BufferCache::unblockSlot(int slot) { } GraphicsTracker::GraphicsTracker(int maxDequeueCount) - : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount}, + : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount}, mMaxDequeueCommitted{maxDequeueCount}, mDequeueable{maxDequeueCount}, mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0}, @@ -235,6 +235,7 @@ c2_status_t GraphicsTracker::configureGraphics( const sp<IGraphicBufferProducer>& igbp, uint32_t generation) { // TODO: wait until operations to previous IGBP is completed. 
std::shared_ptr<BufferCache> prevCache; + int prevDequeueRequested = 0; int prevDequeueCommitted; std::unique_lock<std::mutex> cl(mConfigLock); @@ -243,6 +244,9 @@ c2_status_t GraphicsTracker::configureGraphics( mInConfig = true; prevCache = mBufferCache; prevDequeueCommitted = mMaxDequeueCommitted; + if (mMaxDequeueRequested.has_value()) { + prevDequeueRequested = mMaxDequeueRequested.value(); + } } // NOTE: Switching to the same surface is blocked from MediaCodec. // Switching to the same surface might not work if tried, since disconnect() @@ -263,6 +267,11 @@ c2_status_t GraphicsTracker::configureGraphics( mInConfig = false; return C2_BAD_VALUE; } + ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)", + prevDequeueRequested, prevDequeueCommitted); + if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) { + prevDequeueCommitted = prevDequeueRequested; + } if (igbp) { ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted); if (ret != ::android::OK) { @@ -280,6 +289,34 @@ c2_status_t GraphicsTracker::configureGraphics( std::unique_lock<std::mutex> l(mLock); mInConfig = false; mBufferCache = newCache; + // {@code dequeued} is the number of currently dequeued buffers. + // {@code prevDequeueCommitted} is max dequeued buffer at any moment + // from the new surface. + // {@code newDequeueable} is hence the current # of dequeueable buffers + // if no change occurs. + int dequeued = mDequeued.size() + mNumDequeueing; + int newDequeueable = prevDequeueCommitted - dequeued; + if (newDequeueable < 0) { + // This will not happen. + // But if this happens, we respect the value and try to continue. 
+ ALOGE("calculated new dequeueable is negative: %d max(%d),dequeued(%d)", + newDequeueable, prevDequeueCommitted, dequeued); + } + + if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) { + mMaxDequeueRequested.reset(); + } + mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted; + + int delta = newDequeueable - mDequeueable; + if (delta > 0) { + writeIncDequeueableLocked(delta); + } else if (delta < 0) { + drainDequeueableLocked(-delta); + } + ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d", + newDequeueable, delta, mMaxDequeue); + mDequeueable = newDequeueable; } return C2_OK; } @@ -529,6 +566,7 @@ c2_status_t GraphicsTracker::requestAllocate(std::shared_ptr<BufferCache> *cache ALOGE("writing end for the waitable object seems to be closed"); return C2_BAD_STATE; } + mNumDequeueing++; mDequeueable--; *cache = mBufferCache; return C2_OK; @@ -543,6 +581,7 @@ void GraphicsTracker::commitAllocate(c2_status_t res, const std::shared_ptr<Buff bool cached, int slot, const sp<Fence> &fence, std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) { std::unique_lock<std::mutex> l(mLock); + mNumDequeueing--; if (res == C2_OK) { if (cached) { auto it = cache->mBuffers.find(slot); @@ -655,7 +694,8 @@ c2_status_t GraphicsTracker::_allocate(const std::shared_ptr<BufferCache> &cache ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d", status); igbp->cancelBuffer(slotId, fence); - return C2_CORRUPTED; + // This might be due to life-cycle end and/or surface switching. 
+ return C2_BLOCKING; } *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence); if (!*buffer) { diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h index dd6c8694ea..762030b3ed 100644 --- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h +++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h @@ -234,6 +234,7 @@ private: // Maps bufferId to buffer std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued; std::set<uint64_t> mDeallocating; + int mNumDequeueing; // These member variables are read and modified accessed as follows. // 1. mConfigLock being held diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp index 197d6e7086..b92615034a 100644 --- a/media/codec2/hal/plugin/FilterWrapper.cpp +++ b/media/codec2/hal/plugin/FilterWrapper.cpp @@ -49,11 +49,6 @@ public: std::weak_ptr<FilterWrapper> filterWrapper) : mIntf(intf), mFilterWrapper(filterWrapper) { takeFilters(std::move(filters)); - for (size_t i = 0; i < mFilters.size(); ++i) { - mControlParamTypes.insert( - mFilters[i].desc.controlParams.begin(), - mFilters[i].desc.controlParams.end()); - } } ~WrappedDecoderInterface() override = default; @@ -91,6 +86,12 @@ public: // TODO: documentation mFilters = std::move(filters); + mControlParamTypes.clear(); + for (size_t i = 0; i < mFilters.size(); ++i) { + mControlParamTypes.insert( + mFilters[i].desc.controlParams.begin(), + mFilters[i].desc.controlParams.end()); + } mTypeToIndexForQuery.clear(); mTypeToIndexForConfig.clear(); for (size_t i = 0; i < mFilters.size(); ++i) { diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp index 20b6d7f719..a897fa0f84 100644 --- a/media/codec2/sfplugin/CCodec.cpp +++ b/media/codec2/sfplugin/CCodec.cpp @@ -2229,9 +2229,15 @@ void CCodec::stop(bool pushBlankBuffer) { // See also b/300350761. 
// // The workaround is no longer needed with fetchGraphicBlock & C2Fence changes. - // so we are reverting back to the logical sequence of the operations. + // so we are reverting back to the logical sequence of the operations when + // AIDL HALs are selected. + // When the HIDL HALs are selected, we retained the workaround (the reversed + // order) as default in order to keep legacy behavior. + bool stopHalBeforeSurface = + Codec2Client::IsAidlSelected() || + property_get_bool("debug.codec2.stop_hal_before_surface", false); status_t err = C2_OK; - if (android::media::codec::provider_->stop_hal_before_surface()) { + if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) { err = comp->stop(); mChannel->stopUseOutputSurface(pushBlankBuffer); } else { @@ -2334,8 +2340,14 @@ void CCodec::release(bool sendCallback, bool pushBlankBuffer) { // See also b/300350761. // // The workaround is no longer needed with fetchGraphicBlock & C2Fence changes. - // so we are reverting back to the logical sequence of the operations. - if (android::media::codec::provider_->stop_hal_before_surface()) { + // so we are reverting back to the logical sequence of the operations when + // AIDL HALs are selected. + // When the HIDL HALs are selected, we retained the workaround (the reversed + // order) as default in order to keep legacy behavior. 
+ bool stopHalBeforeSurface = + Codec2Client::IsAidlSelected() || + property_get_bool("debug.codec2.stop_hal_before_surface", false); + if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) { comp->release(); mChannel->stopUseOutputSurface(pushBlankBuffer); } else { diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp index 3984b83ceb..c7ab82f9a6 100644 --- a/media/codec2/sfplugin/CCodecBufferChannel.cpp +++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp @@ -2784,7 +2784,16 @@ void CCodecBufferChannel::resetBuffersPixelFormat(bool isEncoder) { } void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) { - mInfoBuffers.push_back(buffer); + if (mInputSurface == nullptr) { + mInfoBuffers.push_back(buffer); + } else { + std::list<std::unique_ptr<C2Work>> items; + std::unique_ptr<C2Work> work(new C2Work); + work->input.infoBuffers.emplace_back(*buffer); + work->worklets.emplace_back(new C2Worklet); + items.push_back(std::move(work)); + c2_status_t err = mComponent->queue(&items); + } } status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) { diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp index 77a76e8584..7a33af4096 100644 --- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp +++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp @@ -32,10 +32,15 @@ namespace android { -static bool isAtLeast(int version, const char *codeName) { - char deviceCodeName[PROP_VALUE_MAX]; - __system_property_get("ro.build.version.codename", deviceCodeName); - return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName); +static bool isAtLeast(int version, const std::string codeName) { + static std::once_flag sCheckOnce; + static std::string sDeviceCodeName; + static int sDeviceApiLevel; + std::call_once(sCheckOnce, [&](){ + sDeviceCodeName = 
base::GetProperty("ro.build.version.codename", ""); + sDeviceApiLevel = android_get_device_api_level(); + }); + return sDeviceApiLevel >= version || sDeviceCodeName == codeName; } bool isAtLeastT() { diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp index 2afe80c756..d6b11636a6 100644 --- a/media/libaudioclient/AudioTrack.cpp +++ b/media/libaudioclient/AudioTrack.cpp @@ -1707,14 +1707,14 @@ status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) { mSelectedDeviceId = deviceId; if (mStatus == NO_ERROR) { if (isOffloadedOrDirect_l()) { - if (mState == STATE_STOPPED || mState == STATE_FLUSHED) { - ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId); - result = restoreTrack_l("setOutputDevice", true /* forceRestore */); - } else { + if (isPlaying_l()) { ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. " "State: %s.", __func__, mPortId, stateToString(mState)); result = INVALID_OPERATION; + } else { + ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId); + result = restoreTrack_l("setOutputDevice", true /* forceRestore */); } } else { // allow track invalidation when track is not playing to propagate diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp index dd8f021385..1a6b949508 100644 --- a/media/libaudiohal/impl/Android.bp +++ b/media/libaudiohal/impl/Android.bp @@ -227,11 +227,11 @@ cc_defaults { "latest_android_hardware_audio_core_sounddose_ndk_shared", "latest_android_hardware_audio_effect_ndk_shared", "latest_android_media_audio_common_types_ndk_shared", + "latest_av_audio_types_aidl_ndk_shared", ], shared_libs: [ "android.hardware.common-V2-ndk", "android.hardware.common.fmq-V1-ndk", - "av-audio-types-aidl-V1-ndk", "libaudio_aidl_conversion_common_cpp", "libaudio_aidl_conversion_common_ndk", "libaudio_aidl_conversion_common_ndk_cpp", diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp index 
c35a60edf0..3fe20464ea 100644 --- a/media/libaudiohal/impl/EffectHalAidl.cpp +++ b/media/libaudiohal/impl/EffectHalAidl.cpp @@ -57,7 +57,9 @@ using ::aidl::android::aidl_utils::statusTFromBinderStatus; using ::aidl::android::hardware::audio::effect::Descriptor; using ::aidl::android::hardware::audio::effect::IEffect; using ::aidl::android::hardware::audio::effect::IFactory; +using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty; using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate; +using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty; using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion; using ::aidl::android::hardware::audio::effect::State; @@ -199,6 +201,7 @@ status_t EffectHalAidl::process() { efState & kEventFlagDataMqUpdate) { ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(), halVersion); + mConversion->reopen(); } auto statusQ = mConversion->getStatusMQ(); @@ -224,12 +227,22 @@ status_t EffectHalAidl::process() { floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite()); return INVALID_OPERATION; } - efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty); + + // for V2 audio effect HAL, expect different EventFlag to avoid bit conflict with FMQ_NOT_EMPTY + efGroup->wake(halVersion >= kReopenSupportedVersion ? 
kEventFlagDataMqNotEmpty + : kEventFlagNotEmpty); IEffect::Status retStatus{}; - if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK || - (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) { - ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str()); + if (!statusQ->readBlocking(&retStatus, 1)) { + ALOGE("%s %s V%d read status from status FMQ failed", __func__, effectName.c_str(), + halVersion); + return INVALID_OPERATION; + } + if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != floatsToWrite || + retStatus.fmqProduced == 0) { + ALOGE("%s read status failed: %s, consumed %d (of %zu) produced %d", __func__, + retStatus.toString().c_str(), retStatus.fmqConsumed, floatsToWrite, + retStatus.fmqProduced); return INVALID_OPERATION; } diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp index 9d404a0d56..10c3105303 100644 --- a/media/libaudiohal/impl/StreamHalAidl.cpp +++ b/media/libaudiohal/impl/StreamHalAidl.cpp @@ -200,8 +200,12 @@ status_t StreamHalAidl::standby() { StreamDescriptor::Reply reply; switch (state) { case StreamDescriptor::State::ACTIVE: + case StreamDescriptor::State::DRAINING: + case StreamDescriptor::State::TRANSFERRING: RETURN_STATUS_IF_ERROR(pause(&reply)); - if (reply.state != StreamDescriptor::State::PAUSED) { + if (reply.state != StreamDescriptor::State::PAUSED && + reply.state != StreamDescriptor::State::DRAIN_PAUSED && + reply.state != StreamDescriptor::State::TRANSFER_PAUSED) { ALOGE("%s: unexpected stream state: %s (expected PAUSED)", __func__, toString(reply.state).c_str()); return INVALID_OPERATION; @@ -209,6 +213,7 @@ status_t StreamHalAidl::standby() { FALLTHROUGH_INTENDED; case StreamDescriptor::State::PAUSED: case StreamDescriptor::State::DRAIN_PAUSED: + case StreamDescriptor::State::TRANSFER_PAUSED: if (mIsInput) return flush(); RETURN_STATUS_IF_ERROR(flush(&reply)); if (reply.state != StreamDescriptor::State::IDLE) { @@ 
-276,11 +281,12 @@ status_t StreamHalAidl::getLatency(uint32_t *latency) { return OK; } -status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) { +status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp, + StatePositions* statePositions) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); if (!mStream) return NO_INIT; StreamDescriptor::Reply reply; - RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply)); + RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions)); *frames = std::max<int64_t>(0, reply.observable.frames); *timestamp = std::max<int64_t>(0, reply.observable.timeNs); return OK; @@ -323,8 +329,11 @@ status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred return INVALID_OPERATION; } } + StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE; + std::string fmqErrorMsg; if (!mIsInput) { - bytes = std::min(bytes, mContext.getDataMQ()->availableToWrite()); + bytes = std::min(bytes, + mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg)); } StreamDescriptor::Command burst = StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes); @@ -341,12 +350,14 @@ status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred LOG_ALWAYS_FATAL_IF(*transferred > bytes, "%s: HAL module read %zu bytes, which exceeds requested count %zu", __func__, *transferred, bytes); - if (auto toRead = mContext.getDataMQ()->availableToRead(); + if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg); toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) { ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead); return NOT_ENOUGH_DATA; } } + LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE, + "%s", fmqErrorMsg.c_str()); mStreamPowerLog.log(buffer, *transferred); return OK; } @@ -432,8 +443,12 @@ void 
StreamHalAidl::onAsyncDrainReady() { if (auto state = getState(); state == StreamDescriptor::State::DRAINING) { // Retrieve the current state together with position counters unconditionally // to ensure that the state on our side gets updated. - sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), - nullptr, true /*safeFromNonWorkerThread */); + sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr, + true /*safeFromNonWorkerThread */); + // For compatibility with HIDL behavior, apply a "soft" position reset + // after receiving the "drain ready" callback. + std::lock_guard l(mLock); + mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames; } else { ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str()); } @@ -441,15 +456,8 @@ void StreamHalAidl::onAsyncDrainReady() { void StreamHalAidl::onAsyncError() { std::lock_guard l(mLock); - if (mLastReply.state == StreamDescriptor::State::IDLE || - mLastReply.state == StreamDescriptor::State::DRAINING || - mLastReply.state == StreamDescriptor::State::TRANSFERRING) { - mLastReply.state = StreamDescriptor::State::ERROR; - ALOGW("%s: onError received", __func__); - } else { - ALOGW("%s: unexpected onError in the state %s", __func__, - toString(mLastReply.state).c_str()); - } + ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str()); + mLastReply.state = StreamDescriptor::State::ERROR; } status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused, @@ -500,9 +508,9 @@ status_t StreamHalAidl::legacyReleaseAudioPatch() { } status_t StreamHalAidl::sendCommand( - const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command, + const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command, ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply, - bool safeFromNonWorkerThread) { + bool safeFromNonWorkerThread, StatePositions* statePositions) { // TIME_CHECK(); // TODO(b/243839867) 
reenable only when optimized. if (!safeFromNonWorkerThread) { const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire); @@ -534,6 +542,23 @@ status_t StreamHalAidl::sendCommand( } mLastReply = *reply; mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs; + if (!mIsInput && reply->status == STATUS_OK) { + if (command.getTag() == StreamDescriptor::Command::standby && + reply->state == StreamDescriptor::State::STANDBY) { + mStatePositions.framesAtStandby = reply->observable.frames; + } else if (command.getTag() == StreamDescriptor::Command::flush && + reply->state == StreamDescriptor::State::IDLE) { + mStatePositions.framesAtFlushOrDrain = reply->observable.frames; + } else if (!mContext.isAsynchronous() && + command.getTag() == StreamDescriptor::Command::drain && + (reply->state == StreamDescriptor::State::IDLE || + reply->state == StreamDescriptor::State::DRAINING)) { + mStatePositions.framesAtFlushOrDrain = reply->observable.frames; + } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady' + } + if (statePositions != nullptr) { + *statePositions = mStatePositions; + } } } switch (reply->status) { @@ -549,7 +574,8 @@ status_t StreamHalAidl::sendCommand( } status_t StreamHalAidl::updateCountersIfNeeded( - ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) { + ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply, + StatePositions* statePositions) { bool doUpdate = false; { std::lock_guard l(mLock); @@ -559,10 +585,13 @@ status_t StreamHalAidl::updateCountersIfNeeded( // Since updates are paced, it is OK to perform them from any thread, they should // not interfere with I/O operations of the worker. 
return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), - reply, true /*safeFromNonWorkerThread */); + reply, true /*safeFromNonWorkerThread */, statePositions); } else if (reply != nullptr) { // provide cached reply std::lock_guard l(mLock); *reply = mLastReply; + if (statePositions != nullptr) { + *statePositions = mStatePositions; + } } return OK; } @@ -649,21 +678,27 @@ status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *writt return transfer(const_cast<void*>(buffer), bytes, written); } -status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) { +status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) { if (dspFrames == nullptr) { return BAD_VALUE; } int64_t aidlFrames = 0, aidlTimestamp = 0; - RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp)); - *dspFrames = static_cast<uint32_t>(aidlFrames); + StatePositions statePositions{}; + RETURN_STATUS_IF_ERROR( + getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions)); + // Number of audio frames since the stream has exited standby. + // See the table at the start of 'StreamHalInterface' on when it needs to reset. + int64_t mostRecentResetPoint; + if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) { + mostRecentResetPoint = statePositions.framesAtStandby; + } else { + mostRecentResetPoint = + std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain); + } + *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint; return OK; } -status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) { - // Obsolete, use getPresentationPosition. 
- return INVALID_OPERATION; -} - status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) { ALOGD("%p %s", this, __func__); TIME_CHECK(); @@ -717,13 +752,26 @@ status_t StreamOutHalAidl::getPresentationPosition(uint64_t *frames, struct time return BAD_VALUE; } int64_t aidlFrames = 0, aidlTimestamp = 0; - RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp)); - *frames = aidlFrames; + StatePositions statePositions{}; + RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions)); + // See the table at the start of 'StreamHalInterface'. + if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) { + *frames = aidlFrames; + } else { + const int64_t mostRecentResetPoint = + std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain); + *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint; + } timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND; timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND; return OK; } +status_t StreamOutHalAidl::presentationComplete() { + ALOGD("%p %s::%s", this, getClassName().c_str(), __func__); + return OK; +} + status_t StreamOutHalAidl::updateSourceMetadata( const StreamOutHalInterface::SourceMetadata& sourceMetadata) { TIME_CHECK(); diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h index b20eb00974..fff7a92ad0 100644 --- a/media/libaudiohal/impl/StreamHalAidl.h +++ b/media/libaudiohal/impl/StreamHalAidl.h @@ -194,6 +194,11 @@ class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelper // For tests. 
friend class sp<StreamHalAidl>; + struct StatePositions { + int64_t framesAtFlushOrDrain; + int64_t framesAtStandby; + }; + template<class T> static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon( const std::shared_ptr<T>& stream); @@ -212,7 +217,8 @@ class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelper status_t getLatency(uint32_t *latency); // Always returns non-negative values. - status_t getObservablePosition(int64_t *frames, int64_t *timestamp); + status_t getObservablePosition(int64_t* frames, int64_t* timestamp, + StatePositions* statePositions = nullptr); // Always returns non-negative values. status_t getHardwarePosition(int64_t *frames, int64_t *timestamp); @@ -268,11 +274,13 @@ class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelper // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call // it with `mLock` being held. status_t sendCommand( - const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command, + const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command, ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr, - bool safeFromNonWorkerThread = false); + bool safeFromNonWorkerThread = false, + StatePositions* statePositions = nullptr); status_t updateCountersIfNeeded( - ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr); + ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr, + StatePositions* statePositions = nullptr); const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream; const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt; @@ -280,6 +288,9 @@ class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelper std::mutex mLock; ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock); 
int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0; + // Cached values of observable positions when the stream last entered certain state. + // Updated for output streams only. + StatePositions mStatePositions GUARDED_BY(mLock) = {}; // mStreamPowerLog is used for audio signal power logging. StreamPowerLog mStreamPowerLog; std::atomic<pid_t> mWorkerTid = -1; @@ -308,10 +319,7 @@ class StreamOutHalAidl : public virtual StreamOutHalInterface, // Return the number of audio frames written by the audio dsp to DAC since // the output has exited standby. - status_t getRenderPosition(uint32_t *dspFrames) override; - - // Get the local time at which the next write to the audio driver will be presented. - status_t getNextWriteTimestamp(int64_t *timestamp) override; + status_t getRenderPosition(uint64_t *dspFrames) override; // Set the callback for notifying completion of non-blocking write and drain. status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override; @@ -331,12 +339,19 @@ class StreamOutHalAidl : public virtual StreamOutHalInterface, // Requests notification when data buffered by the driver/hardware has been played. status_t drain(bool earlyNotify) override; - // Notifies to the audio driver to flush the queued data. + // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must + // already be paused before calling 'flush'. status_t flush() override; // Return a recent count of the number of audio frames presented to an external observer. + // This excludes frames which have been written but are still in the pipeline. See the + // table at the start of the 'StreamOutHalInterface' for the specification of the frame + // count behavior w.r.t. 'flush', 'drain' and 'standby' operations. status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override; + // Notifies the HAL layer that the framework considers the current playback as completed. 
+ status_t presentationComplete() override; + // Called when the metadata of the stream's source has been changed. status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override; @@ -413,6 +428,7 @@ class StreamInHalAidl : public StreamInHalInterface, public StreamHalAidl { // Return a recent count of the number of audio frames received and // the clock time associated with that frame count. + // The count must not reset to zero when a PCM input enters standby. status_t getCapturePosition(int64_t *frames, int64_t *time) override; // Get active microphones diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp index 77c75dbbe3..9e22700c4c 100644 --- a/media/libaudiohal/impl/StreamHalHidl.cpp +++ b/media/libaudiohal/impl/StreamHalHidl.cpp @@ -17,6 +17,8 @@ #define LOG_TAG "StreamHalHidl" //#define LOG_NDEBUG 0 +#include <cinttypes> + #include <android/hidl/manager/1.0/IServiceManager.h> #include <hwbinder/IPCThreadState.h> #include <media/AudioParameter.h> @@ -589,32 +591,39 @@ status_t StreamOutHalHidl::prepareForWriting(size_t bufferSize) { return OK; } -status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) { +status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) { // TIME_CHECK(); // TODO(b/243839867) reenable only when optimized. 
if (mStream == 0) return NO_INIT; Result retval; + uint32_t halPosition = 0; Return<void> ret = mStream->getRenderPosition( [&](Result r, uint32_t d) { retval = r; if (retval == Result::OK) { - *dspFrames = d; + halPosition = d; } }); - return processReturn("getRenderPosition", ret, retval); -} - -status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) { - TIME_CHECK(); - if (mStream == 0) return NO_INIT; - Result retval; - Return<void> ret = mStream->getNextWriteTimestamp( - [&](Result r, int64_t t) { - retval = r; - if (retval == Result::OK) { - *timestamp = t; - } - }); - return processReturn("getRenderPosition", ret, retval); + status_t status = processReturn("getRenderPosition", ret, retval); + if (status != OK) { + return status; + } + // Maintain a 64-bit render position using the 32-bit result from the HAL. + // This delta calculation relies on the arithmetic overflow behavior + // of integers. For example (100 - 0xFFFFFFF0) = 116. + std::lock_guard l(mPositionMutex); + const auto truncatedPosition = (uint32_t)mRenderPosition; + int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow() + (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition); + + if (deltaHalPosition >= 0) { + mRenderPosition += deltaHalPosition; + } else if (mExpectRetrograde) { + mExpectRetrograde = false; + mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition); + ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition); + } + *dspFrames = mRenderPosition; + return OK; } status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) { @@ -667,9 +676,23 @@ status_t StreamOutHalHidl::drain(bool earlyNotify) { status_t StreamOutHalHidl::flush() { TIME_CHECK(); if (mStream == 0) return NO_INIT; + { + std::lock_guard l(mPositionMutex); + mRenderPosition = 0; + mExpectRetrograde = false; + } return processReturn("pause", mStream->flush()); } +status_t StreamOutHalHidl::standby() { + 
{ + std::lock_guard l(mPositionMutex); + mRenderPosition = 0; + mExpectRetrograde = false; + } + return StreamHalHidl::standby(); +} + status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) { // TIME_CHECK(); // TODO(b/243839867) reenable only when optimized. if (mStream == 0) return NO_INIT; @@ -696,6 +719,16 @@ status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct time } } +status_t StreamOutHalHidl::presentationComplete() { + // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when + // transitioning between tracks. + // The HAL resets the frame position without flush/stop being called, but calls back prior to + // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of + // mRenderPosition. + mExpectRetrograde = true; + return OK; +} + #if MAJOR_VERSION == 2 status_t StreamOutHalHidl::updateSourceMetadata( const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) { diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h index 48da633514..433e0a3327 100644 --- a/media/libaudiohal/impl/StreamHalHidl.h +++ b/media/libaudiohal/impl/StreamHalHidl.h @@ -18,10 +18,12 @@ #define ANDROID_HARDWARE_STREAM_HAL_HIDL_H #include <atomic> +#include <mutex> #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h) #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h) #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h) +#include <android-base/thread_annotations.h> #include <fmq/EventFlag.h> #include <fmq/MessageQueue.h> #include <media/audiohal/EffectHalInterface.h> @@ -119,6 +121,9 @@ class StreamHalHidl : public virtual StreamHalInterface, public CoreConversionHe class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl { public: + // Put the audio hardware input/output into standby mode (from StreamHalInterface). 
+ status_t standby() override; + // Return the frame size (number of bytes per sample) of a stream. virtual status_t getFrameSize(size_t *size); @@ -136,10 +141,7 @@ class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl { // Return the number of audio frames written by the audio dsp to DAC since // the output has exited standby. - virtual status_t getRenderPosition(uint32_t *dspFrames); - - // Get the local time at which the next write to the audio driver will be presented. - virtual status_t getNextWriteTimestamp(int64_t *timestamp); + virtual status_t getRenderPosition(uint64_t *dspFrames); // Set the callback for notifying completion of non-blocking write and drain. virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback); @@ -159,12 +161,19 @@ class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl { // Requests notification when data buffered by the driver/hardware has been played. virtual status_t drain(bool earlyNotify); - // Notifies to the audio driver to flush the queued data. + // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must + // already be paused before calling 'flush'. virtual status_t flush(); // Return a recent count of the number of audio frames presented to an external observer. + // This excludes frames which have been written but are still in the pipeline. See the + // table at the start of the 'StreamOutHalInterface' for the specification of the frame + // count behavior w.r.t. 'flush', 'drain' and 'standby' operations. virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp); + // Notifies the HAL layer that the framework considers the current playback as completed. + status_t presentationComplete() override; + // Called when the metadata of the stream's source has been changed. 
status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override; @@ -221,6 +230,10 @@ class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl { std::unique_ptr<StatusMQ> mStatusMQ; std::atomic<pid_t> mWriterClient; EventFlag* mEfGroup; + std::mutex mPositionMutex; + // Used to expand correctly the 32-bit position from the HAL. + uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0; + bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'. // Can not be constructed directly by clients. StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream); @@ -250,6 +263,7 @@ class StreamInHalHidl : public StreamInHalInterface, public StreamHalHidl { // Return a recent count of the number of audio frames received and // the clock time associated with that frame count. + // The count must not reset to zero when a PCM input enters standby. virtual status_t getCapturePosition(int64_t *frames, int64_t *time); // Get active microphones diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h index 37615afd3f..585a89501a 100644 --- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h +++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h @@ -135,6 +135,38 @@ protected: virtual ~StreamOutHalInterfaceLatencyModeCallback() = default; }; +/** + * On position reporting. There are two methods: 'getRenderPosition' and + * 'getPresentationPosition'. The first difference is that they may have a + * time offset because "render" position relates to what happens between + * ADSP and DAC, while "observable" position is relative to the external + * observer. The second difference is that 'getRenderPosition' always + * resets on standby (for all types of stream data) according to its + * definition. 
Since the original C definition of 'getRenderPosition' used + * 32-bit frame counters, and also because in complex playback chains that + * include wireless devices the "observable" position has more practical + * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface. + * The table below summarizes frame count behavior for 'getPresentationPosition': + * + * | Mixed | Direct | Direct + * | | non-offload | offload + * ==============|============|==============|============== + * PCM and | Continuous | | + * encapsulated | | | + * bitstream | | | + * --------------|------------| Continuous†| + * Bitstream | | | Reset on + * encapsulated | | | flush, drain + * into PCM | | | and standby + * | Not | | + * --------------| supported |--------------| + * Bitstream | | Reset on | + * | | flush, drain | + * | | and standby | + * | | | + * + * †- on standby, reset of the frame count happens at the framework level. + */ class StreamOutHalInterface : public virtual StreamHalInterface { public: // Return the audio hardware driver estimated latency in milliseconds. @@ -151,10 +183,7 @@ class StreamOutHalInterface : public virtual StreamHalInterface { // Return the number of audio frames written by the audio dsp to DAC since // the output has exited standby. - virtual status_t getRenderPosition(uint32_t *dspFrames) = 0; - - // Get the local time at which the next write to the audio driver will be presented. - virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0; + virtual status_t getRenderPosition(uint64_t *dspFrames) = 0; // Set the callback for notifying completion of non-blocking write and drain. // The callback must be owned by someone else. The output stream does not own it @@ -176,12 +205,19 @@ class StreamOutHalInterface : public virtual StreamHalInterface { // Requests notification when data buffered by the driver/hardware has been played. virtual status_t drain(bool earlyNotify) = 0; - // Notifies to the audio driver to flush the queued data. 
+ // Notifies to the audio driver to flush (that is, drop) the queued data. Stream must + // already be paused before calling 'flush'. virtual status_t flush() = 0; // Return a recent count of the number of audio frames presented to an external observer. + // This excludes frames which have been written but are still in the pipeline. See the + // table at the start of the 'StreamOutHalInterface' for the specification of the frame + // count behavior w.r.t. 'flush', 'drain' and 'standby' operations. virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0; + // Notifies the HAL layer that the framework considers the current playback as completed. + virtual status_t presentationComplete() = 0; + struct SourceMetadata { std::vector<playback_track_metadata_v7_t> tracks; }; @@ -270,6 +306,7 @@ class StreamInHalInterface : public virtual StreamHalInterface { // Return a recent count of the number of audio frames received and // the clock time associated with that frame count. + // The count must not reset to zero when a PCM input enters standby. virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0; // Get active microphones diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp index de60ca4921..883d41d629 100644 --- a/media/libeffects/downmix/aidl/EffectDownmix.cpp +++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp @@ -177,7 +177,10 @@ void DownmixImpl::process() { * in the life cycle of workerThread (threadLoop). 
*/ uint32_t efState = 0; - if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) { + if (!mEventFlag || + ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */, + true /* retry */) || + !(efState & mDataMqNotEmptyEf)) { LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid"; } diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp index fdc16e38d7..836e034c8a 100644 --- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp +++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp @@ -213,11 +213,12 @@ ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common, RETURN_OK_IF(mState != State::INIT); mImplContext = createContext(common); RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed"); - int version = 0; - RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION, + RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION, "FailedToGetInterfaceVersion"); mImplContext->setVersion(version); mEventFlag = mImplContext->getStatusEventFlag(); + mDataMqNotEmptyEf = + mVersion >= kReopenSupportedVersion ? 
kEventFlagDataMqNotEmpty : kEventFlagNotEmpty; if (specific.has_value()) { RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr"); @@ -231,8 +232,9 @@ ndk::ScopedAStatus DynamicsProcessingImpl::open(const Parameter::Common& common, mState = State::IDLE; mContext->dupeFmq(ret); - RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION, - "FailedToCreateWorker"); + RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS, + EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker"); + LOG(INFO) << getEffectNameWithVersion() << __func__; return ndk::ScopedAStatus::ok(); } diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp index 507da29b11..6469ab67c8 100644 --- a/media/libmediaplayerservice/fuzzer/Android.bp +++ b/media/libmediaplayerservice/fuzzer/Android.bp @@ -44,7 +44,7 @@ cc_defaults { ], fuzz_config: { cc: [ - "android-media-fuzzing-reports@google.com", + "android-media-playback+bugs@google.com", ], componentid: 155276, hotlists: [ diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp index eb9ac0f92b..bf29b1d888 100644 --- a/media/libstagefright/VideoRenderQualityTracker.cpp +++ b/media/libstagefright/VideoRenderQualityTracker.cpp @@ -302,13 +302,6 @@ void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t a mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000; } - // Now that a frame has been rendered, the previously skipped frames can be processed as skipped - // frames since the app is not skipping them to terminate playback. - for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) { - processMetricsForSkippedFrame(contentTimeUs); - } - mPendingSkippedFrameContentTimeUsList = {}; - // We can render a pending queued frame if it's the last frame of the video, so release it // immediately. 
if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) { @@ -332,9 +325,25 @@ void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t a (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs); break; } + // Process all skipped frames before the dropped frame. + while (!mPendingSkippedFrameContentTimeUsList.empty()) { + if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) { + break; + } + processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front()); + mPendingSkippedFrameContentTimeUsList.pop_front(); + } processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs, nextExpectedFrame.desiredRenderTimeUs); } + // Process all skipped frames before the rendered frame. + while (!mPendingSkippedFrameContentTimeUsList.empty()) { + if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) { + break; + } + processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front()); + mPendingSkippedFrameContentTimeUsList.pop_front(); + } processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs, nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs, freezeEventOut, judderEventOut); diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp index a2791ba77d..ff64af50eb 100644 --- a/media/libstagefright/rtsp/fuzzer/Android.bp +++ b/media/libstagefright/rtsp/fuzzer/Android.bp @@ -29,11 +29,19 @@ cc_defaults { header_libs: [ "libstagefright_rtsp_headers", ], - fuzz_config:{ + fuzz_config: { cc: [ - "android-media-fuzzing-reports@google.com", + "android-media-playback@google.com", ], componentid: 155276, + hotlists: [ + "4593311", + ], + description: "This fuzzer targets the APIs of libstagefright_rtsp", + vector: "local_privileges_required", + service_privilege: "privileged", + users: "multi_user", + fuzzed_code_usage: "shipped", }, } @@ -44,7 +52,7 @@ cc_fuzz { ], 
defaults: [ "libstagefright_rtsp_fuzzer_defaults", - ] + ], } cc_fuzz { @@ -55,7 +63,7 @@ cc_fuzz { defaults: [ "libstagefright_rtsp_fuzzer_defaults", ], - shared_libs:[ + shared_libs: [ "libandroid_net", "libbase", "libstagefright", diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp index 6590ebbce8..8724d51da6 100644 --- a/media/libstagefright/timedtext/test/fuzzer/Android.bp +++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp @@ -48,8 +48,16 @@ cc_fuzz { ], fuzz_config: { cc: [ - "android-media-fuzzing-reports@google.com", + "android-media-playback@google.com", ], - componentid: 155276, + componentid: 42195, + hotlists: [ + "4593311", + ], + description: "This fuzzer targets the APIs of libstagefright_timedtext", + vector: "local_no_privileges_required", + service_privilege: "constrained", + users: "multi_user", + fuzzed_code_usage: "shipped", }, } diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp index 1d2562e41d..202d8030a7 100644 --- a/media/module/bufferpool/2.0/AccessorImpl.cpp +++ b/media/module/bufferpool/2.0/AccessorImpl.cpp @@ -609,7 +609,7 @@ void Accessor::Impl::BufferPool::processStatusMessages() { } if (ret == false) { ALOGW("buffer status message processing failure - message : %d connection : %lld", - message.newStatus, (long long)message.connectionId); + (int)message.newStatus, (long long)message.connectionId); } } messages.clear(); diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp index b3707c8d18..cb2994ea58 100644 --- a/media/module/extractors/mp4/MPEG4Extractor.cpp +++ b/media/module/extractors/mp4/MPEG4Extractor.cpp @@ -1615,39 +1615,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->timescale = ntohl(timescale); - // 14496-12 says all ones means indeterminate, but some files seem to use - // 0 instead. 
We treat both the same. - int64_t duration = 0; - if (version == 1) { - if (mDataSource->readAt( - timescale_offset + 4, &duration, sizeof(duration)) - < (ssize_t)sizeof(duration)) { - return ERROR_IO; - } - if (duration != -1) { - duration = ntoh64(duration); - } - } else { - uint32_t duration32; - if (mDataSource->readAt( - timescale_offset + 4, &duration32, sizeof(duration32)) - < (ssize_t)sizeof(duration32)) { - return ERROR_IO; - } - if (duration32 != 0xffffffff) { - duration = ntohl(duration32); - } - } - if (duration != 0 && mLastTrack->timescale != 0) { - long double durationUs = ((long double)duration * 1000000) / mLastTrack->timescale; - if (durationUs < 0 || durationUs > INT64_MAX) { - ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits", - (long long) duration, (long long) mLastTrack->timescale); - return ERROR_MALFORMED; - } - AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs); - } - uint8_t lang[2]; off64_t lang_offset; if (version == 1) { @@ -3907,17 +3874,18 @@ status_t MPEG4Extractor::parseTrackHeader( } int32_t id; + int64_t duration; if (version == 1) { // we can get ctime value from U64_AT(&buffer[4]) // we can get mtime value from U64_AT(&buffer[12]) id = U32_AT(&buffer[20]); - // we can get duration value from U64_AT(&buffer[28]) + duration = U64_AT(&buffer[28]); } else if (version == 0) { // we can get ctime value from U32_AT(&buffer[4]) // we can get mtime value from U32_AT(&buffer[8]) id = U32_AT(&buffer[12]); - // we can get duration value from U32_AT(&buffer[20]) + duration = U32_AT(&buffer[20]); } else { return ERROR_UNSUPPORTED; } @@ -3926,6 +3894,15 @@ status_t MPEG4Extractor::parseTrackHeader( return ERROR_MALFORMED; AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id); + if (duration != 0 && mHeaderTimescale != 0) { + long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale; + if (durationUs < 0 || durationUs > INT64_MAX) { + ALOGE("cannot represent %lld * 
1000000 / %lld in 64 bits", + (long long) duration, (long long) mHeaderTimescale); + return ERROR_MALFORMED; + } + AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs); + } size_t matrixOffset = dynSize + 16; int32_t a00 = U32_AT(&buffer[matrixOffset]); diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp index 9ec770045b..3d873df026 100644 --- a/media/ndk/Android.bp +++ b/media/ndk/Android.bp @@ -192,7 +192,6 @@ cc_library { header_libs: [ "libstagefright_headers", "libmedia_headers", - "libstagefright_headers", ], shared_libs: [ diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 97c80a877d..4e5011426e 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -1922,10 +1922,11 @@ size_t AudioFlinger::getInputBufferSize(uint32_t sampleRate, audio_format_t form if (mPrimaryHardwareDev == nullptr) { return 0; } + if (mInputBufferSizeOrderedDevs.empty()) { + return 0; + } mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE; - sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice(); - std::vector<audio_channel_mask_t> channelMasks = {channelMask}; if (channelMask != AUDIO_CHANNEL_IN_MONO) { channelMasks.push_back(AUDIO_CHANNEL_IN_MONO); @@ -1955,6 +1956,22 @@ size_t AudioFlinger::getInputBufferSize(uint32_t sampleRate, audio_format_t form mHardwareStatus = AUDIO_HW_IDLE; + auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config, + size_t* bytes) -> status_t { + if (!dev) { + return BAD_VALUE; + } + status_t result = dev->getInputBufferSize(&config, bytes); + if (result == BAD_VALUE) { + // Retry with the config suggested by the HAL. 
+ result = dev->getInputBufferSize(&config, bytes); + } + if (result != OK || *bytes == 0) { + return BAD_VALUE; + } + return result; + }; + // Change parameters of the configuration each iteration until we find a // configuration that the device will support, or HAL suggests what it supports. audio_config_t config = AUDIO_CONFIG_INITIALIZER; @@ -1966,16 +1983,15 @@ size_t AudioFlinger::getInputBufferSize(uint32_t sampleRate, audio_format_t form config.sample_rate = testSampleRate; size_t bytes = 0; - audio_config_t loopConfig = config; - status_t result = dev->getInputBufferSize(&config, &bytes); - if (result == BAD_VALUE) { - // Retry with the config suggested by the HAL. - result = dev->getInputBufferSize(&config, &bytes); - } - if (result != OK || bytes == 0) { - config = loopConfig; - continue; + ret = BAD_VALUE; + for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) { + ret = getInputBufferSize(dev->hwDevice(), config, &bytes); + if (ret == OK) { + break; + } } + if (ret == BAD_VALUE) continue; + if (config.sample_rate != sampleRate || config.channel_mask != channelMask || config.format != format) { uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask); @@ -2603,12 +2619,43 @@ AudioHwDevice* AudioFlinger::loadHwModule_ll(const char *name) } mAudioHwDevs.add(handle, audioDevice); + if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) { + mInputBufferSizeOrderedDevs.insert(audioDevice); + } ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle); return audioDevice; } +// Sort AudioHwDevice to be traversed in the getInputBufferSize call in the following order: +// Primary, Usb, Bluetooth, A2DP, other modules, remote submix. 
+/* static */ +bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) { + static const std::map<std::string_view, int> kPriorities = { + { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 }, + { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 }, + { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() } + }; + + const std::string_view lhsName = lhs->moduleName(); + const std::string_view rhsName = rhs->moduleName(); + + auto lhsPriority = std::numeric_limits<int>::max() - 1; + if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) { + lhsPriority = lhsIt->second; + } + auto rhsPriority = std::numeric_limits<int>::max() - 1; + if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) { + rhsPriority = rhsIt->second; + } + + if (lhsPriority != rhsPriority) { + return lhsPriority < rhsPriority; + } + return lhsName < rhsName; +} + // ---------------------------------------------------------------------------- uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 0f75d6e44b..39462fc5f0 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -629,6 +629,10 @@ private: DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs GUARDED_BY(hardwareMutex()) {nullptr /* defValue */}; + static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs); + std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)> + mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp}; + const sp<DevicesFactoryHalInterface> mDevicesFactoryHal = DevicesFactoryHalInterface::create(); /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback; // set onFirstRef(). 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 1d7c356246..60abb58e5f 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -2690,14 +2690,17 @@ sp<IAfTrack> PlaybackThread::createTrack_l( } } - // Set DIRECT flag if current thread is DirectOutputThread. This can - // happen when the playback is rerouted to direct output thread by + // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread. + // This can happen when the playback is rerouted to direct output/offload thread by // dynamic audio policy. // Do NOT report the flag changes back to client, since the client - // doesn't explicitly request a direct flag. + // doesn't explicitly request a direct/offload flag. audio_output_flags_t trackFlags = *flags; if (mType == DIRECT) { trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT); + } else if (mType == OFFLOAD) { + trackFlags = static_cast<audio_output_flags_t>(trackFlags | + AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT); } *afTrackFlags = trackFlags; @@ -3357,9 +3360,9 @@ status_t PlaybackThread::getRenderPosition( return NO_ERROR; } else { status_t status; - uint32_t frames; + uint64_t frames = 0; status = mOutput->getRenderPosition(&frames); - *dspFrames = (size_t)frames; + *dspFrames = (uint32_t)frames; return status; } } @@ -5903,7 +5906,7 @@ PlaybackThread::mixer_state MixerThread::prepareTracks_l( vaf = v * sendLevel * (1. 
/ MAX_GAIN_INT); } - track->setFinalVolume(vrf, vlf); + track->setFinalVolume(vlf, vrf); // Delegate volume control to effect in track effect chain if needed if (chain != 0 && chain->setVolume_l(&vl, &vr)) { diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp index 76618f44b9..165ac255ec 100644 --- a/services/audioflinger/datapath/AudioStreamIn.cpp +++ b/services/audioflinger/datapath/AudioStreamIn.cpp @@ -58,7 +58,7 @@ status_t AudioStreamIn::getCapturePosition(int64_t* frames, int64_t* time) if (mHalFormatHasProportionalFrames && (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) { - // For DirectRecord reset timestamp to 0 on standby. + // For DirectRecord reset position to 0 on standby. const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ? 0 : (halPosition - mFramesReadAtStandby); // Scale from HAL sample rate to application rate. diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp index 9851f3a00d..a686ff625b 100644 --- a/services/audioflinger/datapath/AudioStreamOut.cpp +++ b/services/audioflinger/datapath/AudioStreamOut.cpp @@ -51,42 +51,17 @@ status_t AudioStreamOut::getRenderPosition(uint64_t *frames) return NO_INIT; } - uint32_t halPosition = 0; + uint64_t halPosition = 0; const status_t status = stream->getRenderPosition(&halPosition); if (status != NO_ERROR) { return status; } - - // Maintain a 64-bit render position using the 32-bit result from the HAL. - // This delta calculation relies on the arithmetic overflow behavior - // of integers. For example (100 - 0xFFFFFFF0) = 116. 
- const auto truncatedPosition = (uint32_t)mRenderPosition; - int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow() - (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition); - - if (deltaHalPosition > 0) { - mRenderPosition += deltaHalPosition; - } else if (mExpectRetrograde) { - mExpectRetrograde = false; - mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition); - } // Scale from HAL sample rate to application rate. - *frames = mRenderPosition / mRateMultiplier; + *frames = halPosition / mRateMultiplier; return status; } -// return bottom 32-bits of the render position -status_t AudioStreamOut::getRenderPosition(uint32_t *frames) -{ - uint64_t position64 = 0; - const status_t status = getRenderPosition(&position64); - if (status == NO_ERROR) { - *frames = (uint32_t)position64; - } - return status; -} - status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) { if (stream == nullptr) { @@ -101,7 +76,7 @@ status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timesp if (mHalFormatHasProportionalFrames && (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) { - // For DirectTrack reset timestamp to 0 on standby. + // For DirectTrack reset position to 0 on standby. const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ? 0 : (halPosition - mFramesWrittenAtStandby); // Scale from HAL sample rate to application rate. 
@@ -179,8 +154,6 @@ audio_config_base_t AudioStreamOut::getAudioProperties() const int AudioStreamOut::flush() { - mRenderPosition = 0; - mExpectRetrograde = false; mFramesWritten = 0; mFramesWrittenAtStandby = 0; const status_t result = stream->flush(); @@ -189,12 +162,14 @@ int AudioStreamOut::flush() int AudioStreamOut::standby() { - mRenderPosition = 0; - mExpectRetrograde = false; mFramesWrittenAtStandby = mFramesWritten; return stream->standby(); } +void AudioStreamOut::presentationComplete() { + stream->presentationComplete(); +} + ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes) { size_t bytesWritten; diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h index ea41bbab25..2c9fb3ead6 100644 --- a/services/audioflinger/datapath/AudioStreamOut.h +++ b/services/audioflinger/datapath/AudioStreamOut.h @@ -51,9 +51,6 @@ public: virtual ~AudioStreamOut(); - // Get the bottom 32-bits of the 64-bit render position. - status_t getRenderPosition(uint32_t *frames); - virtual status_t getRenderPosition(uint64_t *frames); virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp); @@ -91,21 +88,14 @@ public: virtual status_t flush(); virtual status_t standby(); - // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when - // transitioning between tracks. - // The HAL resets the frame position without flush/stop being called, but calls back prior to - // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of - // mRenderPosition. 
- virtual void presentationComplete() { mExpectRetrograde = true; } + virtual void presentationComplete(); protected: uint64_t mFramesWritten = 0; // reset by flush uint64_t mFramesWrittenAtStandby = 0; - uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete int mRateMultiplier = 1; bool mHalFormatHasProportionalFrames = false; size_t mHalFrameSize = 0; - bool mExpectRetrograde = false; // see presentationComplete }; } // namespace android diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp index f5feece8d6..1c1c1e19a9 100644 --- a/services/audioparameterparser/Android.bp +++ b/services/audioparameterparser/Android.bp @@ -35,10 +35,10 @@ cc_defaults { name: "android.hardware.audio.parameter_parser.example_defaults", defaults: [ "latest_android_hardware_audio_core_ndk_shared", + "latest_av_audio_types_aidl_ndk_shared", ], shared_libs: [ - "av-audio-types-aidl-V1-ndk", "libbase", "libbinder_ndk", ], diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp index d027564e56..747af4afcc 100644 --- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp +++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp @@ -561,6 +561,7 @@ bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool muted, audio_port_config config = {}; devicePort->toAudioPortConfig(&config); config.config_mask = AUDIO_PORT_CONFIG_GAIN; + config.gain.mode = gains[0]->getMode(); config.gain.values[0] = gainValueMb; return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR; } diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp index 5bac0627ae..3f04f69312 100644 --- a/services/mediaresourcemanager/fuzzer/Android.bp +++ b/services/mediaresourcemanager/fuzzer/Android.bp @@ -47,7 +47,7 @@ cc_defaults { ], 
fuzz_config: { cc: [ - "android-media-fuzzing-reports@google.com", + "girishshetty@google.com", ], componentid: 155276, hotlists: [ |