diff --git a/audio/core/4.0/vts/functional/Android.bp b/audio/core/4.0/vts/functional/Android.bp
index 22c5493dd8..e3b376ca88 100644
--- a/audio/core/4.0/vts/functional/Android.bp
+++ b/audio/core/4.0/vts/functional/Android.bp
@@ -29,6 +29,9 @@ cc_test {
         "libicuuc_stubdata",
         "libxml2",
     ],
+    shared_libs: [
+        "libfmq",
+    ],
     header_libs: [
         "android.hardware.audio.common.util@all-versions",
     ],
diff --git a/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp b/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp
index 836e15085f..f84e1e2dc0 100644
--- a/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp
+++ b/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp
@@ -40,6 +40,8 @@
 #include
 #include
 #include
+#include <fmq/EventFlag.h>
+#include <fmq/MessageQueue.h>
 
 #include
 
@@ -57,12 +59,15 @@ using std::vector;
 using std::list;
 
 using ::android::sp;
+using ::android::hardware::EventFlag;
 using ::android::hardware::hidl_bitfield;
 using ::android::hardware::hidl_enum_range;
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_string;
 using ::android::hardware::hidl_vec;
+using ::android::hardware::kSynchronizedReadWrite;
 using ::android::hardware::IPCThreadState;
+using ::android::hardware::MessageQueue;
 using ::android::hardware::MQDescriptorSync;
 using ::android::hardware::Return;
 using ::android::hardware::audio::V4_0::AudioDrain;
@@ -74,6 +79,7 @@ using TtyMode = ::android::hardware::audio::V4_0::IPrimaryDevice::TtyMode;
 using ::android::hardware::audio::V4_0::IDevicesFactory;
 using ::android::hardware::audio::V4_0::IStream;
 using ::android::hardware::audio::V4_0::IStreamIn;
+using ::android::hardware::audio::V4_0::MessageQueueFlagBits;
 using ::android::hardware::audio::V4_0::TimeSpec;
 using ReadParameters = ::android::hardware::audio::V4_0::IStreamIn::ReadParameters;
 using ReadStatus = ::android::hardware::audio::V4_0::IStreamIn::ReadStatus;
@@ -502,7 +508,7 @@ TEST_F(AudioPrimaryHidlTest, getParameters) {
 }
 
 //////////////////////////////////////////////////////////////////////////////
-/////////////////////////////// getMicrophones ///////////////////////////////
+/////////////////////////// get(Active)Microphones ///////////////////////////
 //////////////////////////////////////////////////////////////////////////////
 
 TEST_F(AudioPrimaryHidlTest, GetMicrophonesTest) {
@@ -510,6 +516,76 @@ TEST_F(AudioPrimaryHidlTest, GetMicrophonesTest) {
     hidl_vec<MicrophoneInfo> microphones;
     ASSERT_OK(device->getMicrophones(returnIn(res, microphones)));
     ASSERT_OK(res);
+    if (microphones.size() > 0) {
+        // When there is a microphone on the phone, try to open an input stream
+        // and query for the active microphones.
+        doc::test(
+            "Make sure getMicrophones always succeeds "
+            "and getActiveMicrophones always succeeds when recording from these microphones.");
+        AudioIoHandle ioHandle = (AudioIoHandle)AudioHandleConsts::AUDIO_IO_HANDLE_NONE;
+        AudioConfig config{};
+        config.channelMask = mkBitfield(AudioChannelMask::IN_MONO);
+        config.sampleRateHz = 8000;
+        config.format = AudioFormat::PCM_16_BIT;
+        auto flags = hidl_bitfield<AudioInputFlag>(AudioInputFlag::NONE);
+        const SinkMetadata initialMetadata = {{{AudioSource::MIC, 1 /* gain */}}};
+        EventFlag* efGroup = nullptr;
+        for (auto microphone : microphones) {
+            if (microphone.deviceAddress.device != AudioDevice::IN_BUILTIN_MIC) {
+                continue;
+            }
+            sp<IStreamIn> stream;
+            AudioConfig suggestedConfig{};
+            ASSERT_OK(device->openInputStream(ioHandle, microphone.deviceAddress, config, flags,
+                                              initialMetadata,
+                                              returnIn(res, stream, suggestedConfig)));
+            if (res != Result::OK) {
+                ASSERT_TRUE(stream == nullptr);
+                AudioConfig suggestedConfigRetry{};
+                ASSERT_OK(device->openInputStream(ioHandle, microphone.deviceAddress,
+                                                  suggestedConfig, flags, initialMetadata,
+                                                  returnIn(res, stream, suggestedConfigRetry)));
+            }
+            ASSERT_OK(res);
+            hidl_vec<MicrophoneInfo> activeMicrophones;
+            Result readRes;
+            typedef MessageQueue<ReadParameters, kSynchronizedReadWrite> CommandMQ;
+            typedef MessageQueue<uint8_t, kSynchronizedReadWrite> DataMQ;
+            std::unique_ptr<CommandMQ> commandMQ;
+            std::unique_ptr<DataMQ> dataMQ;
+            size_t frameSize = stream->getFrameSize();
+            size_t frameCount = stream->getBufferSize() / frameSize;
+            ASSERT_OK(stream->prepareForReading(
+                frameSize, frameCount, [&](auto r, auto& c, auto& d, auto&, auto&) {
+                    readRes = r;
+                    if (readRes == Result::OK) {
+                        commandMQ.reset(new CommandMQ(c));
+                        dataMQ.reset(new DataMQ(d));
+                        if (dataMQ->isValid() && dataMQ->getEventFlagWord()) {
+                            EventFlag::createEventFlag(dataMQ->getEventFlagWord(), &efGroup);
+                        }
+                    }
+                }));
+            ASSERT_OK(readRes);
+            ReadParameters params;
+            params.command = IStreamIn::ReadCommand::READ;
+            ASSERT_TRUE(commandMQ != nullptr);
+            ASSERT_TRUE(commandMQ->isValid());
+            ASSERT_TRUE(commandMQ->write(&params));
+            efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
+            uint32_t efState = 0;
+            efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);
+            if (efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY)) {
+                ASSERT_OK(stream->getActiveMicrophones(returnIn(res, activeMicrophones)));
+                ASSERT_OK(res);
+                ASSERT_NE(0U, activeMicrophones.size());
+            }
+            stream->close();
+            if (efGroup) {
+                EventFlag::deleteEventFlag(&efGroup);
+            }
+        }
+    }
 }
 
 //////////////////////////////////////////////////////////////////////////////
@@ -1117,14 +1193,6 @@ TEST_P(InputStreamTest, updateSinkMetadata) {
     ASSERT_OK(stream->updateSinkMetadata(initialMetadata));
 }
 
-TEST_P(InputStreamTest, getActiveMicrophones) {
-    doc::test("Getting active microphones should always succeed");
-    hidl_vec<MicrophoneInfo> microphones;
-    ASSERT_OK(device->getMicrophones(returnIn(res, microphones)));
-    ASSERT_OK(res);
-    ASSERT_TRUE(microphones.size() > 0);
-}
-
 //////////////////////////////////////////////////////////////////////////////
 ///////////////////////////////// StreamOut //////////////////////////////////
 //////////////////////////////////////////////////////////////////////////////
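Note on the handshake used by the new GetMicrophonesTest body: the test writes a READ command into the command queue, wakes the HAL with MessageQueueFlagBits::NOT_FULL, then blocks on NOT_EMPTY before calling getActiveMicrophones, so the query runs while capture is actually active. The sketch below is only an illustration of that pattern under stated assumptions: it replaces libfmq's MessageQueue/EventFlag with a std::deque, a mutex, and a flag word guarded by a condition variable so it builds outside the Android tree; FakeCommandQueue and its ReadCommand enum are hypothetical stand-ins, not the HIDL or libfmq API.

#include <condition_variable>
#include <cstdint>
#include <deque>
#include <iostream>
#include <mutex>
#include <thread>

// Hypothetical stand-in for IStreamIn::ReadCommand.
enum class ReadCommand : uint32_t { READ };

// Stand-in for a synchronized command queue plus its event flag: writers
// signal NOT_EMPTY once data is available, readers signal NOT_FULL when they
// expect a command. The flag word persists until consumed, which is what
// prevents lost wakeups in the real EventFlag as well.
class FakeCommandQueue {
  public:
    static constexpr uint32_t NOT_EMPTY = 1u << 0;
    static constexpr uint32_t NOT_FULL = 1u << 1;

    bool write(ReadCommand cmd) {
        std::lock_guard<std::mutex> lock(mMutex);
        mCommands.push_back(cmd);
        return true;
    }
    bool read(ReadCommand* cmd) {
        std::lock_guard<std::mutex> lock(mMutex);
        if (mCommands.empty()) return false;
        *cmd = mCommands.front();
        mCommands.pop_front();
        return true;
    }
    void wake(uint32_t bits) {  // analogue of EventFlag::wake()
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mFlagWord |= bits;
        }
        mCond.notify_all();
    }
    uint32_t wait(uint32_t bits) {  // analogue of EventFlag::wait()
        std::unique_lock<std::mutex> lock(mMutex);
        mCond.wait(lock, [&] { return (mFlagWord & bits) != 0; });
        uint32_t state = mFlagWord & bits;
        mFlagWord &= ~bits;  // consume the observed bits
        return state;
    }

  private:
    std::mutex mMutex;
    std::condition_variable mCond;
    std::deque<ReadCommand> mCommands;
    uint32_t mFlagWord = 0;
};

int main() {
    FakeCommandQueue queue;

    // "HAL" side: wait for a command, process it, then report data available.
    std::thread hal([&] {
        queue.wait(FakeCommandQueue::NOT_FULL);
        ReadCommand cmd;
        if (queue.read(&cmd) && cmd == ReadCommand::READ) {
            // ... capture audio frames here ...
            queue.wake(FakeCommandQueue::NOT_EMPTY);
        }
    });

    // "Test" side, mirroring the diff: enqueue a READ command, wake the
    // reader with NOT_FULL, then wait for NOT_EMPTY before querying state
    // (getActiveMicrophones in the real test).
    queue.write(ReadCommand::READ);
    queue.wake(FakeCommandQueue::NOT_FULL);
    uint32_t efState = queue.wait(FakeCommandQueue::NOT_EMPTY);
    if (efState & FakeCommandQueue::NOT_EMPTY) {
        std::cout << "capture active; safe to query active microphones\n";
    }
    hal.join();
    return 0;
}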