diff --git a/neuralnetworks/1.0/vts/functional/Android.bp b/neuralnetworks/1.0/vts/functional/Android.bp
index 0af7f79860..3e9d5f7e83 100644
--- a/neuralnetworks/1.0/vts/functional/Android.bp
+++ b/neuralnetworks/1.0/vts/functional/Android.bp
@@ -40,10 +40,11 @@ cc_library_static {
     ],
 }
 
-cc_defaults {
-    name: "VtsHalNeuralNetworksV1_0TargetTestDefaults",
+cc_test {
+    name: "VtsHalNeuralnetworksV1_0TargetTest",
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
+        "BasicTests.cpp",
         "TestAssertions.cpp",
         "ValidateModel.cpp",
         "ValidateRequest.cpp",
@@ -64,33 +65,11 @@ cc_defaults {
         "libneuralnetworks_utils",
         "VtsHalNeuralNetworksV1_0_utils",
     ],
+    whole_static_libs: [
+        "neuralnetworks_generated_V1_0_example",
+    ],
     header_libs: [
         "libneuralnetworks_headers",
     ],
     test_suites: ["general-tests"],
 }
-
-cc_test {
-    name: "VtsHalNeuralnetworksV1_0TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-    ],
-}
-
-cc_test {
-    name: "PresubmitHalNeuralnetworksV1_0TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-    ],
-    cflags: [
-        "-DPRESUBMIT_NOT_VTS",
-    ],
-}
diff --git a/neuralnetworks/1.0/vts/functional/BasicTests.cpp b/neuralnetworks/1.0/vts/functional/BasicTests.cpp
index 551ea6788a..cc44c9efe1 100644
--- a/neuralnetworks/1.0/vts/functional/BasicTests.cpp
+++ b/neuralnetworks/1.0/vts/functional/BasicTests.cpp
@@ -21,17 +21,17 @@
 namespace android::hardware::neuralnetworks::V1_0::vts::functional {
 
 // create device test
-TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
+TEST_P(NeuralnetworksHidlTest, CreateDevice) {}
 
 // status test
-TEST_F(NeuralnetworksHidlTest, StatusTest) {
+TEST_P(NeuralnetworksHidlTest, StatusTest) {
     Return status = kDevice->getStatus();
     ASSERT_TRUE(status.isOk());
     EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast(status));
 }
 
 // initialization
-TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
+TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
     Return ret = kDevice->getCapabilities([](ErrorStatus status, const Capabilities& capabilities) {
         EXPECT_EQ(ErrorStatus::NONE, status);
diff --git a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
index 1948c053a9..595ad85633 100644
--- a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
@@ -148,6 +148,20 @@ void Execute(const sp& device, const TestModel& testModel) {
     checkResults(testModel, outputs);
 }
 
+void GeneratedTestBase::SetUp() {
+    testing::TestWithParam::SetUp();
+    ASSERT_NE(kDevice, nullptr);
+}
+
+std::vector getNamedModels(const FilterFn& filter) {
+    return TestModelManager::get().getTestModels(filter);
+}
+
+std::string printGeneratedTest(const testing::TestParamInfo& info) {
+    const auto& [namedDevice, namedModel] = info.param;
+    return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
+}
+
 // Tag for the generated tests
 class GeneratedTest : public GeneratedTestBase {};
diff --git a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
index 10e46b7d2c..f230a028f3 100644
--- a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
+++ b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
@@ -18,29 +18,38 @@
 #define 
ANDROID_HARDWARE_NEURALNETWORKS_V1_0_GENERATED_TEST_HARNESS_H #include +#include #include "TestHarness.h" #include "VtsHalNeuralnetworks.h" namespace android::hardware::neuralnetworks::V1_0::vts::functional { -class GeneratedTestBase - : public NeuralnetworksHidlTest, - public testing::WithParamInterface { +using NamedModel = Named; +using GeneratedTestParam = std::tuple; + +class GeneratedTestBase : public testing::TestWithParam { protected: - const test_helper::TestModel& kTestModel = *GetParam().second; + void SetUp() override; + const sp kDevice = getData(std::get(GetParam())); + const test_helper::TestModel& kTestModel = *getData(std::get(GetParam())); }; -#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ - INSTANTIATE_TEST_SUITE_P( \ - TestGenerated, TestSuite, \ - testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \ - [](const auto& info) { return info.param.first; }) +using FilterFn = std::function; +std::vector getNamedModels(const FilterFn& filter); + +std::string printGeneratedTest(const testing::TestParamInfo& info); + +#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ + INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \ + testing::Combine(testing::ValuesIn(getNamedDevices()), \ + testing::ValuesIn(getNamedModels(filter))), \ + printGeneratedTest) // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp. // TODO: Clean up the hierarchy for ValidationTest. class ValidationTest : public GeneratedTestBase {}; -Model createModel(const ::test_helper::TestModel& testModel); +Model createModel(const test_helper::TestModel& testModel); } // namespace android::hardware::neuralnetworks::V1_0::vts::functional diff --git a/neuralnetworks/1.0/vts/functional/Utils.cpp b/neuralnetworks/1.0/vts/functional/Utils.cpp index 307003cf4a..5b630fd7a6 100644 --- a/neuralnetworks/1.0/vts/functional/Utils.cpp +++ b/neuralnetworks/1.0/vts/functional/Utils.cpp @@ -117,6 +117,13 @@ std::vector getOutputBuffers(const Request& request) { return outputBuffers; } +std::string gtestCompliantName(std::string name) { + // gtest test names must only contain alphanumeric characters + std::replace_if( + name.begin(), name.end(), [](char c) { return !std::isalnum(c); }, '_'); + return name; +} + } // namespace android::hardware::neuralnetworks namespace android::hardware::neuralnetworks::V1_0 { diff --git a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp index 20b4565a47..cb2225025b 100644 --- a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp +++ b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp @@ -18,11 +18,13 @@ #include "VtsHalNeuralnetworks.h" #include "1.0/Callbacks.h" -#include "1.0/Utils.h" #include "GeneratedTestHarness.h" #include "TestHarness.h" #include +#include +#include +#include namespace android::hardware::neuralnetworks::V1_0::vts::functional { @@ -76,34 +78,39 @@ void createPreparedModel(const sp& device, const Model& model, ASSERT_NE(nullptr, preparedModel->get()); } -// A class for test environment setup -NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() { - // This has to return a "new" object because it is freed inside - // testing::AddGlobalTestEnvironment when the gtest is being torn down - static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment(); - return instance; -} - -void NeuralnetworksHidlEnvironment::registerTestServices() { - registerTestService(); -} - -// The main test class 
for NEURALNETWORK HIDL HAL. void NeuralnetworksHidlTest::SetUp() { - testing::VtsHalHidlTargetTestBase::SetUp(); - -#ifdef PRESUBMIT_NOT_VTS - const std::string name = - NeuralnetworksHidlEnvironment::getInstance()->getServiceName(); - const std::string sampleDriver = "sample-"; - if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) { - GTEST_SKIP(); - } -#endif // PRESUBMIT_NOT_VTS - - ASSERT_NE(nullptr, kDevice.get()); + testing::TestWithParam::SetUp(); + ASSERT_NE(kDevice, nullptr); } +static NamedDevice makeNamedDevice(const std::string& name) { + return {name, IDevice::getService(name)}; +} + +static std::vector getNamedDevicesImpl() { + // Retrieves the name of all service instances that implement IDevice, + // including any Lazy HAL instances. + const std::vector names = hardware::getAllHalInstanceNames(IDevice::descriptor); + + // Get a handle to each device and pair it with its name. + std::vector namedDevices; + namedDevices.reserve(names.size()); + std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice); + return namedDevices; +} + +const std::vector& getNamedDevices() { + const static std::vector devices = getNamedDevicesImpl(); + return devices; +} + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info) { + return gtestCompliantName(getName(info.param)); +} + +INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest); + // Forward declaration from ValidateModel.cpp void validateModel(const sp& device, const Model& model); // Forward declaration from ValidateRequest.cpp @@ -130,14 +137,3 @@ TEST_P(ValidationTest, Test) { INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; }); } // namespace android::hardware::neuralnetworks::V1_0::vts::functional - -using android::hardware::neuralnetworks::V1_0::vts::functional::NeuralnetworksHidlEnvironment; - -int main(int argc, char** argv) { - testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance()); - testing::InitGoogleTest(&argc, argv); - NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv); - - int status = RUN_ALL_TESTS(); - return status; -} diff --git a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h index 48dc23774f..17f4613ac6 100644 --- a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h +++ b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h @@ -17,40 +17,34 @@ #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H #define ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H +#include "1.0/Utils.h" + #include #include - -#include -#include - -#include #include +#include + namespace android::hardware::neuralnetworks::V1_0::vts::functional { -// A class for test environment setup -class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment); - NeuralnetworksHidlEnvironment() = default; - - public: - static NeuralnetworksHidlEnvironment* getInstance(); - void registerTestServices() override; -}; - -// The main test class for NEURALNETWORKS HIDL HAL. 
-class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest); - - public: - NeuralnetworksHidlTest() = default; - void SetUp() override; +using NamedDevice = Named>; +using NeuralnetworksHidlTestParam = NamedDevice; +class NeuralnetworksHidlTest : public testing::TestWithParam { protected: - const sp kDevice = testing::VtsHalHidlTargetTestBase::getService( - NeuralnetworksHidlEnvironment::getInstance()); + void SetUp() override; + const sp kDevice = getData(GetParam()); }; +const std::vector& getNamedDevices(); + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info); + +#define INSTANTIATE_DEVICE_TEST(TestSuite) \ + INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \ + printNeuralnetworksHidlTest) + // Create an IPreparedModel object. If the model cannot be prepared, // "preparedModel" will be nullptr instead. void createPreparedModel(const sp& device, const Model& model, diff --git a/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h b/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h index 1ce751c54c..6d4534cb4e 100644 --- a/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h +++ b/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h @@ -21,13 +21,15 @@ #include #include #include +#include +#include #include #include "TestHarness.h" namespace android::hardware::neuralnetworks { // Create HIDL Request from the TestModel struct. -V1_0::Request createRequest(const ::test_helper::TestModel& testModel); +V1_0::Request createRequest(const test_helper::TestModel& testModel); // After execution, copy out output results from the output memory pool. std::vector<::test_helper::TestBuffer> getOutputBuffers(const V1_0::Request& request); @@ -51,6 +53,21 @@ inline uint32_t hidl_vec_push_back(hidl_vec* vec, const Type& value) { return index; } +template +using Named = std::pair; + +template +const std::string& getName(const Named& namedData) { + return namedData.first; +} + +template +const Type& getData(const Named& namedData) { + return namedData.second; +} + +std::string gtestCompliantName(std::string name); + } // namespace android::hardware::neuralnetworks namespace android::hardware::neuralnetworks::V1_0 { diff --git a/neuralnetworks/1.1/vts/functional/Android.bp b/neuralnetworks/1.1/vts/functional/Android.bp index c197e6de66..4e85355838 100644 --- a/neuralnetworks/1.1/vts/functional/Android.bp +++ b/neuralnetworks/1.1/vts/functional/Android.bp @@ -14,10 +14,11 @@ // limitations under the License. 
// -cc_defaults { - name: "VtsHalNeuralNetworksV1_1TargetTestDefaults", +cc_test { + name: "VtsHalNeuralnetworksV1_1TargetTest", defaults: ["VtsHalTargetTestDefaults"], srcs: [ + "BasicTests.cpp", "TestAssertions.cpp", "ValidateModel.cpp", "ValidateRequest.cpp", @@ -39,35 +40,12 @@ cc_defaults { "libneuralnetworks_utils", "VtsHalNeuralNetworksV1_0_utils", ], + whole_static_libs: [ + "neuralnetworks_generated_V1_0_example", + "neuralnetworks_generated_V1_1_example", + ], header_libs: [ "libneuralnetworks_headers", ], test_suites: ["general-tests"], } - -cc_test { - name: "VtsHalNeuralnetworksV1_1TargetTest", - defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"], - srcs: [ - "BasicTests.cpp", - ], - whole_static_libs: [ - "neuralnetworks_generated_V1_0_example", - "neuralnetworks_generated_V1_1_example", - ], -} - -cc_test { - name: "PresubmitHalNeuralnetworksV1_1TargetTest", - defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"], - srcs: [ - "BasicTests.cpp", - ], - whole_static_libs: [ - "neuralnetworks_generated_V1_0_example", - "neuralnetworks_generated_V1_1_example", - ], - cflags: [ - "-DPRESUBMIT_NOT_VTS", - ], -} diff --git a/neuralnetworks/1.1/vts/functional/BasicTests.cpp b/neuralnetworks/1.1/vts/functional/BasicTests.cpp index 2791e804c3..44836f0c95 100644 --- a/neuralnetworks/1.1/vts/functional/BasicTests.cpp +++ b/neuralnetworks/1.1/vts/functional/BasicTests.cpp @@ -24,17 +24,17 @@ using V1_0::DeviceStatus; using V1_0::ErrorStatus; // create device test -TEST_F(NeuralnetworksHidlTest, CreateDevice) {} +TEST_P(NeuralnetworksHidlTest, CreateDevice) {} // status test -TEST_F(NeuralnetworksHidlTest, StatusTest) { +TEST_P(NeuralnetworksHidlTest, StatusTest) { Return status = kDevice->getStatus(); ASSERT_TRUE(status.isOk()); EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast(status)); } // initialization -TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) { +TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) { Return ret = kDevice->getCapabilities_1_1([](ErrorStatus status, const Capabilities& capabilities) { EXPECT_EQ(ErrorStatus::NONE, status); diff --git a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp index fddfc2bb28..7a929d6063 100644 --- a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp +++ b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp @@ -156,6 +156,20 @@ void Execute(const sp& device, const TestModel& testModel) { checkResults(testModel, outputs); } +void GeneratedTestBase::SetUp() { + testing::TestWithParam::SetUp(); + ASSERT_NE(kDevice, nullptr); +} + +std::vector getNamedModels(const FilterFn& filter) { + return TestModelManager::get().getTestModels(filter); +} + +std::string printGeneratedTest(const testing::TestParamInfo& info) { + const auto& [namedDevice, namedModel] = info.param; + return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel)); +} + // Tag for the generated tests class GeneratedTest : public GeneratedTestBase {}; diff --git a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h index 273d1ec66a..cf449ea42d 100644 --- a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h +++ b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h @@ -18,29 +18,38 @@ #define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_GENERATED_TEST_HARNESS_H #include +#include "1.0/Utils.h" #include "TestHarness.h" #include "VtsHalNeuralnetworks.h" namespace 
android::hardware::neuralnetworks::V1_1::vts::functional { -class GeneratedTestBase - : public NeuralnetworksHidlTest, - public testing::WithParamInterface { +using NamedModel = Named; +using GeneratedTestParam = std::tuple; + +class GeneratedTestBase : public testing::TestWithParam { protected: - const test_helper::TestModel& kTestModel = *GetParam().second; + void SetUp() override; + const sp kDevice = getData(std::get(GetParam())); + const test_helper::TestModel& kTestModel = *getData(std::get(GetParam())); }; -#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ - INSTANTIATE_TEST_SUITE_P( \ - TestGenerated, TestSuite, \ - testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \ - [](const auto& info) { return info.param.first; }) +using FilterFn = std::function; +std::vector getNamedModels(const FilterFn& filter); + +std::string printGeneratedTest(const testing::TestParamInfo& info); + +#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ + INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \ + testing::Combine(testing::ValuesIn(getNamedDevices()), \ + testing::ValuesIn(getNamedModels(filter))), \ + printGeneratedTest) // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp. // TODO: Clean up the hierarchy for ValidationTest. class ValidationTest : public GeneratedTestBase {}; -Model createModel(const ::test_helper::TestModel& testModel); +Model createModel(const test_helper::TestModel& testModel); } // namespace android::hardware::neuralnetworks::V1_1::vts::functional diff --git a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp index d53d43e0b6..d56d40b2ba 100644 --- a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp +++ b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp @@ -17,13 +17,15 @@ #define LOG_TAG "neuralnetworks_hidl_hal_test" #include "VtsHalNeuralnetworks.h" +#include +#include +#include +#include #include "1.0/Callbacks.h" #include "1.0/Utils.h" #include "GeneratedTestHarness.h" #include "TestHarness.h" -#include - namespace android::hardware::neuralnetworks::V1_1::vts::functional { using V1_0::ErrorStatus; @@ -79,34 +81,39 @@ void createPreparedModel(const sp& device, const Model& model, ASSERT_NE(nullptr, preparedModel->get()); } -// A class for test environment setup -NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() { - // This has to return a "new" object because it is freed inside - // testing::AddGlobalTestEnvironment when the gtest is being torn down - static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment(); - return instance; -} - -void NeuralnetworksHidlEnvironment::registerTestServices() { - registerTestService(); -} - -// The main test class for NEURALNETWORK HIDL HAL. 
void NeuralnetworksHidlTest::SetUp() { - testing::VtsHalHidlTargetTestBase::SetUp(); - -#ifdef PRESUBMIT_NOT_VTS - const std::string name = - NeuralnetworksHidlEnvironment::getInstance()->getServiceName(); - const std::string sampleDriver = "sample-"; - if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) { - GTEST_SKIP(); - } -#endif // PRESUBMIT_NOT_VTS - - ASSERT_NE(nullptr, kDevice.get()); + testing::TestWithParam::SetUp(); + ASSERT_NE(kDevice, nullptr); } +static NamedDevice makeNamedDevice(const std::string& name) { + return {name, IDevice::getService(name)}; +} + +static std::vector getNamedDevicesImpl() { + // Retrieves the name of all service instances that implement IDevice, + // including any Lazy HAL instances. + const std::vector names = hardware::getAllHalInstanceNames(IDevice::descriptor); + + // Get a handle to each device and pair it with its name. + std::vector namedDevices; + namedDevices.reserve(names.size()); + std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice); + return namedDevices; +} + +const std::vector& getNamedDevices() { + const static std::vector devices = getNamedDevicesImpl(); + return devices; +} + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info) { + return gtestCompliantName(getName(info.param)); +} + +INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest); + // Forward declaration from ValidateModel.cpp void validateModel(const sp& device, const Model& model); // Forward declaration from ValidateRequest.cpp @@ -133,14 +140,3 @@ TEST_P(ValidationTest, Test) { INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; }); } // namespace android::hardware::neuralnetworks::V1_1::vts::functional - -using android::hardware::neuralnetworks::V1_1::vts::functional::NeuralnetworksHidlEnvironment; - -int main(int argc, char** argv) { - testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance()); - testing::InitGoogleTest(&argc, argv); - NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv); - - int status = RUN_ALL_TESTS(); - return status; -} diff --git a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h index 9d6194a143..e879d843d2 100644 --- a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h +++ b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h @@ -17,41 +17,33 @@ #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H #define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H -#include +#include #include #include - -#include -#include - -#include #include +#include +#include "1.0/Utils.h" namespace android::hardware::neuralnetworks::V1_1::vts::functional { -// A class for test environment setup -class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment); - NeuralnetworksHidlEnvironment() = default; - - public: - static NeuralnetworksHidlEnvironment* getInstance(); - void registerTestServices() override; -}; - -// The main test class for NEURALNETWORKS HIDL HAL. 
-class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest); - - public: - NeuralnetworksHidlTest() = default; - void SetUp() override; +using NamedDevice = Named>; +using NeuralnetworksHidlTestParam = NamedDevice; +class NeuralnetworksHidlTest : public testing::TestWithParam { protected: - const sp kDevice = testing::VtsHalHidlTargetTestBase::getService( - NeuralnetworksHidlEnvironment::getInstance()); + void SetUp() override; + const sp kDevice = getData(GetParam()); }; +const std::vector& getNamedDevices(); + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info); + +#define INSTANTIATE_DEVICE_TEST(TestSuite) \ + INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \ + printNeuralnetworksHidlTest) + // Create an IPreparedModel object. If the model cannot be prepared, // "preparedModel" will be nullptr instead. void createPreparedModel(const sp& device, const Model& model, diff --git a/neuralnetworks/1.2/vts/functional/Android.bp b/neuralnetworks/1.2/vts/functional/Android.bp index 40ca809612..3ba8879ae9 100644 --- a/neuralnetworks/1.2/vts/functional/Android.bp +++ b/neuralnetworks/1.2/vts/functional/Android.bp @@ -14,16 +14,19 @@ // limitations under the License. // -cc_defaults { - name: "VtsHalNeuralNetworksV1_2TargetTestDefaults", +cc_test { + name: "VtsHalNeuralnetworksV1_2TargetTest", defaults: ["VtsHalTargetTestDefaults"], srcs: [ + "BasicTests.cpp", + "Callbacks.cpp", + "CompilationCachingTests.cpp", + "GeneratedTestHarness.cpp", "TestAssertions.cpp", "ValidateModel.cpp", "ValidateRequest.cpp", + "ValidateBurst.cpp", "VtsHalNeuralnetworks.cpp", - "Callbacks.cpp", - "GeneratedTestHarness.cpp", ], local_include_dirs: ["include"], shared_libs: [ @@ -42,41 +45,13 @@ cc_defaults { "libneuralnetworks_utils", "VtsHalNeuralNetworksV1_0_utils", ], + whole_static_libs: [ + "neuralnetworks_generated_V1_0_example", + "neuralnetworks_generated_V1_1_example", + "neuralnetworks_generated_V1_2_example", + ], header_libs: [ "libneuralnetworks_headers", ], test_suites: ["general-tests"], } - -cc_test { - name: "VtsHalNeuralnetworksV1_2TargetTest", - defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"], - srcs: [ - "BasicTests.cpp", - "CompilationCachingTests.cpp", - "ValidateBurst.cpp", - ], - whole_static_libs: [ - "neuralnetworks_generated_V1_0_example", - "neuralnetworks_generated_V1_1_example", - "neuralnetworks_generated_V1_2_example", - ], -} - -cc_test { - name: "PresubmitHalNeuralnetworksV1_2TargetTest", - defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"], - srcs: [ - "BasicTests.cpp", - "CompilationCachingTests.cpp", - "ValidateBurst.cpp", - ], - whole_static_libs: [ - "neuralnetworks_generated_V1_0_example", - "neuralnetworks_generated_V1_1_example", - "neuralnetworks_generated_V1_2_example", - ], - cflags: [ - "-DPRESUBMIT_NOT_VTS", - ], -} diff --git a/neuralnetworks/1.2/vts/functional/BasicTests.cpp b/neuralnetworks/1.2/vts/functional/BasicTests.cpp index 8f95b96c52..8e82c5376e 100644 --- a/neuralnetworks/1.2/vts/functional/BasicTests.cpp +++ b/neuralnetworks/1.2/vts/functional/BasicTests.cpp @@ -25,17 +25,17 @@ using V1_0::ErrorStatus; using V1_0::PerformanceInfo; // create device test -TEST_F(NeuralnetworksHidlTest, CreateDevice) {} +TEST_P(NeuralnetworksHidlTest, CreateDevice) {} // status test -TEST_F(NeuralnetworksHidlTest, StatusTest) { +TEST_P(NeuralnetworksHidlTest, StatusTest) { Return status = kDevice->getStatus(); 
ASSERT_TRUE(status.isOk()); EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast(status)); } // initialization -TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) { +TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) { using OperandPerformance = Capabilities::OperandPerformance; Return ret = kDevice->getCapabilities_1_2([](ErrorStatus status, const Capabilities& capabilities) { @@ -60,7 +60,7 @@ TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) { } // device version test -TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) { +TEST_P(NeuralnetworksHidlTest, GetDeviceVersionStringTest) { Return ret = kDevice->getVersionString([](ErrorStatus status, const hidl_string& version) { EXPECT_EQ(ErrorStatus::NONE, status); @@ -70,7 +70,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) { } // device type test -TEST_F(NeuralnetworksHidlTest, GetDeviceTypeTest) { +TEST_P(NeuralnetworksHidlTest, GetDeviceTypeTest) { Return ret = kDevice->getType([](ErrorStatus status, DeviceType type) { EXPECT_EQ(ErrorStatus::NONE, status); EXPECT_TRUE(type == DeviceType::OTHER || type == DeviceType::CPU || @@ -80,7 +80,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceTypeTest) { } // device supported extensions test -TEST_F(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) { +TEST_P(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) { Return ret = kDevice->getSupportedExtensions( [](ErrorStatus status, const hidl_vec& extensions) { EXPECT_EQ(ErrorStatus::NONE, status); @@ -101,7 +101,7 @@ TEST_F(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) { } // getNumberOfCacheFilesNeeded test -TEST_F(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) { +TEST_P(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) { Return ret = kDevice->getNumberOfCacheFilesNeeded( [](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) { EXPECT_EQ(ErrorStatus::NONE, status); diff --git a/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp b/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp index bb46e06d6f..2130a76b75 100644 --- a/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp +++ b/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp @@ -17,6 +17,7 @@ #define LOG_TAG "neuralnetworks_hidl_hal_test" #include +#include #include #include #include @@ -37,11 +38,11 @@ // Forward declaration of the mobilenet generated test models in // frameworks/ml/nn/runtime/test/generated/. 
namespace generated_tests::mobilenet_224_gender_basic_fixed { -const ::test_helper::TestModel& get_test_model(); +const test_helper::TestModel& get_test_model(); } // namespace generated_tests::mobilenet_224_gender_basic_fixed namespace generated_tests::mobilenet_quantized { -const ::test_helper::TestModel& get_test_model(); +const test_helper::TestModel& get_test_model(); } // namespace generated_tests::mobilenet_quantized namespace android::hardware::neuralnetworks::V1_2::vts::functional { @@ -53,13 +54,13 @@ using V1_1::ExecutionPreference; namespace float32_model { -constexpr auto get_test_model = ::generated_tests::mobilenet_224_gender_basic_fixed::get_test_model; +constexpr auto get_test_model = generated_tests::mobilenet_224_gender_basic_fixed::get_test_model; } // namespace float32_model namespace quant8_model { -constexpr auto get_test_model = ::generated_tests::mobilenet_quantized::get_test_model; +constexpr auto get_test_model = generated_tests::mobilenet_quantized::get_test_model; } // namespace quant8_model @@ -217,12 +218,13 @@ TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) { } // namespace // Tag for the compilation caching tests. -class CompilationCachingTestBase : public NeuralnetworksHidlTest { +class CompilationCachingTestBase : public testing::Test { protected: - CompilationCachingTestBase(OperandType type) : kOperandType(type) {} + CompilationCachingTestBase(sp device, OperandType type) + : kDevice(std::move(device)), kOperandType(type) {} void SetUp() override { - NeuralnetworksHidlTest::SetUp(); + testing::Test::SetUp(); ASSERT_NE(kDevice.get(), nullptr); // Create cache directory. The cache directory and a temporary cache file is always created @@ -274,7 +276,7 @@ class CompilationCachingTestBase : public NeuralnetworksHidlTest { }; nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS); } - NeuralnetworksHidlTest::TearDown(); + testing::Test::TearDown(); } // Model and examples creators. According to kOperandType, the following methods will return @@ -398,16 +400,21 @@ class CompilationCachingTestBase : public NeuralnetworksHidlTest { uint32_t mNumDataCache; uint32_t mIsCachingSupported; + const sp kDevice; // The primary data type of the testModel. const OperandType kOperandType; }; +using CompilationCachingTestParam = std::tuple; + // A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first // pass running with float32 models and the second pass running with quant8 models. class CompilationCachingTest : public CompilationCachingTestBase, - public testing::WithParamInterface { + public testing::WithParamInterface { protected: - CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {} + CompilationCachingTest() + : CompilationCachingTestBase(getData(std::get(GetParam())), + std::get(GetParam())) {} }; TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) { @@ -1192,16 +1199,30 @@ TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) { } } +static const auto kNamedDeviceChoices = testing::ValuesIn(getNamedDevices()); static const auto kOperandTypeChoices = testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM); -INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices); +std::string printCompilationCachingTest( + const testing::TestParamInfo& info) { + const auto& [namedDevice, operandType] = info.param; + const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? 
"float32" : "quant8"); + return gtestCompliantName(getName(namedDevice) + "_" + type); +} + +INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, + testing::Combine(kNamedDeviceChoices, kOperandTypeChoices), + printCompilationCachingTest); + +using CompilationCachingSecurityTestParam = std::tuple; class CompilationCachingSecurityTest : public CompilationCachingTestBase, - public testing::WithParamInterface> { + public testing::WithParamInterface { protected: - CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {} + CompilationCachingSecurityTest() + : CompilationCachingTestBase(getData(std::get(GetParam())), + std::get(GetParam())) {} void SetUp() { CompilationCachingTestBase::SetUp(); @@ -1291,7 +1312,7 @@ class CompilationCachingSecurityTest } } - const uint32_t kSeed = std::get<1>(GetParam()); + const uint32_t kSeed = std::get(GetParam()); std::mt19937 generator; }; @@ -1338,7 +1359,16 @@ TEST_P(CompilationCachingSecurityTest, WrongToken) { }); } +std::string printCompilationCachingSecurityTest( + const testing::TestParamInfo& info) { + const auto& [namedDevice, operandType, seed] = info.param; + const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8"); + return gtestCompliantName(getName(namedDevice) + "_" + type + "_" + std::to_string(seed)); +} + INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest, - testing::Combine(kOperandTypeChoices, testing::Range(0U, 10U))); + testing::Combine(kNamedDeviceChoices, kOperandTypeChoices, + testing::Range(0U, 10U)), + printCompilationCachingSecurityTest); } // namespace android::hardware::neuralnetworks::V1_2::vts::functional diff --git a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp index a2d37920d5..2beec983e0 100644 --- a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp +++ b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp @@ -190,7 +190,7 @@ static Return ExecutePreparedModel(const sp& prepar } static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst( const sp& preparedModel) { - return ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true); + return android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true); } enum class Executor { ASYNC, SYNC, BURST }; @@ -371,6 +371,20 @@ void Execute(const sp& device, const TestModel& testModel, bool testDyn EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape); } +void GeneratedTestBase::SetUp() { + testing::TestWithParam::SetUp(); + ASSERT_NE(kDevice, nullptr); +} + +std::vector getNamedModels(const FilterFn& filter) { + return TestModelManager::get().getTestModels(filter); +} + +std::string printGeneratedTest(const testing::TestParamInfo& info) { + const auto& [namedDevice, namedModel] = info.param; + return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel)); +} + // Tag for the generated tests class GeneratedTest : public GeneratedTestBase {}; diff --git a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h index 0b8b917b0a..dfc980c169 100644 --- a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h +++ b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h @@ -22,34 +22,43 @@ #include #include #include +#include "1.0/Utils.h" #include "TestHarness.h" #include "VtsHalNeuralnetworks.h" namespace 
android::hardware::neuralnetworks::V1_2::vts::functional { -class GeneratedTestBase - : public NeuralnetworksHidlTest, - public testing::WithParamInterface { +using NamedModel = Named; +using GeneratedTestParam = std::tuple; + +class GeneratedTestBase : public testing::TestWithParam { protected: - const test_helper::TestModel& kTestModel = *GetParam().second; + void SetUp() override; + const sp kDevice = getData(std::get(GetParam())); + const test_helper::TestModel& kTestModel = *getData(std::get(GetParam())); }; -#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ - INSTANTIATE_TEST_SUITE_P( \ - TestGenerated, TestSuite, \ - testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \ - [](const auto& info) { return info.param.first; }) +using FilterFn = std::function; +std::vector getNamedModels(const FilterFn& filter); + +std::string printGeneratedTest(const testing::TestParamInfo& info); + +#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \ + INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite, \ + testing::Combine(testing::ValuesIn(getNamedDevices()), \ + testing::ValuesIn(getNamedModels(filter))), \ + printGeneratedTest) // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp. // TODO: Clean up the hierarchy for ValidationTest. class ValidationTest : public GeneratedTestBase {}; -Model createModel(const ::test_helper::TestModel& testModel); +Model createModel(const test_helper::TestModel& testModel); void PrepareModel(const sp& device, const Model& model, sp* preparedModel); void EvaluatePreparedModel(const sp& preparedModel, - const ::test_helper::TestModel& testModel, bool testDynamicOutputShape); + const test_helper::TestModel& testModel, bool testDynamicOutputShape); } // namespace android::hardware::neuralnetworks::V1_2::vts::functional diff --git a/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp b/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp index c02d0206e2..1d4493d208 100644 --- a/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp +++ b/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp @@ -262,7 +262,7 @@ static void validateBurstSerialization(const sp& preparedModel, })); // serialize the request - const auto serialized = ::android::nn::serialize(request, MeasureTiming::YES, slots); + const auto serialized = android::nn::serialize(request, MeasureTiming::YES, slots); // validations removeDatumTest(sender.get(), receiver.get(), serialized); @@ -299,7 +299,7 @@ static void validateBurstFmqLength(const sp& preparedModel, // skip test if regular burst output isn't useful for testing a failure // caused by having too small of a length for the result FMQ const std::vector serialized = - ::android::nn::serialize(statusRegular, outputShapesRegular, timingRegular); + android::nn::serialize(statusRegular, outputShapesRegular, timingRegular); if (statusRegular != ErrorStatus::NONE || serialized.size() <= kExecutionBurstChannelSmallLength) { return; diff --git a/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp b/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp index 5c52de5834..f25ee62617 100644 --- a/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp +++ b/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp @@ -94,7 +94,7 @@ static void validate(const sp& preparedModel, const std::string& // create burst std::shared_ptr<::android::nn::ExecutionBurstController> burst = - ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true); + 
android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true); ASSERT_NE(nullptr, burst.get()); // create memory keys diff --git a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp index aa4f1f20b8..4fbd0e270f 100644 --- a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp +++ b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp @@ -17,13 +17,15 @@ #define LOG_TAG "neuralnetworks_hidl_hal_test" #include "VtsHalNeuralnetworks.h" +#include +#include +#include +#include #include "1.0/Callbacks.h" #include "1.0/Utils.h" #include "GeneratedTestHarness.h" #include "TestHarness.h" -#include - namespace android::hardware::neuralnetworks::V1_2::vts::functional { using implementation::PreparedModelCallback; @@ -82,34 +84,39 @@ void createPreparedModel(const sp& device, const Model& model, ASSERT_NE(nullptr, preparedModel->get()); } -// A class for test environment setup -NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() { - // This has to return a "new" object because it is freed inside - // testing::AddGlobalTestEnvironment when the gtest is being torn down - static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment(); - return instance; -} - -void NeuralnetworksHidlEnvironment::registerTestServices() { - registerTestService(); -} - -// The main test class for NEURALNETWORK HIDL HAL. void NeuralnetworksHidlTest::SetUp() { - testing::VtsHalHidlTargetTestBase::SetUp(); - -#ifdef PRESUBMIT_NOT_VTS - const std::string name = - NeuralnetworksHidlEnvironment::getInstance()->getServiceName(); - const std::string sampleDriver = "sample-"; - if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) { - GTEST_SKIP(); - } -#endif // PRESUBMIT_NOT_VTS - - ASSERT_NE(nullptr, kDevice.get()); + testing::TestWithParam::SetUp(); + ASSERT_NE(kDevice, nullptr); } +static NamedDevice makeNamedDevice(const std::string& name) { + return {name, IDevice::getService(name)}; +} + +static std::vector getNamedDevicesImpl() { + // Retrieves the name of all service instances that implement IDevice, + // including any Lazy HAL instances. + const std::vector names = hardware::getAllHalInstanceNames(IDevice::descriptor); + + // Get a handle to each device and pair it with its name. 
+ std::vector namedDevices; + namedDevices.reserve(names.size()); + std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice); + return namedDevices; +} + +const std::vector& getNamedDevices() { + const static std::vector devices = getNamedDevicesImpl(); + return devices; +} + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info) { + return gtestCompliantName(getName(info.param)); +} + +INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest); + // Forward declaration from ValidateModel.cpp void validateModel(const sp& device, const Model& model); // Forward declaration from ValidateRequest.cpp @@ -162,14 +169,3 @@ sp getPreparedModel_1_2(const spinit(&argc, argv); - - int status = RUN_ALL_TESTS(); - return status; -} diff --git a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h index 9981696290..d01336eccd 100644 --- a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h +++ b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h @@ -17,42 +17,33 @@ #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H #define ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H -#include -#include -#include -#include -#include #include +#include #include #include - +#include "1.0/Utils.h" #include "1.2/Callbacks.h" namespace android::hardware::neuralnetworks::V1_2::vts::functional { -// A class for test environment setup -class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment); - NeuralnetworksHidlEnvironment() = default; - - public: - static NeuralnetworksHidlEnvironment* getInstance(); - void registerTestServices() override; -}; - -// The main test class for NEURALNETWORKS HIDL HAL. -class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase { - DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest); - - public: - NeuralnetworksHidlTest() = default; - void SetUp() override; +using NamedDevice = Named>; +using NeuralnetworksHidlTestParam = NamedDevice; +class NeuralnetworksHidlTest : public testing::TestWithParam { protected: - const sp kDevice = testing::VtsHalHidlTargetTestBase::getService( - NeuralnetworksHidlEnvironment::getInstance()); + void SetUp() override; + const sp kDevice = getData(GetParam()); }; +const std::vector& getNamedDevices(); + +std::string printNeuralnetworksHidlTest( + const testing::TestParamInfo& info); + +#define INSTANTIATE_DEVICE_TEST(TestSuite) \ + INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \ + printNeuralnetworksHidlTest) + // Create an IPreparedModel object. If the model cannot be prepared, // "preparedModel" will be nullptr instead. void createPreparedModel(const sp& device, const Model& model, diff --git a/neuralnetworks/TEST_MAPPING b/neuralnetworks/TEST_MAPPING index 50b6c19124..421922a25e 100644 --- a/neuralnetworks/TEST_MAPPING +++ b/neuralnetworks/TEST_MAPPING @@ -1,26 +1,35 @@ { "presubmit": [ { - "name": "PresubmitHalNeuralnetworksV1_0TargetTest", + "name": "VtsHalNeuralnetworksV1_0TargetTest", "options": [ { - "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.0::IDevice/sample-all" + // Just use sample-all driver for presubmit tests for faster results. + // The other sample drivers (fast-float, quant, etc.) are subsets of + // sample-all. 
+ "native-test-flag": "--gtest_filter=*sample_all*" } ] }, { - "name": "PresubmitHalNeuralnetworksV1_1TargetTest", + "name": "VtsHalNeuralnetworksV1_1TargetTest", "options": [ { - "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.1::IDevice/sample-all" + // Just use sample-all driver for presubmit tests for faster results. + // The other sample drivers (fast-float, quant, etc.) are subsets of + // sample-all. + "native-test-flag": "--gtest_filter=*sample_all*" } ] }, { - "name": "PresubmitHalNeuralnetworksV1_2TargetTest", + "name": "VtsHalNeuralnetworksV1_2TargetTest", "options": [ { - "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.2::IDevice/sample-all" + // Just use sample-all driver for presubmit tests for faster results. + // The other sample drivers (fast-float, quant, etc.) are subsets of + // sample-all. + "native-test-flag": "--gtest_filter=*sample_all*" } ] }