Update VTS tests with the new test harness.

Bug: 120601396
Test: All VTS
Change-Id: I539e75585b2cc01d153565814491361adfa048be
Xusong Wang
2019-08-23 16:10:54 -07:00
parent 0fe5a0e89e
commit 9e2b97b83c
26 changed files with 348 additions and 284 deletions
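For orientation: across all three HAL versions, this change replaces the free-standing Execute(device, testModel) entry points and per-version generated source lists with a GoogleTest value-parameterized harness: a GeneratedTestBase fixture parameterized over test_helper::TestModelManager::TestParam, an INSTANTIATE_GENERATED_TEST macro, and TEST_P bodies filtered by a lambda. The sketch below is a minimal, self-contained approximation of that pattern using plain GoogleTest; MiniModel, MiniGeneratedTest, and getTestModels are hypothetical stand-ins for the real TestModel, GeneratedTest, and TestModelManager::getTestModels.

// Minimal sketch of the new harness pattern, assuming plain GoogleTest.
// MiniModel, MiniGeneratedTest, and getTestModels are hypothetical stand-ins
// for TestModel, GeneratedTest, and test_helper::TestModelManager.
#include <gtest/gtest.h>

#include <string>
#include <utility>
#include <vector>

struct MiniModel {
    bool expectFailure;
};

// Mirrors test_helper::TestModelManager::TestParam: a (name, model) pair.
using TestParam = std::pair<std::string, const MiniModel*>;

class MiniGeneratedTest : public ::testing::TestWithParam<TestParam> {
  protected:
    const MiniModel* mTestModel = GetParam().second;
};

// Stand-in for TestModelManager::get().getTestModels(filter): returns only the
// models accepted by the filter, each tagged with a test name.
std::vector<TestParam> getTestModels(bool (*filter)(const MiniModel&)) {
    static const MiniModel kAdd{/*expectFailure=*/false};
    static const MiniModel kInvalid{/*expectFailure=*/true};
    std::vector<TestParam> params;
    for (const TestParam& p : {TestParam{"add", &kAdd}, TestParam{"invalid", &kInvalid}}) {
        if (filter(*p.second)) params.push_back(p);
    }
    return params;
}

TEST_P(MiniGeneratedTest, Test) {
    // The real suites build a HIDL model from *mTestModel and execute it; here
    // we only assert that the filter kept expected-to-pass models.
    EXPECT_FALSE(mTestModel->expectFailure);
}

// Shape of INSTANTIATE_GENERATED_TEST: instantiate once, filter the model set,
// and name each instance after the model.
INSTANTIATE_TEST_SUITE_P(
        TestGenerated, MiniGeneratedTest,
        ::testing::ValuesIn(getTestModels([](const MiniModel& m) { return !m.expectFailure; })),
        [](const auto& info) { return info.param.first; });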

View File

@@ -74,7 +74,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_0_all_generated_V1_0_tests",
":VtsHalNeuralNetworksV1_0_all_generated_tests",
],
}
@@ -83,7 +83,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_0_all_generated_V1_0_tests",
":VtsHalNeuralNetworksV1_0_all_generated_tests",
],
cflags: [
"-DPRESUBMIT_NOT_VTS",

View File

@@ -20,6 +20,7 @@
#include "1.0/Utils.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
@@ -36,7 +37,8 @@ namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_0 {
namespace generated_tests {
namespace vts {
namespace functional {
using namespace test_helper;
using ::android::hardware::neuralnetworks::V1_0::ErrorStatus;
@@ -151,48 +153,61 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
checkResults(testModel, outputs);
}
void Execute(const sp<IDevice>& device, const TestModel& testModel) {
Model model = createModel(testModel);
// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {
protected:
void Execute(const TestModel& testModel) {
Model model = createModel(testModel);
// see if service can handle model
bool fullySupportsModel = false;
Return<void> supportedCall = device->getSupportedOperations(
model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
ASSERT_EQ(ErrorStatus::NONE, status);
ASSERT_NE(0ul, supported.size());
fullySupportsModel = std::all_of(supported.begin(), supported.end(),
[](bool valid) { return valid; });
});
ASSERT_TRUE(supportedCall.isOk());
// see if service can handle model
bool fullySupportsModel = false;
Return<void> supportedCall = device->getSupportedOperations(
model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
ASSERT_EQ(ErrorStatus::NONE, status);
ASSERT_NE(0ul, supported.size());
fullySupportsModel = std::all_of(supported.begin(), supported.end(),
[](bool valid) { return valid; });
});
ASSERT_TRUE(supportedCall.isOk());
// launch prepare model
sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Return<ErrorStatus> prepareLaunchStatus = device->prepareModel(model, preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
// launch prepare model
sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Return<ErrorStatus> prepareLaunchStatus =
device->prepareModel(model, preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
// retrieve prepared model
preparedModelCallback->wait();
ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
// retrieve prepared model
preparedModelCallback->wait();
ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
// early termination if vendor service cannot fully prepare model
if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
ASSERT_EQ(nullptr, preparedModel.get());
LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
"prepare model that it does not support.";
std::cout << "[ ] Early termination of test because vendor service cannot "
"prepare model that it does not support."
<< std::endl;
GTEST_SKIP();
// early termination if vendor service cannot fully prepare model
if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
ASSERT_EQ(nullptr, preparedModel.get());
LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
"prepare model that it does not support.";
std::cout << "[ ] Early termination of test because vendor service cannot "
"prepare model that it does not support."
<< std::endl;
GTEST_SKIP();
}
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
ASSERT_NE(nullptr, preparedModel.get());
EvaluatePreparedModel(preparedModel, testModel);
}
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
ASSERT_NE(nullptr, preparedModel.get());
};
EvaluatePreparedModel(preparedModel, testModel);
TEST_P(GeneratedTest, Test) {
Execute(*mTestModel);
}
} // namespace generated_tests
INSTANTIATE_GENERATED_TEST(GeneratedTest,
[](const TestModel& testModel) { return !testModel.expectFailure; });
} // namespace functional
} // namespace vts
} // namespace V1_0
} // namespace neuralnetworks
} // namespace hardware
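One detail worth noting about the refactored flow above: Execute() is now a fixture member invoked from TEST_P(GeneratedTest, Test), and the early-termination path relies on GTEST_SKIP() inside that helper. A skip raised in a helper marks the test as skipped but only returns from the helper itself, so any code after the helper call in the test body would still run. A minimal, self-contained illustration (plain GoogleTest; PrepareOrSkip is a hypothetical stand-in for Execute()):

#include <gtest/gtest.h>

// PrepareOrSkip stands in for Execute(): if the (hypothetical) vendor support
// check fails, GTEST_SKIP() records a skip result and returns from this helper.
void PrepareOrSkip(bool fullySupportsModel) {
    if (!fullySupportsModel) {
        GTEST_SKIP() << "vendor service cannot prepare a model it does not support";
    }
    // Reached only when the model is supported; the real helper would go on to
    // prepare and evaluate the model here.
    SUCCEED();
}

TEST(SkipFromHelper, MarksTestSkipped) {
    PrepareOrSkip(/*fullySupportsModel=*/false);
    // Control returns here after the skip; the test is already marked skipped,
    // which is why the real Execute() keeps all follow-up work inside itself.
}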

View File

@@ -19,18 +19,48 @@
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_0 {
namespace generated_tests {
namespace vts {
namespace functional {
class GeneratedTestBase
: public NeuralnetworksHidlTest,
public ::testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
protected:
void SetUp() override {
NeuralnetworksHidlTest::SetUp();
ASSERT_NE(mTestModel, nullptr);
}
const test_helper::TestModel* mTestModel = GetParam().second;
};
#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
::testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {
protected:
void validateEverything(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
};
Model createModel(const ::test_helper::TestModel& testModel);
void Execute(const sp<V1_0::IDevice>& device, const ::test_helper::TestModel& testModel);
} // namespace generated_tests
} // namespace functional
} // namespace vts
} // namespace V1_0
} // namespace neuralnetworks
} // namespace hardware

View File

@@ -1,26 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android::hardware::neuralnetworks::V1_0::generated_tests {
using namespace android::hardware::neuralnetworks::V1_0::vts::functional;
} // namespace android::hardware::neuralnetworks::V1_0::generated_tests

View File

@@ -16,9 +16,9 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"
#include "VtsHalNeuralnetworks.h"
#include "1.0/Callbacks.h"
#include "GeneratedTestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {

View File

@@ -17,6 +17,7 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"
#include "1.0/Callbacks.h"
#include "GeneratedTestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {

View File

@@ -17,11 +17,13 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"
#include "VtsHalNeuralnetworks.h"
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include <android-base/logging.h>
#include "1.0/Callbacks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
@@ -134,6 +136,15 @@ void ValidationTest::validateEverything(const Model& model, const Request& reque
validateRequest(preparedModel, request);
}
TEST_P(ValidationTest, Test) {
const Model model = createModel(*mTestModel);
const Request request = createRequest(*mTestModel);
ASSERT_FALSE(mTestModel->expectFailure);
validateEverything(model, request);
}
INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });
} // namespace functional
} // namespace vts
} // namespace V1_0

View File

@@ -28,6 +28,8 @@
#include <iostream>
#include <vector>
#include "TestHarness.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
@@ -60,19 +62,6 @@ class NeuralnetworksHidlTest : public ::testing::VtsHalHidlTargetTestBase {
sp<IDevice> device;
};
// Tag for the validation tests
class ValidationTest : public NeuralnetworksHidlTest {
protected:
void validateEverything(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
};
// Tag for the generated tests
class GeneratedTest : public NeuralnetworksHidlTest {};
} // namespace functional
} // namespace vts
} // namespace V1_0

View File

@@ -49,7 +49,7 @@ cc_test {
name: "VtsHalNeuralnetworksV1_1CompatV1_0TargetTest",
defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
srcs: [
":VtsHalNeuralNetworksV1_1_all_generated_V1_0_tests",
":VtsHalNeuralNetworksV1_0_all_generated_tests",
],
}
@@ -59,7 +59,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_1_all_generated_V1_1_tests",
":VtsHalNeuralNetworksV1_1_all_generated_tests",
],
}
@@ -68,7 +68,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_1_all_generated_V1_1_tests",
":VtsHalNeuralNetworksV1_1_all_generated_tests",
],
cflags: [
"-DPRESUBMIT_NOT_VTS",

View File

@@ -31,12 +31,14 @@
#include "1.0/Utils.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_1 {
namespace generated_tests {
namespace vts {
namespace functional {
using namespace test_helper;
using ::android::hardware::neuralnetworks::V1_0::DataLocation;
@@ -157,49 +159,61 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
checkResults(testModel, outputs);
}
void Execute(const sp<IDevice>& device, const TestModel& testModel) {
Model model = createModel(testModel);
// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {
protected:
void Execute(const TestModel& testModel) {
Model model = createModel(testModel);
// see if service can handle model
bool fullySupportsModel = false;
Return<void> supportedCall = device->getSupportedOperations_1_1(
model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
ASSERT_EQ(ErrorStatus::NONE, status);
ASSERT_NE(0ul, supported.size());
fullySupportsModel = std::all_of(supported.begin(), supported.end(),
[](bool valid) { return valid; });
});
ASSERT_TRUE(supportedCall.isOk());
// see if service can handle model
bool fullySupportsModel = false;
Return<void> supportedCall = device->getSupportedOperations_1_1(
model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
ASSERT_EQ(ErrorStatus::NONE, status);
ASSERT_NE(0ul, supported.size());
fullySupportsModel = std::all_of(supported.begin(), supported.end(),
[](bool valid) { return valid; });
});
ASSERT_TRUE(supportedCall.isOk());
// launch prepare model
sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
// launch prepare model
sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
// retrieve prepared model
preparedModelCallback->wait();
ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
// retrieve prepared model
preparedModelCallback->wait();
ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
// early termination if vendor service cannot fully prepare model
if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
ASSERT_EQ(nullptr, preparedModel.get());
LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
"prepare model that it does not support.";
std::cout << "[ ] Early termination of test because vendor service cannot "
"prepare model that it does not support."
<< std::endl;
GTEST_SKIP();
// early termination if vendor service cannot fully prepare model
if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
ASSERT_EQ(nullptr, preparedModel.get());
LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
"prepare model that it does not support.";
std::cout << "[ ] Early termination of test because vendor service cannot "
"prepare model that it does not support."
<< std::endl;
GTEST_SKIP();
}
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
ASSERT_NE(nullptr, preparedModel.get());
EvaluatePreparedModel(preparedModel, testModel);
}
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
ASSERT_NE(nullptr, preparedModel.get());
};
EvaluatePreparedModel(preparedModel, testModel);
TEST_P(GeneratedTest, Test) {
Execute(*mTestModel);
}
} // namespace generated_tests
INSTANTIATE_GENERATED_TEST(GeneratedTest,
[](const TestModel& testModel) { return !testModel.expectFailure; });
} // namespace functional
} // namespace vts
} // namespace V1_1
} // namespace neuralnetworks
} // namespace hardware

View File

@@ -19,18 +19,48 @@
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_1 {
namespace generated_tests {
namespace vts {
namespace functional {
class GeneratedTestBase
: public NeuralnetworksHidlTest,
public ::testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
protected:
void SetUp() override {
NeuralnetworksHidlTest::SetUp();
ASSERT_NE(mTestModel, nullptr);
}
const test_helper::TestModel* mTestModel = GetParam().second;
};
#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
::testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {
protected:
void validateEverything(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
};
Model createModel(const ::test_helper::TestModel& testModel);
void Execute(const sp<V1_1::IDevice>& device, const ::test_helper::TestModel& testModel);
} // namespace generated_tests
} // namespace functional
} // namespace vts
} // namespace V1_1
} // namespace neuralnetworks
} // namespace hardware

View File

@@ -1,29 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android::hardware::neuralnetworks::V1_1::generated_tests {
using namespace android::hardware::neuralnetworks::V1_1::vts::functional;
using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
using ::android::hardware::neuralnetworks::V1_0::Request;
} // namespace android::hardware::neuralnetworks::V1_1::generated_tests

View File

@@ -18,6 +18,7 @@
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {

View File

@@ -18,6 +18,7 @@
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {

View File

@@ -17,11 +17,13 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"
#include "VtsHalNeuralnetworks.h"
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include <android-base/logging.h>
#include "1.0/Callbacks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
@@ -135,6 +137,15 @@ void ValidationTest::validateEverything(const Model& model, const Request& reque
validateRequest(preparedModel, request);
}
TEST_P(ValidationTest, Test) {
const Model model = createModel(*mTestModel);
const Request request = createRequest(*mTestModel);
ASSERT_FALSE(mTestModel->expectFailure);
validateEverything(model, request);
}
INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });
} // namespace functional
} // namespace vts
} // namespace V1_1

View File

@@ -29,6 +29,8 @@
#include <iostream>
#include <vector>
#include "TestHarness.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
@@ -69,19 +71,6 @@ class NeuralnetworksHidlTest : public ::testing::VtsHalHidlTargetTestBase {
sp<IDevice> device;
};
// Tag for the validation tests
class ValidationTest : public NeuralnetworksHidlTest {
protected:
void validateEverything(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
};
// Tag for the generated tests
class GeneratedTest : public NeuralnetworksHidlTest {};
} // namespace functional
} // namespace vts
} // namespace V1_1

View File

@@ -52,7 +52,7 @@ cc_test {
name: "VtsHalNeuralnetworksV1_2CompatV1_0TargetTest",
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
":VtsHalNeuralNetworksV1_2_all_generated_V1_0_tests",
":VtsHalNeuralNetworksV1_0_all_generated_tests",
"ValidateBurst.cpp",
],
}
@@ -62,7 +62,7 @@ cc_test {
name: "VtsHalNeuralnetworksV1_2CompatV1_1TargetTest",
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
":VtsHalNeuralNetworksV1_2_all_generated_V1_1_tests",
":VtsHalNeuralNetworksV1_1_all_generated_tests",
"ValidateBurst.cpp",
],
}
@@ -73,7 +73,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_2_all_generated_V1_2_tests",
":VtsHalNeuralNetworksV1_2_all_generated_tests",
":VtsHalNeuralNetworksV1_2_mobilenets",
"CompilationCachingTests.cpp",
"ValidateBurst.cpp",
@@ -85,7 +85,7 @@ cc_test {
defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
srcs: [
"BasicTests.cpp",
":VtsHalNeuralNetworksV1_2_all_generated_V1_2_tests",
":VtsHalNeuralNetworksV1_2_all_generated_tests",
"ValidateBurst.cpp",
],
cflags: [

View File

@@ -425,7 +425,7 @@ class CompilationCachingTest : public CompilationCachingTestBase,
TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
sp<IPreparedModel> preparedModel = nullptr;
@@ -459,14 +459,14 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
}
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
}
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
sp<IPreparedModel> preparedModel = nullptr;
@@ -522,14 +522,14 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
}
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
}
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
// Test with number of model cache files greater than mNumModelCache.
@@ -544,8 +544,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -569,8 +569,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -593,8 +593,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -618,8 +618,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -634,7 +634,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
// Save the compilation to cache.
@@ -715,7 +715,7 @@ TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
// Go through each handle in model cache, test with NumFd greater than 1.
@@ -730,8 +730,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -755,8 +755,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -779,8 +779,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -804,8 +804,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -820,7 +820,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
// Save the compilation to cache.
@@ -901,7 +901,7 @@ TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
@@ -917,8 +917,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -940,8 +940,8 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
saveModelToCache(model, modelCache, dataCache, &preparedModel);
ASSERT_NE(preparedModel, nullptr);
// Execute and verify results.
generated_tests::EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModel,
/*testDynamicOutputShape=*/false);
// Check if prepareModelFromCache fails.
preparedModel = nullptr;
ErrorStatus status;
@@ -956,7 +956,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
// Create test HIDL model and compile.
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
@@ -1035,10 +1035,10 @@ TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
// Create test models and check if fully supported by the service.
const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
const Model modelMul = generated_tests::createModel(testModelMul);
const Model modelMul = createModel(testModelMul);
if (checkEarlyTermination(modelMul)) return;
const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
const Model modelAdd = generated_tests::createModel(testModelAdd);
const Model modelAdd = createModel(testModelAdd);
if (checkEarlyTermination(modelAdd)) return;
// Save the modelMul compilation to cache.
@@ -1085,8 +1085,8 @@ TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
ASSERT_EQ(preparedModel, nullptr);
} else {
ASSERT_NE(preparedModel, nullptr);
generated_tests::EvaluatePreparedModel(preparedModel, testModelAdd,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModelAdd,
/*testDynamicOutputShape=*/false);
}
}
}
@@ -1097,10 +1097,10 @@ TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
// Create test models and check if fully supported by the service.
const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
const Model modelMul = generated_tests::createModel(testModelMul);
const Model modelMul = createModel(testModelMul);
if (checkEarlyTermination(modelMul)) return;
const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
const Model modelAdd = generated_tests::createModel(testModelAdd);
const Model modelAdd = createModel(testModelAdd);
if (checkEarlyTermination(modelAdd)) return;
// Save the modelMul compilation to cache.
@@ -1147,8 +1147,8 @@ TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
ASSERT_EQ(preparedModel, nullptr);
} else {
ASSERT_NE(preparedModel, nullptr);
generated_tests::EvaluatePreparedModel(preparedModel, testModelAdd,
/*testDynamicOutputShape=*/false);
EvaluatePreparedModel(preparedModel, testModelAdd,
/*testDynamicOutputShape=*/false);
}
}
}
@@ -1159,10 +1159,10 @@ TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
// Create test models and check if fully supported by the service.
const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
const Model modelMul = generated_tests::createModel(testModelMul);
const Model modelMul = createModel(testModelMul);
if (checkEarlyTermination(modelMul)) return;
const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
const Model modelAdd = generated_tests::createModel(testModelAdd);
const Model modelAdd = createModel(testModelAdd);
if (checkEarlyTermination(modelAdd)) return;
// Save the modelMul compilation to cache.
@@ -1265,7 +1265,7 @@ class CompilationCachingSecurityTest
// whether the test should be skipped or not.
void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
const TestModel& testModel = createTestModel();
const Model model = generated_tests::createModel(testModel);
const Model model = createModel(testModel);
if (checkEarlyTermination(model)) return;
// Save the compilation to cache.

View File

@@ -42,12 +42,14 @@
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace generated_tests {
namespace vts {
namespace functional {
using namespace test_helper;
using ::android::hardware::neuralnetworks::V1_0::DataLocation;
@@ -410,21 +412,43 @@ void PrepareModel(const sp<IDevice>& device, const Model& model,
ASSERT_NE(nullptr, preparedModel->get());
}
void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
Model model = createModel(testModel);
if (testDynamicOutputShape) {
makeOutputDimensionsUnspecified(&model);
}
// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {
protected:
void Execute(const TestModel& testModel, bool testDynamicOutputShape) {
Model model = createModel(testModel);
if (testDynamicOutputShape) {
makeOutputDimensionsUnspecified(&model);
}
sp<IPreparedModel> preparedModel = nullptr;
PrepareModel(device, model, &preparedModel);
if (preparedModel == nullptr) {
GTEST_SKIP();
sp<IPreparedModel> preparedModel = nullptr;
PrepareModel(device, model, &preparedModel);
if (preparedModel == nullptr) {
GTEST_SKIP();
}
EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
}
EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
};
// Tag for the dynamic output shape tests
class DynamicOutputShapeTest : public GeneratedTest {};
TEST_P(GeneratedTest, Test) {
Execute(*mTestModel, /*testDynamicOutputShape=*/false);
}
} // namespace generated_tests
TEST_P(DynamicOutputShapeTest, Test) {
Execute(*mTestModel, /*testDynamicOutputShape=*/true);
}
INSTANTIATE_GENERATED_TEST(GeneratedTest,
[](const TestModel& testModel) { return !testModel.expectFailure; });
INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
[](const TestModel& testModel) { return !testModel.expectFailure; });
} // namespace functional
} // namespace vts
} // namespace V1_2
} // namespace neuralnetworks
} // namespace hardware
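The V1_2 harness above also shows how a second test axis is added without duplicating the model list: DynamicOutputShapeTest derives from GeneratedTest and is instantiated with the same filter, so every generated model runs once with fixed and once with unspecified output dimensions. A compact, self-contained sketch of that shape (plain GoogleTest; BaseGeneratedTest and DynamicOutputShapeLikeTest are hypothetical names, and the integer parameters stand in for the generated models):

#include <gtest/gtest.h>

// Base fixture parameterized over a stand-in "model id"; the derived fixture
// reruns the same parameters with a different execution mode.
class BaseGeneratedTest : public ::testing::TestWithParam<int> {
  protected:
    void Execute(int modelId, bool testDynamicOutputShape) {
        // The real Execute() builds and runs the model; here we just record
        // which mode ran for which parameter.
        RecordProperty("modelId", modelId);
        RecordProperty("dynamicOutputShape", testDynamicOutputShape);
    }
};

class DynamicOutputShapeLikeTest : public BaseGeneratedTest {};

TEST_P(BaseGeneratedTest, Test) { Execute(GetParam(), /*testDynamicOutputShape=*/false); }
TEST_P(DynamicOutputShapeLikeTest, Test) { Execute(GetParam(), /*testDynamicOutputShape=*/true); }

// Both suites share the same parameter list, mirroring the two
// INSTANTIATE_GENERATED_TEST calls with the same filter in the diff above.
INSTANTIATE_TEST_SUITE_P(TestGenerated, BaseGeneratedTest, ::testing::Values(0, 1, 2));
INSTANTIATE_TEST_SUITE_P(TestGenerated, DynamicOutputShapeLikeTest, ::testing::Values(0, 1, 2));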

View File

@@ -23,12 +23,46 @@
#include <functional>
#include <vector>
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace generated_tests {
namespace vts {
namespace functional {
class GeneratedTestBase
: public NeuralnetworksHidlTest,
public ::testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
protected:
void SetUp() override {
NeuralnetworksHidlTest::SetUp();
ASSERT_NE(mTestModel, nullptr);
}
const test_helper::TestModel* mTestModel = GetParam().second;
};
#define INSTANTIATE_GENERATED_TEST(TestSuite, filter) \
INSTANTIATE_TEST_SUITE_P( \
TestGenerated, TestSuite, \
::testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
[](const auto& info) { return info.param.first; })
// Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
// TODO: Clean up the hierarchy for ValidationTest.
class ValidationTest : public GeneratedTestBase {
protected:
void validateEverything(const Model& model, const Request& request);
void validateFailure(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
void validateRequestFailure(const sp<IPreparedModel>& preparedModel, const Request& request);
void validateBurst(const sp<IPreparedModel>& preparedModel, const Request& request);
};
Model createModel(const ::test_helper::TestModel& testModel);
@@ -38,10 +72,8 @@ void PrepareModel(const sp<V1_2::IDevice>& device, const V1_2::Model& model,
void EvaluatePreparedModel(const sp<V1_2::IPreparedModel>& preparedModel,
const ::test_helper::TestModel& testModel, bool testDynamicOutputShape);
void Execute(const sp<V1_2::IDevice>& device, const ::test_helper::TestModel& testModel,
bool testDynamicOutputShape = false);
} // namespace generated_tests
} // namespace functional
} // namespace vts
} // namespace V1_2
} // namespace neuralnetworks
} // namespace hardware

View File

@@ -1,29 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android::hardware::neuralnetworks::V1_2::generated_tests {
using namespace ::android::hardware::neuralnetworks::V1_2::vts::functional;
using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
using ::android::hardware::neuralnetworks::V1_0::Request;
} // namespace android::hardware::neuralnetworks::V1_2::generated_tests

View File

@@ -21,6 +21,7 @@
#include "1.2/Callbacks.h"
#include "ExecutionBurstController.h"
#include "ExecutionBurstServer.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "Utils.h"

View File

@@ -18,6 +18,7 @@
#include "1.0/Utils.h"
#include "1.2/Callbacks.h"
#include "GeneratedTestHarness.h"
#include "VtsHalNeuralnetworks.h"
namespace android {

View File

@@ -19,6 +19,7 @@
#include "1.0/Utils.h"
#include "1.2/Callbacks.h"
#include "ExecutionBurstController.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

View File

@@ -17,11 +17,13 @@
#define LOG_TAG "neuralnetworks_hidl_hal_test"
#include "VtsHalNeuralnetworks.h"
#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include <android-base/logging.h>
#include "1.2/Callbacks.h"
namespace android {
namespace hardware {
namespace neuralnetworks {
@@ -154,6 +156,18 @@ void ValidationTest::validateFailure(const Model& model, const Request& request)
validateRequestFailure(preparedModel, request);
}
TEST_P(ValidationTest, Test) {
const Model model = createModel(*mTestModel);
const Request request = createRequest(*mTestModel);
if (mTestModel->expectFailure) {
validateFailure(model, request);
} else {
validateEverything(model, request);
}
}
INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });
sp<IPreparedModel> getPreparedModel_1_2(
const sp<V1_2::implementation::PreparedModelCallback>& callback) {
sp<V1_0::IPreparedModel> preparedModelV1_0 = callback->getPreparedModel();

View File

@@ -30,6 +30,7 @@
#include <vector>
#include "1.2/Callbacks.h"
#include "TestHarness.h"
namespace android {
namespace hardware {
@@ -68,24 +69,6 @@ class NeuralnetworksHidlTest : public ::testing::VtsHalHidlTargetTestBase {
sp<IDevice> device;
};
class ValidationTest : public NeuralnetworksHidlTest {
protected:
void validateEverything(const Model& model, const Request& request);
void validateFailure(const Model& model, const Request& request);
private:
void validateModel(const Model& model);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
void validateRequestFailure(const sp<IPreparedModel>& preparedModel, const Request& request);
void validateBurst(const sp<IPreparedModel>& preparedModel, const Request& request);
};
// Tag for the generated tests
class GeneratedTest : public NeuralnetworksHidlTest {};
// Tag for the dynamic output shape tests
class DynamicOutputShapeTest : public NeuralnetworksHidlTest {};
// Utility function to get PreparedModel from callback and downcast to V1_2.
sp<IPreparedModel> getPreparedModel_1_2(
const sp<V1_2::implementation::PreparedModelCallback>& callback);