Merge changes from topic "aosp-nnapi-qos" am: 27cfaa82f8 am: 1090fe7c4f

Change-Id: Ia40a6a9ff8f21cd540b252f883f056d9e0931d45
Automerger Merge Worker
2020-01-30 18:07:53 +00:00
7 changed files with 375 additions and 21 deletions

Android.bp (View File)

@@ -40,6 +40,7 @@ cc_test {
"BasicTests.cpp",
"CompilationCachingTests.cpp",
"GeneratedTestHarness.cpp",
"QualityOfServiceTests.cpp",
"TestAssertions.cpp",
"ValidateBurst.cpp",
"ValidateModel.cpp",

GeneratedTestHarness.cpp (View File)

@@ -45,6 +45,7 @@
#include "1.0/Utils.h"
#include "1.3/Callbacks.h"
#include "1.3/Utils.h"
#include "ExecutionBurstController.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
@@ -714,7 +715,8 @@ void Execute(const sp<IDevice>& device, const TestModel& testModel, TestKind tes
} break;
case TestKind::QUANTIZATION_COUPLING: {
ASSERT_TRUE(testModel.hasQuant8CoupledOperands());
createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ false);
createPreparedModel(device, model, &preparedModel,
/*reportSkipping*/ false);
TestModel signedQuantizedModel = convertQuant8AsymmOperandsToSigned(testModel);
sp<IPreparedModel> preparedCoupledModel;
createPreparedModel(device, createModel(signedQuantizedModel), &preparedCoupledModel,
@@ -743,6 +745,12 @@ void Execute(const sp<IDevice>& device, const TestModel& testModel, TestKind tes
void GeneratedTestBase::SetUp() {
testing::TestWithParam<GeneratedTestParam>::SetUp();
ASSERT_NE(kDevice, nullptr);
const Return<void> ret =
kDevice->supportsDeadlines([this](bool prepareModelDeadline, bool executionDeadline) {
mSupportsDeadlines = {prepareModelDeadline, executionDeadline};
});
ASSERT_TRUE(ret.isOk());
}
std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
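
The capability query added to GeneratedTestBase::SetUp() above can also be read in isolation as the helper below. This is an illustrative sketch against the IDevice::supportsDeadlines API shown in this diff (it assumes the same includes and usings as GeneratedTestHarness.cpp); it is not code from the change.

// Illustrative sketch: query the driver's two deadline capabilities. The
// callback reports prepare-model deadline support and execution deadline
// support, in that order, matching the pair stored in mSupportsDeadlines.
static std::pair<bool, bool> querySupportsDeadlines(const sp<IDevice>& device) {
    std::pair<bool, bool> supports = {false, false};
    const Return<void> ret = device->supportsDeadlines(
            [&supports](bool prepareModelDeadline, bool executionDeadline) {
                supports = {prepareModelDeadline, executionDeadline};
            });
    EXPECT_TRUE(ret.isOk());
    return supports;
}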

GeneratedTestHarness.h (View File)

@@ -36,6 +36,7 @@ class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
void SetUp() override;
const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
std::pair<bool, bool> mSupportsDeadlines;
};
using FilterFn = std::function<bool(const test_helper::TestModel&)>;

QualityOfServiceTests.cpp (View File)

@@ -0,0 +1,299 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "1.0/Utils.h"
#include "1.3/Callbacks.h"
#include "1.3/Utils.h"
#include "GeneratedTestHarness.h"
#include "Utils.h"
namespace android::hardware::neuralnetworks::V1_3::vts::functional {
using implementation::ExecutionCallback;
using implementation::PreparedModelCallback;
using test_helper::TestBuffer;
using test_helper::TestModel;
using V1_1::ExecutionPreference;
using V1_2::MeasureTiming;
using V1_2::OutputShape;
using V1_2::Timing;
using HidlToken =
hidl_array<uint8_t, static_cast<uint32_t>(V1_2::Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
enum class DeadlineBoundType { NOW, UNLIMITED };
constexpr std::array<DeadlineBoundType, 2> deadlineBounds = {DeadlineBoundType::NOW,
DeadlineBoundType::UNLIMITED};
std::string toString(DeadlineBoundType type) {
switch (type) {
case DeadlineBoundType::NOW:
return "NOW";
case DeadlineBoundType::UNLIMITED:
return "UNLIMITED";
}
LOG(FATAL) << "Unrecognized DeadlineBoundType: " << static_cast<int>(type);
return {};
}
using Results = std::tuple<ErrorStatus, hidl_vec<OutputShape>, Timing>;
using MaybeResults = std::optional<Results>;
using ExecutionFunction =
std::function<MaybeResults(const sp<IPreparedModel>& preparedModel, const Request& request,
DeadlineBoundType deadlineBound)>;
static OptionalTimePoint makeOptionalTimePoint(DeadlineBoundType deadlineBoundType) {
OptionalTimePoint deadline;
switch (deadlineBoundType) {
case DeadlineBoundType::NOW: {
const auto currentTime = std::chrono::steady_clock::now();
const auto currentTimeInNanoseconds =
std::chrono::time_point_cast<std::chrono::nanoseconds>(currentTime);
const uint64_t nanosecondsSinceEpoch =
currentTimeInNanoseconds.time_since_epoch().count();
deadline.nanoseconds(nanosecondsSinceEpoch);
} break;
case DeadlineBoundType::UNLIMITED: {
uint64_t unlimited = std::numeric_limits<uint64_t>::max();
deadline.nanoseconds(unlimited);
} break;
}
return deadline;
}
void runPrepareModelTest(const sp<IDevice>& device, const Model& model, Priority priority,
std::optional<DeadlineBoundType> deadlineBound) {
OptionalTimePoint deadline;
if (deadlineBound.has_value()) {
deadline = makeOptionalTimePoint(deadlineBound.value());
}
// see if service can handle model
bool fullySupportsModel = false;
const Return<void> supportedCall = device->getSupportedOperations_1_3(
model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
ASSERT_EQ(ErrorStatus::NONE, status);
ASSERT_NE(0ul, supported.size());
fullySupportsModel = std::all_of(supported.begin(), supported.end(),
[](bool valid) { return valid; });
});
ASSERT_TRUE(supportedCall.isOk());
// launch prepare model
const sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
const Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_3(
model, ExecutionPreference::FAST_SINGLE_ANSWER, priority, deadline,
hidl_vec<hidl_handle>(), hidl_vec<hidl_handle>(), HidlToken(), preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));
// retrieve prepared model
preparedModelCallback->wait();
const ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
const sp<V1_0::IPreparedModel> preparedModelV1_0 = preparedModelCallback->getPreparedModel();
const sp<IPreparedModel> preparedModel =
IPreparedModel::castFrom(preparedModelV1_0).withDefault(nullptr);
// The getSupportedOperations_1_3 call returns a list of operations that are
// guaranteed not to fail if prepareModel_1_3 is called, and
// 'fullySupportsModel' is true i.f.f. the entire model is guaranteed.
// If a driver has any doubt that it can prepare an operation, it must
// return false. So here, if a driver isn't sure if it can support an
// operation, but reports that it successfully prepared the model, the test
// can continue.
if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
ASSERT_EQ(nullptr, preparedModel.get());
return;
}
// verify return status
if (!deadlineBound.has_value()) {
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
} else {
switch (deadlineBound.value()) {
case DeadlineBoundType::NOW:
// If the execution was launched with a deadline of NOW, the
// deadline has already passed when the driver would launch the
// execution. In this case, the driver must return
// MISSED_DEADLINE_*.
EXPECT_TRUE(prepareReturnStatus == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
prepareReturnStatus == ErrorStatus::MISSED_DEADLINE_PERSISTENT);
break;
case DeadlineBoundType::UNLIMITED:
// If an unlimited deadline is supplied, we expect the execution to
// proceed normally. In this case, check it normally by breaking out
// of the switch statement.
EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
break;
}
}
ASSERT_EQ(prepareReturnStatus == ErrorStatus::NONE, preparedModel.get() != nullptr);
}
void runPrepareModelTests(const sp<IDevice>& device, const Model& model,
bool supportsPrepareModelDeadline) {
// test priority
for (auto priority : hidl_enum_range<Priority>{}) {
SCOPED_TRACE("priority: " + toString(priority));
if (priority == kDefaultPriority) continue;
runPrepareModelTest(device, model, priority, {});
}
// test deadline
if (supportsPrepareModelDeadline) {
for (auto deadlineBound : deadlineBounds) {
SCOPED_TRACE("deadlineBound: " + toString(deadlineBound));
runPrepareModelTest(device, model, kDefaultPriority, deadlineBound);
}
}
}
static MaybeResults executeAsynchronously(const sp<IPreparedModel>& preparedModel,
const Request& request, DeadlineBoundType deadlineBound) {
SCOPED_TRACE("asynchronous");
const MeasureTiming measure = MeasureTiming::NO;
const OptionalTimePoint deadline = makeOptionalTimePoint(deadlineBound);
// launch execution
const sp<ExecutionCallback> callback = new ExecutionCallback();
Return<ErrorStatus> ret = preparedModel->execute_1_3(request, measure, deadline, callback);
EXPECT_TRUE(ret.isOk());
EXPECT_EQ(ErrorStatus::NONE, ret.withDefault(ErrorStatus::GENERAL_FAILURE));
if (!ret.isOk() || ret != ErrorStatus::NONE) return std::nullopt;
// retrieve execution results
callback->wait();
const ErrorStatus status = callback->getStatus();
hidl_vec<OutputShape> outputShapes = callback->getOutputShapes();
const Timing timing = callback->getTiming();
// return results
return Results{status, std::move(outputShapes), timing};
}
static MaybeResults executeSynchronously(const sp<IPreparedModel>& preparedModel,
const Request& request, DeadlineBoundType deadlineBound) {
SCOPED_TRACE("synchronous");
const MeasureTiming measure = MeasureTiming::NO;
const OptionalTimePoint deadline = makeOptionalTimePoint(deadlineBound);
// configure results callback
MaybeResults results;
const auto cb = [&results](const auto&... args) { results.emplace(args...); };
// run execution
const Return<void> ret =
preparedModel->executeSynchronously_1_3(request, measure, deadline, cb);
EXPECT_TRUE(ret.isOk());
if (!ret.isOk()) return std::nullopt;
// return results
return results;
}
void runExecutionTest(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
const Request& request, bool synchronous, DeadlineBoundType deadlineBound) {
const ExecutionFunction execute = synchronous ? executeSynchronously : executeAsynchronously;
// Perform execution and unpack results.
const auto results = execute(preparedModel, request, deadlineBound);
if (!results.has_value()) return;
const auto& [status, outputShapes, timing] = results.value();
// Verify no timing information was returned
EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
// Validate deadline information if applicable.
switch (deadlineBound) {
case DeadlineBoundType::NOW:
// If the execution was launched with a deadline of NOW, the
// deadline has already passed when the driver would launch the
// execution. In this case, the driver must return
// MISSED_DEADLINE_*.
ASSERT_TRUE(status == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
status == ErrorStatus::MISSED_DEADLINE_PERSISTENT);
return;
case DeadlineBoundType::UNLIMITED:
// If an unlimited deadline is supplied, we expect the execution to
// proceed normally. In this case, check it normally by breaking out
// of the switch statement.
ASSERT_EQ(ErrorStatus::NONE, status);
break;
}
// If the model output operands are fully specified, outputShapes must be
// either empty, or have the same number of elements as the number of outputs.
ASSERT_TRUE(outputShapes.size() == 0 || outputShapes.size() == testModel.outputIndexes.size());
// Go through all outputs, check returned output shapes.
for (uint32_t i = 0; i < outputShapes.size(); i++) {
EXPECT_TRUE(outputShapes[i].isSufficient);
const auto& expect = testModel.operands[testModel.outputIndexes[i]].dimensions;
const std::vector<uint32_t> actual = outputShapes[i].dimensions;
EXPECT_EQ(expect, actual);
}
// Retrieve execution results.
ASSERT_TRUE(nn::compliantWithV1_0(request));
const V1_0::Request request10 = nn::convertToV1_0(request);
const std::vector<TestBuffer> outputs = getOutputBuffers(request10);
// We want "close-enough" results.
checkResults(testModel, outputs);
}
void runExecutionTests(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
const Request& request) {
for (bool synchronous : {false, true}) {
for (auto deadlineBound : deadlineBounds) {
runExecutionTest(preparedModel, testModel, request, synchronous, deadlineBound);
}
}
}
void runTests(const sp<IDevice>& device, const TestModel& testModel,
std::pair<bool, bool> supportsDeadlines) {
// setup
const auto [supportsPrepareModelDeadline, supportsExecutionDeadline] = supportsDeadlines;
if (!supportsPrepareModelDeadline && !supportsExecutionDeadline) return;
const Model model = createModel(testModel);
// run prepare model tests
runPrepareModelTests(device, model, supportsPrepareModelDeadline);
if (supportsExecutionDeadline) {
// prepare model
sp<IPreparedModel> preparedModel;
createPreparedModel(device, model, &preparedModel);
if (preparedModel == nullptr) return;
// run execution tests
const Request request = nn::convertToV1_3(createRequest(testModel));
runExecutionTests(preparedModel, testModel, request);
}
}
class DeadlineTest : public GeneratedTestBase {};
TEST_P(DeadlineTest, Test) {
runTests(kDevice, kTestModel, mSupportsDeadlines);
}
INSTANTIATE_GENERATED_TEST(DeadlineTest,
[](const TestModel& testModel) { return !testModel.expectFailure; });
} // namespace android::hardware::neuralnetworks::V1_3::vts::functional
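
The tests above only exercise the two extreme deadline bounds, NOW and UNLIMITED. A finite deadline some interval in the future can be built with the same steady_clock-to-nanoseconds conversion used by makeOptionalTimePoint; the helper below is an illustrative sketch under that assumption (same includes and usings as QualityOfServiceTests.cpp), not part of the change.

// Illustrative sketch: an OptionalTimePoint set to 'timeout' after the current
// time, mirroring the conversion in makeOptionalTimePoint above.
static OptionalTimePoint makeDeadlineAfter(std::chrono::nanoseconds timeout) {
    const auto target = std::chrono::steady_clock::now() + timeout;
    const uint64_t nanosecondsSinceEpoch = static_cast<uint64_t>(
            std::chrono::duration_cast<std::chrono::nanoseconds>(target.time_since_epoch())
                    .count());
    OptionalTimePoint deadline;
    deadline.nanoseconds(nanosecondsSinceEpoch);
    return deadline;
}

For example, makeDeadlineAfter(std::chrono::seconds(1)) would give the driver roughly one second before MISSED_DEADLINE_* becomes an acceptable result.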

ValidateModel.cpp (View File)

@@ -44,12 +44,18 @@ static void validateGetSupportedOperations(const sp<IDevice>& device, const std:
}
static void validatePrepareModel(const sp<IDevice>& device, const std::string& message,
const Model& model, ExecutionPreference preference) {
const Model& model, ExecutionPreference preference,
bool testDeadline) {
SCOPED_TRACE(message + " [prepareModel_1_3]");
OptionalTimePoint deadline;
if (testDeadline) {
deadline.nanoseconds(std::numeric_limits<uint64_t>::max());
}
sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_3(
model, preference, kDefaultPriority, {}, hidl_vec<hidl_handle>(),
model, preference, kDefaultPriority, deadline, hidl_vec<hidl_handle>(),
hidl_vec<hidl_handle>(), HidlToken(), preparedModelCallback);
ASSERT_TRUE(prepareLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(prepareLaunchStatus));
@@ -73,12 +79,13 @@ static bool validExecutionPreference(ExecutionPreference preference) {
// to the model does not leave this function.
static void validate(const sp<IDevice>& device, const std::string& message, Model model,
const std::function<void(Model*)>& mutation,
ExecutionPreference preference = ExecutionPreference::FAST_SINGLE_ANSWER) {
ExecutionPreference preference = ExecutionPreference::FAST_SINGLE_ANSWER,
bool testDeadline = false) {
mutation(&model);
if (validExecutionPreference(preference)) {
if (validExecutionPreference(preference) && !testDeadline) {
validateGetSupportedOperations(device, message, model);
}
validatePrepareModel(device, message, model, preference);
validatePrepareModel(device, message, model, preference, testDeadline);
}
static uint32_t addOperand(Model* model) {
@@ -714,9 +721,19 @@ static void mutateExecutionPreferenceTest(const sp<IDevice>& device, const Model
}
}
///////////////////////// DEADLINE /////////////////////////
static void deadlineTest(const sp<IDevice>& device, const Model& model) {
const std::string message = "deadlineTest: deadline not supported";
const auto noop = [](Model*) {};
validate(device, message, model, noop, ExecutionPreference::FAST_SINGLE_ANSWER,
/*testDeadline=*/true);
}
////////////////////////// ENTRY POINT //////////////////////////////
void validateModel(const sp<IDevice>& device, const Model& model) {
void validateModel(const sp<IDevice>& device, const Model& model,
bool prepareModelDeadlineSupported) {
mutateOperandTypeTest(device, model);
mutateOperandRankTest(device, model);
mutateOperandScaleTest(device, model);
@@ -732,6 +749,9 @@ void validateModel(const sp<IDevice>& device, const Model& model) {
addOperationInputTest(device, model);
addOperationOutputTest(device, model);
mutateExecutionPreferenceTest(device, model);
if (!prepareModelDeadlineSupported) {
deadlineTest(device, model);
}
}
} // namespace android::hardware::neuralnetworks::V1_3::vts::functional

ValidateRequest.cpp (View File)

@@ -43,7 +43,8 @@ static bool badTiming(Timing timing) {
// that use the request. Note that the request here is passed by value, and any
// mutation to the request does not leave this function.
static void validate(const sp<IPreparedModel>& preparedModel, const std::string& message,
Request request, const std::function<void(Request*)>& mutation) {
Request request, const std::function<void(Request*)>& mutation,
bool testDeadline = false) {
mutation(&request);
// We'd like to test both with timing requested and without timing
@@ -56,13 +57,18 @@ static void validate(const sp<IPreparedModel>& preparedModel, const std::string&
};
MeasureTiming measure = (hash & 1) ? MeasureTiming::YES : MeasureTiming::NO;
OptionalTimePoint deadline;
if (testDeadline) {
deadline.nanoseconds(std::numeric_limits<uint64_t>::max());
}
// asynchronous
{
SCOPED_TRACE(message + " [execute_1_3]");
sp<ExecutionCallback> executionCallback = new ExecutionCallback();
Return<ErrorStatus> executeLaunchStatus =
preparedModel->execute_1_3(request, measure, {}, executionCallback);
preparedModel->execute_1_3(request, measure, deadline, executionCallback);
ASSERT_TRUE(executeLaunchStatus.isOk());
ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(executeLaunchStatus));
@@ -80,7 +86,7 @@ static void validate(const sp<IPreparedModel>& preparedModel, const std::string&
SCOPED_TRACE(message + " [executeSynchronously_1_3]");
Return<void> executeStatus = preparedModel->executeSynchronously_1_3(
request, measure, {},
request, measure, deadline,
[](ErrorStatus error, const hidl_vec<OutputShape>& outputShapes,
const Timing& timing) {
ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, error);
@@ -92,7 +98,7 @@ static void validate(const sp<IPreparedModel>& preparedModel, const std::string&
// burst
// TODO(butlermichael): Check if we need to test burst in V1_3 if the interface remains V1_2.
{
if (!testDeadline) {
SCOPED_TRACE(message + " [burst]");
ASSERT_TRUE(nn::compliantWithV1_0(request));
@@ -152,11 +158,23 @@ static void removeOutputTest(const sp<IPreparedModel>& preparedModel, const Requ
}
}
///////////////////////// DEADLINE ////////////////////////////////////
static void deadlineTest(const sp<IPreparedModel>& preparedModel, const Request& request) {
const std::string message = "deadlineTest: deadline not supported";
const auto noop = [](Request*) {};
validate(preparedModel, message, request, noop, /*testDeadline=*/true);
}
///////////////////////////// ENTRY POINT //////////////////////////////////
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request) {
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request,
bool executionDeadlineSupported) {
removeInputTest(preparedModel, request);
removeOutputTest(preparedModel, request);
if (!executionDeadlineSupported) {
deadlineTest(preparedModel, request);
}
}
void validateRequestFailure(const sp<IPreparedModel>& preparedModel, const Request& request) {
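
Written out without the validate() indirection, the expectation that the new deadlineTest in ValidateRequest.cpp encodes is roughly this: a driver that reported no execution-deadline support must reject an otherwise valid request that carries a deadline with INVALID_ARGUMENT. The sketch below is illustrative only (it assumes the same includes and usings as ValidateRequest.cpp) and is not code from the change.

// Illustrative sketch: a deadline passed to a driver without execution-deadline
// support is expected to fail validation, mirroring the execute_1_3 assertions
// in validate() above.
static void expectDeadlineRejected(const sp<IPreparedModel>& preparedModel,
                                   const Request& request) {
    OptionalTimePoint deadline;
    deadline.nanoseconds(std::numeric_limits<uint64_t>::max());
    const sp<ExecutionCallback> callback = new ExecutionCallback();
    const Return<ErrorStatus> launchStatus =
            preparedModel->execute_1_3(request, MeasureTiming::NO, deadline, callback);
    ASSERT_TRUE(launchStatus.isOk());
    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(launchStatus));
    callback->wait();
    // The callback is expected to report the same validation error.
    ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, callback->getStatus());
}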

VtsHalNeuralnetworks.cpp (View File)

@@ -84,6 +84,7 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
<< std::endl;
GTEST_SKIP();
}
ASSERT_EQ(ErrorStatus::NONE, prepareReturnStatus);
ASSERT_NE(nullptr, preparedModel->get());
}
@@ -122,23 +123,27 @@ std::string printNeuralnetworksHidlTest(
INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);
// Forward declaration from ValidateModel.cpp
void validateModel(const sp<IDevice>& device, const Model& model);
void validateModel(const sp<IDevice>& device, const Model& model,
bool prepareModelDeadlineSupported);
// Forward declaration from ValidateRequest.cpp
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request);
void validateRequest(const sp<IPreparedModel>& preparedModel, const Request& request,
bool executionDeadlineSupported);
// Forward declaration from ValidateRequest.cpp
void validateRequestFailure(const sp<IPreparedModel>& preparedModel, const Request& request);
// Forward declaration from ValidateBurst.cpp
void validateBurst(const sp<IPreparedModel>& preparedModel, const V1_0::Request& request);
void validateEverything(const sp<IDevice>& device, const Model& model, const Request& request) {
validateModel(device, model);
void validateEverything(const sp<IDevice>& device, const Model& model, const Request& request,
std::pair<bool, bool> supportsDeadlines) {
const auto [prepareModelDeadlineSupported, executionDeadlineSupported] = supportsDeadlines;
validateModel(device, model, prepareModelDeadlineSupported);
// Create IPreparedModel.
sp<IPreparedModel> preparedModel;
createPreparedModel(device, model, &preparedModel);
if (preparedModel == nullptr) return;
validateRequest(preparedModel, request);
validateRequest(preparedModel, request, executionDeadlineSupported);
// TODO(butlermichael): Check if we need to test burst in V1_3 if the interface remains V1_2.
ASSERT_TRUE(nn::compliantWithV1_0(request));
@@ -146,10 +151,12 @@ void validateEverything(const sp<IDevice>& device, const Model& model, const Req
validateBurst(preparedModel, request10);
}
void validateFailure(const sp<IDevice>& device, const Model& model, const Request& request) {
void validateFailure(const sp<IDevice>& device, const Model& model, const Request& request,
std::pair<bool, bool> supportsDeadlines) {
const bool prepareModelDeadlineSupported = supportsDeadlines.first;
// TODO: Should this always succeed?
// What if the invalid input is part of the model (i.e., a parameter).
validateModel(device, model);
validateModel(device, model, prepareModelDeadlineSupported);
// Create IPreparedModel.
sp<IPreparedModel> preparedModel;
@@ -163,9 +170,9 @@ TEST_P(ValidationTest, Test) {
const Model model = createModel(kTestModel);
const Request request = nn::convertToV1_3(createRequest(kTestModel));
if (kTestModel.expectFailure) {
validateFailure(kDevice, model, request);
validateFailure(kDevice, model, request, mSupportsDeadlines);
} else {
validateEverything(kDevice, model, request);
validateEverything(kDevice, model, request, mSupportsDeadlines);
}
}