Add quantization coupling test
am: 9226c1e910
am: 33661d5403

Change-Id: Ic13562b9b8d8a3159ac51ddaf76cf3bab26b2a69

Author: Lev Proleev
Date: 2019-11-26 09:52:57 -08:00
Committed by: android-build-merger

5 changed files with 160 additions and 52 deletions

View File

@@ -456,8 +456,7 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
     }
     // Execute and verify results.
-    EvaluatePreparedModel(preparedModel, testModel,
-                          /*testDynamicOutputShape=*/false);
+    EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
 }
 
 TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
@@ -519,8 +518,7 @@ TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
     }
     // Execute and verify results.
-    EvaluatePreparedModel(preparedModel, testModel,
-                          /*testDynamicOutputShape=*/false);
+    EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
 }
 
 TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
@@ -541,8 +539,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -566,8 +563,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -590,8 +586,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -615,8 +610,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -727,8 +721,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -752,8 +745,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -776,8 +768,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -801,8 +792,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -914,8 +904,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -937,8 +926,7 @@ TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
         saveModelToCache(model, modelCache, dataCache, &preparedModel);
         ASSERT_NE(preparedModel, nullptr);
         // Execute and verify results.
-        EvaluatePreparedModel(preparedModel, testModel,
-                              /*testDynamicOutputShape=*/false);
+        EvaluatePreparedModel(preparedModel, testModel, /*testKind=*/TestKind::GENERAL);
         // Check if prepareModelFromCache fails.
         preparedModel = nullptr;
         ErrorStatus status;
@@ -1082,8 +1070,7 @@ TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
             ASSERT_EQ(preparedModel, nullptr);
         } else {
             ASSERT_NE(preparedModel, nullptr);
-            EvaluatePreparedModel(preparedModel, testModelAdd,
-                                  /*testDynamicOutputShape=*/false);
+            EvaluatePreparedModel(preparedModel, testModelAdd, /*testKind=*/TestKind::GENERAL);
         }
     }
 }
@@ -1144,8 +1131,7 @@ TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
             ASSERT_EQ(preparedModel, nullptr);
         } else {
             ASSERT_NE(preparedModel, nullptr);
-            EvaluatePreparedModel(preparedModel, testModelAdd,
-                                  /*testDynamicOutputShape=*/false);
+            EvaluatePreparedModel(preparedModel, testModelAdd, /*testKind=*/TestKind::GENERAL);
         }
     }
 }
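The hunks above are a purely mechanical migration: every EvaluatePreparedModel call that passed the old /*testDynamicOutputShape=*/ bool now passes a TestKind value, because a bool cannot express the third mode (quantization coupling) that this change introduces. The following minimal, self-contained sketch is not part of the commit; it only illustrates the call-site shape before and after, with printf bodies standing in for the real harness logic.

    #include <cstdio>

    // Mirrors the TestKind values introduced by this commit (illustrative copy).
    enum class TestKind { GENERAL, DYNAMIC_SHAPE, QUANTIZATION_COUPLING };

    // Placeholder for the harness entry point; each branch just describes its mode.
    void Execute(TestKind kind) {
        switch (kind) {
            case TestKind::GENERAL:
                std::printf("fully specified output shapes, golden-data comparison\n");
                break;
            case TestKind::DYNAMIC_SHAPE:
                std::printf("unspecified/insufficient output shapes\n");
                break;
            case TestKind::QUANTIZATION_COUPLING:
                std::printf("run unsigned and signed quantized variants together\n");
                break;
        }
    }

    int main() {
        Execute(TestKind::GENERAL);                // was /*testDynamicOutputShape=*/false
        Execute(TestKind::DYNAMIC_SHAPE);          // was /*testDynamicOutputShape=*/true
        Execute(TestKind::QUANTIZATION_COUPLING);  // had no bool equivalent
        return 0;
    }

An enum also lets -Wswitch warn about any dispatch that forgets to handle a newly added kind, which a bool parameter cannot do.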

View File

@@ -79,6 +79,21 @@ struct TestConfig {
     Executor executor;
     MeasureTiming measureTiming;
     OutputType outputType;
+    // `reportSkipping` indicates if a test should print an info message in case
+    // it is skipped. The field is set to true by default and is set to false in
+    // quantization coupling tests to suppress skipping a test
+    bool reportSkipping;
+    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType)
+        : executor(executor),
+          measureTiming(measureTiming),
+          outputType(outputType),
+          reportSkipping(true) {}
+    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType,
+               bool reportSkipping)
+        : executor(executor),
+          measureTiming(measureTiming),
+          outputType(outputType),
+          reportSkipping(reportSkipping) {}
 };
 
 }  // namespace
@@ -219,7 +234,10 @@ static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
 }
 
 void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
-                           const TestConfig& testConfig) {
+                           const TestConfig& testConfig, bool* skipped = nullptr) {
+    if (skipped != nullptr) {
+        *skipped = false;
+    }
     // If output0 does not have size larger than one byte, we can not test with insufficient buffer.
     if (testConfig.outputType == OutputType::INSUFFICIENT &&
         !isOutputSizeGreaterThanOne(testModel, 0)) {
@@ -290,6 +308,12 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
     if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
         executionStatus == ErrorStatus::GENERAL_FAILURE) {
+        if (skipped != nullptr) {
+            *skipped = true;
+        }
+        if (!testConfig.reportSkipping) {
+            return;
+        }
         LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                      "execute model that it does not support.";
         std::cout << "[ ] Early termination of test because vendor service cannot "
@@ -343,44 +367,117 @@ void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestMo
 }
 
 void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
-                           bool testDynamicOutputShape) {
+                           TestKind testKind) {
     std::initializer_list<OutputType> outputTypesList;
     std::initializer_list<MeasureTiming> measureTimingList;
     std::initializer_list<Executor> executorList;
-    if (testDynamicOutputShape) {
-        outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
-        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
-        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
-    } else {
-        outputTypesList = {OutputType::FULLY_SPECIFIED};
-        measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
-        executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
+    switch (testKind) {
+        case TestKind::GENERAL: {
+            outputTypesList = {OutputType::FULLY_SPECIFIED};
+            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
+            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
+        } break;
+        case TestKind::DYNAMIC_SHAPE: {
+            outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
+            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
+            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
+        } break;
+        case TestKind::QUANTIZATION_COUPLING: {
+            LOG(FATAL) << "Wrong TestKind for EvaluatePreparedModel";
+            return;
+        } break;
     }
 
     for (const OutputType outputType : outputTypesList) {
         for (const MeasureTiming measureTiming : measureTimingList) {
             for (const Executor executor : executorList) {
-                const TestConfig testConfig = {.executor = executor,
-                                               .measureTiming = measureTiming,
-                                               .outputType = outputType};
+                const TestConfig testConfig(executor, measureTiming, outputType);
                 EvaluatePreparedModel(preparedModel, testModel, testConfig);
             }
         }
     }
 }
 
-void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
+void EvaluatePreparedCoupledModels(const sp<IPreparedModel>& preparedModel,
+                                   const TestModel& testModel,
+                                   const sp<IPreparedModel>& preparedCoupledModel,
+                                   const TestModel& coupledModel) {
+    std::initializer_list<OutputType> outputTypesList = {OutputType::FULLY_SPECIFIED};
+    std::initializer_list<MeasureTiming> measureTimingList = {MeasureTiming::NO,
+                                                              MeasureTiming::YES};
+    std::initializer_list<Executor> executorList = {Executor::ASYNC, Executor::SYNC,
+                                                    Executor::BURST};
+
+    for (const OutputType outputType : outputTypesList) {
+        for (const MeasureTiming measureTiming : measureTimingList) {
+            for (const Executor executor : executorList) {
+                const TestConfig testConfig(executor, measureTiming, outputType,
+                                            /*reportSkipping=*/false);
+                bool baseSkipped = false;
+                EvaluatePreparedModel(preparedModel, testModel, testConfig, &baseSkipped);
+                bool coupledSkipped = false;
+                EvaluatePreparedModel(preparedCoupledModel, coupledModel, testConfig,
+                                      &coupledSkipped);
+                ASSERT_EQ(baseSkipped, coupledSkipped);
+                if (baseSkipped) {
+                    LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
+                                 "execute model that it does not support.";
+                    std::cout << "[ ] Early termination of test because vendor service "
+                                 "cannot "
+                                 "execute model that it does not support."
+                              << std::endl;
+                    GTEST_SKIP();
+                }
+            }
+        }
+    }
+}
+
+void Execute(const sp<IDevice>& device, const TestModel& testModel, TestKind testKind) {
     Model model = createModel(testModel);
-    if (testDynamicOutputShape) {
+    if (testKind == TestKind::DYNAMIC_SHAPE) {
         makeOutputDimensionsUnspecified(&model);
     }
 
     sp<IPreparedModel> preparedModel;
-    createPreparedModel(device, model, &preparedModel);
-    if (preparedModel == nullptr) return;
-    EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
+    switch (testKind) {
+        case TestKind::GENERAL: {
+            createPreparedModel(device, model, &preparedModel);
+            if (preparedModel == nullptr) return;
+            EvaluatePreparedModel(preparedModel, testModel, TestKind::GENERAL);
+        } break;
+        case TestKind::DYNAMIC_SHAPE: {
+            createPreparedModel(device, model, &preparedModel);
+            if (preparedModel == nullptr) return;
+            EvaluatePreparedModel(preparedModel, testModel, TestKind::DYNAMIC_SHAPE);
+        } break;
+        case TestKind::QUANTIZATION_COUPLING: {
+            ASSERT_TRUE(testModel.hasQuant8AsymmOperands());
+            createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ false);
+            TestModel signedQuantizedModel = convertQuant8AsymmOperandsToSigned(testModel);
+            sp<IPreparedModel> preparedCoupledModel;
+            createPreparedModel(device, createModel(signedQuantizedModel), &preparedCoupledModel,
+                                /*reportSkipping*/ false);
+            // If we couldn't prepare a model with unsigned quantization, we must
+            // fail to prepare a model with signed quantization as well.
+            if (preparedModel == nullptr) {
+                ASSERT_EQ(preparedCoupledModel, nullptr);
+                // If we failed to prepare both of the models, we can safely skip
+                // the test.
+                LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
+                             "prepare model that it does not support.";
+                std::cout
+                        << "[ ] Early termination of test because vendor service cannot "
+                           "prepare model that it does not support."
+                        << std::endl;
+                GTEST_SKIP();
+            }
+            ASSERT_NE(preparedCoupledModel, nullptr);
+            EvaluatePreparedCoupledModels(preparedModel, testModel, preparedCoupledModel,
+                                          signedQuantizedModel);
+        } break;
+    }
 }
 
 void GeneratedTestBase::SetUp() {
@@ -403,12 +500,19 @@ class GeneratedTest : public GeneratedTestBase {};
 // Tag for the dynamic output shape tests
 class DynamicOutputShapeTest : public GeneratedTest {};
 
+// Tag for the dynamic output shape tests
+class DISABLED_QuantizationCouplingTest : public GeneratedTest {};
+
 TEST_P(GeneratedTest, Test) {
-    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/false);
+    Execute(kDevice, kTestModel, /*testKind=*/TestKind::GENERAL);
 }
 
 TEST_P(DynamicOutputShapeTest, Test) {
-    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/true);
+    Execute(kDevice, kTestModel, /*testKind=*/TestKind::DYNAMIC_SHAPE);
+}
+
+TEST_P(DISABLED_QuantizationCouplingTest, Test) {
+    Execute(kDevice, kTestModel, /*testKind=*/TestKind::QUANTIZATION_COUPLING);
 }
 
 INSTANTIATE_GENERATED_TEST(GeneratedTest,
@@ -417,4 +521,8 @@ INSTANTIATE_GENERATED_TEST(GeneratedTest,
 INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                            [](const TestModel& testModel) { return !testModel.expectFailure; });
 
+INSTANTIATE_GENERATED_TEST(DISABLED_QuantizationCouplingTest, [](const TestModel& testModel) {
+    return testModel.hasQuant8AsymmOperands() && testModel.operations.size() == 1;
+});
+
 }  // namespace android::hardware::neuralnetworks::V1_3::vts::functional
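The new EvaluatePreparedCoupledModels above encodes the coupling rule itself: the unsigned-quantized model and its signed-quantized counterpart must meet the same fate, either both executing or both being skipped, just as Execute's QUANTIZATION_COUPLING branch requires that both prepare or both fail to prepare. The standalone sketch below shows only that rule, with all HIDL/VTS plumbing stubbed out; Outcome and runModel are invented stand-ins, not harness APIs.

    #include <cassert>
    #include <cstdio>
    #include <string>

    // Outcome of preparing and executing one model variant against a driver.
    enum class Outcome { OK, SKIPPED };

    // Invented stand-in for "prepare the model, run it, and report whether the
    // driver bailed out because it does not support the model".
    Outcome runModel(const std::string& variant) {
        std::printf("running %s variant\n", variant.c_str());
        return Outcome::OK;  // pretend this driver supports both flavours
    }

    int main() {
        // The coupling test runs the TENSOR_QUANT8_ASYMM model and the same graph
        // rewritten with TENSOR_QUANT8_ASYMM_SIGNED, then requires an identical fate.
        const Outcome unsignedOutcome = runModel("quant8_asymm");
        const Outcome signedOutcome = runModel("quant8_asymm_signed");
        assert(unsignedOutcome == signedOutcome);  // both OK or both SKIPPED
        std::printf("coupled outcomes agree\n");
        return 0;
    }

Passing /*reportSkipping=*/false matters here because the decision to skip is deferred until both outcomes are known, so the coupling assertion can run first.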

View File

@@ -57,8 +57,19 @@ Model createModel(const test_helper::TestModel& testModel);
 void PrepareModel(const sp<IDevice>& device, const Model& model, sp<IPreparedModel>* preparedModel);
 
+enum class TestKind {
+    // Runs a test model and compares the results to a golden data
+    GENERAL,
+    // Same as GENERAL but sets dimensions for the output tensors to zeros
+    DYNAMIC_SHAPE,
+    // Tests if quantized model with TENSOR_QUANT8_ASYMM produces the same result
+    // (OK/SKIPPED/FAILED) as the model with all such tensors converted to
+    // TENSOR_QUANT8_ASYMM_SIGNED.
+    QUANTIZATION_COUPLING
+};
+
 void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel,
-                           const test_helper::TestModel& testModel, bool testDynamicOutputShape);
+                           const test_helper::TestModel& testModel, TestKind testKind);
 
 }  // namespace android::hardware::neuralnetworks::V1_3::vts::functional
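The QUANTIZATION_COUPLING comment above rests on a numeric fact about the two operand types: TENSOR_QUANT8_ASYMM stores uint8 values with a zero point in [0, 255], TENSOR_QUANT8_ASYMM_SIGNED stores int8 values with a zero point in [-128, 127], and shifting both the stored value and the zero point by -128 leaves the represented real value scale * (q - zeroPoint) unchanged. The sketch below shows only that arithmetic; it is not the commit's convertQuant8AsymmOperandsToSigned helper.

    #include <cassert>
    #include <cstdint>

    int main() {
        const float scale = 0.5f;

        // One value as stored by TENSOR_QUANT8_ASYMM (uint8, zero point in [0, 255]).
        const int32_t zeroPointUnsigned = 128;
        const uint8_t qUnsigned = 200;

        // The same value after the -128 shift used for TENSOR_QUANT8_ASYMM_SIGNED
        // (int8, zero point in [-128, 127]).
        const int32_t zeroPointSigned = zeroPointUnsigned - 128;
        const int8_t qSigned = static_cast<int8_t>(qUnsigned - 128);

        const float realUnsigned = scale * (static_cast<int32_t>(qUnsigned) - zeroPointUnsigned);
        const float realSigned = scale * (static_cast<int32_t>(qSigned) - zeroPointSigned);

        assert(realUnsigned == realSigned);  // 36.0f in both encodings
        return 0;
    }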

View File

@@ -37,7 +37,7 @@ using V1_1::ExecutionPreference;
 // internal helper function
 void createPreparedModel(const sp<IDevice>& device, const Model& model,
-                         sp<IPreparedModel>* preparedModel) {
+                         sp<IPreparedModel>* preparedModel, bool reportSkipping) {
     ASSERT_NE(nullptr, preparedModel);
     *preparedModel = nullptr;
@@ -74,6 +74,9 @@ void createPreparedModel(const sp<IDevice>& device, const Model& model,
     // can continue.
     if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
         ASSERT_EQ(nullptr, preparedModel->get());
+        if (!reportSkipping) {
+            return;
+        }
         LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot prepare "
                      "model that it does not support.";
         std::cout << "[ ] Early termination of test because vendor service cannot "

View File

@@ -47,7 +47,7 @@ std::string printNeuralnetworksHidlTest(
 // Create an IPreparedModel object. If the model cannot be prepared,
 // "preparedModel" will be nullptr instead.
 void createPreparedModel(const sp<IDevice>& device, const Model& model,
-                         sp<IPreparedModel>* preparedModel);
+                         sp<IPreparedModel>* preparedModel, bool reportSkipping = true);
 
 // Utility function to get PreparedModel from callback and downcast to V1_2.
 sp<IPreparedModel> getPreparedModel_1_3(const sp<implementation::PreparedModelCallback>& callback);
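The defaulted reportSkipping parameter declared above keeps every existing createPreparedModel call site compiling unchanged, while the quantization coupling test passes false so it can decide for itself what a null prepared model means once both variants have been prepared. A standalone sketch of that pattern follows; prepare and its arguments are invented for illustration and are not the harness API.

    #include <cstdio>

    // Invented stand-in for createPreparedModel: `prepared` is the out-parameter,
    // and the defaulted reportSkipping mirrors the declaration added above.
    void prepare(bool modelSupported, bool* prepared, bool reportSkipping = true) {
        *prepared = modelSupported;
        if (!modelSupported) {
            if (!reportSkipping) return;  // stay silent; the caller decides what a null result means
            std::printf("[ ] Early termination: driver cannot prepare this model\n");
        }
    }

    int main() {
        bool prepared = false;
        prepare(/*modelSupported=*/false, &prepared);                            // legacy call site: logs
        prepare(/*modelSupported=*/false, &prepared, /*reportSkipping=*/false);  // coupling test: silent
        std::printf("prepared=%d\n", prepared);
        return 0;
    }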