diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
new file mode 100644
index 0000000000..e66507aa17
--- /dev/null
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/neuralnetworks/BnDevice.h>
+#include <android/binder_auto_utils.h>
+#include <nnapi/Result.h>
+#include <nnapi/Types.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+namespace aidl::android::hardware::neuralnetworks {
+
+// IDevice implementation that fails every request: getters report a device
+// with no capabilities, and allocate/prepareModel* always report failure.
+class InvalidDevice : public BnDevice {
+  public:
+    static std::shared_ptr<InvalidDevice> create();
+
+    InvalidDevice(Capabilities capabilities, const NumberOfCacheFiles& numberOfCacheFiles,
+                  std::vector<Extension> extensions, DeviceType deviceType,
+                  std::string versionString);
+
+    ndk::ScopedAStatus allocate(const BufferDesc& desc,
+                                const std::vector<IPreparedModelParcel>& preparedModels,
+                                const std::vector<BufferRole>& inputRoles,
+                                const std::vector<BufferRole>& outputRoles,
+                                DeviceBuffer* deviceBuffer) override;
+    ndk::ScopedAStatus getCapabilities(Capabilities* capabilities) override;
+    ndk::ScopedAStatus getNumberOfCacheFilesNeeded(NumberOfCacheFiles* numberOfCacheFiles) override;
+    ndk::ScopedAStatus getSupportedExtensions(std::vector<Extension>* extensions) override;
+    ndk::ScopedAStatus getSupportedOperations(const Model& model,
+                                              std::vector<bool>* supportedOperations) override;
+    ndk::ScopedAStatus getType(DeviceType* deviceType) override;
+    ndk::ScopedAStatus getVersionString(std::string* versionString) override;
+    ndk::ScopedAStatus prepareModel(
+            const Model& model, ExecutionPreference preference, Priority priority, int64_t deadline,
+            const std::vector<ndk::ScopedFileDescriptor>& modelCache,
+            const std::vector<ndk::ScopedFileDescriptor>& dataCache,
+            const std::vector<uint8_t>& token,
+            const std::shared_ptr<IPreparedModelCallback>& callback) override;
+    ndk::ScopedAStatus prepareModelFromCache(
+            int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
+            const std::vector<ndk::ScopedFileDescriptor>& dataCache,
+            const std::vector<uint8_t>& token,
+            const std::shared_ptr<IPreparedModelCallback>& callback) override;
+
+  private:
+    const Capabilities kCapabilities;
+    const NumberOfCacheFiles kNumberOfCacheFiles;
+    const std::vector<Extension> kExtensions;
+    const DeviceType kDeviceType;
+    const std::string kVersionString;
+};
+
+}  // namespace aidl::android::hardware::neuralnetworks
diff --git a/neuralnetworks/aidl/utils/src/InvalidDevice.cpp b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
new file mode 100644
index 0000000000..c9d995590f
--- /dev/null
+++ b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "InvalidDevice"
+
+#include "InvalidDevice.h"
+
+#include <aidl/android/hardware/neuralnetworks/BnBuffer.h>
+#include <aidl/android/hardware/neuralnetworks/BnDevice.h>
+#include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
+#include <android/binder_auto_utils.h>
+
+#include "Conversions.h"
+#include "Utils.h"
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <vector>
+
+namespace aidl::android::hardware::neuralnetworks {
+namespace {
+
+// Maps an ErrorStatus to a binder service-specific error (or OK for NONE).
+ndk::ScopedAStatus toAStatus(ErrorStatus errorStatus, const std::string& errorMessage) {
+    if (errorStatus == ErrorStatus::NONE) {
+        return ndk::ScopedAStatus::ok();
+    }
+    return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+            static_cast<int32_t>(errorStatus), errorMessage.c_str());
+}
+
+}  // namespace
+
+std::shared_ptr<InvalidDevice> InvalidDevice::create() {
+    // Worst possible performance, so this device is never preferred.
+    constexpr auto perf = PerformanceInfo{
+            .execTime = std::numeric_limits<float>::max(),
+            .powerUsage = std::numeric_limits<float>::max(),
+    };
+    auto capabilities = Capabilities{
+            .relaxedFloat32toFloat16PerformanceScalar = perf,
+            .relaxedFloat32toFloat16PerformanceTensor = perf,
+            .operandPerformance = {},
+            .ifPerformance = perf,
+            .whilePerformance = perf,
+    };
+    constexpr auto numberOfCacheFiles = NumberOfCacheFiles{
+            .numModelCache = 0,
+            .numDataCache = 0,
+    };
+    std::vector<Extension> extensions{};
+    constexpr auto deviceType = DeviceType::OTHER;
+    std::string versionString = "invalid";
+
+    return ndk::SharedRefBase::make<InvalidDevice>(std::move(capabilities), numberOfCacheFiles,
+                                                   std::move(extensions), deviceType,
+                                                   std::move(versionString));
+}
+
+InvalidDevice::InvalidDevice(Capabilities capabilities,
+                             const NumberOfCacheFiles& numberOfCacheFiles,
+                             std::vector<Extension> extensions, DeviceType deviceType,
+                             std::string versionString)
+    : kCapabilities(std::move(capabilities)),
+      kNumberOfCacheFiles(numberOfCacheFiles),
+      kExtensions(std::move(extensions)),
+      kDeviceType(deviceType),
+      kVersionString(std::move(versionString)) {}
+
+ndk::ScopedAStatus InvalidDevice::allocate(
+        const BufferDesc& /*desc*/, const std::vector<IPreparedModelParcel>& /*preparedModels*/,
+        const std::vector<BufferRole>& /*inputRoles*/,
+        const std::vector<BufferRole>& /*outputRoles*/, DeviceBuffer* /*deviceBuffer*/) {
+    return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
+}
+
+ndk::ScopedAStatus InvalidDevice::getCapabilities(Capabilities* capabilities) {
+    *capabilities = kCapabilities;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::getNumberOfCacheFilesNeeded(
+        NumberOfCacheFiles* numberOfCacheFiles) {
+    *numberOfCacheFiles = kNumberOfCacheFiles;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::getSupportedExtensions(std::vector<Extension>* extensions) {
+    *extensions = kExtensions;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::getSupportedOperations(const Model& model,
+                                                         std::vector<bool>* supportedOperations) {
+    if (const auto result = utils::validate(model); !result.ok()) {
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
+    }
+    // No operation is ever supported by the invalid device.
+    *supportedOperations = std::vector<bool>(model.main.operations.size(), false);
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::getType(DeviceType* deviceType) {
+    *deviceType = kDeviceType;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::getVersionString(std::string* versionString) {
+    *versionString = kVersionString;
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::prepareModel(
+        const Model& model, ExecutionPreference preference, Priority priority, int64_t deadline,
+        const std::vector<ndk::ScopedFileDescriptor>& modelCache,
+        const std::vector<ndk::ScopedFileDescriptor>& dataCache, const std::vector<uint8_t>& token,
+        const std::shared_ptr<IPreparedModelCallback>& callback) {
+    if (callback.get() == nullptr) {
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+                         "invalid callback passed to InvalidDevice::prepareModel");
+    }
+    // Validate all arguments, reporting failures through both the callback and
+    // the binder status, before unconditionally failing the preparation.
+    if (const auto result = utils::validate(model); !result.ok()) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
+    }
+    if (const auto result = utils::validate(preference); !result.ok()) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
+    }
+    if (const auto result = utils::validate(priority); !result.ok()) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
+    }
+    if (deadline < -1) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+                         "Invalid deadline " + std::to_string(deadline));
+    }
+    if (modelCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numModelCache)) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+                         "Invalid modelCache, size = " + std::to_string(modelCache.size()));
+    }
+    if (dataCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numDataCache)) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+                         "Invalid dataCache, size = " + std::to_string(dataCache.size()));
+    }
+    if (token.size() != static_cast<size_t>(IDevice::BYTE_SIZE_OF_CACHE_TOKEN)) {
+        callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+        return toAStatus(
+                ErrorStatus::INVALID_ARGUMENT,
+                "Invalid cache token, size = " + std::to_string(token.size()));
+    }
+    callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);  // invalid device never prepares
+    return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus InvalidDevice::prepareModelFromCache(
+        int64_t /*deadline*/, const std::vector<ndk::ScopedFileDescriptor>& /*modelCache*/,
+        const std::vector<ndk::ScopedFileDescriptor>& /*dataCache*/,
+        const std::vector<uint8_t>& /*token*/,
+        const std::shared_ptr<IPreparedModelCallback>& callback) {
+    callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);
+    return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks
diff --git a/neuralnetworks/aidl/utils/src/Service.cpp b/neuralnetworks/aidl/utils/src/Service.cpp
index 511de559a6..ac182a205e 100644
--- a/neuralnetworks/aidl/utils/src/Service.cpp
+++ b/neuralnetworks/aidl/utils/src/Service.cpp
@@ -16,6 +16,7 @@
 
 #include "Service.h"
 
+#include <AndroidVersionUtil.h>
 #include <aidl/android/hardware/neuralnetworks/IDevice.h>
 #include <android/binder_auto_utils.h>
 #include <android/binder_manager.h>
@@ -35,13 +36,23 @@ nn::GeneralResult<nn::SharedDevice> getDevice(const std::string& instanceName) {
     hal::utils::ResilientDevice::Factory makeDevice =
             [instanceName,
              name = std::move(fullName)](bool blocking) -> nn::GeneralResult<nn::SharedDevice> {
-        const auto& getService =
-                blocking ? AServiceManager_getService : AServiceManager_checkService;
+        std::add_pointer_t<AIBinder*(const char*)> getService;
+        if (blocking) {
+            // Prefer the non-ref-leaking waitForService when the API level allows it.
+            if (__builtin_available(android __NNAPI_AIDL_MIN_ANDROID_API__, *)) {
+                getService = AServiceManager_waitForService;
+            } else {
+                getService = AServiceManager_getService;
+            }
+        } else {
+            getService = AServiceManager_checkService;
+        }
+
         auto service = IDevice::fromBinder(ndk::SpAIBinder(getService(name.c_str())));
         if (service == nullptr) {
-            return NN_ERROR() << (blocking ? "AServiceManager_getService"
-                                           : "AServiceManager_checkService")
-                              << " returned nullptr";
+            return NN_ERROR()
+                   << (blocking ? "AServiceManager_waitForService (or AServiceManager_getService)"
+                                : "AServiceManager_checkService")
+                   << " returned nullptr";
         }
         ABinderProcess_startThreadPool();
         return Device::create(instanceName, std::move(service));
diff --git a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
index 0c3a19674e..ee7cf89d4f 100644
--- a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
@@ -94,7 +94,7 @@ void NeuralNetworksAidlTest::SetUp() {
 }
 
 static NamedDevice makeNamedDevice(const std::string& name) {
-    ndk::SpAIBinder binder(AServiceManager_getService(name.c_str()));
+    ndk::SpAIBinder binder(AServiceManager_waitForService(name.c_str()));
     return {name, IDevice::fromBinder(binder)};
 }