diff --git a/current.txt b/current.txt
index d4b14ba9c8..5383877e3c 100644
--- a/current.txt
+++ b/current.txt
@@ -446,11 +446,11 @@ dd1ec219f5d2e2b33c6c0bcb92e63bbedb36f7c716413462848f6b6ae74fc864 android.hardwar
 2b4a14661e6a38617b7dd0c6ebb66a56a90e564674ac7697a14cb8a0cab92b2f android.hardware.health.storage@1.0::types
 4880af120fc1640225abdc2c60bda6d79617d73484d5124913c7278af3b11e2d android.hardware.neuralnetworks@1.2::IBurstCallback
 19877e466ad8c6ed42b38050b77bd010cf7800ff365fdc8574f45bbfda03a758 android.hardware.neuralnetworks@1.2::IBurstContext
-96249c852dabeefa3a9496ecdfc44681a071c665bfbf88527bf775c88bf1ab1b android.hardware.neuralnetworks@1.2::IDevice
+dbe96a8ecf3c1f645585c27568464bc4db3c4b2d9a9624d88da606c59959afbe android.hardware.neuralnetworks@1.2::IDevice
 92714960d1a53fc2ec557302b41c7cc93d2636d8364a44bd0f85be0c92927ff8 android.hardware.neuralnetworks@1.2::IExecutionCallback
 83885d366f22ada42c00d8854f0b7e7ba4cf73ddf80bb0d8e168ce132cec57ea android.hardware.neuralnetworks@1.2::IPreparedModel
 e1c734d1545e1a4ae749ff1dd9704a8e594c59aea7c8363159dc258e93e0df3b android.hardware.neuralnetworks@1.2::IPreparedModelCallback
-730c74ee5a3dd61a73f150cf07653e4b928e413b0f228eb004541bfcc22ed245 android.hardware.neuralnetworks@1.2::types
+ba7e93fb136cabfde41ac1b035abd87a51f2c260cea89163984e4e9c69b55a5f android.hardware.neuralnetworks@1.2::types
 cf7a4ba516a638f9b82a249c91fb603042c2d9ca43fd5aad9cf6c0401ed2a5d7 android.hardware.nfc@1.2::INfc
 abf98c2ae08bf765db54edc8068e36d52eb558cff6706b6fd7c18c65a1f3fc18 android.hardware.nfc@1.2::types
 4cb252dc6372a874aef666b92a6e9529915aa187521a700f0789065c3c702ead android.hardware.power.stats@1.0::IPowerStats
diff --git a/neuralnetworks/1.2/IDevice.hal b/neuralnetworks/1.2/IDevice.hal
index b9fa38870e..43e2c563cc 100644
--- a/neuralnetworks/1.2/IDevice.hal
+++ b/neuralnetworks/1.2/IDevice.hal
@@ -75,6 +75,17 @@ interface IDevice extends @1.1::IDevice {
      */
    getType() generates (ErrorStatus status, DeviceType type);
 
+    /**
+     * Gets the capabilities of a driver.
+     *
+     * @return status Error status of the call, must be:
+     *                - NONE if successful
+     *                - DEVICE_UNAVAILABLE if driver is offline or busy
+     *                - GENERAL_FAILURE if there is an unspecified error
+     * @return capabilities Capabilities of the driver.
+     */
+    getCapabilities_1_2() generates (ErrorStatus status, Capabilities capabilities);
+
     /**
      * Gets information about extensions supported by the driver implementation.
      *
diff --git a/neuralnetworks/1.2/types.hal b/neuralnetworks/1.2/types.hal
index 918048bd3f..77e1eb3c2a 100644
--- a/neuralnetworks/1.2/types.hal
+++ b/neuralnetworks/1.2/types.hal
@@ -4582,6 +4582,39 @@ enum DeviceType : int32_t {
     ACCELERATOR = 4,
 };
 
+/**
+ * The capabilities of a driver.
+ *
+ * Performance of an operation comes from the type of its first operand.
+ * This represents performance for non extension operand types.
+ */
+struct Capabilities {
+    /**
+     * Driver performance when operating on float32 data but performing
+     * calculations with range and/or precision as low as that of the IEEE
+     * 754 16-bit floating-point format.
+     */
+    PerformanceInfo relaxedFloat32toFloat16PerformanceScalar;
+    PerformanceInfo relaxedFloat32toFloat16PerformanceTensor;
+
+    /**
+     * Driver performance when operating on a particular data type.
+     * In the case of float32 data, this is used when the calculations
+     * are not relaxed.
+     */
+    struct OperandPerformance {
+        OperandType type;
+        PerformanceInfo info;
+    };
+
+    /**
+     * Performance by operand type. Must be sorted by OperandType.
+     * If a particular OperandType is not present in operandPerformance,
+     * its performance is treated as { .execTime = FLT_MAX, .powerUsage = FLT_MAX }.
+     */
+    vec<OperandPerformance> operandPerformance;
+};
+
 /**
  * Describes one operation of the model's graph.
  */
diff --git a/neuralnetworks/1.2/vts/functional/BasicTests.cpp b/neuralnetworks/1.2/vts/functional/BasicTests.cpp
index 365a750fdb..61aa5716fe 100644
--- a/neuralnetworks/1.2/vts/functional/BasicTests.cpp
+++ b/neuralnetworks/1.2/vts/functional/BasicTests.cpp
@@ -25,7 +25,7 @@ namespace V1_2 {
 namespace vts {
 namespace functional {
 
-using V1_1::Capabilities;
+using V1_0::PerformanceInfo;
 
 // create device test
 TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
@@ -37,6 +37,31 @@ TEST_F(NeuralnetworksHidlTest, StatusTest) {
     EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
 }
 
+// initialization
+TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
+    using OperandPerformance = Capabilities::OperandPerformance;
+    Return<void> ret = device->getCapabilities_1_2([](ErrorStatus status,
+                                                      const Capabilities& capabilities) {
+        EXPECT_EQ(ErrorStatus::NONE, status);
+
+        auto isPositive = [](const PerformanceInfo& perf) {
+            return perf.execTime > 0.0f && perf.powerUsage > 0.0f;
+        };
+
+        EXPECT_TRUE(isPositive(capabilities.relaxedFloat32toFloat16PerformanceScalar));
+        EXPECT_TRUE(isPositive(capabilities.relaxedFloat32toFloat16PerformanceTensor));
+        const auto& opPerf = capabilities.operandPerformance;
+        EXPECT_TRUE(std::all_of(
+                opPerf.begin(), opPerf.end(),
+                [isPositive](const OperandPerformance& a) { return isPositive(a.info); }));
+        EXPECT_TRUE(std::is_sorted(opPerf.begin(), opPerf.end(),
+                                   [](const OperandPerformance& a, const OperandPerformance& b) {
+                                       return a.type < b.type;
+                                   }));
+    });
+    EXPECT_TRUE(ret.isOk());
+}
+
 // device version test
 TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
     Return<void> ret = device->getVersionString([](ErrorStatus status, const hidl_string& version) {
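
Note (not part of the patch): a minimal sketch of how a caller might consume the operandPerformance table introduced above, using the "must be sorted by OperandType" requirement to binary-search and falling back to the documented { FLT_MAX, FLT_MAX } default when a type is absent. The helper name lookupOperandPerformance is hypothetical, and the generated-header include path is assumed from standard HIDL codegen.

#include <android/hardware/neuralnetworks/1.2/types.h>

#include <algorithm>
#include <cfloat>

using ::android::hardware::neuralnetworks::V1_0::PerformanceInfo;
using ::android::hardware::neuralnetworks::V1_2::Capabilities;
using ::android::hardware::neuralnetworks::V1_2::OperandType;

// Hypothetical helper: returns the driver-reported performance for 'type',
// or the worst-case default when the driver did not list that operand type.
PerformanceInfo lookupOperandPerformance(const Capabilities& capabilities, OperandType type) {
    const auto& perf = capabilities.operandPerformance;
    // operandPerformance is required to be sorted by OperandType, so binary search is valid.
    auto it = std::lower_bound(perf.begin(), perf.end(), type,
                               [](const Capabilities::OperandPerformance& entry, OperandType t) {
                                   return entry.type < t;
                               });
    if (it != perf.end() && it->type == type) {
        return it->info;
    }
    // Absent entries are treated as { .execTime = FLT_MAX, .powerUsage = FLT_MAX }.
    PerformanceInfo worst;
    worst.execTime = FLT_MAX;
    worst.powerUsage = FLT_MAX;
    return worst;
}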
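
Note (not part of the patch): a hypothetical sketch of how a driver might build the Capabilities value it passes to the getCapabilities_1_2 callback. The function name, the performance numbers, and the set of listed operand types are illustrative only; the point being demonstrated is keeping operandPerformance sorted by OperandType, with unlisted types implicitly treated as worst-case by callers.

#include <android/hardware/neuralnetworks/1.2/types.h>

#include <algorithm>
#include <vector>

using ::android::hardware::neuralnetworks::V1_0::PerformanceInfo;
using ::android::hardware::neuralnetworks::V1_2::Capabilities;
using ::android::hardware::neuralnetworks::V1_2::OperandType;

Capabilities makeExampleCapabilities() {
    const PerformanceInfo kPerf = {1.0f, 1.0f};  // {execTime, powerUsage}: placeholder numbers

    Capabilities capabilities;
    capabilities.relaxedFloat32toFloat16PerformanceScalar = kPerf;
    capabilities.relaxedFloat32toFloat16PerformanceTensor = kPerf;

    // List only the operand types this fictional driver has measured; anything
    // not listed here is treated by callers as { FLT_MAX, FLT_MAX }.
    std::vector<Capabilities::OperandPerformance> entries = {
            {OperandType::TENSOR_FLOAT32, kPerf},
            {OperandType::TENSOR_QUANT8_ASYMM, kPerf},
    };
    // Defensive sort so the "must be sorted by OperandType" requirement always holds.
    std::sort(entries.begin(), entries.end(),
              [](const Capabilities::OperandPerformance& a,
                 const Capabilities::OperandPerformance& b) { return a.type < b.type; });
    capabilities.operandPerformance = entries;  // hidl_vec copies from std::vector
    return capabilities;
}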