diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/FencedExecutionResult.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/FencedExecutionResult.aidl
new file mode 100644
index 0000000000..7952b34632
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/FencedExecutionResult.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE.                           //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+//     the interface (from the latest frozen version), the build system will
+//     prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable FencedExecutionResult {
+  android.hardware.neuralnetworks.IFencedExecutionCallback callback;
+  @nullable ParcelFileDescriptor syncFence;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
index 3ca155009a..1f7cbe0e4b 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -34,7 +34,7 @@ package android.hardware.neuralnetworks;
 @VintfStability
 interface IPreparedModel {
   android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in boolean measureTiming, in long deadline, in long loopTimeoutDuration);
-  android.hardware.neuralnetworks.IFencedExecutionCallback executeFenced(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in boolean measureTiming, in long deadline, in long loopTimeoutDuration, in long duration, out @nullable ParcelFileDescriptor syncFence);
+  android.hardware.neuralnetworks.FencedExecutionResult executeFenced(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in boolean measureTiming, in long deadline, in long loopTimeoutDuration, in long duration);
   const long DEFAULT_LOOP_TIMEOUT_DURATION_NS = 2000000000;
   const long MAXIMUM_LOOP_TIMEOUT_DURATION_NS = 15000000000;
 }
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/FencedExecutionResult.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/FencedExecutionResult.aidl
new file mode 100644
index 0000000000..ba3be31f37
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/FencedExecutionResult.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.IFencedExecutionCallback;
+
+/**
+ * A result from running an asynchronous execution of a prepared model.
+ */
+@VintfStability
+parcelable FencedExecutionResult {
+    /**
+     * IFencedExecutionCallback can be used to query information like duration and error
+     * status when the execution is completed.
+     */
+    IFencedExecutionCallback callback;
+    /**
+     * The sync fence that will be signaled when the task is completed. The
+     * sync fence will be set to error if a critical error, e.g. hardware
+     * failure or kernel panic, occurs when doing execution.
+     */
+    @nullable ParcelFileDescriptor syncFence;
+}
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
index 2414a4a1b6..0240e3c0df 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -19,7 +19,7 @@ package android.hardware.neuralnetworks;
 import android.hardware.common.NativeHandle;
 import android.hardware.neuralnetworks.ErrorStatus;
 import android.hardware.neuralnetworks.ExecutionResult;
-import android.hardware.neuralnetworks.IFencedExecutionCallback;
+import android.hardware.neuralnetworks.FencedExecutionResult;
 import android.hardware.neuralnetworks.Request;
 
 /**
@@ -152,11 +152,8 @@ interface IPreparedModel {
      *                complete after all sync fences in waitFor are signaled. If the execution
      *                cannot be finished within the duration, the execution may be aborted. Passing
      *                -1 means the duration is omitted. Other negative values are invalid.
-     * @param out syncFence The sync fence that will be signaled when the task is completed. The
-     *                      sync fence will be set to error if a critical error, e.g. hardware
-     *                      failure or kernel panic, occurs when doing execution.
-     * @return The IFencedExecutionCallback can be used to query information like duration and error
-     *         status when the execution is completed.
+     * @return The FencedExecutionResult parcelable, containing IFencedExecutionCallback and the
+     *         sync fence.
      * @throws ServiceSpecificException with one of the following ErrorStatus values:
      *     - DEVICE_UNAVAILABLE if driver is offline or busy
      *     - GENERAL_FAILURE if there is an unspecified error
@@ -166,7 +163,7 @@
      *       deadline
      *     - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
      */
-    IFencedExecutionCallback executeFenced(in Request request, in ParcelFileDescriptor[] waitFor,
+    FencedExecutionResult executeFenced(in Request request, in ParcelFileDescriptor[] waitFor,
             in boolean measureTiming, in long deadline, in long loopTimeoutDuration,
-            in long duration, out @nullable ParcelFileDescriptor syncFence);
+            in long duration);
 }
diff --git a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
index 4beb828253..4eb704b6e0 100644
--- a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
@@ -571,33 +571,32 @@ void EvaluatePreparedModel(const std::shared_ptr<IDevice>& device,
         case Executor::FENCED: {
             SCOPED_TRACE("fenced");
             ErrorStatus result = ErrorStatus::NONE;
-            ndk::ScopedFileDescriptor syncFenceFd;
-            std::shared_ptr<IFencedExecutionCallback> fencedCallback;
+            FencedExecutionResult executionResult;
             auto ret = preparedModel->executeFenced(request, {}, testConfig.measureTiming,
                                                     kNoDeadline, loopTimeoutDuration, kNoDuration,
-                                                    &syncFenceFd, &fencedCallback);
+                                                    &executionResult);
             ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
                     << ret.getDescription();
             if (!ret.isOk()) {
                 result = static_cast<ErrorStatus>(ret.getServiceSpecificError());
                 executionStatus = result;
-            } else if (syncFenceFd.get() != -1) {
+            } else if (executionResult.syncFence.get() != -1) {
                 std::vector<ndk::ScopedFileDescriptor> waitFor;
-                auto dupFd = dup(syncFenceFd.get());
+                auto dupFd = dup(executionResult.syncFence.get());
                 ASSERT_NE(dupFd, -1);
                 waitFor.emplace_back(dupFd);
                 // If a sync fence is returned, try start another run waiting for the sync fence.
                 ret = preparedModel->executeFenced(request, waitFor, testConfig.measureTiming,
                                                    kNoDeadline, loopTimeoutDuration, kNoDuration,
-                                                   &syncFenceFd, &fencedCallback);
+                                                   &executionResult);
                 ASSERT_TRUE(ret.isOk());
-                waitForSyncFence(syncFenceFd.get());
+                waitForSyncFence(executionResult.syncFence.get());
             }
             if (result == ErrorStatus::NONE) {
-                ASSERT_NE(fencedCallback, nullptr);
+                ASSERT_NE(executionResult.callback, nullptr);
                 Timing timingFenced;
-                auto ret =
-                        fencedCallback->getExecutionInfo(&timing, &timingFenced, &executionStatus);
+                auto ret = executionResult.callback->getExecutionInfo(&timing, &timingFenced,
+                                                                      &executionStatus);
                 ASSERT_TRUE(ret.isOk());
             }
             break;
diff --git a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
index a37a0caa29..1929750d28 100644
--- a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
+++ b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
@@ -198,8 +198,8 @@ class InvalidPreparedModel : public BnPreparedModel {
                 static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
     }
     ndk::ScopedAStatus executeFenced(const Request&, const std::vector<ndk::ScopedFileDescriptor>&,
-                                     bool, int64_t, int64_t, int64_t, ndk::ScopedFileDescriptor*,
-                                     std::shared_ptr<IFencedExecutionCallback>*) override {
+                                     bool, int64_t, int64_t, int64_t,
+                                     FencedExecutionResult*) override {
         return ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
     }
@@ -893,25 +893,24 @@ class MemoryDomainExecutionTest
     ErrorStatus executeFenced(const std::shared_ptr<IPreparedModel>& preparedModel,
                               const Request& request) {
-        ndk::ScopedFileDescriptor syncFence;
-        std::shared_ptr<IFencedExecutionCallback> fencedCallback;
+        FencedExecutionResult executionResult;
         const auto ret = preparedModel->executeFenced(request, {}, false, kNoDeadline,
                                                       kOmittedTimeoutDuration, kNoDuration,
-                                                      &syncFence, &fencedCallback);
+                                                      &executionResult);
         if (!ret.isOk()) {
             EXPECT_EQ(ret.getExceptionCode(), EX_SERVICE_SPECIFIC);
             return static_cast<ErrorStatus>(ret.getServiceSpecificError());
         }
-        if (syncFence.get() != -1) {
-            waitForSyncFence(syncFence.get());
+        if (executionResult.syncFence.get() != -1) {
+            waitForSyncFence(executionResult.syncFence.get());
         }
-        EXPECT_NE(fencedCallback, nullptr);
+        EXPECT_NE(executionResult.callback, nullptr);
         ErrorStatus executionStatus = ErrorStatus::GENERAL_FAILURE;
         Timing time = kNoTiming;
         Timing timeFenced = kNoTiming;
         const auto retExecutionInfo =
-                fencedCallback->getExecutionInfo(&time, &timeFenced, &executionStatus);
+                executionResult.callback->getExecutionInfo(&time, &timeFenced, &executionStatus);
         EXPECT_TRUE(retExecutionInfo.isOk());
         EXPECT_EQ(time, kNoTiming);
         return executionStatus;
diff --git a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
index db8f429f13..3be4c1b97d 100644
--- a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
+++ b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
@@ -68,11 +68,10 @@ static void validate(const std::shared_ptr<IPreparedModel>& preparedModel,
     // fenced
     {
         SCOPED_TRACE(message + " [executeFenced]");
-        ndk::ScopedFileDescriptor syncFence;
-        std::shared_ptr<IFencedExecutionCallback> callback;
+        FencedExecutionResult executionResult;
        const auto executeStatus = preparedModel->executeFenced(request, {}, false, kNoDeadline,
                                                                 kOmittedTimeoutDuration,
-                                                                kNoDuration, &syncFence, &callback);
+                                                                kNoDuration, &executionResult);
         ASSERT_FALSE(executeStatus.isOk());
         ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
         ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
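
For reference, here is a minimal caller-side sketch of the new contract, written against the NDK AIDL backend in the same style as the VTS code above. It is illustrative only: the helper name runFenced, the exact include paths, and the libsync wait are assumptions rather than part of this change; only the executeFenced signature and the FencedExecutionResult fields come from the interface files modified here.

// Sketch only; assumes the generated NDK headers for this interface and a
// prepared model obtained elsewhere. Error handling is abbreviated.
#include <aidl/android/hardware/neuralnetworks/FencedExecutionResult.h>
#include <aidl/android/hardware/neuralnetworks/IPreparedModel.h>
#include <android/binder_auto_utils.h>
#include <memory>

using aidl::android::hardware::neuralnetworks::ErrorStatus;
using aidl::android::hardware::neuralnetworks::FencedExecutionResult;
using aidl::android::hardware::neuralnetworks::IPreparedModel;
using aidl::android::hardware::neuralnetworks::Request;
using aidl::android::hardware::neuralnetworks::Timing;

// Hypothetical helper: launch a fenced execution, wait for it, and query the
// driver-reported result. Passing -1 omits deadline, loop timeout and duration.
ndk::ScopedAStatus runFenced(const std::shared_ptr<IPreparedModel>& preparedModel,
                             const Request& request) {
    // The callback and the nullable sync fence now arrive together in a single
    // FencedExecutionResult out parameter, instead of a separate out fence plus
    // an IFencedExecutionCallback return value.
    FencedExecutionResult executionResult;
    auto status = preparedModel->executeFenced(request, /*waitFor=*/{},
                                               /*measureTiming=*/false, /*deadline=*/-1,
                                               /*loopTimeoutDuration=*/-1, /*duration=*/-1,
                                               &executionResult);
    if (!status.isOk()) return status;

    // A fence fd of -1 means the driver completed the work before returning.
    if (executionResult.syncFence.get() != -1) {
        // Block on the fence here, e.g. with sync_wait() from libsync (omitted).
    }

    // Once the fence has signaled, the callback reports timings and error status.
    Timing timingLaunched = {};
    Timing timingFenced = {};
    ErrorStatus errorStatus = ErrorStatus::GENERAL_FAILURE;
    return executionResult.callback->getExecutionInfo(&timingLaunched, &timingFenced,
                                                      &errorStatus);
}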