author     TreeHugger Robot <treehugger-gerrit@google.com>    2021-05-20 19:29:41 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>    2021-05-20 19:29:41 +0000
commit     b30ea668adc0eba4c1f9530e9c23444eb250db28 (patch)
tree       a5263225fce8b7d828c13195eb549bcce523b3b8
parent     4ba8529987d69afa524c86e74d80898748072103 (diff)
parent     990b5cf0fa562448f0590cf2650aa84cdc63f151 (diff)
download   android-nn-driver-b30ea668adc0eba4c1f9530e9c23444eb250db28.tar.gz
Merge "Revert "IVGCVSW-5090 Add android-nn-driver Support for Logical operators"" into sc-dev am: 990b5cf0faplatform-tools-33.0.1platform-tools-33.0.0platform-tools-32.0.0
Original change: https://googleplex-android-review.googlesource.com/c/platform/external/android-nn-driver/+/14647169 Change-Id: I3c20d8d572979616d5568b7593b989f2a8784c37
-rw-r--r--  1.3/HalPolicy.cpp        15
-rw-r--r--  1.3/HalPolicy.hpp         5
-rw-r--r--  ConversionUtils_1_3.hpp  73
-rw-r--r--  NnapiSupport.txt          3
4 files changed, 0 insertions, 96 deletions
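
For context, the revert below removes the three V1_3 LOGICAL_* dispatch cases from HalPolicy::ConvertOperation, the ConvertLogicalBinary helper in ConversionUtils_1_3.hpp, and the matching NnapiSupport.txt entries. The small standalone C++ sketch here only restates the dispatch mapping being dropped; the OperationType enum and DispatchLogicalOp function are stand-ins for illustration, not the driver's real V1_3::OperationType or armnn types shown in the diff.

// Standalone illustration only: stand-in enum modelling how the removed
// V1_3 LOGICAL_* cases were routed before this revert (see 1.3/HalPolicy.cpp below).
#include <iostream>
#include <string>

enum class OperationType { LOGICAL_AND, LOGICAL_NOT, LOGICAL_OR, OTHER };

// Returns the name of the conversion path each logical operation used to take.
std::string DispatchLogicalOp(OperationType type)
{
    switch (type)
    {
        case OperationType::LOGICAL_AND: return "ConvertLogicalBinary(LogicalAnd)";
        case OperationType::LOGICAL_NOT: return "ConvertElementwiseUnary(LogicalNot)";
        case OperationType::LOGICAL_OR:  return "ConvertLogicalBinary(LogicalOr)";
        default:                         return "not handled here";
    }
}

int main()
{
    std::cout << DispatchLogicalOp(OperationType::LOGICAL_AND) << "\n";
    std::cout << DispatchLogicalOp(OperationType::LOGICAL_NOT) << "\n";
    std::cout << DispatchLogicalOp(OperationType::LOGICAL_OR) << "\n";
    return 0;
}

In the real driver, LOGICAL_AND and LOGICAL_OR shared the ConvertLogicalBinary path (parameterised by armnn::LogicalBinaryOperation), while LOGICAL_NOT reused the existing ConvertElementwiseUnary path, as the removed lines below show.
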
diff --git a/1.3/HalPolicy.cpp b/1.3/HalPolicy.cpp
index 9333759..53b67c7 100644
--- a/1.3/HalPolicy.cpp
+++ b/1.3/HalPolicy.cpp
@@ -81,12 +81,6 @@ bool HalPolicy::ConvertOperation(const Operation& operation, const Model& model,
return ConvertComparison(operation, model, data, ComparisonOperation::LessOrEqual);
case V1_3::OperationType::LOCAL_RESPONSE_NORMALIZATION:
return ConvertLocalResponseNormalization(operation, model, data);
- case V1_3::OperationType::LOGICAL_AND:
- return ConvertLogicalBinary(operation, model, data, LogicalBinaryOperation::LogicalAnd);
- case V1_3::OperationType::LOGICAL_NOT:
- return ConvertElementwiseUnary(operation, model, data, UnaryOperation::LogicalNot);
- case V1_3::OperationType::LOGICAL_OR:
- return ConvertLogicalBinary(operation, model, data, LogicalBinaryOperation::LogicalOr);
case V1_3::OperationType::LOGISTIC:
return ConvertLogistic(operation, model, data);
case V1_3::OperationType::LOG_SOFTMAX:
@@ -320,15 +314,6 @@ bool HalPolicy::ConvertLocalResponseNormalization(const Operation& operation,
return ::ConvertLocalResponseNormalization<hal_1_3::HalPolicy>(operation, model, data);
}
-bool HalPolicy::ConvertLogicalBinary(const Operation& operation,
- const Model& model,
- ConversionData& data,
- armnn::LogicalBinaryOperation logicalOperation)
-{
- ALOGV("hal_1_3::HalPolicy::ConvertLogicalBinary()");
- return ::ConvertLogicalBinary<hal_1_3::HalPolicy>(operation, model, data, logicalOperation);
-}
-
bool HalPolicy::ConvertLogistic(const Operation& operation, const Model& model, ConversionData& data)
{
ALOGV("hal_1_3::HalPolicy::ConvertLogistic()");
diff --git a/1.3/HalPolicy.hpp b/1.3/HalPolicy.hpp
index f82a5ef..0eb5f4d 100644
--- a/1.3/HalPolicy.hpp
+++ b/1.3/HalPolicy.hpp
@@ -95,11 +95,6 @@ private:
const Model& model,
ConversionData& data);
- static bool ConvertLogicalBinary(const Operation& operation,
- const Model& model,
- ConversionData& data,
- armnn::LogicalBinaryOperation logicalOperation);
-
static bool ConvertLogistic(const Operation& operation, const Model& model, ConversionData& data);
static bool ConvertLogSoftmax(const Operation& operation, const Model& model, ConversionData& data);
diff --git a/ConversionUtils_1_3.hpp b/ConversionUtils_1_3.hpp
index 150735e..a7f00fc 100644
--- a/ConversionUtils_1_3.hpp
+++ b/ConversionUtils_1_3.hpp
@@ -153,79 +153,6 @@ bool ConvertFill(const HalOperation& operation, const HalModel& model, Conversio
template<typename HalPolicy,
typename HalOperation = typename HalPolicy::Operation,
typename HalModel = typename HalPolicy::Model>
-bool ConvertLogicalBinary(const HalOperation& operation,
- const HalModel& model,
- ConversionData& data,
- LogicalBinaryOperation logicalOperation)
-{
- using HalOperand = typename HalPolicy::Operand;
-
- ALOGV("HalPolicy::ConvertLogicalBinary()");
- ALOGV("logicalOperation = %s", GetLogicalBinaryOperationAsCString(logicalOperation));
-
- LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
- LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
-
- if (!(input0.IsValid() && input1.IsValid()))
- {
- return Fail("%s: Operation has invalid inputs", __func__);
- }
-
- const HalOperand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
- if (!output)
- {
- return Fail("%s: Could not read output 0", __func__);
- }
-
- const TensorInfo& inputInfo0 = input0.GetTensorInfo();
- const TensorInfo& inputInfo1 = input1.GetTensorInfo();
- const TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
-
- LogicalBinaryDescriptor descriptor(logicalOperation);
-
- bool isSupported = false;
-
- auto validateFunc = [&](const armnn::TensorInfo& outputInfo, bool& isSupported)
- {
- FORWARD_LAYER_SUPPORT_FUNC(__func__,
- IsLogicalBinarySupported,
- data.m_Backends,
- isSupported,
- inputInfo0,
- inputInfo1,
- outputInfo,
- descriptor);
- };
-
- if(!IsDynamicTensor(outputInfo))
- {
- validateFunc(outputInfo, isSupported);
- }
- else
- {
- isSupported = AreDynamicTensorsSupported();
- }
-
- if (!isSupported)
- {
- return false;
- }
-
- IConnectableLayer* layer = data.m_Network->AddLogicalBinaryLayer(descriptor);
- assert(layer != nullptr);
-
- bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
- if (!isReshapeSupported)
- {
- return false;
- }
-
- return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data, nullptr, validateFunc);
-}
-
-template<typename HalPolicy,
- typename HalOperation = typename HalPolicy::Operation,
- typename HalModel = typename HalPolicy::Model>
bool ConvertQuantizedLstm(const HalOperation& operation, const HalModel& model, ConversionData& data)
{
using HalOperand = typename HalPolicy::Operand;
diff --git a/NnapiSupport.txt b/NnapiSupport.txt
index e0400e1..2b6eaca 100644
--- a/NnapiSupport.txt
+++ b/NnapiSupport.txt
@@ -43,9 +43,6 @@ L2_POOL_2D (FLOAT32, QUANT8_ASYMM)
LESS (FLOAT32, INT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LESS_EQUAL (FLOAT32, INT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LOCAL_RESPONSE_NORMALIZATION (FLOAT32)
-LOGICAL_AND (BOOL8)
-LOGICAL_NOT (BOOL8)
-LOGICAL_OR (BOOL8)
LOGISTIC (FLOAT32, QUANT8_ASYMM, QUANT8_ASYMM_SIGNED)
LOG_SOFTMAX (FLOAT32)
LSTM (FLOAT32)