@@ -50,6 +50,7 @@ namespace test
 {
 namespace validation
 {
+using framework::dataset::make;
 namespace
 {
 
@@ -62,6 +63,11 @@ const auto NeonActivationFunctionsDataset = concat(datasets::ActivationFunctions
 
 /** Input data sets. */
 const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), NeonActivationFunctionsDataset), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto ActivationDatasetForPaddingAfterConfigure = combine(
+    make("InPlace", { false, true }),
+    NeonActivationFunctionsDataset,
+    make("AlphaBeta", { 0.5f })
+);
 
 template <typename T, ARM_COMPUTE_REQUIRES_TA(arm_compute::utils::traits::is_floating_point<T>::value)>
 void test_float_sqrt_boundary_value()
@@ -181,6 +187,8 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
 
 template <typename T>
 using NEActivationLayerFixture = ActivationValidationFixture<Tensor, Accessor, NEActivationLayer, T>;
+template <typename T>
+using NEActivationLayerWithPaddingFixture = ActivationWithPaddingValidationFixture<Tensor, Accessor, NEActivationLayer, T>;
 
 TEST_SUITE(Float)
 #ifdef ARM_COMPUTE_ENABLE_FP16
@@ -204,6 +212,25 @@ FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<half>, framework::Data
         framework::ARM_COMPUTE_PRINT_INFO();
     }
 }
+
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingFixture<half>, framework::DatasetMode::ALL,
+    combine(
+        make("Shape", TensorShape{ 7U, 7U, 17U, 2U }),
+        ActivationDatasetForPaddingAfterConfigure,
+        make("DataType", DataType::F16))
+)
+{
+    if(CPUInfo::get().has_fp16())
+    {
+        // Validate output
+        validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
+    }
+    else
+    {
+        ARM_COMPUTE_TEST_INFO("Device does not support fp16 vector operations. Test SKIPPED.");
+        framework::ARM_COMPUTE_PRINT_INFO();
+    }
+}
 TEST_SUITE_END() // FP16
 #endif /* ARM_COMPUTE_ENABLE_FP16 */
 
@@ -212,28 +239,45 @@ TEST_CASE(SqrtBoundaryValue, framework::DatasetMode::ALL)
 {
     test_float_sqrt_boundary_value<float>();
 }
-FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), ActivationDataset), framework::dataset::make("DataType",
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), ActivationDataset), make("DataType",
         DataType::F32)))
 
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
 }
+
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingFixture<float>, framework::DatasetMode::ALL,
+    combine(
+        make("Shape", TensorShape{ 7U, 7U, 17U, 2U }),
+        ActivationDatasetForPaddingAfterConfigure,
+        make("DataType", DataType::F32))
+)
+{
+    validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
+}
 // Run only on SME Devices to stress Logistic SME kernel
 #ifdef ARM_COMPUTE_ENABLE_SME2
 TEST_SUITE(SME)
-const auto LogsisticDataset = combine(framework::dataset::make("InPlace", { false }), framework::dataset::make("Function", ActivationLayerInfo::ActivationFunction::LOGISTIC), framework::dataset::make("AlphaBeta", { 1.f }));
-FIXTURE_DATA_TEST_CASE(RunLogistic5D, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(datasets::Tiny5dShapes(), LogsisticDataset, framework::dataset::make("DataType",
+const auto LogisticDataset = combine(make("InPlace", { false }), make("Function", ActivationLayerInfo::ActivationFunction::LOGISTIC), make("AlphaBeta", { 1.f }));
+FIXTURE_DATA_TEST_CASE(RunLogistic5D, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(datasets::Tiny5dShapes(), LogisticDataset, make("DataType",
         DataType::F32)))
 
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
 }
 
-FIXTURE_DATA_TEST_CASE(RunLogisticSME, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(datasets::LogisticSMEStressShapesFp32(), LogsisticDataset, framework::dataset::make("DataType",
+FIXTURE_DATA_TEST_CASE(RunLogisticSME, NEActivationLayerFixture<float>, framework::DatasetMode::ALL, combine(datasets::LogisticSMEStressShapesFp32(), LogisticDataset, make("DataType",
         DataType::F32)))
 
+{
+    // Validate output
+    validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
+}
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingFixture<float>, framework::DatasetMode::ALL,
+    combine(datasets::LogisticSMEStressShapesFp32(), LogisticDataset, make("DataType", DataType::F32)))
+
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::relative_tolerance(_data_type, _function), 0.f, helper::absolute_tolerance(_data_type, _function));
@@ -245,6 +289,8 @@ TEST_SUITE_END() // Float
 
 template <typename T>
 using NEActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
+template <typename T>
+using NEActivationLayerWithPaddingQuantizedFixture = ActivationWithPaddingValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
 
 /** Input data sets. */
 const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
@@ -263,6 +309,13 @@ const auto QuantizedActivationFunctionsDataset = framework::dataset::make("Activ
 const auto QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }),
                                                         concat(QuantizedActivationFunctionsDataset, framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto QuantizedActivationDatasetForPaddingAfterConfigure = combine(
+    make("InPlace", { false }),
+    concat(QuantizedActivationFunctionsDataset,
+        make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH)
+    ),
+    make("AlphaBeta", { 0.5f })
+);
 
 TEST_SUITE(Quantized)
 TEST_SUITE(QASYMM8)
@@ -274,6 +327,17 @@ FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<uint8_t>, fra
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
 }
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingQuantizedFixture<uint8_t>, framework::DatasetMode::ALL,
+    combine(
+        make("Shape", TensorShape{ 7U, 7U, 17U, 2U }),
+        QuantizedActivationDatasetForPaddingAfterConfigure,
+        make("DataType", DataType::QASYMM8),
+        make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) }
+    )))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
+}
 TEST_SUITE_END() // QASYMM8
 
 TEST_SUITE(QASYMM8_SIGNED)
@@ -285,6 +349,17 @@ FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int8_t>, fram
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
 }
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingQuantizedFixture<int8_t>, framework::DatasetMode::ALL,
+    combine(
+        make("Shape", TensorShape{ 7U, 7U, 17U, 2U }),
+        QuantizedActivationDatasetForPaddingAfterConfigure,
+        make("DataType", DataType::QASYMM8_SIGNED),
+        make("QuantizationInfo", { QuantizationInfo(0.5f, 10.0f) }
+    )))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
+}
 TEST_SUITE_END() // QASYMM8_SIGNED
 
 /** Input data sets. */
@@ -297,6 +372,12 @@ const auto Int16QuantizedActivationFunctionsDataset = framework::dataset::make("
 const auto Int16QuantizedActivationDataset = combine(combine(framework::dataset::make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset),
                                                      framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
 
+const auto Int16QuantizedActivationDatasetForPaddingAfterConfigure = combine(
+    make("InPlace", { false }),
+    Int16QuantizedActivationFunctionsDataset,
+    make("AlphaBeta", { 0.5f })
+);
+
 TEST_SUITE(QSYMM16)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::ALL, combine(combine(combine(datasets::SmallShapes(), Int16QuantizedActivationDataset),
                                                                                                                   framework::dataset::make("DataType",
@@ -306,6 +387,17 @@ FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int16_t>, fra
     // Validate output
     validate(Accessor(_target), _reference, tolerance_qsymm16);
 }
+FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingQuantizedFixture<int16_t>, framework::DatasetMode::ALL,
+    combine(
+        make("Shape", TensorShape{ 7U, 7U, 17U, 2U }),
+        Int16QuantizedActivationDatasetForPaddingAfterConfigure,
+        make("DataType", DataType::QSYMM16),
+        make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) }))
+)
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance_qsymm16);
+}
 TEST_SUITE_END() // QSYMM16
 TEST_SUITE_END() // Quantized
 
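Note: the new PaddingAfterConfigure cases rely on ActivationWithPaddingValidationFixture and its quantized counterpart, whose implementations are not shown in this diff. Below is a minimal standalone sketch of the scenario those fixtures are assumed to exercise: padding is extended on the tensors only after NEActivationLayer::configure(), so the kernel window computed at configure time must still be valid for the padded buffers. The shape, activation function, and PaddingSize(1) here are illustrative choices, not values taken from the fixture.

// Minimal sketch, not the fixture itself: configure first, pad afterwards,
// then allocate and run.
#include "arm_compute/core/TensorInfo.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/NEON/functions/NEActivationLayer.h"
#include "arm_compute/runtime/Tensor.h"

using namespace arm_compute;

int main()
{
    Tensor src, dst;
    src.allocator()->init(TensorInfo(TensorShape(7U, 7U, 17U, 2U), 1, DataType::F32));
    dst.allocator()->init(TensorInfo(TensorShape(7U, 7U, 17U, 2U), 1, DataType::F32));

    NEActivationLayer act;
    act.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));

    // Padding is extended only *after* configure(), mirroring the
    // "PaddingAfterConfigure" naming; PaddingSize(1) is an arbitrary choice.
    src.info()->extend_padding(PaddingSize(1));
    dst.info()->extend_padding(PaddingSize(1));

    src.allocator()->allocate();
    dst.allocator()->allocate();

    act.run();
    return 0;
}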