# Copyright (C) 2018-2025 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import pytest
import numpy as np
import torch

from pytorch_layer_test_class import PytorchLayerTest


@pytest.mark.parametrize("dtype", [pytest.param(np.int32, marks=pytest.mark.xfail(reason="item returns scalar and for i32 it returns i64")),
                                   np.int64,
                                   np.float32,
                                   pytest.param(np.float64, marks=pytest.mark.xfail(reason="item returns scalar and for f64 it returns f32"))])
@pytest.mark.parametrize("shape", [[], [1], [1, 1, 1]])
class TestItem(PytorchLayerTest):
    def _prepare_input(self):
        # Single random value cast to the requested dtype and reshaped to the tested shape.
        return [np.random.randn(1).astype(self.dtype).reshape(self.shape)]

    def create_model(self):
        class aten_item(torch.nn.Module):
            def forward(self, x: torch.Tensor):
                return x.item()

        ref_net = None

        # Return the model, the reference network (none here) and the traced op name.
        return aten_item(), ref_net, "aten::item"

    @pytest.mark.nightly
    @pytest.mark.precommit
    def test_item(self, ie_device, precision, ir_version, dtype, shape):
        self.dtype = dtype
        self.shape = shape
        # Dynamic shapes are not supported by the Squeeze implementation.
        self._test(*self.create_model(), ie_device, precision,
                   ir_version, dynamic_shapes=False)
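

# A minimal sketch of the behaviour behind the int32/float64 xfail markers above:
# torch.Tensor.item() returns a plain Python scalar, so the original element type
# is not preserved, which is why those cases are reported back as int64/float32
# (the reason given in the xfail marks). Guarded so it only runs when the file is
# executed directly, not during pytest collection.
if __name__ == "__main__":
    v = torch.tensor([7], dtype=torch.int32).item()
    print(type(v), v)    # <class 'int'> 7 -- int32 collapses to a Python int
    w = torch.tensor([1.5], dtype=torch.float64).item()
    print(type(w), w)    # <class 'float'> 1.5 -- float64 collapses to a Python float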