 
 import chip.clusters as Clusters
 from chip.clusters import Attribute
-from matter_testing_support.matter_testing import (MatterBaseTest, async_test_body, get_accepted_endpoints_for_test, has_attribute,
-                                                   has_cluster, has_feature, per_endpoint_test, per_node_test)
+from matter_testing_support.matter_testing import (MatterBaseTest, MatterTestConfig, async_test_body, has_attribute, has_cluster, has_feature,
+                                                   run_if_endpoint_matches, run_on_singleton_matching_endpoint, should_run_test_on_endpoint)
+from typing import Optional
 from mobly import asserts
 from MockTestRunner import MockTestRunner
 
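For orientation, a minimal sketch of how the renamed decorator is applied. The test class and method here are illustrative only; the decorator and predicate names come from the import above:

```python
class TC_Example(MatterBaseTest):
    # Runs only when the endpoint selected by the test config has the
    # OnOff cluster; otherwise the framework reports the test as skipped.
    @run_if_endpoint_matches(has_cluster(Clusters.OnOff))
    async def test_example(self):
        pass
```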
@@ -127,107 +128,101 @@ async def test_endpoints(self):
         all_endpoints = await self.default_controller.Read(self.dut_node_id, [()])
         all_endpoints = list(all_endpoints.attributes.keys())
 
-        msg = "Unexpected endpoint list returned"
-
-        endpoints = await get_accepted_endpoints_for_test(self, has_onoff)
-        asserts.assert_equal(endpoints, all_endpoints, msg)
+        msg = "Unexpected evaluation of should_run_test_on_endpoint"
+        for e in all_endpoints:
+            self.matter_test_config.endpoint = e
+            should_run = await should_run_test_on_endpoint(self, has_onoff)
+            asserts.assert_true(should_run, msg)
 
-        endpoints = await get_accepted_endpoints_for_test(self, has_onoff_onoff)
-        asserts.assert_equal(endpoints, all_endpoints, msg)
+            should_run = await should_run_test_on_endpoint(self, has_onoff_onoff)
+            asserts.assert_true(should_run, msg)
 
-        endpoints = await get_accepted_endpoints_for_test(self, has_onoff_ontime)
-        asserts.assert_equal(endpoints, [], msg)
+            should_run = await should_run_test_on_endpoint(self, has_onoff_ontime)
+            asserts.assert_false(should_run, msg)
 
-        endpoints = await get_accepted_endpoints_for_test(self, has_timesync)
-        asserts.assert_equal(endpoints, [], msg)
+            should_run = await should_run_test_on_endpoint(self, has_timesync)
+            asserts.assert_false(should_run, msg)
 
-        endpoints = await get_accepted_endpoints_for_test(self, has_timesync_utc)
-        asserts.assert_equal(endpoints, [], msg)
-
-    # This test should cause an assertion because it has a pics_ method
-    @per_node_test
-    async def test_whole_node_with_pics(self):
-        pass
-
-    # This method returns the top level pics for test_whole_node_with_pics
-    # It is used to test that test_whole_node_with_pics will fail since you can't have a whole node test gated on a PICS.
-    def pics_whole_node_with_pics(self):
-        return ['EXAMPLE.S']
+            should_run = await should_run_test_on_endpoint(self, has_timesync_utc)
+            asserts.assert_false(should_run, msg)
 
     # This test should cause an assertion because it has a pics_ method
-    @per_endpoint_test(has_cluster(Clusters.OnOff))
-    async def test_per_endpoint_with_pics(self):
+    @run_if_endpoint_matches(has_cluster(Clusters.OnOff))
+    async def test_endpoint_with_pics(self):
         pass
 
-    # This method returns the top level pics for test_per_endpoint_with_pics
-    # It is used to test that test_per_endpoint_with_pics will fail since you can't have a per endpoint test gated on a PICS.
-    def pics_per_endpoint_with_pics(self):
+    # This method returns the top level pics for test_endpoint_with_pics
+    # It is used to test that test_endpoint_with_pics will fail since you can't have a per endpoint test gated on a PICS.
+    def pics_endpoint_with_pics(self):
         return ['EXAMPLE.S']
 
-    # This test should be run once
-    @per_node_test
-    async def test_whole_node_ok(self):
-        pass
-
     # This test should be run once per endpoint
-    @per_endpoint_test(has_cluster(Clusters.OnOff))
+    @run_if_endpoint_matches(has_cluster(Clusters.OnOff))
     async def test_endpoint_cluster_yes(self):
         pass
 
     # This test should be skipped since this cluster isn't on any endpoint
-    @per_endpoint_test(has_cluster(Clusters.TimeSynchronization))
+    @run_if_endpoint_matches(has_cluster(Clusters.TimeSynchronization))
     async def test_endpoint_cluster_no(self):
         pass
 
     # This test should be run once per endpoint
-    @per_endpoint_test(has_attribute(Clusters.OnOff.Attributes.OnOff))
+    @run_if_endpoint_matches(has_attribute(Clusters.OnOff.Attributes.OnOff))
     async def test_endpoint_attribute_yes(self):
         pass
 
     # This test should be skipped since this attribute isn't on the supported cluster
-    @per_endpoint_test(has_attribute(Clusters.OnOff.Attributes.OffWaitTime))
+    @run_if_endpoint_matches(has_attribute(Clusters.OnOff.Attributes.OffWaitTime))
     async def test_endpoint_attribute_supported_cluster_no(self):
         pass
 
     # This test should be skipped since this attribute is part of an unsupported cluster
-    @per_endpoint_test(has_attribute(Clusters.TimeSynchronization.Attributes.Granularity))
+    @run_if_endpoint_matches(has_attribute(Clusters.TimeSynchronization.Attributes.Granularity))
     async def test_endpoint_attribute_unsupported_cluster_no(self):
         pass
 
     # This test should be run once per endpoint
-    @per_endpoint_test(has_feature(Clusters.OnOff, Clusters.OnOff.Bitmaps.Feature.kLighting))
+    @run_if_endpoint_matches(has_feature(Clusters.OnOff, Clusters.OnOff.Bitmaps.Feature.kLighting))
     async def test_endpoint_feature_yes(self):
         pass
 
     # This test should be skipped since this attribute is part of an unsupported cluster
-    @per_endpoint_test(has_feature(Clusters.TimeSynchronization, Clusters.TimeSynchronization.Bitmaps.Feature.kNTPClient))
+    @run_if_endpoint_matches(has_feature(Clusters.TimeSynchronization, Clusters.TimeSynchronization.Bitmaps.Feature.kNTPClient))
     async def test_endpoint_feature_unsupported_cluster_no(self):
         pass
 
     # This test should be run since both are present
-    @per_endpoint_test(has_attribute(Clusters.OnOff.Attributes.OnOff) and has_cluster(Clusters.OnOff))
+    @run_if_endpoint_matches(has_attribute(Clusters.OnOff.Attributes.OnOff) and has_cluster(Clusters.OnOff))
    async def test_endpoint_boolean_yes(self):
         pass
 
     # This test should be skipped since we have an OnOff cluster, but no Time sync
-    @per_endpoint_test(has_cluster(Clusters.OnOff) and has_cluster(Clusters.TimeSynchronization))
+    @run_if_endpoint_matches(has_cluster(Clusters.OnOff) and has_cluster(Clusters.TimeSynchronization))
     async def test_endpoint_boolean_no(self):
         pass
 
-    @per_endpoint_test(has_cluster(Clusters.OnOff))
+    @run_if_endpoint_matches(has_cluster(Clusters.OnOff))
     async def test_fail_on_ep0(self):
         if self.matter_test_config.endpoint == 0:
             asserts.fail("Expected failure")
 
-    @per_endpoint_test(has_cluster(Clusters.OnOff))
+    @run_if_endpoint_matches(has_cluster(Clusters.OnOff))
     async def test_fail_on_ep1(self):
         if self.matter_test_config.endpoint == 1:
             asserts.fail("Expected failure")
 
-    @per_node_test
-    async def test_fail_on_whole_node(self):
+    @run_on_singleton_matching_endpoint(has_cluster(Clusters.OnOff))
+    async def test_run_on_singleton_matching_endpoint(self):
+        pass
+
+    @run_on_singleton_matching_endpoint(has_cluster(Clusters.OnOff))
+    async def test_run_on_singleton_matching_endpoint_failure(self):
         asserts.fail("Expected failure")
 
+    @run_on_singleton_matching_endpoint(has_attribute(Clusters.OnOff.Attributes.OffWaitTime))
+    async def test_no_run_on_singleton_matching_endpoint(self):
+        pass
+
 
 def main():
     failures = []
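Because the new decorators evaluate the single endpoint pinned in the test config rather than iterating endpoints themselves, the harness drives each test once per endpoint by rewriting the config between runs. A minimal sketch of that driver pattern, assuming the MockTestRunner API used elsewhere in this file (test_runner, read_response, and hooks constructed as in main()):

```python
# Sketch: mirrors the loop added to run_check() in the hunk below.
def run_on_each_endpoint(test_runner, read_response, hooks, endpoints=(0, 1)):
    results = []
    for e in endpoints:
        # Pin the endpoint under test, then execute the mocked test once.
        test_runner.set_test_config(MatterTestConfig(endpoint=e))
        results.append(test_runner.run_test_with_mock_read(read_response, hooks))
    return results
```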
@@ -249,82 +244,129 @@ def main():
     if not ok:
         failures.append("Test case failure: test_endpoints")
 
-    test_name = 'test_whole_node_with_pics'
-    test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
-    ok = test_runner.run_test_with_mock_read(read_resp, hooks)
-    if ok:
-        failures.append(f"Did not get expected test assertion on {test_name}")
-
-    test_name = 'test_per_endpoint_with_pics'
+    test_name = 'test_endpoint_with_pics'
     test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
     ok = test_runner.run_test_with_mock_read(read_resp, hooks)
     if ok:
         failures.append(f"Did not get expected test assertion on {test_name}")
 
     # Test should run once for the whole node, regardless of the number of endpoints
-    def run_check(test_name: str, read_response: Attribute.AsyncReadTransaction.ReadResponse, expected_runs: int, expect_skip: bool) -> None:
+    def run_check(test_name: str, read_response: Attribute.AsyncReadTransaction.ReadResponse, expect_skip: bool) -> None:
         nonlocal failures
         test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
         hooks = DecoratorTestRunnerHooks()
-        ok = test_runner.run_test_with_mock_read(read_response, hooks)
-        started_ok = len(hooks.started) == expected_runs
-        skipped_ok = (hooks.skipped != []) == expect_skip
-        stopped_ok = hooks.stopped == expected_runs
+        num_endpoints = 2
+        for e in [0, 1]:
+            test_runner.set_test_config(MatterTestConfig(endpoint=e))
+            ok = test_runner.run_test_with_mock_read(read_response, hooks)
+        started_ok = len(hooks.started) == num_endpoints
+        expected_num_skips = 2 if expect_skip else 0
+        skipped_ok = len(hooks.skipped) == expected_num_skips
+        stopped_ok = hooks.stopped == num_endpoints
         if not ok or not started_ok or not skipped_ok or not stopped_ok:
             failures.append(
-                f'Expected {expected_runs} run of {test_name}, skips expected: {expect_skip}. Runs: {hooks.started}, skips: {hooks.skipped} stops: {hooks.stopped}')
-
-    def check_once_per_node(test_name: str):
-        run_check(test_name, get_clusters([0]), 1, False)
-        run_check(test_name, get_clusters([0, 1]), 1, False)
+                f'Expected {num_endpoints} runs of {test_name}, skips expected: {expect_skip}. Runs: {hooks.started}, skips: {hooks.skipped} stops: {hooks.stopped}')
 
     def check_once_per_endpoint(test_name: str):
-        run_check(test_name, get_clusters([0]), 1, False)
-        run_check(test_name, get_clusters([0, 1]), 2, False)
+        run_check(test_name, get_clusters([0, 1]), False)
 
-    def check_skipped(test_name: str):
-        run_check(test_name, get_clusters([0]), 1, True)
-        run_check(test_name, get_clusters([0, 1]), 1, True)
+    def check_all_skipped(test_name: str):
+        run_check(test_name, get_clusters([0, 1]), True)
 
-    check_once_per_node('test_whole_node_ok')
     check_once_per_endpoint('test_endpoint_cluster_yes')
-    check_skipped('test_endpoint_cluster_no')
+    check_all_skipped('test_endpoint_cluster_no')
     check_once_per_endpoint('test_endpoint_attribute_yes')
-    check_skipped('test_endpoint_attribute_supported_cluster_no')
-    check_skipped('test_endpoint_attribute_unsupported_cluster_no')
+    check_all_skipped('test_endpoint_attribute_supported_cluster_no')
+    check_all_skipped('test_endpoint_attribute_unsupported_cluster_no')
     check_once_per_endpoint('test_endpoint_feature_yes')
-    check_skipped('test_endpoint_feature_unsupported_cluster_no')
+    check_all_skipped('test_endpoint_feature_unsupported_cluster_no')
     check_once_per_endpoint('test_endpoint_boolean_yes')
-    check_skipped('test_endpoint_boolean_no')
+    check_all_skipped('test_endpoint_boolean_no')
 
     test_name = 'test_fail_on_ep0'
     test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
     read_resp = get_clusters([0, 1])
+    # fail on EP0, pass on EP1
+    test_runner.set_test_config(MatterTestConfig(endpoint=0))
     ok = test_runner.run_test_with_mock_read(read_resp, hooks)
     if ok:
         failures.append(f"Did not get expected test assertion on {test_name}")
-
-    test_name = 'test_fail_on_ep1'
-    test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
-    read_resp = get_clusters([0, 1])
+    test_runner.set_test_config(MatterTestConfig(endpoint=1))
     ok = test_runner.run_test_with_mock_read(read_resp, hooks)
-    if ok:
-        failures.append(f"Did not get expected test assertion on {test_name}")
+    if not ok:
+        failures.append(f"Unexpected failure on {test_name}")
 
     test_name = 'test_fail_on_ep1'
     test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
-    read_resp = get_clusters([0])
+    read_resp = get_clusters([0, 1])
+    # pass on EP0, fail on EP1
+    test_runner.set_test_config(MatterTestConfig(endpoint=0))
     ok = test_runner.run_test_with_mock_read(read_resp, hooks)
     if not ok:
         failures.append(f"Unexpected failure on {test_name}")
-
-    test_name = 'test_fail_on_whole_node'
-    test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
-    read_resp = get_clusters([0, 1])
+    test_runner.set_test_config(MatterTestConfig(endpoint=1))
     ok = test_runner.run_test_with_mock_read(read_resp, hooks)
     if ok:
         failures.append(f"Did not get expected test assertion on {test_name}")
 
+    def run_singleton_dynamic(test_name: str, cluster_list: list[int]) -> tuple[bool, DecoratorTestRunnerHooks]:
+        nonlocal failures
+        read_resp = get_clusters(cluster_list)
+        test_runner.set_test('TestDecorators.py', 'TestDecorators', test_name)
+        test_runner.set_test_config(MatterTestConfig(endpoint=2))
+        hooks = DecoratorTestRunnerHooks()
+        ok = test_runner.run_test_with_mock_read(read_resp, hooks)
+        # For all tests, ensure the endpoint was set back to its prior value
+        if test_runner.config.endpoint != 2:
+            failures.append(f"Dynamic test {test_name} with clusters {cluster_list} did not set endpoint back to prior")
+        # All tests should have a start and a stop
+        started_ok = len(hooks.started) == 1
+        stopped_ok = hooks.stopped == 1
+        if not started_ok or not stopped_ok:
+            failures.append(
+                f'Hooks failure on {test_name}, Runs: {hooks.started}, skips: {hooks.skipped} stops: {hooks.stopped}')
+        return ok, hooks
+
+    def expect_success_dynamic(test_name: str, cluster_list: list[int]):
+        ok, hooks = run_singleton_dynamic(test_name, cluster_list)
+        if not ok:
+            failures.append(f"Unexpected failure on {test_name} with cluster list {cluster_list}")
+        if hooks.skipped:
+            failures.append(f'Unexpected skip call on {test_name} with cluster list {cluster_list}')
+
+    def expect_failure_dynamic(test_name: str, cluster_list: list[int]):
+        ok, hooks = run_singleton_dynamic(test_name, cluster_list)
+        if ok:
+            failures.append(f"Unexpected success on {test_name} with cluster list {cluster_list}")
+        if hooks.skipped:
+            # We don't expect a skip call because the test actually failed.
+            failures.append(f'Skip called for {test_name} with cluster list {cluster_list}')
+
+    def expect_skip_dynamic(test_name: str, cluster_list: list[int]):
+        ok, hooks = run_singleton_dynamic(test_name, cluster_list)
+        if not ok:
+            failures.append(f"Unexpected failure on {test_name} with cluster list {cluster_list}")
+        if not hooks.skipped:
+            # We expect a skip call here because the predicate matches no endpoint.
+            failures.append(f'Skip not called for {test_name} with cluster list {cluster_list}')
+
+    test_name = 'test_run_on_singleton_matching_endpoint'
+    expect_success_dynamic(test_name, [0])
+    expect_success_dynamic(test_name, [1])
+    # expect failure because more than one endpoint matches
+    expect_failure_dynamic(test_name, [0, 1])
+
+    test_name = 'test_run_on_singleton_matching_endpoint_failure'
+    expect_failure_dynamic(test_name, [0])
+    expect_failure_dynamic(test_name, [1])
+    expect_failure_dynamic(test_name, [0, 1])
+
+    test_name = 'test_no_run_on_singleton_matching_endpoint'
+    # no failure, no matches, expect skips on all endpoints
+    expect_skip_dynamic(test_name, [0])
+    expect_skip_dynamic(test_name, [1])
+    expect_skip_dynamic(test_name, [0, 1])
+
     test_runner.Shutdown()
     print(
         f"Test of Decorators: test response incorrect: {len(failures)}")