diff --git a/src/app/tests/suites/certification/Test_TC_CADMIN_1_15.yaml b/src/app/tests/suites/certification/Test_TC_CADMIN_1_15.yaml
deleted file mode 100644
index 72f93fe51ae3d4..00000000000000
--- a/src/app/tests/suites/certification/Test_TC_CADMIN_1_15.yaml
+++ /dev/null
@@ -1,316 +0,0 @@
-# Copyright (c) 2021 Project CHIP Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# Auto-generated scripts for harness use only, please review before automation. The endpoints and cluster names are currently set to default
-
-name:
-    4.1.15. [TC-CADMIN-1.15] Removing Fabrics from DUT and Fabric index
-    enumeration using ECM [DUT - Commissionee]
-
-PICS:
-    - CADMIN.S
-
-config:
-    nodeId: 0x12344321
-    cluster: "Basic Information"
-    endpoint: 0
-
-tests:
-    - label: "Precondition"
-      verification: |
-          Reset Devices to factory defaults
-      disabled: true
-
-    - label: "Step 1: TH_CR1 starts a commissioning process with DUT_CE"
-      PICS: CADMIN.S
-      verification: |
-          "1. Provision the device using TH_CR1 (Chip-tool as controller)
-          "
-      disabled: true
-
-    - label:
-          "Step 2: TH_CR1 opens a commissioning window on DUT_CE using a
-          commissioning timeout of PIXIT.CADMIN.CwDuration seconds using ECM"
-      PICS: CADMIN.S.C00.Rsp
-      verification: |
-          On TH_CR1 chip tool, open commissioning window using ECM
-
-          ./chip-tool pairing open-commissioning-window 1 1 180 1000 3840
-
-          Verify Manual pairing code on TH_CR1 (chip-tool) Log
-
-          [1635925713.966786][9695:9700] CHIP:SC: Success status report received. Session was established
-          [1635925713.966839][9695:9700] CHIP:IN: New secure session created for device 0x0000000000000001, key 33!!
-          [1635925713.966938][9695:9700] CHIP:CTL: OpenCommissioningWindow for device ID 1
-          [1635925713.972601][9695:9700] CHIP:DMG: ICR moving to [AddingComm]
-          [1635925713.972705][9695:9700] CHIP:DMG: ICR moving to [AddedComma]
-          [1635925713.972815][9695:9700] CHIP:IN: Prepared encrypted message 0xaaaad9b57d10 to 0x0000000000000001 of type 0x8 and protocolId (0, 1) on exchange 31056i with MessageCounter:0.
-          [1635925713.972876][9695:9700] CHIP:IN: Sending encrypted msg 0xaaaad9b57d10 with MessageCounter:0 to 0x0000000000000001 at monotonic time: 13449459 msec
-          [1635925713.973006][9695:9700] CHIP:DMG: ICR moving to [CommandSen]
-          [1635925713.973061][9695:9700] CHIP:CTL: Manual pairing code: [36217551633]
-          [1635925713.973120][9695:9700] CHIP:CTL: SetupQRCode: [MT:00000CQM00A7F87ZT10]
-          [1635925713.973178][9695:9700] CHIP:EM: Sending Standalone Ack for MessageCounter:1964916542 on exchange 31055i
-      disabled: true
-
-    - label: "Step 3: TH_CR2 starts a commissioning process with DUT_CE"
-      PICS: CADMIN.S
-      verification: |
-          On TH_CR2 using chip tool connect to the accessory
-
-
-          ./chip-tool pairing code 2 36217551633 --commissioner-name beta
-
-          Verify you got below message on TH_CR2 (chip-tool)
-          Device commissioning completed with success
-      disabled: true
-
-    - label:
-          "Step 4: TH_CR1 opens a commissioning window on DUT_CE using a
-          commissioning timeout of PIXIT.CADMIN.CwDuration seconds using ECM"
-      PICS: CADMIN.S.C00.Rsp
-      verification: |
-          On TH_CR1 chip tool, open commissioning window using ECM
-
-          ./chip-tool pairing open-commissioning-window 1 1 300 1000 3840
-
-          Verify Manual pairing code on TH_CR1 (chip-tool) Log
-
-          [1635925713.966786][9695:9700] CHIP:SC: Success status report received. Session was established
-          [1635925713.966839][9695:9700] CHIP:IN: New secure session created for device 0x0000000000000001, key 33!!
-          [1635925713.966938][9695:9700] CHIP:CTL: OpenCommissioningWindow for device ID 1
-          [1635925713.972601][9695:9700] CHIP:DMG: ICR moving to [AddingComm]
-          [1635925713.972705][9695:9700] CHIP:DMG: ICR moving to [AddedComma]
-          [1635925713.972815][9695:9700] CHIP:IN: Prepared encrypted message 0xaaaad9b57d10 to 0x0000000000000001 of type 0x8 and protocolId (0, 1) on exchange 31056i with MessageCounter:0.
-          [1635925713.972876][9695:9700] CHIP:IN: Sending encrypted msg 0xaaaad9b57d10 with MessageCounter:0 to 0x0000000000000001 at monotonic time: 13449459 msec
-          [1635925713.973006][9695:9700] CHIP:DMG: ICR moving to [CommandSen]
-          [1635925713.973061][9695:9700] CHIP:CTL: Manual pairing code: [36217551633]
-          [1635925713.973120][9695:9700] CHIP:CTL: SetupQRCode: [MT:00000CQM00A7F87ZT10]
-          [1635925713.973178][9695:9700] CHIP:EM: Sending Standalone Ack for MessageCounter:1964916542 on exchange 31055i
-      disabled: true
-
-    - label: "Step 5: TH_CR3 starts a commissioning process with DUT_CE"
-      PICS: CADMIN.S
-      verification: |
-          On TH_CR3 using chip tool connect to the accessory
-
-          ./chip-tool pairing code 3 36217551633 --commissioner-name gamma
-
-          Verify you got below message on TH_CR3 (chip-tool) side
-          Device commissioning completed with success
-      disabled: true
-
-    - label: "Step 6: TH_CR2 reads the list of Fabrics on DUT_CE"
-      PICS: OPCREDS.S.A0001
-      verification: |
-          On TH_CR2 using chip tool, read fabrics list
-
-          ./chip-tool operationalcredentials read fabrics 2 0 --fabric-filtered 0 --commissioner-name beta
-
-          Verify the list of Fabrics consists of FabricIndex 1, FabricIndex 2, FabricIndex 3 on TH_CR2 (chip-tool) log
-
-          CHIP:TOO: Endpoint: 0 Cluster: 0x0000_003E Attribute 0x0000_0001 DataVersion: 2455995193
-          CHIP:TOO: Fabrics: 3 entries
-          CHIP:TOO: [1]: {
-          CHIP:TOO: RootPublicKey: 04656F56DBDB677DC957028DFC0ED76709C72753C9194B117A483BDC07386BFD9529B68EB4448FBBA3964EFF37A56A8F461D348B0DAF3B56A75F3B94BF8209D36F
-          CHIP:TOO: VendorId: 65521
-          CHIP:TOO: FabricId: 1
-          CHIP:TOO: NodeId: 1
-          CHIP:TOO: Label:
-          CHIP:TOO: FabricIndex: 1
-          CHIP:TOO: }
-          CHIP:TOO: [2]: {
-          CHIP:TOO: RootPublicKey: 04F8F28D5D70A4510E0F72FBBA31369796C4206FF95D97B77C1BDFD0438A3BE43510631A1B915BE189323F4CC0E015480192654D8170F8F230C7713898962958B7
-          CHIP:TOO: VendorId: 65521
-          CHIP:TOO: FabricId: 1
-          CHIP:TOO: NodeId: 2
-          CHIP:TOO: Label:
-          CHIP:TOO: FabricIndex: 2
-          CHIP:TOO: }
-          CHIP:TOO: [3]: {
-          CHIP:TOO: RootPublicKey: 04F8F28D5D70A4510E0F72FBBA31369796C4206FF95D97B77C1BDFD0438A3BE43510631A1B915BE189323F4CC0E015480192654D8170F8F230C7713898962958B7
-          CHIP:TOO: VendorId: 65521
-          CHIP:TOO: FabricId: 1
-          CHIP:TOO: NodeId: 3
-          CHIP:TOO: Label:
-          CHIP:TOO: FabricIndex: 3
-          CHIP:TOO: }
-          CHIP:EM: Sending Standalone Ack for MessageCounter:9143157 on exchange 1147i
-      disabled: true
-
-    - label:
-          "Step 7: TH_CR2 sends RemoveFabric with FabricIndex = 2 command to
-          DUT_CE"
-      PICS: OPCREDS.S.C0a.Rsp
-      verification: |
-          on TH_CR2 using chip tool, remove fabric with FabricIndex=2
-
-          ./chip-tool operationalcredentials remove-fabric 2 2 0 --commissioner-name beta
-
-          Verify DUT_CE responses with NOCResponse with a StatusCode OK on TH_CR2 (chip-tool) log
-
-          CHIP:DMG: Received Command Response Data, Endpoint=0 Cluster=0x0000_003E Command=0x0000_0008
-          CHIP:TOO: Endpoint: 0 Cluster: 0x0000_003E Command 0x0000_0008
-          CHIP:TOO: NOCResponse: {
-          CHIP:TOO: statusCode: 0
-          CHIP:TOO: fabricIndex: 2
-          CHIP:TOO: }
-          CHIP:DMG: ICR moving to [AwaitingDe]
-      disabled: true
-
-    - label:
-          "Step 8: TH_CR2 writes and reads the Basic Information Clusters
-          NodeLabel mandatory attribute of DUT_CE"
-      PICS: BINFO.S.A0005
-      verification: |
-          Using TH_CR2 , write attribute and read attribute
-
-          Verify read/write commands fail as expected since the DUT_CE is no longer on the network on TH_CR2 (chip-tool) side
-
-          ./chip-tool basicinformation write node-label te5new 2 0 --commissioner-name beta
-
-          [1678875950.655277][713499:713501] CHIP:SC: Received error (protocol code 1) during pairing process: ../../third_party/connectedhomeip/src/protocols/secure_channel/CASESession.cpp:1727: CHIP Error 0x000000C9: No shared trusted root
-          [1678875950.655290][713499:713501] CHIP:IN: SecureSession[0x7f5760026220]: Released - Type:2 LSID:1179
-          [1678875950.655316][713499:713501] CHIP:-: ../../third_party/connectedhomeip/src/protocols/secure_channel/CASESession.cpp:1727: CHIP Error 0x000000C9: No shared trusted root at ../../commands/clusters/ModelCommand.cpp:62
-
-
-          ./chip-tool basicinformation read node-label 2 0 --commissioner-name beta
-
-          [1678875950.655277][713499:713501] CHIP:SC: Received error (protocol code 1) during pairing process: ../../third_party/connectedhomeip/src/protocols/secure_channel/CASESession.cpp:1727: CHIP Error 0x000000C9: No shared trusted root
-          [1678875950.655290][713499:713501] CHIP:IN: SecureSession[0x7f5760026220]: Released - Type:2 LSID:1179
-          [1678875950.655316][713499:713501] CHIP:-: ../../third_party/connectedhomeip/src/protocols/secure_channel/CASESession.cpp:1727: CHIP Error 0x000000C9: No shared trusted root at ../../commands/clusters/ModelCommand.cpp:62
-      disabled: true
-
-    - label: "Step 9: TH_CR1 reads the list of Fabrics on DUT_CE"
-      PICS: OPCREDS.S.A0001
-      verification: |
-          On TH_CR1 using chip tool, read fabrics list
-
-          Verify the list of Fabrics consists of FabricIndex 1, FabricIndex 3 on TH_CR1 (chip-tool) log
-
-          ./chip-tool operationalcredentials read fabrics 1 0 --fabric-filtered 0
-
-          CHIP:TOO: Endpoint: 0 Cluster: 0x0000_003E Attribute 0x0000_0001 DataVersion: 2455995219
-          [1647863260.286772][9294:9299] CHIP:TOO: Fabrics: 2 entries
-          [1647863260.286908][9294:9299] CHIP:TOO: [1]: {
-          [1647863260.286947][9294:9299] CHIP:TOO: RootPublicKey: 04656F56DBDB677DC957028DFC0ED76709C72753C9194B117A483BDC07386BFD9529B68EB4448FBBA3964EFF37A56A8F461D348B0DAF3B56A75F3B94BF8209D36F
-          [1647863260.286995][9294:9299] CHIP:TOO: VendorId: 65521
-          [1647863260.287026][9294:9299] CHIP:TOO: FabricId: 1
-          [1647863260.287055][9294:9299] CHIP:TOO: NodeId: 1
-          [1647863260.287084][9294:9299] CHIP:TOO: Label:
-          [1647863260.287113][9294:9299] CHIP:TOO: FabricIndex: 1
-          [1647863260.287144][9294:9299] CHIP:TOO: }
-          [1647863260.287185][9294:9299] CHIP:TOO: [2]: {
-          [1647863260.287221][9294:9299] CHIP:TOO: RootPublicKey: 04F8F28D5D70A4510E0F72FBBA31369796C4206FF95D97B77C1BDFD0438A3BE43510631A1B915BE189323F4CC0E015480192654D8170F8F230C7713898962958B7
-          [1647863260.287253][9294:9299] CHIP:TOO: VendorId: 65521
-          [1647863260.287282][9294:9299] CHIP:TOO: FabricId: 1
-          [1647863260.287310][9294:9299] CHIP:TOO: NodeId: 3
-          [1647863260.287339][9294:9299] CHIP:TOO: Label:
-          [1647863260.287368][9294:9299] CHIP:TOO: FabricIndex: 3
-          [1647863260.287396][9294:9299] CHIP:TOO: }
-          [1647863260.287515][9294:9299] CHIP:EM: Sending Standalone Ack for MessageCounter:11301761 on exchange 13180i"
-      disabled: true
-
-    - label:
-          "Step 10: Verify DUT_CE is now discoverable over DNS-SD with 2
-          Operational service records (_matter._tcp SRV records)."
-      PICS: CADMIN.S.C00.Rsp
-      verification: |
-          Execute the below command in any linux platform or in TH_CR1
-          avahi-browse -rt _matter._tcp
-
-          + wlp5s0 IPv6 8E50A59FAF52A809-0000000000000001 _matter._tcp local
-          + wlp5s0 IPv6 03E707466A904C7E-0000000000000003 _matter._tcp local
-          = wlp5s0 IPv6 8E50A59FAF52A809-0000000000000001 _matter._tcp local
-          hostname = [E45F010F27530000.local]
-          address = [fe80::e65f:1ff:fe0f:2753]
-          port = [5540]
-          txt = ["T=1" "SAI=300" "SII=5000"]
-          = wlp5s0 IPv6 03E707466A904C7E-0000000000000003 _matter._tcp local
-          hostname = [E45F010F27530000.local]
-          address = [fe80::e65f:1ff:fe0f:2753]
-          port = [5540]
-          txt = ["T=1" "SAI=300" "SII=5000"]
-      disabled: true
-
-    - label:
-          "Step 11: TH_CR1 opens a commissioning window on DUT_CE using a
-          commissioning timeout of PIXIT.CADMIN.CwDuration seconds using ECM"
-      PICS: CADMIN.S.C00.Rsp
-      verification: |
-          On TH_CR1 chip tool, open commissioning window using ECM
-
-          ./chip-tool pairing open-commissioning-window 1 1 180 1000 3840
-
-          Verify Manual pairing code on TH_CR1 (chip-tool) Log
-
-          [1635925713.966786][9695:9700] CHIP:SC: Success status report received. Session was established
-          [1635925713.966839][9695:9700] CHIP:IN: New secure session created for device 0x0000000000000001, key 33!!
-          [1635925713.966938][9695:9700] CHIP:CTL: OpenCommissioningWindow for device ID 1
-          [1635925713.972601][9695:9700] CHIP:DMG: ICR moving to [AddingComm]
-          [1635925713.972705][9695:9700] CHIP:DMG: ICR moving to [AddedComma]
-          [1635925713.972815][9695:9700] CHIP:IN: Prepared encrypted message 0xaaaad9b57d10 to 0x0000000000000001 of type 0x8 and protocolId (0, 1) on exchange 31056i with MessageCounter:0.
-          [1635925713.972876][9695:9700] CHIP:IN: Sending encrypted msg 0xaaaad9b57d10 with MessageCounter:0 to 0x0000000000000001 at monotonic time: 13449459 msec
-          [1635925713.973006][9695:9700] CHIP:DMG: ICR moving to [CommandSen]
-          [1635925713.973061][9695:9700] CHIP:CTL: Manual pairing code: [36217551633]
-          [1635925713.973120][9695:9700] CHIP:CTL: SetupQRCode: [MT:00000CQM00A7F87ZT10]
-          [1635925713.973178][9695:9700] CHIP:EM: Sending Standalone Ack for MessageCounter:1964916542 on exchange 31055i
-      disabled: true
-
-    - label: "Step 12: TH_CR2 starts a commissioning process with DUT_CE"
-      PICS: CADMIN.S
-      verification: |
-          On TH_CR2 using chip tool connect to the accessory
-
-          ./chip-tool pairing code 2 36217551633 --commissioner-name beta
-
-          Verify you got below message on TH_CR2 (chip-tool) side
-          Device commissioning completed with success
-      disabled: true
-
-    - label: "Step 13: TH_CR1 reads the list of Fabrics on DUT_CE"
-      PICS: OPCREDS.S.A0001
-      verification: |
-          On TH_CR1 , read fabrics list
-
-          Verify the list shows FabricIndex=1, FabricIndex=3, FabricIndex=4 on TH_CR1 (chip-tool) side
-
-          ./chip-tool operationalcredentials read fabrics 1 0 --fabric-filtered 0
-
-          CHIP:TOO: Endpoint: 0 Cluster: 0x0000_003E Attribute 0x0000_0001 DataVersion: 2455995246
-          [1647863342.980899][9309:9314] CHIP:TOO: Fabrics: 3 entries
-          [1647863342.981158][9309:9314] CHIP:TOO: [1]: {
-          [1647863342.981231][9309:9314] CHIP:TOO: RootPublicKey: 04656F56DBDB677DC957028DFC0ED76709C72753C9194B117A483BDC07386BFD9529B68EB4448FBBA3964EFF37A56A8F461D348B0DAF3B56A75F3B94BF8209D36F
-          [1647863342.981322][9309:9314] CHIP:TOO: VendorId: 65521
-          [1647863342.981380][9309:9314] CHIP:TOO: FabricId: 1
-          [1647863342.981434][9309:9314] CHIP:TOO: NodeId: 1
-          [1647863342.981486][9309:9314] CHIP:TOO: Label:
-          [1647863342.981539][9309:9314] CHIP:TOO: FabricIndex: 1
-          [1647863342.981594][9309:9314] CHIP:TOO: }
-          [1647863342.981671][9309:9314] CHIP:TOO: [2]: {
-          [1647863342.981737][9309:9314] CHIP:TOO: RootPublicKey: 04F8F28D5D70A4510E0F72FBBA31369796C4206FF95D97B77C1BDFD0438A3BE43510631A1B915BE189323F4CC0E015480192654D8170F8F230C7713898962958B7
-          [1647863342.981796][9309:9314] CHIP:TOO: VendorId: 65521
-          [1647863342.981849][9309:9314] CHIP:TOO: FabricId: 1
-          [1647863342.981901][9309:9314] CHIP:TOO: NodeId: 3
-          [1647863342.981952][9309:9314] CHIP:TOO: Label:
-          [1647863342.982005][9309:9314] CHIP:TOO: FabricIndex: 3
-          [1647863342.982057][9309:9314] CHIP:TOO: }
-          [1647863342.982131][9309:9314] CHIP:TOO: [3]: {
-          [1647863342.982195][9309:9314] CHIP:TOO: RootPublicKey: 04F8F28D5D70A4510E0F72FBBA31369796C4206FF95D97B77C1BDFD0438A3BE43510631A1B915BE189323F4CC0E015480192654D8170F8F230C7713898962958B7
-          [1647863342.982252][9309:9314] CHIP:TOO: VendorId: 65521
-          [1647863342.982305][9309:9314] CHIP:TOO: FabricId: 1
-          [1647863342.982357][9309:9314] CHIP:TOO: NodeId: 2
-          [1647863342.982409][9309:9314] CHIP:TOO: Label:
-          [1647863342.982460][9309:9314] CHIP:TOO: FabricIndex: 4
-          [1647863342.982512][9309:9314] CHIP:TOO: }
-          [1647863342.982738][9309:9314] CHIP:EM: Sending Standalone Ack for MessageCounter:5772016 on exchange 3310i
-      disabled: true
diff --git a/src/app/tests/suites/manualTests.json b/src/app/tests/suites/manualTests.json
index 265607715c07f8..a5e014ab233ea2 100644
--- a/src/app/tests/suites/manualTests.json
+++ b/src/app/tests/suites/manualTests.json
@@ -170,7 +170,6 @@
         "Test_TC_CADMIN_1_8",
         "Test_TC_CADMIN_1_12",
         "Test_TC_CADMIN_1_14",
-        "Test_TC_CADMIN_1_15",
         "Test_TC_CADMIN_1_16",
         "Test_TC_CADMIN_1_17",
         "Test_TC_CADMIN_1_18",
diff --git a/src/python_testing/TC_CADMIN_1_15.py b/src/python_testing/TC_CADMIN_1_15.py
new file mode 100644
index 00000000000000..1ee3606a9a3f0e
--- /dev/null
+++ b/src/python_testing/TC_CADMIN_1_15.py
@@ -0,0 +1,267 @@
+#
+# Copyright (c) 2024 Project CHIP Authors
+# All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# === BEGIN CI TEST ARGUMENTS ===
+# test-runner-runs:
+#   run1:
+#     app: ${ALL_CLUSTERS_APP}
+#     app-args: --discriminator 1234 --KVS kvs1 --trace-to json:${TRACE_APP}.json
+#     script-args: >
+#       --storage-path admin_storage.json
+#       --commissioning-method on-network
+#       --discriminator 1234
+#       --passcode 20202021
+#       --trace-to json:${TRACE_TEST_JSON}.json
+#       --trace-to perfetto:${TRACE_TEST_PERFETTO}.perfetto
+#       --PICS src/app/tests/suites/certification/ci-pics-values
+#     factory-reset: true
+#     quiet: true
+# === END CI TEST ARGUMENTS ===
+
+import logging
+import random
+from typing import Optional
+
+import chip.clusters as Clusters
+from chip import ChipDeviceCtrl
+from chip.ChipDeviceCtrl import CommissioningParameters
+from chip.exceptions import ChipStackError
+from chip.testing.matter_testing import MatterBaseTest, TestStep, async_test_body, default_matter_test_main
+from mdns_discovery.mdns_discovery import MdnsDiscovery
+from mobly import asserts
+
+
+class TC_CADMIN_1_15(MatterBaseTest):
+    async def OpenCommissioningWindow(self, th: ChipDeviceCtrl.ChipDeviceController, expectedErrCode: Optional[Clusters.AdministratorCommissioning.Enums.StatusCode] = None) -> CommissioningParameters:
+        if expectedErrCode == 0x00:
+            params = await th.OpenCommissioningWindow(
+                nodeid=self.dut_node_id, timeout=self.max_window_duration, iteration=10000, discriminator=self.discriminator, option=1)
+            return params
+
+        else:
+            ctx = asserts.assert_raises(ChipStackError)
+            with ctx:
+                await th.OpenCommissioningWindow(
+                    nodeid=self.dut_node_id, timeout=self.max_window_duration, iteration=10000, discriminator=self.discriminator, option=1)
+            errcode = ctx.exception.chip_error
+            logging.info('OpenCommissioningWindow attempt finished. Successful? {}, error code = {}'.format(errcode.is_success, errcode))
+            asserts.assert_false(errcode.is_success, 'OpenCommissioningWindow did not fail as expected')
+            asserts.assert_true(errcode.sdk_code == expectedErrCode,
+                                'Unexpected error code returned from OpenCommissioningWindow')
+
+    async def read_currentfabricindex(self, th: ChipDeviceCtrl.ChipDeviceController) -> int:
+        cluster = Clusters.Objects.OperationalCredentials
+        attribute = Clusters.OperationalCredentials.Attributes.CurrentFabricIndex
+        current_fabric_index = await self.read_single_attribute_check_success(dev_ctrl=th, endpoint=0, cluster=cluster, attribute=attribute)
+        return current_fabric_index
+
+    async def get_fabrics(self, th: ChipDeviceCtrl.ChipDeviceController) -> list:
+        OC_cluster = Clusters.OperationalCredentials
+        fabrics = await self.read_single_attribute_check_success(dev_ctrl=th, fabric_filtered=False, endpoint=0, cluster=OC_cluster, attribute=OC_cluster.Attributes.Fabrics)
+        return fabrics
+
+    async def CommissionAttempt(
+            self, setupPinCode: int, thnum: int, th):
+
+        logging.info(f"-----------------Commissioning with TH_CR{str(thnum)}-------------------------")
+        await th.CommissionOnNetwork(
+            nodeId=self.dut_node_id, setupPinCode=setupPinCode,
+            filterType=ChipDeviceCtrl.DiscoveryFilterType.LONG_DISCRIMINATOR, filter=self.discriminator)
+
+    def steps_TC_CADMIN_1_15(self) -> list[TestStep]:
+        return [
+            TestStep(
+                1, "TH_CR1 gets the MaxCumulativeFailsafeSeconds value from the BasicCommissioningInfo attribute in the GeneralCommissioning cluster",
+                "MaxCumulativeFailsafeSeconds is saved and used as the commissioning window timeout"),
+            TestStep(
+                2, "TH_CR1 reads the Fabrics attribute from the Node Operational Credentials cluster using a non-fabric-filtered read. Save the number of fabrics in the list as initial_number_of_fabrics"),
+            TestStep(
+                3, "TH_CR1 opens a commissioning window on the DUT with the duration set to MaxCumulativeFailsafeSeconds",
+                "Commissioning window should open with timeout set to MaxCumulativeFailsafeSeconds"),
+            TestStep(4, "TH_CR2 fully commissions DUT_CE", "DUT should fully commission"),
+            TestStep(
+                5, "TH_CR1 opens a commissioning window on the DUT with the duration set to the value from BasicCommissioningInfo", "A new commissioning window should open with that timeout"),
+            TestStep(6, "TH_CR3 fully commissions DUT_CE", "DUT should fully commission to TH_CR3"),
+            TestStep(7, "TH_CR2 reads the Fabrics attribute from the Node Operational Credentials cluster using a non-fabric-filtered read",
+                     "Verify the list shows initial_number_of_fabrics + 2 fabrics"),
+            TestStep(8, "Verify DUT_CE is now discoverable over DNS-SD with 3 Operational service records (_matter._tcp SRV records)."),
+            TestStep(9, "TH_CR2 reads the CurrentFabricIndex from the Node Operational Credentials cluster and saves it as fabric_idx_cr2"),
+            TestStep(10, "TH_CR2 sends a RemoveFabric command with FabricIndex = fabric_idx_cr2 to DUT_CE",
+                     "Verify DUT_CE responds with an NOCResponse with a StatusCode of OK"),
+            TestStep(11, "TH_CR2 reads the Basic Information Cluster's NodeLabel attribute of DUT_CE",
+                     "Verify the read fails as expected since TH_CR2's fabric was removed from DUT_CE"),
+            TestStep(12, "TH_CR1 reads the list of Fabrics on DUT_CE",
+                     "Verify the list shows initial_number_of_fabrics + 1 fabrics and fabric_idx_cr2 is not included."),
+            TestStep(13, "TH_CR1 sends an OpenCommissioningWindow command to DUT_CE using a commissioning timeout of max_window_duration"),
+            TestStep(14, "TH_CR2 commissions DUT_CE", "Commissioning is successful"),
+            TestStep(15, "TH_CR2 reads the Fabrics attribute from the Node Operational Credentials cluster using a non-fabric-filtered read",
+                     "Verify the list shows initial_number_of_fabrics + 2 fabrics and fabric_idx_cr2 is not included, since a new fabric index should have been allocated."),
+            TestStep(16, "TH_CR2 reads the CurrentFabricIndex from the Node Operational Credentials cluster and saves it as fabric_idx_cr2_2"),
+            TestStep(17, "TH_CR3 reads the CurrentFabricIndex from the Node Operational Credentials cluster and saves it as fabric_idx_cr3"),
+            TestStep(18, "TH_CR1 sends a RemoveFabric command with FabricIndex = fabric_idx_cr2_2 to DUT_CE",
+                     "Verify DUT_CE responds with an NOCResponse with a StatusCode of OK"),
+            TestStep(19, "TH_CR1 sends a RemoveFabric command with FabricIndex = fabric_idx_cr3 to DUT_CE",
+                     "Verify DUT_CE responds with an NOCResponse with a StatusCode of OK"),
+            TestStep(20, "TH_CR1 reads the list of Fabrics on DUT_CE",
+                     "Verify the list shows initial_number_of_fabrics fabrics."),
+        ]
+
+    def pics_TC_CADMIN_1_15(self) -> list[str]:
+        return ["CADMIN.S"]
+
+    @async_test_body
+    async def test_TC_CADMIN_1_15(self):
+        # Establish the TH1 controller
+        self.th1 = self.default_controller
+        self.discriminator = random.randint(0, 4095)
+
+        # Establish the TH2 controller on its own fabric
+        th2_certificate_authority = self.certificate_authority_manager.NewCertificateAuthority()
+        th2_fabric_admin = th2_certificate_authority.NewFabricAdmin(vendorId=0xFFF1, fabricId=self.th1.fabricId + 1)
+        self.th2 = th2_fabric_admin.NewController(nodeId=2, useTestCommissioner=True)
+
+        # Establish the TH3 controller on its own fabric
+        th3_certificate_authority = self.certificate_authority_manager.NewCertificateAuthority()
+        th3_fabric_admin = th3_certificate_authority.NewFabricAdmin(vendorId=0xFFF1, fabricId=self.th2.fabricId + 1)
+        self.th3 = th3_fabric_admin.NewController(nodeId=3, useTestCommissioner=True)
+
+        self.step(1)
+        GC_cluster = Clusters.GeneralCommissioning
+        attribute = GC_cluster.Attributes.BasicCommissioningInfo
+        duration = await self.read_single_attribute_check_success(endpoint=0, cluster=GC_cluster, attribute=attribute)
+        self.max_window_duration = duration.maxCumulativeFailsafeSeconds
+
+        self.step(2)
+        fabrics = await self.get_fabrics(th=self.th1)
+        initial_number_of_fabrics = len(fabrics)
+
+        self.step(3)
+        params = await self.OpenCommissioningWindow(th=self.th1, expectedErrCode=0x00)
+        setupPinCode = params.setupPinCode
+
+        self.step(4)
+        await self.CommissionAttempt(setupPinCode, thnum=2, th=self.th2)
+
+        self.step(5)
+        params2 = await self.OpenCommissioningWindow(th=self.th1, expectedErrCode=0x00)
+        setupPinCode2 = params2.setupPinCode
+
+        self.step(6)
+        await self.CommissionAttempt(setupPinCode2, thnum=3, th=self.th3)
+
+        self.step(7)
+        fabrics = await self.get_fabrics(th=self.th2)
+        if len(fabrics) != initial_number_of_fabrics + 2:
+            # The fabric list is expected to contain initial_number_of_fabrics + 2 entries; fail otherwise
+            asserts.fail("Expected number of fabrics not correct")
+
+        self.step(8)
+        # Gather the instance names advertised for each TH's compressed fabric ID to verify there are 3 operational service records for the DUT.
+        mdns = MdnsDiscovery()
+        compressed_fabric_ids = {
+            "th1": self.th1.GetCompressedFabricId(),
+            "th2": self.th2.GetCompressedFabricId(),
+            "th3": self.th3.GetCompressedFabricId(),
+        }
+
+        op_services = []
+        for th, compressed_id in compressed_fabric_ids.items():
+            service = await MdnsDiscovery.get_operational_service(
+                mdns,
+                node_id=self.dut_node_id,
+                compressed_fabric_id=compressed_id,
+                log_output=True
+            )
+            op_services.append(service.instance_name)
+
+        asserts.assert_equal(
+            3,
+            len(set(op_services)),
+            f"Expected 3 unique instance names but got {len(set(op_services))}"
+        )
+
+        self.step(9)
+        fabric_idx_cr2 = await self.read_currentfabricindex(th=self.th2)
+
+        self.step(10)
+        removeFabricCmd = Clusters.OperationalCredentials.Commands.RemoveFabric(fabric_idx_cr2)
+        await self.th2.SendCommand(nodeid=self.dut_node_id, endpoint=0, payload=removeFabricCmd)
+
+        self.step(11)
+        # Verify TH_CR2 is unable to read the Basic Information Cluster's NodeLabel attribute of DUT_CE now that its fabric has been removed
+        try:
+            await self.read_single_attribute_check_success(
+                dev_ctrl=self.th2,
+                endpoint=0,
+                cluster=Clusters.BasicInformation,
+                attribute=Clusters.BasicInformation.Attributes.NodeLabel
+            )
+            asserts.fail("Expected exception not thrown")
+        except ChipStackError as e:
+            # Verify that the read fails with a "Timeout" (0x32) error
+            asserts.assert_equal(e.err, 0x00000032,
+                                 "Expected to time out since TH_CR2's fabric was removed from DUT_CE")
+
+        self.step(12)
+        fabrics2 = await self.get_fabrics(th=self.th1)
+        fabric_indexes = [fabric.fabricIndex for fabric in fabrics2]
+        if len(fabrics2) != initial_number_of_fabrics + 1:
+            # The fabric list is expected to contain initial_number_of_fabrics + 1 entries; fail otherwise
+            asserts.fail(f"Expected number of fabrics not correct, should show {initial_number_of_fabrics + 1}, but instead shows {str(len(fabrics2))}")
+
+        if fabric_idx_cr2 in fabric_indexes:
+            asserts.fail("fabric_idx_cr2 should no longer be present in the fabric list at this point")
+
+        self.step(13)
+        params3 = await self.OpenCommissioningWindow(self.th1, expectedErrCode=0x00)
+        setupPinCode3 = params3.setupPinCode
+
+        self.step(14)
+        await self.CommissionAttempt(setupPinCode3, thnum=2, th=self.th2)
+
+        self.step(15)
+        fabrics3 = await self.get_fabrics(th=self.th2)
+        fabric_indexes2 = [fabric.fabricIndex for fabric in fabrics3]
+        if len(fabrics3) != initial_number_of_fabrics + 2:
+            # The fabric list is expected to contain initial_number_of_fabrics + 2 entries; fail otherwise
+            asserts.fail("Expected number of fabrics not correct")
+
+        if fabric_idx_cr2 in fabric_indexes2:
+            asserts.fail("fabric_idx_cr2 should not be present in the fabric list, but it was")
+
+        self.step(16)
+        fabric_idx_cr2_2 = await self.read_currentfabricindex(th=self.th2)
+
+        self.step(17)
+        fabric_idx_cr3 = await self.read_currentfabricindex(th=self.th3)
+
+        self.step(18)
+        removeFabricCmd2 = Clusters.OperationalCredentials.Commands.RemoveFabric(fabric_idx_cr2_2)
+        await self.th1.SendCommand(nodeid=self.dut_node_id, endpoint=0, payload=removeFabricCmd2)
+
+        self.step(19)
+        removeFabricCmd3 = Clusters.OperationalCredentials.Commands.RemoveFabric(fabric_idx_cr3)
+        await self.th1.SendCommand(nodeid=self.dut_node_id, endpoint=0, payload=removeFabricCmd3)
+
+        self.step(20)
+        fabrics4 = await self.get_fabrics(th=self.th1)
+        if len(fabrics4) > initial_number_of_fabrics:
+            asserts.fail(
+                f"Expected number of fabrics not correct, should be {str(initial_number_of_fabrics)}, but instead found {str(len(fabrics4))}")
+
+
+if __name__ == "__main__":
+    default_matter_test_main()
diff --git a/src/python_testing/mdns_discovery/mdns_discovery.py b/src/python_testing/mdns_discovery/mdns_discovery.py
index 1261ab3a578059..d36b3973b9815e 100644
--- a/src/python_testing/mdns_discovery/mdns_discovery.py
+++ b/src/python_testing/mdns_discovery/mdns_discovery.py
@@ -191,7 +191,8 @@ async def get_operational_service(self,
             raise ValueError("Both node_id and compressed_fabric_id must be provided together or not at all.")
 
         self._name_filter = f'{compressed_fabric_id:016x}-{node_id:016x}.{MdnsServiceType.OPERATIONAL.value}'.upper()
-        return await self._get_service(MdnsServiceType.OPERATIONAL, log_output, discovery_timeout_sec)
+        logger.info(f"name filter {self._name_filter}")
+        return await self._get_service(MdnsServiceType.OPERATIONAL, log_output, discovery_timeout_sec, self._name_filter)
 
     async def get_border_router_service(self, log_output: bool = False,
                                         discovery_timeout_sec: float = DISCOVERY_TIMEOUT_SEC
@@ -493,7 +494,8 @@ def _get_instance_name(self, service_info: AsyncServiceInfo) -> str:
 
     async def _get_service(self, service_type: MdnsServiceType,
                            log_output: bool,
-                           discovery_timeout_sec: float
+                           discovery_timeout_sec: float,
+                           expected_value: Optional[str] = None,
                            ) -> Optional[MdnsServiceInfo]:
         """
         Asynchronously discovers a specific type of mDNS service within the network and returns its details.
@@ -502,6 +504,7 @@ async def _get_service(self, service_type: MdnsServiceType,
             service_type (MdnsServiceType): The enum representing the type of mDNS service to discover.
             log_output (bool): Logs the discovered services to the console. Defaults to False.
            discovery_timeout_sec (float): Defaults to 15 seconds.
+            expected_value (Optional[str]): Full service name used to select a specific record when multiple records of the same type are discovered. Defaults to None.
 
         Returns:
             Optional[MdnsServiceInfo]: An instance of MdnsServiceInfo representing the discovered service, if
@@ -515,7 +518,12 @@ async def _get_service(self, service_type: MdnsServiceType,
         logger.info("Getting service from discovered services: %s", self._discovered_services)
 
         if service_type.value in self._discovered_services:
-            return self._discovered_services[service_type.value][0]
+            if expected_value is not None:
+                for service in self._discovered_services[service_type.value]:
+                    if service.service_name == expected_value.replace("._MATTER._TCP.LOCAL.", "._matter._tcp.local."):
+                        return service
+            else:
+                return self._discovered_services[service_type.value][0]
         else:
             return None
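Reviewer note: a minimal, self-contained sketch of the record-selection behaviour that the new expected_value parameter gives _get_service(). FakeServiceInfo and select_matching_service are hypothetical names used only for illustration; the real code operates on MdnsServiceInfo objects in mdns_discovery.py above.

# Illustration only: mirrors the matching logic added in _get_service(), assuming
# each discovered record exposes a `service_name` attribute as MdnsServiceInfo does.
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class FakeServiceInfo:
    service_name: str


def select_matching_service(services: List[FakeServiceInfo],
                            expected_value: Optional[str]) -> Optional[FakeServiceInfo]:
    if not services:
        return None
    if expected_value is None:
        # Original behaviour: the first discovered record wins.
        return services[0]
    # The name filter built by get_operational_service() is upper-cased, so the
    # service-type suffix is normalised back to lowercase before comparing.
    wanted = expected_value.replace("._MATTER._TCP.LOCAL.", "._matter._tcp.local.")
    for service in services:
        if service.service_name == wanted:
            return service
    return None


services = [FakeServiceInfo("8E50A59FAF52A809-0000000000000001._matter._tcp.local."),
            FakeServiceInfo("03E707466A904C7E-0000000000000003._matter._tcp.local.")]
match = select_matching_service(services, "03E707466A904C7E-0000000000000003._MATTER._TCP.LOCAL.")
assert match is services[1]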
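Similarly, a compact sketch of the step 11 pattern used in TC_CADMIN_1_15.py above: after RemoveFabric, the removed controller can no longer establish a CASE session with the DUT, so a fabric-scoped read should fail with error code 0x32 (timeout), which is the value the test asserts. assert_unreachable is a hypothetical helper, not part of the patch.

# Illustration only: the "removed fabric can no longer reach the DUT" check.
import chip.clusters as Clusters
from chip.exceptions import ChipStackError
from mobly import asserts

CHIP_ERROR_TIMEOUT = 0x00000032  # SDK error code the test above expects


async def assert_unreachable(dev_ctrl, node_id: int) -> None:
    try:
        # A fabric-scoped read; CASE establishment should fail after RemoveFabric.
        await dev_ctrl.ReadAttribute(
            node_id, [(0, Clusters.BasicInformation.Attributes.NodeLabel)])
        asserts.fail("Read unexpectedly succeeded after RemoveFabric")
    except ChipStackError as e:
        asserts.assert_equal(e.err, CHIP_ERROR_TIMEOUT,
                             "Expected timeout once the fabric was removed")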