diff --git a/src/app/zap-templates/zcl/data-model/chip/camera-av-stream-management-cluster.xml b/src/app/zap-templates/zcl/data-model/chip/camera-av-stream-management-cluster.xml
index 0a5110ec402bcc..700dd2223927b7 100644
--- a/src/app/zap-templates/zcl/data-model/chip/camera-av-stream-management-cluster.xml
+++ b/src/app/zap-templates/zcl/data-model/chip/camera-av-stream-management-cluster.xml
@@ -18,7 +18,7 @@ limitations under the License.
XML generated by Alchemy; DO NOT EDIT.
Source: src/app_clusters/CameraAVStreamManagement.adoc
Parameters: in-progress
-Git: 1.4-526-g306635762
+Git: 0.7-summer-2025-285-g2a2bd1961
-->
@@ -111,9 +111,8 @@ Git: 1.4-526-g306635762
-
-
-
+
+
@@ -188,12 +187,7 @@ Git: 1.4-526-g306635762
-
- MaxContentBufferSize
-
-
-
-
+ MaxContentBufferSize
MicrophoneCapabilities
@@ -203,19 +197,13 @@ Git: 1.4-526-g306635762
SpeakerCapabilities
-
-
-
-
+
TwoWayTalkSupport
-
-
-
-
+
@@ -235,11 +223,11 @@ Git: 1.4-526-g306635762
HDRModeEnabled
-
-
-
+
+
+
- FabricsUsingCamera
+ SupportedStreamUsages
AllocatedVideoStreams
@@ -315,10 +303,7 @@ Git: 1.4-526-g306635762
-
-
-
-
+
@@ -326,30 +311,21 @@ Git: 1.4-526-g306635762
-
-
-
-
+
SpeakerMaxLevel
-
-
-
-
+
SpeakerMinLevel
-
-
-
-
+
@@ -482,18 +458,18 @@ Git: 1.4-526-g306635762
This command SHALL allocate a video stream on the camera and return an allocated video stream identifier.
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
@@ -536,12 +512,14 @@ Git: 1.4-526-g306635762
This command SHALL allocate a snapshot stream on the device and return an allocated snapshot stream identifier.
-
-
-
-
-
-
+
+
+
+
+
+
+
+
@@ -555,7 +533,24 @@ Git: 1.4-526-g306635762
-
+
+ This command SHALL be used to modify a snapshot stream specified by the SnapshotStreamID.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
This command SHALL deallocate a snapshot stream on the camera, corresponding to the given snapshot stream identifier.
@@ -564,13 +559,13 @@ Git: 1.4-526-g306635762
-
+
This command SHALL set the relative priorities of the various stream usages on the camera.
-
+
-
+
This command SHALL return a Snapshot from the camera.
@@ -580,7 +575,7 @@ Git: 1.4-526-g306635762
-
+
This command SHALL be sent by the device in response to the CaptureSnapshot command, carrying the requested snapshot.
@@ -591,52 +586,6 @@ Git: 1.4-526-g306635762
-
-
-
-
-
-
-
-
-
-
-
-
- This event SHALL be generated when there is a modification in the corresponding video stream.
-
-
-
-
-
-
-
-
-
-
-
-
-
- This event SHALL be generated when there is a modification in the corresponding audio stream.
-
-
-
-
-
-
-
-
-
-
-
-
-
- This event SHALL be generated when there is a modification in the corresponding snapshot stream.
-
-
-
-
-
@@ -651,20 +600,45 @@ Git: 1.4-526-g306635762
-
+
+
+
-
+
+
+
+
+
+
-
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
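
Taken together, the data-model changes above replace FabricsUsingCamera with SupportedStreamUsages, make MaxContentBufferSize mandatory, add WatermarkEnabled/OSDEnabled fields to SnapshotStreamAllocate, and introduce the SnapshotStreamModify command. As a rough illustration only, here is a minimal Java sketch of passing the two new optional fields through the generated `ChipClusters` API that appears later in this diff; the `cluster` instance, the resolution, codec, and quality values are placeholders, and the width/height constructor of `CameraAvStreamManagementClusterVideoResolutionStruct` is assumed from the generated struct shape.

```java
import java.util.Optional;

import chip.devicecontroller.ChipClusters;
import chip.devicecontroller.ChipStructs;

final class SnapshotAllocateSketch {
  // Sketch: allocate a snapshot stream with the new WatermarkEnabled/OSDEnabled fields.
  // `cluster` is assumed to be a CameraAvStreamManagementCluster already bound to the
  // target device and endpoint.
  static void allocateWatermarkedSnapshot(ChipClusters.CameraAvStreamManagementCluster cluster) {
    ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct resolution =
        new ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct(640, 480);

    cluster.snapshotStreamAllocate(
        new ChipClusters.CameraAvStreamManagementCluster.SnapshotStreamAllocateResponseCallback() {
          @Override
          public void onSuccess(Integer snapshotStreamID) {
            // Keep the ID for later SnapshotStreamModify / SnapshotStreamDeallocate calls.
          }

          @Override
          public void onError(Exception error) {
            // Allocation failed.
          }
        },
        0 /* imageCodec (placeholder) */,
        15 /* maxFrameRate */,
        2_000_000L /* bitRate */,
        resolution /* minResolution */,
        resolution /* maxResolution */,
        90 /* quality */,
        Optional.of(true) /* watermarkEnabled, new field 6 */,
        Optional.of(false) /* OSDEnabled, new field 7 */);
  }
}
```

Passing `Optional.empty()` for the two trailing arguments keeps the pre-change behaviour, since both fields are optional in the request struct.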
diff --git a/src/controller/data_model/controller-clusters.matter b/src/controller/data_model/controller-clusters.matter
index 1e7ad57fc02778..b3a9ba30206450 100644
--- a/src/controller/data_model/controller-clusters.matter
+++ b/src/controller/data_model/controller-clusters.matter
@@ -9741,6 +9741,7 @@ provisional cluster CameraAvStreamManagement = 1361 {
kWatermark = 0x40;
kOnScreenDisplay = 0x80;
kLocalStorage = 0x100;
+ kHighDynamicRange = 0x200;
}
struct VideoResolutionStruct {
@@ -9809,9 +9810,8 @@ provisional cluster CameraAvStreamManagement = 1361 {
struct VideoSensorParamsStruct {
int16u sensorWidth = 0;
int16u sensorHeight = 1;
- boolean HDRCapable = 2;
- int16u maxFPS = 3;
- int16u maxHDRFPS = 4;
+ int16u maxFPS = 2;
+ optional int16u maxHDRFPS = 3;
}
struct ViewportStruct {
@@ -9821,47 +9821,13 @@ provisional cluster CameraAvStreamManagement = 1361 {
int16u y2 = 3;
}
- info event VideoStreamChanged = 0 {
- int16u videoStreamID = 0;
- optional StreamUsageEnum streamUsage = 1;
- optional VideoCodecEnum videoCodec = 2;
- optional int16u minFrameRate = 3;
- optional int16u maxFrameRate = 4;
- optional VideoResolutionStruct minResolution = 5;
- optional VideoResolutionStruct maxResolution = 6;
- optional int32u minBitRate = 7;
- optional int32u maxBitRate = 8;
- optional int16u minFragmentLen = 9;
- optional int16u maxFragmentLen = 10;
- }
-
- info event AudioStreamChanged = 1 {
- int16u audioStreamID = 0;
- optional StreamUsageEnum streamUsage = 1;
- optional AudioCodecEnum audioCodec = 2;
- optional int8u channelCount = 3;
- optional int32u sampleRate = 4;
- optional int32u bitRate = 5;
- optional int8u bitDepth = 6;
- }
-
- info event SnapshotStreamChanged = 2 {
- int16u snapshotStreamID = 0;
- optional ImageCodecEnum imageCodec = 1;
- optional int16u frameRate = 2;
- optional int32u bitRate = 3;
- optional VideoResolutionStruct minResolution = 4;
- optional VideoResolutionStruct maxResolution = 5;
- optional int8u quality = 6;
- }
-
readonly attribute optional int8u maxConcurrentVideoEncoders = 0;
readonly attribute optional int32u maxEncodedPixelRate = 1;
readonly attribute optional VideoSensorParamsStruct videoSensorParams = 2;
readonly attribute optional boolean nightVisionCapable = 3;
readonly attribute optional VideoResolutionStruct minViewport = 4;
readonly attribute optional RateDistortionTradeOffPointsStruct rateDistortionTradeOffPoints[] = 5;
- readonly attribute optional int32u maxContentBufferSize = 6;
+ readonly attribute int32u maxContentBufferSize = 6;
readonly attribute optional AudioCapabilitiesStruct microphoneCapabilities = 7;
readonly attribute optional AudioCapabilitiesStruct speakerCapabilities = 8;
readonly attribute optional TwoWayTalkSupportTypeEnum twoWayTalkSupport = 9;
@@ -9869,7 +9835,7 @@ provisional cluster CameraAvStreamManagement = 1361 {
readonly attribute int32u maxNetworkBandwidth = 11;
readonly attribute optional int16u currentFrameRate = 12;
attribute access(read: manage, write: manage) optional boolean HDRModeEnabled = 13;
- readonly attribute fabric_idx fabricsUsingCamera[] = 14;
+ readonly attribute StreamUsageEnum supportedStreamUsages[] = 14;
readonly attribute optional VideoStreamStruct allocatedVideoStreams[] = 15;
readonly attribute optional AudioStreamStruct allocatedAudioStreams[] = 16;
readonly attribute optional SnapshotStreamStruct allocatedSnapshotStreams[] = 17;
@@ -9956,12 +9922,20 @@ provisional cluster CameraAvStreamManagement = 1361 {
VideoResolutionStruct minResolution = 3;
VideoResolutionStruct maxResolution = 4;
int8u quality = 5;
+ optional boolean watermarkEnabled = 6;
+ optional boolean OSDEnabled = 7;
}
response struct SnapshotStreamAllocateResponse = 8 {
int16u snapshotStreamID = 0;
}
+ request struct SnapshotStreamModifyRequest {
+ int16u snapshotStreamID = 0;
+ optional boolean watermarkEnabled = 1;
+ optional boolean OSDEnabled = 2;
+ }
+
request struct SnapshotStreamDeallocateRequest {
int16u snapshotStreamID = 0;
}
@@ -9975,7 +9949,7 @@ provisional cluster CameraAvStreamManagement = 1361 {
VideoResolutionStruct requestedResolution = 1;
}
- response struct CaptureSnapshotResponse = 12 {
+ response struct CaptureSnapshotResponse = 13 {
octet_string data = 0;
ImageCodecEnum imageCodec = 1;
VideoResolutionStruct resolution = 2;
@@ -9993,12 +9967,14 @@ provisional cluster CameraAvStreamManagement = 1361 {
command access(invoke: manage) VideoStreamDeallocate(VideoStreamDeallocateRequest): DefaultSuccess = 6;
/** This command SHALL allocate a snapshot stream on the device and return an allocated snapshot stream identifier. */
command access(invoke: manage) SnapshotStreamAllocate(SnapshotStreamAllocateRequest): SnapshotStreamAllocateResponse = 7;
+ /** This command SHALL be used to modify a snapshot stream specified by the SnapshotStreamID. */
+ command access(invoke: manage) SnapshotStreamModify(SnapshotStreamModifyRequest): DefaultSuccess = 9;
/** This command SHALL deallocate a snapshot stream on the camera, corresponding to the given snapshot stream identifier. */
- command access(invoke: manage) SnapshotStreamDeallocate(SnapshotStreamDeallocateRequest): DefaultSuccess = 9;
+ command access(invoke: manage) SnapshotStreamDeallocate(SnapshotStreamDeallocateRequest): DefaultSuccess = 10;
/** This command SHALL set the relative priorities of the various stream usages on the camera. */
- command access(invoke: administer) SetStreamPriorities(SetStreamPrioritiesRequest): DefaultSuccess = 10;
+ command access(invoke: administer) SetStreamPriorities(SetStreamPrioritiesRequest): DefaultSuccess = 11;
/** This command SHALL return a Snapshot from the camera. */
- command CaptureSnapshot(CaptureSnapshotRequest): CaptureSnapshotResponse = 11;
+ command CaptureSnapshot(CaptureSnapshotRequest): CaptureSnapshotResponse = 12;
}
/** This cluster provides an interface into controls associated with the operation of a device that provides pan, tilt, and zoom functions, either mechanically, or against a digital image. */
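
One consequence of the IDL change above: HDRCapable is gone from VideoSensorParamsStruct, so HDR capability is now conveyed by the new kHighDynamicRange feature bit (0x200) together with the optional maxHDRFPS field. A minimal sketch of the corresponding FeatureMap test on the client side (plain bit arithmetic; no SDK call assumed):

```java
// Sketch: HDR support check against a FeatureMap value the client has already read.
final class CameraAvFeatureBits {
  // From the updated Feature bitmap above: kHighDynamicRange = 0x200.
  static final long HIGH_DYNAMIC_RANGE = 0x200L;

  static boolean supportsHdr(long featureMap) {
    return (featureMap & HIGH_DYNAMIC_RANGE) != 0;
  }
}
```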
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ChipClusters.java b/src/controller/java/generated/java/chip/devicecontroller/ChipClusters.java
index cb423ce525f915..6813577c1d4d48 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ChipClusters.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ChipClusters.java
@@ -60409,7 +60409,7 @@ public static class CameraAvStreamManagementCluster extends BaseChipCluster {
private static final long MAX_NETWORK_BANDWIDTH_ATTRIBUTE_ID = 11L;
private static final long CURRENT_FRAME_RATE_ATTRIBUTE_ID = 12L;
private static final long HDR_MODE_ENABLED_ATTRIBUTE_ID = 13L;
- private static final long FABRICS_USING_CAMERA_ATTRIBUTE_ID = 14L;
+ private static final long SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID = 14L;
private static final long ALLOCATED_VIDEO_STREAMS_ATTRIBUTE_ID = 15L;
private static final long ALLOCATED_AUDIO_STREAMS_ATTRIBUTE_ID = 16L;
private static final long ALLOCATED_SNAPSHOT_STREAMS_ATTRIBUTE_ID = 17L;
@@ -60645,11 +60645,11 @@ public void onResponse(StructType invokeStructValue) {
}}, commandId, commandArgs, timedInvokeTimeoutMs);
}
- public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality) {
- snapshotStreamAllocate(callback, imageCodec, maxFrameRate, bitRate, minResolution, maxResolution, quality, 0);
+ public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled) {
+ snapshotStreamAllocate(callback, imageCodec, maxFrameRate, bitRate, minResolution, maxResolution, quality, watermarkEnabled, OSDEnabled, 0);
}
- public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, int timedInvokeTimeoutMs) {
+ public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled, int timedInvokeTimeoutMs) {
final long commandId = 7L;
ArrayList elements = new ArrayList<>();
@@ -60677,6 +60677,14 @@ public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callba
BaseTLVType qualitytlvValue = new UIntType(quality);
elements.add(new StructElement(qualityFieldID, qualitytlvValue));
+ final long watermarkEnabledFieldID = 6L;
+ BaseTLVType watermarkEnabledtlvValue = watermarkEnabled.map((nonOptionalwatermarkEnabled) -> new BooleanType(nonOptionalwatermarkEnabled)).orElse(new EmptyType());
+ elements.add(new StructElement(watermarkEnabledFieldID, watermarkEnabledtlvValue));
+
+ final long OSDEnabledFieldID = 7L;
+ BaseTLVType OSDEnabledtlvValue = OSDEnabled.map((nonOptionalOSDEnabled) -> new BooleanType(nonOptionalOSDEnabled)).orElse(new EmptyType());
+ elements.add(new StructElement(OSDEnabledFieldID, OSDEnabledtlvValue));
+
StructType commandArgs = new StructType(elements);
invoke(new InvokeCallbackImpl(callback) {
@Override
@@ -60695,12 +60703,40 @@ public void onResponse(StructType invokeStructValue) {
}}, commandId, commandArgs, timedInvokeTimeoutMs);
}
+ public void snapshotStreamModify(DefaultClusterCallback callback, Integer snapshotStreamID, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled) {
+ snapshotStreamModify(callback, snapshotStreamID, watermarkEnabled, OSDEnabled, 0);
+ }
+
+ public void snapshotStreamModify(DefaultClusterCallback callback, Integer snapshotStreamID, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled, int timedInvokeTimeoutMs) {
+ final long commandId = 9L;
+
+ ArrayList<StructElement> elements = new ArrayList<>();
+ final long snapshotStreamIDFieldID = 0L;
+ BaseTLVType snapshotStreamIDtlvValue = new UIntType(snapshotStreamID);
+ elements.add(new StructElement(snapshotStreamIDFieldID, snapshotStreamIDtlvValue));
+
+ final long watermarkEnabledFieldID = 1L;
+ BaseTLVType watermarkEnabledtlvValue = watermarkEnabled.map((nonOptionalwatermarkEnabled) -> new BooleanType(nonOptionalwatermarkEnabled)).orElse(new EmptyType());
+ elements.add(new StructElement(watermarkEnabledFieldID, watermarkEnabledtlvValue));
+
+ final long OSDEnabledFieldID = 2L;
+ BaseTLVType OSDEnabledtlvValue = OSDEnabled.map((nonOptionalOSDEnabled) -> new BooleanType(nonOptionalOSDEnabled)).orElse(new EmptyType());
+ elements.add(new StructElement(OSDEnabledFieldID, OSDEnabledtlvValue));
+
+ StructType commandArgs = new StructType(elements);
+ invoke(new InvokeCallbackImpl(callback) {
+ @Override
+ public void onResponse(StructType invokeStructValue) {
+ callback.onSuccess();
+ }}, commandId, commandArgs, timedInvokeTimeoutMs);
+ }
+
public void snapshotStreamDeallocate(DefaultClusterCallback callback, Integer snapshotStreamID) {
snapshotStreamDeallocate(callback, snapshotStreamID, 0);
}
public void snapshotStreamDeallocate(DefaultClusterCallback callback, Integer snapshotStreamID, int timedInvokeTimeoutMs) {
- final long commandId = 9L;
+ final long commandId = 10L;
ArrayList elements = new ArrayList<>();
final long snapshotStreamIDFieldID = 0L;
@@ -60720,7 +60756,7 @@ public void setStreamPriorities(DefaultClusterCallback callback, ArrayList streamPriorities, int timedInvokeTimeoutMs) {
- final long commandId = 10L;
+ final long commandId = 11L;
ArrayList elements = new ArrayList<>();
final long streamPrioritiesFieldID = 0L;
@@ -60740,7 +60776,7 @@ public void captureSnapshot(CaptureSnapshotResponseCallback callback, Integer sn
}
public void captureSnapshot(CaptureSnapshotResponseCallback callback, Integer snapshotStreamID, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct requestedResolution, int timedInvokeTimeoutMs) {
- final long commandId = 11L;
+ final long commandId = 12L;
ArrayList elements = new ArrayList<>();
final long snapshotStreamIDFieldID = 0L;
@@ -60823,7 +60859,7 @@ public interface SupportedSnapshotParamsAttributeCallback extends BaseAttributeC
void onSuccess(List value);
}
- public interface FabricsUsingCameraAttributeCallback extends BaseAttributeCallback {
+ public interface SupportedStreamUsagesAttributeCallback extends BaseAttributeCallback {
void onSuccess(List value);
}
@@ -61236,9 +61272,9 @@ public void onSuccess(byte[] tlv) {
}, HDR_MODE_ENABLED_ATTRIBUTE_ID, minInterval, maxInterval);
}
- public void readFabricsUsingCameraAttribute(
- FabricsUsingCameraAttributeCallback callback) {
- ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, FABRICS_USING_CAMERA_ATTRIBUTE_ID);
+ public void readSupportedStreamUsagesAttribute(
+ SupportedStreamUsagesAttributeCallback callback) {
+ ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID);
readAttribute(new ReportCallbackImpl(callback, path) {
@Override
@@ -61246,12 +61282,12 @@ public void onSuccess(byte[] tlv) {
List value = ChipTLVValueDecoder.decodeAttributeValue(path, tlv);
callback.onSuccess(value);
}
- }, FABRICS_USING_CAMERA_ATTRIBUTE_ID, true);
+ }, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID, true);
}
- public void subscribeFabricsUsingCameraAttribute(
- FabricsUsingCameraAttributeCallback callback, int minInterval, int maxInterval) {
- ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, FABRICS_USING_CAMERA_ATTRIBUTE_ID);
+ public void subscribeSupportedStreamUsagesAttribute(
+ SupportedStreamUsagesAttributeCallback callback, int minInterval, int maxInterval) {
+ ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID);
subscribeAttribute(new ReportCallbackImpl(callback, path) {
@Override
@@ -61259,7 +61295,7 @@ public void onSuccess(byte[] tlv) {
List value = ChipTLVValueDecoder.decodeAttributeValue(path, tlv);
callback.onSuccess(value);
}
- }, FABRICS_USING_CAMERA_ATTRIBUTE_ID, minInterval, maxInterval);
+ }, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID, minInterval, maxInterval);
}
public void readAllocatedVideoStreamsAttribute(
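
For orientation, a hedged sketch of exercising the renamed attribute read and the new command through the generated cluster object above. The `cluster` instance is assumed to exist, the usages are assumed to be surfaced as a `List<Integer>` of raw StreamUsageEnum values as with other enum-list attributes, and the callback shapes follow the onSuccess/onError pattern of the other generated callbacks:

```java
import java.util.List;
import java.util.Optional;

import chip.devicecontroller.ChipClusters;

final class CameraAvStreamSketch {
  // Read SupportedStreamUsages and then enable the watermark on an already-allocated
  // snapshot stream via SnapshotStreamModify.
  static void readUsagesAndModify(
      ChipClusters.CameraAvStreamManagementCluster cluster, int snapshotStreamID) {

    cluster.readSupportedStreamUsagesAttribute(
        new ChipClusters.CameraAvStreamManagementCluster.SupportedStreamUsagesAttributeCallback() {
          @Override
          public void onSuccess(List<Integer> value) {
            // StreamUsageEnum values supported by the camera.
          }

          @Override
          public void onError(Exception error) {
            // Read failed.
          }
        });

    cluster.snapshotStreamModify(
        new ChipClusters.DefaultClusterCallback() {
          @Override
          public void onSuccess() {
            // Watermark flag updated on the stream.
          }

          @Override
          public void onError(Exception error) {
            // Modify failed.
          }
        },
        snapshotStreamID,
        Optional.of(true), // watermarkEnabled
        Optional.empty()); // OSDEnabled left unchanged
  }
}
```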
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ChipEventStructs.java b/src/controller/java/generated/java/chip/devicecontroller/ChipEventStructs.java
index 491de7a3337b83..04af0f6bd2afb4 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ChipEventStructs.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ChipEventStructs.java
@@ -6309,474 +6309,6 @@ public String toString() {
return output.toString();
}
}
-public static class CameraAvStreamManagementClusterVideoStreamChangedEvent {
- public Integer videoStreamID;
- public Optional streamUsage;
- public Optional videoCodec;
- public Optional minFrameRate;
- public Optional maxFrameRate;
- public Optional minResolution;
- public Optional maxResolution;
- public Optional minBitRate;
- public Optional maxBitRate;
- public Optional minFragmentLen;
- public Optional maxFragmentLen;
- private static final long VIDEO_STREAM_ID_ID = 0L;
- private static final long STREAM_USAGE_ID = 1L;
- private static final long VIDEO_CODEC_ID = 2L;
- private static final long MIN_FRAME_RATE_ID = 3L;
- private static final long MAX_FRAME_RATE_ID = 4L;
- private static final long MIN_RESOLUTION_ID = 5L;
- private static final long MAX_RESOLUTION_ID = 6L;
- private static final long MIN_BIT_RATE_ID = 7L;
- private static final long MAX_BIT_RATE_ID = 8L;
- private static final long MIN_FRAGMENT_LEN_ID = 9L;
- private static final long MAX_FRAGMENT_LEN_ID = 10L;
-
- public CameraAvStreamManagementClusterVideoStreamChangedEvent(
- Integer videoStreamID,
- Optional streamUsage,
- Optional videoCodec,
- Optional minFrameRate,
- Optional maxFrameRate,
- Optional minResolution,
- Optional maxResolution,
- Optional minBitRate,
- Optional maxBitRate,
- Optional minFragmentLen,
- Optional maxFragmentLen
- ) {
- this.videoStreamID = videoStreamID;
- this.streamUsage = streamUsage;
- this.videoCodec = videoCodec;
- this.minFrameRate = minFrameRate;
- this.maxFrameRate = maxFrameRate;
- this.minResolution = minResolution;
- this.maxResolution = maxResolution;
- this.minBitRate = minBitRate;
- this.maxBitRate = maxBitRate;
- this.minFragmentLen = minFragmentLen;
- this.maxFragmentLen = maxFragmentLen;
- }
-
- public StructType encodeTlv() {
- ArrayList values = new ArrayList<>();
- values.add(new StructElement(VIDEO_STREAM_ID_ID, new UIntType(videoStreamID)));
- values.add(new StructElement(STREAM_USAGE_ID, streamUsage.map((nonOptionalstreamUsage) -> new UIntType(nonOptionalstreamUsage)).orElse(new EmptyType())));
- values.add(new StructElement(VIDEO_CODEC_ID, videoCodec.map((nonOptionalvideoCodec) -> new UIntType(nonOptionalvideoCodec)).orElse(new EmptyType())));
- values.add(new StructElement(MIN_FRAME_RATE_ID, minFrameRate.map((nonOptionalminFrameRate) -> new UIntType(nonOptionalminFrameRate)).orElse(new EmptyType())));
- values.add(new StructElement(MAX_FRAME_RATE_ID, maxFrameRate.map((nonOptionalmaxFrameRate) -> new UIntType(nonOptionalmaxFrameRate)).orElse(new EmptyType())));
- values.add(new StructElement(MIN_RESOLUTION_ID, minResolution.map((nonOptionalminResolution) -> nonOptionalminResolution.encodeTlv()).orElse(new EmptyType())));
- values.add(new StructElement(MAX_RESOLUTION_ID, maxResolution.map((nonOptionalmaxResolution) -> nonOptionalmaxResolution.encodeTlv()).orElse(new EmptyType())));
- values.add(new StructElement(MIN_BIT_RATE_ID, minBitRate.map((nonOptionalminBitRate) -> new UIntType(nonOptionalminBitRate)).orElse(new EmptyType())));
- values.add(new StructElement(MAX_BIT_RATE_ID, maxBitRate.map((nonOptionalmaxBitRate) -> new UIntType(nonOptionalmaxBitRate)).orElse(new EmptyType())));
- values.add(new StructElement(MIN_FRAGMENT_LEN_ID, minFragmentLen.map((nonOptionalminFragmentLen) -> new UIntType(nonOptionalminFragmentLen)).orElse(new EmptyType())));
- values.add(new StructElement(MAX_FRAGMENT_LEN_ID, maxFragmentLen.map((nonOptionalmaxFragmentLen) -> new UIntType(nonOptionalmaxFragmentLen)).orElse(new EmptyType())));
-
- return new StructType(values);
- }
-
- public static CameraAvStreamManagementClusterVideoStreamChangedEvent decodeTlv(BaseTLVType tlvValue) {
- if (tlvValue == null || tlvValue.type() != TLVType.Struct) {
- return null;
- }
- Integer videoStreamID = null;
- Optional streamUsage = Optional.empty();
- Optional videoCodec = Optional.empty();
- Optional minFrameRate = Optional.empty();
- Optional maxFrameRate = Optional.empty();
- Optional minResolution = Optional.empty();
- Optional maxResolution = Optional.empty();
- Optional minBitRate = Optional.empty();
- Optional maxBitRate = Optional.empty();
- Optional minFragmentLen = Optional.empty();
- Optional maxFragmentLen = Optional.empty();
- for (StructElement element: ((StructType)tlvValue).value()) {
- if (element.contextTagNum() == VIDEO_STREAM_ID_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- videoStreamID = castingValue.value(Integer.class);
- }
- } else if (element.contextTagNum() == STREAM_USAGE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- streamUsage = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == VIDEO_CODEC_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- videoCodec = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == MIN_FRAME_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- minFrameRate = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == MAX_FRAME_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- maxFrameRate = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == MIN_RESOLUTION_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.Struct) {
- StructType castingValue = element.value(StructType.class);
- minResolution = Optional.of(ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct.decodeTlv(castingValue));
- }
- } else if (element.contextTagNum() == MAX_RESOLUTION_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.Struct) {
- StructType castingValue = element.value(StructType.class);
- maxResolution = Optional.of(ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct.decodeTlv(castingValue));
- }
- } else if (element.contextTagNum() == MIN_BIT_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- minBitRate = Optional.of(castingValue.value(Long.class));
- }
- } else if (element.contextTagNum() == MAX_BIT_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- maxBitRate = Optional.of(castingValue.value(Long.class));
- }
- } else if (element.contextTagNum() == MIN_FRAGMENT_LEN_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- minFragmentLen = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == MAX_FRAGMENT_LEN_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- maxFragmentLen = Optional.of(castingValue.value(Integer.class));
- }
- }
- }
- return new CameraAvStreamManagementClusterVideoStreamChangedEvent(
- videoStreamID,
- streamUsage,
- videoCodec,
- minFrameRate,
- maxFrameRate,
- minResolution,
- maxResolution,
- minBitRate,
- maxBitRate,
- minFragmentLen,
- maxFragmentLen
- );
- }
-
- @Override
- public String toString() {
- StringBuilder output = new StringBuilder();
- output.append("CameraAvStreamManagementClusterVideoStreamChangedEvent {\n");
- output.append("\tvideoStreamID: ");
- output.append(videoStreamID);
- output.append("\n");
- output.append("\tstreamUsage: ");
- output.append(streamUsage);
- output.append("\n");
- output.append("\tvideoCodec: ");
- output.append(videoCodec);
- output.append("\n");
- output.append("\tminFrameRate: ");
- output.append(minFrameRate);
- output.append("\n");
- output.append("\tmaxFrameRate: ");
- output.append(maxFrameRate);
- output.append("\n");
- output.append("\tminResolution: ");
- output.append(minResolution);
- output.append("\n");
- output.append("\tmaxResolution: ");
- output.append(maxResolution);
- output.append("\n");
- output.append("\tminBitRate: ");
- output.append(minBitRate);
- output.append("\n");
- output.append("\tmaxBitRate: ");
- output.append(maxBitRate);
- output.append("\n");
- output.append("\tminFragmentLen: ");
- output.append(minFragmentLen);
- output.append("\n");
- output.append("\tmaxFragmentLen: ");
- output.append(maxFragmentLen);
- output.append("\n");
- output.append("}\n");
- return output.toString();
- }
-}
-public static class CameraAvStreamManagementClusterAudioStreamChangedEvent {
- public Integer audioStreamID;
- public Optional streamUsage;
- public Optional audioCodec;
- public Optional channelCount;
- public Optional sampleRate;
- public Optional bitRate;
- public Optional bitDepth;
- private static final long AUDIO_STREAM_ID_ID = 0L;
- private static final long STREAM_USAGE_ID = 1L;
- private static final long AUDIO_CODEC_ID = 2L;
- private static final long CHANNEL_COUNT_ID = 3L;
- private static final long SAMPLE_RATE_ID = 4L;
- private static final long BIT_RATE_ID = 5L;
- private static final long BIT_DEPTH_ID = 6L;
-
- public CameraAvStreamManagementClusterAudioStreamChangedEvent(
- Integer audioStreamID,
- Optional streamUsage,
- Optional audioCodec,
- Optional channelCount,
- Optional sampleRate,
- Optional bitRate,
- Optional bitDepth
- ) {
- this.audioStreamID = audioStreamID;
- this.streamUsage = streamUsage;
- this.audioCodec = audioCodec;
- this.channelCount = channelCount;
- this.sampleRate = sampleRate;
- this.bitRate = bitRate;
- this.bitDepth = bitDepth;
- }
-
- public StructType encodeTlv() {
- ArrayList values = new ArrayList<>();
- values.add(new StructElement(AUDIO_STREAM_ID_ID, new UIntType(audioStreamID)));
- values.add(new StructElement(STREAM_USAGE_ID, streamUsage.map((nonOptionalstreamUsage) -> new UIntType(nonOptionalstreamUsage)).orElse(new EmptyType())));
- values.add(new StructElement(AUDIO_CODEC_ID, audioCodec.map((nonOptionalaudioCodec) -> new UIntType(nonOptionalaudioCodec)).orElse(new EmptyType())));
- values.add(new StructElement(CHANNEL_COUNT_ID, channelCount.map((nonOptionalchannelCount) -> new UIntType(nonOptionalchannelCount)).orElse(new EmptyType())));
- values.add(new StructElement(SAMPLE_RATE_ID, sampleRate.map((nonOptionalsampleRate) -> new UIntType(nonOptionalsampleRate)).orElse(new EmptyType())));
- values.add(new StructElement(BIT_RATE_ID, bitRate.map((nonOptionalbitRate) -> new UIntType(nonOptionalbitRate)).orElse(new EmptyType())));
- values.add(new StructElement(BIT_DEPTH_ID, bitDepth.map((nonOptionalbitDepth) -> new UIntType(nonOptionalbitDepth)).orElse(new EmptyType())));
-
- return new StructType(values);
- }
-
- public static CameraAvStreamManagementClusterAudioStreamChangedEvent decodeTlv(BaseTLVType tlvValue) {
- if (tlvValue == null || tlvValue.type() != TLVType.Struct) {
- return null;
- }
- Integer audioStreamID = null;
- Optional streamUsage = Optional.empty();
- Optional audioCodec = Optional.empty();
- Optional channelCount = Optional.empty();
- Optional sampleRate = Optional.empty();
- Optional bitRate = Optional.empty();
- Optional bitDepth = Optional.empty();
- for (StructElement element: ((StructType)tlvValue).value()) {
- if (element.contextTagNum() == AUDIO_STREAM_ID_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- audioStreamID = castingValue.value(Integer.class);
- }
- } else if (element.contextTagNum() == STREAM_USAGE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- streamUsage = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == AUDIO_CODEC_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- audioCodec = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == CHANNEL_COUNT_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- channelCount = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == SAMPLE_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- sampleRate = Optional.of(castingValue.value(Long.class));
- }
- } else if (element.contextTagNum() == BIT_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- bitRate = Optional.of(castingValue.value(Long.class));
- }
- } else if (element.contextTagNum() == BIT_DEPTH_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- bitDepth = Optional.of(castingValue.value(Integer.class));
- }
- }
- }
- return new CameraAvStreamManagementClusterAudioStreamChangedEvent(
- audioStreamID,
- streamUsage,
- audioCodec,
- channelCount,
- sampleRate,
- bitRate,
- bitDepth
- );
- }
-
- @Override
- public String toString() {
- StringBuilder output = new StringBuilder();
- output.append("CameraAvStreamManagementClusterAudioStreamChangedEvent {\n");
- output.append("\taudioStreamID: ");
- output.append(audioStreamID);
- output.append("\n");
- output.append("\tstreamUsage: ");
- output.append(streamUsage);
- output.append("\n");
- output.append("\taudioCodec: ");
- output.append(audioCodec);
- output.append("\n");
- output.append("\tchannelCount: ");
- output.append(channelCount);
- output.append("\n");
- output.append("\tsampleRate: ");
- output.append(sampleRate);
- output.append("\n");
- output.append("\tbitRate: ");
- output.append(bitRate);
- output.append("\n");
- output.append("\tbitDepth: ");
- output.append(bitDepth);
- output.append("\n");
- output.append("}\n");
- return output.toString();
- }
-}
-public static class CameraAvStreamManagementClusterSnapshotStreamChangedEvent {
- public Integer snapshotStreamID;
- public Optional imageCodec;
- public Optional frameRate;
- public Optional bitRate;
- public Optional minResolution;
- public Optional maxResolution;
- public Optional quality;
- private static final long SNAPSHOT_STREAM_ID_ID = 0L;
- private static final long IMAGE_CODEC_ID = 1L;
- private static final long FRAME_RATE_ID = 2L;
- private static final long BIT_RATE_ID = 3L;
- private static final long MIN_RESOLUTION_ID = 4L;
- private static final long MAX_RESOLUTION_ID = 5L;
- private static final long QUALITY_ID = 6L;
-
- public CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- Integer snapshotStreamID,
- Optional imageCodec,
- Optional frameRate,
- Optional bitRate,
- Optional minResolution,
- Optional maxResolution,
- Optional quality
- ) {
- this.snapshotStreamID = snapshotStreamID;
- this.imageCodec = imageCodec;
- this.frameRate = frameRate;
- this.bitRate = bitRate;
- this.minResolution = minResolution;
- this.maxResolution = maxResolution;
- this.quality = quality;
- }
-
- public StructType encodeTlv() {
- ArrayList values = new ArrayList<>();
- values.add(new StructElement(SNAPSHOT_STREAM_ID_ID, new UIntType(snapshotStreamID)));
- values.add(new StructElement(IMAGE_CODEC_ID, imageCodec.map((nonOptionalimageCodec) -> new UIntType(nonOptionalimageCodec)).orElse(new EmptyType())));
- values.add(new StructElement(FRAME_RATE_ID, frameRate.map((nonOptionalframeRate) -> new UIntType(nonOptionalframeRate)).orElse(new EmptyType())));
- values.add(new StructElement(BIT_RATE_ID, bitRate.map((nonOptionalbitRate) -> new UIntType(nonOptionalbitRate)).orElse(new EmptyType())));
- values.add(new StructElement(MIN_RESOLUTION_ID, minResolution.map((nonOptionalminResolution) -> nonOptionalminResolution.encodeTlv()).orElse(new EmptyType())));
- values.add(new StructElement(MAX_RESOLUTION_ID, maxResolution.map((nonOptionalmaxResolution) -> nonOptionalmaxResolution.encodeTlv()).orElse(new EmptyType())));
- values.add(new StructElement(QUALITY_ID, quality.map((nonOptionalquality) -> new UIntType(nonOptionalquality)).orElse(new EmptyType())));
-
- return new StructType(values);
- }
-
- public static CameraAvStreamManagementClusterSnapshotStreamChangedEvent decodeTlv(BaseTLVType tlvValue) {
- if (tlvValue == null || tlvValue.type() != TLVType.Struct) {
- return null;
- }
- Integer snapshotStreamID = null;
- Optional imageCodec = Optional.empty();
- Optional frameRate = Optional.empty();
- Optional bitRate = Optional.empty();
- Optional minResolution = Optional.empty();
- Optional maxResolution = Optional.empty();
- Optional quality = Optional.empty();
- for (StructElement element: ((StructType)tlvValue).value()) {
- if (element.contextTagNum() == SNAPSHOT_STREAM_ID_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- snapshotStreamID = castingValue.value(Integer.class);
- }
- } else if (element.contextTagNum() == IMAGE_CODEC_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- imageCodec = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == FRAME_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- frameRate = Optional.of(castingValue.value(Integer.class));
- }
- } else if (element.contextTagNum() == BIT_RATE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- bitRate = Optional.of(castingValue.value(Long.class));
- }
- } else if (element.contextTagNum() == MIN_RESOLUTION_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.Struct) {
- StructType castingValue = element.value(StructType.class);
- minResolution = Optional.of(ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct.decodeTlv(castingValue));
- }
- } else if (element.contextTagNum() == MAX_RESOLUTION_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.Struct) {
- StructType castingValue = element.value(StructType.class);
- maxResolution = Optional.of(ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct.decodeTlv(castingValue));
- }
- } else if (element.contextTagNum() == QUALITY_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
- UIntType castingValue = element.value(UIntType.class);
- quality = Optional.of(castingValue.value(Integer.class));
- }
- }
- }
- return new CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- snapshotStreamID,
- imageCodec,
- frameRate,
- bitRate,
- minResolution,
- maxResolution,
- quality
- );
- }
-
- @Override
- public String toString() {
- StringBuilder output = new StringBuilder();
- output.append("CameraAvStreamManagementClusterSnapshotStreamChangedEvent {\n");
- output.append("\tsnapshotStreamID: ");
- output.append(snapshotStreamID);
- output.append("\n");
- output.append("\timageCodec: ");
- output.append(imageCodec);
- output.append("\n");
- output.append("\tframeRate: ");
- output.append(frameRate);
- output.append("\n");
- output.append("\tbitRate: ");
- output.append(bitRate);
- output.append("\n");
- output.append("\tminResolution: ");
- output.append(minResolution);
- output.append("\n");
- output.append("\tmaxResolution: ");
- output.append(maxResolution);
- output.append("\n");
- output.append("\tquality: ");
- output.append(quality);
- output.append("\n");
- output.append("}\n");
- return output.toString();
- }
-}
public static class PushAvStreamTransportClusterPushTransportBeginEvent {
public Integer connectionID;
public Integer triggerType;
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ChipStructs.java b/src/controller/java/generated/java/chip/devicecontroller/ChipStructs.java
index 5614ac0f8fa5b6..1f549d49e5dcfe 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ChipStructs.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ChipStructs.java
@@ -14105,25 +14105,21 @@ public String toString() {
public static class CameraAvStreamManagementClusterVideoSensorParamsStruct {
public Integer sensorWidth;
public Integer sensorHeight;
- public Boolean HDRCapable;
public Integer maxFPS;
- public Integer maxHDRFPS;
+ public Optional<Integer> maxHDRFPS;
private static final long SENSOR_WIDTH_ID = 0L;
private static final long SENSOR_HEIGHT_ID = 1L;
- private static final long HDR_CAPABLE_ID = 2L;
- private static final long MAX_FPS_ID = 3L;
- private static final long MAX_HDRFPS_ID = 4L;
+ private static final long MAX_FPS_ID = 2L;
+ private static final long MAX_HDRFPS_ID = 3L;
public CameraAvStreamManagementClusterVideoSensorParamsStruct(
Integer sensorWidth,
Integer sensorHeight,
- Boolean HDRCapable,
Integer maxFPS,
- Integer maxHDRFPS
+ Optional<Integer> maxHDRFPS
) {
this.sensorWidth = sensorWidth;
this.sensorHeight = sensorHeight;
- this.HDRCapable = HDRCapable;
this.maxFPS = maxFPS;
this.maxHDRFPS = maxHDRFPS;
}
@@ -14132,9 +14128,8 @@ public StructType encodeTlv() {
ArrayList values = new ArrayList<>();
values.add(new StructElement(SENSOR_WIDTH_ID, new UIntType(sensorWidth)));
values.add(new StructElement(SENSOR_HEIGHT_ID, new UIntType(sensorHeight)));
- values.add(new StructElement(HDR_CAPABLE_ID, new BooleanType(HDRCapable)));
values.add(new StructElement(MAX_FPS_ID, new UIntType(maxFPS)));
- values.add(new StructElement(MAX_HDRFPS_ID, new UIntType(maxHDRFPS)));
+ values.add(new StructElement(MAX_HDRFPS_ID, maxHDRFPS.map((nonOptionalmaxHDRFPS) -> new UIntType(nonOptionalmaxHDRFPS)).orElse(new EmptyType())));
return new StructType(values);
}
@@ -14145,9 +14140,8 @@ public static CameraAvStreamManagementClusterVideoSensorParamsStruct decodeTlv(B
}
Integer sensorWidth = null;
Integer sensorHeight = null;
- Boolean HDRCapable = null;
Integer maxFPS = null;
- Integer maxHDRFPS = null;
+ Optional<Integer> maxHDRFPS = Optional.empty();
for (StructElement element: ((StructType)tlvValue).value()) {
if (element.contextTagNum() == SENSOR_WIDTH_ID) {
if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
@@ -14159,11 +14153,6 @@ public static CameraAvStreamManagementClusterVideoSensorParamsStruct decodeTlv(B
UIntType castingValue = element.value(UIntType.class);
sensorHeight = castingValue.value(Integer.class);
}
- } else if (element.contextTagNum() == HDR_CAPABLE_ID) {
- if (element.value(BaseTLVType.class).type() == TLVType.Boolean) {
- BooleanType castingValue = element.value(BooleanType.class);
- HDRCapable = castingValue.value(Boolean.class);
- }
} else if (element.contextTagNum() == MAX_FPS_ID) {
if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
UIntType castingValue = element.value(UIntType.class);
@@ -14172,14 +14161,13 @@ public static CameraAvStreamManagementClusterVideoSensorParamsStruct decodeTlv(B
} else if (element.contextTagNum() == MAX_HDRFPS_ID) {
if (element.value(BaseTLVType.class).type() == TLVType.UInt) {
UIntType castingValue = element.value(UIntType.class);
- maxHDRFPS = castingValue.value(Integer.class);
+ maxHDRFPS = Optional.of(castingValue.value(Integer.class));
}
}
}
return new CameraAvStreamManagementClusterVideoSensorParamsStruct(
sensorWidth,
sensorHeight,
- HDRCapable,
maxFPS,
maxHDRFPS
);
@@ -14195,9 +14183,6 @@ public String toString() {
output.append("\tsensorHeight: ");
output.append(sensorHeight);
output.append("\n");
- output.append("\tHDRCapable: ");
- output.append(HDRCapable);
- output.append("\n");
output.append("\tmaxFPS: ");
output.append(maxFPS);
output.append("\n");
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ClusterIDMapping.java b/src/controller/java/generated/java/chip/devicecontroller/ClusterIDMapping.java
index 885e1f045c0ccf..64242e9ab049a7 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ClusterIDMapping.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ClusterIDMapping.java
@@ -17284,7 +17284,7 @@ public enum Attribute {
MaxNetworkBandwidth(11L),
CurrentFrameRate(12L),
HDRModeEnabled(13L),
- FabricsUsingCamera(14L),
+ SupportedStreamUsages(14L),
AllocatedVideoStreams(15L),
AllocatedAudioStreams(16L),
AllocatedSnapshotStreams(17L),
@@ -17336,10 +17336,7 @@ public static Attribute value(long id) throws NoSuchFieldError {
}
}
- public enum Event {
- VideoStreamChanged(0L),
- AudioStreamChanged(1L),
- SnapshotStreamChanged(2L),;
+ public enum Event {;
private final long id;
Event(long id) {
this.id = id;
@@ -17366,9 +17363,10 @@ public enum Command {
VideoStreamModify(5L),
VideoStreamDeallocate(6L),
SnapshotStreamAllocate(7L),
- SnapshotStreamDeallocate(9L),
- SetStreamPriorities(10L),
- CaptureSnapshot(11L),;
+ SnapshotStreamModify(9L),
+ SnapshotStreamDeallocate(10L),
+ SetStreamPriorities(11L),
+ CaptureSnapshot(12L),;
private final long id;
Command(long id) {
this.id = id;
@@ -17471,7 +17469,7 @@ public static VideoStreamDeallocateCommandField value(int id) throws NoSuchField
}
throw new NoSuchFieldError();
}
- }public enum SnapshotStreamAllocateCommandField {ImageCodec(0),MaxFrameRate(1),BitRate(2),MinResolution(3),MaxResolution(4),Quality(5),;
+ }public enum SnapshotStreamAllocateCommandField {ImageCodec(0),MaxFrameRate(1),BitRate(2),MinResolution(3),MaxResolution(4),Quality(5),WatermarkEnabled(6),OSDEnabled(7),;
private final int id;
SnapshotStreamAllocateCommandField(int id) {
this.id = id;
@@ -17488,6 +17486,23 @@ public static SnapshotStreamAllocateCommandField value(int id) throws NoSuchFiel
}
throw new NoSuchFieldError();
}
+ }public enum SnapshotStreamModifyCommandField {SnapshotStreamID(0),WatermarkEnabled(1),OSDEnabled(2),;
+ private final int id;
+ SnapshotStreamModifyCommandField(int id) {
+ this.id = id;
+ }
+
+ public int getID() {
+ return id;
+ }
+ public static SnapshotStreamModifyCommandField value(int id) throws NoSuchFieldError {
+ for (SnapshotStreamModifyCommandField field : SnapshotStreamModifyCommandField.values()) {
+ if (field.getID() == id) {
+ return field;
+ }
+ }
+ throw new NoSuchFieldError();
+ }
}public enum SnapshotStreamDeallocateCommandField {SnapshotStreamID(0),;
private final int id;
SnapshotStreamDeallocateCommandField(int id) {
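
The value(...) helpers above resolve raw wire IDs to the renumbered constants. A small hedged sketch (assuming these enums sit under ClusterIDMapping.CameraAvStreamManagement, as with the other generated clusters, and that the Command enum has the usual value(long) lookup):

```java
import chip.devicecontroller.ClusterIDMapping;

final class IdLookupSketch {
  static void resolveIds() {
    // Command ID 9 now maps to SnapshotStreamModify (it was SnapshotStreamDeallocate before).
    ClusterIDMapping.CameraAvStreamManagement.Command command =
        ClusterIDMapping.CameraAvStreamManagement.Command.value(9L);

    // Field 1 of SnapshotStreamModify is WatermarkEnabled.
    ClusterIDMapping.CameraAvStreamManagement.SnapshotStreamModifyCommandField field =
        ClusterIDMapping.CameraAvStreamManagement.SnapshotStreamModifyCommandField.value(1);
  }
}
```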
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ClusterInfoMapping.java b/src/controller/java/generated/java/chip/devicecontroller/ClusterInfoMapping.java
index f10d149932b188..7d6a3303c0aedf 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ClusterInfoMapping.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ClusterInfoMapping.java
@@ -20493,7 +20493,7 @@ public void onError(Exception ex) {
}
}
- public static class DelegatedCameraAvStreamManagementClusterFabricsUsingCameraAttributeCallback implements ChipClusters.CameraAvStreamManagementCluster.FabricsUsingCameraAttributeCallback, DelegatedClusterCallback {
+ public static class DelegatedCameraAvStreamManagementClusterSupportedStreamUsagesAttributeCallback implements ChipClusters.CameraAvStreamManagementCluster.SupportedStreamUsagesAttributeCallback, DelegatedClusterCallback {
private ClusterCommandCallback callback;
@Override
public void setCallbackDelegate(ClusterCommandCallback callback) {
@@ -30728,6 +30728,12 @@ public Map<String, Map<String, InteractionInfo>> getCommandMap() {
CommandParameterInfo cameraAvStreamManagementsnapshotStreamAllocatequalityCommandParameterInfo = new CommandParameterInfo("quality", Integer.class, Integer.class);
cameraAvStreamManagementsnapshotStreamAllocateCommandParams.put("quality",cameraAvStreamManagementsnapshotStreamAllocatequalityCommandParameterInfo);
+
+ CommandParameterInfo cameraAvStreamManagementsnapshotStreamAllocatewatermarkEnabledCommandParameterInfo = new CommandParameterInfo("watermarkEnabled", Optional.class, Boolean.class);
+ cameraAvStreamManagementsnapshotStreamAllocateCommandParams.put("watermarkEnabled",cameraAvStreamManagementsnapshotStreamAllocatewatermarkEnabledCommandParameterInfo);
+
+ CommandParameterInfo cameraAvStreamManagementsnapshotStreamAllocateOSDEnabledCommandParameterInfo = new CommandParameterInfo("OSDEnabled", Optional.class, Boolean.class);
+ cameraAvStreamManagementsnapshotStreamAllocateCommandParams.put("OSDEnabled",cameraAvStreamManagementsnapshotStreamAllocateOSDEnabledCommandParameterInfo);
InteractionInfo cameraAvStreamManagementsnapshotStreamAllocateInteractionInfo = new InteractionInfo(
(cluster, callback, commandArguments) -> {
((ChipClusters.CameraAvStreamManagementCluster) cluster)
@@ -30750,6 +30756,12 @@ public Map<String, Map<String, InteractionInfo>> getCommandMap() {
, (Integer)
commandArguments.get("quality")
+ , (Optional<Boolean>)
+ commandArguments.get("watermarkEnabled")
+
+ , (Optional<Boolean>)
+ commandArguments.get("OSDEnabled")
+
);
},
() -> new DelegatedCameraAvStreamManagementClusterSnapshotStreamAllocateResponseCallback(),
@@ -30757,6 +30769,33 @@ public Map<String, Map<String, InteractionInfo>> getCommandMap() {
);
cameraAvStreamManagementClusterInteractionInfoMap.put("snapshotStreamAllocate", cameraAvStreamManagementsnapshotStreamAllocateInteractionInfo);
+ Map<String, CommandParameterInfo> cameraAvStreamManagementsnapshotStreamModifyCommandParams = new LinkedHashMap<String, CommandParameterInfo>();
+
+ CommandParameterInfo cameraAvStreamManagementsnapshotStreamModifysnapshotStreamIDCommandParameterInfo = new CommandParameterInfo("snapshotStreamID", Integer.class, Integer.class);
+ cameraAvStreamManagementsnapshotStreamModifyCommandParams.put("snapshotStreamID",cameraAvStreamManagementsnapshotStreamModifysnapshotStreamIDCommandParameterInfo);
+
+ CommandParameterInfo cameraAvStreamManagementsnapshotStreamModifywatermarkEnabledCommandParameterInfo = new CommandParameterInfo("watermarkEnabled", Optional.class, Boolean.class);
+ cameraAvStreamManagementsnapshotStreamModifyCommandParams.put("watermarkEnabled",cameraAvStreamManagementsnapshotStreamModifywatermarkEnabledCommandParameterInfo);
+
+ CommandParameterInfo cameraAvStreamManagementsnapshotStreamModifyOSDEnabledCommandParameterInfo = new CommandParameterInfo("OSDEnabled", Optional.class, Boolean.class);
+ cameraAvStreamManagementsnapshotStreamModifyCommandParams.put("OSDEnabled",cameraAvStreamManagementsnapshotStreamModifyOSDEnabledCommandParameterInfo);
+ InteractionInfo cameraAvStreamManagementsnapshotStreamModifyInteractionInfo = new InteractionInfo(
+ (cluster, callback, commandArguments) -> {
+ ((ChipClusters.CameraAvStreamManagementCluster) cluster)
+ .snapshotStreamModify((DefaultClusterCallback) callback
+ , (Integer)
+ commandArguments.get("snapshotStreamID")
+ , (Optional<Boolean>)
+ commandArguments.get("watermarkEnabled")
+ , (Optional<Boolean>)
+ commandArguments.get("OSDEnabled")
+ );
+ },
+ () -> new DelegatedDefaultClusterCallback(),
+ cameraAvStreamManagementsnapshotStreamModifyCommandParams
+ );
+ cameraAvStreamManagementClusterInteractionInfoMap.put("snapshotStreamModify", cameraAvStreamManagementsnapshotStreamModifyInteractionInfo);
+
Map cameraAvStreamManagementsnapshotStreamDeallocateCommandParams = new LinkedHashMap();
CommandParameterInfo cameraAvStreamManagementsnapshotStreamDeallocatesnapshotStreamIDCommandParameterInfo = new CommandParameterInfo("snapshotStreamID", Integer.class, Integer.class);
diff --git a/src/controller/java/generated/java/chip/devicecontroller/ClusterReadMapping.java b/src/controller/java/generated/java/chip/devicecontroller/ClusterReadMapping.java
index ff8503bec2e239..0281673b384c4b 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/ClusterReadMapping.java
+++ b/src/controller/java/generated/java/chip/devicecontroller/ClusterReadMapping.java
@@ -18564,17 +18564,17 @@ private static Map readCameraAvStreamManagementInteract
readCameraAvStreamManagementHDRModeEnabledCommandParams
);
result.put("readHDRModeEnabledAttribute", readCameraAvStreamManagementHDRModeEnabledAttributeInteractionInfo);
- Map<String, CommandParameterInfo> readCameraAvStreamManagementFabricsUsingCameraCommandParams = new LinkedHashMap<String, CommandParameterInfo>();
- InteractionInfo readCameraAvStreamManagementFabricsUsingCameraAttributeInteractionInfo = new InteractionInfo(
+ Map<String, CommandParameterInfo> readCameraAvStreamManagementSupportedStreamUsagesCommandParams = new LinkedHashMap<String, CommandParameterInfo>();
+ InteractionInfo readCameraAvStreamManagementSupportedStreamUsagesAttributeInteractionInfo = new InteractionInfo(
(cluster, callback, commandArguments) -> {
- ((ChipClusters.CameraAvStreamManagementCluster) cluster).readFabricsUsingCameraAttribute(
- (ChipClusters.CameraAvStreamManagementCluster.FabricsUsingCameraAttributeCallback) callback
+ ((ChipClusters.CameraAvStreamManagementCluster) cluster).readSupportedStreamUsagesAttribute(
+ (ChipClusters.CameraAvStreamManagementCluster.SupportedStreamUsagesAttributeCallback) callback
);
},
- () -> new ClusterInfoMapping.DelegatedCameraAvStreamManagementClusterFabricsUsingCameraAttributeCallback(),
- readCameraAvStreamManagementFabricsUsingCameraCommandParams
+ () -> new ClusterInfoMapping.DelegatedCameraAvStreamManagementClusterSupportedStreamUsagesAttributeCallback(),
+ readCameraAvStreamManagementSupportedStreamUsagesCommandParams
);
- result.put("readFabricsUsingCameraAttribute", readCameraAvStreamManagementFabricsUsingCameraAttributeInteractionInfo);
+ result.put("readSupportedStreamUsagesAttribute", readCameraAvStreamManagementSupportedStreamUsagesAttributeInteractionInfo);
Map readCameraAvStreamManagementAllocatedVideoStreamsCommandParams = new LinkedHashMap();
InteractionInfo readCameraAvStreamManagementAllocatedVideoStreamsAttributeInteractionInfo = new InteractionInfo(
(cluster, callback, commandArguments) -> {
diff --git a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt b/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt
deleted file mode 100644
index 4c6471dddbb5e7..00000000000000
--- a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package chip.devicecontroller.cluster.eventstructs
-
-import chip.devicecontroller.cluster.*
-import java.util.Optional
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterAudioStreamChangedEvent(
- val audioStreamID: UInt,
- val streamUsage: Optional,
- val audioCodec: Optional,
- val channelCount: Optional,
- val sampleRate: Optional,
- val bitRate: Optional,
- val bitDepth: Optional,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterAudioStreamChangedEvent {\n")
- append("\taudioStreamID : $audioStreamID\n")
- append("\tstreamUsage : $streamUsage\n")
- append("\taudioCodec : $audioCodec\n")
- append("\tchannelCount : $channelCount\n")
- append("\tsampleRate : $sampleRate\n")
- append("\tbitRate : $bitRate\n")
- append("\tbitDepth : $bitDepth\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_AUDIO_STREAM_ID), audioStreamID)
- if (streamUsage.isPresent) {
- val optstreamUsage = streamUsage.get()
- put(ContextSpecificTag(TAG_STREAM_USAGE), optstreamUsage)
- }
- if (audioCodec.isPresent) {
- val optaudioCodec = audioCodec.get()
- put(ContextSpecificTag(TAG_AUDIO_CODEC), optaudioCodec)
- }
- if (channelCount.isPresent) {
- val optchannelCount = channelCount.get()
- put(ContextSpecificTag(TAG_CHANNEL_COUNT), optchannelCount)
- }
- if (sampleRate.isPresent) {
- val optsampleRate = sampleRate.get()
- put(ContextSpecificTag(TAG_SAMPLE_RATE), optsampleRate)
- }
- if (bitRate.isPresent) {
- val optbitRate = bitRate.get()
- put(ContextSpecificTag(TAG_BIT_RATE), optbitRate)
- }
- if (bitDepth.isPresent) {
- val optbitDepth = bitDepth.get()
- put(ContextSpecificTag(TAG_BIT_DEPTH), optbitDepth)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_AUDIO_STREAM_ID = 0
- private const val TAG_STREAM_USAGE = 1
- private const val TAG_AUDIO_CODEC = 2
- private const val TAG_CHANNEL_COUNT = 3
- private const val TAG_SAMPLE_RATE = 4
- private const val TAG_BIT_RATE = 5
- private const val TAG_BIT_DEPTH = 6
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterAudioStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val audioStreamID = tlvReader.getUInt(ContextSpecificTag(TAG_AUDIO_STREAM_ID))
- val streamUsage =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_STREAM_USAGE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_STREAM_USAGE)))
- } else {
- Optional.empty()
- }
- val audioCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_AUDIO_CODEC))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_AUDIO_CODEC)))
- } else {
- Optional.empty()
- }
- val channelCount =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_CHANNEL_COUNT))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_CHANNEL_COUNT)))
- } else {
- Optional.empty()
- }
- val sampleRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_SAMPLE_RATE))) {
- Optional.of(tlvReader.getULong(ContextSpecificTag(TAG_SAMPLE_RATE)))
- } else {
- Optional.empty()
- }
- val bitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_RATE))) {
- Optional.of(tlvReader.getULong(ContextSpecificTag(TAG_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val bitDepth =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_DEPTH))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_BIT_DEPTH)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterAudioStreamChangedEvent(
- audioStreamID,
- streamUsage,
- audioCodec,
- channelCount,
- sampleRate,
- bitRate,
- bitDepth,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt b/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt
deleted file mode 100644
index 7863af3d86b853..00000000000000
--- a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package chip.devicecontroller.cluster.eventstructs
-
-import chip.devicecontroller.cluster.*
-import java.util.Optional
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- val snapshotStreamID: UInt,
- val imageCodec: Optional<UInt>,
- val frameRate: Optional<UInt>,
- val bitRate: Optional<ULong>,
- val minResolution:
- Optional<
- chip.devicecontroller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val maxResolution:
- Optional<
- chip.devicecontroller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val quality: Optional<UInt>,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterSnapshotStreamChangedEvent {\n")
- append("\tsnapshotStreamID : $snapshotStreamID\n")
- append("\timageCodec : $imageCodec\n")
- append("\tframeRate : $frameRate\n")
- append("\tbitRate : $bitRate\n")
- append("\tminResolution : $minResolution\n")
- append("\tmaxResolution : $maxResolution\n")
- append("\tquality : $quality\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID), snapshotStreamID)
- if (imageCodec.isPresent) {
- val optimageCodec = imageCodec.get()
- put(ContextSpecificTag(TAG_IMAGE_CODEC), optimageCodec)
- }
- if (frameRate.isPresent) {
- val optframeRate = frameRate.get()
- put(ContextSpecificTag(TAG_FRAME_RATE), optframeRate)
- }
- if (bitRate.isPresent) {
- val optbitRate = bitRate.get()
- put(ContextSpecificTag(TAG_BIT_RATE), optbitRate)
- }
- if (minResolution.isPresent) {
- val optminResolution = minResolution.get()
- optminResolution.toTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), this)
- }
- if (maxResolution.isPresent) {
- val optmaxResolution = maxResolution.get()
- optmaxResolution.toTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), this)
- }
- if (quality.isPresent) {
- val optquality = quality.get()
- put(ContextSpecificTag(TAG_QUALITY), optquality)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_SNAPSHOT_STREAM_ID = 0
- private const val TAG_IMAGE_CODEC = 1
- private const val TAG_FRAME_RATE = 2
- private const val TAG_BIT_RATE = 3
- private const val TAG_MIN_RESOLUTION = 4
- private const val TAG_MAX_RESOLUTION = 5
- private const val TAG_QUALITY = 6
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterSnapshotStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val snapshotStreamID = tlvReader.getUInt(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID))
- val imageCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_IMAGE_CODEC))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_IMAGE_CODEC)))
- } else {
- Optional.empty()
- }
- val frameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_FRAME_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val bitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_RATE))) {
- Optional.of(tlvReader.getULong(ContextSpecificTag(TAG_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val minResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_RESOLUTION))) {
- Optional.of(
- chip.devicecontroller.cluster.structs
- .CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val maxResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_RESOLUTION))) {
- Optional.of(
- chip.devicecontroller.cluster.structs
- .CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val quality =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_QUALITY))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_QUALITY)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- snapshotStreamID,
- imageCodec,
- frameRate,
- bitRate,
- minResolution,
- maxResolution,
- quality,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt b/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt
deleted file mode 100644
index 802ce6a49c8c67..00000000000000
--- a/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package chip.devicecontroller.cluster.eventstructs
-
-import chip.devicecontroller.cluster.*
-import java.util.Optional
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterVideoStreamChangedEvent(
- val videoStreamID: UInt,
- val streamUsage: Optional<UInt>,
- val videoCodec: Optional<UInt>,
- val minFrameRate: Optional<UInt>,
- val maxFrameRate: Optional<UInt>,
- val minResolution:
- Optional<
- chip.devicecontroller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val maxResolution:
- Optional<
- chip.devicecontroller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val minBitRate: Optional<ULong>,
- val maxBitRate: Optional<ULong>,
- val minFragmentLen: Optional<UInt>,
- val maxFragmentLen: Optional<UInt>,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterVideoStreamChangedEvent {\n")
- append("\tvideoStreamID : $videoStreamID\n")
- append("\tstreamUsage : $streamUsage\n")
- append("\tvideoCodec : $videoCodec\n")
- append("\tminFrameRate : $minFrameRate\n")
- append("\tmaxFrameRate : $maxFrameRate\n")
- append("\tminResolution : $minResolution\n")
- append("\tmaxResolution : $maxResolution\n")
- append("\tminBitRate : $minBitRate\n")
- append("\tmaxBitRate : $maxBitRate\n")
- append("\tminFragmentLen : $minFragmentLen\n")
- append("\tmaxFragmentLen : $maxFragmentLen\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_VIDEO_STREAM_ID), videoStreamID)
- if (streamUsage.isPresent) {
- val optstreamUsage = streamUsage.get()
- put(ContextSpecificTag(TAG_STREAM_USAGE), optstreamUsage)
- }
- if (videoCodec.isPresent) {
- val optvideoCodec = videoCodec.get()
- put(ContextSpecificTag(TAG_VIDEO_CODEC), optvideoCodec)
- }
- if (minFrameRate.isPresent) {
- val optminFrameRate = minFrameRate.get()
- put(ContextSpecificTag(TAG_MIN_FRAME_RATE), optminFrameRate)
- }
- if (maxFrameRate.isPresent) {
- val optmaxFrameRate = maxFrameRate.get()
- put(ContextSpecificTag(TAG_MAX_FRAME_RATE), optmaxFrameRate)
- }
- if (minResolution.isPresent) {
- val optminResolution = minResolution.get()
- optminResolution.toTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), this)
- }
- if (maxResolution.isPresent) {
- val optmaxResolution = maxResolution.get()
- optmaxResolution.toTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), this)
- }
- if (minBitRate.isPresent) {
- val optminBitRate = minBitRate.get()
- put(ContextSpecificTag(TAG_MIN_BIT_RATE), optminBitRate)
- }
- if (maxBitRate.isPresent) {
- val optmaxBitRate = maxBitRate.get()
- put(ContextSpecificTag(TAG_MAX_BIT_RATE), optmaxBitRate)
- }
- if (minFragmentLen.isPresent) {
- val optminFragmentLen = minFragmentLen.get()
- put(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN), optminFragmentLen)
- }
- if (maxFragmentLen.isPresent) {
- val optmaxFragmentLen = maxFragmentLen.get()
- put(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN), optmaxFragmentLen)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_VIDEO_STREAM_ID = 0
- private const val TAG_STREAM_USAGE = 1
- private const val TAG_VIDEO_CODEC = 2
- private const val TAG_MIN_FRAME_RATE = 3
- private const val TAG_MAX_FRAME_RATE = 4
- private const val TAG_MIN_RESOLUTION = 5
- private const val TAG_MAX_RESOLUTION = 6
- private const val TAG_MIN_BIT_RATE = 7
- private const val TAG_MAX_BIT_RATE = 8
- private const val TAG_MIN_FRAGMENT_LEN = 9
- private const val TAG_MAX_FRAGMENT_LEN = 10
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterVideoStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val videoStreamID = tlvReader.getUInt(ContextSpecificTag(TAG_VIDEO_STREAM_ID))
- val streamUsage =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_STREAM_USAGE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_STREAM_USAGE)))
- } else {
- Optional.empty()
- }
- val videoCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_VIDEO_CODEC))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_VIDEO_CODEC)))
- } else {
- Optional.empty()
- }
- val minFrameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_FRAME_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MIN_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val maxFrameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_FRAME_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MAX_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val minResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_RESOLUTION))) {
- Optional.of(
- chip.devicecontroller.cluster.structs
- .CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val maxResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_RESOLUTION))) {
- Optional.of(
- chip.devicecontroller.cluster.structs
- .CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val minBitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_BIT_RATE))) {
- Optional.of(tlvReader.getULong(ContextSpecificTag(TAG_MIN_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val maxBitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_BIT_RATE))) {
- Optional.of(tlvReader.getULong(ContextSpecificTag(TAG_MAX_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val minFragmentLen =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN)))
- } else {
- Optional.empty()
- }
- val maxFragmentLen =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterVideoStreamChangedEvent(
- videoStreamID,
- streamUsage,
- videoCodec,
- minFrameRate,
- maxFrameRate,
- minResolution,
- maxResolution,
- minBitRate,
- maxBitRate,
- minFragmentLen,
- maxFragmentLen,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/chip/devicecontroller/cluster/files.gni b/src/controller/java/generated/java/chip/devicecontroller/cluster/files.gni
index d5c85a30254287..8c1864dd3cb333 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/cluster/files.gni
+++ b/src/controller/java/generated/java/chip/devicecontroller/cluster/files.gni
@@ -210,9 +210,6 @@ eventstructs_sources = [
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/BridgedDeviceBasicInformationClusterActiveChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/BridgedDeviceBasicInformationClusterReachableChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/BridgedDeviceBasicInformationClusterStartUpEvent.kt",
- "${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt",
- "${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt",
- "${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/CommissionerControlClusterCommissioningRequestResultEvent.kt",
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/DemandResponseLoadControlClusterLoadControlEventStatusChangeEvent.kt",
"${chip_root}/src/controller/java/generated/java/chip/devicecontroller/cluster/eventstructs/DeviceEnergyManagementClusterPowerAdjustEndEvent.kt",
diff --git a/src/controller/java/generated/java/chip/devicecontroller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt b/src/controller/java/generated/java/chip/devicecontroller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
index ed6516bd11c587..4aee8408a85c74 100644
--- a/src/controller/java/generated/java/chip/devicecontroller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
+++ b/src/controller/java/generated/java/chip/devicecontroller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
@@ -17,6 +17,7 @@
package chip.devicecontroller.cluster.structs
import chip.devicecontroller.cluster.*
+import java.util.Optional
import matter.tlv.ContextSpecificTag
import matter.tlv.Tag
import matter.tlv.TlvReader
@@ -25,15 +26,13 @@ import matter.tlv.TlvWriter
class CameraAvStreamManagementClusterVideoSensorParamsStruct(
val sensorWidth: UInt,
val sensorHeight: UInt,
- val HDRCapable: Boolean,
val maxFPS: UInt,
- val maxHDRFPS: UInt,
+ val maxHDRFPS: Optional<UInt>,
) {
override fun toString(): String = buildString {
append("CameraAvStreamManagementClusterVideoSensorParamsStruct {\n")
append("\tsensorWidth : $sensorWidth\n")
append("\tsensorHeight : $sensorHeight\n")
- append("\tHDRCapable : $HDRCapable\n")
append("\tmaxFPS : $maxFPS\n")
append("\tmaxHDRFPS : $maxHDRFPS\n")
append("}\n")
@@ -44,9 +43,11 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
startStructure(tlvTag)
put(ContextSpecificTag(TAG_SENSOR_WIDTH), sensorWidth)
put(ContextSpecificTag(TAG_SENSOR_HEIGHT), sensorHeight)
- put(ContextSpecificTag(TAG_HDR_CAPABLE), HDRCapable)
put(ContextSpecificTag(TAG_MAX_FPS), maxFPS)
- put(ContextSpecificTag(TAG_MAX_HDRFPS), maxHDRFPS)
+ if (maxHDRFPS.isPresent) {
+ val optmaxHDRFPS = maxHDRFPS.get()
+ put(ContextSpecificTag(TAG_MAX_HDRFPS), optmaxHDRFPS)
+ }
endStructure()
}
}
@@ -54,9 +55,8 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
companion object {
private const val TAG_SENSOR_WIDTH = 0
private const val TAG_SENSOR_HEIGHT = 1
- private const val TAG_HDR_CAPABLE = 2
- private const val TAG_MAX_FPS = 3
- private const val TAG_MAX_HDRFPS = 4
+ private const val TAG_MAX_FPS = 2
+ private const val TAG_MAX_HDRFPS = 3
fun fromTlv(
tlvTag: Tag,
@@ -65,16 +65,19 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
tlvReader.enterStructure(tlvTag)
val sensorWidth = tlvReader.getUInt(ContextSpecificTag(TAG_SENSOR_WIDTH))
val sensorHeight = tlvReader.getUInt(ContextSpecificTag(TAG_SENSOR_HEIGHT))
- val HDRCapable = tlvReader.getBoolean(ContextSpecificTag(TAG_HDR_CAPABLE))
val maxFPS = tlvReader.getUInt(ContextSpecificTag(TAG_MAX_FPS))
- val maxHDRFPS = tlvReader.getUInt(ContextSpecificTag(TAG_MAX_HDRFPS))
+ val maxHDRFPS =
+ if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_HDRFPS))) {
+ Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MAX_HDRFPS)))
+ } else {
+ Optional.empty()
+ }
tlvReader.exitContainer()
return CameraAvStreamManagementClusterVideoSensorParamsStruct(
sensorWidth,
sensorHeight,
- HDRCapable,
maxFPS,
maxHDRFPS,
)
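
Illustrative sketch (not part of the generated diff): with HDRCapable removed and maxHDRFPS now optional, a caller of the chip.devicecontroller struct can construct it roughly as below; the resolution and frame-rate literals are hypothetical, and reading an absent maxHDRFPS as "no HDR support" is an interpretation of the change, not stated in the diff.

// Sketch only; values are hypothetical.
import java.util.Optional
import chip.devicecontroller.cluster.structs.CameraAvStreamManagementClusterVideoSensorParamsStruct

// Sensor that reports a separate HDR frame rate:
val hdrSensor =
  CameraAvStreamManagementClusterVideoSensorParamsStruct(
    sensorWidth = 1920u,
    sensorHeight = 1080u,
    maxFPS = 60u,
    maxHDRFPS = Optional.of(30u),
  )

// Sensor without an HDR frame rate simply leaves the optional field empty:
val sdrSensor =
  CameraAvStreamManagementClusterVideoSensorParamsStruct(
    sensorWidth = 1280u,
    sensorHeight = 720u,
    maxFPS = 30u,
    maxHDRFPS = Optional.empty(),
  )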
diff --git a/src/controller/java/generated/java/matter/controller/cluster/clusters/CameraAvStreamManagementCluster.kt b/src/controller/java/generated/java/matter/controller/cluster/clusters/CameraAvStreamManagementCluster.kt
index 037f97217b68a3..8a156bfad42144 100644
--- a/src/controller/java/generated/java/matter/controller/cluster/clusters/CameraAvStreamManagementCluster.kt
+++ b/src/controller/java/generated/java/matter/controller/cluster/clusters/CameraAvStreamManagementCluster.kt
@@ -139,14 +139,14 @@ class CameraAvStreamManagementCluster(
object SubscriptionEstablished : SupportedSnapshotParamsAttributeSubscriptionState()
}
- class FabricsUsingCameraAttribute(val value: List<UByte>)
+ class SupportedStreamUsagesAttribute(val value: List<UByte>)
- sealed class FabricsUsingCameraAttributeSubscriptionState {
- data class Success(val value: List<UByte>) : FabricsUsingCameraAttributeSubscriptionState()
+ sealed class SupportedStreamUsagesAttributeSubscriptionState {
+ data class Success(val value: List<UByte>) : SupportedStreamUsagesAttributeSubscriptionState()
- data class Error(val exception: Exception) : FabricsUsingCameraAttributeSubscriptionState()
+ data class Error(val exception: Exception) : SupportedStreamUsagesAttributeSubscriptionState()
- object SubscriptionEstablished : FabricsUsingCameraAttributeSubscriptionState()
+ object SubscriptionEstablished : SupportedStreamUsagesAttributeSubscriptionState()
}
class AllocatedVideoStreamsAttribute(
@@ -495,6 +495,8 @@ class CameraAvStreamManagementCluster(
minResolution: CameraAvStreamManagementClusterVideoResolutionStruct,
maxResolution: CameraAvStreamManagementClusterVideoResolutionStruct,
quality: UByte,
+ watermarkEnabled: Boolean?,
+ OSDEnabled: Boolean?,
timedInvokeTimeout: Duration? = null,
): SnapshotStreamAllocateResponse {
val commandId: UInt = 7u
@@ -519,6 +521,14 @@ class CameraAvStreamManagementCluster(
val TAG_QUALITY_REQ: Int = 5
tlvWriter.put(ContextSpecificTag(TAG_QUALITY_REQ), quality)
+
+ val TAG_WATERMARK_ENABLED_REQ: Int = 6
+ watermarkEnabled?.let {
+ tlvWriter.put(ContextSpecificTag(TAG_WATERMARK_ENABLED_REQ), watermarkEnabled)
+ }
+
+ val TAG_OSD_ENABLED_REQ: Int = 7
+ OSDEnabled?.let { tlvWriter.put(ContextSpecificTag(TAG_OSD_ENABLED_REQ), OSDEnabled) }
tlvWriter.endStructure()
val request: InvokeRequest =
@@ -555,8 +565,10 @@ class CameraAvStreamManagementCluster(
return SnapshotStreamAllocateResponse(snapshotStreamID_decoded)
}
- suspend fun snapshotStreamDeallocate(
+ suspend fun snapshotStreamModify(
snapshotStreamID: UShort,
+ watermarkEnabled: Boolean?,
+ OSDEnabled: Boolean?,
timedInvokeTimeout: Duration? = null,
) {
val commandId: UInt = 9u
@@ -564,6 +576,38 @@ class CameraAvStreamManagementCluster(
val tlvWriter = TlvWriter()
tlvWriter.startStructure(AnonymousTag)
+ val TAG_SNAPSHOT_STREAM_ID_REQ: Int = 0
+ tlvWriter.put(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID_REQ), snapshotStreamID)
+
+ val TAG_WATERMARK_ENABLED_REQ: Int = 1
+ watermarkEnabled?.let {
+ tlvWriter.put(ContextSpecificTag(TAG_WATERMARK_ENABLED_REQ), watermarkEnabled)
+ }
+
+ val TAG_OSD_ENABLED_REQ: Int = 2
+ OSDEnabled?.let { tlvWriter.put(ContextSpecificTag(TAG_OSD_ENABLED_REQ), OSDEnabled) }
+ tlvWriter.endStructure()
+
+ val request: InvokeRequest =
+ InvokeRequest(
+ CommandPath(endpointId, clusterId = CLUSTER_ID, commandId),
+ tlvPayload = tlvWriter.getEncoded(),
+ timedRequest = timedInvokeTimeout,
+ )
+
+ val response: InvokeResponse = controller.invoke(request)
+ logger.log(Level.FINE, "Invoke command succeeded: ${response}")
+ }
+
+ suspend fun snapshotStreamDeallocate(
+ snapshotStreamID: UShort,
+ timedInvokeTimeout: Duration? = null,
+ ) {
+ val commandId: UInt = 10u
+
+ val tlvWriter = TlvWriter()
+ tlvWriter.startStructure(AnonymousTag)
+
val TAG_SNAPSHOT_STREAM_ID_REQ: Int = 0
tlvWriter.put(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID_REQ), snapshotStreamID)
tlvWriter.endStructure()
@@ -583,7 +627,7 @@ class CameraAvStreamManagementCluster(
streamPriorities: List<UByte>,
timedInvokeTimeout: Duration? = null,
) {
- val commandId: UInt = 10u
+ val commandId: UInt = 11u
val tlvWriter = TlvWriter()
tlvWriter.startStructure(AnonymousTag)
@@ -612,7 +656,7 @@ class CameraAvStreamManagementCluster(
requestedResolution: CameraAvStreamManagementClusterVideoResolutionStruct,
timedInvokeTimeout: Duration? = null,
): CaptureSnapshotResponse {
- val commandId: UInt = 11u
+ val commandId: UInt = 12u
val tlvWriter = TlvWriter()
tlvWriter.startStructure(AnonymousTag)
@@ -1265,7 +1309,7 @@ class CameraAvStreamManagementCluster(
}
}
- suspend fun readMaxContentBufferSizeAttribute(): UInt? {
+ suspend fun readMaxContentBufferSizeAttribute(): UInt {
val ATTRIBUTE_ID: UInt = 6u
val attributePath =
@@ -1291,12 +1335,7 @@ class CameraAvStreamManagementCluster(
// Decode the TLV data into the appropriate type
val tlvReader = TlvReader(attributeData.data)
- val decodedValue: UInt? =
- if (tlvReader.isNextTag(AnonymousTag)) {
- tlvReader.getUInt(AnonymousTag)
- } else {
- null
- }
+ val decodedValue: UInt = tlvReader.getUInt(AnonymousTag)
return decodedValue
}
@@ -1342,14 +1381,9 @@ class CameraAvStreamManagementCluster(
// Decode the TLV data into the appropriate type
val tlvReader = TlvReader(attributeData.data)
- val decodedValue: UInt? =
- if (tlvReader.isNextTag(AnonymousTag)) {
- tlvReader.getUInt(AnonymousTag)
- } else {
- null
- }
+ val decodedValue: UInt = tlvReader.getUInt(AnonymousTag)
- decodedValue?.let { emit(UIntSubscriptionState.Success(it)) }
+ emit(UIntSubscriptionState.Success(decodedValue))
}
SubscriptionState.SubscriptionEstablished -> {
emit(UIntSubscriptionState.SubscriptionEstablished)
@@ -2064,7 +2098,7 @@ class CameraAvStreamManagementCluster(
}
}
- suspend fun readFabricsUsingCameraAttribute(): FabricsUsingCameraAttribute {
+ suspend fun readSupportedStreamUsagesAttribute(): SupportedStreamUsagesAttribute {
val ATTRIBUTE_ID: UInt = 14u
val attributePath =
@@ -2086,7 +2120,7 @@ class CameraAvStreamManagementCluster(
it.path.attributeId == ATTRIBUTE_ID
}
- requireNotNull(attributeData) { "Fabricsusingcamera attribute not found in response" }
+ requireNotNull(attributeData) { "Supportedstreamusages attribute not found in response" }
// Decode the TLV data into the appropriate type
val tlvReader = TlvReader(attributeData.data)
@@ -2099,13 +2133,13 @@ class CameraAvStreamManagementCluster(
tlvReader.exitContainer()
}
- return FabricsUsingCameraAttribute(decodedValue)
+ return SupportedStreamUsagesAttribute(decodedValue)
}
- suspend fun subscribeFabricsUsingCameraAttribute(
+ suspend fun subscribeSupportedStreamUsagesAttribute(
minInterval: Int,
maxInterval: Int,
- ): Flow<FabricsUsingCameraAttributeSubscriptionState> {
+ ): Flow<SupportedStreamUsagesAttributeSubscriptionState> {
val ATTRIBUTE_ID: UInt = 14u
val attributePaths =
listOf(
@@ -2124,7 +2158,7 @@ class CameraAvStreamManagementCluster(
when (subscriptionState) {
is SubscriptionState.SubscriptionErrorNotification -> {
emit(
- FabricsUsingCameraAttributeSubscriptionState.Error(
+ SupportedStreamUsagesAttributeSubscriptionState.Error(
Exception(
"Subscription terminated with error code: ${subscriptionState.terminationCause}"
)
@@ -2138,7 +2172,7 @@ class CameraAvStreamManagementCluster(
.firstOrNull { it.path.attributeId == ATTRIBUTE_ID }
requireNotNull(attributeData) {
- "Fabricsusingcamera attribute not found in Node State update"
+ "Supportedstreamusages attribute not found in Node State update"
}
// Decode the TLV data into the appropriate type
@@ -2152,10 +2186,10 @@ class CameraAvStreamManagementCluster(
tlvReader.exitContainer()
}
- emit(FabricsUsingCameraAttributeSubscriptionState.Success(decodedValue))
+ emit(SupportedStreamUsagesAttributeSubscriptionState.Success(decodedValue))
}
SubscriptionState.SubscriptionEstablished -> {
- emit(FabricsUsingCameraAttributeSubscriptionState.SubscriptionEstablished)
+ emit(SupportedStreamUsagesAttributeSubscriptionState.SubscriptionEstablished)
}
}
}
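
Illustrative usage sketch (not part of the generated diff): the new SnapshotStreamModify command and the renamed SupportedStreamUsages accessor can be exercised roughly as below; the cluster instance, stream ID, and flag values are hypothetical.

// Sketch only; the cluster wiring and argument values are hypothetical.
import matter.controller.cluster.clusters.CameraAvStreamManagementCluster

suspend fun exerciseNewApi(cluster: CameraAvStreamManagementCluster) {
  // Toggle the watermark on an already-allocated snapshot stream (command ID 9 above);
  // passing null for OSDEnabled leaves that optional field out of the TLV request.
  cluster.snapshotStreamModify(
    snapshotStreamID = 1u.toUShort(),
    watermarkEnabled = true,
    OSDEnabled = null,
  )

  // Callers of the old readFabricsUsingCameraAttribute migrate to the renamed accessor.
  val usages = cluster.readSupportedStreamUsagesAttribute()
  println("SupportedStreamUsages: ${usages.value}")
}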
diff --git a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt b/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt
deleted file mode 100644
index 3e3f78187c7c50..00000000000000
--- a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package matter.controller.cluster.eventstructs
-
-import java.util.Optional
-import matter.controller.cluster.*
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterAudioStreamChangedEvent(
- val audioStreamID: UShort,
- val streamUsage: Optional<UByte>,
- val audioCodec: Optional<UByte>,
- val channelCount: Optional<UByte>,
- val sampleRate: Optional<UInt>,
- val bitRate: Optional<UInt>,
- val bitDepth: Optional<UByte>,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterAudioStreamChangedEvent {\n")
- append("\taudioStreamID : $audioStreamID\n")
- append("\tstreamUsage : $streamUsage\n")
- append("\taudioCodec : $audioCodec\n")
- append("\tchannelCount : $channelCount\n")
- append("\tsampleRate : $sampleRate\n")
- append("\tbitRate : $bitRate\n")
- append("\tbitDepth : $bitDepth\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_AUDIO_STREAM_ID), audioStreamID)
- if (streamUsage.isPresent) {
- val optstreamUsage = streamUsage.get()
- put(ContextSpecificTag(TAG_STREAM_USAGE), optstreamUsage)
- }
- if (audioCodec.isPresent) {
- val optaudioCodec = audioCodec.get()
- put(ContextSpecificTag(TAG_AUDIO_CODEC), optaudioCodec)
- }
- if (channelCount.isPresent) {
- val optchannelCount = channelCount.get()
- put(ContextSpecificTag(TAG_CHANNEL_COUNT), optchannelCount)
- }
- if (sampleRate.isPresent) {
- val optsampleRate = sampleRate.get()
- put(ContextSpecificTag(TAG_SAMPLE_RATE), optsampleRate)
- }
- if (bitRate.isPresent) {
- val optbitRate = bitRate.get()
- put(ContextSpecificTag(TAG_BIT_RATE), optbitRate)
- }
- if (bitDepth.isPresent) {
- val optbitDepth = bitDepth.get()
- put(ContextSpecificTag(TAG_BIT_DEPTH), optbitDepth)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_AUDIO_STREAM_ID = 0
- private const val TAG_STREAM_USAGE = 1
- private const val TAG_AUDIO_CODEC = 2
- private const val TAG_CHANNEL_COUNT = 3
- private const val TAG_SAMPLE_RATE = 4
- private const val TAG_BIT_RATE = 5
- private const val TAG_BIT_DEPTH = 6
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterAudioStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val audioStreamID = tlvReader.getUShort(ContextSpecificTag(TAG_AUDIO_STREAM_ID))
- val streamUsage =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_STREAM_USAGE))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_STREAM_USAGE)))
- } else {
- Optional.empty()
- }
- val audioCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_AUDIO_CODEC))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_AUDIO_CODEC)))
- } else {
- Optional.empty()
- }
- val channelCount =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_CHANNEL_COUNT))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_CHANNEL_COUNT)))
- } else {
- Optional.empty()
- }
- val sampleRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_SAMPLE_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_SAMPLE_RATE)))
- } else {
- Optional.empty()
- }
- val bitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val bitDepth =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_DEPTH))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_BIT_DEPTH)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterAudioStreamChangedEvent(
- audioStreamID,
- streamUsage,
- audioCodec,
- channelCount,
- sampleRate,
- bitRate,
- bitDepth,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt b/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt
deleted file mode 100644
index 074df191c914fd..00000000000000
--- a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package matter.controller.cluster.eventstructs
-
-import java.util.Optional
-import matter.controller.cluster.*
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- val snapshotStreamID: UShort,
- val imageCodec: Optional<UByte>,
- val frameRate: Optional<UShort>,
- val bitRate: Optional<UInt>,
- val minResolution:
- Optional<
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val maxResolution:
- Optional<
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val quality: Optional<UByte>,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterSnapshotStreamChangedEvent {\n")
- append("\tsnapshotStreamID : $snapshotStreamID\n")
- append("\timageCodec : $imageCodec\n")
- append("\tframeRate : $frameRate\n")
- append("\tbitRate : $bitRate\n")
- append("\tminResolution : $minResolution\n")
- append("\tmaxResolution : $maxResolution\n")
- append("\tquality : $quality\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID), snapshotStreamID)
- if (imageCodec.isPresent) {
- val optimageCodec = imageCodec.get()
- put(ContextSpecificTag(TAG_IMAGE_CODEC), optimageCodec)
- }
- if (frameRate.isPresent) {
- val optframeRate = frameRate.get()
- put(ContextSpecificTag(TAG_FRAME_RATE), optframeRate)
- }
- if (bitRate.isPresent) {
- val optbitRate = bitRate.get()
- put(ContextSpecificTag(TAG_BIT_RATE), optbitRate)
- }
- if (minResolution.isPresent) {
- val optminResolution = minResolution.get()
- optminResolution.toTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), this)
- }
- if (maxResolution.isPresent) {
- val optmaxResolution = maxResolution.get()
- optmaxResolution.toTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), this)
- }
- if (quality.isPresent) {
- val optquality = quality.get()
- put(ContextSpecificTag(TAG_QUALITY), optquality)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_SNAPSHOT_STREAM_ID = 0
- private const val TAG_IMAGE_CODEC = 1
- private const val TAG_FRAME_RATE = 2
- private const val TAG_BIT_RATE = 3
- private const val TAG_MIN_RESOLUTION = 4
- private const val TAG_MAX_RESOLUTION = 5
- private const val TAG_QUALITY = 6
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterSnapshotStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val snapshotStreamID = tlvReader.getUShort(ContextSpecificTag(TAG_SNAPSHOT_STREAM_ID))
- val imageCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_IMAGE_CODEC))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_IMAGE_CODEC)))
- } else {
- Optional.empty()
- }
- val frameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_FRAME_RATE))) {
- Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val bitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_BIT_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val minResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_RESOLUTION))) {
- Optional.of(
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val maxResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_RESOLUTION))) {
- Optional.of(
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val quality =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_QUALITY))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_QUALITY)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterSnapshotStreamChangedEvent(
- snapshotStreamID,
- imageCodec,
- frameRate,
- bitRate,
- minResolution,
- maxResolution,
- quality,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt b/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt
deleted file mode 100644
index e365197fee4a8c..00000000000000
--- a/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- *
- * Copyright (c) 2023 Project CHIP Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package matter.controller.cluster.eventstructs
-
-import java.util.Optional
-import matter.controller.cluster.*
-import matter.tlv.ContextSpecificTag
-import matter.tlv.Tag
-import matter.tlv.TlvReader
-import matter.tlv.TlvWriter
-
-class CameraAvStreamManagementClusterVideoStreamChangedEvent(
- val videoStreamID: UShort,
- val streamUsage: Optional<UByte>,
- val videoCodec: Optional<UByte>,
- val minFrameRate: Optional<UShort>,
- val maxFrameRate: Optional<UShort>,
- val minResolution:
- Optional<
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val maxResolution:
- Optional<
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- >,
- val minBitRate: Optional<UInt>,
- val maxBitRate: Optional<UInt>,
- val minFragmentLen: Optional<UShort>,
- val maxFragmentLen: Optional<UShort>,
-) {
- override fun toString(): String = buildString {
- append("CameraAvStreamManagementClusterVideoStreamChangedEvent {\n")
- append("\tvideoStreamID : $videoStreamID\n")
- append("\tstreamUsage : $streamUsage\n")
- append("\tvideoCodec : $videoCodec\n")
- append("\tminFrameRate : $minFrameRate\n")
- append("\tmaxFrameRate : $maxFrameRate\n")
- append("\tminResolution : $minResolution\n")
- append("\tmaxResolution : $maxResolution\n")
- append("\tminBitRate : $minBitRate\n")
- append("\tmaxBitRate : $maxBitRate\n")
- append("\tminFragmentLen : $minFragmentLen\n")
- append("\tmaxFragmentLen : $maxFragmentLen\n")
- append("}\n")
- }
-
- fun toTlv(tlvTag: Tag, tlvWriter: TlvWriter) {
- tlvWriter.apply {
- startStructure(tlvTag)
- put(ContextSpecificTag(TAG_VIDEO_STREAM_ID), videoStreamID)
- if (streamUsage.isPresent) {
- val optstreamUsage = streamUsage.get()
- put(ContextSpecificTag(TAG_STREAM_USAGE), optstreamUsage)
- }
- if (videoCodec.isPresent) {
- val optvideoCodec = videoCodec.get()
- put(ContextSpecificTag(TAG_VIDEO_CODEC), optvideoCodec)
- }
- if (minFrameRate.isPresent) {
- val optminFrameRate = minFrameRate.get()
- put(ContextSpecificTag(TAG_MIN_FRAME_RATE), optminFrameRate)
- }
- if (maxFrameRate.isPresent) {
- val optmaxFrameRate = maxFrameRate.get()
- put(ContextSpecificTag(TAG_MAX_FRAME_RATE), optmaxFrameRate)
- }
- if (minResolution.isPresent) {
- val optminResolution = minResolution.get()
- optminResolution.toTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), this)
- }
- if (maxResolution.isPresent) {
- val optmaxResolution = maxResolution.get()
- optmaxResolution.toTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), this)
- }
- if (minBitRate.isPresent) {
- val optminBitRate = minBitRate.get()
- put(ContextSpecificTag(TAG_MIN_BIT_RATE), optminBitRate)
- }
- if (maxBitRate.isPresent) {
- val optmaxBitRate = maxBitRate.get()
- put(ContextSpecificTag(TAG_MAX_BIT_RATE), optmaxBitRate)
- }
- if (minFragmentLen.isPresent) {
- val optminFragmentLen = minFragmentLen.get()
- put(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN), optminFragmentLen)
- }
- if (maxFragmentLen.isPresent) {
- val optmaxFragmentLen = maxFragmentLen.get()
- put(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN), optmaxFragmentLen)
- }
- endStructure()
- }
- }
-
- companion object {
- private const val TAG_VIDEO_STREAM_ID = 0
- private const val TAG_STREAM_USAGE = 1
- private const val TAG_VIDEO_CODEC = 2
- private const val TAG_MIN_FRAME_RATE = 3
- private const val TAG_MAX_FRAME_RATE = 4
- private const val TAG_MIN_RESOLUTION = 5
- private const val TAG_MAX_RESOLUTION = 6
- private const val TAG_MIN_BIT_RATE = 7
- private const val TAG_MAX_BIT_RATE = 8
- private const val TAG_MIN_FRAGMENT_LEN = 9
- private const val TAG_MAX_FRAGMENT_LEN = 10
-
- fun fromTlv(
- tlvTag: Tag,
- tlvReader: TlvReader,
- ): CameraAvStreamManagementClusterVideoStreamChangedEvent {
- tlvReader.enterStructure(tlvTag)
- val videoStreamID = tlvReader.getUShort(ContextSpecificTag(TAG_VIDEO_STREAM_ID))
- val streamUsage =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_STREAM_USAGE))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_STREAM_USAGE)))
- } else {
- Optional.empty()
- }
- val videoCodec =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_VIDEO_CODEC))) {
- Optional.of(tlvReader.getUByte(ContextSpecificTag(TAG_VIDEO_CODEC)))
- } else {
- Optional.empty()
- }
- val minFrameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_FRAME_RATE))) {
- Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_MIN_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val maxFrameRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_FRAME_RATE))) {
- Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_MAX_FRAME_RATE)))
- } else {
- Optional.empty()
- }
- val minResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_RESOLUTION))) {
- Optional.of(
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MIN_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val maxResolution =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_RESOLUTION))) {
- Optional.of(
- matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoResolutionStruct
- .fromTlv(ContextSpecificTag(TAG_MAX_RESOLUTION), tlvReader)
- )
- } else {
- Optional.empty()
- }
- val minBitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_BIT_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MIN_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val maxBitRate =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_BIT_RATE))) {
- Optional.of(tlvReader.getUInt(ContextSpecificTag(TAG_MAX_BIT_RATE)))
- } else {
- Optional.empty()
- }
- val minFragmentLen =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN))) {
- Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_MIN_FRAGMENT_LEN)))
- } else {
- Optional.empty()
- }
- val maxFragmentLen =
- if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN))) {
- Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_MAX_FRAGMENT_LEN)))
- } else {
- Optional.empty()
- }
-
- tlvReader.exitContainer()
-
- return CameraAvStreamManagementClusterVideoStreamChangedEvent(
- videoStreamID,
- streamUsage,
- videoCodec,
- minFrameRate,
- maxFrameRate,
- minResolution,
- maxResolution,
- minBitRate,
- maxBitRate,
- minFragmentLen,
- maxFragmentLen,
- )
- }
- }
-}
diff --git a/src/controller/java/generated/java/matter/controller/cluster/files.gni b/src/controller/java/generated/java/matter/controller/cluster/files.gni
index 9cf22185f9a8d5..e25dcf66937659 100644
--- a/src/controller/java/generated/java/matter/controller/cluster/files.gni
+++ b/src/controller/java/generated/java/matter/controller/cluster/files.gni
@@ -210,9 +210,6 @@ matter_eventstructs_sources = [
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/BridgedDeviceBasicInformationClusterActiveChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/BridgedDeviceBasicInformationClusterReachableChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/BridgedDeviceBasicInformationClusterStartUpEvent.kt",
- "${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterAudioStreamChangedEvent.kt",
- "${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterSnapshotStreamChangedEvent.kt",
- "${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CameraAvStreamManagementClusterVideoStreamChangedEvent.kt",
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/CommissionerControlClusterCommissioningRequestResultEvent.kt",
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/DemandResponseLoadControlClusterLoadControlEventStatusChangeEvent.kt",
"${chip_root}/src/controller/java/generated/java/matter/controller/cluster/eventstructs/DeviceEnergyManagementClusterPowerAdjustEndEvent.kt",
diff --git a/src/controller/java/generated/java/matter/controller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt b/src/controller/java/generated/java/matter/controller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
index 5cf6052a753999..cf8b78d722eb9f 100644
--- a/src/controller/java/generated/java/matter/controller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
+++ b/src/controller/java/generated/java/matter/controller/cluster/structs/CameraAvStreamManagementClusterVideoSensorParamsStruct.kt
@@ -16,6 +16,7 @@
*/
package matter.controller.cluster.structs
+import java.util.Optional
import matter.controller.cluster.*
import matter.tlv.ContextSpecificTag
import matter.tlv.Tag
@@ -25,15 +26,13 @@ import matter.tlv.TlvWriter
class CameraAvStreamManagementClusterVideoSensorParamsStruct(
val sensorWidth: UShort,
val sensorHeight: UShort,
- val HDRCapable: Boolean,
val maxFPS: UShort,
- val maxHDRFPS: UShort,
+ val maxHDRFPS: Optional<UShort>,
) {
override fun toString(): String = buildString {
append("CameraAvStreamManagementClusterVideoSensorParamsStruct {\n")
append("\tsensorWidth : $sensorWidth\n")
append("\tsensorHeight : $sensorHeight\n")
- append("\tHDRCapable : $HDRCapable\n")
append("\tmaxFPS : $maxFPS\n")
append("\tmaxHDRFPS : $maxHDRFPS\n")
append("}\n")
@@ -44,9 +43,11 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
startStructure(tlvTag)
put(ContextSpecificTag(TAG_SENSOR_WIDTH), sensorWidth)
put(ContextSpecificTag(TAG_SENSOR_HEIGHT), sensorHeight)
- put(ContextSpecificTag(TAG_HDR_CAPABLE), HDRCapable)
put(ContextSpecificTag(TAG_MAX_FPS), maxFPS)
- put(ContextSpecificTag(TAG_MAX_HDRFPS), maxHDRFPS)
+ if (maxHDRFPS.isPresent) {
+ val optmaxHDRFPS = maxHDRFPS.get()
+ put(ContextSpecificTag(TAG_MAX_HDRFPS), optmaxHDRFPS)
+ }
endStructure()
}
}
@@ -54,9 +55,8 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
companion object {
private const val TAG_SENSOR_WIDTH = 0
private const val TAG_SENSOR_HEIGHT = 1
- private const val TAG_HDR_CAPABLE = 2
- private const val TAG_MAX_FPS = 3
- private const val TAG_MAX_HDRFPS = 4
+ private const val TAG_MAX_FPS = 2
+ private const val TAG_MAX_HDRFPS = 3
fun fromTlv(
tlvTag: Tag,
@@ -65,16 +65,19 @@ class CameraAvStreamManagementClusterVideoSensorParamsStruct(
tlvReader.enterStructure(tlvTag)
val sensorWidth = tlvReader.getUShort(ContextSpecificTag(TAG_SENSOR_WIDTH))
val sensorHeight = tlvReader.getUShort(ContextSpecificTag(TAG_SENSOR_HEIGHT))
- val HDRCapable = tlvReader.getBoolean(ContextSpecificTag(TAG_HDR_CAPABLE))
val maxFPS = tlvReader.getUShort(ContextSpecificTag(TAG_MAX_FPS))
- val maxHDRFPS = tlvReader.getUShort(ContextSpecificTag(TAG_MAX_HDRFPS))
+ val maxHDRFPS =
+ if (tlvReader.isNextTag(ContextSpecificTag(TAG_MAX_HDRFPS))) {
+ Optional.of(tlvReader.getUShort(ContextSpecificTag(TAG_MAX_HDRFPS)))
+ } else {
+ Optional.empty()
+ }
tlvReader.exitContainer()
return CameraAvStreamManagementClusterVideoSensorParamsStruct(
sensorWidth,
sensorHeight,
- HDRCapable,
maxFPS,
maxHDRFPS,
)
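
Round-trip sketch (not part of the generated diff): under the new tag layout an absent maxHDRFPS is simply not encoded; the sensor values below are hypothetical.

// Sketch only; values are hypothetical.
import java.util.Optional
import matter.controller.cluster.structs.CameraAvStreamManagementClusterVideoSensorParamsStruct
import matter.tlv.AnonymousTag
import matter.tlv.TlvReader
import matter.tlv.TlvWriter

fun main() {
  val original =
    CameraAvStreamManagementClusterVideoSensorParamsStruct(
      sensorWidth = 1920u.toUShort(),
      sensorHeight = 1080u.toUShort(),
      maxFPS = 30u.toUShort(),
      maxHDRFPS = Optional.empty(),
    )

  // Encode, then decode the same bytes back into the struct.
  val writer = TlvWriter()
  original.toTlv(AnonymousTag, writer)
  val decoded =
    CameraAvStreamManagementClusterVideoSensorParamsStruct.fromTlv(
      AnonymousTag,
      TlvReader(writer.getEncoded()),
    )

  // The optional field stays absent after the round trip; no HDRCapable flag is involved.
  check(!decoded.maxHDRFPS.isPresent)
}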
diff --git a/src/controller/java/zap-generated/CHIPAttributeTLVValueDecoder.cpp b/src/controller/java/zap-generated/CHIPAttributeTLVValueDecoder.cpp
index 588d3c6120b139..e9712a8a4676cc 100644
--- a/src/controller/java/zap-generated/CHIPAttributeTLVValueDecoder.cpp
+++ b/src/controller/java/zap-generated/CHIPAttributeTLVValueDecoder.cpp
@@ -40413,12 +40413,6 @@ jobject DecodeAttributeValue(const app::ConcreteAttributePath & aPath, TLV::TLVR
chip::JniReferences::GetInstance().CreateBoxedObject<jint>(value_sensorHeightClassName.c_str(),
value_sensorHeightCtorSignature.c_str(),
jnivalue_sensorHeight, value_sensorHeight);
- jobject value_HDRCapable;
- std::string value_HDRCapableClassName = "java/lang/Boolean";
- std::string value_HDRCapableCtorSignature = "(Z)V";
- jboolean jnivalue_HDRCapable = static_cast<jboolean>(cppValue.HDRCapable);
- chip::JniReferences::GetInstance().CreateBoxedObject<jboolean>(
- value_HDRCapableClassName.c_str(), value_HDRCapableCtorSignature.c_str(), jnivalue_HDRCapable, value_HDRCapable);
jobject value_maxFPS;
std::string value_maxFPSClassName = "java/lang/Integer";
std::string value_maxFPSCtorSignature = "(I)V";
@@ -40426,11 +40420,21 @@ jobject DecodeAttributeValue(const app::ConcreteAttributePath & aPath, TLV::TLVR
chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
value_maxFPSClassName.c_str(), value_maxFPSCtorSignature.c_str(), jnivalue_maxFPS, value_maxFPS);
jobject value_maxHDRFPS;
- std::string value_maxHDRFPSClassName = "java/lang/Integer";
- std::string value_maxHDRFPSCtorSignature = "(I)V";
- jint jnivalue_maxHDRFPS = static_cast<jint>(cppValue.maxHDRFPS);
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_maxHDRFPSClassName.c_str(), value_maxHDRFPSCtorSignature.c_str(), jnivalue_maxHDRFPS, value_maxHDRFPS);
+ if (!cppValue.maxHDRFPS.HasValue())
+ {
+ chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxHDRFPS);
+ }
+ else
+ {
+ jobject value_maxHDRFPSInsideOptional;
+ std::string value_maxHDRFPSInsideOptionalClassName = "java/lang/Integer";
+ std::string value_maxHDRFPSInsideOptionalCtorSignature = "(I)V";
+ jint jnivalue_maxHDRFPSInsideOptional = static_cast<jint>(cppValue.maxHDRFPS.Value());
+ chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
+ value_maxHDRFPSInsideOptionalClassName.c_str(), value_maxHDRFPSInsideOptionalCtorSignature.c_str(),
+ jnivalue_maxHDRFPSInsideOptional, value_maxHDRFPSInsideOptional);
+ chip::JniReferences::GetInstance().CreateOptional(value_maxHDRFPSInsideOptional, value_maxHDRFPS);
+ }
{
jclass videoSensorParamsStructStructClass_0;
@@ -40446,7 +40450,7 @@ jobject DecodeAttributeValue(const app::ConcreteAttributePath & aPath, TLV::TLVR
jmethodID videoSensorParamsStructStructCtor_0;
err = chip::JniReferences::GetInstance().FindMethod(
env, videoSensorParamsStructStructClass_0, "<init>",
- "(Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Boolean;Ljava/lang/Integer;Ljava/lang/Integer;)V",
+ "(Ljava/lang/Integer;Ljava/lang/Integer;Ljava/lang/Integer;Ljava/util/Optional;)V",
&videoSensorParamsStructStructCtor_0);
if (err != CHIP_NO_ERROR || videoSensorParamsStructStructCtor_0 == nullptr)
{
@@ -40456,7 +40460,7 @@ jobject DecodeAttributeValue(const app::ConcreteAttributePath & aPath, TLV::TLVR
}
value = env->NewObject(videoSensorParamsStructStructClass_0, videoSensorParamsStructStructCtor_0, value_sensorWidth,
- value_sensorHeight, value_HDRCapable, value_maxFPS, value_maxHDRFPS);
+ value_sensorHeight, value_maxFPS, value_maxHDRFPS);
}
return value;
}
@@ -40997,8 +41001,8 @@ jobject DecodeAttributeValue(const app::ConcreteAttributePath & aPath, TLV::TLVR
jnivalue, value);
return value;
}
- case Attributes::FabricsUsingCamera::Id: {
- using TypeInfo = Attributes::FabricsUsingCamera::TypeInfo;
+ case Attributes::SupportedStreamUsages::Id: {
+ using TypeInfo = Attributes::SupportedStreamUsages::TypeInfo;
TypeInfo::DecodableType cppValue;
*aError = app::DataModel::Decode(aReader, cppValue);
if (*aError != CHIP_NO_ERROR)
diff --git a/src/controller/java/zap-generated/CHIPEventTLVValueDecoder.cpp b/src/controller/java/zap-generated/CHIPEventTLVValueDecoder.cpp
index a1c8b6472cb929..606e7a760061ee 100644
--- a/src/controller/java/zap-generated/CHIPEventTLVValueDecoder.cpp
+++ b/src/controller/java/zap-generated/CHIPEventTLVValueDecoder.cpp
@@ -8437,662 +8437,6 @@ jobject DecodeEventValue(const app::ConcreteEventPath & aPath, TLV::TLVReader &
using namespace app::Clusters::CameraAvStreamManagement;
switch (aPath.mEventId)
{
- case Events::VideoStreamChanged::Id: {
- Events::VideoStreamChanged::DecodableType cppValue;
- *aError = app::DataModel::Decode(aReader, cppValue);
- if (*aError != CHIP_NO_ERROR)
- {
- return nullptr;
- }
- jobject value_videoStreamID;
- std::string value_videoStreamIDClassName = "java/lang/Integer";
- std::string value_videoStreamIDCtorSignature = "(I)V";
- jint jnivalue_videoStreamID = static_cast<jint>(cppValue.videoStreamID);
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(value_videoStreamIDClassName.c_str(),
- value_videoStreamIDCtorSignature.c_str(),
- jnivalue_videoStreamID, value_videoStreamID);
-
- jobject value_streamUsage;
- if (!cppValue.streamUsage.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_streamUsage);
- }
- else
- {
- jobject value_streamUsageInsideOptional;
- std::string value_streamUsageInsideOptionalClassName = "java/lang/Integer";
- std::string value_streamUsageInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_streamUsageInsideOptional = static_cast<jint>(cppValue.streamUsage.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_streamUsageInsideOptionalClassName.c_str(), value_streamUsageInsideOptionalCtorSignature.c_str(),
- jnivalue_streamUsageInsideOptional, value_streamUsageInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_streamUsageInsideOptional, value_streamUsage);
- }
-
- jobject value_videoCodec;
- if (!cppValue.videoCodec.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_videoCodec);
- }
- else
- {
- jobject value_videoCodecInsideOptional;
- std::string value_videoCodecInsideOptionalClassName = "java/lang/Integer";
- std::string value_videoCodecInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_videoCodecInsideOptional = static_cast<jint>(cppValue.videoCodec.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_videoCodecInsideOptionalClassName.c_str(), value_videoCodecInsideOptionalCtorSignature.c_str(),
- jnivalue_videoCodecInsideOptional, value_videoCodecInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_videoCodecInsideOptional, value_videoCodec);
- }
-
- jobject value_minFrameRate;
- if (!cppValue.minFrameRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_minFrameRate);
- }
- else
- {
- jobject value_minFrameRateInsideOptional;
- std::string value_minFrameRateInsideOptionalClassName = "java/lang/Integer";
- std::string value_minFrameRateInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_minFrameRateInsideOptional = static_cast<jint>(cppValue.minFrameRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_minFrameRateInsideOptionalClassName.c_str(), value_minFrameRateInsideOptionalCtorSignature.c_str(),
- jnivalue_minFrameRateInsideOptional, value_minFrameRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_minFrameRateInsideOptional, value_minFrameRate);
- }
-
- jobject value_maxFrameRate;
- if (!cppValue.maxFrameRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxFrameRate);
- }
- else
- {
- jobject value_maxFrameRateInsideOptional;
- std::string value_maxFrameRateInsideOptionalClassName = "java/lang/Integer";
- std::string value_maxFrameRateInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_maxFrameRateInsideOptional = static_cast<jint>(cppValue.maxFrameRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_maxFrameRateInsideOptionalClassName.c_str(), value_maxFrameRateInsideOptionalCtorSignature.c_str(),
- jnivalue_maxFrameRateInsideOptional, value_maxFrameRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_maxFrameRateInsideOptional, value_maxFrameRate);
- }
-
- jobject value_minResolution;
- if (!cppValue.minResolution.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_minResolution);
- }
- else
- {
- jobject value_minResolutionInsideOptional;
- jobject value_minResolutionInsideOptional_width;
- std::string value_minResolutionInsideOptional_widthClassName = "java/lang/Integer";
- std::string value_minResolutionInsideOptional_widthCtorSignature = "(I)V";
- jint jnivalue_minResolutionInsideOptional_width = static_cast<jint>(cppValue.minResolution.Value().width);
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_minResolutionInsideOptional_widthClassName.c_str(),
- value_minResolutionInsideOptional_widthCtorSignature.c_str(), jnivalue_minResolutionInsideOptional_width,
- value_minResolutionInsideOptional_width);
- jobject value_minResolutionInsideOptional_height;
- std::string value_minResolutionInsideOptional_heightClassName = "java/lang/Integer";
- std::string value_minResolutionInsideOptional_heightCtorSignature = "(I)V";
- jint jnivalue_minResolutionInsideOptional_height = static_cast<jint>(cppValue.minResolution.Value().height);
- chip::JniReferences::GetInstance().CreateBoxedObject<jint>(
- value_minResolutionInsideOptional_heightClassName.c_str(),
- value_minResolutionInsideOptional_heightCtorSignature.c_str(), jnivalue_minResolutionInsideOptional_height,
- value_minResolutionInsideOptional_height);
-
- {
- jclass videoResolutionStructStructClass_1;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct",
- videoResolutionStructStructClass_1);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct");
- return nullptr;
- }
-
- jmethodID videoResolutionStructStructCtor_1;
- err = chip::JniReferences::GetInstance().FindMethod(env, videoResolutionStructStructClass_1, "<init>",
- "(Ljava/lang/Integer;Ljava/lang/Integer;)V",
- &videoResolutionStructStructCtor_1);
- if (err != CHIP_NO_ERROR || videoResolutionStructStructCtor_1 == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct constructor");
- return nullptr;
- }
-
- value_minResolutionInsideOptional =
- env->NewObject(videoResolutionStructStructClass_1, videoResolutionStructStructCtor_1,
- value_minResolutionInsideOptional_width, value_minResolutionInsideOptional_height);
- }
- chip::JniReferences::GetInstance().CreateOptional(value_minResolutionInsideOptional, value_minResolution);
- }
-
- jobject value_maxResolution;
- if (!cppValue.maxResolution.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxResolution);
- }
- else
- {
- jobject value_maxResolutionInsideOptional;
- jobject value_maxResolutionInsideOptional_width;
- std::string value_maxResolutionInsideOptional_widthClassName = "java/lang/Integer";
- std::string value_maxResolutionInsideOptional_widthCtorSignature = "(I)V";
- jint jnivalue_maxResolutionInsideOptional_width = static_cast<jint>(cppValue.maxResolution.Value().width);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxResolutionInsideOptional_widthClassName.c_str(),
- value_maxResolutionInsideOptional_widthCtorSignature.c_str(), jnivalue_maxResolutionInsideOptional_width,
- value_maxResolutionInsideOptional_width);
- jobject value_maxResolutionInsideOptional_height;
- std::string value_maxResolutionInsideOptional_heightClassName = "java/lang/Integer";
- std::string value_maxResolutionInsideOptional_heightCtorSignature = "(I)V";
- jint jnivalue_maxResolutionInsideOptional_height = static_cast<jint>(cppValue.maxResolution.Value().height);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxResolutionInsideOptional_heightClassName.c_str(),
- value_maxResolutionInsideOptional_heightCtorSignature.c_str(), jnivalue_maxResolutionInsideOptional_height,
- value_maxResolutionInsideOptional_height);
-
- {
- jclass videoResolutionStructStructClass_1;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct",
- videoResolutionStructStructClass_1);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct");
- return nullptr;
- }
-
- jmethodID videoResolutionStructStructCtor_1;
- err = chip::JniReferences::GetInstance().FindMethod(env, videoResolutionStructStructClass_1, "<init>",
- "(Ljava/lang/Integer;Ljava/lang/Integer;)V",
- &videoResolutionStructStructCtor_1);
- if (err != CHIP_NO_ERROR || videoResolutionStructStructCtor_1 == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct constructor");
- return nullptr;
- }
-
- value_maxResolutionInsideOptional =
- env->NewObject(videoResolutionStructStructClass_1, videoResolutionStructStructCtor_1,
- value_maxResolutionInsideOptional_width, value_maxResolutionInsideOptional_height);
- }
- chip::JniReferences::GetInstance().CreateOptional(value_maxResolutionInsideOptional, value_maxResolution);
- }
-
- jobject value_minBitRate;
- if (!cppValue.minBitRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_minBitRate);
- }
- else
- {
- jobject value_minBitRateInsideOptional;
- std::string value_minBitRateInsideOptionalClassName = "java/lang/Long";
- std::string value_minBitRateInsideOptionalCtorSignature = "(J)V";
- jlong jnivalue_minBitRateInsideOptional = static_cast<jlong>(cppValue.minBitRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_minBitRateInsideOptionalClassName.c_str(), value_minBitRateInsideOptionalCtorSignature.c_str(),
- jnivalue_minBitRateInsideOptional, value_minBitRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_minBitRateInsideOptional, value_minBitRate);
- }
-
- jobject value_maxBitRate;
- if (!cppValue.maxBitRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxBitRate);
- }
- else
- {
- jobject value_maxBitRateInsideOptional;
- std::string value_maxBitRateInsideOptionalClassName = "java/lang/Long";
- std::string value_maxBitRateInsideOptionalCtorSignature = "(J)V";
- jlong jnivalue_maxBitRateInsideOptional = static_cast<jlong>(cppValue.maxBitRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxBitRateInsideOptionalClassName.c_str(), value_maxBitRateInsideOptionalCtorSignature.c_str(),
- jnivalue_maxBitRateInsideOptional, value_maxBitRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_maxBitRateInsideOptional, value_maxBitRate);
- }
-
- jobject value_minFragmentLen;
- if (!cppValue.minFragmentLen.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_minFragmentLen);
- }
- else
- {
- jobject value_minFragmentLenInsideOptional;
- std::string value_minFragmentLenInsideOptionalClassName = "java/lang/Integer";
- std::string value_minFragmentLenInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_minFragmentLenInsideOptional = static_cast<jint>(cppValue.minFragmentLen.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_minFragmentLenInsideOptionalClassName.c_str(), value_minFragmentLenInsideOptionalCtorSignature.c_str(),
- jnivalue_minFragmentLenInsideOptional, value_minFragmentLenInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_minFragmentLenInsideOptional, value_minFragmentLen);
- }
-
- jobject value_maxFragmentLen;
- if (!cppValue.maxFragmentLen.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxFragmentLen);
- }
- else
- {
- jobject value_maxFragmentLenInsideOptional;
- std::string value_maxFragmentLenInsideOptionalClassName = "java/lang/Integer";
- std::string value_maxFragmentLenInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_maxFragmentLenInsideOptional = static_cast<jint>(cppValue.maxFragmentLen.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxFragmentLenInsideOptionalClassName.c_str(), value_maxFragmentLenInsideOptionalCtorSignature.c_str(),
- jnivalue_maxFragmentLenInsideOptional, value_maxFragmentLenInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_maxFragmentLenInsideOptional, value_maxFragmentLen);
- }
-
- jclass videoStreamChangedStructClass;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipEventStructs$CameraAvStreamManagementClusterVideoStreamChangedEvent",
- videoStreamChangedStructClass);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipEventStructs$CameraAvStreamManagementClusterVideoStreamChangedEvent");
- return nullptr;
- }
-
- jmethodID videoStreamChangedStructCtor;
- err = chip::JniReferences::GetInstance().FindMethod(
- env, videoStreamChangedStructClass, "<init>",
- "(Ljava/lang/Integer;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/"
- "Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;)V",
- &videoStreamChangedStructCtor);
- if (err != CHIP_NO_ERROR || videoStreamChangedStructCtor == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipEventStructs$CameraAvStreamManagementClusterVideoStreamChangedEvent constructor");
- return nullptr;
- }
-
- jobject value =
- env->NewObject(videoStreamChangedStructClass, videoStreamChangedStructCtor, value_videoStreamID, value_streamUsage,
- value_videoCodec, value_minFrameRate, value_maxFrameRate, value_minResolution, value_maxResolution,
- value_minBitRate, value_maxBitRate, value_minFragmentLen, value_maxFragmentLen);
-
- return value;
- }
- case Events::AudioStreamChanged::Id: {
- Events::AudioStreamChanged::DecodableType cppValue;
- *aError = app::DataModel::Decode(aReader, cppValue);
- if (*aError != CHIP_NO_ERROR)
- {
- return nullptr;
- }
- jobject value_audioStreamID;
- std::string value_audioStreamIDClassName = "java/lang/Integer";
- std::string value_audioStreamIDCtorSignature = "(I)V";
- jint jnivalue_audioStreamID = static_cast<jint>(cppValue.audioStreamID);
- chip::JniReferences::GetInstance().CreateBoxedObject(value_audioStreamIDClassName.c_str(),
- value_audioStreamIDCtorSignature.c_str(),
- jnivalue_audioStreamID, value_audioStreamID);
-
- jobject value_streamUsage;
- if (!cppValue.streamUsage.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_streamUsage);
- }
- else
- {
- jobject value_streamUsageInsideOptional;
- std::string value_streamUsageInsideOptionalClassName = "java/lang/Integer";
- std::string value_streamUsageInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_streamUsageInsideOptional = static_cast<jint>(cppValue.streamUsage.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_streamUsageInsideOptionalClassName.c_str(), value_streamUsageInsideOptionalCtorSignature.c_str(),
- jnivalue_streamUsageInsideOptional, value_streamUsageInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_streamUsageInsideOptional, value_streamUsage);
- }
-
- jobject value_audioCodec;
- if (!cppValue.audioCodec.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_audioCodec);
- }
- else
- {
- jobject value_audioCodecInsideOptional;
- std::string value_audioCodecInsideOptionalClassName = "java/lang/Integer";
- std::string value_audioCodecInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_audioCodecInsideOptional = static_cast<jint>(cppValue.audioCodec.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_audioCodecInsideOptionalClassName.c_str(), value_audioCodecInsideOptionalCtorSignature.c_str(),
- jnivalue_audioCodecInsideOptional, value_audioCodecInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_audioCodecInsideOptional, value_audioCodec);
- }
-
- jobject value_channelCount;
- if (!cppValue.channelCount.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_channelCount);
- }
- else
- {
- jobject value_channelCountInsideOptional;
- std::string value_channelCountInsideOptionalClassName = "java/lang/Integer";
- std::string value_channelCountInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_channelCountInsideOptional = static_cast<jint>(cppValue.channelCount.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_channelCountInsideOptionalClassName.c_str(), value_channelCountInsideOptionalCtorSignature.c_str(),
- jnivalue_channelCountInsideOptional, value_channelCountInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_channelCountInsideOptional, value_channelCount);
- }
-
- jobject value_sampleRate;
- if (!cppValue.sampleRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_sampleRate);
- }
- else
- {
- jobject value_sampleRateInsideOptional;
- std::string value_sampleRateInsideOptionalClassName = "java/lang/Long";
- std::string value_sampleRateInsideOptionalCtorSignature = "(J)V";
- jlong jnivalue_sampleRateInsideOptional = static_cast<jlong>(cppValue.sampleRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_sampleRateInsideOptionalClassName.c_str(), value_sampleRateInsideOptionalCtorSignature.c_str(),
- jnivalue_sampleRateInsideOptional, value_sampleRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_sampleRateInsideOptional, value_sampleRate);
- }
-
- jobject value_bitRate;
- if (!cppValue.bitRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_bitRate);
- }
- else
- {
- jobject value_bitRateInsideOptional;
- std::string value_bitRateInsideOptionalClassName = "java/lang/Long";
- std::string value_bitRateInsideOptionalCtorSignature = "(J)V";
- jlong jnivalue_bitRateInsideOptional = static_cast<jlong>(cppValue.bitRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_bitRateInsideOptionalClassName.c_str(), value_bitRateInsideOptionalCtorSignature.c_str(),
- jnivalue_bitRateInsideOptional, value_bitRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_bitRateInsideOptional, value_bitRate);
- }
-
- jobject value_bitDepth;
- if (!cppValue.bitDepth.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_bitDepth);
- }
- else
- {
- jobject value_bitDepthInsideOptional;
- std::string value_bitDepthInsideOptionalClassName = "java/lang/Integer";
- std::string value_bitDepthInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_bitDepthInsideOptional = static_cast<jint>(cppValue.bitDepth.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_bitDepthInsideOptionalClassName.c_str(), value_bitDepthInsideOptionalCtorSignature.c_str(),
- jnivalue_bitDepthInsideOptional, value_bitDepthInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_bitDepthInsideOptional, value_bitDepth);
- }
-
- jclass audioStreamChangedStructClass;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipEventStructs$CameraAvStreamManagementClusterAudioStreamChangedEvent",
- audioStreamChangedStructClass);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipEventStructs$CameraAvStreamManagementClusterAudioStreamChangedEvent");
- return nullptr;
- }
-
- jmethodID audioStreamChangedStructCtor;
- err = chip::JniReferences::GetInstance().FindMethod(
- env, audioStreamChangedStructClass, "<init>",
- "(Ljava/lang/Integer;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/Optional;Ljava/util/"
- "Optional;Ljava/util/Optional;)V",
- &audioStreamChangedStructCtor);
- if (err != CHIP_NO_ERROR || audioStreamChangedStructCtor == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipEventStructs$CameraAvStreamManagementClusterAudioStreamChangedEvent constructor");
- return nullptr;
- }
-
- jobject value =
- env->NewObject(audioStreamChangedStructClass, audioStreamChangedStructCtor, value_audioStreamID, value_streamUsage,
- value_audioCodec, value_channelCount, value_sampleRate, value_bitRate, value_bitDepth);
-
- return value;
- }
- case Events::SnapshotStreamChanged::Id: {
- Events::SnapshotStreamChanged::DecodableType cppValue;
- *aError = app::DataModel::Decode(aReader, cppValue);
- if (*aError != CHIP_NO_ERROR)
- {
- return nullptr;
- }
- jobject value_snapshotStreamID;
- std::string value_snapshotStreamIDClassName = "java/lang/Integer";
- std::string value_snapshotStreamIDCtorSignature = "(I)V";
- jint jnivalue_snapshotStreamID = static_cast<jint>(cppValue.snapshotStreamID);
- chip::JniReferences::GetInstance().CreateBoxedObject(value_snapshotStreamIDClassName.c_str(),
- value_snapshotStreamIDCtorSignature.c_str(),
- jnivalue_snapshotStreamID, value_snapshotStreamID);
-
- jobject value_imageCodec;
- if (!cppValue.imageCodec.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_imageCodec);
- }
- else
- {
- jobject value_imageCodecInsideOptional;
- std::string value_imageCodecInsideOptionalClassName = "java/lang/Integer";
- std::string value_imageCodecInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_imageCodecInsideOptional = static_cast<jint>(cppValue.imageCodec.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_imageCodecInsideOptionalClassName.c_str(), value_imageCodecInsideOptionalCtorSignature.c_str(),
- jnivalue_imageCodecInsideOptional, value_imageCodecInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_imageCodecInsideOptional, value_imageCodec);
- }
-
- jobject value_frameRate;
- if (!cppValue.frameRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_frameRate);
- }
- else
- {
- jobject value_frameRateInsideOptional;
- std::string value_frameRateInsideOptionalClassName = "java/lang/Integer";
- std::string value_frameRateInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_frameRateInsideOptional = static_cast<jint>(cppValue.frameRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_frameRateInsideOptionalClassName.c_str(), value_frameRateInsideOptionalCtorSignature.c_str(),
- jnivalue_frameRateInsideOptional, value_frameRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_frameRateInsideOptional, value_frameRate);
- }
-
- jobject value_bitRate;
- if (!cppValue.bitRate.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_bitRate);
- }
- else
- {
- jobject value_bitRateInsideOptional;
- std::string value_bitRateInsideOptionalClassName = "java/lang/Long";
- std::string value_bitRateInsideOptionalCtorSignature = "(J)V";
- jlong jnivalue_bitRateInsideOptional = static_cast<jlong>(cppValue.bitRate.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_bitRateInsideOptionalClassName.c_str(), value_bitRateInsideOptionalCtorSignature.c_str(),
- jnivalue_bitRateInsideOptional, value_bitRateInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_bitRateInsideOptional, value_bitRate);
- }
-
- jobject value_minResolution;
- if (!cppValue.minResolution.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_minResolution);
- }
- else
- {
- jobject value_minResolutionInsideOptional;
- jobject value_minResolutionInsideOptional_width;
- std::string value_minResolutionInsideOptional_widthClassName = "java/lang/Integer";
- std::string value_minResolutionInsideOptional_widthCtorSignature = "(I)V";
- jint jnivalue_minResolutionInsideOptional_width = static_cast<jint>(cppValue.minResolution.Value().width);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_minResolutionInsideOptional_widthClassName.c_str(),
- value_minResolutionInsideOptional_widthCtorSignature.c_str(), jnivalue_minResolutionInsideOptional_width,
- value_minResolutionInsideOptional_width);
- jobject value_minResolutionInsideOptional_height;
- std::string value_minResolutionInsideOptional_heightClassName = "java/lang/Integer";
- std::string value_minResolutionInsideOptional_heightCtorSignature = "(I)V";
- jint jnivalue_minResolutionInsideOptional_height = static_cast<jint>(cppValue.minResolution.Value().height);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_minResolutionInsideOptional_heightClassName.c_str(),
- value_minResolutionInsideOptional_heightCtorSignature.c_str(), jnivalue_minResolutionInsideOptional_height,
- value_minResolutionInsideOptional_height);
-
- {
- jclass videoResolutionStructStructClass_1;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct",
- videoResolutionStructStructClass_1);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct");
- return nullptr;
- }
-
- jmethodID videoResolutionStructStructCtor_1;
- err = chip::JniReferences::GetInstance().FindMethod(env, videoResolutionStructStructClass_1, "<init>",
- "(Ljava/lang/Integer;Ljava/lang/Integer;)V",
- &videoResolutionStructStructCtor_1);
- if (err != CHIP_NO_ERROR || videoResolutionStructStructCtor_1 == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct constructor");
- return nullptr;
- }
-
- value_minResolutionInsideOptional =
- env->NewObject(videoResolutionStructStructClass_1, videoResolutionStructStructCtor_1,
- value_minResolutionInsideOptional_width, value_minResolutionInsideOptional_height);
- }
- chip::JniReferences::GetInstance().CreateOptional(value_minResolutionInsideOptional, value_minResolution);
- }
-
- jobject value_maxResolution;
- if (!cppValue.maxResolution.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_maxResolution);
- }
- else
- {
- jobject value_maxResolutionInsideOptional;
- jobject value_maxResolutionInsideOptional_width;
- std::string value_maxResolutionInsideOptional_widthClassName = "java/lang/Integer";
- std::string value_maxResolutionInsideOptional_widthCtorSignature = "(I)V";
- jint jnivalue_maxResolutionInsideOptional_width = static_cast<jint>(cppValue.maxResolution.Value().width);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxResolutionInsideOptional_widthClassName.c_str(),
- value_maxResolutionInsideOptional_widthCtorSignature.c_str(), jnivalue_maxResolutionInsideOptional_width,
- value_maxResolutionInsideOptional_width);
- jobject value_maxResolutionInsideOptional_height;
- std::string value_maxResolutionInsideOptional_heightClassName = "java/lang/Integer";
- std::string value_maxResolutionInsideOptional_heightCtorSignature = "(I)V";
- jint jnivalue_maxResolutionInsideOptional_height = static_cast<jint>(cppValue.maxResolution.Value().height);
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_maxResolutionInsideOptional_heightClassName.c_str(),
- value_maxResolutionInsideOptional_heightCtorSignature.c_str(), jnivalue_maxResolutionInsideOptional_height,
- value_maxResolutionInsideOptional_height);
-
- {
- jclass videoResolutionStructStructClass_1;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct",
- videoResolutionStructStructClass_1);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl, "Could not find class ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct");
- return nullptr;
- }
-
- jmethodID videoResolutionStructStructCtor_1;
- err = chip::JniReferences::GetInstance().FindMethod(env, videoResolutionStructStructClass_1, "<init>",
- "(Ljava/lang/Integer;Ljava/lang/Integer;)V",
- &videoResolutionStructStructCtor_1);
- if (err != CHIP_NO_ERROR || videoResolutionStructStructCtor_1 == nullptr)
- {
- ChipLogError(Zcl,
- "Could not find ChipStructs$CameraAvStreamManagementClusterVideoResolutionStruct constructor");
- return nullptr;
- }
-
- value_maxResolutionInsideOptional =
- env->NewObject(videoResolutionStructStructClass_1, videoResolutionStructStructCtor_1,
- value_maxResolutionInsideOptional_width, value_maxResolutionInsideOptional_height);
- }
- chip::JniReferences::GetInstance().CreateOptional(value_maxResolutionInsideOptional, value_maxResolution);
- }
-
- jobject value_quality;
- if (!cppValue.quality.HasValue())
- {
- chip::JniReferences::GetInstance().CreateOptional(nullptr, value_quality);
- }
- else
- {
- jobject value_qualityInsideOptional;
- std::string value_qualityInsideOptionalClassName = "java/lang/Integer";
- std::string value_qualityInsideOptionalCtorSignature = "(I)V";
- jint jnivalue_qualityInsideOptional = static_cast<jint>(cppValue.quality.Value());
- chip::JniReferences::GetInstance().CreateBoxedObject(
- value_qualityInsideOptionalClassName.c_str(), value_qualityInsideOptionalCtorSignature.c_str(),
- jnivalue_qualityInsideOptional, value_qualityInsideOptional);
- chip::JniReferences::GetInstance().CreateOptional(value_qualityInsideOptional, value_quality);
- }
-
- jclass snapshotStreamChangedStructClass;
- err = chip::JniReferences::GetInstance().GetLocalClassRef(
- env, "chip/devicecontroller/ChipEventStructs$CameraAvStreamManagementClusterSnapshotStreamChangedEvent",
- snapshotStreamChangedStructClass);
- if (err != CHIP_NO_ERROR)
- {
- ChipLogError(Zcl,
- "Could not find class ChipEventStructs$CameraAvStreamManagementClusterSnapshotStreamChangedEvent");
- return nullptr;
- }
-
- jmethodID snapshotStreamChangedStructCtor;
- err = chip::JniReferences::GetInstance().FindMethod(
- env, snapshotStreamChangedStructClass, "