Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Camera av stream management XML changes for Spec ballot fixes #37823

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

62 changes: 19 additions & 43 deletions src/controller/data_model/controller-clusters.matter
Original file line number Diff line number Diff line change
Expand Up @@ -9741,6 +9741,7 @@ provisional cluster CameraAvStreamManagement = 1361 {
kWatermark = 0x40;
kOnScreenDisplay = 0x80;
kLocalStorage = 0x100;
kHighDynamicRange = 0x200;
}

struct VideoResolutionStruct {
Expand Down Expand Up @@ -9809,9 +9810,8 @@ provisional cluster CameraAvStreamManagement = 1361 {
struct VideoSensorParamsStruct {
int16u sensorWidth = 0;
int16u sensorHeight = 1;
boolean HDRCapable = 2;
int16u maxFPS = 3;
int16u maxHDRFPS = 4;
int16u maxFPS = 2;
optional int16u maxHDRFPS = 3;
}

struct ViewportStruct {
Expand All @@ -9821,55 +9821,21 @@ provisional cluster CameraAvStreamManagement = 1361 {
int16u y2 = 3;
}

/** Event 0: reports a change to the video stream identified by VideoStreamID. Only the stream ID is mandatory; the remaining fields are optional. NOTE(review): presumably only the changed parameters are populated — confirm against the cluster spec. */
info event VideoStreamChanged = 0 {
int16u videoStreamID = 0;
optional StreamUsageEnum streamUsage = 1;
optional VideoCodecEnum videoCodec = 2;
optional int16u minFrameRate = 3;
optional int16u maxFrameRate = 4;
optional VideoResolutionStruct minResolution = 5;
optional VideoResolutionStruct maxResolution = 6;
optional int32u minBitRate = 7;
optional int32u maxBitRate = 8;
optional int16u minFragmentLen = 9;
optional int16u maxFragmentLen = 10;
}

/** Event 1: reports a change to the audio stream identified by AudioStreamID. Only the stream ID is mandatory; the remaining fields are optional. NOTE(review): presumably only the changed parameters are populated — confirm against the cluster spec. */
info event AudioStreamChanged = 1 {
int16u audioStreamID = 0;
optional StreamUsageEnum streamUsage = 1;
optional AudioCodecEnum audioCodec = 2;
optional int8u channelCount = 3;
optional int32u sampleRate = 4;
optional int32u bitRate = 5;
optional int8u bitDepth = 6;
}

/** Event 2: reports a change to the snapshot stream identified by SnapshotStreamID. Only the stream ID is mandatory; the remaining fields are optional. NOTE(review): presumably only the changed parameters are populated — confirm against the cluster spec. */
info event SnapshotStreamChanged = 2 {
int16u snapshotStreamID = 0;
optional ImageCodecEnum imageCodec = 1;
optional int16u frameRate = 2;
optional int32u bitRate = 3;
optional VideoResolutionStruct minResolution = 4;
optional VideoResolutionStruct maxResolution = 5;
optional int8u quality = 6;
}

readonly attribute optional int8u maxConcurrentVideoEncoders = 0;
readonly attribute optional int32u maxEncodedPixelRate = 1;
readonly attribute optional VideoSensorParamsStruct videoSensorParams = 2;
readonly attribute optional boolean nightVisionCapable = 3;
readonly attribute optional VideoResolutionStruct minViewport = 4;
readonly attribute optional RateDistortionTradeOffPointsStruct rateDistortionTradeOffPoints[] = 5;
readonly attribute optional int32u maxContentBufferSize = 6;
readonly attribute int32u maxContentBufferSize = 6;
readonly attribute optional AudioCapabilitiesStruct microphoneCapabilities = 7;
readonly attribute optional AudioCapabilitiesStruct speakerCapabilities = 8;
readonly attribute optional TwoWayTalkSupportTypeEnum twoWayTalkSupport = 9;
readonly attribute optional SnapshotParamsStruct supportedSnapshotParams[] = 10;
readonly attribute int32u maxNetworkBandwidth = 11;
readonly attribute optional int16u currentFrameRate = 12;
attribute access(read: manage, write: manage) optional boolean HDRModeEnabled = 13;
readonly attribute fabric_idx fabricsUsingCamera[] = 14;
readonly attribute StreamUsageEnum supportedStreamUsages[] = 14;
readonly attribute optional VideoStreamStruct allocatedVideoStreams[] = 15;
readonly attribute optional AudioStreamStruct allocatedAudioStreams[] = 16;
readonly attribute optional SnapshotStreamStruct allocatedSnapshotStreams[] = 17;
Expand Down Expand Up @@ -9956,12 +9922,20 @@ provisional cluster CameraAvStreamManagement = 1361 {
VideoResolutionStruct minResolution = 3;
VideoResolutionStruct maxResolution = 4;
int8u quality = 5;
optional boolean watermarkEnabled = 6;
optional boolean OSDEnabled = 7;
}

/** Response (ID 8) to the SnapshotStreamAllocate command: carries the identifier of the newly allocated snapshot stream. */
response struct SnapshotStreamAllocateResponse = 8 {
int16u snapshotStreamID = 0;
}

/** Request payload for the SnapshotStreamModify command: identifies the snapshot stream to modify; the watermark and OSD flags are optional and, when present, update the stream's settings. */
request struct SnapshotStreamModifyRequest {
int16u snapshotStreamID = 0;
optional boolean watermarkEnabled = 1;
optional boolean OSDEnabled = 2;
}

/** Request payload for the SnapshotStreamDeallocate command: identifies the snapshot stream to release. */
request struct SnapshotStreamDeallocateRequest {
int16u snapshotStreamID = 0;
}
Expand All @@ -9975,7 +9949,7 @@ provisional cluster CameraAvStreamManagement = 1361 {
VideoResolutionStruct requestedResolution = 1;
}

response struct CaptureSnapshotResponse = 12 {
response struct CaptureSnapshotResponse = 13 {
octet_string data = 0;
ImageCodecEnum imageCodec = 1;
VideoResolutionStruct resolution = 2;
Expand All @@ -9993,12 +9967,14 @@ provisional cluster CameraAvStreamManagement = 1361 {
command access(invoke: manage) VideoStreamDeallocate(VideoStreamDeallocateRequest): DefaultSuccess = 6;
/** This command SHALL allocate a snapshot stream on the device and return an allocated snapshot stream identifier. */
command access(invoke: manage) SnapshotStreamAllocate(SnapshotStreamAllocateRequest): SnapshotStreamAllocateResponse = 7;
/** This command SHALL be used to modify a snapshot stream specified by the SnapshotStreamID. */
command access(invoke: manage) SnapshotStreamModify(SnapshotStreamModifyRequest): DefaultSuccess = 9;
/** This command SHALL deallocate a snapshot stream on the camera, corresponding to the given snapshot stream identifier. */
command access(invoke: manage) SnapshotStreamDeallocate(SnapshotStreamDeallocateRequest): DefaultSuccess = 9;
command access(invoke: manage) SnapshotStreamDeallocate(SnapshotStreamDeallocateRequest): DefaultSuccess = 10;
/** This command SHALL set the relative priorities of the various stream usages on the camera. */
command access(invoke: administer) SetStreamPriorities(SetStreamPrioritiesRequest): DefaultSuccess = 10;
command access(invoke: administer) SetStreamPriorities(SetStreamPrioritiesRequest): DefaultSuccess = 11;
/** This command SHALL return a Snapshot from the camera. */
command CaptureSnapshot(CaptureSnapshotRequest): CaptureSnapshotResponse = 11;
command CaptureSnapshot(CaptureSnapshotRequest): CaptureSnapshotResponse = 12;
}

/** This cluster provides an interface into controls associated with the operation of a device that provides pan, tilt, and zoom functions, either mechanically, or against a digital image. */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60409,7 +60409,7 @@ public static class CameraAvStreamManagementCluster extends BaseChipCluster {
private static final long MAX_NETWORK_BANDWIDTH_ATTRIBUTE_ID = 11L;
private static final long CURRENT_FRAME_RATE_ATTRIBUTE_ID = 12L;
private static final long HDR_MODE_ENABLED_ATTRIBUTE_ID = 13L;
private static final long FABRICS_USING_CAMERA_ATTRIBUTE_ID = 14L;
private static final long SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID = 14L;
private static final long ALLOCATED_VIDEO_STREAMS_ATTRIBUTE_ID = 15L;
private static final long ALLOCATED_AUDIO_STREAMS_ATTRIBUTE_ID = 16L;
private static final long ALLOCATED_SNAPSHOT_STREAMS_ATTRIBUTE_ID = 17L;
Expand Down Expand Up @@ -60645,11 +60645,11 @@ public void onResponse(StructType invokeStructValue) {
}}, commandId, commandArgs, timedInvokeTimeoutMs);
}

public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality) {
snapshotStreamAllocate(callback, imageCodec, maxFrameRate, bitRate, minResolution, maxResolution, quality, 0);
public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled) {
snapshotStreamAllocate(callback, imageCodec, maxFrameRate, bitRate, minResolution, maxResolution, quality, watermarkEnabled, OSDEnabled, 0);
}

public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, int timedInvokeTimeoutMs) {
public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callback, Integer imageCodec, Integer maxFrameRate, Long bitRate, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct minResolution, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct maxResolution, Integer quality, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled, int timedInvokeTimeoutMs) {
final long commandId = 7L;

ArrayList<StructElement> elements = new ArrayList<>();
Expand Down Expand Up @@ -60677,6 +60677,14 @@ public void snapshotStreamAllocate(SnapshotStreamAllocateResponseCallback callba
BaseTLVType qualitytlvValue = new UIntType(quality);
elements.add(new StructElement(qualityFieldID, qualitytlvValue));

final long watermarkEnabledFieldID = 6L;
BaseTLVType watermarkEnabledtlvValue = watermarkEnabled.<BaseTLVType>map((nonOptionalwatermarkEnabled) -> new BooleanType(nonOptionalwatermarkEnabled)).orElse(new EmptyType());
elements.add(new StructElement(watermarkEnabledFieldID, watermarkEnabledtlvValue));

final long OSDEnabledFieldID = 7L;
BaseTLVType OSDEnabledtlvValue = OSDEnabled.<BaseTLVType>map((nonOptionalOSDEnabled) -> new BooleanType(nonOptionalOSDEnabled)).orElse(new EmptyType());
elements.add(new StructElement(OSDEnabledFieldID, OSDEnabledtlvValue));

StructType commandArgs = new StructType(elements);
invoke(new InvokeCallbackImpl(callback) {
@Override
Expand All @@ -60695,12 +60703,40 @@ public void onResponse(StructType invokeStructValue) {
}}, commandId, commandArgs, timedInvokeTimeoutMs);
}

/**
 * Invokes the SnapshotStreamModify command (command ID 9) with no timed-invoke
 * timeout (delegates to the timed overload with {@code timedInvokeTimeoutMs = 0}).
 *
 * @param callback receives the default success/failure result
 * @param snapshotStreamID identifier of the snapshot stream to modify
 * @param watermarkEnabled optional new watermark setting; omitted from the request when absent
 * @param OSDEnabled optional new on-screen-display setting; omitted from the request when absent
 */
public void snapshotStreamModify(DefaultClusterCallback callback, Integer snapshotStreamID, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled) {
snapshotStreamModify(callback, snapshotStreamID, watermarkEnabled, OSDEnabled, 0);
}

/**
 * Invokes the SnapshotStreamModify command (command ID 9) on the cluster.
 *
 * <p>Encodes the arguments as a TLV struct — field 0 = snapshotStreamID (uint),
 * field 1 = watermarkEnabled, field 2 = OSDEnabled (each encoded as EmptyType when
 * absent) — then issues the invoke and reports completion via
 * {@code callback.onSuccess()} (the command returns DefaultSuccess, no payload).
 *
 * @param callback receives the default success/failure result
 * @param snapshotStreamID identifier of the snapshot stream to modify
 * @param watermarkEnabled optional new watermark setting
 * @param OSDEnabled optional new on-screen-display setting
 * @param timedInvokeTimeoutMs timed-invoke timeout in milliseconds; 0 for an untimed invoke
 */
public void snapshotStreamModify(DefaultClusterCallback callback, Integer snapshotStreamID, Optional<Boolean> watermarkEnabled, Optional<Boolean> OSDEnabled, int timedInvokeTimeoutMs) {
final long commandId = 9L;

// Encode each argument; optional booleans fall back to EmptyType when not provided.
BaseTLVType streamIdTlv = new UIntType(snapshotStreamID);
BaseTLVType watermarkTlv = watermarkEnabled.<BaseTLVType>map(BooleanType::new).orElse(new EmptyType());
BaseTLVType osdTlv = OSDEnabled.<BaseTLVType>map(BooleanType::new).orElse(new EmptyType());

// Assemble the command argument struct in field-ID order.
ArrayList<StructElement> fields = new ArrayList<>();
fields.add(new StructElement(0L, streamIdTlv));
fields.add(new StructElement(1L, watermarkTlv));
fields.add(new StructElement(2L, osdTlv));

invoke(new InvokeCallbackImpl(callback) {
@Override
public void onResponse(StructType invokeStructValue) {
// DefaultSuccess response: nothing to decode.
callback.onSuccess();
}}, commandId, new StructType(fields), timedInvokeTimeoutMs);
}

/**
 * Invokes the SnapshotStreamDeallocate command with no timed-invoke timeout
 * (delegates to the timed overload with {@code timedInvokeTimeoutMs = 0}).
 *
 * @param callback receives the default success/failure result
 * @param snapshotStreamID identifier of the snapshot stream to deallocate
 */
public void snapshotStreamDeallocate(DefaultClusterCallback callback, Integer snapshotStreamID) {
snapshotStreamDeallocate(callback, snapshotStreamID, 0);
}

public void snapshotStreamDeallocate(DefaultClusterCallback callback, Integer snapshotStreamID, int timedInvokeTimeoutMs) {
final long commandId = 9L;
final long commandId = 10L;

ArrayList<StructElement> elements = new ArrayList<>();
final long snapshotStreamIDFieldID = 0L;
Expand All @@ -60720,7 +60756,7 @@ public void setStreamPriorities(DefaultClusterCallback callback, ArrayList<Integ
}

public void setStreamPriorities(DefaultClusterCallback callback, ArrayList<Integer> streamPriorities, int timedInvokeTimeoutMs) {
final long commandId = 10L;
final long commandId = 11L;

ArrayList<StructElement> elements = new ArrayList<>();
final long streamPrioritiesFieldID = 0L;
Expand All @@ -60740,7 +60776,7 @@ public void captureSnapshot(CaptureSnapshotResponseCallback callback, Integer sn
}

public void captureSnapshot(CaptureSnapshotResponseCallback callback, Integer snapshotStreamID, ChipStructs.CameraAvStreamManagementClusterVideoResolutionStruct requestedResolution, int timedInvokeTimeoutMs) {
final long commandId = 11L;
final long commandId = 12L;

ArrayList<StructElement> elements = new ArrayList<>();
final long snapshotStreamIDFieldID = 0L;
Expand Down Expand Up @@ -60823,7 +60859,7 @@ public interface SupportedSnapshotParamsAttributeCallback extends BaseAttributeC
void onSuccess(List<ChipStructs.CameraAvStreamManagementClusterSnapshotParamsStruct> value);
}

public interface FabricsUsingCameraAttributeCallback extends BaseAttributeCallback {
public interface SupportedStreamUsagesAttributeCallback extends BaseAttributeCallback {
void onSuccess(List<Integer> value);
}

Expand Down Expand Up @@ -61236,30 +61272,30 @@ public void onSuccess(byte[] tlv) {
}, HDR_MODE_ENABLED_ATTRIBUTE_ID, minInterval, maxInterval);
}

public void readFabricsUsingCameraAttribute(
FabricsUsingCameraAttributeCallback callback) {
ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, FABRICS_USING_CAMERA_ATTRIBUTE_ID);
public void readSupportedStreamUsagesAttribute(
SupportedStreamUsagesAttributeCallback callback) {
ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID);

readAttribute(new ReportCallbackImpl(callback, path) {
@Override
public void onSuccess(byte[] tlv) {
List<Integer> value = ChipTLVValueDecoder.decodeAttributeValue(path, tlv);
callback.onSuccess(value);
}
}, FABRICS_USING_CAMERA_ATTRIBUTE_ID, true);
}, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID, true);
}

public void subscribeFabricsUsingCameraAttribute(
FabricsUsingCameraAttributeCallback callback, int minInterval, int maxInterval) {
ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, FABRICS_USING_CAMERA_ATTRIBUTE_ID);
public void subscribeSupportedStreamUsagesAttribute(
SupportedStreamUsagesAttributeCallback callback, int minInterval, int maxInterval) {
ChipAttributePath path = ChipAttributePath.newInstance(endpointId, clusterId, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID);

subscribeAttribute(new ReportCallbackImpl(callback, path) {
@Override
public void onSuccess(byte[] tlv) {
List<Integer> value = ChipTLVValueDecoder.decodeAttributeValue(path, tlv);
callback.onSuccess(value);
}
}, FABRICS_USING_CAMERA_ATTRIBUTE_ID, minInterval, maxInterval);
}, SUPPORTED_STREAM_USAGES_ATTRIBUTE_ID, minInterval, maxInterval);
}

public void readAllocatedVideoStreamsAttribute(
Expand Down
Loading
Loading