// matc/clusters/codec/camera_av_stream_management.rs

1//! Matter TLV encoders and decoders for Camera AV Stream Management Cluster
2//! Cluster ID: 0x0551
3//!
4//! This file is automatically generated from CameraAVStreamManagement.xml
5
6#![allow(clippy::too_many_arguments)]
7
8use crate::tlv;
9use anyhow;
10use serde_json;
11
12
13// Import serialization helpers for octet strings
14use crate::clusters::helpers::{serialize_opt_bytes_as_hex};
15
16// Enum definitions
17
/// Audio codec identifiers for the Camera AV Stream Management cluster.
///
/// Discriminants are the raw TLV enum values (round-tripped via
/// `from_u8`/`to_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[repr(u8)]
pub enum AudioCodec {
    /// Open source IETF standard codec.
    Opus = 0,
    /// Advanced Audio Coding codec-Low Complexity
    AacLc = 1,
}
26
27impl AudioCodec {
28    /// Convert from u8 value
29    pub fn from_u8(value: u8) -> Option<Self> {
30        match value {
31            0 => Some(AudioCodec::Opus),
32            1 => Some(AudioCodec::AacLc),
33            _ => None,
34        }
35    }
36
37    /// Convert to u8 value
38    pub fn to_u8(self) -> u8 {
39        self as u8
40    }
41}
42
43impl From<AudioCodec> for u8 {
44    fn from(val: AudioCodec) -> Self {
45        val as u8
46    }
47}
48
/// Image codec identifiers for snapshot streams.
///
/// Discriminants are the raw TLV enum values (round-tripped via
/// `from_u8`/`to_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[repr(u8)]
pub enum ImageCodec {
    /// JPEG image codec.
    Jpeg = 0,
    /// HEIC image codec.
    Heic = 1,
}
57
58impl ImageCodec {
59    /// Convert from u8 value
60    pub fn from_u8(value: u8) -> Option<Self> {
61        match value {
62            0 => Some(ImageCodec::Jpeg),
63            1 => Some(ImageCodec::Heic),
64            _ => None,
65        }
66    }
67
68    /// Convert to u8 value
69    pub fn to_u8(self) -> u8 {
70        self as u8
71    }
72}
73
74impl From<ImageCodec> for u8 {
75    fn from(val: ImageCodec) -> Self {
76        val as u8
77    }
78}
79
/// Off/On/Auto tri-state used by several cluster attributes.
///
/// Discriminants are the raw TLV enum values (round-tripped via
/// `from_u8`/`to_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[repr(u8)]
pub enum TriStateAuto {
    /// Off
    Off = 0,
    /// On
    On = 1,
    /// Automatic Operation
    Auto = 2,
}
90
91impl TriStateAuto {
92    /// Convert from u8 value
93    pub fn from_u8(value: u8) -> Option<Self> {
94        match value {
95            0 => Some(TriStateAuto::Off),
96            1 => Some(TriStateAuto::On),
97            2 => Some(TriStateAuto::Auto),
98            _ => None,
99        }
100    }
101
102    /// Convert to u8 value
103    pub fn to_u8(self) -> u8 {
104        self as u8
105    }
106}
107
108impl From<TriStateAuto> for u8 {
109    fn from(val: TriStateAuto) -> Self {
110        val as u8
111    }
112}
113
/// Two-way talk capability reported by the TwoWayTalkSupport attribute.
///
/// Discriminants are the raw TLV enum values (round-tripped via
/// `from_u8`/`to_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[repr(u8)]
pub enum TwoWayTalkSupportType {
    /// Two-way Talk support is absent.
    Notsupported = 0,
    /// Audio in one direction at a time.
    Halfduplex = 1,
    /// Audio in both directions simultaneously.
    Fullduplex = 2,
}
124
125impl TwoWayTalkSupportType {
126    /// Convert from u8 value
127    pub fn from_u8(value: u8) -> Option<Self> {
128        match value {
129            0 => Some(TwoWayTalkSupportType::Notsupported),
130            1 => Some(TwoWayTalkSupportType::Halfduplex),
131            2 => Some(TwoWayTalkSupportType::Fullduplex),
132            _ => None,
133        }
134    }
135
136    /// Convert to u8 value
137    pub fn to_u8(self) -> u8 {
138        self as u8
139    }
140}
141
142impl From<TwoWayTalkSupportType> for u8 {
143    fn from(val: TwoWayTalkSupportType) -> Self {
144        val as u8
145    }
146}
147
/// Video codec identifiers for video streams.
///
/// Discriminants are the raw TLV enum values (round-tripped via
/// `from_u8`/`to_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[repr(u8)]
pub enum VideoCodec {
    /// Advanced Video Coding (H.264) codec.
    H264 = 0,
    /// High efficiency Video Coding (H.265) codec.
    Hevc = 1,
    /// Versatile Video Coding (H.266) codec.
    Vvc = 2,
    /// AOMedia Video 1 codec.
    Av1 = 3,
}
160
161impl VideoCodec {
162    /// Convert from u8 value
163    pub fn from_u8(value: u8) -> Option<Self> {
164        match value {
165            0 => Some(VideoCodec::H264),
166            1 => Some(VideoCodec::Hevc),
167            2 => Some(VideoCodec::Vvc),
168            3 => Some(VideoCodec::Av1),
169            _ => None,
170        }
171    }
172
173    /// Convert to u8 value
174    pub fn to_u8(self) -> u8 {
175        self as u8
176    }
177}
178
179impl From<VideoCodec> for u8 {
180    fn from(val: VideoCodec) -> Self {
181        val as u8
182    }
183}
184
185// Struct definitions
186
/// AVMetadata struct. Fields are `Option` because the corresponding TLV
/// fields may be absent from a decoded payload.
#[derive(Debug, serde::Serialize)]
pub struct AVMetadata {
    pub utc_time: Option<u64>,
    pub motion_zones_active: Option<Vec<u8>>,
    pub black_and_white_active: Option<bool>,
    // Octet string; serialized to JSON as a hex string.
    #[serde(serialize_with = "serialize_opt_bytes_as_hex")]
    pub user_defined: Option<Vec<u8>>,
}
195
/// AudioCapabilitiesStruct, decoded from the MicrophoneCapabilities /
/// SpeakerCapabilities attributes. Fields are `Option` because the
/// corresponding TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct AudioCapabilities {
    pub max_number_of_channels: Option<u8>,
    pub supported_codecs: Option<Vec<AudioCodec>>,
    pub supported_sample_rates: Option<Vec<u32>>,
    pub supported_bit_depths: Option<Vec<u8>>,
}
203
/// AudioStreamStruct, decoded from the AllocatedAudioStreams attribute.
/// Fields are `Option` because the corresponding TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct AudioStream {
    pub audio_stream_id: Option<u8>,
    pub stream_usage: Option<u8>,
    pub audio_codec: Option<AudioCodec>,
    pub channel_count: Option<u8>,
    pub sample_rate: Option<u32>,
    pub bit_rate: Option<u32>,
    pub bit_depth: Option<u8>,
    pub reference_count: Option<u8>,
}
215
/// RateDistortionTradeOffPointsStruct: minimum bit rate required for a
/// codec/resolution pair. Fields are `Option` because the corresponding
/// TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct RateDistortionTradeOffPoints {
    pub codec: Option<VideoCodec>,
    pub resolution: Option<VideoResolution>,
    pub min_bit_rate: Option<u32>,
}
222
/// SnapshotCapabilitiesStruct, decoded from the SnapshotCapabilities
/// attribute. Fields are `Option` because the corresponding TLV fields
/// may be absent.
#[derive(Debug, serde::Serialize)]
pub struct SnapshotCapabilities {
    pub resolution: Option<VideoResolution>,
    pub max_frame_rate: Option<u16>,
    pub image_codec: Option<ImageCodec>,
    pub requires_encoded_pixels: Option<bool>,
    pub requires_hardware_encoder: Option<bool>,
}
231
/// SnapshotStreamStruct, decoded from the AllocatedSnapshotStreams
/// attribute. Fields are `Option` because the corresponding TLV fields
/// may be absent.
#[derive(Debug, serde::Serialize)]
pub struct SnapshotStream {
    pub snapshot_stream_id: Option<u8>,
    pub image_codec: Option<ImageCodec>,
    pub frame_rate: Option<u16>,
    pub min_resolution: Option<VideoResolution>,
    pub max_resolution: Option<VideoResolution>,
    pub quality: Option<u8>,
    pub reference_count: Option<u8>,
    pub encoded_pixels: Option<bool>,
    pub hardware_encoder: Option<bool>,
    pub watermark_enabled: Option<bool>,
    pub osd_enabled: Option<bool>,
}
246
/// VideoResolutionStruct: width/height in pixels. Fields are `Option`
/// because the corresponding TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct VideoResolution {
    pub width: Option<u16>,
    pub height: Option<u16>,
}
252
/// VideoSensorParamsStruct, decoded from the VideoSensorParams attribute.
/// Fields are `Option` because the corresponding TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct VideoSensorParams {
    pub sensor_width: Option<u16>,
    pub sensor_height: Option<u16>,
    pub max_fps: Option<u16>,
    pub max_hdrfps: Option<u16>,
}
260
/// VideoStreamStruct, decoded from the AllocatedVideoStreams attribute.
/// Fields are `Option` because the corresponding TLV fields may be absent.
#[derive(Debug, serde::Serialize)]
pub struct VideoStream {
    pub video_stream_id: Option<u8>,
    pub stream_usage: Option<u8>,
    pub video_codec: Option<VideoCodec>,
    pub min_frame_rate: Option<u16>,
    pub max_frame_rate: Option<u16>,
    pub min_resolution: Option<VideoResolution>,
    pub max_resolution: Option<VideoResolution>,
    pub min_bit_rate: Option<u32>,
    pub max_bit_rate: Option<u32>,
    pub key_frame_interval: Option<u16>,
    pub watermark_enabled: Option<bool>,
    pub osd_enabled: Option<bool>,
    pub reference_count: Option<u8>,
}
277
278// Command encoders
279
280/// Encode AudioStreamAllocate command (0x00)
281pub fn encode_audio_stream_allocate(stream_usage: u8, audio_codec: AudioCodec, channel_count: u8, sample_rate: u32, bit_rate: u32, bit_depth: u8) -> anyhow::Result<Vec<u8>> {
282    let tlv = tlv::TlvItemEnc {
283        tag: 0,
284        value: tlv::TlvItemValueEnc::StructInvisible(vec![
285        (0, tlv::TlvItemValueEnc::UInt8(stream_usage)).into(),
286        (1, tlv::TlvItemValueEnc::UInt8(audio_codec.to_u8())).into(),
287        (2, tlv::TlvItemValueEnc::UInt8(channel_count)).into(),
288        (3, tlv::TlvItemValueEnc::UInt32(sample_rate)).into(),
289        (4, tlv::TlvItemValueEnc::UInt32(bit_rate)).into(),
290        (5, tlv::TlvItemValueEnc::UInt8(bit_depth)).into(),
291        ]),
292    };
293    Ok(tlv.encode()?)
294}
295
296/// Encode AudioStreamDeallocate command (0x02)
297pub fn encode_audio_stream_deallocate(audio_stream_id: u8) -> anyhow::Result<Vec<u8>> {
298    let tlv = tlv::TlvItemEnc {
299        tag: 0,
300        value: tlv::TlvItemValueEnc::StructInvisible(vec![
301        (0, tlv::TlvItemValueEnc::UInt8(audio_stream_id)).into(),
302        ]),
303    };
304    Ok(tlv.encode()?)
305}
306
/// Parameters for VideoStreamAllocate command
///
/// Bundles the eleven command fields so `encode_video_stream_allocate`
/// stays below the argument-count limit. Field order mirrors the TLV
/// tag order (0..=10) used by the encoder.
pub struct VideoStreamAllocateParams {
    pub stream_usage: u8,
    pub video_codec: VideoCodec,
    pub min_frame_rate: u16,
    pub max_frame_rate: u16,
    pub min_resolution: VideoResolution,
    pub max_resolution: VideoResolution,
    pub min_bit_rate: u32,
    pub max_bit_rate: u32,
    pub key_frame_interval: u16,
    pub watermark_enabled: bool,
    pub osd_enabled: bool,
}
321
322/// Encode VideoStreamAllocate command (0x03)
323pub fn encode_video_stream_allocate(params: VideoStreamAllocateParams) -> anyhow::Result<Vec<u8>> {
324            // Encode struct VideoResolutionStruct
325            let mut min_resolution_fields = Vec::new();
326            if let Some(x) = params.min_resolution.width { min_resolution_fields.push((0, tlv::TlvItemValueEnc::UInt16(x)).into()); }
327            if let Some(x) = params.min_resolution.height { min_resolution_fields.push((1, tlv::TlvItemValueEnc::UInt16(x)).into()); }
328            // Encode struct VideoResolutionStruct
329            let mut max_resolution_fields = Vec::new();
330            if let Some(x) = params.max_resolution.width { max_resolution_fields.push((0, tlv::TlvItemValueEnc::UInt16(x)).into()); }
331            if let Some(x) = params.max_resolution.height { max_resolution_fields.push((1, tlv::TlvItemValueEnc::UInt16(x)).into()); }
332    let tlv = tlv::TlvItemEnc {
333        tag: 0,
334        value: tlv::TlvItemValueEnc::StructInvisible(vec![
335        (0, tlv::TlvItemValueEnc::UInt8(params.stream_usage)).into(),
336        (1, tlv::TlvItemValueEnc::UInt8(params.video_codec.to_u8())).into(),
337        (2, tlv::TlvItemValueEnc::UInt16(params.min_frame_rate)).into(),
338        (3, tlv::TlvItemValueEnc::UInt16(params.max_frame_rate)).into(),
339        (4, tlv::TlvItemValueEnc::StructInvisible(min_resolution_fields)).into(),
340        (5, tlv::TlvItemValueEnc::StructInvisible(max_resolution_fields)).into(),
341        (6, tlv::TlvItemValueEnc::UInt32(params.min_bit_rate)).into(),
342        (7, tlv::TlvItemValueEnc::UInt32(params.max_bit_rate)).into(),
343        (8, tlv::TlvItemValueEnc::UInt16(params.key_frame_interval)).into(),
344        (9, tlv::TlvItemValueEnc::Bool(params.watermark_enabled)).into(),
345        (10, tlv::TlvItemValueEnc::Bool(params.osd_enabled)).into(),
346        ]),
347    };
348    Ok(tlv.encode()?)
349}
350
351/// Encode VideoStreamModify command (0x05)
352pub fn encode_video_stream_modify(video_stream_id: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<Vec<u8>> {
353    let tlv = tlv::TlvItemEnc {
354        tag: 0,
355        value: tlv::TlvItemValueEnc::StructInvisible(vec![
356        (0, tlv::TlvItemValueEnc::UInt8(video_stream_id)).into(),
357        (1, tlv::TlvItemValueEnc::Bool(watermark_enabled)).into(),
358        (2, tlv::TlvItemValueEnc::Bool(osd_enabled)).into(),
359        ]),
360    };
361    Ok(tlv.encode()?)
362}
363
364/// Encode VideoStreamDeallocate command (0x06)
365pub fn encode_video_stream_deallocate(video_stream_id: u8) -> anyhow::Result<Vec<u8>> {
366    let tlv = tlv::TlvItemEnc {
367        tag: 0,
368        value: tlv::TlvItemValueEnc::StructInvisible(vec![
369        (0, tlv::TlvItemValueEnc::UInt8(video_stream_id)).into(),
370        ]),
371    };
372    Ok(tlv.encode()?)
373}
374
375/// Encode SnapshotStreamAllocate command (0x07)
376pub fn encode_snapshot_stream_allocate(image_codec: ImageCodec, max_frame_rate: u16, min_resolution: VideoResolution, max_resolution: VideoResolution, quality: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<Vec<u8>> {
377            // Encode struct VideoResolutionStruct
378            let mut min_resolution_fields = Vec::new();
379            if let Some(x) = min_resolution.width { min_resolution_fields.push((0, tlv::TlvItemValueEnc::UInt16(x)).into()); }
380            if let Some(x) = min_resolution.height { min_resolution_fields.push((1, tlv::TlvItemValueEnc::UInt16(x)).into()); }
381            // Encode struct VideoResolutionStruct
382            let mut max_resolution_fields = Vec::new();
383            if let Some(x) = max_resolution.width { max_resolution_fields.push((0, tlv::TlvItemValueEnc::UInt16(x)).into()); }
384            if let Some(x) = max_resolution.height { max_resolution_fields.push((1, tlv::TlvItemValueEnc::UInt16(x)).into()); }
385    let tlv = tlv::TlvItemEnc {
386        tag: 0,
387        value: tlv::TlvItemValueEnc::StructInvisible(vec![
388        (0, tlv::TlvItemValueEnc::UInt8(image_codec.to_u8())).into(),
389        (1, tlv::TlvItemValueEnc::UInt16(max_frame_rate)).into(),
390        (2, tlv::TlvItemValueEnc::StructInvisible(min_resolution_fields)).into(),
391        (3, tlv::TlvItemValueEnc::StructInvisible(max_resolution_fields)).into(),
392        (4, tlv::TlvItemValueEnc::UInt8(quality)).into(),
393        (5, tlv::TlvItemValueEnc::Bool(watermark_enabled)).into(),
394        (6, tlv::TlvItemValueEnc::Bool(osd_enabled)).into(),
395        ]),
396    };
397    Ok(tlv.encode()?)
398}
399
400/// Encode SnapshotStreamModify command (0x09)
401pub fn encode_snapshot_stream_modify(snapshot_stream_id: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<Vec<u8>> {
402    let tlv = tlv::TlvItemEnc {
403        tag: 0,
404        value: tlv::TlvItemValueEnc::StructInvisible(vec![
405        (0, tlv::TlvItemValueEnc::UInt8(snapshot_stream_id)).into(),
406        (1, tlv::TlvItemValueEnc::Bool(watermark_enabled)).into(),
407        (2, tlv::TlvItemValueEnc::Bool(osd_enabled)).into(),
408        ]),
409    };
410    Ok(tlv.encode()?)
411}
412
413/// Encode SnapshotStreamDeallocate command (0x0A)
414pub fn encode_snapshot_stream_deallocate(snapshot_stream_id: u8) -> anyhow::Result<Vec<u8>> {
415    let tlv = tlv::TlvItemEnc {
416        tag: 0,
417        value: tlv::TlvItemValueEnc::StructInvisible(vec![
418        (0, tlv::TlvItemValueEnc::UInt8(snapshot_stream_id)).into(),
419        ]),
420    };
421    Ok(tlv.encode()?)
422}
423
/// Encode SetStreamPriorities command (0x0B)
///
/// `stream_priorities` is the ordered list of stream-usage values to encode
/// under tag 0 of the command payload.
pub fn encode_set_stream_priorities(stream_priorities: Vec<u8>) -> anyhow::Result<Vec<u8>> {
    // NOTE(review): the list is encoded as `StructAnon` with every element
    // carrying tag 0 — presumably the TLV layer emits this as an anonymous
    // array; confirm against `tlv::TlvItemValueEnc`'s encoding rules.
    let tlv = tlv::TlvItemEnc {
        tag: 0,
        value: tlv::TlvItemValueEnc::StructInvisible(vec![
        (0, tlv::TlvItemValueEnc::StructAnon(stream_priorities.into_iter().map(|v| (0, tlv::TlvItemValueEnc::UInt8(v)).into()).collect())).into(),
        ]),
    };
    Ok(tlv.encode()?)
}
434
/// Encode CaptureSnapshot command (0x0C)
///
/// `requested_resolution` is encoded as a nested VideoResolutionStruct at
/// tag 1; its width/height fields are emitted only when present.
pub fn encode_capture_snapshot(snapshot_stream_id: Option<u8>, requested_resolution: VideoResolution) -> anyhow::Result<Vec<u8>> {
            // Encode struct VideoResolutionStruct
            let mut requested_resolution_fields = Vec::new();
            if let Some(x) = requested_resolution.width { requested_resolution_fields.push((0, tlv::TlvItemValueEnc::UInt16(x)).into()); }
            if let Some(x) = requested_resolution.height { requested_resolution_fields.push((1, tlv::TlvItemValueEnc::UInt16(x)).into()); }
    let tlv = tlv::TlvItemEnc {
        tag: 0,
        value: tlv::TlvItemValueEnc::StructInvisible(vec![
        // NOTE(review): a `None` stream id is flattened to 0 here rather than
        // encoded as a TLV null; the `Option` parameter suggests the field is
        // nullable on the wire — confirm against the cluster spec before
        // relying on 0 as "no stream".
        (0, tlv::TlvItemValueEnc::UInt8(snapshot_stream_id.unwrap_or(0))).into(),
        (1, tlv::TlvItemValueEnc::StructInvisible(requested_resolution_fields)).into(),
        ]),
    };
    Ok(tlv.encode()?)
}
450
451// Attribute decoders
452
453/// Decode MaxConcurrentEncoders attribute (0x0000)
454pub fn decode_max_concurrent_encoders(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
455    if let tlv::TlvItemValue::Int(v) = inp {
456        Ok(*v as u8)
457    } else {
458        Err(anyhow::anyhow!("Expected UInt8"))
459    }
460}
461
462/// Decode MaxEncodedPixelRate attribute (0x0001)
463pub fn decode_max_encoded_pixel_rate(inp: &tlv::TlvItemValue) -> anyhow::Result<u32> {
464    if let tlv::TlvItemValue::Int(v) = inp {
465        Ok(*v as u32)
466    } else {
467        Err(anyhow::anyhow!("Expected UInt32"))
468    }
469}
470
471/// Decode VideoSensorParams attribute (0x0002)
472pub fn decode_video_sensor_params(inp: &tlv::TlvItemValue) -> anyhow::Result<VideoSensorParams> {
473    if let tlv::TlvItemValue::List(_fields) = inp {
474        // Struct with fields
475        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
476        Ok(VideoSensorParams {
477                sensor_width: item.get_int(&[0]).map(|v| v as u16),
478                sensor_height: item.get_int(&[1]).map(|v| v as u16),
479                max_fps: item.get_int(&[2]).map(|v| v as u16),
480                max_hdrfps: item.get_int(&[3]).map(|v| v as u16),
481        })
482    } else {
483        Err(anyhow::anyhow!("Expected struct fields"))
484    }
485}
486
487/// Decode NightVisionUsesInfrared attribute (0x0003)
488pub fn decode_night_vision_uses_infrared(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
489    if let tlv::TlvItemValue::Bool(v) = inp {
490        Ok(*v)
491    } else {
492        Err(anyhow::anyhow!("Expected Bool"))
493    }
494}
495
496/// Decode MinViewportResolution attribute (0x0004)
497pub fn decode_min_viewport_resolution(inp: &tlv::TlvItemValue) -> anyhow::Result<VideoResolution> {
498    if let tlv::TlvItemValue::List(_fields) = inp {
499        // Struct with fields
500        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
501        Ok(VideoResolution {
502                width: item.get_int(&[0]).map(|v| v as u16),
503                height: item.get_int(&[1]).map(|v| v as u16),
504        })
505    } else {
506        Err(anyhow::anyhow!("Expected struct fields"))
507    }
508}
509
510/// Decode RateDistortionTradeOffPoints attribute (0x0005)
511pub fn decode_rate_distortion_trade_off_points(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<RateDistortionTradeOffPoints>> {
512    let mut res = Vec::new();
513    if let tlv::TlvItemValue::List(v) = inp {
514        for item in v {
515            res.push(RateDistortionTradeOffPoints {
516                codec: item.get_int(&[0]).and_then(|v| VideoCodec::from_u8(v as u8)),
517                resolution: {
518                    if let Some(nested_tlv) = item.get(&[1]) {
519                        if let tlv::TlvItemValue::List(_) = nested_tlv {
520                            let nested_item = tlv::TlvItem { tag: 1, value: nested_tlv.clone() };
521                            Some(VideoResolution {
522                width: nested_item.get_int(&[0]).map(|v| v as u16),
523                height: nested_item.get_int(&[1]).map(|v| v as u16),
524                            })
525                        } else {
526                            None
527                        }
528                    } else {
529                        None
530                    }
531                },
532                min_bit_rate: item.get_int(&[2]).map(|v| v as u32),
533            });
534        }
535    }
536    Ok(res)
537}
538
539/// Decode MaxContentBufferSize attribute (0x0006)
540pub fn decode_max_content_buffer_size(inp: &tlv::TlvItemValue) -> anyhow::Result<u32> {
541    if let tlv::TlvItemValue::Int(v) = inp {
542        Ok(*v as u32)
543    } else {
544        Err(anyhow::anyhow!("Expected UInt32"))
545    }
546}
547
548/// Decode MicrophoneCapabilities attribute (0x0007)
549pub fn decode_microphone_capabilities(inp: &tlv::TlvItemValue) -> anyhow::Result<AudioCapabilities> {
550    if let tlv::TlvItemValue::List(_fields) = inp {
551        // Struct with fields
552        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
553        Ok(AudioCapabilities {
554                max_number_of_channels: item.get_int(&[0]).map(|v| v as u8),
555                supported_codecs: {
556                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[1]) {
557                        let items: Vec<AudioCodec> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { AudioCodec::from_u8(*v as u8) } else { None } }).collect();
558                        Some(items)
559                    } else {
560                        None
561                    }
562                },
563                supported_sample_rates: {
564                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[2]) {
565                        let items: Vec<u32> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { Some(*v as u32) } else { None } }).collect();
566                        Some(items)
567                    } else {
568                        None
569                    }
570                },
571                supported_bit_depths: {
572                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[3]) {
573                        let items: Vec<u8> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { Some(*v as u8) } else { None } }).collect();
574                        Some(items)
575                    } else {
576                        None
577                    }
578                },
579        })
580    } else {
581        Err(anyhow::anyhow!("Expected struct fields"))
582    }
583}
584
585/// Decode SpeakerCapabilities attribute (0x0008)
586pub fn decode_speaker_capabilities(inp: &tlv::TlvItemValue) -> anyhow::Result<AudioCapabilities> {
587    if let tlv::TlvItemValue::List(_fields) = inp {
588        // Struct with fields
589        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
590        Ok(AudioCapabilities {
591                max_number_of_channels: item.get_int(&[0]).map(|v| v as u8),
592                supported_codecs: {
593                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[1]) {
594                        let items: Vec<AudioCodec> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { AudioCodec::from_u8(*v as u8) } else { None } }).collect();
595                        Some(items)
596                    } else {
597                        None
598                    }
599                },
600                supported_sample_rates: {
601                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[2]) {
602                        let items: Vec<u32> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { Some(*v as u32) } else { None } }).collect();
603                        Some(items)
604                    } else {
605                        None
606                    }
607                },
608                supported_bit_depths: {
609                    if let Some(tlv::TlvItemValue::List(l)) = item.get(&[3]) {
610                        let items: Vec<u8> = l.iter().filter_map(|e| { if let tlv::TlvItemValue::Int(v) = &e.value { Some(*v as u8) } else { None } }).collect();
611                        Some(items)
612                    } else {
613                        None
614                    }
615                },
616        })
617    } else {
618        Err(anyhow::anyhow!("Expected struct fields"))
619    }
620}
621
622/// Decode TwoWayTalkSupport attribute (0x0009)
623pub fn decode_two_way_talk_support(inp: &tlv::TlvItemValue) -> anyhow::Result<TwoWayTalkSupportType> {
624    if let tlv::TlvItemValue::Int(v) = inp {
625        TwoWayTalkSupportType::from_u8(*v as u8).ok_or_else(|| anyhow::anyhow!("Invalid enum value"))
626    } else {
627        Err(anyhow::anyhow!("Expected Integer"))
628    }
629}
630
631/// Decode SnapshotCapabilities attribute (0x000A)
632pub fn decode_snapshot_capabilities(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<SnapshotCapabilities>> {
633    let mut res = Vec::new();
634    if let tlv::TlvItemValue::List(v) = inp {
635        for item in v {
636            res.push(SnapshotCapabilities {
637                resolution: {
638                    if let Some(nested_tlv) = item.get(&[0]) {
639                        if let tlv::TlvItemValue::List(_) = nested_tlv {
640                            let nested_item = tlv::TlvItem { tag: 0, value: nested_tlv.clone() };
641                            Some(VideoResolution {
642                width: nested_item.get_int(&[0]).map(|v| v as u16),
643                height: nested_item.get_int(&[1]).map(|v| v as u16),
644                            })
645                        } else {
646                            None
647                        }
648                    } else {
649                        None
650                    }
651                },
652                max_frame_rate: item.get_int(&[1]).map(|v| v as u16),
653                image_codec: item.get_int(&[2]).and_then(|v| ImageCodec::from_u8(v as u8)),
654                requires_encoded_pixels: item.get_bool(&[3]),
655                requires_hardware_encoder: item.get_bool(&[4]),
656            });
657        }
658    }
659    Ok(res)
660}
661
662/// Decode MaxNetworkBandwidth attribute (0x000B)
663pub fn decode_max_network_bandwidth(inp: &tlv::TlvItemValue) -> anyhow::Result<u32> {
664    if let tlv::TlvItemValue::Int(v) = inp {
665        Ok(*v as u32)
666    } else {
667        Err(anyhow::anyhow!("Expected UInt32"))
668    }
669}
670
671/// Decode CurrentFrameRate attribute (0x000C)
672pub fn decode_current_frame_rate(inp: &tlv::TlvItemValue) -> anyhow::Result<u16> {
673    if let tlv::TlvItemValue::Int(v) = inp {
674        Ok(*v as u16)
675    } else {
676        Err(anyhow::anyhow!("Expected UInt16"))
677    }
678}
679
680/// Decode HDRModeEnabled attribute (0x000D)
681pub fn decode_hdr_mode_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
682    if let tlv::TlvItemValue::Bool(v) = inp {
683        Ok(*v)
684    } else {
685        Err(anyhow::anyhow!("Expected Bool"))
686    }
687}
688
689/// Decode SupportedStreamUsages attribute (0x000E)
690pub fn decode_supported_stream_usages(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<u8>> {
691    let mut res = Vec::new();
692    if let tlv::TlvItemValue::List(v) = inp {
693        for item in v {
694            if let tlv::TlvItemValue::Int(i) = &item.value {
695                res.push(*i as u8);
696            }
697        }
698    }
699    Ok(res)
700}
701
702/// Decode AllocatedVideoStreams attribute (0x000F)
703pub fn decode_allocated_video_streams(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<VideoStream>> {
704    let mut res = Vec::new();
705    if let tlv::TlvItemValue::List(v) = inp {
706        for item in v {
707            res.push(VideoStream {
708                video_stream_id: item.get_int(&[0]).map(|v| v as u8),
709                stream_usage: item.get_int(&[1]).map(|v| v as u8),
710                video_codec: item.get_int(&[2]).and_then(|v| VideoCodec::from_u8(v as u8)),
711                min_frame_rate: item.get_int(&[3]).map(|v| v as u16),
712                max_frame_rate: item.get_int(&[4]).map(|v| v as u16),
713                min_resolution: {
714                    if let Some(nested_tlv) = item.get(&[5]) {
715                        if let tlv::TlvItemValue::List(_) = nested_tlv {
716                            let nested_item = tlv::TlvItem { tag: 5, value: nested_tlv.clone() };
717                            Some(VideoResolution {
718                width: nested_item.get_int(&[0]).map(|v| v as u16),
719                height: nested_item.get_int(&[1]).map(|v| v as u16),
720                            })
721                        } else {
722                            None
723                        }
724                    } else {
725                        None
726                    }
727                },
728                max_resolution: {
729                    if let Some(nested_tlv) = item.get(&[6]) {
730                        if let tlv::TlvItemValue::List(_) = nested_tlv {
731                            let nested_item = tlv::TlvItem { tag: 6, value: nested_tlv.clone() };
732                            Some(VideoResolution {
733                width: nested_item.get_int(&[0]).map(|v| v as u16),
734                height: nested_item.get_int(&[1]).map(|v| v as u16),
735                            })
736                        } else {
737                            None
738                        }
739                    } else {
740                        None
741                    }
742                },
743                min_bit_rate: item.get_int(&[7]).map(|v| v as u32),
744                max_bit_rate: item.get_int(&[8]).map(|v| v as u32),
745                key_frame_interval: item.get_int(&[9]).map(|v| v as u16),
746                watermark_enabled: item.get_bool(&[10]),
747                osd_enabled: item.get_bool(&[11]),
748                reference_count: item.get_int(&[12]).map(|v| v as u8),
749            });
750        }
751    }
752    Ok(res)
753}
754
755/// Decode AllocatedAudioStreams attribute (0x0010)
756pub fn decode_allocated_audio_streams(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<AudioStream>> {
757    let mut res = Vec::new();
758    if let tlv::TlvItemValue::List(v) = inp {
759        for item in v {
760            res.push(AudioStream {
761                audio_stream_id: item.get_int(&[0]).map(|v| v as u8),
762                stream_usage: item.get_int(&[1]).map(|v| v as u8),
763                audio_codec: item.get_int(&[2]).and_then(|v| AudioCodec::from_u8(v as u8)),
764                channel_count: item.get_int(&[3]).map(|v| v as u8),
765                sample_rate: item.get_int(&[4]).map(|v| v as u32),
766                bit_rate: item.get_int(&[5]).map(|v| v as u32),
767                bit_depth: item.get_int(&[6]).map(|v| v as u8),
768                reference_count: item.get_int(&[7]).map(|v| v as u8),
769            });
770        }
771    }
772    Ok(res)
773}
774
775/// Decode AllocatedSnapshotStreams attribute (0x0011)
776pub fn decode_allocated_snapshot_streams(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<SnapshotStream>> {
777    let mut res = Vec::new();
778    if let tlv::TlvItemValue::List(v) = inp {
779        for item in v {
780            res.push(SnapshotStream {
781                snapshot_stream_id: item.get_int(&[0]).map(|v| v as u8),
782                image_codec: item.get_int(&[1]).and_then(|v| ImageCodec::from_u8(v as u8)),
783                frame_rate: item.get_int(&[2]).map(|v| v as u16),
784                min_resolution: {
785                    if let Some(nested_tlv) = item.get(&[3]) {
786                        if let tlv::TlvItemValue::List(_) = nested_tlv {
787                            let nested_item = tlv::TlvItem { tag: 3, value: nested_tlv.clone() };
788                            Some(VideoResolution {
789                width: nested_item.get_int(&[0]).map(|v| v as u16),
790                height: nested_item.get_int(&[1]).map(|v| v as u16),
791                            })
792                        } else {
793                            None
794                        }
795                    } else {
796                        None
797                    }
798                },
799                max_resolution: {
800                    if let Some(nested_tlv) = item.get(&[4]) {
801                        if let tlv::TlvItemValue::List(_) = nested_tlv {
802                            let nested_item = tlv::TlvItem { tag: 4, value: nested_tlv.clone() };
803                            Some(VideoResolution {
804                width: nested_item.get_int(&[0]).map(|v| v as u16),
805                height: nested_item.get_int(&[1]).map(|v| v as u16),
806                            })
807                        } else {
808                            None
809                        }
810                    } else {
811                        None
812                    }
813                },
814                quality: item.get_int(&[5]).map(|v| v as u8),
815                reference_count: item.get_int(&[6]).map(|v| v as u8),
816                encoded_pixels: item.get_bool(&[7]),
817                hardware_encoder: item.get_bool(&[8]),
818                watermark_enabled: item.get_bool(&[9]),
819                osd_enabled: item.get_bool(&[10]),
820            });
821        }
822    }
823    Ok(res)
824}
825
826/// Decode StreamUsagePriorities attribute (0x0012)
827pub fn decode_stream_usage_priorities(inp: &tlv::TlvItemValue) -> anyhow::Result<Vec<u8>> {
828    let mut res = Vec::new();
829    if let tlv::TlvItemValue::List(v) = inp {
830        for item in v {
831            if let tlv::TlvItemValue::Int(i) = &item.value {
832                res.push(*i as u8);
833            }
834        }
835    }
836    Ok(res)
837}
838
839/// Decode SoftRecordingPrivacyModeEnabled attribute (0x0013)
840pub fn decode_soft_recording_privacy_mode_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
841    if let tlv::TlvItemValue::Bool(v) = inp {
842        Ok(*v)
843    } else {
844        Err(anyhow::anyhow!("Expected Bool"))
845    }
846}
847
848/// Decode SoftLivestreamPrivacyModeEnabled attribute (0x0014)
849pub fn decode_soft_livestream_privacy_mode_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
850    if let tlv::TlvItemValue::Bool(v) = inp {
851        Ok(*v)
852    } else {
853        Err(anyhow::anyhow!("Expected Bool"))
854    }
855}
856
857/// Decode HardPrivacyModeOn attribute (0x0015)
858pub fn decode_hard_privacy_mode_on(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
859    if let tlv::TlvItemValue::Bool(v) = inp {
860        Ok(*v)
861    } else {
862        Err(anyhow::anyhow!("Expected Bool"))
863    }
864}
865
866/// Decode NightVision attribute (0x0016)
867pub fn decode_night_vision(inp: &tlv::TlvItemValue) -> anyhow::Result<TriStateAuto> {
868    if let tlv::TlvItemValue::Int(v) = inp {
869        TriStateAuto::from_u8(*v as u8).ok_or_else(|| anyhow::anyhow!("Invalid enum value"))
870    } else {
871        Err(anyhow::anyhow!("Expected Integer"))
872    }
873}
874
875/// Decode NightVisionIllum attribute (0x0017)
876pub fn decode_night_vision_illum(inp: &tlv::TlvItemValue) -> anyhow::Result<TriStateAuto> {
877    if let tlv::TlvItemValue::Int(v) = inp {
878        TriStateAuto::from_u8(*v as u8).ok_or_else(|| anyhow::anyhow!("Invalid enum value"))
879    } else {
880        Err(anyhow::anyhow!("Expected Integer"))
881    }
882}
883
884/// Decode Viewport attribute (0x0018)
885pub fn decode_viewport(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
886    if let tlv::TlvItemValue::Int(v) = inp {
887        Ok(*v as u8)
888    } else {
889        Err(anyhow::anyhow!("Expected UInt8"))
890    }
891}
892
893/// Decode SpeakerMuted attribute (0x0019)
894pub fn decode_speaker_muted(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
895    if let tlv::TlvItemValue::Bool(v) = inp {
896        Ok(*v)
897    } else {
898        Err(anyhow::anyhow!("Expected Bool"))
899    }
900}
901
902/// Decode SpeakerVolumeLevel attribute (0x001A)
903pub fn decode_speaker_volume_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
904    if let tlv::TlvItemValue::Int(v) = inp {
905        Ok(*v as u8)
906    } else {
907        Err(anyhow::anyhow!("Expected UInt8"))
908    }
909}
910
911/// Decode SpeakerMaxLevel attribute (0x001B)
912pub fn decode_speaker_max_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
913    if let tlv::TlvItemValue::Int(v) = inp {
914        Ok(*v as u8)
915    } else {
916        Err(anyhow::anyhow!("Expected UInt8"))
917    }
918}
919
920/// Decode SpeakerMinLevel attribute (0x001C)
921pub fn decode_speaker_min_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
922    if let tlv::TlvItemValue::Int(v) = inp {
923        Ok(*v as u8)
924    } else {
925        Err(anyhow::anyhow!("Expected UInt8"))
926    }
927}
928
929/// Decode MicrophoneMuted attribute (0x001D)
930pub fn decode_microphone_muted(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
931    if let tlv::TlvItemValue::Bool(v) = inp {
932        Ok(*v)
933    } else {
934        Err(anyhow::anyhow!("Expected Bool"))
935    }
936}
937
938/// Decode MicrophoneVolumeLevel attribute (0x001E)
939pub fn decode_microphone_volume_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
940    if let tlv::TlvItemValue::Int(v) = inp {
941        Ok(*v as u8)
942    } else {
943        Err(anyhow::anyhow!("Expected UInt8"))
944    }
945}
946
947/// Decode MicrophoneMaxLevel attribute (0x001F)
948pub fn decode_microphone_max_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
949    if let tlv::TlvItemValue::Int(v) = inp {
950        Ok(*v as u8)
951    } else {
952        Err(anyhow::anyhow!("Expected UInt8"))
953    }
954}
955
956/// Decode MicrophoneMinLevel attribute (0x0020)
957pub fn decode_microphone_min_level(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
958    if let tlv::TlvItemValue::Int(v) = inp {
959        Ok(*v as u8)
960    } else {
961        Err(anyhow::anyhow!("Expected UInt8"))
962    }
963}
964
965/// Decode MicrophoneAGCEnabled attribute (0x0021)
966pub fn decode_microphone_agc_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
967    if let tlv::TlvItemValue::Bool(v) = inp {
968        Ok(*v)
969    } else {
970        Err(anyhow::anyhow!("Expected Bool"))
971    }
972}
973
974/// Decode ImageRotation attribute (0x0022)
975pub fn decode_image_rotation(inp: &tlv::TlvItemValue) -> anyhow::Result<u16> {
976    if let tlv::TlvItemValue::Int(v) = inp {
977        Ok(*v as u16)
978    } else {
979        Err(anyhow::anyhow!("Expected UInt16"))
980    }
981}
982
983/// Decode ImageFlipHorizontal attribute (0x0023)
984pub fn decode_image_flip_horizontal(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
985    if let tlv::TlvItemValue::Bool(v) = inp {
986        Ok(*v)
987    } else {
988        Err(anyhow::anyhow!("Expected Bool"))
989    }
990}
991
992/// Decode ImageFlipVertical attribute (0x0024)
993pub fn decode_image_flip_vertical(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
994    if let tlv::TlvItemValue::Bool(v) = inp {
995        Ok(*v)
996    } else {
997        Err(anyhow::anyhow!("Expected Bool"))
998    }
999}
1000
1001/// Decode LocalVideoRecordingEnabled attribute (0x0025)
1002pub fn decode_local_video_recording_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
1003    if let tlv::TlvItemValue::Bool(v) = inp {
1004        Ok(*v)
1005    } else {
1006        Err(anyhow::anyhow!("Expected Bool"))
1007    }
1008}
1009
1010/// Decode LocalSnapshotRecordingEnabled attribute (0x0026)
1011pub fn decode_local_snapshot_recording_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
1012    if let tlv::TlvItemValue::Bool(v) = inp {
1013        Ok(*v)
1014    } else {
1015        Err(anyhow::anyhow!("Expected Bool"))
1016    }
1017}
1018
1019/// Decode StatusLightEnabled attribute (0x0027)
1020pub fn decode_status_light_enabled(inp: &tlv::TlvItemValue) -> anyhow::Result<bool> {
1021    if let tlv::TlvItemValue::Bool(v) = inp {
1022        Ok(*v)
1023    } else {
1024        Err(anyhow::anyhow!("Expected Bool"))
1025    }
1026}
1027
1028/// Decode StatusLightBrightness attribute (0x0028)
1029pub fn decode_status_light_brightness(inp: &tlv::TlvItemValue) -> anyhow::Result<u8> {
1030    if let tlv::TlvItemValue::Int(v) = inp {
1031        Ok(*v as u8)
1032    } else {
1033        Err(anyhow::anyhow!("Expected UInt8"))
1034    }
1035}
1036
1037
1038// JSON dispatcher function
1039
1040/// Decode attribute value and return as JSON string
1041///
1042/// # Parameters
1043/// * `cluster_id` - The cluster identifier
1044/// * `attribute_id` - The attribute identifier
1045/// * `tlv_value` - The TLV value to decode
1046///
1047/// # Returns
1048/// JSON string representation of the decoded value or error
1049pub fn decode_attribute_json(cluster_id: u32, attribute_id: u32, tlv_value: &crate::tlv::TlvItemValue) -> String {
1050    // Verify this is the correct cluster
1051    if cluster_id != 0x0551 {
1052        return format!("{{\"error\": \"Invalid cluster ID. Expected 0x0551, got {}\"}}", cluster_id);
1053    }
1054
1055    match attribute_id {
1056        0x0000 => {
1057            match decode_max_concurrent_encoders(tlv_value) {
1058                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1059                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1060            }
1061        }
1062        0x0001 => {
1063            match decode_max_encoded_pixel_rate(tlv_value) {
1064                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1065                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1066            }
1067        }
1068        0x0002 => {
1069            match decode_video_sensor_params(tlv_value) {
1070                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1071                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1072            }
1073        }
1074        0x0003 => {
1075            match decode_night_vision_uses_infrared(tlv_value) {
1076                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1077                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1078            }
1079        }
1080        0x0004 => {
1081            match decode_min_viewport_resolution(tlv_value) {
1082                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1083                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1084            }
1085        }
1086        0x0005 => {
1087            match decode_rate_distortion_trade_off_points(tlv_value) {
1088                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1089                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1090            }
1091        }
1092        0x0006 => {
1093            match decode_max_content_buffer_size(tlv_value) {
1094                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1095                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1096            }
1097        }
1098        0x0007 => {
1099            match decode_microphone_capabilities(tlv_value) {
1100                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1101                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1102            }
1103        }
1104        0x0008 => {
1105            match decode_speaker_capabilities(tlv_value) {
1106                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1107                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1108            }
1109        }
1110        0x0009 => {
1111            match decode_two_way_talk_support(tlv_value) {
1112                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1113                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1114            }
1115        }
1116        0x000A => {
1117            match decode_snapshot_capabilities(tlv_value) {
1118                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1119                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1120            }
1121        }
1122        0x000B => {
1123            match decode_max_network_bandwidth(tlv_value) {
1124                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1125                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1126            }
1127        }
1128        0x000C => {
1129            match decode_current_frame_rate(tlv_value) {
1130                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1131                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1132            }
1133        }
1134        0x000D => {
1135            match decode_hdr_mode_enabled(tlv_value) {
1136                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1137                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1138            }
1139        }
1140        0x000E => {
1141            match decode_supported_stream_usages(tlv_value) {
1142                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1143                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1144            }
1145        }
1146        0x000F => {
1147            match decode_allocated_video_streams(tlv_value) {
1148                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1149                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1150            }
1151        }
1152        0x0010 => {
1153            match decode_allocated_audio_streams(tlv_value) {
1154                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1155                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1156            }
1157        }
1158        0x0011 => {
1159            match decode_allocated_snapshot_streams(tlv_value) {
1160                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1161                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1162            }
1163        }
1164        0x0012 => {
1165            match decode_stream_usage_priorities(tlv_value) {
1166                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1167                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1168            }
1169        }
1170        0x0013 => {
1171            match decode_soft_recording_privacy_mode_enabled(tlv_value) {
1172                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1173                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1174            }
1175        }
1176        0x0014 => {
1177            match decode_soft_livestream_privacy_mode_enabled(tlv_value) {
1178                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1179                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1180            }
1181        }
1182        0x0015 => {
1183            match decode_hard_privacy_mode_on(tlv_value) {
1184                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1185                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1186            }
1187        }
1188        0x0016 => {
1189            match decode_night_vision(tlv_value) {
1190                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1191                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1192            }
1193        }
1194        0x0017 => {
1195            match decode_night_vision_illum(tlv_value) {
1196                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1197                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1198            }
1199        }
1200        0x0018 => {
1201            match decode_viewport(tlv_value) {
1202                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1203                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1204            }
1205        }
1206        0x0019 => {
1207            match decode_speaker_muted(tlv_value) {
1208                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1209                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1210            }
1211        }
1212        0x001A => {
1213            match decode_speaker_volume_level(tlv_value) {
1214                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1215                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1216            }
1217        }
1218        0x001B => {
1219            match decode_speaker_max_level(tlv_value) {
1220                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1221                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1222            }
1223        }
1224        0x001C => {
1225            match decode_speaker_min_level(tlv_value) {
1226                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1227                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1228            }
1229        }
1230        0x001D => {
1231            match decode_microphone_muted(tlv_value) {
1232                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1233                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1234            }
1235        }
1236        0x001E => {
1237            match decode_microphone_volume_level(tlv_value) {
1238                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1239                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1240            }
1241        }
1242        0x001F => {
1243            match decode_microphone_max_level(tlv_value) {
1244                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1245                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1246            }
1247        }
1248        0x0020 => {
1249            match decode_microphone_min_level(tlv_value) {
1250                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1251                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1252            }
1253        }
1254        0x0021 => {
1255            match decode_microphone_agc_enabled(tlv_value) {
1256                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1257                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1258            }
1259        }
1260        0x0022 => {
1261            match decode_image_rotation(tlv_value) {
1262                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1263                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1264            }
1265        }
1266        0x0023 => {
1267            match decode_image_flip_horizontal(tlv_value) {
1268                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1269                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1270            }
1271        }
1272        0x0024 => {
1273            match decode_image_flip_vertical(tlv_value) {
1274                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1275                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1276            }
1277        }
1278        0x0025 => {
1279            match decode_local_video_recording_enabled(tlv_value) {
1280                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1281                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1282            }
1283        }
1284        0x0026 => {
1285            match decode_local_snapshot_recording_enabled(tlv_value) {
1286                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1287                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1288            }
1289        }
1290        0x0027 => {
1291            match decode_status_light_enabled(tlv_value) {
1292                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1293                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1294            }
1295        }
1296        0x0028 => {
1297            match decode_status_light_brightness(tlv_value) {
1298                Ok(value) => serde_json::to_string(&value).unwrap_or_else(|_| "null".to_string()),
1299                Err(e) => format!("{{\"error\": \"{}\"}}", e),
1300            }
1301        }
1302        _ => format!("{{\"error\": \"Unknown attribute ID: {}\"}}", attribute_id),
1303    }
1304}
1305
/// Get list of all attributes supported by this cluster
///
/// # Returns
/// Vector of tuples containing (attribute_id, attribute_name)
pub fn get_attribute_list() -> Vec<(u32, &'static str)> {
    // Static table of (id, name) pairs, in ascending attribute-id order.
    const ATTRIBUTES: &[(u32, &'static str)] = &[
        (0x0000, "MaxConcurrentEncoders"),
        (0x0001, "MaxEncodedPixelRate"),
        (0x0002, "VideoSensorParams"),
        (0x0003, "NightVisionUsesInfrared"),
        (0x0004, "MinViewportResolution"),
        (0x0005, "RateDistortionTradeOffPoints"),
        (0x0006, "MaxContentBufferSize"),
        (0x0007, "MicrophoneCapabilities"),
        (0x0008, "SpeakerCapabilities"),
        (0x0009, "TwoWayTalkSupport"),
        (0x000A, "SnapshotCapabilities"),
        (0x000B, "MaxNetworkBandwidth"),
        (0x000C, "CurrentFrameRate"),
        (0x000D, "HDRModeEnabled"),
        (0x000E, "SupportedStreamUsages"),
        (0x000F, "AllocatedVideoStreams"),
        (0x0010, "AllocatedAudioStreams"),
        (0x0011, "AllocatedSnapshotStreams"),
        (0x0012, "StreamUsagePriorities"),
        (0x0013, "SoftRecordingPrivacyModeEnabled"),
        (0x0014, "SoftLivestreamPrivacyModeEnabled"),
        (0x0015, "HardPrivacyModeOn"),
        (0x0016, "NightVision"),
        (0x0017, "NightVisionIllum"),
        (0x0018, "Viewport"),
        (0x0019, "SpeakerMuted"),
        (0x001A, "SpeakerVolumeLevel"),
        (0x001B, "SpeakerMaxLevel"),
        (0x001C, "SpeakerMinLevel"),
        (0x001D, "MicrophoneMuted"),
        (0x001E, "MicrophoneVolumeLevel"),
        (0x001F, "MicrophoneMaxLevel"),
        (0x0020, "MicrophoneMinLevel"),
        (0x0021, "MicrophoneAGCEnabled"),
        (0x0022, "ImageRotation"),
        (0x0023, "ImageFlipHorizontal"),
        (0x0024, "ImageFlipVertical"),
        (0x0025, "LocalVideoRecordingEnabled"),
        (0x0026, "LocalSnapshotRecordingEnabled"),
        (0x0027, "StatusLightEnabled"),
        (0x0028, "StatusLightBrightness"),
    ];
    ATTRIBUTES.to_vec()
}
1355
1356// Command listing
1357
/// List all commands accepted by this cluster as (command_id, name) pairs.
pub fn get_command_list() -> Vec<(u32, &'static str)> {
    const COMMANDS: [(u32, &'static str); 10] = [
        (0x00, "AudioStreamAllocate"),
        (0x02, "AudioStreamDeallocate"),
        (0x03, "VideoStreamAllocate"),
        (0x05, "VideoStreamModify"),
        (0x06, "VideoStreamDeallocate"),
        (0x07, "SnapshotStreamAllocate"),
        (0x09, "SnapshotStreamModify"),
        (0x0A, "SnapshotStreamDeallocate"),
        (0x0B, "SetStreamPriorities"),
        (0x0C, "CaptureSnapshot"),
    ];
    COMMANDS.to_vec()
}
1372
/// Look up the human-readable name for a command id.
///
/// Returns `None` for ids this cluster does not define (note the gaps:
/// 0x01, 0x04, and 0x08 are not assigned).
pub fn get_command_name(cmd_id: u32) -> Option<&'static str> {
    const COMMANDS: &[(u32, &'static str)] = &[
        (0x00, "AudioStreamAllocate"),
        (0x02, "AudioStreamDeallocate"),
        (0x03, "VideoStreamAllocate"),
        (0x05, "VideoStreamModify"),
        (0x06, "VideoStreamDeallocate"),
        (0x07, "SnapshotStreamAllocate"),
        (0x09, "SnapshotStreamModify"),
        (0x0A, "SnapshotStreamDeallocate"),
        (0x0B, "SetStreamPriorities"),
        (0x0C, "CaptureSnapshot"),
    ];
    COMMANDS
        .iter()
        .find(|&&(id, _)| id == cmd_id)
        .map(|&(_, name)| name)
}
1388
1389pub fn get_command_schema(cmd_id: u32) -> Option<Vec<crate::clusters::codec::CommandField>> {
1390    match cmd_id {
1391        0x00 => Some(vec![
1392            crate::clusters::codec::CommandField { tag: 0, name: "stream_usage", kind: crate::clusters::codec::FieldKind::U8, optional: false, nullable: false },
1393            crate::clusters::codec::CommandField { tag: 1, name: "audio_codec", kind: crate::clusters::codec::FieldKind::Enum { name: "AudioCodec", variants: &[(0, "Opus"), (1, "AacLc")] }, optional: false, nullable: false },
1394            crate::clusters::codec::CommandField { tag: 2, name: "channel_count", kind: crate::clusters::codec::FieldKind::U8, optional: false, nullable: false },
1395            crate::clusters::codec::CommandField { tag: 3, name: "sample_rate", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1396            crate::clusters::codec::CommandField { tag: 4, name: "bit_rate", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1397            crate::clusters::codec::CommandField { tag: 5, name: "bit_depth", kind: crate::clusters::codec::FieldKind::U8, optional: false, nullable: false },
1398        ]),
1399        0x02 => Some(vec![
1400            crate::clusters::codec::CommandField { tag: 0, name: "audio_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1401        ]),
1402        0x03 => Some(vec![
1403            crate::clusters::codec::CommandField { tag: 0, name: "stream_usage", kind: crate::clusters::codec::FieldKind::U8, optional: false, nullable: false },
1404            crate::clusters::codec::CommandField { tag: 1, name: "video_codec", kind: crate::clusters::codec::FieldKind::Enum { name: "VideoCodec", variants: &[(0, "H264"), (1, "Hevc"), (2, "Vvc"), (3, "Av1")] }, optional: false, nullable: false },
1405            crate::clusters::codec::CommandField { tag: 2, name: "min_frame_rate", kind: crate::clusters::codec::FieldKind::U16, optional: false, nullable: false },
1406            crate::clusters::codec::CommandField { tag: 3, name: "max_frame_rate", kind: crate::clusters::codec::FieldKind::U16, optional: false, nullable: false },
1407            crate::clusters::codec::CommandField { tag: 4, name: "min_resolution", kind: crate::clusters::codec::FieldKind::Struct { name: "VideoResolutionStruct" }, optional: false, nullable: false },
1408            crate::clusters::codec::CommandField { tag: 5, name: "max_resolution", kind: crate::clusters::codec::FieldKind::Struct { name: "VideoResolutionStruct" }, optional: false, nullable: false },
1409            crate::clusters::codec::CommandField { tag: 6, name: "min_bit_rate", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1410            crate::clusters::codec::CommandField { tag: 7, name: "max_bit_rate", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1411            crate::clusters::codec::CommandField { tag: 8, name: "key_frame_interval", kind: crate::clusters::codec::FieldKind::U16, optional: false, nullable: false },
1412            crate::clusters::codec::CommandField { tag: 9, name: "watermark_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: false, nullable: false },
1413            crate::clusters::codec::CommandField { tag: 10, name: "osd_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: false, nullable: false },
1414        ]),
1415        0x05 => Some(vec![
1416            crate::clusters::codec::CommandField { tag: 0, name: "video_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1417            crate::clusters::codec::CommandField { tag: 1, name: "watermark_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: true, nullable: false },
1418            crate::clusters::codec::CommandField { tag: 2, name: "osd_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: true, nullable: false },
1419        ]),
1420        0x06 => Some(vec![
1421            crate::clusters::codec::CommandField { tag: 0, name: "video_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1422        ]),
1423        0x07 => Some(vec![
1424            crate::clusters::codec::CommandField { tag: 0, name: "image_codec", kind: crate::clusters::codec::FieldKind::Enum { name: "ImageCodec", variants: &[(0, "Jpeg"), (1, "Heic")] }, optional: false, nullable: false },
1425            crate::clusters::codec::CommandField { tag: 1, name: "max_frame_rate", kind: crate::clusters::codec::FieldKind::U16, optional: false, nullable: false },
1426            crate::clusters::codec::CommandField { tag: 2, name: "min_resolution", kind: crate::clusters::codec::FieldKind::Struct { name: "VideoResolutionStruct" }, optional: false, nullable: false },
1427            crate::clusters::codec::CommandField { tag: 3, name: "max_resolution", kind: crate::clusters::codec::FieldKind::Struct { name: "VideoResolutionStruct" }, optional: false, nullable: false },
1428            crate::clusters::codec::CommandField { tag: 4, name: "quality", kind: crate::clusters::codec::FieldKind::U8, optional: false, nullable: false },
1429            crate::clusters::codec::CommandField { tag: 5, name: "watermark_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: false, nullable: false },
1430            crate::clusters::codec::CommandField { tag: 6, name: "osd_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: false, nullable: false },
1431        ]),
1432        0x09 => Some(vec![
1433            crate::clusters::codec::CommandField { tag: 0, name: "snapshot_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1434            crate::clusters::codec::CommandField { tag: 1, name: "watermark_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: true, nullable: false },
1435            crate::clusters::codec::CommandField { tag: 2, name: "osd_enabled", kind: crate::clusters::codec::FieldKind::Bool, optional: true, nullable: false },
1436        ]),
1437        0x0A => Some(vec![
1438            crate::clusters::codec::CommandField { tag: 0, name: "snapshot_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: false },
1439        ]),
1440        0x0B => Some(vec![
1441            crate::clusters::codec::CommandField { tag: 0, name: "stream_priorities", kind: crate::clusters::codec::FieldKind::List { entry_type: "StreamUsageEnum" }, optional: false, nullable: false },
1442        ]),
1443        0x0C => Some(vec![
1444            crate::clusters::codec::CommandField { tag: 0, name: "snapshot_stream_id", kind: crate::clusters::codec::FieldKind::U32, optional: false, nullable: true },
1445            crate::clusters::codec::CommandField { tag: 1, name: "requested_resolution", kind: crate::clusters::codec::FieldKind::Struct { name: "VideoResolutionStruct" }, optional: false, nullable: false },
1446        ]),
1447        _ => None,
1448    }
1449}
1450
1451pub fn encode_command_json(cmd_id: u32, args: &serde_json::Value) -> anyhow::Result<Vec<u8>> {
1452    match cmd_id {
1453        0x00 => {
1454        let stream_usage = crate::clusters::codec::json_util::get_u8(args, "stream_usage")?;
1455        let audio_codec = {
1456            let n = crate::clusters::codec::json_util::get_u64(args, "audio_codec")?;
1457            AudioCodec::from_u8(n as u8).ok_or_else(|| anyhow::anyhow!("invalid AudioCodec: {}", n))?
1458        };
1459        let channel_count = crate::clusters::codec::json_util::get_u8(args, "channel_count")?;
1460        let sample_rate = crate::clusters::codec::json_util::get_u32(args, "sample_rate")?;
1461        let bit_rate = crate::clusters::codec::json_util::get_u32(args, "bit_rate")?;
1462        let bit_depth = crate::clusters::codec::json_util::get_u8(args, "bit_depth")?;
1463        encode_audio_stream_allocate(stream_usage, audio_codec, channel_count, sample_rate, bit_rate, bit_depth)
1464        }
1465        0x02 => {
1466        let audio_stream_id = crate::clusters::codec::json_util::get_u8(args, "audio_stream_id")?;
1467        encode_audio_stream_deallocate(audio_stream_id)
1468        }
1469        0x03 => Err(anyhow::anyhow!("command \"VideoStreamAllocate\" has complex args: use raw mode")),
1470        0x05 => {
1471        let video_stream_id = crate::clusters::codec::json_util::get_u8(args, "video_stream_id")?;
1472        let watermark_enabled = crate::clusters::codec::json_util::get_bool(args, "watermark_enabled")?;
1473        let osd_enabled = crate::clusters::codec::json_util::get_bool(args, "osd_enabled")?;
1474        encode_video_stream_modify(video_stream_id, watermark_enabled, osd_enabled)
1475        }
1476        0x06 => {
1477        let video_stream_id = crate::clusters::codec::json_util::get_u8(args, "video_stream_id")?;
1478        encode_video_stream_deallocate(video_stream_id)
1479        }
1480        0x07 => Err(anyhow::anyhow!("command \"SnapshotStreamAllocate\" has complex args: use raw mode")),
1481        0x09 => {
1482        let snapshot_stream_id = crate::clusters::codec::json_util::get_u8(args, "snapshot_stream_id")?;
1483        let watermark_enabled = crate::clusters::codec::json_util::get_bool(args, "watermark_enabled")?;
1484        let osd_enabled = crate::clusters::codec::json_util::get_bool(args, "osd_enabled")?;
1485        encode_snapshot_stream_modify(snapshot_stream_id, watermark_enabled, osd_enabled)
1486        }
1487        0x0A => {
1488        let snapshot_stream_id = crate::clusters::codec::json_util::get_u8(args, "snapshot_stream_id")?;
1489        encode_snapshot_stream_deallocate(snapshot_stream_id)
1490        }
1491        0x0B => Err(anyhow::anyhow!("command \"SetStreamPriorities\" has complex args: use raw mode")),
1492        0x0C => Err(anyhow::anyhow!("command \"CaptureSnapshot\" has complex args: use raw mode")),
1493        _ => Err(anyhow::anyhow!("unknown command ID: 0x{:02X}", cmd_id)),
1494    }
1495}
1496
/// Response payload of the `AudioStreamAllocate` command (command ID 0x01).
#[derive(Debug, serde::Serialize)]
pub struct AudioStreamAllocateResponse {
    // ID of the allocated audio stream; `None` when the field was absent
    // from the response TLV.
    pub audio_stream_id: Option<u8>,
}
1501
/// Response payload of the `VideoStreamAllocate` command (command ID 0x04).
#[derive(Debug, serde::Serialize)]
pub struct VideoStreamAllocateResponse {
    // ID of the allocated video stream; `None` when the field was absent
    // from the response TLV.
    pub video_stream_id: Option<u8>,
}
1506
/// Response payload of the `SnapshotStreamAllocate` command (command ID 0x08).
#[derive(Debug, serde::Serialize)]
pub struct SnapshotStreamAllocateResponse {
    // ID of the allocated snapshot stream; `None` when the field was absent
    // from the response TLV.
    pub snapshot_stream_id: Option<u8>,
}
1511
/// Response payload of the `CaptureSnapshot` command (command ID 0x0D).
#[derive(Debug, serde::Serialize)]
pub struct CaptureSnapshotResponse {
    // Raw image bytes; rendered as a hex string when serialized to JSON.
    #[serde(serialize_with = "serialize_opt_bytes_as_hex")]
    pub data: Option<Vec<u8>>,
    // Codec the image is encoded with (JPEG or HEIC per `ImageCodec`).
    pub image_codec: Option<ImageCodec>,
    // Resolution of the captured image.
    pub resolution: Option<VideoResolution>,
}
1519
1520// Command response decoders
1521
1522/// Decode AudioStreamAllocateResponse command response (01)
1523pub fn decode_audio_stream_allocate_response(inp: &tlv::TlvItemValue) -> anyhow::Result<AudioStreamAllocateResponse> {
1524    if let tlv::TlvItemValue::List(_fields) = inp {
1525        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
1526        Ok(AudioStreamAllocateResponse {
1527                audio_stream_id: item.get_int(&[0]).map(|v| v as u8),
1528        })
1529    } else {
1530        Err(anyhow::anyhow!("Expected struct fields"))
1531    }
1532}
1533
1534/// Decode VideoStreamAllocateResponse command response (04)
1535pub fn decode_video_stream_allocate_response(inp: &tlv::TlvItemValue) -> anyhow::Result<VideoStreamAllocateResponse> {
1536    if let tlv::TlvItemValue::List(_fields) = inp {
1537        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
1538        Ok(VideoStreamAllocateResponse {
1539                video_stream_id: item.get_int(&[0]).map(|v| v as u8),
1540        })
1541    } else {
1542        Err(anyhow::anyhow!("Expected struct fields"))
1543    }
1544}
1545
1546/// Decode SnapshotStreamAllocateResponse command response (08)
1547pub fn decode_snapshot_stream_allocate_response(inp: &tlv::TlvItemValue) -> anyhow::Result<SnapshotStreamAllocateResponse> {
1548    if let tlv::TlvItemValue::List(_fields) = inp {
1549        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
1550        Ok(SnapshotStreamAllocateResponse {
1551                snapshot_stream_id: item.get_int(&[0]).map(|v| v as u8),
1552        })
1553    } else {
1554        Err(anyhow::anyhow!("Expected struct fields"))
1555    }
1556}
1557
1558/// Decode CaptureSnapshotResponse command response (0D)
1559pub fn decode_capture_snapshot_response(inp: &tlv::TlvItemValue) -> anyhow::Result<CaptureSnapshotResponse> {
1560    if let tlv::TlvItemValue::List(_fields) = inp {
1561        let item = tlv::TlvItem { tag: 0, value: inp.clone() };
1562        Ok(CaptureSnapshotResponse {
1563                data: item.get_octet_string_owned(&[0]),
1564                image_codec: item.get_int(&[1]).and_then(|v| ImageCodec::from_u8(v as u8)),
1565                resolution: {
1566                    if let Some(nested_tlv) = item.get(&[2]) {
1567                        if let tlv::TlvItemValue::List(_) = nested_tlv {
1568                            let nested_item = tlv::TlvItem { tag: 2, value: nested_tlv.clone() };
1569                            Some(VideoResolution {
1570                width: nested_item.get_int(&[0]).map(|v| v as u16),
1571                height: nested_item.get_int(&[1]).map(|v| v as u16),
1572                            })
1573                        } else {
1574                            None
1575                        }
1576                    } else {
1577                        None
1578                    }
1579                },
1580        })
1581    } else {
1582        Err(anyhow::anyhow!("Expected struct fields"))
1583    }
1584}
1585
1586// Typed facade (invokes + reads)
1587
1588/// Invoke `AudioStreamAllocate` command on cluster `Camera AV Stream Management`.
1589pub async fn audio_stream_allocate(conn: &crate::controller::Connection, endpoint: u16, stream_usage: u8, audio_codec: AudioCodec, channel_count: u8, sample_rate: u32, bit_rate: u32, bit_depth: u8) -> anyhow::Result<AudioStreamAllocateResponse> {
1590    let tlv = conn.invoke_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_AUDIOSTREAMALLOCATE, &encode_audio_stream_allocate(stream_usage, audio_codec, channel_count, sample_rate, bit_rate, bit_depth)?).await?;
1591    decode_audio_stream_allocate_response(&tlv)
1592}
1593
1594/// Invoke `AudioStreamDeallocate` command on cluster `Camera AV Stream Management`.
1595pub async fn audio_stream_deallocate(conn: &crate::controller::Connection, endpoint: u16, audio_stream_id: u8) -> anyhow::Result<()> {
1596    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_AUDIOSTREAMDEALLOCATE, &encode_audio_stream_deallocate(audio_stream_id)?).await?;
1597    Ok(())
1598}
1599
1600/// Invoke `VideoStreamAllocate` command on cluster `Camera AV Stream Management`.
1601pub async fn video_stream_allocate(conn: &crate::controller::Connection, endpoint: u16, params: VideoStreamAllocateParams) -> anyhow::Result<VideoStreamAllocateResponse> {
1602    let tlv = conn.invoke_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_VIDEOSTREAMALLOCATE, &encode_video_stream_allocate(params)?).await?;
1603    decode_video_stream_allocate_response(&tlv)
1604}
1605
1606/// Invoke `VideoStreamModify` command on cluster `Camera AV Stream Management`.
1607pub async fn video_stream_modify(conn: &crate::controller::Connection, endpoint: u16, video_stream_id: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<()> {
1608    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_VIDEOSTREAMMODIFY, &encode_video_stream_modify(video_stream_id, watermark_enabled, osd_enabled)?).await?;
1609    Ok(())
1610}
1611
1612/// Invoke `VideoStreamDeallocate` command on cluster `Camera AV Stream Management`.
1613pub async fn video_stream_deallocate(conn: &crate::controller::Connection, endpoint: u16, video_stream_id: u8) -> anyhow::Result<()> {
1614    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_VIDEOSTREAMDEALLOCATE, &encode_video_stream_deallocate(video_stream_id)?).await?;
1615    Ok(())
1616}
1617
1618/// Invoke `SnapshotStreamAllocate` command on cluster `Camera AV Stream Management`.
1619pub async fn snapshot_stream_allocate(conn: &crate::controller::Connection, endpoint: u16, image_codec: ImageCodec, max_frame_rate: u16, min_resolution: VideoResolution, max_resolution: VideoResolution, quality: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<SnapshotStreamAllocateResponse> {
1620    let tlv = conn.invoke_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_SNAPSHOTSTREAMALLOCATE, &encode_snapshot_stream_allocate(image_codec, max_frame_rate, min_resolution, max_resolution, quality, watermark_enabled, osd_enabled)?).await?;
1621    decode_snapshot_stream_allocate_response(&tlv)
1622}
1623
1624/// Invoke `SnapshotStreamModify` command on cluster `Camera AV Stream Management`.
1625pub async fn snapshot_stream_modify(conn: &crate::controller::Connection, endpoint: u16, snapshot_stream_id: u8, watermark_enabled: bool, osd_enabled: bool) -> anyhow::Result<()> {
1626    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_SNAPSHOTSTREAMMODIFY, &encode_snapshot_stream_modify(snapshot_stream_id, watermark_enabled, osd_enabled)?).await?;
1627    Ok(())
1628}
1629
1630/// Invoke `SnapshotStreamDeallocate` command on cluster `Camera AV Stream Management`.
1631pub async fn snapshot_stream_deallocate(conn: &crate::controller::Connection, endpoint: u16, snapshot_stream_id: u8) -> anyhow::Result<()> {
1632    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_SNAPSHOTSTREAMDEALLOCATE, &encode_snapshot_stream_deallocate(snapshot_stream_id)?).await?;
1633    Ok(())
1634}
1635
1636/// Invoke `SetStreamPriorities` command on cluster `Camera AV Stream Management`.
1637pub async fn set_stream_priorities(conn: &crate::controller::Connection, endpoint: u16, stream_priorities: Vec<u8>) -> anyhow::Result<()> {
1638    conn.invoke_request(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_SETSTREAMPRIORITIES, &encode_set_stream_priorities(stream_priorities)?).await?;
1639    Ok(())
1640}
1641
1642/// Invoke `CaptureSnapshot` command on cluster `Camera AV Stream Management`.
1643pub async fn capture_snapshot(conn: &crate::controller::Connection, endpoint: u16, snapshot_stream_id: Option<u8>, requested_resolution: VideoResolution) -> anyhow::Result<CaptureSnapshotResponse> {
1644    let tlv = conn.invoke_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_CMD_ID_CAPTURESNAPSHOT, &encode_capture_snapshot(snapshot_stream_id, requested_resolution)?).await?;
1645    decode_capture_snapshot_response(&tlv)
1646}
1647
1648/// Read `MaxConcurrentEncoders` attribute from cluster `Camera AV Stream Management`.
1649pub async fn read_max_concurrent_encoders(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1650    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MAXCONCURRENTENCODERS).await?;
1651    decode_max_concurrent_encoders(&tlv)
1652}
1653
1654/// Read `MaxEncodedPixelRate` attribute from cluster `Camera AV Stream Management`.
1655pub async fn read_max_encoded_pixel_rate(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u32> {
1656    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MAXENCODEDPIXELRATE).await?;
1657    decode_max_encoded_pixel_rate(&tlv)
1658}
1659
1660/// Read `VideoSensorParams` attribute from cluster `Camera AV Stream Management`.
1661pub async fn read_video_sensor_params(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<VideoSensorParams> {
1662    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_VIDEOSENSORPARAMS).await?;
1663    decode_video_sensor_params(&tlv)
1664}
1665
1666/// Read `NightVisionUsesInfrared` attribute from cluster `Camera AV Stream Management`.
1667pub async fn read_night_vision_uses_infrared(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1668    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_NIGHTVISIONUSESINFRARED).await?;
1669    decode_night_vision_uses_infrared(&tlv)
1670}
1671
1672/// Read `MinViewportResolution` attribute from cluster `Camera AV Stream Management`.
1673pub async fn read_min_viewport_resolution(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<VideoResolution> {
1674    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MINVIEWPORTRESOLUTION).await?;
1675    decode_min_viewport_resolution(&tlv)
1676}
1677
1678/// Read `RateDistortionTradeOffPoints` attribute from cluster `Camera AV Stream Management`.
1679pub async fn read_rate_distortion_trade_off_points(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<RateDistortionTradeOffPoints>> {
1680    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_RATEDISTORTIONTRADEOFFPOINTS).await?;
1681    decode_rate_distortion_trade_off_points(&tlv)
1682}
1683
1684/// Read `MaxContentBufferSize` attribute from cluster `Camera AV Stream Management`.
1685pub async fn read_max_content_buffer_size(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u32> {
1686    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MAXCONTENTBUFFERSIZE).await?;
1687    decode_max_content_buffer_size(&tlv)
1688}
1689
1690/// Read `MicrophoneCapabilities` attribute from cluster `Camera AV Stream Management`.
1691pub async fn read_microphone_capabilities(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<AudioCapabilities> {
1692    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONECAPABILITIES).await?;
1693    decode_microphone_capabilities(&tlv)
1694}
1695
1696/// Read `SpeakerCapabilities` attribute from cluster `Camera AV Stream Management`.
1697pub async fn read_speaker_capabilities(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<AudioCapabilities> {
1698    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SPEAKERCAPABILITIES).await?;
1699    decode_speaker_capabilities(&tlv)
1700}
1701
1702/// Read `TwoWayTalkSupport` attribute from cluster `Camera AV Stream Management`.
1703pub async fn read_two_way_talk_support(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<TwoWayTalkSupportType> {
1704    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_TWOWAYTALKSUPPORT).await?;
1705    decode_two_way_talk_support(&tlv)
1706}
1707
1708/// Read `SnapshotCapabilities` attribute from cluster `Camera AV Stream Management`.
1709pub async fn read_snapshot_capabilities(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<SnapshotCapabilities>> {
1710    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SNAPSHOTCAPABILITIES).await?;
1711    decode_snapshot_capabilities(&tlv)
1712}
1713
1714/// Read `MaxNetworkBandwidth` attribute from cluster `Camera AV Stream Management`.
1715pub async fn read_max_network_bandwidth(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u32> {
1716    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MAXNETWORKBANDWIDTH).await?;
1717    decode_max_network_bandwidth(&tlv)
1718}
1719
1720/// Read `CurrentFrameRate` attribute from cluster `Camera AV Stream Management`.
1721pub async fn read_current_frame_rate(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u16> {
1722    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_CURRENTFRAMERATE).await?;
1723    decode_current_frame_rate(&tlv)
1724}
1725
1726/// Read `HDRModeEnabled` attribute from cluster `Camera AV Stream Management`.
1727pub async fn read_hdr_mode_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1728    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_HDRMODEENABLED).await?;
1729    decode_hdr_mode_enabled(&tlv)
1730}
1731
1732/// Read `SupportedStreamUsages` attribute from cluster `Camera AV Stream Management`.
1733pub async fn read_supported_stream_usages(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<u8>> {
1734    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SUPPORTEDSTREAMUSAGES).await?;
1735    decode_supported_stream_usages(&tlv)
1736}
1737
1738/// Read `AllocatedVideoStreams` attribute from cluster `Camera AV Stream Management`.
1739pub async fn read_allocated_video_streams(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<VideoStream>> {
1740    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_ALLOCATEDVIDEOSTREAMS).await?;
1741    decode_allocated_video_streams(&tlv)
1742}
1743
1744/// Read `AllocatedAudioStreams` attribute from cluster `Camera AV Stream Management`.
1745pub async fn read_allocated_audio_streams(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<AudioStream>> {
1746    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_ALLOCATEDAUDIOSTREAMS).await?;
1747    decode_allocated_audio_streams(&tlv)
1748}
1749
1750/// Read `AllocatedSnapshotStreams` attribute from cluster `Camera AV Stream Management`.
1751pub async fn read_allocated_snapshot_streams(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<SnapshotStream>> {
1752    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_ALLOCATEDSNAPSHOTSTREAMS).await?;
1753    decode_allocated_snapshot_streams(&tlv)
1754}
1755
1756/// Read `StreamUsagePriorities` attribute from cluster `Camera AV Stream Management`.
1757pub async fn read_stream_usage_priorities(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<Vec<u8>> {
1758    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_STREAMUSAGEPRIORITIES).await?;
1759    decode_stream_usage_priorities(&tlv)
1760}
1761
1762/// Read `SoftRecordingPrivacyModeEnabled` attribute from cluster `Camera AV Stream Management`.
1763pub async fn read_soft_recording_privacy_mode_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1764    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SOFTRECORDINGPRIVACYMODEENABLED).await?;
1765    decode_soft_recording_privacy_mode_enabled(&tlv)
1766}
1767
1768/// Read `SoftLivestreamPrivacyModeEnabled` attribute from cluster `Camera AV Stream Management`.
1769pub async fn read_soft_livestream_privacy_mode_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1770    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SOFTLIVESTREAMPRIVACYMODEENABLED).await?;
1771    decode_soft_livestream_privacy_mode_enabled(&tlv)
1772}
1773
1774/// Read `HardPrivacyModeOn` attribute from cluster `Camera AV Stream Management`.
1775pub async fn read_hard_privacy_mode_on(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1776    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_HARDPRIVACYMODEON).await?;
1777    decode_hard_privacy_mode_on(&tlv)
1778}
1779
1780/// Read `NightVision` attribute from cluster `Camera AV Stream Management`.
1781pub async fn read_night_vision(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<TriStateAuto> {
1782    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_NIGHTVISION).await?;
1783    decode_night_vision(&tlv)
1784}
1785
1786/// Read `NightVisionIllum` attribute from cluster `Camera AV Stream Management`.
1787pub async fn read_night_vision_illum(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<TriStateAuto> {
1788    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_NIGHTVISIONILLUM).await?;
1789    decode_night_vision_illum(&tlv)
1790}
1791
1792/// Read `Viewport` attribute from cluster `Camera AV Stream Management`.
1793pub async fn read_viewport(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1794    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_VIEWPORT).await?;
1795    decode_viewport(&tlv)
1796}
1797
1798/// Read `SpeakerMuted` attribute from cluster `Camera AV Stream Management`.
1799pub async fn read_speaker_muted(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1800    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SPEAKERMUTED).await?;
1801    decode_speaker_muted(&tlv)
1802}
1803
1804/// Read `SpeakerVolumeLevel` attribute from cluster `Camera AV Stream Management`.
1805pub async fn read_speaker_volume_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1806    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SPEAKERVOLUMELEVEL).await?;
1807    decode_speaker_volume_level(&tlv)
1808}
1809
1810/// Read `SpeakerMaxLevel` attribute from cluster `Camera AV Stream Management`.
1811pub async fn read_speaker_max_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1812    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SPEAKERMAXLEVEL).await?;
1813    decode_speaker_max_level(&tlv)
1814}
1815
1816/// Read `SpeakerMinLevel` attribute from cluster `Camera AV Stream Management`.
1817pub async fn read_speaker_min_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1818    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_SPEAKERMINLEVEL).await?;
1819    decode_speaker_min_level(&tlv)
1820}
1821
1822/// Read `MicrophoneMuted` attribute from cluster `Camera AV Stream Management`.
1823pub async fn read_microphone_muted(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1824    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONEMUTED).await?;
1825    decode_microphone_muted(&tlv)
1826}
1827
1828/// Read `MicrophoneVolumeLevel` attribute from cluster `Camera AV Stream Management`.
1829pub async fn read_microphone_volume_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1830    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONEVOLUMELEVEL).await?;
1831    decode_microphone_volume_level(&tlv)
1832}
1833
1834/// Read `MicrophoneMaxLevel` attribute from cluster `Camera AV Stream Management`.
1835pub async fn read_microphone_max_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1836    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONEMAXLEVEL).await?;
1837    decode_microphone_max_level(&tlv)
1838}
1839
1840/// Read `MicrophoneMinLevel` attribute from cluster `Camera AV Stream Management`.
1841pub async fn read_microphone_min_level(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1842    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONEMINLEVEL).await?;
1843    decode_microphone_min_level(&tlv)
1844}
1845
1846/// Read `MicrophoneAGCEnabled` attribute from cluster `Camera AV Stream Management`.
1847pub async fn read_microphone_agc_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1848    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_MICROPHONEAGCENABLED).await?;
1849    decode_microphone_agc_enabled(&tlv)
1850}
1851
1852/// Read `ImageRotation` attribute from cluster `Camera AV Stream Management`.
1853pub async fn read_image_rotation(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u16> {
1854    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_IMAGEROTATION).await?;
1855    decode_image_rotation(&tlv)
1856}
1857
1858/// Read `ImageFlipHorizontal` attribute from cluster `Camera AV Stream Management`.
1859pub async fn read_image_flip_horizontal(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1860    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_IMAGEFLIPHORIZONTAL).await?;
1861    decode_image_flip_horizontal(&tlv)
1862}
1863
1864/// Read `ImageFlipVertical` attribute from cluster `Camera AV Stream Management`.
1865pub async fn read_image_flip_vertical(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1866    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_IMAGEFLIPVERTICAL).await?;
1867    decode_image_flip_vertical(&tlv)
1868}
1869
1870/// Read `LocalVideoRecordingEnabled` attribute from cluster `Camera AV Stream Management`.
1871pub async fn read_local_video_recording_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1872    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_LOCALVIDEORECORDINGENABLED).await?;
1873    decode_local_video_recording_enabled(&tlv)
1874}
1875
1876/// Read `LocalSnapshotRecordingEnabled` attribute from cluster `Camera AV Stream Management`.
1877pub async fn read_local_snapshot_recording_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1878    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_LOCALSNAPSHOTRECORDINGENABLED).await?;
1879    decode_local_snapshot_recording_enabled(&tlv)
1880}
1881
1882/// Read `StatusLightEnabled` attribute from cluster `Camera AV Stream Management`.
1883pub async fn read_status_light_enabled(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<bool> {
1884    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_STATUSLIGHTENABLED).await?;
1885    decode_status_light_enabled(&tlv)
1886}
1887
1888/// Read `StatusLightBrightness` attribute from cluster `Camera AV Stream Management`.
1889pub async fn read_status_light_brightness(conn: &crate::controller::Connection, endpoint: u16) -> anyhow::Result<u8> {
1890    let tlv = conn.read_request2(endpoint, crate::clusters::defs::CLUSTER_ID_CAMERA_AV_STREAM_MANAGEMENT, crate::clusters::defs::CLUSTER_CAMERA_AV_STREAM_MANAGEMENT_ATTR_ID_STATUSLIGHTBRIGHTNESS).await?;
1891    decode_status_light_brightness(&tlv)
1892}
1893