Skip to content

Commit 26c6aa5

Browse files
committed
fix: resolve duplicate fields and handlers after rebase
1 parent bee6579 commit 26c6aa5

19 files changed

Lines changed: 207 additions & 185 deletions

src/app/handlers/depth_camera.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,9 @@ use std::time::{Duration, Instant};
1818
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
1919
use tracing::{debug, info, warn};
2020

21-
use crate::app::state::{AppModel, Message};
2221
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
2322
use crate::app::state::SceneViewMode;
23+
use crate::app::state::{AppModel, Message};
2424
use cosmic::Task;
2525

2626
/// Set up GPU shader registration data from device calibration

src/app/mod.rs

Lines changed: 40 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -329,8 +329,6 @@ impl cosmic::Application for AppModel {
329329
privacy_cover_closed: false,
330330
// Kinect state (device, motor, calibration, native backend)
331331
kinect: crate::app::state::KinectState::default(),
332-
// Motor/PTZ controls
333-
motor_picker_visible: false,
334332
// Depth visualization settings
335333
depth_viz: crate::app::state::DepthVisualizationState::default(),
336334
// 3D preview state (rotation, zoom, rendering)
@@ -653,7 +651,9 @@ impl cosmic::Application for AppModel {
653651
// Create camera pipeline using PipeWire backend
654652
// For Y10B depth formats, use V4L2 direct capture with GPU unpacking
655653
use crate::backends::camera::pipewire::PipeWirePipeline;
656-
use crate::backends::camera::types::{CameraDevice, CameraFormat, SensorType};
654+
use crate::backends::camera::types::{
655+
CameraDevice, CameraFormat, SensorType,
656+
};
657657
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
658658
use crate::backends::camera::types::{CameraFrame, PixelFormat};
659659

@@ -697,8 +697,8 @@ impl cosmic::Application for AppModel {
697697
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
698698
use crate::backends::camera::v4l2_depth::V4l2DepthPipeline;
699699
use crate::backends::camera::{
700-
NativeDepthBackend, is_depth_native_device,
701-
V4l2KernelDepthBackend, is_kernel_depth_device,
700+
NativeDepthBackend, V4l2KernelDepthBackend, is_depth_native_device,
701+
is_kernel_depth_device,
702702
};
703703

704704
// Enum to hold active pipeline - fields are used for ownership semantics
@@ -715,7 +715,8 @@ impl cosmic::Application for AppModel {
715715
// Check if this is a kernel depth device first (highest priority)
716716
let is_kernel_depth = is_kernel_depth_device(&device.path);
717717
// Then check for freedepth device (only when kernel driver not present)
718-
let is_depth_cam = !is_kernel_depth && is_depth_native_device(&device.path);
718+
let is_depth_cam =
719+
!is_kernel_depth && is_depth_native_device(&device.path);
719720
if is_kernel_depth {
720721
info!(device = %device.name, path = %device.path, "Using V4L2 kernel depth backend");
721722
} else if is_depth_cam {
@@ -764,7 +765,8 @@ impl cosmic::Application for AppModel {
764765
if is_depth_format {
765766
info!("Creating V4L2 depth pipeline for Y10B format");
766767
use crate::shaders::depth::{
767-
is_depth_colormap_enabled, is_depth_only_mode, unpack_y10b_gpu,
768+
is_depth_colormap_enabled, is_depth_only_mode,
769+
unpack_y10b_gpu,
768770
};
769771

770772
// Create depth frame channel
@@ -813,10 +815,15 @@ impl cosmic::Application for AppModel {
813815
),
814816
format: PixelFormat::Depth16,
815817
stride: result.width * 4,
816-
captured_at: depth_frame.captured_at,
817-
depth_data: Some(std::sync::Arc::from(
818-
result.depth_u16.into_boxed_slice(),
819-
)),
818+
captured_at: depth_frame
819+
.captured_at,
820+
depth_data: Some(
821+
std::sync::Arc::from(
822+
result
823+
.depth_u16
824+
.into_boxed_slice(),
825+
),
826+
),
820827
depth_width: result.width,
821828
depth_height: result.height,
822829
video_timestamp: None,
@@ -885,18 +892,21 @@ impl cosmic::Application for AppModel {
885892

886893
// For kernel depth devices, poll frames directly via V4L2
887894
if let ActivePipeline::KernelDepth(ref kernel_backend) = pipeline {
888-
889895
info!("Starting kernel depth frame polling loop");
890896
loop {
891897
// Check cancel flag
892898
if cancel_flag.load(std::sync::atomic::Ordering::Acquire) {
893-
info!("Cancel flag set - kernel depth subscription being cancelled");
899+
info!(
900+
"Cancel flag set - kernel depth subscription being cancelled"
901+
);
894902
break;
895903
}
896904

897905
// Check if output is closed
898906
if output.is_closed() {
899-
info!("Output channel closed - kernel depth subscription being cancelled");
907+
info!(
908+
"Output channel closed - kernel depth subscription being cancelled"
909+
);
900910
break;
901911
}
902912

@@ -914,11 +924,13 @@ impl cosmic::Application for AppModel {
914924
}
915925

916926
// Send frame to UI
917-
let _ = output.try_send(Message::CameraFrame(Arc::new(frame)));
927+
let _ = output
928+
.try_send(Message::CameraFrame(Arc::new(frame)));
918929
}
919930

920931
// Small sleep to avoid busy-waiting (~30fps)
921-
tokio::time::sleep(tokio::time::Duration::from_millis(16)).await;
932+
tokio::time::sleep(tokio::time::Duration::from_millis(16))
933+
.await;
922934
}
923935
}
924936

@@ -1145,7 +1157,9 @@ impl cosmic::Application for AppModel {
11451157
}
11461158

11471159
if output.is_closed() {
1148-
info!("Output channel closed - subscription being cancelled");
1160+
info!(
1161+
"Output channel closed - subscription being cancelled"
1162+
);
11491163
break;
11501164
}
11511165

@@ -1157,7 +1171,8 @@ impl cosmic::Application for AppModel {
11571171
{
11581172
Ok(Some(frame)) => {
11591173
frame_count += 1;
1160-
let latency_us = frame.captured_at.elapsed().as_micros();
1174+
let latency_us =
1175+
frame.captured_at.elapsed().as_micros();
11611176

11621177
if frame_count % 30 == 0 {
11631178
info!(
@@ -1169,10 +1184,15 @@ impl cosmic::Application for AppModel {
11691184
);
11701185
}
11711186

1172-
match output.try_send(Message::CameraFrame(Arc::new(frame))) {
1187+
match output
1188+
.try_send(Message::CameraFrame(Arc::new(frame)))
1189+
{
11731190
Ok(()) => {
11741191
if frame_count % 30 == 0 {
1175-
tracing::debug!(frame = frame_count, "Frame forwarded to UI");
1192+
tracing::debug!(
1193+
frame = frame_count,
1194+
"Frame forwarded to UI"
1195+
);
11761196
}
11771197
}
11781198
Err(e) => {

src/app/state.rs

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -777,10 +777,6 @@ pub struct AppModel {
777777
/// Kinect device state (detection, motor, calibration, native streaming)
778778
pub kinect: KinectState,
779779

780-
// ===== Motor/PTZ Controls =====
781-
/// Whether motor controls picker is visible
782-
pub motor_picker_visible: bool,
783-
784780
// ===== Depth Visualization =====
785781
/// Depth visualization settings (overlay, grayscale mode)
786782
pub depth_viz: DepthVisualizationState,

src/app/update.rs

Lines changed: 22 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,24 @@ impl AppModel {
5050
self.exposure_picker_visible = false;
5151
self.color_picker_visible = false;
5252
self.tools_menu_visible = false;
53+
54+
// Get initial tilt from motor control if available
55+
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
56+
if self.kinect.is_device {
57+
use crate::backends::camera::motor_control::{
58+
get_motor_tilt, is_motor_available,
59+
};
60+
if is_motor_available() {
61+
match get_motor_tilt() {
62+
Ok(tilt) => {
63+
self.kinect.tilt_angle = tilt;
64+
}
65+
Err(e) => {
66+
tracing::warn!("Failed to get Kinect tilt: {}", e);
67+
}
68+
}
69+
}
70+
}
5371
}
5472
Task::none()
5573
}
@@ -71,6 +89,10 @@ impl AppModel {
7189
}
7290
Message::ResetPanTilt => {
7391
self.reset_pan_tilt();
92+
// Also reset Kinect tilt if it's a Kinect device
93+
if self.kinect.is_device {
94+
return self.handle_set_kinect_tilt(0);
95+
}
7496
Task::none()
7597
}
7698

@@ -295,60 +317,6 @@ impl AppModel {
295317
}
296318
Message::RequestPointCloudRender => self.handle_request_point_cloud_render(),
297319

298-
// ===== Motor/PTZ Controls =====
299-
Message::ToggleMotorPicker => {
300-
self.motor_picker_visible = !self.motor_picker_visible;
301-
// Close other pickers when opening motor picker
302-
if self.motor_picker_visible {
303-
self.exposure_picker_visible = false;
304-
self.color_picker_visible = false;
305-
self.tools_menu_visible = false;
306-
307-
// Get initial tilt from motor control if available
308-
#[cfg(all(target_arch = "x86_64", feature = "freedepth"))]
309-
if self.kinect.is_device {
310-
use crate::backends::camera::motor_control::{
311-
get_motor_tilt, is_motor_available,
312-
};
313-
if is_motor_available() {
314-
match get_motor_tilt() {
315-
Ok(tilt) => {
316-
self.kinect.tilt_angle = tilt;
317-
}
318-
Err(e) => {
319-
tracing::warn!("Failed to get Kinect tilt: {}", e);
320-
}
321-
}
322-
}
323-
}
324-
}
325-
Task::none()
326-
}
327-
Message::CloseMotorPicker => {
328-
self.motor_picker_visible = false;
329-
Task::none()
330-
}
331-
Message::SetPanAbsolute(value) => {
332-
self.set_v4l2_pan(value);
333-
Task::none()
334-
}
335-
Message::SetTiltAbsolute(value) => {
336-
self.set_v4l2_tilt(value);
337-
Task::none()
338-
}
339-
Message::SetZoomAbsolute(value) => {
340-
self.set_v4l2_zoom(value);
341-
Task::none()
342-
}
343-
Message::ResetPanTilt => {
344-
self.reset_pan_tilt();
345-
// Also reset Kinect tilt if it's a Kinect device
346-
if self.kinect.is_device {
347-
return self.handle_set_kinect_tilt(0);
348-
}
349-
Task::none()
350-
}
351-
352320
// ===== Kinect Controls =====
353321
Message::SetKinectTilt(degrees) => self.handle_set_kinect_tilt(degrees),
354322
Message::KinectStateUpdated(_tilt) => {

src/backends/camera/depth_native.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -33,10 +33,10 @@ use freedepth::{
3333
};
3434
use tracing::{debug, info, warn};
3535

36+
use super::CameraBackend;
3637
use super::format_converters::{
37-
self, ir_8bit_to_rgb, ir_10bit_to_rgb, ir_10bit_unpacked_to_rgb, DepthVisualizationOptions,
38+
self, DepthVisualizationOptions, ir_8bit_to_rgb, ir_10bit_to_rgb, ir_10bit_unpacked_to_rgb,
3839
};
39-
use super::CameraBackend;
4040
use super::types::*;
4141

4242
/// Path prefix for depth camera devices to distinguish from PipeWire cameras
@@ -213,7 +213,6 @@ pub struct NativeDepthBackend {
213213
registration: Option<Box<dyn DepthRegistration>>,
214214
}
215215

216-
217216
impl NativeDepthBackend {
218217
/// Create a new native depth camera backend
219218
pub fn new() -> Self {

src/backends/camera/format_converters.rs

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -149,8 +149,8 @@ impl DepthVisualizationOptions {
149149
grayscale: false,
150150
quantize: false,
151151
quantize_bands: 32,
152-
min_depth_mm: 500, // DEPTH_MIN_USABLE_MM
153-
max_depth_mm: 4000, // DEPTH_MAX_USABLE_MM
152+
min_depth_mm: 500, // DEPTH_MIN_USABLE_MM
153+
max_depth_mm: 4000, // DEPTH_MAX_USABLE_MM
154154
invalid_value: 8191, // DEPTH_INVALID_THRESHOLD_MM
155155
}
156156
}
@@ -173,8 +173,7 @@ impl DepthVisualizationOptions {
173173
/// Based on the Google Turbo colormap.
174174
fn turbo(t: f32) -> [u8; 3] {
175175
let r = (0.13572138
176-
+ t * (4.6153926
177-
+ t * (-42.66032 + t * (132.13108 + t * (-152.54825 + t * 59.28144)))))
176+
+ t * (4.6153926 + t * (-42.66032 + t * (132.13108 + t * (-152.54825 + t * 59.28144)))))
178177
.clamp(0.0, 1.0);
179178
let g = (0.09140261
180179
+ t * (2.19418 + t * (4.84296 + t * (-14.18503 + t * (4.27805 + t * 2.53377)))))
@@ -192,7 +191,12 @@ fn turbo(t: f32) -> [u8; 3] {
192191
/// - Turbo colormap (blue=near, red=far)
193192
/// - Optional band quantization
194193
/// - Auto-ranging or fixed range
195-
pub fn depth_to_rgb(depth: &[u16], width: u32, height: u32, options: &DepthVisualizationOptions) -> Vec<u8> {
194+
pub fn depth_to_rgb(
195+
depth: &[u16],
196+
width: u32,
197+
height: u32,
198+
options: &DepthVisualizationOptions,
199+
) -> Vec<u8> {
196200
let pixel_count = (width * height) as usize;
197201
let mut rgb = Vec::with_capacity(pixel_count * 3);
198202

@@ -251,7 +255,12 @@ pub fn depth_to_rgb(depth: &[u16], width: u32, height: u32, options: &DepthVisua
251255
/// Convert 16-bit depth values to RGBA visualization
252256
///
253257
/// Same as `depth_to_rgb` but outputs RGBA with alpha=255.
254-
pub fn depth_to_rgba(depth: &[u16], width: u32, height: u32, options: &DepthVisualizationOptions) -> Vec<u8> {
258+
pub fn depth_to_rgba(
259+
depth: &[u16],
260+
width: u32,
261+
height: u32,
262+
options: &DepthVisualizationOptions,
263+
) -> Vec<u8> {
255264
let rgb = depth_to_rgb(depth, width, height, options);
256265
let mut rgba = Vec::with_capacity(rgb.len() / 3 * 4);
257266

src/backends/camera/frame_loop.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55
//! across different depth camera backends, reducing code duplication and
66
//! ensuring consistent thread lifecycle handling.
77
8-
use std::sync::atomic::{AtomicBool, Ordering};
98
use std::sync::Arc;
9+
use std::sync::atomic::{AtomicBool, Ordering};
1010
use std::thread::{self, JoinHandle};
1111
use tracing::{debug, info, warn};
1212

0 commit comments

Comments (0)