From d2c98587f2e32bfb011feef24707e8138b4d6d56 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 00:02:15 -0800 Subject: [PATCH 01/52] add user timestamping feature --- examples/local_video/src/publisher.rs | 1 + examples/screensharing/src/lib.rs | 3 +- examples/wgpu_room/src/logo_track.rs | 3 +- libwebrtc/src/lib.rs | 2 +- libwebrtc/src/native/frame_cryptor.rs | 8 +- libwebrtc/src/native/mod.rs | 1 + .../src/native/peer_connection_factory.rs | 8 +- libwebrtc/src/native/user_timestamp.rs | 191 ++++++++ libwebrtc/src/native/video_stream.rs | 1 + libwebrtc/src/video_frame.rs | 5 + livekit-ffi/src/server/video_source.rs | 1 + livekit/src/room/e2ee/manager.rs | 55 ++- livekit/src/room/track/local_video_track.rs | 17 +- livekit/src/room/track/remote_video_track.rs | 20 +- webrtc-sys/build.rs | 2 + webrtc-sys/include/livekit/frame_cryptor.h | 7 + webrtc-sys/include/livekit/user_timestamp.h | 191 ++++++++ webrtc-sys/src/frame_cryptor.cpp | 86 ++++ webrtc-sys/src/frame_cryptor.rs | 7 + webrtc-sys/src/lib.rs | 1 + webrtc-sys/src/user_timestamp.cpp | 460 ++++++++++++++++++ webrtc-sys/src/user_timestamp.rs | 90 ++++ 22 files changed, 1138 insertions(+), 22 deletions(-) create mode 100644 libwebrtc/src/native/user_timestamp.rs create mode 100644 webrtc-sys/include/livekit/user_timestamp.h create mode 100644 webrtc-sys/src/user_timestamp.cpp create mode 100644 webrtc-sys/src/user_timestamp.rs diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index bc43745d4..4ddc6f218 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -230,6 +230,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, + user_timestamp_us: None, buffer: I420Buffer::new(width, height), }; let is_yuyv = fmt.format() == FrameFormat::YUYV; diff --git a/examples/screensharing/src/lib.rs 
b/examples/screensharing/src/lib.rs index 044d260e0..a10b6127a 100644 --- a/examples/screensharing/src/lib.rs +++ b/examples/screensharing/src/lib.rs @@ -185,8 +185,9 @@ mod test { let callback = { let mut frame_buffer = VideoFrame { rotation: VideoRotation::VideoRotation0, - buffer: I420Buffer::new(1, 1), timestamp_us: 0, + user_timestamp_us: None, + buffer: I420Buffer::new(1, 1), }; move |result: Result| { let frame = match result { diff --git a/examples/wgpu_room/src/logo_track.rs b/examples/wgpu_room/src/logo_track.rs index 03b1a0ec4..33ac5b0c6 100644 --- a/examples/wgpu_room/src/logo_track.rs +++ b/examples/wgpu_room/src/logo_track.rs @@ -116,8 +116,9 @@ impl LogoTrack { framebuffer: Arc::new(Mutex::new(vec![0u8; FB_WIDTH * FB_HEIGHT * 4])), video_frame: Arc::new(Mutex::new(VideoFrame { rotation: VideoRotation::VideoRotation0, - buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), timestamp_us: 0, + user_timestamp_us: None, + buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), })), pos: (0, 0), direction: (1, 1), diff --git a/libwebrtc/src/lib.rs b/libwebrtc/src/lib.rs index 1cd1eb1e5..77e125fc7 100644 --- a/libwebrtc/src/lib.rs +++ b/libwebrtc/src/lib.rs @@ -68,7 +68,7 @@ pub mod video_track; pub mod native { pub use webrtc_sys::webrtc::ffi::create_random_uuid; - pub use crate::imp::{apm, audio_mixer, audio_resampler, frame_cryptor, yuv_helper}; + pub use crate::imp::{apm, audio_mixer, audio_resampler, frame_cryptor, user_timestamp, yuv_helper}; } #[cfg(target_os = "android")] diff --git a/libwebrtc/src/native/frame_cryptor.rs b/libwebrtc/src/native/frame_cryptor.rs index f6fdf33bf..b3ae67a57 100644 --- a/libwebrtc/src/native/frame_cryptor.rs +++ b/libwebrtc/src/native/frame_cryptor.rs @@ -19,8 +19,8 @@ use parking_lot::Mutex; use webrtc_sys::frame_cryptor::{self as sys_fc}; use crate::{ - peer_connection_factory::PeerConnectionFactory, rtp_receiver::RtpReceiver, - rtp_sender::RtpSender, + native::user_timestamp::UserTimestampHandler, 
peer_connection_factory::PeerConnectionFactory, + rtp_receiver::RtpReceiver, rtp_sender::RtpSender, }; pub type OnStateChange = Box; @@ -168,6 +168,10 @@ impl FrameCryptor { pub fn on_state_change(&self, handler: Option) { *self.observer.state_change_handler.lock() = handler; } + + pub fn set_user_timestamp_handler(&self, handler: &UserTimestampHandler) { + self.sys_handle.set_user_timestamp_handler(handler.sys_handle()); + } } #[derive(Clone)] diff --git a/libwebrtc/src/native/mod.rs b/libwebrtc/src/native/mod.rs index 183f5ab66..f4d1700b4 100644 --- a/libwebrtc/src/native/mod.rs +++ b/libwebrtc/src/native/mod.rs @@ -34,6 +34,7 @@ pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; pub mod session_description; +pub mod user_timestamp; pub mod video_frame; pub mod video_source; pub mod video_stream; diff --git a/libwebrtc/src/native/peer_connection_factory.rs b/libwebrtc/src/native/peer_connection_factory.rs index 4edc63047..e95577e79 100644 --- a/libwebrtc/src/native/peer_connection_factory.rs +++ b/libwebrtc/src/native/peer_connection_factory.rs @@ -47,7 +47,13 @@ impl Default for PeerConnectionFactory { *log_sink = Some(sys_rtc::ffi::new_log_sink(|msg, _| { let msg = msg.strip_suffix("\r\n").or(msg.strip_suffix('\n')).unwrap_or(&msg); - log::debug!(target: "libwebrtc", "{}", msg); + // Route user timestamp transformer logs to a dedicated target so they can + // be enabled independently from the very noisy general libwebrtc logs. + if msg.contains("UserTimestampTransformer") { + log::info!(target: "user_timestamp_rtp", "{}", msg); + } else { + log::debug!(target: "libwebrtc", "{}", msg); + } })); } diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs new file mode 100644 index 000000000..1d1a3ede4 --- /dev/null +++ b/libwebrtc/src/native/user_timestamp.rs @@ -0,0 +1,191 @@ +// Copyright 2025 LiveKit, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! User timestamp support for end-to-end timestamp propagation. +//! +//! This module provides functionality to embed user-supplied timestamps +//! in encoded video frames as trailers. The timestamps are preserved +//! through the WebRTC pipeline and can be extracted on the receiver side. +//! +//! This works independently of e2ee encryption - timestamps can be +//! embedded even when encryption is disabled. + +use cxx::SharedPtr; +use webrtc_sys::user_timestamp::ffi as sys_ut; + +use crate::{ + peer_connection_factory::PeerConnectionFactory, + rtp_receiver::RtpReceiver, + rtp_sender::RtpSender, +}; + +/// Thread-safe store for mapping capture timestamps to user timestamps. +/// +/// Used on the sender side to correlate video frame capture time with +/// the user timestamp that should be embedded in the encoded frame. +#[derive(Clone)] +pub struct UserTimestampStore { + sys_handle: SharedPtr, +} + +impl UserTimestampStore { + /// Create a new user timestamp store. + pub fn new() -> Self { + Self { + sys_handle: sys_ut::new_user_timestamp_store(), + } + } + + /// Store a user timestamp associated with a capture timestamp. + /// + /// Call this when capturing a video frame with a user timestamp. + /// The `capture_timestamp_us` should match the `timestamp_us` field + /// of the VideoFrame. 
+ pub fn store(&self, capture_timestamp_us: i64, user_timestamp_us: i64) { + log::info!( + target: "user_timestamp", + "store: capture_ts_us={}, user_ts_us={}", + capture_timestamp_us, + user_timestamp_us + ); + self.sys_handle.store(capture_timestamp_us, user_timestamp_us); + } + + /// Lookup a user timestamp by capture timestamp (for debugging). + /// Returns None if not found. + pub fn lookup(&self, capture_timestamp_us: i64) -> Option { + let result = self.sys_handle.lookup(capture_timestamp_us); + if result < 0 { + None + } else { + Some(result) + } + } + + /// Pop the oldest user timestamp from the queue. + /// Returns None if the queue is empty. + pub fn pop(&self) -> Option { + let result = self.sys_handle.pop(); + if result < 0 { + None + } else { + Some(result) + } + } + + /// Peek at the oldest user timestamp without removing it. + /// Returns None if the queue is empty. + pub fn peek(&self) -> Option { + let result = self.sys_handle.peek(); + if result < 0 { + None + } else { + Some(result) + } + } + + /// Clear old entries (older than the given threshold in microseconds). + pub fn prune(&self, max_age_us: i64) { + self.sys_handle.prune(max_age_us); + } + + pub(crate) fn sys_handle(&self) -> SharedPtr { + self.sys_handle.clone() + } +} + +impl Default for UserTimestampStore { + fn default() -> Self { + Self::new() + } +} + +/// Handler for user timestamp embedding/extraction on RTP streams. +/// +/// For sender side: Embeds user timestamps as 12-byte trailers on +/// encoded frames before they are sent. +/// +/// For receiver side: Extracts user timestamps from received frames +/// and makes them available for retrieval. +#[derive(Clone)] +pub struct UserTimestampHandler { + sys_handle: SharedPtr, +} + +impl UserTimestampHandler { + /// Enable or disable timestamp embedding/extraction. + pub fn set_enabled(&self, enabled: bool) { + self.sys_handle.set_enabled(enabled); + } + + /// Check if timestamp embedding/extraction is enabled. 
+ pub fn enabled(&self) -> bool { + self.sys_handle.enabled() + } + + /// Get the last received user timestamp (receiver side only). + /// Returns None if no timestamp has been received yet. + pub fn last_user_timestamp(&self) -> Option { + if self.sys_handle.has_user_timestamp() { + let ts = self.sys_handle.last_user_timestamp(); + if ts >= 0 { + Some(ts) + } else { + None + } + } else { + None + } + } + + pub(crate) fn sys_handle(&self) -> SharedPtr { + self.sys_handle.clone() + } +} + +/// Create a sender-side user timestamp handler. +/// +/// This handler will embed user timestamps from the provided store +/// into encoded frames before they are packetized and sent. +pub fn create_sender_handler( + peer_factory: &PeerConnectionFactory, + store: &UserTimestampStore, + sender: &RtpSender, +) -> UserTimestampHandler { + UserTimestampHandler { + sys_handle: sys_ut::new_user_timestamp_sender( + peer_factory.handle.sys_handle.clone(), + store.sys_handle(), + sender.handle.sys_handle.clone(), + ), + } +} + +/// Create a receiver-side user timestamp handler. +/// +/// This handler will extract user timestamps from received frames +/// and make them available via `last_user_timestamp()`. 
+pub fn create_receiver_handler( + peer_factory: &PeerConnectionFactory, + store: &UserTimestampStore, + receiver: &RtpReceiver, +) -> UserTimestampHandler { + UserTimestampHandler { + sys_handle: sys_ut::new_user_timestamp_receiver( + peer_factory.handle.sys_handle.clone(), + store.sys_handle(), + receiver.handle.sys_handle.clone(), + ), + } +} diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 07774f87b..55a143308 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -84,6 +84,7 @@ impl sys_vt::VideoSink for VideoTrackObserver { let _ = self.frame_tx.send(VideoFrame { rotation: frame.rotation().into(), timestamp_us: frame.timestamp_us(), + user_timestamp_us: None, buffer: new_video_frame_buffer(unsafe { frame.video_frame_buffer() }), }); } diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index 926b45572..d251433ae 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -59,6 +59,11 @@ where { pub rotation: VideoRotation, pub timestamp_us: i64, // When the frame was captured in microseconds + /// Optional user timestamp in microseconds, if available. + /// This is typically a hardware or device timestamp supplied by the + /// application that can be propagated end-to-end through the media + /// pipeline. 
+ pub user_timestamp_us: Option, pub buffer: T, } diff --git a/livekit-ffi/src/server/video_source.rs b/livekit-ffi/src/server/video_source.rs index 5af7d9a38..16005aa85 100644 --- a/livekit-ffi/src/server/video_source.rs +++ b/livekit-ffi/src/server/video_source.rs @@ -65,6 +65,7 @@ impl FfiVideoSource { let frame = VideoFrame { rotation: capture.rotation().into(), timestamp_us: capture.timestamp_us, + user_timestamp_us: None, buffer, }; diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 1e583b9c4..06e86eb2f 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -15,8 +15,11 @@ use std::{collections::HashMap, sync::Arc}; use libwebrtc::{ - native::frame_cryptor::{ - DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor, + native::{ + frame_cryptor::{ + DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor, + }, + user_timestamp::{self, UserTimestampStore}, }, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, @@ -91,48 +94,72 @@ impl E2eeManager { self.inner.lock().options.is_some() } - /// Called by the room pub(crate) fn on_track_subscribed( &self, track: RemoteTrack, publication: RemoteTrackPublication, participant: RemoteParticipant, ) { - if !self.initialized() { - return; + let identity = participant.identity(); + let receiver = track.transceiver().unwrap().receiver(); + let mut user_timestamp_handler = None; + + // Always set up user timestamp extraction for remote video tracks. 
+ if let RemoteTrack::Video(video_track) = &track { + let store = UserTimestampStore::new(); + let handler = user_timestamp::create_receiver_handler( + LkRuntime::instance().pc_factory(), + &store, + &receiver, + ); + video_track.set_user_timestamp_handler(handler.clone()); + user_timestamp_handler = Some(handler); } - if publication.encryption_type() == EncryptionType::None { + if !self.initialized() || publication.encryption_type() == EncryptionType::None { return; } - let identity = participant.identity(); - let receiver = track.transceiver().unwrap().receiver(); let frame_cryptor = self.setup_rtp_receiver(&identity, receiver); + if let Some(handler) = user_timestamp_handler.as_ref() { + frame_cryptor.set_user_timestamp_handler(handler); + } self.setup_cryptor(&frame_cryptor); let mut inner = self.inner.lock(); inner.frame_cryptors.insert((identity, publication.sid()), frame_cryptor.clone()); } - /// Called by the room pub(crate) fn on_local_track_published( &self, track: LocalTrack, publication: LocalTrackPublication, participant: LocalParticipant, ) { - if !self.initialized() { - return; + let identity = participant.identity(); + let sender = track.transceiver().unwrap().sender(); + let mut user_timestamp_handler = None; + + // Always set up user timestamp embedding for local video tracks. 
+ if let LocalTrack::Video(video_track) = &track { + let store = UserTimestampStore::new(); + video_track.set_user_timestamp_store(store.clone()); + let handler = user_timestamp::create_sender_handler( + LkRuntime::instance().pc_factory(), + &store, + &sender, + ); + user_timestamp_handler = Some(handler); } - if publication.encryption_type() == EncryptionType::None { + if !self.initialized() || publication.encryption_type() == EncryptionType::None { return; } - let identity = participant.identity(); - let sender = track.transceiver().unwrap().sender(); let frame_cryptor = self.setup_rtp_sender(&identity, sender); + if let Some(handler) = user_timestamp_handler.as_ref() { + frame_cryptor.set_user_timestamp_handler(handler); + } self.setup_cryptor(&frame_cryptor); let mut inner = self.inner.lock(); diff --git a/livekit/src/room/track/local_video_track.rs b/livekit/src/room/track/local_video_track.rs index c7c26649b..ae258e078 100644 --- a/livekit/src/room/track/local_video_track.rs +++ b/livekit/src/room/track/local_video_track.rs @@ -14,8 +14,9 @@ use std::{fmt::Debug, sync::Arc}; -use libwebrtc::{prelude::*, stats::RtcStats}; +use libwebrtc::{native::user_timestamp::UserTimestampStore, prelude::*, stats::RtcStats}; use livekit_protocol as proto; +use parking_lot::Mutex; use super::TrackInner; use crate::{prelude::*, rtc_engine::lk_runtime::LkRuntime}; @@ -24,6 +25,7 @@ use crate::{prelude::*, rtc_engine::lk_runtime::LkRuntime}; pub struct LocalVideoTrack { inner: Arc, source: RtcVideoSource, + user_timestamp_store: Arc>>, } impl Debug for LocalVideoTrack { @@ -46,6 +48,7 @@ impl LocalVideoTrack { MediaStreamTrack::Video(rtc_track), )), source, + user_timestamp_store: Arc::new(Mutex::new(None)), } } @@ -123,6 +126,18 @@ impl LocalVideoTrack { self.source.clone() } + /// Returns the user timestamp store associated with this track, if any. 
+ /// When present, callers can push per-frame user timestamps into the + /// outgoing queue which will then be embedded into encoded frames. + pub fn user_timestamp_store(&self) -> Option { + self.user_timestamp_store.lock().clone() + } + + /// Internal: set the user timestamp store used for this track. + pub(crate) fn set_user_timestamp_store(&self, store: UserTimestampStore) { + *self.user_timestamp_store.lock() = Some(store); + } + pub async fn get_stats(&self) -> RoomResult> { super::local_track::get_stats(&self.inner).await } diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs index 2076a3b1c..bab9837fa 100644 --- a/livekit/src/room/track/remote_video_track.rs +++ b/livekit/src/room/track/remote_video_track.rs @@ -14,8 +14,9 @@ use std::{fmt::Debug, sync::Arc}; -use libwebrtc::{prelude::*, stats::RtcStats}; +use libwebrtc::{native::user_timestamp::UserTimestampHandler, prelude::*, stats::RtcStats}; use livekit_protocol as proto; +use parking_lot::Mutex; use super::{remote_track, TrackInner}; use crate::prelude::*; @@ -23,6 +24,7 @@ use crate::prelude::*; #[derive(Clone)] pub struct RemoteVideoTrack { inner: Arc, + user_timestamp_handler: Arc>>, } impl Debug for RemoteVideoTrack { @@ -44,6 +46,7 @@ impl RemoteVideoTrack { TrackKind::Video, MediaStreamTrack::Video(rtc_track), )), + user_timestamp_handler: Arc::new(Mutex::new(None)), } } @@ -94,6 +97,21 @@ impl RemoteVideoTrack { true } + /// Returns the last parsed user timestamp (in microseconds) for this + /// remote video track, if the user timestamp transformer is enabled and + /// a timestamp has been received. + pub fn last_user_timestamp(&self) -> Option { + self.user_timestamp_handler + .lock() + .as_ref() + .and_then(|h| h.last_user_timestamp()) + } + + /// Internal: set the handler that extracts user timestamps for this track. 
+ pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.user_timestamp_handler.lock().replace(handler); + } + pub async fn get_stats(&self) -> RoomResult> { super::remote_track::get_stats(&self.inner).await } diff --git a/webrtc-sys/build.rs b/webrtc-sys/build.rs index 67f77f7c1..d7221b529 100644 --- a/webrtc-sys/build.rs +++ b/webrtc-sys/build.rs @@ -54,6 +54,7 @@ fn main() { "src/prohibit_libsrtp_initialization.rs", "src/apm.rs", "src/audio_mixer.rs", + "src/user_timestamp.rs", ]; if is_desktop { @@ -89,6 +90,7 @@ fn main() { "src/prohibit_libsrtp_initialization.cpp", "src/apm.cpp", "src/audio_mixer.cpp", + "src/user_timestamp.cpp", ]); if is_desktop { diff --git a/webrtc-sys/include/livekit/frame_cryptor.h b/webrtc-sys/include/livekit/frame_cryptor.h index c100aeed0..16af04d11 100644 --- a/webrtc-sys/include/livekit/frame_cryptor.h +++ b/webrtc-sys/include/livekit/frame_cryptor.h @@ -39,6 +39,7 @@ struct EncryptedPacket; enum class Algorithm : ::std::int32_t; class RtcFrameCryptorObserverWrapper; class NativeFrameCryptorObserver; +class UserTimestampHandler; /// Shared secret key for frame encryption. class KeyProvider { @@ -158,6 +159,10 @@ class FrameCryptor { void unregister_observer() const; + /// Attach a user timestamp transformer for chained processing. + void set_user_timestamp_handler( + std::shared_ptr handler) const; + private: std::shared_ptr rtc_runtime_; const rust::String participant_id_; @@ -167,6 +172,8 @@ class FrameCryptor { webrtc::scoped_refptr sender_; webrtc::scoped_refptr receiver_; mutable webrtc::scoped_refptr observer_; + mutable rtc::scoped_refptr + chained_transformer_; }; class NativeFrameCryptorObserver diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h new file mode 100644 index 000000000..a9055be2c --- /dev/null +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -0,0 +1,191 @@ +/* + * Copyright 2025 LiveKit, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "livekit/peer_connection.h" +#include "livekit/peer_connection_factory.h" +#include "livekit/rtp_receiver.h" +#include "livekit/rtp_sender.h" +#include "livekit/webrtc.h" +#include "rtc_base/synchronization/mutex.h" +#include "rust/cxx.h" + +namespace livekit_ffi { + +// Magic bytes to identify user timestamp trailers: "LKTS" (LiveKit TimeStamp) +constexpr uint8_t kUserTimestampMagic[4] = {'L', 'K', 'T', 'S'}; +constexpr size_t kUserTimestampTrailerSize = + 12; // 8 bytes timestamp + 4 bytes magic + +/// Thread-safe FIFO queue for user timestamps. +/// Used on the sender side to pass user timestamps to the transformer. +/// Works on the assumption that frames are captured and encoded in order. +class UserTimestampStore { + public: + UserTimestampStore() = default; + ~UserTimestampStore() = default; + + /// Push a user timestamp to the queue. + /// Call this when capturing a video frame with a user timestamp. + void store(int64_t capture_timestamp_us, + int64_t user_timestamp_us) const; + + /// Lookup a user timestamp by capture timestamp (for debugging). + /// Returns -1 if not found. 
+ int64_t lookup(int64_t capture_timestamp_us) const; + + /// Pop the oldest entry if the queue has entries. + /// Returns the user timestamp, or -1 if empty. + int64_t pop() const; + + /// Peek at the oldest entry without removing it. + /// Returns the user timestamp, or -1 if empty. + int64_t peek() const; + + /// Clear old entries (older than the given threshold in microseconds). + void prune(int64_t max_age_us) const; + + private: + mutable webrtc::Mutex mutex_; + struct Entry { + int64_t capture_timestamp_us; + int64_t user_timestamp_us; + }; + mutable std::deque entries_; + static constexpr size_t kMaxEntries = 300; // ~10 seconds at 30fps +}; + +/// Frame transformer that appends/extracts user timestamp trailers. +/// This transformer can be used standalone or in conjunction with e2ee. +class UserTimestampTransformer : public webrtc::FrameTransformerInterface { + public: + enum class Direction { kSend, kReceive }; + + UserTimestampTransformer(Direction direction, + std::shared_ptr store); + ~UserTimestampTransformer() override = default; + + // FrameTransformerInterface implementation + void Transform( + std::unique_ptr frame) override; + void RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) override; + void RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) override; + void UnregisterTransformedFrameCallback() override; + void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override; + + /// Enable/disable timestamp embedding + void set_enabled(bool enabled); + bool enabled() const; + + /// Get the last received user timestamp (receiver side only) + std::optional last_user_timestamp() const; + + private: + void TransformSend( + std::unique_ptr frame); + void TransformReceive( + std::unique_ptr frame); + + /// Append user timestamp trailer to frame data + std::vector AppendTimestampTrailer( + rtc::ArrayView data, + int64_t user_timestamp_us); + + /// Extract and remove user timestamp trailer from frame 
data + /// Returns the user timestamp if found, nullopt otherwise + std::optional ExtractTimestampTrailer( + rtc::ArrayView data, + std::vector& out_data); + + const Direction direction_; + std::shared_ptr store_; + std::atomic enabled_{true}; + mutable webrtc::Mutex mutex_; + rtc::scoped_refptr callback_; + std::unordered_map> + sink_callbacks_; + mutable std::atomic last_user_timestamp_{0}; + mutable std::atomic has_last_user_timestamp_{false}; +}; + +/// Wrapper class for Rust FFI that manages user timestamp transformers. +class UserTimestampHandler { + public: + UserTimestampHandler( + std::shared_ptr rtc_runtime, + std::shared_ptr store, + rtc::scoped_refptr sender); + + UserTimestampHandler( + std::shared_ptr rtc_runtime, + std::shared_ptr store, + rtc::scoped_refptr receiver); + + ~UserTimestampHandler() = default; + + /// Enable/disable timestamp embedding + void set_enabled(bool enabled) const; + bool enabled() const; + + /// Get the last received user timestamp (receiver side only) + /// Returns -1 if no timestamp has been received yet + int64_t last_user_timestamp() const; + + /// Check if a user timestamp has been received + bool has_user_timestamp() const; + + /// Access the underlying transformer for chaining. 
+ rtc::scoped_refptr transformer() const; + + private: + std::shared_ptr rtc_runtime_; + rtc::scoped_refptr transformer_; + rtc::scoped_refptr sender_; + rtc::scoped_refptr receiver_; +}; + +// Factory functions for Rust FFI +std::shared_ptr new_user_timestamp_store(); + +std::shared_ptr new_user_timestamp_sender( + std::shared_ptr peer_factory, + std::shared_ptr store, + std::shared_ptr sender); + +std::shared_ptr new_user_timestamp_receiver( + std::shared_ptr peer_factory, + std::shared_ptr store, + std::shared_ptr receiver); + +} // namespace livekit_ffi diff --git a/webrtc-sys/src/frame_cryptor.cpp b/webrtc-sys/src/frame_cryptor.cpp index cbbd10d0e..e044f09c0 100644 --- a/webrtc-sys/src/frame_cryptor.cpp +++ b/webrtc-sys/src/frame_cryptor.cpp @@ -22,12 +22,64 @@ #include "api/make_ref_counted.h" #include "livekit/peer_connection.h" #include "livekit/peer_connection_factory.h" +#include "livekit/user_timestamp.h" #include "livekit/webrtc.h" #include "rtc_base/thread.h" #include "webrtc-sys/src/frame_cryptor.rs.h" namespace livekit_ffi { +class ChainedFrameTransformer : public webrtc::FrameTransformerInterface, + public webrtc::TransformedFrameCallback { + public: + ChainedFrameTransformer( + rtc::scoped_refptr first, + rtc::scoped_refptr second) + : first_(std::move(first)), second_(std::move(second)) {} + + void Transform( + std::unique_ptr frame) override { + first_->Transform(std::move(frame)); + } + + void RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) override { + callback_ = callback; + second_->RegisterTransformedFrameCallback(callback); + first_->RegisterTransformedFrameCallback( + rtc::scoped_refptr(this)); + } + + void RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) override { + second_->RegisterTransformedFrameSinkCallback(callback, ssrc); + first_->RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr(this), ssrc); + } + + void UnregisterTransformedFrameCallback() override { + 
first_->UnregisterTransformedFrameCallback(); + second_->UnregisterTransformedFrameCallback(); + callback_ = nullptr; + } + + void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { + first_->UnregisterTransformedFrameSinkCallback(ssrc); + second_->UnregisterTransformedFrameSinkCallback(ssrc); + } + + void OnTransformedFrame( + std::unique_ptr frame) override { + second_->Transform(std::move(frame)); + } + + private: + rtc::scoped_refptr first_; + rtc::scoped_refptr second_; + rtc::scoped_refptr callback_; +}; + webrtc::FrameCryptorTransformer::Algorithm AlgorithmToFrameCryptorAlgorithm( Algorithm algorithm) { switch (algorithm) { @@ -120,6 +172,40 @@ void FrameCryptor::unregister_observer() const { e2ee_transformer_->UnRegisterFrameCryptorTransformerObserver(); } +void FrameCryptor::set_user_timestamp_handler( + std::shared_ptr handler) const { + if (!handler) { + return; + } + + auto timestamp_transformer = handler->transformer(); + if (!timestamp_transformer) { + return; + } + + rtc::scoped_refptr first; + rtc::scoped_refptr second; + if (sender_) { + first = e2ee_transformer_; + second = timestamp_transformer; + } else if (receiver_) { + first = timestamp_transformer; + second = e2ee_transformer_; + } else { + return; + } + + chained_transformer_ = + rtc::make_ref_counted(first, second); + + if (sender_) { + sender_->SetEncoderToPacketizerFrameTransformer(chained_transformer_); + } + if (receiver_) { + receiver_->SetDepacketizerToDecoderFrameTransformer(chained_transformer_); + } +} + NativeFrameCryptorObserver::NativeFrameCryptorObserver( rust::Box observer, const FrameCryptor* fc) diff --git a/webrtc-sys/src/frame_cryptor.rs b/webrtc-sys/src/frame_cryptor.rs index 84095bfff..ac8770cde 100644 --- a/webrtc-sys/src/frame_cryptor.rs +++ b/webrtc-sys/src/frame_cryptor.rs @@ -93,10 +93,12 @@ pub mod ffi { include!("livekit/rtp_sender.h"); include!("livekit/rtp_receiver.h"); include!("livekit/peer_connection_factory.h"); + 
include!("livekit/user_timestamp.h"); type RtpSender = crate::rtp_sender::ffi::RtpSender; type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver; type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory; + type UserTimestampHandler = crate::user_timestamp::ffi::UserTimestampHandler; pub type FrameCryptor; @@ -132,6 +134,11 @@ pub mod ffi { ); pub fn unregister_observer(self: &FrameCryptor); + + pub fn set_user_timestamp_handler( + self: &FrameCryptor, + handler: SharedPtr, + ); } unsafe extern "C++" { diff --git a/webrtc-sys/src/lib.rs b/webrtc-sys/src/lib.rs index fde63e14a..ca0a436fe 100644 --- a/webrtc-sys/src/lib.rs +++ b/webrtc-sys/src/lib.rs @@ -37,6 +37,7 @@ pub mod rtp_sender; pub mod rtp_transceiver; pub mod video_frame; pub mod video_frame_buffer; +pub mod user_timestamp; pub mod video_track; pub mod webrtc; pub mod yuv_helper; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp new file mode 100644 index 000000000..bcce9f0a0 --- /dev/null +++ b/webrtc-sys/src/user_timestamp.cpp @@ -0,0 +1,460 @@ +/* + * Copyright 2025 LiveKit, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "livekit/user_timestamp.h" + +#include +#include +#include +#include + +#include "api/make_ref_counted.h" +#include "livekit/peer_connection_factory.h" +#include "rtc_base/logging.h" +#include "webrtc-sys/src/user_timestamp.rs.h" + +namespace livekit_ffi { + +// UserTimestampStore implementation + +void UserTimestampStore::store(int64_t capture_timestamp_us, + int64_t user_timestamp_us) const { + webrtc::MutexLock lock(&mutex_); + + // Remove old entries if we're at capacity + while (entries_.size() >= kMaxEntries) { + entries_.pop_front(); + } + + entries_.push_back({capture_timestamp_us, user_timestamp_us}); + RTC_LOG(LS_INFO) << "UserTimestampStore::store capture_ts_us=" + << capture_timestamp_us + << " user_ts_us=" << user_timestamp_us + << " size=" << entries_.size(); +} + +int64_t UserTimestampStore::lookup(int64_t capture_timestamp_us) const { + webrtc::MutexLock lock(&mutex_); + + // Search from the end (most recent) for better performance + for (auto it = entries_.rbegin(); it != entries_.rend(); ++it) { + if (it->capture_timestamp_us == capture_timestamp_us) { + return it->user_timestamp_us; + } + } + + return -1; +} + +int64_t UserTimestampStore::pop() const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + RTC_LOG(LS_INFO) << "UserTimestampStore::pop empty"; + return -1; + } + + int64_t user_ts = entries_.front().user_timestamp_us; + entries_.pop_front(); + RTC_LOG(LS_INFO) << "UserTimestampStore::pop user_ts_us=" << user_ts + << " remaining=" << entries_.size(); + return user_ts; +} + +int64_t UserTimestampStore::peek() const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + return -1; + } + + return entries_.front().user_timestamp_us; +} + +void UserTimestampStore::prune(int64_t max_age_us) const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + return; + } + + int64_t newest_timestamp = entries_.back().capture_timestamp_us; + int64_t threshold = newest_timestamp - max_age_us; + + 
while (!entries_.empty() && + entries_.front().capture_timestamp_us < threshold) { + entries_.pop_front(); + } +} + +// UserTimestampTransformer implementation + +UserTimestampTransformer::UserTimestampTransformer( + Direction direction, + std::shared_ptr<UserTimestampStore> store) + : direction_(direction), store_(store) { + RTC_LOG(LS_INFO) << "UserTimestampTransformer created direction=" + << (direction_ == Direction::kSend ? "send" : "recv"); +} + +void UserTimestampTransformer::Transform( + std::unique_ptr<webrtc::TransformableFrameInterface> frame) { + uint32_t ssrc = frame->GetSsrc(); + uint32_t rtp_timestamp = frame->GetTimestamp(); + + if (!enabled_.load()) { + // Pass through without modification, but still log basic info so we know + // frames are flowing through the transformer. + RTC_LOG(LS_INFO) << "UserTimestampTransformer::Transform (disabled)" + << " direction=" + << (direction_ == Direction::kSend ? "send" : "recv") + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; + + rtc::scoped_refptr<webrtc::TransformedFrameCallback> cb; + { + webrtc::MutexLock lock(&mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + cb = it->second; + } else { + cb = callback_; + } + } + + if (cb) { + cb->OnTransformedFrame(std::move(frame)); + } else { + RTC_LOG(LS_WARNING) + << "UserTimestampTransformer::Transform (disabled) has no callback" + << " direction=" + << (direction_ == Direction::kSend ? "send" : "recv") + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; + } + return; + } + + if (direction_ == Direction::kSend) { + TransformSend(std::move(frame)); + } else { + TransformReceive(std::move(frame)); + } +} + +void UserTimestampTransformer::TransformSend( + std::unique_ptr<webrtc::TransformableFrameInterface> frame) { + // Get the RTP timestamp from the frame for logging + uint32_t rtp_timestamp = frame->GetTimestamp(); + uint32_t ssrc = frame->GetSsrc(); + + auto data = frame->GetData(); + + // Pop the next user timestamp from the queue. + // This assumes frames are captured and encoded in order (FIFO). 
+ int64_t ts_to_embed = 0; + + if (store_) { + int64_t popped_ts = store_->pop(); + if (popped_ts >= 0) { + ts_to_embed = popped_ts; + } else { + RTC_LOG(LS_INFO) << "UserTimestampTransformer::TransformSend no user " + "timestamp available" + << " rtp_ts=" << rtp_timestamp + << " orig_size=" << data.size(); + } + } + + // Always append trailer when enabled (even if timestamp is 0, + // which indicates no user timestamp was set for this frame) + std::vector<uint8_t> new_data; + if (enabled_.load()) { + new_data = AppendTimestampTrailer(data, ts_to_embed); + frame->SetData(rtc::ArrayView<const uint8_t>(new_data)); + + RTC_LOG(LS_INFO) << "UserTimestampTransformer::TransformSend appended " + "trailer" + << " ts_us=" << ts_to_embed + << " rtp_ts=" << rtp_timestamp + << " ssrc=" << ssrc + << " orig_size=" << data.size() + << " new_size=" << new_data.size(); + } + + // Forward to the appropriate callback (either global or per-SSRC sink). + rtc::scoped_refptr<webrtc::TransformedFrameCallback> cb; + { + webrtc::MutexLock lock(&mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + cb = it->second; + } else { + cb = callback_; + } + } + + if (cb) { + cb->OnTransformedFrame(std::move(frame)); + } else { + RTC_LOG(LS_WARNING) + << "UserTimestampTransformer::TransformSend has no callback" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; + } +} + +void UserTimestampTransformer::TransformReceive( + std::unique_ptr<webrtc::TransformableFrameInterface> frame) { + uint32_t ssrc = frame->GetSsrc(); + uint32_t rtp_timestamp = frame->GetTimestamp(); + auto data = frame->GetData(); + std::vector<uint8_t> stripped_data; + + auto user_ts = ExtractTimestampTrailer(data, stripped_data); + + if (user_ts.has_value()) { + // Compute latency from embedded user timestamp to RTP receive + // time (both in microseconds since Unix epoch), so we can compare + // this with the latency logged after decode on the subscriber side. 
+ int64_t now_us = + std::chrono::duration_cast<std::chrono::microseconds>( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + double recv_latency_ms = + static_cast<double>(now_us - user_ts.value()) / 1000.0; + + // Store the extracted timestamp for later retrieval + last_user_timestamp_.store(user_ts.value()); + has_last_user_timestamp_.store(true); + + // Update frame with stripped data + frame->SetData(rtc::ArrayView<const uint8_t>(stripped_data)); + + RTC_LOG(LS_INFO) << "UserTimestampTransformer" + << " user_ts=" << user_ts.value() + << " rtp_ts=" << frame->GetTimestamp() + << " recv_latency=" << recv_latency_ms << " ms"; + } else { + // Log the last few bytes so we can see whether the magic marker is present. + size_t log_len = std::min<size_t>(data.size(), 16); + std::string tail_bytes; + tail_bytes.reserve(log_len * 4); + for (size_t i = data.size() - log_len; i < data.size(); ++i) { + char buf[8]; + std::snprintf(buf, sizeof(buf), "%u", + static_cast<unsigned>(data[i])); + if (!tail_bytes.empty()) { + tail_bytes.append(","); + } + tail_bytes.append(buf); + } + + RTC_LOG(LS_INFO) + << "UserTimestampTransformer::TransformReceive no trailer found" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp + << " size=" << data.size() + << " tail_bytes_dec=[" << tail_bytes << "]"; + } + + // Forward to the appropriate callback (either global or per-SSRC sink). 
+ rtc::scoped_refptr<webrtc::TransformedFrameCallback> cb; + { + webrtc::MutexLock lock(&mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + cb = it->second; + } else { + cb = callback_; + } + } + + if (cb) { + cb->OnTransformedFrame(std::move(frame)); + } else { + RTC_LOG(LS_WARNING) + << "UserTimestampTransformer::TransformReceive has no callback" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; + } +} + +std::vector<uint8_t> UserTimestampTransformer::AppendTimestampTrailer( + rtc::ArrayView<const uint8_t> data, + int64_t user_timestamp_us) { + std::vector<uint8_t> result; + result.reserve(data.size() + kUserTimestampTrailerSize); + + // Copy original data + result.insert(result.end(), data.begin(), data.end()); + + // Append timestamp (big-endian) + for (int i = 7; i >= 0; --i) { + result.push_back( + static_cast<uint8_t>((user_timestamp_us >> (i * 8)) & 0xFF)); + } + + // Append magic bytes + result.insert(result.end(), std::begin(kUserTimestampMagic), + std::end(kUserTimestampMagic)); + + return result; +} + +std::optional<int64_t> UserTimestampTransformer::ExtractTimestampTrailer( + rtc::ArrayView<const uint8_t> data, + std::vector<uint8_t>& out_data) { + if (data.size() < kUserTimestampTrailerSize) { + RTC_LOG(LS_INFO) + << "UserTimestampTransformer::ExtractTimestampTrailer data too small" + << " size=" << data.size() + << " required=" << kUserTimestampTrailerSize; + out_data.assign(data.begin(), data.end()); + return std::nullopt; + } + + // Check for magic bytes at the end + const uint8_t* magic_start = data.data() + data.size() - 4; + if (std::memcmp(magic_start, kUserTimestampMagic, 4) != 0) { + RTC_LOG(LS_INFO) + << "UserTimestampTransformer::ExtractTimestampTrailer magic mismatch" + << " size=" << data.size() + << " magic_bytes_dec=[" + << static_cast<unsigned>(magic_start[0]) << "," + << static_cast<unsigned>(magic_start[1]) << "," + << static_cast<unsigned>(magic_start[2]) << "," + << static_cast<unsigned>(magic_start[3]) << "]"; + out_data.assign(data.begin(), data.end()); + return std::nullopt; + } + + // Extract timestamp (big-endian) + const uint8_t* 
ts_start = + data.data() + data.size() - kUserTimestampTrailerSize; + int64_t timestamp = 0; + for (int i = 0; i < 8; ++i) { + timestamp = (timestamp << 8) | ts_start[i]; + } + + // Copy data without trailer + out_data.assign(data.begin(), + data.end() - kUserTimestampTrailerSize); + + return timestamp; +} + +void UserTimestampTransformer::RegisterTransformedFrameCallback( + rtc::scoped_refptr<webrtc::TransformedFrameCallback> callback) { + webrtc::MutexLock lock(&mutex_); + callback_ = callback; +} + +void UserTimestampTransformer::RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr<webrtc::TransformedFrameCallback> callback, + uint32_t ssrc) { + webrtc::MutexLock lock(&mutex_); + sink_callbacks_[ssrc] = callback; +} + +void UserTimestampTransformer::UnregisterTransformedFrameCallback() { + webrtc::MutexLock lock(&mutex_); + callback_ = nullptr; +} + +void UserTimestampTransformer::UnregisterTransformedFrameSinkCallback( + uint32_t ssrc) { + webrtc::MutexLock lock(&mutex_); + sink_callbacks_.erase(ssrc); +} + +void UserTimestampTransformer::set_enabled(bool enabled) { + enabled_.store(enabled); +} + +bool UserTimestampTransformer::enabled() const { + return enabled_.load(); +} + +std::optional<int64_t> UserTimestampTransformer::last_user_timestamp() + const { + if (!has_last_user_timestamp_.load()) { + return std::nullopt; + } + return last_user_timestamp_.load(); +} + +// UserTimestampHandler implementation + +UserTimestampHandler::UserTimestampHandler( + std::shared_ptr<RtcRuntime> rtc_runtime, + std::shared_ptr<UserTimestampStore> store, + rtc::scoped_refptr<webrtc::RtpSenderInterface> sender) + : rtc_runtime_(rtc_runtime), sender_(sender) { + transformer_ = rtc::make_ref_counted<UserTimestampTransformer>( + UserTimestampTransformer::Direction::kSend, store); + sender->SetEncoderToPacketizerFrameTransformer(transformer_); +} + +UserTimestampHandler::UserTimestampHandler( + std::shared_ptr<RtcRuntime> rtc_runtime, + std::shared_ptr<UserTimestampStore> store, + rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) + : rtc_runtime_(rtc_runtime), receiver_(receiver) { + transformer_ = rtc::make_ref_counted<UserTimestampTransformer>( + UserTimestampTransformer::Direction::kReceive, store); + 
receiver->SetDepacketizerToDecoderFrameTransformer(transformer_); +} + +void UserTimestampHandler::set_enabled(bool enabled) const { + transformer_->set_enabled(enabled); +} + +bool UserTimestampHandler::enabled() const { + return transformer_->enabled(); +} + +int64_t UserTimestampHandler::last_user_timestamp() const { + auto ts = transformer_->last_user_timestamp(); + return ts.value_or(-1); +} + +bool UserTimestampHandler::has_user_timestamp() const { + return transformer_->last_user_timestamp().has_value(); +} + +rtc::scoped_refptr<UserTimestampTransformer> UserTimestampHandler::transformer() const { + return transformer_; +} + +// Factory functions + +std::shared_ptr<UserTimestampStore> new_user_timestamp_store() { + return std::make_shared<UserTimestampStore>(); +} + +std::shared_ptr<UserTimestampHandler> new_user_timestamp_sender( + std::shared_ptr<PeerConnectionFactory> peer_factory, + std::shared_ptr<UserTimestampStore> store, + std::shared_ptr<RtpSender> sender) { + return std::make_shared<UserTimestampHandler>( + peer_factory->rtc_runtime(), store, sender->rtc_sender()); +} + +std::shared_ptr<UserTimestampHandler> new_user_timestamp_receiver( + std::shared_ptr<PeerConnectionFactory> peer_factory, + std::shared_ptr<UserTimestampStore> store, + std::shared_ptr<RtpReceiver> receiver) { + return std::make_shared<UserTimestampHandler>( + peer_factory->rtc_runtime(), store, receiver->rtc_receiver()); +} + +} // namespace livekit_ffi diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs new file mode 100644 index 000000000..728429dec --- /dev/null +++ b/webrtc-sys/src/user_timestamp.rs @@ -0,0 +1,90 @@ +// Copyright 2025 LiveKit, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::impl_thread_safety; + +#[cxx::bridge(namespace = "livekit_ffi")] +pub mod ffi { + unsafe extern "C++" { + include!("livekit/user_timestamp.h"); + include!("livekit/rtp_sender.h"); + include!("livekit/rtp_receiver.h"); + include!("livekit/peer_connection_factory.h"); + + type RtpSender = crate::rtp_sender::ffi::RtpSender; + type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver; + type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory; + + /// Thread-safe store for mapping capture timestamps to user timestamps. + pub type UserTimestampStore; + + /// Push a user timestamp to the queue. + fn store(self: &UserTimestampStore, capture_timestamp_us: i64, user_timestamp_us: i64); + + /// Lookup a user timestamp by capture timestamp (for debugging). + /// Returns -1 if not found. + fn lookup(self: &UserTimestampStore, capture_timestamp_us: i64) -> i64; + + /// Pop the oldest user timestamp from the queue. + /// Returns -1 if empty. + fn pop(self: &UserTimestampStore) -> i64; + + /// Peek at the oldest user timestamp without removing it. + /// Returns -1 if empty. + fn peek(self: &UserTimestampStore) -> i64; + + /// Clear old entries. + fn prune(self: &UserTimestampStore, max_age_us: i64); + + /// Create a new user timestamp store. + fn new_user_timestamp_store() -> SharedPtr<UserTimestampStore>; + } + + unsafe extern "C++" { + include!("livekit/user_timestamp.h"); + + /// Handler for user timestamp embedding/extraction on RTP streams. + pub type UserTimestampHandler; + + /// Enable/disable timestamp embedding. + fn set_enabled(self: &UserTimestampHandler, enabled: bool); + + /// Check if timestamp embedding is enabled. + fn enabled(self: &UserTimestampHandler) -> bool; + + /// Get the last received user timestamp (receiver side only). + /// Returns -1 if no timestamp has been received yet. + fn last_user_timestamp(self: &UserTimestampHandler) -> i64; + + /// Check if a user timestamp has been received. 
+ fn has_user_timestamp(self: &UserTimestampHandler) -> bool; + + /// Create a new user timestamp handler for a sender. + fn new_user_timestamp_sender( + peer_factory: SharedPtr<PeerConnectionFactory>, + store: SharedPtr<UserTimestampStore>, + sender: SharedPtr<RtpSender>, + ) -> SharedPtr<UserTimestampHandler>; + + /// Create a new user timestamp handler for a receiver. + fn new_user_timestamp_receiver( + peer_factory: SharedPtr<PeerConnectionFactory>, + store: SharedPtr<UserTimestampStore>, + receiver: SharedPtr<RtpReceiver>, + ) -> SharedPtr<UserTimestampHandler>; + } +} + +impl_thread_safety!(ffi::UserTimestampStore, Send + Sync); +impl_thread_safety!(ffi::UserTimestampHandler, Send + Sync); From 4d4027def2e8f79d7afdb09605155c330035610c Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 00:06:53 -0800 Subject: [PATCH 02/52] attach timestamp in example --- examples/local_video/src/publisher.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 4ddc6f218..92408a573 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -18,7 +18,7 @@ use std::sync::{ atomic::{AtomicBool, Ordering}, Arc, }; -use std::time::{Duration, Instant}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use yuv_sys; #[derive(Parser, Debug)] @@ -75,6 +75,10 @@ struct Args { /// Use H.265/HEVC encoding if supported (falls back to H.264 on failure) #[arg(long, default_value_t = false)] h265: bool, + + /// Attach the current system time (microseconds since UNIX epoch) as the user timestamp on each frame + #[arg(long, default_value_t = false)] + user_timestamp: bool, } fn list_cameras() -> Result<()> { @@ -394,6 +398,12 @@ async fn run(args: Args, ctrl_c_received: Arc<AtomicBool>) -> Result<()> { // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; + // Optionally attach wall-clock time as user timestamp + frame.user_timestamp_us = if args.user_timestamp { + 
Some(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64) + } else { + None + }; rtc_source.capture_frame(&frame); let t4 = Instant::now(); From b0d3c04ad8290c204e1d1c0adcee4d0a1cdb2359 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 17:03:26 -0800 Subject: [PATCH 03/52] fix the latency calcs --- examples/local_video/src/subscriber.rs | 106 ++++++++++++++++++- libwebrtc/src/native/user_timestamp.rs | 15 +++ libwebrtc/src/native/video_source.rs | 32 +++++- libwebrtc/src/native/video_stream.rs | 27 ++++- libwebrtc/src/video_source.rs | 10 ++ libwebrtc/src/video_stream.rs | 14 ++- livekit/src/room/e2ee/manager.rs | 9 ++ livekit/src/room/track/remote_video_track.rs | 9 ++ webrtc-sys/include/livekit/user_timestamp.h | 20 ++++ webrtc-sys/src/user_timestamp.cpp | 59 +++++++++-- webrtc-sys/src/user_timestamp.rs | 5 + 11 files changed, 287 insertions(+), 19 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index e8abdbca4..9aef96d95 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -17,7 +17,7 @@ use std::{ atomic::{AtomicBool, Ordering}, Arc, }, - time::{Duration, Instant}, + time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; async fn wait_for_shutdown(flag: Arc) { @@ -52,6 +52,10 @@ struct Args { /// Only subscribe to video from this participant identity #[arg(long)] participant: Option, + + /// Display user timestamp, current timestamp, and latency overlay + #[arg(long)] + display_timestamp: bool, } struct SharedYuv { @@ -66,6 +70,8 @@ struct SharedYuv { codec: String, fps: f32, dirty: bool, + /// Last received user timestamp in microseconds, if any. + user_timestamp_us: Option, } #[derive(Clone)] @@ -114,6 +120,49 @@ fn infer_quality_from_dims( } } +/// Returns the current wall-clock time as microseconds since Unix epoch. 
+fn current_timestamp_us() -> i64 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_micros() as i64 +} + +/// Format a user timestamp (microseconds since Unix epoch) as +/// `yyyy-mm-dd hh:mm:ss.ssss`. +fn format_timestamp_us(ts_us: i64) -> String { + // Convert to calendar components without chrono — pure arithmetic. + let secs = (ts_us / 1_000_000) as u64; + let sub_sec_us = (ts_us % 1_000_000) as u32; + + // Days / time-of-day decomposition + let days = (secs / 86400) as i64; + let day_secs = (secs % 86400) as u32; + let hour = day_secs / 3600; + let minute = (day_secs % 3600) / 60; + let second = day_secs % 60; + let frac = sub_sec_us / 100; // 4-digit tenths of microseconds → 0..9999 + + // Convert days since epoch to y/m/d (civil calendar, proleptic Gregorian). + // Algorithm from Howard Hinnant (http://howardhinnant.github.io/date_algorithms.html) + let z = days + 719468; + let era = (if z >= 0 { z } else { z - 146096 }) / 146097; + let doe = (z - era * 146097) as u32; // day of era [0, 146096] + let yoe = + (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year of era [0, 399] + let y = yoe as i64 + era * 400; + let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // day of year [0, 365] + let mp = (5 * doy + 2) / 153; // [0, 11] + let day = doy - (153 * mp + 2) / 5 + 1; // [1, 31] + let month = if mp < 10 { mp + 3 } else { mp - 9 }; // [1, 12] + let year = if month <= 2 { y + 1 } else { y }; + + format!( + "{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:04}", + year, month, day, hour, minute, second, frac + ) +} + fn simulcast_state_full_dims(state: &Arc>) -> Option<(u32, u32)> { let sc = state.lock(); sc.full_dims @@ -239,6 +288,10 @@ async fn handle_track_subscribed( let simulcast2 = simulcast.clone(); std::thread::spawn(move || { let mut sink = NativeVideoStream::new(video_track.rtc_track()); + // Wire up user timestamp extraction so frame.user_timestamp_us is populated + if let Some(handler) = 
video_track.user_timestamp_handler() { + sink.set_user_timestamp_handler(handler); + } let mut frames: u64 = 0; let mut last_log = Instant::now(); let mut logged_first = false; @@ -304,6 +357,7 @@ async fn handle_track_subscribed( std::mem::swap(&mut s.u, &mut u_buf); std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; + s.user_timestamp_us = frame.user_timestamp_us; // Update smoothed FPS (~500ms window) fps_window_frames += 1; @@ -410,6 +464,11 @@ struct VideoApp { simulcast: Arc>, ctrl_c_received: Arc, locked_aspect: Option, + display_timestamp: bool, + /// Cached latency string, updated at ~5 Hz so it's readable. + latency_display: String, + /// Last time the latency display was refreshed. + latency_last_update: Instant, } impl eframe::App for VideoApp { @@ -481,6 +540,47 @@ impl eframe::App for VideoApp { }); }); + // Timestamp overlay: user timestamp, current timestamp, and latency + if self.display_timestamp { + egui::Area::new("timestamp_hud".into()) + .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 40.0)) + .interactable(false) + .show(ctx, |ui| { + let s = self.shared.lock(); + if let Some(user_ts) = s.user_timestamp_us { + let now_us = current_timestamp_us(); + + // Update the cached latency display at ~5 Hz so it's readable. 
+ if self.latency_last_update.elapsed() >= Duration::from_millis(200) { + let delta_ms = (now_us - user_ts) as f64 / 1000.0; + self.latency_display = format!("{:.1}ms", delta_ms); + self.latency_last_update = Instant::now(); + } + + let lines = format!( + "Publish: {}\nSubscribe: {}\nLatency: {}", + format_timestamp_us(user_ts), + format_timestamp_us(now_us), + self.latency_display, + ); + egui::Frame::NONE + .fill(egui::Color32::from_black_alpha(140)) + .corner_radius(egui::CornerRadius::same(4)) + .inner_margin(egui::Margin::same(6)) + .show(ui, |ui| { + ui.add( + egui::Label::new( + egui::RichText::new(lines) + .color(egui::Color32::WHITE) + .monospace(), + ) + .extend(), + ); + }); + } + }); + } + // Simulcast layer controls: bottom-left overlay egui::Area::new("simulcast_controls".into()) .anchor(egui::Align2::LEFT_BOTTOM, egui::vec2(10.0, -10.0)) @@ -577,6 +677,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { codec: String::new(), fps: 0.0, dirty: false, + user_timestamp_us: None, })); // Subscribe to room events: on first video track, start sink task @@ -628,6 +729,9 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { simulcast, ctrl_c_received: ctrl_c_received.clone(), locked_aspect: None, + display_timestamp: args.display_timestamp, + latency_display: String::new(), + latency_last_update: Instant::now(), }; let native_options = eframe::NativeOptions::default(); eframe::run_native( diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index 1d1a3ede4..0a25418a8 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -149,6 +149,21 @@ impl UserTimestampHandler { } } + /// Pop the next received user timestamp from the receive queue. + /// Returns None if the queue is empty. 
+ /// + /// Each decoded frame should call this once to get its matching + /// timestamp, maintaining 1:1 correspondence between received + /// encoded frames and decoded video frames. + pub fn pop_user_timestamp(&self) -> Option { + let ts = self.sys_handle.pop_user_timestamp(); + if ts >= 0 { + Some(ts) + } else { + None + } + } + pub(crate) fn sys_handle(&self) -> SharedPtr { self.sys_handle.clone() } diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 91a59df99..437b4699b 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -23,6 +23,7 @@ use parking_lot::Mutex; use webrtc_sys::{video_frame as vf_sys, video_frame::ffi::VideoRotation, video_track as vt_sys}; use crate::{ + native::user_timestamp::UserTimestampStore, video_frame::{I420Buffer, VideoBuffer, VideoFrame}, video_source::VideoResolution, }; @@ -47,6 +48,7 @@ pub struct NativeVideoSource { struct VideoSourceInner { captured_frames: usize, + user_timestamp_store: Option, } impl NativeVideoSource { @@ -55,7 +57,10 @@ impl NativeVideoSource { sys_handle: vt_sys::ffi::new_video_track_source(&vt_sys::ffi::VideoResolution::from( resolution.clone(), )), - inner: Arc::new(Mutex::new(VideoSourceInner { captured_frames: 0 })), + inner: Arc::new(Mutex::new(VideoSourceInner { + captured_frames: 0, + user_timestamp_store: None, + })), }; livekit_runtime::spawn({ @@ -99,17 +104,36 @@ impl NativeVideoSource { builder.pin_mut().set_rotation(frame.rotation.into()); builder.pin_mut().set_video_frame_buffer(frame.buffer.as_ref().sys_handle()); - if frame.timestamp_us == 0 { + let capture_ts = if frame.timestamp_us == 0 { // If the timestamp is set to 0, default to now let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); - builder.pin_mut().set_timestamp_us(now.as_micros() as i64); + now.as_micros() as i64 } else { - builder.pin_mut().set_timestamp_us(frame.timestamp_us); + frame.timestamp_us + }; + 
builder.pin_mut().set_timestamp_us(capture_ts); + + // If a user timestamp is provided and a store is available, record + // the mapping so the UserTimestampTransformer can embed it into the + // encoded RTP frame. + if let Some(user_ts) = frame.user_timestamp_us { + if let Some(store) = &inner.user_timestamp_store { + store.store(capture_ts, user_ts); + } } self.sys_handle.on_captured_frame(&builder.pin_mut().build()); } + /// Set the user timestamp store used by this source. + /// + /// When set, any frame captured with a `user_timestamp_us` value will + /// automatically have its timestamp pushed into the store so the + /// `UserTimestampTransformer` can embed it into the encoded frame. + pub fn set_user_timestamp_store(&self, store: UserTimestampStore) { + self.inner.lock().user_timestamp_store = Some(store); + } + pub fn video_resolution(&self) -> VideoResolution { self.sys_handle.video_resolution().into() } diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 55a143308..f9b8af5a6 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -25,12 +25,14 @@ use webrtc_sys::video_track as sys_vt; use super::video_frame::new_video_frame_buffer; use crate::{ + native::user_timestamp::UserTimestampHandler, video_frame::{BoxVideoFrame, VideoFrame}, video_track::RtcVideoTrack, }; pub struct NativeVideoStream { native_sink: SharedPtr, + observer: Arc, video_track: RtcVideoTrack, frame_rx: mpsc::UnboundedReceiver, } @@ -38,7 +40,10 @@ pub struct NativeVideoStream { impl NativeVideoStream { pub fn new(video_track: RtcVideoTrack) -> Self { let (frame_tx, frame_rx) = mpsc::unbounded_channel(); - let observer = Arc::new(VideoTrackObserver { frame_tx }); + let observer = Arc::new(VideoTrackObserver { + frame_tx, + user_timestamp_handler: parking_lot::Mutex::new(None), + }); let native_sink = sys_vt::ffi::new_native_video_sink(Box::new( sys_vt::VideoSinkWrapper::new(observer.clone()), )); @@ -46,7 
+51,16 @@ impl NativeVideoStream { let video = unsafe { sys_vt::ffi::media_to_video(video_track.sys_handle()) }; video.add_sink(&native_sink); - Self { native_sink, video_track, frame_rx } + Self { native_sink, observer, video_track, frame_rx } + } + + /// Set the user timestamp handler for this stream. + /// + /// When set, each frame produced by this stream will have its + /// `user_timestamp_us` field populated from the handler's last + /// received timestamp (if available). + pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + *self.observer.user_timestamp_handler.lock() = Some(handler); } pub fn track(&self) -> RtcVideoTrack { @@ -77,14 +91,21 @@ impl Stream for NativeVideoStream { struct VideoTrackObserver { frame_tx: mpsc::UnboundedSender, + user_timestamp_handler: parking_lot::Mutex>, } impl sys_vt::VideoSink for VideoTrackObserver { fn on_frame(&self, frame: UniquePtr) { + let user_timestamp_us = self + .user_timestamp_handler + .lock() + .as_ref() + .and_then(|h| h.pop_user_timestamp()); + let _ = self.frame_tx.send(VideoFrame { rotation: frame.rotation().into(), timestamp_us: frame.timestamp_us(), - user_timestamp_us: None, + user_timestamp_us, buffer: new_video_frame_buffer(unsafe { frame.video_frame_buffer() }), }); } diff --git a/libwebrtc/src/video_source.rs b/libwebrtc/src/video_source.rs index d73491748..109385866 100644 --- a/libwebrtc/src/video_source.rs +++ b/libwebrtc/src/video_source.rs @@ -50,6 +50,7 @@ pub mod native { use std::fmt::{Debug, Formatter}; use super::*; + use crate::native::user_timestamp::UserTimestampStore; use crate::video_frame::{VideoBuffer, VideoFrame}; #[derive(Clone)] @@ -78,6 +79,15 @@ pub mod native { self.handle.capture_frame(frame) } + /// Set the user timestamp store used by this source. 
+ /// + /// When set, any frame captured with a `user_timestamp_us` value will + /// automatically have its timestamp pushed into the store so the + /// `UserTimestampTransformer` can embed it into the encoded frame. + pub fn set_user_timestamp_store(&self, store: UserTimestampStore) { + self.handle.set_user_timestamp_store(store) + } + pub fn video_resolution(&self) -> VideoResolution { self.handle.video_resolution() } diff --git a/libwebrtc/src/video_stream.rs b/libwebrtc/src/video_stream.rs index fbdd01abe..92962b507 100644 --- a/libwebrtc/src/video_stream.rs +++ b/libwebrtc/src/video_stream.rs @@ -26,7 +26,10 @@ pub mod native { }; use super::stream_imp; - use crate::{video_frame::BoxVideoFrame, video_track::RtcVideoTrack}; + use crate::{ + native::user_timestamp::UserTimestampHandler, video_frame::BoxVideoFrame, + video_track::RtcVideoTrack, + }; use livekit_runtime::Stream; pub struct NativeVideoStream { @@ -44,6 +47,15 @@ pub mod native { Self { handle: stream_imp::NativeVideoStream::new(video_track) } } + /// Set the user timestamp handler for this stream. + /// + /// When set, each frame produced by this stream will have its + /// `user_timestamp_us` field populated from the handler's last + /// received timestamp (if available). 
+ pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.handle.set_user_timestamp_handler(handler); + } + pub fn track(&self) -> RtcVideoTrack { self.handle.track() } diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 06e86eb2f..46b787e00 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -23,6 +23,7 @@ use libwebrtc::{ }, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, + video_source::RtcVideoSource, }; use parking_lot::Mutex; @@ -144,6 +145,14 @@ impl E2eeManager { if let LocalTrack::Video(video_track) = &track { let store = UserTimestampStore::new(); video_track.set_user_timestamp_store(store.clone()); + + // Also set the store on the video source so that capture_frame() + // can automatically push user timestamps into it. + #[cfg(not(target_arch = "wasm32"))] + if let RtcVideoSource::Native(ref native_source) = video_track.rtc_source() { + native_source.set_user_timestamp_store(store.clone()); + } + let handler = user_timestamp::create_sender_handler( LkRuntime::instance().pc_factory(), &store, diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs index bab9837fa..c0be0ca16 100644 --- a/livekit/src/room/track/remote_video_track.rs +++ b/livekit/src/room/track/remote_video_track.rs @@ -107,6 +107,15 @@ impl RemoteVideoTrack { .and_then(|h| h.last_user_timestamp()) } + /// Returns a clone of the user timestamp handler, if one has been set. + /// + /// This can be passed to a `NativeVideoStream` via + /// `set_user_timestamp_handler` so that each frame's + /// `user_timestamp_us` field is populated automatically. + pub fn user_timestamp_handler(&self) -> Option { + self.user_timestamp_handler.lock().clone() + } + /// Internal: set the handler that extracts user timestamps for this track. 
pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { self.user_timestamp_handler.lock().replace(handler); diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index a9055be2c..592f48142 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -110,6 +110,11 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { /// Get the last received user timestamp (receiver side only) std::optional last_user_timestamp() const; + /// Pop the next received user timestamp from the receive queue. + /// Returns the user timestamp if available, nullopt otherwise. + /// Each decoded frame should call this once to get its matching timestamp. + std::optional pop_user_timestamp(); + private: void TransformSend( std::unique_ptr frame); @@ -137,6 +142,17 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { sink_callbacks_; mutable std::atomic last_user_timestamp_{0}; mutable std::atomic has_last_user_timestamp_{false}; + + // Send-side: cache the last user timestamp we embedded, so that + // simulcast layers encoding the same frame get the same value. + mutable webrtc::Mutex send_cache_mutex_; + mutable int64_t last_sent_user_timestamp_{0}; + + // Receive-side FIFO queue: one entry per received encoded frame, popped + // one-to-one as decoded frames are delivered to the video sink. + mutable webrtc::Mutex recv_queue_mutex_; + mutable std::deque recv_queue_; + static constexpr size_t kMaxRecvQueueEntries = 300; }; /// Wrapper class for Rust FFI that manages user timestamp transformers. @@ -162,6 +178,10 @@ class UserTimestampHandler { /// Returns -1 if no timestamp has been received yet int64_t last_user_timestamp() const; + /// Pop the next received user timestamp from the receive queue. + /// Returns -1 if the queue is empty. 
+ int64_t pop_user_timestamp() const; + /// Check if a user timestamp has been received bool has_user_timestamp() const; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index bcce9f0a0..c3b625287 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -161,19 +161,34 @@ void UserTimestampTransformer::TransformSend( auto data = frame->GetData(); - // Pop the next user timestamp from the queue. - // This assumes frames are captured and encoded in order (FIFO). + // Drain all queued user timestamps and use the most recent one. + // The encoder may skip captured frames (rate control, CPU), so the + // store can accumulate faster than TransformSend is called. Draining + // ensures we always embed the timestamp closest to the frame actually + // being encoded. With simulcast, multiple layers encode the same + // captured frame — subsequent layers will find the queue empty and + // fall back to the cached value. int64_t ts_to_embed = 0; if (store_) { - int64_t popped_ts = store_->pop(); - if (popped_ts >= 0) { - ts_to_embed = popped_ts; + int64_t newest_ts = -1; + // Drain: pop all available entries, keep the last one + for (;;) { + int64_t popped_ts = store_->pop(); + if (popped_ts < 0) break; + newest_ts = popped_ts; + } + + if (newest_ts >= 0) { + ts_to_embed = newest_ts; + // Cache for simulcast layers that encode the same frame + webrtc::MutexLock lock(&send_cache_mutex_); + last_sent_user_timestamp_ = newest_ts; } else { - RTC_LOG(LS_INFO) << "UserTimestampTransformer::TransformSend no user " - "timestamp available" - << " rtp_ts=" << rtp_timestamp - << " orig_size=" << data.size(); + // Queue was empty — use cached value (simulcast or encoder + // encoding the same frame as a previous layer) + webrtc::MutexLock lock(&send_cache_mutex_); + ts_to_embed = last_sent_user_timestamp_; } } @@ -234,10 +249,19 @@ void UserTimestampTransformer::TransformReceive( double recv_latency_ms = static_cast(now_us - 
user_ts.value()) / 1000.0; - // Store the extracted timestamp for later retrieval + // Store the extracted timestamp for later retrieval (legacy atomic) last_user_timestamp_.store(user_ts.value()); has_last_user_timestamp_.store(true); + // Also push to the receive queue so decoded frames can pop 1:1 + { + webrtc::MutexLock lock(&recv_queue_mutex_); + if (recv_queue_.size() >= kMaxRecvQueueEntries) { + recv_queue_.pop_front(); + } + recv_queue_.push_back(user_ts.value()); + } + // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); @@ -392,6 +416,16 @@ std::optional UserTimestampTransformer::last_user_timestamp() return last_user_timestamp_.load(); } +std::optional UserTimestampTransformer::pop_user_timestamp() { + webrtc::MutexLock lock(&recv_queue_mutex_); + if (recv_queue_.empty()) { + return std::nullopt; + } + int64_t ts = recv_queue_.front(); + recv_queue_.pop_front(); + return ts; +} + // UserTimestampHandler implementation UserTimestampHandler::UserTimestampHandler( @@ -427,6 +461,11 @@ int64_t UserTimestampHandler::last_user_timestamp() const { return ts.value_or(-1); } +int64_t UserTimestampHandler::pop_user_timestamp() const { + auto ts = transformer_->pop_user_timestamp(); + return ts.value_or(-1); +} + bool UserTimestampHandler::has_user_timestamp() const { return transformer_->last_user_timestamp().has_value(); } diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs index 728429dec..40bd5f03f 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/user_timestamp.rs @@ -67,6 +67,11 @@ pub mod ffi { /// Returns -1 if no timestamp has been received yet. fn last_user_timestamp(self: &UserTimestampHandler) -> i64; + /// Pop the next received user timestamp from the receive queue. + /// Returns -1 if the queue is empty. + /// Each decoded frame should call this once to get its matching timestamp. 
+ fn pop_user_timestamp(self: &UserTimestampHandler) -> i64; + /// Check if a user timestamp has been received. fn has_user_timestamp(self: &UserTimestampHandler) -> bool; From 90083c346d4dcb6e89bd033cf3e09511161baf2e Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 17:08:52 -0800 Subject: [PATCH 04/52] cleanup display overlays --- examples/local_video/src/subscriber.rs | 33 +++++++++++++++++--------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 9aef96d95..604dae17d 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -469,6 +469,9 @@ struct VideoApp { latency_display: String, /// Last time the latency display was refreshed. latency_last_update: Instant, + /// Cached user timestamp so the overlay doesn't flicker when the shared + /// state momentarily has `None` between frame swaps. + cached_user_timestamp_us: Option, } impl eframe::App for VideoApp { @@ -518,9 +521,9 @@ impl eframe::App for VideoApp { ); }); - // Resolution/FPS overlay: top-left + // Resolution/FPS overlay: top-right egui::Area::new("video_hud".into()) - .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 10.0)) + .anchor(egui::Align2::RIGHT_TOP, egui::vec2(-10.0, 10.0)) .interactable(false) .show(ctx, |ui| { let s = self.shared.lock(); @@ -540,14 +543,21 @@ impl eframe::App for VideoApp { }); }); - // Timestamp overlay: user timestamp, current timestamp, and latency + // Timestamp overlay: user timestamp, current timestamp, and latency. + // We cache the last-known user timestamp so the overlay doesn't flicker + // when the shared state momentarily has `None` between frame swaps. 
if self.display_timestamp { - egui::Area::new("timestamp_hud".into()) - .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 40.0)) - .interactable(false) - .show(ctx, |ui| { - let s = self.shared.lock(); - if let Some(user_ts) = s.user_timestamp_us { + { + let s = self.shared.lock(); + if let Some(ts) = s.user_timestamp_us { + self.cached_user_timestamp_us = Some(ts); + } + } + if let Some(user_ts) = self.cached_user_timestamp_us { + egui::Area::new("timestamp_hud".into()) + .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 10.0)) + .interactable(false) + .show(ctx, |ui| { let now_us = current_timestamp_us(); // Update the cached latency display at ~5 Hz so it's readable. @@ -577,8 +587,8 @@ impl eframe::App for VideoApp { .extend(), ); }); - } - }); + }); + } } // Simulcast layer controls: bottom-left overlay @@ -732,6 +742,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { display_timestamp: args.display_timestamp, latency_display: String::new(), latency_last_update: Instant::now(), + cached_user_timestamp_us: None, }; let native_options = eframe::NativeOptions::default(); eframe::run_native( From f84fae32f7c4d91bd21940ab9b304e496b35495d Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 17:11:35 -0800 Subject: [PATCH 05/52] rename flag --- examples/local_video/src/publisher.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 92408a573..637396bc3 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -78,7 +78,7 @@ struct Args { /// Attach the current system time (microseconds since UNIX epoch) as the user timestamp on each frame #[arg(long, default_value_t = false)] - user_timestamp: bool, + attach_timestamp: bool, } fn list_cameras() -> Result<()> { @@ -399,7 +399,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Update RTP timestamp (monotonic, microseconds since 
start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; // Optionally attach wall-clock time as user timestamp - frame.user_timestamp_us = if args.user_timestamp { + frame.user_timestamp_us = if args.attach_timestamp { Some(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64) } else { None From 03083a0fc5e83b295616baf4a1c6c65798f8e27c Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 18:04:06 -0800 Subject: [PATCH 06/52] add e2ee options --- examples/local_video/src/publisher.rs | 25 +++++++++++++++++++++++++ examples/local_video/src/subscriber.rs | 25 +++++++++++++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 637396bc3..2805b6928 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -1,5 +1,6 @@ use anyhow::Result; use clap::Parser; +use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType}; use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding}; use livekit::prelude::*; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; @@ -79,6 +80,10 @@ struct Args { /// Attach the current system time (microseconds since UNIX epoch) as the user timestamp on each frame #[arg(long, default_value_t = false)] attach_timestamp: bool, + + /// Shared encryption key for E2EE (enables AES-GCM end-to-end encryption when set) + #[arg(long)] + e2ee_key: Option, } fn list_cameras() -> Result<()> { @@ -141,10 +146,30 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); let mut room_options = RoomOptions::default(); room_options.auto_subscribe = true; + + // Configure E2EE if an encryption key is provided + if let Some(ref e2ee_key) = args.e2ee_key { + let key_provider = KeyProvider::with_shared_key( + KeyProviderOptions::default(), + e2ee_key.as_bytes().to_vec(), 
+ ); + room_options.encryption = Some(E2eeOptions { + encryption_type: EncryptionType::Gcm, + key_provider, + }); + info!("E2EE enabled with AES-GCM encryption"); + } + let (room, _) = Room::connect(&url, &token, room_options).await?; let room = std::sync::Arc::new(room); info!("Connected: {} - {}", room.name(), room.sid().await); + // Enable E2EE after connection + if args.e2ee_key.is_some() { + room.e2ee_manager().set_enabled(true); + info!("End-to-end encryption activated"); + } + // Log room events { let room_clone = room.clone(); diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 604dae17d..ea798ff3c 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -5,6 +5,7 @@ use eframe::wgpu::{self, util::DeviceExt}; use egui_wgpu as egui_wgpu_backend; use egui_wgpu_backend::CallbackTrait; use futures::StreamExt; +use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType}; use livekit::prelude::*; use livekit::webrtc::video_stream::native::NativeVideoStream; use livekit_api::access_token; @@ -56,6 +57,10 @@ struct Args { /// Display user timestamp, current timestamp, and latency overlay #[arg(long)] display_timestamp: bool, + + /// Shared encryption key for E2EE (enables AES-GCM end-to-end encryption when set; must match publisher's key) + #[arg(long)] + e2ee_key: Option, } struct SharedYuv { @@ -670,10 +675,30 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); let mut room_options = RoomOptions::default(); room_options.auto_subscribe = true; + + // Configure E2EE if an encryption key is provided + if let Some(ref e2ee_key) = args.e2ee_key { + let key_provider = KeyProvider::with_shared_key( + KeyProviderOptions::default(), + e2ee_key.as_bytes().to_vec(), + ); + room_options.encryption = Some(E2eeOptions { + encryption_type: EncryptionType::Gcm, + key_provider, + }); + 
info!("E2EE enabled with AES-GCM encryption"); + } + let (room, _) = Room::connect(&url, &token, room_options).await?; let room = Arc::new(room); info!("Connected: {} - {}", room.name(), room.sid().await); + // Enable E2EE after connection + if args.e2ee_key.is_some() { + room.e2ee_manager().set_enabled(true); + info!("End-to-end encryption activated"); + } + // Shared YUV buffer for UI/GPU let shared = Arc::new(Mutex::new(SharedYuv { width: 0, From dd6217747c0ce621bc388e29533898c96d99c1fa Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Feb 2026 21:47:12 -0800 Subject: [PATCH 07/52] display simulcast state --- examples/local_video/src/subscriber.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index ea798ff3c..8d7152e45 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -535,7 +535,23 @@ impl eframe::App for VideoApp { if s.width == 0 || s.height == 0 || s.fps <= 0.0 || s.codec.is_empty() { return; } - let text = format!("{} {}x{} {:.1}fps", s.codec, s.width, s.height, s.fps); + let mut text = format!("{} {}x{} {:.1}fps", s.codec, s.width, s.height, s.fps); + let sc = self.simulcast.lock(); + if sc.available { + let layer = sc + .active_quality + .map(|q| match q { + livekit::track::VideoQuality::Low => "Low", + livekit::track::VideoQuality::Medium => "Medium", + livekit::track::VideoQuality::High => "High", + _ => "Unknown", + }) + .unwrap_or("?"); + text.push_str(&format!("\nSimulcast: {}", layer)); + } else { + text.push_str("\nSimulcast: off"); + } + drop(sc); egui::Frame::NONE .fill(egui::Color32::from_black_alpha(140)) .corner_radius(egui::CornerRadius::same(4)) From d1b0d5c3be5de31ba19f7d15ab41cb4dbe4dfb29 Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 13 Feb 2026 10:23:53 -0800 Subject: [PATCH 08/52] use a mapping of rtp timestamp to user timestamp on subscriber side too --- 
libwebrtc/src/native/user_timestamp.rs | 18 +++++----- libwebrtc/src/native/video_stream.rs | 3 +- libwebrtc/src/video_frame.rs | 3 -- webrtc-sys/include/livekit/user_timestamp.h | 27 ++++++++------- webrtc-sys/src/user_timestamp.cpp | 37 ++++++++++++++------- webrtc-sys/src/user_timestamp.rs | 7 ++-- 6 files changed, 55 insertions(+), 40 deletions(-) diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index 0a25418a8..bb75d5be8 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -149,14 +149,14 @@ impl UserTimestampHandler { } } - /// Pop the next received user timestamp from the receive queue. - /// Returns None if the queue is empty. + /// Lookup the user timestamp for a given RTP timestamp (receiver side). + /// Returns None if no timestamp was found for this RTP timestamp. + /// The entry is removed from the map after a successful lookup. /// - /// Each decoded frame should call this once to get its matching - /// timestamp, maintaining 1:1 correspondence between received - /// encoded frames and decoded video frames. - pub fn pop_user_timestamp(&self) -> Option { - let ts = self.sys_handle.pop_user_timestamp(); + /// Use the RTP timestamp from the decoded video frame to correlate + /// it with the user timestamp that was embedded in the encoded frame. + pub fn lookup_user_timestamp(&self, rtp_timestamp: u32) -> Option { + let ts = self.sys_handle.lookup_user_timestamp(rtp_timestamp); if ts >= 0 { Some(ts) } else { @@ -190,7 +190,9 @@ pub fn create_sender_handler( /// Create a receiver-side user timestamp handler. /// /// This handler will extract user timestamps from received frames -/// and make them available via `last_user_timestamp()`. +/// and store them in a map keyed by RTP timestamp. Use +/// `lookup_user_timestamp(rtp_timestamp)` to retrieve the user +/// timestamp for a specific decoded frame. 
pub fn create_receiver_handler( peer_factory: &PeerConnectionFactory, store: &UserTimestampStore, diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index f9b8af5a6..17eafba40 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -96,11 +96,12 @@ struct VideoTrackObserver { impl sys_vt::VideoSink for VideoTrackObserver { fn on_frame(&self, frame: UniquePtr) { + let rtp_timestamp = frame.timestamp(); let user_timestamp_us = self .user_timestamp_handler .lock() .as_ref() - .and_then(|h| h.pop_user_timestamp()); + .and_then(|h| h.lookup_user_timestamp(rtp_timestamp)); let _ = self.frame_tx.send(VideoFrame { rotation: frame.rotation().into(), diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index d251433ae..c56317eed 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -60,9 +60,6 @@ where pub rotation: VideoRotation, pub timestamp_us: i64, // When the frame was captured in microseconds /// Optional user timestamp in microseconds, if available. - /// This is typically a hardware or device timestamp supplied by the - /// application that can be propagated end-to-end through the media - /// pipeline. pub user_timestamp_us: Option, pub buffer: T, } diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index 592f48142..ee5b1509e 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -110,10 +110,10 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { /// Get the last received user timestamp (receiver side only) std::optional last_user_timestamp() const; - /// Pop the next received user timestamp from the receive queue. - /// Returns the user timestamp if available, nullopt otherwise. - /// Each decoded frame should call this once to get its matching timestamp. 
- std::optional pop_user_timestamp(); + /// Lookup the user timestamp associated with a given RTP timestamp. + /// Returns the user timestamp if found, nullopt otherwise. + /// The entry is removed from the map after lookup. + std::optional lookup_user_timestamp(uint32_t rtp_timestamp); private: void TransformSend( @@ -148,11 +148,14 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable webrtc::Mutex send_cache_mutex_; mutable int64_t last_sent_user_timestamp_{0}; - // Receive-side FIFO queue: one entry per received encoded frame, popped - // one-to-one as decoded frames are delivered to the video sink. - mutable webrtc::Mutex recv_queue_mutex_; - mutable std::deque recv_queue_; - static constexpr size_t kMaxRecvQueueEntries = 300; + // Receive-side map: RTP timestamp -> user timestamp. + // Keyed by RTP timestamp so decoded frames can look up their user + // timestamp regardless of frame drops or reordering. + mutable webrtc::Mutex recv_map_mutex_; + mutable std::unordered_map recv_map_; + // Track insertion order for pruning old entries. + mutable std::deque recv_map_order_; + static constexpr size_t kMaxRecvMapEntries = 300; }; /// Wrapper class for Rust FFI that manages user timestamp transformers. @@ -178,9 +181,9 @@ class UserTimestampHandler { /// Returns -1 if no timestamp has been received yet int64_t last_user_timestamp() const; - /// Pop the next received user timestamp from the receive queue. - /// Returns -1 if the queue is empty. - int64_t pop_user_timestamp() const; + /// Lookup the user timestamp for a given RTP timestamp (receiver side). + /// Returns -1 if not found. 
+ int64_t lookup_user_timestamp(uint32_t rtp_timestamp) const; /// Check if a user timestamp has been received bool has_user_timestamp() const; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index c3b625287..6dfb92e09 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -253,13 +253,17 @@ void UserTimestampTransformer::TransformReceive( last_user_timestamp_.store(user_ts.value()); has_last_user_timestamp_.store(true); - // Also push to the receive queue so decoded frames can pop 1:1 + // Store in the receive map keyed by RTP timestamp so decoded frames + // can look up their user timestamp regardless of frame drops. { - webrtc::MutexLock lock(&recv_queue_mutex_); - if (recv_queue_.size() >= kMaxRecvQueueEntries) { - recv_queue_.pop_front(); + webrtc::MutexLock lock(&recv_map_mutex_); + // Evict oldest entry if at capacity + while (recv_map_.size() >= kMaxRecvMapEntries && !recv_map_order_.empty()) { + recv_map_.erase(recv_map_order_.front()); + recv_map_order_.pop_front(); } - recv_queue_.push_back(user_ts.value()); + recv_map_[rtp_timestamp] = user_ts.value(); + recv_map_order_.push_back(rtp_timestamp); } // Update frame with stripped data @@ -416,13 +420,22 @@ std::optional UserTimestampTransformer::last_user_timestamp() return last_user_timestamp_.load(); } -std::optional UserTimestampTransformer::pop_user_timestamp() { - webrtc::MutexLock lock(&recv_queue_mutex_); - if (recv_queue_.empty()) { +std::optional UserTimestampTransformer::lookup_user_timestamp( + uint32_t rtp_timestamp) { + webrtc::MutexLock lock(&recv_map_mutex_); + auto it = recv_map_.find(rtp_timestamp); + if (it == recv_map_.end()) { return std::nullopt; } - int64_t ts = recv_queue_.front(); - recv_queue_.pop_front(); + int64_t ts = it->second; + recv_map_.erase(it); + // Remove from insertion-order tracker (linear scan is fine for bounded size) + for (auto oit = recv_map_order_.begin(); oit != recv_map_order_.end(); ++oit) 
{ + if (*oit == rtp_timestamp) { + recv_map_order_.erase(oit); + break; + } + } return ts; } @@ -461,8 +474,8 @@ int64_t UserTimestampHandler::last_user_timestamp() const { return ts.value_or(-1); } -int64_t UserTimestampHandler::pop_user_timestamp() const { - auto ts = transformer_->pop_user_timestamp(); +int64_t UserTimestampHandler::lookup_user_timestamp(uint32_t rtp_timestamp) const { + auto ts = transformer_->lookup_user_timestamp(rtp_timestamp); return ts.value_or(-1); } diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs index 40bd5f03f..1ab774f93 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/user_timestamp.rs @@ -67,10 +67,9 @@ pub mod ffi { /// Returns -1 if no timestamp has been received yet. fn last_user_timestamp(self: &UserTimestampHandler) -> i64; - /// Pop the next received user timestamp from the receive queue. - /// Returns -1 if the queue is empty. - /// Each decoded frame should call this once to get its matching timestamp. - fn pop_user_timestamp(self: &UserTimestampHandler) -> i64; + /// Lookup the user timestamp for a given RTP timestamp (receiver side). + /// Returns -1 if not found. The entry is removed after lookup. + fn lookup_user_timestamp(self: &UserTimestampHandler, rtp_timestamp: u32) -> i64; /// Check if a user timestamp has been received. 
fn has_user_timestamp(self: &UserTimestampHandler) -> bool; From f68b83e8d4d11e615da2a0b17fb416846a2b5115 Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 13 Feb 2026 13:45:43 -0800 Subject: [PATCH 09/52] move the subscriber user timestamp handler to internal to clean up API --- examples/local_video/src/subscriber.rs | 6 ++--- libwebrtc/src/native/media_stream.rs | 2 +- libwebrtc/src/native/media_stream_track.rs | 2 +- .../src/native/peer_connection_factory.rs | 7 +++-- libwebrtc/src/native/video_stream.rs | 15 ++++++++--- libwebrtc/src/native/video_track.rs | 26 +++++++++++++++++++ libwebrtc/src/video_stream.rs | 9 +++++-- libwebrtc/src/video_track.rs | 19 ++++++++++++++ livekit/src/room/track/remote_video_track.rs | 20 ++++++-------- 9 files changed, 79 insertions(+), 27 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 8d7152e45..5c6886234 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -292,11 +292,9 @@ async fn handle_track_subscribed( } let simulcast2 = simulcast.clone(); std::thread::spawn(move || { + // The user timestamp handler is automatically wired from the RtcVideoTrack, + // so frame.user_timestamp_us is populated without manual setup. 
let mut sink = NativeVideoStream::new(video_track.rtc_track()); - // Wire up user timestamp extraction so frame.user_timestamp_us is populated - if let Some(handler) = video_track.user_timestamp_handler() { - sink.set_user_timestamp_handler(handler); - } let mut frames: u64 = 0; let mut last_log = Instant::now(); let mut logged_first = false; diff --git a/libwebrtc/src/native/media_stream.rs b/libwebrtc/src/native/media_stream.rs index 10180d79a..67b13ec57 100644 --- a/libwebrtc/src/native/media_stream.rs +++ b/libwebrtc/src/native/media_stream.rs @@ -43,7 +43,7 @@ impl MediaStream { self.sys_handle .get_video_tracks() .into_iter() - .map(|t| video_track::RtcVideoTrack { handle: RtcVideoTrack { sys_handle: t.ptr } }) + .map(|t| video_track::RtcVideoTrack { handle: RtcVideoTrack::new(t.ptr) }) .collect() } } diff --git a/libwebrtc/src/native/media_stream_track.rs b/libwebrtc/src/native/media_stream_track.rs index 36424986b..43165e1b2 100644 --- a/libwebrtc/src/native/media_stream_track.rs +++ b/libwebrtc/src/native/media_stream_track.rs @@ -44,7 +44,7 @@ pub fn new_media_stream_track( }) } else if sys_handle.kind() == MEDIA_TYPE_VIDEO { MediaStreamTrack::Video(video_track::RtcVideoTrack { - handle: RtcVideoTrack { sys_handle: unsafe { media_to_video(sys_handle) } }, + handle: RtcVideoTrack::new(unsafe { media_to_video(sys_handle) }), }) } else { panic!("unknown track kind") diff --git a/libwebrtc/src/native/peer_connection_factory.rs b/libwebrtc/src/native/peer_connection_factory.rs index e95577e79..4a3605b3b 100644 --- a/libwebrtc/src/native/peer_connection_factory.rs +++ b/libwebrtc/src/native/peer_connection_factory.rs @@ -82,11 +82,10 @@ impl PeerConnectionFactory { pub fn create_video_track(&self, label: &str, source: NativeVideoSource) -> RtcVideoTrack { RtcVideoTrack { - handle: imp_vt::RtcVideoTrack { - sys_handle: self - .sys_handle + handle: imp_vt::RtcVideoTrack::new( + self.sys_handle .create_video_track(label.to_string(), source.handle.sys_handle()), - 
}, + ), } } diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 17eafba40..c0e900280 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -40,9 +40,13 @@ pub struct NativeVideoStream { impl NativeVideoStream { pub fn new(video_track: RtcVideoTrack) -> Self { let (frame_tx, frame_rx) = mpsc::unbounded_channel(); + + // Auto-wire the user timestamp handler from the track if one is set. + let handler = video_track.handle.user_timestamp_handler(); + let observer = Arc::new(VideoTrackObserver { frame_tx, - user_timestamp_handler: parking_lot::Mutex::new(None), + user_timestamp_handler: parking_lot::Mutex::new(handler), }); let native_sink = sys_vt::ffi::new_native_video_sink(Box::new( sys_vt::VideoSinkWrapper::new(observer.clone()), @@ -57,8 +61,13 @@ impl NativeVideoStream { /// Set the user timestamp handler for this stream. /// /// When set, each frame produced by this stream will have its - /// `user_timestamp_us` field populated from the handler's last - /// received timestamp (if available). + /// `user_timestamp_us` field populated from the handler's receive + /// map (looked up by RTP timestamp). + /// + /// Note: If the handler was already set on the `RtcVideoTrack` before + /// creating this stream, it is automatically wired up. This method is + /// only needed if you want to override or set the handler after + /// construction. pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { *self.observer.user_timestamp_handler.lock() = Some(handler); } diff --git a/libwebrtc/src/native/video_track.rs b/libwebrtc/src/native/video_track.rs index efc3937df..4155a94b5 100644 --- a/libwebrtc/src/native/video_track.rs +++ b/libwebrtc/src/native/video_track.rs @@ -12,22 +12,48 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+use std::sync::Arc; + use cxx::SharedPtr; +use parking_lot::Mutex; use sys_vt::ffi::video_to_media; use webrtc_sys::video_track as sys_vt; use super::media_stream_track::impl_media_stream_track; +use super::user_timestamp::UserTimestampHandler; use crate::media_stream_track::RtcTrackState; #[derive(Clone)] pub struct RtcVideoTrack { pub(crate) sys_handle: SharedPtr, + user_timestamp_handler: Arc>>, } impl RtcVideoTrack { impl_media_stream_track!(video_to_media); + pub(crate) fn new(sys_handle: SharedPtr) -> Self { + Self { + sys_handle, + user_timestamp_handler: Arc::new(Mutex::new(None)), + } + } + pub fn sys_handle(&self) -> SharedPtr { video_to_media(self.sys_handle.clone()) } + + /// Set the user timestamp handler for this track. + /// + /// When set, any `NativeVideoStream` created from this track will + /// automatically use this handler to populate `user_timestamp_us` + /// on each decoded frame. + pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.user_timestamp_handler.lock().replace(handler); + } + + /// Get the user timestamp handler, if one has been set. + pub fn user_timestamp_handler(&self) -> Option { + self.user_timestamp_handler.lock().clone() + } } diff --git a/libwebrtc/src/video_stream.rs b/libwebrtc/src/video_stream.rs index 92962b507..2370b56f9 100644 --- a/libwebrtc/src/video_stream.rs +++ b/libwebrtc/src/video_stream.rs @@ -50,8 +50,13 @@ pub mod native { /// Set the user timestamp handler for this stream. /// /// When set, each frame produced by this stream will have its - /// `user_timestamp_us` field populated from the handler's last - /// received timestamp (if available). + /// `user_timestamp_us` field populated by looking up the user + /// timestamp for each frame's RTP timestamp. + /// + /// Note: If the handler was already set on the `RtcVideoTrack` + /// before creating this stream, it is automatically wired up. + /// This method is only needed to override or set the handler + /// after construction. 
pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { self.handle.set_user_timestamp_handler(handler); } diff --git a/libwebrtc/src/video_track.rs b/libwebrtc/src/video_track.rs index a223d4858..b8c00dc6a 100644 --- a/libwebrtc/src/video_track.rs +++ b/libwebrtc/src/video_track.rs @@ -19,6 +19,9 @@ use crate::{ media_stream_track::{media_stream_track, RtcTrackState}, }; +#[cfg(not(target_arch = "wasm32"))] +use crate::native::user_timestamp::UserTimestampHandler; + #[derive(Clone)] pub struct RtcVideoTrack { pub(crate) handle: imp_vt::RtcVideoTrack, @@ -26,6 +29,22 @@ pub struct RtcVideoTrack { impl RtcVideoTrack { media_stream_track!(); + + /// Set the user timestamp handler for this track. + /// + /// When set, any `NativeVideoStream` created from this track will + /// automatically use this handler to populate `user_timestamp_us` + /// on each decoded frame. + #[cfg(not(target_arch = "wasm32"))] + pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.handle.set_user_timestamp_handler(handler); + } + + /// Get the user timestamp handler, if one has been set. 
+ #[cfg(not(target_arch = "wasm32"))] + pub fn user_timestamp_handler(&self) -> Option { + self.handle.user_timestamp_handler() + } } impl Debug for RtcVideoTrack { diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs index c0be0ca16..a356ca496 100644 --- a/livekit/src/room/track/remote_video_track.rs +++ b/livekit/src/room/track/remote_video_track.rs @@ -16,7 +16,6 @@ use std::{fmt::Debug, sync::Arc}; use libwebrtc::{native::user_timestamp::UserTimestampHandler, prelude::*, stats::RtcStats}; use livekit_protocol as proto; -use parking_lot::Mutex; use super::{remote_track, TrackInner}; use crate::prelude::*; @@ -24,7 +23,6 @@ use crate::prelude::*; #[derive(Clone)] pub struct RemoteVideoTrack { inner: Arc, - user_timestamp_handler: Arc>>, } impl Debug for RemoteVideoTrack { @@ -46,7 +44,6 @@ impl RemoteVideoTrack { TrackKind::Video, MediaStreamTrack::Video(rtc_track), )), - user_timestamp_handler: Arc::new(Mutex::new(None)), } } @@ -101,24 +98,23 @@ impl RemoteVideoTrack { /// remote video track, if the user timestamp transformer is enabled and /// a timestamp has been received. pub fn last_user_timestamp(&self) -> Option { - self.user_timestamp_handler - .lock() - .as_ref() + self.rtc_track() + .user_timestamp_handler() .and_then(|h| h.last_user_timestamp()) } /// Returns a clone of the user timestamp handler, if one has been set. - /// - /// This can be passed to a `NativeVideoStream` via - /// `set_user_timestamp_handler` so that each frame's - /// `user_timestamp_us` field is populated automatically. pub fn user_timestamp_handler(&self) -> Option { - self.user_timestamp_handler.lock().clone() + self.rtc_track().user_timestamp_handler() } /// Internal: set the handler that extracts user timestamps for this track. + /// + /// The handler is stored on the underlying `RtcVideoTrack`, so any + /// `NativeVideoStream` created from this track will automatically + /// pick it up — no manual wiring required. 
pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.user_timestamp_handler.lock().replace(handler); + self.rtc_track().set_user_timestamp_handler(handler); } pub async fn get_stats(&self) -> RoomResult> { From 22f93d9abfb5b157466cd85c3f2442be811b85b0 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sun, 15 Feb 2026 19:26:58 -0800 Subject: [PATCH 10/52] remove UserTimestamp store in favor of simple map to track ts --- libwebrtc/src/native/user_timestamp.rs | 128 ++++---------- libwebrtc/src/native/video_source.rs | 46 ++--- libwebrtc/src/video_source.rs | 11 +- livekit/src/room/e2ee/manager.rs | 22 +-- livekit/src/room/track/local_video_track.rs | 22 +-- webrtc-sys/include/livekit/user_timestamp.h | 92 ++++------ webrtc-sys/include/livekit/video_track.h | 16 +- webrtc-sys/src/user_timestamp.cpp | 185 ++++++++------------ webrtc-sys/src/user_timestamp.rs | 39 +---- webrtc-sys/src/video_track.cpp | 32 +++- webrtc-sys/src/video_track.rs | 19 +- 11 files changed, 262 insertions(+), 350 deletions(-) diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index bb75d5be8..4e99c6b99 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -18,8 +18,13 @@ //! in encoded video frames as trailers. The timestamps are preserved //! through the WebRTC pipeline and can be extracted on the receiver side. //! -//! This works independently of e2ee encryption - timestamps can be -//! embedded even when encryption is disabled. +//! On the send side, user timestamps are stored in the handler's internal +//! map keyed by capture timestamp. When the encoder produces a frame, +//! the transformer looks up the user timestamp via the frame's CaptureTime(). +//! +//! On the receive side, extracted user timestamps are stored in an +//! internal map keyed by RTP timestamp. Decoded frames look up their +//! user timestamp via lookup_user_timestamp(rtp_timestamp). 
use cxx::SharedPtr; use webrtc_sys::user_timestamp::ffi as sys_ut; @@ -30,94 +35,15 @@ use crate::{ rtp_sender::RtpSender, }; -/// Thread-safe store for mapping capture timestamps to user timestamps. -/// -/// Used on the sender side to correlate video frame capture time with -/// the user timestamp that should be embedded in the encoded frame. -#[derive(Clone)] -pub struct UserTimestampStore { - sys_handle: SharedPtr, -} - -impl UserTimestampStore { - /// Create a new user timestamp store. - pub fn new() -> Self { - Self { - sys_handle: sys_ut::new_user_timestamp_store(), - } - } - - /// Store a user timestamp associated with a capture timestamp. - /// - /// Call this when capturing a video frame with a user timestamp. - /// The `capture_timestamp_us` should match the `timestamp_us` field - /// of the VideoFrame. - pub fn store(&self, capture_timestamp_us: i64, user_timestamp_us: i64) { - log::info!( - target: "user_timestamp", - "store: capture_ts_us={}, user_ts_us={}", - capture_timestamp_us, - user_timestamp_us - ); - self.sys_handle.store(capture_timestamp_us, user_timestamp_us); - } - - /// Lookup a user timestamp by capture timestamp (for debugging). - /// Returns None if not found. - pub fn lookup(&self, capture_timestamp_us: i64) -> Option { - let result = self.sys_handle.lookup(capture_timestamp_us); - if result < 0 { - None - } else { - Some(result) - } - } - - /// Pop the oldest user timestamp from the queue. - /// Returns None if the queue is empty. - pub fn pop(&self) -> Option { - let result = self.sys_handle.pop(); - if result < 0 { - None - } else { - Some(result) - } - } - - /// Peek at the oldest user timestamp without removing it. - /// Returns None if the queue is empty. - pub fn peek(&self) -> Option { - let result = self.sys_handle.peek(); - if result < 0 { - None - } else { - Some(result) - } - } - - /// Clear old entries (older than the given threshold in microseconds). 
- pub fn prune(&self, max_age_us: i64) { - self.sys_handle.prune(max_age_us); - } - - pub(crate) fn sys_handle(&self) -> SharedPtr { - self.sys_handle.clone() - } -} - -impl Default for UserTimestampStore { - fn default() -> Self { - Self::new() - } -} - /// Handler for user timestamp embedding/extraction on RTP streams. /// -/// For sender side: Embeds user timestamps as 12-byte trailers on -/// encoded frames before they are sent. +/// For sender side: Stores user timestamps keyed by capture timestamp +/// and embeds them as 12-byte trailers on encoded frames before they +/// are sent. Use `store_user_timestamp()` to associate a user timestamp +/// with a captured frame. /// /// For receiver side: Extracts user timestamps from received frames -/// and makes them available for retrieval. +/// and makes them available for retrieval via `lookup_user_timestamp()`. #[derive(Clone)] pub struct UserTimestampHandler { sys_handle: SharedPtr, @@ -164,6 +90,27 @@ impl UserTimestampHandler { } } + /// Store a user timestamp for a given capture timestamp (sender side). + /// + /// The `capture_timestamp_us` must be the TimestampAligner-adjusted + /// timestamp (as produced by `VideoTrackSource::on_captured_frame`), + /// NOT the original `timestamp_us` from the VideoFrame. The transformer + /// looks up the user timestamp by the frame's `CaptureTime()` which is + /// derived from the aligned value. + /// + /// In normal usage this is called automatically by the C++ layer — + /// callers should set `user_timestamp_us` on the `VideoFrame` and let + /// `capture_frame` / `on_captured_frame` handle the rest. 
+ pub fn store_user_timestamp(&self, capture_timestamp_us: i64, user_timestamp_us: i64) { + log::info!( + target: "user_timestamp", + "store: capture_ts_us={}, user_ts_us={}", + capture_timestamp_us, + user_timestamp_us + ); + self.sys_handle.store_user_timestamp(capture_timestamp_us, user_timestamp_us); + } + pub(crate) fn sys_handle(&self) -> SharedPtr { self.sys_handle.clone() } @@ -171,17 +118,16 @@ impl UserTimestampHandler { /// Create a sender-side user timestamp handler. /// -/// This handler will embed user timestamps from the provided store -/// into encoded frames before they are packetized and sent. +/// This handler will embed user timestamps into encoded frames before +/// they are packetized and sent. Use `store_user_timestamp()` to +/// associate a user timestamp with a captured frame's capture timestamp. pub fn create_sender_handler( peer_factory: &PeerConnectionFactory, - store: &UserTimestampStore, sender: &RtpSender, ) -> UserTimestampHandler { UserTimestampHandler { sys_handle: sys_ut::new_user_timestamp_sender( peer_factory.handle.sys_handle.clone(), - store.sys_handle(), sender.handle.sys_handle.clone(), ), } @@ -195,13 +141,11 @@ pub fn create_sender_handler( /// timestamp for a specific decoded frame. 
pub fn create_receiver_handler( peer_factory: &PeerConnectionFactory, - store: &UserTimestampStore, receiver: &RtpReceiver, ) -> UserTimestampHandler { UserTimestampHandler { sys_handle: sys_ut::new_user_timestamp_receiver( peer_factory.handle.sys_handle.clone(), - store.sys_handle(), receiver.handle.sys_handle.clone(), ), } diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 437b4699b..b5557e5ee 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -23,7 +23,7 @@ use parking_lot::Mutex; use webrtc_sys::{video_frame as vf_sys, video_frame::ffi::VideoRotation, video_track as vt_sys}; use crate::{ - native::user_timestamp::UserTimestampStore, + native::user_timestamp::UserTimestampHandler, video_frame::{I420Buffer, VideoBuffer, VideoFrame}, video_source::VideoResolution, }; @@ -48,7 +48,6 @@ pub struct NativeVideoSource { struct VideoSourceInner { captured_frames: usize, - user_timestamp_store: Option, } impl NativeVideoSource { @@ -59,7 +58,6 @@ impl NativeVideoSource { )), inner: Arc::new(Mutex::new(VideoSourceInner { captured_frames: 0, - user_timestamp_store: None, })), }; @@ -84,7 +82,11 @@ impl NativeVideoSource { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); builder.pin_mut().set_timestamp_us(now.as_micros() as i64); - source.sys_handle.on_captured_frame(&builder.pin_mut().build()); + source.sys_handle.on_captured_frame( + &builder.pin_mut().build(), + false, + 0, + ); } } }); @@ -97,15 +99,11 @@ impl NativeVideoSource { } pub fn capture_frame>(&self, frame: &VideoFrame) { - let mut inner = self.inner.lock(); - inner.captured_frames += 1; - let mut builder = vf_sys::ffi::new_video_frame_builder(); builder.pin_mut().set_rotation(frame.rotation.into()); builder.pin_mut().set_video_frame_buffer(frame.buffer.as_ref().sys_handle()); let capture_ts = if frame.timestamp_us == 0 { - // If the timestamp is set to 0, default to now let now = 
SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); now.as_micros() as i64 } else { @@ -113,25 +111,31 @@ impl NativeVideoSource { }; builder.pin_mut().set_timestamp_us(capture_ts); - // If a user timestamp is provided and a store is available, record - // the mapping so the UserTimestampTransformer can embed it into the - // encoded RTP frame. - if let Some(user_ts) = frame.user_timestamp_us { - if let Some(store) = &inner.user_timestamp_store { - store.store(capture_ts, user_ts); - } - } + // Pass the user timestamp to the C++ on_captured_frame so it can + // store the mapping keyed by the TimestampAligner-adjusted capture + // timestamp. This is the only correct key because the aligner runs + // inside on_captured_frame and replaces timestamp_us with a value + // derived from rtc::TimeMicros() (monotonic), which is what + // CaptureTime() returns in TransformSend. + let (has_user_ts, user_ts) = match frame.user_timestamp_us { + Some(ts) => (true, ts), + None => (false, 0), + }; + + self.inner.lock().captured_frames += 1; - self.sys_handle.on_captured_frame(&builder.pin_mut().build()); + self.sys_handle.on_captured_frame(&builder.pin_mut().build(), has_user_ts, user_ts); } - /// Set the user timestamp store used by this source. + /// Set the user timestamp handler used by this source. /// /// When set, any frame captured with a `user_timestamp_us` value will - /// automatically have its timestamp pushed into the store so the + /// automatically have its timestamp stored in the handler so the /// `UserTimestampTransformer` can embed it into the encoded frame. - pub fn set_user_timestamp_store(&self, store: UserTimestampStore) { - self.inner.lock().user_timestamp_store = Some(store); + /// The handler is set on the C++ VideoTrackSource so it has access to + /// the TimestampAligner-adjusted capture timestamp for correct keying. 
+ pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.sys_handle.set_user_timestamp_handler(handler.sys_handle()); } pub fn video_resolution(&self) -> VideoResolution { diff --git a/libwebrtc/src/video_source.rs b/libwebrtc/src/video_source.rs index 109385866..510e0cdc6 100644 --- a/libwebrtc/src/video_source.rs +++ b/libwebrtc/src/video_source.rs @@ -50,7 +50,7 @@ pub mod native { use std::fmt::{Debug, Formatter}; use super::*; - use crate::native::user_timestamp::UserTimestampStore; + use crate::native::user_timestamp::UserTimestampHandler; use crate::video_frame::{VideoBuffer, VideoFrame}; #[derive(Clone)] @@ -79,13 +79,14 @@ pub mod native { self.handle.capture_frame(frame) } - /// Set the user timestamp store used by this source. + /// Set the user timestamp handler used by this source. /// /// When set, any frame captured with a `user_timestamp_us` value will - /// automatically have its timestamp pushed into the store so the + /// automatically have its timestamp stored in the handler (keyed by + /// the TimestampAligner-adjusted capture timestamp) so the /// `UserTimestampTransformer` can embed it into the encoded frame. 
- pub fn set_user_timestamp_store(&self, store: UserTimestampStore) { - self.handle.set_user_timestamp_store(store) + pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + self.handle.set_user_timestamp_handler(handler) } pub fn video_resolution(&self) -> VideoResolution { diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 46b787e00..85f802f1a 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -19,7 +19,7 @@ use libwebrtc::{ frame_cryptor::{ DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor, }, - user_timestamp::{self, UserTimestampStore}, + user_timestamp, }, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, @@ -107,10 +107,8 @@ impl E2eeManager { // Always set up user timestamp extraction for remote video tracks. if let RemoteTrack::Video(video_track) = &track { - let store = UserTimestampStore::new(); let handler = user_timestamp::create_receiver_handler( LkRuntime::instance().pc_factory(), - &store, &receiver, ); video_track.set_user_timestamp_handler(handler.clone()); @@ -143,21 +141,19 @@ impl E2eeManager { // Always set up user timestamp embedding for local video tracks. if let LocalTrack::Video(video_track) = &track { - let store = UserTimestampStore::new(); - video_track.set_user_timestamp_store(store.clone()); + let handler = user_timestamp::create_sender_handler( + LkRuntime::instance().pc_factory(), + &sender, + ); + video_track.set_user_timestamp_handler(handler.clone()); - // Also set the store on the video source so that capture_frame() - // can automatically push user timestamps into it. + // Also set the handler on the video source so that capture_frame() + // can automatically store user timestamps into it. 
#[cfg(not(target_arch = "wasm32"))] if let RtcVideoSource::Native(ref native_source) = video_track.rtc_source() { - native_source.set_user_timestamp_store(store.clone()); + native_source.set_user_timestamp_handler(handler.clone()); } - let handler = user_timestamp::create_sender_handler( - LkRuntime::instance().pc_factory(), - &store, - &sender, - ); user_timestamp_handler = Some(handler); } diff --git a/livekit/src/room/track/local_video_track.rs b/livekit/src/room/track/local_video_track.rs index ae258e078..19cdefe64 100644 --- a/livekit/src/room/track/local_video_track.rs +++ b/livekit/src/room/track/local_video_track.rs @@ -14,7 +14,7 @@ use std::{fmt::Debug, sync::Arc}; -use libwebrtc::{native::user_timestamp::UserTimestampStore, prelude::*, stats::RtcStats}; +use libwebrtc::{native::user_timestamp::UserTimestampHandler, prelude::*, stats::RtcStats}; use livekit_protocol as proto; use parking_lot::Mutex; @@ -25,7 +25,7 @@ use crate::{prelude::*, rtc_engine::lk_runtime::LkRuntime}; pub struct LocalVideoTrack { inner: Arc, source: RtcVideoSource, - user_timestamp_store: Arc>>, + user_timestamp_handler: Arc>>, } impl Debug for LocalVideoTrack { @@ -48,7 +48,7 @@ impl LocalVideoTrack { MediaStreamTrack::Video(rtc_track), )), source, - user_timestamp_store: Arc::new(Mutex::new(None)), + user_timestamp_handler: Arc::new(Mutex::new(None)), } } @@ -126,16 +126,16 @@ impl LocalVideoTrack { self.source.clone() } - /// Returns the user timestamp store associated with this track, if any. - /// When present, callers can push per-frame user timestamps into the - /// outgoing queue which will then be embedded into encoded frames. - pub fn user_timestamp_store(&self) -> Option { - self.user_timestamp_store.lock().clone() + /// Returns the user timestamp handler associated with this track, if any. + /// When present on the sender side, callers can store per-frame user + /// timestamps which will be embedded into encoded frames. 
+ pub fn user_timestamp_handler(&self) -> Option { + self.user_timestamp_handler.lock().clone() } - /// Internal: set the user timestamp store used for this track. - pub(crate) fn set_user_timestamp_store(&self, store: UserTimestampStore) { - *self.user_timestamp_store.lock() = Some(store); + /// Internal: set the user timestamp handler used for this track. + pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { + *self.user_timestamp_handler.lock() = Some(handler); } pub async fn get_stats(&self) -> RoomResult> { diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index ee5b1509e..3e660925d 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -28,15 +28,21 @@ #include "absl/types/optional.h" #include "api/frame_transformer_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" -#include "livekit/peer_connection.h" -#include "livekit/peer_connection_factory.h" -#include "livekit/rtp_receiver.h" -#include "livekit/rtp_sender.h" #include "livekit/webrtc.h" #include "rtc_base/synchronization/mutex.h" #include "rust/cxx.h" +// Forward declarations to avoid circular includes +// (video_track.h -> user_timestamp.h -> peer_connection.h -> media_stream.h -> video_track.h) +namespace livekit_ffi { +class PeerConnectionFactory; +class RtpSender; +class RtpReceiver; +} // namespace livekit_ffi + namespace livekit_ffi { // Magic bytes to identify user timestamp trailers: "LKTS" (LiveKit TimeStamp) @@ -44,52 +50,21 @@ constexpr uint8_t kUserTimestampMagic[4] = {'L', 'K', 'T', 'S'}; constexpr size_t kUserTimestampTrailerSize = 12; // 8 bytes timestamp + 4 bytes magic -/// Thread-safe FIFO queue for user timestamps. -/// Used on the sender side to pass user timestamps to the transformer. -/// Works on the assumption that frames are captured and encoded in order. 
-class UserTimestampStore { - public: - UserTimestampStore() = default; - ~UserTimestampStore() = default; - - /// Push a user timestamp to the queue. - /// Call this when capturing a video frame with a user timestamp. - void store(int64_t capture_timestamp_us, - int64_t user_timestamp_us) const; - - /// Lookup a user timestamp by capture timestamp (for debugging). - /// Returns -1 if not found. - int64_t lookup(int64_t capture_timestamp_us) const; - - /// Pop the oldest entry if the queue has entries. - /// Returns the user timestamp, or -1 if empty. - int64_t pop() const; - - /// Peek at the oldest entry without removing it. - /// Returns the user timestamp, or -1 if empty. - int64_t peek() const; - - /// Clear old entries (older than the given threshold in microseconds). - void prune(int64_t max_age_us) const; - - private: - mutable webrtc::Mutex mutex_; - struct Entry { - int64_t capture_timestamp_us; - int64_t user_timestamp_us; - }; - mutable std::deque entries_; - static constexpr size_t kMaxEntries = 300; // ~10 seconds at 30fps -}; - /// Frame transformer that appends/extracts user timestamp trailers. /// This transformer can be used standalone or in conjunction with e2ee. +/// +/// On the send side, user timestamps are stored in an internal map keyed +/// by capture timestamp (microseconds). When TransformSend fires it +/// looks up the user timestamp via the frame's CaptureTime(). +/// +/// On the receive side, extracted user timestamps are stored in an +/// internal map keyed by RTP timestamp (uint32_t). Decoded frames can +/// look up their user timestamp via lookup_user_timestamp(rtp_ts). 
class UserTimestampTransformer : public webrtc::FrameTransformerInterface { public: enum class Direction { kSend, kReceive }; - UserTimestampTransformer(Direction direction, - std::shared_ptr store); + explicit UserTimestampTransformer(Direction direction); ~UserTimestampTransformer() override = default; // FrameTransformerInterface implementation @@ -115,6 +90,13 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { /// The entry is removed from the map after lookup. std::optional lookup_user_timestamp(uint32_t rtp_timestamp); + /// Store a user timestamp for a given capture timestamp (sender side). + /// Called from VideoTrackSource::on_captured_frame with the + /// TimestampAligner-adjusted timestamp, which matches CaptureTime() + /// in the encoder pipeline. + void store_user_timestamp(int64_t capture_timestamp_us, + int64_t user_timestamp_us); + private: void TransformSend( std::unique_ptr frame); @@ -133,7 +115,6 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { std::vector& out_data); const Direction direction_; - std::shared_ptr store_; std::atomic enabled_{true}; mutable webrtc::Mutex mutex_; rtc::scoped_refptr callback_; @@ -143,10 +124,13 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable std::atomic last_user_timestamp_{0}; mutable std::atomic has_last_user_timestamp_{false}; - // Send-side: cache the last user timestamp we embedded, so that - // simulcast layers encoding the same frame get the same value. - mutable webrtc::Mutex send_cache_mutex_; - mutable int64_t last_sent_user_timestamp_{0}; + // Send-side map: capture timestamp (us) -> user timestamp (us). + // Populated by store_user_timestamp(), consumed by TransformSend() + // via CaptureTime() lookup. 
+ mutable webrtc::Mutex send_map_mutex_; + mutable std::unordered_map send_map_; + mutable std::deque send_map_order_; + static constexpr size_t kMaxSendMapEntries = 300; // Receive-side map: RTP timestamp -> user timestamp. // Keyed by RTP timestamp so decoded frames can look up their user @@ -163,12 +147,10 @@ class UserTimestampHandler { public: UserTimestampHandler( std::shared_ptr rtc_runtime, - std::shared_ptr store, rtc::scoped_refptr sender); UserTimestampHandler( std::shared_ptr rtc_runtime, - std::shared_ptr store, rtc::scoped_refptr receiver); ~UserTimestampHandler() = default; @@ -188,6 +170,11 @@ class UserTimestampHandler { /// Check if a user timestamp has been received bool has_user_timestamp() const; + /// Store a user timestamp for a given capture timestamp (sender side). + /// Call this when capturing a video frame with a user timestamp. + void store_user_timestamp(int64_t capture_timestamp_us, + int64_t user_timestamp_us) const; + /// Access the underlying transformer for chaining. 
rtc::scoped_refptr transformer() const; @@ -199,16 +186,13 @@ class UserTimestampHandler { }; // Factory functions for Rust FFI -std::shared_ptr new_user_timestamp_store(); std::shared_ptr new_user_timestamp_sender( std::shared_ptr peer_factory, - std::shared_ptr store, std::shared_ptr sender); std::shared_ptr new_user_timestamp_receiver( std::shared_ptr peer_factory, - std::shared_ptr store, std::shared_ptr receiver); } // namespace livekit_ffi diff --git a/webrtc-sys/include/livekit/video_track.h b/webrtc-sys/include/livekit/video_track.h index 222458c75..1d9ce5e3d 100644 --- a/webrtc-sys/include/livekit/video_track.h +++ b/webrtc-sys/include/livekit/video_track.h @@ -33,6 +33,7 @@ namespace livekit_ffi { class VideoTrack; class NativeVideoSink; class VideoTrackSource; +class UserTimestampHandler; // forward declaration to avoid circular include } // namespace livekit_ffi #include "webrtc-sys/src/video_track.rs.h" @@ -98,12 +99,18 @@ class VideoTrackSource { SourceState state() const override; bool remote() const override; VideoResolution video_resolution() const; - bool on_captured_frame(const webrtc::VideoFrame& frame); + bool on_captured_frame(const webrtc::VideoFrame& frame, + bool has_user_timestamp, + int64_t user_timestamp_us); + + void set_user_timestamp_handler( + std::shared_ptr handler); private: mutable webrtc::Mutex mutex_; webrtc::TimestampAligner timestamp_aligner_; VideoResolution resolution_; + std::shared_ptr user_timestamp_handler_; }; public: @@ -111,9 +118,14 @@ class VideoTrackSource { VideoResolution video_resolution() const; - bool on_captured_frame(const std::unique_ptr& frame) + bool on_captured_frame(const std::unique_ptr& frame, + bool has_user_timestamp, + int64_t user_timestamp_us) const; // frames pushed from Rust (+interior mutability) + void set_user_timestamp_handler( + std::shared_ptr handler) const; + webrtc::scoped_refptr get() const; private: diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp 
index 6dfb92e09..cc60f3636 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -23,89 +23,17 @@ #include "api/make_ref_counted.h" #include "livekit/peer_connection_factory.h" +#include "livekit/rtp_receiver.h" +#include "livekit/rtp_sender.h" #include "rtc_base/logging.h" #include "webrtc-sys/src/user_timestamp.rs.h" namespace livekit_ffi { -// UserTimestampStore implementation - -void UserTimestampStore::store(int64_t capture_timestamp_us, - int64_t user_timestamp_us) const { - webrtc::MutexLock lock(&mutex_); - - // Remove old entries if we're at capacity - while (entries_.size() >= kMaxEntries) { - entries_.pop_front(); - } - - entries_.push_back({capture_timestamp_us, user_timestamp_us}); - RTC_LOG(LS_INFO) << "UserTimestampStore::store capture_ts_us=" - << capture_timestamp_us - << " user_ts_us=" << user_timestamp_us - << " size=" << entries_.size(); -} - -int64_t UserTimestampStore::lookup(int64_t capture_timestamp_us) const { - webrtc::MutexLock lock(&mutex_); - - // Search from the end (most recent) for better performance - for (auto it = entries_.rbegin(); it != entries_.rend(); ++it) { - if (it->capture_timestamp_us == capture_timestamp_us) { - return it->user_timestamp_us; - } - } - - return -1; -} - -int64_t UserTimestampStore::pop() const { - webrtc::MutexLock lock(&mutex_); - - if (entries_.empty()) { - RTC_LOG(LS_INFO) << "UserTimestampStore::pop empty"; - return -1; - } - - int64_t user_ts = entries_.front().user_timestamp_us; - entries_.pop_front(); - RTC_LOG(LS_INFO) << "UserTimestampStore::pop user_ts_us=" << user_ts - << " remaining=" << entries_.size(); - return user_ts; -} - -int64_t UserTimestampStore::peek() const { - webrtc::MutexLock lock(&mutex_); - - if (entries_.empty()) { - return -1; - } - - return entries_.front().user_timestamp_us; -} - -void UserTimestampStore::prune(int64_t max_age_us) const { - webrtc::MutexLock lock(&mutex_); - - if (entries_.empty()) { - return; - } - - int64_t 
newest_timestamp = entries_.back().capture_timestamp_us; - int64_t threshold = newest_timestamp - max_age_us; - - while (!entries_.empty() && - entries_.front().capture_timestamp_us < threshold) { - entries_.pop_front(); - } -} - // UserTimestampTransformer implementation -UserTimestampTransformer::UserTimestampTransformer( - Direction direction, - std::shared_ptr store) - : direction_(direction), store_(store) { +UserTimestampTransformer::UserTimestampTransformer(Direction direction) + : direction_(direction) { RTC_LOG(LS_INFO) << "UserTimestampTransformer created direction=" << (direction_ == Direction::kSend ? "send" : "recv"); } @@ -155,41 +83,32 @@ void UserTimestampTransformer::Transform( void UserTimestampTransformer::TransformSend( std::unique_ptr frame) { - // Get the RTP timestamp from the frame for logging uint32_t rtp_timestamp = frame->GetTimestamp(); uint32_t ssrc = frame->GetSsrc(); auto data = frame->GetData(); - // Drain all queued user timestamps and use the most recent one. - // The encoder may skip captured frames (rate control, CPU), so the - // store can accumulate faster than TransformSend is called. Draining - // ensures we always embed the timestamp closest to the frame actually - // being encoded. With simulcast, multiple layers encode the same - // captured frame — subsequent layers will find the queue empty and - // fall back to the cached value. + // Look up the user timestamp by the frame's capture time. + // CaptureTime() returns Timestamp::Millis(capture_time_ms_) where + // capture_time_ms_ = timestamp_us / 1000. So capture_time->us() + // has millisecond precision (bottom 3 digits always zero). + // store_user_timestamp() truncates its key the same way. 
int64_t ts_to_embed = 0; - - if (store_) { - int64_t newest_ts = -1; - // Drain: pop all available entries, keep the last one - for (;;) { - int64_t popped_ts = store_->pop(); - if (popped_ts < 0) break; - newest_ts = popped_ts; - } - - if (newest_ts >= 0) { - ts_to_embed = newest_ts; - // Cache for simulcast layers that encode the same frame - webrtc::MutexLock lock(&send_cache_mutex_); - last_sent_user_timestamp_ = newest_ts; - } else { - // Queue was empty — use cached value (simulcast or encoder - // encoding the same frame as a previous layer) - webrtc::MutexLock lock(&send_cache_mutex_); - ts_to_embed = last_sent_user_timestamp_; + auto capture_time = frame->CaptureTime(); + if (capture_time.has_value()) { + int64_t capture_us = capture_time->us(); + + webrtc::MutexLock lock(&send_map_mutex_); + auto it = send_map_.find(capture_us); + if (it != send_map_.end()) { + ts_to_embed = it->second; + // Don't erase — simulcast layers share the same capture time. + // Entries are pruned by capacity in store_user_timestamp(). } + } else { + RTC_LOG(LS_WARNING) + << "UserTimestampTransformer::TransformSend CaptureTime() not available" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } // Always append trailer when enabled (even if timestamp is 0, @@ -204,6 +123,8 @@ void UserTimestampTransformer::TransformSend( << " ts_us=" << ts_to_embed << " rtp_ts=" << rtp_timestamp << " ssrc=" << ssrc + << " capture_us=" + << (capture_time.has_value() ? capture_time->us() : -1) << " orig_size=" << data.size() << " new_size=" << new_data.size(); } @@ -439,25 +360,57 @@ std::optional UserTimestampTransformer::lookup_user_timestamp( return ts; } +void UserTimestampTransformer::store_user_timestamp( + int64_t capture_timestamp_us, + int64_t user_timestamp_us) { + // Truncate to millisecond precision to match what WebRTC stores + // internally. 
The encoder pipeline converts the VideoFrame's + // timestamp_us to capture_time_ms_ = timestamp_us / 1000, and + // CaptureTime() returns Timestamp::Millis(capture_time_ms_). + // When we call capture_time->us() in TransformSend we get a value + // with the bottom 3 digits zeroed, so we must store with the same + // truncation to ensure the lookup succeeds. + // + // The caller (VideoTrackSource::on_captured_frame) passes the + // TimestampAligner-adjusted timestamp here, which is the same + // value that becomes CaptureTime() in the encoder pipeline. + int64_t key = (capture_timestamp_us / 1000) * 1000; + + webrtc::MutexLock lock(&send_map_mutex_); + + // Evict oldest entries if at capacity + while (send_map_.size() >= kMaxSendMapEntries && !send_map_order_.empty()) { + send_map_.erase(send_map_order_.front()); + send_map_order_.pop_front(); + } + + send_map_[key] = user_timestamp_us; + send_map_order_.push_back(key); + + RTC_LOG(LS_INFO) << "UserTimestampTransformer::store_user_timestamp" + << " capture_ts_us=" << capture_timestamp_us + << " key_us=" << key + << " user_ts_us=" << user_timestamp_us + << " size=" << send_map_.size(); +} + // UserTimestampHandler implementation UserTimestampHandler::UserTimestampHandler( std::shared_ptr rtc_runtime, - std::shared_ptr store, rtc::scoped_refptr sender) : rtc_runtime_(rtc_runtime), sender_(sender) { transformer_ = rtc::make_ref_counted( - UserTimestampTransformer::Direction::kSend, store); + UserTimestampTransformer::Direction::kSend); sender->SetEncoderToPacketizerFrameTransformer(transformer_); } UserTimestampHandler::UserTimestampHandler( std::shared_ptr rtc_runtime, - std::shared_ptr store, rtc::scoped_refptr receiver) : rtc_runtime_(rtc_runtime), receiver_(receiver) { transformer_ = rtc::make_ref_counted( - UserTimestampTransformer::Direction::kReceive, store); + UserTimestampTransformer::Direction::kReceive); receiver->SetDepacketizerToDecoderFrameTransformer(transformer_); } @@ -483,30 +436,30 @@ bool 
UserTimestampHandler::has_user_timestamp() const { return transformer_->last_user_timestamp().has_value(); } +void UserTimestampHandler::store_user_timestamp( + int64_t capture_timestamp_us, + int64_t user_timestamp_us) const { + transformer_->store_user_timestamp(capture_timestamp_us, user_timestamp_us); +} + rtc::scoped_refptr UserTimestampHandler::transformer() const { return transformer_; } // Factory functions -std::shared_ptr new_user_timestamp_store() { - return std::make_shared(); -} - std::shared_ptr new_user_timestamp_sender( std::shared_ptr peer_factory, - std::shared_ptr store, std::shared_ptr sender) { return std::make_shared( - peer_factory->rtc_runtime(), store, sender->rtc_sender()); + peer_factory->rtc_runtime(), sender->rtc_sender()); } std::shared_ptr new_user_timestamp_receiver( std::shared_ptr peer_factory, - std::shared_ptr store, std::shared_ptr receiver) { return std::make_shared( - peer_factory->rtc_runtime(), store, receiver->rtc_receiver()); + peer_factory->rtc_runtime(), receiver->rtc_receiver()); } } // namespace livekit_ffi diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs index 1ab774f93..6ee0afe43 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/user_timestamp.rs @@ -26,34 +26,6 @@ pub mod ffi { type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver; type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory; - /// Thread-safe store for mapping capture timestamps to user timestamps. - pub type UserTimestampStore; - - /// Push a user timestamp to the queue. - fn store(self: &UserTimestampStore, capture_timestamp_us: i64, user_timestamp_us: i64); - - /// Lookup a user timestamp by capture timestamp (for debugging). - /// Returns -1 if not found. - fn lookup(self: &UserTimestampStore, capture_timestamp_us: i64) -> i64; - - /// Pop the oldest user timestamp from the queue. - /// Returns -1 if empty. 
- fn pop(self: &UserTimestampStore) -> i64; - - /// Peek at the oldest user timestamp without removing it. - /// Returns -1 if empty. - fn peek(self: &UserTimestampStore) -> i64; - - /// Clear old entries. - fn prune(self: &UserTimestampStore, max_age_us: i64); - - /// Create a new user timestamp store. - fn new_user_timestamp_store() -> SharedPtr; - } - - unsafe extern "C++" { - include!("livekit/user_timestamp.h"); - /// Handler for user timestamp embedding/extraction on RTP streams. pub type UserTimestampHandler; @@ -74,21 +46,26 @@ pub mod ffi { /// Check if a user timestamp has been received. fn has_user_timestamp(self: &UserTimestampHandler) -> bool; + /// Store a user timestamp for a given capture timestamp (sender side). + /// Call this when capturing a video frame with a user timestamp. + fn store_user_timestamp( + self: &UserTimestampHandler, + capture_timestamp_us: i64, + user_timestamp_us: i64, + ); + /// Create a new user timestamp handler for a sender. fn new_user_timestamp_sender( peer_factory: SharedPtr, - store: SharedPtr, sender: SharedPtr, ) -> SharedPtr; /// Create a new user timestamp handler for a receiver. 
fn new_user_timestamp_receiver( peer_factory: SharedPtr, - store: SharedPtr, receiver: SharedPtr, ) -> SharedPtr; } } -impl_thread_safety!(ffi::UserTimestampStore, Send + Sync); impl_thread_safety!(ffi::UserTimestampHandler, Send + Sync); diff --git a/webrtc-sys/src/video_track.cpp b/webrtc-sys/src/video_track.cpp index b9a9661d9..f03f9da70 100644 --- a/webrtc-sys/src/video_track.cpp +++ b/webrtc-sys/src/video_track.cpp @@ -26,6 +26,7 @@ #include "audio/remix_resample.h" #include "common_audio/include/audio_util.h" #include "livekit/media_stream.h" +#include "livekit/user_timestamp.h" #include "livekit/video_track.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" @@ -133,12 +134,23 @@ VideoResolution VideoTrackSource::InternalSource::video_resolution() const { } bool VideoTrackSource::InternalSource::on_captured_frame( - const webrtc::VideoFrame& frame) { + const webrtc::VideoFrame& frame, + bool has_user_timestamp, + int64_t user_timestamp_us) { webrtc::MutexLock lock(&mutex_); int64_t aligned_timestamp_us = timestamp_aligner_.TranslateTimestamp( frame.timestamp_us(), webrtc::TimeMicros()); + // If a user timestamp was provided on this frame and we have a handler, + // store the mapping keyed by the aligned timestamp. This is the value + // that CaptureTime() will return in TransformSend, so the lookup will + // succeed. 
+ if (has_user_timestamp && user_timestamp_handler_) { + user_timestamp_handler_->store_user_timestamp( + aligned_timestamp_us, user_timestamp_us); + } + webrtc::scoped_refptr buffer = frame.video_frame_buffer(); @@ -175,6 +187,12 @@ bool VideoTrackSource::InternalSource::on_captured_frame( return true; } +void VideoTrackSource::InternalSource::set_user_timestamp_handler( + std::shared_ptr handler) { + webrtc::MutexLock lock(&mutex_); + user_timestamp_handler_ = std::move(handler); +} + VideoTrackSource::VideoTrackSource(const VideoResolution& resolution) { source_ = webrtc::make_ref_counted(resolution); } @@ -184,9 +202,17 @@ VideoResolution VideoTrackSource::video_resolution() const { } bool VideoTrackSource::on_captured_frame( - const std::unique_ptr& frame) const { + const std::unique_ptr& frame, + bool has_user_timestamp, + int64_t user_timestamp_us) const { auto rtc_frame = frame->get(); - return source_->on_captured_frame(rtc_frame); + return source_->on_captured_frame(rtc_frame, has_user_timestamp, + user_timestamp_us); +} + +void VideoTrackSource::set_user_timestamp_handler( + std::shared_ptr handler) const { + source_->set_user_timestamp_handler(std::move(handler)); } webrtc::scoped_refptr VideoTrackSource::get() diff --git a/webrtc-sys/src/video_track.rs b/webrtc-sys/src/video_track.rs index 2e2681d8b..93a36f367 100644 --- a/webrtc-sys/src/video_track.rs +++ b/webrtc-sys/src/video_track.rs @@ -50,9 +50,15 @@ pub mod ffi { type MediaStreamTrack = crate::media_stream_track::ffi::MediaStreamTrack; } - unsafe extern "C++" { + extern "C++" { + include!("livekit/user_timestamp.h"); include!("livekit/video_track.h"); + type UserTimestampHandler = crate::user_timestamp::ffi::UserTimestampHandler; + } + + unsafe extern "C++" { + type VideoTrack; type NativeVideoSink; type VideoTrackSource; @@ -66,7 +72,16 @@ pub mod ffi { fn new_native_video_sink(observer: Box) -> SharedPtr; fn video_resolution(self: &VideoTrackSource) -> VideoResolution; - fn 
on_captured_frame(self: &VideoTrackSource, frame: &UniquePtr) -> bool; + fn on_captured_frame( + self: &VideoTrackSource, + frame: &UniquePtr, + has_user_timestamp: bool, + user_timestamp_us: i64, + ) -> bool; + fn set_user_timestamp_handler( + self: &VideoTrackSource, + handler: SharedPtr, + ); fn new_video_track_source(resolution: &VideoResolution) -> SharedPtr; fn video_to_media(track: SharedPtr) -> SharedPtr; unsafe fn media_to_video(track: SharedPtr) -> SharedPtr; From 32b72783d2fd1dd5c2a6881f6667e3bc1149584e Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 14:35:36 -0800 Subject: [PATCH 11/52] use chrono crate instead of custom format func --- examples/local_video/Cargo.toml | 1 + examples/local_video/src/subscriber.rs | 34 +++----------------------- 2 files changed, 5 insertions(+), 30 deletions(-) diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index ef97cd376..520f28b14 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -41,6 +41,7 @@ wgpu = "25.0" winit = { version = "0.30.11", features = ["android-native-activity"] } parking_lot = { workspace = true, features = ["deadlock_detection"] } anyhow = { workspace = true } +chrono = "0.4" bytemuck = { version = "1.16", features = ["derive"] } nokhwa = { version = "0.10", default-features = false, features = ["output-threaded"] } diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 5c6886234..14c3e9f75 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -1,4 +1,5 @@ use anyhow::Result; +use chrono::{DateTime, Utc}; use clap::Parser; use eframe::egui; use eframe::wgpu::{self, util::DeviceExt}; @@ -136,36 +137,9 @@ fn current_timestamp_us() -> i64 { /// Format a user timestamp (microseconds since Unix epoch) as /// `yyyy-mm-dd hh:mm:ss.ssss`. 
fn format_timestamp_us(ts_us: i64) -> String { - // Convert to calendar components without chrono — pure arithmetic. - let secs = (ts_us / 1_000_000) as u64; - let sub_sec_us = (ts_us % 1_000_000) as u32; - - // Days / time-of-day decomposition - let days = (secs / 86400) as i64; - let day_secs = (secs % 86400) as u32; - let hour = day_secs / 3600; - let minute = (day_secs % 3600) / 60; - let second = day_secs % 60; - let frac = sub_sec_us / 100; // 4-digit tenths of microseconds → 0..9999 - - // Convert days since epoch to y/m/d (civil calendar, proleptic Gregorian). - // Algorithm from Howard Hinnant (http://howardhinnant.github.io/date_algorithms.html) - let z = days + 719468; - let era = (if z >= 0 { z } else { z - 146096 }) / 146097; - let doe = (z - era * 146097) as u32; // day of era [0, 146096] - let yoe = - (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year of era [0, 399] - let y = yoe as i64 + era * 400; - let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); // day of year [0, 365] - let mp = (5 * doy + 2) / 153; // [0, 11] - let day = doy - (153 * mp + 2) / 5 + 1; // [1, 31] - let month = if mp < 10 { mp + 3 } else { mp - 9 }; // [1, 12] - let year = if month <= 2 { y + 1 } else { y }; - - format!( - "{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:04}", - year, month, day, hour, minute, second, frac - ) + DateTime::::from_timestamp_micros(ts_us) + .map(|dt| dt.format("%Y-%m-%d %H:%M:%S%.4f").to_string()) + .unwrap_or_else(|| format!("")) } fn simulcast_state_full_dims(state: &Arc>) -> Option<(u32, u32)> { From 63c68aa290566a2fd14f2bcd1af5d664cceabcdd Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 14:36:47 -0800 Subject: [PATCH 12/52] remove comment --- examples/local_video/src/subscriber.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 14c3e9f75..196dcec3e 100644 --- a/examples/local_video/src/subscriber.rs +++ 
b/examples/local_video/src/subscriber.rs @@ -266,8 +266,6 @@ async fn handle_track_subscribed( } let simulcast2 = simulcast.clone(); std::thread::spawn(move || { - // The user timestamp handler is automatically wired from the RtcVideoTrack, - // so frame.user_timestamp_us is populated without manual setup. let mut sink = NativeVideoStream::new(video_track.rtc_track()); let mut frames: u64 = 0; let mut last_log = Instant::now(); From a60654feee4c1a2e646977abedde2b39973cbaf0 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 15:59:50 -0800 Subject: [PATCH 13/52] change update to 2hz --- examples/local_video/src/subscriber.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 196dcec3e..ecef8e0f2 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -440,7 +440,7 @@ struct VideoApp { ctrl_c_received: Arc, locked_aspect: Option, display_timestamp: bool, - /// Cached latency string, updated at ~5 Hz so it's readable. + /// Cached latency string, updated at ~2 Hz so it's readable. latency_display: String, /// Last time the latency display was refreshed. latency_last_update: Instant, @@ -551,8 +551,8 @@ impl eframe::App for VideoApp { .show(ctx, |ui| { let now_us = current_timestamp_us(); - // Update the cached latency display at ~5 Hz so it's readable. - if self.latency_last_update.elapsed() >= Duration::from_millis(200) { + // Update the cached latency display at ~2 Hz so it's readable. 
+ if self.latency_last_update.elapsed() >= Duration::from_millis(500) { let delta_ms = (now_us - user_ts) as f64 / 1000.0; self.latency_display = format!("{:.1}ms", delta_ms); self.latency_last_update = Instant::now(); From 867e295e70ca64b214cb15090c34f9c5f907dc4f Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 17:00:41 -0800 Subject: [PATCH 14/52] remove last_user_timestamp --- Cargo.lock | 3 ++- examples/local_video/Cargo.toml | 2 +- examples/local_video/src/subscriber.rs | 4 ++-- libwebrtc/src/native/user_timestamp.rs | 15 -------------- livekit/src/room/track/remote_video_track.rs | 9 --------- webrtc-sys/include/livekit/user_timestamp.h | 13 ------------ webrtc-sys/src/user_timestamp.cpp | 21 -------------------- webrtc-sys/src/user_timestamp.rs | 7 ------- 8 files changed, 5 insertions(+), 69 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08f45ee73..102b671e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3497,10 +3497,11 @@ dependencies = [ [[package]] name = "local_video" -version = "0.1.0" +version = "0.2.0" dependencies = [ "anyhow", "bytemuck", + "chrono", "clap", "eframe", "egui", diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 520f28b14..432a9ad1f 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "local_video" -version = "0.1.0" +version = "0.2.0" edition.workspace = true publish = false diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index ecef8e0f2..cc4781b06 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -135,10 +135,10 @@ fn current_timestamp_us() -> i64 { } /// Format a user timestamp (microseconds since Unix epoch) as -/// `yyyy-mm-dd hh:mm:ss.ssss`. +/// `yyyy-mm-dd hh:mm:ss:xxx` where xxx is milliseconds. 
fn format_timestamp_us(ts_us: i64) -> String { DateTime::::from_timestamp_micros(ts_us) - .map(|dt| dt.format("%Y-%m-%d %H:%M:%S%.4f").to_string()) + .map(|dt| dt.format("%Y-%m-%d %H:%M:%S:").to_string() + &format!("{:03}", dt.timestamp_subsec_millis())) .unwrap_or_else(|| format!("")) } diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index 4e99c6b99..e22b64694 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -60,21 +60,6 @@ impl UserTimestampHandler { self.sys_handle.enabled() } - /// Get the last received user timestamp (receiver side only). - /// Returns None if no timestamp has been received yet. - pub fn last_user_timestamp(&self) -> Option { - if self.sys_handle.has_user_timestamp() { - let ts = self.sys_handle.last_user_timestamp(); - if ts >= 0 { - Some(ts) - } else { - None - } - } else { - None - } - } - /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns None if no timestamp was found for this RTP timestamp. /// The entry is removed from the map after a successful lookup. diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs index a356ca496..c282d428e 100644 --- a/livekit/src/room/track/remote_video_track.rs +++ b/livekit/src/room/track/remote_video_track.rs @@ -94,15 +94,6 @@ impl RemoteVideoTrack { true } - /// Returns the last parsed user timestamp (in microseconds) for this - /// remote video track, if the user timestamp transformer is enabled and - /// a timestamp has been received. - pub fn last_user_timestamp(&self) -> Option { - self.rtc_track() - .user_timestamp_handler() - .and_then(|h| h.last_user_timestamp()) - } - /// Returns a clone of the user timestamp handler, if one has been set. 
pub fn user_timestamp_handler(&self) -> Option { self.rtc_track().user_timestamp_handler() diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index 3e660925d..8f42f68b4 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -82,9 +82,6 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { void set_enabled(bool enabled); bool enabled() const; - /// Get the last received user timestamp (receiver side only) - std::optional last_user_timestamp() const; - /// Lookup the user timestamp associated with a given RTP timestamp. /// Returns the user timestamp if found, nullopt otherwise. /// The entry is removed from the map after lookup. @@ -121,9 +118,6 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { std::unordered_map> sink_callbacks_; - mutable std::atomic last_user_timestamp_{0}; - mutable std::atomic has_last_user_timestamp_{false}; - // Send-side map: capture timestamp (us) -> user timestamp (us). // Populated by store_user_timestamp(), consumed by TransformSend() // via CaptureTime() lookup. @@ -159,17 +153,10 @@ class UserTimestampHandler { void set_enabled(bool enabled) const; bool enabled() const; - /// Get the last received user timestamp (receiver side only) - /// Returns -1 if no timestamp has been received yet - int64_t last_user_timestamp() const; - /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. int64_t lookup_user_timestamp(uint32_t rtp_timestamp) const; - /// Check if a user timestamp has been received - bool has_user_timestamp() const; - /// Store a user timestamp for a given capture timestamp (sender side). /// Call this when capturing a video frame with a user timestamp. 
void store_user_timestamp(int64_t capture_timestamp_us, diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index cc60f3636..aacf84e91 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -170,10 +170,6 @@ void UserTimestampTransformer::TransformReceive( double recv_latency_ms = static_cast(now_us - user_ts.value()) / 1000.0; - // Store the extracted timestamp for later retrieval (legacy atomic) - last_user_timestamp_.store(user_ts.value()); - has_last_user_timestamp_.store(true); - // Store in the receive map keyed by RTP timestamp so decoded frames // can look up their user timestamp regardless of frame drops. { @@ -333,14 +329,6 @@ bool UserTimestampTransformer::enabled() const { return enabled_.load(); } -std::optional UserTimestampTransformer::last_user_timestamp() - const { - if (!has_last_user_timestamp_.load()) { - return std::nullopt; - } - return last_user_timestamp_.load(); -} - std::optional UserTimestampTransformer::lookup_user_timestamp( uint32_t rtp_timestamp) { webrtc::MutexLock lock(&recv_map_mutex_); @@ -422,20 +410,11 @@ bool UserTimestampHandler::enabled() const { return transformer_->enabled(); } -int64_t UserTimestampHandler::last_user_timestamp() const { - auto ts = transformer_->last_user_timestamp(); - return ts.value_or(-1); -} - int64_t UserTimestampHandler::lookup_user_timestamp(uint32_t rtp_timestamp) const { auto ts = transformer_->lookup_user_timestamp(rtp_timestamp); return ts.value_or(-1); } -bool UserTimestampHandler::has_user_timestamp() const { - return transformer_->last_user_timestamp().has_value(); -} - void UserTimestampHandler::store_user_timestamp( int64_t capture_timestamp_us, int64_t user_timestamp_us) const { diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs index 6ee0afe43..996c9b106 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/user_timestamp.rs @@ -35,17 +35,10 @@ pub mod ffi { /// Check if timestamp 
embedding is enabled. fn enabled(self: &UserTimestampHandler) -> bool; - /// Get the last received user timestamp (receiver side only). - /// Returns -1 if no timestamp has been received yet. - fn last_user_timestamp(self: &UserTimestampHandler) -> i64; - /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. The entry is removed after lookup. fn lookup_user_timestamp(self: &UserTimestampHandler, rtp_timestamp: u32) -> i64; - /// Check if a user timestamp has been received. - fn has_user_timestamp(self: &UserTimestampHandler) -> bool; - /// Store a user timestamp for a given capture timestamp (sender side). /// Call this when capturing a video frame with a user timestamp. fn store_user_timestamp( From 5a1ba5a41349b381dd9255b72b00f6346d40074f Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 17:05:56 -0800 Subject: [PATCH 15/52] update readme --- examples/local_video/README.md | 46 +++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/examples/local_video/README.md b/examples/local_video/README.md index e6b0b04b5..19984992e 100644 --- a/examples/local_video/README.md +++ b/examples/local_video/README.md @@ -30,6 +30,20 @@ Publisher usage: --url https://your.livekit.server \ --api-key YOUR_KEY \ --api-secret YOUR_SECRET + + # publish with a user timestamp attached to every frame + cargo run -p local_video -F desktop --bin publisher -- \ + --camera-index 0 \ + --room-name demo \ + --identity cam-1 \ + --attach-timestamp + + # publish with end-to-end encryption + cargo run -p local_video -F desktop --bin publisher -- \ + --camera-index 0 \ + --room-name demo \ + --identity cam-1 \ + --e2ee-key my-secret-key ``` List devices usage: @@ -41,6 +55,8 @@ Publisher flags (in addition to the common connection flags above): - `--h265`: Use H.265/HEVC encoding if supported (falls back to H.264 on failure). 
- `--simulcast`: Publish simulcast video (multiple layers when the resolution is large enough). - `--max-bitrate `: Max video bitrate for the main (highest) layer in bits per second (e.g. `1500000`). +- `--attach-timestamp`: Attach the current wall-clock time (microseconds since UNIX epoch) as the user timestamp on each published frame. The subscriber can display this to measure end-to-end latency. +- `--e2ee-key `: Enable end-to-end encryption with the given shared key. The subscriber must use the same key to decrypt. Subscriber usage: ``` @@ -55,13 +71,31 @@ Subscriber usage: --api-key YOUR_KEY \ --api-secret YOUR_SECRET - # subscribe to a specific participant's video only - cargo run -p local_video -F desktop --bin subscriber -- \ - --room-name demo \ - --identity viewer-1 \ - --participant alice + # subscribe to a specific participant's video only + cargo run -p local_video -F desktop --bin subscriber -- \ + --room-name demo \ + --identity viewer-1 \ + --participant alice + + # display timestamp overlay (requires publisher to use --attach-timestamp) + cargo run -p local_video -F desktop --bin subscriber -- \ + --room-name demo \ + --identity viewer-1 \ + --display-timestamp + + # subscribe with end-to-end encryption (must match publisher's key) + cargo run -p local_video -F desktop --bin subscriber -- \ + --room-name demo \ + --identity viewer-1 \ + --e2ee-key my-secret-key ``` +Subscriber flags (in addition to the common connection flags above): +- `--participant `: Only subscribe to video tracks from the specified participant. +- `--display-timestamp`: Show a top-left overlay with the publisher's timestamp, the subscriber's current time, and the computed end-to-end latency. Requires the publisher to use `--attach-timestamp`. +- `--e2ee-key `: Enable end-to-end decryption with the given shared key. Must match the key used by the publisher. + Notes: -- `--participant` limits subscription to video tracks from the specified participant identity. 
- If the active video track is unsubscribed or unpublished, the app clears its state and will automatically attach to the next matching video track when it appears. +- For E2EE to work, both publisher and subscriber must specify the same `--e2ee-key` value. If the keys don't match, the subscriber will not be able to decode the video. +- The timestamp overlay updates at ~2 Hz so the latency value is readable rather than flickering every frame. From dde7de600544f6ea261656534c483e9892a8588d Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 17 Feb 2026 17:19:02 -0800 Subject: [PATCH 16/52] cargo fmt --- examples/local_video/src/publisher.rs | 6 ++---- examples/local_video/src/subscriber.rs | 16 +++++++--------- libwebrtc/src/lib.rs | 4 +++- libwebrtc/src/native/peer_connection_factory.rs | 3 +-- libwebrtc/src/native/user_timestamp.rs | 3 +-- libwebrtc/src/native/video_source.rs | 6 +----- libwebrtc/src/native/video_track.rs | 5 +---- livekit/src/room/e2ee/manager.rs | 6 ++---- webrtc-sys/src/lib.rs | 2 +- 9 files changed, 19 insertions(+), 32 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 8b8479bf8..0b04fd41e 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -153,10 +153,8 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { KeyProviderOptions::default(), e2ee_key.as_bytes().to_vec(), ); - room_options.encryption = Some(E2eeOptions { - encryption_type: EncryptionType::Gcm, - key_provider, - }); + room_options.encryption = + Some(E2eeOptions { encryption_type: EncryptionType::Gcm, key_provider }); info!("E2EE enabled with AES-GCM encryption"); } diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index cc4781b06..1781b9f8e 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -128,17 +128,17 @@ fn infer_quality_from_dims( /// Returns the current wall-clock 
time as microseconds since Unix epoch. fn current_timestamp_us() -> i64 { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap_or_default() - .as_micros() as i64 + SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default().as_micros() as i64 } /// Format a user timestamp (microseconds since Unix epoch) as /// `yyyy-mm-dd hh:mm:ss:xxx` where xxx is milliseconds. fn format_timestamp_us(ts_us: i64) -> String { DateTime::::from_timestamp_micros(ts_us) - .map(|dt| dt.format("%Y-%m-%d %H:%M:%S:").to_string() + &format!("{:03}", dt.timestamp_subsec_millis())) + .map(|dt| { + dt.format("%Y-%m-%d %H:%M:%S:").to_string() + + &format!("{:03}", dt.timestamp_subsec_millis()) + }) .unwrap_or_else(|| format!("")) } @@ -668,10 +668,8 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { KeyProviderOptions::default(), e2ee_key.as_bytes().to_vec(), ); - room_options.encryption = Some(E2eeOptions { - encryption_type: EncryptionType::Gcm, - key_provider, - }); + room_options.encryption = + Some(E2eeOptions { encryption_type: EncryptionType::Gcm, key_provider }); info!("E2EE enabled with AES-GCM encryption"); } diff --git a/libwebrtc/src/lib.rs b/libwebrtc/src/lib.rs index 77e125fc7..c7a016793 100644 --- a/libwebrtc/src/lib.rs +++ b/libwebrtc/src/lib.rs @@ -68,7 +68,9 @@ pub mod video_track; pub mod native { pub use webrtc_sys::webrtc::ffi::create_random_uuid; - pub use crate::imp::{apm, audio_mixer, audio_resampler, frame_cryptor, user_timestamp, yuv_helper}; + pub use crate::imp::{ + apm, audio_mixer, audio_resampler, frame_cryptor, user_timestamp, yuv_helper, + }; } #[cfg(target_os = "android")] diff --git a/libwebrtc/src/native/peer_connection_factory.rs b/libwebrtc/src/native/peer_connection_factory.rs index 4a3605b3b..8980074f0 100644 --- a/libwebrtc/src/native/peer_connection_factory.rs +++ b/libwebrtc/src/native/peer_connection_factory.rs @@ -83,8 +83,7 @@ impl PeerConnectionFactory { pub fn create_video_track(&self, label: &str, source: 
NativeVideoSource) -> RtcVideoTrack { RtcVideoTrack { handle: imp_vt::RtcVideoTrack::new( - self.sys_handle - .create_video_track(label.to_string(), source.handle.sys_handle()), + self.sys_handle.create_video_track(label.to_string(), source.handle.sys_handle()), ), } } diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index e22b64694..f5b37d14b 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -30,8 +30,7 @@ use cxx::SharedPtr; use webrtc_sys::user_timestamp::ffi as sys_ut; use crate::{ - peer_connection_factory::PeerConnectionFactory, - rtp_receiver::RtpReceiver, + peer_connection_factory::PeerConnectionFactory, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, }; diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 2a338a43b..5626eb8ac 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -81,11 +81,7 @@ impl NativeVideoSource { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); builder.pin_mut().set_timestamp_us(now.as_micros() as i64); - source.sys_handle.on_captured_frame( - &builder.pin_mut().build(), - false, - 0, - ); + source.sys_handle.on_captured_frame(&builder.pin_mut().build(), false, 0); } } }); diff --git a/libwebrtc/src/native/video_track.rs b/libwebrtc/src/native/video_track.rs index 4155a94b5..d9d0191da 100644 --- a/libwebrtc/src/native/video_track.rs +++ b/libwebrtc/src/native/video_track.rs @@ -33,10 +33,7 @@ impl RtcVideoTrack { impl_media_stream_track!(video_to_media); pub(crate) fn new(sys_handle: SharedPtr) -> Self { - Self { - sys_handle, - user_timestamp_handler: Arc::new(Mutex::new(None)), - } + Self { sys_handle, user_timestamp_handler: Arc::new(Mutex::new(None)) } } pub fn sys_handle(&self) -> SharedPtr { diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 85f802f1a..4e7d50fe1 100644 --- 
a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -141,10 +141,8 @@ impl E2eeManager { // Always set up user timestamp embedding for local video tracks. if let LocalTrack::Video(video_track) = &track { - let handler = user_timestamp::create_sender_handler( - LkRuntime::instance().pc_factory(), - &sender, - ); + let handler = + user_timestamp::create_sender_handler(LkRuntime::instance().pc_factory(), &sender); video_track.set_user_timestamp_handler(handler.clone()); // Also set the handler on the video source so that capture_frame() diff --git a/webrtc-sys/src/lib.rs b/webrtc-sys/src/lib.rs index ca0a436fe..181cc3199 100644 --- a/webrtc-sys/src/lib.rs +++ b/webrtc-sys/src/lib.rs @@ -35,9 +35,9 @@ pub mod rtp_parameters; pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; +pub mod user_timestamp; pub mod video_frame; pub mod video_frame_buffer; -pub mod user_timestamp; pub mod video_track; pub mod webrtc; pub mod yuv_helper; From eecec0e92bdb259d413788845ecbc525805054c9 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 18 Feb 2026 12:32:16 -0800 Subject: [PATCH 17/52] remove unused callback reference --- webrtc-sys/src/frame_cryptor.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/webrtc-sys/src/frame_cryptor.cpp b/webrtc-sys/src/frame_cryptor.cpp index e044f09c0..f28536754 100644 --- a/webrtc-sys/src/frame_cryptor.cpp +++ b/webrtc-sys/src/frame_cryptor.cpp @@ -44,7 +44,6 @@ class ChainedFrameTransformer : public webrtc::FrameTransformerInterface, void RegisterTransformedFrameCallback( rtc::scoped_refptr callback) override { - callback_ = callback; second_->RegisterTransformedFrameCallback(callback); first_->RegisterTransformedFrameCallback( rtc::scoped_refptr(this)); @@ -61,7 +60,6 @@ class ChainedFrameTransformer : public webrtc::FrameTransformerInterface, void UnregisterTransformedFrameCallback() override { first_->UnregisterTransformedFrameCallback(); second_->UnregisterTransformedFrameCallback(); - callback_ = 
nullptr; } void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { @@ -77,7 +75,6 @@ class ChainedFrameTransformer : public webrtc::FrameTransformerInterface, private: rtc::scoped_refptr first_; rtc::scoped_refptr second_; - rtc::scoped_refptr callback_; }; webrtc::FrameCryptorTransformer::Algorithm AlgorithmToFrameCryptorAlgorithm( From aa1cdb665364231ce5422b8d2aeaa5f1d5fc83fa Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 18 Feb 2026 12:35:57 -0800 Subject: [PATCH 18/52] make the rtp lookup map insertion & removal more robust --- webrtc-sys/src/user_timestamp.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index aacf84e91..aa4aa1f00 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -179,8 +179,10 @@ void UserTimestampTransformer::TransformReceive( recv_map_.erase(recv_map_order_.front()); recv_map_order_.pop_front(); } + if (recv_map_.find(rtp_timestamp) == recv_map_.end()) { + recv_map_order_.push_back(rtp_timestamp); + } recv_map_[rtp_timestamp] = user_ts.value(); - recv_map_order_.push_back(rtp_timestamp); } // Update frame with stripped data @@ -372,8 +374,10 @@ void UserTimestampTransformer::store_user_timestamp( send_map_order_.pop_front(); } + if (send_map_.find(key) == send_map_.end()) { + send_map_order_.push_back(key); + } send_map_[key] = user_timestamp_us; - send_map_order_.push_back(key); RTC_LOG(LS_INFO) << "UserTimestampTransformer::store_user_timestamp" << " capture_ts_us=" << capture_timestamp_us From 2a52bbe6e19e6e4e2a95b658923e28e3c8e3c976 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 14:40:36 -0800 Subject: [PATCH 19/52] use new option for payload trailer features to enable timestamping --- examples/local_video/src/publisher.rs | 1 + livekit-api/src/signal_client/mod.rs | 2 +- livekit-protocol/protocol | 2 +- livekit-protocol/src/livekit.rs | 100 +-- 
livekit-protocol/src/livekit.serde.rs | 755 ++++-------------- livekit/src/room/e2ee/manager.rs | 66 +- livekit/src/room/options.rs | 2 + .../src/room/participant/local_participant.rs | 32 +- livekit/src/rtc_engine/rtc_session.rs | 4 + webrtc-sys/src/user_timestamp.cpp | 14 - 10 files changed, 268 insertions(+), 710 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 0b04fd41e..37b242806 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -225,6 +225,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { source: TrackSource::Camera, simulcast: args.simulcast, video_codec: codec, + user_timestamp: args.attach_timestamp, ..Default::default() }; if let Some(bitrate) = args.max_bitrate { diff --git a/livekit-api/src/signal_client/mod.rs b/livekit-api/src/signal_client/mod.rs index 32867d17a..8085b9099 100644 --- a/livekit-api/src/signal_client/mod.rs +++ b/livekit-api/src/signal_client/mod.rs @@ -52,7 +52,7 @@ pub const JOIN_RESPONSE_TIMEOUT: Duration = Duration::from_secs(5); pub const SIGNAL_CONNECT_TIMEOUT: Duration = Duration::from_secs(5); const REGION_FETCH_TIMEOUT: Duration = Duration::from_secs(3); const VALIDATE_TIMEOUT: Duration = Duration::from_secs(3); -pub const PROTOCOL_VERSION: u32 = 16; +pub const PROTOCOL_VERSION: u32 = 17; #[derive(Error, Debug)] pub enum SignalError { diff --git a/livekit-protocol/protocol b/livekit-protocol/protocol index aec2833df..75cb0752d 160000 --- a/livekit-protocol/protocol +++ b/livekit-protocol/protocol @@ -1 +1 @@ -Subproject commit aec2833dffcbc4525735f29c96238c13c10bcf64 +Subproject commit 75cb0752d5ad76b5fdb26d75c8c16f435b622f89 diff --git a/livekit-protocol/src/livekit.rs b/livekit-protocol/src/livekit.rs index 106c5c86e..eecf33a84 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -610,6 +610,8 @@ pub struct TrackInfo { pub audio_features: ::prost::alloc::vec::Vec, 
#[prost(enumeration="BackupCodecPolicy", tag="20")] pub backup_codec_policy: i32, + #[prost(enumeration="PacketTrailerFeature", repeated, tag="21")] + pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -946,6 +948,9 @@ pub struct RpcRequest { pub response_timeout_ms: u32, #[prost(uint32, tag="5")] pub version: u32, + /// Compressed payload data. When set, this field is used instead of `payload`. + #[prost(bytes="vec", tag="6")] + pub compressed_payload: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -958,7 +963,7 @@ pub struct RpcAck { pub struct RpcResponse { #[prost(string, tag="1")] pub request_id: ::prost::alloc::string::String, - #[prost(oneof="rpc_response::Value", tags="2, 3")] + #[prost(oneof="rpc_response::Value", tags="2, 3, 4")] pub value: ::core::option::Option, } /// Nested message and enum types in `RpcResponse`. @@ -970,6 +975,9 @@ pub mod rpc_response { Payload(::prost::alloc::string::String), #[prost(message, tag="3")] Error(super::RpcError), + /// Compressed payload data. When set, this field is used instead of `payload`. + #[prost(bytes, tag="4")] + CompressedPayload(::prost::alloc::vec::Vec), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -2042,6 +2050,29 @@ impl AudioTrackFeature { } } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PacketTrailerFeature { + PtfUserTimestamp = 0, +} +impl PacketTrailerFeature { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + PacketTrailerFeature::PtfUserTimestamp => "PTF_USER_TIMESTAMP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PTF_USER_TIMESTAMP" => Some(Self::PtfUserTimestamp), + _ => None, + } + } +} /// composite using a web browser #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -3356,6 +3387,8 @@ pub struct AddTrackRequest { pub backup_codec_policy: i32, #[prost(enumeration="AudioTrackFeature", repeated, tag="17")] pub audio_features: ::prost::alloc::vec::Vec, + #[prost(enumeration="PacketTrailerFeature", repeated, tag="18")] + pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -4243,7 +4276,7 @@ pub struct JobState { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WorkerMessage { - #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7, 8, 9")] + #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7")] pub message: ::core::option::Option, } /// Nested message and enum types in `WorkerMessage`. @@ -4269,17 +4302,13 @@ pub mod worker_message { SimulateJob(super::SimulateJobRequest), #[prost(message, tag="7")] MigrateJob(super::MigrateJobRequest), - #[prost(message, tag="8")] - TextResponse(super::TextMessageResponse), - #[prost(message, tag="9")] - PushText(super::PushTextRequest), } } /// from Server to Worker #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ServerMessage { - #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4, 6")] + #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4")] pub message: ::core::option::Option, } /// Nested message and enum types in `ServerMessage`. 
@@ -4299,8 +4328,6 @@ pub mod server_message { Termination(super::JobTermination), #[prost(message, tag="4")] Pong(super::WorkerPong), - #[prost(message, tag="6")] - TextRequest(super::TextMessageRequest), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -4430,61 +4457,6 @@ pub struct JobTermination { #[prost(string, tag="1")] pub job_id: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct AgentSessionState { - #[prost(uint64, tag="1")] - pub version: u64, - #[prost(oneof="agent_session_state::Data", tags="2, 3")] - pub data: ::core::option::Option, -} -/// Nested message and enum types in `AgentSessionState`. -pub mod agent_session_state { - #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] - pub enum Data { - #[prost(bytes, tag="2")] - Snapshot(::prost::alloc::vec::Vec), - #[prost(bytes, tag="3")] - Delta(::prost::alloc::vec::Vec), - } -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TextMessageRequest { - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] - pub session_id: ::prost::alloc::string::String, - #[prost(string, tag="3")] - pub agent_name: ::prost::alloc::string::String, - #[prost(string, tag="4")] - pub metadata: ::prost::alloc::string::String, - #[prost(message, optional, tag="5")] - pub session_state: ::core::option::Option, - #[prost(string, tag="6")] - pub text: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct PushTextRequest { - /// The message_id of the TextMessageRequest that this push is for - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] - pub content: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] 
-#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TextMessageResponse { - /// Indicate the request is completed - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(message, optional, tag="2")] - pub session_state: ::core::option::Option, - #[prost(string, tag="3")] - pub error: ::prost::alloc::string::String, -} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum JobType { diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 4d35e2a0d..21a8fcfa3 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -561,6 +561,9 @@ impl serde::Serialize for AddTrackRequest { if !self.audio_features.is_empty() { len += 1; } + if !self.packet_trailer_features.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.AddTrackRequest", len)?; if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; @@ -625,6 +628,13 @@ impl serde::Serialize for AddTrackRequest { }).collect::, _>>()?; struct_ser.serialize_field("audioFeatures", &v)?; } + if !self.packet_trailer_features.is_empty() { + let v = self.packet_trailer_features.iter().cloned().map(|v| { + PacketTrailerFeature::try_from(v) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) + }).collect::, _>>()?; + struct_ser.serialize_field("packetTrailerFeatures", &v)?; + } struct_ser.end() } } @@ -657,6 +667,8 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { "backupCodecPolicy", "audio_features", "audioFeatures", + "packet_trailer_features", + "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -678,6 +690,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { Stream, BackupCodecPolicy, AudioFeatures, + PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -717,6 +730,7 @@ impl<'de> serde::Deserialize<'de> 
for AddTrackRequest { "stream" => Ok(GeneratedField::Stream), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), + "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -753,6 +767,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { let mut stream__ = None; let mut backup_codec_policy__ = None; let mut audio_features__ = None; + let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Cid => { @@ -861,6 +876,12 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { } audio_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); } + GeneratedField::PacketTrailerFeatures => { + if packet_trailer_features__.is_some() { + return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); + } + packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -884,6 +905,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { stream: stream__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), audio_features: audio_features__.unwrap_or_default(), + packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -1193,142 +1215,6 @@ impl<'de> serde::Deserialize<'de> for AgentDispatchState { deserializer.deserialize_struct("livekit.AgentDispatchState", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for AgentSessionState { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.version != 0 { - len += 1; - } - if self.data.is_some() { - len += 1; - } - let mut struct_ser = 
serializer.serialize_struct("livekit.AgentSessionState", len)?; - if self.version != 0 { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; - } - if let Some(v) = self.data.as_ref() { - match v { - agent_session_state::Data::Snapshot(v) => { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("snapshot", pbjson::private::base64::encode(&v).as_str())?; - } - agent_session_state::Data::Delta(v) => { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("delta", pbjson::private::base64::encode(&v).as_str())?; - } - } - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for AgentSessionState { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "version", - "snapshot", - "delta", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - Version, - Snapshot, - Delta, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "version" => Ok(GeneratedField::Version), - "snapshot" => Ok(GeneratedField::Snapshot), - "delta" => Ok(GeneratedField::Delta), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct 
GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = AgentSessionState; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.AgentSessionState") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut version__ = None; - let mut data__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::Version => { - if version__.is_some() { - return Err(serde::de::Error::duplicate_field("version")); - } - version__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Snapshot => { - if data__.is_some() { - return Err(serde::de::Error::duplicate_field("snapshot")); - } - data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Snapshot(x.0)); - } - GeneratedField::Delta => { - if data__.is_some() { - return Err(serde::de::Error::duplicate_field("delta")); - } - data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Delta(x.0)); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(AgentSessionState { - version: version__.unwrap_or_default(), - data: data__, - }) - } - } - deserializer.deserialize_struct("livekit.AgentSessionState", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for AliOssUpload { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -21625,6 +21511,74 @@ impl<'de> serde::Deserialize<'de> for MuteTrackRequest { deserializer.deserialize_struct("livekit.MuteTrackRequest", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for PacketTrailerFeature { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + 
Self::PtfUserTimestamp => "PTF_USER_TIMESTAMP", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for PacketTrailerFeature { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "PTF_USER_TIMESTAMP", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PacketTrailerFeature; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "PTF_USER_TIMESTAMP" => Ok(PacketTrailerFeature::PtfUserTimestamp), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} impl serde::Serialize for Pagination { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -24339,119 +24293,6 @@ impl<'de> serde::Deserialize<'de> for PublishDataTrackResponse { deserializer.deserialize_struct("livekit.PublishDataTrackResponse", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for PushTextRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 
1; - } - if !self.content.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.PushTextRequest", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if !self.content.is_empty() { - struct_ser.serialize_field("content", &self.content)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for PushTextRequest { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "content", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - MessageId, - Content, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "content" => Ok(GeneratedField::Content), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = PushTextRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.PushTextRequest") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut content__ = None; - while let Some(k) = map_.next_key()? 
{ - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::Content => { - if content__.is_some() { - return Err(serde::de::Error::duplicate_field("content")); - } - content__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(PushTextRequest { - message_id: message_id__.unwrap_or_default(), - content: content__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.PushTextRequest", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for RtcpSenderReportState { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -29436,6 +29277,9 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { len += 1; } + if !self.compressed_payload.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.RpcRequest", len)?; if !self.id.is_empty() { struct_ser.serialize_field("id", &self.id)?; @@ -29452,6 +29296,11 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { struct_ser.serialize_field("version", &self.version)?; } + if !self.compressed_payload.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&self.compressed_payload).as_str())?; + } struct_ser.end() } } @@ -29468,6 +29317,8 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { "response_timeout_ms", "responseTimeoutMs", "version", + "compressed_payload", + "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29477,6 +29328,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Payload, ResponseTimeoutMs, Version, + CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29504,6 +29356,7 @@ impl<'de> serde::Deserialize<'de> for 
RpcRequest { "payload" => Ok(GeneratedField::Payload), "responseTimeoutMs" | "response_timeout_ms" => Ok(GeneratedField::ResponseTimeoutMs), "version" => Ok(GeneratedField::Version), + "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29528,6 +29381,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { let mut payload__ = None; let mut response_timeout_ms__ = None; let mut version__ = None; + let mut compressed_payload__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Id => { @@ -29564,6 +29418,14 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::CompressedPayload => { + if compressed_payload__.is_some() { + return Err(serde::de::Error::duplicate_field("compressedPayload")); + } + compressed_payload__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -29575,6 +29437,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { payload: payload__.unwrap_or_default(), response_timeout_ms: response_timeout_ms__.unwrap_or_default(), version: version__.unwrap_or_default(), + compressed_payload: compressed_payload__.unwrap_or_default(), }) } } @@ -29607,6 +29470,11 @@ impl serde::Serialize for RpcResponse { rpc_response::Value::Error(v) => { struct_ser.serialize_field("error", v)?; } + rpc_response::Value::CompressedPayload(v) => { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&v).as_str())?; + } } } struct_ser.end() @@ -29623,6 +29491,8 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId", "payload", "error", + "compressed_payload", + "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29630,6 +29500,7 @@ impl<'de> 
serde::Deserialize<'de> for RpcResponse { RequestId, Payload, Error, + CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29655,6 +29526,7 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId" | "request_id" => Ok(GeneratedField::RequestId), "payload" => Ok(GeneratedField::Payload), "error" => Ok(GeneratedField::Error), + "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29697,6 +29569,12 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { value__ = map_.next_value::<::std::option::Option<_>>()?.map(rpc_response::Value::Error) ; } + GeneratedField::CompressedPayload => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("compressedPayload")); + } + value__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| rpc_response::Value::CompressedPayload(x.0)); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -36132,9 +36010,6 @@ impl serde::Serialize for ServerMessage { server_message::Message::Pong(v) => { struct_ser.serialize_field("pong", v)?; } - server_message::Message::TextRequest(v) => { - struct_ser.serialize_field("textRequest", v)?; - } } } struct_ser.end() @@ -36152,8 +36027,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { "assignment", "termination", "pong", - "text_request", - "textRequest", ]; #[allow(clippy::enum_variant_names)] @@ -36163,7 +36036,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { Assignment, Termination, Pong, - TextRequest, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -36191,7 +36063,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { "assignment" => Ok(GeneratedField::Assignment), "termination" => Ok(GeneratedField::Termination), "pong" => Ok(GeneratedField::Pong), - "textRequest" | "text_request" => Ok(GeneratedField::TextRequest), _ => 
Ok(GeneratedField::__SkipField__), } } @@ -36247,13 +36118,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { return Err(serde::de::Error::duplicate_field("pong")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::Pong) -; - } - GeneratedField::TextRequest => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("textRequest")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::TextRequest) ; } GeneratedField::__SkipField__ => { @@ -40560,321 +40424,6 @@ impl<'de> serde::Deserialize<'de> for SyncState { deserializer.deserialize_struct("livekit.SyncState", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for TextMessageRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 1; - } - if !self.session_id.is_empty() { - len += 1; - } - if !self.agent_name.is_empty() { - len += 1; - } - if !self.metadata.is_empty() { - len += 1; - } - if self.session_state.is_some() { - len += 1; - } - if !self.text.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.TextMessageRequest", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if !self.session_id.is_empty() { - struct_ser.serialize_field("sessionId", &self.session_id)?; - } - if !self.agent_name.is_empty() { - struct_ser.serialize_field("agentName", &self.agent_name)?; - } - if !self.metadata.is_empty() { - struct_ser.serialize_field("metadata", &self.metadata)?; - } - if let Some(v) = self.session_state.as_ref() { - struct_ser.serialize_field("sessionState", v)?; - } - if !self.text.is_empty() { - struct_ser.serialize_field("text", &self.text)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for TextMessageRequest { - 
#[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "session_id", - "sessionId", - "agent_name", - "agentName", - "metadata", - "session_state", - "sessionState", - "text", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - MessageId, - SessionId, - AgentName, - Metadata, - SessionState, - Text, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "sessionId" | "session_id" => Ok(GeneratedField::SessionId), - "agentName" | "agent_name" => Ok(GeneratedField::AgentName), - "metadata" => Ok(GeneratedField::Metadata), - "sessionState" | "session_state" => Ok(GeneratedField::SessionState), - "text" => Ok(GeneratedField::Text), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TextMessageRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TextMessageRequest") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut session_id__ = None; - let mut agent_name__ = None; - let mut metadata__ = None; - let mut 
session_state__ = None; - let mut text__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::SessionId => { - if session_id__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionId")); - } - session_id__ = Some(map_.next_value()?); - } - GeneratedField::AgentName => { - if agent_name__.is_some() { - return Err(serde::de::Error::duplicate_field("agentName")); - } - agent_name__ = Some(map_.next_value()?); - } - GeneratedField::Metadata => { - if metadata__.is_some() { - return Err(serde::de::Error::duplicate_field("metadata")); - } - metadata__ = Some(map_.next_value()?); - } - GeneratedField::SessionState => { - if session_state__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionState")); - } - session_state__ = map_.next_value()?; - } - GeneratedField::Text => { - if text__.is_some() { - return Err(serde::de::Error::duplicate_field("text")); - } - text__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(TextMessageRequest { - message_id: message_id__.unwrap_or_default(), - session_id: session_id__.unwrap_or_default(), - agent_name: agent_name__.unwrap_or_default(), - metadata: metadata__.unwrap_or_default(), - session_state: session_state__, - text: text__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.TextMessageRequest", FIELDS, GeneratedVisitor) - } -} -impl serde::Serialize for TextMessageResponse { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 1; - } - if self.session_state.is_some() { - len += 1; - } - if !self.error.is_empty() { - len += 1; - } - let mut 
struct_ser = serializer.serialize_struct("livekit.TextMessageResponse", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if let Some(v) = self.session_state.as_ref() { - struct_ser.serialize_field("sessionState", v)?; - } - if !self.error.is_empty() { - struct_ser.serialize_field("error", &self.error)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for TextMessageResponse { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "session_state", - "sessionState", - "error", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - MessageId, - SessionState, - Error, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "sessionState" | "session_state" => Ok(GeneratedField::SessionState), - "error" => Ok(GeneratedField::Error), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TextMessageResponse; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TextMessageResponse") - } - - fn visit_map(self, mut map_: V) -> 
std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut session_state__ = None; - let mut error__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::SessionState => { - if session_state__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionState")); - } - session_state__ = map_.next_value()?; - } - GeneratedField::Error => { - if error__.is_some() { - return Err(serde::de::Error::duplicate_field("error")); - } - error__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(TextMessageResponse { - message_id: message_id__.unwrap_or_default(), - session_state: session_state__, - error: error__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.TextMessageResponse", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for TimeSeriesMetric { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -41801,6 +41350,9 @@ impl serde::Serialize for TrackInfo { if self.backup_codec_policy != 0 { len += 1; } + if !self.packet_trailer_features.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.TrackInfo", len)?; if !self.sid.is_empty() { struct_ser.serialize_field("sid", &self.sid)?; @@ -41874,6 +41426,13 @@ impl serde::Serialize for TrackInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.backup_codec_policy)))?; struct_ser.serialize_field("backupCodecPolicy", &v)?; } + if !self.packet_trailer_features.is_empty() { + let v = self.packet_trailer_features.iter().cloned().map(|v| { + PacketTrailerFeature::try_from(v) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) + }).collect::, _>>()?; + 
struct_ser.serialize_field("packetTrailerFeatures", &v)?; + } struct_ser.end() } } @@ -41909,6 +41468,8 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "audioFeatures", "backup_codec_policy", "backupCodecPolicy", + "packet_trailer_features", + "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -41933,6 +41494,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { Version, AudioFeatures, BackupCodecPolicy, + PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -41975,6 +41537,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "version" => Ok(GeneratedField::Version), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), + "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -42014,6 +41577,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { let mut version__ = None; let mut audio_features__ = None; let mut backup_codec_policy__ = None; + let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Sid => { @@ -42140,6 +41704,12 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { } backup_codec_policy__ = Some(map_.next_value::()? 
as i32); } + GeneratedField::PacketTrailerFeatures => { + if packet_trailer_features__.is_some() { + return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); + } + packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -42166,6 +41736,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { version: version__, audio_features: audio_features__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), + packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -48357,12 +47928,6 @@ impl serde::Serialize for WorkerMessage { worker_message::Message::MigrateJob(v) => { struct_ser.serialize_field("migrateJob", v)?; } - worker_message::Message::TextResponse(v) => { - struct_ser.serialize_field("textResponse", v)?; - } - worker_message::Message::PushText(v) => { - struct_ser.serialize_field("pushText", v)?; - } } } struct_ser.end() @@ -48386,10 +47951,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "simulateJob", "migrate_job", "migrateJob", - "text_response", - "textResponse", - "push_text", - "pushText", ]; #[allow(clippy::enum_variant_names)] @@ -48401,8 +47962,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { Ping, SimulateJob, MigrateJob, - TextResponse, - PushText, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -48432,8 +47991,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "ping" => Ok(GeneratedField::Ping), "simulateJob" | "simulate_job" => Ok(GeneratedField::SimulateJob), "migrateJob" | "migrate_job" => Ok(GeneratedField::MigrateJob), - "textResponse" | "text_response" => Ok(GeneratedField::TextResponse), - "pushText" | "push_text" => Ok(GeneratedField::PushText), _ => Ok(GeneratedField::__SkipField__), } } @@ -48503,20 +48060,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { return 
Err(serde::de::Error::duplicate_field("migrateJob")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::MigrateJob) -; - } - GeneratedField::TextResponse => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("textResponse")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::TextResponse) -; - } - GeneratedField::PushText => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("pushText")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::PushText) ; } GeneratedField::__SkipField__ => { diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 4e7d50fe1..c6deab134 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -23,8 +23,8 @@ use libwebrtc::{ }, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, - video_source::RtcVideoSource, }; +use livekit_protocol::PacketTrailerFeature; use parking_lot::Mutex; use super::{key_provider::KeyProvider, EncryptionType}; @@ -105,14 +105,31 @@ impl E2eeManager { let receiver = track.transceiver().unwrap().receiver(); let mut user_timestamp_handler = None; - // Always set up user timestamp extraction for remote video tracks. 
- if let RemoteTrack::Video(video_track) = &track { - let handler = user_timestamp::create_receiver_handler( - LkRuntime::instance().pc_factory(), - &receiver, + let has_user_timestamp = publication + .proto_info() + .packet_trailer_features + .contains(&(PacketTrailerFeature::PtfUserTimestamp as i32)); + + if has_user_timestamp { + if let RemoteTrack::Video(video_track) = &track { + log::info!( + "user_timestamp enabled for subscribed track {} from {}", + publication.sid(), + identity, + ); + let handler = user_timestamp::create_receiver_handler( + LkRuntime::instance().pc_factory(), + &receiver, + ); + video_track.set_user_timestamp_handler(handler.clone()); + user_timestamp_handler = Some(handler); + } + } else { + log::info!( + "user_timestamp not present for subscribed track {} from {}", + publication.sid(), + identity, ); - video_track.set_user_timestamp_handler(handler.clone()); - user_timestamp_handler = Some(handler); } if !self.initialized() || publication.encryption_type() == EncryptionType::None { @@ -137,23 +154,26 @@ impl E2eeManager { ) { let identity = participant.identity(); let sender = track.transceiver().unwrap().sender(); - let mut user_timestamp_handler = None; - // Always set up user timestamp embedding for local video tracks. - if let LocalTrack::Video(video_track) = &track { - let handler = - user_timestamp::create_sender_handler(LkRuntime::instance().pc_factory(), &sender); - video_track.set_user_timestamp_handler(handler.clone()); - - // Also set the handler on the video source so that capture_frame() - // can automatically store user timestamps into it. 
- #[cfg(not(target_arch = "wasm32"))] - if let RtcVideoSource::Native(ref native_source) = video_track.rtc_source() { - native_source.set_user_timestamp_handler(handler.clone()); + let user_timestamp_handler = if let LocalTrack::Video(video_track) = &track { + let handler = video_track.user_timestamp_handler(); + if handler.is_some() { + log::info!( + "user_timestamp enabled for published track {} from {}", + publication.sid(), + identity, + ); + } else { + log::info!( + "user_timestamp not enabled for published track {} from {}", + publication.sid(), + identity, + ); } - - user_timestamp_handler = Some(handler); - } + handler + } else { + None + }; if !self.initialized() || publication.encryption_type() == EncryptionType::None { return; diff --git a/livekit/src/room/options.rs b/livekit/src/room/options.rs index fbcb6ba94..8e70414ea 100644 --- a/livekit/src/room/options.rs +++ b/livekit/src/room/options.rs @@ -88,6 +88,7 @@ pub struct TrackPublishOptions { pub source: TrackSource, pub stream: String, pub preconnect_buffer: bool, + pub user_timestamp: bool, } impl Default for TrackPublishOptions { @@ -102,6 +103,7 @@ impl Default for TrackPublishOptions { source: TrackSource::Unknown, stream: "".to_string(), preconnect_buffer: false, + user_timestamp: false, } } } diff --git a/livekit/src/room/participant/local_participant.rs b/livekit/src/room/participant/local_participant.rs index c72b5f4c4..266a8837c 100644 --- a/livekit/src/room/participant/local_participant.rs +++ b/livekit/src/room/participant/local_participant.rs @@ -36,10 +36,15 @@ use crate::{ prelude::*, room::participant::rpc::{RpcError, RpcErrorCode, RpcInvocationData, MAX_PAYLOAD_BYTES}, rtc_engine::{EngineError, RtcEngine}, + rtc_engine::lk_runtime::LkRuntime, ChatMessage, DataPacket, RoomSession, RpcAck, RpcRequest, RpcResponse, SipDTMF, Transcription, }; use chrono::Utc; -use libwebrtc::{native::create_random_uuid, rtp_parameters::RtpEncodingParameters}; +use libwebrtc::{ + 
native::{create_random_uuid, user_timestamp}, + rtp_parameters::RtpEncodingParameters, + video_source::RtcVideoSource, +}; use livekit_api::signal_client::SignalError; use livekit_protocol as proto; use livekit_runtime::timeout; @@ -273,6 +278,11 @@ impl LocalParticipant { req.audio_features.push(proto::AudioTrackFeature::TfPreconnectBuffer as i32); } + if options.user_timestamp { + req.packet_trailer_features + .push(proto::PacketTrailerFeature::PtfUserTimestamp as i32); + } + let mut encodings = Vec::default(); match &track { LocalTrack::Video(video_track) => { @@ -318,6 +328,26 @@ impl LocalParticipant { track.set_transceiver(Some(transceiver)); + if options.user_timestamp { + if let LocalTrack::Video(video_track) = &track { + log::info!( + "user_timestamp enabled for local video track {}", + publication.sid(), + ); + let sender = track.transceiver().unwrap().sender(); + let handler = user_timestamp::create_sender_handler( + LkRuntime::instance().pc_factory(), + &sender, + ); + video_track.set_user_timestamp_handler(handler.clone()); + + #[cfg(not(target_arch = "wasm32"))] + if let RtcVideoSource::Native(ref native_source) = video_track.rtc_source() { + native_source.set_user_timestamp_handler(handler.clone()); + } + } + } + self.inner.rtc_engine.publisher_negotiation_needed(); publication.update_publish_options(options); diff --git a/livekit/src/rtc_engine/rtc_session.rs b/livekit/src/rtc_engine/rtc_session.rs index 8337a7f5b..b2298a371 100644 --- a/livekit/src/rtc_engine/rtc_session.rs +++ b/livekit/src/rtc_engine/rtc_session.rs @@ -1309,6 +1309,10 @@ impl SessionInner { None => (None, None), Some(proto::rpc_response::Value::Payload(payload)) => (Some(payload), None), Some(proto::rpc_response::Value::Error(err)) => (None, Some(err)), + Some(proto::rpc_response::Value::CompressedPayload(_)) => { + log::warn!("received compressed RPC response payload, decompression not yet supported"); + (None, None) + } }; self.emitter.send(SessionEvent::RpcResponse { 
request_id: rpc_response.request_id, diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index aa4aa1f00..6917131ff 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -117,16 +117,6 @@ void UserTimestampTransformer::TransformSend( if (enabled_.load()) { new_data = AppendTimestampTrailer(data, ts_to_embed); frame->SetData(rtc::ArrayView(new_data)); - - RTC_LOG(LS_INFO) << "UserTimestampTransformer::TransformSend appended " - "trailer" - << " ts_us=" << ts_to_embed - << " rtp_ts=" << rtp_timestamp - << " ssrc=" << ssrc - << " capture_us=" - << (capture_time.has_value() ? capture_time->us() : -1) - << " orig_size=" << data.size() - << " new_size=" << new_data.size(); } // Forward to the appropriate callback (either global or per-SSRC sink). @@ -188,10 +178,6 @@ void UserTimestampTransformer::TransformReceive( // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); - RTC_LOG(LS_INFO) << "UserTimestampTransformer" - << " user_ts=" << user_ts.value() - << " rtp_ts=" << frame->GetTimestamp() - << " recv_latency=" << recv_latency_ms << " ms"; } else { // Log the last few bytes so we can see whether the magic marker is present. 
size_t log_len = std::min(data.size(), 16); From 38734f4a203bde7c672869b2f29b8a59d0b6a7c8 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 14:43:15 -0800 Subject: [PATCH 20/52] remove timestamp logs --- webrtc-sys/src/user_timestamp.cpp | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index 6917131ff..a1c11d0cb 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -19,7 +19,6 @@ #include #include #include -#include #include "api/make_ref_counted.h" #include "livekit/peer_connection_factory.h" @@ -150,16 +149,6 @@ void UserTimestampTransformer::TransformReceive( auto user_ts = ExtractTimestampTrailer(data, stripped_data); if (user_ts.has_value()) { - // Compute latency from embedded user timestamp to RTP receive - // time (both in microseconds since Unix epoch), so we can compare - // this with the latency logged after decode on the subscriber side. - int64_t now_us = - std::chrono::duration_cast( - std::chrono::system_clock::now().time_since_epoch()) - .count(); - double recv_latency_ms = - static_cast(now_us - user_ts.value()) / 1000.0; - // Store in the receive map keyed by RTP timestamp so decoded frames // can look up their user timestamp regardless of frame drops. 
{ From eea46e1ff4436494d0aaf8109c5fdb9259965e9a Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 16:07:39 -0800 Subject: [PATCH 21/52] remove noisy logs --- webrtc-sys/src/user_timestamp.cpp | 51 +------------------------------ 1 file changed, 1 insertion(+), 50 deletions(-) diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index a1c11d0cb..ef77ed91a 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -17,7 +17,6 @@ #include "livekit/user_timestamp.h" #include -#include #include #include "api/make_ref_counted.h" @@ -32,10 +31,7 @@ namespace livekit_ffi { // UserTimestampTransformer implementation UserTimestampTransformer::UserTimestampTransformer(Direction direction) - : direction_(direction) { - RTC_LOG(LS_INFO) << "UserTimestampTransformer created direction=" - << (direction_ == Direction::kSend ? "send" : "recv"); -} + : direction_(direction) {} void UserTimestampTransformer::Transform( std::unique_ptr frame) { @@ -43,13 +39,6 @@ void UserTimestampTransformer::Transform( uint32_t rtp_timestamp = frame->GetTimestamp(); if (!enabled_.load()) { - // Pass through without modification, but still log basic info so we know - // frames are flowing through the transformer. - RTC_LOG(LS_INFO) << "UserTimestampTransformer::Transform (disabled)" - << " direction=" - << (direction_ == Direction::kSend ? "send" : "recv") - << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; - rtc::scoped_refptr cb; { webrtc::MutexLock lock(&mutex_); @@ -167,26 +156,6 @@ void UserTimestampTransformer::TransformReceive( // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); - } else { - // Log the last few bytes so we can see whether the magic marker is present. 
- size_t log_len = std::min(data.size(), 16); - std::string tail_bytes; - tail_bytes.reserve(log_len * 4); - for (size_t i = data.size() - log_len; i < data.size(); ++i) { - char buf[8]; - std::snprintf(buf, sizeof(buf), "%u", - static_cast(data[i])); - if (!tail_bytes.empty()) { - tail_bytes.append(","); - } - tail_bytes.append(buf); - } - - RTC_LOG(LS_INFO) - << "UserTimestampTransformer::TransformReceive no trailer found" - << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp - << " size=" << data.size() - << " tail_bytes_dec=[" << tail_bytes << "]"; } // Forward to the appropriate callback (either global or per-SSRC sink). @@ -236,10 +205,6 @@ std::optional UserTimestampTransformer::ExtractTimestampTrailer( rtc::ArrayView data, std::vector& out_data) { if (data.size() < kUserTimestampTrailerSize) { - RTC_LOG(LS_INFO) - << "UserTimestampTransformer::ExtractTimestampTrailer data too small" - << " size=" << data.size() - << " required=" << kUserTimestampTrailerSize; out_data.assign(data.begin(), data.end()); return std::nullopt; } @@ -247,14 +212,6 @@ std::optional UserTimestampTransformer::ExtractTimestampTrailer( // Check for magic bytes at the end const uint8_t* magic_start = data.data() + data.size() - 4; if (std::memcmp(magic_start, kUserTimestampMagic, 4) != 0) { - RTC_LOG(LS_INFO) - << "UserTimestampTransformer::ExtractTimestampTrailer magic mismatch" - << " size=" << data.size() - << " magic_bytes_dec=[" - << static_cast(magic_start[0]) << "," - << static_cast(magic_start[1]) << "," - << static_cast(magic_start[2]) << "," - << static_cast(magic_start[3]) << "]"; out_data.assign(data.begin(), data.end()); return std::nullopt; } @@ -353,12 +310,6 @@ void UserTimestampTransformer::store_user_timestamp( send_map_order_.push_back(key); } send_map_[key] = user_timestamp_us; - - RTC_LOG(LS_INFO) << "UserTimestampTransformer::store_user_timestamp" - << " capture_ts_us=" << capture_timestamp_us - << " key_us=" << key - << " user_ts_us=" << user_timestamp_us - << " 
size=" << send_map_.size(); } // UserTimestampHandler implementation From 1c779a2971ae4fbe1effd80da36184e8525f5420 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 16:12:27 -0800 Subject: [PATCH 22/52] lint --- livekit/src/room/participant/local_participant.rs | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/livekit/src/room/participant/local_participant.rs b/livekit/src/room/participant/local_participant.rs index 266a8837c..fe48de2b0 100644 --- a/livekit/src/room/participant/local_participant.rs +++ b/livekit/src/room/participant/local_participant.rs @@ -35,8 +35,8 @@ use crate::{ options::{self, compute_video_encodings, video_layers_from_encodings, TrackPublishOptions}, prelude::*, room::participant::rpc::{RpcError, RpcErrorCode, RpcInvocationData, MAX_PAYLOAD_BYTES}, - rtc_engine::{EngineError, RtcEngine}, rtc_engine::lk_runtime::LkRuntime, + rtc_engine::{EngineError, RtcEngine}, ChatMessage, DataPacket, RoomSession, RpcAck, RpcRequest, RpcResponse, SipDTMF, Transcription, }; use chrono::Utc; @@ -279,8 +279,7 @@ impl LocalParticipant { } if options.user_timestamp { - req.packet_trailer_features - .push(proto::PacketTrailerFeature::PtfUserTimestamp as i32); + req.packet_trailer_features.push(proto::PacketTrailerFeature::PtfUserTimestamp as i32); } let mut encodings = Vec::default(); @@ -330,10 +329,7 @@ impl LocalParticipant { if options.user_timestamp { if let LocalTrack::Video(video_track) = &track { - log::info!( - "user_timestamp enabled for local video track {}", - publication.sid(), - ); + log::info!("user_timestamp enabled for local video track {}", publication.sid(),); let sender = track.transceiver().unwrap().sender(); let handler = user_timestamp::create_sender_handler( LkRuntime::instance().pc_factory(), From 6f172c1e539d83468822d0d6dda67c79fb5e7b66 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 16:24:18 -0800 Subject: [PATCH 23/52] update TrackPublishOptions in livekit-ffi --- 
livekit-ffi/src/conversion/room.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/livekit-ffi/src/conversion/room.rs b/livekit-ffi/src/conversion/room.rs index 87ee00a14..af8c06d63 100644 --- a/livekit-ffi/src/conversion/room.rs +++ b/livekit-ffi/src/conversion/room.rs @@ -256,6 +256,7 @@ impl From for TrackPublishOptions { preconnect_buffer: opts .preconnect_buffer .unwrap_or(default_publish_options.preconnect_buffer), + user_timestamp: default_publish_options.user_timestamp, } } } From de9cd2b419d1198812cbb815e01e309a86e560b0 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 3 Mar 2026 16:39:21 -0800 Subject: [PATCH 24/52] adding the PayloadTrailerFeatures to the ffi protos --- livekit-ffi/protocol/track.proto | 5 +++++ livekit-ffi/src/conversion/track.rs | 15 +++++++++++++++ livekit/src/prelude.rs | 2 +- livekit/src/room/publication/local.rs | 6 +++++- livekit/src/room/publication/mod.rs | 14 +++++++++++++- livekit/src/room/publication/remote.rs | 6 +++++- 6 files changed, 44 insertions(+), 4 deletions(-) diff --git a/livekit-ffi/protocol/track.proto b/livekit-ffi/protocol/track.proto index bcb9ee785..8d087cd1a 100644 --- a/livekit-ffi/protocol/track.proto +++ b/livekit-ffi/protocol/track.proto @@ -91,6 +91,7 @@ message TrackPublicationInfo { required bool remote = 10; required EncryptionType encryption_type = 11; repeated AudioTrackFeature audio_features = 12; + repeated PacketTrailerFeature packet_trailer_features = 13; } message OwnedTrackPublication { @@ -159,3 +160,7 @@ enum AudioTrackFeature { TF_ENHANCED_NOISE_CANCELLATION = 5; TF_PRECONNECT_BUFFER = 6; // client will buffer audio once available and send it to the server via bytes stream once connected } + +enum PacketTrailerFeature { + PTF_USER_TIMESTAMP = 0; +} diff --git a/livekit-ffi/src/conversion/track.rs b/livekit-ffi/src/conversion/track.rs index 9c3418b6e..9d418a75b 100644 --- a/livekit-ffi/src/conversion/track.rs +++ b/livekit-ffi/src/conversion/track.rs @@ -39,6 +39,11 @@ impl 
From<&FfiPublication> for proto::TrackPublicationInfo { .into_iter() .map(|i| proto::AudioTrackFeature::from(i).into()) .collect(), + packet_trailer_features: publication + .packet_trailer_features() + .into_iter() + .map(|i| proto::PacketTrailerFeature::from(i).into()) + .collect(), } } } @@ -158,3 +163,13 @@ impl From for proto::AudioTrackFeature { } } } + +impl From for proto::PacketTrailerFeature { + fn from(value: livekit_protocol::PacketTrailerFeature) -> Self { + match value { + livekit_protocol::PacketTrailerFeature::PtfUserTimestamp => { + proto::PacketTrailerFeature::PtfUserTimestamp + } + } + } +} diff --git a/livekit/src/prelude.rs b/livekit/src/prelude.rs index 3e6bda23b..2757aa63d 100644 --- a/livekit/src/prelude.rs +++ b/livekit/src/prelude.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -pub use livekit_protocol::AudioTrackFeature; +pub use livekit_protocol::{AudioTrackFeature, PacketTrailerFeature}; pub use crate::{ id::*, diff --git a/livekit/src/room/publication/local.rs b/livekit/src/room/publication/local.rs index 97e365537..0a12e4e27 100644 --- a/livekit/src/room/publication/local.rs +++ b/livekit/src/room/publication/local.rs @@ -14,7 +14,7 @@ use std::{fmt::Debug, sync::Arc}; -use livekit_protocol::{self as proto, AudioTrackFeature}; +use livekit_protocol::{self as proto, AudioTrackFeature, PacketTrailerFeature}; use parking_lot::Mutex; use super::TrackPublicationInner; @@ -149,4 +149,8 @@ impl LocalTrackPublication { pub fn audio_features(&self) -> Vec { self.inner.info.read().audio_features.clone() } + + pub fn packet_trailer_features(&self) -> Vec { + self.inner.info.read().packet_trailer_features.clone() + } } diff --git a/livekit/src/room/publication/mod.rs b/livekit/src/room/publication/mod.rs index 54e86bd4c..a64eae37d 100644 --- a/livekit/src/room/publication/mod.rs +++ b/livekit/src/room/publication/mod.rs @@ -15,7 +15,7 @@ use std::sync::Arc; use 
livekit_protocol::enum_dispatch; -use livekit_protocol::{self as proto, AudioTrackFeature}; +use livekit_protocol::{self as proto, AudioTrackFeature, PacketTrailerFeature}; use parking_lot::{Mutex, RwLock}; use super::track::TrackDimension; @@ -60,6 +60,7 @@ impl TrackPublication { pub fn is_remote(self: &Self) -> bool; pub fn encryption_type(self: &Self) -> EncryptionType; pub fn audio_features(self: &Self) -> Vec; + pub fn packet_trailer_features(self: &Self) -> Vec; pub(crate) fn on_muted(self: &Self, on_mute: impl Fn(TrackPublication) + Send + 'static) -> (); pub(crate) fn on_unmuted(self: &Self, on_unmute: impl Fn(TrackPublication) + Send + 'static) -> (); @@ -96,6 +97,7 @@ struct PublicationInfo { pub proto_info: proto::TrackInfo, pub encryption_type: EncryptionType, pub audio_features: Vec, + pub packet_trailer_features: Vec, } pub(crate) type MutedHandler = Box; @@ -133,6 +135,11 @@ pub(super) fn new_inner( .into_iter() .map(|item| item.try_into().unwrap()) .collect(), + packet_trailer_features: info + .packet_trailer_features + .iter() + .filter_map(|v| PacketTrailerFeature::try_from(*v).ok()) + .collect(), }; Arc::new(TrackPublicationInner { info: RwLock::new(info), events: Default::default() }) @@ -154,6 +161,11 @@ pub(super) fn update_info( info.mime_type = new_info.mime_type.clone(); info.simulcasted = new_info.simulcast; info.audio_features = new_info.audio_features().collect(); + info.packet_trailer_features = new_info + .packet_trailer_features + .iter() + .filter_map(|v| PacketTrailerFeature::try_from(*v).ok()) + .collect(); } pub(super) fn set_track( diff --git a/livekit/src/room/publication/remote.rs b/livekit/src/room/publication/remote.rs index 6d27003df..9e21b1383 100644 --- a/livekit/src/room/publication/remote.rs +++ b/livekit/src/room/publication/remote.rs @@ -14,7 +14,7 @@ use std::{fmt::Debug, sync::Arc}; -use livekit_protocol::{self as proto, AudioTrackFeature}; +use livekit_protocol::{self as proto, AudioTrackFeature, 
PacketTrailerFeature}; use parking_lot::{Mutex, RwLock}; use super::{PermissionStatus, SubscriptionStatus, TrackPublication, TrackPublicationInner}; @@ -399,4 +399,8 @@ impl RemoteTrackPublication { pub fn audio_features(&self) -> Vec { self.inner.info.read().audio_features.clone() } + + pub fn packet_trailer_features(&self) -> Vec { + self.inner.info.read().packet_trailer_features.clone() + } } From d40b41d7e14d735d8a29a42d2dee69398daab122 Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 6 Mar 2026 16:01:15 -0800 Subject: [PATCH 25/52] attach parser transformer always --- livekit/src/room/e2ee/manager.rs | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index c6deab134..9636868f3 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -110,26 +110,25 @@ impl E2eeManager { .packet_trailer_features .contains(&(PacketTrailerFeature::PtfUserTimestamp as i32)); - if has_user_timestamp { - if let RemoteTrack::Video(video_track) = &track { + if let RemoteTrack::Video(video_track) = &track { + let handler = + user_timestamp::create_receiver_handler(LkRuntime::instance().pc_factory(), &receiver); + video_track.set_user_timestamp_handler(handler.clone()); + user_timestamp_handler = Some(handler); + + if has_user_timestamp { log::info!( - "user_timestamp enabled for subscribed track {} from {}", + "attached user_timestamp handler for subscribed track {} from {}", publication.sid(), identity, ); - let handler = user_timestamp::create_receiver_handler( - LkRuntime::instance().pc_factory(), - &receiver, + } else { + log::info!( + "attached user_timestamp handler for subscribed track {} from {} without advertised packet trailer support", + publication.sid(), + identity, ); - video_track.set_user_timestamp_handler(handler.clone()); - user_timestamp_handler = Some(handler); } - } else { - log::info!( - "user_timestamp not present for 
subscribed track {} from {}", - publication.sid(), - identity, - ); } if !self.initialized() || publication.encryption_type() == EncryptionType::None { From f20a93e207187c4c5241c61711dccf75505663b2 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 7 Mar 2026 13:41:05 -0800 Subject: [PATCH 26/52] add frame id to trailer --- examples/local_video/src/publisher.rs | 16 ++-- examples/local_video/src/subscriber.rs | 17 +++- examples/screensharing/src/lib.rs | 1 + examples/wgpu_room/src/logo_track.rs | 1 + libwebrtc/src/native/user_timestamp.rs | 65 +++++++++------- libwebrtc/src/native/video_source.rs | 19 +++-- libwebrtc/src/native/video_stream.rs | 10 ++- libwebrtc/src/video_frame.rs | 2 + livekit-ffi/src/server/video_source.rs | 1 + webrtc-sys/include/livekit/user_timestamp.h | 67 +++++++++------- webrtc-sys/include/livekit/video_track.h | 6 +- webrtc-sys/src/user_timestamp.cpp | 86 +++++++++++++-------- webrtc-sys/src/user_timestamp.rs | 11 ++- webrtc-sys/src/video_track.cpp | 12 +-- webrtc-sys/src/video_track.rs | 1 + 15 files changed, 200 insertions(+), 115 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 37b242806..811617ad8 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -259,6 +259,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, user_timestamp_us: None, + frame_id: None, buffer: I420Buffer::new(width, height), }; let is_yuyv = fmt.format() == FrameFormat::YUYV; @@ -288,6 +289,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let mut sum_sleep_ms = 0.0; let mut sum_iter_ms = 0.0; let mut logged_mjpeg_fallback = false; + let mut frame_counter: u32 = 0; loop { if ctrl_c_received.load(Ordering::Acquire) { break; @@ -422,12 +424,16 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Update RTP timestamp (monotonic, microseconds since start) 
frame.timestamp_us = start_ts.elapsed().as_micros() as i64; - // Optionally attach wall-clock time as user timestamp - frame.user_timestamp_us = if args.attach_timestamp { - Some(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64) + // Optionally attach wall-clock time as user timestamp and frame_id + if args.attach_timestamp { + frame.user_timestamp_us = + Some(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64); + frame.frame_id = Some(frame_counter); + frame_counter = frame_counter.wrapping_add(1); } else { - None - }; + frame.user_timestamp_us = None; + frame.frame_id = None; + } rtc_source.capture_frame(&frame); let t4 = Instant::now(); diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 1781b9f8e..a4ebc9b0d 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -78,6 +78,8 @@ struct SharedYuv { dirty: bool, /// Last received user timestamp in microseconds, if any. user_timestamp_us: Option, + /// Last received frame_id, if any. + frame_id: Option, } #[derive(Clone)] @@ -333,6 +335,7 @@ async fn handle_track_subscribed( std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; s.user_timestamp_us = frame.user_timestamp_us; + s.frame_id = frame.frame_id; // Update smoothed FPS (~500ms window) fps_window_frames += 1; @@ -447,6 +450,8 @@ struct VideoApp { /// Cached user timestamp so the overlay doesn't flicker when the shared /// state momentarily has `None` between frame swaps. cached_user_timestamp_us: Option, + /// Cached frame_id so the overlay doesn't flicker. 
+ cached_frame_id: Option, } impl eframe::App for VideoApp { @@ -543,6 +548,9 @@ impl eframe::App for VideoApp { if let Some(ts) = s.user_timestamp_us { self.cached_user_timestamp_us = Some(ts); } + if let Some(fid) = s.frame_id { + self.cached_frame_id = Some(fid); + } } if let Some(user_ts) = self.cached_user_timestamp_us { egui::Area::new("timestamp_hud".into()) @@ -558,8 +566,13 @@ impl eframe::App for VideoApp { self.latency_last_update = Instant::now(); } + let frame_id_line = match self.cached_frame_id { + Some(fid) => format!("Frame ID: {}", fid), + None => "Frame ID: N/A".to_string(), + }; let lines = format!( - "Publish: {}\nSubscribe: {}\nLatency: {}", + "{}\nPublish: {}\nSubscribe: {}\nLatency: {}", + frame_id_line, format_timestamp_us(user_ts), format_timestamp_us(now_us), self.latency_display, @@ -697,6 +710,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { fps: 0.0, dirty: false, user_timestamp_us: None, + frame_id: None, })); // Subscribe to room events: on first video track, start sink task @@ -752,6 +766,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { latency_display: String::new(), latency_last_update: Instant::now(), cached_user_timestamp_us: None, + cached_frame_id: None, }; let native_options = eframe::NativeOptions::default(); eframe::run_native( diff --git a/examples/screensharing/src/lib.rs b/examples/screensharing/src/lib.rs index ed86b10f1..c5a815f5d 100644 --- a/examples/screensharing/src/lib.rs +++ b/examples/screensharing/src/lib.rs @@ -187,6 +187,7 @@ mod test { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, user_timestamp_us: None, + frame_id: None, buffer: I420Buffer::new(1, 1), }; move |result: Result| { diff --git a/examples/wgpu_room/src/logo_track.rs b/examples/wgpu_room/src/logo_track.rs index c4f9f6d79..7a88ca888 100644 --- a/examples/wgpu_room/src/logo_track.rs +++ b/examples/wgpu_room/src/logo_track.rs @@ -118,6 +118,7 @@ impl LogoTrack { rotation: 
VideoRotation::VideoRotation0, timestamp_us: 0, user_timestamp_us: None, + frame_id: None, buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), })), pos: (0, 0), diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index f5b37d14b..5376bfca6 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/user_timestamp.rs @@ -22,9 +22,9 @@ //! map keyed by capture timestamp. When the encoder produces a frame, //! the transformer looks up the user timestamp via the frame's CaptureTime(). //! -//! On the receive side, extracted user timestamps are stored in an +//! On the receive side, extracted frame metadata is stored in an //! internal map keyed by RTP timestamp. Decoded frames look up their -//! user timestamp via lookup_user_timestamp(rtp_timestamp). +//! metadata via lookup_frame_metadata(rtp_timestamp). use cxx::SharedPtr; use webrtc_sys::user_timestamp::ffi as sys_ut; @@ -36,13 +36,13 @@ use crate::{ /// Handler for user timestamp embedding/extraction on RTP streams. /// -/// For sender side: Stores user timestamps keyed by capture timestamp -/// and embeds them as 12-byte trailers on encoded frames before they -/// are sent. Use `store_user_timestamp()` to associate a user timestamp -/// with a captured frame. +/// For sender side: Stores frame metadata keyed by capture timestamp +/// and embeds them as 16-byte trailers on encoded frames before they +/// are sent. Use `store_frame_metadata()` to associate metadata with +/// a captured frame. /// -/// For receiver side: Extracts user timestamps from received frames -/// and makes them available for retrieval via `lookup_user_timestamp()`. +/// For receiver side: Extracts frame metadata from received frames +/// and makes them available for retrieval via `lookup_frame_metadata()`. 
#[derive(Clone)] pub struct UserTimestampHandler { sys_handle: SharedPtr, @@ -59,40 +59,45 @@ impl UserTimestampHandler { self.sys_handle.enabled() } - /// Lookup the user timestamp for a given RTP timestamp (receiver side). - /// Returns None if no timestamp was found for this RTP timestamp. + /// Lookup the frame metadata for a given RTP timestamp (receiver side). + /// Returns `Some((user_timestamp_us, frame_id))` if found, `None` otherwise. /// The entry is removed from the map after a successful lookup. - /// - /// Use the RTP timestamp from the decoded video frame to correlate - /// it with the user timestamp that was embedded in the encoded frame. - pub fn lookup_user_timestamp(&self, rtp_timestamp: u32) -> Option { + pub fn lookup_frame_metadata(&self, rtp_timestamp: u32) -> Option<(i64, u32)> { let ts = self.sys_handle.lookup_user_timestamp(rtp_timestamp); if ts >= 0 { - Some(ts) + let frame_id = self.sys_handle.last_lookup_frame_id(); + Some((ts, frame_id)) } else { None } } - /// Store a user timestamp for a given capture timestamp (sender side). + /// Store frame metadata for a given capture timestamp (sender side). /// /// The `capture_timestamp_us` must be the TimestampAligner-adjusted /// timestamp (as produced by `VideoTrackSource::on_captured_frame`), /// NOT the original `timestamp_us` from the VideoFrame. The transformer - /// looks up the user timestamp by the frame's `CaptureTime()` which is + /// looks up the metadata by the frame's `CaptureTime()` which is /// derived from the aligned value. /// - /// In normal usage this is called automatically by the C++ layer — - /// callers should set `user_timestamp_us` on the `VideoFrame` and let - /// `capture_frame` / `on_captured_frame` handle the rest. 
- pub fn store_user_timestamp(&self, capture_timestamp_us: i64, user_timestamp_us: i64) { + /// In normal usage this is called automatically by the C++ layer -- + /// callers should set `user_timestamp_us` and `frame_id` on the + /// `VideoFrame` and let `capture_frame` / `on_captured_frame` handle + /// the rest. + pub fn store_frame_metadata( + &self, + capture_timestamp_us: i64, + user_timestamp_us: i64, + frame_id: u32, + ) { log::info!( target: "user_timestamp", - "store: capture_ts_us={}, user_ts_us={}", + "store: capture_ts_us={}, user_ts_us={}, frame_id={}", capture_timestamp_us, - user_timestamp_us + user_timestamp_us, + frame_id ); - self.sys_handle.store_user_timestamp(capture_timestamp_us, user_timestamp_us); + self.sys_handle.store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } pub(crate) fn sys_handle(&self) -> SharedPtr { @@ -102,9 +107,9 @@ impl UserTimestampHandler { /// Create a sender-side user timestamp handler. /// -/// This handler will embed user timestamps into encoded frames before -/// they are packetized and sent. Use `store_user_timestamp()` to -/// associate a user timestamp with a captured frame's capture timestamp. +/// This handler will embed frame metadata into encoded frames before +/// they are packetized and sent. Use `store_frame_metadata()` to +/// associate metadata with a captured frame's capture timestamp. pub fn create_sender_handler( peer_factory: &PeerConnectionFactory, sender: &RtpSender, @@ -119,10 +124,10 @@ pub fn create_sender_handler( /// Create a receiver-side user timestamp handler. /// -/// This handler will extract user timestamps from received frames +/// This handler will extract frame metadata from received frames /// and store them in a map keyed by RTP timestamp. Use -/// `lookup_user_timestamp(rtp_timestamp)` to retrieve the user -/// timestamp for a specific decoded frame. +/// `lookup_frame_metadata(rtp_timestamp)` to retrieve the metadata +/// for a specific decoded frame. 
pub fn create_receiver_handler( peer_factory: &PeerConnectionFactory, receiver: &RtpReceiver, diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 5626eb8ac..705166bf7 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -81,7 +81,7 @@ impl NativeVideoSource { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); builder.pin_mut().set_timestamp_us(now.as_micros() as i64); - source.sys_handle.on_captured_frame(&builder.pin_mut().build(), false, 0); + source.sys_handle.on_captured_frame(&builder.pin_mut().build(), false, 0, 0); } } }); @@ -106,20 +106,23 @@ impl NativeVideoSource { }; builder.pin_mut().set_timestamp_us(capture_ts); - // Pass the user timestamp to the C++ on_captured_frame so it can - // store the mapping keyed by the TimestampAligner-adjusted capture - // timestamp. This is the only correct key because the aligner runs - // inside on_captured_frame and replaces timestamp_us with a value - // derived from rtc::TimeMicros() (monotonic), which is what - // CaptureTime() returns in TransformSend. + // Pass the user timestamp and frame_id to the C++ on_captured_frame + // so it can store the mapping keyed by the TimestampAligner-adjusted + // capture timestamp. let (has_user_ts, user_ts) = match frame.user_timestamp_us { Some(ts) => (true, ts), None => (false, 0), }; + let frame_id = frame.frame_id.unwrap_or(0); self.inner.lock().captured_frames += 1; - self.sys_handle.on_captured_frame(&builder.pin_mut().build(), has_user_ts, user_ts); + self.sys_handle.on_captured_frame( + &builder.pin_mut().build(), + has_user_ts, + user_ts, + frame_id, + ); } /// Set the user timestamp handler used by this source. 
diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index c0e900280..29898ac59 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -106,16 +106,22 @@ struct VideoTrackObserver { impl sys_vt::VideoSink for VideoTrackObserver { fn on_frame(&self, frame: UniquePtr) { let rtp_timestamp = frame.timestamp(); - let user_timestamp_us = self + let meta = self .user_timestamp_handler .lock() .as_ref() - .and_then(|h| h.lookup_user_timestamp(rtp_timestamp)); + .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)); + + let (user_timestamp_us, frame_id) = match meta { + Some((ts, fid)) => (Some(ts), Some(fid)), + None => (None, None), + }; let _ = self.frame_tx.send(VideoFrame { rotation: frame.rotation().into(), timestamp_us: frame.timestamp_us(), user_timestamp_us, + frame_id, buffer: new_video_frame_buffer(unsafe { frame.video_frame_buffer() }), }); } diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index c56317eed..eb42464e9 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -61,6 +61,8 @@ where pub timestamp_us: i64, // When the frame was captured in microseconds /// Optional user timestamp in microseconds, if available. pub user_timestamp_us: Option, + /// Optional user-supplied frame identifier. 
+ pub frame_id: Option, pub buffer: T, } diff --git a/livekit-ffi/src/server/video_source.rs b/livekit-ffi/src/server/video_source.rs index 88cf110ff..251b7b3c7 100644 --- a/livekit-ffi/src/server/video_source.rs +++ b/livekit-ffi/src/server/video_source.rs @@ -68,6 +68,7 @@ impl FfiVideoSource { rotation: capture.rotation().into(), timestamp_us: capture.timestamp_us, user_timestamp_us: None, + frame_id: None, buffer, }; diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index 8f42f68b4..70f3f5c9f 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -48,7 +48,12 @@ namespace livekit_ffi { // Magic bytes to identify user timestamp trailers: "LKTS" (LiveKit TimeStamp) constexpr uint8_t kUserTimestampMagic[4] = {'L', 'K', 'T', 'S'}; constexpr size_t kUserTimestampTrailerSize = - 12; // 8 bytes timestamp + 4 bytes magic + 16; // 8 bytes timestamp + 4 bytes frame_id + 4 bytes magic + +struct FrameMetadata { + int64_t user_timestamp_us; + uint32_t frame_id; +}; /// Frame transformer that appends/extracts user timestamp trailers. /// This transformer can be used standalone or in conjunction with e2ee. @@ -57,9 +62,9 @@ constexpr size_t kUserTimestampTrailerSize = /// by capture timestamp (microseconds). When TransformSend fires it /// looks up the user timestamp via the frame's CaptureTime(). /// -/// On the receive side, extracted user timestamps are stored in an +/// On the receive side, extracted frame metadata is stored in an /// internal map keyed by RTP timestamp (uint32_t). Decoded frames can -/// look up their user timestamp via lookup_user_timestamp(rtp_ts). +/// look up their metadata via lookup_frame_metadata(rtp_ts). 
class UserTimestampTransformer : public webrtc::FrameTransformerInterface { public: enum class Direction { kSend, kReceive }; @@ -82,17 +87,18 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { void set_enabled(bool enabled); bool enabled() const; - /// Lookup the user timestamp associated with a given RTP timestamp. - /// Returns the user timestamp if found, nullopt otherwise. + /// Lookup the frame metadata associated with a given RTP timestamp. + /// Returns the metadata if found, nullopt otherwise. /// The entry is removed from the map after lookup. - std::optional lookup_user_timestamp(uint32_t rtp_timestamp); + std::optional lookup_frame_metadata(uint32_t rtp_timestamp); - /// Store a user timestamp for a given capture timestamp (sender side). + /// Store frame metadata for a given capture timestamp (sender side). /// Called from VideoTrackSource::on_captured_frame with the /// TimestampAligner-adjusted timestamp, which matches CaptureTime() /// in the encoder pipeline. 
- void store_user_timestamp(int64_t capture_timestamp_us, - int64_t user_timestamp_us); + void store_frame_metadata(int64_t capture_timestamp_us, + int64_t user_timestamp_us, + uint32_t frame_id); private: void TransformSend( @@ -100,14 +106,14 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { void TransformReceive( std::unique_ptr frame); - /// Append user timestamp trailer to frame data - std::vector AppendTimestampTrailer( + /// Append frame metadata trailer to frame data + std::vector AppendTrailer( rtc::ArrayView data, - int64_t user_timestamp_us); + int64_t user_timestamp_us, + uint32_t frame_id); - /// Extract and remove user timestamp trailer from frame data - /// Returns the user timestamp if found, nullopt otherwise - std::optional ExtractTimestampTrailer( + /// Extract and remove frame metadata trailer from frame data + std::optional ExtractTrailer( rtc::ArrayView data, std::vector& out_data); @@ -118,20 +124,19 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { std::unordered_map> sink_callbacks_; - // Send-side map: capture timestamp (us) -> user timestamp (us). - // Populated by store_user_timestamp(), consumed by TransformSend() + // Send-side map: capture timestamp (us) -> frame metadata. + // Populated by store_frame_metadata(), consumed by TransformSend() // via CaptureTime() lookup. mutable webrtc::Mutex send_map_mutex_; - mutable std::unordered_map send_map_; + mutable std::unordered_map send_map_; mutable std::deque send_map_order_; static constexpr size_t kMaxSendMapEntries = 300; - // Receive-side map: RTP timestamp -> user timestamp. - // Keyed by RTP timestamp so decoded frames can look up their user - // timestamp regardless of frame drops or reordering. + // Receive-side map: RTP timestamp -> frame metadata. + // Keyed by RTP timestamp so decoded frames can look up their + // metadata regardless of frame drops or reordering. 
mutable webrtc::Mutex recv_map_mutex_; - mutable std::unordered_map recv_map_; - // Track insertion order for pruning old entries. + mutable std::unordered_map recv_map_; mutable std::deque recv_map_order_; static constexpr size_t kMaxRecvMapEntries = 300; }; @@ -154,13 +159,18 @@ class UserTimestampHandler { bool enabled() const; /// Lookup the user timestamp for a given RTP timestamp (receiver side). - /// Returns -1 if not found. + /// Returns -1 if not found. The entry is removed after lookup. + /// Also caches the frame_id for retrieval via last_lookup_frame_id(). int64_t lookup_user_timestamp(uint32_t rtp_timestamp) const; - /// Store a user timestamp for a given capture timestamp (sender side). - /// Call this when capturing a video frame with a user timestamp. - void store_user_timestamp(int64_t capture_timestamp_us, - int64_t user_timestamp_us) const; + /// Returns the frame_id from the most recent successful + /// lookup_user_timestamp() call. Returns 0 if no lookup succeeded. + uint32_t last_lookup_frame_id() const; + + /// Store frame metadata for a given capture timestamp (sender side). + void store_frame_metadata(int64_t capture_timestamp_us, + int64_t user_timestamp_us, + uint32_t frame_id) const; /// Access the underlying transformer for chaining. 
rtc::scoped_refptr transformer() const; @@ -170,6 +180,7 @@ class UserTimestampHandler { rtc::scoped_refptr transformer_; rtc::scoped_refptr sender_; rtc::scoped_refptr receiver_; + mutable uint32_t last_frame_id_{0}; }; // Factory functions for Rust FFI diff --git a/webrtc-sys/include/livekit/video_track.h b/webrtc-sys/include/livekit/video_track.h index 784a7e0f9..871b32302 100644 --- a/webrtc-sys/include/livekit/video_track.h +++ b/webrtc-sys/include/livekit/video_track.h @@ -101,7 +101,8 @@ class VideoTrackSource { VideoResolution video_resolution() const; bool on_captured_frame(const webrtc::VideoFrame& frame, bool has_user_timestamp, - int64_t user_timestamp_us); + int64_t user_timestamp_us, + uint32_t frame_id); void set_user_timestamp_handler( std::shared_ptr handler); @@ -121,7 +122,8 @@ class VideoTrackSource { bool on_captured_frame(const std::unique_ptr& frame, bool has_user_timestamp, - int64_t user_timestamp_us) + int64_t user_timestamp_us, + uint32_t frame_id) const; // frames pushed from Rust (+interior mutability) void set_user_timestamp_handler( diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index ef77ed91a..b113672d8 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -76,12 +76,12 @@ void UserTimestampTransformer::TransformSend( auto data = frame->GetData(); - // Look up the user timestamp by the frame's capture time. + // Look up the frame metadata by the frame's capture time. // CaptureTime() returns Timestamp::Millis(capture_time_ms_) where // capture_time_ms_ = timestamp_us / 1000. So capture_time->us() // has millisecond precision (bottom 3 digits always zero). - // store_user_timestamp() truncates its key the same way. - int64_t ts_to_embed = 0; + // store_frame_metadata() truncates its key the same way. 
+ FrameMetadata meta_to_embed{0, 0}; auto capture_time = frame->CaptureTime(); if (capture_time.has_value()) { int64_t capture_us = capture_time->us(); @@ -89,9 +89,9 @@ void UserTimestampTransformer::TransformSend( webrtc::MutexLock lock(&send_map_mutex_); auto it = send_map_.find(capture_us); if (it != send_map_.end()) { - ts_to_embed = it->second; + meta_to_embed = it->second; // Don't erase — simulcast layers share the same capture time. - // Entries are pruned by capacity in store_user_timestamp(). + // Entries are pruned by capacity in store_frame_metadata(). } } else { RTC_LOG(LS_WARNING) @@ -100,10 +100,11 @@ void UserTimestampTransformer::TransformSend( } // Always append trailer when enabled (even if timestamp is 0, - // which indicates no user timestamp was set for this frame) + // which indicates no metadata was set for this frame) std::vector new_data; if (enabled_.load()) { - new_data = AppendTimestampTrailer(data, ts_to_embed); + new_data = AppendTrailer(data, meta_to_embed.user_timestamp_us, + meta_to_embed.frame_id); frame->SetData(rtc::ArrayView(new_data)); } @@ -135,11 +136,11 @@ void UserTimestampTransformer::TransformReceive( auto data = frame->GetData(); std::vector stripped_data; - auto user_ts = ExtractTimestampTrailer(data, stripped_data); + auto meta = ExtractTrailer(data, stripped_data); - if (user_ts.has_value()) { + if (meta.has_value()) { // Store in the receive map keyed by RTP timestamp so decoded frames - // can look up their user timestamp regardless of frame drops. + // can look up their metadata regardless of frame drops. 
{ webrtc::MutexLock lock(&recv_map_mutex_); // Evict oldest entry if at capacity @@ -150,12 +151,11 @@ void UserTimestampTransformer::TransformReceive( if (recv_map_.find(rtp_timestamp) == recv_map_.end()) { recv_map_order_.push_back(rtp_timestamp); } - recv_map_[rtp_timestamp] = user_ts.value(); + recv_map_[rtp_timestamp] = meta.value(); } // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); - } // Forward to the appropriate callback (either global or per-SSRC sink). @@ -179,21 +179,28 @@ void UserTimestampTransformer::TransformReceive( } } -std::vector UserTimestampTransformer::AppendTimestampTrailer( +std::vector UserTimestampTransformer::AppendTrailer( rtc::ArrayView data, - int64_t user_timestamp_us) { + int64_t user_timestamp_us, + uint32_t frame_id) { std::vector result; result.reserve(data.size() + kUserTimestampTrailerSize); // Copy original data result.insert(result.end(), data.begin(), data.end()); - // Append timestamp (big-endian) + // Append user_timestamp_us (big-endian, 8 bytes) for (int i = 7; i >= 0; --i) { result.push_back( static_cast((user_timestamp_us >> (i * 8)) & 0xFF)); } + // Append frame_id (big-endian, 4 bytes) + for (int i = 3; i >= 0; --i) { + result.push_back( + static_cast((frame_id >> (i * 8)) & 0xFF)); + } + // Append magic bytes result.insert(result.end(), std::begin(kUserTimestampMagic), std::end(kUserTimestampMagic)); @@ -201,7 +208,7 @@ std::vector UserTimestampTransformer::AppendTimestampTrailer( return result; } -std::optional UserTimestampTransformer::ExtractTimestampTrailer( +std::optional UserTimestampTransformer::ExtractTrailer( rtc::ArrayView data, std::vector& out_data) { if (data.size() < kUserTimestampTrailerSize) { @@ -216,19 +223,26 @@ std::optional UserTimestampTransformer::ExtractTimestampTrailer( return std::nullopt; } - // Extract timestamp (big-endian) - const uint8_t* ts_start = + const uint8_t* trailer_start = data.data() + data.size() - kUserTimestampTrailerSize; + + // Extract 
user_timestamp_us (big-endian, 8 bytes) int64_t timestamp = 0; for (int i = 0; i < 8; ++i) { - timestamp = (timestamp << 8) | ts_start[i]; + timestamp = (timestamp << 8) | trailer_start[i]; + } + + // Extract frame_id (big-endian, 4 bytes) + uint32_t frame_id = 0; + for (int i = 0; i < 4; ++i) { + frame_id = (frame_id << 8) | trailer_start[8 + i]; } // Copy data without trailer out_data.assign(data.begin(), data.end() - kUserTimestampTrailerSize); - return timestamp; + return FrameMetadata{timestamp, frame_id}; } void UserTimestampTransformer::RegisterTransformedFrameCallback( @@ -263,14 +277,14 @@ bool UserTimestampTransformer::enabled() const { return enabled_.load(); } -std::optional UserTimestampTransformer::lookup_user_timestamp( +std::optional UserTimestampTransformer::lookup_frame_metadata( uint32_t rtp_timestamp) { webrtc::MutexLock lock(&recv_map_mutex_); auto it = recv_map_.find(rtp_timestamp); if (it == recv_map_.end()) { return std::nullopt; } - int64_t ts = it->second; + FrameMetadata meta = it->second; recv_map_.erase(it); // Remove from insertion-order tracker (linear scan is fine for bounded size) for (auto oit = recv_map_order_.begin(); oit != recv_map_order_.end(); ++oit) { @@ -279,12 +293,13 @@ std::optional UserTimestampTransformer::lookup_user_timestamp( break; } } - return ts; + return meta; } -void UserTimestampTransformer::store_user_timestamp( +void UserTimestampTransformer::store_frame_metadata( int64_t capture_timestamp_us, - int64_t user_timestamp_us) { + int64_t user_timestamp_us, + uint32_t frame_id) { // Truncate to millisecond precision to match what WebRTC stores // internally. 
The encoder pipeline converts the VideoFrame's // timestamp_us to capture_time_ms_ = timestamp_us / 1000, and @@ -309,7 +324,7 @@ void UserTimestampTransformer::store_user_timestamp( if (send_map_.find(key) == send_map_.end()) { send_map_order_.push_back(key); } - send_map_[key] = user_timestamp_us; + send_map_[key] = FrameMetadata{user_timestamp_us, frame_id}; } // UserTimestampHandler implementation @@ -341,14 +356,23 @@ bool UserTimestampHandler::enabled() const { } int64_t UserTimestampHandler::lookup_user_timestamp(uint32_t rtp_timestamp) const { - auto ts = transformer_->lookup_user_timestamp(rtp_timestamp); - return ts.value_or(-1); + auto meta = transformer_->lookup_frame_metadata(rtp_timestamp); + if (meta.has_value()) { + last_frame_id_ = meta->frame_id; + return meta->user_timestamp_us; + } + return -1; +} + +uint32_t UserTimestampHandler::last_lookup_frame_id() const { + return last_frame_id_; } -void UserTimestampHandler::store_user_timestamp( +void UserTimestampHandler::store_frame_metadata( int64_t capture_timestamp_us, - int64_t user_timestamp_us) const { - transformer_->store_user_timestamp(capture_timestamp_us, user_timestamp_us); + int64_t user_timestamp_us, + uint32_t frame_id) const { + transformer_->store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } rtc::scoped_refptr UserTimestampHandler::transformer() const { diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/user_timestamp.rs index 996c9b106..55a8121fa 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/user_timestamp.rs @@ -37,14 +37,19 @@ pub mod ffi { /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. The entry is removed after lookup. + /// Also caches the frame_id for retrieval via last_lookup_frame_id(). fn lookup_user_timestamp(self: &UserTimestampHandler, rtp_timestamp: u32) -> i64; - /// Store a user timestamp for a given capture timestamp (sender side). 
- /// Call this when capturing a video frame with a user timestamp. - fn store_user_timestamp( + /// Returns the frame_id from the most recent successful + /// lookup_user_timestamp() call. + fn last_lookup_frame_id(self: &UserTimestampHandler) -> u32; + + /// Store frame metadata for a given capture timestamp (sender side). + fn store_frame_metadata( self: &UserTimestampHandler, capture_timestamp_us: i64, user_timestamp_us: i64, + frame_id: u32, ); /// Create a new user timestamp handler for a sender. diff --git a/webrtc-sys/src/video_track.cpp b/webrtc-sys/src/video_track.cpp index b0f0d5b77..196508105 100644 --- a/webrtc-sys/src/video_track.cpp +++ b/webrtc-sys/src/video_track.cpp @@ -136,7 +136,8 @@ VideoResolution VideoTrackSource::InternalSource::video_resolution() const { bool VideoTrackSource::InternalSource::on_captured_frame( const webrtc::VideoFrame& frame, bool has_user_timestamp, - int64_t user_timestamp_us) { + int64_t user_timestamp_us, + uint32_t frame_id) { webrtc::MutexLock lock(&mutex_); int64_t aligned_timestamp_us = timestamp_aligner_.TranslateTimestamp( @@ -147,8 +148,8 @@ bool VideoTrackSource::InternalSource::on_captured_frame( // that CaptureTime() will return in TransformSend, so the lookup will // succeed. 
if (has_user_timestamp && user_timestamp_handler_) { - user_timestamp_handler_->store_user_timestamp( - aligned_timestamp_us, user_timestamp_us); + user_timestamp_handler_->store_frame_metadata( + aligned_timestamp_us, user_timestamp_us, frame_id); } webrtc::scoped_refptr buffer = @@ -204,10 +205,11 @@ VideoResolution VideoTrackSource::video_resolution() const { bool VideoTrackSource::on_captured_frame( const std::unique_ptr& frame, bool has_user_timestamp, - int64_t user_timestamp_us) const { + int64_t user_timestamp_us, + uint32_t frame_id) const { auto rtc_frame = frame->get(); return source_->on_captured_frame(rtc_frame, has_user_timestamp, - user_timestamp_us); + user_timestamp_us, frame_id); } void VideoTrackSource::set_user_timestamp_handler( diff --git a/webrtc-sys/src/video_track.rs b/webrtc-sys/src/video_track.rs index 57cfd8d59..eaa6c04bf 100644 --- a/webrtc-sys/src/video_track.rs +++ b/webrtc-sys/src/video_track.rs @@ -77,6 +77,7 @@ pub mod ffi { frame: &UniquePtr, has_user_timestamp: bool, user_timestamp_us: i64, + frame_id: u32, ) -> bool; fn set_user_timestamp_handler( self: &VideoTrackSource, From 57a7f74a3bf5f99869f2a72c1508db382ac0fd26 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 7 Mar 2026 15:01:02 -0800 Subject: [PATCH 27/52] fix parsing. Explicitly set simulcast fps to 30. 
--- examples/local_video/src/publisher.rs | 52 ++++- examples/local_video/src/subscriber.rs | 22 ++ libwebrtc/src/native/user_timestamp.rs | 6 + libwebrtc/src/native/video_stream.rs | 10 +- livekit-ffi/src/conversion/room.rs | 1 + livekit/src/room/options.rs | 9 +- webrtc-sys/include/livekit/user_timestamp.h | 26 +++ webrtc-sys/src/user_timestamp.cpp | 213 ++++++++++++++++++-- 8 files changed, 311 insertions(+), 28 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 811617ad8..17d1d27e1 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -1,7 +1,7 @@ use anyhow::Result; use clap::Parser; use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType}; -use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding}; +use livekit::options::{self, TrackPublishOptions, VideoCodec, VideoEncoding, VideoPreset, video as video_presets}; use livekit::prelude::*; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; @@ -146,6 +146,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); let mut room_options = RoomOptions::default(); room_options.auto_subscribe = true; + room_options.dynacast = true; // Configure E2EE if an encryption key is provided if let Some(ref e2ee_key) = args.e2ee_key { @@ -220,19 +221,40 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let requested_codec = if args.h265 { VideoCodec::H265 } else { VideoCodec::H264 }; info!("Attempting publish with codec: {}", requested_codec.as_str()); + // Compute an explicit video encoding so all simulcast layers use 30 fps. + // The SDK defaults reduce lower layers to 15/20 fps; we override that here. 
+ let target_fps = args.fps as f64; + let main_encoding = { + let base = options::compute_appropriate_encoding(false, width, height, VideoCodec::H264); + VideoEncoding { + max_bitrate: args.max_bitrate.unwrap_or(base.max_bitrate), + max_framerate: target_fps, + } + }; + let simulcast_presets = compute_simulcast_presets_30fps(width, height, target_fps); + info!( + "Video encoding: {}x{} @ {:.0} fps, {} bps (simulcast layers: {})", + width, + height, + target_fps, + main_encoding.max_bitrate, + simulcast_presets + .iter() + .map(|p| format!("{}x{}@{:.0}fps/{}bps", p.width, p.height, p.encoding.max_framerate, p.encoding.max_bitrate)) + .collect::>() + .join(", "), + ); + let publish_opts = |codec: VideoCodec| { - let mut opts = TrackPublishOptions { + TrackPublishOptions { source: TrackSource::Camera, simulcast: args.simulcast, video_codec: codec, user_timestamp: args.attach_timestamp, + video_encoding: Some(main_encoding.clone()), + simulcast_layers: Some(simulcast_presets.clone()), ..Default::default() - }; - if let Some(bitrate) = args.max_bitrate { - opts.video_encoding = - Some(VideoEncoding { max_bitrate: bitrate, max_framerate: args.fps as f64 }); } - opts }; let publish_result = room @@ -486,3 +508,19 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { Ok(()) } + +/// Build simulcast presets that match the SDK defaults but with a uniform frame rate. +/// The SDK's built-in `DEFAULT_SIMULCAST_PRESETS` use 15/20 fps for lower layers; +/// this keeps the same resolutions and bitrates but overrides fps to `target_fps`. 
+fn compute_simulcast_presets_30fps(width: u32, height: u32, target_fps: f64) -> Vec { + let ar = width as f32 / height as f32; + let defaults: &[VideoPreset] = if f32::abs(ar - 16.0 / 9.0) < f32::abs(ar - 4.0 / 3.0) { + video_presets::DEFAULT_SIMULCAST_PRESETS + } else { + livekit::options::video43::DEFAULT_SIMULCAST_PRESETS + }; + defaults + .iter() + .map(|p| VideoPreset::new(p.width, p.height, p.encoding.max_bitrate, target_fps)) + .collect() +} diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index a4ebc9b0d..72bf7bd18 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -334,6 +334,26 @@ async fn handle_track_subscribed( std::mem::swap(&mut s.u, &mut u_buf); std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; + + if let Some(ts) = frame.user_timestamp_us { + let now_us = current_timestamp_us(); + let delta_ms = (now_us - ts) as f64 / 1000.0; + if ts < 0 || ts > 2_000_000_000_000_000 || delta_ms < -60_000.0 { + log::warn!( + "[Subscriber] BAD TIMESTAMP: frame_id={:?} user_ts={} \ + timestamp_us={} now_us={} delta_ms={:.1} \ + prev_user_ts={:?} prev_frame_id={:?}", + frame.frame_id, + ts, + frame.timestamp_us, + now_us, + delta_ms, + s.user_timestamp_us, + s.frame_id, + ); + } + } + s.user_timestamp_us = frame.user_timestamp_us; s.frame_id = frame.frame_id; @@ -674,6 +694,8 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); let mut room_options = RoomOptions::default(); room_options.auto_subscribe = true; + room_options.dynacast = true; + room_options.adaptive_stream = true; // Configure E2EE if an encryption key is provided if let Some(ref e2ee_key) = args.e2ee_key { diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/user_timestamp.rs index 5376bfca6..a809d5d1b 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ 
b/libwebrtc/src/native/user_timestamp.rs @@ -66,6 +66,12 @@ impl UserTimestampHandler { let ts = self.sys_handle.lookup_user_timestamp(rtp_timestamp); if ts >= 0 { let frame_id = self.sys_handle.last_lookup_frame_id(); + if ts > 2_000_000_000_000_000 || ts < 0 { + log::warn!( + "[UserTS-FFI] C++ returned bad ts={} (0x{:016x}) fid={} rtp_ts={}", + ts, ts, frame_id, rtp_timestamp + ); + } Some((ts, frame_id)) } else { None diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 29898ac59..a3619a10a 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -113,7 +113,15 @@ impl sys_vt::VideoSink for VideoTrackObserver { .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)); let (user_timestamp_us, frame_id) = match meta { - Some((ts, fid)) => (Some(ts), Some(fid)), + Some((ts, fid)) => { + if ts < 0 || ts > 2_000_000_000_000_000 { + log::warn!( + "[on_frame] SUSPICIOUS user_ts={} fid={} rtp_ts={}", + ts, fid, rtp_timestamp + ); + } + (Some(ts), Some(fid)) + } None => (None, None), }; diff --git a/livekit-ffi/src/conversion/room.rs b/livekit-ffi/src/conversion/room.rs index af8c06d63..918fbc8e3 100644 --- a/livekit-ffi/src/conversion/room.rs +++ b/livekit-ffi/src/conversion/room.rs @@ -253,6 +253,7 @@ impl From for TrackPublishOptions { red: opts.red.unwrap_or(default_publish_options.red), simulcast: opts.simulcast.unwrap_or(default_publish_options.simulcast), stream: opts.stream.unwrap_or(default_publish_options.stream), + simulcast_layers: default_publish_options.simulcast_layers, preconnect_buffer: opts .preconnect_buffer .unwrap_or(default_publish_options.preconnect_buffer), diff --git a/livekit/src/room/options.rs b/livekit/src/room/options.rs index 8e70414ea..46f39072f 100644 --- a/livekit/src/room/options.rs +++ b/livekit/src/room/options.rs @@ -84,6 +84,9 @@ pub struct TrackPublishOptions { pub dtx: bool, pub red: bool, pub simulcast: bool, + /// Custom simulcast layer presets (low, 
mid). When set, these override the + /// SDK's built-in defaults which reduce fps on lower layers. + pub simulcast_layers: Option>, // pub name: String, pub source: TrackSource, pub stream: String, @@ -100,6 +103,7 @@ impl Default for TrackPublishOptions { dtx: true, red: true, simulcast: true, + simulcast_layers: None, source: TrackSource::Unknown, stream: "".to_string(), preconnect_buffer: false, @@ -149,7 +153,10 @@ pub fn compute_video_encodings( return into_rtp_encodings(width, height, &[initial_preset]); } - let mut simulcast_presets = compute_default_simulcast_presets(screenshare, &initial_preset); + let mut simulcast_presets = match options.simulcast_layers { + Some(ref custom) => custom.clone(), + None => compute_default_simulcast_presets(screenshare, &initial_preset), + }; let mid_preset = simulcast_presets.pop(); let low_preset = simulcast_presets.pop(); diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index 70f3f5c9f..daabd5314 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -53,6 +53,7 @@ constexpr size_t kUserTimestampTrailerSize = struct FrameMetadata { int64_t user_timestamp_us; uint32_t frame_id; + uint32_t ssrc; // SSRC that produced this entry (for simulcast tracking) }; /// Frame transformer that appends/extracts user timestamp trailers. @@ -132,6 +133,15 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable std::deque send_map_order_; static constexpr size_t kMaxSendMapEntries = 300; + // Send-side per-SSRC stats for diagnosing simulcast encoding delay. + struct SendSsrcStats { + uint64_t frame_count{0}; + int64_t last_user_ts{0}; + int64_t sum_encode_delay_us{0}; + uint64_t encode_delay_samples{0}; + }; + mutable std::unordered_map send_ssrc_stats_; + // Receive-side map: RTP timestamp -> frame metadata. 
// Keyed by RTP timestamp so decoded frames can look up their // metadata regardless of frame drops or reordering. @@ -139,6 +149,22 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable std::unordered_map recv_map_; mutable std::deque recv_map_order_; static constexpr size_t kMaxRecvMapEntries = 300; + + // Simulcast tracking: detect layer switches and flush stale entries. + mutable uint32_t recv_active_ssrc_{0}; + mutable int64_t recv_last_user_ts_{0}; + mutable uint64_t recv_frame_count_{0}; + mutable uint64_t recv_lookup_hits_{0}; + mutable uint64_t recv_lookup_misses_{0}; + + // Receive-side per-SSRC latency tracking. + struct RecvSsrcStats { + uint64_t frame_count{0}; + int64_t sum_latency_us{0}; + uint64_t latency_samples{0}; + int64_t max_latency_us{0}; + }; + mutable std::unordered_map recv_ssrc_stats_; }; /// Wrapper class for Rust FFI that manages user timestamp transformers. diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index b113672d8..36f23b9ed 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -16,6 +16,8 @@ #include "livekit/user_timestamp.h" +#include +#include #include #include @@ -81,7 +83,7 @@ void UserTimestampTransformer::TransformSend( // capture_time_ms_ = timestamp_us / 1000. So capture_time->us() // has millisecond precision (bottom 3 digits always zero). // store_frame_metadata() truncates its key the same way. - FrameMetadata meta_to_embed{0, 0}; + FrameMetadata meta_to_embed{0, 0, 0}; auto capture_time = frame->CaptureTime(); if (capture_time.has_value()) { int64_t capture_us = capture_time->us(); @@ -108,6 +110,35 @@ void UserTimestampTransformer::TransformSend( frame->SetData(rtc::ArrayView(new_data)); } + // Track per-SSRC encoding delay for simulcast diagnostics. 
+ { + auto now_us = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + webrtc::MutexLock lock(&send_map_mutex_); + auto& stats = send_ssrc_stats_[ssrc]; + stats.frame_count++; + if (meta_to_embed.user_timestamp_us > 0) { + int64_t delay_us = now_us - meta_to_embed.user_timestamp_us; + stats.sum_encode_delay_us += delay_us; + stats.encode_delay_samples++; + if ((stats.frame_count % 60) == 1) { + double avg_ms = stats.encode_delay_samples > 0 + ? (stats.sum_encode_delay_us / + (double)stats.encode_delay_samples / 1000.0) + : 0.0; + fprintf(stderr, + "[UserTS-Send] ssrc=%u frames=%llu fid=%u " + "encode_delay=%.1fms (cur=%.1fms) user_ts=%lld\n", + ssrc, (unsigned long long)stats.frame_count, + meta_to_embed.frame_id, avg_ms, delay_us / 1000.0, + (long long)meta_to_embed.user_timestamp_us); + stats.sum_encode_delay_us = 0; + stats.encode_delay_samples = 0; + } + } + } + // Forward to the appropriate callback (either global or per-SSRC sink). rtc::scoped_refptr cb; { @@ -139,16 +170,107 @@ void UserTimestampTransformer::TransformReceive( auto meta = ExtractTrailer(data, stripped_data); if (meta.has_value()) { - // Store in the receive map keyed by RTP timestamp so decoded frames - // can look up their metadata regardless of frame drops. + meta->ssrc = ssrc; + { webrtc::MutexLock lock(&recv_map_mutex_); + + recv_frame_count_++; + + // Detect simulcast layer switch (SSRC change). + // When the SFU switches us to a different layer, the old layer's + // entries are stale and can cause RTP timestamp collisions or + // return wrong user timestamps on lookup. Flush them. 
+ if (recv_active_ssrc_ != 0 && recv_active_ssrc_ != ssrc) { + size_t flushed = 0; + auto oit = recv_map_order_.begin(); + while (oit != recv_map_order_.end()) { + auto mit = recv_map_.find(*oit); + if (mit != recv_map_.end() && mit->second.ssrc != ssrc) { + recv_map_.erase(mit); + oit = recv_map_order_.erase(oit); + flushed++; + } else { + ++oit; + } + } + fprintf(stderr, + "[UserTS-Recv] SSRC_SWITCH old=%u new=%u flushed=%zu " + "remaining=%zu frame_count=%llu\n", + recv_active_ssrc_, ssrc, flushed, recv_map_.size(), + (unsigned long long)recv_frame_count_); + } + recv_active_ssrc_ = ssrc; + + bool collision = recv_map_.find(rtp_timestamp) != recv_map_.end(); + if (collision) { + auto& existing = recv_map_[rtp_timestamp]; + fprintf(stderr, + "[UserTS-Recv] COLLISION rtp_ts=%u ssrc=%u " + "existing: ts=%lld fid=%u ssrc=%u " + "new: ts=%lld fid=%u ssrc=%u\n", + rtp_timestamp, ssrc, + (long long)existing.user_timestamp_us, existing.frame_id, + existing.ssrc, + (long long)meta->user_timestamp_us, meta->frame_id, + meta->ssrc); + } + + // Check for timestamp regression (non-monotonic user timestamps + // indicate stale data or clock issues). + if (recv_last_user_ts_ > 0 && + meta->user_timestamp_us < recv_last_user_ts_ && + meta->user_timestamp_us > 0) { + int64_t regression_ms = + (recv_last_user_ts_ - meta->user_timestamp_us) / 1000; + fprintf(stderr, + "[UserTS-Recv] TS_REGRESSION ssrc=%u rtp_ts=%u " + "prev_ts=%lld new_ts=%lld regression=%lldms fid=%u\n", + ssrc, rtp_timestamp, + (long long)recv_last_user_ts_, + (long long)meta->user_timestamp_us, + (long long)regression_ms, meta->frame_id); + } + if (meta->user_timestamp_us > 0) { + recv_last_user_ts_ = meta->user_timestamp_us; + + // Measure end-to-end latency per SSRC. 
+ auto now_us = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()) + .count(); + int64_t latency_us = now_us - meta->user_timestamp_us; + auto& rstats = recv_ssrc_stats_[ssrc]; + rstats.frame_count++; + rstats.sum_latency_us += latency_us; + rstats.latency_samples++; + if (latency_us > rstats.max_latency_us) { + rstats.max_latency_us = latency_us; + } + if ((rstats.frame_count % 60) == 1) { + double avg_ms = rstats.latency_samples > 0 + ? (rstats.sum_latency_us / + (double)rstats.latency_samples / 1000.0) + : 0.0; + double max_ms = rstats.max_latency_us / 1000.0; + fprintf(stderr, + "[UserTS-Recv] LATENCY ssrc=%u frames=%llu " + "avg=%.1fms max=%.1fms cur=%.1fms fid=%u\n", + ssrc, (unsigned long long)rstats.frame_count, + avg_ms, max_ms, latency_us / 1000.0, meta->frame_id); + rstats.sum_latency_us = 0; + rstats.latency_samples = 0; + rstats.max_latency_us = 0; + } + } + // Evict oldest entry if at capacity - while (recv_map_.size() >= kMaxRecvMapEntries && !recv_map_order_.empty()) { - recv_map_.erase(recv_map_order_.front()); + while (recv_map_.size() >= kMaxRecvMapEntries && + !recv_map_order_.empty()) { + auto evicted_rtp = recv_map_order_.front(); + recv_map_.erase(evicted_rtp); recv_map_order_.pop_front(); } - if (recv_map_.find(rtp_timestamp) == recv_map_.end()) { + if (!collision) { recv_map_order_.push_back(rtp_timestamp); } recv_map_[rtp_timestamp] = meta.value(); @@ -156,6 +278,10 @@ void UserTimestampTransformer::TransformReceive( // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); + } else { + fprintf(stderr, + "[UserTS-Recv] NO_TRAILER rtp_ts=%u ssrc=%u data_size=%zu\n", + rtp_timestamp, ssrc, data.size()); } // Forward to the appropriate callback (either global or per-SSRC sink). 
@@ -189,19 +315,24 @@ std::vector UserTimestampTransformer::AppendTrailer( // Copy original data result.insert(result.end(), data.begin(), data.end()); - // Append user_timestamp_us (big-endian, 8 bytes) + // Append user_timestamp_us (big-endian, 8 bytes) XORed with 0xFF to + // prevent H.264 NAL start code sequences (0x000001 / 0x00000001) from + // appearing inside the trailer. The H.264 packetizer scans the full + // frame payload for start codes, and the trailer's raw bytes can + // contain 0x000001 (e.g. frame_id 256 = 0x00000100). for (int i = 7; i >= 0; --i) { result.push_back( - static_cast((user_timestamp_us >> (i * 8)) & 0xFF)); + static_cast(((user_timestamp_us >> (i * 8)) & 0xFF) ^ 0xFF)); } - // Append frame_id (big-endian, 4 bytes) + // Append frame_id (big-endian, 4 bytes), also XORed for (int i = 3; i >= 0; --i) { result.push_back( - static_cast((frame_id >> (i * 8)) & 0xFF)); + static_cast(((frame_id >> (i * 8)) & 0xFF) ^ 0xFF)); } - // Append magic bytes + // Append magic bytes (NOT XORed — they must remain recognizable and + // already contain no 0x00/0x01 bytes) result.insert(result.end(), std::begin(kUserTimestampMagic), std::end(kUserTimestampMagic)); @@ -226,23 +357,49 @@ std::optional UserTimestampTransformer::ExtractTrailer( const uint8_t* trailer_start = data.data() + data.size() - kUserTimestampTrailerSize; - // Extract user_timestamp_us (big-endian, 8 bytes) + // Extract user_timestamp_us (big-endian, 8 bytes, XORed with 0xFF) int64_t timestamp = 0; for (int i = 0; i < 8; ++i) { - timestamp = (timestamp << 8) | trailer_start[i]; + timestamp = (timestamp << 8) | (trailer_start[i] ^ 0xFF); } - // Extract frame_id (big-endian, 4 bytes) + // Extract frame_id (big-endian, 4 bytes, XORed with 0xFF) uint32_t frame_id = 0; for (int i = 0; i < 4; ++i) { - frame_id = (frame_id << 8) | trailer_start[8 + i]; + frame_id = (frame_id << 8) | (trailer_start[8 + i] ^ 0xFF); + } + + if (timestamp < 946684800000000LL || timestamp > 4102444800000000LL) { + 
std::string hex; + for (size_t i = 0; i < kUserTimestampTrailerSize; ++i) { + char buf[4]; + snprintf(buf, sizeof(buf), "%02x", trailer_start[i]); + hex += buf; + if (i == 7 || i == 11) hex += "|"; + } + size_t context_bytes = std::min(data.size(), kUserTimestampTrailerSize + 8); + std::string context_hex; + const uint8_t* ctx_start = data.data() + data.size() - context_bytes; + for (size_t i = 0; i < context_bytes; ++i) { + char buf[4]; + snprintf(buf, sizeof(buf), "%02x", ctx_start[i]); + context_hex += buf; + if (&ctx_start[i] == trailer_start - 1) context_hex += "|"; + else if (&ctx_start[i] == trailer_start + 7) context_hex += "|"; + else if (&ctx_start[i] == trailer_start + 11) context_hex += "|"; + } + fprintf(stderr, + "[UserTS-Extract] BAD ts=%lld fid=%u data_size=%zu " + "trailer=%s context=%s\n", + (long long)timestamp, frame_id, data.size(), + hex.c_str(), context_hex.c_str()); } // Copy data without trailer out_data.assign(data.begin(), data.end() - kUserTimestampTrailerSize); - return FrameMetadata{timestamp, frame_id}; + return FrameMetadata{timestamp, frame_id, 0}; } void UserTimestampTransformer::RegisterTransformedFrameCallback( @@ -282,17 +439,35 @@ std::optional UserTimestampTransformer::lookup_frame_metadata( webrtc::MutexLock lock(&recv_map_mutex_); auto it = recv_map_.find(rtp_timestamp); if (it == recv_map_.end()) { + recv_lookup_misses_++; + if ((recv_lookup_misses_ % 30) == 1) { + fprintf(stderr, + "[UserTS-Lookup] MISS rtp_ts=%u map=%zu " + "hits=%llu misses=%llu active_ssrc=%u\n", + rtp_timestamp, recv_map_.size(), + (unsigned long long)recv_lookup_hits_, + (unsigned long long)recv_lookup_misses_, + recv_active_ssrc_); + } return std::nullopt; } + recv_lookup_hits_++; FrameMetadata meta = it->second; recv_map_.erase(it); - // Remove from insertion-order tracker (linear scan is fine for bounded size) - for (auto oit = recv_map_order_.begin(); oit != recv_map_order_.end(); ++oit) { + for (auto oit = recv_map_order_.begin(); oit != 
recv_map_order_.end(); + ++oit) { if (*oit == rtp_timestamp) { recv_map_order_.erase(oit); break; } } + if (meta.user_timestamp_us < 946684800000000LL || + meta.user_timestamp_us > 4102444800000000LL) { + fprintf(stderr, + "[UserTS-Lookup] BAD rtp_ts=%u ts=%lld fid=%u ssrc=%u map=%zu\n", + rtp_timestamp, (long long)meta.user_timestamp_us, + meta.frame_id, meta.ssrc, recv_map_.size()); + } return meta; } @@ -324,7 +499,7 @@ void UserTimestampTransformer::store_frame_metadata( if (send_map_.find(key) == send_map_.end()) { send_map_order_.push_back(key); } - send_map_[key] = FrameMetadata{user_timestamp_us, frame_id}; + send_map_[key] = FrameMetadata{user_timestamp_us, frame_id, 0}; } // UserTimestampHandler implementation From d940542e6e70d3af05652cc45b1858a264d100a4 Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 9 Mar 2026 21:41:55 -0700 Subject: [PATCH 28/52] remove debug outputs --- .../src/native/peer_connection_factory.rs | 9 +- webrtc-sys/include/livekit/user_timestamp.h | 22 --- webrtc-sys/src/user_timestamp.cpp | 147 ------------------ 3 files changed, 1 insertion(+), 177 deletions(-) diff --git a/libwebrtc/src/native/peer_connection_factory.rs b/libwebrtc/src/native/peer_connection_factory.rs index 8980074f0..ae082aecc 100644 --- a/libwebrtc/src/native/peer_connection_factory.rs +++ b/libwebrtc/src/native/peer_connection_factory.rs @@ -46,14 +46,7 @@ impl Default for PeerConnectionFactory { if log_sink.is_none() { *log_sink = Some(sys_rtc::ffi::new_log_sink(|msg, _| { let msg = msg.strip_suffix("\r\n").or(msg.strip_suffix('\n')).unwrap_or(&msg); - - // Route user timestamp transformer logs to a dedicated target so they can - // be enabled independently from the very noisy general libwebrtc logs. 
- if msg.contains("UserTimestampTransformer") { - log::info!(target: "user_timestamp_rtp", "{}", msg); - } else { - log::debug!(target: "libwebrtc", "{}", msg); - } + log::debug!(target: "libwebrtc", "{}", msg); })); } diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index daabd5314..f55812c0a 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -133,15 +133,6 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable std::deque send_map_order_; static constexpr size_t kMaxSendMapEntries = 300; - // Send-side per-SSRC stats for diagnosing simulcast encoding delay. - struct SendSsrcStats { - uint64_t frame_count{0}; - int64_t last_user_ts{0}; - int64_t sum_encode_delay_us{0}; - uint64_t encode_delay_samples{0}; - }; - mutable std::unordered_map send_ssrc_stats_; - // Receive-side map: RTP timestamp -> frame metadata. // Keyed by RTP timestamp so decoded frames can look up their // metadata regardless of frame drops or reordering. @@ -152,19 +143,6 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { // Simulcast tracking: detect layer switches and flush stale entries. mutable uint32_t recv_active_ssrc_{0}; - mutable int64_t recv_last_user_ts_{0}; - mutable uint64_t recv_frame_count_{0}; - mutable uint64_t recv_lookup_hits_{0}; - mutable uint64_t recv_lookup_misses_{0}; - - // Receive-side per-SSRC latency tracking. - struct RecvSsrcStats { - uint64_t frame_count{0}; - int64_t sum_latency_us{0}; - uint64_t latency_samples{0}; - int64_t max_latency_us{0}; - }; - mutable std::unordered_map recv_ssrc_stats_; }; /// Wrapper class for Rust FFI that manages user timestamp transformers. 
diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index 36f23b9ed..c3a3f831a 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -16,8 +16,6 @@ #include "livekit/user_timestamp.h" -#include -#include #include #include @@ -110,35 +108,6 @@ void UserTimestampTransformer::TransformSend( frame->SetData(rtc::ArrayView(new_data)); } - // Track per-SSRC encoding delay for simulcast diagnostics. - { - auto now_us = std::chrono::duration_cast( - std::chrono::system_clock::now().time_since_epoch()) - .count(); - webrtc::MutexLock lock(&send_map_mutex_); - auto& stats = send_ssrc_stats_[ssrc]; - stats.frame_count++; - if (meta_to_embed.user_timestamp_us > 0) { - int64_t delay_us = now_us - meta_to_embed.user_timestamp_us; - stats.sum_encode_delay_us += delay_us; - stats.encode_delay_samples++; - if ((stats.frame_count % 60) == 1) { - double avg_ms = stats.encode_delay_samples > 0 - ? (stats.sum_encode_delay_us / - (double)stats.encode_delay_samples / 1000.0) - : 0.0; - fprintf(stderr, - "[UserTS-Send] ssrc=%u frames=%llu fid=%u " - "encode_delay=%.1fms (cur=%.1fms) user_ts=%lld\n", - ssrc, (unsigned long long)stats.frame_count, - meta_to_embed.frame_id, avg_ms, delay_us / 1000.0, - (long long)meta_to_embed.user_timestamp_us); - stats.sum_encode_delay_us = 0; - stats.encode_delay_samples = 0; - } - } - } - // Forward to the appropriate callback (either global or per-SSRC sink). rtc::scoped_refptr cb; { @@ -175,93 +144,25 @@ void UserTimestampTransformer::TransformReceive( { webrtc::MutexLock lock(&recv_map_mutex_); - recv_frame_count_++; - // Detect simulcast layer switch (SSRC change). // When the SFU switches us to a different layer, the old layer's // entries are stale and can cause RTP timestamp collisions or // return wrong user timestamps on lookup. Flush them. 
if (recv_active_ssrc_ != 0 && recv_active_ssrc_ != ssrc) { - size_t flushed = 0; auto oit = recv_map_order_.begin(); while (oit != recv_map_order_.end()) { auto mit = recv_map_.find(*oit); if (mit != recv_map_.end() && mit->second.ssrc != ssrc) { recv_map_.erase(mit); oit = recv_map_order_.erase(oit); - flushed++; } else { ++oit; } } - fprintf(stderr, - "[UserTS-Recv] SSRC_SWITCH old=%u new=%u flushed=%zu " - "remaining=%zu frame_count=%llu\n", - recv_active_ssrc_, ssrc, flushed, recv_map_.size(), - (unsigned long long)recv_frame_count_); } recv_active_ssrc_ = ssrc; bool collision = recv_map_.find(rtp_timestamp) != recv_map_.end(); - if (collision) { - auto& existing = recv_map_[rtp_timestamp]; - fprintf(stderr, - "[UserTS-Recv] COLLISION rtp_ts=%u ssrc=%u " - "existing: ts=%lld fid=%u ssrc=%u " - "new: ts=%lld fid=%u ssrc=%u\n", - rtp_timestamp, ssrc, - (long long)existing.user_timestamp_us, existing.frame_id, - existing.ssrc, - (long long)meta->user_timestamp_us, meta->frame_id, - meta->ssrc); - } - - // Check for timestamp regression (non-monotonic user timestamps - // indicate stale data or clock issues). - if (recv_last_user_ts_ > 0 && - meta->user_timestamp_us < recv_last_user_ts_ && - meta->user_timestamp_us > 0) { - int64_t regression_ms = - (recv_last_user_ts_ - meta->user_timestamp_us) / 1000; - fprintf(stderr, - "[UserTS-Recv] TS_REGRESSION ssrc=%u rtp_ts=%u " - "prev_ts=%lld new_ts=%lld regression=%lldms fid=%u\n", - ssrc, rtp_timestamp, - (long long)recv_last_user_ts_, - (long long)meta->user_timestamp_us, - (long long)regression_ms, meta->frame_id); - } - if (meta->user_timestamp_us > 0) { - recv_last_user_ts_ = meta->user_timestamp_us; - - // Measure end-to-end latency per SSRC. 
- auto now_us = std::chrono::duration_cast( - std::chrono::system_clock::now().time_since_epoch()) - .count(); - int64_t latency_us = now_us - meta->user_timestamp_us; - auto& rstats = recv_ssrc_stats_[ssrc]; - rstats.frame_count++; - rstats.sum_latency_us += latency_us; - rstats.latency_samples++; - if (latency_us > rstats.max_latency_us) { - rstats.max_latency_us = latency_us; - } - if ((rstats.frame_count % 60) == 1) { - double avg_ms = rstats.latency_samples > 0 - ? (rstats.sum_latency_us / - (double)rstats.latency_samples / 1000.0) - : 0.0; - double max_ms = rstats.max_latency_us / 1000.0; - fprintf(stderr, - "[UserTS-Recv] LATENCY ssrc=%u frames=%llu " - "avg=%.1fms max=%.1fms cur=%.1fms fid=%u\n", - ssrc, (unsigned long long)rstats.frame_count, - avg_ms, max_ms, latency_us / 1000.0, meta->frame_id); - rstats.sum_latency_us = 0; - rstats.latency_samples = 0; - rstats.max_latency_us = 0; - } - } // Evict oldest entry if at capacity while (recv_map_.size() >= kMaxRecvMapEntries && @@ -278,10 +179,6 @@ void UserTimestampTransformer::TransformReceive( // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); - } else { - fprintf(stderr, - "[UserTS-Recv] NO_TRAILER rtp_ts=%u ssrc=%u data_size=%zu\n", - rtp_timestamp, ssrc, data.size()); } // Forward to the appropriate callback (either global or per-SSRC sink). 
@@ -369,32 +266,6 @@ std::optional UserTimestampTransformer::ExtractTrailer( frame_id = (frame_id << 8) | (trailer_start[8 + i] ^ 0xFF); } - if (timestamp < 946684800000000LL || timestamp > 4102444800000000LL) { - std::string hex; - for (size_t i = 0; i < kUserTimestampTrailerSize; ++i) { - char buf[4]; - snprintf(buf, sizeof(buf), "%02x", trailer_start[i]); - hex += buf; - if (i == 7 || i == 11) hex += "|"; - } - size_t context_bytes = std::min(data.size(), kUserTimestampTrailerSize + 8); - std::string context_hex; - const uint8_t* ctx_start = data.data() + data.size() - context_bytes; - for (size_t i = 0; i < context_bytes; ++i) { - char buf[4]; - snprintf(buf, sizeof(buf), "%02x", ctx_start[i]); - context_hex += buf; - if (&ctx_start[i] == trailer_start - 1) context_hex += "|"; - else if (&ctx_start[i] == trailer_start + 7) context_hex += "|"; - else if (&ctx_start[i] == trailer_start + 11) context_hex += "|"; - } - fprintf(stderr, - "[UserTS-Extract] BAD ts=%lld fid=%u data_size=%zu " - "trailer=%s context=%s\n", - (long long)timestamp, frame_id, data.size(), - hex.c_str(), context_hex.c_str()); - } - // Copy data without trailer out_data.assign(data.begin(), data.end() - kUserTimestampTrailerSize); @@ -439,19 +310,8 @@ std::optional UserTimestampTransformer::lookup_frame_metadata( webrtc::MutexLock lock(&recv_map_mutex_); auto it = recv_map_.find(rtp_timestamp); if (it == recv_map_.end()) { - recv_lookup_misses_++; - if ((recv_lookup_misses_ % 30) == 1) { - fprintf(stderr, - "[UserTS-Lookup] MISS rtp_ts=%u map=%zu " - "hits=%llu misses=%llu active_ssrc=%u\n", - rtp_timestamp, recv_map_.size(), - (unsigned long long)recv_lookup_hits_, - (unsigned long long)recv_lookup_misses_, - recv_active_ssrc_); - } return std::nullopt; } - recv_lookup_hits_++; FrameMetadata meta = it->second; recv_map_.erase(it); for (auto oit = recv_map_order_.begin(); oit != recv_map_order_.end(); @@ -461,13 +321,6 @@ std::optional UserTimestampTransformer::lookup_frame_metadata( break; } 
} - if (meta.user_timestamp_us < 946684800000000LL || - meta.user_timestamp_us > 4102444800000000LL) { - fprintf(stderr, - "[UserTS-Lookup] BAD rtp_ts=%u ts=%lld fid=%u ssrc=%u map=%zu\n", - rtp_timestamp, (long long)meta.user_timestamp_us, - meta.frame_id, meta.ssrc, recv_map_.size()); - } return meta; } From f7120e227e9033711805a5315f4f01b01080b34f Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 9 Mar 2026 23:31:26 -0700 Subject: [PATCH 29/52] output better encode stats --- Cargo.lock | 940 +++++++++++++++++++++++++- examples/local_video/Cargo.toml | 2 + examples/local_video/src/publisher.rs | 357 ++++++++-- 3 files changed, 1201 insertions(+), 98 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 102b671e3..4deebf20c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -89,6 +89,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "alsa" version = "0.9.1" @@ -469,6 +475,15 @@ dependencies = [ "tungstenite 0.21.0", ] +[[package]] +name = "atomic" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340" +dependencies = [ + "bytemuck", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -633,15 +648,30 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec 0.6.3", +] + [[package]] name = "bit-set" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ - "bit-vec", + "bit-vec 0.8.0", ] +[[package]] +name = "bit-vec" +version = 
"0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bit-vec" version = "0.8.0" @@ -880,6 +910,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" +[[package]] +name = "castaway" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" +dependencies = [ + "rustversion", +] + [[package]] name = "cc" version = "1.2.54" @@ -1109,6 +1148,20 @@ dependencies = [ "memchr", ] +[[package]] +name = "compact_str" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" +dependencies = [ + "castaway 0.2.4", + "cfg-if 1.0.4", + "itoa", + "rustversion", + "ryu", + "static_assertions", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -1379,6 +1432,33 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crossterm" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.10.0", + "crossterm_winapi", + "derive_more", + "document-features", + "mio", + "parking_lot", + "rustix 1.1.3", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + [[package]] name = "crunchy" version = "0.2.4" @@ -1395,6 +1475,16 @@ dependencies = [ 
"typenum", ] +[[package]] +name = "csscolorparser" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2a7d3066da2de787b7f032c736763eb7ae5d355f81a68bab2675a96008b0bf" +dependencies = [ + "lab", + "phf", +] + [[package]] name = "ctor" version = "0.6.3" @@ -1515,8 +1605,18 @@ version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.14.4", + "darling_macro 0.14.4", +] + +[[package]] +name = "darling" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" +dependencies = [ + "darling_core 0.23.0", + "darling_macro 0.23.0", ] [[package]] @@ -1533,17 +1633,41 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "darling_core" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" +dependencies = [ + "ident_case", + "proc-macro2", + "quote", + "strsim 0.11.1", + "syn 2.0.114", +] + [[package]] name = "darling_macro" version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ - "darling_core", + "darling_core 0.14.4", "quote", "syn 1.0.109", ] +[[package]] +name = "darling_macro" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" +dependencies = [ + "darling_core 0.23.0", + "quote", + "syn 2.0.114", +] + [[package]] name = "dashmap" version = "5.5.3" @@ -1569,6 +1693,12 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" +[[package]] +name = "deltae" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5729f5117e208430e437df2f4843f5e5952997175992d1414f94c57d61e270b4" + [[package]] name = "deranged" version = "0.5.5" @@ -1578,6 +1708,28 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derive_more" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.114", +] + [[package]] name = "digest" version = "0.10.7" @@ -1895,6 +2047,15 @@ version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" +[[package]] +name = "euclid" +version = "0.22.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df61bf483e837f88d5c2291dcf55c67be7e676b3a51acc48db3a7b163b91ed63" +dependencies = [ + "num-traits", +] + [[package]] name = "event-listener" version = "2.5.3" @@ -1937,6 +2098,16 @@ dependencies = [ "zune-inflate", ] +[[package]] +name = "fancy-regex" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" +dependencies = [ + "bit-set 0.5.3", + "regex", +] + [[package]] name = "fastrand" version = "1.9.0" @@ -1981,12 +2152,29 @@ dependencies = [ "simd-adler32", ] +[[package]] +name = "filedescriptor" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d" +dependencies = [ + "libc", + "thiserror 1.0.69", + "winapi", +] + [[package]] name = "find-msvc-tools" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" +[[package]] +name = "finl_unicode" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9844ddc3a6e533d62bba727eb6c28b5d360921d5175e9ff0f1e621a5c590a4d5" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -2105,7 +2293,7 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55a5e644a80e6d96b2b4910fa7993301d7b7926c045b475b62202b20a36ce69e" dependencies = [ - "darling", + "darling 0.14.4", "proc-macro2", "quote", "syn 1.0.109", @@ -2289,11 +2477,24 @@ dependencies = [ "cfg-if 1.0.4", "js-sys", "libc", - "r-efi", + "r-efi 5.3.0", "wasip2", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if 1.0.4", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + [[package]] name = "gif" version = "0.13.3" @@ -2547,6 +2748,11 @@ name = "hashbrown" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", +] [[package]] name = "hdrhistogram" @@ -2579,6 +2785,12 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + [[package]] name = "hexf-parse" version = "0.2.1" @@ -2902,6 +3114,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -2990,6 +3208,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "indoc" +version = "2.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] + [[package]] name = "inout" version = "0.1.4" @@ -2999,6 +3226,19 @@ dependencies = [ "generic-array", ] +[[package]] +name = "instability" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357b7205c6cd18dd2c86ed312d1e70add149aea98e7ef72b9fdf0270e555c11d" +dependencies = [ + "darling 0.23.0", + "indoc", + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "instant" version = "0.1.13" @@ -3037,7 +3277,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" dependencies = [ "async-channel 1.9.0", - "castaway", + "castaway 0.1.2", "crossbeam-utils", "curl", "curl-sys", @@ -3198,6 +3438,17 @@ dependencies = [ "serde_json", ] +[[package]] +name = "kasuari" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fe90c1150662e858c7d5f945089b7517b0a80d8bf7ba4b1b5ffc984e7230a5b" +dependencies = [ + "hashbrown 0.16.1", + "portable-atomic", + "thiserror 2.0.18", +] + [[package]] name = "khronos-egl" version = "6.0.0" @@ -3224,6 +3475,12 @@ dependencies = [ "log", ] +[[package]] +name = "lab" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f" + [[package]] name = "lazy_static" version = "1.5.0" @@ -3236,6 +3493,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "lebe" version = "0.5.3" @@ -3321,6 +3584,15 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "line-clipping" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4de44e98ddbf09375cbf4d17714d18f39195f4f4894e8524501726fd9a8a4a" +dependencies = [ + "bitflags 2.10.0", +] + [[package]] name = "link-cplusplus" version = "1.0.12" @@ -3503,6 +3775,7 @@ dependencies = [ "bytemuck", "chrono", "clap", + "crossterm", "eframe", "egui", "egui-wgpu", @@ -3516,6 +3789,7 @@ dependencies = [ "nokhwa", "objc2 0.6.3", "parking_lot", + "ratatui", "tokio", "webrtc-sys", "wgpu 25.0.2", @@ -3541,12 +3815,31 @@ dependencies = [ "value-bag", ] +[[package]] +name = "lru" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" +dependencies = [ + "hashbrown 0.16.1", +] + [[package]] name = "lru-slab" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" +[[package]] +name = "mac_address" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0aeb26bf5e836cc1c341c8106051b573f1766dfa05aa87f0b98be5e51b02303" +dependencies = [ + "nix", + "winapi", +] + [[package]] name = "mach2" version = "0.4.3" @@ -3595,6 +3888,21 @@ dependencies = [ "libc", ] +[[package]] +name = 
"memmem" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a64a92489e2744ce060c349162be1c5f33c6969234104dbd99ddb5feb08b8c15" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + [[package]] name = "metal" version = "0.18.0" @@ -3654,6 +3962,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", + "log", "wasi", "windows-sys 0.61.2", ] @@ -3696,7 +4005,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e380993072e52eef724eddfcde0ed013b0c023c3f0417336ed041aa9f076994e" dependencies = [ "arrayvec", - "bit-set", + "bit-set 0.8.0", "bitflags 2.10.0", "cfg_aliases", "codespan-reporting 0.11.1", @@ -3705,7 +4014,7 @@ dependencies = [ "log", "rustc-hash 1.1.0", "spirv", - "strum", + "strum 0.26.3", "termcolor", "thiserror 2.0.18", "unicode-xid", @@ -3718,7 +4027,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b977c445f26e49757f9aca3631c3b8b836942cb278d69a92e7b80d3b24da632" dependencies = [ "arrayvec", - "bit-set", + "bit-set 0.8.0", "bitflags 2.10.0", "cfg_aliases", "codespan-reporting 0.12.0", @@ -3731,7 +4040,7 @@ dependencies = [ "once_cell", "rustc-hash 1.1.0", "spirv", - "strum", + "strum 0.26.3", "thiserror 2.0.18", "unicode-ident", ] @@ -3873,6 +4182,19 @@ dependencies = [ "jni-sys", ] +[[package]] +name = "nix" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags 2.10.0", + "cfg-if 1.0.4", + "cfg_aliases", + "libc", + "memoffset", +] + [[package]] name = "nohash-hasher" version = "0.2.0" @@ -4013,6 +4335,15 @@ 
dependencies = [ "syn 2.0.114", ] +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + [[package]] name = "objc" version = "0.2.7" @@ -4544,34 +4875,129 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] -name = "petgraph" -version = "0.6.5" +name = "pest" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" dependencies = [ - "fixedbitset 0.4.2", - "indexmap 2.13.0", + "memchr", + "ucd-trie", ] [[package]] -name = "petgraph" -version = "0.8.3" +name = "pest_derive" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" dependencies = [ - "fixedbitset 0.5.7", - "hashbrown 0.15.5", - "indexmap 2.13.0", + "pest", + "pest_generator", ] [[package]] -name = "pin-project" -version = "1.1.10" +name = "pest_generator" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" dependencies = [ - "pin-project-internal", -] + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "pest_meta" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" +dependencies = [ + "pest", + "sha2", +] + +[[package]] 
+name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset 0.4.2", + "indexmap 2.13.0", +] + +[[package]] +name = "petgraph" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455" +dependencies = [ + "fixedbitset 0.5.7", + "hashbrown 0.15.5", + "indexmap 2.13.0", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_macros", + "phf_shared", +] + +[[package]] +name = "phf_codegen" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" +dependencies = [ + "phf_generator", + "phf_shared", +] + +[[package]] +name = "phf_generator" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +dependencies = [ + "phf_shared", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher 1.0.2", +] + +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] [[package]] name = "pin-project-internal" @@ -5010,6 +5436,12 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + [[package]] name = "rand" version = "0.8.5" @@ -5075,6 +5507,91 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3d6831663a5098ea164f89cff59c6284e95f4e3c76ce9848d4529f5ccca9bde" +[[package]] +name = "ratatui" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1ce67fb8ba4446454d1c8dbaeda0557ff5e94d39d5e5ed7f10a65eb4c8266bc" +dependencies = [ + "instability", + "ratatui-core", + "ratatui-crossterm", + "ratatui-macros", + "ratatui-termwiz", + "ratatui-widgets", +] + +[[package]] +name = "ratatui-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef8dea09a92caaf73bff7adb70b76162e5937524058a7e5bff37869cbbec293" +dependencies = [ + "bitflags 2.10.0", + "compact_str", + "hashbrown 0.16.1", + "indoc", + "itertools 0.14.0", + "kasuari", + "lru", + "strum 0.27.2", + "thiserror 2.0.18", + "unicode-segmentation", + "unicode-truncate", + "unicode-width 0.2.2", +] + +[[package]] +name = "ratatui-crossterm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "577c9b9f652b4c121fb25c6a391dd06406d3b092ba68827e6d2f09550edc54b3" +dependencies = [ + "cfg-if 1.0.4", + "crossterm", + "instability", + "ratatui-core", +] + +[[package]] +name = "ratatui-macros" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a7f1342a13e83e4bb9d0b793d0ea762be633f9582048c892ae9041ef39c936f4" +dependencies = [ + "ratatui-core", + "ratatui-widgets", +] + +[[package]] +name = "ratatui-termwiz" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f76fe0bd0ed4295f0321b1676732e2454024c15a35d01904ddb315afd3d545c" +dependencies = [ + "ratatui-core", + "termwiz", +] + +[[package]] +name = "ratatui-widgets" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7dbfa023cd4e604c2553483820c5fe8aa9d71a42eea5aa77c6e7f35756612db" +dependencies = [ + "bitflags 2.10.0", + "hashbrown 0.16.1", + "indoc", + "instability", + "itertools 0.14.0", + "line-clipping", + "ratatui-core", + "strum 0.27.2", + "time", + "unicode-segmentation", + "unicode-width 0.2.2", +] + [[package]] name = "raw-window-handle" version = "0.6.2" @@ -5278,6 +5795,15 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.38.44" @@ -5667,6 +6193,27 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc" 
+dependencies = [ + "libc", + "mio", + "signal-hook", +] + [[package]] name = "signal-hook-registry" version = "1.4.8" @@ -5689,6 +6236,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + [[package]] name = "slab" version = "0.4.11" @@ -5881,7 +6434,16 @@ version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ - "strum_macros", + "strum_macros 0.26.4", +] + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros 0.27.2", ] [[package]] @@ -5897,6 +6459,18 @@ dependencies = [ "syn 2.0.114", ] +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.114", +] + [[package]] name = "subtle" version = "2.6.1" @@ -5992,6 +6566,69 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "terminfo" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ea810f0692f9f51b382fff5893887bb4580f5fa246fde546e0b13e7fcee662" +dependencies = [ + "fnv", + "nom", + "phf", + "phf_codegen", +] + +[[package]] +name = "termios" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "411c5bf740737c7918b8b1fe232dca4dc9f8e754b8ad5e20966814001ed0ac6b" +dependencies = [ + "libc", +] + +[[package]] 
+name = "termwiz" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4676b37242ccbd1aabf56edb093a4827dc49086c0ffd764a5705899e0f35f8f7" +dependencies = [ + "anyhow", + "base64 0.22.1", + "bitflags 2.10.0", + "fancy-regex", + "filedescriptor", + "finl_unicode", + "fixedbitset 0.4.2", + "hex", + "lazy_static", + "libc", + "log", + "memmem", + "nix", + "num-derive", + "num-traits", + "ordered-float", + "pest", + "pest_derive", + "phf", + "sha2", + "signal-hook", + "siphasher 1.0.2", + "terminfo", + "termios", + "thiserror 1.0.69", + "ucd-trie", + "unicode-segmentation", + "vtparse", + "wezterm-bidi", + "wezterm-blob-leases", + "wezterm-color-types", + "wezterm-dynamic", + "wezterm-input-types", + "winapi", +] + [[package]] name = "test-log" version = "0.2.19" @@ -6104,7 +6741,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9da98b7d9b7dad93488a84b8248efc35352b0b2657397d4167e7ad67e5d535e5" dependencies = [ "deranged", + "libc", "num-conv", + "num_threads", "powerfmt", "serde_core", "time-core", @@ -6561,6 +7200,12 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + [[package]] name = "unicode-ident" version = "1.0.22" @@ -6573,6 +7218,17 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-truncate" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b380a1238663e5f8a691f9039c73e1cdae598a30e9855f541d29b08b53e9a5" +dependencies = [ + "itertools 0.14.0", + "unicode-segmentation", + "unicode-width 
0.2.2", +] + [[package]] name = "unicode-width" version = "0.1.14" @@ -6694,7 +7350,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a138823392dba19b0aa494872689f97d0ee157de5852e2bec157ce6de9cdc22" dependencies = [ "anyhow", - "siphasher", + "siphasher 0.3.11", "uniffi_internal_macros", "uniffi_pipeline", ] @@ -6760,6 +7416,18 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" +dependencies = [ + "atomic", + "getrandom 0.4.2", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "v4l" version = "0.14.0" @@ -6810,6 +7478,15 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vtparse" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d9b2acfb050df409c972a37d3b8e08cdea3bddb0c09db9d53137e504cfabed0" +dependencies = [ + "utf8parse", +] + [[package]] name = "waker-fn" version = "1.2.0" @@ -6850,6 +7527,15 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" version = "0.2.108" @@ -6909,6 +7595,40 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + 
+[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.10.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + [[package]] name = "wayland-backend" version = "0.3.12" @@ -7146,6 +7866,78 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" +[[package]] +name = "wezterm-bidi" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0a6e355560527dd2d1cf7890652f4f09bb3433b6aadade4c9b5ed76de5f3ec" +dependencies = [ + "log", + "wezterm-dynamic", +] + +[[package]] +name = "wezterm-blob-leases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692daff6d93d94e29e4114544ef6d5c942a7ed998b37abdc19b17136ea428eb7" +dependencies = [ + "getrandom 0.3.4", + "mac_address", + "sha2", + "thiserror 1.0.69", + "uuid", +] + +[[package]] +name = "wezterm-color-types" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7de81ef35c9010270d63772bebef2f2d6d1f2d20a983d27505ac850b8c4b4296" +dependencies = [ + "csscolorparser", + "deltae", + "lazy_static", + "wezterm-dynamic", +] + +[[package]] +name = "wezterm-dynamic" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f2ab60e120fd6eaa68d9567f3226e876684639d22a4219b313ff69ec0ccd5ac" +dependencies = [ + "log", + "ordered-float", + "strsim 0.11.1", + "thiserror 1.0.69", + 
"wezterm-dynamic-derive", +] + +[[package]] +name = "wezterm-dynamic-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c0cf2d539c645b448eaffec9ec494b8b19bd5077d9e58cb1ae7efece8d575b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "wezterm-input-types" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7012add459f951456ec9d6c7e6fc340b1ce15d6fc9629f8c42853412c029e57e" +dependencies = [ + "bitflags 1.3.2", + "euclid", + "lazy_static", + "serde", + "wezterm-dynamic", +] + [[package]] name = "wgpu" version = "24.0.5" @@ -7207,7 +7999,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f0aa306497a238d169b9dc70659105b4a096859a34894544ca81719242e1499" dependencies = [ "arrayvec", - "bit-vec", + "bit-vec 0.8.0", "bitflags 2.10.0", "cfg_aliases", "document-features", @@ -7232,8 +8024,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7b882196f8368511d613c6aeec80655160db6646aebddf8328879a88d54e500" dependencies = [ "arrayvec", - "bit-set", - "bit-vec", + "bit-set 0.8.0", + "bit-vec 0.8.0", "bitflags 2.10.0", "cfg_aliases", "document-features", @@ -7334,7 +8126,7 @@ dependencies = [ "android_system_properties", "arrayvec", "ash", - "bit-set", + "bit-set 0.8.0", "bitflags 2.10.0", "block", "bytemuck", @@ -8029,6 +8821,88 @@ name = "wit-bindgen" version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = 
"0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck 0.5.0", + "indexmap 2.13.0", + "prettyplease", + "syn 2.0.114", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.114", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.10.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 432a9ad1f..a0ed2835a 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -45,6 +45,8 @@ chrono = "0.4" bytemuck = { version = "1.16", features = ["derive"] } nokhwa = { version = "0.10", default-features = false, features = ["output-threaded"] } +ratatui = "0.30.0" +crossterm = "0.29.0" [target.'cfg(target_os = "macos")'.dependencies] nokhwa = { version = "0.10", default-features = false, features = 
["input-avfoundation"] } diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 17d1d27e1..4935667d0 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -1,8 +1,11 @@ use anyhow::Result; use clap::Parser; use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType}; -use livekit::options::{self, TrackPublishOptions, VideoCodec, VideoEncoding, VideoPreset, video as video_presets}; +use livekit::options::{ + self, video as video_presets, TrackPublishOptions, VideoCodec, VideoEncoding, VideoPreset, +}; use livekit::prelude::*; +use livekit::webrtc::stats::RtcStats; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution}; @@ -14,6 +17,7 @@ use nokhwa::utils::{ Resolution, }; use nokhwa::Camera; +use std::collections::VecDeque; use std::env; use std::sync::{ atomic::{AtomicBool, Ordering}, @@ -86,6 +90,212 @@ struct Args { e2ee_key: Option, } +const OUTBOUND_STATS_POLL_INTERVAL: Duration = Duration::from_millis(100); +const MAX_PENDING_TIMING_FRAMES: usize = 512; + +fn unix_time_us_now() -> i64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64 +} + +#[derive(Default)] +struct RollingMs { + total_ms: f64, + samples: u64, +} + +impl RollingMs { + fn record(&mut self, value_ms: f64) { + self.total_ms += value_ms; + self.samples += 1; + } + + fn record_total(&mut self, total_ms: f64, samples: u64) { + if samples == 0 { + return; + } + + self.total_ms += total_ms; + self.samples += samples; + } + + fn average(&self) -> Option { + (self.samples > 0).then_some(self.total_ms / self.samples as f64) + } + + fn display(&self) -> String { + self.average().map(|value| format!("{value:.2}")).unwrap_or_else(|| "n/a".to_string()) + } + + fn reset(&mut self) { + *self = Self::default(); + } +} + +#[derive(Default)] 
+struct PublisherTimingSummary { + camera_capture_ms: RollingMs, + decode_ms: RollingMs, + buffer_convert_ms: RollingMs, + buffer_prepare_ms: RollingMs, + webrtc_capture_ms: RollingMs, + capture_to_buffer_ready_ms: RollingMs, + capture_to_webrtc_capture_ms: RollingMs, + encode_complete_estimate_ms: RollingMs, + packet_departure_estimate_ms: RollingMs, + encode_cpu_ms: RollingMs, + packet_send_delay_ms: RollingMs, + sleep_ms: RollingMs, + iteration_ms: RollingMs, +} + +impl PublisherTimingSummary { + fn reset(&mut self) { + self.camera_capture_ms.reset(); + self.decode_ms.reset(); + self.buffer_convert_ms.reset(); + self.buffer_prepare_ms.reset(); + self.webrtc_capture_ms.reset(); + self.capture_to_buffer_ready_ms.reset(); + self.capture_to_webrtc_capture_ms.reset(); + self.encode_complete_estimate_ms.reset(); + self.packet_departure_estimate_ms.reset(); + self.encode_cpu_ms.reset(); + self.packet_send_delay_ms.reset(); + self.sleep_ms.reset(); + self.iteration_ms.reset(); + } +} + +#[derive(Clone)] +struct PrimaryOutboundStatsSnapshot { + stream_key: String, + frames_encoded: u32, + frames_sent: u32, + packets_sent: u64, + total_encode_time_s: f64, + total_packet_send_delay_s: f64, +} + +#[derive(Default)] +struct OutboundTimingTracker { + pending_encode_capture_times_us: VecDeque, + pending_departure_capture_times_us: VecDeque, + last_snapshot: Option, +} + +impl OutboundTimingTracker { + fn on_frame_captured(&mut self, capture_wall_time_us: i64) { + self.pending_encode_capture_times_us.push_back(capture_wall_time_us); + self.pending_departure_capture_times_us.push_back(capture_wall_time_us); + + while self.pending_encode_capture_times_us.len() > MAX_PENDING_TIMING_FRAMES { + self.pending_encode_capture_times_us.pop_front(); + } + + while self.pending_departure_capture_times_us.len() > MAX_PENDING_TIMING_FRAMES { + self.pending_departure_capture_times_us.pop_front(); + } + } + + fn update_from_stats(&mut self, stats: &[RtcStats], timings: &mut 
PublisherTimingSummary) { + let Some(snapshot) = select_primary_outbound_video_stats(stats) else { + return; + }; + + let Some(previous_snapshot) = self.last_snapshot.as_ref() else { + self.last_snapshot = Some(snapshot); + return; + }; + + if previous_snapshot.stream_key != snapshot.stream_key { + self.last_snapshot = Some(snapshot); + return; + } + + let encoded_delta = + snapshot.frames_encoded.saturating_sub(previous_snapshot.frames_encoded); + let frames_sent_delta = snapshot.frames_sent.saturating_sub(previous_snapshot.frames_sent); + let packets_sent_delta = + snapshot.packets_sent.saturating_sub(previous_snapshot.packets_sent); + let now_us = unix_time_us_now(); + + for _ in 0..encoded_delta.min(self.pending_encode_capture_times_us.len() as u32) { + if let Some(capture_wall_time_us) = self.pending_encode_capture_times_us.pop_front() { + timings + .encode_complete_estimate_ms + .record((now_us - capture_wall_time_us) as f64 / 1000.0); + } + } + + for _ in 0..frames_sent_delta.min(self.pending_departure_capture_times_us.len() as u32) { + if let Some(capture_wall_time_us) = self.pending_departure_capture_times_us.pop_front() + { + timings + .packet_departure_estimate_ms + .record((now_us - capture_wall_time_us) as f64 / 1000.0); + } + } + + if encoded_delta > 0 { + let encode_total_delta_ms = + (snapshot.total_encode_time_s - previous_snapshot.total_encode_time_s).max(0.0) + * 1000.0; + timings.encode_cpu_ms.record_total(encode_total_delta_ms, encoded_delta as u64); + } + + if packets_sent_delta > 0 { + let packet_send_delay_total_delta_ms = (snapshot.total_packet_send_delay_s + - previous_snapshot.total_packet_send_delay_s) + .max(0.0) + * 1000.0; + timings + .packet_send_delay_ms + .record_total(packet_send_delay_total_delta_ms, packets_sent_delta); + } + + self.last_snapshot = Some(snapshot); + } +} + +fn select_primary_outbound_video_stats(stats: &[RtcStats]) -> Option { + stats + .iter() + .filter_map(|stat| match stat { + RtcStats::OutboundRtp(outbound) 
if outbound.outbound.active => Some(outbound), + _ => None, + }) + .max_by(|left, right| { + let left_rank = ( + left.outbound.frame_width as u64 * left.outbound.frame_height as u64, + left.outbound.frames_sent as u64, + left.outbound.frames_encoded as u64, + ); + let right_rank = ( + right.outbound.frame_width as u64 * right.outbound.frame_height as u64, + right.outbound.frames_sent as u64, + right.outbound.frames_encoded as u64, + ); + left_rank.cmp(&right_rank) + }) + .map(|outbound| PrimaryOutboundStatsSnapshot { + stream_key: format!( + "{}:{}x{}", + if outbound.outbound.rid.is_empty() { + "primary" + } else { + outbound.outbound.rid.as_str() + }, + outbound.outbound.frame_width, + outbound.outbound.frame_height, + ), + frames_encoded: outbound.outbound.frames_encoded, + frames_sent: outbound.outbound.frames_sent, + packets_sent: outbound.sent.packets_sent, + total_encode_time_s: outbound.outbound.total_encode_time, + total_packet_send_delay_s: outbound.outbound.total_packet_send_delay, + }) +} + fn list_cameras() -> Result<()> { let cams = nokhwa::query(ApiBackend::Auto)?; println!("Available cameras:"); @@ -240,21 +450,22 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { main_encoding.max_bitrate, simulcast_presets .iter() - .map(|p| format!("{}x{}@{:.0}fps/{}bps", p.width, p.height, p.encoding.max_framerate, p.encoding.max_bitrate)) + .map(|p| format!( + "{}x{}@{:.0}fps/{}bps", + p.width, p.height, p.encoding.max_framerate, p.encoding.max_bitrate + )) .collect::>() .join(", "), ); - let publish_opts = |codec: VideoCodec| { - TrackPublishOptions { - source: TrackSource::Camera, - simulcast: args.simulcast, - video_codec: codec, - user_timestamp: args.attach_timestamp, - video_encoding: Some(main_encoding.clone()), - simulcast_layers: Some(simulcast_presets.clone()), - ..Default::default() - } + let publish_opts = |codec: VideoCodec| TrackPublishOptions { + source: TrackSource::Camera, + simulcast: args.simulcast, + video_codec: codec, + 
user_timestamp: args.attach_timestamp, + video_encoding: Some(main_encoding.clone()), + simulcast_layers: Some(simulcast_presets.clone()), + ..Default::default() }; let publish_result = room @@ -300,16 +511,13 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Capture loop let mut frames: u64 = 0; let mut last_fps_log = Instant::now(); + let mut last_outbound_stats_poll = Instant::now(); let target = Duration::from_secs_f64(1.0 / pace_fps); info!("Target frame interval: {:.2} ms", target.as_secs_f64() * 1000.0); // Timing accumulators (ms) for rolling stats - let mut sum_get_ms = 0.0; - let mut sum_decode_ms = 0.0; - let mut sum_convert_ms = 0.0; - let mut sum_capture_ms = 0.0; - let mut sum_sleep_ms = 0.0; - let mut sum_iter_ms = 0.0; + let mut timings = PublisherTimingSummary::default(); + let mut outbound_timing_tracker = OutboundTimingTracker::default(); let mut logged_mjpeg_fallback = false; let mut frame_counter: u32 = 0; loop { @@ -321,18 +529,19 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { ticker.tick().await; let iter_start = Instant::now(); - // Get frame as RGB24 (decoded by nokhwa if needed) - let t0 = Instant::now(); + // Capture the frame as early as possible and use the wall clock as the + // end-to-end reference for later encode and egress estimates. 
+ let capture_wall_time_us = unix_time_us_now(); + let camera_capture_started_at = Instant::now(); let frame_buf = camera.frame()?; - let t1 = Instant::now(); + let camera_frame_acquired_at = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - // Fast path for YUYV: convert directly to I420 via libyuv - let t2 = if is_yuyv { + let (decode_finished_at, buffer_ready_at) = if is_yuyv { + // Fast path for YUYV: convert directly to I420 via libyuv let src = frame_buf.buffer(); let src_bytes = src.as_ref(); let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 - let t2_local = t1; // no decode step in YUYV path unsafe { // returns 0 on success let _ = yuv_sys::rs_YUY2ToI420( @@ -348,11 +557,11 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - t2_local + (camera_frame_acquired_at, Instant::now()) } else { // Auto path (either RGB24 already or compressed MJPEG) let src = frame_buf.buffer(); - let t2_local = if src.len() == (width as usize * height as usize * 3) { + if src.len() == (width as usize * height as usize * 3) { // Already RGB24 from backend; convert directly unsafe { let _ = yuv_sys::rs_RGB24ToI420( @@ -368,11 +577,11 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - Instant::now() + (camera_frame_acquired_at, Instant::now()) } else { // Try fast MJPEG->I420 via libyuv if available; fallback to image crate let mut used_fast_mjpeg = false; - let t2_try = unsafe { + let fast_mjpeg_buffer_ready_at = unsafe { // rs_MJPGToI420 returns 0 on success let ret = yuv_sys::rs_MJPGToI420( src.as_ref().as_ptr(), @@ -392,16 +601,17 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { used_fast_mjpeg = true; Instant::now() } else { - t1 + camera_frame_acquired_at } }; if used_fast_mjpeg { - t2_try + (camera_frame_acquired_at, fast_mjpeg_buffer_ready_at) } else { // Fallback: decode MJPEG using image crate 
then RGB24->I420 match image::load_from_memory(src.as_ref()) { Ok(img_dyn) => { let rgb8 = img_dyn.to_rgb8(); + let decode_finished_at = Instant::now(); let dec_w = rgb8.width() as u32; let dec_h = rgb8.height() as u32; if dec_w != width || dec_h != height { @@ -425,7 +635,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - Instant::now() + (decode_finished_at, Instant::now()) } Err(e2) => { if !logged_mjpeg_fallback { @@ -439,17 +649,14 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { } } } - }; - t2_local + } }; - let t3 = Instant::now(); // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; // Optionally attach wall-clock time as user timestamp and frame_id if args.attach_timestamp { - frame.user_timestamp_us = - Some(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64); + frame.user_timestamp_us = Some(capture_wall_time_us); frame.frame_id = Some(frame_counter); frame_counter = frame_counter.wrapping_add(1); } else { @@ -457,51 +664,71 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { frame.frame_id = None; } rtc_source.capture_frame(&frame); - let t4 = Instant::now(); + let webrtc_capture_finished_at = Instant::now(); + outbound_timing_tracker.on_frame_captured(capture_wall_time_us); frames += 1; // We already paced via interval; measure actual sleep time for logging only - let sleep_dur = iter_start - wait_start; + let sleep_duration = iter_start - wait_start; // Per-iteration timing bookkeeping - let t_end = Instant::now(); - let get_ms = (t1 - t0).as_secs_f64() * 1000.0; - let decode_ms = (t2 - t1).as_secs_f64() * 1000.0; - let convert_ms = (t3 - t2).as_secs_f64() * 1000.0; - let capture_ms = (t4 - t3).as_secs_f64() * 1000.0; - let sleep_ms = sleep_dur.as_secs_f64() * 1000.0; - let iter_ms = (t_end - iter_start).as_secs_f64() * 1000.0; - sum_get_ms += get_ms; - sum_decode_ms += decode_ms; - 
sum_convert_ms += convert_ms; - sum_capture_ms += capture_ms; - sum_sleep_ms += sleep_ms; - sum_iter_ms += iter_ms; + let iteration_finished_at = Instant::now(); + timings + .camera_capture_ms + .record((camera_frame_acquired_at - camera_capture_started_at).as_secs_f64() * 1000.0); + timings + .decode_ms + .record((decode_finished_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); + timings + .buffer_convert_ms + .record((buffer_ready_at - decode_finished_at).as_secs_f64() * 1000.0); + timings + .buffer_prepare_ms + .record((buffer_ready_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); + timings + .webrtc_capture_ms + .record((webrtc_capture_finished_at - buffer_ready_at).as_secs_f64() * 1000.0); + timings + .capture_to_buffer_ready_ms + .record((buffer_ready_at - camera_capture_started_at).as_secs_f64() * 1000.0); + timings.capture_to_webrtc_capture_ms.record( + (webrtc_capture_finished_at - camera_capture_started_at).as_secs_f64() * 1000.0, + ); + timings.sleep_ms.record(sleep_duration.as_secs_f64() * 1000.0); + timings.iteration_ms.record((iteration_finished_at - iter_start).as_secs_f64() * 1000.0); + + if last_outbound_stats_poll.elapsed() >= OUTBOUND_STATS_POLL_INTERVAL { + if let Ok(stats) = track.get_stats().await { + outbound_timing_tracker.update_from_stats(&stats, &mut timings); + } + last_outbound_stats_poll = Instant::now(); + } if last_fps_log.elapsed() >= std::time::Duration::from_secs(2) { let secs = last_fps_log.elapsed().as_secs_f64(); let fps_est = frames as f64 / secs; - let n = frames.max(1) as f64; info!( - "Publishing video: {}x{}, ~{:.1} fps | avg ms: get {:.2}, decode {:.2}, convert {:.2}, capture {:.2}, sleep {:.2}, iter {:.2} | target {:.2}", + "Publishing video: {}x{}, ~{:.1} fps | stage ms: camera {:.2}, decode {:.2}, convert {:.2}, buffer {:.2}, webrtc {:.2}, sleep {:.2}, iter {:.2} | capture->ms: i420 {:.2}, webrtc {:.2}, encode~ {}, egress~ {} | stats ms: encode_cpu {}, packet_send_delay {} | target {:.2}", width, height, 
fps_est, - sum_get_ms / n, - sum_decode_ms / n, - sum_convert_ms / n, - sum_capture_ms / n, - sum_sleep_ms / n, - sum_iter_ms / n, + timings.camera_capture_ms.average().unwrap_or_default(), + timings.decode_ms.average().unwrap_or_default(), + timings.buffer_convert_ms.average().unwrap_or_default(), + timings.buffer_prepare_ms.average().unwrap_or_default(), + timings.webrtc_capture_ms.average().unwrap_or_default(), + timings.sleep_ms.average().unwrap_or_default(), + timings.iteration_ms.average().unwrap_or_default(), + timings.capture_to_buffer_ready_ms.average().unwrap_or_default(), + timings.capture_to_webrtc_capture_ms.average().unwrap_or_default(), + timings.encode_complete_estimate_ms.display(), + timings.packet_departure_estimate_ms.display(), + timings.encode_cpu_ms.display(), + timings.packet_send_delay_ms.display(), target.as_secs_f64() * 1000.0, ); frames = 0; - sum_get_ms = 0.0; - sum_decode_ms = 0.0; - sum_convert_ms = 0.0; - sum_capture_ms = 0.0; - sum_sleep_ms = 0.0; - sum_iter_ms = 0.0; + timings.reset(); last_fps_log = Instant::now(); } } From 91283fa759141693ca9a87562a8660c09f9e64fb Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 9 Mar 2026 23:36:58 -0700 Subject: [PATCH 30/52] simplify stats --- examples/local_video/src/publisher.rs | 233 ++++---------------------- 1 file changed, 34 insertions(+), 199 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 4935667d0..a39c7e89e 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -5,7 +5,6 @@ use livekit::options::{ self, video as video_presets, TrackPublishOptions, VideoCodec, VideoEncoding, VideoPreset, }; use livekit::prelude::*; -use livekit::webrtc::stats::RtcStats; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution}; @@ -17,7 +16,6 @@ use 
nokhwa::utils::{ Resolution, }; use nokhwa::Camera; -use std::collections::VecDeque; use std::env; use std::sync::{ atomic::{AtomicBool, Ordering}, @@ -90,9 +88,6 @@ struct Args { e2ee_key: Option, } -const OUTBOUND_STATS_POLL_INTERVAL: Duration = Duration::from_millis(100); -const MAX_PENDING_TIMING_FRAMES: usize = 512; - fn unix_time_us_now() -> i64 { SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64 } @@ -109,23 +104,10 @@ impl RollingMs { self.samples += 1; } - fn record_total(&mut self, total_ms: f64, samples: u64) { - if samples == 0 { - return; - } - - self.total_ms += total_ms; - self.samples += samples; - } - fn average(&self) -> Option { (self.samples > 0).then_some(self.total_ms / self.samples as f64) } - fn display(&self) -> String { - self.average().map(|value| format!("{value:.2}")).unwrap_or_else(|| "n/a".to_string()) - } - fn reset(&mut self) { *self = Self::default(); } @@ -138,14 +120,7 @@ struct PublisherTimingSummary { buffer_convert_ms: RollingMs, buffer_prepare_ms: RollingMs, webrtc_capture_ms: RollingMs, - capture_to_buffer_ready_ms: RollingMs, capture_to_webrtc_capture_ms: RollingMs, - encode_complete_estimate_ms: RollingMs, - packet_departure_estimate_ms: RollingMs, - encode_cpu_ms: RollingMs, - packet_send_delay_ms: RollingMs, - sleep_ms: RollingMs, - iteration_ms: RollingMs, } impl PublisherTimingSummary { @@ -155,145 +130,33 @@ impl PublisherTimingSummary { self.buffer_convert_ms.reset(); self.buffer_prepare_ms.reset(); self.webrtc_capture_ms.reset(); - self.capture_to_buffer_ready_ms.reset(); self.capture_to_webrtc_capture_ms.reset(); - self.encode_complete_estimate_ms.reset(); - self.packet_departure_estimate_ms.reset(); - self.encode_cpu_ms.reset(); - self.packet_send_delay_ms.reset(); - self.sleep_ms.reset(); - self.iteration_ms.reset(); } } -#[derive(Clone)] -struct PrimaryOutboundStatsSnapshot { - stream_key: String, - frames_encoded: u32, - frames_sent: u32, - packets_sent: u64, - total_encode_time_s: f64, - 
total_packet_send_delay_s: f64, -} - -#[derive(Default)] -struct OutboundTimingTracker { - pending_encode_capture_times_us: VecDeque, - pending_departure_capture_times_us: VecDeque, - last_snapshot: Option, -} - -impl OutboundTimingTracker { - fn on_frame_captured(&mut self, capture_wall_time_us: i64) { - self.pending_encode_capture_times_us.push_back(capture_wall_time_us); - self.pending_departure_capture_times_us.push_back(capture_wall_time_us); +fn format_timing_line(timings: &PublisherTimingSummary) -> String { + let mut parts = + vec![format!("capture {:.2}", timings.camera_capture_ms.average().unwrap_or_default())]; - while self.pending_encode_capture_times_us.len() > MAX_PENDING_TIMING_FRAMES { - self.pending_encode_capture_times_us.pop_front(); - } - - while self.pending_departure_capture_times_us.len() > MAX_PENDING_TIMING_FRAMES { - self.pending_departure_capture_times_us.pop_front(); - } + if let Some(decode_ms) = timings.decode_ms.average() { + parts.push(format!("decode {:.2}", decode_ms)); } - fn update_from_stats(&mut self, stats: &[RtcStats], timings: &mut PublisherTimingSummary) { - let Some(snapshot) = select_primary_outbound_video_stats(stats) else { - return; - }; - - let Some(previous_snapshot) = self.last_snapshot.as_ref() else { - self.last_snapshot = Some(snapshot); - return; - }; - - if previous_snapshot.stream_key != snapshot.stream_key { - self.last_snapshot = Some(snapshot); - return; - } - - let encoded_delta = - snapshot.frames_encoded.saturating_sub(previous_snapshot.frames_encoded); - let frames_sent_delta = snapshot.frames_sent.saturating_sub(previous_snapshot.frames_sent); - let packets_sent_delta = - snapshot.packets_sent.saturating_sub(previous_snapshot.packets_sent); - let now_us = unix_time_us_now(); - - for _ in 0..encoded_delta.min(self.pending_encode_capture_times_us.len() as u32) { - if let Some(capture_wall_time_us) = self.pending_encode_capture_times_us.pop_front() { - timings - .encode_complete_estimate_ms - 
.record((now_us - capture_wall_time_us) as f64 / 1000.0); - } - } - - for _ in 0..frames_sent_delta.min(self.pending_departure_capture_times_us.len() as u32) { - if let Some(capture_wall_time_us) = self.pending_departure_capture_times_us.pop_front() - { - timings - .packet_departure_estimate_ms - .record((now_us - capture_wall_time_us) as f64 / 1000.0); - } - } - - if encoded_delta > 0 { - let encode_total_delta_ms = - (snapshot.total_encode_time_s - previous_snapshot.total_encode_time_s).max(0.0) - * 1000.0; - timings.encode_cpu_ms.record_total(encode_total_delta_ms, encoded_delta as u64); - } - - if packets_sent_delta > 0 { - let packet_send_delay_total_delta_ms = (snapshot.total_packet_send_delay_s - - previous_snapshot.total_packet_send_delay_s) - .max(0.0) - * 1000.0; - timings - .packet_send_delay_ms - .record_total(packet_send_delay_total_delta_ms, packets_sent_delta); - } - - self.last_snapshot = Some(snapshot); - } -} - -fn select_primary_outbound_video_stats(stats: &[RtcStats]) -> Option { - stats - .iter() - .filter_map(|stat| match stat { - RtcStats::OutboundRtp(outbound) if outbound.outbound.active => Some(outbound), - _ => None, - }) - .max_by(|left, right| { - let left_rank = ( - left.outbound.frame_width as u64 * left.outbound.frame_height as u64, - left.outbound.frames_sent as u64, - left.outbound.frames_encoded as u64, - ); - let right_rank = ( - right.outbound.frame_width as u64 * right.outbound.frame_height as u64, - right.outbound.frames_sent as u64, - right.outbound.frames_encoded as u64, - ); - left_rank.cmp(&right_rank) - }) - .map(|outbound| PrimaryOutboundStatsSnapshot { - stream_key: format!( - "{}:{}x{}", - if outbound.outbound.rid.is_empty() { - "primary" - } else { - outbound.outbound.rid.as_str() - }, - outbound.outbound.frame_width, - outbound.outbound.frame_height, - ), - frames_encoded: outbound.outbound.frames_encoded, - frames_sent: outbound.outbound.frames_sent, - packets_sent: outbound.sent.packets_sent, - total_encode_time_s: 
outbound.outbound.total_encode_time, - total_packet_send_delay_s: outbound.outbound.total_packet_send_delay, - }) + parts.push(format!( + "convert_to_i420 {:.2}", + timings.buffer_convert_ms.average().unwrap_or_default() + )); + parts.push(format!("buffer {:.2}", timings.buffer_prepare_ms.average().unwrap_or_default())); + parts.push(format!( + "webrtc_capture {:.2}", + timings.webrtc_capture_ms.average().unwrap_or_default() + )); + parts.push(format!( + "capture_to_webrtc {:.2}", + timings.capture_to_webrtc_capture_ms.average().unwrap_or_default() + )); + + format!("Timing ms: {}", parts.join(" | ")) } fn list_cameras() -> Result<()> { @@ -511,13 +374,11 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Capture loop let mut frames: u64 = 0; let mut last_fps_log = Instant::now(); - let mut last_outbound_stats_poll = Instant::now(); let target = Duration::from_secs_f64(1.0 / pace_fps); info!("Target frame interval: {:.2} ms", target.as_secs_f64() * 1000.0); // Timing accumulators (ms) for rolling stats let mut timings = PublisherTimingSummary::default(); - let mut outbound_timing_tracker = OutboundTimingTracker::default(); let mut logged_mjpeg_fallback = false; let mut frame_counter: u32 = 0; loop { @@ -529,15 +390,15 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { ticker.tick().await; let iter_start = Instant::now(); - // Capture the frame as early as possible and use the wall clock as the - // end-to-end reference for later encode and egress estimates. + // Capture the frame as early as possible so the attached timestamp is + // close to the camera acquisition point. 
let capture_wall_time_us = unix_time_us_now(); let camera_capture_started_at = Instant::now(); let frame_buf = camera.frame()?; let camera_frame_acquired_at = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - let (decode_finished_at, buffer_ready_at) = if is_yuyv { + let (decode_finished_at, buffer_ready_at, used_decode_path) = if is_yuyv { // Fast path for YUYV: convert directly to I420 via libyuv let src = frame_buf.buffer(); let src_bytes = src.as_ref(); @@ -557,7 +418,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - (camera_frame_acquired_at, Instant::now()) + (camera_frame_acquired_at, Instant::now(), false) } else { // Auto path (either RGB24 already or compressed MJPEG) let src = frame_buf.buffer(); @@ -577,7 +438,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - (camera_frame_acquired_at, Instant::now()) + (camera_frame_acquired_at, Instant::now(), false) } else { // Try fast MJPEG->I420 via libyuv if available; fallback to image crate let mut used_fast_mjpeg = false; @@ -605,7 +466,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { } }; if used_fast_mjpeg { - (camera_frame_acquired_at, fast_mjpeg_buffer_ready_at) + (fast_mjpeg_buffer_ready_at, fast_mjpeg_buffer_ready_at, true) } else { // Fallback: decode MJPEG using image crate then RGB24->I420 match image::load_from_memory(src.as_ref()) { @@ -635,7 +496,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { height as i32, ); } - (decode_finished_at, Instant::now()) + (decode_finished_at, Instant::now(), true) } Err(e2) => { if !logged_mjpeg_fallback { @@ -665,20 +526,18 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { } rtc_source.capture_frame(&frame); let webrtc_capture_finished_at = Instant::now(); - outbound_timing_tracker.on_frame_captured(capture_wall_time_us); frames += 1; - // We already 
paced via interval; measure actual sleep time for logging only - let sleep_duration = iter_start - wait_start; // Per-iteration timing bookkeeping - let iteration_finished_at = Instant::now(); timings .camera_capture_ms .record((camera_frame_acquired_at - camera_capture_started_at).as_secs_f64() * 1000.0); - timings - .decode_ms - .record((decode_finished_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); + if used_decode_path { + timings + .decode_ms + .record((decode_finished_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); + } timings .buffer_convert_ms .record((buffer_ready_at - decode_finished_at).as_secs_f64() * 1000.0); @@ -688,45 +547,21 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { timings .webrtc_capture_ms .record((webrtc_capture_finished_at - buffer_ready_at).as_secs_f64() * 1000.0); - timings - .capture_to_buffer_ready_ms - .record((buffer_ready_at - camera_capture_started_at).as_secs_f64() * 1000.0); timings.capture_to_webrtc_capture_ms.record( (webrtc_capture_finished_at - camera_capture_started_at).as_secs_f64() * 1000.0, ); - timings.sleep_ms.record(sleep_duration.as_secs_f64() * 1000.0); - timings.iteration_ms.record((iteration_finished_at - iter_start).as_secs_f64() * 1000.0); - - if last_outbound_stats_poll.elapsed() >= OUTBOUND_STATS_POLL_INTERVAL { - if let Ok(stats) = track.get_stats().await { - outbound_timing_tracker.update_from_stats(&stats, &mut timings); - } - last_outbound_stats_poll = Instant::now(); - } if last_fps_log.elapsed() >= std::time::Duration::from_secs(2) { let secs = last_fps_log.elapsed().as_secs_f64(); let fps_est = frames as f64 / secs; info!( - "Publishing video: {}x{}, ~{:.1} fps | stage ms: camera {:.2}, decode {:.2}, convert {:.2}, buffer {:.2}, webrtc {:.2}, sleep {:.2}, iter {:.2} | capture->ms: i420 {:.2}, webrtc {:.2}, encode~ {}, egress~ {} | stats ms: encode_cpu {}, packet_send_delay {} | target {:.2}", + "Video status: {}x{} | ~{:.1} fps | target {:.2} ms", width, height, 
fps_est, - timings.camera_capture_ms.average().unwrap_or_default(), - timings.decode_ms.average().unwrap_or_default(), - timings.buffer_convert_ms.average().unwrap_or_default(), - timings.buffer_prepare_ms.average().unwrap_or_default(), - timings.webrtc_capture_ms.average().unwrap_or_default(), - timings.sleep_ms.average().unwrap_or_default(), - timings.iteration_ms.average().unwrap_or_default(), - timings.capture_to_buffer_ready_ms.average().unwrap_or_default(), - timings.capture_to_webrtc_capture_ms.average().unwrap_or_default(), - timings.encode_complete_estimate_ms.display(), - timings.packet_departure_estimate_ms.display(), - timings.encode_cpu_ms.display(), - timings.packet_send_delay_ms.display(), target.as_secs_f64() * 1000.0, ); + info!("{}", format_timing_line(&timings)); frames = 0; timings.reset(); last_fps_log = Instant::now(); From a8412fbaf795519fd2e66c7a4218b0a422e386ed Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 10 Mar 2026 22:13:24 -0700 Subject: [PATCH 31/52] add timestamp on video frame option --- examples/local_video/src/publisher.rs | 41 +++++++++++++++++++-------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index a39c7e89e..1f18121cc 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -24,6 +24,10 @@ use std::sync::{ use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use yuv_sys; +mod timestamp_burn; + +use timestamp_burn::TimestampOverlay; + #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] struct Args { @@ -83,6 +87,10 @@ struct Args { #[arg(long, default_value_t = false)] attach_timestamp: bool, + /// Burn the attached timestamp into each video frame; does nothing unless --attach-timestamp is also enabled + #[arg(long, default_value_t = false)] + burn_timestamp: bool, + /// Shared encryption key for E2EE (enables AES-GCM end-to-end encryption when set) 
#[arg(long)] e2ee_key: Option, @@ -118,8 +126,8 @@ struct PublisherTimingSummary { camera_capture_ms: RollingMs, decode_ms: RollingMs, buffer_convert_ms: RollingMs, + frame_draw_ms: RollingMs, buffer_prepare_ms: RollingMs, - webrtc_capture_ms: RollingMs, capture_to_webrtc_capture_ms: RollingMs, } @@ -128,8 +136,8 @@ impl PublisherTimingSummary { self.camera_capture_ms.reset(); self.decode_ms.reset(); self.buffer_convert_ms.reset(); + self.frame_draw_ms.reset(); self.buffer_prepare_ms.reset(); - self.webrtc_capture_ms.reset(); self.capture_to_webrtc_capture_ms.reset(); } } @@ -146,11 +154,10 @@ fn format_timing_line(timings: &PublisherTimingSummary) -> String { "convert_to_i420 {:.2}", timings.buffer_convert_ms.average().unwrap_or_default() )); + if let Some(frame_draw_ms) = timings.frame_draw_ms.average() { + parts.push(format!("frame_draw {:.2}", frame_draw_ms)); + } parts.push(format!("buffer {:.2}", timings.buffer_prepare_ms.average().unwrap_or_default())); - parts.push(format!( - "webrtc_capture {:.2}", - timings.webrtc_capture_ms.average().unwrap_or_default() - )); parts.push(format!( "capture_to_webrtc {:.2}", timings.capture_to_webrtc_capture_ms.average().unwrap_or_default() @@ -381,14 +388,14 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let mut timings = PublisherTimingSummary::default(); let mut logged_mjpeg_fallback = false; let mut frame_counter: u32 = 0; + let mut timestamp_overlay = (args.attach_timestamp && args.burn_timestamp) + .then(|| TimestampOverlay::new(width, height)); loop { if ctrl_c_received.load(Ordering::Acquire) { break; } // Wait until the scheduled next frame time - let wait_start = Instant::now(); ticker.tick().await; - let iter_start = Instant::now(); // Capture the frame as early as possible so the attached timestamp is // close to the camera acquisition point. 
@@ -398,7 +405,8 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let camera_frame_acquired_at = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - let (decode_finished_at, buffer_ready_at, used_decode_path) = if is_yuyv { + let stride_y_usize = stride_y as usize; + let (decode_finished_at, mut buffer_ready_at, used_decode_path) = if is_yuyv { // Fast path for YUYV: convert directly to I420 via libyuv let src = frame_buf.buffer(); let src_bytes = src.as_ref(); @@ -513,6 +521,15 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { } }; + let mut frame_draw_ms = None; + if let Some(overlay) = timestamp_overlay.as_mut() { + let overlay_started_at = Instant::now(); + overlay.draw(data_y, stride_y_usize, capture_wall_time_us); + let overlay_finished_at = Instant::now(); + frame_draw_ms = Some((overlay_finished_at - overlay_started_at).as_secs_f64() * 1000.0); + buffer_ready_at = overlay_finished_at; + } + // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; // Optionally attach wall-clock time as user timestamp and frame_id @@ -541,12 +558,12 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { timings .buffer_convert_ms .record((buffer_ready_at - decode_finished_at).as_secs_f64() * 1000.0); + if let Some(frame_draw_ms) = frame_draw_ms { + timings.frame_draw_ms.record(frame_draw_ms); + } timings .buffer_prepare_ms .record((buffer_ready_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); - timings - .webrtc_capture_ms - .record((webrtc_capture_finished_at - buffer_ready_at).as_secs_f64() * 1000.0); timings.capture_to_webrtc_capture_ms.record( (webrtc_capture_finished_at - camera_capture_started_at).as_secs_f64() * 1000.0, ); From 6ab6250cbf7a80d96fe36344eca977aa67f3cf1d Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Mar 2026 00:15:43 -0700 Subject: [PATCH 32/52] add 
timestamp burn mod --- examples/local_video/src/timestamp_burn.rs | 161 +++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 examples/local_video/src/timestamp_burn.rs diff --git a/examples/local_video/src/timestamp_burn.rs b/examples/local_video/src/timestamp_burn.rs new file mode 100644 index 000000000..8a86dd3a7 --- /dev/null +++ b/examples/local_video/src/timestamp_burn.rs @@ -0,0 +1,161 @@ +use chrono::{DateTime, Datelike, Timelike, Utc}; + +const TIMESTAMP_TEXT_LEN: usize = 23; // YYYY-MM-DD HH:MM:SS:SSS +const TIMESTAMP_GLYPH_COUNT: usize = 13; // 0-9, :, -, space +const TIMESTAMP_GLYPH_WIDTH: usize = 5; +const TIMESTAMP_GLYPH_HEIGHT: usize = 7; +const TIMESTAMP_GLYPH_SCALE: usize = 2; +const TIMESTAMP_GLYPH_SPACING: usize = 2; +const TIMESTAMP_PADDING_X: usize = 4; +const TIMESTAMP_PADDING_Y: usize = 4; +const TIMESTAMP_MARGIN: usize = 8; +const TIMESTAMP_BG_LUMA: u8 = 16; +const TIMESTAMP_FG_LUMA: u8 = 235; +const TIMESTAMP_RASTER_WIDTH: usize = TIMESTAMP_GLYPH_WIDTH * TIMESTAMP_GLYPH_SCALE; +const TIMESTAMP_RASTER_HEIGHT: usize = TIMESTAMP_GLYPH_HEIGHT * TIMESTAMP_GLYPH_SCALE; +const TIMESTAMP_GLYPH_COLON: u8 = 10; +const TIMESTAMP_GLYPH_DASH: u8 = 11; +const TIMESTAMP_GLYPH_SPACE: u8 = 12; + +type TimestampGlyph = [[u8; TIMESTAMP_RASTER_WIDTH]; TIMESTAMP_RASTER_HEIGHT]; + +const TIMESTAMP_GLYPH_PATTERNS: [[u8; TIMESTAMP_GLYPH_HEIGHT]; TIMESTAMP_GLYPH_COUNT] = [ + [0b01110, 0b10001, 0b10011, 0b10101, 0b11001, 0b10001, 0b01110], // 0 + [0b00100, 0b01100, 0b00100, 0b00100, 0b00100, 0b00100, 0b01110], // 1 + [0b01110, 0b10001, 0b00001, 0b00010, 0b00100, 0b01000, 0b11111], // 2 + [0b11110, 0b00001, 0b00001, 0b01110, 0b00001, 0b00001, 0b11110], // 3 + [0b00010, 0b00110, 0b01010, 0b10010, 0b11111, 0b00010, 0b00010], // 4 + [0b11111, 0b10000, 0b10000, 0b11110, 0b00001, 0b00001, 0b11110], // 5 + [0b01110, 0b10000, 0b10000, 0b11110, 0b10001, 0b10001, 0b01110], // 6 + [0b11111, 0b00001, 0b00010, 0b00100, 0b01000, 0b01000, 0b01000], // 7 + 
[0b01110, 0b10001, 0b10001, 0b01110, 0b10001, 0b10001, 0b01110], // 8 + [0b01110, 0b10001, 0b10001, 0b01111, 0b00001, 0b00001, 0b01110], // 9 + [0b00000, 0b00000, 0b00100, 0b00000, 0b00100, 0b00000, 0b00000], // : + [0b00000, 0b00000, 0b00000, 0b01110, 0b00000, 0b00000, 0b00000], // - + [0b00000, 0b00000, 0b00000, 0b00000, 0b00000, 0b00000, 0b00000], // space +]; + +pub struct TimestampOverlay { + glyphs: [TimestampGlyph; TIMESTAMP_GLYPH_COUNT], + glyph_ids: [u8; TIMESTAMP_TEXT_LEN], + box_x: usize, + box_y: usize, + box_width: usize, + box_height: usize, + text_x: usize, + text_y: usize, + enabled: bool, +} + +impl TimestampOverlay { + pub fn new(frame_width: u32, frame_height: u32) -> Self { + let text_width = TIMESTAMP_TEXT_LEN * TIMESTAMP_RASTER_WIDTH + + (TIMESTAMP_TEXT_LEN.saturating_sub(1)) * TIMESTAMP_GLYPH_SPACING; + let box_width = text_width + TIMESTAMP_PADDING_X * 2; + let box_height = TIMESTAMP_RASTER_HEIGHT + TIMESTAMP_PADDING_Y * 2; + let frame_width = frame_width as usize; + let frame_height = frame_height as usize; + let enabled = frame_width >= box_width + TIMESTAMP_MARGIN + && frame_height >= box_height + TIMESTAMP_MARGIN; + let box_x = TIMESTAMP_MARGIN; + let box_y = frame_height.saturating_sub(TIMESTAMP_MARGIN + box_height); + + Self { + glyphs: rasterize_timestamp_glyphs(), + glyph_ids: [0; TIMESTAMP_TEXT_LEN], + box_x, + box_y, + box_width, + box_height, + text_x: box_x + TIMESTAMP_PADDING_X, + text_y: box_y + TIMESTAMP_PADDING_Y, + enabled, + } + } + + pub fn draw(&mut self, data_y: &mut [u8], stride_y: usize, timestamp_us: i64) { + if !self.enabled { + return; + } + + format_timestamp_glyphs(timestamp_us, &mut self.glyph_ids); + + for row in 0..self.box_height { + let row_start = (self.box_y + row) * stride_y + self.box_x; + let row_end = row_start + self.box_width; + data_y[row_start..row_end].fill(TIMESTAMP_BG_LUMA); + } + + for (glyph_pos, glyph_id) in self.glyph_ids.iter().copied().enumerate() { + let glyph = &self.glyphs[glyph_id as 
usize]; + let glyph_x = + self.text_x + glyph_pos * (TIMESTAMP_RASTER_WIDTH + TIMESTAMP_GLYPH_SPACING); + for (row, glyph_row) in glyph.iter().enumerate() { + let row_start = (self.text_y + row) * stride_y + glyph_x; + let row_end = row_start + TIMESTAMP_RASTER_WIDTH; + data_y[row_start..row_end].copy_from_slice(glyph_row); + } + } + } +} + +fn rasterize_timestamp_glyphs() -> [TimestampGlyph; TIMESTAMP_GLYPH_COUNT] { + let mut glyphs = [[[TIMESTAMP_BG_LUMA; TIMESTAMP_RASTER_WIDTH]; TIMESTAMP_RASTER_HEIGHT]; + TIMESTAMP_GLYPH_COUNT]; + + for (glyph_idx, pattern) in TIMESTAMP_GLYPH_PATTERNS.iter().enumerate() { + for (src_y, row_bits) in pattern.iter().copied().enumerate() { + for scale_y in 0..TIMESTAMP_GLYPH_SCALE { + let dst_row = &mut glyphs[glyph_idx][src_y * TIMESTAMP_GLYPH_SCALE + scale_y]; + for src_x in 0..TIMESTAMP_GLYPH_WIDTH { + let bit = 1 << (TIMESTAMP_GLYPH_WIDTH - 1 - src_x); + if row_bits & bit != 0 { + let dst_x = src_x * TIMESTAMP_GLYPH_SCALE; + dst_row[dst_x..dst_x + TIMESTAMP_GLYPH_SCALE].fill(TIMESTAMP_FG_LUMA); + } + } + } + } + } + + glyphs +} + +fn format_timestamp_glyphs(timestamp_us: i64, out: &mut [u8; TIMESTAMP_TEXT_LEN]) { + let Some(dt) = DateTime::::from_timestamp_micros(timestamp_us) else { + out.fill(0); + return; + }; + + write_four_digits(&mut out[0..4], dt.year_ce().1); + out[4] = TIMESTAMP_GLYPH_DASH; + write_two_digits(&mut out[5..7], dt.month()); + out[7] = TIMESTAMP_GLYPH_DASH; + write_two_digits(&mut out[8..10], dt.day()); + out[10] = TIMESTAMP_GLYPH_SPACE; + write_two_digits(&mut out[11..13], dt.hour()); + out[13] = TIMESTAMP_GLYPH_COLON; + write_two_digits(&mut out[14..16], dt.minute()); + out[16] = TIMESTAMP_GLYPH_COLON; + write_two_digits(&mut out[17..19], dt.second()); + out[19] = TIMESTAMP_GLYPH_COLON; + write_three_digits(&mut out[20..23], dt.timestamp_subsec_millis()); +} + +fn write_two_digits(dst: &mut [u8], value: u32) { + dst[0] = (value / 10) as u8; + dst[1] = (value % 10) as u8; +} + +fn write_three_digits(dst: 
&mut [u8], value: u32) { + dst[0] = (value / 100) as u8; + dst[1] = ((value / 10) % 10) as u8; + dst[2] = (value % 10) as u8; +} + +fn write_four_digits(dst: &mut [u8], value: u32) { + dst[0] = ((value / 1_000) % 10) as u8; + dst[1] = ((value / 100) % 10) as u8; + dst[2] = ((value / 10) % 10) as u8; + dst[3] = (value % 10) as u8; +} From 790d2542b7a1c476e18c04900d3bb7a819c7b3cb Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Mar 2026 11:29:25 -0700 Subject: [PATCH 33/52] move to TLV trailer format --- webrtc-sys/include/livekit/user_timestamp.h | 17 ++++- webrtc-sys/src/user_timestamp.cpp | 82 +++++++++++++++------ 2 files changed, 74 insertions(+), 25 deletions(-) diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index f55812c0a..dc7449573 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/user_timestamp.h @@ -47,8 +47,21 @@ namespace livekit_ffi { // Magic bytes to identify user timestamp trailers: "LKTS" (LiveKit TimeStamp) constexpr uint8_t kUserTimestampMagic[4] = {'L', 'K', 'T', 'S'}; -constexpr size_t kUserTimestampTrailerSize = - 16; // 8 bytes timestamp + 4 bytes frame_id + 4 bytes magic + +// Trailer envelope: [trailer_len: 1B] [magic: 4B] = 5 bytes. +// Always present at the end of every trailer. +constexpr size_t kTrailerEnvelopeSize = 5; + +// TLV element overhead: [tag: 1B] [len: 1B] = 2 bytes before value. +// All TLV bytes (tag, len, value) are XORed with 0xFF. 
+ +// TLV tag IDs +constexpr uint8_t kTagTimestampUs = 0x01; // value: 8 bytes big-endian int64 +constexpr uint8_t kTagFrameId = 0x02; // value: 4 bytes big-endian uint32 + +// Current trailer size with both TLV elements: +// (1+1+8) + (1+1+4) + 5 envelope = 21 bytes +constexpr size_t kUserTimestampTrailerSize = 21; struct FrameMetadata { int64_t user_timestamp_us; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index c3a3f831a..32d80f409 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -212,24 +212,28 @@ std::vector UserTimestampTransformer::AppendTrailer( // Copy original data result.insert(result.end(), data.begin(), data.end()); - // Append user_timestamp_us (big-endian, 8 bytes) XORed with 0xFF to - // prevent H.264 NAL start code sequences (0x000001 / 0x00000001) from - // appearing inside the trailer. The H.264 packetizer scans the full - // frame payload for start codes, and the trailer's raw bytes can - // contain 0x000001 (e.g. frame_id 256 = 0x00000100). + // All TLV bytes are XORed with 0xFF to prevent H.264 NAL start code + // sequences (0x000001 / 0x00000001) from appearing inside the trailer. 
+ + // TLV: timestamp_us (tag=0x01, len=8, 8 bytes big-endian) + result.push_back(kTagTimestampUs ^ 0xFF); + result.push_back(8 ^ 0xFF); for (int i = 7; i >= 0; --i) { result.push_back( static_cast(((user_timestamp_us >> (i * 8)) & 0xFF) ^ 0xFF)); } - // Append frame_id (big-endian, 4 bytes), also XORed + // TLV: frame_id (tag=0x02, len=4, 4 bytes big-endian) + result.push_back(kTagFrameId ^ 0xFF); + result.push_back(4 ^ 0xFF); for (int i = 3; i >= 0; --i) { result.push_back( static_cast(((frame_id >> (i * 8)) & 0xFF) ^ 0xFF)); } - // Append magic bytes (NOT XORed — they must remain recognizable and - // already contain no 0x00/0x01 bytes) + // Envelope: trailer_len (1B, XORed) + magic (4B, NOT XORed) + result.push_back( + static_cast(kUserTimestampTrailerSize ^ 0xFF)); result.insert(result.end(), std::begin(kUserTimestampMagic), std::end(kUserTimestampMagic)); @@ -239,7 +243,7 @@ std::vector UserTimestampTransformer::AppendTrailer( std::optional UserTimestampTransformer::ExtractTrailer( rtc::ArrayView data, std::vector& out_data) { - if (data.size() < kUserTimestampTrailerSize) { + if (data.size() < kTrailerEnvelopeSize) { out_data.assign(data.begin(), data.end()); return std::nullopt; } @@ -251,26 +255,58 @@ std::optional UserTimestampTransformer::ExtractTrailer( return std::nullopt; } - const uint8_t* trailer_start = - data.data() + data.size() - kUserTimestampTrailerSize; + uint8_t trailer_len = data[data.size() - 5] ^ 0xFF; - // Extract user_timestamp_us (big-endian, 8 bytes, XORed with 0xFF) - int64_t timestamp = 0; - for (int i = 0; i < 8; ++i) { - timestamp = (timestamp << 8) | (trailer_start[i] ^ 0xFF); + if (trailer_len < kTrailerEnvelopeSize || trailer_len > data.size()) { + out_data.assign(data.begin(), data.end()); + return std::nullopt; } - // Extract frame_id (big-endian, 4 bytes, XORed with 0xFF) - uint32_t frame_id = 0; - for (int i = 0; i < 4; ++i) { - frame_id = (frame_id << 8) | (trailer_start[8 + i] ^ 0xFF); + // Walk the TLV region: everything 
from trailer_start up to the envelope. + const uint8_t* trailer_start = data.data() + data.size() - trailer_len; + size_t tlv_region_len = trailer_len - kTrailerEnvelopeSize; + + FrameMetadata meta{0, 0, 0}; + bool found_any = false; + size_t pos = 0; + + while (pos + 2 <= tlv_region_len) { + uint8_t tag = trailer_start[pos] ^ 0xFF; + uint8_t len = trailer_start[pos + 1] ^ 0xFF; + pos += 2; + + if (pos + len > tlv_region_len) { + break; + } + + const uint8_t* val = trailer_start + pos; + + if (tag == kTagTimestampUs && len == 8) { + int64_t ts = 0; + for (int i = 0; i < 8; ++i) { + ts = (ts << 8) | (val[i] ^ 0xFF); + } + meta.user_timestamp_us = ts; + found_any = true; + } else if (tag == kTagFrameId && len == 4) { + uint32_t fid = 0; + for (int i = 0; i < 4; ++i) { + fid = (fid << 8) | (val[i] ^ 0xFF); + } + meta.frame_id = fid; + found_any = true; + } + // Unknown tags are silently skipped. + + pos += len; } - // Copy data without trailer - out_data.assign(data.begin(), - data.end() - kUserTimestampTrailerSize); + out_data.assign(data.begin(), data.end() - trailer_len); - return FrameMetadata{timestamp, frame_id, 0}; + if (!found_any) { + return std::nullopt; + } + return meta; } void UserTimestampTransformer::RegisterTransformedFrameCallback( From 8c2a96ff9a1d1068796fb1a1072b66e2127d03d4 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 11 Mar 2026 15:27:34 -0700 Subject: [PATCH 34/52] add changeset & add more details to subscriber example for timing --- examples/local_video/src/publisher.rs | 75 ++- examples/local_video/src/subscriber.rs | 579 ++++++++++++-------- examples/local_video/src/timestamp_burn.rs | 2 +- webrtc-sys/include/livekit/user_timestamp.h | 11 +- webrtc-sys/src/user_timestamp.cpp | 23 +- 5 files changed, 426 insertions(+), 264 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 1f18121cc..78aeba206 100644 --- a/examples/local_video/src/publisher.rs +++ 
b/examples/local_video/src/publisher.rs @@ -123,47 +123,62 @@ impl RollingMs { #[derive(Default)] struct PublisherTimingSummary { - camera_capture_ms: RollingMs, - decode_ms: RollingMs, + paced_wait_ms: RollingMs, + camera_frame_read_ms: RollingMs, + decode_mjpeg_ms: RollingMs, buffer_convert_ms: RollingMs, frame_draw_ms: RollingMs, - buffer_prepare_ms: RollingMs, - capture_to_webrtc_capture_ms: RollingMs, + submit_to_webrtc_ms: RollingMs, + capture_to_webrtc_total_ms: RollingMs, } impl PublisherTimingSummary { fn reset(&mut self) { - self.camera_capture_ms.reset(); - self.decode_ms.reset(); + self.paced_wait_ms.reset(); + self.camera_frame_read_ms.reset(); + self.decode_mjpeg_ms.reset(); self.buffer_convert_ms.reset(); self.frame_draw_ms.reset(); - self.buffer_prepare_ms.reset(); - self.capture_to_webrtc_capture_ms.reset(); + self.submit_to_webrtc_ms.reset(); + self.capture_to_webrtc_total_ms.reset(); } } fn format_timing_line(timings: &PublisherTimingSummary) -> String { - let mut parts = - vec![format!("capture {:.2}", timings.camera_capture_ms.average().unwrap_or_default())]; - - if let Some(decode_ms) = timings.decode_ms.average() { - parts.push(format!("decode {:.2}", decode_ms)); + let line_one = vec![ + format!("paced_wait {:.2}", timings.paced_wait_ms.average().unwrap_or_default()), + format!( + "camera_frame_read {:.2}", + timings.camera_frame_read_ms.average().unwrap_or_default() + ), + ]; + let mut line_two = Vec::new(); + + if let Some(decode_ms) = timings.decode_mjpeg_ms.average() { + line_two.push(format!("decode_mjpeg {:.2}", decode_ms)); } - parts.push(format!( + line_two.push(format!( "convert_to_i420 {:.2}", timings.buffer_convert_ms.average().unwrap_or_default() )); if let Some(frame_draw_ms) = timings.frame_draw_ms.average() { - parts.push(format!("frame_draw {:.2}", frame_draw_ms)); + line_two.push(format!("frame_draw {:.2}", frame_draw_ms)); } - parts.push(format!("buffer {:.2}", timings.buffer_prepare_ms.average().unwrap_or_default())); - 
parts.push(format!( - "capture_to_webrtc {:.2}", - timings.capture_to_webrtc_capture_ms.average().unwrap_or_default() + line_two.push(format!( + "submit_to_webrtc {:.2}", + timings.submit_to_webrtc_ms.average().unwrap_or_default() + )); + line_two.push(format!( + "capture_to_webrtc_total {:.2}", + timings.capture_to_webrtc_total_ms.average().unwrap_or_default() )); - format!("Timing ms: {}", parts.join(" | ")) + format!( + "Timing ms: {}\nTiming ms: {}", + line_one.join(" | "), + line_two.join(" | ") + ) } fn list_cameras() -> Result<()> { @@ -395,7 +410,9 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { break; } // Wait until the scheduled next frame time + let paced_wait_started_at = Instant::now(); ticker.tick().await; + let paced_wait_finished_at = Instant::now(); // Capture the frame as early as possible so the attached timestamp is // close to the camera acquisition point. @@ -406,7 +423,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); let stride_y_usize = stride_y as usize; - let (decode_finished_at, mut buffer_ready_at, used_decode_path) = if is_yuyv { + let (decode_finished_at, convert_finished_at, used_decode_path) = if is_yuyv { // Fast path for YUYV: convert directly to I420 via libyuv let src = frame_buf.buffer(); let src_bytes = src.as_ref(); @@ -521,6 +538,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { } }; + let mut buffer_ready_at = convert_finished_at; let mut frame_draw_ms = None; if let Some(overlay) = timestamp_overlay.as_mut() { let overlay_started_at = Instant::now(); @@ -548,23 +566,26 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Per-iteration timing bookkeeping timings - .camera_capture_ms + .paced_wait_ms + .record((paced_wait_finished_at - paced_wait_started_at).as_secs_f64() * 1000.0); + timings + .camera_frame_read_ms 
.record((camera_frame_acquired_at - camera_capture_started_at).as_secs_f64() * 1000.0); if used_decode_path { timings - .decode_ms + .decode_mjpeg_ms .record((decode_finished_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); } timings .buffer_convert_ms - .record((buffer_ready_at - decode_finished_at).as_secs_f64() * 1000.0); + .record((convert_finished_at - decode_finished_at).as_secs_f64() * 1000.0); if let Some(frame_draw_ms) = frame_draw_ms { timings.frame_draw_ms.record(frame_draw_ms); } timings - .buffer_prepare_ms - .record((buffer_ready_at - camera_frame_acquired_at).as_secs_f64() * 1000.0); - timings.capture_to_webrtc_capture_ms.record( + .submit_to_webrtc_ms + .record((webrtc_capture_finished_at - buffer_ready_at).as_secs_f64() * 1000.0); + timings.capture_to_webrtc_total_ms.record( (webrtc_capture_finished_at - camera_capture_started_at).as_secs_f64() * 1000.0, ); diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 72bf7bd18..bd8d778ed 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -67,19 +67,35 @@ struct Args { struct SharedYuv { width: u32, height: u32, - stride_y: u32, - stride_u: u32, - stride_v: u32, + y_bytes_per_row: u32, + uv_bytes_per_row: u32, y: Vec, u: Vec, v: Vec, codec: String, fps: f32, dirty: bool, + /// Time when the latest frame became available to the subscriber code. + received_at_us: Option, /// Last received user timestamp in microseconds, if any. user_timestamp_us: Option, /// Last received frame_id, if any. frame_id: Option, + /// Timing for the latest received frame on the subscriber thread. + latest_to_i420_ms: f32, + latest_pack_ms: f32, + /// Timing for the last frame uploaded to the GPU in `prepare()`. + last_uploaded_frame_id: Option, + last_uploaded_user_timestamp_us: Option, + last_uploaded_receive_us: Option, + last_uploaded_us: Option, + last_upload_ms: f32, + /// Timing for the last frame seen by the paint callback. 
+ last_painted_frame_id: Option, + last_painted_user_timestamp_us: Option, + last_painted_receive_us: Option, + last_painted_upload_us: Option, + last_painted_us: Option, } #[derive(Clone)] @@ -128,6 +144,66 @@ fn infer_quality_from_dims( } } +fn find_video_inbound_stats( + stats: &[livekit::webrtc::stats::RtcStats], +) -> Option { + stats.iter().find_map(|stat| match stat { + livekit::webrtc::stats::RtcStats::InboundRtp(inbound) if inbound.stream.kind == "video" => { + Some(inbound.clone()) + } + _ => None, + }) +} + +fn log_video_inbound_stats(stats: &[livekit::webrtc::stats::RtcStats]) { + let mut codec_by_id: HashMap = HashMap::new(); + for stat in stats { + if let livekit::webrtc::stats::RtcStats::Codec(codec) = stat { + codec_by_id.insert( + codec.rtc.id.clone(), + (codec.codec.mime_type.clone(), codec.codec.sdp_fmtp_line.clone()), + ); + } + } + + if let Some(inbound) = find_video_inbound_stats(stats) { + if let Some((mime, fmtp)) = codec_by_id.get(&inbound.stream.codec_id) { + info!("Inbound codec: {} (fmtp: {})", mime, fmtp); + } else { + info!("Inbound codec id: {}", inbound.stream.codec_id); + } + info!( + "Inbound current layer: {}x{} ~{:.1} fps, decoder: {}, power_efficient: {}", + inbound.inbound.frame_width, + inbound.inbound.frame_height, + inbound.inbound.frames_per_second, + inbound.inbound.decoder_implementation, + inbound.inbound.power_efficient_decoder + ); + } +} + +fn update_simulcast_quality_from_stats( + stats: &[livekit::webrtc::stats::RtcStats], + simulcast: &Arc>, +) { + let Some(inbound) = find_video_inbound_stats(stats) else { + return; + }; + let Some((fw, fh)) = simulcast_state_full_dims(simulcast) else { + return; + }; + + let q = infer_quality_from_dims( + fw, + fh, + inbound.inbound.frame_width as u32, + inbound.inbound.frame_height as u32, + ); + let mut sc = simulcast.lock(); + sc.active_quality = Some(q); +} + /// Returns the current wall-clock time as microseconds since Unix epoch. 
fn current_timestamp_us() -> i64 { SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default().as_micros() as i64 @@ -144,6 +220,17 @@ fn format_timestamp_us(ts_us: i64) -> String { .unwrap_or_else(|| format!("")) } +fn format_optional_timestamp_us(ts_us: Option) -> String { + ts_us.map(format_timestamp_us).unwrap_or_else(|| "N/A".to_string()) +} + +fn format_delta_ms(start_us: Option, end_us: Option) -> String { + match (start_us, end_us) { + (Some(start), Some(end)) => format!("{:.1}ms", (end - start) as f64 / 1000.0), + _ => "N/A".to_string(), + } +} + fn simulcast_state_full_dims(state: &Arc>) -> Option<(u32, u32)> { let sc = state.lock(); sc.full_dims @@ -209,48 +296,9 @@ async fn handle_track_subscribed( publication.dimension().1 ); - // Try to fetch inbound RTP/codec stats for more details - match video_track.get_stats().await { - Ok(stats) => { - let mut codec_by_id: HashMap = HashMap::new(); - let mut inbound: Option = None; - for s in stats.iter() { - match s { - livekit::webrtc::stats::RtcStats::Codec(c) => { - codec_by_id.insert( - c.rtc.id.clone(), - (c.codec.mime_type.clone(), c.codec.sdp_fmtp_line.clone()), - ); - } - livekit::webrtc::stats::RtcStats::InboundRtp(i) => { - if i.stream.kind == "video" { - inbound = Some(i.clone()); - } - } - _ => {} - } - } - - if let Some(i) = inbound { - if let Some((mime, fmtp)) = codec_by_id.get(&i.stream.codec_id) { - info!("Inbound codec: {} (fmtp: {})", mime, fmtp); - } else { - info!("Inbound codec id: {}", i.stream.codec_id); - } - info!( - "Inbound current layer: {}x{} ~{:.1} fps, decoder: {}, power_efficient: {}", - i.inbound.frame_width, - i.inbound.frame_height, - i.inbound.frames_per_second, - i.inbound.decoder_implementation, - i.inbound.power_efficient_decoder - ); - } - } - Err(e) => debug!("Failed to get stats for video track: {:?}", e), - } + let rtc_track = video_track.rtc_track(); - // Start background sink thread + // Start background sink thread immediately so stats lookup cannot delay 
first-frame handling. let shared2 = shared.clone(); let active_sid2 = active_sid.clone(); let my_sid = sid.clone(); @@ -266,13 +314,11 @@ async fn handle_track_subscribed( sc.active_quality = None; sc.publication = Some(publication.clone()); } - let simulcast2 = simulcast.clone(); std::thread::spawn(move || { - let mut sink = NativeVideoStream::new(video_track.rtc_track()); + let mut sink = NativeVideoStream::new(rtc_track); let mut frames: u64 = 0; let mut last_log = Instant::now(); let mut logged_first = false; - let mut last_stats = Instant::now(); let mut fps_window_frames: u64 = 0; let mut fps_window_start = Instant::now(); let mut fps_smoothed: f32 = 0.0; @@ -291,6 +337,7 @@ async fn handle_track_subscribed( } }); let Some(frame) = next else { break }; + let received_at_us = current_timestamp_us(); let w = frame.buffer.width(); let h = frame.buffer.height(); @@ -300,44 +347,41 @@ async fn handle_track_subscribed( } // Convert to I420 on CPU, but keep planes separate for GPU sampling + let to_i420_started = Instant::now(); let i420 = frame.buffer.to_i420(); + let to_i420_ms = to_i420_started.elapsed().as_secs_f64() * 1000.0; let (sy, su, sv) = i420.strides(); let (dy, du, dv) = i420.data(); - let ch = (h + 1) / 2; + let width = w as u32; + let height = h as u32; + let uv_w = (width + 1) / 2; + let uv_h = (height + 1) / 2; + let y_bytes_per_row = align_up(width, 256); + let uv_bytes_per_row = align_up(uv_w, 256); - // Ensure capacity and copy full plane slices - let y_size = (sy * h) as usize; - let u_size = (su * ch) as usize; - let v_size = (sv * ch) as usize; - if y_buf.len() != y_size { - y_buf.resize(y_size, 0); - } - if u_buf.len() != u_size { - u_buf.resize(u_size, 0); - } - if v_buf.len() != v_size { - v_buf.resize(v_size, 0); - } - y_buf.copy_from_slice(dy); - u_buf.copy_from_slice(du); - v_buf.copy_from_slice(dv); + // Pre-pack planes into GPU-ready rows on the sink thread so prepare() + // can upload directly without another repack pass. 
+ let pack_started = Instant::now(); + pack_plane(dy, sy as u32, width, height, y_bytes_per_row, &mut y_buf); + pack_plane(du, su as u32, uv_w, uv_h, uv_bytes_per_row, &mut u_buf); + pack_plane(dv, sv as u32, uv_w, uv_h, uv_bytes_per_row, &mut v_buf); + let pack_ms = pack_started.elapsed().as_secs_f64() * 1000.0; // Swap buffers into shared state let mut s = shared2.lock(); - s.width = w as u32; - s.height = h as u32; - s.stride_y = sy as u32; - s.stride_u = su as u32; - s.stride_v = sv as u32; + s.width = width; + s.height = height; + s.y_bytes_per_row = y_bytes_per_row; + s.uv_bytes_per_row = uv_bytes_per_row; std::mem::swap(&mut s.y, &mut y_buf); std::mem::swap(&mut s.u, &mut u_buf); std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; + s.received_at_us = Some(received_at_us); if let Some(ts) = frame.user_timestamp_us { - let now_us = current_timestamp_us(); - let delta_ms = (now_us - ts) as f64 / 1000.0; + let delta_ms = (received_at_us - ts) as f64 / 1000.0; if ts < 0 || ts > 2_000_000_000_000_000 || delta_ms < -60_000.0 { log::warn!( "[Subscriber] BAD TIMESTAMP: frame_id={:?} user_ts={} \ @@ -346,7 +390,7 @@ async fn handle_track_subscribed( frame.frame_id, ts, frame.timestamp_us, - now_us, + received_at_us, delta_ms, s.user_timestamp_us, s.frame_id, @@ -356,6 +400,8 @@ async fn handle_track_subscribed( s.user_timestamp_us = frame.user_timestamp_us; s.frame_id = frame.frame_id; + s.latest_to_i420_ms = to_i420_ms as f32; + s.latest_pack_ms = pack_ms as f32; // Update smoothed FPS (~500ms window) fps_window_frames += 1; @@ -381,32 +427,6 @@ async fn handle_track_subscribed( frames = 0; last_log = Instant::now(); } - // Periodically infer active simulcast quality from inbound stats - if last_stats.elapsed() >= Duration::from_secs(1) { - if let Ok(stats) = rt_clone.block_on(video_track.get_stats()) { - let mut inbound: Option = None; - for s in stats.iter() { - if let livekit::webrtc::stats::RtcStats::InboundRtp(i) = s { - if i.stream.kind == "video" { - 
inbound = Some(i.clone()); - } - } - } - if let Some(i) = inbound { - if let Some((fw, fh)) = simulcast_state_full_dims(&simulcast2) { - let q = infer_quality_from_dims( - fw, - fh, - i.inbound.frame_width as u32, - i.inbound.frame_height as u32, - ); - let mut sc = simulcast2.lock(); - sc.active_quality = Some(q); - } - } - } - last_stats = Instant::now(); - } } info!("Video stream ended for {}", my_sid); // Clear active sid if still ours @@ -415,6 +435,42 @@ async fn handle_track_subscribed( *active = None; } }); + + let ctrl_c_stats = ctrl_c_received.clone(); + let active_sid_stats = active_sid.clone(); + let my_sid_stats = sid.clone(); + let simulcast_stats = simulcast.clone(); + tokio::spawn(async move { + let mut logged_initial = false; + let mut interval = tokio::time::interval(Duration::from_secs(1)); + interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + + loop { + if ctrl_c_stats.load(Ordering::Acquire) { + break; + } + if active_sid_stats.lock().as_ref() != Some(&my_sid_stats) { + break; + } + + match video_track.get_stats().await { + Ok(stats) => { + if !logged_initial { + log_video_inbound_stats(&stats); + logged_initial = true; + } + update_simulcast_quality_from_stats(&stats, &simulcast_stats); + } + Err(e) if !logged_initial => { + debug!("Failed to get stats for video track: {:?}", e); + logged_initial = true; + } + Err(_) => {} + } + + interval.tick().await; + } + }); } fn clear_hud_and_simulcast(shared: &Arc>, simulcast: &Arc>) { @@ -422,6 +478,18 @@ fn clear_hud_and_simulcast(shared: &Arc>, simulcast: &Arc, locked_aspect: Option, display_timestamp: bool, - /// Cached latency string, updated at ~2 Hz so it's readable. - latency_display: String, - /// Last time the latency display was refreshed. - latency_last_update: Instant, - /// Cached user timestamp so the overlay doesn't flicker when the shared - /// state momentarily has `None` between frame swaps. 
- cached_user_timestamp_us: Option, - /// Cached frame_id so the overlay doesn't flicker. - cached_frame_id: Option, + timestamp_metrics_text: String, + timestamp_metrics_last_update: Instant, } impl eframe::App for VideoApp { @@ -539,7 +600,6 @@ impl eframe::App for VideoApp { livekit::track::VideoQuality::Low => "Low", livekit::track::VideoQuality::Medium => "Medium", livekit::track::VideoQuality::High => "High", - _ => "Unknown", }) .unwrap_or("?"); text.push_str(&format!("\nSimulcast: {}", layer)); @@ -559,44 +619,58 @@ impl eframe::App for VideoApp { }); }); - // Timestamp overlay: user timestamp, current timestamp, and latency. - // We cache the last-known user timestamp so the overlay doesn't flicker - // when the shared state momentarily has `None` between frame swaps. + // Timestamp overlay: publish, receive, upload, and paint milestones. + // `Paint` is the time when the callback issued the draw call, not the + // exact physical scan-out time on the display. if self.display_timestamp { - { - let s = self.shared.lock(); - if let Some(ts) = s.user_timestamp_us { - self.cached_user_timestamp_us = Some(ts); - } - if let Some(fid) = s.frame_id { - self.cached_frame_id = Some(fid); + let s = self.shared.lock(); + let frame_id = s.last_painted_frame_id.or(s.frame_id); + let publish_us = s.last_painted_user_timestamp_us.or(s.user_timestamp_us); + let receive_us = s.last_painted_receive_us.or(s.received_at_us); + let upload_us = s.last_painted_upload_us.or(s.last_uploaded_us); + let paint_us = s.last_painted_us; + let to_i420_ms = s.latest_to_i420_ms; + let pack_ms = s.latest_pack_ms; + let upload_ms = s.last_upload_ms; + drop(s); + + if publish_us.is_some() || frame_id.is_some() { + if self.timestamp_metrics_last_update.elapsed() >= Duration::from_millis(500) + || self.timestamp_metrics_text.is_empty() + { + self.timestamp_metrics_text = format!( + "Pub->Recv: {}\nRecv->Up: {}\nUp->Paint: {}\nPub->Paint: {}\nPaint->Now: {}\nto_i420: {:.2}ms\nPack: 
{:.2}ms\nUpload: {:.2}ms", + format_delta_ms(publish_us, receive_us), + format_delta_ms(receive_us, upload_us), + format_delta_ms(upload_us, paint_us), + format_delta_ms(publish_us, paint_us), + format_delta_ms(paint_us, Some(current_timestamp_us())), + to_i420_ms, + pack_ms, + upload_ms, + ); + self.timestamp_metrics_last_update = Instant::now(); } - } - if let Some(user_ts) = self.cached_user_timestamp_us { + + let frame_id_line = match frame_id { + Some(fid) => format!("Frame ID: {}", fid), + None => "Frame ID: N/A".to_string(), + }; + let timestamp_overlay_text = format!( + "{}\nPublish: {}\nReceive: {}\nUpload: {}\nPaint: {}\nNow: {}\n{}", + frame_id_line, + format_optional_timestamp_us(publish_us), + format_optional_timestamp_us(receive_us), + format_optional_timestamp_us(upload_us), + format_optional_timestamp_us(paint_us), + format_timestamp_us(current_timestamp_us()), + self.timestamp_metrics_text, + ); + egui::Area::new("timestamp_hud".into()) .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 10.0)) .interactable(false) .show(ctx, |ui| { - let now_us = current_timestamp_us(); - - // Update the cached latency display at ~2 Hz so it's readable. 
- if self.latency_last_update.elapsed() >= Duration::from_millis(500) { - let delta_ms = (now_us - user_ts) as f64 / 1000.0; - self.latency_display = format!("{:.1}ms", delta_ms); - self.latency_last_update = Instant::now(); - } - - let frame_id_line = match self.cached_frame_id { - Some(fid) => format!("Frame ID: {}", fid), - None => "Frame ID: N/A".to_string(), - }; - let lines = format!( - "{}\nPublish: {}\nSubscribe: {}\nLatency: {}", - frame_id_line, - format_timestamp_us(user_ts), - format_timestamp_us(now_us), - self.latency_display, - ); egui::Frame::NONE .fill(egui::Color32::from_black_alpha(140)) .corner_radius(egui::CornerRadius::same(4)) @@ -604,7 +678,7 @@ impl eframe::App for VideoApp { .show(ui, |ui| { ui.add( egui::Label::new( - egui::RichText::new(lines) + egui::RichText::new(×tamp_overlay_text) .color(egui::Color32::WHITE) .monospace(), ) @@ -612,6 +686,8 @@ impl eframe::App for VideoApp { ); }); }); + } else { + self.timestamp_metrics_text.clear(); } } @@ -722,17 +798,29 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let shared = Arc::new(Mutex::new(SharedYuv { width: 0, height: 0, - stride_y: 0, - stride_u: 0, - stride_v: 0, + y_bytes_per_row: 0, + uv_bytes_per_row: 0, y: Vec::new(), u: Vec::new(), v: Vec::new(), codec: String::new(), fps: 0.0, dirty: false, + received_at_us: None, user_timestamp_us: None, frame_id: None, + latest_to_i420_ms: 0.0, + latest_pack_ms: 0.0, + last_uploaded_frame_id: None, + last_uploaded_user_timestamp_us: None, + last_uploaded_receive_us: None, + last_uploaded_us: None, + last_upload_ms: 0.0, + last_painted_frame_id: None, + last_painted_user_timestamp_us: None, + last_painted_receive_us: None, + last_painted_upload_us: None, + last_painted_us: None, })); // Subscribe to room events: on first video track, start sink task @@ -785,12 +873,10 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { ctrl_c_received: ctrl_c_received.clone(), locked_aspect: None, display_timestamp: 
args.display_timestamp, - latency_display: String::new(), - latency_last_update: Instant::now(), - cached_user_timestamp_us: None, - cached_frame_id: None, + timestamp_metrics_text: String::new(), + timestamp_metrics_last_update: Instant::now(), }; - let native_options = eframe::NativeOptions::default(); + let native_options = eframe::NativeOptions { vsync: false, ..Default::default() }; eframe::run_native( "LiveKit Video Subscriber", native_options, @@ -824,6 +910,13 @@ struct YuvGpuState { y_pad_w: u32, uv_pad_w: u32, dims: (u32, u32), + upload_y: Vec, + upload_u: Vec, + upload_v: Vec, + uploaded_frame_id: Option, + uploaded_user_timestamp_us: Option, + uploaded_receive_us: Option, + uploaded_at_us: Option, } impl YuvGpuState { @@ -869,6 +962,32 @@ fn align_up(value: u32, alignment: u32) -> u32 { ((value + alignment - 1) / alignment) * alignment } +fn resize_reused_buffer(buf: &mut Vec, len: usize) { + if buf.len() != len { + buf.resize(len, 0); + } +} + +fn pack_plane( + src: &[u8], + src_stride: u32, + row_width: u32, + rows: u32, + dst_stride: u32, + dst: &mut Vec, +) { + resize_reused_buffer(dst, (dst_stride * rows) as usize); + for row in 0..rows { + let src_off = (row * src_stride) as usize; + let dst_off = (row * dst_stride) as usize; + let row_end = dst_off + row_width as usize; + dst[dst_off..row_end].copy_from_slice(&src[src_off..src_off + row_width as usize]); + if dst_stride > row_width { + dst[row_end..dst_off + dst_stride as usize].fill(0); + } + } +} + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] struct ParamsUniform { @@ -1069,24 +1188,43 @@ impl CallbackTrait for YuvPaintCallback { y_pad_w: 256, uv_pad_w: 256, dims: (0, 0), + upload_y: Vec::new(), + upload_u: Vec::new(), + upload_v: Vec::new(), + uploaded_frame_id: None, + uploaded_user_timestamp_us: None, + uploaded_receive_us: None, + uploaded_at_us: None, }; resources.insert(new_state); } let state = resources.get_mut::().unwrap(); - // Upload planes when marked dirty - 
// Recreate textures/bind group on size change - if state.dims != (shared.width, shared.height) { - let y_pad_w = align_up(shared.width, 256); - let uv_w = (shared.width + 1) / 2; + let dims = (shared.width, shared.height); + let upload_row_bytes = (shared.y_bytes_per_row, shared.uv_bytes_per_row); + let dirty_frame_meta = ( + shared.frame_id, + shared.user_timestamp_us, + shared.received_at_us, + ); + let has_dirty_frame = if shared.dirty { + std::mem::swap(&mut state.upload_y, &mut shared.y); + std::mem::swap(&mut state.upload_u, &mut shared.u); + std::mem::swap(&mut state.upload_v, &mut shared.v); + shared.dirty = false; + true + } else { + false + }; + drop(shared); + + // Recreate textures/bind group on size change. + if state.dims != dims { + let y_pad_w = align_up(dims.0, 256); + let uv_w = (dims.0 + 1) / 2; let uv_pad_w = align_up(uv_w, 256); - let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = YuvGpuState::create_textures( - device, - shared.width, - shared.height, - y_pad_w, - uv_pad_w, - ); + let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = + YuvGpuState::create_textures(device, dims.0, dims.1, y_pad_w, uv_pad_w); let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { label: Some("yuv_bind_group"), layout: &state.bind_layout, @@ -1122,106 +1260,101 @@ impl CallbackTrait for YuvPaintCallback { state.bind_group = bind_group; state.y_pad_w = y_pad_w; state.uv_pad_w = uv_pad_w; - state.dims = (shared.width, shared.height); + state.dims = dims; } - if shared.dirty { - let y_bytes_per_row = align_up(shared.width, 256); - let uv_w = (shared.width + 1) / 2; - let uv_h = (shared.height + 1) / 2; - let uv_bytes_per_row = align_up(uv_w, 256); + if has_dirty_frame { + let upload_started = Instant::now(); + let uv_w = (dims.0 + 1) / 2; + let uv_h = (dims.1 + 1) / 2; - // Pack and upload Y - if shared.stride_y >= shared.width { - let mut packed = vec![0u8; (y_bytes_per_row * shared.height) as usize]; - for row in 0..shared.height { - let src = - 
&shared.y[(row * shared.stride_y) as usize..][..shared.width as usize]; - let dst_off = (row * y_bytes_per_row) as usize; - packed[dst_off..dst_off + shared.width as usize].copy_from_slice(src); - } + if upload_row_bytes.0 >= dims.0 { queue.write_texture( - wgpu::ImageCopyTexture { + wgpu::TexelCopyTextureInfo { texture: &state.y_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All, }, - &packed, - wgpu::ImageDataLayout { + &state.upload_y, + wgpu::TexelCopyBufferLayout { offset: 0, - bytes_per_row: Some(y_bytes_per_row), - rows_per_image: Some(shared.height), + bytes_per_row: Some(upload_row_bytes.0), + rows_per_image: Some(dims.1), }, wgpu::Extent3d { - width: state.y_pad_w, - height: shared.height, + width: dims.0, + height: dims.1, depth_or_array_layers: 1, }, ); } - // Pack and upload U,V - if shared.stride_u >= uv_w && shared.stride_v >= uv_w { - let mut packed_u = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; - let mut packed_v = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; - for row in 0..uv_h { - let src_u = &shared.u[(row * shared.stride_u) as usize..][..uv_w as usize]; - let src_v = &shared.v[(row * shared.stride_v) as usize..][..uv_w as usize]; - let dst_off = (row * uv_bytes_per_row) as usize; - packed_u[dst_off..dst_off + uv_w as usize].copy_from_slice(src_u); - packed_v[dst_off..dst_off + uv_w as usize].copy_from_slice(src_v); - } + if upload_row_bytes.1 >= uv_w { queue.write_texture( - wgpu::ImageCopyTexture { + wgpu::TexelCopyTextureInfo { texture: &state.u_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All, }, - &packed_u, - wgpu::ImageDataLayout { + &state.upload_u, + wgpu::TexelCopyBufferLayout { offset: 0, - bytes_per_row: Some(uv_bytes_per_row), + bytes_per_row: Some(upload_row_bytes.1), rows_per_image: Some(uv_h), }, wgpu::Extent3d { - width: state.uv_pad_w, + width: uv_w, height: uv_h, depth_or_array_layers: 1, }, ); queue.write_texture( - wgpu::ImageCopyTexture { + 
wgpu::TexelCopyTextureInfo { texture: &state.v_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All, }, - &packed_v, - wgpu::ImageDataLayout { + &state.upload_v, + wgpu::TexelCopyBufferLayout { offset: 0, - bytes_per_row: Some(uv_bytes_per_row), + bytes_per_row: Some(upload_row_bytes.1), rows_per_image: Some(uv_h), }, wgpu::Extent3d { - width: state.uv_pad_w, + width: uv_w, height: uv_h, depth_or_array_layers: 1, }, ); } - // Update params uniform - let params = ParamsUniform { - src_w: shared.width, - src_h: shared.height, - y_tex_w: state.y_pad_w, - uv_tex_w: state.uv_pad_w, - }; - queue.write_buffer(&state.params_buf, 0, bytemuck::bytes_of(¶ms)); + queue.write_buffer( + &state.params_buf, + 0, + bytemuck::bytes_of(&ParamsUniform { + src_w: dims.0, + src_h: dims.1, + y_tex_w: state.y_pad_w, + uv_tex_w: state.uv_pad_w, + }), + ); - shared.dirty = false; + let uploaded_at_us = current_timestamp_us(); + let upload_ms = upload_started.elapsed().as_secs_f64() * 1000.0; + state.uploaded_frame_id = dirty_frame_meta.0; + state.uploaded_user_timestamp_us = dirty_frame_meta.1; + state.uploaded_receive_us = dirty_frame_meta.2; + state.uploaded_at_us = Some(uploaded_at_us); + + let mut shared = self.shared.lock(); + shared.last_uploaded_frame_id = dirty_frame_meta.0; + shared.last_uploaded_user_timestamp_us = dirty_frame_meta.1; + shared.last_uploaded_receive_us = dirty_frame_meta.2; + shared.last_uploaded_us = Some(uploaded_at_us); + shared.last_upload_ms = upload_ms as f32; } Vec::new() @@ -1233,20 +1366,10 @@ impl CallbackTrait for YuvPaintCallback { render_pass: &mut wgpu::RenderPass<'static>, resources: &egui_wgpu_backend::CallbackResources, ) { - // Acquire device/queue via screen_descriptor? 
Not available; use resources to fetch our state - let shared = self.shared.lock(); - if shared.width == 0 || shared.height == 0 { - return; - } - - // Build pipeline and textures on first paint or on resize let Some(state) = resources.get::() else { - // prepare may not have created the state yet (race with first frame); skip this paint return; }; - - if state.dims != (shared.width, shared.height) { - // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild + if state.dims == (0, 0) { return; } @@ -1254,5 +1377,13 @@ impl CallbackTrait for YuvPaintCallback { render_pass.set_bind_group(0, &state.bind_group, &[]); // Fullscreen triangle without vertex buffer render_pass.draw(0..3, 0..1); + + let painted_at_us = current_timestamp_us(); + let mut shared = self.shared.lock(); + shared.last_painted_frame_id = state.uploaded_frame_id; + shared.last_painted_user_timestamp_us = state.uploaded_user_timestamp_us; + shared.last_painted_receive_us = state.uploaded_receive_us; + shared.last_painted_upload_us = state.uploaded_at_us; + shared.last_painted_us = Some(painted_at_us); } } diff --git a/examples/local_video/src/timestamp_burn.rs b/examples/local_video/src/timestamp_burn.rs index 8a86dd3a7..7d4cc971c 100644 --- a/examples/local_video/src/timestamp_burn.rs +++ b/examples/local_video/src/timestamp_burn.rs @@ -4,7 +4,7 @@ const TIMESTAMP_TEXT_LEN: usize = 23; // YYYY-MM-DD HH:MM:SS:SSS const TIMESTAMP_GLYPH_COUNT: usize = 13; // 0-9, :, -, space const TIMESTAMP_GLYPH_WIDTH: usize = 5; const TIMESTAMP_GLYPH_HEIGHT: usize = 7; -const TIMESTAMP_GLYPH_SCALE: usize = 2; +const TIMESTAMP_GLYPH_SCALE: usize = 4; const TIMESTAMP_GLYPH_SPACING: usize = 2; const TIMESTAMP_PADDING_X: usize = 4; const TIMESTAMP_PADDING_Y: usize = 4; diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/user_timestamp.h index dc7449573..fa65a2a02 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ 
b/webrtc-sys/include/livekit/user_timestamp.h @@ -59,9 +59,14 @@ constexpr size_t kTrailerEnvelopeSize = 5; constexpr uint8_t kTagTimestampUs = 0x01; // value: 8 bytes big-endian int64 constexpr uint8_t kTagFrameId = 0x02; // value: 4 bytes big-endian uint32 -// Current trailer size with both TLV elements: -// (1+1+8) + (1+1+4) + 5 envelope = 21 bytes -constexpr size_t kUserTimestampTrailerSize = 21; +constexpr size_t kTimestampTlvSize = 10; // tag + len + 8-byte value +constexpr size_t kFrameIdTlvSize = 6; // tag + len + 4-byte value + +// Trailer size varies because frame_id is omitted when it is unset (0). +constexpr size_t kUserTimestampTrailerMinSize = + kTimestampTlvSize + kTrailerEnvelopeSize; +constexpr size_t kUserTimestampTrailerMaxSize = + kTimestampTlvSize + kFrameIdTlvSize + kTrailerEnvelopeSize; struct FrameMetadata { int64_t user_timestamp_us; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/user_timestamp.cpp index 32d80f409..914989612 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/user_timestamp.cpp @@ -206,8 +206,12 @@ std::vector UserTimestampTransformer::AppendTrailer( rtc::ArrayView data, int64_t user_timestamp_us, uint32_t frame_id) { + const bool has_frame_id = frame_id != 0; + const size_t trailer_len = kTimestampTlvSize + + (has_frame_id ? 
kFrameIdTlvSize : 0) + + kTrailerEnvelopeSize; std::vector result; - result.reserve(data.size() + kUserTimestampTrailerSize); + result.reserve(data.size() + trailer_len); // Copy original data result.insert(result.end(), data.begin(), data.end()); @@ -223,17 +227,18 @@ std::vector UserTimestampTransformer::AppendTrailer( static_cast(((user_timestamp_us >> (i * 8)) & 0xFF) ^ 0xFF)); } - // TLV: frame_id (tag=0x02, len=4, 4 bytes big-endian) - result.push_back(kTagFrameId ^ 0xFF); - result.push_back(4 ^ 0xFF); - for (int i = 3; i >= 0; --i) { - result.push_back( - static_cast(((frame_id >> (i * 8)) & 0xFF) ^ 0xFF)); + if (has_frame_id) { + // TLV: frame_id (tag=0x02, len=4, 4 bytes big-endian) + result.push_back(kTagFrameId ^ 0xFF); + result.push_back(4 ^ 0xFF); + for (int i = 3; i >= 0; --i) { + result.push_back( + static_cast(((frame_id >> (i * 8)) & 0xFF) ^ 0xFF)); + } } // Envelope: trailer_len (1B, XORed) + magic (4B, NOT XORed) - result.push_back( - static_cast(kUserTimestampTrailerSize ^ 0xFF)); + result.push_back(static_cast(trailer_len ^ 0xFF)); result.insert(result.end(), std::begin(kUserTimestampMagic), std::end(kUserTimestampMagic)); From 86fdd788c45758def9ebd2371187f25ea22d71ad Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 12 Mar 2026 11:29:04 -0700 Subject: [PATCH 35/52] actually add changeset --- ..._support_for_frame_level_user_timestamp.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 .changeset/add_support_for_frame_level_user_timestamp.md diff --git a/.changeset/add_support_for_frame_level_user_timestamp.md b/.changeset/add_support_for_frame_level_user_timestamp.md new file mode 100644 index 000000000..bf6094aa7 --- /dev/null +++ b/.changeset/add_support_for_frame_level_user_timestamp.md @@ -0,0 +1,19 @@ +--- +livekit: minor +livekit-protocol: minor +livekit-api: minor +livekit-wakeword: no changelog additions +soxr-sys: no changelog additions +webrtc-sys-build: no changelog additions +webrtc-sys: minor 
+livekit-ffi: minor +yuv-sys: no changelog additions +libwebrtc: minor +imgproc: no changelog additions +--- + +# Add support for frame level user timestamp + +#890 by @chenosaurus + +- Add support to attach/parse frame level timestamps to VideoTracks as a custom payload trailer. \ No newline at end of file From 339f886813e68069135f85f060d347f29b14db6c Mon Sep 17 00:00:00 2001 From: github-actions <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 13 Mar 2026 02:25:24 +0000 Subject: [PATCH 36/52] generated protobuf --- livekit-protocol/src/livekit.rs | 100 ++-- livekit-protocol/src/livekit.serde.rs | 755 +++++++++++++++++++++----- 2 files changed, 670 insertions(+), 185 deletions(-) diff --git a/livekit-protocol/src/livekit.rs b/livekit-protocol/src/livekit.rs index eecf33a84..106c5c86e 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -610,8 +610,6 @@ pub struct TrackInfo { pub audio_features: ::prost::alloc::vec::Vec, #[prost(enumeration="BackupCodecPolicy", tag="20")] pub backup_codec_policy: i32, - #[prost(enumeration="PacketTrailerFeature", repeated, tag="21")] - pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct @@ -948,9 +946,6 @@ pub struct RpcRequest { pub response_timeout_ms: u32, #[prost(uint32, tag="5")] pub version: u32, - /// Compressed payload data. When set, this field is used instead of `payload`. 
- #[prost(bytes="vec", tag="6")] - pub compressed_payload: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -963,7 +958,7 @@ pub struct RpcAck { pub struct RpcResponse { #[prost(string, tag="1")] pub request_id: ::prost::alloc::string::String, - #[prost(oneof="rpc_response::Value", tags="2, 3, 4")] + #[prost(oneof="rpc_response::Value", tags="2, 3")] pub value: ::core::option::Option, } /// Nested message and enum types in `RpcResponse`. @@ -975,9 +970,6 @@ pub mod rpc_response { Payload(::prost::alloc::string::String), #[prost(message, tag="3")] Error(super::RpcError), - /// Compressed payload data. When set, this field is used instead of `payload`. - #[prost(bytes, tag="4")] - CompressedPayload(::prost::alloc::vec::Vec), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -2050,29 +2042,6 @@ impl AudioTrackFeature { } } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] -#[repr(i32)] -pub enum PacketTrailerFeature { - PtfUserTimestamp = 0, -} -impl PacketTrailerFeature { - /// String value of the enum field names used in the ProtoBuf definition. - /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. - pub fn as_str_name(&self) -> &'static str { - match self { - PacketTrailerFeature::PtfUserTimestamp => "PTF_USER_TIMESTAMP", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. 
- pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "PTF_USER_TIMESTAMP" => Some(Self::PtfUserTimestamp), - _ => None, - } - } -} /// composite using a web browser #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -3387,8 +3356,6 @@ pub struct AddTrackRequest { pub backup_codec_policy: i32, #[prost(enumeration="AudioTrackFeature", repeated, tag="17")] pub audio_features: ::prost::alloc::vec::Vec, - #[prost(enumeration="PacketTrailerFeature", repeated, tag="18")] - pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -4276,7 +4243,7 @@ pub struct JobState { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WorkerMessage { - #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7")] + #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7, 8, 9")] pub message: ::core::option::Option, } /// Nested message and enum types in `WorkerMessage`. @@ -4302,13 +4269,17 @@ pub mod worker_message { SimulateJob(super::SimulateJobRequest), #[prost(message, tag="7")] MigrateJob(super::MigrateJobRequest), + #[prost(message, tag="8")] + TextResponse(super::TextMessageResponse), + #[prost(message, tag="9")] + PushText(super::PushTextRequest), } } /// from Server to Worker #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ServerMessage { - #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4")] + #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4, 6")] pub message: ::core::option::Option, } /// Nested message and enum types in `ServerMessage`. 
@@ -4328,6 +4299,8 @@ pub mod server_message { Termination(super::JobTermination), #[prost(message, tag="4")] Pong(super::WorkerPong), + #[prost(message, tag="6")] + TextRequest(super::TextMessageRequest), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -4457,6 +4430,61 @@ pub struct JobTermination { #[prost(string, tag="1")] pub job_id: ::prost::alloc::string::String, } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AgentSessionState { + #[prost(uint64, tag="1")] + pub version: u64, + #[prost(oneof="agent_session_state::Data", tags="2, 3")] + pub data: ::core::option::Option, +} +/// Nested message and enum types in `AgentSessionState`. +pub mod agent_session_state { + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Data { + #[prost(bytes, tag="2")] + Snapshot(::prost::alloc::vec::Vec), + #[prost(bytes, tag="3")] + Delta(::prost::alloc::vec::Vec), + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TextMessageRequest { + #[prost(string, tag="1")] + pub message_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub session_id: ::prost::alloc::string::String, + #[prost(string, tag="3")] + pub agent_name: ::prost::alloc::string::String, + #[prost(string, tag="4")] + pub metadata: ::prost::alloc::string::String, + #[prost(message, optional, tag="5")] + pub session_state: ::core::option::Option, + #[prost(string, tag="6")] + pub text: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PushTextRequest { + /// The message_id of the TextMessageRequest that this push is for + #[prost(string, tag="1")] + pub message_id: ::prost::alloc::string::String, + #[prost(string, tag="2")] + pub content: ::prost::alloc::string::String, +} +#[allow(clippy::derive_partial_eq_without_eq)] 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TextMessageResponse { + /// Indicate the request is completed + #[prost(string, tag="1")] + pub message_id: ::prost::alloc::string::String, + #[prost(message, optional, tag="2")] + pub session_state: ::core::option::Option, + #[prost(string, tag="3")] + pub error: ::prost::alloc::string::String, +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum JobType { diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 21a8fcfa3..4d35e2a0d 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -561,9 +561,6 @@ impl serde::Serialize for AddTrackRequest { if !self.audio_features.is_empty() { len += 1; } - if !self.packet_trailer_features.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.AddTrackRequest", len)?; if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; @@ -628,13 +625,6 @@ impl serde::Serialize for AddTrackRequest { }).collect::, _>>()?; struct_ser.serialize_field("audioFeatures", &v)?; } - if !self.packet_trailer_features.is_empty() { - let v = self.packet_trailer_features.iter().cloned().map(|v| { - PacketTrailerFeature::try_from(v) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) - }).collect::, _>>()?; - struct_ser.serialize_field("packetTrailerFeatures", &v)?; - } struct_ser.end() } } @@ -667,8 +657,6 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { "backupCodecPolicy", "audio_features", "audioFeatures", - "packet_trailer_features", - "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -690,7 +678,6 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { Stream, BackupCodecPolicy, AudioFeatures, - PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -730,7 +717,6 @@ impl<'de> serde::Deserialize<'de> 
for AddTrackRequest { "stream" => Ok(GeneratedField::Stream), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), - "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -767,7 +753,6 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { let mut stream__ = None; let mut backup_codec_policy__ = None; let mut audio_features__ = None; - let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Cid => { @@ -876,12 +861,6 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { } audio_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); } - GeneratedField::PacketTrailerFeatures => { - if packet_trailer_features__.is_some() { - return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); - } - packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -905,7 +884,6 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { stream: stream__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), audio_features: audio_features__.unwrap_or_default(), - packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -1215,6 +1193,142 @@ impl<'de> serde::Deserialize<'de> for AgentDispatchState { deserializer.deserialize_struct("livekit.AgentDispatchState", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for AgentSessionState { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.version != 0 { + len += 1; + } + if self.data.is_some() { + len += 1; + } + let mut struct_ser = 
serializer.serialize_struct("livekit.AgentSessionState", len)?; + if self.version != 0 { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; + } + if let Some(v) = self.data.as_ref() { + match v { + agent_session_state::Data::Snapshot(v) => { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("snapshot", pbjson::private::base64::encode(&v).as_str())?; + } + agent_session_state::Data::Delta(v) => { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("delta", pbjson::private::base64::encode(&v).as_str())?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for AgentSessionState { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "version", + "snapshot", + "delta", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Version, + Snapshot, + Delta, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "version" => Ok(GeneratedField::Version), + "snapshot" => Ok(GeneratedField::Snapshot), + "delta" => Ok(GeneratedField::Delta), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct 
GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = AgentSessionState; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.AgentSessionState") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut version__ = None; + let mut data__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Version => { + if version__.is_some() { + return Err(serde::de::Error::duplicate_field("version")); + } + version__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Snapshot => { + if data__.is_some() { + return Err(serde::de::Error::duplicate_field("snapshot")); + } + data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Snapshot(x.0)); + } + GeneratedField::Delta => { + if data__.is_some() { + return Err(serde::de::Error::duplicate_field("delta")); + } + data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Delta(x.0)); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(AgentSessionState { + version: version__.unwrap_or_default(), + data: data__, + }) + } + } + deserializer.deserialize_struct("livekit.AgentSessionState", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for AliOssUpload { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -21511,74 +21625,6 @@ impl<'de> serde::Deserialize<'de> for MuteTrackRequest { deserializer.deserialize_struct("livekit.MuteTrackRequest", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for PacketTrailerFeature { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - let variant = match self { - 
Self::PtfUserTimestamp => "PTF_USER_TIMESTAMP", - }; - serializer.serialize_str(variant) - } -} -impl<'de> serde::Deserialize<'de> for PacketTrailerFeature { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "PTF_USER_TIMESTAMP", - ]; - - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = PacketTrailerFeature; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - fn visit_i64(self, v: i64) -> std::result::Result - where - E: serde::de::Error, - { - i32::try_from(v) - .ok() - .and_then(|x| x.try_into().ok()) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) - }) - } - - fn visit_u64(self, v: u64) -> std::result::Result - where - E: serde::de::Error, - { - i32::try_from(v) - .ok() - .and_then(|x| x.try_into().ok()) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) - }) - } - - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "PTF_USER_TIMESTAMP" => Ok(PacketTrailerFeature::PtfUserTimestamp), - _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), - } - } - } - deserializer.deserialize_any(GeneratedVisitor) - } -} impl serde::Serialize for Pagination { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -24293,6 +24339,119 @@ impl<'de> serde::Deserialize<'de> for PublishDataTrackResponse { deserializer.deserialize_struct("livekit.PublishDataTrackResponse", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for PushTextRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.message_id.is_empty() { + len += 
1; + } + if !self.content.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.PushTextRequest", len)?; + if !self.message_id.is_empty() { + struct_ser.serialize_field("messageId", &self.message_id)?; + } + if !self.content.is_empty() { + struct_ser.serialize_field("content", &self.content)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PushTextRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "message_id", + "messageId", + "content", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + MessageId, + Content, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "messageId" | "message_id" => Ok(GeneratedField::MessageId), + "content" => Ok(GeneratedField::Content), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PushTextRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.PushTextRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut message_id__ = None; + let mut content__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::MessageId => { + if message_id__.is_some() { + return Err(serde::de::Error::duplicate_field("messageId")); + } + message_id__ = Some(map_.next_value()?); + } + GeneratedField::Content => { + if content__.is_some() { + return Err(serde::de::Error::duplicate_field("content")); + } + content__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PushTextRequest { + message_id: message_id__.unwrap_or_default(), + content: content__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.PushTextRequest", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for RtcpSenderReportState { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -29277,9 +29436,6 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { len += 1; } - if !self.compressed_payload.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.RpcRequest", len)?; if !self.id.is_empty() { struct_ser.serialize_field("id", &self.id)?; @@ -29296,11 +29452,6 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { struct_ser.serialize_field("version", &self.version)?; } - if !self.compressed_payload.is_empty() { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&self.compressed_payload).as_str())?; - } struct_ser.end() } } @@ -29317,8 +29468,6 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { "response_timeout_ms", "responseTimeoutMs", "version", - "compressed_payload", - "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29328,7 +29477,6 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Payload, ResponseTimeoutMs, Version, - CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29356,7 +29504,6 @@ impl<'de> serde::Deserialize<'de> for 
RpcRequest { "payload" => Ok(GeneratedField::Payload), "responseTimeoutMs" | "response_timeout_ms" => Ok(GeneratedField::ResponseTimeoutMs), "version" => Ok(GeneratedField::Version), - "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29381,7 +29528,6 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { let mut payload__ = None; let mut response_timeout_ms__ = None; let mut version__ = None; - let mut compressed_payload__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Id => { @@ -29418,14 +29564,6 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } - GeneratedField::CompressedPayload => { - if compressed_payload__.is_some() { - return Err(serde::de::Error::duplicate_field("compressedPayload")); - } - compressed_payload__ = - Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) - ; - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -29437,7 +29575,6 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { payload: payload__.unwrap_or_default(), response_timeout_ms: response_timeout_ms__.unwrap_or_default(), version: version__.unwrap_or_default(), - compressed_payload: compressed_payload__.unwrap_or_default(), }) } } @@ -29470,11 +29607,6 @@ impl serde::Serialize for RpcResponse { rpc_response::Value::Error(v) => { struct_ser.serialize_field("error", v)?; } - rpc_response::Value::CompressedPayload(v) => { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&v).as_str())?; - } } } struct_ser.end() @@ -29491,8 +29623,6 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId", "payload", "error", - "compressed_payload", - "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29500,7 +29630,6 @@ impl<'de> 
serde::Deserialize<'de> for RpcResponse { RequestId, Payload, Error, - CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29526,7 +29655,6 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId" | "request_id" => Ok(GeneratedField::RequestId), "payload" => Ok(GeneratedField::Payload), "error" => Ok(GeneratedField::Error), - "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29569,12 +29697,6 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { value__ = map_.next_value::<::std::option::Option<_>>()?.map(rpc_response::Value::Error) ; } - GeneratedField::CompressedPayload => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("compressedPayload")); - } - value__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| rpc_response::Value::CompressedPayload(x.0)); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -36010,6 +36132,9 @@ impl serde::Serialize for ServerMessage { server_message::Message::Pong(v) => { struct_ser.serialize_field("pong", v)?; } + server_message::Message::TextRequest(v) => { + struct_ser.serialize_field("textRequest", v)?; + } } } struct_ser.end() @@ -36027,6 +36152,8 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { "assignment", "termination", "pong", + "text_request", + "textRequest", ]; #[allow(clippy::enum_variant_names)] @@ -36036,6 +36163,7 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { Assignment, Termination, Pong, + TextRequest, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -36063,6 +36191,7 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { "assignment" => Ok(GeneratedField::Assignment), "termination" => Ok(GeneratedField::Termination), "pong" => Ok(GeneratedField::Pong), + "textRequest" | "text_request" => Ok(GeneratedField::TextRequest), _ => 
Ok(GeneratedField::__SkipField__), } } @@ -36118,6 +36247,13 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { return Err(serde::de::Error::duplicate_field("pong")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::Pong) +; + } + GeneratedField::TextRequest => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("textRequest")); + } + message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::TextRequest) ; } GeneratedField::__SkipField__ => { @@ -40424,6 +40560,321 @@ impl<'de> serde::Deserialize<'de> for SyncState { deserializer.deserialize_struct("livekit.SyncState", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for TextMessageRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.message_id.is_empty() { + len += 1; + } + if !self.session_id.is_empty() { + len += 1; + } + if !self.agent_name.is_empty() { + len += 1; + } + if !self.metadata.is_empty() { + len += 1; + } + if self.session_state.is_some() { + len += 1; + } + if !self.text.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.TextMessageRequest", len)?; + if !self.message_id.is_empty() { + struct_ser.serialize_field("messageId", &self.message_id)?; + } + if !self.session_id.is_empty() { + struct_ser.serialize_field("sessionId", &self.session_id)?; + } + if !self.agent_name.is_empty() { + struct_ser.serialize_field("agentName", &self.agent_name)?; + } + if !self.metadata.is_empty() { + struct_ser.serialize_field("metadata", &self.metadata)?; + } + if let Some(v) = self.session_state.as_ref() { + struct_ser.serialize_field("sessionState", v)?; + } + if !self.text.is_empty() { + struct_ser.serialize_field("text", &self.text)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TextMessageRequest { + 
#[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "message_id", + "messageId", + "session_id", + "sessionId", + "agent_name", + "agentName", + "metadata", + "session_state", + "sessionState", + "text", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + MessageId, + SessionId, + AgentName, + Metadata, + SessionState, + Text, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "messageId" | "message_id" => Ok(GeneratedField::MessageId), + "sessionId" | "session_id" => Ok(GeneratedField::SessionId), + "agentName" | "agent_name" => Ok(GeneratedField::AgentName), + "metadata" => Ok(GeneratedField::Metadata), + "sessionState" | "session_state" => Ok(GeneratedField::SessionState), + "text" => Ok(GeneratedField::Text), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TextMessageRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.TextMessageRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut message_id__ = None; + let mut session_id__ = None; + let mut agent_name__ = None; + let mut metadata__ = None; + let mut 
session_state__ = None; + let mut text__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::MessageId => { + if message_id__.is_some() { + return Err(serde::de::Error::duplicate_field("messageId")); + } + message_id__ = Some(map_.next_value()?); + } + GeneratedField::SessionId => { + if session_id__.is_some() { + return Err(serde::de::Error::duplicate_field("sessionId")); + } + session_id__ = Some(map_.next_value()?); + } + GeneratedField::AgentName => { + if agent_name__.is_some() { + return Err(serde::de::Error::duplicate_field("agentName")); + } + agent_name__ = Some(map_.next_value()?); + } + GeneratedField::Metadata => { + if metadata__.is_some() { + return Err(serde::de::Error::duplicate_field("metadata")); + } + metadata__ = Some(map_.next_value()?); + } + GeneratedField::SessionState => { + if session_state__.is_some() { + return Err(serde::de::Error::duplicate_field("sessionState")); + } + session_state__ = map_.next_value()?; + } + GeneratedField::Text => { + if text__.is_some() { + return Err(serde::de::Error::duplicate_field("text")); + } + text__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(TextMessageRequest { + message_id: message_id__.unwrap_or_default(), + session_id: session_id__.unwrap_or_default(), + agent_name: agent_name__.unwrap_or_default(), + metadata: metadata__.unwrap_or_default(), + session_state: session_state__, + text: text__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.TextMessageRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TextMessageResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.message_id.is_empty() { + len += 1; + } + if self.session_state.is_some() { + len += 1; + } + if !self.error.is_empty() { + len += 1; + } + let mut 
struct_ser = serializer.serialize_struct("livekit.TextMessageResponse", len)?; + if !self.message_id.is_empty() { + struct_ser.serialize_field("messageId", &self.message_id)?; + } + if let Some(v) = self.session_state.as_ref() { + struct_ser.serialize_field("sessionState", v)?; + } + if !self.error.is_empty() { + struct_ser.serialize_field("error", &self.error)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TextMessageResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "message_id", + "messageId", + "session_state", + "sessionState", + "error", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + MessageId, + SessionState, + Error, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "messageId" | "message_id" => Ok(GeneratedField::MessageId), + "sessionState" | "session_state" => Ok(GeneratedField::SessionState), + "error" => Ok(GeneratedField::Error), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TextMessageResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.TextMessageResponse") + } + + fn visit_map(self, mut map_: V) -> 
std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut message_id__ = None; + let mut session_state__ = None; + let mut error__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::MessageId => { + if message_id__.is_some() { + return Err(serde::de::Error::duplicate_field("messageId")); + } + message_id__ = Some(map_.next_value()?); + } + GeneratedField::SessionState => { + if session_state__.is_some() { + return Err(serde::de::Error::duplicate_field("sessionState")); + } + session_state__ = map_.next_value()?; + } + GeneratedField::Error => { + if error__.is_some() { + return Err(serde::de::Error::duplicate_field("error")); + } + error__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(TextMessageResponse { + message_id: message_id__.unwrap_or_default(), + session_state: session_state__, + error: error__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.TextMessageResponse", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for TimeSeriesMetric { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -41350,9 +41801,6 @@ impl serde::Serialize for TrackInfo { if self.backup_codec_policy != 0 { len += 1; } - if !self.packet_trailer_features.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.TrackInfo", len)?; if !self.sid.is_empty() { struct_ser.serialize_field("sid", &self.sid)?; @@ -41426,13 +41874,6 @@ impl serde::Serialize for TrackInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.backup_codec_policy)))?; struct_ser.serialize_field("backupCodecPolicy", &v)?; } - if !self.packet_trailer_features.is_empty() { - let v = self.packet_trailer_features.iter().cloned().map(|v| { - PacketTrailerFeature::try_from(v) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) - }).collect::, _>>()?; - 
struct_ser.serialize_field("packetTrailerFeatures", &v)?; - } struct_ser.end() } } @@ -41468,8 +41909,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "audioFeatures", "backup_codec_policy", "backupCodecPolicy", - "packet_trailer_features", - "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -41494,7 +41933,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { Version, AudioFeatures, BackupCodecPolicy, - PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -41537,7 +41975,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "version" => Ok(GeneratedField::Version), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), - "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -41577,7 +42014,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { let mut version__ = None; let mut audio_features__ = None; let mut backup_codec_policy__ = None; - let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Sid => { @@ -41704,12 +42140,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { } backup_codec_policy__ = Some(map_.next_value::()? 
as i32); } - GeneratedField::PacketTrailerFeatures => { - if packet_trailer_features__.is_some() { - return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); - } - packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -41736,7 +42166,6 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { version: version__, audio_features: audio_features__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), - packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -47928,6 +48357,12 @@ impl serde::Serialize for WorkerMessage { worker_message::Message::MigrateJob(v) => { struct_ser.serialize_field("migrateJob", v)?; } + worker_message::Message::TextResponse(v) => { + struct_ser.serialize_field("textResponse", v)?; + } + worker_message::Message::PushText(v) => { + struct_ser.serialize_field("pushText", v)?; + } } } struct_ser.end() @@ -47951,6 +48386,10 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "simulateJob", "migrate_job", "migrateJob", + "text_response", + "textResponse", + "push_text", + "pushText", ]; #[allow(clippy::enum_variant_names)] @@ -47962,6 +48401,8 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { Ping, SimulateJob, MigrateJob, + TextResponse, + PushText, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -47991,6 +48432,8 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "ping" => Ok(GeneratedField::Ping), "simulateJob" | "simulate_job" => Ok(GeneratedField::SimulateJob), "migrateJob" | "migrate_job" => Ok(GeneratedField::MigrateJob), + "textResponse" | "text_response" => Ok(GeneratedField::TextResponse), + "pushText" | "push_text" => Ok(GeneratedField::PushText), _ => Ok(GeneratedField::__SkipField__), } } @@ -48060,6 +48503,20 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { return 
Err(serde::de::Error::duplicate_field("migrateJob")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::MigrateJob) +; + } + GeneratedField::TextResponse => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("textResponse")); + } + message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::TextResponse) +; + } + GeneratedField::PushText => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("pushText")); + } + message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::PushText) ; } GeneratedField::__SkipField__ => { From 10b9220af3f1c81dc4bff86e2bb61b5e1728a57b Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 13 Mar 2026 13:42:31 -0700 Subject: [PATCH 37/52] rename to packet trailer --- ...support_for_frame_level_packet_trailer.md} | 2 +- examples/local_video/src/publisher.rs | 2 +- examples/local_video/src/subscriber.rs | 139 ++---------------- libwebrtc/src/lib.rs | 2 +- libwebrtc/src/native/frame_cryptor.rs | 6 +- libwebrtc/src/native/mod.rs | 2 +- .../{user_timestamp.rs => packet_trailer.rs} | 36 ++--- libwebrtc/src/native/video_source.rs | 17 +-- libwebrtc/src/native/video_stream.rs | 18 +-- libwebrtc/src/native/video_track.rs | 18 +-- libwebrtc/src/video_source.rs | 10 +- libwebrtc/src/video_stream.rs | 8 +- libwebrtc/src/video_track.rs | 14 +- livekit-ffi/src/conversion/room.rs | 2 +- livekit/src/room/e2ee/manager.rs | 34 ++--- livekit/src/room/options.rs | 4 +- .../src/room/participant/local_participant.rs | 14 +- livekit/src/room/track/local_video_track.rs | 18 +-- livekit/src/room/track/remote_video_track.rs | 14 +- webrtc-sys/build.rs | 4 +- webrtc-sys/include/livekit/frame_cryptor.h | 8 +- .../{user_timestamp.h => packet_trailer.h} | 40 ++--- webrtc-sys/include/livekit/video_track.h | 16 +- webrtc-sys/src/frame_cryptor.cpp | 6 +- webrtc-sys/src/frame_cryptor.rs | 8 +- webrtc-sys/src/lib.rs | 2 +- 
...{user_timestamp.cpp => packet_trailer.cpp} | 82 +++++------ .../{user_timestamp.rs => packet_trailer.rs} | 32 ++-- webrtc-sys/src/video_track.cpp | 26 ++-- webrtc-sys/src/video_track.rs | 10 +- 30 files changed, 237 insertions(+), 357 deletions(-) rename .changeset/{add_support_for_frame_level_user_timestamp.md => add_support_for_frame_level_packet_trailer.md} (90%) rename libwebrtc/src/native/{user_timestamp.rs => packet_trailer.rs} (84%) rename webrtc-sys/include/livekit/{user_timestamp.h => packet_trailer.h} (86%) rename webrtc-sys/src/{user_timestamp.cpp => packet_trailer.cpp} (84%) rename webrtc-sys/src/{user_timestamp.rs => packet_trailer.rs} (69%) diff --git a/.changeset/add_support_for_frame_level_user_timestamp.md b/.changeset/add_support_for_frame_level_packet_trailer.md similarity index 90% rename from .changeset/add_support_for_frame_level_user_timestamp.md rename to .changeset/add_support_for_frame_level_packet_trailer.md index bf6094aa7..7a312dde5 100644 --- a/.changeset/add_support_for_frame_level_user_timestamp.md +++ b/.changeset/add_support_for_frame_level_packet_trailer.md @@ -12,7 +12,7 @@ libwebrtc: minor imgproc: no changelog additions --- -# Add support for frame level user timestamp +# Add support for frame level packet trailer #890 by @chenosaurus diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 78aeba206..8e54e184d 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -347,7 +347,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { source: TrackSource::Camera, simulcast: args.simulcast, video_codec: codec, - user_timestamp: args.attach_timestamp, + packet_trailer: args.attach_timestamp, video_encoding: Some(main_encoding.clone()), simulcast_layers: Some(simulcast_presets.clone()), ..Default::default() diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index bd8d778ed..696d8bd24 100644 --- 
a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -81,21 +81,6 @@ struct SharedYuv { user_timestamp_us: Option, /// Last received frame_id, if any. frame_id: Option, - /// Timing for the latest received frame on the subscriber thread. - latest_to_i420_ms: f32, - latest_pack_ms: f32, - /// Timing for the last frame uploaded to the GPU in `prepare()`. - last_uploaded_frame_id: Option, - last_uploaded_user_timestamp_us: Option, - last_uploaded_receive_us: Option, - last_uploaded_us: Option, - last_upload_ms: f32, - /// Timing for the last frame seen by the paint callback. - last_painted_frame_id: Option, - last_painted_user_timestamp_us: Option, - last_painted_receive_us: Option, - last_painted_upload_us: Option, - last_painted_us: Option, } #[derive(Clone)] @@ -224,13 +209,6 @@ fn format_optional_timestamp_us(ts_us: Option) -> String { ts_us.map(format_timestamp_us).unwrap_or_else(|| "N/A".to_string()) } -fn format_delta_ms(start_us: Option, end_us: Option) -> String { - match (start_us, end_us) { - (Some(start), Some(end)) => format!("{:.1}ms", (end - start) as f64 / 1000.0), - _ => "N/A".to_string(), - } -} - fn simulcast_state_full_dims(state: &Arc>) -> Option<(u32, u32)> { let sc = state.lock(); sc.full_dims @@ -346,10 +324,7 @@ async fn handle_track_subscribed( logged_first = true; } - // Convert to I420 on CPU, but keep planes separate for GPU sampling - let to_i420_started = Instant::now(); let i420 = frame.buffer.to_i420(); - let to_i420_ms = to_i420_started.elapsed().as_secs_f64() * 1000.0; let (sy, su, sv) = i420.strides(); let (dy, du, dv) = i420.data(); @@ -360,13 +335,9 @@ async fn handle_track_subscribed( let y_bytes_per_row = align_up(width, 256); let uv_bytes_per_row = align_up(uv_w, 256); - // Pre-pack planes into GPU-ready rows on the sink thread so prepare() - // can upload directly without another repack pass. 
- let pack_started = Instant::now(); pack_plane(dy, sy as u32, width, height, y_bytes_per_row, &mut y_buf); pack_plane(du, su as u32, uv_w, uv_h, uv_bytes_per_row, &mut u_buf); pack_plane(dv, sv as u32, uv_w, uv_h, uv_bytes_per_row, &mut v_buf); - let pack_ms = pack_started.elapsed().as_secs_f64() * 1000.0; // Swap buffers into shared state let mut s = shared2.lock(); @@ -400,8 +371,6 @@ async fn handle_track_subscribed( s.user_timestamp_us = frame.user_timestamp_us; s.frame_id = frame.frame_id; - s.latest_to_i420_ms = to_i420_ms as f32; - s.latest_pack_ms = pack_ms as f32; // Update smoothed FPS (~500ms window) fps_window_frames += 1; @@ -481,15 +450,6 @@ fn clear_hud_and_simulcast(shared: &Arc>, simulcast: &Arc, locked_aspect: Option, display_timestamp: bool, - timestamp_metrics_text: String, - timestamp_metrics_last_update: Instant, } impl eframe::App for VideoApp { @@ -619,52 +577,30 @@ impl eframe::App for VideoApp { }); }); - // Timestamp overlay: publish, receive, upload, and paint milestones. - // `Paint` is the time when the callback issued the draw call, not the - // exact physical scan-out time on the display. 
if self.display_timestamp { let s = self.shared.lock(); - let frame_id = s.last_painted_frame_id.or(s.frame_id); - let publish_us = s.last_painted_user_timestamp_us.or(s.user_timestamp_us); - let receive_us = s.last_painted_receive_us.or(s.received_at_us); - let upload_us = s.last_painted_upload_us.or(s.last_uploaded_us); - let paint_us = s.last_painted_us; - let to_i420_ms = s.latest_to_i420_ms; - let pack_ms = s.latest_pack_ms; - let upload_ms = s.last_upload_ms; + let frame_id = s.frame_id; + let publish_us = s.user_timestamp_us; + let receive_us = s.received_at_us; drop(s); if publish_us.is_some() || frame_id.is_some() { - if self.timestamp_metrics_last_update.elapsed() >= Duration::from_millis(500) - || self.timestamp_metrics_text.is_empty() - { - self.timestamp_metrics_text = format!( - "Pub->Recv: {}\nRecv->Up: {}\nUp->Paint: {}\nPub->Paint: {}\nPaint->Now: {}\nto_i420: {:.2}ms\nPack: {:.2}ms\nUpload: {:.2}ms", - format_delta_ms(publish_us, receive_us), - format_delta_ms(receive_us, upload_us), - format_delta_ms(upload_us, paint_us), - format_delta_ms(publish_us, paint_us), - format_delta_ms(paint_us, Some(current_timestamp_us())), - to_i420_ms, - pack_ms, - upload_ms, - ); - self.timestamp_metrics_last_update = Instant::now(); - } - let frame_id_line = match frame_id { Some(fid) => format!("Frame ID: {}", fid), None => "Frame ID: N/A".to_string(), }; + let latency = match (publish_us, receive_us) { + (Some(pub_ts), Some(recv_ts)) => { + format!("{:.1}ms", (recv_ts - pub_ts) as f64 / 1000.0) + } + _ => "N/A".to_string(), + }; let timestamp_overlay_text = format!( - "{}\nPublish: {}\nReceive: {}\nUpload: {}\nPaint: {}\nNow: {}\n{}", + "{}\nPublish: {}\nReceive: {}\nLatency: {}", frame_id_line, format_optional_timestamp_us(publish_us), format_optional_timestamp_us(receive_us), - format_optional_timestamp_us(upload_us), - format_optional_timestamp_us(paint_us), - format_timestamp_us(current_timestamp_us()), - self.timestamp_metrics_text, + latency, ); 
egui::Area::new("timestamp_hud".into()) @@ -686,8 +622,6 @@ impl eframe::App for VideoApp { ); }); }); - } else { - self.timestamp_metrics_text.clear(); } } @@ -809,18 +743,6 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { received_at_us: None, user_timestamp_us: None, frame_id: None, - latest_to_i420_ms: 0.0, - latest_pack_ms: 0.0, - last_uploaded_frame_id: None, - last_uploaded_user_timestamp_us: None, - last_uploaded_receive_us: None, - last_uploaded_us: None, - last_upload_ms: 0.0, - last_painted_frame_id: None, - last_painted_user_timestamp_us: None, - last_painted_receive_us: None, - last_painted_upload_us: None, - last_painted_us: None, })); // Subscribe to room events: on first video track, start sink task @@ -873,8 +795,6 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { ctrl_c_received: ctrl_c_received.clone(), locked_aspect: None, display_timestamp: args.display_timestamp, - timestamp_metrics_text: String::new(), - timestamp_metrics_last_update: Instant::now(), }; let native_options = eframe::NativeOptions { vsync: false, ..Default::default() }; eframe::run_native( @@ -913,10 +833,6 @@ struct YuvGpuState { upload_y: Vec, upload_u: Vec, upload_v: Vec, - uploaded_frame_id: Option, - uploaded_user_timestamp_us: Option, - uploaded_receive_us: Option, - uploaded_at_us: Option, } impl YuvGpuState { @@ -1191,10 +1107,6 @@ impl CallbackTrait for YuvPaintCallback { upload_y: Vec::new(), upload_u: Vec::new(), upload_v: Vec::new(), - uploaded_frame_id: None, - uploaded_user_timestamp_us: None, - uploaded_receive_us: None, - uploaded_at_us: None, }; resources.insert(new_state); } @@ -1202,11 +1114,6 @@ impl CallbackTrait for YuvPaintCallback { let dims = (shared.width, shared.height); let upload_row_bytes = (shared.y_bytes_per_row, shared.uv_bytes_per_row); - let dirty_frame_meta = ( - shared.frame_id, - shared.user_timestamp_us, - shared.received_at_us, - ); let has_dirty_frame = if shared.dirty { std::mem::swap(&mut 
state.upload_y, &mut shared.y); std::mem::swap(&mut state.upload_u, &mut shared.u); @@ -1264,7 +1171,6 @@ impl CallbackTrait for YuvPaintCallback { } if has_dirty_frame { - let upload_started = Instant::now(); let uv_w = (dims.0 + 1) / 2; let uv_h = (dims.1 + 1) / 2; @@ -1341,20 +1247,6 @@ impl CallbackTrait for YuvPaintCallback { uv_tex_w: state.uv_pad_w, }), ); - - let uploaded_at_us = current_timestamp_us(); - let upload_ms = upload_started.elapsed().as_secs_f64() * 1000.0; - state.uploaded_frame_id = dirty_frame_meta.0; - state.uploaded_user_timestamp_us = dirty_frame_meta.1; - state.uploaded_receive_us = dirty_frame_meta.2; - state.uploaded_at_us = Some(uploaded_at_us); - - let mut shared = self.shared.lock(); - shared.last_uploaded_frame_id = dirty_frame_meta.0; - shared.last_uploaded_user_timestamp_us = dirty_frame_meta.1; - shared.last_uploaded_receive_us = dirty_frame_meta.2; - shared.last_uploaded_us = Some(uploaded_at_us); - shared.last_upload_ms = upload_ms as f32; } Vec::new() @@ -1375,15 +1267,6 @@ impl CallbackTrait for YuvPaintCallback { render_pass.set_pipeline(&state.pipeline); render_pass.set_bind_group(0, &state.bind_group, &[]); - // Fullscreen triangle without vertex buffer render_pass.draw(0..3, 0..1); - - let painted_at_us = current_timestamp_us(); - let mut shared = self.shared.lock(); - shared.last_painted_frame_id = state.uploaded_frame_id; - shared.last_painted_user_timestamp_us = state.uploaded_user_timestamp_us; - shared.last_painted_receive_us = state.uploaded_receive_us; - shared.last_painted_upload_us = state.uploaded_at_us; - shared.last_painted_us = Some(painted_at_us); } } diff --git a/libwebrtc/src/lib.rs b/libwebrtc/src/lib.rs index c7a016793..38c79762f 100644 --- a/libwebrtc/src/lib.rs +++ b/libwebrtc/src/lib.rs @@ -69,7 +69,7 @@ pub mod native { pub use webrtc_sys::webrtc::ffi::create_random_uuid; pub use crate::imp::{ - apm, audio_mixer, audio_resampler, frame_cryptor, user_timestamp, yuv_helper, + apm, audio_mixer, 
audio_resampler, frame_cryptor, packet_trailer, yuv_helper, }; } diff --git a/libwebrtc/src/native/frame_cryptor.rs b/libwebrtc/src/native/frame_cryptor.rs index b3ae67a57..b3e2755fe 100644 --- a/libwebrtc/src/native/frame_cryptor.rs +++ b/libwebrtc/src/native/frame_cryptor.rs @@ -19,7 +19,7 @@ use parking_lot::Mutex; use webrtc_sys::frame_cryptor::{self as sys_fc}; use crate::{ - native::user_timestamp::UserTimestampHandler, peer_connection_factory::PeerConnectionFactory, + native::packet_trailer::PacketTrailerHandler, peer_connection_factory::PeerConnectionFactory, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, }; @@ -169,8 +169,8 @@ impl FrameCryptor { *self.observer.state_change_handler.lock() = handler; } - pub fn set_user_timestamp_handler(&self, handler: &UserTimestampHandler) { - self.sys_handle.set_user_timestamp_handler(handler.sys_handle()); + pub fn set_packet_trailer_handler(&self, handler: &PacketTrailerHandler) { + self.sys_handle.set_packet_trailer_handler(handler.sys_handle()); } } diff --git a/libwebrtc/src/native/mod.rs b/libwebrtc/src/native/mod.rs index f4d1700b4..c8106221e 100644 --- a/libwebrtc/src/native/mod.rs +++ b/libwebrtc/src/native/mod.rs @@ -34,7 +34,7 @@ pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; pub mod session_description; -pub mod user_timestamp; +pub mod packet_trailer; pub mod video_frame; pub mod video_source; pub mod video_stream; diff --git a/libwebrtc/src/native/user_timestamp.rs b/libwebrtc/src/native/packet_trailer.rs similarity index 84% rename from libwebrtc/src/native/user_timestamp.rs rename to libwebrtc/src/native/packet_trailer.rs index a809d5d1b..ef993a7a7 100644 --- a/libwebrtc/src/native/user_timestamp.rs +++ b/libwebrtc/src/native/packet_trailer.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! User timestamp support for end-to-end timestamp propagation. +//! 
Packet trailer support for end-to-end timestamp propagation. //! //! This module provides functionality to embed user-supplied timestamps //! in encoded video frames as trailers. The timestamps are preserved @@ -27,14 +27,14 @@ //! metadata via lookup_frame_metadata(rtp_timestamp). use cxx::SharedPtr; -use webrtc_sys::user_timestamp::ffi as sys_ut; +use webrtc_sys::packet_trailer::ffi as sys_pt; use crate::{ peer_connection_factory::PeerConnectionFactory, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, }; -/// Handler for user timestamp embedding/extraction on RTP streams. +/// Handler for packet trailer embedding/extraction on RTP streams. /// /// For sender side: Stores frame metadata keyed by capture timestamp /// and embeds them as 16-byte trailers on encoded frames before they @@ -44,11 +44,11 @@ use crate::{ /// For receiver side: Extracts frame metadata from received frames /// and makes them available for retrieval via `lookup_frame_metadata()`. #[derive(Clone)] -pub struct UserTimestampHandler { - sys_handle: SharedPtr, +pub struct PacketTrailerHandler { + sys_handle: SharedPtr, } -impl UserTimestampHandler { +impl PacketTrailerHandler { /// Enable or disable timestamp embedding/extraction. pub fn set_enabled(&self, enabled: bool) { self.sys_handle.set_enabled(enabled); @@ -63,12 +63,12 @@ impl UserTimestampHandler { /// Returns `Some((user_timestamp_us, frame_id))` if found, `None` otherwise. /// The entry is removed from the map after a successful lookup. 
pub fn lookup_frame_metadata(&self, rtp_timestamp: u32) -> Option<(i64, u32)> { - let ts = self.sys_handle.lookup_user_timestamp(rtp_timestamp); + let ts = self.sys_handle.lookup_timestamp(rtp_timestamp); if ts >= 0 { let frame_id = self.sys_handle.last_lookup_frame_id(); if ts > 2_000_000_000_000_000 || ts < 0 { log::warn!( - "[UserTS-FFI] C++ returned bad ts={} (0x{:016x}) fid={} rtp_ts={}", + "[PacketTrailer-FFI] C++ returned bad ts={} (0x{:016x}) fid={} rtp_ts={}", ts, ts, frame_id, rtp_timestamp ); } @@ -97,7 +97,7 @@ impl UserTimestampHandler { frame_id: u32, ) { log::info!( - target: "user_timestamp", + target: "packet_trailer", "store: capture_ts_us={}, user_ts_us={}, frame_id={}", capture_timestamp_us, user_timestamp_us, @@ -106,12 +106,12 @@ impl UserTimestampHandler { self.sys_handle.store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } - pub(crate) fn sys_handle(&self) -> SharedPtr { + pub(crate) fn sys_handle(&self) -> SharedPtr { self.sys_handle.clone() } } -/// Create a sender-side user timestamp handler. +/// Create a sender-side packet trailer handler. /// /// This handler will embed frame metadata into encoded frames before /// they are packetized and sent. Use `store_frame_metadata()` to @@ -119,16 +119,16 @@ impl UserTimestampHandler { pub fn create_sender_handler( peer_factory: &PeerConnectionFactory, sender: &RtpSender, -) -> UserTimestampHandler { - UserTimestampHandler { - sys_handle: sys_ut::new_user_timestamp_sender( +) -> PacketTrailerHandler { + PacketTrailerHandler { + sys_handle: sys_pt::new_packet_trailer_sender( peer_factory.handle.sys_handle.clone(), sender.handle.sys_handle.clone(), ), } } -/// Create a receiver-side user timestamp handler. +/// Create a receiver-side packet trailer handler. /// /// This handler will extract frame metadata from received frames /// and store them in a map keyed by RTP timestamp. 
Use @@ -137,9 +137,9 @@ pub fn create_sender_handler( pub fn create_receiver_handler( peer_factory: &PeerConnectionFactory, receiver: &RtpReceiver, -) -> UserTimestampHandler { - UserTimestampHandler { - sys_handle: sys_ut::new_user_timestamp_receiver( +) -> PacketTrailerHandler { + PacketTrailerHandler { + sys_handle: sys_pt::new_packet_trailer_receiver( peer_factory.handle.sys_handle.clone(), receiver.handle.sys_handle.clone(), ), diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 705166bf7..eb0c3abb3 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -23,7 +23,7 @@ use parking_lot::Mutex; use webrtc_sys::{video_frame as vf_sys, video_frame::ffi::VideoRotation, video_track as vt_sys}; use crate::{ - native::user_timestamp::UserTimestampHandler, + native::packet_trailer::PacketTrailerHandler, video_frame::{I420Buffer, VideoBuffer, VideoFrame}, video_source::VideoResolution, }; @@ -106,10 +106,7 @@ impl NativeVideoSource { }; builder.pin_mut().set_timestamp_us(capture_ts); - // Pass the user timestamp and frame_id to the C++ on_captured_frame - // so it can store the mapping keyed by the TimestampAligner-adjusted - // capture timestamp. - let (has_user_ts, user_ts) = match frame.user_timestamp_us { + let (has_trailer, user_ts) = match frame.user_timestamp_us { Some(ts) => (true, ts), None => (false, 0), }; @@ -119,21 +116,21 @@ impl NativeVideoSource { self.sys_handle.on_captured_frame( &builder.pin_mut().build(), - has_user_ts, + has_trailer, user_ts, frame_id, ); } - /// Set the user timestamp handler used by this source. + /// Set the packet trailer handler used by this source. /// /// When set, any frame captured with a `user_timestamp_us` value will /// automatically have its timestamp stored in the handler so the - /// `UserTimestampTransformer` can embed it into the encoded frame. + /// `PacketTrailerTransformer` can embed it into the encoded frame. 
/// The handler is set on the C++ VideoTrackSource so it has access to /// the TimestampAligner-adjusted capture timestamp for correct keying. - pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.sys_handle.set_user_timestamp_handler(handler.sys_handle()); + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.sys_handle.set_packet_trailer_handler(handler.sys_handle()); } pub fn video_resolution(&self) -> VideoResolution { diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index a3619a10a..a32edd30b 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -25,7 +25,7 @@ use webrtc_sys::video_track as sys_vt; use super::video_frame::new_video_frame_buffer; use crate::{ - native::user_timestamp::UserTimestampHandler, + native::packet_trailer::PacketTrailerHandler, video_frame::{BoxVideoFrame, VideoFrame}, video_track::RtcVideoTrack, }; @@ -41,12 +41,12 @@ impl NativeVideoStream { pub fn new(video_track: RtcVideoTrack) -> Self { let (frame_tx, frame_rx) = mpsc::unbounded_channel(); - // Auto-wire the user timestamp handler from the track if one is set. - let handler = video_track.handle.user_timestamp_handler(); + // Auto-wire the packet trailer handler from the track if one is set. + let handler = video_track.handle.packet_trailer_handler(); let observer = Arc::new(VideoTrackObserver { frame_tx, - user_timestamp_handler: parking_lot::Mutex::new(handler), + packet_trailer_handler: parking_lot::Mutex::new(handler), }); let native_sink = sys_vt::ffi::new_native_video_sink(Box::new( sys_vt::VideoSinkWrapper::new(observer.clone()), @@ -58,7 +58,7 @@ impl NativeVideoStream { Self { native_sink, observer, video_track, frame_rx } } - /// Set the user timestamp handler for this stream. + /// Set the packet trailer handler for this stream. 
/// /// When set, each frame produced by this stream will have its /// `user_timestamp_us` field populated from the handler's receive @@ -68,8 +68,8 @@ impl NativeVideoStream { /// creating this stream, it is automatically wired up. This method is /// only needed if you want to override or set the handler after /// construction. - pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - *self.observer.user_timestamp_handler.lock() = Some(handler); + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + *self.observer.packet_trailer_handler.lock() = Some(handler); } pub fn track(&self) -> RtcVideoTrack { @@ -100,14 +100,14 @@ impl Stream for NativeVideoStream { struct VideoTrackObserver { frame_tx: mpsc::UnboundedSender, - user_timestamp_handler: parking_lot::Mutex>, + packet_trailer_handler: parking_lot::Mutex>, } impl sys_vt::VideoSink for VideoTrackObserver { fn on_frame(&self, frame: UniquePtr) { let rtp_timestamp = frame.timestamp(); let meta = self - .user_timestamp_handler + .packet_trailer_handler .lock() .as_ref() .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)); diff --git a/libwebrtc/src/native/video_track.rs b/libwebrtc/src/native/video_track.rs index d9d0191da..64699e153 100644 --- a/libwebrtc/src/native/video_track.rs +++ b/libwebrtc/src/native/video_track.rs @@ -20,37 +20,37 @@ use sys_vt::ffi::video_to_media; use webrtc_sys::video_track as sys_vt; use super::media_stream_track::impl_media_stream_track; -use super::user_timestamp::UserTimestampHandler; +use super::packet_trailer::PacketTrailerHandler; use crate::media_stream_track::RtcTrackState; #[derive(Clone)] pub struct RtcVideoTrack { pub(crate) sys_handle: SharedPtr, - user_timestamp_handler: Arc>>, + packet_trailer_handler: Arc>>, } impl RtcVideoTrack { impl_media_stream_track!(video_to_media); pub(crate) fn new(sys_handle: SharedPtr) -> Self { - Self { sys_handle, user_timestamp_handler: Arc::new(Mutex::new(None)) } + Self { sys_handle, 
packet_trailer_handler: Arc::new(Mutex::new(None)) } } pub fn sys_handle(&self) -> SharedPtr { video_to_media(self.sys_handle.clone()) } - /// Set the user timestamp handler for this track. + /// Set the packet trailer handler for this track. /// /// When set, any `NativeVideoStream` created from this track will /// automatically use this handler to populate `user_timestamp_us` /// on each decoded frame. - pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.user_timestamp_handler.lock().replace(handler); + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.packet_trailer_handler.lock().replace(handler); } - /// Get the user timestamp handler, if one has been set. - pub fn user_timestamp_handler(&self) -> Option { - self.user_timestamp_handler.lock().clone() + /// Get the packet trailer handler, if one has been set. + pub fn packet_trailer_handler(&self) -> Option { + self.packet_trailer_handler.lock().clone() } } diff --git a/libwebrtc/src/video_source.rs b/libwebrtc/src/video_source.rs index 2127b6f60..dc9e62afc 100644 --- a/libwebrtc/src/video_source.rs +++ b/libwebrtc/src/video_source.rs @@ -50,7 +50,7 @@ pub mod native { use std::fmt::{Debug, Formatter}; use super::*; - use crate::native::user_timestamp::UserTimestampHandler; + use crate::native::packet_trailer::PacketTrailerHandler; use crate::video_frame::{VideoBuffer, VideoFrame}; #[derive(Clone)] @@ -79,14 +79,14 @@ pub mod native { self.handle.capture_frame(frame) } - /// Set the user timestamp handler used by this source. + /// Set the packet trailer handler used by this source. /// /// When set, any frame captured with a `user_timestamp_us` value will /// automatically have its timestamp stored in the handler (keyed by /// the TimestampAligner-adjusted capture timestamp) so the - /// `UserTimestampTransformer` can embed it into the encoded frame. 
- pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.handle.set_user_timestamp_handler(handler) + /// `PacketTrailerTransformer` can embed it into the encoded frame. + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.handle.set_packet_trailer_handler(handler) } pub fn video_resolution(&self) -> VideoResolution { diff --git a/libwebrtc/src/video_stream.rs b/libwebrtc/src/video_stream.rs index 2370b56f9..b7a2fa952 100644 --- a/libwebrtc/src/video_stream.rs +++ b/libwebrtc/src/video_stream.rs @@ -27,7 +27,7 @@ pub mod native { use super::stream_imp; use crate::{ - native::user_timestamp::UserTimestampHandler, video_frame::BoxVideoFrame, + native::packet_trailer::PacketTrailerHandler, video_frame::BoxVideoFrame, video_track::RtcVideoTrack, }; use livekit_runtime::Stream; @@ -47,7 +47,7 @@ pub mod native { Self { handle: stream_imp::NativeVideoStream::new(video_track) } } - /// Set the user timestamp handler for this stream. + /// Set the packet trailer handler for this stream. /// /// When set, each frame produced by this stream will have its /// `user_timestamp_us` field populated by looking up the user @@ -57,8 +57,8 @@ pub mod native { /// before creating this stream, it is automatically wired up. /// This method is only needed to override or set the handler /// after construction. 
- pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.handle.set_user_timestamp_handler(handler); + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.handle.set_packet_trailer_handler(handler); } pub fn track(&self) -> RtcVideoTrack { diff --git a/libwebrtc/src/video_track.rs b/libwebrtc/src/video_track.rs index b8c00dc6a..706da563b 100644 --- a/libwebrtc/src/video_track.rs +++ b/libwebrtc/src/video_track.rs @@ -20,7 +20,7 @@ use crate::{ }; #[cfg(not(target_arch = "wasm32"))] -use crate::native::user_timestamp::UserTimestampHandler; +use crate::native::packet_trailer::PacketTrailerHandler; #[derive(Clone)] pub struct RtcVideoTrack { @@ -30,20 +30,20 @@ pub struct RtcVideoTrack { impl RtcVideoTrack { media_stream_track!(); - /// Set the user timestamp handler for this track. + /// Set the packet trailer handler for this track. /// /// When set, any `NativeVideoStream` created from this track will /// automatically use this handler to populate `user_timestamp_us` /// on each decoded frame. #[cfg(not(target_arch = "wasm32"))] - pub fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.handle.set_user_timestamp_handler(handler); + pub fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.handle.set_packet_trailer_handler(handler); } - /// Get the user timestamp handler, if one has been set. + /// Get the packet trailer handler, if one has been set. 
#[cfg(not(target_arch = "wasm32"))] - pub fn user_timestamp_handler(&self) -> Option { - self.handle.user_timestamp_handler() + pub fn packet_trailer_handler(&self) -> Option { + self.handle.packet_trailer_handler() } } diff --git a/livekit-ffi/src/conversion/room.rs b/livekit-ffi/src/conversion/room.rs index 918fbc8e3..dd6a0770c 100644 --- a/livekit-ffi/src/conversion/room.rs +++ b/livekit-ffi/src/conversion/room.rs @@ -257,7 +257,7 @@ impl From for TrackPublishOptions { preconnect_buffer: opts .preconnect_buffer .unwrap_or(default_publish_options.preconnect_buffer), - user_timestamp: default_publish_options.user_timestamp, + packet_trailer: default_publish_options.packet_trailer, } } } diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 9636868f3..1833ac326 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -19,7 +19,7 @@ use libwebrtc::{ frame_cryptor::{ DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor, }, - user_timestamp, + packet_trailer, }, rtp_receiver::RtpReceiver, rtp_sender::RtpSender, @@ -103,28 +103,28 @@ impl E2eeManager { ) { let identity = participant.identity(); let receiver = track.transceiver().unwrap().receiver(); - let mut user_timestamp_handler = None; + let mut packet_trailer_handler = None; - let has_user_timestamp = publication + let has_packet_trailer = publication .proto_info() .packet_trailer_features .contains(&(PacketTrailerFeature::PtfUserTimestamp as i32)); if let RemoteTrack::Video(video_track) = &track { let handler = - user_timestamp::create_receiver_handler(LkRuntime::instance().pc_factory(), &receiver); - video_track.set_user_timestamp_handler(handler.clone()); - user_timestamp_handler = Some(handler); + packet_trailer::create_receiver_handler(LkRuntime::instance().pc_factory(), &receiver); + video_track.set_packet_trailer_handler(handler.clone()); + packet_trailer_handler = Some(handler); - if has_user_timestamp { + if 
has_packet_trailer { log::info!( - "attached user_timestamp handler for subscribed track {} from {}", + "attached packet_trailer handler for subscribed track {} from {}", publication.sid(), identity, ); } else { log::info!( - "attached user_timestamp handler for subscribed track {} from {} without advertised packet trailer support", + "attached packet_trailer handler for subscribed track {} from {} without advertised packet trailer support", publication.sid(), identity, ); @@ -136,8 +136,8 @@ impl E2eeManager { } let frame_cryptor = self.setup_rtp_receiver(&identity, receiver); - if let Some(handler) = user_timestamp_handler.as_ref() { - frame_cryptor.set_user_timestamp_handler(handler); + if let Some(handler) = packet_trailer_handler.as_ref() { + frame_cryptor.set_packet_trailer_handler(handler); } self.setup_cryptor(&frame_cryptor); @@ -154,17 +154,17 @@ impl E2eeManager { let identity = participant.identity(); let sender = track.transceiver().unwrap().sender(); - let user_timestamp_handler = if let LocalTrack::Video(video_track) = &track { - let handler = video_track.user_timestamp_handler(); + let packet_trailer_handler = if let LocalTrack::Video(video_track) = &track { + let handler = video_track.packet_trailer_handler(); if handler.is_some() { log::info!( - "user_timestamp enabled for published track {} from {}", + "packet_trailer enabled for published track {} from {}", publication.sid(), identity, ); } else { log::info!( - "user_timestamp not enabled for published track {} from {}", + "packet_trailer not enabled for published track {} from {}", publication.sid(), identity, ); @@ -179,8 +179,8 @@ impl E2eeManager { } let frame_cryptor = self.setup_rtp_sender(&identity, sender); - if let Some(handler) = user_timestamp_handler.as_ref() { - frame_cryptor.set_user_timestamp_handler(handler); + if let Some(handler) = packet_trailer_handler.as_ref() { + frame_cryptor.set_packet_trailer_handler(handler); } self.setup_cryptor(&frame_cryptor); diff --git 
a/livekit/src/room/options.rs b/livekit/src/room/options.rs index 46f39072f..618bed7cf 100644 --- a/livekit/src/room/options.rs +++ b/livekit/src/room/options.rs @@ -91,7 +91,7 @@ pub struct TrackPublishOptions { pub source: TrackSource, pub stream: String, pub preconnect_buffer: bool, - pub user_timestamp: bool, + pub packet_trailer: bool, } impl Default for TrackPublishOptions { @@ -107,7 +107,7 @@ impl Default for TrackPublishOptions { source: TrackSource::Unknown, stream: "".to_string(), preconnect_buffer: false, - user_timestamp: false, + packet_trailer: false, } } } diff --git a/livekit/src/room/participant/local_participant.rs b/livekit/src/room/participant/local_participant.rs index fe48de2b0..e036e2844 100644 --- a/livekit/src/room/participant/local_participant.rs +++ b/livekit/src/room/participant/local_participant.rs @@ -41,7 +41,7 @@ use crate::{ }; use chrono::Utc; use libwebrtc::{ - native::{create_random_uuid, user_timestamp}, + native::{create_random_uuid, packet_trailer}, rtp_parameters::RtpEncodingParameters, video_source::RtcVideoSource, }; @@ -278,7 +278,7 @@ impl LocalParticipant { req.audio_features.push(proto::AudioTrackFeature::TfPreconnectBuffer as i32); } - if options.user_timestamp { + if options.packet_trailer { req.packet_trailer_features.push(proto::PacketTrailerFeature::PtfUserTimestamp as i32); } @@ -327,19 +327,19 @@ impl LocalParticipant { track.set_transceiver(Some(transceiver)); - if options.user_timestamp { + if options.packet_trailer { if let LocalTrack::Video(video_track) = &track { - log::info!("user_timestamp enabled for local video track {}", publication.sid(),); + log::info!("packet_trailer enabled for local video track {}", publication.sid(),); let sender = track.transceiver().unwrap().sender(); - let handler = user_timestamp::create_sender_handler( + let handler = packet_trailer::create_sender_handler( LkRuntime::instance().pc_factory(), &sender, ); - video_track.set_user_timestamp_handler(handler.clone()); + 
video_track.set_packet_trailer_handler(handler.clone()); #[cfg(not(target_arch = "wasm32"))] if let RtcVideoSource::Native(ref native_source) = video_track.rtc_source() { - native_source.set_user_timestamp_handler(handler.clone()); + native_source.set_packet_trailer_handler(handler.clone()); } } } diff --git a/livekit/src/room/track/local_video_track.rs b/livekit/src/room/track/local_video_track.rs index 19cdefe64..811fabca1 100644 --- a/livekit/src/room/track/local_video_track.rs +++ b/livekit/src/room/track/local_video_track.rs @@ -14,7 +14,7 @@ use std::{fmt::Debug, sync::Arc}; -use libwebrtc::{native::user_timestamp::UserTimestampHandler, prelude::*, stats::RtcStats}; +use libwebrtc::{native::packet_trailer::PacketTrailerHandler, prelude::*, stats::RtcStats}; use livekit_protocol as proto; use parking_lot::Mutex; @@ -25,7 +25,7 @@ use crate::{prelude::*, rtc_engine::lk_runtime::LkRuntime}; pub struct LocalVideoTrack { inner: Arc, source: RtcVideoSource, - user_timestamp_handler: Arc>>, + packet_trailer_handler: Arc>>, } impl Debug for LocalVideoTrack { @@ -48,7 +48,7 @@ impl LocalVideoTrack { MediaStreamTrack::Video(rtc_track), )), source, - user_timestamp_handler: Arc::new(Mutex::new(None)), + packet_trailer_handler: Arc::new(Mutex::new(None)), } } @@ -126,16 +126,16 @@ impl LocalVideoTrack { self.source.clone() } - /// Returns the user timestamp handler associated with this track, if any. + /// Returns the packet trailer handler associated with this track, if any. /// When present on the sender side, callers can store per-frame user /// timestamps which will be embedded into encoded frames. - pub fn user_timestamp_handler(&self) -> Option { - self.user_timestamp_handler.lock().clone() + pub fn packet_trailer_handler(&self) -> Option { + self.packet_trailer_handler.lock().clone() } - /// Internal: set the user timestamp handler used for this track. 
- pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - *self.user_timestamp_handler.lock() = Some(handler); + /// Internal: set the packet trailer handler used for this track. + pub(crate) fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + *self.packet_trailer_handler.lock() = Some(handler); } pub async fn get_stats(&self) -> RoomResult> { diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs index c282d428e..87f9beb3c 100644 --- a/livekit/src/room/track/remote_video_track.rs +++ b/livekit/src/room/track/remote_video_track.rs @@ -14,7 +14,7 @@ use std::{fmt::Debug, sync::Arc}; -use libwebrtc::{native::user_timestamp::UserTimestampHandler, prelude::*, stats::RtcStats}; +use libwebrtc::{native::packet_trailer::PacketTrailerHandler, prelude::*, stats::RtcStats}; use livekit_protocol as proto; use super::{remote_track, TrackInner}; @@ -94,18 +94,18 @@ impl RemoteVideoTrack { true } - /// Returns a clone of the user timestamp handler, if one has been set. - pub fn user_timestamp_handler(&self) -> Option { - self.rtc_track().user_timestamp_handler() + /// Returns a clone of the packet trailer handler, if one has been set. + pub fn packet_trailer_handler(&self) -> Option { + self.rtc_track().packet_trailer_handler() } - /// Internal: set the handler that extracts user timestamps for this track. + /// Internal: set the handler that extracts packet trailers for this track. /// /// The handler is stored on the underlying `RtcVideoTrack`, so any /// `NativeVideoStream` created from this track will automatically /// pick it up — no manual wiring required. 
- pub(crate) fn set_user_timestamp_handler(&self, handler: UserTimestampHandler) { - self.rtc_track().set_user_timestamp_handler(handler); + pub(crate) fn set_packet_trailer_handler(&self, handler: PacketTrailerHandler) { + self.rtc_track().set_packet_trailer_handler(handler); } pub async fn get_stats(&self) -> RoomResult> { diff --git a/webrtc-sys/build.rs b/webrtc-sys/build.rs index 76eb174e6..072794ecf 100644 --- a/webrtc-sys/build.rs +++ b/webrtc-sys/build.rs @@ -54,7 +54,7 @@ fn main() { "src/prohibit_libsrtp_initialization.rs", "src/apm.rs", "src/audio_mixer.rs", - "src/user_timestamp.rs", + "src/packet_trailer.rs", ]; if is_desktop { @@ -90,7 +90,7 @@ fn main() { "src/prohibit_libsrtp_initialization.cpp", "src/apm.cpp", "src/audio_mixer.cpp", - "src/user_timestamp.cpp", + "src/packet_trailer.cpp", ]); if is_desktop { diff --git a/webrtc-sys/include/livekit/frame_cryptor.h b/webrtc-sys/include/livekit/frame_cryptor.h index 16af04d11..da36356b5 100644 --- a/webrtc-sys/include/livekit/frame_cryptor.h +++ b/webrtc-sys/include/livekit/frame_cryptor.h @@ -39,7 +39,7 @@ struct EncryptedPacket; enum class Algorithm : ::std::int32_t; class RtcFrameCryptorObserverWrapper; class NativeFrameCryptorObserver; -class UserTimestampHandler; +class PacketTrailerHandler; /// Shared secret key for frame encryption. class KeyProvider { @@ -159,9 +159,9 @@ class FrameCryptor { void unregister_observer() const; - /// Attach a user timestamp transformer for chained processing. - void set_user_timestamp_handler( - std::shared_ptr handler) const; + /// Attach a packet trailer transformer for chained processing. 
+ void set_packet_trailer_handler( + std::shared_ptr handler) const; private: std::shared_ptr rtc_runtime_; diff --git a/webrtc-sys/include/livekit/user_timestamp.h b/webrtc-sys/include/livekit/packet_trailer.h similarity index 86% rename from webrtc-sys/include/livekit/user_timestamp.h rename to webrtc-sys/include/livekit/packet_trailer.h index fa65a2a02..6ea96c6c7 100644 --- a/webrtc-sys/include/livekit/user_timestamp.h +++ b/webrtc-sys/include/livekit/packet_trailer.h @@ -36,7 +36,7 @@ #include "rust/cxx.h" // Forward declarations to avoid circular includes -// (video_track.h -> user_timestamp.h -> peer_connection.h -> media_stream.h -> video_track.h) +// (video_track.h -> packet_trailer.h -> peer_connection.h -> media_stream.h -> video_track.h) namespace livekit_ffi { class PeerConnectionFactory; class RtpSender; @@ -45,8 +45,8 @@ class RtpReceiver; namespace livekit_ffi { -// Magic bytes to identify user timestamp trailers: "LKTS" (LiveKit TimeStamp) -constexpr uint8_t kUserTimestampMagic[4] = {'L', 'K', 'T', 'S'}; +// Magic bytes to identify packet trailers: "LKTS" (LiveKit TimeStamp) +constexpr uint8_t kPacketTrailerMagic[4] = {'L', 'K', 'T', 'S'}; // Trailer envelope: [trailer_len: 1B] [magic: 4B] = 5 bytes. // Always present at the end of every trailer. @@ -63,9 +63,9 @@ constexpr size_t kTimestampTlvSize = 10; // tag + len + 8-byte value constexpr size_t kFrameIdTlvSize = 6; // tag + len + 4-byte value // Trailer size varies because frame_id is omitted when it is unset (0). 
-constexpr size_t kUserTimestampTrailerMinSize = +constexpr size_t kPacketTrailerMinSize = kTimestampTlvSize + kTrailerEnvelopeSize; -constexpr size_t kUserTimestampTrailerMaxSize = +constexpr size_t kPacketTrailerMaxSize = kTimestampTlvSize + kFrameIdTlvSize + kTrailerEnvelopeSize; struct FrameMetadata { @@ -74,7 +74,7 @@ struct FrameMetadata { uint32_t ssrc; // SSRC that produced this entry (for simulcast tracking) }; -/// Frame transformer that appends/extracts user timestamp trailers. +/// Frame transformer that appends/extracts packet trailers. /// This transformer can be used standalone or in conjunction with e2ee. /// /// On the send side, user timestamps are stored in an internal map keyed @@ -84,12 +84,12 @@ struct FrameMetadata { /// On the receive side, extracted frame metadata is stored in an /// internal map keyed by RTP timestamp (uint32_t). Decoded frames can /// look up their metadata via lookup_frame_metadata(rtp_ts). -class UserTimestampTransformer : public webrtc::FrameTransformerInterface { +class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { public: enum class Direction { kSend, kReceive }; - explicit UserTimestampTransformer(Direction direction); - ~UserTimestampTransformer() override = default; + explicit PacketTrailerTransformer(Direction direction); + ~PacketTrailerTransformer() override = default; // FrameTransformerInterface implementation void Transform( @@ -163,18 +163,18 @@ class UserTimestampTransformer : public webrtc::FrameTransformerInterface { mutable uint32_t recv_active_ssrc_{0}; }; -/// Wrapper class for Rust FFI that manages user timestamp transformers. -class UserTimestampHandler { +/// Wrapper class for Rust FFI that manages packet trailer transformers. 
+class PacketTrailerHandler { public: - UserTimestampHandler( + PacketTrailerHandler( std::shared_ptr rtc_runtime, rtc::scoped_refptr sender); - UserTimestampHandler( + PacketTrailerHandler( std::shared_ptr rtc_runtime, rtc::scoped_refptr receiver); - ~UserTimestampHandler() = default; + ~PacketTrailerHandler() = default; /// Enable/disable timestamp embedding void set_enabled(bool enabled) const; @@ -183,10 +183,10 @@ class UserTimestampHandler { /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. The entry is removed after lookup. /// Also caches the frame_id for retrieval via last_lookup_frame_id(). - int64_t lookup_user_timestamp(uint32_t rtp_timestamp) const; + int64_t lookup_timestamp(uint32_t rtp_timestamp) const; /// Returns the frame_id from the most recent successful - /// lookup_user_timestamp() call. Returns 0 if no lookup succeeded. + /// lookup_timestamp() call. Returns 0 if no lookup succeeded. uint32_t last_lookup_frame_id() const; /// Store frame metadata for a given capture timestamp (sender side). @@ -195,11 +195,11 @@ class UserTimestampHandler { uint32_t frame_id) const; /// Access the underlying transformer for chaining. 
- rtc::scoped_refptr transformer() const; + rtc::scoped_refptr transformer() const; private: std::shared_ptr rtc_runtime_; - rtc::scoped_refptr transformer_; + rtc::scoped_refptr transformer_; rtc::scoped_refptr sender_; rtc::scoped_refptr receiver_; mutable uint32_t last_frame_id_{0}; @@ -207,11 +207,11 @@ class UserTimestampHandler { // Factory functions for Rust FFI -std::shared_ptr new_user_timestamp_sender( +std::shared_ptr new_packet_trailer_sender( std::shared_ptr peer_factory, std::shared_ptr sender); -std::shared_ptr new_user_timestamp_receiver( +std::shared_ptr new_packet_trailer_receiver( std::shared_ptr peer_factory, std::shared_ptr receiver); diff --git a/webrtc-sys/include/livekit/video_track.h b/webrtc-sys/include/livekit/video_track.h index 871b32302..ccef9e836 100644 --- a/webrtc-sys/include/livekit/video_track.h +++ b/webrtc-sys/include/livekit/video_track.h @@ -33,7 +33,7 @@ namespace livekit_ffi { class VideoTrack; class NativeVideoSink; class VideoTrackSource; -class UserTimestampHandler; // forward declaration to avoid circular include +class PacketTrailerHandler; // forward declaration to avoid circular include } // namespace livekit_ffi #include "webrtc-sys/src/video_track.rs.h" @@ -100,18 +100,18 @@ class VideoTrackSource { bool remote() const override; VideoResolution video_resolution() const; bool on_captured_frame(const webrtc::VideoFrame& frame, - bool has_user_timestamp, + bool has_packet_trailer, int64_t user_timestamp_us, uint32_t frame_id); - void set_user_timestamp_handler( - std::shared_ptr handler); + void set_packet_trailer_handler( + std::shared_ptr handler); private: mutable webrtc::Mutex mutex_; webrtc::TimestampAligner timestamp_aligner_; VideoResolution resolution_; - std::shared_ptr user_timestamp_handler_; + std::shared_ptr packet_trailer_handler_; bool is_screencast_; }; @@ -121,13 +121,13 @@ class VideoTrackSource { VideoResolution video_resolution() const; bool on_captured_frame(const std::unique_ptr& frame, - bool 
has_user_timestamp, + bool has_packet_trailer, int64_t user_timestamp_us, uint32_t frame_id) const; // frames pushed from Rust (+interior mutability) - void set_user_timestamp_handler( - std::shared_ptr handler) const; + void set_packet_trailer_handler( + std::shared_ptr handler) const; webrtc::scoped_refptr get() const; diff --git a/webrtc-sys/src/frame_cryptor.cpp b/webrtc-sys/src/frame_cryptor.cpp index f28536754..f23406c0b 100644 --- a/webrtc-sys/src/frame_cryptor.cpp +++ b/webrtc-sys/src/frame_cryptor.cpp @@ -22,7 +22,7 @@ #include "api/make_ref_counted.h" #include "livekit/peer_connection.h" #include "livekit/peer_connection_factory.h" -#include "livekit/user_timestamp.h" +#include "livekit/packet_trailer.h" #include "livekit/webrtc.h" #include "rtc_base/thread.h" #include "webrtc-sys/src/frame_cryptor.rs.h" @@ -169,8 +169,8 @@ void FrameCryptor::unregister_observer() const { e2ee_transformer_->UnRegisterFrameCryptorTransformerObserver(); } -void FrameCryptor::set_user_timestamp_handler( - std::shared_ptr handler) const { +void FrameCryptor::set_packet_trailer_handler( + std::shared_ptr handler) const { if (!handler) { return; } diff --git a/webrtc-sys/src/frame_cryptor.rs b/webrtc-sys/src/frame_cryptor.rs index ac8770cde..dedd9933f 100644 --- a/webrtc-sys/src/frame_cryptor.rs +++ b/webrtc-sys/src/frame_cryptor.rs @@ -93,12 +93,12 @@ pub mod ffi { include!("livekit/rtp_sender.h"); include!("livekit/rtp_receiver.h"); include!("livekit/peer_connection_factory.h"); - include!("livekit/user_timestamp.h"); + include!("livekit/packet_trailer.h"); type RtpSender = crate::rtp_sender::ffi::RtpSender; type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver; type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory; - type UserTimestampHandler = crate::user_timestamp::ffi::UserTimestampHandler; + type PacketTrailerHandler = crate::packet_trailer::ffi::PacketTrailerHandler; pub type FrameCryptor; @@ -135,9 +135,9 @@ pub mod ffi { pub fn 
unregister_observer(self: &FrameCryptor); - pub fn set_user_timestamp_handler( + pub fn set_packet_trailer_handler( self: &FrameCryptor, - handler: SharedPtr, + handler: SharedPtr, ); } diff --git a/webrtc-sys/src/lib.rs b/webrtc-sys/src/lib.rs index 181cc3199..061112fe8 100644 --- a/webrtc-sys/src/lib.rs +++ b/webrtc-sys/src/lib.rs @@ -35,7 +35,7 @@ pub mod rtp_parameters; pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; -pub mod user_timestamp; +pub mod packet_trailer; pub mod video_frame; pub mod video_frame_buffer; pub mod video_track; diff --git a/webrtc-sys/src/user_timestamp.cpp b/webrtc-sys/src/packet_trailer.cpp similarity index 84% rename from webrtc-sys/src/user_timestamp.cpp rename to webrtc-sys/src/packet_trailer.cpp index 914989612..4d724c239 100644 --- a/webrtc-sys/src/user_timestamp.cpp +++ b/webrtc-sys/src/packet_trailer.cpp @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "livekit/user_timestamp.h" +#include "livekit/packet_trailer.h" #include #include @@ -24,16 +24,16 @@ #include "livekit/rtp_receiver.h" #include "livekit/rtp_sender.h" #include "rtc_base/logging.h" -#include "webrtc-sys/src/user_timestamp.rs.h" +#include "webrtc-sys/src/packet_trailer.rs.h" namespace livekit_ffi { -// UserTimestampTransformer implementation +// PacketTrailerTransformer implementation -UserTimestampTransformer::UserTimestampTransformer(Direction direction) +PacketTrailerTransformer::PacketTrailerTransformer(Direction direction) : direction_(direction) {} -void UserTimestampTransformer::Transform( +void PacketTrailerTransformer::Transform( std::unique_ptr frame) { uint32_t ssrc = frame->GetSsrc(); uint32_t rtp_timestamp = frame->GetTimestamp(); @@ -54,7 +54,7 @@ void UserTimestampTransformer::Transform( cb->OnTransformedFrame(std::move(frame)); } else { RTC_LOG(LS_WARNING) - << "UserTimestampTransformer::Transform (disabled) has no callback" + << "PacketTrailerTransformer::Transform (disabled) has no callback" << " direction=" << 
(direction_ == Direction::kSend ? "send" : "recv") << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; @@ -69,7 +69,7 @@ void UserTimestampTransformer::Transform( } } -void UserTimestampTransformer::TransformSend( +void PacketTrailerTransformer::TransformSend( std::unique_ptr frame) { uint32_t rtp_timestamp = frame->GetTimestamp(); uint32_t ssrc = frame->GetSsrc(); @@ -95,7 +95,7 @@ void UserTimestampTransformer::TransformSend( } } else { RTC_LOG(LS_WARNING) - << "UserTimestampTransformer::TransformSend CaptureTime() not available" + << "PacketTrailerTransformer::TransformSend CaptureTime() not available" << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } @@ -124,12 +124,12 @@ void UserTimestampTransformer::TransformSend( cb->OnTransformedFrame(std::move(frame)); } else { RTC_LOG(LS_WARNING) - << "UserTimestampTransformer::TransformSend has no callback" + << "PacketTrailerTransformer::TransformSend has no callback" << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } } -void UserTimestampTransformer::TransformReceive( +void PacketTrailerTransformer::TransformReceive( std::unique_ptr frame) { uint32_t ssrc = frame->GetSsrc(); uint32_t rtp_timestamp = frame->GetTimestamp(); @@ -197,12 +197,12 @@ void UserTimestampTransformer::TransformReceive( cb->OnTransformedFrame(std::move(frame)); } else { RTC_LOG(LS_WARNING) - << "UserTimestampTransformer::TransformReceive has no callback" + << "PacketTrailerTransformer::TransformReceive has no callback" << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } } -std::vector UserTimestampTransformer::AppendTrailer( +std::vector PacketTrailerTransformer::AppendTrailer( rtc::ArrayView data, int64_t user_timestamp_us, uint32_t frame_id) { @@ -239,13 +239,13 @@ std::vector UserTimestampTransformer::AppendTrailer( // Envelope: trailer_len (1B, XORed) + magic (4B, NOT XORed) result.push_back(static_cast(trailer_len ^ 0xFF)); - result.insert(result.end(), std::begin(kUserTimestampMagic), - std::end(kUserTimestampMagic)); + 
result.insert(result.end(), std::begin(kPacketTrailerMagic), + std::end(kPacketTrailerMagic)); return result; } -std::optional UserTimestampTransformer::ExtractTrailer( +std::optional PacketTrailerTransformer::ExtractTrailer( rtc::ArrayView data, std::vector& out_data) { if (data.size() < kTrailerEnvelopeSize) { @@ -255,7 +255,7 @@ std::optional UserTimestampTransformer::ExtractTrailer( // Check for magic bytes at the end const uint8_t* magic_start = data.data() + data.size() - 4; - if (std::memcmp(magic_start, kUserTimestampMagic, 4) != 0) { + if (std::memcmp(magic_start, kPacketTrailerMagic, 4) != 0) { out_data.assign(data.begin(), data.end()); return std::nullopt; } @@ -314,39 +314,39 @@ std::optional UserTimestampTransformer::ExtractTrailer( return meta; } -void UserTimestampTransformer::RegisterTransformedFrameCallback( +void PacketTrailerTransformer::RegisterTransformedFrameCallback( rtc::scoped_refptr callback) { webrtc::MutexLock lock(&mutex_); callback_ = callback; } -void UserTimestampTransformer::RegisterTransformedFrameSinkCallback( +void PacketTrailerTransformer::RegisterTransformedFrameSinkCallback( rtc::scoped_refptr callback, uint32_t ssrc) { webrtc::MutexLock lock(&mutex_); sink_callbacks_[ssrc] = callback; } -void UserTimestampTransformer::UnregisterTransformedFrameCallback() { +void PacketTrailerTransformer::UnregisterTransformedFrameCallback() { webrtc::MutexLock lock(&mutex_); callback_ = nullptr; } -void UserTimestampTransformer::UnregisterTransformedFrameSinkCallback( +void PacketTrailerTransformer::UnregisterTransformedFrameSinkCallback( uint32_t ssrc) { webrtc::MutexLock lock(&mutex_); sink_callbacks_.erase(ssrc); } -void UserTimestampTransformer::set_enabled(bool enabled) { +void PacketTrailerTransformer::set_enabled(bool enabled) { enabled_.store(enabled); } -bool UserTimestampTransformer::enabled() const { +bool PacketTrailerTransformer::enabled() const { return enabled_.load(); } -std::optional 
UserTimestampTransformer::lookup_frame_metadata( +std::optional PacketTrailerTransformer::lookup_frame_metadata( uint32_t rtp_timestamp) { webrtc::MutexLock lock(&recv_map_mutex_); auto it = recv_map_.find(rtp_timestamp); @@ -365,7 +365,7 @@ std::optional UserTimestampTransformer::lookup_frame_metadata( return meta; } -void UserTimestampTransformer::store_frame_metadata( +void PacketTrailerTransformer::store_frame_metadata( int64_t capture_timestamp_us, int64_t user_timestamp_us, uint32_t frame_id) { @@ -396,35 +396,35 @@ void UserTimestampTransformer::store_frame_metadata( send_map_[key] = FrameMetadata{user_timestamp_us, frame_id, 0}; } -// UserTimestampHandler implementation +// PacketTrailerHandler implementation -UserTimestampHandler::UserTimestampHandler( +PacketTrailerHandler::PacketTrailerHandler( std::shared_ptr rtc_runtime, rtc::scoped_refptr sender) : rtc_runtime_(rtc_runtime), sender_(sender) { - transformer_ = rtc::make_ref_counted( - UserTimestampTransformer::Direction::kSend); + transformer_ = rtc::make_ref_counted( + PacketTrailerTransformer::Direction::kSend); sender->SetEncoderToPacketizerFrameTransformer(transformer_); } -UserTimestampHandler::UserTimestampHandler( +PacketTrailerHandler::PacketTrailerHandler( std::shared_ptr rtc_runtime, rtc::scoped_refptr receiver) : rtc_runtime_(rtc_runtime), receiver_(receiver) { - transformer_ = rtc::make_ref_counted( - UserTimestampTransformer::Direction::kReceive); + transformer_ = rtc::make_ref_counted( + PacketTrailerTransformer::Direction::kReceive); receiver->SetDepacketizerToDecoderFrameTransformer(transformer_); } -void UserTimestampHandler::set_enabled(bool enabled) const { +void PacketTrailerHandler::set_enabled(bool enabled) const { transformer_->set_enabled(enabled); } -bool UserTimestampHandler::enabled() const { +bool PacketTrailerHandler::enabled() const { return transformer_->enabled(); } -int64_t UserTimestampHandler::lookup_user_timestamp(uint32_t rtp_timestamp) const { +int64_t 
PacketTrailerHandler::lookup_timestamp(uint32_t rtp_timestamp) const { auto meta = transformer_->lookup_frame_metadata(rtp_timestamp); if (meta.has_value()) { last_frame_id_ = meta->frame_id; @@ -433,34 +433,34 @@ int64_t UserTimestampHandler::lookup_user_timestamp(uint32_t rtp_timestamp) cons return -1; } -uint32_t UserTimestampHandler::last_lookup_frame_id() const { +uint32_t PacketTrailerHandler::last_lookup_frame_id() const { return last_frame_id_; } -void UserTimestampHandler::store_frame_metadata( +void PacketTrailerHandler::store_frame_metadata( int64_t capture_timestamp_us, int64_t user_timestamp_us, uint32_t frame_id) const { transformer_->store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } -rtc::scoped_refptr UserTimestampHandler::transformer() const { +rtc::scoped_refptr PacketTrailerHandler::transformer() const { return transformer_; } // Factory functions -std::shared_ptr new_user_timestamp_sender( +std::shared_ptr new_packet_trailer_sender( std::shared_ptr peer_factory, std::shared_ptr sender) { - return std::make_shared( + return std::make_shared( peer_factory->rtc_runtime(), sender->rtc_sender()); } -std::shared_ptr new_user_timestamp_receiver( +std::shared_ptr new_packet_trailer_receiver( std::shared_ptr peer_factory, std::shared_ptr receiver) { - return std::make_shared( + return std::make_shared( peer_factory->rtc_runtime(), receiver->rtc_receiver()); } diff --git a/webrtc-sys/src/user_timestamp.rs b/webrtc-sys/src/packet_trailer.rs similarity index 69% rename from webrtc-sys/src/user_timestamp.rs rename to webrtc-sys/src/packet_trailer.rs index 55a8121fa..7af72a4b0 100644 --- a/webrtc-sys/src/user_timestamp.rs +++ b/webrtc-sys/src/packet_trailer.rs @@ -17,7 +17,7 @@ use crate::impl_thread_safety; #[cxx::bridge(namespace = "livekit_ffi")] pub mod ffi { unsafe extern "C++" { - include!("livekit/user_timestamp.h"); + include!("livekit/packet_trailer.h"); include!("livekit/rtp_sender.h"); include!("livekit/rtp_receiver.h"); 
include!("livekit/peer_connection_factory.h"); @@ -26,44 +26,44 @@ pub mod ffi { type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver; type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory; - /// Handler for user timestamp embedding/extraction on RTP streams. - pub type UserTimestampHandler; + /// Handler for packet trailer embedding/extraction on RTP streams. + pub type PacketTrailerHandler; /// Enable/disable timestamp embedding. - fn set_enabled(self: &UserTimestampHandler, enabled: bool); + fn set_enabled(self: &PacketTrailerHandler, enabled: bool); /// Check if timestamp embedding is enabled. - fn enabled(self: &UserTimestampHandler) -> bool; + fn enabled(self: &PacketTrailerHandler) -> bool; /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. The entry is removed after lookup. /// Also caches the frame_id for retrieval via last_lookup_frame_id(). - fn lookup_user_timestamp(self: &UserTimestampHandler, rtp_timestamp: u32) -> i64; + fn lookup_timestamp(self: &PacketTrailerHandler, rtp_timestamp: u32) -> i64; /// Returns the frame_id from the most recent successful - /// lookup_user_timestamp() call. - fn last_lookup_frame_id(self: &UserTimestampHandler) -> u32; + /// lookup_timestamp() call. + fn last_lookup_frame_id(self: &PacketTrailerHandler) -> u32; /// Store frame metadata for a given capture timestamp (sender side). fn store_frame_metadata( - self: &UserTimestampHandler, + self: &PacketTrailerHandler, capture_timestamp_us: i64, user_timestamp_us: i64, frame_id: u32, ); - /// Create a new user timestamp handler for a sender. - fn new_user_timestamp_sender( + /// Create a new packet trailer handler for a sender. + fn new_packet_trailer_sender( peer_factory: SharedPtr, sender: SharedPtr, - ) -> SharedPtr; + ) -> SharedPtr; - /// Create a new user timestamp handler for a receiver. - fn new_user_timestamp_receiver( + /// Create a new packet trailer handler for a receiver. 
+ fn new_packet_trailer_receiver( peer_factory: SharedPtr, receiver: SharedPtr, - ) -> SharedPtr; + ) -> SharedPtr; } } -impl_thread_safety!(ffi::UserTimestampHandler, Send + Sync); +impl_thread_safety!(ffi::PacketTrailerHandler, Send + Sync); diff --git a/webrtc-sys/src/video_track.cpp b/webrtc-sys/src/video_track.cpp index 196508105..ceb4be938 100644 --- a/webrtc-sys/src/video_track.cpp +++ b/webrtc-sys/src/video_track.cpp @@ -26,7 +26,7 @@ #include "audio/remix_resample.h" #include "common_audio/include/audio_util.h" #include "livekit/media_stream.h" -#include "livekit/user_timestamp.h" +#include "livekit/packet_trailer.h" #include "livekit/video_track.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" @@ -135,7 +135,7 @@ VideoResolution VideoTrackSource::InternalSource::video_resolution() const { bool VideoTrackSource::InternalSource::on_captured_frame( const webrtc::VideoFrame& frame, - bool has_user_timestamp, + bool has_packet_trailer, int64_t user_timestamp_us, uint32_t frame_id) { webrtc::MutexLock lock(&mutex_); @@ -143,12 +143,12 @@ bool VideoTrackSource::InternalSource::on_captured_frame( int64_t aligned_timestamp_us = timestamp_aligner_.TranslateTimestamp( frame.timestamp_us(), webrtc::TimeMicros()); - // If a user timestamp was provided on this frame and we have a handler, + // If a packet trailer was provided on this frame and we have a handler, // store the mapping keyed by the aligned timestamp. This is the value // that CaptureTime() will return in TransformSend, so the lookup will // succeed. 
- if (has_user_timestamp && user_timestamp_handler_) { - user_timestamp_handler_->store_frame_metadata( + if (has_packet_trailer && packet_trailer_handler_) { + packet_trailer_handler_->store_frame_metadata( aligned_timestamp_us, user_timestamp_us, frame_id); } @@ -188,10 +188,10 @@ bool VideoTrackSource::InternalSource::on_captured_frame( return true; } -void VideoTrackSource::InternalSource::set_user_timestamp_handler( - std::shared_ptr handler) { +void VideoTrackSource::InternalSource::set_packet_trailer_handler( + std::shared_ptr handler) { webrtc::MutexLock lock(&mutex_); - user_timestamp_handler_ = std::move(handler); + packet_trailer_handler_ = std::move(handler); } VideoTrackSource::VideoTrackSource(const VideoResolution& resolution, bool is_screencast) { @@ -204,17 +204,17 @@ VideoResolution VideoTrackSource::video_resolution() const { bool VideoTrackSource::on_captured_frame( const std::unique_ptr& frame, - bool has_user_timestamp, + bool has_packet_trailer, int64_t user_timestamp_us, uint32_t frame_id) const { auto rtc_frame = frame->get(); - return source_->on_captured_frame(rtc_frame, has_user_timestamp, + return source_->on_captured_frame(rtc_frame, has_packet_trailer, user_timestamp_us, frame_id); } -void VideoTrackSource::set_user_timestamp_handler( - std::shared_ptr handler) const { - source_->set_user_timestamp_handler(std::move(handler)); +void VideoTrackSource::set_packet_trailer_handler( + std::shared_ptr handler) const { + source_->set_packet_trailer_handler(std::move(handler)); } webrtc::scoped_refptr VideoTrackSource::get() diff --git a/webrtc-sys/src/video_track.rs b/webrtc-sys/src/video_track.rs index eaa6c04bf..114ea2680 100644 --- a/webrtc-sys/src/video_track.rs +++ b/webrtc-sys/src/video_track.rs @@ -51,10 +51,10 @@ pub mod ffi { } extern "C++" { - include!("livekit/user_timestamp.h"); + include!("livekit/packet_trailer.h"); include!("livekit/video_track.h"); - type UserTimestampHandler = 
crate::user_timestamp::ffi::UserTimestampHandler; + type PacketTrailerHandler = crate::packet_trailer::ffi::PacketTrailerHandler; } unsafe extern "C++" { @@ -75,13 +75,13 @@ pub mod ffi { fn on_captured_frame( self: &VideoTrackSource, frame: &UniquePtr, - has_user_timestamp: bool, + has_packet_trailer: bool, user_timestamp_us: i64, frame_id: u32, ) -> bool; - fn set_user_timestamp_handler( + fn set_packet_trailer_handler( self: &VideoTrackSource, - handler: SharedPtr, + handler: SharedPtr, ); fn new_video_track_source( resolution: &VideoResolution, From 351780f6db58343fbda9ec08a75bbcf88c1af6c9 Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 23 Mar 2026 14:10:16 -0700 Subject: [PATCH 38/52] use packet trailer features for enabling specific trailers --- examples/local_video/src/publisher.rs | 24 +- examples/local_video/src/subscriber.rs | 47 +- libwebrtc/src/native/video_source.rs | 6 +- livekit-api/src/services/connector.rs | 3 + livekit-api/src/services/sip.rs | 5 + livekit-ffi/src/conversion/room.rs | 2 +- livekit-protocol/generate_proto.sh | 3 + livekit-protocol/protocol | 2 +- livekit-protocol/src/livekit.rs | 150 +-- livekit-protocol/src/livekit.serde.rs | 1015 +++++++---------- livekit/src/proto.rs | 1 + livekit/src/room/e2ee/manager.rs | 6 +- livekit/src/room/options.rs | 5 +- .../src/room/participant/local_participant.rs | 7 +- livekit/src/room/participant/mod.rs | 1 + 15 files changed, 569 insertions(+), 708 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 8e54e184d..477d6ca8e 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -91,6 +91,10 @@ struct Args { #[arg(long, default_value_t = false)] burn_timestamp: bool, + /// Attach a monotonically increasing frame ID to each published frame via the packet trailer + #[arg(long, default_value_t = false)] + attach_frame_id: bool, + /// Shared encryption key for E2EE (enables AES-GCM end-to-end 
encryption when set) #[arg(long)] e2ee_key: Option, @@ -343,11 +347,19 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { .join(", "), ); + let mut packet_trailer_features = Vec::new(); + if args.attach_timestamp { + packet_trailer_features.push(PacketTrailerFeature::PtfUserTimestamp); + } + if args.attach_frame_id { + packet_trailer_features.push(PacketTrailerFeature::PtfFrameId); + } + let publish_opts = |codec: VideoCodec| TrackPublishOptions { source: TrackSource::Camera, simulcast: args.simulcast, video_codec: codec, - packet_trailer: args.attach_timestamp, + packet_trailer_features: packet_trailer_features.clone(), video_encoding: Some(main_encoding.clone()), simulcast_layers: Some(simulcast_presets.clone()), ..Default::default() @@ -402,7 +414,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Timing accumulators (ms) for rolling stats let mut timings = PublisherTimingSummary::default(); let mut logged_mjpeg_fallback = false; - let mut frame_counter: u32 = 0; + let mut frame_counter: u32 = 1; let mut timestamp_overlay = (args.attach_timestamp && args.burn_timestamp) .then(|| TimestampOverlay::new(width, height)); loop { @@ -550,13 +562,17 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; - // Optionally attach wall-clock time as user timestamp and frame_id + // Optionally attach wall-clock time as user timestamp if args.attach_timestamp { frame.user_timestamp_us = Some(capture_wall_time_us); + } else { + frame.user_timestamp_us = None; + } + // Optionally attach a monotonically increasing frame ID + if args.attach_frame_id { frame.frame_id = Some(frame_counter); frame_counter = frame_counter.wrapping_add(1); } else { - frame.user_timestamp_us = None; frame.frame_id = None; } rtc_source.capture_frame(&frame); diff --git a/examples/local_video/src/subscriber.rs 
b/examples/local_video/src/subscriber.rs index 696d8bd24..fcd8ca9f7 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -81,6 +81,8 @@ struct SharedYuv { user_timestamp_us: Option, /// Last received frame_id, if any. frame_id: Option, + /// Whether the publisher advertised PTF_USER_TIMESTAMP in its track info. + has_user_timestamp: bool, } #[derive(Clone)] @@ -257,10 +259,13 @@ async fn handle_track_subscribed( *active = Some(sid.clone()); } - // Update HUD codec label early (before first frame arrives) + // Update HUD codec label and feature flags early (before first frame arrives) { let mut s = shared.lock(); s.codec = codec; + s.has_user_timestamp = publication + .packet_trailer_features() + .contains(&PacketTrailerFeature::PtfUserTimestamp); } info!( @@ -450,6 +455,7 @@ fn clear_hud_and_simulcast(shared: &Arc>, simulcast: &Arc, locked_aspect: Option, display_timestamp: bool, + /// Cached timestamp overlay text to avoid layout churn on every repaint. 
+ last_timestamp_text: String, } impl eframe::App for VideoApp { @@ -582,6 +590,7 @@ impl eframe::App for VideoApp { let frame_id = s.frame_id; let publish_us = s.user_timestamp_us; let receive_us = s.received_at_us; + let has_user_timestamp = s.has_user_timestamp; drop(s); if publish_us.is_some() || frame_id.is_some() { @@ -589,20 +598,26 @@ impl eframe::App for VideoApp { Some(fid) => format!("Frame ID: {}", fid), None => "Frame ID: N/A".to_string(), }; - let latency = match (publish_us, receive_us) { - (Some(pub_ts), Some(recv_ts)) => { - format!("{:.1}ms", (recv_ts - pub_ts) as f64 / 1000.0) - } - _ => "N/A".to_string(), - }; - let timestamp_overlay_text = format!( - "{}\nPublish: {}\nReceive: {}\nLatency: {}", - frame_id_line, - format_optional_timestamp_us(publish_us), - format_optional_timestamp_us(receive_us), - latency, - ); + if has_user_timestamp { + let latency = match (publish_us, receive_us) { + (Some(pub_ts), Some(recv_ts)) => { + format!("{:.1}ms", (recv_ts - pub_ts) as f64 / 1000.0) + } + _ => "N/A".to_string(), + }; + self.last_timestamp_text = format!( + "{}\nPublish: {}\nReceive: {}\nLatency: {}", + frame_id_line, + format_optional_timestamp_us(publish_us), + format_optional_timestamp_us(receive_us), + latency, + ); + } else { + self.last_timestamp_text = frame_id_line; + } + } + if !self.last_timestamp_text.is_empty() { egui::Area::new("timestamp_hud".into()) .anchor(egui::Align2::LEFT_TOP, egui::vec2(10.0, 10.0)) .interactable(false) @@ -614,7 +629,7 @@ impl eframe::App for VideoApp { .show(ui, |ui| { ui.add( egui::Label::new( - egui::RichText::new(×tamp_overlay_text) + egui::RichText::new(&self.last_timestamp_text) .color(egui::Color32::WHITE) .monospace(), ) @@ -743,6 +758,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { received_at_us: None, user_timestamp_us: None, frame_id: None, + has_user_timestamp: false, })); // Subscribe to room events: on first video track, start sink task @@ -795,6 +811,7 @@ async fn run(args: 
Args, ctrl_c_received: Arc) -> Result<()> { ctrl_c_received: ctrl_c_received.clone(), locked_aspect: None, display_timestamp: args.display_timestamp, + last_timestamp_text: String::new(), }; let native_options = eframe::NativeOptions { vsync: false, ..Default::default() }; eframe::run_native( diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index eb0c3abb3..dac8dd7f2 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -106,11 +106,9 @@ impl NativeVideoSource { }; builder.pin_mut().set_timestamp_us(capture_ts); - let (has_trailer, user_ts) = match frame.user_timestamp_us { - Some(ts) => (true, ts), - None => (false, 0), - }; + let user_ts = frame.user_timestamp_us.unwrap_or(0); let frame_id = frame.frame_id.unwrap_or(0); + let has_trailer = frame.user_timestamp_us.is_some() || frame.frame_id.is_some(); self.inner.lock().captured_frames += 1; diff --git a/livekit-api/src/services/connector.rs b/livekit-api/src/services/connector.rs index 02eec319b..b5f0818f1 100644 --- a/livekit-api/src/services/connector.rs +++ b/livekit-api/src/services/connector.rs @@ -136,6 +136,7 @@ impl ConnectorClient { participant_metadata: options.participant_metadata.unwrap_or_default(), participant_attributes: options.participant_attributes.unwrap_or_default(), destination_country: options.destination_country.unwrap_or_default(), + ringing_timeout: Default::default(), }, self.base .auth_header(VideoGrants { room_create: true, ..Default::default() }, None)?, @@ -242,6 +243,8 @@ impl ConnectorClient { participant_metadata: options.participant_metadata.unwrap_or_default(), participant_attributes: options.participant_attributes.unwrap_or_default(), destination_country: options.destination_country.unwrap_or_default(), + ringing_timeout: Default::default(), + wait_until_answered: Default::default(), }, self.base .auth_header(VideoGrants { room_create: true, ..Default::default() }, None)?, diff --git 
a/livekit-api/src/services/sip.rs b/livekit-api/src/services/sip.rs index 7c7cfc076..3a550ef1e 100644 --- a/livekit-api/src/services/sip.rs +++ b/livekit-api/src/services/sip.rs @@ -201,6 +201,8 @@ impl SIPClient { // TODO: support these attributes include_headers: Default::default(), media_encryption: Default::default(), + created_at: Default::default(), + updated_at: Default::default(), }), }, self.base.auth_header( @@ -243,6 +245,9 @@ impl SIPClient { include_headers: Default::default(), media_encryption: Default::default(), destination_country: Default::default(), + created_at: Default::default(), + from_host: Default::default(), + updated_at: Default::default(), }), }, self.base.auth_header( diff --git a/livekit-ffi/src/conversion/room.rs b/livekit-ffi/src/conversion/room.rs index dd6a0770c..ab4118a1a 100644 --- a/livekit-ffi/src/conversion/room.rs +++ b/livekit-ffi/src/conversion/room.rs @@ -257,7 +257,7 @@ impl From for TrackPublishOptions { preconnect_buffer: opts .preconnect_buffer .unwrap_or(default_publish_options.preconnect_buffer), - packet_trailer: default_publish_options.packet_trailer, + packet_trailer_features: default_publish_options.packet_trailer_features, } } } diff --git a/livekit-protocol/generate_proto.sh b/livekit-protocol/generate_proto.sh index a244ed1bc..5fa01f46b 100755 --- a/livekit-protocol/generate_proto.sh +++ b/livekit-protocol/generate_proto.sh @@ -19,8 +19,11 @@ PROTOCOL=protocol/protobufs OUT_RUST=src +PROTOBUF_INCLUDE=$(brew --prefix protobuf)/include + protoc \ -I=$PROTOCOL \ + -I=$PROTOBUF_INCLUDE \ --prost_out=$OUT_RUST \ --prost_opt=compile_well_known_types \ --prost_opt=extern_path=.google.protobuf=::pbjson_types \ diff --git a/livekit-protocol/protocol b/livekit-protocol/protocol index aec2833df..c5536cb98 160000 --- a/livekit-protocol/protocol +++ b/livekit-protocol/protocol @@ -1 +1 @@ -Subproject commit aec2833dffcbc4525735f29c96238c13c10bcf64 +Subproject commit c5536cb98c1f32d7fd2dae384478d79fb2df5978 diff --git 
a/livekit-protocol/src/livekit.rs b/livekit-protocol/src/livekit.rs index 106c5c86e..29f52480c 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -610,6 +610,8 @@ pub struct TrackInfo { pub audio_features: ::prost::alloc::vec::Vec, #[prost(enumeration="BackupCodecPolicy", tag="20")] pub backup_codec_policy: i32, + #[prost(enumeration="PacketTrailerFeature", repeated, tag="21")] + pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -946,6 +948,9 @@ pub struct RpcRequest { pub response_timeout_ms: u32, #[prost(uint32, tag="5")] pub version: u32, + /// Compressed payload data. When set, this field is used instead of `payload`. + #[prost(bytes="vec", tag="6")] + pub compressed_payload: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -958,7 +963,7 @@ pub struct RpcAck { pub struct RpcResponse { #[prost(string, tag="1")] pub request_id: ::prost::alloc::string::String, - #[prost(oneof="rpc_response::Value", tags="2, 3")] + #[prost(oneof="rpc_response::Value", tags="2, 3, 4")] pub value: ::core::option::Option, } /// Nested message and enum types in `RpcResponse`. @@ -970,6 +975,9 @@ pub mod rpc_response { Payload(::prost::alloc::string::String), #[prost(message, tag="3")] Error(super::RpcError), + /// Compressed payload data. When set, this field is used instead of `payload`. + #[prost(bytes, tag="4")] + CompressedPayload(::prost::alloc::vec::Vec), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -1887,6 +1895,8 @@ pub enum DisconnectReason { ConnectionTimeout = 14, /// media stream failure or media timeout MediaFailure = 15, + /// agent encountered an error + AgentError = 16, } impl DisconnectReason { /// String value of the enum field names used in the ProtoBuf definition. 
@@ -1911,6 +1921,7 @@ impl DisconnectReason { DisconnectReason::SipTrunkFailure => "SIP_TRUNK_FAILURE", DisconnectReason::ConnectionTimeout => "CONNECTION_TIMEOUT", DisconnectReason::MediaFailure => "MEDIA_FAILURE", + DisconnectReason::AgentError => "AGENT_ERROR", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1932,6 +1943,7 @@ impl DisconnectReason { "SIP_TRUNK_FAILURE" => Some(Self::SipTrunkFailure), "CONNECTION_TIMEOUT" => Some(Self::ConnectionTimeout), "MEDIA_FAILURE" => Some(Self::MediaFailure), + "AGENT_ERROR" => Some(Self::AgentError), _ => None, } } @@ -2042,6 +2054,32 @@ impl AudioTrackFeature { } } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PacketTrailerFeature { + PtfUserTimestamp = 0, + PtfFrameId = 1, +} +impl PacketTrailerFeature { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + PacketTrailerFeature::PtfUserTimestamp => "PTF_USER_TIMESTAMP", + PacketTrailerFeature::PtfFrameId => "PTF_FRAME_ID", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PTF_USER_TIMESTAMP" => Some(Self::PtfUserTimestamp), + "PTF_FRAME_ID" => Some(Self::PtfFrameId), + _ => None, + } + } +} /// composite using a web browser #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -2631,9 +2669,11 @@ pub struct EgressInfo { pub image_results: ::prost::alloc::vec::Vec, #[prost(string, tag="23")] pub manifest_location: ::prost::alloc::string::String, - /// next ID: 27 #[prost(bool, tag="25")] pub backup_storage_used: bool, + /// next ID: 28 + #[prost(int32, tag="27")] + pub retry_count: i32, #[prost(oneof="egress_info::Request", tags="4, 14, 19, 5, 6")] pub request: ::core::option::Option, /// deprecated (use _result fields) @@ -3356,6 +3396,8 @@ pub struct AddTrackRequest { pub backup_codec_policy: i32, #[prost(enumeration="AudioTrackFeature", repeated, tag="17")] pub audio_features: ::prost::alloc::vec::Vec, + #[prost(enumeration="PacketTrailerFeature", repeated, tag="18")] + pub packet_trailer_features: ::prost::alloc::vec::Vec, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -4243,7 +4285,7 @@ pub struct JobState { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WorkerMessage { - #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7, 8, 9")] + #[prost(oneof="worker_message::Message", tags="1, 2, 3, 4, 5, 6, 7")] pub message: ::core::option::Option, } /// Nested message and enum types in `WorkerMessage`. 
@@ -4269,17 +4311,13 @@ pub mod worker_message { SimulateJob(super::SimulateJobRequest), #[prost(message, tag="7")] MigrateJob(super::MigrateJobRequest), - #[prost(message, tag="8")] - TextResponse(super::TextMessageResponse), - #[prost(message, tag="9")] - PushText(super::PushTextRequest), } } /// from Server to Worker #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ServerMessage { - #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4, 6")] + #[prost(oneof="server_message::Message", tags="1, 2, 3, 5, 4")] pub message: ::core::option::Option, } /// Nested message and enum types in `ServerMessage`. @@ -4299,8 +4337,6 @@ pub mod server_message { Termination(super::JobTermination), #[prost(message, tag="4")] Pong(super::WorkerPong), - #[prost(message, tag="6")] - TextRequest(super::TextMessageRequest), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -4430,61 +4466,6 @@ pub struct JobTermination { #[prost(string, tag="1")] pub job_id: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct AgentSessionState { - #[prost(uint64, tag="1")] - pub version: u64, - #[prost(oneof="agent_session_state::Data", tags="2, 3")] - pub data: ::core::option::Option, -} -/// Nested message and enum types in `AgentSessionState`. 
-pub mod agent_session_state { - #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] - pub enum Data { - #[prost(bytes, tag="2")] - Snapshot(::prost::alloc::vec::Vec), - #[prost(bytes, tag="3")] - Delta(::prost::alloc::vec::Vec), - } -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TextMessageRequest { - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] - pub session_id: ::prost::alloc::string::String, - #[prost(string, tag="3")] - pub agent_name: ::prost::alloc::string::String, - #[prost(string, tag="4")] - pub metadata: ::prost::alloc::string::String, - #[prost(message, optional, tag="5")] - pub session_state: ::core::option::Option, - #[prost(string, tag="6")] - pub text: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct PushTextRequest { - /// The message_id of the TextMessageRequest that this push is for - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(string, tag="2")] - pub content: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TextMessageResponse { - /// Indicate the request is completed - #[prost(string, tag="1")] - pub message_id: ::prost::alloc::string::String, - #[prost(message, optional, tag="2")] - pub session_state: ::core::option::Option, - #[prost(string, tag="3")] - pub error: ::prost::alloc::string::String, -} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum JobType { @@ -5604,6 +5585,10 @@ pub struct SipInboundTrunkInfo { pub krisp_enabled: bool, #[prost(enumeration="SipMediaEncryption", tag="16")] pub media_encryption: i32, + #[prost(message, optional, tag="17")] + pub created_at: 
::core::option::Option<::pbjson_types::Timestamp>, + #[prost(message, optional, tag="18")] + pub updated_at: ::core::option::Option<::pbjson_types::Timestamp>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -5703,6 +5688,15 @@ pub struct SipOutboundTrunkInfo { pub include_headers: i32, #[prost(enumeration="SipMediaEncryption", tag="13")] pub media_encryption: i32, + /// Optional custom hostname for the 'From' SIP header in outbound INVITEs. + /// When set, outbound calls from this trunk will use this host instead of the default project SIP domain. + /// Enables originating calls from custom domains. + #[prost(string, tag="15")] + pub from_host: ::prost::alloc::string::String, + #[prost(message, optional, tag="16")] + pub created_at: ::core::option::Option<::pbjson_types::Timestamp>, + #[prost(message, optional, tag="17")] + pub updated_at: ::core::option::Option<::pbjson_types::Timestamp>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -5725,6 +5719,8 @@ pub struct SipOutboundTrunkUpdate { pub metadata: ::core::option::Option<::prost::alloc::string::String>, #[prost(enumeration="SipMediaEncryption", optional, tag="8")] pub media_encryption: ::core::option::Option, + #[prost(string, optional, tag="10")] + pub from_host: ::core::option::Option<::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -5827,6 +5823,9 @@ pub struct SipDispatchRuleIndividual { /// Optional pin required to enter room #[prost(string, tag="2")] pub pin: ::prost::alloc::string::String, + /// Optionally append random suffix + #[prost(bool, tag="3")] + pub no_randomness: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -5965,9 +5964,12 @@ pub struct SipDispatchRuleInfo { pub room_config: ::core::option::Option, #[prost(bool, tag="11")] pub krisp_enabled: bool, - /// 
NEXT ID: 14 #[prost(enumeration="SipMediaEncryption", tag="12")] pub media_encryption: i32, + #[prost(message, optional, tag="14")] + pub created_at: ::core::option::Option<::pbjson_types::Timestamp>, + #[prost(message, optional, tag="15")] + pub updated_at: ::core::option::Option<::pbjson_types::Timestamp>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -6037,6 +6039,9 @@ pub struct SipOutboundConfig { /// Keys are the names of attributes and values are the names of X-* headers they will be mapped to. #[prost(map="string, string", tag="6")] pub attributes_to_headers: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + /// Optional custom hostname for the 'From' SIP header. When set, outbound calls use this host instead of the default project SIP domain. + #[prost(string, tag="8")] + pub from_host: ::prost::alloc::string::String, } /// A SIP Participant is a singular SIP session connected to a LiveKit room via /// a SIP Trunk into a SIP DispatchRule @@ -6796,10 +6801,11 @@ pub struct DialWhatsAppCallRequest { #[prost(map="string, string", tag="10")] pub participant_attributes: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, /// Optional - Country where the call terminates as ISO 3166-1 alpha-2 (). This will be used by the livekit infrastructure to route calls. - /// - /// Next - 13 #[prost(string, tag="11")] pub destination_country: ::prost::alloc::string::String, + /// Max time for the callee to answer the call. 
+ #[prost(message, optional, tag="13")] + pub ringing_timeout: ::core::option::Option<::pbjson_types::Duration>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -6922,10 +6928,14 @@ pub struct AcceptWhatsAppCallRequest { #[prost(map="string, string", tag="11")] pub participant_attributes: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, /// Optional - Country where the call terminates as ISO 3166-1 alpha-2 (). This will be used by the livekit infrastructure to route calls. - /// - /// Next - 14 #[prost(string, tag="12")] pub destination_country: ::prost::alloc::string::String, + /// Max time for the callee to answer the call. + #[prost(message, optional, tag="14")] + pub ringing_timeout: ::core::option::Option<::pbjson_types::Duration>, + /// Wait for the answer for the call before returning. + #[prost(bool, tag="15")] + pub wait_until_answered: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 4d35e2a0d..8c84d300c 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -46,6 +46,12 @@ impl serde::Serialize for AcceptWhatsAppCallRequest { if !self.destination_country.is_empty() { len += 1; } + if self.ringing_timeout.is_some() { + len += 1; + } + if self.wait_until_answered { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.AcceptWhatsAppCallRequest", len)?; if !self.whatsapp_phone_number_id.is_empty() { struct_ser.serialize_field("whatsappPhoneNumberId", &self.whatsapp_phone_number_id)?; @@ -86,6 +92,12 @@ impl serde::Serialize for AcceptWhatsAppCallRequest { if !self.destination_country.is_empty() { struct_ser.serialize_field("destinationCountry", &self.destination_country)?; } + if let Some(v) = self.ringing_timeout.as_ref() { + 
struct_ser.serialize_field("ringingTimeout", v)?; + } + if self.wait_until_answered { + struct_ser.serialize_field("waitUntilAnswered", &self.wait_until_answered)?; + } struct_ser.end() } } @@ -120,6 +132,10 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { "participantAttributes", "destination_country", "destinationCountry", + "ringing_timeout", + "ringingTimeout", + "wait_until_answered", + "waitUntilAnswered", ]; #[allow(clippy::enum_variant_names)] @@ -137,6 +153,8 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { ParticipantMetadata, ParticipantAttributes, DestinationCountry, + RingingTimeout, + WaitUntilAnswered, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -172,6 +190,8 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { "participantMetadata" | "participant_metadata" => Ok(GeneratedField::ParticipantMetadata), "participantAttributes" | "participant_attributes" => Ok(GeneratedField::ParticipantAttributes), "destinationCountry" | "destination_country" => Ok(GeneratedField::DestinationCountry), + "ringingTimeout" | "ringing_timeout" => Ok(GeneratedField::RingingTimeout), + "waitUntilAnswered" | "wait_until_answered" => Ok(GeneratedField::WaitUntilAnswered), _ => Ok(GeneratedField::__SkipField__), } } @@ -204,6 +224,8 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { let mut participant_metadata__ = None; let mut participant_attributes__ = None; let mut destination_country__ = None; + let mut ringing_timeout__ = None; + let mut wait_until_answered__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::WhatsappPhoneNumberId => { @@ -286,6 +308,18 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { } destination_country__ = Some(map_.next_value()?); } + GeneratedField::RingingTimeout => { + if ringing_timeout__.is_some() { + return Err(serde::de::Error::duplicate_field("ringingTimeout")); + } + ringing_timeout__ = map_.next_value()?; + } + GeneratedField::WaitUntilAnswered => { + if wait_until_answered__.is_some() { + return Err(serde::de::Error::duplicate_field("waitUntilAnswered")); + } + wait_until_answered__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -305,6 +339,8 @@ impl<'de> serde::Deserialize<'de> for AcceptWhatsAppCallRequest { participant_metadata: participant_metadata__.unwrap_or_default(), participant_attributes: participant_attributes__.unwrap_or_default(), destination_country: destination_country__.unwrap_or_default(), + ringing_timeout: ringing_timeout__, + wait_until_answered: wait_until_answered__.unwrap_or_default(), }) } } @@ -561,6 +597,9 @@ impl serde::Serialize for AddTrackRequest { if !self.audio_features.is_empty() { len += 1; } + if !self.packet_trailer_features.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.AddTrackRequest", len)?; if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; @@ -625,6 +664,13 @@ impl serde::Serialize for AddTrackRequest { }).collect::, _>>()?; struct_ser.serialize_field("audioFeatures", &v)?; } + if !self.packet_trailer_features.is_empty() { + let v = self.packet_trailer_features.iter().cloned().map(|v| { + PacketTrailerFeature::try_from(v) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) + }).collect::, _>>()?; + struct_ser.serialize_field("packetTrailerFeatures", &v)?; + } struct_ser.end() } } @@ -657,6 +703,8 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { "backupCodecPolicy", "audio_features", "audioFeatures", + 
"packet_trailer_features", + "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -678,6 +726,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { Stream, BackupCodecPolicy, AudioFeatures, + PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -717,6 +766,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { "stream" => Ok(GeneratedField::Stream), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), + "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -753,6 +803,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { let mut stream__ = None; let mut backup_codec_policy__ = None; let mut audio_features__ = None; + let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Cid => { @@ -861,6 +912,12 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { } audio_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); } + GeneratedField::PacketTrailerFeatures => { + if packet_trailer_features__.is_some() { + return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); + } + packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -884,6 +941,7 @@ impl<'de> serde::Deserialize<'de> for AddTrackRequest { stream: stream__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), audio_features: audio_features__.unwrap_or_default(), + packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -1193,142 +1251,6 @@ impl<'de> serde::Deserialize<'de> for AgentDispatchState { deserializer.deserialize_struct("livekit.AgentDispatchState", FIELDS, 
GeneratedVisitor) } } -impl serde::Serialize for AgentSessionState { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.version != 0 { - len += 1; - } - if self.data.is_some() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.AgentSessionState", len)?; - if self.version != 0 { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; - } - if let Some(v) = self.data.as_ref() { - match v { - agent_session_state::Data::Snapshot(v) => { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("snapshot", pbjson::private::base64::encode(&v).as_str())?; - } - agent_session_state::Data::Delta(v) => { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("delta", pbjson::private::base64::encode(&v).as_str())?; - } - } - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for AgentSessionState { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "version", - "snapshot", - "delta", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - Version, - Snapshot, - Delta, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn 
visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "version" => Ok(GeneratedField::Version), - "snapshot" => Ok(GeneratedField::Snapshot), - "delta" => Ok(GeneratedField::Delta), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = AgentSessionState; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.AgentSessionState") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut version__ = None; - let mut data__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::Version => { - if version__.is_some() { - return Err(serde::de::Error::duplicate_field("version")); - } - version__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Snapshot => { - if data__.is_some() { - return Err(serde::de::Error::duplicate_field("snapshot")); - } - data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Snapshot(x.0)); - } - GeneratedField::Delta => { - if data__.is_some() { - return Err(serde::de::Error::duplicate_field("delta")); - } - data__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| agent_session_state::Data::Delta(x.0)); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(AgentSessionState { - version: version__.unwrap_or_default(), - data: data__, - }) - } - } - deserializer.deserialize_struct("livekit.AgentSessionState", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for AliOssUpload { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -9738,6 +9660,9 @@ impl 
serde::Serialize for DialWhatsAppCallRequest { if !self.destination_country.is_empty() { len += 1; } + if self.ringing_timeout.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.DialWhatsAppCallRequest", len)?; if !self.whatsapp_phone_number_id.is_empty() { struct_ser.serialize_field("whatsappPhoneNumberId", &self.whatsapp_phone_number_id)?; @@ -9775,6 +9700,9 @@ impl serde::Serialize for DialWhatsAppCallRequest { if !self.destination_country.is_empty() { struct_ser.serialize_field("destinationCountry", &self.destination_country)?; } + if let Some(v) = self.ringing_timeout.as_ref() { + struct_ser.serialize_field("ringingTimeout", v)?; + } struct_ser.end() } } @@ -9808,6 +9736,8 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { "participantAttributes", "destination_country", "destinationCountry", + "ringing_timeout", + "ringingTimeout", ]; #[allow(clippy::enum_variant_names)] @@ -9824,6 +9754,7 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { ParticipantMetadata, ParticipantAttributes, DestinationCountry, + RingingTimeout, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -9858,6 +9789,7 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { "participantMetadata" | "participant_metadata" => Ok(GeneratedField::ParticipantMetadata), "participantAttributes" | "participant_attributes" => Ok(GeneratedField::ParticipantAttributes), "destinationCountry" | "destination_country" => Ok(GeneratedField::DestinationCountry), + "ringingTimeout" | "ringing_timeout" => Ok(GeneratedField::RingingTimeout), _ => Ok(GeneratedField::__SkipField__), } } @@ -9889,6 +9821,7 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { let mut participant_metadata__ = None; let mut participant_attributes__ = None; let mut destination_country__ = None; + let mut ringing_timeout__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::WhatsappPhoneNumberId => { @@ -9965,6 +9898,12 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { } destination_country__ = Some(map_.next_value()?); } + GeneratedField::RingingTimeout => { + if ringing_timeout__.is_some() { + return Err(serde::de::Error::duplicate_field("ringingTimeout")); + } + ringing_timeout__ = map_.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -9983,6 +9922,7 @@ impl<'de> serde::Deserialize<'de> for DialWhatsAppCallRequest { participant_metadata: participant_metadata__.unwrap_or_default(), participant_attributes: participant_attributes__.unwrap_or_default(), destination_country: destination_country__.unwrap_or_default(), + ringing_timeout: ringing_timeout__, }) } } @@ -10412,6 +10352,7 @@ impl serde::Serialize for DisconnectReason { Self::SipTrunkFailure => "SIP_TRUNK_FAILURE", Self::ConnectionTimeout => "CONNECTION_TIMEOUT", Self::MediaFailure => "MEDIA_FAILURE", + Self::AgentError => "AGENT_ERROR", }; serializer.serialize_str(variant) } @@ -10439,6 +10380,7 @@ impl<'de> serde::Deserialize<'de> for DisconnectReason { "SIP_TRUNK_FAILURE", "CONNECTION_TIMEOUT", "MEDIA_FAILURE", + "AGENT_ERROR", ]; struct GeneratedVisitor; @@ -10495,6 +10437,7 @@ impl<'de> serde::Deserialize<'de> for DisconnectReason { "SIP_TRUNK_FAILURE" => Ok(DisconnectReason::SipTrunkFailure), "CONNECTION_TIMEOUT" => Ok(DisconnectReason::ConnectionTimeout), "MEDIA_FAILURE" => Ok(DisconnectReason::MediaFailure), + "AGENT_ERROR" => Ok(DisconnectReason::AgentError), _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), } } @@ -10838,6 +10781,9 @@ impl serde::Serialize for EgressInfo { if self.backup_storage_used { len += 1; } + if self.retry_count != 0 { + len += 1; + } if self.request.is_some() { len += 1; } @@ -10906,6 +10852,9 @@ impl serde::Serialize for EgressInfo { if self.backup_storage_used { struct_ser.serialize_field("backupStorageUsed", &self.backup_storage_used)?; } + if 
self.retry_count != 0 { + struct_ser.serialize_field("retryCount", &self.retry_count)?; + } if let Some(v) = self.request.as_ref() { match v { egress_info::Request::RoomComposite(v) => { @@ -10979,6 +10928,8 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { "manifestLocation", "backup_storage_used", "backupStorageUsed", + "retry_count", + "retryCount", "room_composite", "roomComposite", "web", @@ -11010,6 +10961,7 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { ImageResults, ManifestLocation, BackupStorageUsed, + RetryCount, RoomComposite, Web, Participant, @@ -11057,6 +11009,7 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { "imageResults" | "image_results" => Ok(GeneratedField::ImageResults), "manifestLocation" | "manifest_location" => Ok(GeneratedField::ManifestLocation), "backupStorageUsed" | "backup_storage_used" => Ok(GeneratedField::BackupStorageUsed), + "retryCount" | "retry_count" => Ok(GeneratedField::RetryCount), "roomComposite" | "room_composite" => Ok(GeneratedField::RoomComposite), "web" => Ok(GeneratedField::Web), "participant" => Ok(GeneratedField::Participant), @@ -11101,6 +11054,7 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { let mut image_results__ = None; let mut manifest_location__ = None; let mut backup_storage_used__ = None; + let mut retry_count__ = None; let mut request__ = None; let mut result__ = None; while let Some(k) = map_.next_key()? 
{ @@ -11215,6 +11169,14 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { } backup_storage_used__ = Some(map_.next_value()?); } + GeneratedField::RetryCount => { + if retry_count__.is_some() { + return Err(serde::de::Error::duplicate_field("retryCount")); + } + retry_count__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } GeneratedField::RoomComposite => { if request__.is_some() { return Err(serde::de::Error::duplicate_field("roomComposite")); @@ -11294,6 +11256,7 @@ impl<'de> serde::Deserialize<'de> for EgressInfo { image_results: image_results__.unwrap_or_default(), manifest_location: manifest_location__.unwrap_or_default(), backup_storage_used: backup_storage_used__.unwrap_or_default(), + retry_count: retry_count__.unwrap_or_default(), request: request__, result: result__, }) @@ -21625,6 +21588,77 @@ impl<'de> serde::Deserialize<'de> for MuteTrackRequest { deserializer.deserialize_struct("livekit.MuteTrackRequest", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for PacketTrailerFeature { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::PtfUserTimestamp => "PTF_USER_TIMESTAMP", + Self::PtfFrameId => "PTF_FRAME_ID", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for PacketTrailerFeature { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "PTF_USER_TIMESTAMP", + "PTF_FRAME_ID", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PacketTrailerFeature; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| 
x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "PTF_USER_TIMESTAMP" => Ok(PacketTrailerFeature::PtfUserTimestamp), + "PTF_FRAME_ID" => Ok(PacketTrailerFeature::PtfFrameId), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} impl serde::Serialize for Pagination { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -24339,119 +24373,6 @@ impl<'de> serde::Deserialize<'de> for PublishDataTrackResponse { deserializer.deserialize_struct("livekit.PublishDataTrackResponse", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for PushTextRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 1; - } - if !self.content.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.PushTextRequest", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if !self.content.is_empty() { - struct_ser.serialize_field("content", &self.content)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for PushTextRequest { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "content", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - 
MessageId, - Content, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "content" => Ok(GeneratedField::Content), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = PushTextRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.PushTextRequest") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut content__ = None; - while let Some(k) = map_.next_key()? 
{ - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::Content => { - if content__.is_some() { - return Err(serde::de::Error::duplicate_field("content")); - } - content__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(PushTextRequest { - message_id: message_id__.unwrap_or_default(), - content: content__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.PushTextRequest", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for RtcpSenderReportState { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -29436,6 +29357,9 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { len += 1; } + if !self.compressed_payload.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.RpcRequest", len)?; if !self.id.is_empty() { struct_ser.serialize_field("id", &self.id)?; @@ -29452,6 +29376,11 @@ impl serde::Serialize for RpcRequest { if self.version != 0 { struct_ser.serialize_field("version", &self.version)?; } + if !self.compressed_payload.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&self.compressed_payload).as_str())?; + } struct_ser.end() } } @@ -29468,6 +29397,8 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { "response_timeout_ms", "responseTimeoutMs", "version", + "compressed_payload", + "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29477,6 +29408,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Payload, ResponseTimeoutMs, Version, + CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29504,6 +29436,7 @@ impl<'de> serde::Deserialize<'de> for 
RpcRequest { "payload" => Ok(GeneratedField::Payload), "responseTimeoutMs" | "response_timeout_ms" => Ok(GeneratedField::ResponseTimeoutMs), "version" => Ok(GeneratedField::Version), + "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29528,6 +29461,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { let mut payload__ = None; let mut response_timeout_ms__ = None; let mut version__ = None; + let mut compressed_payload__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Id => { @@ -29564,6 +29498,14 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::CompressedPayload => { + if compressed_payload__.is_some() { + return Err(serde::de::Error::duplicate_field("compressedPayload")); + } + compressed_payload__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -29575,6 +29517,7 @@ impl<'de> serde::Deserialize<'de> for RpcRequest { payload: payload__.unwrap_or_default(), response_timeout_ms: response_timeout_ms__.unwrap_or_default(), version: version__.unwrap_or_default(), + compressed_payload: compressed_payload__.unwrap_or_default(), }) } } @@ -29607,6 +29550,11 @@ impl serde::Serialize for RpcResponse { rpc_response::Value::Error(v) => { struct_ser.serialize_field("error", v)?; } + rpc_response::Value::CompressedPayload(v) => { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("compressedPayload", pbjson::private::base64::encode(&v).as_str())?; + } } } struct_ser.end() @@ -29623,6 +29571,8 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId", "payload", "error", + "compressed_payload", + "compressedPayload", ]; #[allow(clippy::enum_variant_names)] @@ -29630,6 +29580,7 @@ impl<'de> 
serde::Deserialize<'de> for RpcResponse { RequestId, Payload, Error, + CompressedPayload, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -29655,6 +29606,7 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { "requestId" | "request_id" => Ok(GeneratedField::RequestId), "payload" => Ok(GeneratedField::Payload), "error" => Ok(GeneratedField::Error), + "compressedPayload" | "compressed_payload" => Ok(GeneratedField::CompressedPayload), _ => Ok(GeneratedField::__SkipField__), } } @@ -29697,6 +29649,12 @@ impl<'de> serde::Deserialize<'de> for RpcResponse { value__ = map_.next_value::<::std::option::Option<_>>()?.map(rpc_response::Value::Error) ; } + GeneratedField::CompressedPayload => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("compressedPayload")); + } + value__ = map_.next_value::<::std::option::Option<::pbjson::private::BytesDeserialize<_>>>()?.map(|x| rpc_response::Value::CompressedPayload(x.0)); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -31174,6 +31132,9 @@ impl serde::Serialize for SipDispatchRuleIndividual { if !self.pin.is_empty() { len += 1; } + if self.no_randomness { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPDispatchRuleIndividual", len)?; if !self.room_prefix.is_empty() { struct_ser.serialize_field("roomPrefix", &self.room_prefix)?; @@ -31181,6 +31142,9 @@ impl serde::Serialize for SipDispatchRuleIndividual { if !self.pin.is_empty() { struct_ser.serialize_field("pin", &self.pin)?; } + if self.no_randomness { + struct_ser.serialize_field("noRandomness", &self.no_randomness)?; + } struct_ser.end() } } @@ -31194,12 +31158,15 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleIndividual { "room_prefix", "roomPrefix", "pin", + "no_randomness", + "noRandomness", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { RoomPrefix, Pin, + NoRandomness, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ 
-31224,6 +31191,7 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleIndividual { match value { "roomPrefix" | "room_prefix" => Ok(GeneratedField::RoomPrefix), "pin" => Ok(GeneratedField::Pin), + "noRandomness" | "no_randomness" => Ok(GeneratedField::NoRandomness), _ => Ok(GeneratedField::__SkipField__), } } @@ -31245,6 +31213,7 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleIndividual { { let mut room_prefix__ = None; let mut pin__ = None; + let mut no_randomness__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::RoomPrefix => { @@ -31259,6 +31228,12 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleIndividual { } pin__ = Some(map_.next_value()?); } + GeneratedField::NoRandomness => { + if no_randomness__.is_some() { + return Err(serde::de::Error::duplicate_field("noRandomness")); + } + no_randomness__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -31267,6 +31242,7 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleIndividual { Ok(SipDispatchRuleIndividual { room_prefix: room_prefix__.unwrap_or_default(), pin: pin__.unwrap_or_default(), + no_randomness: no_randomness__.unwrap_or_default(), }) } } @@ -31320,6 +31296,12 @@ impl serde::Serialize for SipDispatchRuleInfo { if self.media_encryption != 0 { len += 1; } + if self.created_at.is_some() { + len += 1; + } + if self.updated_at.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPDispatchRuleInfo", len)?; if !self.sip_dispatch_rule_id.is_empty() { struct_ser.serialize_field("sipDispatchRuleId", &self.sip_dispatch_rule_id)?; @@ -31362,6 +31344,12 @@ impl serde::Serialize for SipDispatchRuleInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.media_encryption)))?; struct_ser.serialize_field("mediaEncryption", &v)?; } + if let Some(v) = self.created_at.as_ref() { + struct_ser.serialize_field("createdAt", v)?; + } + if let Some(v) = 
self.updated_at.as_ref() { + struct_ser.serialize_field("updatedAt", v)?; + } struct_ser.end() } } @@ -31393,6 +31381,10 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { "krispEnabled", "media_encryption", "mediaEncryption", + "created_at", + "createdAt", + "updated_at", + "updatedAt", ]; #[allow(clippy::enum_variant_names)] @@ -31410,6 +31402,8 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { RoomConfig, KrispEnabled, MediaEncryption, + CreatedAt, + UpdatedAt, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -31445,6 +31439,8 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { "roomConfig" | "room_config" => Ok(GeneratedField::RoomConfig), "krispEnabled" | "krisp_enabled" => Ok(GeneratedField::KrispEnabled), "mediaEncryption" | "media_encryption" => Ok(GeneratedField::MediaEncryption), + "createdAt" | "created_at" => Ok(GeneratedField::CreatedAt), + "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), _ => Ok(GeneratedField::__SkipField__), } } @@ -31477,6 +31473,8 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { let mut room_config__ = None; let mut krisp_enabled__ = None; let mut media_encryption__ = None; + let mut created_at__ = None; + let mut updated_at__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::SipDispatchRuleId => { @@ -31559,6 +31557,18 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { } media_encryption__ = Some(map_.next_value::()? 
as i32); } + GeneratedField::CreatedAt => { + if created_at__.is_some() { + return Err(serde::de::Error::duplicate_field("createdAt")); + } + created_at__ = map_.next_value()?; + } + GeneratedField::UpdatedAt => { + if updated_at__.is_some() { + return Err(serde::de::Error::duplicate_field("updatedAt")); + } + updated_at__ = map_.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -31578,6 +31588,8 @@ impl<'de> serde::Deserialize<'de> for SipDispatchRuleInfo { room_config: room_config__, krisp_enabled: krisp_enabled__.unwrap_or_default(), media_encryption: media_encryption__.unwrap_or_default(), + created_at: created_at__, + updated_at: updated_at__, }) } } @@ -31971,6 +31983,12 @@ impl serde::Serialize for SipInboundTrunkInfo { if self.media_encryption != 0 { len += 1; } + if self.created_at.is_some() { + len += 1; + } + if self.updated_at.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPInboundTrunkInfo", len)?; if !self.sip_trunk_id.is_empty() { struct_ser.serialize_field("sipTrunkId", &self.sip_trunk_id)?; @@ -32024,6 +32042,12 @@ impl serde::Serialize for SipInboundTrunkInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.media_encryption)))?; struct_ser.serialize_field("mediaEncryption", &v)?; } + if let Some(v) = self.created_at.as_ref() { + struct_ser.serialize_field("createdAt", v)?; + } + if let Some(v) = self.updated_at.as_ref() { + struct_ser.serialize_field("updatedAt", v)?; + } struct_ser.end() } } @@ -32062,6 +32086,10 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { "krispEnabled", "media_encryption", "mediaEncryption", + "created_at", + "createdAt", + "updated_at", + "updatedAt", ]; #[allow(clippy::enum_variant_names)] @@ -32082,6 +32110,8 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { MaxCallDuration, KrispEnabled, MediaEncryption, + CreatedAt, + UpdatedAt, __SkipField__, } impl<'de> serde::Deserialize<'de> for 
GeneratedField { @@ -32120,6 +32150,8 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { "maxCallDuration" | "max_call_duration" => Ok(GeneratedField::MaxCallDuration), "krispEnabled" | "krisp_enabled" => Ok(GeneratedField::KrispEnabled), "mediaEncryption" | "media_encryption" => Ok(GeneratedField::MediaEncryption), + "createdAt" | "created_at" => Ok(GeneratedField::CreatedAt), + "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), _ => Ok(GeneratedField::__SkipField__), } } @@ -32155,6 +32187,8 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { let mut max_call_duration__ = None; let mut krisp_enabled__ = None; let mut media_encryption__ = None; + let mut created_at__ = None; + let mut updated_at__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::SipTrunkId => { @@ -32259,6 +32293,18 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { } media_encryption__ = Some(map_.next_value::()? as i32); } + GeneratedField::CreatedAt => { + if created_at__.is_some() { + return Err(serde::de::Error::duplicate_field("createdAt")); + } + created_at__ = map_.next_value()?; + } + GeneratedField::UpdatedAt => { + if updated_at__.is_some() { + return Err(serde::de::Error::duplicate_field("updatedAt")); + } + updated_at__ = map_.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32281,6 +32327,8 @@ impl<'de> serde::Deserialize<'de> for SipInboundTrunkInfo { max_call_duration: max_call_duration__, krisp_enabled: krisp_enabled__.unwrap_or_default(), media_encryption: media_encryption__.unwrap_or_default(), + created_at: created_at__, + updated_at: updated_at__, }) } } @@ -32611,6 +32659,9 @@ impl serde::Serialize for SipOutboundConfig { if !self.attributes_to_headers.is_empty() { len += 1; } + if !self.from_host.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPOutboundConfig", len)?; if !self.hostname.is_empty() { 
struct_ser.serialize_field("hostname", &self.hostname)?; @@ -32635,6 +32686,9 @@ impl serde::Serialize for SipOutboundConfig { if !self.attributes_to_headers.is_empty() { struct_ser.serialize_field("attributesToHeaders", &self.attributes_to_headers)?; } + if !self.from_host.is_empty() { + struct_ser.serialize_field("fromHost", &self.from_host)?; + } struct_ser.end() } } @@ -32657,6 +32711,8 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { "headersToAttributes", "attributes_to_headers", "attributesToHeaders", + "from_host", + "fromHost", ]; #[allow(clippy::enum_variant_names)] @@ -32668,6 +32724,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { AuthPassword, HeadersToAttributes, AttributesToHeaders, + FromHost, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32697,6 +32754,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { "authPassword" | "auth_password" => Ok(GeneratedField::AuthPassword), "headersToAttributes" | "headers_to_attributes" => Ok(GeneratedField::HeadersToAttributes), "attributesToHeaders" | "attributes_to_headers" => Ok(GeneratedField::AttributesToHeaders), + "fromHost" | "from_host" => Ok(GeneratedField::FromHost), _ => Ok(GeneratedField::__SkipField__), } } @@ -32723,6 +32781,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { let mut auth_password__ = None; let mut headers_to_attributes__ = None; let mut attributes_to_headers__ = None; + let mut from_host__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Hostname => { @@ -32771,6 +32830,12 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { map_.next_value::>()? 
); } + GeneratedField::FromHost => { + if from_host__.is_some() { + return Err(serde::de::Error::duplicate_field("fromHost")); + } + from_host__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32784,6 +32849,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundConfig { auth_password: auth_password__.unwrap_or_default(), headers_to_attributes: headers_to_attributes__.unwrap_or_default(), attributes_to_headers: attributes_to_headers__.unwrap_or_default(), + from_host: from_host__.unwrap_or_default(), }) } } @@ -32840,6 +32906,15 @@ impl serde::Serialize for SipOutboundTrunkInfo { if self.media_encryption != 0 { len += 1; } + if !self.from_host.is_empty() { + len += 1; + } + if self.created_at.is_some() { + len += 1; + } + if self.updated_at.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPOutboundTrunkInfo", len)?; if !self.sip_trunk_id.is_empty() { struct_ser.serialize_field("sipTrunkId", &self.sip_trunk_id)?; @@ -32889,6 +32964,15 @@ impl serde::Serialize for SipOutboundTrunkInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.media_encryption)))?; struct_ser.serialize_field("mediaEncryption", &v)?; } + if !self.from_host.is_empty() { + struct_ser.serialize_field("fromHost", &self.from_host)?; + } + if let Some(v) = self.created_at.as_ref() { + struct_ser.serialize_field("createdAt", v)?; + } + if let Some(v) = self.updated_at.as_ref() { + struct_ser.serialize_field("updatedAt", v)?; + } struct_ser.end() } } @@ -32921,6 +33005,12 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { "includeHeaders", "media_encryption", "mediaEncryption", + "from_host", + "fromHost", + "created_at", + "createdAt", + "updated_at", + "updatedAt", ]; #[allow(clippy::enum_variant_names)] @@ -32939,6 +33029,9 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { AttributesToHeaders, IncludeHeaders, MediaEncryption, + FromHost, + CreatedAt, + 
UpdatedAt, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32975,6 +33068,9 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { "attributesToHeaders" | "attributes_to_headers" => Ok(GeneratedField::AttributesToHeaders), "includeHeaders" | "include_headers" => Ok(GeneratedField::IncludeHeaders), "mediaEncryption" | "media_encryption" => Ok(GeneratedField::MediaEncryption), + "fromHost" | "from_host" => Ok(GeneratedField::FromHost), + "createdAt" | "created_at" => Ok(GeneratedField::CreatedAt), + "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), _ => Ok(GeneratedField::__SkipField__), } } @@ -33008,6 +33104,9 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { let mut attributes_to_headers__ = None; let mut include_headers__ = None; let mut media_encryption__ = None; + let mut from_host__ = None; + let mut created_at__ = None; + let mut updated_at__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::SipTrunkId => { @@ -33100,6 +33199,24 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { } media_encryption__ = Some(map_.next_value::()? 
as i32); } + GeneratedField::FromHost => { + if from_host__.is_some() { + return Err(serde::de::Error::duplicate_field("fromHost")); + } + from_host__ = Some(map_.next_value()?); + } + GeneratedField::CreatedAt => { + if created_at__.is_some() { + return Err(serde::de::Error::duplicate_field("createdAt")); + } + created_at__ = map_.next_value()?; + } + GeneratedField::UpdatedAt => { + if updated_at__.is_some() { + return Err(serde::de::Error::duplicate_field("updatedAt")); + } + updated_at__ = map_.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -33120,6 +33237,9 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkInfo { attributes_to_headers: attributes_to_headers__.unwrap_or_default(), include_headers: include_headers__.unwrap_or_default(), media_encryption: media_encryption__.unwrap_or_default(), + from_host: from_host__.unwrap_or_default(), + created_at: created_at__, + updated_at: updated_at__, }) } } @@ -33161,6 +33281,9 @@ impl serde::Serialize for SipOutboundTrunkUpdate { if self.media_encryption.is_some() { len += 1; } + if self.from_host.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SIPOutboundTrunkUpdate", len)?; if let Some(v) = self.address.as_ref() { struct_ser.serialize_field("address", v)?; @@ -33193,6 +33316,9 @@ impl serde::Serialize for SipOutboundTrunkUpdate { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", *v)))?; struct_ser.serialize_field("mediaEncryption", &v)?; } + if let Some(v) = self.from_host.as_ref() { + struct_ser.serialize_field("fromHost", v)?; + } struct_ser.end() } } @@ -33216,6 +33342,8 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { "metadata", "media_encryption", "mediaEncryption", + "from_host", + "fromHost", ]; #[allow(clippy::enum_variant_names)] @@ -33229,6 +33357,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { Name, Metadata, MediaEncryption, + FromHost, __SkipField__, } impl<'de> 
serde::Deserialize<'de> for GeneratedField { @@ -33260,6 +33389,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { "name" => Ok(GeneratedField::Name), "metadata" => Ok(GeneratedField::Metadata), "mediaEncryption" | "media_encryption" => Ok(GeneratedField::MediaEncryption), + "fromHost" | "from_host" => Ok(GeneratedField::FromHost), _ => Ok(GeneratedField::__SkipField__), } } @@ -33288,6 +33418,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { let mut name__ = None; let mut metadata__ = None; let mut media_encryption__ = None; + let mut from_host__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Address => { @@ -33344,6 +33475,12 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { } media_encryption__ = map_.next_value::<::std::option::Option>()?.map(|x| x as i32); } + GeneratedField::FromHost => { + if from_host__.is_some() { + return Err(serde::de::Error::duplicate_field("fromHost")); + } + from_host__ = map_.next_value()?; + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -33359,6 +33496,7 @@ impl<'de> serde::Deserialize<'de> for SipOutboundTrunkUpdate { name: name__, metadata: metadata__, media_encryption: media_encryption__, + from_host: from_host__, }) } } @@ -36132,9 +36270,6 @@ impl serde::Serialize for ServerMessage { server_message::Message::Pong(v) => { struct_ser.serialize_field("pong", v)?; } - server_message::Message::TextRequest(v) => { - struct_ser.serialize_field("textRequest", v)?; - } } } struct_ser.end() @@ -36152,8 +36287,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { "assignment", "termination", "pong", - "text_request", - "textRequest", ]; #[allow(clippy::enum_variant_names)] @@ -36163,7 +36296,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { Assignment, Termination, Pong, - TextRequest, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -36191,7 +36323,6 @@ impl<'de> serde::Deserialize<'de> for 
ServerMessage { "assignment" => Ok(GeneratedField::Assignment), "termination" => Ok(GeneratedField::Termination), "pong" => Ok(GeneratedField::Pong), - "textRequest" | "text_request" => Ok(GeneratedField::TextRequest), _ => Ok(GeneratedField::__SkipField__), } } @@ -36247,13 +36378,6 @@ impl<'de> serde::Deserialize<'de> for ServerMessage { return Err(serde::de::Error::duplicate_field("pong")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::Pong) -; - } - GeneratedField::TextRequest => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("textRequest")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(server_message::Message::TextRequest) ; } GeneratedField::__SkipField__ => { @@ -40560,321 +40684,6 @@ impl<'de> serde::Deserialize<'de> for SyncState { deserializer.deserialize_struct("livekit.SyncState", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for TextMessageRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 1; - } - if !self.session_id.is_empty() { - len += 1; - } - if !self.agent_name.is_empty() { - len += 1; - } - if !self.metadata.is_empty() { - len += 1; - } - if self.session_state.is_some() { - len += 1; - } - if !self.text.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.TextMessageRequest", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if !self.session_id.is_empty() { - struct_ser.serialize_field("sessionId", &self.session_id)?; - } - if !self.agent_name.is_empty() { - struct_ser.serialize_field("agentName", &self.agent_name)?; - } - if !self.metadata.is_empty() { - struct_ser.serialize_field("metadata", &self.metadata)?; - } - if let Some(v) = self.session_state.as_ref() { - 
struct_ser.serialize_field("sessionState", v)?; - } - if !self.text.is_empty() { - struct_ser.serialize_field("text", &self.text)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for TextMessageRequest { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "session_id", - "sessionId", - "agent_name", - "agentName", - "metadata", - "session_state", - "sessionState", - "text", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - MessageId, - SessionId, - AgentName, - Metadata, - SessionState, - Text, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "sessionId" | "session_id" => Ok(GeneratedField::SessionId), - "agentName" | "agent_name" => Ok(GeneratedField::AgentName), - "metadata" => Ok(GeneratedField::Metadata), - "sessionState" | "session_state" => Ok(GeneratedField::SessionState), - "text" => Ok(GeneratedField::Text), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TextMessageRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TextMessageRequest") - } - - fn 
visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut session_id__ = None; - let mut agent_name__ = None; - let mut metadata__ = None; - let mut session_state__ = None; - let mut text__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::SessionId => { - if session_id__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionId")); - } - session_id__ = Some(map_.next_value()?); - } - GeneratedField::AgentName => { - if agent_name__.is_some() { - return Err(serde::de::Error::duplicate_field("agentName")); - } - agent_name__ = Some(map_.next_value()?); - } - GeneratedField::Metadata => { - if metadata__.is_some() { - return Err(serde::de::Error::duplicate_field("metadata")); - } - metadata__ = Some(map_.next_value()?); - } - GeneratedField::SessionState => { - if session_state__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionState")); - } - session_state__ = map_.next_value()?; - } - GeneratedField::Text => { - if text__.is_some() { - return Err(serde::de::Error::duplicate_field("text")); - } - text__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(TextMessageRequest { - message_id: message_id__.unwrap_or_default(), - session_id: session_id__.unwrap_or_default(), - agent_name: agent_name__.unwrap_or_default(), - metadata: metadata__.unwrap_or_default(), - session_state: session_state__, - text: text__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.TextMessageRequest", FIELDS, GeneratedVisitor) - } -} -impl serde::Serialize for TextMessageResponse { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: 
serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.message_id.is_empty() { - len += 1; - } - if self.session_state.is_some() { - len += 1; - } - if !self.error.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.TextMessageResponse", len)?; - if !self.message_id.is_empty() { - struct_ser.serialize_field("messageId", &self.message_id)?; - } - if let Some(v) = self.session_state.as_ref() { - struct_ser.serialize_field("sessionState", v)?; - } - if !self.error.is_empty() { - struct_ser.serialize_field("error", &self.error)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for TextMessageResponse { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "message_id", - "messageId", - "session_state", - "sessionState", - "error", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - MessageId, - SessionState, - Error, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "messageId" | "message_id" => Ok(GeneratedField::MessageId), - "sessionState" | "session_state" => Ok(GeneratedField::SessionState), - "error" => Ok(GeneratedField::Error), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type 
Value = TextMessageResponse; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TextMessageResponse") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut message_id__ = None; - let mut session_state__ = None; - let mut error__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::MessageId => { - if message_id__.is_some() { - return Err(serde::de::Error::duplicate_field("messageId")); - } - message_id__ = Some(map_.next_value()?); - } - GeneratedField::SessionState => { - if session_state__.is_some() { - return Err(serde::de::Error::duplicate_field("sessionState")); - } - session_state__ = map_.next_value()?; - } - GeneratedField::Error => { - if error__.is_some() { - return Err(serde::de::Error::duplicate_field("error")); - } - error__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(TextMessageResponse { - message_id: message_id__.unwrap_or_default(), - session_state: session_state__, - error: error__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.TextMessageResponse", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for TimeSeriesMetric { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -41801,6 +41610,9 @@ impl serde::Serialize for TrackInfo { if self.backup_codec_policy != 0 { len += 1; } + if !self.packet_trailer_features.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.TrackInfo", len)?; if !self.sid.is_empty() { struct_ser.serialize_field("sid", &self.sid)?; @@ -41874,6 +41686,13 @@ impl serde::Serialize for TrackInfo { .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.backup_codec_policy)))?; struct_ser.serialize_field("backupCodecPolicy", &v)?; } + if !self.packet_trailer_features.is_empty() { + 
let v = self.packet_trailer_features.iter().cloned().map(|v| { + PacketTrailerFeature::try_from(v) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", v))) + }).collect::, _>>()?; + struct_ser.serialize_field("packetTrailerFeatures", &v)?; + } struct_ser.end() } } @@ -41909,6 +41728,8 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "audioFeatures", "backup_codec_policy", "backupCodecPolicy", + "packet_trailer_features", + "packetTrailerFeatures", ]; #[allow(clippy::enum_variant_names)] @@ -41933,6 +41754,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { Version, AudioFeatures, BackupCodecPolicy, + PacketTrailerFeatures, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -41975,6 +41797,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { "version" => Ok(GeneratedField::Version), "audioFeatures" | "audio_features" => Ok(GeneratedField::AudioFeatures), "backupCodecPolicy" | "backup_codec_policy" => Ok(GeneratedField::BackupCodecPolicy), + "packetTrailerFeatures" | "packet_trailer_features" => Ok(GeneratedField::PacketTrailerFeatures), _ => Ok(GeneratedField::__SkipField__), } } @@ -42014,6 +41837,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { let mut version__ = None; let mut audio_features__ = None; let mut backup_codec_policy__ = None; + let mut packet_trailer_features__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Sid => { @@ -42140,6 +41964,12 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { } backup_codec_policy__ = Some(map_.next_value::()? 
as i32); } + GeneratedField::PacketTrailerFeatures => { + if packet_trailer_features__.is_some() { + return Err(serde::de::Error::duplicate_field("packetTrailerFeatures")); + } + packet_trailer_features__ = Some(map_.next_value::>()?.into_iter().map(|x| x as i32).collect()); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -42166,6 +41996,7 @@ impl<'de> serde::Deserialize<'de> for TrackInfo { version: version__, audio_features: audio_features__.unwrap_or_default(), backup_codec_policy: backup_codec_policy__.unwrap_or_default(), + packet_trailer_features: packet_trailer_features__.unwrap_or_default(), }) } } @@ -48357,12 +48188,6 @@ impl serde::Serialize for WorkerMessage { worker_message::Message::MigrateJob(v) => { struct_ser.serialize_field("migrateJob", v)?; } - worker_message::Message::TextResponse(v) => { - struct_ser.serialize_field("textResponse", v)?; - } - worker_message::Message::PushText(v) => { - struct_ser.serialize_field("pushText", v)?; - } } } struct_ser.end() @@ -48386,10 +48211,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "simulateJob", "migrate_job", "migrateJob", - "text_response", - "textResponse", - "push_text", - "pushText", ]; #[allow(clippy::enum_variant_names)] @@ -48401,8 +48222,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { Ping, SimulateJob, MigrateJob, - TextResponse, - PushText, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -48432,8 +48251,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { "ping" => Ok(GeneratedField::Ping), "simulateJob" | "simulate_job" => Ok(GeneratedField::SimulateJob), "migrateJob" | "migrate_job" => Ok(GeneratedField::MigrateJob), - "textResponse" | "text_response" => Ok(GeneratedField::TextResponse), - "pushText" | "push_text" => Ok(GeneratedField::PushText), _ => Ok(GeneratedField::__SkipField__), } } @@ -48503,20 +48320,6 @@ impl<'de> serde::Deserialize<'de> for WorkerMessage { return 
Err(serde::de::Error::duplicate_field("migrateJob")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::MigrateJob) -; - } - GeneratedField::TextResponse => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("textResponse")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::TextResponse) -; - } - GeneratedField::PushText => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("pushText")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(worker_message::Message::PushText) ; } GeneratedField::__SkipField__ => { diff --git a/livekit/src/proto.rs b/livekit/src/proto.rs index 3c77b5754..68bf283df 100644 --- a/livekit/src/proto.rs +++ b/livekit/src/proto.rs @@ -49,6 +49,7 @@ impl From for participant::DisconnectReason { DisconnectReason::SipTrunkFailure => Self::SipTrunkFailure, DisconnectReason::ConnectionTimeout => Self::ConnectionTimeout, DisconnectReason::MediaFailure => Self::MediaFailure, + DisconnectReason::AgentError => Self::AgentError, } } } diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index 1833ac326..f49a18116 100644 --- a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -108,7 +108,11 @@ impl E2eeManager { let has_packet_trailer = publication .proto_info() .packet_trailer_features - .contains(&(PacketTrailerFeature::PtfUserTimestamp as i32)); + .iter() + .any(|f| { + *f == PacketTrailerFeature::PtfUserTimestamp as i32 + || *f == PacketTrailerFeature::PtfFrameId as i32 + }); if let RemoteTrack::Video(video_track) = &track { let handler = diff --git a/livekit/src/room/options.rs b/livekit/src/room/options.rs index 618bed7cf..b043b7f52 100644 --- a/livekit/src/room/options.rs +++ b/livekit/src/room/options.rs @@ -14,6 +14,7 @@ use libwebrtc::prelude::*; use livekit_protocol as proto; +use proto::PacketTrailerFeature; use crate::prelude::*; @@ -91,7 
+92,7 @@ pub struct TrackPublishOptions { pub source: TrackSource, pub stream: String, pub preconnect_buffer: bool, - pub packet_trailer: bool, + pub packet_trailer_features: Vec, } impl Default for TrackPublishOptions { @@ -107,7 +108,7 @@ impl Default for TrackPublishOptions { source: TrackSource::Unknown, stream: "".to_string(), preconnect_buffer: false, - packet_trailer: false, + packet_trailer_features: Vec::new(), } } } diff --git a/livekit/src/room/participant/local_participant.rs b/livekit/src/room/participant/local_participant.rs index e036e2844..0098a8858 100644 --- a/livekit/src/room/participant/local_participant.rs +++ b/livekit/src/room/participant/local_participant.rs @@ -278,9 +278,8 @@ impl LocalParticipant { req.audio_features.push(proto::AudioTrackFeature::TfPreconnectBuffer as i32); } - if options.packet_trailer { - req.packet_trailer_features.push(proto::PacketTrailerFeature::PtfUserTimestamp as i32); - } + req.packet_trailer_features = + options.packet_trailer_features.iter().map(|f| *f as i32).collect(); let mut encodings = Vec::default(); match &track { @@ -327,7 +326,7 @@ impl LocalParticipant { track.set_transceiver(Some(transceiver)); - if options.packet_trailer { + if !options.packet_trailer_features.is_empty() { if let LocalTrack::Video(video_track) = &track { log::info!("packet_trailer enabled for local video track {}", publication.sid(),); let sender = track.transceiver().unwrap().sender(); diff --git a/livekit/src/room/participant/mod.rs b/livekit/src/room/participant/mod.rs index 9c660dc3a..56acd02e3 100644 --- a/livekit/src/room/participant/mod.rs +++ b/livekit/src/room/participant/mod.rs @@ -75,6 +75,7 @@ pub enum DisconnectReason { SipTrunkFailure, ConnectionTimeout, MediaFailure, + AgentError, } #[derive(Debug, Clone)] From aa99c371c725bf28e87db695ed4ced68137f327c Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 23 Mar 2026 14:15:53 -0700 Subject: [PATCH 39/52] revert local changes --- livekit-protocol/generate_proto.sh | 3 
--- 1 file changed, 3 deletions(-) diff --git a/livekit-protocol/generate_proto.sh b/livekit-protocol/generate_proto.sh index 5fa01f46b..a244ed1bc 100755 --- a/livekit-protocol/generate_proto.sh +++ b/livekit-protocol/generate_proto.sh @@ -19,11 +19,8 @@ PROTOCOL=protocol/protobufs OUT_RUST=src -PROTOBUF_INCLUDE=$(brew --prefix protobuf)/include - protoc \ -I=$PROTOCOL \ - -I=$PROTOBUF_INCLUDE \ --prost_out=$OUT_RUST \ --prost_opt=compile_well_known_types \ --prost_opt=extern_path=.google.protobuf=::pbjson_types \ From c3f44e8c821d0e93a0eaa93e6b5fff394eaafd8e Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 23 Mar 2026 15:41:49 -0700 Subject: [PATCH 40/52] add frame ID to ffi --- livekit-ffi/protocol/track.proto | 1 + livekit-ffi/src/conversion/track.rs | 3 +++ 2 files changed, 4 insertions(+) diff --git a/livekit-ffi/protocol/track.proto b/livekit-ffi/protocol/track.proto index 8d087cd1a..17ced5ec3 100644 --- a/livekit-ffi/protocol/track.proto +++ b/livekit-ffi/protocol/track.proto @@ -163,4 +163,5 @@ enum AudioTrackFeature { enum PacketTrailerFeature { PTF_USER_TIMESTAMP = 0; + PTF_FRAME_ID = 1; } diff --git a/livekit-ffi/src/conversion/track.rs b/livekit-ffi/src/conversion/track.rs index 9d418a75b..653af6c36 100644 --- a/livekit-ffi/src/conversion/track.rs +++ b/livekit-ffi/src/conversion/track.rs @@ -170,6 +170,9 @@ impl From for proto::PacketTrailerFeatur livekit_protocol::PacketTrailerFeature::PtfUserTimestamp => { proto::PacketTrailerFeature::PtfUserTimestamp } + livekit_protocol::PacketTrailerFeature::PtfFrameId => { + proto::PacketTrailerFeature::PtfFrameId + } } } } From d995159e3b1714b4e2dad9e66d80b1a6186d2fe3 Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 23 Mar 2026 15:54:49 -0700 Subject: [PATCH 41/52] cargo fmt --- examples/local_video/src/publisher.rs | 6 +----- examples/local_video/src/subscriber.rs | 23 +++++------------------ libwebrtc/src/native/mod.rs | 2 +- libwebrtc/src/native/packet_trailer.rs | 5 ++++- 
libwebrtc/src/native/video_stream.rs | 4 +++- livekit/src/room/e2ee/manager.rs | 18 ++++++++---------- webrtc-sys/src/lib.rs | 2 +- 7 files changed, 23 insertions(+), 37 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 477d6ca8e..5a6239f77 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -178,11 +178,7 @@ fn format_timing_line(timings: &PublisherTimingSummary) -> String { timings.capture_to_webrtc_total_ms.average().unwrap_or_default() )); - format!( - "Timing ms: {}\nTiming ms: {}", - line_one.join(" | "), - line_two.join(" | ") - ) + format!("Timing ms: {}\nTiming ms: {}", line_one.join(" | "), line_two.join(" | ")) } fn list_cameras() -> Result<()> { diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index fcd8ca9f7..123e5e817 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -263,9 +263,8 @@ async fn handle_track_subscribed( { let mut s = shared.lock(); s.codec = codec; - s.has_user_timestamp = publication - .packet_trailer_features() - .contains(&PacketTrailerFeature::PtfUserTimestamp); + s.has_user_timestamp = + publication.packet_trailer_features().contains(&PacketTrailerFeature::PtfUserTimestamp); } info!( @@ -1205,11 +1204,7 @@ impl CallbackTrait for YuvPaintCallback { bytes_per_row: Some(upload_row_bytes.0), rows_per_image: Some(dims.1), }, - wgpu::Extent3d { - width: dims.0, - height: dims.1, - depth_or_array_layers: 1, - }, + wgpu::Extent3d { width: dims.0, height: dims.1, depth_or_array_layers: 1 }, ); } @@ -1227,11 +1222,7 @@ impl CallbackTrait for YuvPaintCallback { bytes_per_row: Some(upload_row_bytes.1), rows_per_image: Some(uv_h), }, - wgpu::Extent3d { - width: uv_w, - height: uv_h, - depth_or_array_layers: 1, - }, + wgpu::Extent3d { width: uv_w, height: uv_h, depth_or_array_layers: 1 }, ); queue.write_texture( wgpu::TexelCopyTextureInfo { @@ 
-1246,11 +1237,7 @@ impl CallbackTrait for YuvPaintCallback { bytes_per_row: Some(upload_row_bytes.1), rows_per_image: Some(uv_h), }, - wgpu::Extent3d { - width: uv_w, - height: uv_h, - depth_or_array_layers: 1, - }, + wgpu::Extent3d { width: uv_w, height: uv_h, depth_or_array_layers: 1 }, ); } diff --git a/libwebrtc/src/native/mod.rs b/libwebrtc/src/native/mod.rs index c8106221e..de56e3345 100644 --- a/libwebrtc/src/native/mod.rs +++ b/libwebrtc/src/native/mod.rs @@ -27,6 +27,7 @@ pub mod frame_cryptor; pub mod ice_candidate; pub mod media_stream; pub mod media_stream_track; +pub mod packet_trailer; pub mod peer_connection; pub mod peer_connection_factory; pub mod rtp_parameters; @@ -34,7 +35,6 @@ pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; pub mod session_description; -pub mod packet_trailer; pub mod video_frame; pub mod video_source; pub mod video_stream; diff --git a/libwebrtc/src/native/packet_trailer.rs b/libwebrtc/src/native/packet_trailer.rs index ef993a7a7..38df7678e 100644 --- a/libwebrtc/src/native/packet_trailer.rs +++ b/libwebrtc/src/native/packet_trailer.rs @@ -69,7 +69,10 @@ impl PacketTrailerHandler { if ts > 2_000_000_000_000_000 || ts < 0 { log::warn!( "[PacketTrailer-FFI] C++ returned bad ts={} (0x{:016x}) fid={} rtp_ts={}", - ts, ts, frame_id, rtp_timestamp + ts, + ts, + frame_id, + rtp_timestamp ); } Some((ts, frame_id)) diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index a32edd30b..97e6cb4d6 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -117,7 +117,9 @@ impl sys_vt::VideoSink for VideoTrackObserver { if ts < 0 || ts > 2_000_000_000_000_000 { log::warn!( "[on_frame] SUSPICIOUS user_ts={} fid={} rtp_ts={}", - ts, fid, rtp_timestamp + ts, + fid, + rtp_timestamp ); } (Some(ts), Some(fid)) diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs index f49a18116..5f82caccf 100644 --- 
a/livekit/src/room/e2ee/manager.rs +++ b/livekit/src/room/e2ee/manager.rs @@ -105,18 +105,16 @@ impl E2eeManager { let receiver = track.transceiver().unwrap().receiver(); let mut packet_trailer_handler = None; - let has_packet_trailer = publication - .proto_info() - .packet_trailer_features - .iter() - .any(|f| { - *f == PacketTrailerFeature::PtfUserTimestamp as i32 - || *f == PacketTrailerFeature::PtfFrameId as i32 - }); + let has_packet_trailer = publication.proto_info().packet_trailer_features.iter().any(|f| { + *f == PacketTrailerFeature::PtfUserTimestamp as i32 + || *f == PacketTrailerFeature::PtfFrameId as i32 + }); if let RemoteTrack::Video(video_track) = &track { - let handler = - packet_trailer::create_receiver_handler(LkRuntime::instance().pc_factory(), &receiver); + let handler = packet_trailer::create_receiver_handler( + LkRuntime::instance().pc_factory(), + &receiver, + ); video_track.set_packet_trailer_handler(handler.clone()); packet_trailer_handler = Some(handler); diff --git a/webrtc-sys/src/lib.rs b/webrtc-sys/src/lib.rs index 061112fe8..94f4eed0c 100644 --- a/webrtc-sys/src/lib.rs +++ b/webrtc-sys/src/lib.rs @@ -27,6 +27,7 @@ pub mod helper; pub mod jsep; pub mod media_stream; pub mod media_stream_track; +pub mod packet_trailer; pub mod peer_connection; pub mod peer_connection_factory; pub mod prohibit_libsrtp_initialization; @@ -35,7 +36,6 @@ pub mod rtp_parameters; pub mod rtp_receiver; pub mod rtp_sender; pub mod rtp_transceiver; -pub mod packet_trailer; pub mod video_frame; pub mod video_frame_buffer; pub mod video_track; From 79cabbfd263245c4f6d165adeee1c4a27ee44667 Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 23 Mar 2026 16:39:15 -0700 Subject: [PATCH 42/52] fix typo in changeset --- .changeset/add_support_for_frame_level_packet_trailer.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.changeset/add_support_for_frame_level_packet_trailer.md b/.changeset/add_support_for_frame_level_packet_trailer.md index 
7a312dde5..1c82c5b83 100644 --- a/.changeset/add_support_for_frame_level_packet_trailer.md +++ b/.changeset/add_support_for_frame_level_packet_trailer.md @@ -7,7 +7,7 @@ soxr-sys: no changelog additions webrtc-sys-build: no changelog additions webrtc-sys: minor livekit-ffi: minor -yuv-sys: CHANGE_TYno changelog additionsPE +yuv-sys: no changelog additions libwebrtc: minor imgproc: no changelog additions --- From 278c925268673df3c66d2280f09c939e62f23f23 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 24 Mar 2026 13:35:37 -0700 Subject: [PATCH 43/52] update readme --- examples/local_video/README.md | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/examples/local_video/README.md b/examples/local_video/README.md index 19984992e..7cc6f3a20 100644 --- a/examples/local_video/README.md +++ b/examples/local_video/README.md @@ -38,6 +38,24 @@ Publisher usage: --identity cam-1 \ --attach-timestamp + # publish with timestamp burned into the video and a frame ID in the packet trailer + cargo run -p local_video -F desktop --bin publisher -- \ + --camera-index 0 \ + --room-name demo \ + --identity cam-1 \ + --attach-timestamp \ + --burn-timestamp \ + --attach-frame-id + + # publish at a custom resolution and framerate + cargo run -p local_video -F desktop --bin publisher -- \ + --camera-index 0 \ + --width 1920 \ + --height 1080 \ + --fps 60 \ + --room-name demo \ + --identity cam-1 + # publish with end-to-end encryption cargo run -p local_video -F desktop --bin publisher -- \ --camera-index 0 \ @@ -52,10 +70,16 @@ List devices usage: ``` Publisher flags (in addition to the common connection flags above): +- `--camera-index `: Camera index to use (default: `0`). Use `--list-cameras` to see available indices. +- `--width `: Desired capture width (default: `1280`). +- `--height `: Desired capture height (default: `720`). +- `--fps `: Desired capture framerate (default: `30`). 
- `--h265`: Use H.265/HEVC encoding if supported (falls back to H.264 on failure). - `--simulcast`: Publish simulcast video (multiple layers when the resolution is large enough). - `--max-bitrate `: Max video bitrate for the main (highest) layer in bits per second (e.g. `1500000`). - `--attach-timestamp`: Attach the current wall-clock time (microseconds since UNIX epoch) as the user timestamp on each published frame. The subscriber can display this to measure end-to-end latency. +- `--burn-timestamp`: Burn the attached timestamp into the video frame as a visible overlay. Has no effect unless `--attach-timestamp` is also set. +- `--attach-frame-id`: Attach a monotonically increasing frame ID to each published frame via the packet trailer. The subscriber displays this in the timestamp overlay when `--display-timestamp` is used. - `--e2ee-key `: Enable end-to-end encryption with the given shared key. The subscriber must use the same key to decrypt. Subscriber usage: @@ -92,7 +116,7 @@ Subscriber usage: Subscriber flags (in addition to the common connection flags above): - `--participant `: Only subscribe to video tracks from the specified participant. -- `--display-timestamp`: Show a top-left overlay with the publisher's timestamp, the subscriber's current time, and the computed end-to-end latency. Requires the publisher to use `--attach-timestamp`. +- `--display-timestamp`: Show a top-left overlay with frame ID, the publisher's timestamp, the subscriber's current time, and the computed end-to-end latency. Timestamp fields require the publisher to use `--attach-timestamp`; frame ID requires `--attach-frame-id`. - `--e2ee-key `: Enable end-to-end decryption with the given shared key. Must match the key used by the publisher. 
Notes: From f27d3d52cdc5b71c7b501ecadd432411e62c2866 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 24 Mar 2026 14:11:40 -0700 Subject: [PATCH 44/52] remove unused crates --- examples/local_video/Cargo.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 866977f54..3206a5a2b 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -45,8 +45,6 @@ chrono = "0.4" bytemuck = { version = "1.16", features = ["derive"] } nokhwa = { version = "0.10", default-features = false, features = ["output-threaded"] } -ratatui = "0.30.0" -crossterm = "0.29.0" [target.'cfg(target_os = "macos")'.dependencies] nokhwa = { version = "0.10", default-features = false, features = ["input-avfoundation"] } From 17c076b7329a46f4754b464c3d49787ac48c308e Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 24 Mar 2026 15:49:31 -0700 Subject: [PATCH 45/52] add new() to VideoFrame with defaults for frame_id & user_timestamp_us, fix e2e test --- Cargo.lock | 666 +----------------------------- libwebrtc/src/video_frame.rs | 6 + livekit/tests/common/e2e/video.rs | 3 +- 3 files changed, 14 insertions(+), 661 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7fb9449d6..822b4bfbb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -555,15 +555,6 @@ dependencies = [ "tungstenite 0.21.0", ] -[[package]] -name = "atomic" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340" -dependencies = [ - "bytemuck", -] - [[package]] name = "atomic-waker" version = "1.1.2" @@ -1063,15 +1054,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - [[package]] name = "cc" version = "1.2.57" @@ -1291,20 +1273,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "compact_str" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a" -dependencies = [ - "castaway 0.2.4", - "cfg-if 1.0.4", - "itoa", - "rustversion", - "ryu", - "static_assertions", -] - [[package]] name = "concurrent-queue" version = "2.5.0" @@ -1363,15 +1331,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "convert_case" version = "0.11.0" @@ -1599,33 +1558,6 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" -[[package]] -name = "crossterm" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" -dependencies = [ - "bitflags 2.11.0", - "crossterm_winapi", - "derive_more", - "document-features", - "mio", - "parking_lot", - "rustix 1.1.4", - "signal-hook", - "signal-hook-mio", - "winapi", -] - -[[package]] -name = "crossterm_winapi" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" -dependencies = [ - "winapi", -] - [[package]] name = "crunchy" version = "0.2.4" @@ -1654,16 +1586,6 @@ dependencies = [ "typenum", ] -[[package]] 
-name = "csscolorparser" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb2a7d3066da2de787b7f032c736763eb7ae5d355f81a68bab2675a96008b0bf" -dependencies = [ - "lab", - "phf", -] - [[package]] name = "ctor" version = "0.6.3" @@ -1825,16 +1747,6 @@ dependencies = [ "darling_macro 0.20.11", ] -[[package]] -name = "darling" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" -dependencies = [ - "darling_core 0.23.0", - "darling_macro 0.23.0", -] - [[package]] name = "darling_core" version = "0.14.4" @@ -1862,19 +1774,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "darling_core" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" -dependencies = [ - "ident_case", - "proc-macro2", - "quote", - "strsim 0.11.1", - "syn 2.0.117", -] - [[package]] name = "darling_macro" version = "0.14.4" @@ -1897,17 +1796,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "darling_macro" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" -dependencies = [ - "darling_core 0.23.0", - "quote", - "syn 2.0.117", -] - [[package]] name = "dashmap" version = "5.5.3" @@ -1933,12 +1821,6 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" -[[package]] -name = "deltae" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5729f5117e208430e437df2f4843f5e5952997175992d1414f94c57d61e270b4" - [[package]] name = "der" version = "0.7.10" @@ -1970,28 +1852,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "derive_more" -version = "2.1.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case 0.10.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.117", -] - [[package]] name = "deunicode" version = "1.6.2" @@ -2422,15 +2282,6 @@ version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" -[[package]] -name = "euclid" -version = "0.22.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1a05365e3b1c6d1650318537c7460c6923f1abdd272ad6842baa2b509957a06" -dependencies = [ - "num-traits", -] - [[package]] name = "event-listener" version = "2.5.3" @@ -2485,16 +2336,6 @@ dependencies = [ "rand 0.9.2", ] -[[package]] -name = "fancy-regex" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2" -dependencies = [ - "bit-set 0.5.3", - "regex", -] - [[package]] name = "fastrand" version = "1.9.0" @@ -2555,17 +2396,6 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "filedescriptor" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d" -dependencies = [ - "libc", - "thiserror 1.0.69", - "winapi", -] - [[package]] name = "filetime" version = "0.2.27" @@ -2583,12 +2413,6 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" -[[package]] -name = "finl_unicode" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9844ddc3a6e533d62bba727eb6c28b5d360921d5175e9ff0f1e621a5c590a4d5" - [[package]] name = "fixedbitset" version = "0.4.2" @@ -3227,12 +3051,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - [[package]] name = "hexf-parse" version = "0.2.1" @@ -3666,15 +3484,6 @@ dependencies = [ "serde_core", ] -[[package]] -name = "indoc" -version = "2.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" -dependencies = [ - "rustversion", -] - [[package]] name = "inout" version = "0.1.4" @@ -3684,19 +3493,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "instability" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb2d60ef19920a3a9193c3e371f726ec1dafc045dac788d0fb3704272458971" -dependencies = [ - "darling 0.23.0", - "indoc", - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "instant" version = "0.1.13" @@ -3746,7 +3542,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" dependencies = [ "async-channel 1.9.0", - "castaway 0.1.2", + "castaway", "crossbeam-utils", "curl", "curl-sys", @@ -3955,17 +3751,6 @@ dependencies = [ "signature", ] -[[package]] -name = "kasuari" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bde5057d6143cc94e861d90f591b9303d6716c6b9602309150bd068853c10899" -dependencies = [ - "hashbrown 0.16.1", - "portable-atomic", - "thiserror 2.0.18", -] - [[package]] name = "khronos-egl" version = "6.0.0" @@ -4002,12 +3787,6 @@ dependencies = [ "log", ] -[[package]] -name = "lab" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f" - [[package]] name = "lazy_static" version = "1.5.0" @@ -4126,15 +3905,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "line-clipping" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4de44e98ddbf09375cbf4d17714d18f39195f4f4894e8524501726fd9a8a4a" -dependencies = [ - "bitflags 2.11.0", -] - [[package]] name = "link-cplusplus" version = "1.0.12" @@ -4404,7 +4174,6 @@ dependencies = [ "bytemuck", "chrono", "clap", - "crossterm", "eframe", "egui", "egui-wgpu", @@ -4418,7 +4187,6 @@ dependencies = [ "nokhwa", "objc2 0.6.4", "parking_lot", - "ratatui", "tokio", "webrtc-sys", "wgpu 28.0.0", @@ -4453,31 +4221,12 @@ dependencies = [ "imgref", ] -[[package]] -name = "lru" -version = "0.16.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" -dependencies = [ - "hashbrown 0.16.1", -] - [[package]] name = "lru-slab" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" -[[package]] -name = "mac_address" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0aeb26bf5e836cc1c341c8106051b573f1766dfa05aa87f0b98be5e51b02303" -dependencies = [ - "nix 0.29.0", - "winapi", -] - [[package]] name = "mach2" version = "0.4.3" @@ -4552,21 +4301,6 @@ dependencies = [ "libc", ] -[[package]] -name = "memmem" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64a92489e2744ce060c349162be1c5f33c6969234104dbd99ddb5feb08b8c15" - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - [[package]] name = "metal" version = "0.18.0" @@ -4641,7 +4375,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", - "log", "wasi", "windows-sys 0.61.2", ] @@ -4766,7 +4499,7 @@ version = "3.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c914b5e420182bfb73504e0607592cdb8e2e21437d450883077669fb72a114d" dependencies = [ - "convert_case 0.11.0", + "convert_case", "ctor", "napi-derive-backend", "proc-macro2", @@ -4780,7 +4513,7 @@ version = "5.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0864cf6a82e2cfb69067374b64c9253d7e910e5b34db833ed7495dda56ccb18" dependencies = [ - "convert_case 0.11.0", + "convert_case", "proc-macro2", "quote", "semver", @@ -4902,19 +4635,6 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" -[[package]] -name = "nix" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" -dependencies = [ - "bitflags 2.11.0", - "cfg-if 1.0.4", - "cfg_aliases", - "libc", - "memoffset", -] - [[package]] name = "nix" version = "0.30.1" @@ -5157,15 +4877,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" 
-dependencies = [ - "libc", -] - [[package]] name = "objc" version = "0.2.7" @@ -5667,15 +5378,6 @@ dependencies = [ "libredox", ] -[[package]] -name = "ordered-float" -version = "4.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" -dependencies = [ - "num-traits", -] - [[package]] name = "ordered-float" version = "5.1.0" @@ -5722,7 +5424,7 @@ checksum = "e4022a17595a00d6a369236fdae483f0de7f0a339960a53118b818238e132224" dependencies = [ "android_system_properties", "log", - "nix 0.30.1", + "nix", "objc2 0.6.4", "objc2-foundation 0.3.2", "objc2-ui-kit 0.3.2", @@ -5951,58 +5653,6 @@ dependencies = [ "indexmap 2.13.0", ] -[[package]] -name = "phf" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" -dependencies = [ - "phf_macros", - "phf_shared", -] - -[[package]] -name = "phf_codegen" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" -dependencies = [ - "phf_generator", - "phf_shared", -] - -[[package]] -name = "phf_generator" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" -dependencies = [ - "phf_shared", - "rand 0.8.5", -] - -[[package]] -name = "phf_macros" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", - "syn 2.0.117", -] - -[[package]] -name = "phf_shared" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" 
-dependencies = [ - "siphasher 1.0.2", -] - [[package]] name = "pin-project" version = "1.1.11" @@ -6566,91 +6216,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca45419789ae5a7899559e9512e58ca889e41f04f1f2445e9f4b290ceccd1d08" -[[package]] -name = "ratatui" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1ce67fb8ba4446454d1c8dbaeda0557ff5e94d39d5e5ed7f10a65eb4c8266bc" -dependencies = [ - "instability", - "ratatui-core", - "ratatui-crossterm", - "ratatui-macros", - "ratatui-termwiz", - "ratatui-widgets", -] - -[[package]] -name = "ratatui-core" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef8dea09a92caaf73bff7adb70b76162e5937524058a7e5bff37869cbbec293" -dependencies = [ - "bitflags 2.11.0", - "compact_str", - "hashbrown 0.16.1", - "indoc", - "itertools 0.14.0", - "kasuari", - "lru", - "strum", - "thiserror 2.0.18", - "unicode-segmentation", - "unicode-truncate", - "unicode-width", -] - -[[package]] -name = "ratatui-crossterm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "577c9b9f652b4c121fb25c6a391dd06406d3b092ba68827e6d2f09550edc54b3" -dependencies = [ - "cfg-if 1.0.4", - "crossterm", - "instability", - "ratatui-core", -] - -[[package]] -name = "ratatui-macros" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7f1342a13e83e4bb9d0b793d0ea762be633f9582048c892ae9041ef39c936f4" -dependencies = [ - "ratatui-core", - "ratatui-widgets", -] - -[[package]] -name = "ratatui-termwiz" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f76fe0bd0ed4295f0321b1676732e2454024c15a35d01904ddb315afd3d545c" -dependencies = [ - "ratatui-core", - "termwiz", -] - -[[package]] -name = "ratatui-widgets" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d7dbfa023cd4e604c2553483820c5fe8aa9d71a42eea5aa77c6e7f35756612db" -dependencies = [ - "bitflags 2.11.0", - "hashbrown 0.16.1", - "indoc", - "instability", - "itertools 0.14.0", - "line-clipping", - "ratatui-core", - "strum", - "time", - "unicode-segmentation", - "unicode-width", -] - [[package]] name = "rav1e" version = "0.8.1" @@ -7404,27 +6969,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "signal-hook" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" -dependencies = [ - "libc", - "signal-hook-registry", -] - -[[package]] -name = "signal-hook-mio" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc" -dependencies = [ - "libc", - "mio", - "signal-hook", -] - [[package]] name = "signal-hook-registry" version = "1.4.8" @@ -7482,12 +7026,6 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" -[[package]] -name = "siphasher" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" - [[package]] name = "slab" version = "0.4.12" @@ -7701,27 +7239,6 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" 
-version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "subtle" version = "2.6.1" @@ -7828,69 +7345,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "terminfo" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4ea810f0692f9f51b382fff5893887bb4580f5fa246fde546e0b13e7fcee662" -dependencies = [ - "fnv", - "nom 7.1.3", - "phf", - "phf_codegen", -] - -[[package]] -name = "termios" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "411c5bf740737c7918b8b1fe232dca4dc9f8e754b8ad5e20966814001ed0ac6b" -dependencies = [ - "libc", -] - -[[package]] -name = "termwiz" -version = "0.23.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4676b37242ccbd1aabf56edb093a4827dc49086c0ffd764a5705899e0f35f8f7" -dependencies = [ - "anyhow", - "base64 0.22.1", - "bitflags 2.11.0", - "fancy-regex", - "filedescriptor", - "finl_unicode", - "fixedbitset 0.4.2", - "hex", - "lazy_static", - "libc", - "log", - "memmem", - "nix 0.29.0", - "num-derive", - "num-traits", - "ordered-float 4.6.0", - "pest", - "pest_derive", - "phf", - "sha2", - "signal-hook", - "siphasher 1.0.2", - "terminfo", - "termios", - "thiserror 1.0.69", - "ucd-trie", - "unicode-segmentation", - "vtparse", - "wezterm-bidi", - "wezterm-blob-leases", - "wezterm-color-types", - "wezterm-dynamic", - "wezterm-input-types", - "winapi", -] - [[package]] name = "test-case" version = "3.3.1" @@ -8026,9 +7480,7 @@ checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" dependencies = [ "deranged", "itoa", - "libc", "num-conv", - "num_threads", "powerfmt", "serde_core", "time-core", @@ -8686,17 +8138,6 @@ version = "1.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" -[[package]] -name = "unicode-truncate" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b380a1238663e5f8a691f9039c73e1cdae598a30e9855f541d29b08b53e9a5" -dependencies = [ - "itertools 0.14.0", - "unicode-segmentation", - "unicode-width", -] - [[package]] name = "unicode-width" version = "0.2.2" @@ -8812,7 +8253,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a138823392dba19b0aa494872689f97d0ee157de5852e2bec157ce6de9cdc22" dependencies = [ "anyhow", - "siphasher 0.3.11", + "siphasher", "uniffi_internal_macros", "uniffi_pipeline", ] @@ -8878,18 +8319,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "uuid" -version = "1.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" -dependencies = [ - "atomic", - "getrandom 0.4.2", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "v4l" version = "0.14.0" @@ -8951,15 +8380,6 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "vtparse" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d9b2acfb050df409c972a37d3b8e08cdea3bddb0c09db9d53137e504cfabed0" -dependencies = [ - "utf8parse", -] - [[package]] name = "waker-fn" version = "1.2.0" @@ -9339,78 +8759,6 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28ac98ddc8b9274cb41bb4d9d4d5c425b6020c50c46f25559911905610b4a88" -[[package]] -name = "wezterm-bidi" -version = "0.2.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0a6e355560527dd2d1cf7890652f4f09bb3433b6aadade4c9b5ed76de5f3ec" -dependencies = [ - "log", - "wezterm-dynamic", -] - -[[package]] -name = "wezterm-blob-leases" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692daff6d93d94e29e4114544ef6d5c942a7ed998b37abdc19b17136ea428eb7" -dependencies = [ - "getrandom 0.3.4", - "mac_address", - "sha2", - "thiserror 1.0.69", - "uuid", -] - -[[package]] -name = "wezterm-color-types" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7de81ef35c9010270d63772bebef2f2d6d1f2d20a983d27505ac850b8c4b4296" -dependencies = [ - "csscolorparser", - "deltae", - "lazy_static", - "wezterm-dynamic", -] - -[[package]] -name = "wezterm-dynamic" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f2ab60e120fd6eaa68d9567f3226e876684639d22a4219b313ff69ec0ccd5ac" -dependencies = [ - "log", - "ordered-float 4.6.0", - "strsim 0.11.1", - "thiserror 1.0.69", - "wezterm-dynamic-derive", -] - -[[package]] -name = "wezterm-dynamic-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c0cf2d539c645b448eaffec9ec494b8b19bd5077d9e58cb1ae7efece8d575b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "wezterm-input-types" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7012add459f951456ec9d6c7e6fc340b1ce15d6fc9629f8c42853412c029e57e" -dependencies = [ - "bitflags 1.3.2", - "euclid", - "lazy_static", - "serde", - "wezterm-dynamic", -] - [[package]] name = "wgpu" version = "27.0.1" @@ -9620,7 +8968,7 @@ dependencies = [ "ndk-sys 0.6.0+11769913", "objc", "once_cell", - "ordered-float 5.1.0", + "ordered-float", "parking_lot", "portable-atomic", "portable-atomic-util", @@ -9668,7 +9016,7 @@ dependencies = [ "ndk-sys 
0.6.0+11769913", "objc", "once_cell", - "ordered-float 5.1.0", + "ordered-float", "parking_lot", "portable-atomic", "portable-atomic-util", diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index eb42464e9..9015629a0 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -66,6 +66,12 @@ where pub buffer: T, } +impl> VideoFrame { + pub fn new(rotation: VideoRotation, buffer: T) -> Self { + Self { rotation, timestamp_us: 0, user_timestamp_us: None, frame_id: None, buffer } + } +} + pub type BoxVideoBuffer = Box; pub type BoxVideoFrame = VideoFrame; diff --git a/livekit/tests/common/e2e/video.rs b/livekit/tests/common/e2e/video.rs index 3887e3698..fac27cac1 100644 --- a/livekit/tests/common/e2e/video.rs +++ b/livekit/tests/common/e2e/video.rs @@ -107,8 +107,7 @@ impl SolidColorTrack { data_u.fill(128); data_v.fill(128); - let frame = - VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer }; + let frame = VideoFrame::new(VideoRotation::VideoRotation0, buffer); rtc_source.capture_frame(&frame); time::sleep(interval).await; } From 28502fabacd6debd0e866734a017c542e0e40796 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 24 Mar 2026 16:33:12 -0700 Subject: [PATCH 46/52] update changeset & remove debug logs --- ..._support_for_frame_level_packet_trailer.md | 2 +- examples/local_video/src/subscriber.rs | 18 ----------- libwebrtc/src/native/packet_trailer.rs | 32 +++++-------------- libwebrtc/src/native/video_stream.rs | 12 +------ webrtc-sys/src/packet_trailer.rs | 2 +- 5 files changed, 11 insertions(+), 55 deletions(-) diff --git a/.changeset/add_support_for_frame_level_packet_trailer.md b/.changeset/add_support_for_frame_level_packet_trailer.md index 1c82c5b83..3c7c6e90c 100644 --- a/.changeset/add_support_for_frame_level_packet_trailer.md +++ b/.changeset/add_support_for_frame_level_packet_trailer.md @@ -16,4 +16,4 @@ imgproc: no changelog additions #890 by @chenosaurus -- Add support to 
attach/parse frame level timestamps to VideoTracks as a custom payload trailer. \ No newline at end of file +- Add support to attach/parse frame level timestamps & frame ID to VideoTracks as a custom payload trailer. \ No newline at end of file diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 123e5e817..55b8719e9 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -355,24 +355,6 @@ async fn handle_track_subscribed( s.dirty = true; s.received_at_us = Some(received_at_us); - if let Some(ts) = frame.user_timestamp_us { - let delta_ms = (received_at_us - ts) as f64 / 1000.0; - if ts < 0 || ts > 2_000_000_000_000_000 || delta_ms < -60_000.0 { - log::warn!( - "[Subscriber] BAD TIMESTAMP: frame_id={:?} user_ts={} \ - timestamp_us={} now_us={} delta_ms={:.1} \ - prev_user_ts={:?} prev_frame_id={:?}", - frame.frame_id, - ts, - frame.timestamp_us, - received_at_us, - delta_ms, - s.user_timestamp_us, - s.frame_id, - ); - } - } - s.user_timestamp_us = frame.user_timestamp_us; s.frame_id = frame.frame_id; diff --git a/libwebrtc/src/native/packet_trailer.rs b/libwebrtc/src/native/packet_trailer.rs index 38df7678e..6bd3da5b2 100644 --- a/libwebrtc/src/native/packet_trailer.rs +++ b/libwebrtc/src/native/packet_trailer.rs @@ -1,4 +1,4 @@ -// Copyright 2025 LiveKit, Inc. +// Copyright 2026 LiveKit, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,15 +12,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Packet trailer support for end-to-end timestamp propagation. +//! Packet trailer support for end-to-end frame metadata propagation. //! -//! This module provides functionality to embed user-supplied timestamps -//! in encoded video frames as trailers. The timestamps are preserved +//! 
This module provides functionality to embed user-supplied metadata +//! in encoded video frames as trailers. The timestamps/frameIDs are preserved //! through the WebRTC pipeline and can be extracted on the receiver side. //! -//! On the send side, user timestamps are stored in the handler's internal -//! map keyed by capture timestamp. When the encoder produces a frame, -//! the transformer looks up the user timestamp via the frame's CaptureTime(). +//! On the send side, user timestamps/frameIDs are stored in the handler's internal +//! map keyed by RTP timestamp. When the encoder produces a frame, +//! the transformer looks up the metadata via the frame's CaptureTime(). //! //! On the receive side, extracted frame metadata is stored in an //! internal map keyed by RTP timestamp. Decoded frames look up their @@ -37,7 +37,7 @@ use crate::{ /// Handler for packet trailer embedding/extraction on RTP streams. /// /// For sender side: Stores frame metadata keyed by capture timestamp -/// and embeds them as 16-byte trailers on encoded frames before they +/// and embeds them as binary payload trailers on encoded frames before they /// are sent. Use `store_frame_metadata()` to associate metadata with /// a captured frame. 
/// @@ -66,15 +66,6 @@ impl PacketTrailerHandler { let ts = self.sys_handle.lookup_timestamp(rtp_timestamp); if ts >= 0 { let frame_id = self.sys_handle.last_lookup_frame_id(); - if ts > 2_000_000_000_000_000 || ts < 0 { - log::warn!( - "[PacketTrailer-FFI] C++ returned bad ts={} (0x{:016x}) fid={} rtp_ts={}", - ts, - ts, - frame_id, - rtp_timestamp - ); - } Some((ts, frame_id)) } else { None @@ -99,13 +90,6 @@ impl PacketTrailerHandler { user_timestamp_us: i64, frame_id: u32, ) { - log::info!( - target: "packet_trailer", - "store: capture_ts_us={}, user_ts_us={}, frame_id={}", - capture_timestamp_us, - user_timestamp_us, - frame_id - ); self.sys_handle.store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 97e6cb4d6..19f6e78b7 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -113,17 +113,7 @@ impl sys_vt::VideoSink for VideoTrackObserver { .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)); let (user_timestamp_us, frame_id) = match meta { - Some((ts, fid)) => { - if ts < 0 || ts > 2_000_000_000_000_000 { - log::warn!( - "[on_frame] SUSPICIOUS user_ts={} fid={} rtp_ts={}", - ts, - fid, - rtp_timestamp - ); - } - (Some(ts), Some(fid)) - } + Some((ts, fid)) => (Some(ts), Some(fid)), None => (None, None), }; diff --git a/webrtc-sys/src/packet_trailer.rs b/webrtc-sys/src/packet_trailer.rs index 7af72a4b0..dff87cd4b 100644 --- a/webrtc-sys/src/packet_trailer.rs +++ b/webrtc-sys/src/packet_trailer.rs @@ -1,4 +1,4 @@ -// Copyright 2025 LiveKit, Inc. +// Copyright 2026 LiveKit, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
From 4689d974a180a78aa76d515c3d88d3ab5c7dd5fd Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 25 Mar 2026 22:49:23 -0700 Subject: [PATCH 47/52] fix rendering when packet trailer feature is not populated --- examples/local_video/src/subscriber.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 55b8719e9..966d4e9db 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -268,14 +268,15 @@ async fn handle_track_subscribed( } info!( - "Subscribed to video track: {} (sid {}) from {} - codec: {}, simulcast: {}, dimension: {}x{}", + "Subscribed to video track: {} (sid {}) from {} - codec: {}, simulcast: {}, dimension: {}x{}, packet_trailer_features: {:?}", publication.name(), publication.sid(), participant.identity(), publication.mime_type(), publication.simulcasted(), publication.dimension().0, - publication.dimension().1 + publication.dimension().1, + publication.packet_trailer_features(), ); let rtc_track = video_track.rtc_track(); @@ -358,6 +359,10 @@ async fn handle_track_subscribed( s.user_timestamp_us = frame.user_timestamp_us; s.frame_id = frame.frame_id; + if !s.has_user_timestamp && frame.user_timestamp_us.is_some() { + s.has_user_timestamp = true; + } + // Update smoothed FPS (~500ms window) fps_window_frames += 1; let win_elapsed = fps_window_start.elapsed(); From 516b338a9fac43392174eb355dc7372c68966f2d Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 25 Mar 2026 22:55:54 -0700 Subject: [PATCH 48/52] put frame metadata into FrameMetadata struct --- libwebrtc/src/native/video_source.rs | 17 +++++++++++++---- webrtc-sys/include/livekit/packet_trailer.h | 10 +++++----- webrtc-sys/include/livekit/video_track.h | 8 ++------ webrtc-sys/src/packet_trailer.cpp | 12 ++++++------ webrtc-sys/src/video_track.cpp | 16 ++++++---------- webrtc-sys/src/video_track.rs | 11 ++++++++--- 6 files changed, 40 
insertions(+), 34 deletions(-) diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index dac8dd7f2..5e926380a 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -81,7 +81,14 @@ impl NativeVideoSource { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); builder.pin_mut().set_timestamp_us(now.as_micros() as i64); - source.sys_handle.on_captured_frame(&builder.pin_mut().build(), false, 0, 0); + source.sys_handle.on_captured_frame( + &builder.pin_mut().build(), + &vt_sys::ffi::FrameMetadata { + has_packet_trailer: false, + user_timestamp_us: 0, + frame_id: 0, + }, + ); } } }); @@ -114,9 +121,11 @@ impl NativeVideoSource { self.sys_handle.on_captured_frame( &builder.pin_mut().build(), - has_trailer, - user_ts, - frame_id, + &vt_sys::ffi::FrameMetadata { + has_packet_trailer: has_trailer, + user_timestamp_us: user_ts, + frame_id, + }, ); } diff --git a/webrtc-sys/include/livekit/packet_trailer.h b/webrtc-sys/include/livekit/packet_trailer.h index 6ea96c6c7..defd1dcf7 100644 --- a/webrtc-sys/include/livekit/packet_trailer.h +++ b/webrtc-sys/include/livekit/packet_trailer.h @@ -68,7 +68,7 @@ constexpr size_t kPacketTrailerMinSize = constexpr size_t kPacketTrailerMaxSize = kTimestampTlvSize + kFrameIdTlvSize + kTrailerEnvelopeSize; -struct FrameMetadata { +struct PacketTrailerMetadata { int64_t user_timestamp_us; uint32_t frame_id; uint32_t ssrc; // SSRC that produced this entry (for simulcast tracking) @@ -109,7 +109,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { /// Lookup the frame metadata associated with a given RTP timestamp. /// Returns the metadata if found, nullopt otherwise. /// The entry is removed from the map after lookup. - std::optional lookup_frame_metadata(uint32_t rtp_timestamp); + std::optional lookup_frame_metadata(uint32_t rtp_timestamp); /// Store frame metadata for a given capture timestamp (sender side). 
/// Called from VideoTrackSource::on_captured_frame with the @@ -132,7 +132,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { uint32_t frame_id); /// Extract and remove frame metadata trailer from frame data - std::optional ExtractTrailer( + std::optional ExtractTrailer( rtc::ArrayView data, std::vector& out_data); @@ -147,7 +147,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { // Populated by store_frame_metadata(), consumed by TransformSend() // via CaptureTime() lookup. mutable webrtc::Mutex send_map_mutex_; - mutable std::unordered_map send_map_; + mutable std::unordered_map send_map_; mutable std::deque send_map_order_; static constexpr size_t kMaxSendMapEntries = 300; @@ -155,7 +155,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { // Keyed by RTP timestamp so decoded frames can look up their // metadata regardless of frame drops or reordering. mutable webrtc::Mutex recv_map_mutex_; - mutable std::unordered_map recv_map_; + mutable std::unordered_map recv_map_; mutable std::deque recv_map_order_; static constexpr size_t kMaxRecvMapEntries = 300; diff --git a/webrtc-sys/include/livekit/video_track.h b/webrtc-sys/include/livekit/video_track.h index ccef9e836..3f4e1c1ac 100644 --- a/webrtc-sys/include/livekit/video_track.h +++ b/webrtc-sys/include/livekit/video_track.h @@ -100,9 +100,7 @@ class VideoTrackSource { bool remote() const override; VideoResolution video_resolution() const; bool on_captured_frame(const webrtc::VideoFrame& frame, - bool has_packet_trailer, - int64_t user_timestamp_us, - uint32_t frame_id); + const FrameMetadata& frame_metadata); void set_packet_trailer_handler( std::shared_ptr handler); @@ -121,9 +119,7 @@ class VideoTrackSource { VideoResolution video_resolution() const; bool on_captured_frame(const std::unique_ptr& frame, - bool has_packet_trailer, - int64_t user_timestamp_us, - uint32_t frame_id) + const FrameMetadata& frame_metadata) const; 
// frames pushed from Rust (+interior mutability) void set_packet_trailer_handler( diff --git a/webrtc-sys/src/packet_trailer.cpp b/webrtc-sys/src/packet_trailer.cpp index 4d724c239..ab20e42bf 100644 --- a/webrtc-sys/src/packet_trailer.cpp +++ b/webrtc-sys/src/packet_trailer.cpp @@ -81,7 +81,7 @@ void PacketTrailerTransformer::TransformSend( // capture_time_ms_ = timestamp_us / 1000. So capture_time->us() // has millisecond precision (bottom 3 digits always zero). // store_frame_metadata() truncates its key the same way. - FrameMetadata meta_to_embed{0, 0, 0}; + PacketTrailerMetadata meta_to_embed{0, 0, 0}; auto capture_time = frame->CaptureTime(); if (capture_time.has_value()) { int64_t capture_us = capture_time->us(); @@ -245,7 +245,7 @@ std::vector PacketTrailerTransformer::AppendTrailer( return result; } -std::optional PacketTrailerTransformer::ExtractTrailer( +std::optional PacketTrailerTransformer::ExtractTrailer( rtc::ArrayView data, std::vector& out_data) { if (data.size() < kTrailerEnvelopeSize) { @@ -271,7 +271,7 @@ std::optional PacketTrailerTransformer::ExtractTrailer( const uint8_t* trailer_start = data.data() + data.size() - trailer_len; size_t tlv_region_len = trailer_len - kTrailerEnvelopeSize; - FrameMetadata meta{0, 0, 0}; + PacketTrailerMetadata meta{0, 0, 0}; bool found_any = false; size_t pos = 0; @@ -346,14 +346,14 @@ bool PacketTrailerTransformer::enabled() const { return enabled_.load(); } -std::optional PacketTrailerTransformer::lookup_frame_metadata( +std::optional PacketTrailerTransformer::lookup_frame_metadata( uint32_t rtp_timestamp) { webrtc::MutexLock lock(&recv_map_mutex_); auto it = recv_map_.find(rtp_timestamp); if (it == recv_map_.end()) { return std::nullopt; } - FrameMetadata meta = it->second; + PacketTrailerMetadata meta = it->second; recv_map_.erase(it); for (auto oit = recv_map_order_.begin(); oit != recv_map_order_.end(); ++oit) { @@ -393,7 +393,7 @@ void PacketTrailerTransformer::store_frame_metadata( if 
(send_map_.find(key) == send_map_.end()) { send_map_order_.push_back(key); } - send_map_[key] = FrameMetadata{user_timestamp_us, frame_id, 0}; + send_map_[key] = PacketTrailerMetadata{user_timestamp_us, frame_id, 0}; } // PacketTrailerHandler implementation diff --git a/webrtc-sys/src/video_track.cpp b/webrtc-sys/src/video_track.cpp index ceb4be938..637cad28e 100644 --- a/webrtc-sys/src/video_track.cpp +++ b/webrtc-sys/src/video_track.cpp @@ -135,9 +135,7 @@ VideoResolution VideoTrackSource::InternalSource::video_resolution() const { bool VideoTrackSource::InternalSource::on_captured_frame( const webrtc::VideoFrame& frame, - bool has_packet_trailer, - int64_t user_timestamp_us, - uint32_t frame_id) { + const FrameMetadata& frame_metadata) { webrtc::MutexLock lock(&mutex_); int64_t aligned_timestamp_us = timestamp_aligner_.TranslateTimestamp( @@ -147,9 +145,10 @@ bool VideoTrackSource::InternalSource::on_captured_frame( // store the mapping keyed by the aligned timestamp. This is the value // that CaptureTime() will return in TransformSend, so the lookup will // succeed. 
- if (has_packet_trailer && packet_trailer_handler_) { + if (frame_metadata.has_packet_trailer && packet_trailer_handler_) { packet_trailer_handler_->store_frame_metadata( - aligned_timestamp_us, user_timestamp_us, frame_id); + aligned_timestamp_us, frame_metadata.user_timestamp_us, + frame_metadata.frame_id); } webrtc::scoped_refptr buffer = @@ -204,12 +203,9 @@ VideoResolution VideoTrackSource::video_resolution() const { bool VideoTrackSource::on_captured_frame( const std::unique_ptr& frame, - bool has_packet_trailer, - int64_t user_timestamp_us, - uint32_t frame_id) const { + const FrameMetadata& frame_metadata) const { auto rtc_frame = frame->get(); - return source_->on_captured_frame(rtc_frame, has_packet_trailer, - user_timestamp_us, frame_id); + return source_->on_captured_frame(rtc_frame, frame_metadata); } void VideoTrackSource::set_packet_trailer_handler( diff --git a/webrtc-sys/src/video_track.rs b/webrtc-sys/src/video_track.rs index 114ea2680..c680a65a8 100644 --- a/webrtc-sys/src/video_track.rs +++ b/webrtc-sys/src/video_track.rs @@ -42,6 +42,13 @@ pub mod ffi { pub height: u32, } + #[derive(Debug)] + pub struct FrameMetadata { + pub has_packet_trailer: bool, + pub user_timestamp_us: i64, + pub frame_id: u32, + } + extern "C++" { include!("livekit/video_frame.h"); include!("livekit/media_stream_track.h"); @@ -75,9 +82,7 @@ pub mod ffi { fn on_captured_frame( self: &VideoTrackSource, frame: &UniquePtr, - has_packet_trailer: bool, - user_timestamp_us: i64, - frame_id: u32, + frame_metadata: &FrameMetadata, ) -> bool; fn set_packet_trailer_handler( self: &VideoTrackSource, From 1f18f0f407a15accacf238d1a76d00f9cf0b0f4b Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 25 Mar 2026 23:10:55 -0700 Subject: [PATCH 49/52] update VideoFrame to use FrameMetadata --- examples/local_video/src/publisher.rs | 28 +++++++++++++------------- examples/local_video/src/subscriber.rs | 25 +++++++++++------------ examples/screensharing/src/lib.rs | 3 +-- 
examples/wgpu_room/src/logo_track.rs | 3 +-- libwebrtc/src/native/video_source.rs | 10 +++++---- libwebrtc/src/native/video_stream.rs | 18 ++++++++--------- libwebrtc/src/video_frame.rs | 20 +++++++++++++----- livekit-ffi/src/server/video_source.rs | 3 +-- 8 files changed, 58 insertions(+), 52 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 5a6239f77..c451b98db 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -5,7 +5,7 @@ use livekit::options::{ self, video as video_presets, TrackPublishOptions, VideoCodec, VideoEncoding, VideoPreset, }; use livekit::prelude::*; -use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; +use livekit::webrtc::video_frame::{FrameMetadata, I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution}; use livekit_api::access_token; @@ -384,8 +384,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, - user_timestamp_us: None, - frame_id: None, + frame_metadata: None, buffer: I420Buffer::new(width, height), }; let is_yuyv = fmt.format() == FrameFormat::YUYV; @@ -558,19 +557,20 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; - // Optionally attach wall-clock time as user timestamp - if args.attach_timestamp { - frame.user_timestamp_us = Some(capture_wall_time_us); - } else { - frame.user_timestamp_us = None; - } - // Optionally attach a monotonically increasing frame ID - if args.attach_frame_id { - frame.frame_id = Some(frame_counter); + // Build frame metadata from enabled packet trailer features + let user_ts = if args.attach_timestamp { Some(capture_wall_time_us) 
} else { None }; + let fid = if args.attach_frame_id { + let id = frame_counter; frame_counter = frame_counter.wrapping_add(1); + Some(id) } else { - frame.frame_id = None; - } + None + }; + frame.frame_metadata = if user_ts.is_some() || fid.is_some() { + Some(FrameMetadata { user_timestamp_us: user_ts, frame_id: fid }) + } else { + None + }; rtc_source.capture_frame(&frame); let webrtc_capture_finished_at = Instant::now(); diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 966d4e9db..c4ce1e808 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -77,10 +77,8 @@ struct SharedYuv { dirty: bool, /// Time when the latest frame became available to the subscriber code. received_at_us: Option, - /// Last received user timestamp in microseconds, if any. - user_timestamp_us: Option, - /// Last received frame_id, if any. - frame_id: Option, + /// Packet-trailer metadata from the most recent frame, if any. + frame_metadata: Option, /// Whether the publisher advertised PTF_USER_TIMESTAMP in its track info. 
has_user_timestamp: bool, } @@ -356,10 +354,11 @@ async fn handle_track_subscribed( s.dirty = true; s.received_at_us = Some(received_at_us); - s.user_timestamp_us = frame.user_timestamp_us; - s.frame_id = frame.frame_id; + s.frame_metadata = frame.frame_metadata; - if !s.has_user_timestamp && frame.user_timestamp_us.is_some() { + if !s.has_user_timestamp + && frame.frame_metadata.and_then(|m| m.user_timestamp_us).is_some() + { s.has_user_timestamp = true; } @@ -439,8 +438,7 @@ fn clear_hud_and_simulcast(shared: &Arc>, simulcast: &Arc format!("Frame ID: {}", fid), @@ -742,8 +742,7 @@ async fn run(args: Args, ctrl_c_received: Arc) -> Result<()> { fps: 0.0, dirty: false, received_at_us: None, - user_timestamp_us: None, - frame_id: None, + frame_metadata: None, has_user_timestamp: false, })); diff --git a/examples/screensharing/src/lib.rs b/examples/screensharing/src/lib.rs index c5a815f5d..f8e26bec6 100644 --- a/examples/screensharing/src/lib.rs +++ b/examples/screensharing/src/lib.rs @@ -186,8 +186,7 @@ mod test { let mut frame_buffer = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, - user_timestamp_us: None, - frame_id: None, + frame_metadata: None, buffer: I420Buffer::new(1, 1), }; move |result: Result| { diff --git a/examples/wgpu_room/src/logo_track.rs b/examples/wgpu_room/src/logo_track.rs index 7a88ca888..4e184f661 100644 --- a/examples/wgpu_room/src/logo_track.rs +++ b/examples/wgpu_room/src/logo_track.rs @@ -117,8 +117,7 @@ impl LogoTrack { video_frame: Arc::new(Mutex::new(VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, - user_timestamp_us: None, - frame_id: None, + frame_metadata: None, buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), })), pos: (0, 0), diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 5e926380a..8fef571f4 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -89,6 +89,7 @@ impl NativeVideoSource { 
frame_id: 0, }, ); + } } }); @@ -113,9 +114,10 @@ impl NativeVideoSource { }; builder.pin_mut().set_timestamp_us(capture_ts); - let user_ts = frame.user_timestamp_us.unwrap_or(0); - let frame_id = frame.frame_id.unwrap_or(0); - let has_trailer = frame.user_timestamp_us.is_some() || frame.frame_id.is_some(); + let (has_trailer, user_ts, fid) = match frame.frame_metadata { + Some(meta) => (true, meta.user_timestamp_us.unwrap_or(0), meta.frame_id.unwrap_or(0)), + None => (false, 0, 0), + }; self.inner.lock().captured_frames += 1; @@ -124,7 +126,7 @@ impl NativeVideoSource { &vt_sys::ffi::FrameMetadata { has_packet_trailer: has_trailer, user_timestamp_us: user_ts, - frame_id, + frame_id: fid, }, ); } diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs index 19f6e78b7..aacfd3c40 100644 --- a/libwebrtc/src/native/video_stream.rs +++ b/libwebrtc/src/native/video_stream.rs @@ -26,7 +26,7 @@ use webrtc_sys::video_track as sys_vt; use super::video_frame::new_video_frame_buffer; use crate::{ native::packet_trailer::PacketTrailerHandler, - video_frame::{BoxVideoFrame, VideoFrame}, + video_frame::{BoxVideoFrame, FrameMetadata, VideoFrame}, video_track::RtcVideoTrack, }; @@ -106,22 +106,20 @@ struct VideoTrackObserver { impl sys_vt::VideoSink for VideoTrackObserver { fn on_frame(&self, frame: UniquePtr) { let rtp_timestamp = frame.timestamp(); - let meta = self + let frame_metadata = self .packet_trailer_handler .lock() .as_ref() - .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)); - - let (user_timestamp_us, frame_id) = match meta { - Some((ts, fid)) => (Some(ts), Some(fid)), - None => (None, None), - }; + .and_then(|h| h.lookup_frame_metadata(rtp_timestamp)) + .map(|(ts, fid)| FrameMetadata { + user_timestamp_us: Some(ts), + frame_id: if fid != 0 { Some(fid) } else { None }, + }); let _ = self.frame_tx.send(VideoFrame { rotation: frame.rotation().into(), timestamp_us: frame.timestamp_us(), - user_timestamp_us, - frame_id, + 
frame_metadata, buffer: new_video_frame_buffer(unsafe { frame.video_frame_buffer() }), }); } diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index 9015629a0..dca9d3a8f 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -52,6 +52,18 @@ pub enum VideoBufferType { NV12, } +/// Metadata carried alongside a video frame via the packet trailer mechanism. +/// +/// Each field corresponds to an independently negotiable packet trailer feature +/// (`PTF_USER_TIMESTAMP`, `PTF_FRAME_ID`), so individual fields are `Option`. +#[derive(Debug, Clone, Copy)] +pub struct FrameMetadata { + /// Wall-clock capture time in microseconds, when `PTF_USER_TIMESTAMP` is enabled. + pub user_timestamp_us: Option, + /// Monotonically increasing frame identifier, when `PTF_FRAME_ID` is enabled. + pub frame_id: Option, +} + #[derive(Debug)] pub struct VideoFrame where @@ -59,16 +71,14 @@ where { pub rotation: VideoRotation, pub timestamp_us: i64, // When the frame was captured in microseconds - /// Optional user timestamp in microseconds, if available. - pub user_timestamp_us: Option, - /// Optional user-supplied frame identifier. - pub frame_id: Option, + /// Packet-trailer metadata, if any trailer features are active. 
+ pub frame_metadata: Option, pub buffer: T, } impl> VideoFrame { pub fn new(rotation: VideoRotation, buffer: T) -> Self { - Self { rotation, timestamp_us: 0, user_timestamp_us: None, frame_id: None, buffer } + Self { rotation, timestamp_us: 0, frame_metadata: None, buffer } } } diff --git a/livekit-ffi/src/server/video_source.rs b/livekit-ffi/src/server/video_source.rs index 251b7b3c7..8e9c70794 100644 --- a/livekit-ffi/src/server/video_source.rs +++ b/livekit-ffi/src/server/video_source.rs @@ -67,8 +67,7 @@ impl FfiVideoSource { let frame = VideoFrame { rotation: capture.rotation().into(), timestamp_us: capture.timestamp_us, - user_timestamp_us: None, - frame_id: None, + frame_metadata: None, buffer, }; From b311d9412c7368df19f362b7afb9bc9e22c77402 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 25 Mar 2026 23:28:59 -0700 Subject: [PATCH 50/52] change user_timestamp to u64 --- examples/local_video/src/publisher.rs | 4 ++-- examples/local_video/src/subscriber.rs | 14 +++++++------- examples/local_video/src/timestamp_burn.rs | 6 +++--- libwebrtc/src/native/packet_trailer.rs | 6 +++--- libwebrtc/src/video_frame.rs | 2 +- webrtc-sys/include/livekit/packet_trailer.h | 14 +++++++------- webrtc-sys/src/packet_trailer.cpp | 12 ++++++------ webrtc-sys/src/packet_trailer.rs | 4 ++-- webrtc-sys/src/video_track.rs | 2 +- 9 files changed, 32 insertions(+), 32 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index c451b98db..21aa4950a 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -100,8 +100,8 @@ struct Args { e2ee_key: Option, } -fn unix_time_us_now() -> i64 { - SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as i64 +fn unix_time_us_now() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros() as u64 } #[derive(Default)] diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 
c4ce1e808..70291c894 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -76,7 +76,7 @@ struct SharedYuv { fps: f32, dirty: bool, /// Time when the latest frame became available to the subscriber code. - received_at_us: Option, + received_at_us: Option, /// Packet-trailer metadata from the most recent frame, if any. frame_metadata: Option, /// Whether the publisher advertised PTF_USER_TIMESTAMP in its track info. @@ -190,14 +190,14 @@ fn update_simulcast_quality_from_stats( } /// Returns the current wall-clock time as microseconds since Unix epoch. -fn current_timestamp_us() -> i64 { - SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default().as_micros() as i64 +fn current_timestamp_us() -> u64 { + SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_default().as_micros() as u64 } /// Format a user timestamp (microseconds since Unix epoch) as /// `yyyy-mm-dd hh:mm:ss:xxx` where xxx is milliseconds. -fn format_timestamp_us(ts_us: i64) -> String { - DateTime::::from_timestamp_micros(ts_us) +fn format_timestamp_us(ts_us: u64) -> String { + DateTime::::from_timestamp_micros(ts_us as i64) .map(|dt| { dt.format("%Y-%m-%d %H:%M:%S:").to_string() + &format!("{:03}", dt.timestamp_subsec_millis()) @@ -205,7 +205,7 @@ fn format_timestamp_us(ts_us: i64) -> String { .unwrap_or_else(|| format!("")) } -fn format_optional_timestamp_us(ts_us: Option) -> String { +fn format_optional_timestamp_us(ts_us: Option) -> String { ts_us.map(format_timestamp_us).unwrap_or_else(|| "N/A".to_string()) } @@ -587,7 +587,7 @@ impl eframe::App for VideoApp { if has_user_timestamp { let latency = match (publish_us, receive_us) { (Some(pub_ts), Some(recv_ts)) => { - format!("{:.1}ms", (recv_ts - pub_ts) as f64 / 1000.0) + format!("{:.1}ms", recv_ts.saturating_sub(pub_ts) as f64 / 1000.0) } _ => "N/A".to_string(), }; diff --git a/examples/local_video/src/timestamp_burn.rs b/examples/local_video/src/timestamp_burn.rs index 7d4cc971c..e4c557f2f 
100644 --- a/examples/local_video/src/timestamp_burn.rs +++ b/examples/local_video/src/timestamp_burn.rs @@ -73,7 +73,7 @@ impl TimestampOverlay { } } - pub fn draw(&mut self, data_y: &mut [u8], stride_y: usize, timestamp_us: i64) { + pub fn draw(&mut self, data_y: &mut [u8], stride_y: usize, timestamp_us: u64) { if !self.enabled { return; } @@ -121,8 +121,8 @@ fn rasterize_timestamp_glyphs() -> [TimestampGlyph; TIMESTAMP_GLYPH_COUNT] { glyphs } -fn format_timestamp_glyphs(timestamp_us: i64, out: &mut [u8; TIMESTAMP_TEXT_LEN]) { - let Some(dt) = DateTime::::from_timestamp_micros(timestamp_us) else { +fn format_timestamp_glyphs(timestamp_us: u64, out: &mut [u8; TIMESTAMP_TEXT_LEN]) { + let Some(dt) = DateTime::::from_timestamp_micros(timestamp_us as i64) else { out.fill(0); return; }; diff --git a/libwebrtc/src/native/packet_trailer.rs b/libwebrtc/src/native/packet_trailer.rs index 6bd3da5b2..b06e37c55 100644 --- a/libwebrtc/src/native/packet_trailer.rs +++ b/libwebrtc/src/native/packet_trailer.rs @@ -62,9 +62,9 @@ impl PacketTrailerHandler { /// Lookup the frame metadata for a given RTP timestamp (receiver side). /// Returns `Some((user_timestamp_us, frame_id))` if found, `None` otherwise. /// The entry is removed from the map after a successful lookup. 
- pub fn lookup_frame_metadata(&self, rtp_timestamp: u32) -> Option<(i64, u32)> { + pub fn lookup_frame_metadata(&self, rtp_timestamp: u32) -> Option<(u64, u32)> { let ts = self.sys_handle.lookup_timestamp(rtp_timestamp); - if ts >= 0 { + if ts != u64::MAX { let frame_id = self.sys_handle.last_lookup_frame_id(); Some((ts, frame_id)) } else { @@ -87,7 +87,7 @@ impl PacketTrailerHandler { pub fn store_frame_metadata( &self, capture_timestamp_us: i64, - user_timestamp_us: i64, + user_timestamp_us: u64, frame_id: u32, ) { self.sys_handle.store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs index dca9d3a8f..6046c1713 100644 --- a/libwebrtc/src/video_frame.rs +++ b/libwebrtc/src/video_frame.rs @@ -59,7 +59,7 @@ pub enum VideoBufferType { #[derive(Debug, Clone, Copy)] pub struct FrameMetadata { /// Wall-clock capture time in microseconds, when `PTF_USER_TIMESTAMP` is enabled. - pub user_timestamp_us: Option, + pub user_timestamp_us: Option, /// Monotonically increasing frame identifier, when `PTF_FRAME_ID` is enabled. pub frame_id: Option, } diff --git a/webrtc-sys/include/livekit/packet_trailer.h b/webrtc-sys/include/livekit/packet_trailer.h index defd1dcf7..e4c6a4e7e 100644 --- a/webrtc-sys/include/livekit/packet_trailer.h +++ b/webrtc-sys/include/livekit/packet_trailer.h @@ -56,7 +56,7 @@ constexpr size_t kTrailerEnvelopeSize = 5; // All TLV bytes (tag, len, value) are XORed with 0xFF. 
// TLV tag IDs -constexpr uint8_t kTagTimestampUs = 0x01; // value: 8 bytes big-endian int64 +constexpr uint8_t kTagTimestampUs = 0x01; // value: 8 bytes big-endian uint64 constexpr uint8_t kTagFrameId = 0x02; // value: 4 bytes big-endian uint32 constexpr size_t kTimestampTlvSize = 10; // tag + len + 8-byte value @@ -69,7 +69,7 @@ constexpr size_t kPacketTrailerMaxSize = kTimestampTlvSize + kFrameIdTlvSize + kTrailerEnvelopeSize; struct PacketTrailerMetadata { - int64_t user_timestamp_us; + uint64_t user_timestamp_us; uint32_t frame_id; uint32_t ssrc; // SSRC that produced this entry (for simulcast tracking) }; @@ -116,7 +116,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { /// TimestampAligner-adjusted timestamp, which matches CaptureTime() /// in the encoder pipeline. void store_frame_metadata(int64_t capture_timestamp_us, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id); private: @@ -128,7 +128,7 @@ class PacketTrailerTransformer : public webrtc::FrameTransformerInterface { /// Append frame metadata trailer to frame data std::vector AppendTrailer( rtc::ArrayView data, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id); /// Extract and remove frame metadata trailer from frame data @@ -181,9 +181,9 @@ class PacketTrailerHandler { bool enabled() const; /// Lookup the user timestamp for a given RTP timestamp (receiver side). - /// Returns -1 if not found. The entry is removed after lookup. + /// Returns UINT64_MAX if not found. The entry is removed after lookup. /// Also caches the frame_id for retrieval via last_lookup_frame_id(). - int64_t lookup_timestamp(uint32_t rtp_timestamp) const; + uint64_t lookup_timestamp(uint32_t rtp_timestamp) const; /// Returns the frame_id from the most recent successful /// lookup_timestamp() call. Returns 0 if no lookup succeeded. 
@@ -191,7 +191,7 @@ class PacketTrailerHandler { /// Store frame metadata for a given capture timestamp (sender side). void store_frame_metadata(int64_t capture_timestamp_us, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id) const; /// Access the underlying transformer for chaining. diff --git a/webrtc-sys/src/packet_trailer.cpp b/webrtc-sys/src/packet_trailer.cpp index ab20e42bf..0d6899263 100644 --- a/webrtc-sys/src/packet_trailer.cpp +++ b/webrtc-sys/src/packet_trailer.cpp @@ -204,7 +204,7 @@ void PacketTrailerTransformer::TransformReceive( std::vector PacketTrailerTransformer::AppendTrailer( rtc::ArrayView data, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id) { const bool has_frame_id = frame_id != 0; const size_t trailer_len = kTimestampTlvSize + @@ -287,7 +287,7 @@ std::optional PacketTrailerTransformer::ExtractTrailer( const uint8_t* val = trailer_start + pos; if (tag == kTagTimestampUs && len == 8) { - int64_t ts = 0; + uint64_t ts = 0; for (int i = 0; i < 8; ++i) { ts = (ts << 8) | (val[i] ^ 0xFF); } @@ -367,7 +367,7 @@ std::optional PacketTrailerTransformer::lookup_frame_meta void PacketTrailerTransformer::store_frame_metadata( int64_t capture_timestamp_us, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id) { // Truncate to millisecond precision to match what WebRTC stores // internally. 
The encoder pipeline converts the VideoFrame's @@ -424,13 +424,13 @@ bool PacketTrailerHandler::enabled() const { return transformer_->enabled(); } -int64_t PacketTrailerHandler::lookup_timestamp(uint32_t rtp_timestamp) const { +uint64_t PacketTrailerHandler::lookup_timestamp(uint32_t rtp_timestamp) const { auto meta = transformer_->lookup_frame_metadata(rtp_timestamp); if (meta.has_value()) { last_frame_id_ = meta->frame_id; return meta->user_timestamp_us; } - return -1; + return UINT64_MAX; } uint32_t PacketTrailerHandler::last_lookup_frame_id() const { @@ -439,7 +439,7 @@ uint32_t PacketTrailerHandler::last_lookup_frame_id() const { void PacketTrailerHandler::store_frame_metadata( int64_t capture_timestamp_us, - int64_t user_timestamp_us, + uint64_t user_timestamp_us, uint32_t frame_id) const { transformer_->store_frame_metadata(capture_timestamp_us, user_timestamp_us, frame_id); } diff --git a/webrtc-sys/src/packet_trailer.rs b/webrtc-sys/src/packet_trailer.rs index dff87cd4b..03c3408ab 100644 --- a/webrtc-sys/src/packet_trailer.rs +++ b/webrtc-sys/src/packet_trailer.rs @@ -38,7 +38,7 @@ pub mod ffi { /// Lookup the user timestamp for a given RTP timestamp (receiver side). /// Returns -1 if not found. The entry is removed after lookup. /// Also caches the frame_id for retrieval via last_lookup_frame_id(). - fn lookup_timestamp(self: &PacketTrailerHandler, rtp_timestamp: u32) -> i64; + fn lookup_timestamp(self: &PacketTrailerHandler, rtp_timestamp: u32) -> u64; /// Returns the frame_id from the most recent successful /// lookup_timestamp() call. 
@@ -48,7 +48,7 @@ pub mod ffi { fn store_frame_metadata( self: &PacketTrailerHandler, capture_timestamp_us: i64, - user_timestamp_us: i64, + user_timestamp_us: u64, frame_id: u32, ); diff --git a/webrtc-sys/src/video_track.rs b/webrtc-sys/src/video_track.rs index c680a65a8..dccf9668f 100644 --- a/webrtc-sys/src/video_track.rs +++ b/webrtc-sys/src/video_track.rs @@ -45,7 +45,7 @@ pub mod ffi { #[derive(Debug)] pub struct FrameMetadata { pub has_packet_trailer: bool, - pub user_timestamp_us: i64, + pub user_timestamp_us: u64, pub frame_id: u32, } From 8bc47792be7ad6bc01de7820a7ea3a3863ed0b43 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 25 Mar 2026 23:29:24 -0700 Subject: [PATCH 51/52] cargo fmt --- libwebrtc/src/native/video_source.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/libwebrtc/src/native/video_source.rs b/libwebrtc/src/native/video_source.rs index 8fef571f4..619a0ea77 100644 --- a/libwebrtc/src/native/video_source.rs +++ b/libwebrtc/src/native/video_source.rs @@ -89,7 +89,6 @@ impl NativeVideoSource { frame_id: 0, }, ); - } } }); From b6747fa089da9d2abe2835b4d28a3f1dadfb915c Mon Sep 17 00:00:00 2001 From: github-actions <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 26 Mar 2026 06:30:07 +0000 Subject: [PATCH 52/52] generated protobuf --- .../proto/audio_frame_pb.d.ts | 20 +++++++++++++++ .../proto/audio_frame_pb.js | 2 ++ livekit-ffi-node-bindings/proto/e2ee_pb.d.ts | 25 +++++++++++++++++++ livekit-ffi-node-bindings/proto/e2ee_pb.js | 14 +++++++++++ .../proto/participant_pb.d.ts | 5 ++++ .../proto/participant_pb.js | 1 + livekit-ffi-node-bindings/proto/room_pb.d.ts | 19 ++++++++++++++ livekit-ffi-node-bindings/proto/room_pb.js | 3 +++ livekit-ffi-node-bindings/proto/track_pb.d.ts | 20 +++++++++++++++ livekit-ffi-node-bindings/proto/track_pb.js | 13 ++++++++++ .../proto/video_frame_pb.d.ts | 7 +++++- .../proto/video_frame_pb.js | 1 + 12 files changed, 129 insertions(+), 1 deletion(-) diff --git 
a/livekit-ffi-node-bindings/proto/audio_frame_pb.d.ts b/livekit-ffi-node-bindings/proto/audio_frame_pb.d.ts index 29303e20c..1cb120b5e 100644 --- a/livekit-ffi-node-bindings/proto/audio_frame_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/audio_frame_pb.d.ts @@ -185,6 +185,16 @@ export declare class NewAudioStreamRequest extends Message); static readonly runtime: typeof proto2; @@ -268,6 +278,16 @@ export declare class AudioStreamFromParticipantRequest extends Message); static readonly runtime: typeof proto2; diff --git a/livekit-ffi-node-bindings/proto/audio_frame_pb.js b/livekit-ffi-node-bindings/proto/audio_frame_pb.js index 17e6d9f0d..2e33235ea 100644 --- a/livekit-ffi-node-bindings/proto/audio_frame_pb.js +++ b/livekit-ffi-node-bindings/proto/audio_frame_pb.js @@ -101,6 +101,7 @@ const NewAudioStreamRequest = /*@__PURE__*/ proto2.makeMessageType( { no: 5, name: "audio_filter_module_id", kind: "scalar", T: 9 /* ScalarType.STRING */, opt: true }, { no: 6, name: "audio_filter_options", kind: "scalar", T: 9 /* ScalarType.STRING */, opt: true }, { no: 7, name: "frame_size_ms", kind: "scalar", T: 13 /* ScalarType.UINT32 */, opt: true }, + { no: 8, name: "queue_size_frames", kind: "scalar", T: 13 /* ScalarType.UINT32 */, opt: true }, ], ); @@ -128,6 +129,7 @@ const AudioStreamFromParticipantRequest = /*@__PURE__*/ proto2.makeMessageType( { no: 7, name: "audio_filter_module_id", kind: "scalar", T: 9 /* ScalarType.STRING */, opt: true }, { no: 8, name: "audio_filter_options", kind: "scalar", T: 9 /* ScalarType.STRING */, opt: true }, { no: 9, name: "frame_size_ms", kind: "scalar", T: 13 /* ScalarType.UINT32 */, opt: true }, + { no: 10, name: "queue_size_frames", kind: "scalar", T: 13 /* ScalarType.UINT32 */, opt: true }, ], ); diff --git a/livekit-ffi-node-bindings/proto/e2ee_pb.d.ts b/livekit-ffi-node-bindings/proto/e2ee_pb.d.ts index 6c815b49d..3e6a5316c 100644 --- a/livekit-ffi-node-bindings/proto/e2ee_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/e2ee_pb.d.ts @@ 
-40,6 +40,21 @@ export declare enum EncryptionType { CUSTOM = 2, } +/** + * @generated from enum livekit.proto.KeyDerivationFunction + */ +export declare enum KeyDerivationFunction { + /** + * @generated from enum value: PBKDF2 = 0; + */ + PBKDF2 = 0, + + /** + * @generated from enum value: HKDF = 1; + */ + HKDF = 1, +} + /** * @generated from enum livekit.proto.EncryptionState */ @@ -147,6 +162,16 @@ export declare class KeyProviderOptions extends Message { */ failureTolerance?: number; + /** + * @generated from field: required int32 key_ring_size = 5; + */ + keyRingSize?: number; + + /** + * @generated from field: required livekit.proto.KeyDerivationFunction key_derivation_function = 6; + */ + keyDerivationFunction?: KeyDerivationFunction; + constructor(data?: PartialMessage); static readonly runtime: typeof proto2; diff --git a/livekit-ffi-node-bindings/proto/e2ee_pb.js b/livekit-ffi-node-bindings/proto/e2ee_pb.js index 511338d6e..8696e62e3 100644 --- a/livekit-ffi-node-bindings/proto/e2ee_pb.js +++ b/livekit-ffi-node-bindings/proto/e2ee_pb.js @@ -34,6 +34,17 @@ const EncryptionType = /*@__PURE__*/ proto2.makeEnum( ], ); +/** + * @generated from enum livekit.proto.KeyDerivationFunction + */ +const KeyDerivationFunction = /*@__PURE__*/ proto2.makeEnum( + "livekit.proto.KeyDerivationFunction", + [ + {no: 0, name: "PBKDF2"}, + {no: 1, name: "HKDF"}, + ], +); + /** * @generated from enum livekit.proto.EncryptionState */ @@ -73,6 +84,8 @@ const KeyProviderOptions = /*@__PURE__*/ proto2.makeMessageType( { no: 2, name: "ratchet_window_size", kind: "scalar", T: 5 /* ScalarType.INT32 */, req: true }, { no: 3, name: "ratchet_salt", kind: "scalar", T: 12 /* ScalarType.BYTES */, req: true }, { no: 4, name: "failure_tolerance", kind: "scalar", T: 5 /* ScalarType.INT32 */, req: true }, + { no: 5, name: "key_ring_size", kind: "scalar", T: 5 /* ScalarType.INT32 */, req: true }, + { no: 6, name: "key_derivation_function", kind: "enum", T: 
proto2.getEnumType(KeyDerivationFunction), req: true }, ], ); @@ -325,6 +338,7 @@ const E2eeResponse = /*@__PURE__*/ proto2.makeMessageType( exports.EncryptionType = EncryptionType; +exports.KeyDerivationFunction = KeyDerivationFunction; exports.EncryptionState = EncryptionState; exports.FrameCryptor = FrameCryptor; exports.KeyProviderOptions = KeyProviderOptions; diff --git a/livekit-ffi-node-bindings/proto/participant_pb.d.ts b/livekit-ffi-node-bindings/proto/participant_pb.d.ts index 932b8879c..053486b08 100644 --- a/livekit-ffi-node-bindings/proto/participant_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/participant_pb.d.ts @@ -201,6 +201,11 @@ export declare enum DisconnectReason { * @generated from enum value: MEDIA_FAILURE = 15; */ MEDIA_FAILURE = 15, + + /** + * @generated from enum value: AGENT_ERROR = 16; + */ + AGENT_ERROR = 16, } /** diff --git a/livekit-ffi-node-bindings/proto/participant_pb.js b/livekit-ffi-node-bindings/proto/participant_pb.js index 739cf93c6..b7252e24e 100644 --- a/livekit-ffi-node-bindings/proto/participant_pb.js +++ b/livekit-ffi-node-bindings/proto/participant_pb.js @@ -76,6 +76,7 @@ const DisconnectReason = /*@__PURE__*/ proto2.makeEnum( {no: 13, name: "SIP_TRUNK_FAILURE"}, {no: 14, name: "CONNECTION_TIMEOUT"}, {no: 15, name: "MEDIA_FAILURE"}, + {no: 16, name: "AGENT_ERROR"}, ], ); diff --git a/livekit-ffi-node-bindings/proto/room_pb.d.ts b/livekit-ffi-node-bindings/proto/room_pb.d.ts index 14d81e121..842f1e9d0 100644 --- a/livekit-ffi-node-bindings/proto/room_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/room_pb.d.ts @@ -310,6 +310,11 @@ export declare class DisconnectRequest extends Message { */ requestAsyncId?: bigint; + /** + * @generated from field: optional livekit.proto.DisconnectReason reason = 3; + */ + reason?: DisconnectReason; + constructor(data?: PartialMessage); static readonly runtime: typeof proto2; @@ -1744,6 +1749,20 @@ export declare class RoomOptions extends Message { */ encryption?: E2eeOptions; + /** + * 
use single peer connection for both publish/subscribe (default: false) + * + * @generated from field: optional bool single_peer_connection = 8; + */ + singlePeerConnection?: boolean; + + /** + * timeout in milliseconds for each signal connection attempt (default: 5000) + * + * @generated from field: optional uint64 connect_timeout_ms = 9; + */ + connectTimeoutMs?: bigint; + constructor(data?: PartialMessage); static readonly runtime: typeof proto2; diff --git a/livekit-ffi-node-bindings/proto/room_pb.js b/livekit-ffi-node-bindings/proto/room_pb.js index 579d0474d..410908d8f 100644 --- a/livekit-ffi-node-bindings/proto/room_pb.js +++ b/livekit-ffi-node-bindings/proto/room_pb.js @@ -160,6 +160,7 @@ const DisconnectRequest = /*@__PURE__*/ proto2.makeMessageType( () => [ { no: 1, name: "room_handle", kind: "scalar", T: 4 /* ScalarType.UINT64 */, req: true }, { no: 2, name: "request_async_id", kind: "scalar", T: 4 /* ScalarType.UINT64 */, opt: true }, + { no: 3, name: "reason", kind: "enum", T: proto2.getEnumType(DisconnectReason), opt: true }, ], ); @@ -679,6 +680,8 @@ const RoomOptions = /*@__PURE__*/ proto2.makeMessageType( { no: 5, name: "rtc_config", kind: "message", T: RtcConfig, opt: true }, { no: 6, name: "join_retries", kind: "scalar", T: 13 /* ScalarType.UINT32 */, opt: true }, { no: 7, name: "encryption", kind: "message", T: E2eeOptions, opt: true }, + { no: 8, name: "single_peer_connection", kind: "scalar", T: 8 /* ScalarType.BOOL */, opt: true }, + { no: 9, name: "connect_timeout_ms", kind: "scalar", T: 4 /* ScalarType.UINT64 */, opt: true }, ], ); diff --git a/livekit-ffi-node-bindings/proto/track_pb.d.ts b/livekit-ffi-node-bindings/proto/track_pb.d.ts index 89cb7e8b8..e4a854afd 100644 --- a/livekit-ffi-node-bindings/proto/track_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/track_pb.d.ts @@ -135,6 +135,21 @@ export declare enum AudioTrackFeature { TF_PRECONNECT_BUFFER = 6, } +/** + * @generated from enum livekit.proto.PacketTrailerFeature + */ +export 
declare enum PacketTrailerFeature { + /** + * @generated from enum value: PTF_USER_TIMESTAMP = 0; + */ + PTF_USER_TIMESTAMP = 0, + + /** + * @generated from enum value: PTF_FRAME_ID = 1; + */ + PTF_FRAME_ID = 1, +} + /** * Create a new VideoTrack from a VideoSource * @@ -415,6 +430,11 @@ export declare class TrackPublicationInfo extends Message */ audioFeatures: AudioTrackFeature[]; + /** + * @generated from field: repeated livekit.proto.PacketTrailerFeature packet_trailer_features = 13; + */ + packetTrailerFeatures: PacketTrailerFeature[]; + constructor(data?: PartialMessage); static readonly runtime: typeof proto2; diff --git a/livekit-ffi-node-bindings/proto/track_pb.js b/livekit-ffi-node-bindings/proto/track_pb.js index 4a150c747..4755e0b2f 100644 --- a/livekit-ffi-node-bindings/proto/track_pb.js +++ b/livekit-ffi-node-bindings/proto/track_pb.js @@ -79,6 +79,17 @@ const AudioTrackFeature = /*@__PURE__*/ proto2.makeEnum( ], ); +/** + * @generated from enum livekit.proto.PacketTrailerFeature + */ +const PacketTrailerFeature = /*@__PURE__*/ proto2.makeEnum( + "livekit.proto.PacketTrailerFeature", + [ + {no: 0, name: "PTF_USER_TIMESTAMP"}, + {no: 1, name: "PTF_FRAME_ID"}, + ], +); + /** * Create a new VideoTrack from a VideoSource * @@ -184,6 +195,7 @@ const TrackPublicationInfo = /*@__PURE__*/ proto2.makeMessageType( { no: 10, name: "remote", kind: "scalar", T: 8 /* ScalarType.BOOL */, req: true }, { no: 11, name: "encryption_type", kind: "enum", T: proto2.getEnumType(EncryptionType), req: true }, { no: 12, name: "audio_features", kind: "enum", T: proto2.getEnumType(AudioTrackFeature), repeated: true }, + { no: 13, name: "packet_trailer_features", kind: "enum", T: proto2.getEnumType(PacketTrailerFeature), repeated: true }, ], ); @@ -307,6 +319,7 @@ exports.TrackKind = TrackKind; exports.TrackSource = TrackSource; exports.StreamState = StreamState; exports.AudioTrackFeature = AudioTrackFeature; +exports.PacketTrailerFeature = PacketTrailerFeature; 
exports.CreateVideoTrackRequest = CreateVideoTrackRequest; exports.CreateVideoTrackResponse = CreateVideoTrackResponse; exports.CreateAudioTrackRequest = CreateAudioTrackRequest; diff --git a/livekit-ffi-node-bindings/proto/video_frame_pb.d.ts b/livekit-ffi-node-bindings/proto/video_frame_pb.d.ts index 14f952268..83027f4bf 100644 --- a/livekit-ffi-node-bindings/proto/video_frame_pb.d.ts +++ b/livekit-ffi-node-bindings/proto/video_frame_pb.d.ts @@ -324,12 +324,17 @@ export declare class NewVideoSourceRequest extends Message); static readonly runtime: typeof proto2; diff --git a/livekit-ffi-node-bindings/proto/video_frame_pb.js b/livekit-ffi-node-bindings/proto/video_frame_pb.js index a6b174c4f..86f14d906 100644 --- a/livekit-ffi-node-bindings/proto/video_frame_pb.js +++ b/livekit-ffi-node-bindings/proto/video_frame_pb.js @@ -159,6 +159,7 @@ const NewVideoSourceRequest = /*@__PURE__*/ proto2.makeMessageType( () => [ { no: 1, name: "type", kind: "enum", T: proto2.getEnumType(VideoSourceType), req: true }, { no: 2, name: "resolution", kind: "message", T: VideoSourceResolution, req: true }, + { no: 3, name: "is_screencast", kind: "scalar", T: 8 /* ScalarType.BOOL */, opt: true }, ], );