From ad2c84124b399217362af1851f96cc1a6afb7724 Mon Sep 17 00:00:00 2001 From: David Chen Date: Mon, 27 Oct 2025 22:44:58 -0700 Subject: [PATCH 01/39] wip --- Cargo.lock | 506 ++++++++++++++++++++++--- Cargo.toml | 1 + examples/local_video/Cargo.toml | 38 ++ examples/local_video/README.md | 22 ++ examples/local_video/src/publisher.rs | 200 ++++++++++ examples/local_video/src/subscriber.rs | 168 ++++++++ livekit-protocol/protocol | 2 +- 7 files changed, 883 insertions(+), 54 deletions(-) create mode 100644 examples/local_video/Cargo.toml create mode 100644 examples/local_video/README.md create mode 100644 examples/local_video/src/publisher.rs create mode 100644 examples/local_video/src/subscriber.rs diff --git a/Cargo.lock b/Cargo.lock index 940ee82c0..dc18295c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -49,7 +49,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "cipher", "cpufeatures", ] @@ -72,7 +72,7 @@ version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "getrandom 0.3.4", "once_cell", "serde", @@ -97,7 +97,7 @@ checksum = "ed7572b7ba83a31e20d1b48970ee402d2e3e0537dcfe0a3ff4d6eb7508617d43" dependencies = [ "alsa-sys", "bitflags 2.9.4", - "cfg-if", + "cfg-if 1.0.3", "libc", ] @@ -335,7 +335,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.3", "concurrent-queue", "futures-io", "futures-lite 2.6.1", @@ -491,12 +491,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", - "cfg-if", + "cfg-if 1.0.3", "libc", "miniz_oxide", "object", "rustc-demangle", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -539,6 +539,29 @@ dependencies = [ "tokio", ] +[[package]] +name = "bindgen" +version = "0.65.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.106", + "which", +] + [[package]] name = "bindgen" version = "0.72.1" @@ -776,6 +799,12 @@ dependencies = [ "nom", ] +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.3" @@ -798,7 +827,7 @@ dependencies = [ "js-sys", "num-traits", "wasm-bindgen", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -871,6 +900,34 @@ dependencies = [ "error-code", ] +[[package]] +name = "cocoa" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c49e86fc36d5704151f5996b7b3795385f50ce09e3be0f47a0cfde869681cf8" +dependencies = [ + "bitflags 1.3.2", + "block", + "core-foundation 0.7.0", + "core-graphics 0.19.2", + "foreign-types 0.3.2", + "libc", + "objc", +] + +[[package]] +name = "cocoa-foundation" +version = "0.2.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81411967c50ee9a1fc11365f8c585f863a22a9697c89239c452292c40ba79b0d" +dependencies = [ + "bitflags 2.9.4", + "block", + "core-foundation 0.10.1", + "core-graphics-types 0.2.0", + "objc", +] + [[package]] name = "codespan-reporting" version = "0.11.1" @@ -975,13 +1032,23 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "core-foundation" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d24c7a13c43e870e37c1556b74555437870a04514f7685f5b354e090567171" +dependencies = [ + "core-foundation-sys 0.7.0", + "libc", +] + [[package]] name = "core-foundation" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ - "core-foundation-sys", + "core-foundation-sys 0.8.7", "libc", ] @@ -991,16 +1058,34 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ - "core-foundation-sys", + "core-foundation-sys 0.8.7", "libc", ] +[[package]] +name = "core-foundation-sys" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac" + [[package]] name = "core-foundation-sys" version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +[[package]] +name = "core-graphics" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3889374e6ea6ab25dba90bb5d96202f61108058361f6dc72e8b03e6f8bbe923" +dependencies = [ + "bitflags 1.3.2", + "core-foundation 0.7.0", + "foreign-types 0.3.2", + "libc", +] + [[package]] name = "core-graphics" version = "0.23.2" @@ -1009,7 +1094,7 @@ checksum = "c07782be35f9e1140080c6b96f0d44b739e2278479f64e02fdab4e32dfd8b081" dependencies = [ "bitflags 1.3.2", "core-foundation 0.9.4", - "core-graphics-types", + "core-graphics-types 0.1.3", "foreign-types 0.5.0", "libc", ] @@ -1025,6 +1110,42 @@ dependencies = [ "libc", ] +[[package]] +name = "core-graphics-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" +dependencies = [ + "bitflags 2.9.4", + "core-foundation 0.10.1", + "libc", +] + +[[package]] +name = "core-media-sys" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "273bf3fc5bf51fd06a7766a84788c1540b6527130a0bce39e00567d6ab9f31f1" +dependencies = [ + "cfg-if 0.1.10", + "core-foundation-sys 0.7.0", + "libc", +] + +[[package]] +name = "core-video-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ecad23610ad9757664d644e369246edde1803fcb43ed72876565098a5d3828" +dependencies = [ + "cfg-if 0.1.10", + "core-foundation-sys 0.7.0", + "core-graphics 0.19.2", + "libc", + "metal 0.18.0", + "objc", +] + [[package]] name = "coreaudio-rs" version = "0.11.3" @@ -1032,7 +1153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "321077172d79c662f64f5071a03120748d5bb652f5231570141be24cfcd2bace" dependencies = [ "bitflags 
1.3.2", - "core-foundation-sys", + "core-foundation-sys 0.8.7", "coreaudio-sys", ] @@ -1042,7 +1163,7 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ceec7a6067e62d6f931a2baf6f3a751f4a892595bcec1461a3c94ef9949864b6" dependencies = [ - "bindgen", + "bindgen 0.72.1", ] [[package]] @@ -1052,7 +1173,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "873dab07c8f743075e57f524c583985fbaf745602acbe916a01539364369a779" dependencies = [ "alsa", - "core-foundation-sys", + "core-foundation-sys 0.8.7", "coreaudio-rs", "dasp_sample", "jni", @@ -1083,7 +1204,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", ] [[package]] @@ -1276,7 +1397,7 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "hashbrown 0.14.5", "lock_api", "once_cell", @@ -1500,7 +1621,7 @@ version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", ] [[package]] @@ -1723,6 +1844,18 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "flume" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +dependencies = [ + "futures-core", + "futures-sink", + "nanorand", + "spin", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1957,7 +2090,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bd49230192a3797a9a4d6abe9b3eed6f7fa4c8a8a4947977c6f80025f92cbd8" dependencies = [ "rustix 1.1.2", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -1966,7 +2099,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "js-sys", "libc", "wasi", @@ -1979,7 +2112,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "js-sys", "libc", "r-efi", @@ -2129,7 +2262,7 @@ version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "crunchy", "num-traits", "zerocopy", @@ -2419,7 +2552,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", - "core-foundation-sys", + "core-foundation-sys 0.8.7", "iana-time-zone-haiku", "js-sys", "log", @@ -2623,7 +2756,7 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", ] [[package]] @@ -2770,7 +2903,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" dependencies = [ "cesu8", - "cfg-if", + 
"cfg-if 1.0.3", "combine", "jni-sys", "log", @@ -2859,6 +2992,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "lebe" version = "0.5.3" @@ -2877,8 +3016,8 @@ version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ - "cfg-if", - "windows-link", + "cfg-if 1.0.3", + "windows-link 0.2.1", ] [[package]] @@ -3094,6 +3233,31 @@ dependencies = [ "tokio", ] +[[package]] +name = "local_video" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "eframe", + "egui", + "egui-wgpu", + "env_logger 0.10.2", + "futures", + "image 0.24.9", + "libwebrtc", + "livekit", + "livekit-api", + "log", + "nokhwa", + "objc2 0.6.3", + "parking_lot", + "tokio", + "webrtc-sys", + "wgpu 25.0.2", + "winit", +] + [[package]] name = "lock_api" version = "0.4.14" @@ -3166,6 +3330,21 @@ dependencies = [ "libc", ] +[[package]] +name = "metal" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e198a0ee42bdbe9ef2c09d0b9426f3b2b47d90d93a4a9b0395c4cea605e92dc0" +dependencies = [ + "bitflags 1.3.2", + "block", + "cocoa", + "core-graphics 0.19.2", + "foreign-types 0.3.2", + "log", + "objc", +] + [[package]] name = "metal" version = "0.31.0" @@ -3174,7 +3353,7 @@ checksum = "f569fb946490b5743ad69813cb19629130ce9374034abe31614a36402d18f99e" dependencies = [ "bitflags 2.9.4", "block", - "core-graphics-types", + "core-graphics-types 0.1.3", "foreign-types 0.5.0", "log", "objc", @@ -3290,6 +3469,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "nanorand" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3" +dependencies = [ + "getrandom 0.2.16", +] + [[package]] name = "native-tls" version = "0.2.14" @@ -3366,6 +3554,72 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" +[[package]] +name = "nokhwa" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c35ed9613f002f8095aafc97ad839e0bb6cebf79111c68265d8df212a5a294" +dependencies = [ + "flume", + "image 0.25.8", + "nokhwa-bindings-linux", + "nokhwa-bindings-macos", + "nokhwa-bindings-windows", + "nokhwa-core", + "parking_lot", + "paste", + "thiserror 2.0.17", +] + +[[package]] +name = "nokhwa-bindings-linux" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9201817bb00fa911c0aaf5ae7653b2f7a81a0492d119753ac85b74c2c5f177f" +dependencies = [ + "nokhwa-core", + "v4l", +] + +[[package]] +name = "nokhwa-bindings-macos" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de78eb4a2d47a68f490899aa0516070d7a972f853ec2bb374ab53be0bd39b60f" +dependencies = [ + "block", + "cocoa-foundation", + "core-foundation 0.10.1", + "core-media-sys", + "core-video-sys", + "flume", + "nokhwa-core", + "objc", + "once_cell", +] + +[[package]] +name = "nokhwa-bindings-windows" +version = "0.4.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2597245a984c92a9f2bcb239d85bbc62b34f8b277c2648f51f5c78b84b38da46" +dependencies = [ + "nokhwa-core", + "once_cell", + "windows 0.61.3", +] + +[[package]] +name = "nokhwa-core" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "903f3e0f406f7e9aad4fa0566c1d97cc7f88aab57847e1f919d1a34812dedee3" +dependencies = [ + "bytes", + "image 0.25.8", + "thiserror 2.0.17", +] + [[package]] name = "nom" version = "7.1.3" @@ -3441,6 +3695,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" dependencies = [ "malloc_buf", + "objc_exception", ] [[package]] @@ -3713,6 +3968,15 @@ dependencies = [ "objc2-foundation 0.2.2", ] +[[package]] +name = "objc_exception" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad970fb455818ad6cba4c122ad012fae53ae8b4795f86378bce65e4f6bab2ca4" +dependencies = [ + "cc", +] + [[package]] name = "object" version = "0.37.3" @@ -3764,7 +4028,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ "bitflags 2.9.4", - "cfg-if", + "cfg-if 1.0.3", "foreign-types 0.3.2", "libc", "once_cell", @@ -3861,12 +4125,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "backtrace", - "cfg-if", + "cfg-if 1.0.3", "libc", "petgraph 0.6.5", "redox_syscall 0.5.18", "smallvec", - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -3935,6 +4199,12 @@ dependencies = [ "sha2", ] +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + [[package]] name = "percent-encoding" version = "2.3.2" @@ -4055,7 +4325,7 @@ checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ "autocfg", "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.3", "concurrent-queue", "libc", "log", @@ -4069,7 +4339,7 @@ version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "concurrent-queue", "hermit-abi", "pin-project-lite", @@ -4597,7 +4867,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", - "cfg-if", + "cfg-if 1.0.3", "getrandom 0.2.16", "libc", "untrusted", @@ -4852,7 +5122,7 @@ checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.9.4", "core-foundation 0.9.4", - "core-foundation-sys", + "core-foundation-sys 0.8.7", "libc", "security-framework-sys", ] @@ -4865,7 +5135,7 @@ checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ "bitflags 2.9.4", "core-foundation 0.10.1", - "core-foundation-sys", + "core-foundation-sys 0.8.7", "libc", "security-framework-sys", ] @@ -4876,7 +5146,7 @@ version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ - "core-foundation-sys", + "core-foundation-sys 0.8.7", 
"libc", ] @@ -4960,7 +5230,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "cpufeatures", "digest", ] @@ -4971,7 +5241,7 @@ version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "cpufeatures", "digest", ] @@ -5111,6 +5381,15 @@ dependencies = [ "hound", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + [[package]] name = "spirv" version = "0.3.0+sdk-1.3.268.0" @@ -5316,7 +5595,7 @@ version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", ] [[package]] @@ -5372,7 +5651,7 @@ dependencies = [ "arrayref", "arrayvec", "bytemuck", - "cfg-if", + "cfg-if 1.0.3", "log", "tiny-skia-path", ] @@ -5853,6 +6132,26 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "v4l" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8fbfea44a46799d62c55323f3c55d06df722fbe577851d848d328a1041c3403" +dependencies = [ + "bitflags 1.3.2", + "libc", + "v4l2-sys-mit", +] + +[[package]] +name = "v4l2-sys-mit" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6779878362b9bacadc7893eac76abe69612e8837ef746573c4a5239daf11990b" +dependencies = [ + "bindgen 0.65.1", +] + [[package]] name = "valuable" version = "0.1.1" @@ -5923,7 +6222,7 @@ version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "once_cell", "rustversion", "wasm-bindgen-macro", @@ -5950,7 +6249,7 @@ version = "0.4.54" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" dependencies = [ - "cfg-if", + "cfg-if 1.0.3", "js-sys", "once_cell", "wasm-bindgen", @@ -6341,7 +6640,7 @@ dependencies = [ "block", "bytemuck", "cfg_aliases", - "core-graphics-types", + "core-graphics-types 0.1.3", "glow", "glutin_wgl_sys", "gpu-alloc", @@ -6351,7 +6650,7 @@ dependencies = [ "libc", "libloading", "log", - "metal", + "metal 0.31.0", "naga 24.0.0", "ndk-sys 0.5.0+25.2.9519653", "objc", @@ -6383,9 +6682,9 @@ dependencies = [ "bitflags 2.9.4", "block", "bytemuck", - "cfg-if", + "cfg-if 1.0.3", "cfg_aliases", - "core-graphics-types", + "core-graphics-types 0.1.3", "glow", "glutin_wgl_sys", "gpu-alloc", @@ -6397,7 +6696,7 @@ dependencies = [ "libc", "libloading", "log", - "metal", + "metal 0.31.0", "naga 25.0.1", "ndk-sys 0.5.0+25.2.9519653", "objc", @@ -6465,6 +6764,18 @@ dependencies = [ "winit", ] +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.44", +] + [[package]] 
name = "winapi" version = "0.3.9" @@ -6516,6 +6827,28 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + [[package]] name = "windows-core" version = "0.54.0" @@ -6539,6 +6872,19 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + [[package]] name = "windows-core" version = "0.62.2" @@ -6547,11 +6893,22 @@ checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement 0.60.2", "windows-interface 0.59.3", - "windows-link", + "windows-link 0.2.1", "windows-result 0.4.1", "windows-strings 0.5.1", ] +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", +] + [[package]] name = "windows-implement" version = "0.58.0" @@ -6596,12 +6953,28 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -6620,13 +6993,22 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-result" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -6639,13 +7021,22 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] 
name = "windows-strings" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -6699,7 +7090,7 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link", + "windows-link 0.2.1", ] [[package]] @@ -6754,7 +7145,7 @@ version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link", + "windows-link 0.2.1", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", @@ -6765,6 +7156,15 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -6961,7 +7361,7 @@ dependencies = [ "cfg_aliases", "concurrent-queue", "core-foundation 0.9.4", - "core-graphics", + "core-graphics 0.23.2", "cursor-icon", "dpi", "js-sys", @@ -7109,7 +7509,7 @@ dependencies = [ name = "yuv-sys" version = "0.3.10" dependencies = [ - "bindgen", + "bindgen 0.72.1", "cc", "rayon", "regex", diff --git a/Cargo.toml b/Cargo.toml index 03f91301f..76fab7509 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ members = [ "examples/basic_text_stream", "examples/encrypted_text_stream", "examples/local_audio", + "examples/local_video", "examples/mobile", "examples/play_from_disk", "examples/rpc", diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml new file mode 100644 index 000000000..382e9744e --- /dev/null +++ b/examples/local_video/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "local_video" +version = "0.1.0" +edition = "2021" +publish = false + +[[bin]] +name = "publisher" +path = "src/publisher.rs" + +[[bin]] +name = "subscriber" +path = "src/subscriber.rs" + +[dependencies] +tokio = { version = "1", features = ["full", "parking_lot"] } +livekit = { workspace = true, features = ["rustls-tls-native-roots"] } +webrtc-sys = { workspace = true } +libwebrtc = { workspace = true } +livekit-api = { workspace = true } +futures = "0.3" +clap = { version = "4.5", features = ["derive"] } +log = "0.4" +env_logger = "0.10.0" +nokhwa = { version = "0.10", default-features = false, features = ["input-avfoundation", "input-v4l", "input-msmf", "output-threaded"] } +image = "0.24" +egui = "0.31.1" +egui-wgpu = "0.31.1" +eframe = { version = "0.31.1", default-features = false, features = ["default_fonts", "wgpu", "persistence"] } +wgpu = "25.0" +winit = { version = "0.30.11", features = ["android-native-activity"] } +parking_lot = { version = "0.12.1", features = ["deadlock_detection"] } +anyhow = "1" + +[target.'cfg(target_os = "macos")'.dependencies] +objc2 = { version = "0.6.0", features = ["relax-sign-encoding"] } + + diff --git a/examples/local_video/README.md b/examples/local_video/README.md new file mode 100644 index 000000000..5e3588fb9 --- /dev/null +++ b/examples/local_video/README.md @@ -0,0 +1,22 @@ +# local_video + +Two examples demonstrating capturing frames from a local camera video and publishing to LiveKit, and 
subscribing to render video in a window. + +- publisher: capture from a selected camera and publish a video track +- subscriber: connect to a room, subscribe to video tracks, and display in a window + +Environment variables required for both: +- LIVEKIT_URL +- LIVEKIT_API_KEY +- LIVEKIT_API_SECRET + +Publisher usage: +``` + cargo run -p local_video --bin publisher -- --list-cameras + cargo run -p local_video --bin publisher -- --camera-index 0 --room-name demo --identity cam-1 +``` + +Subscriber usage: +``` + cargo run -p local_video --bin subscriber -- --room-name demo --identity viewer-1 +``` diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs new file mode 100644 index 000000000..11936e3eb --- /dev/null +++ b/examples/local_video/src/publisher.rs @@ -0,0 +1,200 @@ +use anyhow::Result; +use clap::Parser; +use livekit::webrtc::native::yuv_helper; +use livekit::options::{TrackPublishOptions, VideoCodec}; +use livekit::prelude::*; +use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; +use livekit::webrtc::video_source::native::NativeVideoSource; +use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution}; +use livekit_api::access_token; +use log::{error, info, warn}; +use nokhwa::pixel_format::RgbFormat; +use nokhwa::utils::{ApiBackend, CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution}; +use nokhwa::Camera; +use std::env; +use std::time::{Duration, Instant}; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// List available cameras and exit + #[arg(long)] + list_cameras: bool, + + /// Camera index to use (numeric) + #[arg(long, default_value_t = 0)] + camera_index: usize, + + /// Desired width + #[arg(long, default_value_t = 1280)] + width: u32, + + /// Desired height + #[arg(long, default_value_t = 720)] + height: u32, + + /// Desired framerate + #[arg(long, default_value_t = 30)] + fps: u32, + + /// LiveKit participant identity + #[arg(long, default_value = "rust-camera-pub")] + identity: String, + + /// LiveKit room name + #[arg(long, default_value = "video-room")] + room_name: String, + + /// LiveKit server URL + #[arg(long)] + url: Option, + + /// LiveKit API key + #[arg(long)] + api_key: Option, + + /// LiveKit API secret + #[arg(long)] + api_secret: Option, +} + +fn list_cameras() -> Result<()> { + let cams = nokhwa::query(ApiBackend::Auto)?; + println!("Available cameras:"); + for (i, cam) in cams.iter().enumerate() { + println!("{}. 
{}", i, cam.human_name()); + } + Ok(()) +} + +#[tokio::main] +async fn main() -> Result<()> { + env_logger::init(); + let args = Args::parse(); + + if args.list_cameras { + return list_cameras(); + } + + // LiveKit connection details + let url = args.url.or_else(|| env::var("LIVEKIT_URL").ok()).expect( + "LIVEKIT_URL must be provided via --url or env", + ); + let api_key = args + .api_key + .or_else(|| env::var("LIVEKIT_API_KEY").ok()) + .expect("LIVEKIT_API_KEY must be provided via --api-key or env"); + let api_secret = args + .api_secret + .or_else(|| env::var("LIVEKIT_API_SECRET").ok()) + .expect("LIVEKIT_API_SECRET must be provided via --api-secret or env"); + + let token = access_token::AccessToken::with_api_key(&api_key, &api_secret) + .with_identity(&args.identity) + .with_name(&args.identity) + .with_grants(access_token::VideoGrants { + room_join: true, + room: args.room_name.clone(), + can_publish: true, + ..Default::default() + }) + .to_jwt()?; + + info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); + let mut room_options = RoomOptions::default(); + room_options.auto_subscribe = true; + let (room, _) = Room::connect(&url, &token, room_options).await?; + let room = std::sync::Arc::new(room); + info!("Connected: {} - {}", room.name(), room.sid().await); + + // Setup camera + let index = CameraIndex::Index(args.camera_index as u32); + let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); + let mut camera = Camera::new(index, requested)?; + // Try to honor requested size/fps if supported + let _ = camera.set_camera_format(CameraFormat::new( + Resolution::new(args.width, args.height), + FrameFormat::MJPEG, + args.fps, + )); + camera.open_stream()?; + let fmt = camera.camera_format(); + let width = fmt.width(); + let height = fmt.height(); + let fps = fmt.frame_rate(); + info!("Camera opened: {}x{} @ {} fps", width, height, fps); + + // Create LiveKit video source and track + let rtc_source = NativeVideoSource::new(VideoResolution { width, height }); + let track = LocalVideoTrack::create_video_track( + "camera", + RtcVideoSource::Native(rtc_source.clone()), + ); + + room + .local_participant() + .publish_track( + LocalTrack::Video(track.clone()), + TrackPublishOptions { + source: TrackSource::Camera, + simulcast: true, + video_codec: VideoCodec::H264, + ..Default::default() + }, + ) + .await?; + info!("Published camera track"); + + // Reusable I420 buffer and frame + let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; + + // Capture loop + let mut last = Instant::now(); + loop { + // Get frame as RGB + let frame_buf = camera.frame()?; + let rgb = frame_buf.decode_image::()?; + let rgba_stride = (width * 4) as u32; + + // Convert RGB to ABGR in-place buffer (expand to 4 channels) + // Build a temporary ABGR buffer + let mut abgr = vec![0u8; (width * height * 4) as usize]; + for (i, chunk) in rgb.as_raw().chunks_exact(3).enumerate() { + let r = chunk[0]; + let g = chunk[1]; + let b = chunk[2]; + let o = i * 4; + // ABGR layout + abgr[o] = 255; + abgr[o + 1] = b; + abgr[o + 2] = g; + abgr[o + 3] = r; + } + + // Fill i420 buffer + let (stride_y, stride_u, stride_v) = frame.buffer.strides(); + let (data_y, data_u, data_v) = frame.buffer.data_mut(); + yuv_helper::abgr_to_i420( + &abgr, + rgba_stride, + data_y, + stride_y, + data_u, + stride_u, + data_v, + stride_v, + width as i32, + height as i32, + ); + + rtc_source.capture_frame(&frame); + + // 
Simple pacing + let elapsed = last.elapsed(); + let target = Duration::from_secs_f32(1.0 / fps as f32); + if elapsed < target { tokio::time::sleep(target - elapsed).await; } + last = Instant::now(); + } +} + + diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs new file mode 100644 index 000000000..7722ad790 --- /dev/null +++ b/examples/local_video/src/subscriber.rs @@ -0,0 +1,168 @@ +use anyhow::Result; +use clap::Parser; +use eframe::egui; +use futures::StreamExt; +use livekit::prelude::*; +use libwebrtc::prelude::VideoBuffer; +use livekit::webrtc::video_stream::native::NativeVideoStream; +use livekit_api::access_token; +use log::{debug, info}; +use parking_lot::Mutex; +use std::sync::Arc; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// LiveKit participant identity + #[arg(long, default_value = "rust-video-subscriber")] + identity: String, + + /// LiveKit room name + #[arg(long, default_value = "video-room")] + room_name: String, + + /// LiveKit server URL + #[arg(long)] + url: Option, + + /// LiveKit API key + #[arg(long)] + api_key: Option, + + /// LiveKit API secret + #[arg(long)] + api_secret: Option, +} + +struct SharedFrame { + width: u32, + height: u32, + rgba: Vec, + dirty: bool, +} + +struct VideoApp { + shared: Arc>, + texture: Option, +} + +impl eframe::App for VideoApp { + fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) { + egui::CentralPanel::default().show(ctx, |ui| { + let mut shared = self.shared.lock(); + if shared.dirty { + let size = [shared.width as usize, shared.height as usize]; + let image = egui::ColorImage::from_rgba_unmultiplied(size, &shared.rgba); + match &mut self.texture { + Some(tex) => tex.set(image, egui::TextureOptions::LINEAR), + None => { + self.texture = Some(ui.ctx().load_texture( + "remote-video", + image, + egui::TextureOptions::LINEAR, + )); + } + } + shared.dirty = false; + } + + if let Some(tex) = &self.texture { + let tex_size = tex.size_vec2(); + let available = ui.available_size(); + let scale = (available.x / tex_size.x).min(available.y / tex_size.y).max(1.0); + let desired = tex_size * scale; + ui.image((tex.id(), desired)); + } else { + ui.heading("Waiting for video..."); + } + }); + + ctx.request_repaint(); + } +} + +#[tokio::main] +async fn main() -> Result<()> { + env_logger::init(); + let args = Args::parse(); + + // LiveKit connection details + let url = std::env::var("LIVEKIT_URL").ok().or(args.url).expect( + "LIVEKIT_URL must be provided via --url or env", + ); + let api_key = std::env::var("LIVEKIT_API_KEY").ok().or(args.api_key).expect( + "LIVEKIT_API_KEY must be provided via --api-key or env", + ); + let api_secret = std::env::var("LIVEKIT_API_SECRET").ok().or(args.api_secret).expect( + "LIVEKIT_API_SECRET must be provided via --api-secret or env", + ); + + let token = access_token::AccessToken::with_api_key(&api_key, &api_secret) + .with_identity(&args.identity) + .with_name(&args.identity) + .with_grants(access_token::VideoGrants { + room_join: true, + room: args.room_name.clone(), + can_subscribe: true, + ..Default::default() + }) + .to_jwt()?; + + info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity); + let mut room_options = RoomOptions::default(); + room_options.auto_subscribe = true; + let (room, _) = Room::connect(&url, &token, room_options).await?; + let room = Arc::new(room); + info!("Connected: {} - {}", room.name(), room.sid().await); + + // Shared frame buffer for UI + 
let shared = Arc::new(Mutex::new(SharedFrame { width: 0, height: 0, rgba: Vec::new(), dirty: false })); + + // Subscribe to room events: on first video track, start sink task + let shared_clone = shared.clone(); + let rt = tokio::runtime::Handle::current(); + tokio::spawn(async move { + let mut events = room.subscribe(); + while let Some(evt) = events.recv().await { + if let RoomEvent::TrackSubscribed { track, .. } = evt { + if let livekit::track::RemoteTrack::Video(video_track) = track { + info!("Subscribed to video track: {}", video_track.name()); + // Start background sink thread + let shared2 = shared_clone.clone(); + std::thread::spawn(move || { + let mut sink = NativeVideoStream::new(video_track.rtc_track()); + while let Some(frame) = rt.block_on(sink.next()) { + let buffer = frame.buffer.to_i420(); + let w = buffer.width(); + let h = buffer.height(); + + let (sy, su, sv) = buffer.strides(); + let (dy, du, dv) = buffer.data(); + + let mut rgba = vec![0u8; (w * h * 4) as usize]; + libwebrtc::native::yuv_helper::i420_to_rgba( + dy, sy, du, su, dv, sv, &mut rgba, w * 4, w as i32, h as i32, + ); + + let mut s = shared2.lock(); + s.width = w; + s.height = h; + s.rgba = rgba; + s.dirty = true; + } + }); + break; + } + } + } + }); + + // Start UI + let app = VideoApp { shared, texture: None }; + let native_options = eframe::NativeOptions::default(); + eframe::run_native("LiveKit Video Subscriber", native_options, Box::new(|_| Ok::, _>(Box::new(app))))?; + + Ok(()) +} + + diff --git a/livekit-protocol/protocol b/livekit-protocol/protocol index 2bc93ddc2..e038e7944 160000 --- a/livekit-protocol/protocol +++ b/livekit-protocol/protocol @@ -1 +1 @@ -Subproject commit 2bc93ddc27ccfa66ee8d270a1bcd115586fb601d +Subproject commit e038e7944595dd9a00871ee5ed52ba6062f76c1e From 664d4b338bee4b36d43e1b085892d0e64d3b599e Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 28 Oct 2025 00:04:03 -0700 Subject: [PATCH 02/39] wip publishing from local camera --- Cargo.lock | 1 + examples/local_video/Cargo.toml | 1 + examples/local_video/README.md | 26 +++- examples/local_video/src/publisher.rs | 197 +++++++++++++++++++------ examples/local_video/src/subscriber.rs | 61 ++++++-- 5 files changed, 220 insertions(+), 66 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dc18295c3..8a3df9840 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3256,6 +3256,7 @@ dependencies = [ "webrtc-sys", "wgpu 25.0.2", "winit", + "yuv-sys", ] [[package]] diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 382e9744e..7ded6dc99 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -18,6 +18,7 @@ livekit = { workspace = true, features = ["rustls-tls-native-roots"] } webrtc-sys = { workspace = true } libwebrtc = { workspace = true } livekit-api = { workspace = true } +yuv-sys = { workspace = true } futures = "0.3" clap = { version = "4.5", features = ["derive"] } log = "0.4" diff --git a/examples/local_video/README.md b/examples/local_video/README.md index 5e3588fb9..5589638c4 100644 --- a/examples/local_video/README.md +++ b/examples/local_video/README.md @@ -5,18 +5,36 @@ Two examples demonstrating capturing frames from a local camera video and publis - publisher: capture from a selected camera and publish a video track - subscriber: connect to a room, subscribe to video tracks, and display in a window -Environment variables required for both: -- LIVEKIT_URL -- LIVEKIT_API_KEY -- LIVEKIT_API_SECRET +LiveKit connection can be provided via flags or environment 
variables: +- `--url` or `LIVEKIT_URL` +- `--api-key` or `LIVEKIT_API_KEY` +- `--api-secret` or `LIVEKIT_API_SECRET` Publisher usage: ``` cargo run -p local_video --bin publisher -- --list-cameras cargo run -p local_video --bin publisher -- --camera-index 0 --room-name demo --identity cam-1 + + # with explicit LiveKit connection flags + cargo run -p local_video --bin publisher -- \ + --camera-index 0 \ + --room-name demo \ + --identity cam-1 \ + --url https://your.livekit.server \ + --api-key YOUR_KEY \ + --api-secret YOUR_SECRET ``` Subscriber usage: ``` + # relies on env vars LIVEKIT_URL, LIVEKIT_API_KEY, LIVEKIT_API_SECRET cargo run -p local_video --bin subscriber -- --room-name demo --identity viewer-1 + + # or pass credentials via flags + cargo run -p local_video --bin subscriber -- \ + --room-name demo \ + --identity viewer-1 \ + --url https://your.livekit.server \ + --api-key YOUR_KEY \ + --api-secret YOUR_SECRET ``` diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 11936e3eb..197ae9dc0 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -1,13 +1,13 @@ use anyhow::Result; use clap::Parser; -use livekit::webrtc::native::yuv_helper; use livekit::options::{TrackPublishOptions, VideoCodec}; use livekit::prelude::*; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution}; use livekit_api::access_token; -use log::{error, info, warn}; +use log::{debug, info}; +use yuv_sys as yuv_sys; use nokhwa::pixel_format::RgbFormat; use nokhwa::utils::{ApiBackend, CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution}; use nokhwa::Camera; @@ -107,22 +107,46 @@ async fn main() -> Result<()> { let room = std::sync::Arc::new(room); info!("Connected: {} - {}", room.name(), room.sid().await); + // Log room events + { + let room_clone = room.clone(); + tokio::spawn(async move { + let mut events = room_clone.subscribe(); + info!("Subscribed to room events"); + while let Some(evt) = events.recv().await { + debug!("Room event: {:?}", evt); + } + }); + } + // Setup camera let index = CameraIndex::Index(args.camera_index as u32); let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); let mut camera = Camera::new(index, requested)?; - // Try to honor requested size/fps if supported - let _ = camera.set_camera_format(CameraFormat::new( + // Try raw YUYV first (cheaper than MJPEG), fall back to MJPEG + let wanted = CameraFormat::new( Resolution::new(args.width, args.height), - FrameFormat::MJPEG, + FrameFormat::YUYV, args.fps, - )); + ); + let mut using_fmt = "YUYV"; + if let Err(_) = camera.set_camera_format(wanted) { + let alt = CameraFormat::new( + Resolution::new(args.width, args.height), + FrameFormat::MJPEG, + args.fps, + ); + using_fmt = "MJPEG"; + let _ = camera.set_camera_format(alt); + } camera.open_stream()?; let fmt = camera.camera_format(); let width = fmt.width(); let height = fmt.height(); let fps = fmt.frame_rate(); - info!("Camera opened: {}x{} @ {} fps", width, height, fps); + info!("Camera opened: {}x{} @ {} fps (format: {})", width, height, fps, using_fmt); + // Pace publishing at the requested FPS (not the camera-reported FPS) to hit desired cadence + let pace_fps = args.fps as f64; // Create LiveKit video source and track let rtc_source = NativeVideoSource::new(VideoResolution { width, 
height }); @@ -137,7 +161,7 @@ async fn main() -> Result<()> { LocalTrack::Video(track.clone()), TrackPublishOptions { source: TrackSource::Camera, - simulcast: true, + simulcast: false, video_codec: VideoCodec::H264, ..Default::default() }, @@ -147,53 +171,134 @@ async fn main() -> Result<()> { // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; + let is_yuyv = using_fmt == "YUYV"; + + // Accurate pacing using absolute schedule (no drift) + let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps)); + ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + // Align the first tick to now + ticker.tick().await; + let start_ts = Instant::now(); // Capture loop - let mut last = Instant::now(); + let mut frames: u64 = 0; + let mut last_fps_log = Instant::now(); + let target = Duration::from_secs_f64(1.0 / pace_fps); + info!("Target frame interval: {:.2} ms", target.as_secs_f64() * 1000.0); + + // Timing accumulators (ms) for rolling stats + let mut sum_get_ms = 0.0; + let mut sum_decode_ms = 0.0; + let mut sum_convert_ms = 0.0; + let mut sum_capture_ms = 0.0; + let mut sum_sleep_ms = 0.0; + let mut sum_iter_ms = 0.0; loop { - // Get frame as RGB - let frame_buf = camera.frame()?; - let rgb = frame_buf.decode_image::()?; - let rgba_stride = (width * 4) as u32; - - // Convert RGB to ABGR in-place buffer (expand to 4 channels) - // Build a temporary ABGR buffer - let mut abgr = vec![0u8; (width * height * 4) as usize]; - for (i, chunk) in rgb.as_raw().chunks_exact(3).enumerate() { - let r = chunk[0]; - let g = chunk[1]; - let b = chunk[2]; - let o = i * 4; - // ABGR layout - abgr[o] = 255; - abgr[o + 1] = b; - abgr[o + 2] = g; - abgr[o + 3] = r; - } + // Wait until the scheduled next frame time + let wait_start = Instant::now(); + ticker.tick().await; + let iter_start = Instant::now(); - // Fill i420 buffer + // Get frame as RGB24 (decoded by nokhwa if needed) + let t0 = Instant::now(); + let frame_buf = camera.frame()?; + let t1 = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - yuv_helper::abgr_to_i420( - &abgr, - rgba_stride, - data_y, - stride_y, - data_u, - stride_u, - data_v, - stride_v, - width as i32, - height as i32, - ); + // Fast path for YUYV: convert directly to I420 via libyuv + let t2 = if is_yuyv { + let src = frame_buf.buffer(); + let src_bytes = src.as_ref(); + let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 + let t2_local = t1; // no decode step in YUYV path + unsafe { + // returns 0 on success + let _ = yuv_sys::rs_YUY2ToI420( + src_bytes.as_ptr(), + src_stride, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + t2_local + } else { + // Fallback (e.g., MJPEG): decode to RGB24 then convert to I420 + let rgb = frame_buf.decode_image::()?; + let t2_local = Instant::now(); + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb.as_raw().as_ptr(), + (width * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + t2_local + }; + let t3 = Instant::now(); + // Update RTP timestamp (monotonic, microseconds since start) + frame.timestamp_us = start_ts.elapsed().as_micros() as i64; 
rtc_source.capture_frame(&frame); + let t4 = Instant::now(); - // Simple pacing - let elapsed = last.elapsed(); - let target = Duration::from_secs_f32(1.0 / fps as f32); - if elapsed < target { tokio::time::sleep(target - elapsed).await; } - last = Instant::now(); + frames += 1; + // We already paced via interval; measure actual sleep time for logging only + let sleep_dur = (iter_start - wait_start); + + // Per-iteration timing bookkeeping + let t_end = Instant::now(); + let get_ms = (t1 - t0).as_secs_f64() * 1000.0; + let decode_ms = (t2 - t1).as_secs_f64() * 1000.0; + let convert_ms = (t3 - t2).as_secs_f64() * 1000.0; + let capture_ms = (t4 - t3).as_secs_f64() * 1000.0; + let sleep_ms = sleep_dur.as_secs_f64() * 1000.0; + let iter_ms = (t_end - iter_start).as_secs_f64() * 1000.0; + sum_get_ms += get_ms; + sum_decode_ms += decode_ms; + sum_convert_ms += convert_ms; + sum_capture_ms += capture_ms; + sum_sleep_ms += sleep_ms; + sum_iter_ms += iter_ms; + + if last_fps_log.elapsed() >= std::time::Duration::from_secs(2) { + let secs = last_fps_log.elapsed().as_secs_f64(); + let fps_est = frames as f64 / secs; + let n = frames.max(1) as f64; + info!( + "Publishing video: {}x{}, ~{:.1} fps | avg ms: get {:.2}, decode {:.2}, convert {:.2}, capture {:.2}, sleep {:.2}, iter {:.2} | target {:.2}", + width, + height, + fps_est, + sum_get_ms / n, + sum_decode_ms / n, + sum_convert_ms / n, + sum_capture_ms / n, + sum_sleep_ms / n, + sum_iter_ms / n, + target.as_secs_f64() * 1000.0, + ); + frames = 0; + sum_get_ms = 0.0; + sum_decode_ms = 0.0; + sum_convert_ms = 0.0; + sum_capture_ms = 0.0; + sum_sleep_ms = 0.0; + sum_iter_ms = 0.0; + last_fps_log = Instant::now(); + } } } diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 7722ad790..14484d212 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -8,7 +8,7 @@ use livekit::webrtc::video_stream::native::NativeVideoStream; use livekit_api::access_token; use log::{debug, info}; use parking_lot::Mutex; -use std::sync::Arc; +use std::{env, sync::Arc, time::{Duration, Instant}}; #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] @@ -25,11 +25,11 @@ struct Args { #[arg(long)] url: Option, - /// LiveKit API key + /// LiveKit API key (can also be set via LIVEKIT_API_KEY environment variable) #[arg(long)] api_key: Option, - /// LiveKit API secret + /// LiveKit API secret (can also be set via LIVEKIT_API_SECRET environment variable) #[arg(long)] api_secret: Option, } @@ -54,8 +54,11 @@ impl eframe::App for VideoApp { let size = [shared.width as usize, shared.height as usize]; let image = egui::ColorImage::from_rgba_unmultiplied(size, &shared.rgba); match &mut self.texture { - Some(tex) => tex.set(image, egui::TextureOptions::LINEAR), + Some(tex) => { + tex.set(image, egui::TextureOptions::LINEAR) + } None => { + debug!("Creating texture for remote video: {}x{}", shared.width, shared.height); self.texture = Some(ui.ctx().load_texture( "remote-video", image, @@ -69,7 +72,7 @@ impl eframe::App for VideoApp { if let Some(tex) = &self.texture { let tex_size = tex.size_vec2(); let available = ui.available_size(); - let scale = (available.x / tex_size.x).min(available.y / tex_size.y).max(1.0); + let scale = (available.x / tex_size.x).min(available.y / tex_size.y); let desired = tex_size * scale; ui.image((tex.id(), desired)); } else { @@ -77,7 +80,7 @@ impl eframe::App for VideoApp { } }); - ctx.request_repaint(); + 
ctx.request_repaint_after(Duration::from_millis(16)); } } @@ -86,16 +89,19 @@ async fn main() -> Result<()> { env_logger::init(); let args = Args::parse(); - // LiveKit connection details - let url = std::env::var("LIVEKIT_URL").ok().or(args.url).expect( - "LIVEKIT_URL must be provided via --url or env", - ); - let api_key = std::env::var("LIVEKIT_API_KEY").ok().or(args.api_key).expect( - "LIVEKIT_API_KEY must be provided via --api-key or env", - ); - let api_secret = std::env::var("LIVEKIT_API_SECRET").ok().or(args.api_secret).expect( - "LIVEKIT_API_SECRET must be provided via --api-secret or env", - ); + // LiveKit connection details (prefer CLI args, fallback to env vars) + let url = args + .url + .or_else(|| env::var("LIVEKIT_URL").ok()) + .expect("LiveKit URL must be provided via --url argument or LIVEKIT_URL environment variable"); + let api_key = args + .api_key + .or_else(|| env::var("LIVEKIT_API_KEY").ok()) + .expect("LiveKit API key must be provided via --api-key argument or LIVEKIT_API_KEY environment variable"); + let api_secret = args + .api_secret + .or_else(|| env::var("LIVEKIT_API_SECRET").ok()) + .expect("LiveKit API secret must be provided via --api-secret argument or LIVEKIT_API_SECRET environment variable"); let token = access_token::AccessToken::with_api_key(&api_key, &api_secret) .with_identity(&args.identity) @@ -123,7 +129,9 @@ async fn main() -> Result<()> { let rt = tokio::runtime::Handle::current(); tokio::spawn(async move { let mut events = room.subscribe(); + info!("Subscribed to room events"); while let Some(evt) = events.recv().await { + debug!("Room event: {:?}", evt); if let RoomEvent::TrackSubscribed { track, .. } = evt { if let livekit::track::RemoteTrack::Video(video_track) = track { info!("Subscribed to video track: {}", video_track.name()); @@ -131,6 +139,9 @@ async fn main() -> Result<()> { let shared2 = shared_clone.clone(); std::thread::spawn(move || { let mut sink = NativeVideoStream::new(video_track.rtc_track()); + let mut frames: u64 = 0; + let mut last_log = Instant::now(); + let mut logged_first = false; while let Some(frame) = rt.block_on(sink.next()) { let buffer = frame.buffer.to_i420(); let w = buffer.width(); @@ -139,6 +150,14 @@ async fn main() -> Result<()> { let (sy, su, sv) = buffer.strides(); let (dy, du, dv) = buffer.data(); + if !logged_first { + debug!( + "First frame I420: {}x{}, strides Y/U/V = {}/{}/{}", + w, h, sy, su, sv + ); + logged_first = true; + } + let mut rgba = vec![0u8; (w * h * 4) as usize]; libwebrtc::native::yuv_helper::i420_to_rgba( dy, sy, du, su, dv, sv, &mut rgba, w * 4, w as i32, h as i32, @@ -149,7 +168,17 @@ async fn main() -> Result<()> { s.height = h; s.rgba = rgba; s.dirty = true; + + frames += 1; + let elapsed = last_log.elapsed(); + if elapsed >= Duration::from_secs(2) { + let fps = frames as f64 / elapsed.as_secs_f64(); + info!("Receiving video: {}x{}, ~{:.1} fps", w, h, fps); + frames = 0; + last_log = Instant::now(); + } } + info!("Video stream ended"); }); break; } From af08d3f921892e1db84a9e03e680a700c9ae61a4 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 28 Oct 2025 15:29:55 -0700 Subject: [PATCH 03/39] clean up warnings --- examples/local_video/src/publisher.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 197ae9dc0..ee130b179 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -130,14 +130,14 @@ async fn main() -> Result<()> { args.fps, 
); let mut using_fmt = "YUYV"; - if let Err(_) = camera.set_camera_format(wanted) { + if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) { let alt = CameraFormat::new( Resolution::new(args.width, args.height), FrameFormat::MJPEG, args.fps, ); using_fmt = "MJPEG"; - let _ = camera.set_camera_format(alt); + let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); } camera.open_stream()?; let fmt = camera.camera_format(); @@ -256,7 +256,7 @@ async fn main() -> Result<()> { frames += 1; // We already paced via interval; measure actual sleep time for logging only - let sleep_dur = (iter_start - wait_start); + let sleep_dur = iter_start - wait_start; // Per-iteration timing bookkeeping let t_end = Instant::now(); From dff3168316d7aef9e2215d77329fa562c71a96bf Mon Sep 17 00:00:00 2001 From: github-actions <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 23:08:26 +0000 Subject: [PATCH 04/39] generated protobuf --- livekit-protocol/src/livekit.rs | 251 +--- livekit-protocol/src/livekit.serde.rs | 1579 +------------------------ 2 files changed, 36 insertions(+), 1794 deletions(-) diff --git a/livekit-protocol/src/livekit.rs b/livekit-protocol/src/livekit.rs index 676becb62..e39604819 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -199,12 +199,6 @@ pub struct Pagination { #[prost(int32, tag="2")] pub limit: i32, } -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct TokenPagination { - #[prost(string, tag="1")] - pub token: ::prost::alloc::string::String, -} /// ListUpdate is used for updated APIs where 'repeated string' field is modified. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -212,15 +206,6 @@ pub struct ListUpdate { /// set the field to a new list #[prost(string, repeated, tag="1")] pub set: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, - /// append items to a list, avoiding duplicates - #[prost(string, repeated, tag="2")] - pub add: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, - /// delete items from a list - #[prost(string, repeated, tag="3")] - pub del: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, - /// sets the list to an empty list - #[prost(bool, tag="4")] - pub clear: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -499,14 +484,6 @@ pub struct SimulcastCodecInfo { pub cid: ::prost::alloc::string::String, #[prost(message, repeated, tag="4")] pub layers: ::prost::alloc::vec::Vec, - #[prost(enumeration="video_layer::Mode", tag="5")] - pub video_layer_mode: i32, - /// cid (client side id for track) could be different between - /// signalling (AddTrackRequest) and SDP offer. This field - /// will be populated only if it is different to avoid - /// duplication and keep the representation concise. 
- #[prost(string, tag="6")] - pub sdp_cid: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -521,30 +498,20 @@ pub struct TrackInfo { pub muted: bool, /// original width of video (unset for audio) /// clients may receive a lower resolution version with simulcast - #[deprecated] #[prost(uint32, tag="5")] pub width: u32, /// original height of video (unset for audio) - #[deprecated] #[prost(uint32, tag="6")] pub height: u32, /// true if track is simulcasted - /// - /// see `video_layer_mode` in `codecs` - #[deprecated] #[prost(bool, tag="7")] pub simulcast: bool, /// true if DTX (Discontinuous Transmission) is disabled for audio - /// - /// deprecated in favor of `audio_features` - #[deprecated] #[prost(bool, tag="8")] pub disable_dtx: bool, /// source of media #[prost(enumeration="TrackSource", tag="9")] pub source: i32, - /// see `codecs` for layers of individual codec - #[deprecated] #[prost(message, repeated, tag="10")] pub layers: ::prost::alloc::vec::Vec, /// mime type of codec @@ -554,8 +521,6 @@ pub struct TrackInfo { pub mid: ::prost::alloc::string::String, #[prost(message, repeated, tag="13")] pub codecs: ::prost::alloc::vec::Vec, - /// deprecated in favor of `audio_features` - #[deprecated] #[prost(bool, tag="14")] pub stereo: bool, /// true if RED (Redundant Encoding) is disabled for audio @@ -588,42 +553,6 @@ pub struct VideoLayer { pub bitrate: u32, #[prost(uint32, tag="5")] pub ssrc: u32, - #[prost(int32, tag="6")] - pub spatial_layer: i32, - #[prost(string, tag="7")] - pub rid: ::prost::alloc::string::String, -} -/// Nested message and enum types in `VideoLayer`. -pub mod video_layer { - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] - #[repr(i32)] - pub enum Mode { - Unused = 0, - OneSpatialLayerPerStream = 1, - MultipleSpatialLayersPerStream = 2, - } - impl Mode { - /// String value of the enum field names used in the ProtoBuf definition. - /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. - pub fn as_str_name(&self) -> &'static str { - match self { - Mode::Unused => "MODE_UNUSED", - Mode::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", - Mode::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. - pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "MODE_UNUSED" => Some(Self::Unused), - "ONE_SPATIAL_LAYER_PER_STREAM" => Some(Self::OneSpatialLayerPerStream), - "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Some(Self::MultipleSpatialLayersPerStream), - _ => None, - } - } - } } /// new DataPacket API #[allow(clippy::derive_partial_eq_without_eq)] @@ -644,7 +573,7 @@ pub struct DataPacket { /// sid of the user that sent the message #[prost(string, tag="17")] pub participant_sid: ::prost::alloc::string::String, - #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18")] + #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15")] pub value: ::core::option::Option, } /// Nested message and enum types in `DataPacket`. 
@@ -702,50 +631,6 @@ pub mod data_packet { StreamChunk(super::data_stream::Chunk), #[prost(message, tag="15")] StreamTrailer(super::data_stream::Trailer), - #[prost(message, tag="18")] - EncryptedPacket(super::EncryptedPacket), - } -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct EncryptedPacket { - #[prost(enumeration="encryption::Type", tag="1")] - pub encryption_type: i32, - #[prost(bytes="vec", tag="2")] - pub iv: ::prost::alloc::vec::Vec, - #[prost(uint32, tag="3")] - pub key_index: u32, - /// This is an encrypted EncryptedPacketPayload message representation - #[prost(bytes="vec", tag="4")] - pub encrypted_value: ::prost::alloc::vec::Vec, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct EncryptedPacketPayload { - #[prost(oneof="encrypted_packet_payload::Value", tags="1, 3, 4, 5, 6, 7, 8, 9")] - pub value: ::core::option::Option, -} -/// Nested message and enum types in `EncryptedPacketPayload`. -pub mod encrypted_packet_payload { - #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] - pub enum Value { - #[prost(message, tag="1")] - User(super::UserPacket), - #[prost(message, tag="3")] - ChatMessage(super::ChatMessage), - #[prost(message, tag="4")] - RpcRequest(super::RpcRequest), - #[prost(message, tag="5")] - RpcAck(super::RpcAck), - #[prost(message, tag="6")] - RpcResponse(super::RpcResponse), - #[prost(message, tag="7")] - StreamHeader(super::data_stream::Header), - #[prost(message, tag="8")] - StreamChunk(super::data_stream::Chunk), - #[prost(message, tag="9")] - StreamTrailer(super::data_stream::Trailer), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -790,7 +675,7 @@ pub struct UserPacket { /// topic under which the message was published #[prost(string, optional, tag="4")] pub topic: ::core::option::Option<::prost::alloc::string::String>, - /// Unique ID to identify the message + /// Unique ID to indentify the message #[prost(string, optional, tag="8")] pub id: ::core::option::Option<::prost::alloc::string::String>, /// start and end time allow relating the message to specific media time @@ -1013,7 +898,6 @@ pub mod client_info { UnityWeb = 11, Node = 12, Unreal = 13, - Esp32 = 14, } impl Sdk { /// String value of the enum field names used in the ProtoBuf definition. @@ -1036,7 +920,6 @@ pub mod client_info { Sdk::UnityWeb => "UNITY_WEB", Sdk::Node => "NODE", Sdk::Unreal => "UNREAL", - Sdk::Esp32 => "ESP32", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -1056,7 +939,6 @@ pub mod client_info { "UNITY_WEB" => Some(Self::UnityWeb), "NODE" => Some(Self::Node), "UNREAL" => Some(Self::Unreal), - "ESP32" => Some(Self::Esp32), _ => None, } } @@ -1349,8 +1231,7 @@ pub mod data_stream { /// only populated for finite streams, if it's a stream of unknown size this stays empty #[prost(uint64, optional, tag="5")] pub total_length: ::core::option::Option, - /// this is set on the DataPacket - #[deprecated] + /// defaults to NONE #[prost(enumeration="super::encryption::Type", tag="7")] pub encryption_type: i32, /// user defined attributes map that can carry additional info @@ -1386,8 +1267,7 @@ pub mod data_stream { /// a version indicating that this chunk_index has been retroactively modified and the original one needs to be replaced #[prost(int32, tag="4")] pub version: i32, - /// this is set on the DataPacket - #[deprecated] + /// optional, initialization vector for AES-GCM encryption #[prost(bytes="vec", optional, tag="5")] pub iv: ::core::option::Option<::prost::alloc::vec::Vec>, } @@ -2308,12 +2188,6 @@ pub struct S3Upload { pub secret: ::prost::alloc::string::String, #[prost(string, tag="11")] pub session_token: ::prost::alloc::string::String, - /// ARN of the role to assume for file upload. Egress will make an AssumeRole API call using the provided access_key and secret to assume that role. On LiveKit cloud, this is only available on accounts that have the feature enabled - #[prost(string, tag="12")] - pub assume_role_arn: ::prost::alloc::string::String, - /// ExternalID to use when assuming role for upload - #[prost(string, tag="13")] - pub assume_role_external_id: ::prost::alloc::string::String, #[prost(string, tag="3")] pub region: ::prost::alloc::string::String, #[prost(string, tag="4")] @@ -3000,10 +2874,10 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Message { - /// participant offer for publisher + /// initial join exchange, for publisher #[prost(message, tag="1")] Offer(super::SessionDescription), - /// participant answering subscriber offer + /// participant answering publisher offer #[prost(message, tag="2")] Answer(super::SessionDescription), #[prost(message, tag="3")] @@ -3056,7 +2930,7 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignalResponse { - #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25")] + #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24")] pub message: ::core::option::Option, } /// Nested message and enum types in `SignalResponse`. 
@@ -3136,9 +3010,6 @@ pub mod signal_response { /// notify to the participant when they have been moved to a new room #[prost(message, tag="24")] RoomMoved(super::RoomMovedResponse), - /// notify number of required media sections to satisfy subscribed tracks - #[prost(message, tag="25")] - MediaSectionsRequirement(super::MediaSectionsRequirement), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -3148,10 +3019,6 @@ pub struct SimulcastCodec { pub codec: ::prost::alloc::string::String, #[prost(string, tag="2")] pub cid: ::prost::alloc::string::String, - #[prost(message, repeated, tag="4")] - pub layers: ::prost::alloc::vec::Vec, - #[prost(enumeration="video_layer::Mode", tag="5")] - pub video_layer_mode: i32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -3163,6 +3030,7 @@ pub struct AddTrackRequest { pub name: ::prost::alloc::string::String, #[prost(enumeration="TrackType", tag="3")] pub r#type: i32, + /// to be deprecated in favor of layers #[prost(uint32, tag="4")] pub width: u32, #[prost(uint32, tag="5")] @@ -3566,9 +3434,7 @@ pub struct RoomMovedResponse { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncState { - /// last subscribe/publish answer before reconnecting - /// subscribe answer if using dual peer connection - /// publish answer if using single peer connection + /// last subscribe answer before reconnecting #[prost(message, optional, tag="1")] pub answer: ::core::option::Option, #[prost(message, optional, tag="2")] @@ -3577,9 +3443,7 @@ pub struct SyncState { pub publish_tracks: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag="4")] pub data_channels: ::prost::alloc::vec::Vec, - /// last received server side offer/sent client side offer before reconnecting - /// received server side offer if using dual peer connection - /// sent client side offer if using single peer connection + /// last received server side offer before reconnecting #[prost(message, optional, tag="5")] pub offer: ::core::option::Option, #[prost(string, repeated, tag="6")] @@ -3739,92 +3603,6 @@ pub struct TrackSubscribed { #[prost(string, tag="1")] pub track_sid: ::prost::alloc::string::String, } -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ConnectionSettings { - #[prost(bool, tag="1")] - pub auto_subscribe: bool, - #[prost(bool, tag="2")] - pub adaptive_stream: bool, - #[prost(bool, optional, tag="3")] - pub subscriber_allow_pause: ::core::option::Option, - #[prost(bool, tag="4")] - pub disable_ice_lite: bool, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct JoinRequest { - #[prost(message, optional, tag="1")] - pub client_info: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub connection_settings: ::core::option::Option, - /// if not empty, will overwrite `metadata` in token - #[prost(string, tag="3")] - pub metadata: ::prost::alloc::string::String, - /// will set keys provided via this - /// will overwrite if the same key is in the token - /// will not delete keys from token if there is a key collision and this sets that key to empty value - #[prost(map="string, string", tag="4")] - pub participant_attributes: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, - #[prost(message, repeated, tag="5")] - pub add_track_requests: ::prost::alloc::vec::Vec, - #[prost(message, optional, tag="6")] - pub 
publisher_offer: ::core::option::Option, - #[prost(bool, tag="7")] - pub reconnect: bool, - #[prost(enumeration="ReconnectReason", tag="8")] - pub reconnect_reason: i32, - #[prost(string, tag="9")] - pub participant_sid: ::prost::alloc::string::String, - #[prost(message, optional, tag="10")] - pub sync_state: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct WrappedJoinRequest { - #[prost(enumeration="wrapped_join_request::Compression", tag="1")] - pub compression: i32, - /// marshalled JoinRequest + potentially compressed - #[prost(bytes="vec", tag="2")] - pub join_request: ::prost::alloc::vec::Vec, -} -/// Nested message and enum types in `WrappedJoinRequest`. -pub mod wrapped_join_request { - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] - #[repr(i32)] - pub enum Compression { - None = 0, - Gzip = 1, - } - impl Compression { - /// String value of the enum field names used in the ProtoBuf definition. - /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. - pub fn as_str_name(&self) -> &'static str { - match self { - Compression::None => "NONE", - Compression::Gzip => "GZIP", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. - pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "NONE" => Some(Self::None), - "GZIP" => Some(Self::Gzip), - _ => None, - } - } - } -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct MediaSectionsRequirement { - #[prost(uint32, tag="1")] - pub num_audios: u32, - #[prost(uint32, tag="2")] - pub num_videos: u32, -} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum SignalTarget { @@ -3944,10 +3722,6 @@ pub struct JobState { pub updated_at: i64, #[prost(string, tag="6")] pub participant_identity: ::prost::alloc::string::String, - #[prost(string, tag="7")] - pub worker_id: ::prost::alloc::string::String, - #[prost(string, tag="8")] - pub agent_id: ::prost::alloc::string::String, } /// from Worker to Server #[allow(clippy::derive_partial_eq_without_eq)] @@ -4516,9 +4290,6 @@ pub struct RoomConfiguration { /// limit number of participants that can be in a room, excluding Egress and Ingress participants #[prost(uint32, tag="4")] pub max_participants: u32, - /// metadata of room - #[prost(string, tag="11")] - pub metadata: ::prost::alloc::string::String, /// egress #[prost(message, optional, tag="5")] pub egress: ::core::option::Option, @@ -4980,7 +4751,7 @@ impl IngressVideoEncodingPreset { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WebhookEvent { - /// one of room_started, room_finished, participant_joined, participant_left, participant_connection_aborted, + /// one of room_started, room_finished, participant_joined, participant_left, /// track_published, track_unpublished, egress_started, egress_updated, egress_ended, /// ingress_started, ingress_ended #[prost(string, tag="1")] diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 3c79f6c7a..5c411a433 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -2848,7 +2848,6 @@ impl serde::Serialize for client_info::Sdk { Self::UnityWeb => "UNITY_WEB", Self::Node => 
"NODE", Self::Unreal => "UNREAL", - Self::Esp32 => "ESP32", }; serializer.serialize_str(variant) } @@ -2874,7 +2873,6 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB", "NODE", "UNREAL", - "ESP32", ]; struct GeneratedVisitor; @@ -2929,7 +2927,6 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB" => Ok(client_info::Sdk::UnityWeb), "NODE" => Ok(client_info::Sdk::Node), "UNREAL" => Ok(client_info::Sdk::Unreal), - "ESP32" => Ok(client_info::Sdk::Esp32), _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), } } @@ -3356,156 +3353,6 @@ impl<'de> serde::Deserialize<'de> for ConnectionQualityUpdate { deserializer.deserialize_struct("livekit.ConnectionQualityUpdate", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for ConnectionSettings { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.auto_subscribe { - len += 1; - } - if self.adaptive_stream { - len += 1; - } - if self.subscriber_allow_pause.is_some() { - len += 1; - } - if self.disable_ice_lite { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.ConnectionSettings", len)?; - if self.auto_subscribe { - struct_ser.serialize_field("autoSubscribe", &self.auto_subscribe)?; - } - if self.adaptive_stream { - struct_ser.serialize_field("adaptiveStream", &self.adaptive_stream)?; - } - if let Some(v) = self.subscriber_allow_pause.as_ref() { - struct_ser.serialize_field("subscriberAllowPause", v)?; - } - if self.disable_ice_lite { - struct_ser.serialize_field("disableIceLite", &self.disable_ice_lite)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for ConnectionSettings { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "auto_subscribe", - "autoSubscribe", - "adaptive_stream", - "adaptiveStream", - "subscriber_allow_pause", - "subscriberAllowPause", - "disable_ice_lite", - "disableIceLite", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - AutoSubscribe, - AdaptiveStream, - SubscriberAllowPause, - DisableIceLite, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "autoSubscribe" | "auto_subscribe" => Ok(GeneratedField::AutoSubscribe), - "adaptiveStream" | "adaptive_stream" => Ok(GeneratedField::AdaptiveStream), - "subscriberAllowPause" | "subscriber_allow_pause" => Ok(GeneratedField::SubscriberAllowPause), - "disableIceLite" | "disable_ice_lite" => Ok(GeneratedField::DisableIceLite), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = ConnectionSettings; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.ConnectionSettings") - } - - fn 
visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut auto_subscribe__ = None; - let mut adaptive_stream__ = None; - let mut subscriber_allow_pause__ = None; - let mut disable_ice_lite__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::AutoSubscribe => { - if auto_subscribe__.is_some() { - return Err(serde::de::Error::duplicate_field("autoSubscribe")); - } - auto_subscribe__ = Some(map_.next_value()?); - } - GeneratedField::AdaptiveStream => { - if adaptive_stream__.is_some() { - return Err(serde::de::Error::duplicate_field("adaptiveStream")); - } - adaptive_stream__ = Some(map_.next_value()?); - } - GeneratedField::SubscriberAllowPause => { - if subscriber_allow_pause__.is_some() { - return Err(serde::de::Error::duplicate_field("subscriberAllowPause")); - } - subscriber_allow_pause__ = map_.next_value()?; - } - GeneratedField::DisableIceLite => { - if disable_ice_lite__.is_some() { - return Err(serde::de::Error::duplicate_field("disableIceLite")); - } - disable_ice_lite__ = Some(map_.next_value()?); - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(ConnectionSettings { - auto_subscribe: auto_subscribe__.unwrap_or_default(), - adaptive_stream: adaptive_stream__.unwrap_or_default(), - subscriber_allow_pause: subscriber_allow_pause__, - disable_ice_lite: disable_ice_lite__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.ConnectionSettings", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for CreateAgentDispatchRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -5739,9 +5586,6 @@ impl serde::Serialize for DataPacket { data_packet::Value::StreamTrailer(v) => { struct_ser.serialize_field("streamTrailer", v)?; } - data_packet::Value::EncryptedPacket(v) => { - struct_ser.serialize_field("encryptedPacket", v)?; - } } } struct_ser.end() @@ -5782,8 +5626,6 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamChunk", "stream_trailer", "streamTrailer", - "encrypted_packet", - "encryptedPacket", ]; #[allow(clippy::enum_variant_names)] @@ -5805,7 +5647,6 @@ impl<'de> serde::Deserialize<'de> for DataPacket { StreamHeader, StreamChunk, StreamTrailer, - EncryptedPacket, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -5845,7 +5686,6 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), - "encryptedPacket" | "encrypted_packet" => Ok(GeneratedField::EncryptedPacket), _ => Ok(GeneratedField::__SkipField__), } } @@ -5987,13 +5827,6 @@ impl<'de> serde::Deserialize<'de> for DataPacket { return Err(serde::de::Error::duplicate_field("streamTrailer")); } value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::StreamTrailer) -; - } - GeneratedField::EncryptedPacket => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("encryptedPacket")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::EncryptedPacket) ; } GeneratedField::__SkipField__ => { @@ -9378,365 +9211,6 @@ impl<'de> serde::Deserialize<'de> for EncodingOptionsPreset { deserializer.deserialize_any(GeneratedVisitor) } } -impl serde::Serialize for EncryptedPacket { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> 
std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.encryption_type != 0 { - len += 1; - } - if !self.iv.is_empty() { - len += 1; - } - if self.key_index != 0 { - len += 1; - } - if !self.encrypted_value.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacket", len)?; - if self.encryption_type != 0 { - let v = encryption::Type::try_from(self.encryption_type) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.encryption_type)))?; - struct_ser.serialize_field("encryptionType", &v)?; - } - if !self.iv.is_empty() { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("iv", pbjson::private::base64::encode(&self.iv).as_str())?; - } - if self.key_index != 0 { - struct_ser.serialize_field("keyIndex", &self.key_index)?; - } - if !self.encrypted_value.is_empty() { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("encryptedValue", pbjson::private::base64::encode(&self.encrypted_value).as_str())?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for EncryptedPacket { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "encryption_type", - "encryptionType", - "iv", - "key_index", - "keyIndex", - "encrypted_value", - "encryptedValue", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - EncryptionType, - Iv, - KeyIndex, - EncryptedValue, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "encryptionType" | "encryption_type" => Ok(GeneratedField::EncryptionType), - "iv" => Ok(GeneratedField::Iv), - "keyIndex" | "key_index" => Ok(GeneratedField::KeyIndex), - "encryptedValue" | "encrypted_value" => Ok(GeneratedField::EncryptedValue), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = EncryptedPacket; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.EncryptedPacket") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut encryption_type__ = None; - let mut iv__ = None; - let mut key_index__ = None; - let mut encrypted_value__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::EncryptionType => { - if encryption_type__.is_some() { - return Err(serde::de::Error::duplicate_field("encryptionType")); - } - encryption_type__ = Some(map_.next_value::()? 
as i32); - } - GeneratedField::Iv => { - if iv__.is_some() { - return Err(serde::de::Error::duplicate_field("iv")); - } - iv__ = - Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) - ; - } - GeneratedField::KeyIndex => { - if key_index__.is_some() { - return Err(serde::de::Error::duplicate_field("keyIndex")); - } - key_index__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::EncryptedValue => { - if encrypted_value__.is_some() { - return Err(serde::de::Error::duplicate_field("encryptedValue")); - } - encrypted_value__ = - Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) - ; - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(EncryptedPacket { - encryption_type: encryption_type__.unwrap_or_default(), - iv: iv__.unwrap_or_default(), - key_index: key_index__.unwrap_or_default(), - encrypted_value: encrypted_value__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.EncryptedPacket", FIELDS, GeneratedVisitor) - } -} -impl serde::Serialize for EncryptedPacketPayload { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.value.is_some() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacketPayload", len)?; - if let Some(v) = self.value.as_ref() { - match v { - encrypted_packet_payload::Value::User(v) => { - struct_ser.serialize_field("user", v)?; - } - encrypted_packet_payload::Value::ChatMessage(v) => { - struct_ser.serialize_field("chatMessage", v)?; - } - encrypted_packet_payload::Value::RpcRequest(v) => { - struct_ser.serialize_field("rpcRequest", v)?; - } - encrypted_packet_payload::Value::RpcAck(v) => { - struct_ser.serialize_field("rpcAck", v)?; - } - encrypted_packet_payload::Value::RpcResponse(v) => { - struct_ser.serialize_field("rpcResponse", v)?; - } - encrypted_packet_payload::Value::StreamHeader(v) => { - struct_ser.serialize_field("streamHeader", v)?; - } - encrypted_packet_payload::Value::StreamChunk(v) => { - struct_ser.serialize_field("streamChunk", v)?; - } - encrypted_packet_payload::Value::StreamTrailer(v) => { - struct_ser.serialize_field("streamTrailer", v)?; - } - } - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for EncryptedPacketPayload { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "user", - "chat_message", - "chatMessage", - "rpc_request", - "rpcRequest", - "rpc_ack", - "rpcAck", - "rpc_response", - "rpcResponse", - "stream_header", - "streamHeader", - "stream_chunk", - "streamChunk", - "stream_trailer", - "streamTrailer", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - User, - ChatMessage, - RpcRequest, - RpcAck, - RpcResponse, - StreamHeader, - StreamChunk, - StreamTrailer, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where 
- E: serde::de::Error, - { - match value { - "user" => Ok(GeneratedField::User), - "chatMessage" | "chat_message" => Ok(GeneratedField::ChatMessage), - "rpcRequest" | "rpc_request" => Ok(GeneratedField::RpcRequest), - "rpcAck" | "rpc_ack" => Ok(GeneratedField::RpcAck), - "rpcResponse" | "rpc_response" => Ok(GeneratedField::RpcResponse), - "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), - "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), - "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = EncryptedPacketPayload; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.EncryptedPacketPayload") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut value__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::User => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("user")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::User) -; - } - GeneratedField::ChatMessage => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("chatMessage")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::ChatMessage) -; - } - GeneratedField::RpcRequest => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("rpcRequest")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcRequest) -; - } - GeneratedField::RpcAck => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("rpcAck")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcAck) -; - } - GeneratedField::RpcResponse => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("rpcResponse")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcResponse) -; - } - GeneratedField::StreamHeader => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("streamHeader")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamHeader) -; - } - GeneratedField::StreamChunk => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("streamChunk")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamChunk) -; - } - GeneratedField::StreamTrailer => { - if value__.is_some() { - return Err(serde::de::Error::duplicate_field("streamTrailer")); - } - value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamTrailer) -; - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(EncryptedPacketPayload { - value: value__, - }) - } - } - deserializer.deserialize_struct("livekit.EncryptedPacketPayload", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for Encryption { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -14011,12 +13485,6 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { len += 1; } - if 
!self.worker_id.is_empty() { - len += 1; - } - if !self.agent_id.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.JobState", len)?; if self.status != 0 { let v = JobStatus::try_from(self.status) @@ -14044,12 +13512,6 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { struct_ser.serialize_field("participantIdentity", &self.participant_identity)?; } - if !self.worker_id.is_empty() { - struct_ser.serialize_field("workerId", &self.worker_id)?; - } - if !self.agent_id.is_empty() { - struct_ser.serialize_field("agentId", &self.agent_id)?; - } struct_ser.end() } } @@ -14070,10 +13532,6 @@ impl<'de> serde::Deserialize<'de> for JobState { "updatedAt", "participant_identity", "participantIdentity", - "worker_id", - "workerId", - "agent_id", - "agentId", ]; #[allow(clippy::enum_variant_names)] @@ -14084,8 +13542,6 @@ impl<'de> serde::Deserialize<'de> for JobState { EndedAt, UpdatedAt, ParticipantIdentity, - WorkerId, - AgentId, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -14114,8 +13570,6 @@ impl<'de> serde::Deserialize<'de> for JobState { "endedAt" | "ended_at" => Ok(GeneratedField::EndedAt), "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), "participantIdentity" | "participant_identity" => Ok(GeneratedField::ParticipantIdentity), - "workerId" | "worker_id" => Ok(GeneratedField::WorkerId), - "agentId" | "agent_id" => Ok(GeneratedField::AgentId), _ => Ok(GeneratedField::__SkipField__), } } @@ -14141,8 +13595,6 @@ impl<'de> serde::Deserialize<'de> for JobState { let mut ended_at__ = None; let mut updated_at__ = None; let mut participant_identity__ = None; - let mut worker_id__ = None; - let mut agent_id__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Status => { @@ -14187,18 +13639,6 @@ impl<'de> serde::Deserialize<'de> for JobState { } participant_identity__ = Some(map_.next_value()?); } - GeneratedField::WorkerId => { - if worker_id__.is_some() { - return Err(serde::de::Error::duplicate_field("workerId")); - } - worker_id__ = Some(map_.next_value()?); - } - GeneratedField::AgentId => { - if agent_id__.is_some() { - return Err(serde::de::Error::duplicate_field("agentId")); - } - agent_id__ = Some(map_.next_value()?); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -14211,8 +13651,6 @@ impl<'de> serde::Deserialize<'de> for JobState { ended_at: ended_at__.unwrap_or_default(), updated_at: updated_at__.unwrap_or_default(), participant_identity: participant_identity__.unwrap_or_default(), - worker_id: worker_id__.unwrap_or_default(), - agent_id: agent_id__.unwrap_or_default(), }) } } @@ -14466,266 +13904,6 @@ impl<'de> serde::Deserialize<'de> for JobType { deserializer.deserialize_any(GeneratedVisitor) } } -impl serde::Serialize for JoinRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.client_info.is_some() { - len += 1; - } - if self.connection_settings.is_some() { - len += 1; - } - if !self.metadata.is_empty() { - len += 1; - } - if !self.participant_attributes.is_empty() { - len += 1; - } - if !self.add_track_requests.is_empty() { - len += 1; - } - if self.publisher_offer.is_some() { - len += 1; - } - if self.reconnect { - len += 1; - } - if self.reconnect_reason != 0 { - len += 1; - } - if !self.participant_sid.is_empty() { - len += 1; - } - if self.sync_state.is_some() { - len += 1; - } - let mut 
struct_ser = serializer.serialize_struct("livekit.JoinRequest", len)?; - if let Some(v) = self.client_info.as_ref() { - struct_ser.serialize_field("clientInfo", v)?; - } - if let Some(v) = self.connection_settings.as_ref() { - struct_ser.serialize_field("connectionSettings", v)?; - } - if !self.metadata.is_empty() { - struct_ser.serialize_field("metadata", &self.metadata)?; - } - if !self.participant_attributes.is_empty() { - struct_ser.serialize_field("participantAttributes", &self.participant_attributes)?; - } - if !self.add_track_requests.is_empty() { - struct_ser.serialize_field("addTrackRequests", &self.add_track_requests)?; - } - if let Some(v) = self.publisher_offer.as_ref() { - struct_ser.serialize_field("publisherOffer", v)?; - } - if self.reconnect { - struct_ser.serialize_field("reconnect", &self.reconnect)?; - } - if self.reconnect_reason != 0 { - let v = ReconnectReason::try_from(self.reconnect_reason) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.reconnect_reason)))?; - struct_ser.serialize_field("reconnectReason", &v)?; - } - if !self.participant_sid.is_empty() { - struct_ser.serialize_field("participantSid", &self.participant_sid)?; - } - if let Some(v) = self.sync_state.as_ref() { - struct_ser.serialize_field("syncState", v)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for JoinRequest { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "client_info", - "clientInfo", - "connection_settings", - "connectionSettings", - "metadata", - "participant_attributes", - "participantAttributes", - "add_track_requests", - "addTrackRequests", - "publisher_offer", - "publisherOffer", - "reconnect", - "reconnect_reason", - "reconnectReason", - "participant_sid", - "participantSid", - "sync_state", - "syncState", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - ClientInfo, - ConnectionSettings, - Metadata, - ParticipantAttributes, - AddTrackRequests, - PublisherOffer, - Reconnect, - ReconnectReason, - ParticipantSid, - SyncState, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "clientInfo" | "client_info" => Ok(GeneratedField::ClientInfo), - "connectionSettings" | "connection_settings" => Ok(GeneratedField::ConnectionSettings), - "metadata" => Ok(GeneratedField::Metadata), - "participantAttributes" | "participant_attributes" => Ok(GeneratedField::ParticipantAttributes), - "addTrackRequests" | "add_track_requests" => Ok(GeneratedField::AddTrackRequests), - "publisherOffer" | "publisher_offer" => Ok(GeneratedField::PublisherOffer), - "reconnect" => Ok(GeneratedField::Reconnect), - "reconnectReason" | "reconnect_reason" => Ok(GeneratedField::ReconnectReason), - "participantSid" | "participant_sid" => Ok(GeneratedField::ParticipantSid), - "syncState" | "sync_state" => Ok(GeneratedField::SyncState), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - 
deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = JoinRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.JoinRequest") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut client_info__ = None; - let mut connection_settings__ = None; - let mut metadata__ = None; - let mut participant_attributes__ = None; - let mut add_track_requests__ = None; - let mut publisher_offer__ = None; - let mut reconnect__ = None; - let mut reconnect_reason__ = None; - let mut participant_sid__ = None; - let mut sync_state__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::ClientInfo => { - if client_info__.is_some() { - return Err(serde::de::Error::duplicate_field("clientInfo")); - } - client_info__ = map_.next_value()?; - } - GeneratedField::ConnectionSettings => { - if connection_settings__.is_some() { - return Err(serde::de::Error::duplicate_field("connectionSettings")); - } - connection_settings__ = map_.next_value()?; - } - GeneratedField::Metadata => { - if metadata__.is_some() { - return Err(serde::de::Error::duplicate_field("metadata")); - } - metadata__ = Some(map_.next_value()?); - } - GeneratedField::ParticipantAttributes => { - if participant_attributes__.is_some() { - return Err(serde::de::Error::duplicate_field("participantAttributes")); - } - participant_attributes__ = Some( - map_.next_value::>()? - ); - } - GeneratedField::AddTrackRequests => { - if add_track_requests__.is_some() { - return Err(serde::de::Error::duplicate_field("addTrackRequests")); - } - add_track_requests__ = Some(map_.next_value()?); - } - GeneratedField::PublisherOffer => { - if publisher_offer__.is_some() { - return Err(serde::de::Error::duplicate_field("publisherOffer")); - } - publisher_offer__ = map_.next_value()?; - } - GeneratedField::Reconnect => { - if reconnect__.is_some() { - return Err(serde::de::Error::duplicate_field("reconnect")); - } - reconnect__ = Some(map_.next_value()?); - } - GeneratedField::ReconnectReason => { - if reconnect_reason__.is_some() { - return Err(serde::de::Error::duplicate_field("reconnectReason")); - } - reconnect_reason__ = Some(map_.next_value::()? 
as i32); - } - GeneratedField::ParticipantSid => { - if participant_sid__.is_some() { - return Err(serde::de::Error::duplicate_field("participantSid")); - } - participant_sid__ = Some(map_.next_value()?); - } - GeneratedField::SyncState => { - if sync_state__.is_some() { - return Err(serde::de::Error::duplicate_field("syncState")); - } - sync_state__ = map_.next_value()?; - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(JoinRequest { - client_info: client_info__, - connection_settings: connection_settings__, - metadata: metadata__.unwrap_or_default(), - participant_attributes: participant_attributes__.unwrap_or_default(), - add_track_requests: add_track_requests__.unwrap_or_default(), - publisher_offer: publisher_offer__, - reconnect: reconnect__.unwrap_or_default(), - reconnect_reason: reconnect_reason__.unwrap_or_default(), - participant_sid: participant_sid__.unwrap_or_default(), - sync_state: sync_state__, - }) - } - } - deserializer.deserialize_struct("livekit.JoinRequest", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for JoinResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -17206,28 +16384,10 @@ impl serde::Serialize for ListUpdate { if !self.set.is_empty() { len += 1; } - if !self.add.is_empty() { - len += 1; - } - if !self.del.is_empty() { - len += 1; - } - if self.clear { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.ListUpdate", len)?; if !self.set.is_empty() { struct_ser.serialize_field("set", &self.set)?; } - if !self.add.is_empty() { - struct_ser.serialize_field("add", &self.add)?; - } - if !self.del.is_empty() { - struct_ser.serialize_field("del", &self.del)?; - } - if self.clear { - struct_ser.serialize_field("clear", &self.clear)?; - } struct_ser.end() } } @@ -17239,17 +16399,11 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { const FIELDS: &[&str] = &[ "set", - "add", - "del", - "clear", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Set, - Add, - Del, - Clear, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -17273,9 +16427,6 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { match value { "set" => Ok(GeneratedField::Set), - "add" => Ok(GeneratedField::Add), - "del" => Ok(GeneratedField::Del), - "clear" => Ok(GeneratedField::Clear), _ => Ok(GeneratedField::__SkipField__), } } @@ -17296,9 +16447,6 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { V: serde::de::MapAccess<'de>, { let mut set__ = None; - let mut add__ = None; - let mut del__ = None; - let mut clear__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::Set => { @@ -17307,24 +16455,6 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } set__ = Some(map_.next_value()?); } - GeneratedField::Add => { - if add__.is_some() { - return Err(serde::de::Error::duplicate_field("add")); - } - add__ = Some(map_.next_value()?); - } - GeneratedField::Del => { - if del__.is_some() { - return Err(serde::de::Error::duplicate_field("del")); - } - del__ = Some(map_.next_value()?); - } - GeneratedField::Clear => { - if clear__.is_some() { - return Err(serde::de::Error::duplicate_field("clear")); - } - clear__ = Some(map_.next_value()?); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -17332,133 +16462,12 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } Ok(ListUpdate { set: set__.unwrap_or_default(), - add: add__.unwrap_or_default(), - del: del__.unwrap_or_default(), - clear: clear__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.ListUpdate", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for MediaSectionsRequirement { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.num_audios != 0 { - len += 1; - } - if self.num_videos != 0 { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.MediaSectionsRequirement", len)?; - if self.num_audios != 0 { - struct_ser.serialize_field("numAudios", &self.num_audios)?; - } - if self.num_videos != 0 { - struct_ser.serialize_field("numVideos", &self.num_videos)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for MediaSectionsRequirement { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "num_audios", - "numAudios", - "num_videos", - "numVideos", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - NumAudios, - NumVideos, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "numAudios" | "num_audios" => Ok(GeneratedField::NumAudios), - "numVideos" | "num_videos" => Ok(GeneratedField::NumVideos), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = MediaSectionsRequirement; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.MediaSectionsRequirement") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut num_audios__ = None; - let mut num_videos__ = None; - while let Some(k) = map_.next_key()? 
{ - match k { - GeneratedField::NumAudios => { - if num_audios__.is_some() { - return Err(serde::de::Error::duplicate_field("numAudios")); - } - num_audios__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::NumVideos => { - if num_videos__.is_some() { - return Err(serde::de::Error::duplicate_field("numVideos")); - } - num_videos__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(MediaSectionsRequirement { - num_audios: num_audios__.unwrap_or_default(), - num_videos: num_videos__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.MediaSectionsRequirement", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for MetricLabel { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -24343,9 +23352,6 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { len += 1; } - if !self.metadata.is_empty() { - len += 1; - } if self.egress.is_some() { len += 1; } @@ -24374,9 +23380,6 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { struct_ser.serialize_field("maxParticipants", &self.max_participants)?; } - if !self.metadata.is_empty() { - struct_ser.serialize_field("metadata", &self.metadata)?; - } if let Some(v) = self.egress.as_ref() { struct_ser.serialize_field("egress", v)?; } @@ -24409,7 +23412,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "departureTimeout", "max_participants", "maxParticipants", - "metadata", "egress", "min_playout_delay", "minPlayoutDelay", @@ -24426,7 +23428,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { EmptyTimeout, DepartureTimeout, MaxParticipants, - Metadata, Egress, MinPlayoutDelay, MaxPlayoutDelay, @@ -24458,7 +23459,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "emptyTimeout" | "empty_timeout" => Ok(GeneratedField::EmptyTimeout), "departureTimeout" | "departure_timeout" => Ok(GeneratedField::DepartureTimeout), "maxParticipants" | "max_participants" => Ok(GeneratedField::MaxParticipants), - "metadata" => Ok(GeneratedField::Metadata), "egress" => Ok(GeneratedField::Egress), "minPlayoutDelay" | "min_playout_delay" => Ok(GeneratedField::MinPlayoutDelay), "maxPlayoutDelay" | "max_playout_delay" => Ok(GeneratedField::MaxPlayoutDelay), @@ -24487,7 +23487,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { let mut empty_timeout__ = None; let mut departure_timeout__ = None; let mut max_participants__ = None; - let mut metadata__ = None; let mut egress__ = None; let mut min_playout_delay__ = None; let mut max_playout_delay__ = None; @@ -24525,12 +23524,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } - GeneratedField::Metadata => { - if metadata__.is_some() { - return Err(serde::de::Error::duplicate_field("metadata")); - } - metadata__ = Some(map_.next_value()?); - } GeneratedField::Egress => { if egress__.is_some() { return Err(serde::de::Error::duplicate_field("egress")); @@ -24575,7 +23568,6 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { empty_timeout: empty_timeout__.unwrap_or_default(), departure_timeout: departure_timeout__.unwrap_or_default(), max_participants: max_participants__.unwrap_or_default(), - metadata: metadata__.unwrap_or_default(), egress: egress__, min_playout_delay: min_playout_delay__.unwrap_or_default(), 
max_playout_delay: max_playout_delay__.unwrap_or_default(), @@ -25612,12 +24604,6 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { len += 1; } - if !self.assume_role_arn.is_empty() { - len += 1; - } - if !self.assume_role_external_id.is_empty() { - len += 1; - } if !self.region.is_empty() { len += 1; } @@ -25652,12 +24638,6 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { struct_ser.serialize_field("sessionToken", &self.session_token)?; } - if !self.assume_role_arn.is_empty() { - struct_ser.serialize_field("assumeRoleArn", &self.assume_role_arn)?; - } - if !self.assume_role_external_id.is_empty() { - struct_ser.serialize_field("assumeRoleExternalId", &self.assume_role_external_id)?; - } if !self.region.is_empty() { struct_ser.serialize_field("region", &self.region)?; } @@ -25697,10 +24677,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "secret", "session_token", "sessionToken", - "assume_role_arn", - "assumeRoleArn", - "assume_role_external_id", - "assumeRoleExternalId", "region", "endpoint", "bucket", @@ -25718,8 +24694,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { AccessKey, Secret, SessionToken, - AssumeRoleArn, - AssumeRoleExternalId, Region, Endpoint, Bucket, @@ -25753,8 +24727,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "accessKey" | "access_key" => Ok(GeneratedField::AccessKey), "secret" => Ok(GeneratedField::Secret), "sessionToken" | "session_token" => Ok(GeneratedField::SessionToken), - "assumeRoleArn" | "assume_role_arn" => Ok(GeneratedField::AssumeRoleArn), - "assumeRoleExternalId" | "assume_role_external_id" => Ok(GeneratedField::AssumeRoleExternalId), "region" => Ok(GeneratedField::Region), "endpoint" => Ok(GeneratedField::Endpoint), "bucket" => Ok(GeneratedField::Bucket), @@ -25785,8 +24757,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { let mut access_key__ = None; let mut secret__ = None; let mut session_token__ = None; - let mut assume_role_arn__ = None; - let mut assume_role_external_id__ = None; let mut region__ = None; let mut endpoint__ = None; let mut bucket__ = None; @@ -25815,18 +24785,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { } session_token__ = Some(map_.next_value()?); } - GeneratedField::AssumeRoleArn => { - if assume_role_arn__.is_some() { - return Err(serde::de::Error::duplicate_field("assumeRoleArn")); - } - assume_role_arn__ = Some(map_.next_value()?); - } - GeneratedField::AssumeRoleExternalId => { - if assume_role_external_id__.is_some() { - return Err(serde::de::Error::duplicate_field("assumeRoleExternalId")); - } - assume_role_external_id__ = Some(map_.next_value()?); - } GeneratedField::Region => { if region__.is_some() { return Err(serde::de::Error::duplicate_field("region")); @@ -25886,8 +24844,6 @@ impl<'de> serde::Deserialize<'de> for S3Upload { access_key: access_key__.unwrap_or_default(), secret: secret__.unwrap_or_default(), session_token: session_token__.unwrap_or_default(), - assume_role_arn: assume_role_arn__.unwrap_or_default(), - assume_role_external_id: assume_role_external_id__.unwrap_or_default(), region: region__.unwrap_or_default(), endpoint: endpoint__.unwrap_or_default(), bucket: bucket__.unwrap_or_default(), @@ -32503,9 +31459,6 @@ impl serde::Serialize for SignalResponse { signal_response::Message::RoomMoved(v) => { struct_ser.serialize_field("roomMoved", v)?; } - signal_response::Message::MediaSectionsRequirement(v) => { - struct_ser.serialize_field("mediaSectionsRequirement", v)?; - } } } struct_ser.end() @@ -32555,8 +31508,6 @@ 
impl<'de> serde::Deserialize<'de> for SignalResponse { "trackSubscribed", "room_moved", "roomMoved", - "media_sections_requirement", - "mediaSectionsRequirement", ]; #[allow(clippy::enum_variant_names)] @@ -32584,7 +31535,6 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { RequestResponse, TrackSubscribed, RoomMoved, - MediaSectionsRequirement, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32630,7 +31580,6 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { "requestResponse" | "request_response" => Ok(GeneratedField::RequestResponse), "trackSubscribed" | "track_subscribed" => Ok(GeneratedField::TrackSubscribed), "roomMoved" | "room_moved" => Ok(GeneratedField::RoomMoved), - "mediaSectionsRequirement" | "media_sections_requirement" => Ok(GeneratedField::MediaSectionsRequirement), _ => Ok(GeneratedField::__SkipField__), } } @@ -32810,13 +31759,6 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { return Err(serde::de::Error::duplicate_field("roomMoved")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::RoomMoved) -; - } - GeneratedField::MediaSectionsRequirement => { - if message__.is_some() { - return Err(serde::de::Error::duplicate_field("mediaSectionsRequirement")); - } - message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::MediaSectionsRequirement) ; } GeneratedField::__SkipField__ => { @@ -33255,12 +32197,6 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { len += 1; } - if !self.layers.is_empty() { - len += 1; - } - if self.video_layer_mode != 0 { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodec", len)?; if !self.codec.is_empty() { struct_ser.serialize_field("codec", &self.codec)?; @@ -33268,14 +32204,6 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; } - if !self.layers.is_empty() { - struct_ser.serialize_field("layers", &self.layers)?; - } - if self.video_layer_mode != 0 { - let v = video_layer::Mode::try_from(self.video_layer_mode) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; - struct_ser.serialize_field("videoLayerMode", &v)?; - } struct_ser.end() } } @@ -33288,17 +32216,12 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { const FIELDS: &[&str] = &[ "codec", "cid", - "layers", - "video_layer_mode", - "videoLayerMode", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Codec, Cid, - Layers, - VideoLayerMode, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -33323,8 +32246,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { match value { "codec" => Ok(GeneratedField::Codec), "cid" => Ok(GeneratedField::Cid), - "layers" => Ok(GeneratedField::Layers), - "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), _ => Ok(GeneratedField::__SkipField__), } } @@ -33346,8 +32267,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { { let mut codec__ = None; let mut cid__ = None; - let mut layers__ = None; - let mut video_layer_mode__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::Codec => { @@ -33362,18 +32281,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { } cid__ = Some(map_.next_value()?); } - GeneratedField::Layers => { - if layers__.is_some() { - return Err(serde::de::Error::duplicate_field("layers")); - } - layers__ = Some(map_.next_value()?); - } - GeneratedField::VideoLayerMode => { - if video_layer_mode__.is_some() { - return Err(serde::de::Error::duplicate_field("videoLayerMode")); - } - video_layer_mode__ = Some(map_.next_value::()? as i32); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -33382,8 +32289,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { Ok(SimulcastCodec { codec: codec__.unwrap_or_default(), cid: cid__.unwrap_or_default(), - layers: layers__.unwrap_or_default(), - video_layer_mode: video_layer_mode__.unwrap_or_default(), }) } } @@ -33410,12 +32315,6 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { len += 1; } - if self.video_layer_mode != 0 { - len += 1; - } - if !self.sdp_cid.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodecInfo", len)?; if !self.mime_type.is_empty() { struct_ser.serialize_field("mimeType", &self.mime_type)?; @@ -33429,14 +32328,6 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { struct_ser.serialize_field("layers", &self.layers)?; } - if self.video_layer_mode != 0 { - let v = video_layer::Mode::try_from(self.video_layer_mode) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; - struct_ser.serialize_field("videoLayerMode", &v)?; - } - if !self.sdp_cid.is_empty() { - struct_ser.serialize_field("sdpCid", &self.sdp_cid)?; - } struct_ser.end() } } @@ -33452,10 +32343,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid", "cid", "layers", - "video_layer_mode", - "videoLayerMode", - "sdp_cid", - "sdpCid", ]; #[allow(clippy::enum_variant_names)] @@ -33464,8 +32351,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { Mid, Cid, Layers, - VideoLayerMode, - SdpCid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -33492,8 +32377,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid" => Ok(GeneratedField::Mid), "cid" => Ok(GeneratedField::Cid), "layers" => Ok(GeneratedField::Layers), - "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), - "sdpCid" | "sdp_cid" => Ok(GeneratedField::SdpCid), _ => Ok(GeneratedField::__SkipField__), } } @@ -33517,8 +32400,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { let mut mid__ = None; let mut cid__ = None; let mut layers__ = None; - let mut video_layer_mode__ = None; - let mut sdp_cid__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::MimeType => { @@ -33545,18 +32426,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { } layers__ = Some(map_.next_value()?); } - GeneratedField::VideoLayerMode => { - if video_layer_mode__.is_some() { - return Err(serde::de::Error::duplicate_field("videoLayerMode")); - } - video_layer_mode__ = Some(map_.next_value::()? 
as i32); - } - GeneratedField::SdpCid => { - if sdp_cid__.is_some() { - return Err(serde::de::Error::duplicate_field("sdpCid")); - } - sdp_cid__ = Some(map_.next_value()?); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -33567,8 +32436,6 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { mid: mid__.unwrap_or_default(), cid: cid__.unwrap_or_default(), layers: layers__.unwrap_or_default(), - video_layer_mode: video_layer_mode__.unwrap_or_default(), - sdp_cid: sdp_cid__.unwrap_or_default(), }) } } @@ -36094,115 +34961,8 @@ impl<'de> serde::Deserialize<'de> for TimedVersion { E: serde::de::Error, { match value { - "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), - "ticks" => Ok(GeneratedField::Ticks), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TimedVersion; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TimedVersion") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut unix_micro__ = None; - let mut ticks__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::UnixMicro => { - if unix_micro__.is_some() { - return Err(serde::de::Error::duplicate_field("unixMicro")); - } - unix_micro__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Ticks => { - if ticks__.is_some() { - return Err(serde::de::Error::duplicate_field("ticks")); - } - ticks__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::__SkipField__ => { - let _ = map_.next_value::()?; - } - } - } - Ok(TimedVersion { - unix_micro: unix_micro__.unwrap_or_default(), - ticks: ticks__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) - } -} -impl serde::Serialize for TokenPagination { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.token.is_empty() { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.TokenPagination", len)?; - if !self.token.is_empty() { - struct_ser.serialize_field("token", &self.token)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for TokenPagination { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "token", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - Token, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "token" => Ok(GeneratedField::Token), + "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), + "ticks" => 
Ok(GeneratedField::Ticks), _ => Ok(GeneratedField::__SkipField__), } } @@ -36212,36 +34972,48 @@ impl<'de> serde::Deserialize<'de> for TokenPagination { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TokenPagination; + type Value = TimedVersion; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TokenPagination") + formatter.write_str("struct livekit.TimedVersion") } - fn visit_map(self, mut map_: V) -> std::result::Result + fn visit_map(self, mut map_: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut token__ = None; + let mut unix_micro__ = None; + let mut ticks__ = None; while let Some(k) = map_.next_key()? { match k { - GeneratedField::Token => { - if token__.is_some() { - return Err(serde::de::Error::duplicate_field("token")); + GeneratedField::UnixMicro => { + if unix_micro__.is_some() { + return Err(serde::de::Error::duplicate_field("unixMicro")); } - token__ = Some(map_.next_value()?); + unix_micro__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Ticks => { + if ticks__.is_some() { + return Err(serde::de::Error::duplicate_field("ticks")); + } + ticks__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } } } - Ok(TokenPagination { - token: token__.unwrap_or_default(), + Ok(TimedVersion { + unix_micro: unix_micro__.unwrap_or_default(), + ticks: ticks__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("livekit.TokenPagination", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) } } impl serde::Serialize for TrackCompositeEgressRequest { @@ -41645,12 +40417,6 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { len += 1; } - if self.spatial_layer != 0 { - len += 1; - } - if !self.rid.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("livekit.VideoLayer", len)?; if self.quality != 0 { let v = VideoQuality::try_from(self.quality) @@ -41669,12 +40435,6 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { struct_ser.serialize_field("ssrc", &self.ssrc)?; } - if self.spatial_layer != 0 { - struct_ser.serialize_field("spatialLayer", &self.spatial_layer)?; - } - if !self.rid.is_empty() { - struct_ser.serialize_field("rid", &self.rid)?; - } struct_ser.end() } } @@ -41690,9 +40450,6 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height", "bitrate", "ssrc", - "spatial_layer", - "spatialLayer", - "rid", ]; #[allow(clippy::enum_variant_names)] @@ -41702,8 +40459,6 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Height, Bitrate, Ssrc, - SpatialLayer, - Rid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -41731,8 +40486,6 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height" => Ok(GeneratedField::Height), "bitrate" => Ok(GeneratedField::Bitrate), "ssrc" => Ok(GeneratedField::Ssrc), - "spatialLayer" | "spatial_layer" => Ok(GeneratedField::SpatialLayer), - "rid" => Ok(GeneratedField::Rid), _ => Ok(GeneratedField::__SkipField__), } } @@ -41757,8 +40510,6 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { let mut height__ = None; let mut bitrate__ = None; let mut ssrc__ = None; - let mut spatial_layer__ = None; - let mut rid__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::Quality => { @@ -41799,20 +40550,6 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } - GeneratedField::SpatialLayer => { - if spatial_layer__.is_some() { - return Err(serde::de::Error::duplicate_field("spatialLayer")); - } - spatial_layer__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Rid => { - if rid__.is_some() { - return Err(serde::de::Error::duplicate_field("rid")); - } - rid__ = Some(map_.next_value()?); - } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -41824,88 +40561,12 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { height: height__.unwrap_or_default(), bitrate: bitrate__.unwrap_or_default(), ssrc: ssrc__.unwrap_or_default(), - spatial_layer: spatial_layer__.unwrap_or_default(), - rid: rid__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.VideoLayer", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for video_layer::Mode { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - let variant = match self { - Self::Unused => "MODE_UNUSED", - Self::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", - Self::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", - }; - serializer.serialize_str(variant) - } -} -impl<'de> serde::Deserialize<'de> for video_layer::Mode { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "MODE_UNUSED", - "ONE_SPATIAL_LAYER_PER_STREAM", - "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", - ]; - - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = video_layer::Mode; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - fn visit_i64(self, v: i64) -> std::result::Result - where - E: serde::de::Error, - { - i32::try_from(v) - .ok() - .and_then(|x| x.try_into().ok()) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) - }) - } - - fn visit_u64(self, v: u64) -> std::result::Result - where - E: serde::de::Error, - { - i32::try_from(v) - .ok() - .and_then(|x| x.try_into().ok()) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) - }) - } - - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "MODE_UNUSED" => Ok(video_layer::Mode::Unused), - "ONE_SPATIAL_LAYER_PER_STREAM" => Ok(video_layer::Mode::OneSpatialLayerPerStream), - "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Ok(video_layer::Mode::MultipleSpatialLayersPerStream), - _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), - } - } - } - deserializer.deserialize_any(GeneratedVisitor) - } -} impl serde::Serialize for VideoQuality { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -43132,193 +41793,3 @@ impl<'de> serde::Deserialize<'de> for WorkerStatus { deserializer.deserialize_any(GeneratedVisitor) } } -impl serde::Serialize for WrappedJoinRequest { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if self.compression != 0 { - len += 1; - } - if !self.join_request.is_empty() { - len 
+= 1; - } - let mut struct_ser = serializer.serialize_struct("livekit.WrappedJoinRequest", len)?; - if self.compression != 0 { - let v = wrapped_join_request::Compression::try_from(self.compression) - .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.compression)))?; - struct_ser.serialize_field("compression", &v)?; - } - if !self.join_request.is_empty() { - #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] - struct_ser.serialize_field("joinRequest", pbjson::private::base64::encode(&self.join_request).as_str())?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for WrappedJoinRequest { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "compression", - "join_request", - "joinRequest", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - Compression, - JoinRequest, - __SkipField__, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "compression" => Ok(GeneratedField::Compression), - "joinRequest" | "join_request" => Ok(GeneratedField::JoinRequest), - _ => Ok(GeneratedField::__SkipField__), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = WrappedJoinRequest; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.WrappedJoinRequest") - } - - fn visit_map(self, mut map_: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut compression__ = None; - let mut join_request__ = None; - while let Some(k) = map_.next_key()? { - match k { - GeneratedField::Compression => { - if compression__.is_some() { - return Err(serde::de::Error::duplicate_field("compression")); - } - compression__ = Some(map_.next_value::()? 
as i32);
-                        }
-                        GeneratedField::JoinRequest => {
-                            if join_request__.is_some() {
-                                return Err(serde::de::Error::duplicate_field("joinRequest"));
-                            }
-                            join_request__ =
-                                Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0)
-                            ;
-                        }
-                        GeneratedField::__SkipField__ => {
-                            let _ = map_.next_value::<serde::de::IgnoredAny>()?;
-                        }
-                    }
-                }
-                Ok(WrappedJoinRequest {
-                    compression: compression__.unwrap_or_default(),
-                    join_request: join_request__.unwrap_or_default(),
-                })
-            }
-        }
-        deserializer.deserialize_struct("livekit.WrappedJoinRequest", FIELDS, GeneratedVisitor)
-    }
-}
-impl serde::Serialize for wrapped_join_request::Compression {
-    #[allow(deprecated)]
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        let variant = match self {
-            Self::None => "NONE",
-            Self::Gzip => "GZIP",
-        };
-        serializer.serialize_str(variant)
-    }
-}
-impl<'de> serde::Deserialize<'de> for wrapped_join_request::Compression {
-    #[allow(deprecated)]
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        const FIELDS: &[&str] = &[
-            "NONE",
-            "GZIP",
-        ];
-
-        struct GeneratedVisitor;
-
-        impl<'de> serde::de::Visitor<'de> for GeneratedVisitor {
-            type Value = wrapped_join_request::Compression;
-
-            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                write!(formatter, "expected one of: {:?}", &FIELDS)
-            }
-
-            fn visit_i64<E>(self, v: i64) -> std::result::Result<Self::Value, E>
-            where
-                E: serde::de::Error,
-            {
-                i32::try_from(v)
-                    .ok()
-                    .and_then(|x| x.try_into().ok())
-                    .ok_or_else(|| {
-                        serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self)
-                    })
-            }
-
-            fn visit_u64<E>(self, v: u64) -> std::result::Result<Self::Value, E>
-            where
-                E: serde::de::Error,
-            {
-                i32::try_from(v)
-                    .ok()
-                    .and_then(|x| x.try_into().ok())
-                    .ok_or_else(|| {
-                        serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self)
-                    })
-            }
-
-            fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>
-            where
-                E: serde::de::Error,
-            {
-                match value {
-                    "NONE" => Ok(wrapped_join_request::Compression::None),
-                    "GZIP" => Ok(wrapped_join_request::Compression::Gzip),
-                    _ => Err(serde::de::Error::unknown_variant(value, FIELDS)),
-                }
-            }
-        }
-        deserializer.deserialize_any(GeneratedVisitor)
-    }
-}

From 9f1ebe6bcbb10da4a5f46ae0040d7b2906ca6220 Mon Sep 17 00:00:00 2001
From: David Chen
Date: Wed, 29 Oct 2025 09:44:14 -0700
Subject: [PATCH 05/39] support UYVY format

---
 examples/local_video/src/publisher.rs | 68 ++++++++++++++++++---------
 1 file changed, 47 insertions(+), 21 deletions(-)

diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs
index ee130b179..5f3f6763e 100644
--- a/examples/local_video/src/publisher.rs
+++ b/examples/local_video/src/publisher.rs
@@ -123,7 +123,7 @@ async fn main() -> Result<()> {
     let index = CameraIndex::Index(args.camera_index as u32);
     let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate);
     let mut camera = Camera::new(index, requested)?;
-    // Try raw YUYV first (cheaper than MJPEG), fall back to MJPEG
+    // Try raw YUYV first (cheaper than MJPEG), then UYVY, fall back to MJPEG
     let wanted = CameraFormat::new(
         Resolution::new(args.width, args.height),
         FrameFormat::YUYV,
         args.fps,
     );
     let mut using_fmt = "YUYV";
     if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) {
-        let alt = CameraFormat::new(
+        // Try UYVY as an alternative packed 4:2:2 format
+        let alt_uyvy =
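// YUYV and UYVY are both packed 4:2:2 layouts at 2 bytes per pixel; they
// differ only in byte order within each two-pixel group (YUYV = Y0 U Y1 V,
// UYVY = U Y0 V Y1). A row is width * 2 bytes in either case, so the same
// stride works for both and only the libyuv conversion entry point differs.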
CameraFormat::new( Resolution::new(args.width, args.height), - FrameFormat::MJPEG, + FrameFormat::UYVY, args.fps, ); - using_fmt = "MJPEG"; - let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); + if camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt_uyvy))).is_ok() { + using_fmt = "UYVY"; + } else { + let alt = CameraFormat::new( + Resolution::new(args.width, args.height), + FrameFormat::MJPEG, + args.fps, + ); + using_fmt = "MJPEG"; + let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); + } } camera.open_stream()?; let fmt = camera.camera_format(); @@ -172,6 +182,7 @@ async fn main() -> Result<()> { // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; let is_yuyv = using_fmt == "YUYV"; + let is_uyvy = using_fmt == "UYVY"; // Accurate pacing using absolute schedule (no drift) let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps)); @@ -205,26 +216,41 @@ async fn main() -> Result<()> { let t1 = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - // Fast path for YUYV: convert directly to I420 via libyuv - let t2 = if is_yuyv { + // Fast path for YUYV/UYVY: convert directly to I420 via libyuv + let t2 = if is_yuyv || is_uyvy { let src = frame_buf.buffer(); let src_bytes = src.as_ref(); - let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 - let t2_local = t1; // no decode step in YUYV path + let src_stride = (width * 2) as i32; // packed 4:2:2 + let t2_local = t1; // no decode step in packed YUV path unsafe { // returns 0 on success - let _ = yuv_sys::rs_YUY2ToI420( - src_bytes.as_ptr(), - src_stride, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); + if is_yuyv { + let _ = yuv_sys::rs_YUY2ToI420( + src_bytes.as_ptr(), + src_stride, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } else { + let _ = yuv_sys::rs_UYVYToI420( + src_bytes.as_ptr(), + src_stride, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } } t2_local } else { From 1a808e79b496fc9be5fa0debd7a6d0eeaf777ce7 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 29 Oct 2025 12:56:24 -0700 Subject: [PATCH 06/39] update protocol --- livekit-protocol/protocol | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/livekit-protocol/protocol b/livekit-protocol/protocol index e038e7944..2bc93ddc2 160000 --- a/livekit-protocol/protocol +++ b/livekit-protocol/protocol @@ -1 +1 @@ -Subproject commit e038e7944595dd9a00871ee5ed52ba6062f76c1e +Subproject commit 2bc93ddc27ccfa66ee8d270a1bcd115586fb601d From ae42dfbed88a7cbdfa71b4be2231522a561bff94 Mon Sep 17 00:00:00 2001 From: github-actions <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 29 Oct 2025 20:00:12 +0000 Subject: [PATCH 07/39] generated protobuf --- livekit-protocol/src/livekit.rs | 251 +++- livekit-protocol/src/livekit.serde.rs | 1579 ++++++++++++++++++++++++- 2 files changed, 1794 insertions(+), 36 deletions(-) diff --git a/livekit-protocol/src/livekit.rs 
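For reference, the libyuv-backed rs_UYVYToI420 call in the publisher example boils down to the following scalar loop. This is an illustrative sketch only: libyuv itself is SIMD-optimized and handles odd dimensions, while this sketch assumes even width and height and averages the two chroma samples that fall into each 2x2 block.

// Scalar UYVY (packed 4:2:2) to I420 (planar 4:2:0) conversion sketch.
fn uyvy_to_i420_scalar(
    src: &[u8],         // packed UYVY, stride = width * 2
    width: usize,
    height: usize,
    y_plane: &mut [u8], // width * height
    u_plane: &mut [u8], // (width / 2) * (height / 2)
    v_plane: &mut [u8], // (width / 2) * (height / 2)
) {
    let src_stride = width * 2;
    for row in 0..height {
        let src_row = &src[row * src_stride..][..src_stride];
        for pair in 0..width / 2 {
            let chunk = &src_row[pair * 4..][..4]; // U Y0 V Y1
            y_plane[row * width + pair * 2] = chunk[1];
            y_plane[row * width + pair * 2 + 1] = chunk[3];
            // Chroma is subsampled 2x2 in I420: average this row's sample
            // with the one directly below it.
            if row % 2 == 0 {
                let below = &src[(row + 1) * src_stride + pair * 4..][..4];
                let ci = (row / 2) * (width / 2) + pair;
                u_plane[ci] = ((chunk[0] as u16 + below[0] as u16) / 2) as u8;
                v_plane[ci] = ((chunk[2] as u16 + below[2] as u16) / 2) as u8;
            }
        }
    }
}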
b/livekit-protocol/src/livekit.rs index e39604819..676becb62 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -199,6 +199,12 @@ pub struct Pagination { #[prost(int32, tag="2")] pub limit: i32, } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TokenPagination { + #[prost(string, tag="1")] + pub token: ::prost::alloc::string::String, +} /// ListUpdate is used for updated APIs where 'repeated string' field is modified. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -206,6 +212,15 @@ pub struct ListUpdate { /// set the field to a new list #[prost(string, repeated, tag="1")] pub set: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// append items to a list, avoiding duplicates + #[prost(string, repeated, tag="2")] + pub add: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// delete items from a list + #[prost(string, repeated, tag="3")] + pub del: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// sets the list to an empty list + #[prost(bool, tag="4")] + pub clear: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -484,6 +499,14 @@ pub struct SimulcastCodecInfo { pub cid: ::prost::alloc::string::String, #[prost(message, repeated, tag="4")] pub layers: ::prost::alloc::vec::Vec, + #[prost(enumeration="video_layer::Mode", tag="5")] + pub video_layer_mode: i32, + /// cid (client side id for track) could be different between + /// signalling (AddTrackRequest) and SDP offer. This field + /// will be populated only if it is different to avoid + /// duplication and keep the representation concise. + #[prost(string, tag="6")] + pub sdp_cid: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -498,20 +521,30 @@ pub struct TrackInfo { pub muted: bool, /// original width of video (unset for audio) /// clients may receive a lower resolution version with simulcast + #[deprecated] #[prost(uint32, tag="5")] pub width: u32, /// original height of video (unset for audio) + #[deprecated] #[prost(uint32, tag="6")] pub height: u32, /// true if track is simulcasted + /// + /// see `video_layer_mode` in `codecs` + #[deprecated] #[prost(bool, tag="7")] pub simulcast: bool, /// true if DTX (Discontinuous Transmission) is disabled for audio + /// + /// deprecated in favor of `audio_features` + #[deprecated] #[prost(bool, tag="8")] pub disable_dtx: bool, /// source of media #[prost(enumeration="TrackSource", tag="9")] pub source: i32, + /// see `codecs` for layers of individual codec + #[deprecated] #[prost(message, repeated, tag="10")] pub layers: ::prost::alloc::vec::Vec, /// mime type of codec @@ -521,6 +554,8 @@ pub struct TrackInfo { pub mid: ::prost::alloc::string::String, #[prost(message, repeated, tag="13")] pub codecs: ::prost::alloc::vec::Vec, + /// deprecated in favor of `audio_features` + #[deprecated] #[prost(bool, tag="14")] pub stereo: bool, /// true if RED (Redundant Encoding) is disabled for audio @@ -553,6 +588,42 @@ pub struct VideoLayer { pub bitrate: u32, #[prost(uint32, tag="5")] pub ssrc: u32, + #[prost(int32, tag="6")] + pub spatial_layer: i32, + #[prost(string, tag="7")] + pub rid: ::prost::alloc::string::String, +} +/// Nested message and enum types in `VideoLayer`. 
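// One reading of the new Mode enum below (inferred from the field names, not
// stated in the .proto): ONE_SPATIAL_LAYER_PER_STREAM matches classic
// simulcast, where each RTP stream (rid) carries a single spatial layer,
// while MULTIPLE_SPATIAL_LAYERS_PER_STREAM matches SVC-style codecs such as
// VP9/AV1, where one stream multiplexes several spatial layers and the new
// `spatial_layer` field on VideoLayer identifies them.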
+pub mod video_layer { + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] + #[repr(i32)] + pub enum Mode { + Unused = 0, + OneSpatialLayerPerStream = 1, + MultipleSpatialLayersPerStream = 2, + } + impl Mode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Mode::Unused => "MODE_UNUSED", + Mode::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", + Mode::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "MODE_UNUSED" => Some(Self::Unused), + "ONE_SPATIAL_LAYER_PER_STREAM" => Some(Self::OneSpatialLayerPerStream), + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Some(Self::MultipleSpatialLayersPerStream), + _ => None, + } + } + } } /// new DataPacket API #[allow(clippy::derive_partial_eq_without_eq)] @@ -573,7 +644,7 @@ pub struct DataPacket { /// sid of the user that sent the message #[prost(string, tag="17")] pub participant_sid: ::prost::alloc::string::String, - #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15")] + #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18")] pub value: ::core::option::Option, } /// Nested message and enum types in `DataPacket`. @@ -631,6 +702,50 @@ pub mod data_packet { StreamChunk(super::data_stream::Chunk), #[prost(message, tag="15")] StreamTrailer(super::data_stream::Trailer), + #[prost(message, tag="18")] + EncryptedPacket(super::EncryptedPacket), + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EncryptedPacket { + #[prost(enumeration="encryption::Type", tag="1")] + pub encryption_type: i32, + #[prost(bytes="vec", tag="2")] + pub iv: ::prost::alloc::vec::Vec, + #[prost(uint32, tag="3")] + pub key_index: u32, + /// This is an encrypted EncryptedPacketPayload message representation + #[prost(bytes="vec", tag="4")] + pub encrypted_value: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EncryptedPacketPayload { + #[prost(oneof="encrypted_packet_payload::Value", tags="1, 3, 4, 5, 6, 7, 8, 9")] + pub value: ::core::option::Option, +} +/// Nested message and enum types in `EncryptedPacketPayload`. 
+pub mod encrypted_packet_payload { + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Value { + #[prost(message, tag="1")] + User(super::UserPacket), + #[prost(message, tag="3")] + ChatMessage(super::ChatMessage), + #[prost(message, tag="4")] + RpcRequest(super::RpcRequest), + #[prost(message, tag="5")] + RpcAck(super::RpcAck), + #[prost(message, tag="6")] + RpcResponse(super::RpcResponse), + #[prost(message, tag="7")] + StreamHeader(super::data_stream::Header), + #[prost(message, tag="8")] + StreamChunk(super::data_stream::Chunk), + #[prost(message, tag="9")] + StreamTrailer(super::data_stream::Trailer), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -675,7 +790,7 @@ pub struct UserPacket { /// topic under which the message was published #[prost(string, optional, tag="4")] pub topic: ::core::option::Option<::prost::alloc::string::String>, - /// Unique ID to indentify the message + /// Unique ID to identify the message #[prost(string, optional, tag="8")] pub id: ::core::option::Option<::prost::alloc::string::String>, /// start and end time allow relating the message to specific media time @@ -898,6 +1013,7 @@ pub mod client_info { UnityWeb = 11, Node = 12, Unreal = 13, + Esp32 = 14, } impl Sdk { /// String value of the enum field names used in the ProtoBuf definition. @@ -920,6 +1036,7 @@ pub mod client_info { Sdk::UnityWeb => "UNITY_WEB", Sdk::Node => "NODE", Sdk::Unreal => "UNREAL", + Sdk::Esp32 => "ESP32", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -939,6 +1056,7 @@ pub mod client_info { "UNITY_WEB" => Some(Self::UnityWeb), "NODE" => Some(Self::Node), "UNREAL" => Some(Self::Unreal), + "ESP32" => Some(Self::Esp32), _ => None, } } @@ -1231,7 +1349,8 @@ pub mod data_stream { /// only populated for finite streams, if it's a stream of unknown size this stays empty #[prost(uint64, optional, tag="5")] pub total_length: ::core::option::Option, - /// defaults to NONE + /// this is set on the DataPacket + #[deprecated] #[prost(enumeration="super::encryption::Type", tag="7")] pub encryption_type: i32, /// user defined attributes map that can carry additional info @@ -1267,7 +1386,8 @@ pub mod data_stream { /// a version indicating that this chunk_index has been retroactively modified and the original one needs to be replaced #[prost(int32, tag="4")] pub version: i32, - /// optional, initialization vector for AES-GCM encryption + /// this is set on the DataPacket + #[deprecated] #[prost(bytes="vec", optional, tag="5")] pub iv: ::core::option::Option<::prost::alloc::vec::Vec>, } @@ -2188,6 +2308,12 @@ pub struct S3Upload { pub secret: ::prost::alloc::string::String, #[prost(string, tag="11")] pub session_token: ::prost::alloc::string::String, + /// ARN of the role to assume for file upload. Egress will make an AssumeRole API call using the provided access_key and secret to assume that role. 
On LiveKit cloud, this is only available on accounts that have the feature enabled + #[prost(string, tag="12")] + pub assume_role_arn: ::prost::alloc::string::String, + /// ExternalID to use when assuming role for upload + #[prost(string, tag="13")] + pub assume_role_external_id: ::prost::alloc::string::String, #[prost(string, tag="3")] pub region: ::prost::alloc::string::String, #[prost(string, tag="4")] @@ -2874,10 +3000,10 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Message { - /// initial join exchange, for publisher + /// participant offer for publisher #[prost(message, tag="1")] Offer(super::SessionDescription), - /// participant answering publisher offer + /// participant answering subscriber offer #[prost(message, tag="2")] Answer(super::SessionDescription), #[prost(message, tag="3")] @@ -2930,7 +3056,7 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignalResponse { - #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24")] + #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25")] pub message: ::core::option::Option, } /// Nested message and enum types in `SignalResponse`. @@ -3010,6 +3136,9 @@ pub mod signal_response { /// notify to the participant when they have been moved to a new room #[prost(message, tag="24")] RoomMoved(super::RoomMovedResponse), + /// notify number of required media sections to satisfy subscribed tracks + #[prost(message, tag="25")] + MediaSectionsRequirement(super::MediaSectionsRequirement), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -3019,6 +3148,10 @@ pub struct SimulcastCodec { pub codec: ::prost::alloc::string::String, #[prost(string, tag="2")] pub cid: ::prost::alloc::string::String, + #[prost(message, repeated, tag="4")] + pub layers: ::prost::alloc::vec::Vec, + #[prost(enumeration="video_layer::Mode", tag="5")] + pub video_layer_mode: i32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -3030,7 +3163,6 @@ pub struct AddTrackRequest { pub name: ::prost::alloc::string::String, #[prost(enumeration="TrackType", tag="3")] pub r#type: i32, - /// to be deprecated in favor of layers #[prost(uint32, tag="4")] pub width: u32, #[prost(uint32, tag="5")] @@ -3434,7 +3566,9 @@ pub struct RoomMovedResponse { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncState { - /// last subscribe answer before reconnecting + /// last subscribe/publish answer before reconnecting + /// subscribe answer if using dual peer connection + /// publish answer if using single peer connection #[prost(message, optional, tag="1")] pub answer: ::core::option::Option, #[prost(message, optional, tag="2")] @@ -3443,7 +3577,9 @@ pub struct SyncState { pub publish_tracks: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag="4")] pub data_channels: ::prost::alloc::vec::Vec, - /// last received server side offer before reconnecting + /// last received server side offer/sent client side offer before reconnecting + /// received server side offer if using dual peer connection + /// sent client side offer if using single peer connection #[prost(message, optional, tag="5")] pub offer: ::core::option::Option, #[prost(string, repeated, tag="6")] @@ -3603,6 
+3739,92 @@ pub struct TrackSubscribed {
     #[prost(string, tag="1")]
     pub track_sid: ::prost::alloc::string::String,
 }
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct ConnectionSettings {
+    #[prost(bool, tag="1")]
+    pub auto_subscribe: bool,
+    #[prost(bool, tag="2")]
+    pub adaptive_stream: bool,
+    #[prost(bool, optional, tag="3")]
+    pub subscriber_allow_pause: ::core::option::Option<bool>,
+    #[prost(bool, tag="4")]
+    pub disable_ice_lite: bool,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct JoinRequest {
+    #[prost(message, optional, tag="1")]
+    pub client_info: ::core::option::Option<ClientInfo>,
+    #[prost(message, optional, tag="2")]
+    pub connection_settings: ::core::option::Option<ConnectionSettings>,
+    /// if not empty, will overwrite `metadata` in token
+    #[prost(string, tag="3")]
+    pub metadata: ::prost::alloc::string::String,
+    /// will set keys provided via this
+    /// will overwrite if the same key is in the token
+    /// will not delete keys from token if there is a key collision and this sets that key to empty value
+    #[prost(map="string, string", tag="4")]
+    pub participant_attributes: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
+    #[prost(message, repeated, tag="5")]
+    pub add_track_requests: ::prost::alloc::vec::Vec<AddTrackRequest>,
+    #[prost(message, optional, tag="6")]
+    pub publisher_offer: ::core::option::Option<SessionDescription>,
+    #[prost(bool, tag="7")]
+    pub reconnect: bool,
+    #[prost(enumeration="ReconnectReason", tag="8")]
+    pub reconnect_reason: i32,
+    #[prost(string, tag="9")]
+    pub participant_sid: ::prost::alloc::string::String,
+    #[prost(message, optional, tag="10")]
+    pub sync_state: ::core::option::Option<SyncState>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct WrappedJoinRequest {
+    #[prost(enumeration="wrapped_join_request::Compression", tag="1")]
+    pub compression: i32,
+    /// marshalled JoinRequest + potentially compressed
+    #[prost(bytes="vec", tag="2")]
+    pub join_request: ::prost::alloc::vec::Vec<u8>,
+}
+/// Nested message and enum types in `WrappedJoinRequest`.
+pub mod wrapped_join_request {
+    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
+    #[repr(i32)]
+    pub enum Compression {
+        None = 0,
+        Gzip = 1,
+    }
+    impl Compression {
+        /// String value of the enum field names used in the ProtoBuf definition.
+        ///
+        /// The values are not transformed in any way and thus are considered stable
+        /// (if the ProtoBuf definition does not change) and safe for programmatic use.
+        pub fn as_str_name(&self) -> &'static str {
+            match self {
+                Compression::None => "NONE",
+                Compression::Gzip => "GZIP",
+            }
+        }
+        /// Creates an enum from field names used in the ProtoBuf definition.
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NONE" => Some(Self::None), + "GZIP" => Some(Self::Gzip), + _ => None, + } + } + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MediaSectionsRequirement { + #[prost(uint32, tag="1")] + pub num_audios: u32, + #[prost(uint32, tag="2")] + pub num_videos: u32, +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum SignalTarget { @@ -3722,6 +3944,10 @@ pub struct JobState { pub updated_at: i64, #[prost(string, tag="6")] pub participant_identity: ::prost::alloc::string::String, + #[prost(string, tag="7")] + pub worker_id: ::prost::alloc::string::String, + #[prost(string, tag="8")] + pub agent_id: ::prost::alloc::string::String, } /// from Worker to Server #[allow(clippy::derive_partial_eq_without_eq)] @@ -4290,6 +4516,9 @@ pub struct RoomConfiguration { /// limit number of participants that can be in a room, excluding Egress and Ingress participants #[prost(uint32, tag="4")] pub max_participants: u32, + /// metadata of room + #[prost(string, tag="11")] + pub metadata: ::prost::alloc::string::String, /// egress #[prost(message, optional, tag="5")] pub egress: ::core::option::Option, @@ -4751,7 +4980,7 @@ impl IngressVideoEncodingPreset { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WebhookEvent { - /// one of room_started, room_finished, participant_joined, participant_left, + /// one of room_started, room_finished, participant_joined, participant_left, participant_connection_aborted, /// track_published, track_unpublished, egress_started, egress_updated, egress_ended, /// ingress_started, ingress_ended #[prost(string, tag="1")] diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 5c411a433..3c79f6c7a 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -2848,6 +2848,7 @@ impl serde::Serialize for client_info::Sdk { Self::UnityWeb => "UNITY_WEB", Self::Node => "NODE", Self::Unreal => "UNREAL", + Self::Esp32 => "ESP32", }; serializer.serialize_str(variant) } @@ -2873,6 +2874,7 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB", "NODE", "UNREAL", + "ESP32", ]; struct GeneratedVisitor; @@ -2927,6 +2929,7 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB" => Ok(client_info::Sdk::UnityWeb), "NODE" => Ok(client_info::Sdk::Node), "UNREAL" => Ok(client_info::Sdk::Unreal), + "ESP32" => Ok(client_info::Sdk::Esp32), _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), } } @@ -3353,6 +3356,156 @@ impl<'de> serde::Deserialize<'de> for ConnectionQualityUpdate { deserializer.deserialize_struct("livekit.ConnectionQualityUpdate", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for ConnectionSettings { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.auto_subscribe { + len += 1; + } + if self.adaptive_stream { + len += 1; + } + if self.subscriber_allow_pause.is_some() { + len += 1; + } + if self.disable_ice_lite { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.ConnectionSettings", len)?; + if self.auto_subscribe { + struct_ser.serialize_field("autoSubscribe", &self.auto_subscribe)?; + } + if self.adaptive_stream { + 
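// Given the field counting above, proto3-JSON output from ConnectionSettings
// omits every default/absent field. For example (illustrative), a subscriber
// that opts in to everything serializes as:
//   {"autoSubscribe":true,"adaptiveStream":true,"subscriberAllowPause":true}
// while an all-default message serializes as {}.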
struct_ser.serialize_field("adaptiveStream", &self.adaptive_stream)?; + } + if let Some(v) = self.subscriber_allow_pause.as_ref() { + struct_ser.serialize_field("subscriberAllowPause", v)?; + } + if self.disable_ice_lite { + struct_ser.serialize_field("disableIceLite", &self.disable_ice_lite)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for ConnectionSettings { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "auto_subscribe", + "autoSubscribe", + "adaptive_stream", + "adaptiveStream", + "subscriber_allow_pause", + "subscriberAllowPause", + "disable_ice_lite", + "disableIceLite", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + AutoSubscribe, + AdaptiveStream, + SubscriberAllowPause, + DisableIceLite, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "autoSubscribe" | "auto_subscribe" => Ok(GeneratedField::AutoSubscribe), + "adaptiveStream" | "adaptive_stream" => Ok(GeneratedField::AdaptiveStream), + "subscriberAllowPause" | "subscriber_allow_pause" => Ok(GeneratedField::SubscriberAllowPause), + "disableIceLite" | "disable_ice_lite" => Ok(GeneratedField::DisableIceLite), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = ConnectionSettings; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.ConnectionSettings") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut auto_subscribe__ = None; + let mut adaptive_stream__ = None; + let mut subscriber_allow_pause__ = None; + let mut disable_ice_lite__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::AutoSubscribe => { + if auto_subscribe__.is_some() { + return Err(serde::de::Error::duplicate_field("autoSubscribe")); + } + auto_subscribe__ = Some(map_.next_value()?); + } + GeneratedField::AdaptiveStream => { + if adaptive_stream__.is_some() { + return Err(serde::de::Error::duplicate_field("adaptiveStream")); + } + adaptive_stream__ = Some(map_.next_value()?); + } + GeneratedField::SubscriberAllowPause => { + if subscriber_allow_pause__.is_some() { + return Err(serde::de::Error::duplicate_field("subscriberAllowPause")); + } + subscriber_allow_pause__ = map_.next_value()?; + } + GeneratedField::DisableIceLite => { + if disable_ice_lite__.is_some() { + return Err(serde::de::Error::duplicate_field("disableIceLite")); + } + disable_ice_lite__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(ConnectionSettings { + auto_subscribe: auto_subscribe__.unwrap_or_default(), + adaptive_stream: adaptive_stream__.unwrap_or_default(), + subscriber_allow_pause: subscriber_allow_pause__, + disable_ice_lite: disable_ice_lite__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.ConnectionSettings", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for CreateAgentDispatchRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -5586,6 +5739,9 @@ impl serde::Serialize for DataPacket { data_packet::Value::StreamTrailer(v) => { struct_ser.serialize_field("streamTrailer", v)?; } + data_packet::Value::EncryptedPacket(v) => { + struct_ser.serialize_field("encryptedPacket", v)?; + } } } struct_ser.end() @@ -5626,6 +5782,8 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamChunk", "stream_trailer", "streamTrailer", + "encrypted_packet", + "encryptedPacket", ]; #[allow(clippy::enum_variant_names)] @@ -5647,6 +5805,7 @@ impl<'de> serde::Deserialize<'de> for DataPacket { StreamHeader, StreamChunk, StreamTrailer, + EncryptedPacket, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -5686,6 +5845,7 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), + "encryptedPacket" | "encrypted_packet" => Ok(GeneratedField::EncryptedPacket), _ => Ok(GeneratedField::__SkipField__), } } @@ -5827,6 +5987,13 @@ impl<'de> serde::Deserialize<'de> for DataPacket { return Err(serde::de::Error::duplicate_field("streamTrailer")); } value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::StreamTrailer) +; + } + GeneratedField::EncryptedPacket => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptedPacket")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::EncryptedPacket) ; } GeneratedField::__SkipField__ => { @@ -9211,6 +9378,365 @@ impl<'de> serde::Deserialize<'de> for EncodingOptionsPreset { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for EncryptedPacket { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.encryption_type != 0 { + len += 1; + } + if !self.iv.is_empty() { + len += 1; + } + if self.key_index != 0 { + len += 1; + } + if !self.encrypted_value.is_empty() { + len += 1; + } 
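// Illustrative sketch, not part of the generated file: how a sender might
// assemble this envelope. The inner payload is a prost-encoded
// EncryptedPacketPayload, and the cipher is a caller-supplied placeholder,
// since actual E2EE key management lives in the client SDKs rather than here.
#[allow(dead_code)]
fn build_encrypted_packet(
    payload: &EncryptedPacketPayload,
    iv: Vec<u8>,
    key_index: u32,
    encrypt: impl Fn(&[u8], &[u8]) -> Vec<u8>, // (plaintext, iv) -> ciphertext
) -> EncryptedPacket {
    use prost::Message;
    // Marshal the inner payload, then encrypt it into `encrypted_value`.
    let plaintext = payload.encode_to_vec();
    EncryptedPacket {
        encryption_type: encryption::Type::Gcm as i32,
        encrypted_value: encrypt(&plaintext, &iv),
        iv,
        key_index,
    }
}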
+ let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacket", len)?; + if self.encryption_type != 0 { + let v = encryption::Type::try_from(self.encryption_type) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.encryption_type)))?; + struct_ser.serialize_field("encryptionType", &v)?; + } + if !self.iv.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("iv", pbjson::private::base64::encode(&self.iv).as_str())?; + } + if self.key_index != 0 { + struct_ser.serialize_field("keyIndex", &self.key_index)?; + } + if !self.encrypted_value.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("encryptedValue", pbjson::private::base64::encode(&self.encrypted_value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for EncryptedPacket { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "encryption_type", + "encryptionType", + "iv", + "key_index", + "keyIndex", + "encrypted_value", + "encryptedValue", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + EncryptionType, + Iv, + KeyIndex, + EncryptedValue, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "encryptionType" | "encryption_type" => Ok(GeneratedField::EncryptionType), + "iv" => Ok(GeneratedField::Iv), + "keyIndex" | "key_index" => Ok(GeneratedField::KeyIndex), + "encryptedValue" | "encrypted_value" => Ok(GeneratedField::EncryptedValue), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = EncryptedPacket; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.EncryptedPacket") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut encryption_type__ = None; + let mut iv__ = None; + let mut key_index__ = None; + let mut encrypted_value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::EncryptionType => { + if encryption_type__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptionType")); + } + encryption_type__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::Iv => { + if iv__.is_some() { + return Err(serde::de::Error::duplicate_field("iv")); + } + iv__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::KeyIndex => { + if key_index__.is_some() { + return Err(serde::de::Error::duplicate_field("keyIndex")); + } + key_index__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::EncryptedValue => { + if encrypted_value__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptedValue")); + } + encrypted_value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(EncryptedPacket { + encryption_type: encryption_type__.unwrap_or_default(), + iv: iv__.unwrap_or_default(), + key_index: key_index__.unwrap_or_default(), + encrypted_value: encrypted_value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.EncryptedPacket", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for EncryptedPacketPayload { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacketPayload", len)?; + if let Some(v) = self.value.as_ref() { + match v { + encrypted_packet_payload::Value::User(v) => { + struct_ser.serialize_field("user", v)?; + } + encrypted_packet_payload::Value::ChatMessage(v) => { + struct_ser.serialize_field("chatMessage", v)?; + } + encrypted_packet_payload::Value::RpcRequest(v) => { + struct_ser.serialize_field("rpcRequest", v)?; + } + encrypted_packet_payload::Value::RpcAck(v) => { + struct_ser.serialize_field("rpcAck", v)?; + } + encrypted_packet_payload::Value::RpcResponse(v) => { + struct_ser.serialize_field("rpcResponse", v)?; + } + encrypted_packet_payload::Value::StreamHeader(v) => { + struct_ser.serialize_field("streamHeader", v)?; + } + encrypted_packet_payload::Value::StreamChunk(v) => { + struct_ser.serialize_field("streamChunk", v)?; + } + encrypted_packet_payload::Value::StreamTrailer(v) => { + struct_ser.serialize_field("streamTrailer", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for EncryptedPacketPayload { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "user", + "chat_message", + "chatMessage", + "rpc_request", + "rpcRequest", + "rpc_ack", + "rpcAck", + "rpc_response", + "rpcResponse", + "stream_header", + "streamHeader", + "stream_chunk", + "streamChunk", + "stream_trailer", + "streamTrailer", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + User, + ChatMessage, + RpcRequest, + RpcAck, + RpcResponse, + StreamHeader, + StreamChunk, + StreamTrailer, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where 
+ E: serde::de::Error, + { + match value { + "user" => Ok(GeneratedField::User), + "chatMessage" | "chat_message" => Ok(GeneratedField::ChatMessage), + "rpcRequest" | "rpc_request" => Ok(GeneratedField::RpcRequest), + "rpcAck" | "rpc_ack" => Ok(GeneratedField::RpcAck), + "rpcResponse" | "rpc_response" => Ok(GeneratedField::RpcResponse), + "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), + "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), + "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = EncryptedPacketPayload; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.EncryptedPacketPayload") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::User => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("user")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::User) +; + } + GeneratedField::ChatMessage => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("chatMessage")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::ChatMessage) +; + } + GeneratedField::RpcRequest => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcRequest")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcRequest) +; + } + GeneratedField::RpcAck => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcAck")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcAck) +; + } + GeneratedField::RpcResponse => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcResponse")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcResponse) +; + } + GeneratedField::StreamHeader => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamHeader")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamHeader) +; + } + GeneratedField::StreamChunk => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamChunk")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamChunk) +; + } + GeneratedField::StreamTrailer => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamTrailer")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamTrailer) +; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(EncryptedPacketPayload { + value: value__, + }) + } + } + deserializer.deserialize_struct("livekit.EncryptedPacketPayload", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for Encryption { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -13485,6 +14011,12 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { len += 1; } + if 
!self.worker_id.is_empty() { + len += 1; + } + if !self.agent_id.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.JobState", len)?; if self.status != 0 { let v = JobStatus::try_from(self.status) @@ -13512,6 +14044,12 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { struct_ser.serialize_field("participantIdentity", &self.participant_identity)?; } + if !self.worker_id.is_empty() { + struct_ser.serialize_field("workerId", &self.worker_id)?; + } + if !self.agent_id.is_empty() { + struct_ser.serialize_field("agentId", &self.agent_id)?; + } struct_ser.end() } } @@ -13532,6 +14070,10 @@ impl<'de> serde::Deserialize<'de> for JobState { "updatedAt", "participant_identity", "participantIdentity", + "worker_id", + "workerId", + "agent_id", + "agentId", ]; #[allow(clippy::enum_variant_names)] @@ -13542,6 +14084,8 @@ impl<'de> serde::Deserialize<'de> for JobState { EndedAt, UpdatedAt, ParticipantIdentity, + WorkerId, + AgentId, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -13570,6 +14114,8 @@ impl<'de> serde::Deserialize<'de> for JobState { "endedAt" | "ended_at" => Ok(GeneratedField::EndedAt), "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), "participantIdentity" | "participant_identity" => Ok(GeneratedField::ParticipantIdentity), + "workerId" | "worker_id" => Ok(GeneratedField::WorkerId), + "agentId" | "agent_id" => Ok(GeneratedField::AgentId), _ => Ok(GeneratedField::__SkipField__), } } @@ -13595,6 +14141,8 @@ impl<'de> serde::Deserialize<'de> for JobState { let mut ended_at__ = None; let mut updated_at__ = None; let mut participant_identity__ = None; + let mut worker_id__ = None; + let mut agent_id__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Status => { @@ -13639,6 +14187,18 @@ impl<'de> serde::Deserialize<'de> for JobState { } participant_identity__ = Some(map_.next_value()?); } + GeneratedField::WorkerId => { + if worker_id__.is_some() { + return Err(serde::de::Error::duplicate_field("workerId")); + } + worker_id__ = Some(map_.next_value()?); + } + GeneratedField::AgentId => { + if agent_id__.is_some() { + return Err(serde::de::Error::duplicate_field("agentId")); + } + agent_id__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -13651,6 +14211,8 @@ impl<'de> serde::Deserialize<'de> for JobState { ended_at: ended_at__.unwrap_or_default(), updated_at: updated_at__.unwrap_or_default(), participant_identity: participant_identity__.unwrap_or_default(), + worker_id: worker_id__.unwrap_or_default(), + agent_id: agent_id__.unwrap_or_default(), }) } } @@ -13904,6 +14466,266 @@ impl<'de> serde::Deserialize<'de> for JobType { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for JoinRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.client_info.is_some() { + len += 1; + } + if self.connection_settings.is_some() { + len += 1; + } + if !self.metadata.is_empty() { + len += 1; + } + if !self.participant_attributes.is_empty() { + len += 1; + } + if !self.add_track_requests.is_empty() { + len += 1; + } + if self.publisher_offer.is_some() { + len += 1; + } + if self.reconnect { + len += 1; + } + if self.reconnect_reason != 0 { + len += 1; + } + if !self.participant_sid.is_empty() { + len += 1; + } + if self.sync_state.is_some() { + len += 1; + } + let mut 
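                // `len` was computed above by counting only the fields that will actually
                // be written (proto3 default values are skipped), so serialize_struct
                // receives the exact field count before any field is emitted.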
struct_ser = serializer.serialize_struct("livekit.JoinRequest", len)?; + if let Some(v) = self.client_info.as_ref() { + struct_ser.serialize_field("clientInfo", v)?; + } + if let Some(v) = self.connection_settings.as_ref() { + struct_ser.serialize_field("connectionSettings", v)?; + } + if !self.metadata.is_empty() { + struct_ser.serialize_field("metadata", &self.metadata)?; + } + if !self.participant_attributes.is_empty() { + struct_ser.serialize_field("participantAttributes", &self.participant_attributes)?; + } + if !self.add_track_requests.is_empty() { + struct_ser.serialize_field("addTrackRequests", &self.add_track_requests)?; + } + if let Some(v) = self.publisher_offer.as_ref() { + struct_ser.serialize_field("publisherOffer", v)?; + } + if self.reconnect { + struct_ser.serialize_field("reconnect", &self.reconnect)?; + } + if self.reconnect_reason != 0 { + let v = ReconnectReason::try_from(self.reconnect_reason) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.reconnect_reason)))?; + struct_ser.serialize_field("reconnectReason", &v)?; + } + if !self.participant_sid.is_empty() { + struct_ser.serialize_field("participantSid", &self.participant_sid)?; + } + if let Some(v) = self.sync_state.as_ref() { + struct_ser.serialize_field("syncState", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for JoinRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "client_info", + "clientInfo", + "connection_settings", + "connectionSettings", + "metadata", + "participant_attributes", + "participantAttributes", + "add_track_requests", + "addTrackRequests", + "publisher_offer", + "publisherOffer", + "reconnect", + "reconnect_reason", + "reconnectReason", + "participant_sid", + "participantSid", + "sync_state", + "syncState", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + ClientInfo, + ConnectionSettings, + Metadata, + ParticipantAttributes, + AddTrackRequests, + PublisherOffer, + Reconnect, + ReconnectReason, + ParticipantSid, + SyncState, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "clientInfo" | "client_info" => Ok(GeneratedField::ClientInfo), + "connectionSettings" | "connection_settings" => Ok(GeneratedField::ConnectionSettings), + "metadata" => Ok(GeneratedField::Metadata), + "participantAttributes" | "participant_attributes" => Ok(GeneratedField::ParticipantAttributes), + "addTrackRequests" | "add_track_requests" => Ok(GeneratedField::AddTrackRequests), + "publisherOffer" | "publisher_offer" => Ok(GeneratedField::PublisherOffer), + "reconnect" => Ok(GeneratedField::Reconnect), + "reconnectReason" | "reconnect_reason" => Ok(GeneratedField::ReconnectReason), + "participantSid" | "participant_sid" => Ok(GeneratedField::ParticipantSid), + "syncState" | "sync_state" => Ok(GeneratedField::SyncState), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + 
deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = JoinRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.JoinRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut client_info__ = None; + let mut connection_settings__ = None; + let mut metadata__ = None; + let mut participant_attributes__ = None; + let mut add_track_requests__ = None; + let mut publisher_offer__ = None; + let mut reconnect__ = None; + let mut reconnect_reason__ = None; + let mut participant_sid__ = None; + let mut sync_state__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::ClientInfo => { + if client_info__.is_some() { + return Err(serde::de::Error::duplicate_field("clientInfo")); + } + client_info__ = map_.next_value()?; + } + GeneratedField::ConnectionSettings => { + if connection_settings__.is_some() { + return Err(serde::de::Error::duplicate_field("connectionSettings")); + } + connection_settings__ = map_.next_value()?; + } + GeneratedField::Metadata => { + if metadata__.is_some() { + return Err(serde::de::Error::duplicate_field("metadata")); + } + metadata__ = Some(map_.next_value()?); + } + GeneratedField::ParticipantAttributes => { + if participant_attributes__.is_some() { + return Err(serde::de::Error::duplicate_field("participantAttributes")); + } + participant_attributes__ = Some( + map_.next_value::>()? + ); + } + GeneratedField::AddTrackRequests => { + if add_track_requests__.is_some() { + return Err(serde::de::Error::duplicate_field("addTrackRequests")); + } + add_track_requests__ = Some(map_.next_value()?); + } + GeneratedField::PublisherOffer => { + if publisher_offer__.is_some() { + return Err(serde::de::Error::duplicate_field("publisherOffer")); + } + publisher_offer__ = map_.next_value()?; + } + GeneratedField::Reconnect => { + if reconnect__.is_some() { + return Err(serde::de::Error::duplicate_field("reconnect")); + } + reconnect__ = Some(map_.next_value()?); + } + GeneratedField::ReconnectReason => { + if reconnect_reason__.is_some() { + return Err(serde::de::Error::duplicate_field("reconnectReason")); + } + reconnect_reason__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::ParticipantSid => { + if participant_sid__.is_some() { + return Err(serde::de::Error::duplicate_field("participantSid")); + } + participant_sid__ = Some(map_.next_value()?); + } + GeneratedField::SyncState => { + if sync_state__.is_some() { + return Err(serde::de::Error::duplicate_field("syncState")); + } + sync_state__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(JoinRequest { + client_info: client_info__, + connection_settings: connection_settings__, + metadata: metadata__.unwrap_or_default(), + participant_attributes: participant_attributes__.unwrap_or_default(), + add_track_requests: add_track_requests__.unwrap_or_default(), + publisher_offer: publisher_offer__, + reconnect: reconnect__.unwrap_or_default(), + reconnect_reason: reconnect_reason__.unwrap_or_default(), + participant_sid: participant_sid__.unwrap_or_default(), + sync_state: sync_state__, + }) + } + } + deserializer.deserialize_struct("livekit.JoinRequest", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for JoinResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -16384,10 +17206,28 @@ impl serde::Serialize for ListUpdate { if !self.set.is_empty() { len += 1; } + if !self.add.is_empty() { + len += 1; + } + if !self.del.is_empty() { + len += 1; + } + if self.clear { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.ListUpdate", len)?; if !self.set.is_empty() { struct_ser.serialize_field("set", &self.set)?; } + if !self.add.is_empty() { + struct_ser.serialize_field("add", &self.add)?; + } + if !self.del.is_empty() { + struct_ser.serialize_field("del", &self.del)?; + } + if self.clear { + struct_ser.serialize_field("clear", &self.clear)?; + } struct_ser.end() } } @@ -16399,11 +17239,17 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { const FIELDS: &[&str] = &[ "set", + "add", + "del", + "clear", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Set, + Add, + Del, + Clear, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -16427,6 +17273,9 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { match value { "set" => Ok(GeneratedField::Set), + "add" => Ok(GeneratedField::Add), + "del" => Ok(GeneratedField::Del), + "clear" => Ok(GeneratedField::Clear), _ => Ok(GeneratedField::__SkipField__), } } @@ -16447,6 +17296,9 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { V: serde::de::MapAccess<'de>, { let mut set__ = None; + let mut add__ = None; + let mut del__ = None; + let mut clear__ = None; while let Some(k) = map_.next_key()? 
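                    // ListUpdate now exposes four complementary fields: `set` replaces the
                    // whole list, `add`/`del` patch it, and `clear` empties it (see the
                    // proto comments in livekit.rs). A sketch of the accepted JSON,
                    // assuming serde_json is available:
                    //
                    //     let upd: ListUpdate = serde_json::from_str(
                    //         r#"{"add":["speaker-1"],"del":["speaker-2"],"clear":false}"#,
                    //     )?;
                    //     assert_eq!(upd.add, vec!["speaker-1".to_string()]);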
{ match k { GeneratedField::Set => { @@ -16455,6 +17307,24 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } set__ = Some(map_.next_value()?); } + GeneratedField::Add => { + if add__.is_some() { + return Err(serde::de::Error::duplicate_field("add")); + } + add__ = Some(map_.next_value()?); + } + GeneratedField::Del => { + if del__.is_some() { + return Err(serde::de::Error::duplicate_field("del")); + } + del__ = Some(map_.next_value()?); + } + GeneratedField::Clear => { + if clear__.is_some() { + return Err(serde::de::Error::duplicate_field("clear")); + } + clear__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -16462,12 +17332,133 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } Ok(ListUpdate { set: set__.unwrap_or_default(), + add: add__.unwrap_or_default(), + del: del__.unwrap_or_default(), + clear: clear__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.ListUpdate", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for MediaSectionsRequirement { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.num_audios != 0 { + len += 1; + } + if self.num_videos != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.MediaSectionsRequirement", len)?; + if self.num_audios != 0 { + struct_ser.serialize_field("numAudios", &self.num_audios)?; + } + if self.num_videos != 0 { + struct_ser.serialize_field("numVideos", &self.num_videos)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for MediaSectionsRequirement { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "num_audios", + "numAudios", + "num_videos", + "numVideos", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + NumAudios, + NumVideos, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "numAudios" | "num_audios" => Ok(GeneratedField::NumAudios), + "numVideos" | "num_videos" => Ok(GeneratedField::NumVideos), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = MediaSectionsRequirement; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.MediaSectionsRequirement") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut num_audios__ = None; + let mut num_videos__ = None; + while let Some(k) = map_.next_key()? 
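                    // The numeric fields below go through ::pbjson::private::NumberDeserialize,
                    // which follows the proto3 JSON convention that integers may arrive either
                    // as bare numbers or as quoted strings. Sketch, assuming serde_json:
                    //
                    //     let r: MediaSectionsRequirement =
                    //         serde_json::from_str(r#"{"numAudios":"2","numVideos":1}"#)?;
                    //     assert_eq!((r.num_audios, r.num_videos), (2, 1));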
{ + match k { + GeneratedField::NumAudios => { + if num_audios__.is_some() { + return Err(serde::de::Error::duplicate_field("numAudios")); + } + num_audios__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::NumVideos => { + if num_videos__.is_some() { + return Err(serde::de::Error::duplicate_field("numVideos")); + } + num_videos__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(MediaSectionsRequirement { + num_audios: num_audios__.unwrap_or_default(), + num_videos: num_videos__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.MediaSectionsRequirement", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for MetricLabel { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -23352,6 +24343,9 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { len += 1; } + if !self.metadata.is_empty() { + len += 1; + } if self.egress.is_some() { len += 1; } @@ -23380,6 +24374,9 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { struct_ser.serialize_field("maxParticipants", &self.max_participants)?; } + if !self.metadata.is_empty() { + struct_ser.serialize_field("metadata", &self.metadata)?; + } if let Some(v) = self.egress.as_ref() { struct_ser.serialize_field("egress", v)?; } @@ -23412,6 +24409,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "departureTimeout", "max_participants", "maxParticipants", + "metadata", "egress", "min_playout_delay", "minPlayoutDelay", @@ -23428,6 +24426,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { EmptyTimeout, DepartureTimeout, MaxParticipants, + Metadata, Egress, MinPlayoutDelay, MaxPlayoutDelay, @@ -23459,6 +24458,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "emptyTimeout" | "empty_timeout" => Ok(GeneratedField::EmptyTimeout), "departureTimeout" | "departure_timeout" => Ok(GeneratedField::DepartureTimeout), "maxParticipants" | "max_participants" => Ok(GeneratedField::MaxParticipants), + "metadata" => Ok(GeneratedField::Metadata), "egress" => Ok(GeneratedField::Egress), "minPlayoutDelay" | "min_playout_delay" => Ok(GeneratedField::MinPlayoutDelay), "maxPlayoutDelay" | "max_playout_delay" => Ok(GeneratedField::MaxPlayoutDelay), @@ -23487,6 +24487,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { let mut empty_timeout__ = None; let mut departure_timeout__ = None; let mut max_participants__ = None; + let mut metadata__ = None; let mut egress__ = None; let mut min_playout_delay__ = None; let mut max_playout_delay__ = None; @@ -23524,6 +24525,12 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::Metadata => { + if metadata__.is_some() { + return Err(serde::de::Error::duplicate_field("metadata")); + } + metadata__ = Some(map_.next_value()?); + } GeneratedField::Egress => { if egress__.is_some() { return Err(serde::de::Error::duplicate_field("egress")); @@ -23568,6 +24575,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { empty_timeout: empty_timeout__.unwrap_or_default(), departure_timeout: departure_timeout__.unwrap_or_default(), max_participants: max_participants__.unwrap_or_default(), + metadata: metadata__.unwrap_or_default(), egress: egress__, min_playout_delay: min_playout_delay__.unwrap_or_default(), 
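                    // Note the asymmetry in this literal: message-typed fields such as
                    // `egress` keep their Option (absent means unset), while scalar fields
                    // fall back to their proto3 defaults via unwrap_or_default().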
max_playout_delay: max_playout_delay__.unwrap_or_default(), @@ -24604,6 +25612,12 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { len += 1; } + if !self.assume_role_arn.is_empty() { + len += 1; + } + if !self.assume_role_external_id.is_empty() { + len += 1; + } if !self.region.is_empty() { len += 1; } @@ -24638,6 +25652,12 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { struct_ser.serialize_field("sessionToken", &self.session_token)?; } + if !self.assume_role_arn.is_empty() { + struct_ser.serialize_field("assumeRoleArn", &self.assume_role_arn)?; + } + if !self.assume_role_external_id.is_empty() { + struct_ser.serialize_field("assumeRoleExternalId", &self.assume_role_external_id)?; + } if !self.region.is_empty() { struct_ser.serialize_field("region", &self.region)?; } @@ -24677,6 +25697,10 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "secret", "session_token", "sessionToken", + "assume_role_arn", + "assumeRoleArn", + "assume_role_external_id", + "assumeRoleExternalId", "region", "endpoint", "bucket", @@ -24694,6 +25718,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { AccessKey, Secret, SessionToken, + AssumeRoleArn, + AssumeRoleExternalId, Region, Endpoint, Bucket, @@ -24727,6 +25753,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "accessKey" | "access_key" => Ok(GeneratedField::AccessKey), "secret" => Ok(GeneratedField::Secret), "sessionToken" | "session_token" => Ok(GeneratedField::SessionToken), + "assumeRoleArn" | "assume_role_arn" => Ok(GeneratedField::AssumeRoleArn), + "assumeRoleExternalId" | "assume_role_external_id" => Ok(GeneratedField::AssumeRoleExternalId), "region" => Ok(GeneratedField::Region), "endpoint" => Ok(GeneratedField::Endpoint), "bucket" => Ok(GeneratedField::Bucket), @@ -24757,6 +25785,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { let mut access_key__ = None; let mut secret__ = None; let mut session_token__ = None; + let mut assume_role_arn__ = None; + let mut assume_role_external_id__ = None; let mut region__ = None; let mut endpoint__ = None; let mut bucket__ = None; @@ -24785,6 +25815,18 @@ impl<'de> serde::Deserialize<'de> for S3Upload { } session_token__ = Some(map_.next_value()?); } + GeneratedField::AssumeRoleArn => { + if assume_role_arn__.is_some() { + return Err(serde::de::Error::duplicate_field("assumeRoleArn")); + } + assume_role_arn__ = Some(map_.next_value()?); + } + GeneratedField::AssumeRoleExternalId => { + if assume_role_external_id__.is_some() { + return Err(serde::de::Error::duplicate_field("assumeRoleExternalId")); + } + assume_role_external_id__ = Some(map_.next_value()?); + } GeneratedField::Region => { if region__.is_some() { return Err(serde::de::Error::duplicate_field("region")); @@ -24844,6 +25886,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { access_key: access_key__.unwrap_or_default(), secret: secret__.unwrap_or_default(), session_token: session_token__.unwrap_or_default(), + assume_role_arn: assume_role_arn__.unwrap_or_default(), + assume_role_external_id: assume_role_external_id__.unwrap_or_default(), region: region__.unwrap_or_default(), endpoint: endpoint__.unwrap_or_default(), bucket: bucket__.unwrap_or_default(), @@ -31459,6 +32503,9 @@ impl serde::Serialize for SignalResponse { signal_response::Message::RoomMoved(v) => { struct_ser.serialize_field("roomMoved", v)?; } + signal_response::Message::MediaSectionsRequirement(v) => { + struct_ser.serialize_field("mediaSectionsRequirement", v)?; + } } } struct_ser.end() @@ -31508,6 +32555,8 @@ 
impl<'de> serde::Deserialize<'de> for SignalResponse { "trackSubscribed", "room_moved", "roomMoved", + "media_sections_requirement", + "mediaSectionsRequirement", ]; #[allow(clippy::enum_variant_names)] @@ -31535,6 +32584,7 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { RequestResponse, TrackSubscribed, RoomMoved, + MediaSectionsRequirement, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -31580,6 +32630,7 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { "requestResponse" | "request_response" => Ok(GeneratedField::RequestResponse), "trackSubscribed" | "track_subscribed" => Ok(GeneratedField::TrackSubscribed), "roomMoved" | "room_moved" => Ok(GeneratedField::RoomMoved), + "mediaSectionsRequirement" | "media_sections_requirement" => Ok(GeneratedField::MediaSectionsRequirement), _ => Ok(GeneratedField::__SkipField__), } } @@ -31759,6 +32810,13 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { return Err(serde::de::Error::duplicate_field("roomMoved")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::RoomMoved) +; + } + GeneratedField::MediaSectionsRequirement => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("mediaSectionsRequirement")); + } + message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::MediaSectionsRequirement) ; } GeneratedField::__SkipField__ => { @@ -32197,6 +33255,12 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { len += 1; } + if !self.layers.is_empty() { + len += 1; + } + if self.video_layer_mode != 0 { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodec", len)?; if !self.codec.is_empty() { struct_ser.serialize_field("codec", &self.codec)?; @@ -32204,6 +33268,14 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; } + if !self.layers.is_empty() { + struct_ser.serialize_field("layers", &self.layers)?; + } + if self.video_layer_mode != 0 { + let v = video_layer::Mode::try_from(self.video_layer_mode) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; + struct_ser.serialize_field("videoLayerMode", &v)?; + } struct_ser.end() } } @@ -32216,12 +33288,17 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { const FIELDS: &[&str] = &[ "codec", "cid", + "layers", + "video_layer_mode", + "videoLayerMode", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Codec, Cid, + Layers, + VideoLayerMode, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32246,6 +33323,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { match value { "codec" => Ok(GeneratedField::Codec), "cid" => Ok(GeneratedField::Cid), + "layers" => Ok(GeneratedField::Layers), + "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), _ => Ok(GeneratedField::__SkipField__), } } @@ -32267,6 +33346,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { { let mut codec__ = None; let mut cid__ = None; + let mut layers__ = None; + let mut video_layer_mode__ = None; while let Some(k) = map_.next_key()? 
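                    // `video_layer_mode` is stored on the struct as a plain i32; the
                    // serializer above round-trips it through video_layer::Mode::try_from,
                    // so an out-of-range value becomes a serde error instead of an invented
                    // enum name. User code typically performs the same conversion, e.g.:
                    //
                    //     let mode = video_layer::Mode::try_from(codec.video_layer_mode)
                    //         .unwrap_or(video_layer::Mode::Unused); // assumption: treat unknown as MODE_UNUSED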
{ match k { GeneratedField::Codec => { @@ -32281,6 +33362,18 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { } cid__ = Some(map_.next_value()?); } + GeneratedField::Layers => { + if layers__.is_some() { + return Err(serde::de::Error::duplicate_field("layers")); + } + layers__ = Some(map_.next_value()?); + } + GeneratedField::VideoLayerMode => { + if video_layer_mode__.is_some() { + return Err(serde::de::Error::duplicate_field("videoLayerMode")); + } + video_layer_mode__ = Some(map_.next_value::()? as i32); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32289,6 +33382,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { Ok(SimulcastCodec { codec: codec__.unwrap_or_default(), cid: cid__.unwrap_or_default(), + layers: layers__.unwrap_or_default(), + video_layer_mode: video_layer_mode__.unwrap_or_default(), }) } } @@ -32315,6 +33410,12 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { len += 1; } + if self.video_layer_mode != 0 { + len += 1; + } + if !self.sdp_cid.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodecInfo", len)?; if !self.mime_type.is_empty() { struct_ser.serialize_field("mimeType", &self.mime_type)?; @@ -32328,6 +33429,14 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { struct_ser.serialize_field("layers", &self.layers)?; } + if self.video_layer_mode != 0 { + let v = video_layer::Mode::try_from(self.video_layer_mode) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; + struct_ser.serialize_field("videoLayerMode", &v)?; + } + if !self.sdp_cid.is_empty() { + struct_ser.serialize_field("sdpCid", &self.sdp_cid)?; + } struct_ser.end() } } @@ -32343,6 +33452,10 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid", "cid", "layers", + "video_layer_mode", + "videoLayerMode", + "sdp_cid", + "sdpCid", ]; #[allow(clippy::enum_variant_names)] @@ -32351,6 +33464,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { Mid, Cid, Layers, + VideoLayerMode, + SdpCid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32377,6 +33492,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid" => Ok(GeneratedField::Mid), "cid" => Ok(GeneratedField::Cid), "layers" => Ok(GeneratedField::Layers), + "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), + "sdpCid" | "sdp_cid" => Ok(GeneratedField::SdpCid), _ => Ok(GeneratedField::__SkipField__), } } @@ -32400,6 +33517,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { let mut mid__ = None; let mut cid__ = None; let mut layers__ = None; + let mut video_layer_mode__ = None; + let mut sdp_cid__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::MimeType => { @@ -32426,6 +33545,18 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { } layers__ = Some(map_.next_value()?); } + GeneratedField::VideoLayerMode => { + if video_layer_mode__.is_some() { + return Err(serde::de::Error::duplicate_field("videoLayerMode")); + } + video_layer_mode__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::SdpCid => { + if sdp_cid__.is_some() { + return Err(serde::de::Error::duplicate_field("sdpCid")); + } + sdp_cid__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32436,6 +33567,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { mid: mid__.unwrap_or_default(), cid: cid__.unwrap_or_default(), layers: layers__.unwrap_or_default(), + video_layer_mode: video_layer_mode__.unwrap_or_default(), + sdp_cid: sdp_cid__.unwrap_or_default(), }) } } @@ -34961,8 +36094,115 @@ impl<'de> serde::Deserialize<'de> for TimedVersion { E: serde::de::Error, { match value { - "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), - "ticks" => Ok(GeneratedField::Ticks), + "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), + "ticks" => Ok(GeneratedField::Ticks), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TimedVersion; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.TimedVersion") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut unix_micro__ = None; + let mut ticks__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::UnixMicro => { + if unix_micro__.is_some() { + return Err(serde::de::Error::duplicate_field("unixMicro")); + } + unix_micro__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Ticks => { + if ticks__.is_some() { + return Err(serde::de::Error::duplicate_field("ticks")); + } + ticks__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(TimedVersion { + unix_micro: unix_micro__.unwrap_or_default(), + ticks: ticks__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TokenPagination { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.token.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.TokenPagination", len)?; + if !self.token.is_empty() { + struct_ser.serialize_field("token", &self.token)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TokenPagination { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "token", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Token, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "token" => 
Ok(GeneratedField::Token), _ => Ok(GeneratedField::__SkipField__), } } @@ -34972,48 +36212,36 @@ impl<'de> serde::Deserialize<'de> for TimedVersion { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TimedVersion; + type Value = TokenPagination; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TimedVersion") + formatter.write_str("struct livekit.TokenPagination") } - fn visit_map(self, mut map_: V) -> std::result::Result + fn visit_map(self, mut map_: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut unix_micro__ = None; - let mut ticks__ = None; + let mut token__ = None; while let Some(k) = map_.next_key()? { match k { - GeneratedField::UnixMicro => { - if unix_micro__.is_some() { - return Err(serde::de::Error::duplicate_field("unixMicro")); - } - unix_micro__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Ticks => { - if ticks__.is_some() { - return Err(serde::de::Error::duplicate_field("ticks")); + GeneratedField::Token => { + if token__.is_some() { + return Err(serde::de::Error::duplicate_field("token")); } - ticks__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; + token__ = Some(map_.next_value()?); } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } } } - Ok(TimedVersion { - unix_micro: unix_micro__.unwrap_or_default(), - ticks: ticks__.unwrap_or_default(), + Ok(TokenPagination { + token: token__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("livekit.TokenPagination", FIELDS, GeneratedVisitor) } } impl serde::Serialize for TrackCompositeEgressRequest { @@ -40417,6 +41645,12 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { len += 1; } + if self.spatial_layer != 0 { + len += 1; + } + if !self.rid.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.VideoLayer", len)?; if self.quality != 0 { let v = VideoQuality::try_from(self.quality) @@ -40435,6 +41669,12 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { struct_ser.serialize_field("ssrc", &self.ssrc)?; } + if self.spatial_layer != 0 { + struct_ser.serialize_field("spatialLayer", &self.spatial_layer)?; + } + if !self.rid.is_empty() { + struct_ser.serialize_field("rid", &self.rid)?; + } struct_ser.end() } } @@ -40450,6 +41690,9 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height", "bitrate", "ssrc", + "spatial_layer", + "spatialLayer", + "rid", ]; #[allow(clippy::enum_variant_names)] @@ -40459,6 +41702,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Height, Bitrate, Ssrc, + SpatialLayer, + Rid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -40486,6 +41731,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height" => Ok(GeneratedField::Height), "bitrate" => Ok(GeneratedField::Bitrate), "ssrc" => Ok(GeneratedField::Ssrc), + "spatialLayer" | "spatial_layer" => Ok(GeneratedField::SpatialLayer), + "rid" => Ok(GeneratedField::Rid), _ => Ok(GeneratedField::__SkipField__), } } @@ -40510,6 +41757,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { let mut height__ = None; let mut bitrate__ = None; let mut ssrc__ = None; + let mut spatial_layer__ = None; + let mut rid__ = None; while let Some(k) = map_.next_key()? 
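                    // The new `spatial_layer` and `rid` fields land alongside the existing
                    // geometry/bitrate ones. An illustrative literal (placeholder values;
                    // the "f"/"h"/"q" rid naming is a common simulcast convention, not
                    // something the proto mandates):
                    //
                    //     let layer = VideoLayer {
                    //         quality: VideoQuality::High as i32,
                    //         width: 1280, height: 720, bitrate: 1_700_000, ssrc: 0,
                    //         spatial_layer: 2, rid: "f".to_string(),
                    //     };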
{ match k { GeneratedField::Quality => { @@ -40550,6 +41799,20 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::SpatialLayer => { + if spatial_layer__.is_some() { + return Err(serde::de::Error::duplicate_field("spatialLayer")); + } + spatial_layer__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Rid => { + if rid__.is_some() { + return Err(serde::de::Error::duplicate_field("rid")); + } + rid__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -40561,12 +41824,88 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { height: height__.unwrap_or_default(), bitrate: bitrate__.unwrap_or_default(), ssrc: ssrc__.unwrap_or_default(), + spatial_layer: spatial_layer__.unwrap_or_default(), + rid: rid__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.VideoLayer", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for video_layer::Mode { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::Unused => "MODE_UNUSED", + Self::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", + Self::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for video_layer::Mode { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "MODE_UNUSED", + "ONE_SPATIAL_LAYER_PER_STREAM", + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = video_layer::Mode; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "MODE_UNUSED" => Ok(video_layer::Mode::Unused), + "ONE_SPATIAL_LAYER_PER_STREAM" => Ok(video_layer::Mode::OneSpatialLayerPerStream), + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Ok(video_layer::Mode::MultipleSpatialLayersPerStream), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} impl serde::Serialize for VideoQuality { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -41793,3 +43132,193 @@ impl<'de> serde::Deserialize<'de> for WorkerStatus { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for WrappedJoinRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.compression != 0 { + len += 1; + } + if !self.join_request.is_empty() { + len 
+= 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.WrappedJoinRequest", len)?; + if self.compression != 0 { + let v = wrapped_join_request::Compression::try_from(self.compression) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.compression)))?; + struct_ser.serialize_field("compression", &v)?; + } + if !self.join_request.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("joinRequest", pbjson::private::base64::encode(&self.join_request).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WrappedJoinRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "compression", + "join_request", + "joinRequest", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Compression, + JoinRequest, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "compression" => Ok(GeneratedField::Compression), + "joinRequest" | "join_request" => Ok(GeneratedField::JoinRequest), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WrappedJoinRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.WrappedJoinRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut compression__ = None; + let mut join_request__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Compression => { + if compression__.is_some() { + return Err(serde::de::Error::duplicate_field("compression")); + } + compression__ = Some(map_.next_value::()? 
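                            // Building the wrapper in user code mirrors this deserializer:
                            // the inner JoinRequest is protobuf-encoded, optionally gzipped,
                            // then tagged with the compression used. Sketch only — flate2 and
                            // prost::Message are assumptions, not dependencies this patch declares:
                            //
                            //     let raw = join_req.encode_to_vec();
                            //     let mut gz = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
                            //     std::io::Write::write_all(&mut gz, &raw)?;
                            //     let wrapped = WrappedJoinRequest {
                            //         compression: wrapped_join_request::Compression::Gzip as i32,
                            //         join_request: gz.finish()?,
                            //     };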
as i32); + } + GeneratedField::JoinRequest => { + if join_request__.is_some() { + return Err(serde::de::Error::duplicate_field("joinRequest")); + } + join_request__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WrappedJoinRequest { + compression: compression__.unwrap_or_default(), + join_request: join_request__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.WrappedJoinRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for wrapped_join_request::Compression { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::None => "NONE", + Self::Gzip => "GZIP", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for wrapped_join_request::Compression { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "NONE", + "GZIP", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = wrapped_join_request::Compression; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "NONE" => Ok(wrapped_join_request::Compression::None), + "GZIP" => Ok(wrapped_join_request::Compression::Gzip), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} From 58382963f51446050f8651b57f67c47798f2f5a9 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 29 Oct 2025 22:25:04 -0700 Subject: [PATCH 08/39] remove UYVY --- examples/local_video/src/publisher.rs | 68 +- livekit-protocol/src/livekit.rs | 251 +++- livekit-protocol/src/livekit.serde.rs | 1579 ++++++++++++++++++++++++- 3 files changed, 1815 insertions(+), 83 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 5f3f6763e..ee130b179 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -123,7 +123,7 @@ async fn main() -> Result<()> { let index = CameraIndex::Index(args.camera_index as u32); let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); let mut camera = Camera::new(index, requested)?; - // Try raw YUYV first (cheaper than MJPEG), then UYVY, fall back to MJPEG + // Try raw YUYV first (cheaper than MJPEG), fall back to MJPEG let wanted = CameraFormat::new( Resolution::new(args.width, args.height), FrameFormat::YUYV, @@ -131,23 +131,13 @@ async fn main() -> Result<()> { ); let mut using_fmt = "YUYV"; if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) { - // Try UYVY as an 
alternative packed 4:2:2 format - let alt_uyvy = CameraFormat::new( + let alt = CameraFormat::new( Resolution::new(args.width, args.height), - FrameFormat::UYVY, + FrameFormat::MJPEG, args.fps, ); - if camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt_uyvy))).is_ok() { - using_fmt = "UYVY"; - } else { - let alt = CameraFormat::new( - Resolution::new(args.width, args.height), - FrameFormat::MJPEG, - args.fps, - ); - using_fmt = "MJPEG"; - let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); - } + using_fmt = "MJPEG"; + let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); } camera.open_stream()?; let fmt = camera.camera_format(); @@ -182,7 +172,6 @@ async fn main() -> Result<()> { // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; let is_yuyv = using_fmt == "YUYV"; - let is_uyvy = using_fmt == "UYVY"; // Accurate pacing using absolute schedule (no drift) let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps)); @@ -216,41 +205,26 @@ async fn main() -> Result<()> { let t1 = Instant::now(); let (stride_y, stride_u, stride_v) = frame.buffer.strides(); let (data_y, data_u, data_v) = frame.buffer.data_mut(); - // Fast path for YUYV/UYVY: convert directly to I420 via libyuv - let t2 = if is_yuyv || is_uyvy { + // Fast path for YUYV: convert directly to I420 via libyuv + let t2 = if is_yuyv { let src = frame_buf.buffer(); let src_bytes = src.as_ref(); - let src_stride = (width * 2) as i32; // packed 4:2:2 - let t2_local = t1; // no decode step in packed YUV path + let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 + let t2_local = t1; // no decode step in YUYV path unsafe { // returns 0 on success - if is_yuyv { - let _ = yuv_sys::rs_YUY2ToI420( - src_bytes.as_ptr(), - src_stride, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } else { - let _ = yuv_sys::rs_UYVYToI420( - src_bytes.as_ptr(), - src_stride, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } + let _ = yuv_sys::rs_YUY2ToI420( + src_bytes.as_ptr(), + src_stride, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); } t2_local } else { diff --git a/livekit-protocol/src/livekit.rs b/livekit-protocol/src/livekit.rs index e39604819..676becb62 100644 --- a/livekit-protocol/src/livekit.rs +++ b/livekit-protocol/src/livekit.rs @@ -199,6 +199,12 @@ pub struct Pagination { #[prost(int32, tag="2")] pub limit: i32, } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TokenPagination { + #[prost(string, tag="1")] + pub token: ::prost::alloc::string::String, +} /// ListUpdate is used for updated APIs where 'repeated string' field is modified. 
#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -206,6 +212,15 @@ pub struct ListUpdate { /// set the field to a new list #[prost(string, repeated, tag="1")] pub set: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// append items to a list, avoiding duplicates + #[prost(string, repeated, tag="2")] + pub add: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// delete items from a list + #[prost(string, repeated, tag="3")] + pub del: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// sets the list to an empty list + #[prost(bool, tag="4")] + pub clear: bool, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -484,6 +499,14 @@ pub struct SimulcastCodecInfo { pub cid: ::prost::alloc::string::String, #[prost(message, repeated, tag="4")] pub layers: ::prost::alloc::vec::Vec, + #[prost(enumeration="video_layer::Mode", tag="5")] + pub video_layer_mode: i32, + /// cid (client side id for track) could be different between + /// signalling (AddTrackRequest) and SDP offer. This field + /// will be populated only if it is different to avoid + /// duplication and keep the representation concise. + #[prost(string, tag="6")] + pub sdp_cid: ::prost::alloc::string::String, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -498,20 +521,30 @@ pub struct TrackInfo { pub muted: bool, /// original width of video (unset for audio) /// clients may receive a lower resolution version with simulcast + #[deprecated] #[prost(uint32, tag="5")] pub width: u32, /// original height of video (unset for audio) + #[deprecated] #[prost(uint32, tag="6")] pub height: u32, /// true if track is simulcasted + /// + /// see `video_layer_mode` in `codecs` + #[deprecated] #[prost(bool, tag="7")] pub simulcast: bool, /// true if DTX (Discontinuous Transmission) is disabled for audio + /// + /// deprecated in favor of `audio_features` + #[deprecated] #[prost(bool, tag="8")] pub disable_dtx: bool, /// source of media #[prost(enumeration="TrackSource", tag="9")] pub source: i32, + /// see `codecs` for layers of individual codec + #[deprecated] #[prost(message, repeated, tag="10")] pub layers: ::prost::alloc::vec::Vec, /// mime type of codec @@ -521,6 +554,8 @@ pub struct TrackInfo { pub mid: ::prost::alloc::string::String, #[prost(message, repeated, tag="13")] pub codecs: ::prost::alloc::vec::Vec, + /// deprecated in favor of `audio_features` + #[deprecated] #[prost(bool, tag="14")] pub stereo: bool, /// true if RED (Redundant Encoding) is disabled for audio @@ -553,6 +588,42 @@ pub struct VideoLayer { pub bitrate: u32, #[prost(uint32, tag="5")] pub ssrc: u32, + #[prost(int32, tag="6")] + pub spatial_layer: i32, + #[prost(string, tag="7")] + pub rid: ::prost::alloc::string::String, +} +/// Nested message and enum types in `VideoLayer`. +pub mod video_layer { + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] + #[repr(i32)] + pub enum Mode { + Unused = 0, + OneSpatialLayerPerStream = 1, + MultipleSpatialLayersPerStream = 2, + } + impl Mode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Mode::Unused => "MODE_UNUSED", + Mode::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", + Mode::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "MODE_UNUSED" => Some(Self::Unused), + "ONE_SPATIAL_LAYER_PER_STREAM" => Some(Self::OneSpatialLayerPerStream), + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Some(Self::MultipleSpatialLayersPerStream), + _ => None, + } + } + } } /// new DataPacket API #[allow(clippy::derive_partial_eq_without_eq)] @@ -573,7 +644,7 @@ pub struct DataPacket { /// sid of the user that sent the message #[prost(string, tag="17")] pub participant_sid: ::prost::alloc::string::String, - #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15")] + #[prost(oneof="data_packet::Value", tags="2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 18")] pub value: ::core::option::Option, } /// Nested message and enum types in `DataPacket`. @@ -631,6 +702,50 @@ pub mod data_packet { StreamChunk(super::data_stream::Chunk), #[prost(message, tag="15")] StreamTrailer(super::data_stream::Trailer), + #[prost(message, tag="18")] + EncryptedPacket(super::EncryptedPacket), + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EncryptedPacket { + #[prost(enumeration="encryption::Type", tag="1")] + pub encryption_type: i32, + #[prost(bytes="vec", tag="2")] + pub iv: ::prost::alloc::vec::Vec, + #[prost(uint32, tag="3")] + pub key_index: u32, + /// This is an encrypted EncryptedPacketPayload message representation + #[prost(bytes="vec", tag="4")] + pub encrypted_value: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EncryptedPacketPayload { + #[prost(oneof="encrypted_packet_payload::Value", tags="1, 3, 4, 5, 6, 7, 8, 9")] + pub value: ::core::option::Option, +} +/// Nested message and enum types in `EncryptedPacketPayload`. +pub mod encrypted_packet_payload { + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Value { + #[prost(message, tag="1")] + User(super::UserPacket), + #[prost(message, tag="3")] + ChatMessage(super::ChatMessage), + #[prost(message, tag="4")] + RpcRequest(super::RpcRequest), + #[prost(message, tag="5")] + RpcAck(super::RpcAck), + #[prost(message, tag="6")] + RpcResponse(super::RpcResponse), + #[prost(message, tag="7")] + StreamHeader(super::data_stream::Header), + #[prost(message, tag="8")] + StreamChunk(super::data_stream::Chunk), + #[prost(message, tag="9")] + StreamTrailer(super::data_stream::Trailer), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -675,7 +790,7 @@ pub struct UserPacket { /// topic under which the message was published #[prost(string, optional, tag="4")] pub topic: ::core::option::Option<::prost::alloc::string::String>, - /// Unique ID to indentify the message + /// Unique ID to identify the message #[prost(string, optional, tag="8")] pub id: ::core::option::Option<::prost::alloc::string::String>, /// start and end time allow relating the message to specific media time @@ -898,6 +1013,7 @@ pub mod client_info { UnityWeb = 11, Node = 12, Unreal = 13, + Esp32 = 14, } impl Sdk { /// String value of the enum field names used in the ProtoBuf definition. 
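An EncryptedPacket (added above) wraps a protobuf-encoded EncryptedPacketPayload after encryption. A minimal sketch of assembling the envelope — it assumes the existing encryption::Type::Gcm variant, and encrypt_with_key is a hypothetical helper standing in for the actual AES-GCM step:

    use prost::Message;

    fn seal(payload: &EncryptedPacketPayload, iv: Vec<u8>, key_index: u32) -> EncryptedPacket {
        // Protobuf-encode the payload first; the ciphertext of these bytes is
        // what goes into `encrypted_value`.
        let plaintext = payload.encode_to_vec();
        EncryptedPacket {
            encryption_type: encryption::Type::Gcm as i32,
            iv,
            key_index,
            encrypted_value: encrypt_with_key(key_index, &plaintext), // hypothetical helper
        }
    }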
@@ -920,6 +1036,7 @@ pub mod client_info { Sdk::UnityWeb => "UNITY_WEB", Sdk::Node => "NODE", Sdk::Unreal => "UNREAL", + Sdk::Esp32 => "ESP32", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -939,6 +1056,7 @@ pub mod client_info { "UNITY_WEB" => Some(Self::UnityWeb), "NODE" => Some(Self::Node), "UNREAL" => Some(Self::Unreal), + "ESP32" => Some(Self::Esp32), _ => None, } } @@ -1231,7 +1349,8 @@ pub mod data_stream { /// only populated for finite streams, if it's a stream of unknown size this stays empty #[prost(uint64, optional, tag="5")] pub total_length: ::core::option::Option, - /// defaults to NONE + /// this is set on the DataPacket + #[deprecated] #[prost(enumeration="super::encryption::Type", tag="7")] pub encryption_type: i32, /// user defined attributes map that can carry additional info @@ -1267,7 +1386,8 @@ pub mod data_stream { /// a version indicating that this chunk_index has been retroactively modified and the original one needs to be replaced #[prost(int32, tag="4")] pub version: i32, - /// optional, initialization vector for AES-GCM encryption + /// this is set on the DataPacket + #[deprecated] #[prost(bytes="vec", optional, tag="5")] pub iv: ::core::option::Option<::prost::alloc::vec::Vec>, } @@ -2188,6 +2308,12 @@ pub struct S3Upload { pub secret: ::prost::alloc::string::String, #[prost(string, tag="11")] pub session_token: ::prost::alloc::string::String, + /// ARN of the role to assume for file upload. Egress will make an AssumeRole API call using the provided access_key and secret to assume that role. On LiveKit cloud, this is only available on accounts that have the feature enabled + #[prost(string, tag="12")] + pub assume_role_arn: ::prost::alloc::string::String, + /// ExternalID to use when assuming role for upload + #[prost(string, tag="13")] + pub assume_role_external_id: ::prost::alloc::string::String, #[prost(string, tag="3")] pub region: ::prost::alloc::string::String, #[prost(string, tag="4")] @@ -2874,10 +3000,10 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Message { - /// initial join exchange, for publisher + /// participant offer for publisher #[prost(message, tag="1")] Offer(super::SessionDescription), - /// participant answering publisher offer + /// participant answering subscriber offer #[prost(message, tag="2")] Answer(super::SessionDescription), #[prost(message, tag="3")] @@ -2930,7 +3056,7 @@ pub mod signal_request { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignalResponse { - #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24")] + #[prost(oneof="signal_response::Message", tags="1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25")] pub message: ::core::option::Option, } /// Nested message and enum types in `SignalResponse`. 
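The assume-role fields added to S3Upload above sit alongside the static credentials; Egress calls AssumeRole with the provided key/secret before uploading. A minimal construction, with placeholder values throughout:

    let upload = S3Upload {
        access_key: "AKIA...".to_string(),
        secret: "...".to_string(),
        assume_role_arn: "arn:aws:iam::123456789012:role/egress-upload".to_string(),
        assume_role_external_id: "egress-external-id".to_string(),
        region: "us-east-1".to_string(),
        bucket: "recordings".to_string(),
        ..Default::default()
    };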
@@ -3010,6 +3136,9 @@ pub mod signal_response { /// notify to the participant when they have been moved to a new room #[prost(message, tag="24")] RoomMoved(super::RoomMovedResponse), + /// notify number of required media sections to satisfy subscribed tracks + #[prost(message, tag="25")] + MediaSectionsRequirement(super::MediaSectionsRequirement), } } #[allow(clippy::derive_partial_eq_without_eq)] @@ -3019,6 +3148,10 @@ pub struct SimulcastCodec { pub codec: ::prost::alloc::string::String, #[prost(string, tag="2")] pub cid: ::prost::alloc::string::String, + #[prost(message, repeated, tag="4")] + pub layers: ::prost::alloc::vec::Vec, + #[prost(enumeration="video_layer::Mode", tag="5")] + pub video_layer_mode: i32, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -3030,7 +3163,6 @@ pub struct AddTrackRequest { pub name: ::prost::alloc::string::String, #[prost(enumeration="TrackType", tag="3")] pub r#type: i32, - /// to be deprecated in favor of layers #[prost(uint32, tag="4")] pub width: u32, #[prost(uint32, tag="5")] @@ -3434,7 +3566,9 @@ pub struct RoomMovedResponse { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncState { - /// last subscribe answer before reconnecting + /// last subscribe/publish answer before reconnecting + /// subscribe answer if using dual peer connection + /// publish answer if using single peer connection #[prost(message, optional, tag="1")] pub answer: ::core::option::Option, #[prost(message, optional, tag="2")] @@ -3443,7 +3577,9 @@ pub struct SyncState { pub publish_tracks: ::prost::alloc::vec::Vec, #[prost(message, repeated, tag="4")] pub data_channels: ::prost::alloc::vec::Vec, - /// last received server side offer before reconnecting + /// last received server side offer/sent client side offer before reconnecting + /// received server side offer if using dual peer connection + /// sent client side offer if using single peer connection #[prost(message, optional, tag="5")] pub offer: ::core::option::Option, #[prost(string, repeated, tag="6")] @@ -3603,6 +3739,92 @@ pub struct TrackSubscribed { #[prost(string, tag="1")] pub track_sid: ::prost::alloc::string::String, } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConnectionSettings { + #[prost(bool, tag="1")] + pub auto_subscribe: bool, + #[prost(bool, tag="2")] + pub adaptive_stream: bool, + #[prost(bool, optional, tag="3")] + pub subscriber_allow_pause: ::core::option::Option, + #[prost(bool, tag="4")] + pub disable_ice_lite: bool, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JoinRequest { + #[prost(message, optional, tag="1")] + pub client_info: ::core::option::Option, + #[prost(message, optional, tag="2")] + pub connection_settings: ::core::option::Option, + /// if not empty, will overwrite `metadata` in token + #[prost(string, tag="3")] + pub metadata: ::prost::alloc::string::String, + /// will set keys provided via this + /// will overwrite if the same key is in the token + /// will not delete keys from token if there is a key collision and this sets that key to empty value + #[prost(map="string, string", tag="4")] + pub participant_attributes: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, + #[prost(message, repeated, tag="5")] + pub add_track_requests: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag="6")] + pub 
publisher_offer: ::core::option::Option, + #[prost(bool, tag="7")] + pub reconnect: bool, + #[prost(enumeration="ReconnectReason", tag="8")] + pub reconnect_reason: i32, + #[prost(string, tag="9")] + pub participant_sid: ::prost::alloc::string::String, + #[prost(message, optional, tag="10")] + pub sync_state: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WrappedJoinRequest { + #[prost(enumeration="wrapped_join_request::Compression", tag="1")] + pub compression: i32, + /// marshalled JoinRequest + potentially compressed + #[prost(bytes="vec", tag="2")] + pub join_request: ::prost::alloc::vec::Vec, +} +/// Nested message and enum types in `WrappedJoinRequest`. +pub mod wrapped_join_request { + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] + #[repr(i32)] + pub enum Compression { + None = 0, + Gzip = 1, + } + impl Compression { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Compression::None => "NONE", + Compression::Gzip => "GZIP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NONE" => Some(Self::None), + "GZIP" => Some(Self::Gzip), + _ => None, + } + } + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MediaSectionsRequirement { + #[prost(uint32, tag="1")] + pub num_audios: u32, + #[prost(uint32, tag="2")] + pub num_videos: u32, +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum SignalTarget { @@ -3722,6 +3944,10 @@ pub struct JobState { pub updated_at: i64, #[prost(string, tag="6")] pub participant_identity: ::prost::alloc::string::String, + #[prost(string, tag="7")] + pub worker_id: ::prost::alloc::string::String, + #[prost(string, tag="8")] + pub agent_id: ::prost::alloc::string::String, } /// from Worker to Server #[allow(clippy::derive_partial_eq_without_eq)] @@ -4290,6 +4516,9 @@ pub struct RoomConfiguration { /// limit number of participants that can be in a room, excluding Egress and Ingress participants #[prost(uint32, tag="4")] pub max_participants: u32, + /// metadata of room + #[prost(string, tag="11")] + pub metadata: ::prost::alloc::string::String, /// egress #[prost(message, optional, tag="5")] pub egress: ::core::option::Option, @@ -4751,7 +4980,7 @@ impl IngressVideoEncodingPreset { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WebhookEvent { - /// one of room_started, room_finished, participant_joined, participant_left, + /// one of room_started, room_finished, participant_joined, participant_left, participant_connection_aborted, /// track_published, track_unpublished, egress_started, egress_updated, egress_ended, /// ingress_started, ingress_ended #[prost(string, tag="1")] diff --git a/livekit-protocol/src/livekit.serde.rs b/livekit-protocol/src/livekit.serde.rs index 5c411a433..3c79f6c7a 100644 --- a/livekit-protocol/src/livekit.serde.rs +++ b/livekit-protocol/src/livekit.serde.rs @@ -2848,6 +2848,7 @@ impl serde::Serialize for client_info::Sdk { Self::UnityWeb => "UNITY_WEB", Self::Node => 
"NODE", Self::Unreal => "UNREAL", + Self::Esp32 => "ESP32", }; serializer.serialize_str(variant) } @@ -2873,6 +2874,7 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB", "NODE", "UNREAL", + "ESP32", ]; struct GeneratedVisitor; @@ -2927,6 +2929,7 @@ impl<'de> serde::Deserialize<'de> for client_info::Sdk { "UNITY_WEB" => Ok(client_info::Sdk::UnityWeb), "NODE" => Ok(client_info::Sdk::Node), "UNREAL" => Ok(client_info::Sdk::Unreal), + "ESP32" => Ok(client_info::Sdk::Esp32), _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), } } @@ -3353,6 +3356,156 @@ impl<'de> serde::Deserialize<'de> for ConnectionQualityUpdate { deserializer.deserialize_struct("livekit.ConnectionQualityUpdate", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for ConnectionSettings { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.auto_subscribe { + len += 1; + } + if self.adaptive_stream { + len += 1; + } + if self.subscriber_allow_pause.is_some() { + len += 1; + } + if self.disable_ice_lite { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.ConnectionSettings", len)?; + if self.auto_subscribe { + struct_ser.serialize_field("autoSubscribe", &self.auto_subscribe)?; + } + if self.adaptive_stream { + struct_ser.serialize_field("adaptiveStream", &self.adaptive_stream)?; + } + if let Some(v) = self.subscriber_allow_pause.as_ref() { + struct_ser.serialize_field("subscriberAllowPause", v)?; + } + if self.disable_ice_lite { + struct_ser.serialize_field("disableIceLite", &self.disable_ice_lite)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for ConnectionSettings { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "auto_subscribe", + "autoSubscribe", + "adaptive_stream", + "adaptiveStream", + "subscriber_allow_pause", + "subscriberAllowPause", + "disable_ice_lite", + "disableIceLite", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + AutoSubscribe, + AdaptiveStream, + SubscriberAllowPause, + DisableIceLite, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "autoSubscribe" | "auto_subscribe" => Ok(GeneratedField::AutoSubscribe), + "adaptiveStream" | "adaptive_stream" => Ok(GeneratedField::AdaptiveStream), + "subscriberAllowPause" | "subscriber_allow_pause" => Ok(GeneratedField::SubscriberAllowPause), + "disableIceLite" | "disable_ice_lite" => Ok(GeneratedField::DisableIceLite), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = ConnectionSettings; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.ConnectionSettings") + } + + fn 
visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut auto_subscribe__ = None; + let mut adaptive_stream__ = None; + let mut subscriber_allow_pause__ = None; + let mut disable_ice_lite__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::AutoSubscribe => { + if auto_subscribe__.is_some() { + return Err(serde::de::Error::duplicate_field("autoSubscribe")); + } + auto_subscribe__ = Some(map_.next_value()?); + } + GeneratedField::AdaptiveStream => { + if adaptive_stream__.is_some() { + return Err(serde::de::Error::duplicate_field("adaptiveStream")); + } + adaptive_stream__ = Some(map_.next_value()?); + } + GeneratedField::SubscriberAllowPause => { + if subscriber_allow_pause__.is_some() { + return Err(serde::de::Error::duplicate_field("subscriberAllowPause")); + } + subscriber_allow_pause__ = map_.next_value()?; + } + GeneratedField::DisableIceLite => { + if disable_ice_lite__.is_some() { + return Err(serde::de::Error::duplicate_field("disableIceLite")); + } + disable_ice_lite__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(ConnectionSettings { + auto_subscribe: auto_subscribe__.unwrap_or_default(), + adaptive_stream: adaptive_stream__.unwrap_or_default(), + subscriber_allow_pause: subscriber_allow_pause__, + disable_ice_lite: disable_ice_lite__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.ConnectionSettings", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for CreateAgentDispatchRequest { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -5586,6 +5739,9 @@ impl serde::Serialize for DataPacket { data_packet::Value::StreamTrailer(v) => { struct_ser.serialize_field("streamTrailer", v)?; } + data_packet::Value::EncryptedPacket(v) => { + struct_ser.serialize_field("encryptedPacket", v)?; + } } } struct_ser.end() @@ -5626,6 +5782,8 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamChunk", "stream_trailer", "streamTrailer", + "encrypted_packet", + "encryptedPacket", ]; #[allow(clippy::enum_variant_names)] @@ -5647,6 +5805,7 @@ impl<'de> serde::Deserialize<'de> for DataPacket { StreamHeader, StreamChunk, StreamTrailer, + EncryptedPacket, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -5686,6 +5845,7 @@ impl<'de> serde::Deserialize<'de> for DataPacket { "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), + "encryptedPacket" | "encrypted_packet" => Ok(GeneratedField::EncryptedPacket), _ => Ok(GeneratedField::__SkipField__), } } @@ -5827,6 +5987,13 @@ impl<'de> serde::Deserialize<'de> for DataPacket { return Err(serde::de::Error::duplicate_field("streamTrailer")); } value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::StreamTrailer) +; + } + GeneratedField::EncryptedPacket => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptedPacket")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(data_packet::Value::EncryptedPacket) ; } GeneratedField::__SkipField__ => { @@ -9211,6 +9378,365 @@ impl<'de> serde::Deserialize<'de> for EncodingOptionsPreset { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for EncryptedPacket { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> 
std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.encryption_type != 0 { + len += 1; + } + if !self.iv.is_empty() { + len += 1; + } + if self.key_index != 0 { + len += 1; + } + if !self.encrypted_value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacket", len)?; + if self.encryption_type != 0 { + let v = encryption::Type::try_from(self.encryption_type) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.encryption_type)))?; + struct_ser.serialize_field("encryptionType", &v)?; + } + if !self.iv.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("iv", pbjson::private::base64::encode(&self.iv).as_str())?; + } + if self.key_index != 0 { + struct_ser.serialize_field("keyIndex", &self.key_index)?; + } + if !self.encrypted_value.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("encryptedValue", pbjson::private::base64::encode(&self.encrypted_value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for EncryptedPacket { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "encryption_type", + "encryptionType", + "iv", + "key_index", + "keyIndex", + "encrypted_value", + "encryptedValue", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + EncryptionType, + Iv, + KeyIndex, + EncryptedValue, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "encryptionType" | "encryption_type" => Ok(GeneratedField::EncryptionType), + "iv" => Ok(GeneratedField::Iv), + "keyIndex" | "key_index" => Ok(GeneratedField::KeyIndex), + "encryptedValue" | "encrypted_value" => Ok(GeneratedField::EncryptedValue), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = EncryptedPacket; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.EncryptedPacket") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut encryption_type__ = None; + let mut iv__ = None; + let mut key_index__ = None; + let mut encrypted_value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::EncryptionType => { + if encryption_type__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptionType")); + } + encryption_type__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::Iv => { + if iv__.is_some() { + return Err(serde::de::Error::duplicate_field("iv")); + } + iv__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::KeyIndex => { + if key_index__.is_some() { + return Err(serde::de::Error::duplicate_field("keyIndex")); + } + key_index__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::EncryptedValue => { + if encrypted_value__.is_some() { + return Err(serde::de::Error::duplicate_field("encryptedValue")); + } + encrypted_value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(EncryptedPacket { + encryption_type: encryption_type__.unwrap_or_default(), + iv: iv__.unwrap_or_default(), + key_index: key_index__.unwrap_or_default(), + encrypted_value: encrypted_value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.EncryptedPacket", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for EncryptedPacketPayload { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.EncryptedPacketPayload", len)?; + if let Some(v) = self.value.as_ref() { + match v { + encrypted_packet_payload::Value::User(v) => { + struct_ser.serialize_field("user", v)?; + } + encrypted_packet_payload::Value::ChatMessage(v) => { + struct_ser.serialize_field("chatMessage", v)?; + } + encrypted_packet_payload::Value::RpcRequest(v) => { + struct_ser.serialize_field("rpcRequest", v)?; + } + encrypted_packet_payload::Value::RpcAck(v) => { + struct_ser.serialize_field("rpcAck", v)?; + } + encrypted_packet_payload::Value::RpcResponse(v) => { + struct_ser.serialize_field("rpcResponse", v)?; + } + encrypted_packet_payload::Value::StreamHeader(v) => { + struct_ser.serialize_field("streamHeader", v)?; + } + encrypted_packet_payload::Value::StreamChunk(v) => { + struct_ser.serialize_field("streamChunk", v)?; + } + encrypted_packet_payload::Value::StreamTrailer(v) => { + struct_ser.serialize_field("streamTrailer", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for EncryptedPacketPayload { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "user", + "chat_message", + "chatMessage", + "rpc_request", + "rpcRequest", + "rpc_ack", + "rpcAck", + "rpc_response", + "rpcResponse", + "stream_header", + "streamHeader", + "stream_chunk", + "streamChunk", + "stream_trailer", + "streamTrailer", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + User, + ChatMessage, + RpcRequest, + RpcAck, + RpcResponse, + StreamHeader, + StreamChunk, + StreamTrailer, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where 
+ E: serde::de::Error, + { + match value { + "user" => Ok(GeneratedField::User), + "chatMessage" | "chat_message" => Ok(GeneratedField::ChatMessage), + "rpcRequest" | "rpc_request" => Ok(GeneratedField::RpcRequest), + "rpcAck" | "rpc_ack" => Ok(GeneratedField::RpcAck), + "rpcResponse" | "rpc_response" => Ok(GeneratedField::RpcResponse), + "streamHeader" | "stream_header" => Ok(GeneratedField::StreamHeader), + "streamChunk" | "stream_chunk" => Ok(GeneratedField::StreamChunk), + "streamTrailer" | "stream_trailer" => Ok(GeneratedField::StreamTrailer), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = EncryptedPacketPayload; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.EncryptedPacketPayload") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::User => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("user")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::User) +; + } + GeneratedField::ChatMessage => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("chatMessage")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::ChatMessage) +; + } + GeneratedField::RpcRequest => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcRequest")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcRequest) +; + } + GeneratedField::RpcAck => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcAck")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcAck) +; + } + GeneratedField::RpcResponse => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("rpcResponse")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::RpcResponse) +; + } + GeneratedField::StreamHeader => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamHeader")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamHeader) +; + } + GeneratedField::StreamChunk => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamChunk")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamChunk) +; + } + GeneratedField::StreamTrailer => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("streamTrailer")); + } + value__ = map_.next_value::<::std::option::Option<_>>()?.map(encrypted_packet_payload::Value::StreamTrailer) +; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(EncryptedPacketPayload { + value: value__, + }) + } + } + deserializer.deserialize_struct("livekit.EncryptedPacketPayload", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for Encryption { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -13485,6 +14011,12 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { len += 1; } + if 
!self.worker_id.is_empty() { + len += 1; + } + if !self.agent_id.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.JobState", len)?; if self.status != 0 { let v = JobStatus::try_from(self.status) @@ -13512,6 +14044,12 @@ impl serde::Serialize for JobState { if !self.participant_identity.is_empty() { struct_ser.serialize_field("participantIdentity", &self.participant_identity)?; } + if !self.worker_id.is_empty() { + struct_ser.serialize_field("workerId", &self.worker_id)?; + } + if !self.agent_id.is_empty() { + struct_ser.serialize_field("agentId", &self.agent_id)?; + } struct_ser.end() } } @@ -13532,6 +14070,10 @@ impl<'de> serde::Deserialize<'de> for JobState { "updatedAt", "participant_identity", "participantIdentity", + "worker_id", + "workerId", + "agent_id", + "agentId", ]; #[allow(clippy::enum_variant_names)] @@ -13542,6 +14084,8 @@ impl<'de> serde::Deserialize<'de> for JobState { EndedAt, UpdatedAt, ParticipantIdentity, + WorkerId, + AgentId, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -13570,6 +14114,8 @@ impl<'de> serde::Deserialize<'de> for JobState { "endedAt" | "ended_at" => Ok(GeneratedField::EndedAt), "updatedAt" | "updated_at" => Ok(GeneratedField::UpdatedAt), "participantIdentity" | "participant_identity" => Ok(GeneratedField::ParticipantIdentity), + "workerId" | "worker_id" => Ok(GeneratedField::WorkerId), + "agentId" | "agent_id" => Ok(GeneratedField::AgentId), _ => Ok(GeneratedField::__SkipField__), } } @@ -13595,6 +14141,8 @@ impl<'de> serde::Deserialize<'de> for JobState { let mut ended_at__ = None; let mut updated_at__ = None; let mut participant_identity__ = None; + let mut worker_id__ = None; + let mut agent_id__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::Status => { @@ -13639,6 +14187,18 @@ impl<'de> serde::Deserialize<'de> for JobState { } participant_identity__ = Some(map_.next_value()?); } + GeneratedField::WorkerId => { + if worker_id__.is_some() { + return Err(serde::de::Error::duplicate_field("workerId")); + } + worker_id__ = Some(map_.next_value()?); + } + GeneratedField::AgentId => { + if agent_id__.is_some() { + return Err(serde::de::Error::duplicate_field("agentId")); + } + agent_id__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -13651,6 +14211,8 @@ impl<'de> serde::Deserialize<'de> for JobState { ended_at: ended_at__.unwrap_or_default(), updated_at: updated_at__.unwrap_or_default(), participant_identity: participant_identity__.unwrap_or_default(), + worker_id: worker_id__.unwrap_or_default(), + agent_id: agent_id__.unwrap_or_default(), }) } } @@ -13904,6 +14466,266 @@ impl<'de> serde::Deserialize<'de> for JobType { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for JoinRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.client_info.is_some() { + len += 1; + } + if self.connection_settings.is_some() { + len += 1; + } + if !self.metadata.is_empty() { + len += 1; + } + if !self.participant_attributes.is_empty() { + len += 1; + } + if !self.add_track_requests.is_empty() { + len += 1; + } + if self.publisher_offer.is_some() { + len += 1; + } + if self.reconnect { + len += 1; + } + if self.reconnect_reason != 0 { + len += 1; + } + if !self.participant_sid.is_empty() { + len += 1; + } + if self.sync_state.is_some() { + len += 1; + } + let mut 
struct_ser = serializer.serialize_struct("livekit.JoinRequest", len)?; + if let Some(v) = self.client_info.as_ref() { + struct_ser.serialize_field("clientInfo", v)?; + } + if let Some(v) = self.connection_settings.as_ref() { + struct_ser.serialize_field("connectionSettings", v)?; + } + if !self.metadata.is_empty() { + struct_ser.serialize_field("metadata", &self.metadata)?; + } + if !self.participant_attributes.is_empty() { + struct_ser.serialize_field("participantAttributes", &self.participant_attributes)?; + } + if !self.add_track_requests.is_empty() { + struct_ser.serialize_field("addTrackRequests", &self.add_track_requests)?; + } + if let Some(v) = self.publisher_offer.as_ref() { + struct_ser.serialize_field("publisherOffer", v)?; + } + if self.reconnect { + struct_ser.serialize_field("reconnect", &self.reconnect)?; + } + if self.reconnect_reason != 0 { + let v = ReconnectReason::try_from(self.reconnect_reason) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.reconnect_reason)))?; + struct_ser.serialize_field("reconnectReason", &v)?; + } + if !self.participant_sid.is_empty() { + struct_ser.serialize_field("participantSid", &self.participant_sid)?; + } + if let Some(v) = self.sync_state.as_ref() { + struct_ser.serialize_field("syncState", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for JoinRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "client_info", + "clientInfo", + "connection_settings", + "connectionSettings", + "metadata", + "participant_attributes", + "participantAttributes", + "add_track_requests", + "addTrackRequests", + "publisher_offer", + "publisherOffer", + "reconnect", + "reconnect_reason", + "reconnectReason", + "participant_sid", + "participantSid", + "sync_state", + "syncState", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + ClientInfo, + ConnectionSettings, + Metadata, + ParticipantAttributes, + AddTrackRequests, + PublisherOffer, + Reconnect, + ReconnectReason, + ParticipantSid, + SyncState, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "clientInfo" | "client_info" => Ok(GeneratedField::ClientInfo), + "connectionSettings" | "connection_settings" => Ok(GeneratedField::ConnectionSettings), + "metadata" => Ok(GeneratedField::Metadata), + "participantAttributes" | "participant_attributes" => Ok(GeneratedField::ParticipantAttributes), + "addTrackRequests" | "add_track_requests" => Ok(GeneratedField::AddTrackRequests), + "publisherOffer" | "publisher_offer" => Ok(GeneratedField::PublisherOffer), + "reconnect" => Ok(GeneratedField::Reconnect), + "reconnectReason" | "reconnect_reason" => Ok(GeneratedField::ReconnectReason), + "participantSid" | "participant_sid" => Ok(GeneratedField::ParticipantSid), + "syncState" | "sync_state" => Ok(GeneratedField::SyncState), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + 
deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = JoinRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.JoinRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut client_info__ = None; + let mut connection_settings__ = None; + let mut metadata__ = None; + let mut participant_attributes__ = None; + let mut add_track_requests__ = None; + let mut publisher_offer__ = None; + let mut reconnect__ = None; + let mut reconnect_reason__ = None; + let mut participant_sid__ = None; + let mut sync_state__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::ClientInfo => { + if client_info__.is_some() { + return Err(serde::de::Error::duplicate_field("clientInfo")); + } + client_info__ = map_.next_value()?; + } + GeneratedField::ConnectionSettings => { + if connection_settings__.is_some() { + return Err(serde::de::Error::duplicate_field("connectionSettings")); + } + connection_settings__ = map_.next_value()?; + } + GeneratedField::Metadata => { + if metadata__.is_some() { + return Err(serde::de::Error::duplicate_field("metadata")); + } + metadata__ = Some(map_.next_value()?); + } + GeneratedField::ParticipantAttributes => { + if participant_attributes__.is_some() { + return Err(serde::de::Error::duplicate_field("participantAttributes")); + } + participant_attributes__ = Some( + map_.next_value::>()? + ); + } + GeneratedField::AddTrackRequests => { + if add_track_requests__.is_some() { + return Err(serde::de::Error::duplicate_field("addTrackRequests")); + } + add_track_requests__ = Some(map_.next_value()?); + } + GeneratedField::PublisherOffer => { + if publisher_offer__.is_some() { + return Err(serde::de::Error::duplicate_field("publisherOffer")); + } + publisher_offer__ = map_.next_value()?; + } + GeneratedField::Reconnect => { + if reconnect__.is_some() { + return Err(serde::de::Error::duplicate_field("reconnect")); + } + reconnect__ = Some(map_.next_value()?); + } + GeneratedField::ReconnectReason => { + if reconnect_reason__.is_some() { + return Err(serde::de::Error::duplicate_field("reconnectReason")); + } + reconnect_reason__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::ParticipantSid => { + if participant_sid__.is_some() { + return Err(serde::de::Error::duplicate_field("participantSid")); + } + participant_sid__ = Some(map_.next_value()?); + } + GeneratedField::SyncState => { + if sync_state__.is_some() { + return Err(serde::de::Error::duplicate_field("syncState")); + } + sync_state__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(JoinRequest { + client_info: client_info__, + connection_settings: connection_settings__, + metadata: metadata__.unwrap_or_default(), + participant_attributes: participant_attributes__.unwrap_or_default(), + add_track_requests: add_track_requests__.unwrap_or_default(), + publisher_offer: publisher_offer__, + reconnect: reconnect__.unwrap_or_default(), + reconnect_reason: reconnect_reason__.unwrap_or_default(), + participant_sid: participant_sid__.unwrap_or_default(), + sync_state: sync_state__, + }) + } + } + deserializer.deserialize_struct("livekit.JoinRequest", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for JoinResponse { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -16384,10 +17206,28 @@ impl serde::Serialize for ListUpdate { if !self.set.is_empty() { len += 1; } + if !self.add.is_empty() { + len += 1; + } + if !self.del.is_empty() { + len += 1; + } + if self.clear { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.ListUpdate", len)?; if !self.set.is_empty() { struct_ser.serialize_field("set", &self.set)?; } + if !self.add.is_empty() { + struct_ser.serialize_field("add", &self.add)?; + } + if !self.del.is_empty() { + struct_ser.serialize_field("del", &self.del)?; + } + if self.clear { + struct_ser.serialize_field("clear", &self.clear)?; + } struct_ser.end() } } @@ -16399,11 +17239,17 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { const FIELDS: &[&str] = &[ "set", + "add", + "del", + "clear", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Set, + Add, + Del, + Clear, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -16427,6 +17273,9 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { { match value { "set" => Ok(GeneratedField::Set), + "add" => Ok(GeneratedField::Add), + "del" => Ok(GeneratedField::Del), + "clear" => Ok(GeneratedField::Clear), _ => Ok(GeneratedField::__SkipField__), } } @@ -16447,6 +17296,9 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { V: serde::de::MapAccess<'de>, { let mut set__ = None; + let mut add__ = None; + let mut del__ = None; + let mut clear__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::Set => { @@ -16455,6 +17307,24 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } set__ = Some(map_.next_value()?); } + GeneratedField::Add => { + if add__.is_some() { + return Err(serde::de::Error::duplicate_field("add")); + } + add__ = Some(map_.next_value()?); + } + GeneratedField::Del => { + if del__.is_some() { + return Err(serde::de::Error::duplicate_field("del")); + } + del__ = Some(map_.next_value()?); + } + GeneratedField::Clear => { + if clear__.is_some() { + return Err(serde::de::Error::duplicate_field("clear")); + } + clear__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -16462,12 +17332,133 @@ impl<'de> serde::Deserialize<'de> for ListUpdate { } Ok(ListUpdate { set: set__.unwrap_or_default(), + add: add__.unwrap_or_default(), + del: del__.unwrap_or_default(), + clear: clear__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.ListUpdate", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for MediaSectionsRequirement { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.num_audios != 0 { + len += 1; + } + if self.num_videos != 0 { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.MediaSectionsRequirement", len)?; + if self.num_audios != 0 { + struct_ser.serialize_field("numAudios", &self.num_audios)?; + } + if self.num_videos != 0 { + struct_ser.serialize_field("numVideos", &self.num_videos)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for MediaSectionsRequirement { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "num_audios", + "numAudios", + "num_videos", + "numVideos", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + NumAudios, + NumVideos, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "numAudios" | "num_audios" => Ok(GeneratedField::NumAudios), + "numVideos" | "num_videos" => Ok(GeneratedField::NumVideos), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = MediaSectionsRequirement; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.MediaSectionsRequirement") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut num_audios__ = None; + let mut num_videos__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::NumAudios => { + if num_audios__.is_some() { + return Err(serde::de::Error::duplicate_field("numAudios")); + } + num_audios__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::NumVideos => { + if num_videos__.is_some() { + return Err(serde::de::Error::duplicate_field("numVideos")); + } + num_videos__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(MediaSectionsRequirement { + num_audios: num_audios__.unwrap_or_default(), + num_videos: num_videos__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.MediaSectionsRequirement", FIELDS, GeneratedVisitor) + } +} impl serde::Serialize for MetricLabel { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -23352,6 +24343,9 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { len += 1; } + if !self.metadata.is_empty() { + len += 1; + } if self.egress.is_some() { len += 1; } @@ -23380,6 +24374,9 @@ impl serde::Serialize for RoomConfiguration { if self.max_participants != 0 { struct_ser.serialize_field("maxParticipants", &self.max_participants)?; } + if !self.metadata.is_empty() { + struct_ser.serialize_field("metadata", &self.metadata)?; + } if let Some(v) = self.egress.as_ref() { struct_ser.serialize_field("egress", v)?; } @@ -23412,6 +24409,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "departureTimeout", "max_participants", "maxParticipants", + "metadata", "egress", "min_playout_delay", "minPlayoutDelay", @@ -23428,6 +24426,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { EmptyTimeout, DepartureTimeout, MaxParticipants, + Metadata, Egress, MinPlayoutDelay, MaxPlayoutDelay, @@ -23459,6 +24458,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { "emptyTimeout" | "empty_timeout" => Ok(GeneratedField::EmptyTimeout), "departureTimeout" | "departure_timeout" => Ok(GeneratedField::DepartureTimeout), "maxParticipants" | "max_participants" => Ok(GeneratedField::MaxParticipants), + "metadata" => Ok(GeneratedField::Metadata), "egress" => Ok(GeneratedField::Egress), "minPlayoutDelay" | "min_playout_delay" => Ok(GeneratedField::MinPlayoutDelay), "maxPlayoutDelay" | "max_playout_delay" => Ok(GeneratedField::MaxPlayoutDelay), @@ -23487,6 +24487,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { let mut empty_timeout__ = None; let mut departure_timeout__ = None; let mut max_participants__ = None; + let mut metadata__ = None; let mut egress__ = None; let mut min_playout_delay__ = None; let mut max_playout_delay__ = None; @@ -23524,6 +24525,12 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::Metadata => { + if metadata__.is_some() { + return Err(serde::de::Error::duplicate_field("metadata")); + } + metadata__ = Some(map_.next_value()?); + } GeneratedField::Egress => { if egress__.is_some() { return Err(serde::de::Error::duplicate_field("egress")); @@ -23568,6 +24575,7 @@ impl<'de> serde::Deserialize<'de> for RoomConfiguration { empty_timeout: empty_timeout__.unwrap_or_default(), departure_timeout: departure_timeout__.unwrap_or_default(), max_participants: max_participants__.unwrap_or_default(), + metadata: metadata__.unwrap_or_default(), egress: egress__, min_playout_delay: min_playout_delay__.unwrap_or_default(), 
max_playout_delay: max_playout_delay__.unwrap_or_default(), @@ -24604,6 +25612,12 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { len += 1; } + if !self.assume_role_arn.is_empty() { + len += 1; + } + if !self.assume_role_external_id.is_empty() { + len += 1; + } if !self.region.is_empty() { len += 1; } @@ -24638,6 +25652,12 @@ impl serde::Serialize for S3Upload { if !self.session_token.is_empty() { struct_ser.serialize_field("sessionToken", &self.session_token)?; } + if !self.assume_role_arn.is_empty() { + struct_ser.serialize_field("assumeRoleArn", &self.assume_role_arn)?; + } + if !self.assume_role_external_id.is_empty() { + struct_ser.serialize_field("assumeRoleExternalId", &self.assume_role_external_id)?; + } if !self.region.is_empty() { struct_ser.serialize_field("region", &self.region)?; } @@ -24677,6 +25697,10 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "secret", "session_token", "sessionToken", + "assume_role_arn", + "assumeRoleArn", + "assume_role_external_id", + "assumeRoleExternalId", "region", "endpoint", "bucket", @@ -24694,6 +25718,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { AccessKey, Secret, SessionToken, + AssumeRoleArn, + AssumeRoleExternalId, Region, Endpoint, Bucket, @@ -24727,6 +25753,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { "accessKey" | "access_key" => Ok(GeneratedField::AccessKey), "secret" => Ok(GeneratedField::Secret), "sessionToken" | "session_token" => Ok(GeneratedField::SessionToken), + "assumeRoleArn" | "assume_role_arn" => Ok(GeneratedField::AssumeRoleArn), + "assumeRoleExternalId" | "assume_role_external_id" => Ok(GeneratedField::AssumeRoleExternalId), "region" => Ok(GeneratedField::Region), "endpoint" => Ok(GeneratedField::Endpoint), "bucket" => Ok(GeneratedField::Bucket), @@ -24757,6 +25785,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { let mut access_key__ = None; let mut secret__ = None; let mut session_token__ = None; + let mut assume_role_arn__ = None; + let mut assume_role_external_id__ = None; let mut region__ = None; let mut endpoint__ = None; let mut bucket__ = None; @@ -24785,6 +25815,18 @@ impl<'de> serde::Deserialize<'de> for S3Upload { } session_token__ = Some(map_.next_value()?); } + GeneratedField::AssumeRoleArn => { + if assume_role_arn__.is_some() { + return Err(serde::de::Error::duplicate_field("assumeRoleArn")); + } + assume_role_arn__ = Some(map_.next_value()?); + } + GeneratedField::AssumeRoleExternalId => { + if assume_role_external_id__.is_some() { + return Err(serde::de::Error::duplicate_field("assumeRoleExternalId")); + } + assume_role_external_id__ = Some(map_.next_value()?); + } GeneratedField::Region => { if region__.is_some() { return Err(serde::de::Error::duplicate_field("region")); @@ -24844,6 +25886,8 @@ impl<'de> serde::Deserialize<'de> for S3Upload { access_key: access_key__.unwrap_or_default(), secret: secret__.unwrap_or_default(), session_token: session_token__.unwrap_or_default(), + assume_role_arn: assume_role_arn__.unwrap_or_default(), + assume_role_external_id: assume_role_external_id__.unwrap_or_default(), region: region__.unwrap_or_default(), endpoint: endpoint__.unwrap_or_default(), bucket: bucket__.unwrap_or_default(), @@ -31459,6 +32503,9 @@ impl serde::Serialize for SignalResponse { signal_response::Message::RoomMoved(v) => { struct_ser.serialize_field("roomMoved", v)?; } + signal_response::Message::MediaSectionsRequirement(v) => { + struct_ser.serialize_field("mediaSectionsRequirement", v)?; + } } } struct_ser.end() @@ -31508,6 +32555,8 @@ 
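// A minimal sketch (not part of this patch) of the new assume-role fields on
// S3Upload serialized above: per the field comments, Egress makes an
// AssumeRole call with the provided access key and secret before uploading,
// and on LiveKit Cloud this requires the feature to be enabled. All values
// below are placeholders.

fn s3_upload_with_assumed_role() -> S3Upload {
    S3Upload {
        access_key: "<access key>".to_string(),
        secret: "<secret>".to_string(),
        // Role assumed for the upload itself, plus the ExternalID used
        // when assuming it.
        assume_role_arn: "arn:aws:iam::123456789012:role/egress-upload".to_string(),
        assume_role_external_id: "<external id>".to_string(),
        region: "us-east-1".to_string(),
        bucket: "my-recordings".to_string(),
        ..Default::default()
    }
}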
impl<'de> serde::Deserialize<'de> for SignalResponse { "trackSubscribed", "room_moved", "roomMoved", + "media_sections_requirement", + "mediaSectionsRequirement", ]; #[allow(clippy::enum_variant_names)] @@ -31535,6 +32584,7 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { RequestResponse, TrackSubscribed, RoomMoved, + MediaSectionsRequirement, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -31580,6 +32630,7 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { "requestResponse" | "request_response" => Ok(GeneratedField::RequestResponse), "trackSubscribed" | "track_subscribed" => Ok(GeneratedField::TrackSubscribed), "roomMoved" | "room_moved" => Ok(GeneratedField::RoomMoved), + "mediaSectionsRequirement" | "media_sections_requirement" => Ok(GeneratedField::MediaSectionsRequirement), _ => Ok(GeneratedField::__SkipField__), } } @@ -31759,6 +32810,13 @@ impl<'de> serde::Deserialize<'de> for SignalResponse { return Err(serde::de::Error::duplicate_field("roomMoved")); } message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::RoomMoved) +; + } + GeneratedField::MediaSectionsRequirement => { + if message__.is_some() { + return Err(serde::de::Error::duplicate_field("mediaSectionsRequirement")); + } + message__ = map_.next_value::<::std::option::Option<_>>()?.map(signal_response::Message::MediaSectionsRequirement) ; } GeneratedField::__SkipField__ => { @@ -32197,6 +33255,12 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { len += 1; } + if !self.layers.is_empty() { + len += 1; + } + if self.video_layer_mode != 0 { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodec", len)?; if !self.codec.is_empty() { struct_ser.serialize_field("codec", &self.codec)?; @@ -32204,6 +33268,14 @@ impl serde::Serialize for SimulcastCodec { if !self.cid.is_empty() { struct_ser.serialize_field("cid", &self.cid)?; } + if !self.layers.is_empty() { + struct_ser.serialize_field("layers", &self.layers)?; + } + if self.video_layer_mode != 0 { + let v = video_layer::Mode::try_from(self.video_layer_mode) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; + struct_ser.serialize_field("videoLayerMode", &v)?; + } struct_ser.end() } } @@ -32216,12 +33288,17 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { const FIELDS: &[&str] = &[ "codec", "cid", + "layers", + "video_layer_mode", + "videoLayerMode", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Codec, Cid, + Layers, + VideoLayerMode, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32246,6 +33323,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { match value { "codec" => Ok(GeneratedField::Codec), "cid" => Ok(GeneratedField::Cid), + "layers" => Ok(GeneratedField::Layers), + "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), _ => Ok(GeneratedField::__SkipField__), } } @@ -32267,6 +33346,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { { let mut codec__ = None; let mut cid__ = None; + let mut layers__ = None; + let mut video_layer_mode__ = None; while let Some(k) = map_.next_key()? 
{ match k { GeneratedField::Codec => { @@ -32281,6 +33362,18 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { } cid__ = Some(map_.next_value()?); } + GeneratedField::Layers => { + if layers__.is_some() { + return Err(serde::de::Error::duplicate_field("layers")); + } + layers__ = Some(map_.next_value()?); + } + GeneratedField::VideoLayerMode => { + if video_layer_mode__.is_some() { + return Err(serde::de::Error::duplicate_field("videoLayerMode")); + } + video_layer_mode__ = Some(map_.next_value::()? as i32); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32289,6 +33382,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodec { Ok(SimulcastCodec { codec: codec__.unwrap_or_default(), cid: cid__.unwrap_or_default(), + layers: layers__.unwrap_or_default(), + video_layer_mode: video_layer_mode__.unwrap_or_default(), }) } } @@ -32315,6 +33410,12 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { len += 1; } + if self.video_layer_mode != 0 { + len += 1; + } + if !self.sdp_cid.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.SimulcastCodecInfo", len)?; if !self.mime_type.is_empty() { struct_ser.serialize_field("mimeType", &self.mime_type)?; @@ -32328,6 +33429,14 @@ impl serde::Serialize for SimulcastCodecInfo { if !self.layers.is_empty() { struct_ser.serialize_field("layers", &self.layers)?; } + if self.video_layer_mode != 0 { + let v = video_layer::Mode::try_from(self.video_layer_mode) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.video_layer_mode)))?; + struct_ser.serialize_field("videoLayerMode", &v)?; + } + if !self.sdp_cid.is_empty() { + struct_ser.serialize_field("sdpCid", &self.sdp_cid)?; + } struct_ser.end() } } @@ -32343,6 +33452,10 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid", "cid", "layers", + "video_layer_mode", + "videoLayerMode", + "sdp_cid", + "sdpCid", ]; #[allow(clippy::enum_variant_names)] @@ -32351,6 +33464,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { Mid, Cid, Layers, + VideoLayerMode, + SdpCid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -32377,6 +33492,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { "mid" => Ok(GeneratedField::Mid), "cid" => Ok(GeneratedField::Cid), "layers" => Ok(GeneratedField::Layers), + "videoLayerMode" | "video_layer_mode" => Ok(GeneratedField::VideoLayerMode), + "sdpCid" | "sdp_cid" => Ok(GeneratedField::SdpCid), _ => Ok(GeneratedField::__SkipField__), } } @@ -32400,6 +33517,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { let mut mid__ = None; let mut cid__ = None; let mut layers__ = None; + let mut video_layer_mode__ = None; + let mut sdp_cid__ = None; while let Some(k) = map_.next_key()? { match k { GeneratedField::MimeType => { @@ -32426,6 +33545,18 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { } layers__ = Some(map_.next_value()?); } + GeneratedField::VideoLayerMode => { + if video_layer_mode__.is_some() { + return Err(serde::de::Error::duplicate_field("videoLayerMode")); + } + video_layer_mode__ = Some(map_.next_value::()? 
as i32); + } + GeneratedField::SdpCid => { + if sdp_cid__.is_some() { + return Err(serde::de::Error::duplicate_field("sdpCid")); + } + sdp_cid__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -32436,6 +33567,8 @@ impl<'de> serde::Deserialize<'de> for SimulcastCodecInfo { mid: mid__.unwrap_or_default(), cid: cid__.unwrap_or_default(), layers: layers__.unwrap_or_default(), + video_layer_mode: video_layer_mode__.unwrap_or_default(), + sdp_cid: sdp_cid__.unwrap_or_default(), }) } } @@ -34961,8 +36094,115 @@ impl<'de> serde::Deserialize<'de> for TimedVersion { E: serde::de::Error, { match value { - "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), - "ticks" => Ok(GeneratedField::Ticks), + "unixMicro" | "unix_micro" => Ok(GeneratedField::UnixMicro), + "ticks" => Ok(GeneratedField::Ticks), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = TimedVersion; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.TimedVersion") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut unix_micro__ = None; + let mut ticks__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::UnixMicro => { + if unix_micro__.is_some() { + return Err(serde::de::Error::duplicate_field("unixMicro")); + } + unix_micro__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Ticks => { + if ticks__.is_some() { + return Err(serde::de::Error::duplicate_field("ticks")); + } + ticks__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(TimedVersion { + unix_micro: unix_micro__.unwrap_or_default(), + ticks: ticks__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for TokenPagination { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.token.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.TokenPagination", len)?; + if !self.token.is_empty() { + struct_ser.serialize_field("token", &self.token)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for TokenPagination { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "token", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Token, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "token" => 
Ok(GeneratedField::Token), _ => Ok(GeneratedField::__SkipField__), } } @@ -34972,48 +36212,36 @@ impl<'de> serde::Deserialize<'de> for TimedVersion { } struct GeneratedVisitor; impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = TimedVersion; + type Value = TokenPagination; fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct livekit.TimedVersion") + formatter.write_str("struct livekit.TokenPagination") } - fn visit_map(self, mut map_: V) -> std::result::Result + fn visit_map(self, mut map_: V) -> std::result::Result where V: serde::de::MapAccess<'de>, { - let mut unix_micro__ = None; - let mut ticks__ = None; + let mut token__ = None; while let Some(k) = map_.next_key()? { match k { - GeneratedField::UnixMicro => { - if unix_micro__.is_some() { - return Err(serde::de::Error::duplicate_field("unixMicro")); - } - unix_micro__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; - } - GeneratedField::Ticks => { - if ticks__.is_some() { - return Err(serde::de::Error::duplicate_field("ticks")); + GeneratedField::Token => { + if token__.is_some() { + return Err(serde::de::Error::duplicate_field("token")); } - ticks__ = - Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) - ; + token__ = Some(map_.next_value()?); } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } } } - Ok(TimedVersion { - unix_micro: unix_micro__.unwrap_or_default(), - ticks: ticks__.unwrap_or_default(), + Ok(TokenPagination { + token: token__.unwrap_or_default(), }) } } - deserializer.deserialize_struct("livekit.TimedVersion", FIELDS, GeneratedVisitor) + deserializer.deserialize_struct("livekit.TokenPagination", FIELDS, GeneratedVisitor) } } impl serde::Serialize for TrackCompositeEgressRequest { @@ -40417,6 +41645,12 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { len += 1; } + if self.spatial_layer != 0 { + len += 1; + } + if !self.rid.is_empty() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("livekit.VideoLayer", len)?; if self.quality != 0 { let v = VideoQuality::try_from(self.quality) @@ -40435,6 +41669,12 @@ impl serde::Serialize for VideoLayer { if self.ssrc != 0 { struct_ser.serialize_field("ssrc", &self.ssrc)?; } + if self.spatial_layer != 0 { + struct_ser.serialize_field("spatialLayer", &self.spatial_layer)?; + } + if !self.rid.is_empty() { + struct_ser.serialize_field("rid", &self.rid)?; + } struct_ser.end() } } @@ -40450,6 +41690,9 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height", "bitrate", "ssrc", + "spatial_layer", + "spatialLayer", + "rid", ]; #[allow(clippy::enum_variant_names)] @@ -40459,6 +41702,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Height, Bitrate, Ssrc, + SpatialLayer, + Rid, __SkipField__, } impl<'de> serde::Deserialize<'de> for GeneratedField { @@ -40486,6 +41731,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { "height" => Ok(GeneratedField::Height), "bitrate" => Ok(GeneratedField::Bitrate), "ssrc" => Ok(GeneratedField::Ssrc), + "spatialLayer" | "spatial_layer" => Ok(GeneratedField::SpatialLayer), + "rid" => Ok(GeneratedField::Rid), _ => Ok(GeneratedField::__SkipField__), } } @@ -40510,6 +41757,8 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { let mut height__ = None; let mut bitrate__ = None; let mut ssrc__ = None; + let mut spatial_layer__ = None; + let mut rid__ = None; while let Some(k) = map_.next_key()? 
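/* Editorial note: the numeric fields read below (width, height, bitrate,
   ssrc, spatialLayer) go through pbjson's NumberDeserialize wrapper, which
   per the proto3 JSON mapping accepts both bare numbers and quoted numeric
   strings. A hedged sketch (assumes serde_json):

       let a: VideoLayer = serde_json::from_str(r#"{"width":640}"#)?;
       let b: VideoLayer = serde_json::from_str(r#"{"width":"640"}"#)?;
       assert_eq!(a.width, b.width);
*/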
{ match k { GeneratedField::Quality => { @@ -40550,6 +41799,20 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) ; } + GeneratedField::SpatialLayer => { + if spatial_layer__.is_some() { + return Err(serde::de::Error::duplicate_field("spatialLayer")); + } + spatial_layer__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Rid => { + if rid__.is_some() { + return Err(serde::de::Error::duplicate_field("rid")); + } + rid__ = Some(map_.next_value()?); + } GeneratedField::__SkipField__ => { let _ = map_.next_value::()?; } @@ -40561,12 +41824,88 @@ impl<'de> serde::Deserialize<'de> for VideoLayer { height: height__.unwrap_or_default(), bitrate: bitrate__.unwrap_or_default(), ssrc: ssrc__.unwrap_or_default(), + spatial_layer: spatial_layer__.unwrap_or_default(), + rid: rid__.unwrap_or_default(), }) } } deserializer.deserialize_struct("livekit.VideoLayer", FIELDS, GeneratedVisitor) } } +impl serde::Serialize for video_layer::Mode { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::Unused => "MODE_UNUSED", + Self::OneSpatialLayerPerStream => "ONE_SPATIAL_LAYER_PER_STREAM", + Self::MultipleSpatialLayersPerStream => "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for video_layer::Mode { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "MODE_UNUSED", + "ONE_SPATIAL_LAYER_PER_STREAM", + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = video_layer::Mode; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "MODE_UNUSED" => Ok(video_layer::Mode::Unused), + "ONE_SPATIAL_LAYER_PER_STREAM" => Ok(video_layer::Mode::OneSpatialLayerPerStream), + "MULTIPLE_SPATIAL_LAYERS_PER_STREAM" => Ok(video_layer::Mode::MultipleSpatialLayersPerStream), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} impl serde::Serialize for VideoQuality { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -41793,3 +43132,193 @@ impl<'de> serde::Deserialize<'de> for WorkerStatus { deserializer.deserialize_any(GeneratedVisitor) } } +impl serde::Serialize for WrappedJoinRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.compression != 0 { + len += 1; + } + if !self.join_request.is_empty() { + len 
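/* Editorial note: `len` counts only fields that differ from their proto3
   defaults, and the serialize_field calls below are guarded the same way,
   so a default-valued message serializes to an empty JSON object. A hedged
   sketch (assumes serde_json and the prost-derived Default impl):

       let json = serde_json::to_string(&WrappedJoinRequest::default())?;
       assert_eq!(json, "{}");
*/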
+= 1; + } + let mut struct_ser = serializer.serialize_struct("livekit.WrappedJoinRequest", len)?; + if self.compression != 0 { + let v = wrapped_join_request::Compression::try_from(self.compression) + .map_err(|_| serde::ser::Error::custom(format!("Invalid variant {}", self.compression)))?; + struct_ser.serialize_field("compression", &v)?; + } + if !self.join_request.is_empty() { + #[allow(clippy::needless_borrow)] + #[allow(clippy::needless_borrows_for_generic_args)] + struct_ser.serialize_field("joinRequest", pbjson::private::base64::encode(&self.join_request).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WrappedJoinRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "compression", + "join_request", + "joinRequest", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Compression, + JoinRequest, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "compression" => Ok(GeneratedField::Compression), + "joinRequest" | "join_request" => Ok(GeneratedField::JoinRequest), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WrappedJoinRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct livekit.WrappedJoinRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut compression__ = None; + let mut join_request__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Compression => { + if compression__.is_some() { + return Err(serde::de::Error::duplicate_field("compression")); + } + compression__ = Some(map_.next_value::()? 
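/* Editorial note: the serializer above writes `join_request` through
   pbjson::private::base64::encode, and the BytesDeserialize read below
   reverses it, so the bytes field travels as a base64 JSON string. For
   example (hedged; "aGVsbG8=" is base64 for "hello"):

       {"compression":"GZIP","joinRequest":"aGVsbG8="}

   decodes to compression == Compression::Gzip as i32 and
   join_request == b"hello". */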
as i32); + } + GeneratedField::JoinRequest => { + if join_request__.is_some() { + return Err(serde::de::Error::duplicate_field("joinRequest")); + } + join_request__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WrappedJoinRequest { + compression: compression__.unwrap_or_default(), + join_request: join_request__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("livekit.WrappedJoinRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for wrapped_join_request::Compression { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + let variant = match self { + Self::None => "NONE", + Self::Gzip => "GZIP", + }; + serializer.serialize_str(variant) + } +} +impl<'de> serde::Deserialize<'de> for wrapped_join_request::Compression { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "NONE", + "GZIP", + ]; + + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = wrapped_join_request::Compression; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + fn visit_i64(self, v: i64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) + }) + } + + fn visit_u64(self, v: u64) -> std::result::Result + where + E: serde::de::Error, + { + i32::try_from(v) + .ok() + .and_then(|x| x.try_into().ok()) + .ok_or_else(|| { + serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) + }) + } + + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "NONE" => Ok(wrapped_join_request::Compression::None), + "GZIP" => Ok(wrapped_join_request::Compression::Gzip), + _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), + } + } + } + deserializer.deserialize_any(GeneratedVisitor) + } +} From 16e624a8324310faad3578c4084b4ba07d8527b8 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 29 Oct 2025 23:33:16 -0700 Subject: [PATCH 09/39] remove extraneous feature --- examples/local_video/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 7ded6dc99..6872927ec 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -14,7 +14,7 @@ path = "src/subscriber.rs" [dependencies] tokio = { version = "1", features = ["full", "parking_lot"] } -livekit = { workspace = true, features = ["rustls-tls-native-roots"] } +livekit = { workspace = true } webrtc-sys = { workspace = true } libwebrtc = { workspace = true } livekit-api = { workspace = true } From 5d39dde2f0728774ba79fa766d3a1a4df898f512 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 29 Oct 2025 23:39:31 -0700 Subject: [PATCH 10/39] wip fixing mjpeg decode --- examples/local_video/src/publisher.rs | 90 ++++++++++++++++++++++----- 1 file changed, 73 insertions(+), 17 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index ee130b179..a9eeb3c44 100644 --- a/examples/local_video/src/publisher.rs +++ 
b/examples/local_video/src/publisher.rs @@ -145,6 +145,7 @@ async fn main() -> Result<()> { let height = fmt.height(); let fps = fmt.frame_rate(); info!("Camera opened: {}x{} @ {} fps (format: {})", width, height, fps, using_fmt); + debug!("Negotiated nokhwa CameraFormat: {:?}", fmt); // Pace publishing at the requested FPS (not the camera-reported FPS) to hit desired cadence let pace_fps = args.fps as f64; @@ -172,6 +173,10 @@ async fn main() -> Result<()> { // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; let is_yuyv = using_fmt == "YUYV"; + info!( + "Selected conversion path: {}", + if is_yuyv { "YUYV->I420 (libyuv)" } else { "MJPEG->RGB->I420" } + ); // Accurate pacing using absolute schedule (no drift) let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps)); @@ -193,6 +198,7 @@ async fn main() -> Result<()> { let mut sum_capture_ms = 0.0; let mut sum_sleep_ms = 0.0; let mut sum_iter_ms = 0.0; + let mut logged_mjpeg_fallback = false; loop { // Wait until the scheduled next frame time let wait_start = Instant::now(); @@ -228,23 +234,73 @@ async fn main() -> Result<()> { } t2_local } else { - // Fallback (e.g., MJPEG): decode to RGB24 then convert to I420 - let rgb = frame_buf.decode_image::()?; - let t2_local = Instant::now(); - unsafe { - let _ = yuv_sys::rs_RGB24ToI420( - rgb.as_raw().as_ptr(), - (width * 3) as i32, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } + // Fallback (e.g., MJPEG): try nokhwa's decode first; if that fails, use image crate + let t2_local = match frame_buf.decode_image::() { + Ok(rgb) => { + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb.as_raw().as_ptr(), + (width * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() + } + Err(e) => { + if !logged_mjpeg_fallback { + log::warn!( + "MJPEG decode via nokhwa failed: {}. 
Falling back to image crate decode.", + e + ); + logged_mjpeg_fallback = true; + } + let src = frame_buf.buffer(); + match image::load_from_memory(src.as_ref()) { + Ok(img_dyn) => { + let rgb8 = img_dyn.to_rgb8(); + let dec_w = rgb8.width() as u32; + let dec_h = rgb8.height() as u32; + if dec_w != width || dec_h != height { + log::warn!( + "Decoded MJPEG size {}x{} differs from camera {}x{}; skipping resize and attempting convert as-is", + dec_w, + dec_h, + width, + height + ); + } + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb8.as_raw().as_ptr(), + (dec_w * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() + } + Err(e2) => { + return Err(anyhow::anyhow!( + "MJPEG decode failed: {}; fallback with image crate also failed: {}", + e, e2 + )); + } + } + } + }; t2_local }; let t3 = Instant::now(); From 27b8705b955782d8ff1c023cd4bd57502d5e0174 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 29 Oct 2025 23:51:28 -0700 Subject: [PATCH 11/39] revert removal of feature, does not connect w/o it --- examples/local_video/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 6872927ec..7ded6dc99 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -14,7 +14,7 @@ path = "src/subscriber.rs" [dependencies] tokio = { version = "1", features = ["full", "parking_lot"] } -livekit = { workspace = true } +livekit = { workspace = true, features = ["rustls-tls-native-roots"] } webrtc-sys = { workspace = true } libwebrtc = { workspace = true } livekit-api = { workspace = true } From 03a64997306d5963e2330ae459890b7456ea06af Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 00:09:37 -0700 Subject: [PATCH 12/39] wip --- examples/local_video/src/publisher.rs | 120 ++++++++++++-------------- 1 file changed, 57 insertions(+), 63 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index a9eeb3c44..bc96098af 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -172,10 +172,10 @@ async fn main() -> Result<()> { // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; - let is_yuyv = using_fmt == "YUYV"; + let is_yuyv = fmt.format() == FrameFormat::YUYV; info!( "Selected conversion path: {}", - if is_yuyv { "YUYV->I420 (libyuv)" } else { "MJPEG->RGB->I420" } + if is_yuyv { "YUYV->I420 (libyuv)" } else { "Auto (RGB24 or MJPEG)" } ); // Accurate pacing using absolute schedule (no drift) @@ -234,70 +234,64 @@ async fn main() -> Result<()> { } t2_local } else { - // Fallback (e.g., MJPEG): try nokhwa's decode first; if that fails, use image crate - let t2_local = match frame_buf.decode_image::() { - Ok(rgb) => { - unsafe { - let _ = yuv_sys::rs_RGB24ToI420( - rgb.as_raw().as_ptr(), - (width * 3) as i32, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } - Instant::now() + // Auto path (either RGB24 already or compressed MJPEG) + let src = frame_buf.buffer(); + let t2_local = if src.len() == (width as usize * height as usize * 3) { + // Already RGB24 from backend; convert directly + unsafe { + let _ = 
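/* Editorial note on the length check above: packed RGB24 is 24 bits per
   pixel (w*h*3 bytes), while the I420 destination is 12 bits per pixel
   (w*h*3/2: a full-resolution Y plane plus quarter-resolution U and V
   planes). For 1280x720 that is 2_764_800 bytes of RGB24 versus 1_382_400
   of I420, and a compressed MJPEG payload is smaller still, so `src.len()`
   reliably separates the raw-RGB24 case from the compressed case. */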
yuv_sys::rs_RGB24ToI420( + src.as_ref().as_ptr(), + (width * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); } Instant::now() } else { + // Try decoding as MJPEG via image crate + match image::load_from_memory(src.as_ref()) { + Ok(img_dyn) => { + let rgb8 = img_dyn.to_rgb8(); + let dec_w = rgb8.width() as u32; + let dec_h = rgb8.height() as u32; + if dec_w != width || dec_h != height { + log::warn!( + "Decoded MJPEG size {}x{} differs from requested {}x{}; dropping frame", + dec_w, dec_h, width, height + ); + continue; } - Err(e2) => { - return Err(anyhow::anyhow!( - "MJPEG decode failed: {}; fallback with image crate also failed: {}", - e, e2 - )); + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb8.as_raw().as_ptr(), + (dec_w * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() + } + Err(e2) => { + if !logged_mjpeg_fallback { + log::error!( + "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", + e2 + ); + logged_mjpeg_fallback = true; } + continue; } } }; From 7cb75e994eb933b28c8e33eca8b26df0a6448c1e Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 10:56:02 -0700 Subject: [PATCH 13/39] experiment with mjpg -> I420 conversion --- Cargo.lock | 1 + examples/local_video/src/publisher.rs | 96 +++++++++++++++++---------- yuv-sys/Cargo.toml | 1 + yuv-sys/build.rs | 24 ++++++- 4 files changed, 83 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8a3df9840..d8c7a80aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7512,6 +7512,7 @@ version = "0.3.10" dependencies = [ "bindgen 0.72.1", "cc", + "pkg-config", "rayon", "regex", ] diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index bc96098af..853de55f4 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -254,44 +254,68 @@ async fn main() -> Result<()> { } Instant::now() } else { - // Try decoding as MJPEG via image crate - match image::load_from_memory(src.as_ref()) { - Ok(img_dyn) => { - let rgb8 = img_dyn.to_rgb8(); - let dec_w = rgb8.width() as u32; - let dec_h = rgb8.height() as u32; - if dec_w != width || dec_h != height { - log::warn!( - "Decoded MJPEG size {}x{} differs from requested {}x{}; dropping frame", - dec_w, dec_h, width, height - ); - continue; - } - unsafe { - let _ = yuv_sys::rs_RGB24ToI420( - rgb8.as_raw().as_ptr(), - (dec_w * 3) as i32, - data_y.as_mut_ptr(), - stride_y 
as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); + // Try fast MJPEG->I420 via libyuv if available; fallback to image crate + let mut used_fast_mjpeg = false; + let t2_try = unsafe { + // rs_MJPGToI420 returns 0 on success + let ret = yuv_sys::rs_MJPGToI420( + src.as_ref().as_ptr(), + src.len(), + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + width as i32, + height as i32, + ); + if ret == 0 { used_fast_mjpeg = true; Instant::now() } else { t1 } + }; + if used_fast_mjpeg { + t2_try + } else { + // Fallback: decode MJPEG using image crate then RGB24->I420 + match image::load_from_memory(src.as_ref()) { + Ok(img_dyn) => { + let rgb8 = img_dyn.to_rgb8(); + let dec_w = rgb8.width() as u32; + let dec_h = rgb8.height() as u32; + if dec_w != width || dec_h != height { + log::warn!( + "Decoded MJPEG size {}x{} differs from requested {}x{}; dropping frame", + dec_w, dec_h, width, height + ); + continue; + } + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb8.as_raw().as_ptr(), + (dec_w * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() } - Instant::now() - } - Err(e2) => { - if !logged_mjpeg_fallback { - log::error!( - "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", - e2 - ); - logged_mjpeg_fallback = true; + Err(e2) => { + if !logged_mjpeg_fallback { + log::error!( + "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", + e2 + ); + logged_mjpeg_fallback = true; + } + continue; } - continue; } } }; diff --git a/yuv-sys/Cargo.toml b/yuv-sys/Cargo.toml index 47fdaab61..f75e103d3 100644 --- a/yuv-sys/Cargo.toml +++ b/yuv-sys/Cargo.toml @@ -11,3 +11,4 @@ bindgen = "0.72.1" cc = "1.0" regex = "1" rayon = "1.8" +pkg-config = "0.3" diff --git a/yuv-sys/build.rs b/yuv-sys/build.rs index 4c5c503cb..3e9a40bd3 100644 --- a/yuv-sys/build.rs +++ b/yuv-sys/build.rs @@ -134,11 +134,29 @@ fn main() { rename_symbols(&fnc_list, &include_files, &source_files); } - cc::Build::new() + // Try to detect system libjpeg (or libjpeg-turbo) via pkg-config to enable MJPEG fast path + let jpeg_pkg = + pkg_config::Config::new() + .probe("libjpeg") + .or_else(|_| pkg_config::Config::new().probe("libjpeg-turbo")) + .or_else(|_| pkg_config::Config::new().probe("jpeg")) + .ok(); + + let mut build = cc::Build::new(); + build .warnings(false) .include(libyuv_dir.join("include")) - .files(source_files.iter().map(|f| f.path())) - .compile("yuv"); + .files(source_files.iter().map(|f| f.path())); + + if let Some(pkg) = &jpeg_pkg { + // Enable JPEG in libyuv and add include paths from pkg-config + build.define("HAVE_JPEG", None); + for p in &pkg.include_paths { + build.include(p); + } + } + + build.compile("yuv"); let mut bindgen = bindgen::Builder::default() .header(include_dir.join("libyuv.h").to_string_lossy()) From 4db0ae4492651fd3936db8d09d9b07d29741d281 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 11:40:57 -0700 Subject: [PATCH 14/39] add conversion path for I400 -> I420 --- examples/local_video/src/publisher.rs | 33 +++++++++++++++++++++------ 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 853de55f4..71908bb95 100644 --- 
a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -234,13 +234,32 @@ async fn main() -> Result<()> { } t2_local } else { - // Auto path (either RGB24 already or compressed MJPEG) + // Auto path: handle GREY (I400), RGB24, or compressed MJPEG let src = frame_buf.buffer(); - let t2_local = if src.len() == (width as usize * height as usize * 3) { + let src_bytes = src.as_ref(); + let grey_len = (width as usize) * (height as usize); + let t2_local = if src_bytes.len() == grey_len { + // GREY/I400: use libyuv converter + unsafe { + let _ = yuv_sys::rs_I400ToI420( + src_bytes.as_ptr(), + width as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() + } else if src_bytes.len() == (width as usize * height as usize * 3) { // Already RGB24 from backend; convert directly unsafe { let _ = yuv_sys::rs_RGB24ToI420( - src.as_ref().as_ptr(), + src_bytes.as_ptr(), (width * 3) as i32, data_y.as_mut_ptr(), stride_y as i32, @@ -259,8 +278,8 @@ async fn main() -> Result<()> { let t2_try = unsafe { // rs_MJPGToI420 returns 0 on success let ret = yuv_sys::rs_MJPGToI420( - src.as_ref().as_ptr(), - src.len(), + src_bytes.as_ptr(), + src_bytes.len(), data_y.as_mut_ptr(), stride_y as i32, data_u.as_mut_ptr(), @@ -278,7 +297,7 @@ async fn main() -> Result<()> { t2_try } else { // Fallback: decode MJPEG using image crate then RGB24->I420 - match image::load_from_memory(src.as_ref()) { + match image::load_from_memory(src_bytes) { Ok(img_dyn) => { let rgb8 = img_dyn.to_rgb8(); let dec_w = rgb8.width() as u32; @@ -309,7 +328,7 @@ async fn main() -> Result<()> { Err(e2) => { if !logged_mjpeg_fallback { log::error!( - "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", + "MJPEG decode failed; buffer not RGB24/GRY8 and image decode failed: {}", e2 ); logged_mjpeg_fallback = true; From 60daefafcadb629dc755312261d1fd00a1da26c7 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 11:46:00 -0700 Subject: [PATCH 15/39] try to open camera in GREY --- examples/local_video/src/publisher.rs | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 71908bb95..0fc712f5c 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -123,7 +123,7 @@ async fn main() -> Result<()> { let index = CameraIndex::Index(args.camera_index as u32); let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); let mut camera = Camera::new(index, requested)?; - // Try raw YUYV first (cheaper than MJPEG), fall back to MJPEG + // Try raw YUYV first (cheaper than MJPEG), then GREY, then fall back to MJPEG let wanted = CameraFormat::new( Resolution::new(args.width, args.height), FrameFormat::YUYV, @@ -131,13 +131,23 @@ async fn main() -> Result<()> { ); let mut using_fmt = "YUYV"; if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) { - let alt = CameraFormat::new( + // Try GREY (I400) + let grey = CameraFormat::new( Resolution::new(args.width, args.height), - FrameFormat::MJPEG, + FrameFormat::GRAY, args.fps, ); - using_fmt = "MJPEG"; - let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); + using_fmt = "GREY"; + if let Err(_) = 
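/* Editorial note: `set_camera_requset` looks like a typo introduced by
   these patches, but it appears to be the method's actual (misspelled)
   name in the nokhwa 0.10 API, so it is kept verbatim throughout. */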
camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(grey))) { + // Fall back to MJPEG + let alt = CameraFormat::new( + Resolution::new(args.width, args.height), + FrameFormat::MJPEG, + args.fps, + ); + using_fmt = "MJPEG"; + let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); + } } camera.open_stream()?; let fmt = camera.camera_format(); From 6069a917118152b2aebc1fe2c20a81f7ef8abe42 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 11:48:32 -0700 Subject: [PATCH 16/39] add more logging --- examples/local_video/src/publisher.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 0fc712f5c..77d0b721b 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -130,6 +130,10 @@ async fn main() -> Result<()> { args.fps, ); let mut using_fmt = "YUYV"; + info!( + "Requesting camera format: YUYV {}x{} @ {} fps", + args.width, args.height, args.fps + ); if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) { // Try GREY (I400) let grey = CameraFormat::new( @@ -138,6 +142,10 @@ async fn main() -> Result<()> { args.fps, ); using_fmt = "GREY"; + info!( + "Requesting camera format: GREY {}x{} @ {} fps", + args.width, args.height, args.fps + ); if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(grey))) { // Fall back to MJPEG let alt = CameraFormat::new( @@ -146,6 +154,10 @@ async fn main() -> Result<()> { args.fps, ); using_fmt = "MJPEG"; + info!( + "Requesting camera format: MJPEG {}x{} @ {} fps", + args.width, args.height, args.fps + ); let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); } } From 9529512c719b2a77b31e187e0651e5cf4e05c057 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 12:05:47 -0700 Subject: [PATCH 17/39] clean up --- examples/local_video/src/publisher.rs | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 77d0b721b..fa0123251 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -9,6 +9,7 @@ use livekit_api::access_token; use log::{debug, info}; use yuv_sys as yuv_sys; use nokhwa::pixel_format::RgbFormat; +use nokhwa::pixel_format::LumaFormat; use nokhwa::utils::{ApiBackend, CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution}; use nokhwa::Camera; use std::env; @@ -121,8 +122,26 @@ async fn main() -> Result<()> { // Setup camera let index = CameraIndex::Index(args.camera_index as u32); - let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); - let mut camera = Camera::new(index, requested)?; + // Prefer LumaFormat for grayscale cameras; fall back to RgbFormat if not supported + let mut camera = match Camera::new( + index.clone(), + RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate), + ) { + Ok(cam) => { + info!("Opened camera with LumaFormat output (highest framerate)"); + cam + } + Err(e) => { + info!( + "LumaFormat not available ({}); falling back to RgbFormat output", + e + ); + Camera::new( + index.clone(), + RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate), + )? 
+ } + }; // Try raw YUYV first (cheaper than MJPEG), then GREY, then fall back to MJPEG let wanted = CameraFormat::new( Resolution::new(args.width, args.height), From 6929eda32ec1384bf75e5b62d5abfa0b26f5c22a Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 30 Oct 2025 12:36:05 -0700 Subject: [PATCH 18/39] remove grey support for now --- examples/local_video/src/publisher.rs | 88 +++++---------------------- 1 file changed, 14 insertions(+), 74 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index fa0123251..853de55f4 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -9,7 +9,6 @@ use livekit_api::access_token; use log::{debug, info}; use yuv_sys as yuv_sys; use nokhwa::pixel_format::RgbFormat; -use nokhwa::pixel_format::LumaFormat; use nokhwa::utils::{ApiBackend, CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution}; use nokhwa::Camera; use std::env; @@ -122,63 +121,23 @@ async fn main() -> Result<()> { // Setup camera let index = CameraIndex::Index(args.camera_index as u32); - // Prefer LumaFormat for grayscale cameras; fall back to RgbFormat if not supported - let mut camera = match Camera::new( - index.clone(), - RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate), - ) { - Ok(cam) => { - info!("Opened camera with LumaFormat output (highest framerate)"); - cam - } - Err(e) => { - info!( - "LumaFormat not available ({}); falling back to RgbFormat output", - e - ); - Camera::new( - index.clone(), - RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate), - )? - } - }; - // Try raw YUYV first (cheaper than MJPEG), then GREY, then fall back to MJPEG + let requested = RequestedFormat::new::(RequestedFormatType::AbsoluteHighestFrameRate); + let mut camera = Camera::new(index, requested)?; + // Try raw YUYV first (cheaper than MJPEG), fall back to MJPEG let wanted = CameraFormat::new( Resolution::new(args.width, args.height), FrameFormat::YUYV, args.fps, ); let mut using_fmt = "YUYV"; - info!( - "Requesting camera format: YUYV {}x{} @ {} fps", - args.width, args.height, args.fps - ); if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(wanted))) { - // Try GREY (I400) - let grey = CameraFormat::new( + let alt = CameraFormat::new( Resolution::new(args.width, args.height), - FrameFormat::GRAY, + FrameFormat::MJPEG, args.fps, ); - using_fmt = "GREY"; - info!( - "Requesting camera format: GREY {}x{} @ {} fps", - args.width, args.height, args.fps - ); - if let Err(_) = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(grey))) { - // Fall back to MJPEG - let alt = CameraFormat::new( - Resolution::new(args.width, args.height), - FrameFormat::MJPEG, - args.fps, - ); - using_fmt = "MJPEG"; - info!( - "Requesting camera format: MJPEG {}x{} @ {} fps", - args.width, args.height, args.fps - ); - let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); - } + using_fmt = "MJPEG"; + let _ = camera.set_camera_requset(RequestedFormat::new::(RequestedFormatType::Exact(alt))); } camera.open_stream()?; let fmt = camera.camera_format(); @@ -275,32 +234,13 @@ async fn main() -> Result<()> { } t2_local } else { - // Auto path: handle GREY (I400), RGB24, or compressed MJPEG + // Auto path (either RGB24 already or compressed MJPEG) let src = frame_buf.buffer(); - let src_bytes = src.as_ref(); - let grey_len = (width as usize) * (height as usize); - 
let t2_local = if src_bytes.len() == grey_len { - // GREY/I400: use libyuv converter - unsafe { - let _ = yuv_sys::rs_I400ToI420( - src_bytes.as_ptr(), - width as i32, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } - Instant::now() - } else if src_bytes.len() == (width as usize * height as usize * 3) { + let t2_local = if src.len() == (width as usize * height as usize * 3) { // Already RGB24 from backend; convert directly unsafe { let _ = yuv_sys::rs_RGB24ToI420( - src_bytes.as_ptr(), + src.as_ref().as_ptr(), (width * 3) as i32, data_y.as_mut_ptr(), stride_y as i32, @@ -319,8 +259,8 @@ async fn main() -> Result<()> { let t2_try = unsafe { // rs_MJPGToI420 returns 0 on success let ret = yuv_sys::rs_MJPGToI420( - src_bytes.as_ptr(), - src_bytes.len(), + src.as_ref().as_ptr(), + src.len(), data_y.as_mut_ptr(), stride_y as i32, data_u.as_mut_ptr(), @@ -338,7 +278,7 @@ async fn main() -> Result<()> { t2_try } else { // Fallback: decode MJPEG using image crate then RGB24->I420 - match image::load_from_memory(src_bytes) { + match image::load_from_memory(src.as_ref()) { Ok(img_dyn) => { let rgb8 = img_dyn.to_rgb8(); let dec_w = rgb8.width() as u32; @@ -369,7 +309,7 @@ async fn main() -> Result<()> { Err(e2) => { if !logged_mjpeg_fallback { log::error!( - "MJPEG decode failed; buffer not RGB24/GRY8 and image decode failed: {}", + "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", e2 ); logged_mjpeg_fallback = true; From 77ef1df67ee4547e4339ff9f38a6c8293fcb2f77 Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 31 Oct 2025 22:38:22 -0700 Subject: [PATCH 19/39] try h265 --- examples/local_video/src/publisher.rs | 48 ++++++++++++++++++++------- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 853de55f4..31171281e 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -56,6 +56,10 @@ struct Args { /// LiveKit API secret #[arg(long)] api_secret: Option, + + /// Use H.265/HEVC encoding if supported (falls back to H.264 on failure) + #[arg(long, default_value_t = false)] + h265: bool, } fn list_cameras() -> Result<()> { @@ -156,19 +160,39 @@ async fn main() -> Result<()> { RtcVideoSource::Native(rtc_source.clone()), ); - room + // Choose requested codec and attempt to publish; if H.265 fails, retry with H.264 + let requested_codec = if args.h265 { VideoCodec::H265 } else { VideoCodec::H264 }; + info!("Attempting publish with codec: {}", requested_codec.as_str()); + + let publish_opts = |codec: VideoCodec| TrackPublishOptions { + source: TrackSource::Camera, + simulcast: false, + video_codec: codec, + ..Default::default() + }; + + let publish_result = room .local_participant() - .publish_track( - LocalTrack::Video(track.clone()), - TrackPublishOptions { - source: TrackSource::Camera, - simulcast: false, - video_codec: VideoCodec::H264, - ..Default::default() - }, - ) - .await?; - info!("Published camera track"); + .publish_track(LocalTrack::Video(track.clone()), publish_opts(requested_codec)) + .await; + + if let Err(e) = publish_result { + if matches!(requested_codec, VideoCodec::H265) { + log::warn!( + "H.265 publish failed ({}). 
Falling back to H.264...", + e + ); + room + .local_participant() + .publish_track(LocalTrack::Video(track.clone()), publish_opts(VideoCodec::H264)) + .await?; + info!("Published camera track with H.264 fallback"); + } else { + return Err(e.into()); + } + } else { + info!("Published camera track"); + } // Reusable I420 buffer and frame let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; From 2aaa175cb82032cfc490bf9af2e52710e0358a09 Mon Sep 17 00:00:00 2001 From: David Chen Date: Fri, 31 Oct 2025 23:14:54 -0700 Subject: [PATCH 20/39] enable h265 on nvidia gpu --- webrtc-sys/build.rs | 1 + webrtc-sys/src/nvidia/h264_encoder_impl.cpp | 5 + webrtc-sys/src/nvidia/h265_encoder_impl.cpp | 381 ++++++++++++++++++ webrtc-sys/src/nvidia/h265_encoder_impl.h | 92 +++++ .../src/nvidia/nvidia_encoder_factory.cpp | 22 +- 5 files changed, 497 insertions(+), 4 deletions(-) create mode 100644 webrtc-sys/src/nvidia/h265_encoder_impl.cpp create mode 100644 webrtc-sys/src/nvidia/h265_encoder_impl.h diff --git a/webrtc-sys/build.rs b/webrtc-sys/build.rs index d4c2e70f9..72007e1ba 100644 --- a/webrtc-sys/build.rs +++ b/webrtc-sys/build.rs @@ -191,6 +191,7 @@ fn main() { .file("src/nvidia/NvCodec/NvCodec/NvEncoder/NvEncoder.cpp") .file("src/nvidia/NvCodec/NvCodec/NvEncoder/NvEncoderCuda.cpp") .file("src/nvidia/h264_encoder_impl.cpp") + .file("src/nvidia/h265_encoder_impl.cpp") .file("src/nvidia/h264_decoder_impl.cpp") .file("src/nvidia/nvidia_decoder_factory.cpp") .file("src/nvidia/nvidia_encoder_factory.cpp") diff --git a/webrtc-sys/src/nvidia/h264_encoder_impl.cpp b/webrtc-sys/src/nvidia/h264_encoder_impl.cpp index dade3f0d3..f6cc62b24 100644 --- a/webrtc-sys/src/nvidia/h264_encoder_impl.cpp +++ b/webrtc-sys/src/nvidia/h264_encoder_impl.cpp @@ -238,6 +238,11 @@ int32_t NvidiaH264EncoderImpl::InitEncode( return WEBRTC_VIDEO_CODEC_ERROR; } + RTC_LOG(LS_INFO) << "NVIDIA H264 NVENC initialized: " + << codec_.width << "x" << codec_.height + << " @ " << codec_.maxFramerate << "fps, target_bps=" + << configuration_.target_bps; + SimulcastRateAllocator init_allocator(env_, codec_); VideoBitrateAllocation allocation = init_allocator.Allocate(VideoBitrateAllocationParameters( diff --git a/webrtc-sys/src/nvidia/h265_encoder_impl.cpp b/webrtc-sys/src/nvidia/h265_encoder_impl.cpp new file mode 100644 index 000000000..8a33076d3 --- /dev/null +++ b/webrtc-sys/src/nvidia/h265_encoder_impl.cpp @@ -0,0 +1,381 @@ +#include "h265_encoder_impl.h" + +#include +#include +#include + +#include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/utility/simulcast_utility.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/metrics.h" +#include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/scale.h" + +namespace webrtc { + +// Used by histograms. Values of entries should not be changed. 
+enum H265EncoderImplEvent { + kH265EncoderEventInit = 0, + kH265EncoderEventError = 1, + kH265EncoderEventMax = 16, +}; + +NvidiaH265EncoderImpl::NvidiaH265EncoderImpl( + const webrtc::Environment& env, + CUcontext context, + CUmemorytype memory_type, + NV_ENC_BUFFER_FORMAT nv_format, + const SdpVideoFormat& format) + : env_(env), + encoder_(nullptr), + cu_context_(context), + cu_memory_type_(memory_type), + cu_scaled_array_(nullptr), + nv_format_(nv_format), + format_(format) { + RTC_CHECK_NE(cu_memory_type_, CU_MEMORYTYPE_HOST); +} + +NvidiaH265EncoderImpl::~NvidiaH265EncoderImpl() { + Release(); +} + +void NvidiaH265EncoderImpl::ReportInit() { + if (has_reported_init_) + return; + RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H265EncoderImpl.Event", + kH265EncoderEventInit, kH265EncoderEventMax); + has_reported_init_ = true; +} + +void NvidiaH265EncoderImpl::ReportError() { + if (has_reported_error_) + return; + RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H265EncoderImpl.Event", + kH265EncoderEventError, kH265EncoderEventMax); + has_reported_error_ = true; +} + +int32_t NvidiaH265EncoderImpl::InitEncode( + const VideoCodec* inst, + const VideoEncoder::Settings& settings) { + if (!inst || inst->codecType != kVideoCodecH265) { + ReportError(); + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (inst->maxFramerate == 0) { + ReportError(); + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + if (inst->width < 1 || inst->height < 1) { + ReportError(); + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + + int32_t release_ret = Release(); + if (release_ret != WEBRTC_VIDEO_CODEC_OK) { + ReportError(); + return release_ret; + } + + codec_ = *inst; + + if (codec_.numberOfSimulcastStreams == 0) { + codec_.simulcastStream[0].width = codec_.width; + codec_.simulcastStream[0].height = codec_.height; + } + + const size_t new_capacity = + CalcBufferSize(VideoType::kI420, codec_.width, codec_.height); + encoded_image_.SetEncodedData(EncodedImageBuffer::Create(new_capacity)); + encoded_image_._encodedWidth = codec_.width; + encoded_image_._encodedHeight = codec_.height; + encoded_image_.set_size(0); + + configuration_.sending = false; + configuration_.frame_dropping_on = codec_.GetFrameDropEnabled(); + configuration_.key_frame_interval = 0; + + configuration_.width = codec_.width; + configuration_.height = codec_.height; + + configuration_.max_frame_rate = codec_.maxFramerate; + configuration_.target_bps = codec_.startBitrate * 1000; + configuration_.max_bps = codec_.maxBitrate * 1000; + + const CUresult result = cuCtxSetCurrent(cu_context_); + if (result != CUDA_SUCCESS) { + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } + + try { + if (cu_memory_type_ == CU_MEMORYTYPE_DEVICE) { + encoder_ = std::make_unique(cu_context_, codec_.width, + codec_.height, nv_format_, 0); + } else { + RTC_DCHECK_NOTREACHED(); + } + } catch (const NVENCException& e) { + RTC_LOG(LS_ERROR) << "Failed Initialize NvEncoder " << e.what(); + return WEBRTC_VIDEO_CODEC_ERROR; + } + + nv_initialize_params_.version = NV_ENC_INITIALIZE_PARAMS_VER; + nv_encode_config_.version = NV_ENC_CONFIG_VER; + nv_initialize_params_.encodeConfig = &nv_encode_config_; + + GUID encodeGuid = NV_ENC_CODEC_HEVC_GUID; + GUID presetGuid = NV_ENC_PRESET_P4_GUID; + + encoder_->CreateDefaultEncoderParams(&nv_initialize_params_, encodeGuid, + presetGuid, + NV_ENC_TUNING_INFO_ULTRA_LOW_LATENCY); + + nv_initialize_params_.frameRateNum = + static_cast(configuration_.max_frame_rate); + nv_initialize_params_.frameRateDen = 1; + nv_initialize_params_.bufferFormat = nv_format_; + + 
nv_encode_config_.profileGUID = NV_ENC_HEVC_PROFILE_MAIN_GUID; + nv_encode_config_.gopLength = NVENC_INFINITE_GOPLENGTH; + nv_encode_config_.frameIntervalP = 1; + nv_encode_config_.rcParams.version = NV_ENC_RC_PARAMS_VER; + nv_encode_config_.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR; + nv_encode_config_.rcParams.averageBitRate = configuration_.target_bps; + nv_encode_config_.rcParams.vbvBufferSize = + (nv_encode_config_.rcParams.averageBitRate * + nv_initialize_params_.frameRateDen / + nv_initialize_params_.frameRateNum) * + 5; + nv_encode_config_.rcParams.vbvInitialDelay = + nv_encode_config_.rcParams.vbvBufferSize; + + try { + encoder_->CreateEncoder(&nv_initialize_params_); + } catch (const NVENCException& e) { + RTC_LOG(LS_ERROR) << "Failed Initialize NvEncoder " << e.what(); + return WEBRTC_VIDEO_CODEC_ERROR; + } + + RTC_LOG(LS_INFO) << "NVIDIA H265/HEVC NVENC initialized: " + << codec_.width << "x" << codec_.height + << " @ " << codec_.maxFramerate << "fps, target_bps=" + << configuration_.target_bps; + + SimulcastRateAllocator init_allocator(env_, codec_); + VideoBitrateAllocation allocation = + init_allocator.Allocate(VideoBitrateAllocationParameters( + DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate)); + SetRates(RateControlParameters(allocation, codec_.maxFramerate)); + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265EncoderImpl::RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) { + encoded_image_callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265EncoderImpl::Release() { + if (encoder_) { + encoder_->DestroyEncoder(); + encoder_ = nullptr; + } + if (cu_scaled_array_) { + cuArrayDestroy(cu_scaled_array_); + cu_scaled_array_ = nullptr; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265EncoderImpl::Encode( + const VideoFrame& input_frame, + const std::vector* frame_types) { + if (!encoder_) { + ReportError(); + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (!encoded_image_callback_) { + RTC_LOG(LS_WARNING) + << "InitEncode() has been called, but a callback function " + "has not been set with RegisterEncodeCompleteCallback()"; + ReportError(); + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + + webrtc::scoped_refptr frame_buffer = + input_frame.video_frame_buffer()->ToI420(); + if (!frame_buffer) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString( + input_frame.video_frame_buffer()->type()) + << " image to I420. 
Can't encode frame."; + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } + RTC_CHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420); + + bool is_keyframe_needed = false; + if (configuration_.key_frame_request && configuration_.sending) { + is_keyframe_needed = true; + } + + bool send_key_frame = + is_keyframe_needed || + (frame_types && (*frame_types)[0] == VideoFrameType::kVideoFrameKey); + if (send_key_frame) { + is_keyframe_needed = true; + configuration_.key_frame_request = false; + } + + RTC_DCHECK_EQ(configuration_.width, frame_buffer->width()); + RTC_DCHECK_EQ(configuration_.height, frame_buffer->height()); + + if (!configuration_.sending) { + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + + if (frame_types != nullptr) { + if ((*frame_types)[0] == VideoFrameType::kEmptyFrame) { + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + } + + try { + const NvEncInputFrame* nv_enc_input_frame = encoder_->GetNextInputFrame(); + + if (cu_memory_type_ == CU_MEMORYTYPE_DEVICE) { + NvEncoderCuda::CopyToDeviceFrame( + cu_context_, (void*)frame_buffer->DataY(), frame_buffer->StrideY(), + reinterpret_cast(nv_enc_input_frame->inputPtr), + nv_enc_input_frame->pitch, input_frame.width(), input_frame.height(), + CU_MEMORYTYPE_HOST, nv_enc_input_frame->bufferFormat, + nv_enc_input_frame->chromaOffsets, nv_enc_input_frame->numChromaPlanes); + } + + NV_ENC_PIC_PARAMS pic_params = NV_ENC_PIC_PARAMS(); + pic_params.version = NV_ENC_PIC_PARAMS_VER; + pic_params.encodePicFlags = 0; + if (is_keyframe_needed) { + pic_params.encodePicFlags = NV_ENC_PIC_FLAG_FORCEINTRA | + NV_ENC_PIC_FLAG_FORCEIDR | + NV_ENC_PIC_FLAG_OUTPUT_SPSPPS; + configuration_.key_frame_request = false; + } + + current_encoding_is_keyframe_ = is_keyframe_needed; + + std::vector> bit_stream; + encoder_->EncodeFrame(bit_stream, &pic_params); + + for (std::vector& packet : bit_stream) { + int32_t result = ProcessEncodedFrame(packet, input_frame); + if (result != WEBRTC_VIDEO_CODEC_OK) { + return result; + } + } + current_encoding_is_keyframe_ = false; + } catch (const NVENCException& e) { + RTC_LOG(LS_ERROR) << "Failed EncodeFrame NvEncoder " << e.what(); + return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265EncoderImpl::ProcessEncodedFrame( + std::vector& packet, + const ::webrtc::VideoFrame& inputFrame) { + encoded_image_._encodedWidth = encoder_->GetEncodeWidth(); + encoded_image_._encodedHeight = encoder_->GetEncodeHeight(); + encoded_image_.SetRtpTimestamp(inputFrame.rtp_timestamp()); + encoded_image_.SetSimulcastIndex(0); + encoded_image_.ntp_time_ms_ = inputFrame.ntp_time_ms(); + encoded_image_.capture_time_ms_ = inputFrame.render_time_ms(); + encoded_image_.rotation_ = inputFrame.rotation(); + encoded_image_.content_type_ = VideoContentType::UNSPECIFIED; + encoded_image_.timing_.flags = VideoSendTiming::kInvalid; + encoded_image_._frameType = + current_encoding_is_keyframe_ ? 
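/* Editorial note: this is true exactly for the frames that Encode() forced
   with NV_ENC_PIC_FLAG_FORCEIDR | NV_ENC_PIC_FLAG_OUTPUT_SPSPPS above;
   emitting the HEVC parameter sets (VPS/SPS/PPS) alongside every forced IDR
   lets a subscriber that joins mid-stream begin decoding at the next
   keyframe instead of stalling on missing headers. */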
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta; + encoded_image_.SetColorSpace(inputFrame.color_space()); + + encoded_image_.SetEncodedData( + EncodedImageBuffer::Create(packet.data(), packet.size())); + encoded_image_.set_size(packet.size()); + + encoded_image_.qp_ = -1; + + CodecSpecificInfo codecInfo; + codecInfo.codecType = kVideoCodecH265; + + const auto result = + encoded_image_callback_->OnEncodedImage(encoded_image_, &codecInfo); + if (result.error != EncodedImageCallback::Result::OK) { + RTC_LOG(LS_ERROR) << "Encode m_encodedCompleteCallback failed " + << result.error; + return WEBRTC_VIDEO_CODEC_ERROR; + } + return WEBRTC_VIDEO_CODEC_OK; +} + +VideoEncoder::EncoderInfo NvidiaH265EncoderImpl::GetEncoderInfo() const { + EncoderInfo info; + info.supports_native_handle = false; + info.implementation_name = "NVIDIA H265 Encoder"; + info.scaling_settings = VideoEncoder::ScalingSettings::kOff; + info.is_hardware_accelerated = true; + info.supports_simulcast = false; + info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420}; + return info; +} + +void NvidiaH265EncoderImpl::SetRates( + const RateControlParameters& parameters) { + if (!encoder_) { + RTC_LOG(LS_WARNING) << "SetRates() while uninitialized."; + return; + } + + if (parameters.framerate_fps < 1.0) { + RTC_LOG(LS_WARNING) << "Invalid frame rate: " << parameters.framerate_fps; + return; + } + + if (parameters.bitrate.get_sum_bps() == 0) { + configuration_.SetStreamState(false); + return; + } + + codec_.maxFramerate = static_cast(parameters.framerate_fps); + codec_.maxBitrate = parameters.bitrate.GetSpatialLayerSum(0); + + configuration_.target_bps = parameters.bitrate.GetSpatialLayerSum(0); + configuration_.max_frame_rate = parameters.framerate_fps; + + if (configuration_.target_bps) { + configuration_.SetStreamState(true); + } else { + configuration_.SetStreamState(false); + } +} + +void NvidiaH265EncoderImpl::LayerConfig::SetStreamState(bool send_stream) { + if (send_stream && !sending) { + key_frame_request = true; + } + sending = send_stream; +} + +} // namespace webrtc + + diff --git a/webrtc-sys/src/nvidia/h265_encoder_impl.h b/webrtc-sys/src/nvidia/h265_encoder_impl.h new file mode 100644 index 000000000..5a660dc9f --- /dev/null +++ b/webrtc-sys/src/nvidia/h265_encoder_impl.h @@ -0,0 +1,92 @@ +#ifndef WEBRTC_NVIDIA_H265_ENCODER_IMPL_H_ +#define WEBRTC_NVIDIA_H265_ENCODER_IMPL_H_ + +#include + +#include +#include + +#include "NvEncoder/NvEncoder.h" +#include "NvEncoder/NvEncoderCuda.h" + +#include "absl/container/inlined_vector.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_encoder.h" + +namespace webrtc { + +class NvidiaH265EncoderImpl : public VideoEncoder { + public: + struct LayerConfig { + int simulcast_idx = 0; + int width = -1; + int height = -1; + bool sending = true; + bool key_frame_request = false; + float max_frame_rate = 0; + uint32_t target_bps = 0; + uint32_t max_bps = 0; + bool frame_dropping_on = false; + int key_frame_interval = 0; + int num_temporal_layers = 1; + + void SetStreamState(bool send_stream); + }; + + public: + NvidiaH265EncoderImpl(const webrtc::Environment& env, + CUcontext context, + CUmemorytype memory_type, + NV_ENC_BUFFER_FORMAT nv_format, + const SdpVideoFormat& format); + ~NvidiaH265EncoderImpl() override; + + int32_t InitEncode(const VideoCodec* codec_settings, + const Settings& 
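/* Editorial note (hedged): webrtc::VideoEncoder::Settings carries the
   capabilities, core count, and max payload size negotiated by the stack;
   this parameter is accepted for interface compatibility, while the
   InitEncode body in the .cpp configures NVENC from the VideoCodec
   settings alone and never reads it. */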
settings) override; + + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) override; + + int32_t Release() override; + + int32_t Encode(const VideoFrame& frame, + const std::vector* frame_types) override; + + void SetRates(const RateControlParameters& rc_parameters) override; + + EncoderInfo GetEncoderInfo() const override; + + private: + int32_t ProcessEncodedFrame(std::vector& packet, + const ::webrtc::VideoFrame& inputFrame); + private: + const webrtc::Environment& env_; + EncodedImageCallback* encoded_image_callback_ = nullptr; + + std::unique_ptr encoder_; + CUcontext cu_context_; + CUmemorytype cu_memory_type_; + CUarray cu_scaled_array_; + NV_ENC_BUFFER_FORMAT nv_format_; + NV_ENC_INITIALIZE_PARAMS nv_initialize_params_; + NV_ENC_CONFIG nv_encode_config_; + + LayerConfig configuration_; + EncodedImage encoded_image_; + VideoCodec codec_; + void ReportInit(); + void ReportError(); + bool has_reported_init_ = false; + bool has_reported_error_ = false; + const SdpVideoFormat format_; + bool current_encoding_is_keyframe_ = false; +}; + +} // namespace webrtc + +#endif // WEBRTC_NVIDIA_H265_ENCODER_IMPL_H_ + + diff --git a/webrtc-sys/src/nvidia/nvidia_encoder_factory.cpp b/webrtc-sys/src/nvidia/nvidia_encoder_factory.cpp index a30c96f9f..e921859f2 100644 --- a/webrtc-sys/src/nvidia/nvidia_encoder_factory.cpp +++ b/webrtc-sys/src/nvidia/nvidia_encoder_factory.cpp @@ -4,6 +4,7 @@ #include "cuda_context.h" #include "h264_encoder_impl.h" +#include "h265_encoder_impl.h" #include "rtc_base/logging.h" namespace webrtc { @@ -16,6 +17,9 @@ NvidiaVideoEncoderFactory::NvidiaVideoEncoderFactory() { }; supported_formats_.push_back(SdpVideoFormat("H264", baselineParameters)); + // Advertise HEVC/H265 with default parameters. + supported_formats_.push_back(SdpVideoFormat("H265")); + /*std::map highParameters = { {"profile-level-id", "4d0032"}, {"level-asymmetry-allowed", "1"}, @@ -44,7 +48,6 @@ std::unique_ptr NvidiaVideoEncoderFactory::Create( // Check if the requested format is supported. for (const auto& supported_format : supported_formats_) { if (format.IsSameCodec(supported_format)) { - // If the format is supported, create and return the encoder. 
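// Editorial note: the shared CUDA context is created lazily on the first
// Create() call via CudaContext::GetInstance() below, so merely
// constructing the factory on a machine without a usable NVIDIA driver is
// harmless; if initialization fails, Create() returns nullptr and the
// caller can fall back to another encoder factory.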
       if (!cu_context_) {
         cu_context_ = livekit::CudaContext::GetInstance();
         if (!cu_context_->Initialize()) {
@@ -52,9 +55,20 @@ std::unique_ptr<VideoEncoder> NvidiaVideoEncoderFactory::Create(
           return nullptr;
         }
       }
-      return std::make_unique<NvidiaH264EncoderImpl>(
-          env, cu_context_->GetContext(), CU_MEMORYTYPE_DEVICE,
-          NV_ENC_BUFFER_FORMAT_IYUV, format);
+
+      if (format.name == "H264") {
+        RTC_LOG(LS_INFO) << "Using NVIDIA HW encoder (NVENC) for H264";
+        return std::make_unique<NvidiaH264EncoderImpl>(
+            env, cu_context_->GetContext(), CU_MEMORYTYPE_DEVICE,
+            NV_ENC_BUFFER_FORMAT_IYUV, format);
+      }
+
+      if (format.name == "H265" || format.name == "HEVC") {
+        RTC_LOG(LS_INFO) << "Using NVIDIA HW encoder (NVENC) for H265/HEVC";
+        return std::make_unique<NvidiaH265EncoderImpl>(
+            env, cu_context_->GetContext(), CU_MEMORYTYPE_DEVICE,
+            NV_ENC_BUFFER_FORMAT_IYUV, format);
+      }
     }
   }
   return nullptr;
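Once the native factory can hand out an HEVC encoder, the Rust side selects it through publish options; the publisher example later in this series wires it up exactly this way. A sketch of requesting H265 at publish time, using the fields that appear in `examples/local_video/src/publisher.rs` (the bitrate and framerate values here are illustrative):

```rust
use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding};
use livekit::prelude::*;

// Sketch: publish options selecting the NVENC-backed H265 path.
// max_bitrate is in bps; both numbers below are placeholders.
fn hevc_publish_opts() -> TrackPublishOptions {
    TrackPublishOptions {
        source: TrackSource::Camera,
        // Simulcast with H265 ends up disabled later in this series.
        simulcast: false,
        video_codec: VideoCodec::H265,
        video_encoding: Some(VideoEncoding {
            max_bitrate: 3_000_000,
            max_framerate: 30.0,
        }),
        ..Default::default()
    }
}
```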
+ supported_formats_.push_back(SdpVideoFormat("HEVC")); /*std::map highParameters = { {"profile-level-id", "4d0032"}, From 8066f373cd7ee4f8051097c3d46e75877608db61 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 1 Nov 2025 01:05:49 -0700 Subject: [PATCH 23/39] improve subscriber example render performance --- Cargo.lock | 1 + examples/local_video/Cargo.toml | 1 + examples/local_video/src/subscriber.rs | 435 ++++++++++++++++++++--- examples/local_video/src/yuv_shader.wgsl | 63 ++++ 4 files changed, 449 insertions(+), 51 deletions(-) create mode 100644 examples/local_video/src/yuv_shader.wgsl diff --git a/Cargo.lock b/Cargo.lock index d8c7a80aa..172b738b2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3238,6 +3238,7 @@ name = "local_video" version = "0.1.0" dependencies = [ "anyhow", + "bytemuck", "clap", "eframe", "egui", diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml index 7ded6dc99..8d01c1086 100644 --- a/examples/local_video/Cargo.toml +++ b/examples/local_video/Cargo.toml @@ -32,6 +32,7 @@ wgpu = "25.0" winit = { version = "0.30.11", features = ["android-native-activity"] } parking_lot = { version = "0.12.1", features = ["deadlock_detection"] } anyhow = "1" +bytemuck = { version = "1.16", features = ["derive"] } [target.'cfg(target_os = "macos")'.dependencies] objc2 = { version = "0.6.0", features = ["relax-sign-encoding"] } diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 14484d212..f83f554bf 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -1,9 +1,11 @@ use anyhow::Result; use clap::Parser; use eframe::egui; +use egui_wgpu as egui_wgpu_backend; +use egui_wgpu_backend::CallbackTrait; +use eframe::wgpu::{self, util::DeviceExt}; use futures::StreamExt; use livekit::prelude::*; -use libwebrtc::prelude::VideoBuffer; use livekit::webrtc::video_stream::native::NativeVideoStream; use livekit_api::access_token; use log::{debug, info}; @@ -34,50 +36,37 @@ struct Args { api_secret: Option, } -struct SharedFrame { +struct SharedYuv { width: u32, height: u32, - rgba: Vec, + stride_y: u32, + stride_u: u32, + stride_v: u32, + y: Vec, + u: Vec, + v: Vec, dirty: bool, } struct VideoApp { - shared: Arc>, - texture: Option, + shared: Arc>, } impl eframe::App for VideoApp { fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) { egui::CentralPanel::default().show(ctx, |ui| { - let mut shared = self.shared.lock(); - if shared.dirty { - let size = [shared.width as usize, shared.height as usize]; - let image = egui::ColorImage::from_rgba_unmultiplied(size, &shared.rgba); - match &mut self.texture { - Some(tex) => { - tex.set(image, egui::TextureOptions::LINEAR) - } - None => { - debug!("Creating texture for remote video: {}x{}", shared.width, shared.height); - self.texture = Some(ui.ctx().load_texture( - "remote-video", - image, - egui::TextureOptions::LINEAR, - )); - } - } - shared.dirty = false; - } + let available = ui.available_size(); + let rect = egui::Rect::from_min_size(ui.min_rect().min, available); - if let Some(tex) = &self.texture { - let tex_size = tex.size_vec2(); - let available = ui.available_size(); - let scale = (available.x / tex_size.x).min(available.y / tex_size.y); - let desired = tex_size * scale; - ui.image((tex.id(), desired)); - } else { - ui.heading("Waiting for video..."); - } + // Ensure we keep repainting for smooth playback + ui.ctx().request_repaint(); + + // Add a custom wgpu paint callback that renders I420 directly + let cb 
= egui_wgpu_backend::Callback::new_paint_callback( + rect, + YuvPaintCallback { shared: self.shared.clone() }, + ); + ui.painter().add(cb); }); ctx.request_repaint_after(Duration::from_millis(16)); @@ -121,8 +110,18 @@ async fn main() -> Result<()> { let room = Arc::new(room); info!("Connected: {} - {}", room.name(), room.sid().await); - // Shared frame buffer for UI - let shared = Arc::new(Mutex::new(SharedFrame { width: 0, height: 0, rgba: Vec::new(), dirty: false })); + // Shared YUV buffer for UI/GPU + let shared = Arc::new(Mutex::new(SharedYuv { + width: 0, + height: 0, + stride_y: 0, + stride_u: 0, + stride_v: 0, + y: Vec::new(), + u: Vec::new(), + v: Vec::new(), + dirty: false, + })); // Subscribe to room events: on first video track, start sink task let shared_clone = shared.clone(); @@ -142,31 +141,50 @@ async fn main() -> Result<()> { let mut frames: u64 = 0; let mut last_log = Instant::now(); let mut logged_first = false; + // YUV buffers reused to avoid per-frame allocations + let mut y_buf: Vec = Vec::new(); + let mut u_buf: Vec = Vec::new(); + let mut v_buf: Vec = Vec::new(); while let Some(frame) = rt.block_on(sink.next()) { - let buffer = frame.buffer.to_i420(); - let w = buffer.width(); - let h = buffer.height(); - - let (sy, su, sv) = buffer.strides(); - let (dy, du, dv) = buffer.data(); + let w = frame.buffer.width(); + let h = frame.buffer.height(); if !logged_first { debug!( - "First frame I420: {}x{}, strides Y/U/V = {}/{}/{}", - w, h, sy, su, sv + "First frame: {}x{}, type {:?}", + w, h, frame.buffer.buffer_type() ); logged_first = true; } - let mut rgba = vec![0u8; (w * h * 4) as usize]; - libwebrtc::native::yuv_helper::i420_to_rgba( - dy, sy, du, su, dv, sv, &mut rgba, w * 4, w as i32, h as i32, - ); + // Convert to I420 on CPU, but keep planes separate for GPU sampling + let i420 = frame.buffer.to_i420(); + let (sy, su, sv) = i420.strides(); + let (dy, du, dv) = i420.data(); + + let ch = (h + 1) / 2; + + // Ensure capacity and copy full plane slices + let y_size = (sy * h) as usize; + let u_size = (su * ch) as usize; + let v_size = (sv * ch) as usize; + if y_buf.len() != y_size { y_buf.resize(y_size, 0); } + if u_buf.len() != u_size { u_buf.resize(u_size, 0); } + if v_buf.len() != v_size { v_buf.resize(v_size, 0); } + y_buf.copy_from_slice(dy); + u_buf.copy_from_slice(du); + v_buf.copy_from_slice(dv); + // Swap buffers into shared state let mut s = shared2.lock(); - s.width = w; - s.height = h; - s.rgba = rgba; + s.width = w as u32; + s.height = h as u32; + s.stride_y = sy as u32; + s.stride_u = su as u32; + s.stride_v = sv as u32; + std::mem::swap(&mut s.y, &mut y_buf); + std::mem::swap(&mut s.u, &mut u_buf); + std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; frames += 1; @@ -187,7 +205,7 @@ async fn main() -> Result<()> { }); // Start UI - let app = VideoApp { shared, texture: None }; + let app = VideoApp { shared }; let native_options = eframe::NativeOptions::default(); eframe::run_native("LiveKit Video Subscriber", native_options, Box::new(|_| Ok::, _>(Box::new(app))))?; @@ -195,3 +213,318 @@ async fn main() -> Result<()> { } +// ===== WGPU I420 renderer ===== + +struct YuvPaintCallback { + shared: Arc>, +} + +struct YuvGpuState { + pipeline: wgpu::RenderPipeline, + sampler: wgpu::Sampler, + bind_layout: wgpu::BindGroupLayout, + y_tex: wgpu::Texture, + u_tex: wgpu::Texture, + v_tex: wgpu::Texture, + y_view: wgpu::TextureView, + u_view: wgpu::TextureView, + v_view: wgpu::TextureView, + bind_group: wgpu::BindGroup, + params_buf: wgpu::Buffer, + y_pad_w: 
u32, + uv_pad_w: u32, + dims: (u32, u32), +} + +impl YuvGpuState { + fn create_textures(device: &wgpu::Device, _width: u32, height: u32, y_pad_w: u32, uv_pad_w: u32) -> (wgpu::Texture, wgpu::Texture, wgpu::Texture, wgpu::TextureView, wgpu::TextureView, wgpu::TextureView) { + let y_size = wgpu::Extent3d { width: y_pad_w, height, depth_or_array_layers: 1 }; + let uv_size = wgpu::Extent3d { width: uv_pad_w, height: (height + 1) / 2, depth_or_array_layers: 1 }; + let usage = wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING; + let desc = |size: wgpu::Extent3d| wgpu::TextureDescriptor { + label: Some("yuv_plane"), + size, + mip_level_count: 1, + sample_count: 1, + dimension: wgpu::TextureDimension::D2, + format: wgpu::TextureFormat::R8Unorm, + usage, + view_formats: &[], + }; + let y_tex = device.create_texture(&desc(y_size)); + let u_tex = device.create_texture(&desc(uv_size)); + let v_tex = device.create_texture(&desc(uv_size)); + let y_view = y_tex.create_view(&wgpu::TextureViewDescriptor::default()); + let u_view = u_tex.create_view(&wgpu::TextureViewDescriptor::default()); + let v_view = v_tex.create_view(&wgpu::TextureViewDescriptor::default()); + (y_tex, u_tex, v_tex, y_view, u_view, v_view) + } +} + +fn align_up(value: u32, alignment: u32) -> u32 { + ((value + alignment - 1) / alignment) * alignment +} + +#[repr(C)] +#[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] +struct ParamsUniform { + src_w: u32, + src_h: u32, + y_tex_w: u32, + uv_tex_w: u32, +} + +impl CallbackTrait for YuvPaintCallback { + fn prepare(&self, device: &wgpu::Device, queue: &wgpu::Queue, _screen_desc: &egui_wgpu_backend::ScreenDescriptor, _encoder: &mut wgpu::CommandEncoder, resources: &mut egui_wgpu_backend::CallbackResources) -> Vec { + // Initialize or update GPU state lazily based on current frame + let mut shared = self.shared.lock(); + + // Nothing to draw yet + if shared.width == 0 || shared.height == 0 { + return Vec::new(); + } + + // Fetch or create our GPU state + if resources.get::().is_none() { + // Build pipeline and initial small textures; will be recreated on first upload + let shader_src = include_str!("yuv_shader.wgsl"); + let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { + label: Some("yuv_shader"), + source: wgpu::ShaderSource::Wgsl(shader_src.into()), + }); + + let bind_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { + label: Some("yuv_bind_layout"), + entries: &[ + wgpu::BindGroupLayoutEntry { + binding: 0, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering), + count: None, + }, + wgpu::BindGroupLayoutEntry { + binding: 1, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: true }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }, + wgpu::BindGroupLayoutEntry { binding: 2, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Texture { sample_type: wgpu::TextureSampleType::Float { filterable: true }, view_dimension: wgpu::TextureViewDimension::D2, multisampled: false }, count: None }, + wgpu::BindGroupLayoutEntry { binding: 3, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Texture { sample_type: wgpu::TextureSampleType::Float { filterable: true }, view_dimension: wgpu::TextureViewDimension::D2, multisampled: false }, count: None }, + wgpu::BindGroupLayoutEntry { + binding: 4, + visibility: 
wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Uniform, + has_dynamic_offset: false, + min_binding_size: Some(std::num::NonZeroU64::new(std::mem::size_of::() as u64).unwrap()), + }, + count: None, + }, + ], + }); + + let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { + label: Some("yuv_pipeline_layout"), + bind_group_layouts: &[&bind_layout], + push_constant_ranges: &[], + }); + + let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor { + label: Some("yuv_pipeline"), + layout: Some(&pipeline_layout), + vertex: wgpu::VertexState { module: &shader, entry_point: Some("vs_main"), buffers: &[], compilation_options: wgpu::PipelineCompilationOptions::default() }, + fragment: Some(wgpu::FragmentState { + module: &shader, + entry_point: Some("fs_main"), + targets: &[Some(wgpu::ColorTargetState { + format: wgpu::TextureFormat::Bgra8Unorm, + blend: Some(wgpu::BlendState::ALPHA_BLENDING), + write_mask: wgpu::ColorWrites::ALL, + })], + compilation_options: wgpu::PipelineCompilationOptions::default(), + }), + primitive: wgpu::PrimitiveState { topology: wgpu::PrimitiveTopology::TriangleList, strip_index_format: None, front_face: wgpu::FrontFace::Ccw, cull_mode: None, unclipped_depth: false, polygon_mode: wgpu::PolygonMode::Fill, conservative: false }, + depth_stencil: None, + multisample: wgpu::MultisampleState { count: 1, mask: !0, alpha_to_coverage_enabled: false }, + multiview: None, + cache: None, + }); + + let sampler = device.create_sampler(&wgpu::SamplerDescriptor { + label: Some("yuv_sampler"), + address_mode_u: wgpu::AddressMode::ClampToEdge, + address_mode_v: wgpu::AddressMode::ClampToEdge, + address_mode_w: wgpu::AddressMode::ClampToEdge, + mag_filter: wgpu::FilterMode::Linear, + min_filter: wgpu::FilterMode::Linear, + mipmap_filter: wgpu::FilterMode::Nearest, + ..Default::default() + }); + + let params_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { + label: Some("yuv_params"), + contents: bytemuck::bytes_of(&ParamsUniform { src_w: 1, src_h: 1, y_tex_w: 1, uv_tex_w: 1 }), + usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, + }); + + // Initial tiny textures + let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = YuvGpuState::create_textures(device, 1, 1, 256, 256); + let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("yuv_bind_group"), + layout: &bind_layout, + entries: &[ + wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::Sampler(&sampler) }, + wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::TextureView(&y_view) }, + wgpu::BindGroupEntry { binding: 2, resource: wgpu::BindingResource::TextureView(&u_view) }, + wgpu::BindGroupEntry { binding: 3, resource: wgpu::BindingResource::TextureView(&v_view) }, + wgpu::BindGroupEntry { binding: 4, resource: params_buf.as_entire_binding() }, + ], + }); + + let new_state = YuvGpuState { + pipeline: render_pipeline, + sampler, + bind_layout, + y_tex, + u_tex, + v_tex, + y_view, + u_view, + v_view, + bind_group, + params_buf, + y_pad_w: 256, + uv_pad_w: 256, + dims: (0, 0), + }; + resources.insert(new_state); + } + let state = resources.get_mut::().unwrap(); + + // Upload planes when marked dirty + // Recreate textures/bind group on size change + if state.dims != (shared.width, shared.height) { + let y_pad_w = align_up(shared.width, 256); + let uv_w = (shared.width + 1) / 2; + let uv_pad_w = align_up(uv_w, 256); + let (y_tex, u_tex, v_tex, 
y_view, u_view, v_view) = YuvGpuState::create_textures(device, shared.width, shared.height, y_pad_w, uv_pad_w); + let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("yuv_bind_group"), + layout: &state.bind_layout, + entries: &[ + wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::Sampler(&state.sampler) }, + wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::TextureView(&y_view) }, + wgpu::BindGroupEntry { binding: 2, resource: wgpu::BindingResource::TextureView(&u_view) }, + wgpu::BindGroupEntry { binding: 3, resource: wgpu::BindingResource::TextureView(&v_view) }, + wgpu::BindGroupEntry { binding: 4, resource: state.params_buf.as_entire_binding() }, + ], + }); + state.y_tex = y_tex; + state.u_tex = u_tex; + state.v_tex = v_tex; + state.y_view = y_view; + state.u_view = u_view; + state.v_view = v_view; + state.bind_group = bind_group; + state.y_pad_w = y_pad_w; + state.uv_pad_w = uv_pad_w; + state.dims = (shared.width, shared.height); + } + + if shared.dirty { + let y_bytes_per_row = align_up(shared.width, 256); + let uv_w = (shared.width + 1) / 2; + let uv_h = (shared.height + 1) / 2; + let uv_bytes_per_row = align_up(uv_w, 256); + + // Pack and upload Y + if shared.stride_y >= shared.width { + let mut packed = vec![0u8; (y_bytes_per_row * shared.height) as usize]; + for row in 0..shared.height { + let src = &shared.y[(row * shared.stride_y) as usize..][..shared.width as usize]; + let dst_off = (row * y_bytes_per_row) as usize; + packed[dst_off..dst_off + shared.width as usize].copy_from_slice(src); + } + queue.write_texture( + wgpu::ImageCopyTexture { + texture: &state.y_tex, + mip_level: 0, + origin: wgpu::Origin3d::ZERO, + aspect: wgpu::TextureAspect::All, + }, + &packed, + wgpu::ImageDataLayout { + offset: 0, + bytes_per_row: Some(y_bytes_per_row), + rows_per_image: Some(shared.height), + }, + wgpu::Extent3d { width: state.y_pad_w, height: shared.height, depth_or_array_layers: 1 }, + ); + } + + // Pack and upload U,V + if shared.stride_u >= uv_w && shared.stride_v >= uv_w { + let mut packed_u = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; + let mut packed_v = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; + for row in 0..uv_h { + let src_u = &shared.u[(row * shared.stride_u) as usize..][..uv_w as usize]; + let src_v = &shared.v[(row * shared.stride_v) as usize..][..uv_w as usize]; + let dst_off = (row * uv_bytes_per_row) as usize; + packed_u[dst_off..dst_off + uv_w as usize].copy_from_slice(src_u); + packed_v[dst_off..dst_off + uv_w as usize].copy_from_slice(src_v); + } + queue.write_texture( + wgpu::ImageCopyTexture { texture: &state.u_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All }, + &packed_u, + wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some(uv_bytes_per_row), rows_per_image: Some(uv_h) }, + wgpu::Extent3d { width: state.uv_pad_w, height: uv_h, depth_or_array_layers: 1 }, + ); + queue.write_texture( + wgpu::ImageCopyTexture { texture: &state.v_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All }, + &packed_v, + wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some(uv_bytes_per_row), rows_per_image: Some(uv_h) }, + wgpu::Extent3d { width: state.uv_pad_w, height: uv_h, depth_or_array_layers: 1 }, + ); + } + + // Update params uniform + let params = ParamsUniform { src_w: shared.width, src_h: shared.height, y_tex_w: state.y_pad_w, uv_tex_w: state.uv_pad_w }; + queue.write_buffer(&state.params_buf, 0, bytemuck::bytes_of(¶ms)); + + 
shared.dirty = false;
+        }
+
+        Vec::new()
+    }
+
+    fn paint(&self, _info: egui::PaintCallbackInfo, render_pass: &mut wgpu::RenderPass<'static>, resources: &egui_wgpu_backend::CallbackResources) {
+        // paint() has no device or queue access; it can only consume state prepared earlier
+        let shared = self.shared.lock();
+        if shared.width == 0 || shared.height == 0 {
+            return;
+        }
+
+        // The pipeline and textures are built in prepare(); here we only look them up
+        let state_entry = resources.get::<YuvGpuState>().expect("YuvGpuState should be initialized in prepare");
+        // We cannot mutate resources here; assume created already with correct dims
+        let state = state_entry;
+
+        if state.dims != (shared.width, shared.height) {
+            // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild
+            return;
+        }
+
+        render_pass.set_pipeline(&state.pipeline);
+        render_pass.set_bind_group(0, &state.bind_group, &[]);
+        // Fullscreen triangle without vertex buffer
+        render_pass.draw(0..3, 0..1);
+    }
+}
+
+// GPU state creation and per-frame plane uploads live in prepare(), which receives the
+// wgpu device and queue; paint() only binds the prepared pipeline and draws.

diff --git a/examples/local_video/src/yuv_shader.wgsl b/examples/local_video/src/yuv_shader.wgsl
new file mode 100644
index 000000000..4a66065fc
--- /dev/null
+++ b/examples/local_video/src/yuv_shader.wgsl
@@ -0,0 +1,63 @@
+struct VSOut {
+    @builtin(position) pos: vec4<f32>,
+    @location(0) uv: vec2<f32>,
+};
+
+@vertex
+fn vs_main(@builtin(vertex_index) vid: u32) -> VSOut {
+    var pos = array<vec2<f32>, 3>(
+        vec2<f32>(-1.0, -3.0),
+        vec2<f32>(-1.0, 1.0),
+        vec2<f32>( 3.0, 1.0)
+    );
+    let p = pos[vid];
+    var out: VSOut;
+    out.pos = vec4<f32>(p, 0.0, 1.0);
+    out.uv = 0.5 * (p + vec2<f32>(1.0, 1.0));
+    return out;
+}
+
+@group(0) @binding(0) var samp: sampler;
+@group(0) @binding(1) var y_tex: texture_2d<f32>;
+@group(0) @binding(2) var u_tex: texture_2d<f32>;
+@group(0) @binding(3) var v_tex: texture_2d<f32>;
+
+struct Params {
+    src_w: u32,
+    src_h: u32,
+    y_tex_w: u32,
+    uv_tex_w: u32,
+};
+@group(0) @binding(4) var<uniform> params: Params;
+
+fn yuv_to_rgb(y: f32, u: f32, v: f32) -> vec3<f32> {
+    let c = y - (16.0/255.0);
+    let d = u - 0.5;
+    let e = v - 0.5;
+    let r = 1.164 * c + 1.596 * e;
+    let g = 1.164 * c - 0.392 * d - 0.813 * e;
+    let b = 1.164 * c + 2.017 * d;
+    return clamp(vec3<f32>(r, g, b), vec3<f32>(0.0), vec3<f32>(1.0));
+}
+
+@fragment
+fn fs_main(in_: VSOut) -> @location(0) vec4<f32> {
+    let src_w = f32(params.src_w);
+    let src_h = f32(params.src_h);
+    let y_tex_w = f32(params.y_tex_w);
+    let uv_tex_w = f32(params.uv_tex_w);
+
+    // Flip vertically and scale X to avoid sampling padded columns
+    let flipped = vec2<f32>(in_.uv.x, 1.0 - in_.uv.y);
+    let uv_y = vec2<f32>(flipped.x * (src_w / y_tex_w), flipped.y);
+    let uv_uv = vec2<f32>(flipped.x * ((src_w * 0.5) / uv_tex_w), flipped.y);
+
+    let y = textureSample(y_tex, samp, uv_y).r;
+    let u = textureSample(u_tex, samp, uv_uv).r;
+    let v = textureSample(v_tex, samp, uv_uv).r;
+
+    let rgb = yuv_to_rgb(y, u, v);
+    return vec4<f32>(rgb, 1.0);
+}
+
+
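Two pieces of arithmetic make this patch work: plane rows are padded to a multiple of 256 bytes (wgpu's `COPY_BYTES_PER_ROW_ALIGNMENT`), so the shader rescales X by `src_w / tex_w` to keep the padded columns from ever being sampled, and the fragment shader applies a BT.601 limited-range YUV-to-RGB transform. The same math in plain Rust, mirroring `align_up` and `yuv_to_rgb` above (illustrative only):

```rust
// Round a row width up to wgpu's 256-byte copy-row alignment.
fn align_up(value: u32, alignment: u32) -> u32 {
    ((value + alignment - 1) / alignment) * alignment
}

// BT.601 limited-range YUV -> RGB, as in yuv_shader.wgsl above.
fn yuv_to_rgb(y: f32, u: f32, v: f32) -> [f32; 3] {
    let c = y - 16.0 / 255.0;
    let d = u - 0.5;
    let e = v - 0.5;
    let r = 1.164 * c + 1.596 * e;
    let g = 1.164 * c - 0.392 * d - 0.813 * e;
    let b = 1.164 * c + 2.017 * d;
    [r.clamp(0.0, 1.0), g.clamp(0.0, 1.0), b.clamp(0.0, 1.0)]
}

fn main() {
    assert_eq!(align_up(1280, 256), 1280); // already aligned
    assert_eq!(align_up(1920, 256), 2048); // 128 padding bytes per row
    // Mid-gray luma with neutral chroma stays gray.
    let [r, g, b] = yuv_to_rgb(0.5, 0.5, 0.5);
    assert!((r - g).abs() < 1e-3 && (g - b).abs() < 1e-3);
}
```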
From 1640c8a3ebfeefb653acc08426e5901753a0f02d Mon Sep 17 00:00:00 2001
From: David Chen
Date: Sat, 1 Nov 2025 15:45:21 -0700
Subject: [PATCH 24/39] wip

---
 webrtc-sys/build.rs | 1 +
 webrtc-sys/src/nvidia/nvidia_decoder_factory.cpp | 14 ++++++++++++--
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/webrtc-sys/build.rs b/webrtc-sys/build.rs
index 72007e1ba..87213e82a 100644
--- a/webrtc-sys/build.rs
+++ b/webrtc-sys/build.rs
@@ -193,6 +193,7 @@ fn main() {
         .file("src/nvidia/h264_encoder_impl.cpp")
         .file("src/nvidia/h265_encoder_impl.cpp")
         .file("src/nvidia/h264_decoder_impl.cpp")
+        .file("src/nvidia/h265_decoder_impl.cpp")
         .file("src/nvidia/nvidia_decoder_factory.cpp")
         .file("src/nvidia/nvidia_encoder_factory.cpp")
         .file("src/nvidia/cuda_context.cpp")

diff --git a/webrtc-sys/src/nvidia/nvidia_decoder_factory.cpp b/webrtc-sys/src/nvidia/nvidia_decoder_factory.cpp
index 00fde1026..40c0a198d 100644
--- a/webrtc-sys/src/nvidia/nvidia_decoder_factory.cpp
+++ b/webrtc-sys/src/nvidia/nvidia_decoder_factory.cpp
@@ -6,6 +6,7 @@
 
 #include "cuda_context.h"
 #include "h264_decoder_impl.h"
+#include "h265_decoder_impl.h"
 #include "rtc_base/logging.h"
 
 namespace webrtc {
@@ -53,6 +54,8 @@ std::vector<SdpVideoFormat> SupportedNvDecoderCodecs(CUcontext context) {
                        webrtc::H264Level::kLevel5_1, "1"),
       CreateH264Format(webrtc::H264Profile::kProfileMain,
                        webrtc::H264Level::kLevel5_1, "1"),
+      SdpVideoFormat("H265"),
+      SdpVideoFormat("HEVC"),
   };
 }
 
@@ -92,7 +95,7 @@ std::unique_ptr<VideoDecoder> NvidiaVideoDecoderFactory::Create(
   // Check if the requested format is supported.
   for (const auto& supported_format : supported_formats_) {
     if (format.IsSameCodec(supported_format)) {
-      // If the format is supported, create and return the encoder.
+      // If the format is supported, create and return the decoder.
       if (!cu_context_) {
         cu_context_ = livekit::CudaContext::GetInstance();
         if (!cu_context_->Initialize()) {
@@ -100,7 +103,14 @@ std::unique_ptr<VideoDecoder> NvidiaVideoDecoderFactory::Create(
           return nullptr;
         }
       }
-      return std::make_unique<NvidiaH264DecoderImpl>(cu_context_->GetContext());
+      if (format.name == "H264") {
+        RTC_LOG(LS_INFO) << "Using NVIDIA HW decoder (NVDEC) for H264";
+        return std::make_unique<NvidiaH264DecoderImpl>(cu_context_->GetContext());
+      }
+      if (format.name == "H265" || format.name == "HEVC") {
+        RTC_LOG(LS_INFO) << "Using NVIDIA HW decoder (NVDEC) for H265/HEVC";
+        return std::make_unique<NvidiaH265DecoderImpl>(cu_context_->GetContext());
+      }
     }
   }
   return nullptr;
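The decoder factory now mirrors the encoder side: both the `H265` and `HEVC` spellings are advertised and dispatch to the same NVDEC-backed implementation, while unknown names fall through so another factory can claim them. A hedged Rust sketch of that dispatch rule (the enum and function names are illustrative):

```rust
/// Illustrative mirror of NvidiaVideoDecoderFactory::Create's name matching.
#[derive(Debug, PartialEq)]
enum NvDecoderKind {
    H264,
    Hevc,
}

fn nvdec_for(format_name: &str) -> Option<NvDecoderKind> {
    match format_name {
        "H264" => Some(NvDecoderKind::H264),
        // Both SDP spellings select the single HEVC decoder.
        "H265" | "HEVC" => Some(NvDecoderKind::Hevc),
        // Unsupported codecs fall through; the caller returns nullptr/None.
        _ => None,
    }
}

fn main() {
    assert_eq!(nvdec_for("HEVC"), Some(NvDecoderKind::Hevc));
    assert_eq!(nvdec_for("VP9"), None);
}
```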
From 15630385a9caed81354ff3c48ed242a9fe0bb50b Mon Sep 17 00:00:00 2001
From: David Chen
Date: Sat, 1 Nov 2025 16:46:29 -0700
Subject: [PATCH 25/39] missing files

---
 webrtc-sys/src/nvidia/h265_decoder_impl.cpp | 341 ++++++++++++++++++++
 webrtc-sys/src/nvidia/h265_decoder_impl.h | 47 +++
 2 files changed, 388 insertions(+)
 create mode 100644 webrtc-sys/src/nvidia/h265_decoder_impl.cpp
 create mode 100644 webrtc-sys/src/nvidia/h265_decoder_impl.h

diff --git a/webrtc-sys/src/nvidia/h265_decoder_impl.cpp b/webrtc-sys/src/nvidia/h265_decoder_impl.cpp
new file mode 100644
index 000000000..2a8e433eb
--- /dev/null
+++ b/webrtc-sys/src/nvidia/h265_decoder_impl.cpp
@@ -0,0 +1,341 @@
+#include "h265_decoder_impl.h"
+
+#include <cuda.h>
+#include <libyuv.h>
+#include <memory>
+#include <optional>
+
+#include "NvDecoder/NvDecoder.h"
+#include "Utils/NvCodecUtils.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace webrtc {
+
+static ColorSpace ExtractColorSpaceFromFormat(const CUVIDEOFORMAT& format) {
+  return ColorSpace(
+      static_cast<ColorSpace::PrimaryID>(
+          format.video_signal_description.color_primaries),
+      static_cast<ColorSpace::TransferID>(
+          format.video_signal_description.transfer_characteristics),
+      static_cast<ColorSpace::MatrixID>(
+          format.video_signal_description.matrix_coefficients),
+      static_cast<ColorSpace::RangeID>(
+          format.video_signal_description.video_full_range_flag));
+}
+
+NvidiaH265DecoderImpl::NvidiaH265DecoderImpl(CUcontext context)
+    : cu_context_(context),
+      decoder_(nullptr),
+      is_configured_decoder_(false),
+      decoded_complete_callback_(nullptr),
+      buffer_pool_(false) {}
+
+NvidiaH265DecoderImpl::~NvidiaH265DecoderImpl() { Release(); }
+
+VideoDecoder::DecoderInfo NvidiaH265DecoderImpl::GetDecoderInfo() const {
+  VideoDecoder::DecoderInfo info;
+  info.implementation_name = "NVIDIA H265 Decoder";
+  info.is_hardware_accelerated = true;
+  return info;
+}
+
+bool NvidiaH265DecoderImpl::Configure(const Settings& settings) {
+  if (settings.codec_type() != kVideoCodecH265) {
+    RTC_LOG(LS_ERROR) << "initialization failed: codec type is not H265";
+    return false;
+  }
+  if (!settings.max_render_resolution().Valid()) {
+    RTC_LOG(LS_ERROR)
+        << "initialization failed on codec_settings width < 0 or height < 0";
+    return false;
+  }
+
+  settings_ = settings;
+
+  const CUresult result = cuCtxSetCurrent(cu_context_);
+  if (!ck(result)) {
+    RTC_LOG(LS_ERROR) << "initialization failed on cuCtxSetCurrent result"
+                      << result;
+    return false;
+  }
+
+  int maxWidth = 4096;
+  int maxHeight = 4096;
+
+  decoder_ = std::make_unique<NvDecoder>(
+      cu_context_, false, cudaVideoCodec_HEVC, true, false, nullptr, nullptr,
+      false, maxWidth, maxHeight);
+  return true;
+}
+
+int32_t NvidiaH265DecoderImpl::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  decoded_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t NvidiaH265DecoderImpl::Release() {
+  buffer_pool_.Release();
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t NvidiaH265DecoderImpl::Decode(const EncodedImage& input_image,
+                                      bool /*missing_frames*/,
+                                      int64_t /*render_time_ms*/) {
+  CUcontext current;
+  if (!ck(cuCtxGetCurrent(&current))) {
+    RTC_LOG(LS_ERROR) << "decode failed on cuCtxGetCurrent";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (current != cu_context_) {
+    RTC_LOG(LS_ERROR)
+        << "decode failed: current context does not match held context";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (decoded_complete_callback_ == nullptr) {
+    RTC_LOG(LS_ERROR) << "decode failed: decoded_complete_callback_ not set";
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (!input_image.data() || !input_image.size()) {
+    RTC_LOG(LS_ERROR) << "decode failed: input image is null";
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  int nFrameReturned = 0;
+  do {
+    nFrameReturned = decoder_->Decode(
+        input_image.data(), static_cast<int>(input_image.size()),
+        CUVID_PKT_TIMESTAMP, input_image.RtpTimestamp());
+  } while (nFrameReturned == 0);
+
+  is_configured_decoder_ = true;
+
+  const cudaVideoSurfaceFormat output_format = decoder_->GetOutputFormat();
+  if (output_format != cudaVideoSurfaceFormat_NV12 &&
+      output_format != cudaVideoSurfaceFormat_P016) {
+    RTC_LOG(LS_ERROR) << "not supported output format: " << output_format;
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
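The frame loop below reads each decoded picture out of a single pitched device buffer: the luma plane occupies `height` rows of `GetDeviceFramePitch()` bytes, and the interleaved chroma plane starts immediately after it, which is what the `pFrame + GetHeight() * GetDeviceFramePitch()` pointer arithmetic computes. P016 shares the layout with 16-bit samples; the fallback converter added in a later commit keeps only the high byte. The offset math in Rust (illustrative):

```rust
/// Byte offsets of the NV12/P016 planes inside one decoded surface.
/// `pitch` is the device frame pitch in bytes (>= width * bytes per sample).
fn nv12_plane_offsets(pitch: usize, height: usize) -> (usize, usize) {
    let y_offset = 0;
    // The interleaved UV plane starts right after `height` pitched luma rows.
    let uv_offset = pitch * height;
    (y_offset, uv_offset)
}

/// P016 stores 16-bit samples; a right shift by 8 keeps the high byte,
/// matching the P016 fallback converter added later in this series.
fn p016_sample_to_u8(sample: u16) -> u8 {
    (sample >> 8) as u8
}

fn main() {
    let (y, uv) = nv12_plane_offsets(2048, 1080);
    assert_eq!((y, uv), (0, 2048 * 1080));
    assert_eq!(p016_sample_to_u8(0xABCD), 0xAB);
}
```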
+  const ColorSpace& color_space =
+      input_image.ColorSpace() ? *input_image.ColorSpace()
+                               : ExtractColorSpaceFromFormat(
+                                     decoder_->GetVideoFormatInfo());
+
+  for (int i = 0; i < nFrameReturned; i++) {
+    int64_t timeStamp;
+    uint8_t* pFrame = decoder_->GetFrame(&timeStamp);
+
+    webrtc::scoped_refptr<I420Buffer> i420_buffer =
+        buffer_pool_.CreateI420Buffer(decoder_->GetWidth(),
+                                      decoder_->GetHeight());
+
+    int result = 0;
+    if (output_format == cudaVideoSurfaceFormat_NV12) {
+      result = libyuv::NV12ToI420(
+          pFrame, decoder_->GetDeviceFramePitch(),
+          pFrame + decoder_->GetHeight() * decoder_->GetDeviceFramePitch(),
+          decoder_->GetDeviceFramePitch(), i420_buffer->MutableDataY(),
+          i420_buffer->StrideY(), i420_buffer->MutableDataU(),
+          i420_buffer->StrideU(), i420_buffer->MutableDataV(),
+          i420_buffer->StrideV(), decoder_->GetWidth(),
+          decoder_->GetHeight());
+    } else {
+      // Treat P016 as P010 for conversion purposes.
+      result = libyuv::P010ToI420(
+          reinterpret_cast<const uint16_t*>(pFrame),
+          decoder_->GetDeviceFramePitch(),
+          reinterpret_cast<const uint16_t*>(pFrame +
+                                            decoder_->GetHeight() *
+                                                decoder_->GetDeviceFramePitch()),
+          decoder_->GetDeviceFramePitch(), i420_buffer->MutableDataY(),
+          i420_buffer->StrideY(), i420_buffer->MutableDataU(),
+          i420_buffer->StrideU(), i420_buffer->MutableDataV(),
+          i420_buffer->StrideV(), decoder_->GetWidth(),
+          decoder_->GetHeight());
+    }
+
+    if (result) {
+      RTC_LOG(LS_INFO) << "libyuv::NV12ToI420 failed. error:" << result;
+    }
+
+    VideoFrame decoded_frame = VideoFrame::Builder()
+                                   .set_video_frame_buffer(i420_buffer)
+                                   .set_timestamp_rtp(static_cast<uint32_t>(
+                                       timeStamp))
+                                   .set_color_space(color_space)
+                                   .build();
+
+    std::optional<int32_t> decodetime;
+    std::optional<uint8_t> qp;
+    decoded_complete_callback_->Decoded(decoded_frame, decodetime, qp);
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+} // end namespace webrtc
+
+#include "h265_decoder_impl.h" + +#include +#include +#include +#include + +#include "NvDecoder/NvDecoder.h" +#include "Utils/NvCodecUtils.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +static ColorSpace ExtractColorSpaceFromFormat(const CUVIDEOFORMAT& format) { + return ColorSpace( + static_cast( + format.video_signal_description.color_primaries), + static_cast( + format.video_signal_description.transfer_characteristics), + static_cast( + format.video_signal_description.matrix_coefficients), + static_cast( + format.video_signal_description.video_full_range_flag)); +} + +NvidiaH265DecoderImpl::NvidiaH265DecoderImpl(CUcontext context) + : cu_context_(context), + decoder_(nullptr), + is_configured_decoder_(false), + decoded_complete_callback_(nullptr), + buffer_pool_(false) {} + +NvidiaH265DecoderImpl::~NvidiaH265DecoderImpl() { Release(); } + +VideoDecoder::DecoderInfo NvidiaH265DecoderImpl::GetDecoderInfo() const { + VideoDecoder::DecoderInfo info; + info.implementation_name = "NVIDIA H265 Decoder"; + info.is_hardware_accelerated = true; + return info; +} + +bool NvidiaH265DecoderImpl::Configure(const Settings& settings) { + if (settings.codec_type() != kVideoCodecH265) { + RTC_LOG(LS_ERROR) << "initialization failed: codec type is not H265"; + return false; + } + if (!settings.max_render_resolution().Valid()) { + RTC_LOG(LS_ERROR) + << "initialization failed on codec_settings width < 0 or height < 0"; + return false; + } + + settings_ = settings; + + const CUresult result = cuCtxSetCurrent(cu_context_); + if (!ck(result)) { + RTC_LOG(LS_ERROR) << "initialization failed on cuCtxSetCurrent result" + << result; + return false; + } + + int maxWidth = 4096; + int maxHeight = 4096; + + decoder_ = std::make_unique(
cu_context_, false, cudaVideoCodec_HEVC, true, false, nullptr, nullptr, + false, maxWidth, maxHeight); + return true; +} + +int32_t NvidiaH265DecoderImpl::RegisterDecodeCompleteCallback( + DecodedImageCallback* callback) { + decoded_complete_callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265DecoderImpl::Release() { + buffer_pool_.Release(); + return WEBRTC_VIDEO_CODEC_OK; +} + +int32_t NvidiaH265DecoderImpl::Decode(const EncodedImage& input_image, + bool /*missing_frames*/, + int64_t /*render_time_ms*/) { + CUcontext current; + if (!ck(cuCtxGetCurrent(¤t))) { + RTC_LOG(LS_ERROR) << "decode failed on cuCtxGetCurrent"; + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (current != cu_context_) { + RTC_LOG(LS_ERROR) + << "decode failed: current context does not match held context"; + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (decoded_complete_callback_ == nullptr) { + RTC_LOG(LS_ERROR) << "decode failed: decoded_complete_callback_ not set"; + return WEBRTC_VIDEO_CODEC_UNINITIALIZED; + } + if (!input_image.data() || !input_image.size()) { + RTC_LOG(LS_ERROR) << "decode failed: input image is null"; + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + + int nFrameReturned = 0; + do { + nFrameReturned = decoder_->Decode( + input_image.data(), static_cast(input_image.size()), + CUVID_PKT_TIMESTAMP, input_image.RtpTimestamp()); + } while (nFrameReturned == 0); + + is_configured_decoder_ = true; + + if (decoder_->GetOutputFormat() != cudaVideoSurfaceFormat_NV12) { + RTC_LOG(LS_ERROR) << "not supported output format: " + << decoder_->GetOutputFormat(); + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + } + + const ColorSpace& color_space = + input_image.ColorSpace() ? *input_image.ColorSpace() + : ExtractColorSpaceFromFormat( + decoder_->GetVideoFormatInfo()); + + for (int i = 0; i < nFrameReturned; i++) { + int64_t timeStamp; + uint8_t* pFrame = decoder_->GetFrame(&timeStamp); + + webrtc::scoped_refptr i420_buffer = + buffer_pool_.CreateI420Buffer(decoder_->GetWidth(), + decoder_->GetHeight()); + + int result = libyuv::NV12ToI420( + pFrame, decoder_->GetDeviceFramePitch(), + pFrame + decoder_->GetHeight() * decoder_->GetDeviceFramePitch(), + decoder_->GetDeviceFramePitch(), i420_buffer->MutableDataY(), + i420_buffer->StrideY(), i420_buffer->MutableDataU(), + i420_buffer->StrideU(), i420_buffer->MutableDataV(), + i420_buffer->StrideV(), decoder_->GetWidth(), decoder_->GetHeight()); + + if (result) { + RTC_LOG(LS_INFO) << "libyuv::NV12ToI420 failed. 
error:" << result; + } + + VideoFrame decoded_frame = VideoFrame::Builder() + .set_video_frame_buffer(i420_buffer) + .set_timestamp_rtp(static_cast( + timeStamp)) + .set_color_space(color_space) + .build(); + + std::optional decodetime; + std::optional qp; // Not parsed for H265 currently + decoded_complete_callback_->Decoded(decoded_frame, decodetime, qp); + } + + return WEBRTC_VIDEO_CODEC_OK; +} + +} // end namespace webrtc + + diff --git a/webrtc-sys/src/nvidia/h265_decoder_impl.h b/webrtc-sys/src/nvidia/h265_decoder_impl.h new file mode 100644 index 000000000..54dcbfc20 --- /dev/null +++ b/webrtc-sys/src/nvidia/h265_decoder_impl.h @@ -0,0 +1,47 @@ +#ifndef WEBRTC_NVIDIA_H265_DECODER_IMPL_H_ +#define WEBRTC_NVIDIA_H265_DECODER_IMPL_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "NvDecoder/NvDecoder.h" + +namespace webrtc { + +class NvidiaH265DecoderImpl : public VideoDecoder { + public: + explicit NvidiaH265DecoderImpl(CUcontext context); + NvidiaH265DecoderImpl(const NvidiaH265DecoderImpl&) = delete; + NvidiaH265DecoderImpl& operator=(const NvidiaH265DecoderImpl&) = delete; + ~NvidiaH265DecoderImpl() override; + + bool Configure(const Settings& settings) override; + int32_t Decode(const EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms) override; + int32_t RegisterDecodeCompleteCallback( + DecodedImageCallback* callback) override; + int32_t Release() override; + DecoderInfo GetDecoderInfo() const override; + + private: + CUcontext cu_context_; + std::unique_ptr decoder_; + bool is_configured_decoder_; + + Settings settings_; + + DecodedImageCallback* decoded_complete_callback_ = nullptr; + webrtc::VideoFrameBufferPool buffer_pool_; +}; + +} // end namespace webrtc + +#endif // WEBRTC_NVIDIA_H265_DECODER_IMPL_H_ + + From 450d56bc94d34d42ab9952c2438623c0c9fd7182 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 1 Nov 2025 16:53:34 -0700 Subject: [PATCH 26/39] fixes --- webrtc-sys/src/nvidia/h265_decoder_impl.cpp | 206 ++++---------------- 1 file changed, 43 insertions(+), 163 deletions(-) diff --git a/webrtc-sys/src/nvidia/h265_decoder_impl.cpp b/webrtc-sys/src/nvidia/h265_decoder_impl.cpp index 2a8e433eb..652d62080 100644 --- a/webrtc-sys/src/nvidia/h265_decoder_impl.cpp +++ b/webrtc-sys/src/nvidia/h265_decoder_impl.cpp @@ -12,6 +12,47 @@ namespace webrtc { +// Fallback converter for P016 (16-bit NV12-like) to I420 (8-bit). +// Downshifts 16-bit components to 8-bit by discarding the lower 8 bits. 
+static int P016ToI420Fallback(const uint16_t* src_y,
+                              int src_stride_y_bytes,
+                              const uint16_t* src_uv,
+                              int src_stride_uv_bytes,
+                              uint8_t* dst_y,
+                              int dst_stride_y,
+                              uint8_t* dst_u,
+                              int dst_stride_u,
+                              uint8_t* dst_v,
+                              int dst_stride_v,
+                              int width,
+                              int height) {
+  for (int y = 0; y < height; ++y) {
+    const uint16_t* src_row = reinterpret_cast<const uint16_t*>(
+        reinterpret_cast<const uint8_t*>(src_y) + y * src_stride_y_bytes);
+    uint8_t* dst_row = dst_y + y * dst_stride_y;
+    for (int x = 0; x < width; ++x) {
+      dst_row[x] = static_cast<uint8_t>(src_row[x] >> 8);
+    }
+  }
+
+  const int chroma_height = height / 2;
+  const int chroma_width = width / 2;
+  for (int y = 0; y < chroma_height; ++y) {
+    const uint16_t* src_uv_row = reinterpret_cast<const uint16_t*>(
+        reinterpret_cast<const uint8_t*>(src_uv) + y * src_stride_uv_bytes);
+    uint8_t* dst_u_row = dst_u + y * dst_stride_u;
+    uint8_t* dst_v_row = dst_v + y * dst_stride_v;
+    for (int x = 0; x < chroma_width; ++x) {
+      const uint16_t u16 = src_uv_row[2 * x + 0];
+      const uint16_t v16 = src_uv_row[2 * x + 1];
+      dst_u_row[x] = static_cast<uint8_t>(u16 >> 8);
+      dst_v_row[x] = static_cast<uint8_t>(v16 >> 8);
+    }
+  }
+
+  return 0;
+}
+
 static ColorSpace ExtractColorSpaceFromFormat(const CUVIDEOFORMAT& format) {
   return ColorSpace(
       static_cast<ColorSpace::PrimaryID>(
@@ -142,8 +183,8 @@ int32_t NvidiaH265DecoderImpl::Decode(const EncodedImage& input_image,
           i420_buffer->StrideV(), decoder_->GetWidth(),
           decoder_->GetHeight());
     } else {
-      // Treat P016 as P010 for conversion purposes.
-      result = libyuv::P010ToI420(
+      // P016 output: use local 16->8 downshift and deinterleave UV.
+      result = P016ToI420Fallback(
           reinterpret_cast<const uint16_t*>(pFrame),
           decoder_->GetDeviceFramePitch(),
           reinterpret_cast<const uint16_t*>(pFrame +
                                             decoder_->GetHeight() *
                                                 decoder_->GetDeviceFramePitch()),
@@ -177,165 +218,4 @@ int32_t NvidiaH265DecoderImpl::Decode(const EncodedImage& input_image,
 
 } // end namespace webrtc
 
-#include "h265_decoder_impl.h" - -#include -#include -#include -#include - -#include "NvDecoder/NvDecoder.h" -#include "Utils/NvCodecUtils.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -static ColorSpace ExtractColorSpaceFromFormat(const CUVIDEOFORMAT& format) { - return ColorSpace( - static_cast( - format.video_signal_description.color_primaries), - static_cast( - format.video_signal_description.transfer_characteristics), - static_cast( - format.video_signal_description.matrix_coefficients), - static_cast( - format.video_signal_description.video_full_range_flag)); -} - -NvidiaH265DecoderImpl::NvidiaH265DecoderImpl(CUcontext context) - : cu_context_(context), - decoder_(nullptr), - is_configured_decoder_(false), - decoded_complete_callback_(nullptr), - buffer_pool_(false) {} - -NvidiaH265DecoderImpl::~NvidiaH265DecoderImpl() { Release(); } - -VideoDecoder::DecoderInfo NvidiaH265DecoderImpl::GetDecoderInfo() const { - VideoDecoder::DecoderInfo info; - info.implementation_name = "NVIDIA H265 Decoder"; - info.is_hardware_accelerated = true; - return info; -} - -bool NvidiaH265DecoderImpl::Configure(const Settings& settings) { - if (settings.codec_type() != kVideoCodecH265) { - RTC_LOG(LS_ERROR) << "initialization failed: codec type is not H265"; - return false; - } - if (!settings.max_render_resolution().Valid()) { - RTC_LOG(LS_ERROR) - << "initialization failed on codec_settings width < 0 or height < 0"; - return false; - } - - settings_ = settings; - - const CUresult result = cuCtxSetCurrent(cu_context_); - if (!ck(result)) { - RTC_LOG(LS_ERROR) << "initialization failed on cuCtxSetCurrent result" - << result; - return false; - } - - int maxWidth = 4096; - int maxHeight = 4096; - - decoder_ =
std::make_unique( - cu_context_, false, cudaVideoCodec_HEVC, true, false, nullptr, nullptr, - false, maxWidth, maxHeight); - return true; -} - -int32_t NvidiaH265DecoderImpl::RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) { - decoded_complete_callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t NvidiaH265DecoderImpl::Release() { - buffer_pool_.Release(); - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t NvidiaH265DecoderImpl::Decode(const EncodedImage& input_image, - bool /*missing_frames*/, - int64_t /*render_time_ms*/) { - CUcontext current; - if (!ck(cuCtxGetCurrent(¤t))) { - RTC_LOG(LS_ERROR) << "decode failed on cuCtxGetCurrent"; - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (current != cu_context_) { - RTC_LOG(LS_ERROR) - << "decode failed: current context does not match held context"; - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (decoded_complete_callback_ == nullptr) { - RTC_LOG(LS_ERROR) << "decode failed: decoded_complete_callback_ not set"; - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - if (!input_image.data() || !input_image.size()) { - RTC_LOG(LS_ERROR) << "decode failed: input image is null"; - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } - - int nFrameReturned = 0; - do { - nFrameReturned = decoder_->Decode( - input_image.data(), static_cast(input_image.size()), - CUVID_PKT_TIMESTAMP, input_image.RtpTimestamp()); - } while (nFrameReturned == 0); - - is_configured_decoder_ = true; - - if (decoder_->GetOutputFormat() != cudaVideoSurfaceFormat_NV12) { - RTC_LOG(LS_ERROR) << "not supported output format: " - << decoder_->GetOutputFormat(); - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } - - const ColorSpace& color_space = - input_image.ColorSpace() ? *input_image.ColorSpace() - : ExtractColorSpaceFromFormat( - decoder_->GetVideoFormatInfo()); - - for (int i = 0; i < nFrameReturned; i++) { - int64_t timeStamp; - uint8_t* pFrame = decoder_->GetFrame(&timeStamp); - - webrtc::scoped_refptr i420_buffer = - buffer_pool_.CreateI420Buffer(decoder_->GetWidth(), - decoder_->GetHeight()); - - int result = libyuv::NV12ToI420( - pFrame, decoder_->GetDeviceFramePitch(), - pFrame + decoder_->GetHeight() * decoder_->GetDeviceFramePitch(), - decoder_->GetDeviceFramePitch(), i420_buffer->MutableDataY(), - i420_buffer->StrideY(), i420_buffer->MutableDataU(), - i420_buffer->StrideU(), i420_buffer->MutableDataV(), - i420_buffer->StrideV(), decoder_->GetWidth(), decoder_->GetHeight()); - - if (result) { - RTC_LOG(LS_INFO) << "libyuv::NV12ToI420 failed. 
error:" << result; - } - - VideoFrame decoded_frame = VideoFrame::Builder() - .set_video_frame_buffer(i420_buffer) - .set_timestamp_rtp(static_cast( - timeStamp)) - .set_color_space(color_space) - .build(); - - std::optional decodetime; - std::optional qp; // Not parsed for H265 currently - decoded_complete_callback_->Decoded(decoded_frame, decodetime, qp); - } - - return WEBRTC_VIDEO_CODEC_OK; -} - -} // end namespace webrtc - From 12a730be6ab7232c4b034b226eb53b10ec00d4a6 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 1 Nov 2025 22:19:06 -0700 Subject: [PATCH 27/39] enable simulcast in example --- examples/local_video/src/publisher.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 31171281e..87317473d 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -166,7 +166,7 @@ async fn main() -> Result<()> { let publish_opts = |codec: VideoCodec| TrackPublishOptions { source: TrackSource::Camera, - simulcast: false, + simulcast: true, video_codec: codec, ..Default::default() }; From f2a9ed3be20b3dc7d7ff32e760b5f3298eefd001 Mon Sep 17 00:00:00 2001 From: David Chen Date: Sat, 1 Nov 2025 22:42:07 -0700 Subject: [PATCH 28/39] print out track details --- examples/local_video/src/subscriber.rs | 61 ++++++++++++++++++++++++-- 1 file changed, 58 insertions(+), 3 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index f83f554bf..22d371542 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -10,7 +10,12 @@ use livekit::webrtc::video_stream::native::NativeVideoStream; use livekit_api::access_token; use log::{debug, info}; use parking_lot::Mutex; -use std::{env, sync::Arc, time::{Duration, Instant}}; +use std::{ + collections::HashMap, + env, + sync::Arc, + time::{Duration, Instant}, +}; #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] @@ -131,9 +136,59 @@ async fn main() -> Result<()> { info!("Subscribed to room events"); while let Some(evt) = events.recv().await { debug!("Room event: {:?}", evt); - if let RoomEvent::TrackSubscribed { track, .. 
} = evt { + if let RoomEvent::TrackSubscribed { track, publication, participant } = evt { if let livekit::track::RemoteTrack::Video(video_track) = track { - info!("Subscribed to video track: {}", video_track.name()); + info!( + "Subscribed to video track: {} (sid {}) from {} - codec: {}, simulcast: {}, dimension: {}x{}", + publication.name(), + publication.sid(), + participant.identity(), + publication.mime_type(), + publication.simulcasted(), + publication.dimension().0, + publication.dimension().1 + ); + + // Try to fetch inbound RTP/codec stats for more details + match video_track.get_stats().await { + Ok(stats) => { + let mut codec_by_id: HashMap = HashMap::new(); + let mut inbound: Option = None; + for s in stats.iter() { + match s { + livekit::webrtc::stats::RtcStats::Codec(c) => { + codec_by_id.insert( + c.rtc.id.clone(), + (c.codec.mime_type.clone(), c.codec.sdp_fmtp_line.clone()), + ); + } + livekit::webrtc::stats::RtcStats::InboundRtp(i) => { + if i.stream.kind == "video" { + inbound = Some(i.clone()); + } + } + _ => {} + } + } + + if let Some(i) = inbound { + if let Some((mime, fmtp)) = codec_by_id.get(&i.stream.codec_id) { + info!("Inbound codec: {} (fmtp: {})", mime, fmtp); + } else { + info!("Inbound codec id: {}", i.stream.codec_id); + } + info!( + "Inbound current layer: {}x{} ~{:.1} fps, decoder: {}, power_efficient: {}", + i.inbound.frame_width, + i.inbound.frame_height, + i.inbound.frames_per_second, + i.inbound.decoder_implementation, + i.inbound.power_efficient_decoder + ); + } + } + Err(e) => debug!("Failed to get stats for video track: {:?}", e), + } // Start background sink thread let shared2 = shared_clone.clone(); std::thread::spawn(move || { From bf5ec2131eb22aceee4bc0840a981c1096dab028 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 13 Nov 2025 15:21:29 -0800 Subject: [PATCH 29/39] add --participant flag to subscriber example --- examples/local_video/README.md | 12 +- examples/local_video/src/subscriber.rs | 281 +++++++++++++++---------- 2 files changed, 180 insertions(+), 113 deletions(-) diff --git a/examples/local_video/README.md b/examples/local_video/README.md index 5589638c4..aa39facbd 100644 --- a/examples/local_video/README.md +++ b/examples/local_video/README.md @@ -36,5 +36,15 @@ Subscriber usage: --identity viewer-1 \ --url https://your.livekit.server \ --api-key YOUR_KEY \ - --api-secret YOUR_SECRET + --api-secret YOUR_SECRET + + # subscribe to a specific participant's video only + cargo run -p local_video --bin subscriber -- \ + --room-name demo \ + --identity viewer-1 \ + --participant alice ``` + +Notes: +- `--participant` limits subscription to video tracks from the specified participant identity. +- If the active video track is unsubscribed or unpublished, the app clears its state and will automatically attach to the next matching video track when it appears. 
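Distilled from the README notes above: the subscriber accepts a new video track only when it passes the optional identity filter and no sink is currently active. A simplified sketch of that rule (the full event loop is in the diff below; function and parameter names here are illustrative):

```rust
/// Decide whether to attach to a newly subscribed video track.
/// `filter` is the optional --participant identity; `active_sid` is the
/// SID of the currently attached track, if any.
fn should_attach(
    filter: Option<&str>,
    active_sid: Option<&str>,
    participant_identity: &str,
) -> bool {
    // Respect the identity filter when one is set.
    if filter.is_some_and(|allow| allow != participant_identity) {
        return false;
    }
    // Keep at most one active sink; a new track attaches only when idle.
    active_sid.is_none()
}

fn main() {
    assert!(should_attach(Some("alice"), None, "alice"));
    assert!(!should_attach(Some("alice"), None, "bob"));
    assert!(!should_attach(None, Some("TR_abc"), "alice"));
}
```

When the active track is unsubscribed or unpublished, `active_sid` is cleared, which is what lets the next matching track attach automatically.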
diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 22d371542..de07ecdea 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -39,6 +39,10 @@ struct Args { /// LiveKit API secret (can also be set via LIVEKIT_API_SECRET environment variable) #[arg(long)] api_secret: Option, + + /// Only subscribe to video from this participant identity + #[arg(long)] + participant: Option, } struct SharedYuv { @@ -129,132 +133,184 @@ async fn main() -> Result<()> { })); // Subscribe to room events: on first video track, start sink task + let allowed_identity = args.participant.clone(); let shared_clone = shared.clone(); let rt = tokio::runtime::Handle::current(); + // Track currently active video track SID to handle unpublish/unsubscribe + let active_sid = Arc::new(Mutex::new(None::)); tokio::spawn(async move { + let active_sid = active_sid.clone(); let mut events = room.subscribe(); info!("Subscribed to room events"); while let Some(evt) = events.recv().await { debug!("Room event: {:?}", evt); - if let RoomEvent::TrackSubscribed { track, publication, participant } = evt { - if let livekit::track::RemoteTrack::Video(video_track) = track { - info!( - "Subscribed to video track: {} (sid {}) from {} - codec: {}, simulcast: {}, dimension: {}x{}", - publication.name(), - publication.sid(), - participant.identity(), - publication.mime_type(), - publication.simulcasted(), - publication.dimension().0, - publication.dimension().1 - ); - - // Try to fetch inbound RTP/codec stats for more details - match video_track.get_stats().await { - Ok(stats) => { - let mut codec_by_id: HashMap = HashMap::new(); - let mut inbound: Option = None; - for s in stats.iter() { - match s { - livekit::webrtc::stats::RtcStats::Codec(c) => { - codec_by_id.insert( - c.rtc.id.clone(), - (c.codec.mime_type.clone(), c.codec.sdp_fmtp_line.clone()), - ); - } - livekit::webrtc::stats::RtcStats::InboundRtp(i) => { - if i.stream.kind == "video" { - inbound = Some(i.clone()); + match evt { + RoomEvent::TrackSubscribed { track, publication, participant } => { + // If a participant filter is set, skip others + if let Some(ref allow) = allowed_identity { + if participant.identity().as_str() != allow { + debug!("Skipping track from '{}' (filter set to '{}')", participant.identity(), allow); + continue; + } + } + if let livekit::track::RemoteTrack::Video(video_track) = track { + let sid = publication.sid().clone(); + // Only handle if we don't already have an active video track + { + let mut active = active_sid.lock(); + if active.as_ref() == Some(&sid) { + debug!("Track {} already active, ignoring duplicate subscribe", sid); + continue; + } + if active.is_some() { + debug!("A video track is already active ({}), ignoring new subscribe {}", active.as_ref().unwrap(), sid); + continue; + } + *active = Some(sid.clone()); + } + + info!( + "Subscribed to video track: {} (sid {}) from {} - codec: {}, simulcast: {}, dimension: {}x{}", + publication.name(), + publication.sid(), + participant.identity(), + publication.mime_type(), + publication.simulcasted(), + publication.dimension().0, + publication.dimension().1 + ); + + // Try to fetch inbound RTP/codec stats for more details + match video_track.get_stats().await { + Ok(stats) => { + let mut codec_by_id: HashMap = HashMap::new(); + let mut inbound: Option = None; + for s in stats.iter() { + match s { + livekit::webrtc::stats::RtcStats::Codec(c) => { + codec_by_id.insert( + c.rtc.id.clone(), + 
(c.codec.mime_type.clone(), c.codec.sdp_fmtp_line.clone()), + ); + } + livekit::webrtc::stats::RtcStats::InboundRtp(i) => { + if i.stream.kind == "video" { + inbound = Some(i.clone()); + } } + _ => {} } - _ => {} } - } - if let Some(i) = inbound { - if let Some((mime, fmtp)) = codec_by_id.get(&i.stream.codec_id) { - info!("Inbound codec: {} (fmtp: {})", mime, fmtp); - } else { - info!("Inbound codec id: {}", i.stream.codec_id); + if let Some(i) = inbound { + if let Some((mime, fmtp)) = codec_by_id.get(&i.stream.codec_id) { + info!("Inbound codec: {} (fmtp: {})", mime, fmtp); + } else { + info!("Inbound codec id: {}", i.stream.codec_id); + } + info!( + "Inbound current layer: {}x{} ~{:.1} fps, decoder: {}, power_efficient: {}", + i.inbound.frame_width, + i.inbound.frame_height, + i.inbound.frames_per_second, + i.inbound.decoder_implementation, + i.inbound.power_efficient_decoder + ); } - info!( - "Inbound current layer: {}x{} ~{:.1} fps, decoder: {}, power_efficient: {}", - i.inbound.frame_width, - i.inbound.frame_height, - i.inbound.frames_per_second, - i.inbound.decoder_implementation, - i.inbound.power_efficient_decoder - ); } + Err(e) => debug!("Failed to get stats for video track: {:?}", e), } - Err(e) => debug!("Failed to get stats for video track: {:?}", e), - } - // Start background sink thread - let shared2 = shared_clone.clone(); - std::thread::spawn(move || { - let mut sink = NativeVideoStream::new(video_track.rtc_track()); - let mut frames: u64 = 0; - let mut last_log = Instant::now(); - let mut logged_first = false; - // YUV buffers reused to avoid per-frame allocations - let mut y_buf: Vec = Vec::new(); - let mut u_buf: Vec = Vec::new(); - let mut v_buf: Vec = Vec::new(); - while let Some(frame) = rt.block_on(sink.next()) { - let w = frame.buffer.width(); - let h = frame.buffer.height(); - - if !logged_first { - debug!( - "First frame: {}x{}, type {:?}", - w, h, frame.buffer.buffer_type() - ); - logged_first = true; - } + // Start background sink thread + let shared2 = shared_clone.clone(); + let active_sid2 = active_sid.clone(); + let my_sid = sid.clone(); + let rt_clone = rt.clone(); + std::thread::spawn(move || { + let mut sink = NativeVideoStream::new(video_track.rtc_track()); + let mut frames: u64 = 0; + let mut last_log = Instant::now(); + let mut logged_first = false; + // YUV buffers reused to avoid per-frame allocations + let mut y_buf: Vec = Vec::new(); + let mut u_buf: Vec = Vec::new(); + let mut v_buf: Vec = Vec::new(); + while let Some(frame) = rt_clone.block_on(sink.next()) { + let w = frame.buffer.width(); + let h = frame.buffer.height(); + + if !logged_first { + debug!( + "First frame: {}x{}, type {:?}", + w, h, frame.buffer.buffer_type() + ); + logged_first = true; + } - // Convert to I420 on CPU, but keep planes separate for GPU sampling - let i420 = frame.buffer.to_i420(); - let (sy, su, sv) = i420.strides(); - let (dy, du, dv) = i420.data(); - - let ch = (h + 1) / 2; - - // Ensure capacity and copy full plane slices - let y_size = (sy * h) as usize; - let u_size = (su * ch) as usize; - let v_size = (sv * ch) as usize; - if y_buf.len() != y_size { y_buf.resize(y_size, 0); } - if u_buf.len() != u_size { u_buf.resize(u_size, 0); } - if v_buf.len() != v_size { v_buf.resize(v_size, 0); } - y_buf.copy_from_slice(dy); - u_buf.copy_from_slice(du); - v_buf.copy_from_slice(dv); - - // Swap buffers into shared state - let mut s = shared2.lock(); - s.width = w as u32; - s.height = h as u32; - s.stride_y = sy as u32; - s.stride_u = su as u32; - s.stride_v = sv as u32; - 
std::mem::swap(&mut s.y, &mut y_buf); - std::mem::swap(&mut s.u, &mut u_buf); - std::mem::swap(&mut s.v, &mut v_buf); - s.dirty = true; - - frames += 1; - let elapsed = last_log.elapsed(); - if elapsed >= Duration::from_secs(2) { - let fps = frames as f64 / elapsed.as_secs_f64(); - info!("Receiving video: {}x{}, ~{:.1} fps", w, h, fps); - frames = 0; - last_log = Instant::now(); + // Convert to I420 on CPU, but keep planes separate for GPU sampling + let i420 = frame.buffer.to_i420(); + let (sy, su, sv) = i420.strides(); + let (dy, du, dv) = i420.data(); + + let ch = (h + 1) / 2; + + // Ensure capacity and copy full plane slices + let y_size = (sy * h) as usize; + let u_size = (su * ch) as usize; + let v_size = (sv * ch) as usize; + if y_buf.len() != y_size { y_buf.resize(y_size, 0); } + if u_buf.len() != u_size { u_buf.resize(u_size, 0); } + if v_buf.len() != v_size { v_buf.resize(v_size, 0); } + y_buf.copy_from_slice(dy); + u_buf.copy_from_slice(du); + v_buf.copy_from_slice(dv); + + // Swap buffers into shared state + let mut s = shared2.lock(); + s.width = w as u32; + s.height = h as u32; + s.stride_y = sy as u32; + s.stride_u = su as u32; + s.stride_v = sv as u32; + std::mem::swap(&mut s.y, &mut y_buf); + std::mem::swap(&mut s.u, &mut u_buf); + std::mem::swap(&mut s.v, &mut v_buf); + s.dirty = true; + + frames += 1; + let elapsed = last_log.elapsed(); + if elapsed >= Duration::from_secs(2) { + let fps = frames as f64 / elapsed.as_secs_f64(); + info!("Receiving video: {}x{}, ~{:.1} fps", w, h, fps); + frames = 0; + last_log = Instant::now(); + } } - } - info!("Video stream ended"); - }); - break; + info!("Video stream ended for {}", my_sid); + // Clear active sid if still ours + let mut active = active_sid2.lock(); + if active.as_ref() == Some(&my_sid) { + *active = None; + } + }); + } } + RoomEvent::TrackUnsubscribed { publication, .. } => { + let sid = publication.sid().clone(); + let mut active = active_sid.lock(); + if active.as_ref() == Some(&sid) { + info!("Video track unsubscribed ({}), clearing active sink", sid); + *active = None; + } + } + RoomEvent::TrackUnpublished { publication, .. 
} => { + let sid = publication.sid().clone(); + let mut active = active_sid.lock(); + if active.as_ref() == Some(&sid) { + info!("Video track unpublished ({}), clearing active sink", sid); + *active = None; + } + } + _ => {} } } }); @@ -564,9 +620,10 @@ impl CallbackTrait for YuvPaintCallback { } // Build pipeline and textures on first paint or on resize - let state_entry = resources.get::().expect("YuvGpuState should be initialized in prepare"); - // We cannot mutate resources here; assume created already with correct dims - let state = state_entry; + let Some(state) = resources.get::() else { + // prepare may not have created the state yet (race with first frame); skip this paint + return; + }; if state.dims != (shared.width, shared.height) { // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild From a202eddca6de829da7f32ed3ee4d63c3bfd289e8 Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 13 Nov 2025 15:34:28 -0800 Subject: [PATCH 30/39] add bitrate option to publisher --- examples/local_video/src/publisher.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 87317473d..ab196e392 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -1,6 +1,6 @@ use anyhow::Result; use clap::Parser; -use livekit::options::{TrackPublishOptions, VideoCodec}; +use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding}; use livekit::prelude::*; use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation}; use livekit::webrtc::video_source::native::NativeVideoSource; @@ -37,6 +37,10 @@ struct Args { #[arg(long, default_value_t = 30)] fps: u32, + /// Max video bitrate for the main layer in bps (optional) + #[arg(long)] + max_bitrate: Option, + /// LiveKit participant identity #[arg(long, default_value = "rust-camera-pub")] identity: String, @@ -164,11 +168,20 @@ async fn main() -> Result<()> { let requested_codec = if args.h265 { VideoCodec::H265 } else { VideoCodec::H264 }; info!("Attempting publish with codec: {}", requested_codec.as_str()); - let publish_opts = |codec: VideoCodec| TrackPublishOptions { - source: TrackSource::Camera, - simulcast: true, - video_codec: codec, - ..Default::default() + let publish_opts = |codec: VideoCodec| { + let mut opts = TrackPublishOptions { + source: TrackSource::Camera, + simulcast: true, + video_codec: codec, + ..Default::default() + }; + if let Some(bitrate) = args.max_bitrate { + opts.video_encoding = Some(VideoEncoding { + max_bitrate: bitrate, + max_framerate: args.fps as f64, + }); + } + opts }; let publish_result = room From 9afe58b22c4889abeb8b4eb04538788c6ccf4c2c Mon Sep 17 00:00:00 2001 From: David Chen Date: Thu, 13 Nov 2025 15:41:42 -0800 Subject: [PATCH 31/39] disable simulcast --- examples/local_video/src/publisher.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index ab196e392..d311129b9 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -171,7 +171,7 @@ async fn main() -> Result<()> { let publish_opts = |codec: VideoCodec| { let mut opts = TrackPublishOptions { source: TrackSource::Camera, - simulcast: true, + simulcast: false, video_codec: codec, ..Default::default() }; From 0974c73b3f1ba8cb6edbe23a1cff0e7ecfb9393c Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 
25 Nov 2025 14:29:44 -0800 Subject: [PATCH 32/39] wip adding simulcast layer select buttons --- examples/local_video/src/subscriber.rs | 123 ++++++++++++++++++++++++- 1 file changed, 122 insertions(+), 1 deletion(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index de07ecdea..9d56b67b6 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -57,8 +57,56 @@ struct SharedYuv { dirty: bool, } +#[derive(Clone)] +struct SimulcastState { + available: bool, + publication: Option, + requested_quality: Option, + active_quality: Option, + full_dims: Option<(u32, u32)>, +} + +impl Default for SimulcastState { + fn default() -> Self { + Self { + available: false, + publication: None, + requested_quality: None, + active_quality: None, + full_dims: None, + } + } +} + +fn infer_quality_from_dims( + full_w: u32, + _full_h: u32, + cur_w: u32, + _cur_h: u32, +) -> livekit::track::VideoQuality { + if full_w == 0 { + return livekit::track::VideoQuality::High; + } + let ratio = cur_w as f32 / full_w as f32; + if ratio >= 0.75 { + livekit::track::VideoQuality::High + } else if ratio >= 0.45 { + livekit::track::VideoQuality::Medium + } else { + livekit::track::VideoQuality::Low + } +} + +fn simulcast_state_full_dims( + state: &Arc>, +) -> Option<(u32, u32)> { + let sc = state.lock(); + sc.full_dims +} + struct VideoApp { shared: Arc>, + simulcast: Arc>, } impl eframe::App for VideoApp { @@ -78,6 +126,35 @@ impl eframe::App for VideoApp { ui.painter().add(cb); }); + // Simulcast layer controls: bottom-left overlay + egui::Area::new("simulcast_controls") + .anchor(egui::Align2::LEFT_BOTTOM, egui::vec2(10.0, -10.0)) + .interactable(true) + .show(ctx, |ui| { + let mut sc = self.simulcast.lock(); + if !sc.available { + return; + } + let selected = sc.requested_quality.or(sc.active_quality); + ui.horizontal(|ui| { + let choices = [ + (livekit::track::VideoQuality::Low, "Low"), + (livekit::track::VideoQuality::Medium, "Med"), + (livekit::track::VideoQuality::High, "High"), + ]; + for (q, label) in choices { + let is_selected = selected.is_some_and(|s| s == q); + let resp = ui.selectable_label(is_selected, label); + if resp.clicked() { + if let Some(ref pub_remote) = sc.publication { + pub_remote.set_video_quality(q); + sc.requested_quality = Some(q); + } + } + } + }); + }); + ctx.request_repaint_after(Duration::from_millis(16)); } } @@ -138,8 +215,11 @@ async fn main() -> Result<()> { let rt = tokio::runtime::Handle::current(); // Track currently active video track SID to handle unpublish/unsubscribe let active_sid = Arc::new(Mutex::new(None::)); + // Shared simulcast UI/control state + let simulcast = Arc::new(Mutex::new(SimulcastState::default())); tokio::spawn(async move { let active_sid = active_sid.clone(); + let simulcast = simulcast.clone(); let mut events = room.subscribe(); info!("Subscribed to room events"); while let Some(evt) = events.recv().await { @@ -225,11 +305,25 @@ async fn main() -> Result<()> { let active_sid2 = active_sid.clone(); let my_sid = sid.clone(); let rt_clone = rt.clone(); + // Initialize simulcast state for this publication + { + let mut sc = simulcast.lock(); + sc.available = publication.simulcasted(); + sc.full_dims = Some(publication.dimension()); + sc.requested_quality = None; + sc.active_quality = None; + sc.publication = match publication.clone() { + livekit::room::publication::TrackPublication::Remote(rp) => Some(rp), + _ => None, + }; + } + let simulcast2 = simulcast.clone(); 
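+                    // Worked example for infer_quality_from_dims above: with a
+                    // full width of 1280, an inbound width of 1280 (ratio 1.0)
+                    // maps to High, 640 (0.5) to Medium, and 320 (0.25) to Low.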
std::thread::spawn(move || { let mut sink = NativeVideoStream::new(video_track.rtc_track()); let mut frames: u64 = 0; let mut last_log = Instant::now(); let mut logged_first = false; + let mut last_stats = Instant::now(); // YUV buffers reused to avoid per-frame allocations let mut y_buf: Vec = Vec::new(); let mut u_buf: Vec = Vec::new(); @@ -284,6 +378,27 @@ async fn main() -> Result<()> { frames = 0; last_log = Instant::now(); } + // Periodically infer active simulcast quality from inbound stats + if last_stats.elapsed() >= Duration::from_secs(1) { + if let Ok(stats) = rt_clone.block_on(video_track.get_stats()) { + let mut inbound: Option = None; + for s in stats.iter() { + if let livekit::webrtc::stats::RtcStats::InboundRtp(i) = s { + if i.stream.kind == "video" { + inbound = Some(i.clone()); + } + } + } + if let Some(i) = inbound { + if let Some((fw, fh)) = simulcast_state_full_dims(&simulcast2) { + let q = infer_quality_from_dims(fw, fh, i.inbound.frame_width as u32, i.inbound.frame_height as u32); + let mut sc = simulcast2.lock(); + sc.active_quality = Some(q); + } + } + } + last_stats = Instant::now(); + } } info!("Video stream ended for {}", my_sid); // Clear active sid if still ours @@ -301,6 +416,9 @@ async fn main() -> Result<()> { info!("Video track unsubscribed ({}), clearing active sink", sid); *active = None; } + // Clear simulcast state + let mut sc = simulcast.lock(); + *sc = SimulcastState::default(); } RoomEvent::TrackUnpublished { publication, .. } => { let sid = publication.sid().clone(); @@ -309,6 +427,9 @@ async fn main() -> Result<()> { info!("Video track unpublished ({}), clearing active sink", sid); *active = None; } + // Clear simulcast state + let mut sc = simulcast.lock(); + *sc = SimulcastState::default(); } _ => {} } @@ -316,7 +437,7 @@ async fn main() -> Result<()> { }); // Start UI - let app = VideoApp { shared }; + let app = VideoApp { shared, simulcast }; let native_options = eframe::NativeOptions::default(); eframe::run_native("LiveKit Video Subscriber", native_options, Box::new(|_| Ok::, _>(Box::new(app))))?; From 588c847e50dd3ba1ae8c54179e371cfd43fdf4a9 Mon Sep 17 00:00:00 2001 From: David Chen Date: Tue, 2 Dec 2025 18:06:06 -0800 Subject: [PATCH 33/39] simulcast ui controls --- examples/local_video/src/subscriber.rs | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 9d56b67b6..db719ee02 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -60,7 +60,7 @@ struct SharedYuv { #[derive(Clone)] struct SimulcastState { available: bool, - publication: Option, + publication: Option, requested_quality: Option, active_quality: Option, full_dims: Option<(u32, u32)>, @@ -127,7 +127,7 @@ impl eframe::App for VideoApp { }); // Simulcast layer controls: bottom-left overlay - egui::Area::new("simulcast_controls") + egui::Area::new("simulcast_controls".into()) .anchor(egui::Align2::LEFT_BOTTOM, egui::vec2(10.0, -10.0)) .interactable(true) .show(ctx, |ui| { @@ -217,9 +217,10 @@ async fn main() -> Result<()> { let active_sid = Arc::new(Mutex::new(None::)); // Shared simulcast UI/control state let simulcast = Arc::new(Mutex::new(SimulcastState::default())); + let simulcast_events = simulcast.clone(); tokio::spawn(async move { let active_sid = active_sid.clone(); - let simulcast = simulcast.clone(); + let simulcast = simulcast_events; let mut events = room.subscribe(); info!("Subscribed to room events"); while 
let Some(evt) = events.recv().await { @@ -309,13 +310,11 @@ async fn main() -> Result<()> { { let mut sc = simulcast.lock(); sc.available = publication.simulcasted(); - sc.full_dims = Some(publication.dimension()); + let dim = publication.dimension(); + sc.full_dims = Some((dim.0, dim.1)); sc.requested_quality = None; sc.active_quality = None; - sc.publication = match publication.clone() { - livekit::room::publication::TrackPublication::Remote(rp) => Some(rp), - _ => None, - }; + sc.publication = Some(publication.clone()); } let simulcast2 = simulcast.clone(); std::thread::spawn(move || { From 93c075cedc92ccb91fe52152ae16460e45de9dae Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 3 Dec 2025 18:55:41 -0800 Subject: [PATCH 34/39] wip --- examples/local_video/src/publisher.rs | 20 +- examples/local_video/src/subscriber.rs | 8 + examples/wgpu_room/src/logo_track.rs | 3 +- libwebrtc/src/lib.rs | 2 +- libwebrtc/src/native/mod.rs | 1 + libwebrtc/src/native/sensor_timestamp.rs | 186 ++++++++++ libwebrtc/src/native/video_stream.rs | 1 + libwebrtc/src/video_frame.rs | 4 + livekit-ffi/src/server/video_source.rs | 3 +- livekit/src/room/e2ee/manager.rs | 46 ++- livekit/src/room/track/local_video_track.rs | 17 +- livekit/src/room/track/remote_video_track.rs | 20 +- webrtc-sys/build.rs | 2 + webrtc-sys/include/livekit/sensor_timestamp.h | 186 ++++++++++ webrtc-sys/src/lib.rs | 1 + webrtc-sys/src/sensor_timestamp.cpp | 335 ++++++++++++++++++ webrtc-sys/src/sensor_timestamp.rs | 100 ++++++ 17 files changed, 917 insertions(+), 18 deletions(-) create mode 100644 libwebrtc/src/native/sensor_timestamp.rs create mode 100644 webrtc-sys/include/livekit/sensor_timestamp.h create mode 100644 webrtc-sys/src/sensor_timestamp.cpp create mode 100644 webrtc-sys/src/sensor_timestamp.rs diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index d311129b9..fbfffe45f 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -208,7 +208,12 @@ async fn main() -> Result<()> { } // Reusable I420 buffer and frame - let mut frame = VideoFrame { rotation: VideoRotation::VideoRotation0, timestamp_us: 0, buffer: I420Buffer::new(width, height) }; + let mut frame = VideoFrame { + rotation: VideoRotation::VideoRotation0, + timestamp_us: 0, + sensor_timestamp_us: None, + buffer: I420Buffer::new(width, height), + }; let is_yuyv = fmt.format() == FrameFormat::YUYV; info!( "Selected conversion path: {}", @@ -362,6 +367,19 @@ async fn main() -> Result<()> { // Update RTP timestamp (monotonic, microseconds since start) frame.timestamp_us = start_ts.elapsed().as_micros() as i64; + + // Attach a static sensor timestamp for testing and push it into the + // shared queue used by the sensor timestamp transformer. 
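+            // The store behaves as a FIFO that the send-side transformer pops
+            // once per encoded frame, so store() is called exactly once per
+            // captured frame, just before capture_frame() hands it to WebRTC.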
+ if let Some(store) = track.sensor_timestamp_store() { + let sensor_ts = frame.timestamp_us + 123_456; // simple fixed offset for visibility + frame.sensor_timestamp_us = Some(sensor_ts); + store.store(frame.timestamp_us, sensor_ts); + info!( + "Publisher: attached sensor_timestamp_us={} for capture_ts={}", + sensor_ts, frame.timestamp_us + ); + } + rtc_source.capture_frame(&frame); let t4 = Instant::now(); diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index db719ee02..250d89f59 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -369,6 +369,14 @@ async fn main() -> Result<()> { std::mem::swap(&mut s.v, &mut v_buf); s.dirty = true; + // Log any parsed sensor timestamp for this frame if available. + if let Some(ts) = video_track.last_sensor_timestamp() { + info!( + "Subscriber: received frame {}x{} with sensor_timestamp_us={}", + w, h, ts + ); + } + frames += 1; let elapsed = last_log.elapsed(); if elapsed >= Duration::from_secs(2) { diff --git a/examples/wgpu_room/src/logo_track.rs b/examples/wgpu_room/src/logo_track.rs index 03b1a0ec4..f284ac4b9 100644 --- a/examples/wgpu_room/src/logo_track.rs +++ b/examples/wgpu_room/src/logo_track.rs @@ -116,8 +116,9 @@ impl LogoTrack { framebuffer: Arc::new(Mutex::new(vec![0u8; FB_WIDTH * FB_HEIGHT * 4])), video_frame: Arc::new(Mutex::new(VideoFrame { rotation: VideoRotation::VideoRotation0, - buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), timestamp_us: 0, + sensor_timestamp_us: None, + buffer: I420Buffer::new(FB_WIDTH as u32, FB_HEIGHT as u32), })), pos: (0, 0), direction: (1, 1), diff --git a/libwebrtc/src/lib.rs b/libwebrtc/src/lib.rs index 8dc2e426c..0f372f58c 100644 --- a/libwebrtc/src/lib.rs +++ b/libwebrtc/src/lib.rs @@ -66,7 +66,7 @@ pub mod video_track; pub mod native { pub use webrtc_sys::webrtc::ffi::create_random_uuid; - pub use crate::imp::{apm, audio_resampler, frame_cryptor, yuv_helper}; + pub use crate::imp::{apm, audio_resampler, frame_cryptor, sensor_timestamp, yuv_helper}; } #[cfg(target_os = "android")] diff --git a/libwebrtc/src/native/mod.rs b/libwebrtc/src/native/mod.rs index b91005f20..679937971 100644 --- a/libwebrtc/src/native/mod.rs +++ b/libwebrtc/src/native/mod.rs @@ -36,6 +36,7 @@ pub mod video_source; pub mod video_stream; pub mod video_track; pub mod yuv_helper; +pub mod sensor_timestamp; use webrtc_sys::{rtc_error as sys_err, webrtc as sys_rtc}; diff --git a/libwebrtc/src/native/sensor_timestamp.rs b/libwebrtc/src/native/sensor_timestamp.rs new file mode 100644 index 000000000..c6d9e3652 --- /dev/null +++ b/libwebrtc/src/native/sensor_timestamp.rs @@ -0,0 +1,186 @@ +// Copyright 2025 LiveKit, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Sensor timestamp support for end-to-end timestamp propagation. +//! +//! This module provides functionality to embed sensor/hardware timestamps +//! in encoded video frames as trailers. The timestamps are preserved +//! 
through the WebRTC pipeline and can be extracted on the receiver side.
+//!
+//! This works independently of E2EE: timestamps can be embedded even when
+//! encryption is disabled.
+
+use cxx::SharedPtr;
+use webrtc_sys::sensor_timestamp::ffi as sys_st;
+
+use crate::{
+    peer_connection_factory::PeerConnectionFactory,
+    rtp_receiver::RtpReceiver,
+    rtp_sender::RtpSender,
+};
+
+/// Thread-safe store for mapping capture timestamps to sensor timestamps.
+///
+/// Used on the sender side to correlate video frame capture time with
+/// the sensor timestamp that should be embedded in the encoded frame.
+#[derive(Clone)]
+pub struct SensorTimestampStore {
+    sys_handle: SharedPtr<sys_st::SensorTimestampStore>,
+}
+
+impl SensorTimestampStore {
+    /// Create a new sensor timestamp store.
+    pub fn new() -> Self {
+        Self {
+            sys_handle: sys_st::new_sensor_timestamp_store(),
+        }
+    }
+
+    /// Store a sensor timestamp associated with a capture timestamp.
+    ///
+    /// Call this when capturing a video frame with a sensor timestamp.
+    /// The `capture_timestamp_us` should match the `timestamp_us` field
+    /// of the VideoFrame.
+    pub fn store(&self, capture_timestamp_us: i64, sensor_timestamp_us: i64) {
+        self.sys_handle.store(capture_timestamp_us, sensor_timestamp_us);
+    }
+
+    /// Look up a sensor timestamp by capture timestamp (for debugging).
+    /// Returns None if not found.
+    pub fn lookup(&self, capture_timestamp_us: i64) -> Option<i64> {
+        let result = self.sys_handle.lookup(capture_timestamp_us);
+        if result < 0 {
+            None
+        } else {
+            Some(result)
+        }
+    }
+
+    /// Pop the oldest sensor timestamp from the queue.
+    /// Returns None if the queue is empty.
+    pub fn pop(&self) -> Option<i64> {
+        let result = self.sys_handle.pop();
+        if result < 0 {
+            None
+        } else {
+            Some(result)
+        }
+    }
+
+    /// Peek at the oldest sensor timestamp without removing it.
+    /// Returns None if the queue is empty.
+    pub fn peek(&self) -> Option<i64> {
+        let result = self.sys_handle.peek();
+        if result < 0 {
+            None
+        } else {
+            Some(result)
+        }
+    }
+
+    /// Clear old entries (older than the given threshold in microseconds).
+    pub fn prune(&self, max_age_us: i64) {
+        self.sys_handle.prune(max_age_us);
+    }
+
+    pub(crate) fn sys_handle(&self) -> SharedPtr<sys_st::SensorTimestampStore> {
+        self.sys_handle.clone()
+    }
+}
+
+impl Default for SensorTimestampStore {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// Handler for sensor timestamp embedding/extraction on RTP streams.
+///
+/// For the sender side: embeds sensor timestamps as 12-byte trailers on
+/// encoded frames before they are sent.
+///
+/// For the receiver side: extracts sensor timestamps from received frames
+/// and makes them available for retrieval.
+#[derive(Clone)]
+pub struct SensorTimestampHandler {
+    sys_handle: SharedPtr<sys_st::SensorTimestampHandler>,
+}
+
+impl SensorTimestampHandler {
+    /// Enable or disable timestamp embedding/extraction.
+    pub fn set_enabled(&self, enabled: bool) {
+        self.sys_handle.set_enabled(enabled);
+    }
+
+    /// Check if timestamp embedding/extraction is enabled.
+    pub fn enabled(&self) -> bool {
+        self.sys_handle.enabled()
+    }
+
+    /// Get the last received sensor timestamp (receiver side only).
+    /// Returns None if no timestamp has been received yet.
+    pub fn last_sensor_timestamp(&self) -> Option<i64> {
+        if self.sys_handle.has_sensor_timestamp() {
+            let ts = self.sys_handle.last_sensor_timestamp();
+            if ts >= 0 {
+                Some(ts)
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+
+    pub(crate) fn sys_handle(&self) -> SharedPtr<sys_st::SensorTimestampHandler> {
+        self.sys_handle.clone()
+    }
+}
+
+/// Create a sender-side sensor timestamp handler.
+///
+/// This handler will embed sensor timestamps from the provided store
+/// into encoded frames before they are packetized and sent.
+pub fn create_sender_handler(
+    peer_factory: &PeerConnectionFactory,
+    store: &SensorTimestampStore,
+    sender: &RtpSender,
+) -> SensorTimestampHandler {
+    SensorTimestampHandler {
+        sys_handle: sys_st::new_sensor_timestamp_sender(
+            peer_factory.handle.sys_handle.clone(),
+            store.sys_handle(),
+            sender.handle.sys_handle.clone(),
+        ),
+    }
+}
+
+/// Create a receiver-side sensor timestamp handler.
+///
+/// This handler will extract sensor timestamps from received frames
+/// and make them available via `last_sensor_timestamp()`.
+pub fn create_receiver_handler(
+    peer_factory: &PeerConnectionFactory,
+    store: &SensorTimestampStore,
+    receiver: &RtpReceiver,
+) -> SensorTimestampHandler {
+    SensorTimestampHandler {
+        sys_handle: sys_st::new_sensor_timestamp_receiver(
+            peer_factory.handle.sys_handle.clone(),
+            store.sys_handle(),
+            receiver.handle.sys_handle.clone(),
+        ),
+    }
+}
diff --git a/libwebrtc/src/native/video_stream.rs b/libwebrtc/src/native/video_stream.rs
index 07774f87b..a2ab70f4e 100644
--- a/libwebrtc/src/native/video_stream.rs
+++ b/libwebrtc/src/native/video_stream.rs
@@ -84,6 +84,7 @@ impl sys_vt::VideoSink for VideoTrackObserver {
         let _ = self.frame_tx.send(VideoFrame {
             rotation: frame.rotation().into(),
             timestamp_us: frame.timestamp_us(),
+            sensor_timestamp_us: None,
             buffer: new_video_frame_buffer(unsafe { frame.video_frame_buffer() }),
         });
     }
diff --git a/libwebrtc/src/video_frame.rs b/libwebrtc/src/video_frame.rs
index 926b45572..5d60659d8 100644
--- a/libwebrtc/src/video_frame.rs
+++ b/libwebrtc/src/video_frame.rs
@@ -59,6 +59,10 @@ where
 {
     pub rotation: VideoRotation,
     pub timestamp_us: i64, // When the frame was captured in microseconds
+    /// Optional sensor timestamp in microseconds, if available.
+    /// This is typically a hardware or device timestamp that can be
+    /// propagated end-to-end through the media pipeline.
+    pub sensor_timestamp_us: Option<i64>,
     pub buffer: T,
 }
diff --git a/livekit-ffi/src/server/video_source.rs b/livekit-ffi/src/server/video_source.rs
index 5af7d9a38..0eca541ce 100644
--- a/livekit-ffi/src/server/video_source.rs
+++ b/livekit-ffi/src/server/video_source.rs
@@ -64,8 +64,9 @@ impl FfiVideoSource {
         let buffer = colorcvt::to_libwebrtc_buffer(capture.buffer.clone());
         let frame = VideoFrame {
             rotation: capture.rotation().into(),
-            timestamp_us: capture.timestamp_us,
             buffer,
+            timestamp_us: capture.timestamp_us,
+            sensor_timestamp_us: None,
         };
         source.capture_frame(&frame);
diff --git a/livekit/src/room/e2ee/manager.rs b/livekit/src/room/e2ee/manager.rs
index 1e583b9c4..38bcb804f 100644
--- a/livekit/src/room/e2ee/manager.rs
+++ b/livekit/src/room/e2ee/manager.rs
@@ -15,8 +15,11 @@
 use std::{collections::HashMap, sync::Arc};
 
 use libwebrtc::{
-    native::frame_cryptor::{
-        DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor,
+    native::{
+        frame_cryptor::{
+            DataPacketCryptor, EncryptedPacket, EncryptionAlgorithm, EncryptionState, FrameCryptor,
+        },
+        sensor_timestamp::{self, SensorTimestampStore},
     },
     rtp_receiver::RtpReceiver,
     rtp_sender::RtpSender,
@@ -98,16 +101,24 @@ impl E2eeManager {
         publication: RemoteTrackPublication,
         participant: RemoteParticipant,
     ) {
-        if !self.initialized() {
-            return;
+        let identity = participant.identity();
+        let receiver = track.transceiver().unwrap().receiver();
+        // Always set up sensor timestamp extraction for remote video tracks.
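+        // Note: the freshly created store is not consumed on the receive side;
+        // extraction only reads trailers, so the store simply satisfies the
+        // handler constructor's signature.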
+        if let RemoteTrack::Video(video_track) = &track {
+            let store = SensorTimestampStore::new();
+            let handler = sensor_timestamp::create_receiver_handler(
+                LkRuntime::instance().pc_factory(),
+                &store,
+                &receiver,
+            );
+            video_track.set_sensor_timestamp_handler(handler);
         }
 
-        if publication.encryption_type() == EncryptionType::None {
+        // E2EE frame cryptor is only created when encryption is configured.
+        if !self.initialized() || publication.encryption_type() == EncryptionType::None {
             return;
         }
 
-        let identity = participant.identity();
-        let receiver = track.transceiver().unwrap().receiver();
         let frame_cryptor = self.setup_rtp_receiver(&identity, receiver);
 
         self.setup_cryptor(&frame_cryptor);
@@ -122,16 +133,27 @@ impl E2eeManager {
         publication: LocalTrackPublication,
         participant: LocalParticipant,
     ) {
-        if !self.initialized() {
-            return;
+        let identity = participant.identity();
+        let sender = track.transceiver().unwrap().sender();
+        // Always set up sensor timestamp embedding for local video tracks.
+        if let LocalTrack::Video(video_track) = &track {
+            let store = SensorTimestampStore::new();
+            video_track.set_sensor_timestamp_store(store.clone());
+            let _handler = sensor_timestamp::create_sender_handler(
+                LkRuntime::instance().pc_factory(),
+                &store,
+                &sender,
+            );
+            // The underlying WebRTC sender keeps the transformer alive, so the
+            // Rust-side handler does not need to be retained here.
         }
 
-        if publication.encryption_type() == EncryptionType::None {
+        // E2EE frame cryptor is only created when encryption is configured.
+        if !self.initialized() || publication.encryption_type() == EncryptionType::None {
             return;
         }
 
-        let identity = participant.identity();
-        let sender = track.transceiver().unwrap().sender();
         let frame_cryptor = self.setup_rtp_sender(&identity, sender);
 
         self.setup_cryptor(&frame_cryptor);
diff --git a/livekit/src/room/track/local_video_track.rs b/livekit/src/room/track/local_video_track.rs
index c7c26649b..e99158fc5 100644
--- a/livekit/src/room/track/local_video_track.rs
+++ b/livekit/src/room/track/local_video_track.rs
@@ -14,8 +14,9 @@
 
 use std::{fmt::Debug, sync::Arc};
 
-use libwebrtc::{prelude::*, stats::RtcStats};
+use libwebrtc::{native::sensor_timestamp::SensorTimestampStore, prelude::*, stats::RtcStats};
 use livekit_protocol as proto;
+use parking_lot::Mutex;
 
 use super::TrackInner;
 use crate::{prelude::*, rtc_engine::lk_runtime::LkRuntime};
@@ -24,6 +25,7 @@
 pub struct LocalVideoTrack {
     inner: Arc<TrackInner>,
     source: RtcVideoSource,
+    sensor_timestamp_store: Arc<Mutex<Option<SensorTimestampStore>>>,
 }
 
 impl Debug for LocalVideoTrack {
@@ -46,6 +48,7 @@
                 MediaStreamTrack::Video(rtc_track),
             )),
             source,
+            sensor_timestamp_store: Arc::new(Mutex::new(None)),
         }
     }
@@ -123,6 +126,18 @@
         self.source.clone()
     }
 
+    /// Returns the sensor timestamp store associated with this track, if any.
+    /// When present, callers can push per-frame sensor timestamps into the
+    /// outgoing queue, which will then be embedded into encoded frames.
+    pub fn sensor_timestamp_store(&self) -> Option<SensorTimestampStore> {
+        self.sensor_timestamp_store.lock().clone()
+    }
+
+    /// Internal: set the sensor timestamp store used for this track.
+    pub(crate) fn set_sensor_timestamp_store(&self, store: SensorTimestampStore) {
+        *self.sensor_timestamp_store.lock() = Some(store);
+    }
+
     pub async fn get_stats(&self) -> RoomResult<Vec<RtcStats>> {
         super::local_track::get_stats(&self.inner).await
     }
diff --git a/livekit/src/room/track/remote_video_track.rs b/livekit/src/room/track/remote_video_track.rs
index 2076a3b1c..5688ce6b9 100644
--- a/livekit/src/room/track/remote_video_track.rs
+++ b/livekit/src/room/track/remote_video_track.rs
@@ -14,8 +14,9 @@
 
 use std::{fmt::Debug, sync::Arc};
 
-use libwebrtc::{prelude::*, stats::RtcStats};
+use libwebrtc::{native::sensor_timestamp::SensorTimestampHandler, prelude::*, stats::RtcStats};
 use livekit_protocol as proto;
+use parking_lot::Mutex;
 
 use super::{remote_track, TrackInner};
 use crate::prelude::*;
@@ -23,6 +24,7 @@
 #[derive(Clone)]
 pub struct RemoteVideoTrack {
     inner: Arc<TrackInner>,
+    sensor_timestamp_handler: Arc<Mutex<Option<SensorTimestampHandler>>>,
 }
 
 impl Debug for RemoteVideoTrack {
@@ -44,6 +46,7 @@
                 TrackKind::Video,
                 MediaStreamTrack::Video(rtc_track),
             )),
+            sensor_timestamp_handler: Arc::new(Mutex::new(None)),
         }
     }
@@ -94,6 +97,21 @@
         true
     }
 
+    /// Returns the last parsed sensor timestamp (in microseconds) for this
+    /// remote video track, if the sensor timestamp transformer is enabled and
+    /// a timestamp has been received.
+    pub fn last_sensor_timestamp(&self) -> Option<i64> {
+        self.sensor_timestamp_handler
+            .lock()
+            .as_ref()
+            .and_then(|h| h.last_sensor_timestamp())
+    }
+
+    /// Internal: set the handler that extracts sensor timestamps for this track.
+    pub(crate) fn set_sensor_timestamp_handler(&self, handler: SensorTimestampHandler) {
+        self.sensor_timestamp_handler.lock().replace(handler);
+    }
+
     pub async fn get_stats(&self) -> RoomResult<Vec<RtcStats>> {
         super::remote_track::get_stats(&self.inner).await
     }
diff --git a/webrtc-sys/build.rs b/webrtc-sys/build.rs
index 87213e82a..6ca57602a 100644
--- a/webrtc-sys/build.rs
+++ b/webrtc-sys/build.rs
@@ -49,6 +49,7 @@ fn main() {
         "src/android.rs",
         "src/prohibit_libsrtp_initialization.rs",
         "src/apm.rs",
+        "src/sensor_timestamp.rs",
     ]);
 
     builder.files(&[
@@ -77,6 +78,7 @@
         "src/global_task_queue.cpp",
         "src/prohibit_libsrtp_initialization.cpp",
         "src/apm.cpp",
+        "src/sensor_timestamp.cpp",
     ]);
 
     let webrtc_dir = webrtc_sys_build::webrtc_dir();
diff --git a/webrtc-sys/include/livekit/sensor_timestamp.h b/webrtc-sys/include/livekit/sensor_timestamp.h
new file mode 100644
index 000000000..ef2262d32
--- /dev/null
+++ b/webrtc-sys/include/livekit/sensor_timestamp.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2025 LiveKit, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#include <atomic>
+#include <cstddef>
+#include <deque>
+#include <memory>
+#include <optional>
+#include <unordered_map>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/frame_transformer_interface.h"
+#include "api/scoped_refptr.h"
+#include "livekit/peer_connection.h"
+#include "livekit/peer_connection_factory.h"
+#include "livekit/rtp_receiver.h"
+#include "livekit/rtp_sender.h"
+#include "livekit/webrtc.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rust/cxx.h"
+
+namespace livekit {
+
+// Magic bytes to identify sensor timestamp trailers: "LKTS" (LiveKit TimeStamp)
+constexpr uint8_t kSensorTimestampMagic[4] = {'L', 'K', 'T', 'S'};
+constexpr size_t kSensorTimestampTrailerSize = 12;  // 8 bytes timestamp + 4 bytes magic
+
+/// Thread-safe FIFO queue for sensor timestamps.
+/// Used on the sender side to pass sensor timestamps to the transformer.
+/// Works on the assumption that frames are captured and encoded in order.
+class SensorTimestampStore {
+ public:
+  SensorTimestampStore() = default;
+  ~SensorTimestampStore() = default;
+
+  /// Push a sensor timestamp to the queue.
+  /// Call this when capturing a video frame with a sensor timestamp.
+  void store(int64_t capture_timestamp_us, int64_t sensor_timestamp_us) const;
+
+  /// Look up the sensor timestamp recorded for the given capture timestamp,
+  /// without removing it. Returns -1 if not found.
+  int64_t lookup(int64_t capture_timestamp_us) const;
+
+  /// Pop the oldest entry if the queue has entries.
+  /// Returns the sensor timestamp, or -1 if empty.
+  int64_t pop() const;
+
+  /// Peek at the oldest entry without removing it.
+  /// Returns the sensor timestamp, or -1 if empty.
+  int64_t peek() const;
+
+  /// Clear old entries (older than the given threshold in microseconds).
+  void prune(int64_t max_age_us) const;
+
+ private:
+  mutable webrtc::Mutex mutex_;
+  struct Entry {
+    int64_t capture_timestamp_us;
+    int64_t sensor_timestamp_us;
+  };
+  mutable std::deque<Entry> entries_;
+  static constexpr size_t kMaxEntries = 300;  // ~10 seconds at 30fps
+};
+
+/// Frame transformer that appends/extracts sensor timestamp trailers.
+/// This transformer can be used standalone or in conjunction with e2ee.
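+///
+/// Trailer layout, appended after the encoded payload (12 bytes total):
+///
+///   | sensor timestamp: 8 bytes, big-endian | magic "LKTS": 4 bytes |
+///
+/// Receivers validate the trailing magic bytes first and strip the trailer
+/// before the frame continues down the pipeline.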
+class SensorTimestampTransformer
+    : public webrtc::FrameTransformerInterface {
+ public:
+  enum class Direction { kSend, kReceive };
+
+  SensorTimestampTransformer(Direction direction,
+                             std::shared_ptr<SensorTimestampStore> store);
+  ~SensorTimestampTransformer() override = default;
+
+  // FrameTransformerInterface implementation
+  void Transform(
+      std::unique_ptr<webrtc::TransformableFrameInterface> frame) override;
+  void RegisterTransformedFrameCallback(
+      rtc::scoped_refptr<webrtc::TransformedFrameCallback> callback) override;
+  void RegisterTransformedFrameSinkCallback(
+      rtc::scoped_refptr<webrtc::TransformedFrameCallback> callback,
+      uint32_t ssrc) override;
+  void UnregisterTransformedFrameCallback() override;
+  void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override;
+
+  /// Enable/disable timestamp embedding
+  void set_enabled(bool enabled);
+  bool enabled() const;
+
+  /// Get the last received sensor timestamp (receiver side only)
+  std::optional<int64_t> last_sensor_timestamp() const;
+
+ private:
+  void TransformSend(
+      std::unique_ptr<webrtc::TransformableFrameInterface> frame);
+  void TransformReceive(
+      std::unique_ptr<webrtc::TransformableFrameInterface> frame);
+
+  /// Append sensor timestamp trailer to frame data
+  std::vector<uint8_t> AppendTimestampTrailer(
+      rtc::ArrayView<const uint8_t> data,
+      int64_t sensor_timestamp_us);
+
+  /// Extract and remove sensor timestamp trailer from frame data
+  /// Returns the sensor timestamp if found, nullopt otherwise
+  std::optional<int64_t> ExtractTimestampTrailer(
+      rtc::ArrayView<const uint8_t> data,
+      std::vector<uint8_t>& out_data);
+
+  const Direction direction_;
+  std::shared_ptr<SensorTimestampStore> store_;
+  std::atomic<bool> enabled_{true};
+  mutable webrtc::Mutex mutex_;
+  rtc::scoped_refptr<webrtc::TransformedFrameCallback> callback_;
+  std::unordered_map<uint32_t,
+                     rtc::scoped_refptr<webrtc::TransformedFrameCallback>>
+      sink_callbacks_;
+  mutable std::atomic<int64_t> last_sensor_timestamp_{0};
+  mutable std::atomic<bool> has_last_sensor_timestamp_{false};
+};
+
+/// Wrapper class for Rust FFI that manages sensor timestamp transformers.
+class SensorTimestampHandler {
+ public:
+  SensorTimestampHandler(std::shared_ptr<RtcRuntime> rtc_runtime,
+                         std::shared_ptr<SensorTimestampStore> store,
+                         rtc::scoped_refptr<webrtc::RtpSenderInterface> sender);
+
+  SensorTimestampHandler(std::shared_ptr<RtcRuntime> rtc_runtime,
+                         std::shared_ptr<SensorTimestampStore> store,
+                         rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver);
+
+  ~SensorTimestampHandler() = default;
+
+  /// Enable/disable timestamp embedding
+  void set_enabled(bool enabled) const;
+  bool enabled() const;
+
+  /// Get the last received sensor timestamp (receiver side only)
+  /// Returns -1 if no timestamp has been received yet
+  int64_t last_sensor_timestamp() const;
+
+  /// Check if a sensor timestamp has been received
+  bool has_sensor_timestamp() const;
+
+ private:
+  std::shared_ptr<RtcRuntime> rtc_runtime_;
+  rtc::scoped_refptr<SensorTimestampTransformer> transformer_;
+  rtc::scoped_refptr<webrtc::RtpSenderInterface> sender_;
+  rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver_;
+};
+
+// Factory functions for Rust FFI
+std::shared_ptr<SensorTimestampStore> new_sensor_timestamp_store();
+
+std::shared_ptr<SensorTimestampHandler> new_sensor_timestamp_sender(
+    std::shared_ptr<PeerConnectionFactory> peer_factory,
+    std::shared_ptr<SensorTimestampStore> store,
+    std::shared_ptr<RtpSender> sender);
+
+std::shared_ptr<SensorTimestampHandler> new_sensor_timestamp_receiver(
+    std::shared_ptr<PeerConnectionFactory> peer_factory,
+    std::shared_ptr<SensorTimestampStore> store,
+    std::shared_ptr<RtpReceiver> receiver);
+
+}  // namespace livekit
+
diff --git a/webrtc-sys/src/lib.rs b/webrtc-sys/src/lib.rs
index 2c3c94faf..a21a8726f 100644
--- a/webrtc-sys/src/lib.rs
+++ b/webrtc-sys/src/lib.rs
@@ -32,6 +32,7 @@ pub mod rtp_parameters;
 pub mod rtp_receiver;
 pub mod rtp_sender;
 pub mod rtp_transceiver;
+pub mod sensor_timestamp;
 pub mod video_frame;
 pub mod video_frame_buffer;
 pub mod video_track;
diff --git a/webrtc-sys/src/sensor_timestamp.cpp b/webrtc-sys/src/sensor_timestamp.cpp
new file mode 100644
index 000000000..512a4ce50
--- /dev/null
+++ b/webrtc-sys/src/sensor_timestamp.cpp
@@ -0,0 +1,335 @@
+/*
+ * Copyright 2025 LiveKit, Inc.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "livekit/sensor_timestamp.h" + +#include +#include +#include + +#include "api/make_ref_counted.h" +#include "livekit/peer_connection_factory.h" +#include "rtc_base/logging.h" +#include "webrtc-sys/src/sensor_timestamp.rs.h" + +namespace livekit { + +// SensorTimestampStore implementation + +void SensorTimestampStore::store(int64_t capture_timestamp_us, + int64_t sensor_timestamp_us) const { + webrtc::MutexLock lock(&mutex_); + + // Remove old entries if we're at capacity + while (entries_.size() >= kMaxEntries) { + entries_.pop_front(); + } + + entries_.push_back({capture_timestamp_us, sensor_timestamp_us}); +} + +int64_t SensorTimestampStore::lookup(int64_t capture_timestamp_us) const { + webrtc::MutexLock lock(&mutex_); + + // Search from the end (most recent) for better performance + for (auto it = entries_.rbegin(); it != entries_.rend(); ++it) { + if (it->capture_timestamp_us == capture_timestamp_us) { + return it->sensor_timestamp_us; + } + } + + return -1; +} + +int64_t SensorTimestampStore::pop() const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + return -1; + } + + int64_t sensor_ts = entries_.front().sensor_timestamp_us; + entries_.pop_front(); + return sensor_ts; +} + +int64_t SensorTimestampStore::peek() const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + return -1; + } + + return entries_.front().sensor_timestamp_us; +} + +void SensorTimestampStore::prune(int64_t max_age_us) const { + webrtc::MutexLock lock(&mutex_); + + if (entries_.empty()) { + return; + } + + int64_t newest_timestamp = entries_.back().capture_timestamp_us; + int64_t threshold = newest_timestamp - max_age_us; + + while (!entries_.empty() && + entries_.front().capture_timestamp_us < threshold) { + entries_.pop_front(); + } +} + +// SensorTimestampTransformer implementation + +SensorTimestampTransformer::SensorTimestampTransformer( + Direction direction, + std::shared_ptr store) + : direction_(direction), store_(store) {} + +void SensorTimestampTransformer::Transform( + std::unique_ptr frame) { + if (!enabled_.load()) { + // Pass through without modification + webrtc::MutexLock lock(&mutex_); + if (callback_) { + callback_->OnTransformedFrame(std::move(frame)); + } + return; + } + + if (direction_ == Direction::kSend) { + TransformSend(std::move(frame)); + } else { + TransformReceive(std::move(frame)); + } +} + +void SensorTimestampTransformer::TransformSend( + std::unique_ptr frame) { + // Get the RTP timestamp from the frame for logging + uint32_t rtp_timestamp = frame->GetTimestamp(); + + auto data = frame->GetData(); + + // Pop the next sensor timestamp from the queue. + // This assumes frames are captured and encoded in order (FIFO). 
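+  // Caveat: this FIFO pairing assumes exactly one encoded frame per captured
+  // frame; dropped captures or multiple encodings per capture (e.g. with
+  // simulcast enabled) would let the queue drift. The capture-timestamp keyed
+  // lookup()/prune() API is available if stricter matching becomes necessary.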
+ int64_t ts_to_embed = 0; + + if (store_) { + int64_t popped_ts = store_->pop(); + if (popped_ts >= 0) { + ts_to_embed = popped_ts; + } + } + + // Always append trailer when enabled (even if timestamp is 0, + // which indicates no sensor timestamp was set for this frame) + std::vector new_data; + if (enabled_.load()) { + new_data = AppendTimestampTrailer(data, ts_to_embed); + frame->SetData(rtc::ArrayView(new_data)); + + RTC_LOG(LS_VERBOSE) << "SensorTimestampTransformer: Appended timestamp trailer" + << " ts=" << ts_to_embed + << " rtp_ts=" << rtp_timestamp + << " data_size=" << new_data.size(); + } + + webrtc::MutexLock lock(&mutex_); + if (callback_) { + callback_->OnTransformedFrame(std::move(frame)); + } +} + +void SensorTimestampTransformer::TransformReceive( + std::unique_ptr frame) { + auto data = frame->GetData(); + std::vector stripped_data; + + auto sensor_ts = ExtractTimestampTrailer(data, stripped_data); + + if (sensor_ts.has_value()) { + // Store the extracted timestamp for later retrieval + last_sensor_timestamp_.store(sensor_ts.value()); + has_last_sensor_timestamp_.store(true); + + // Update frame with stripped data + frame->SetData(rtc::ArrayView(stripped_data)); + + RTC_LOG(LS_VERBOSE) << "SensorTimestampTransformer: Extracted timestamp trailer" + << " ts=" << sensor_ts.value() + << " rtp_ts=" << frame->GetTimestamp() + << " stripped_size=" << stripped_data.size(); + } + + webrtc::MutexLock lock(&mutex_); + if (callback_) { + callback_->OnTransformedFrame(std::move(frame)); + } +} + +std::vector SensorTimestampTransformer::AppendTimestampTrailer( + rtc::ArrayView data, + int64_t sensor_timestamp_us) { + std::vector result; + result.reserve(data.size() + kSensorTimestampTrailerSize); + + // Copy original data + result.insert(result.end(), data.begin(), data.end()); + + // Append timestamp (big-endian) + for (int i = 7; i >= 0; --i) { + result.push_back(static_cast((sensor_timestamp_us >> (i * 8)) & 0xFF)); + } + + // Append magic bytes + result.insert(result.end(), std::begin(kSensorTimestampMagic), + std::end(kSensorTimestampMagic)); + + return result; +} + +std::optional SensorTimestampTransformer::ExtractTimestampTrailer( + rtc::ArrayView data, + std::vector& out_data) { + if (data.size() < kSensorTimestampTrailerSize) { + out_data.assign(data.begin(), data.end()); + return std::nullopt; + } + + // Check for magic bytes at the end + const uint8_t* magic_start = data.data() + data.size() - 4; + if (std::memcmp(magic_start, kSensorTimestampMagic, 4) != 0) { + out_data.assign(data.begin(), data.end()); + return std::nullopt; + } + + // Extract timestamp (big-endian) + const uint8_t* ts_start = data.data() + data.size() - kSensorTimestampTrailerSize; + int64_t timestamp = 0; + for (int i = 0; i < 8; ++i) { + timestamp = (timestamp << 8) | ts_start[i]; + } + + // Copy data without trailer + out_data.assign(data.begin(), data.end() - kSensorTimestampTrailerSize); + + return timestamp; +} + +void SensorTimestampTransformer::RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) { + webrtc::MutexLock lock(&mutex_); + callback_ = callback; +} + +void SensorTimestampTransformer::RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) { + webrtc::MutexLock lock(&mutex_); + sink_callbacks_[ssrc] = callback; +} + +void SensorTimestampTransformer::UnregisterTransformedFrameCallback() { + webrtc::MutexLock lock(&mutex_); + callback_ = nullptr; +} + +void SensorTimestampTransformer::UnregisterTransformedFrameSinkCallback( + uint32_t ssrc) { + 
webrtc::MutexLock lock(&mutex_); + sink_callbacks_.erase(ssrc); +} + +void SensorTimestampTransformer::set_enabled(bool enabled) { + enabled_.store(enabled); +} + +bool SensorTimestampTransformer::enabled() const { + return enabled_.load(); +} + +std::optional SensorTimestampTransformer::last_sensor_timestamp() + const { + if (!has_last_sensor_timestamp_.load()) { + return std::nullopt; + } + return last_sensor_timestamp_.load(); +} + +// SensorTimestampHandler implementation + +SensorTimestampHandler::SensorTimestampHandler( + std::shared_ptr rtc_runtime, + std::shared_ptr store, + rtc::scoped_refptr sender) + : rtc_runtime_(rtc_runtime), sender_(sender) { + transformer_ = rtc::make_ref_counted( + SensorTimestampTransformer::Direction::kSend, store); + sender->SetEncoderToPacketizerFrameTransformer(transformer_); +} + +SensorTimestampHandler::SensorTimestampHandler( + std::shared_ptr rtc_runtime, + std::shared_ptr store, + rtc::scoped_refptr receiver) + : rtc_runtime_(rtc_runtime), receiver_(receiver) { + transformer_ = rtc::make_ref_counted( + SensorTimestampTransformer::Direction::kReceive, store); + receiver->SetDepacketizerToDecoderFrameTransformer(transformer_); +} + +void SensorTimestampHandler::set_enabled(bool enabled) const { + transformer_->set_enabled(enabled); +} + +bool SensorTimestampHandler::enabled() const { + return transformer_->enabled(); +} + +int64_t SensorTimestampHandler::last_sensor_timestamp() const { + auto ts = transformer_->last_sensor_timestamp(); + return ts.value_or(-1); +} + +bool SensorTimestampHandler::has_sensor_timestamp() const { + return transformer_->last_sensor_timestamp().has_value(); +} + +// Factory functions + +std::shared_ptr new_sensor_timestamp_store() { + return std::make_shared(); +} + +std::shared_ptr new_sensor_timestamp_sender( + std::shared_ptr peer_factory, + std::shared_ptr store, + std::shared_ptr sender) { + return std::make_shared( + peer_factory->rtc_runtime(), store, sender->rtc_sender()); +} + +std::shared_ptr new_sensor_timestamp_receiver( + std::shared_ptr peer_factory, + std::shared_ptr store, + std::shared_ptr receiver) { + return std::make_shared( + peer_factory->rtc_runtime(), store, receiver->rtc_receiver()); +} + +} // namespace livekit + diff --git a/webrtc-sys/src/sensor_timestamp.rs b/webrtc-sys/src/sensor_timestamp.rs new file mode 100644 index 000000000..5a1a71ac7 --- /dev/null +++ b/webrtc-sys/src/sensor_timestamp.rs @@ -0,0 +1,100 @@ +// Copyright 2025 LiveKit, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
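+
+//! Usage sketch for this bridge (illustrative values; the `ffi` items are
+//! declared below, and `-1` is the "empty/not found" sentinel):
+//!
+//! ```ignore
+//! let store = ffi::new_sensor_timestamp_store();
+//! store.store(1_000, 42);      // one entry per captured frame (FIFO)
+//! assert_eq!(store.pop(), 42); // the send transformer pops per encoded frame
+//! assert_eq!(store.pop(), -1); // empty queue
+//! ```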
+
+use crate::impl_thread_safety;
+
+#[cxx::bridge(namespace = "livekit")]
+pub mod ffi {
+    unsafe extern "C++" {
+        include!("livekit/sensor_timestamp.h");
+        include!("livekit/rtp_sender.h");
+        include!("livekit/rtp_receiver.h");
+        include!("livekit/peer_connection_factory.h");
+
+        type RtpSender = crate::rtp_sender::ffi::RtpSender;
+        type RtpReceiver = crate::rtp_receiver::ffi::RtpReceiver;
+        type PeerConnectionFactory = crate::peer_connection_factory::ffi::PeerConnectionFactory;
+
+        /// Thread-safe store for mapping capture timestamps to sensor timestamps.
+        pub type SensorTimestampStore;
+
+        /// Push a sensor timestamp to the queue.
+        fn store(self: &SensorTimestampStore, capture_timestamp_us: i64, sensor_timestamp_us: i64);
+
+        /// Look up a sensor timestamp by capture timestamp (for debugging).
+        /// Returns -1 if not found.
+        fn lookup(self: &SensorTimestampStore, capture_timestamp_us: i64) -> i64;
+
+        /// Pop the oldest sensor timestamp from the queue.
+        /// Returns -1 if empty.
+        fn pop(self: &SensorTimestampStore) -> i64;
+
+        /// Peek at the oldest sensor timestamp without removing it.
+        /// Returns -1 if empty.
+        fn peek(self: &SensorTimestampStore) -> i64;
+
+        /// Clear old entries.
+        fn prune(self: &SensorTimestampStore, max_age_us: i64);
+
+        /// Create a new sensor timestamp store.
+        fn new_sensor_timestamp_store() -> SharedPtr<SensorTimestampStore>;
+    }
+
+    unsafe extern "C++" {
+        include!("livekit/sensor_timestamp.h");
+
+        /// Handler for sensor timestamp embedding/extraction on RTP streams.
+        pub type SensorTimestampHandler;
+
+        /// Enable/disable timestamp embedding.
+        fn set_enabled(self: &SensorTimestampHandler, enabled: bool);
+
+        /// Check if timestamp embedding is enabled.
+        fn enabled(self: &SensorTimestampHandler) -> bool;
+
+        /// Get the last received sensor timestamp (receiver side only).
+        /// Returns -1 if no timestamp has been received yet.
+        fn last_sensor_timestamp(self: &SensorTimestampHandler) -> i64;
+
+        /// Check if a sensor timestamp has been received.
+        fn has_sensor_timestamp(self: &SensorTimestampHandler) -> bool;
+
+        /// Create a new sensor timestamp handler for a sender.
+        fn new_sensor_timestamp_sender(
+            peer_factory: SharedPtr<PeerConnectionFactory>,
+            store: SharedPtr<SensorTimestampStore>,
+            sender: SharedPtr<RtpSender>,
+        ) -> SharedPtr<SensorTimestampHandler>;
+
+        /// Create a new sensor timestamp handler for a receiver.
+        fn new_sensor_timestamp_receiver(
+            peer_factory: SharedPtr<PeerConnectionFactory>,
+            store: SharedPtr<SensorTimestampStore>,
+            receiver: SharedPtr<RtpReceiver>,
+        ) -> SharedPtr<SensorTimestampHandler>;
+    }
+}
+
+impl_thread_safety!(ffi::SensorTimestampStore, Send + Sync);
+impl_thread_safety!(ffi::SensorTimestampHandler, Send + Sync);
+
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn test_sensor_timestamp_store_creation() {
+        // Basic test to ensure the store can be created
+        // Full testing requires a running WebRTC context
+    }
+}
+
From b11cf986640c3599e306817d31b357f621f02aad Mon Sep 17 00:00:00 2001
From: David Chen
Date: Wed, 3 Dec 2025 19:33:39 -0800
Subject: [PATCH 35/39] working ex

---
 libwebrtc/src/native/sensor_timestamp.rs |  11 ++
 webrtc-sys/src/sensor_timestamp.cpp      | 176 ++++++++++++++++++-----
 2 files changed, 153 insertions(+), 34 deletions(-)

diff --git a/libwebrtc/src/native/sensor_timestamp.rs b/libwebrtc/src/native/sensor_timestamp.rs
index c6d9e3652..519cb6b96 100644
--- a/libwebrtc/src/native/sensor_timestamp.rs
+++ b/libwebrtc/src/native/sensor_timestamp.rs
@@ -53,6 +53,12 @@ impl SensorTimestampStore {
     /// The `capture_timestamp_us` should match the `timestamp_us` field
     /// of the VideoFrame.
pub fn store(&self, capture_timestamp_us: i64, sensor_timestamp_us: i64) { + log::info!( + target: "sensor_timestamp", + "store: capture_ts_us={}, sensor_ts_us={}", + capture_timestamp_us, + sensor_timestamp_us + ); self.sys_handle.store(capture_timestamp_us, sensor_timestamp_us); } @@ -134,6 +140,11 @@ impl SensorTimestampHandler { if self.sys_handle.has_sensor_timestamp() { let ts = self.sys_handle.last_sensor_timestamp(); if ts >= 0 { + log::info!( + target: "sensor_timestamp", + "last_sensor_timestamp: {}", + ts + ); Some(ts) } else { None diff --git a/webrtc-sys/src/sensor_timestamp.cpp b/webrtc-sys/src/sensor_timestamp.cpp index 512a4ce50..36c333b86 100644 --- a/webrtc-sys/src/sensor_timestamp.cpp +++ b/webrtc-sys/src/sensor_timestamp.cpp @@ -32,13 +32,16 @@ namespace livekit { void SensorTimestampStore::store(int64_t capture_timestamp_us, int64_t sensor_timestamp_us) const { webrtc::MutexLock lock(&mutex_); - + // Remove old entries if we're at capacity while (entries_.size() >= kMaxEntries) { entries_.pop_front(); } - + entries_.push_back({capture_timestamp_us, sensor_timestamp_us}); + RTC_LOG(LS_INFO) << "SensorTimestampStore::store capture_ts_us=" << capture_timestamp_us + << " sensor_ts_us=" << sensor_timestamp_us + << " size=" << entries_.size(); } int64_t SensorTimestampStore::lookup(int64_t capture_timestamp_us) const { @@ -56,13 +59,16 @@ int64_t SensorTimestampStore::lookup(int64_t capture_timestamp_us) const { int64_t SensorTimestampStore::pop() const { webrtc::MutexLock lock(&mutex_); - + if (entries_.empty()) { + RTC_LOG(LS_INFO) << "SensorTimestampStore::pop empty"; return -1; } - + int64_t sensor_ts = entries_.front().sensor_timestamp_us; entries_.pop_front(); + RTC_LOG(LS_INFO) << "SensorTimestampStore::pop sensor_ts_us=" << sensor_ts + << " remaining=" << entries_.size(); return sensor_ts; } @@ -97,15 +103,43 @@ void SensorTimestampStore::prune(int64_t max_age_us) const { SensorTimestampTransformer::SensorTimestampTransformer( Direction direction, std::shared_ptr store) - : direction_(direction), store_(store) {} + : direction_(direction), store_(store) { + RTC_LOG(LS_INFO) << "SensorTimestampTransformer created direction=" + << (direction_ == Direction::kSend ? "send" : "recv"); +} void SensorTimestampTransformer::Transform( std::unique_ptr frame) { + uint32_t ssrc = frame->GetSsrc(); + uint32_t rtp_timestamp = frame->GetTimestamp(); + if (!enabled_.load()) { - // Pass through without modification - webrtc::MutexLock lock(&mutex_); - if (callback_) { - callback_->OnTransformedFrame(std::move(frame)); + // Pass through without modification, but still log basic info so we know + // frames are flowing through the transformer. + RTC_LOG(LS_INFO) << "SensorTimestampTransformer::Transform (disabled)" + << " direction=" + << (direction_ == Direction::kSend ? "send" : "recv") + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; + + rtc::scoped_refptr cb; + { + webrtc::MutexLock lock(&mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + cb = it->second; + } else { + cb = callback_; + } + } + + if (cb) { + cb->OnTransformedFrame(std::move(frame)); + } else { + RTC_LOG(LS_WARNING) + << "SensorTimestampTransformer::Transform (disabled) has no callback" + << " direction=" + << (direction_ == Direction::kSend ? 
"send" : "recv") + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } return; } @@ -121,63 +155,125 @@ void SensorTimestampTransformer::TransformSend( std::unique_ptr frame) { // Get the RTP timestamp from the frame for logging uint32_t rtp_timestamp = frame->GetTimestamp(); - + uint32_t ssrc = frame->GetSsrc(); + auto data = frame->GetData(); - + // Pop the next sensor timestamp from the queue. // This assumes frames are captured and encoded in order (FIFO). int64_t ts_to_embed = 0; - + if (store_) { int64_t popped_ts = store_->pop(); if (popped_ts >= 0) { ts_to_embed = popped_ts; + } else { + RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformSend no sensor timestamp available" + << " rtp_ts=" << rtp_timestamp << " orig_size=" << data.size(); } } - + // Always append trailer when enabled (even if timestamp is 0, // which indicates no sensor timestamp was set for this frame) std::vector new_data; if (enabled_.load()) { new_data = AppendTimestampTrailer(data, ts_to_embed); frame->SetData(rtc::ArrayView(new_data)); - - RTC_LOG(LS_VERBOSE) << "SensorTimestampTransformer: Appended timestamp trailer" - << " ts=" << ts_to_embed - << " rtp_ts=" << rtp_timestamp - << " data_size=" << new_data.size(); + + RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformSend appended trailer" + << " ts_us=" << ts_to_embed << " rtp_ts=" << rtp_timestamp + << " ssrc=" << ssrc + << " orig_size=" << data.size() + << " new_size=" << new_data.size(); } - - webrtc::MutexLock lock(&mutex_); - if (callback_) { - callback_->OnTransformedFrame(std::move(frame)); + + // Forward to the appropriate callback (either global or per-SSRC sink). + rtc::scoped_refptr cb; + { + webrtc::MutexLock lock(&mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + cb = it->second; + } else { + cb = callback_; + } + } + + if (cb) { + cb->OnTransformedFrame(std::move(frame)); + } else { + RTC_LOG(LS_WARNING) + << "SensorTimestampTransformer::TransformSend has no callback" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp; } } void SensorTimestampTransformer::TransformReceive( std::unique_ptr frame) { + uint32_t ssrc = frame->GetSsrc(); + uint32_t rtp_timestamp = frame->GetTimestamp(); auto data = frame->GetData(); std::vector stripped_data; - + + RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformReceive begin" + << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp + << " size=" << data.size(); + auto sensor_ts = ExtractTimestampTrailer(data, stripped_data); - + if (sensor_ts.has_value()) { // Store the extracted timestamp for later retrieval last_sensor_timestamp_.store(sensor_ts.value()); has_last_sensor_timestamp_.store(true); - + // Update frame with stripped data frame->SetData(rtc::ArrayView(stripped_data)); - - RTC_LOG(LS_VERBOSE) << "SensorTimestampTransformer: Extracted timestamp trailer" - << " ts=" << sensor_ts.value() - << " rtp_ts=" << frame->GetTimestamp() - << " stripped_size=" << stripped_data.size(); + + RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformReceive extracted trailer" + << " ts_us=" << sensor_ts.value() + << " rtp_ts=" << frame->GetTimestamp() + << " ssrc=" << ssrc + << " stripped_size=" << stripped_data.size() + << " orig_size=" << data.size(); + } else { + // Log the last few bytes so we can see whether the magic marker is present. 
+    size_t log_len = std::min<size_t>(data.size(), 16);
+    std::string tail_bytes;
+    tail_bytes.reserve(log_len * 4);
+    for (size_t i = data.size() - log_len; i < data.size(); ++i) {
+      char buf[8];
+      std::snprintf(buf, sizeof(buf), "%u", static_cast<unsigned>(data[i]));
+      if (!tail_bytes.empty()) {
+        tail_bytes.append(",");
+      }
+      tail_bytes.append(buf);
+    }
+
+    RTC_LOG(LS_INFO)
+        << "SensorTimestampTransformer::TransformReceive no trailer found"
+        << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp
+        << " size=" << data.size()
+        << " tail_bytes_dec=[" << tail_bytes << "]";
+  }
-
-  webrtc::MutexLock lock(&mutex_);
-  if (callback_) {
-    callback_->OnTransformedFrame(std::move(frame));
+
+  // Forward to the appropriate callback (either global or per-SSRC sink).
+  rtc::scoped_refptr<webrtc::TransformedFrameCallback> cb;
+  {
+    webrtc::MutexLock lock(&mutex_);
+    auto it = sink_callbacks_.find(ssrc);
+    if (it != sink_callbacks_.end()) {
+      cb = it->second;
+    } else {
+      cb = callback_;
+    }
+  }
+
+  if (cb) {
+    cb->OnTransformedFrame(std::move(frame));
+  } else {
+    RTC_LOG(LS_WARNING)
+        << "SensorTimestampTransformer::TransformReceive has no callback"
+        << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp;
   }
 }
@@ -206,6 +302,10 @@ std::optional<int64_t> SensorTimestampTransformer::ExtractTimestampTrailer(
     rtc::ArrayView<const uint8_t> data,
     std::vector<uint8_t>& out_data) {
   if (data.size() < kSensorTimestampTrailerSize) {
+    RTC_LOG(LS_INFO)
+        << "SensorTimestampTransformer::ExtractTimestampTrailer data too small"
+        << " size=" << data.size()
+        << " required=" << kSensorTimestampTrailerSize;
     out_data.assign(data.begin(), data.end());
     return std::nullopt;
   }
@@ -213,6 +313,14 @@ std::optional<int64_t> SensorTimestampTransformer::ExtractTimestampTrailer(
   // Check for magic bytes at the end
   const uint8_t* magic_start = data.data() + data.size() - 4;
   if (std::memcmp(magic_start, kSensorTimestampMagic, 4) != 0) {
+    RTC_LOG(LS_INFO)
+        << "SensorTimestampTransformer::ExtractTimestampTrailer magic mismatch"
+        << " size=" << data.size()
+        << " magic_bytes_dec=["
+        << static_cast<unsigned>(magic_start[0]) << ","
+        << static_cast<unsigned>(magic_start[1]) << ","
+        << static_cast<unsigned>(magic_start[2]) << ","
+        << static_cast<unsigned>(magic_start[3]) << "]";
     out_data.assign(data.begin(), data.end());
     return std::nullopt;
   }
From 951ef5913fe8c374b49d0b17eb83a48bc6879ec3 Mon Sep 17 00:00:00 2001
From: David Chen
Date: Thu, 4 Dec 2025 09:37:14 -0800
Subject: [PATCH 36/39] add e2ee key config

---
 examples/local_video/src/publisher.rs  | 14 +++++++++++++-
 examples/local_video/src/subscriber.rs | 12 ++++++++++++
 2 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs
index fbfffe45f..d3c3d74c1 100644
--- a/examples/local_video/src/publisher.rs
+++ b/examples/local_video/src/publisher.rs
@@ -1,5 +1,6 @@
 use anyhow::Result;
 use clap::Parser;
+use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType};
 use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding};
 use livekit::prelude::*;
 use livekit::webrtc::video_frame::{I420Buffer, VideoFrame, VideoRotation};
 use livekit::webrtc::video_source::native::NativeVideoSource;
@@ -61,6 +62,10 @@ struct Args {
     #[arg(long)]
     api_secret: Option<String>,
 
+    /// Shared E2EE key (enables end-to-end encryption when set)
+    #[arg(long)]
+    e2ee_key: Option<String>,
+
     /// Use H.265/HEVC encoding if supported (falls back to H.264 on failure)
     #[arg(long, default_value_t = false)]
     h265: bool,
@@ -111,6 +116,13 @@ async fn main() -> Result<()> {
     info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity);
     let mut room_options = RoomOptions::default();
     room_options.auto_subscribe = true;
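+    // Example invocation (illustrative key value): run both binaries with the
+    // same `--e2ee-key my-shared-secret` so the shared-key provider configured
+    // below derives matching keys on each side.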
+    if let Some(ref key) = args.e2ee_key {
+        let key_provider =
+            KeyProvider::with_shared_key(KeyProviderOptions::default(), key.clone().into_bytes());
+        room_options.encryption =
+            Some(E2eeOptions { encryption_type: EncryptionType::Gcm, key_provider });
+        info!("E2EE enabled with provided shared key");
+    }
     let (room, _) = Room::connect(&url, &token, room_options).await?;
     let room = std::sync::Arc::new(room);
     info!("Connected: {} - {}", room.name(), room.sid().await);
@@ -371,7 +383,7 @@ async fn main() -> Result<()> {
         // Attach a static sensor timestamp for testing and push it into the
         // shared queue used by the sensor timestamp transformer.
         if let Some(store) = track.sensor_timestamp_store() {
-            let sensor_ts = frame.timestamp_us + 123_456; // simple fixed offset for visibility
+            let sensor_ts = 123_456; //frame.timestamp_us + 123_456; // simple fixed offset for visibility
             frame.sensor_timestamp_us = Some(sensor_ts);
             store.store(frame.timestamp_us, sensor_ts);
             info!(
diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs
index 250d89f59..608e33234 100644
--- a/examples/local_video/src/subscriber.rs
+++ b/examples/local_video/src/subscriber.rs
@@ -5,6 +5,7 @@ use egui_wgpu as egui_wgpu_backend;
 use egui_wgpu_backend::CallbackTrait;
 use eframe::wgpu::{self, util::DeviceExt};
 use futures::StreamExt;
+use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType};
 use livekit::prelude::*;
 use livekit::webrtc::video_stream::native::NativeVideoStream;
 use livekit_api::access_token;
@@ -40,6 +41,10 @@ struct Args {
     #[arg(long)]
     api_secret: Option<String>,
 
+    /// Shared E2EE key (enables end-to-end encryption when set)
+    #[arg(long)]
+    e2ee_key: Option<String>,
+
     /// Only subscribe to video from this participant identity
     #[arg(long)]
     participant: Option<String>,
@@ -192,6 +197,13 @@ async fn main() -> Result<()> {
     info!("Connecting to LiveKit room '{}' as '{}'...", args.room_name, args.identity);
     let mut room_options = RoomOptions::default();
     room_options.auto_subscribe = true;
+    if let Some(ref key) = args.e2ee_key {
+        let key_provider =
+            KeyProvider::with_shared_key(KeyProviderOptions::default(), key.clone().into_bytes());
+        room_options.encryption =
+            Some(E2eeOptions { encryption_type: EncryptionType::Gcm, key_provider });
+        info!("E2EE enabled with provided shared key");
+    }
     let (room, _) = Room::connect(&url, &token, room_options).await?;
     let room = Arc::new(room);
     info!("Connected: {} - {}", room.name(), room.sid().await);

From ab3a055b21ddf6de21ac51beb7564849922fa7b0 Mon Sep 17 00:00:00 2001
From: David Chen 
Date: Thu, 4 Dec 2025 15:18:33 -0800
Subject: [PATCH 37/39] measure latency along pipeline

---
 examples/local_video/src/publisher.rs         |  5 ++++-
 examples/local_video/src/subscriber.rs        | 15 +++++++++++--
 .../src/native/peer_connection_factory.rs     | 17 ++++++++++----
 libwebrtc/src/native/sensor_timestamp.rs      |  5 -----
 webrtc-sys/src/sensor_timestamp.cpp           | 22 +++++++++++--------
 5 files changed, 43 insertions(+), 21 deletions(-)

diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs
index d3c3d74c1..7f4081eb5 100644
--- a/examples/local_video/src/publisher.rs
+++ b/examples/local_video/src/publisher.rs
@@ -383,7 +383,10 @@ async fn main() -> Result<()> {
         // Attach a static sensor timestamp for testing and push it into the
         // shared queue used by the sensor timestamp transformer.
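+        // Trailer sketch (assumption, mirroring the C++ transformer): the send side
+        // appends the timestamp plus a 4-byte magic marker to each encoded frame,
+        // and the receive-side transformer strips it again before decoding.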
if let Some(store) = track.sensor_timestamp_store() { - let sensor_ts = 123_456; //frame.timestamp_us + 123_456; // simple fixed offset for visibility + let sensor_ts = std::time::SystemTime::now() + .duration_since(std::time::SystemTime::UNIX_EPOCH) + .expect("SystemTime before UNIX EPOCH") + .as_micros() as i64; frame.sensor_timestamp_us = Some(sensor_ts); store.store(frame.timestamp_us, sensor_ts); info!( diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs index 608e33234..cb97ec3da 100644 --- a/examples/local_video/src/subscriber.rs +++ b/examples/local_video/src/subscriber.rs @@ -383,9 +383,20 @@ async fn main() -> Result<()> { // Log any parsed sensor timestamp for this frame if available. if let Some(ts) = video_track.last_sensor_timestamp() { + // Get the current system timestamp in microseconds + use std::time::{SystemTime, UNIX_EPOCH}; + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_micros() as i64; + + // Calculate the latency in microseconds, then convert to milliseconds + let latency_us = now - ts; + let latency_ms = latency_us as f64 / 1000.0; + info!( - "Subscriber: received frame {}x{} with sensor_timestamp_us={}", - w, h, ts + "Subscriber: decoded frame {}x{} sensor_timestamp={} latency={:.2} ms", + w, h, ts, latency_ms ); } diff --git a/libwebrtc/src/native/peer_connection_factory.rs b/libwebrtc/src/native/peer_connection_factory.rs index 4edc63047..0e4f23348 100644 --- a/libwebrtc/src/native/peer_connection_factory.rs +++ b/libwebrtc/src/native/peer_connection_factory.rs @@ -44,10 +44,19 @@ impl Default for PeerConnectionFactory { fn default() -> Self { let mut log_sink = LOG_SINK.lock(); if log_sink.is_none() { - *log_sink = Some(sys_rtc::ffi::new_log_sink(|msg, _| { - let msg = msg.strip_suffix("\r\n").or(msg.strip_suffix('\n')).unwrap_or(&msg); - - log::debug!(target: "libwebrtc", "{}", msg); + *log_sink = Some(sys_rtc::ffi::new_log_sink(|msg, _severity| { + let msg = msg + .strip_suffix("\r\n") + .or(msg.strip_suffix('\n')) + .unwrap_or(&msg); + + // Route sensor timestamp transformer logs to a dedicated target so they can + // be enabled independently from the very noisy general libwebrtc logs. 
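+            // Usage sketch (assumption: env_logger-style filtering), e.g.
+            //   RUST_LOG=sensor_timestamp_rtp=info,libwebrtc=off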
+            if msg.contains("SensorTimestampTransformer") {
+                log::info!(target: "sensor_timestamp_rtp", "{}", msg);
+            } else {
+                log::debug!(target: "libwebrtc", "{}", msg);
+            }
         }));
     }
 
diff --git a/libwebrtc/src/native/sensor_timestamp.rs b/libwebrtc/src/native/sensor_timestamp.rs
index 519cb6b96..c0149ee49 100644
--- a/libwebrtc/src/native/sensor_timestamp.rs
+++ b/libwebrtc/src/native/sensor_timestamp.rs
@@ -140,11 +140,6 @@ impl SensorTimestampHandler {
         if self.sys_handle.has_sensor_timestamp() {
             let ts = self.sys_handle.last_sensor_timestamp();
             if ts >= 0 {
-                log::info!(
-                    target: "sensor_timestamp",
-                    "last_sensor_timestamp: {}",
-                    ts
-                );
                 Some(ts)
             } else {
                 None
diff --git a/webrtc-sys/src/sensor_timestamp.cpp b/webrtc-sys/src/sensor_timestamp.cpp
index 36c333b86..14e014a6f 100644
--- a/webrtc-sys/src/sensor_timestamp.cpp
+++ b/webrtc-sys/src/sensor_timestamp.cpp
@@ -19,6 +19,7 @@
 #include 
 #include 
 #include 
+#include <chrono>
 
 #include "api/make_ref_counted.h"
 #include "livekit/peer_connection_factory.h"
@@ -215,13 +216,18 @@ void SensorTimestampTransformer::TransformReceive(
   auto data = frame->GetData();
   std::vector<uint8_t> stripped_data;
 
-  RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformReceive begin"
-                   << " ssrc=" << ssrc << " rtp_ts=" << rtp_timestamp
-                   << " size=" << data.size();
-
   auto sensor_ts = ExtractTimestampTrailer(data, stripped_data);
 
   if (sensor_ts.has_value()) {
+    // Compute latency from embedded sensor timestamp to RTP receive
+    // time (both in microseconds since Unix epoch), so we can compare
+    // this with the latency logged after decode on the subscriber side.
+    int64_t now_us = std::chrono::duration_cast<std::chrono::microseconds>(
+                         std::chrono::system_clock::now().time_since_epoch())
+                         .count();
+    double recv_latency_ms =
+        static_cast<double>(now_us - sensor_ts.value()) / 1000.0;
+
     // Store the extracted timestamp for later retrieval
     last_sensor_timestamp_.store(sensor_ts.value());
     has_last_sensor_timestamp_.store(true);
@@ -229,12 +235,10 @@ void SensorTimestampTransformer::TransformReceive(
     // Update frame with stripped data
     frame->SetData(rtc::ArrayView<const uint8_t>(stripped_data));
 
-    RTC_LOG(LS_INFO) << "SensorTimestampTransformer::TransformReceive extracted trailer"
-                     << " ts_us=" << sensor_ts.value()
+    RTC_LOG(LS_INFO) << "SensorTimestampTransformer"
+                     << " sensor_ts=" << sensor_ts.value()
                      << " rtp_ts=" << frame->GetTimestamp()
-                     << " ssrc=" << ssrc
-                     << " stripped_size=" << stripped_data.size()
-                     << " orig_size=" << data.size();
+                     << " recv_latency=" << recv_latency_ms << " ms";
   } else {
     // Log the last few bytes so we can see whether the magic marker is present.
     size_t log_len = std::min<size_t>(data.size(), 16);

From 69cb8dcc5c6e9bb93e0339ddc5f1dca053e51ba0 Mon Sep 17 00:00:00 2001
From: David Chen 
Date: Wed, 10 Dec 2025 11:07:49 -0800
Subject: [PATCH 38/39] add video render to publisher

---
 Cargo.lock                             |  13 +
 examples/local_video/Cargo.toml        |   1 +
 examples/local_video/src/publisher.rs  | 568 ++++++++++++++++---------
 examples/local_video/src/subscriber.rs | 414 +++---------------
 examples/local_video/src/yuv_viewer.rs | 479 +++++++++++++++++++++
 5 files changed, 927 insertions(+), 548 deletions(-)
 create mode 100644 examples/local_video/src/yuv_viewer.rs

diff --git a/Cargo.lock b/Cargo.lock
index 172b738b2..ad4ca9ccf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3253,6 +3253,7 @@ dependencies = [
  "nokhwa",
  "objc2 0.6.3",
  "parking_lot",
+ "time",
  "tokio",
  "webrtc-sys",
  "wgpu 25.0.2",
@@ -5632,10 +5633,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
 dependencies = [
  "deranged",
+ "itoa",
  "num-conv",
  "powerfmt",
  "serde",
  "time-core",
+ "time-macros",
 ]
 
 [[package]]
@@ -5644,6 +5647,16 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
 
+[[package]]
+name = "time-macros"
+version = "0.2.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
+dependencies = [
+ "num-conv",
+ "time-core",
+]
+
 [[package]]
 name = "tiny-skia"
 version = "0.11.4"
diff --git a/examples/local_video/Cargo.toml b/examples/local_video/Cargo.toml
index 8d01c1086..37dd6b8ee 100644
--- a/examples/local_video/Cargo.toml
+++ b/examples/local_video/Cargo.toml
@@ -33,6 +33,7 @@ winit = { version = "0.30.11", features = ["android-native-activity"] }
 parking_lot = { version = "0.12.1", features = ["deadlock_detection"] }
 anyhow = "1"
 bytemuck = { version = "1.16", features = ["derive"] }
+time = { version = "0.3", features = ["macros", "formatting"] }
 
 [target.'cfg(target_os = "macos")'.dependencies]
 objc2 = { version = "0.6.0", features = ["relax-sign-encoding"] }
diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs
index 7f4081eb5..1b6b6c050 100644
--- a/examples/local_video/src/publisher.rs
+++ b/examples/local_video/src/publisher.rs
@@ -1,5 +1,7 @@
 use anyhow::Result;
 use clap::Parser;
+use eframe::egui;
+use egui_wgpu as egui_wgpu_backend;
 use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType};
 use livekit::options::{TrackPublishOptions, VideoCodec, VideoEncoding};
 use livekit::prelude::*;
@@ -8,12 +10,30 @@ use livekit::webrtc::video_source::native::NativeVideoSource;
 use livekit::webrtc::video_source::{RtcVideoSource, VideoResolution};
 use livekit_api::access_token;
 use log::{debug, info};
-use yuv_sys as yuv_sys;
 use nokhwa::pixel_format::RgbFormat;
 use nokhwa::utils::{ApiBackend, CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution};
 use nokhwa::Camera;
+use parking_lot::Mutex;
 use std::env;
+use std::sync::Arc;
 use std::time::{Duration, Instant};
+use yuv_sys as yuv_sys;
+
+mod yuv_viewer;
+use yuv_viewer::{SharedYuv, YuvPaintCallback};
+
+fn format_sensor_timestamp(ts_micros: i64) -> Option<String> {
+    if ts_micros == 0 {
+        // Treat 0 as "not set"
+        return None;
+    }
+    let nanos = i128::from(ts_micros).checked_mul(1_000)?;
+    let dt = time::OffsetDateTime::from_unix_timestamp_nanos(nanos).ok()?;
+    let format = time::macros::format_description!(
+        "[year]-[month]-[day] [hour]:[minute]:[second]:[subsecond digits:3]"
+    );
+    dt.format(&format).ok()
+}
 
 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
@@ -66,9 +86,17 @@ struct Args {
     #[arg(long)]
     e2ee_key: Option<String>,
 
+    /// Attach sensor timestamps to published frames (for testing)
+    #[arg(long, default_value_t = false)]
+    sensor_timestamp: bool,
+
     /// Use H.265/HEVC encoding if supported (falls back to H.264 on failure)
     #[arg(long, default_value_t = false)]
     h265: bool,
+
+    /// Show a local preview window for the captured video
+    #[arg(long, default_value_t = false)]
+    show_video: bool,
 }
 
 fn list_cameras() -> Result<()> {
@@ -219,83 +247,93 @@ async fn main() -> Result<()> {
         info!("Published camera track");
     }
 
-    // Reusable I420 buffer and frame
-    let mut frame = VideoFrame {
-        rotation: VideoRotation::VideoRotation0,
-        timestamp_us: 0,
-        sensor_timestamp_us: None,
-        buffer: I420Buffer::new(width, height),
+    // Optional shared YUV buffer for local preview UI
+    let shared_preview = if args.show_video {
+        Some(Arc::new(Mutex::new(SharedYuv {
+            width: 0,
+            height: 0,
+            stride_y: 0,
+            stride_u: 0,
+            stride_v: 0,
+            y: Vec::new(),
+            u: Vec::new(),
+            v: Vec::new(),
+            dirty: false,
+            sensor_timestamp: None,
+        })))
+    } else {
+        None
     };
-    let is_yuyv = fmt.format() == FrameFormat::YUYV;
-    info!(
-        "Selected conversion path: {}",
-        if is_yuyv { "YUYV->I420 (libyuv)" } else { "Auto (RGB24 or MJPEG)" }
-    );
 
-    // Accurate pacing using absolute schedule (no drift)
-    let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps));
-    ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
-    // Align the first tick to now
-    ticker.tick().await;
-    let start_ts = Instant::now();
-
-    // Capture loop
-    let mut frames: u64 = 0;
-    let mut last_fps_log = Instant::now();
-    let target = Duration::from_secs_f64(1.0 / pace_fps);
-    info!("Target frame interval: {:.2} ms", target.as_secs_f64() * 1000.0);
-
-    // Timing accumulators (ms) for rolling stats
-    let mut sum_get_ms = 0.0;
-    let mut sum_decode_ms = 0.0;
-    let mut sum_convert_ms = 0.0;
-    let mut sum_capture_ms = 0.0;
-    let mut sum_sleep_ms = 0.0;
-    let mut sum_iter_ms = 0.0;
-    let mut logged_mjpeg_fallback = false;
-    loop {
-        // Wait until the scheduled next frame time
-        let wait_start = Instant::now();
+    // Spawn the capture loop on the Tokio runtime so we can optionally run an egui
+    // preview window on the main thread.
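+    // Only the shared Arc<Mutex<SharedYuv>> crosses the thread boundary; the
+    // capture task swaps whole planes into it, so the UI thread never holds the
+    // lock for longer than a vector swap.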
+ let capture_shared = shared_preview.clone(); + let show_sensor_ts = args.sensor_timestamp; + let capture_handle = tokio::spawn(async move { + // Reusable I420 buffer and frame + let mut frame = VideoFrame { + rotation: VideoRotation::VideoRotation0, + timestamp_us: 0, + sensor_timestamp_us: None, + buffer: I420Buffer::new(width, height), + }; + let is_yuyv = fmt.format() == FrameFormat::YUYV; + info!( + "Selected conversion path: {}", + if is_yuyv { "YUYV->I420 (libyuv)" } else { "Auto (RGB24 or MJPEG)" } + ); + + // Accurate pacing using absolute schedule (no drift) + let mut ticker = tokio::time::interval(Duration::from_secs_f64(1.0 / pace_fps)); + ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); + // Align the first tick to now ticker.tick().await; - let iter_start = Instant::now(); - - // Get frame as RGB24 (decoded by nokhwa if needed) - let t0 = Instant::now(); - let frame_buf = camera.frame()?; - let t1 = Instant::now(); - let (stride_y, stride_u, stride_v) = frame.buffer.strides(); - let (data_y, data_u, data_v) = frame.buffer.data_mut(); - // Fast path for YUYV: convert directly to I420 via libyuv - let t2 = if is_yuyv { - let src = frame_buf.buffer(); - let src_bytes = src.as_ref(); - let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 - let t2_local = t1; // no decode step in YUYV path - unsafe { - // returns 0 on success - let _ = yuv_sys::rs_YUY2ToI420( - src_bytes.as_ptr(), - src_stride, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } - t2_local - } else { - // Auto path (either RGB24 already or compressed MJPEG) - let src = frame_buf.buffer(); - let t2_local = if src.len() == (width as usize * height as usize * 3) { - // Already RGB24 from backend; convert directly + let start_ts = Instant::now(); + + // Capture loop + let mut frames: u64 = 0; + let mut last_fps_log = Instant::now(); + let target = Duration::from_secs_f64(1.0 / pace_fps); + info!("Target frame interval: {:.2} ms", target.as_secs_f64() * 1000.0); + + // Timing accumulators (ms) for rolling stats + let mut sum_get_ms = 0.0; + let mut sum_decode_ms = 0.0; + let mut sum_convert_ms = 0.0; + let mut sum_capture_ms = 0.0; + let mut sum_sleep_ms = 0.0; + let mut sum_iter_ms = 0.0; + let mut logged_mjpeg_fallback = false; + + // Local YUV buffers reused for preview upload (if enabled) + let mut y_buf: Vec = Vec::new(); + let mut u_buf: Vec = Vec::new(); + let mut v_buf: Vec = Vec::new(); + let mut last_sensor_ts: Option = None; + + loop { + // Wait until the scheduled next frame time + let wait_start = Instant::now(); + ticker.tick().await; + let iter_start = Instant::now(); + + // Get frame as RGB24 (decoded by nokhwa if needed) + let t0 = Instant::now(); + let frame_buf = camera.frame()?; + let t1 = Instant::now(); + let (stride_y, stride_u, stride_v) = frame.buffer.strides(); + let (data_y, data_u, data_v) = frame.buffer.data_mut(); + // Fast path for YUYV: convert directly to I420 via libyuv + let t2 = if is_yuyv { + let src = frame_buf.buffer(); + let src_bytes = src.as_ref(); + let src_stride = (width * 2) as i32; // YUYV packed 4:2:2 + let t2_local = t1; // no decode step in YUYV path unsafe { - let _ = yuv_sys::rs_RGB24ToI420( - src.as_ref().as_ptr(), - (width * 3) as i32, + // returns 0 on success + let _ = yuv_sys::rs_YUY2ToI420( + src_bytes.as_ptr(), + src_stride, data_y.as_mut_ptr(), stride_y as i32, data_u.as_mut_ptr(), @@ -306,144 +344,272 @@ async fn 
main() -> Result<()> { height as i32, ); } - Instant::now() + t2_local } else { - // Try fast MJPEG->I420 via libyuv if available; fallback to image crate - let mut used_fast_mjpeg = false; - let t2_try = unsafe { - // rs_MJPGToI420 returns 0 on success - let ret = yuv_sys::rs_MJPGToI420( - src.as_ref().as_ptr(), - src.len(), - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - width as i32, - height as i32, - ); - if ret == 0 { used_fast_mjpeg = true; Instant::now() } else { t1 } - }; - if used_fast_mjpeg { - t2_try + // Auto path (either RGB24 already or compressed MJPEG) + let src = frame_buf.buffer(); + let t2_local = if src.len() == (width as usize * height as usize * 3) { + // Already RGB24 from backend; convert directly + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + src.as_ref().as_ptr(), + (width * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() } else { - // Fallback: decode MJPEG using image crate then RGB24->I420 - match image::load_from_memory(src.as_ref()) { - Ok(img_dyn) => { - let rgb8 = img_dyn.to_rgb8(); - let dec_w = rgb8.width() as u32; - let dec_h = rgb8.height() as u32; - if dec_w != width || dec_h != height { - log::warn!( - "Decoded MJPEG size {}x{} differs from requested {}x{}; dropping frame", - dec_w, dec_h, width, height - ); - continue; - } - unsafe { - let _ = yuv_sys::rs_RGB24ToI420( - rgb8.as_raw().as_ptr(), - (dec_w * 3) as i32, - data_y.as_mut_ptr(), - stride_y as i32, - data_u.as_mut_ptr(), - stride_u as i32, - data_v.as_mut_ptr(), - stride_v as i32, - width as i32, - height as i32, - ); - } + // Try fast MJPEG->I420 via libyuv if available; fallback to image crate + let mut used_fast_mjpeg = false; + let t2_try = unsafe { + // rs_MJPGToI420 returns 0 on success + let ret = yuv_sys::rs_MJPGToI420( + src.as_ref().as_ptr(), + src.len(), + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + width as i32, + height as i32, + ); + if ret == 0 { + used_fast_mjpeg = true; Instant::now() + } else { + t1 } - Err(e2) => { - if !logged_mjpeg_fallback { - log::error!( - "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", - e2 - ); - logged_mjpeg_fallback = true; + }; + if used_fast_mjpeg { + t2_try + } else { + // Fallback: decode MJPEG using image crate then RGB24->I420 + match image::load_from_memory(src.as_ref()) { + Ok(img_dyn) => { + let rgb8 = img_dyn.to_rgb8(); + let dec_w = rgb8.width() as u32; + let dec_h = rgb8.height() as u32; + if dec_w != width || dec_h != height { + log::warn!( + "Decoded MJPEG size {}x{} differs from requested {}x{}; dropping frame", + dec_w, dec_h, width, height + ); + continue; + } + unsafe { + let _ = yuv_sys::rs_RGB24ToI420( + rgb8.as_raw().as_ptr(), + (dec_w * 3) as i32, + data_y.as_mut_ptr(), + stride_y as i32, + data_u.as_mut_ptr(), + stride_u as i32, + data_v.as_mut_ptr(), + stride_v as i32, + width as i32, + height as i32, + ); + } + Instant::now() + } + Err(e2) => { + if !logged_mjpeg_fallback { + log::error!( + "MJPEG decode failed; buffer not RGB24 and image decode failed: {}", + e2 + ); + logged_mjpeg_fallback = true; + } + continue; } - continue; } } - } + }; + t2_local }; - t2_local - }; - let t3 = Instant::now(); - - // Update RTP timestamp 
(monotonic, microseconds since start) - frame.timestamp_us = start_ts.elapsed().as_micros() as i64; - - // Attach a static sensor timestamp for testing and push it into the - // shared queue used by the sensor timestamp transformer. - if let Some(store) = track.sensor_timestamp_store() { - let sensor_ts = std::time::SystemTime::now() - .duration_since(std::time::SystemTime::UNIX_EPOCH) - .expect("SystemTime before UNIX EPOCH") - .as_micros() as i64; - frame.sensor_timestamp_us = Some(sensor_ts); - store.store(frame.timestamp_us, sensor_ts); - info!( - "Publisher: attached sensor_timestamp_us={} for capture_ts={}", - sensor_ts, frame.timestamp_us - ); + let t3 = Instant::now(); + + // Update RTP timestamp (monotonic, microseconds since start) + frame.timestamp_us = start_ts.elapsed().as_micros() as i64; + + // Optionally attach a sensor timestamp and push it into the shared queue + // used by the sensor timestamp transformer. + if show_sensor_ts { + if let Some(store) = track.sensor_timestamp_store() { + let sensor_ts = std::time::SystemTime::now() + .duration_since(std::time::SystemTime::UNIX_EPOCH) + .expect("SystemTime before UNIX EPOCH") + .as_micros() as i64; + frame.sensor_timestamp_us = Some(sensor_ts); + store.store(frame.timestamp_us, sensor_ts); + last_sensor_ts = Some(sensor_ts); + info!( + "Publisher: attached sensor_timestamp_us={} for capture_ts={}", + sensor_ts, frame.timestamp_us + ); + } + } + + // If preview is enabled, copy I420 planes into the shared buffer. + if let Some(shared) = &capture_shared { + let (sy, su, sv) = (stride_y as u32, stride_u as u32, stride_v as u32); + let (dy, du, dv) = frame.buffer.data(); + let ch = (height + 1) / 2; + let y_size = (sy * height) as usize; + let u_size = (su * ch) as usize; + let v_size = (sv * ch) as usize; + if y_buf.len() != y_size { + y_buf.resize(y_size, 0); + } + if u_buf.len() != u_size { + u_buf.resize(u_size, 0); + } + if v_buf.len() != v_size { + v_buf.resize(v_size, 0); + } + y_buf.copy_from_slice(dy); + u_buf.copy_from_slice(du); + v_buf.copy_from_slice(dv); + + let mut s = shared.lock(); + s.width = width; + s.height = height; + s.stride_y = sy; + s.stride_u = su; + s.stride_v = sv; + std::mem::swap(&mut s.y, &mut y_buf); + std::mem::swap(&mut s.u, &mut u_buf); + std::mem::swap(&mut s.v, &mut v_buf); + s.dirty = true; + s.sensor_timestamp = last_sensor_ts; + } + + rtc_source.capture_frame(&frame); + let t4 = Instant::now(); + + frames += 1; + // We already paced via interval; measure actual sleep time for logging only + let sleep_dur = iter_start - wait_start; + + // Per-iteration timing bookkeeping + let t_end = Instant::now(); + let get_ms = (t1 - t0).as_secs_f64() * 1000.0; + let decode_ms = (t2 - t1).as_secs_f64() * 1000.0; + let convert_ms = (t3 - t2).as_secs_f64() * 1000.0; + let capture_ms = (t4 - t3).as_secs_f64() * 1000.0; + let sleep_ms = sleep_dur.as_secs_f64() * 1000.0; + let iter_ms = (t_end - iter_start).as_secs_f64() * 1000.0; + sum_get_ms += get_ms; + sum_decode_ms += decode_ms; + sum_convert_ms += convert_ms; + sum_capture_ms += capture_ms; + sum_sleep_ms += sleep_ms; + sum_iter_ms += iter_ms; + + if last_fps_log.elapsed() >= std::time::Duration::from_secs(2) { + let secs = last_fps_log.elapsed().as_secs_f64(); + let fps_est = frames as f64 / secs; + let n = frames.max(1) as f64; + info!( + "Publishing video: {}x{}, ~{:.1} fps | avg ms: get {:.2}, decode {:.2}, convert {:.2}, capture {:.2}, sleep {:.2}, iter {:.2} | target {:.2}", + width, + height, + fps_est, + sum_get_ms / n, + sum_decode_ms / n, + 
sum_convert_ms / n,
+                sum_capture_ms / n,
+                sum_sleep_ms / n,
+                sum_iter_ms / n,
+                target.as_secs_f64() * 1000.0,
+            );
+            frames = 0;
+            sum_get_ms = 0.0;
+            sum_decode_ms = 0.0;
+            sum_convert_ms = 0.0;
+            sum_capture_ms = 0.0;
+            sum_sleep_ms = 0.0;
+            sum_iter_ms = 0.0;
+            last_fps_log = Instant::now();
+            }
+        }
+        #[allow(unreachable_code)]
+        Ok::<(), anyhow::Error>(())
+    });
+
+    // If preview is requested, run an egui window on the main thread rendering from
+    // the shared YUV buffer. Otherwise, just wait for the capture loop.
+    if let Some(shared) = shared_preview {
+        struct PreviewApp {
+            shared: Arc<Mutex<SharedYuv>>,
+        }
+
+        impl eframe::App for PreviewApp {
+            fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
+                egui::CentralPanel::default().show(ctx, |ui| {
+                    let available = ui.available_size();
+                    let rect = egui::Rect::from_min_size(ui.min_rect().min, available);
+
+                    ui.ctx().request_repaint();
+
+                    let cb = egui_wgpu_backend::Callback::new_paint_callback(
+                        rect,
+                        YuvPaintCallback {
+                            shared: self.shared.clone(),
+                        },
+                    );
+                    ui.painter().add(cb);
+                });
+
+                // Sensor timestamp overlay: top-left, same style as subscriber.
+                let sensor_timestamp_text = {
+                    let shared = self.shared.lock();
+                    shared
+                        .sensor_timestamp
+                        .and_then(format_sensor_timestamp)
+                };
+                if let Some(ts_text) = sensor_timestamp_text {
+                    egui::Area::new("publisher_sensor_timestamp_overlay".into())
+                        .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                        .interactable(false)
+                        .show(ctx, |ui| {
+                            ui.label(
+                                egui::RichText::new(ts_text)
+                                    .monospace()
+                                    .size(22.0)
+                                    .color(egui::Color32::WHITE),
+                            );
+                        });
+                }
+
+                ctx.request_repaint_after(Duration::from_millis(16));
+            }
+        }
+
+        let app = PreviewApp { shared };
+        let native_options = eframe::NativeOptions::default();
+        eframe::run_native(
+            "LiveKit Camera Publisher Preview",
+            native_options,
+            Box::new(|_| Ok::<Box<dyn eframe::App>, _>(Box::new(app))),
+        )?;
+        // When the window closes, main will exit, dropping the runtime and capture task.
+        Ok(())
+    } else {
+        // No preview window; just run the capture loop until process exit or error.
+        capture_handle.await??;
+        Ok(())
+    }
 }
-
diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs
index cb97ec3da..15b3c227f 100644
--- a/examples/local_video/src/subscriber.rs
+++ b/examples/local_video/src/subscriber.rs
@@ -2,8 +2,6 @@ use anyhow::Result;
 use clap::Parser;
 use eframe::egui;
 use egui_wgpu as egui_wgpu_backend;
-use egui_wgpu_backend::CallbackTrait;
-use eframe::wgpu::{self, util::DeviceExt};
 use futures::StreamExt;
 use livekit::e2ee::{key_provider::*, E2eeOptions, EncryptionType};
 use livekit::prelude::*;
@@ -18,6 +16,9 @@ use std::{
     time::{Duration, Instant},
 };
 
+mod yuv_viewer;
+use yuv_viewer::{SharedYuv, YuvPaintCallback};
+
 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
 struct Args {
@@ -50,18 +51,6 @@ struct Args {
     participant: Option<String>,
 }
 
-struct SharedYuv {
-    width: u32,
-    height: u32,
-    stride_y: u32,
-    stride_u: u32,
-    stride_v: u32,
-    y: Vec<u8>,
-    u: Vec<u8>,
-    v: Vec<u8>,
-    dirty: bool,
-}
-
 #[derive(Clone)]
 struct SimulcastState {
     available: bool,
@@ -109,6 +98,20 @@ fn simulcast_state_full_dims(
     sc.full_dims
 }
 
+fn format_sensor_timestamp(ts_micros: i64) -> Option<String> {
+    if ts_micros == 0 {
+        // Treat 0 as "not set"
+        return None;
+    }
+    // Convert microseconds since UNIX epoch to `OffsetDateTime` in UTC, then format.
+    let nanos = i128::from(ts_micros).checked_mul(1_000)?;
+    let dt = time::OffsetDateTime::from_unix_timestamp_nanos(nanos).ok()?;
+    let format = time::macros::format_description!(
+        "[year]-[month]-[day] [hour]:[minute]:[second]:[subsecond digits:3]"
+    );
+    dt.format(&format).ok()
+}
+
 struct VideoApp {
     shared: Arc<Mutex<SharedYuv>>,
     simulcast: Arc<Mutex<SimulcastState>>,
@@ -131,6 +134,27 @@ impl eframe::App for VideoApp {
             ui.painter().add(cb);
         });
 
+        // Sensor timestamp overlay: top-left. Show nothing if no sensor timestamp parsed.
+        let sensor_timestamp_text = {
+            let shared = self.shared.lock();
+            shared
+                .sensor_timestamp
+                .and_then(format_sensor_timestamp)
+        };
+        if let Some(ts_text) = sensor_timestamp_text {
+            egui::Area::new("sensor_timestamp_overlay".into())
+                .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                .interactable(false)
+                .show(ctx, |ui| {
+                    ui.label(
+                        egui::RichText::new(ts_text)
+                            .monospace()
+                            .size(22.0)
+                            .color(egui::Color32::WHITE),
+                    );
+                });
+        }
+
         // Simulcast layer controls: bottom-left overlay
         egui::Area::new("simulcast_controls".into())
             .anchor(egui::Align2::LEFT_BOTTOM, egui::vec2(10.0, -10.0))
@@ -219,6 +243,7 @@ async fn main() -> Result<()> {
         u: Vec::new(),
         v: Vec::new(),
         dirty: false,
+        sensor_timestamp: None,
     }));
 
     // Subscribe to room events: on first video track, start sink task
@@ -369,28 +394,40 @@ async fn main() -> Result<()> {
                         u_buf.copy_from_slice(du);
                         v_buf.copy_from_slice(dv);
 
-                        // Swap buffers into shared state
-                        let mut s = shared2.lock();
-                        s.width = w as u32;
-                        s.height = h as u32;
-                        s.stride_y = sy as u32;
-                        s.stride_u = su as u32;
-                        s.stride_v = sv as u32;
-                        std::mem::swap(&mut s.y, &mut y_buf);
-                        std::mem::swap(&mut s.u, &mut u_buf);
-                        std::mem::swap(&mut s.v, &mut v_buf);
-                        s.dirty = true;
-
-                        // Log any parsed sensor timestamp for this frame if available.
-                        if let Some(ts) = video_track.last_sensor_timestamp() {
-                            // Get the current system timestamp in microseconds
+                        // Fetch any parsed sensor timestamp for this frame, if available.
+                        // Treat 0 as "not set".
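+                        // (A real sensor clock never reports exactly the UNIX
+                        // epoch, so 0 is a safe "absent" sentinel here.)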
+ let ts_opt = video_track + .last_sensor_timestamp() + .and_then(|ts| if ts == 0 { None } else { Some(ts) }); + + // Swap buffers into shared state, and only update the + // sensor timestamp when we actually have one. This + // prevents the overlay from flickering on frames that + // don't carry a parsed timestamp. + { + let mut s = shared2.lock(); + s.width = w as u32; + s.height = h as u32; + s.stride_y = sy as u32; + s.stride_u = su as u32; + s.stride_v = sv as u32; + std::mem::swap(&mut s.y, &mut y_buf); + std::mem::swap(&mut s.u, &mut u_buf); + std::mem::swap(&mut s.v, &mut v_buf); + s.dirty = true; + if let Some(ts) = ts_opt { + s.sensor_timestamp = Some(ts); + } + } + + // Log sensor timestamp + derived latency if available. + if let Some(ts) = ts_opt { use std::time::{SystemTime, UNIX_EPOCH}; let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap_or_default() .as_micros() as i64; - // Calculate the latency in microseconds, then convert to milliseconds let latency_us = now - ts; let latency_ms = latency_us as f64 / 1000.0; @@ -474,320 +511,3 @@ async fn main() -> Result<()> { Ok(()) } - -// ===== WGPU I420 renderer ===== - -struct YuvPaintCallback { - shared: Arc>, -} - -struct YuvGpuState { - pipeline: wgpu::RenderPipeline, - sampler: wgpu::Sampler, - bind_layout: wgpu::BindGroupLayout, - y_tex: wgpu::Texture, - u_tex: wgpu::Texture, - v_tex: wgpu::Texture, - y_view: wgpu::TextureView, - u_view: wgpu::TextureView, - v_view: wgpu::TextureView, - bind_group: wgpu::BindGroup, - params_buf: wgpu::Buffer, - y_pad_w: u32, - uv_pad_w: u32, - dims: (u32, u32), -} - -impl YuvGpuState { - fn create_textures(device: &wgpu::Device, _width: u32, height: u32, y_pad_w: u32, uv_pad_w: u32) -> (wgpu::Texture, wgpu::Texture, wgpu::Texture, wgpu::TextureView, wgpu::TextureView, wgpu::TextureView) { - let y_size = wgpu::Extent3d { width: y_pad_w, height, depth_or_array_layers: 1 }; - let uv_size = wgpu::Extent3d { width: uv_pad_w, height: (height + 1) / 2, depth_or_array_layers: 1 }; - let usage = wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING; - let desc = |size: wgpu::Extent3d| wgpu::TextureDescriptor { - label: Some("yuv_plane"), - size, - mip_level_count: 1, - sample_count: 1, - dimension: wgpu::TextureDimension::D2, - format: wgpu::TextureFormat::R8Unorm, - usage, - view_formats: &[], - }; - let y_tex = device.create_texture(&desc(y_size)); - let u_tex = device.create_texture(&desc(uv_size)); - let v_tex = device.create_texture(&desc(uv_size)); - let y_view = y_tex.create_view(&wgpu::TextureViewDescriptor::default()); - let u_view = u_tex.create_view(&wgpu::TextureViewDescriptor::default()); - let v_view = v_tex.create_view(&wgpu::TextureViewDescriptor::default()); - (y_tex, u_tex, v_tex, y_view, u_view, v_view) - } -} - -fn align_up(value: u32, alignment: u32) -> u32 { - ((value + alignment - 1) / alignment) * alignment -} - -#[repr(C)] -#[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] -struct ParamsUniform { - src_w: u32, - src_h: u32, - y_tex_w: u32, - uv_tex_w: u32, -} - -impl CallbackTrait for YuvPaintCallback { - fn prepare(&self, device: &wgpu::Device, queue: &wgpu::Queue, _screen_desc: &egui_wgpu_backend::ScreenDescriptor, _encoder: &mut wgpu::CommandEncoder, resources: &mut egui_wgpu_backend::CallbackResources) -> Vec { - // Initialize or update GPU state lazily based on current frame - let mut shared = self.shared.lock(); - - // Nothing to draw yet - if shared.width == 0 || shared.height == 0 { - return Vec::new(); - } - - // Fetch or create our 
GPU state - if resources.get::().is_none() { - // Build pipeline and initial small textures; will be recreated on first upload - let shader_src = include_str!("yuv_shader.wgsl"); - let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { - label: Some("yuv_shader"), - source: wgpu::ShaderSource::Wgsl(shader_src.into()), - }); - - let bind_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { - label: Some("yuv_bind_layout"), - entries: &[ - wgpu::BindGroupLayoutEntry { - binding: 0, - visibility: wgpu::ShaderStages::FRAGMENT, - ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering), - count: None, - }, - wgpu::BindGroupLayoutEntry { - binding: 1, - visibility: wgpu::ShaderStages::FRAGMENT, - ty: wgpu::BindingType::Texture { - sample_type: wgpu::TextureSampleType::Float { filterable: true }, - view_dimension: wgpu::TextureViewDimension::D2, - multisampled: false, - }, - count: None, - }, - wgpu::BindGroupLayoutEntry { binding: 2, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Texture { sample_type: wgpu::TextureSampleType::Float { filterable: true }, view_dimension: wgpu::TextureViewDimension::D2, multisampled: false }, count: None }, - wgpu::BindGroupLayoutEntry { binding: 3, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Texture { sample_type: wgpu::TextureSampleType::Float { filterable: true }, view_dimension: wgpu::TextureViewDimension::D2, multisampled: false }, count: None }, - wgpu::BindGroupLayoutEntry { - binding: 4, - visibility: wgpu::ShaderStages::FRAGMENT, - ty: wgpu::BindingType::Buffer { - ty: wgpu::BufferBindingType::Uniform, - has_dynamic_offset: false, - min_binding_size: Some(std::num::NonZeroU64::new(std::mem::size_of::() as u64).unwrap()), - }, - count: None, - }, - ], - }); - - let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { - label: Some("yuv_pipeline_layout"), - bind_group_layouts: &[&bind_layout], - push_constant_ranges: &[], - }); - - let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor { - label: Some("yuv_pipeline"), - layout: Some(&pipeline_layout), - vertex: wgpu::VertexState { module: &shader, entry_point: Some("vs_main"), buffers: &[], compilation_options: wgpu::PipelineCompilationOptions::default() }, - fragment: Some(wgpu::FragmentState { - module: &shader, - entry_point: Some("fs_main"), - targets: &[Some(wgpu::ColorTargetState { - format: wgpu::TextureFormat::Bgra8Unorm, - blend: Some(wgpu::BlendState::ALPHA_BLENDING), - write_mask: wgpu::ColorWrites::ALL, - })], - compilation_options: wgpu::PipelineCompilationOptions::default(), - }), - primitive: wgpu::PrimitiveState { topology: wgpu::PrimitiveTopology::TriangleList, strip_index_format: None, front_face: wgpu::FrontFace::Ccw, cull_mode: None, unclipped_depth: false, polygon_mode: wgpu::PolygonMode::Fill, conservative: false }, - depth_stencil: None, - multisample: wgpu::MultisampleState { count: 1, mask: !0, alpha_to_coverage_enabled: false }, - multiview: None, - cache: None, - }); - - let sampler = device.create_sampler(&wgpu::SamplerDescriptor { - label: Some("yuv_sampler"), - address_mode_u: wgpu::AddressMode::ClampToEdge, - address_mode_v: wgpu::AddressMode::ClampToEdge, - address_mode_w: wgpu::AddressMode::ClampToEdge, - mag_filter: wgpu::FilterMode::Linear, - min_filter: wgpu::FilterMode::Linear, - mipmap_filter: wgpu::FilterMode::Nearest, - ..Default::default() - }); - - let params_buf = 
device.create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("yuv_params"), - contents: bytemuck::bytes_of(&ParamsUniform { src_w: 1, src_h: 1, y_tex_w: 1, uv_tex_w: 1 }), - usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, - }); - - // Initial tiny textures - let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = YuvGpuState::create_textures(device, 1, 1, 256, 256); - let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { - label: Some("yuv_bind_group"), - layout: &bind_layout, - entries: &[ - wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::Sampler(&sampler) }, - wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::TextureView(&y_view) }, - wgpu::BindGroupEntry { binding: 2, resource: wgpu::BindingResource::TextureView(&u_view) }, - wgpu::BindGroupEntry { binding: 3, resource: wgpu::BindingResource::TextureView(&v_view) }, - wgpu::BindGroupEntry { binding: 4, resource: params_buf.as_entire_binding() }, - ], - }); - - let new_state = YuvGpuState { - pipeline: render_pipeline, - sampler, - bind_layout, - y_tex, - u_tex, - v_tex, - y_view, - u_view, - v_view, - bind_group, - params_buf, - y_pad_w: 256, - uv_pad_w: 256, - dims: (0, 0), - }; - resources.insert(new_state); - } - let state = resources.get_mut::().unwrap(); - - // Upload planes when marked dirty - // Recreate textures/bind group on size change - if state.dims != (shared.width, shared.height) { - let y_pad_w = align_up(shared.width, 256); - let uv_w = (shared.width + 1) / 2; - let uv_pad_w = align_up(uv_w, 256); - let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = YuvGpuState::create_textures(device, shared.width, shared.height, y_pad_w, uv_pad_w); - let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { - label: Some("yuv_bind_group"), - layout: &state.bind_layout, - entries: &[ - wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::Sampler(&state.sampler) }, - wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::TextureView(&y_view) }, - wgpu::BindGroupEntry { binding: 2, resource: wgpu::BindingResource::TextureView(&u_view) }, - wgpu::BindGroupEntry { binding: 3, resource: wgpu::BindingResource::TextureView(&v_view) }, - wgpu::BindGroupEntry { binding: 4, resource: state.params_buf.as_entire_binding() }, - ], - }); - state.y_tex = y_tex; - state.u_tex = u_tex; - state.v_tex = v_tex; - state.y_view = y_view; - state.u_view = u_view; - state.v_view = v_view; - state.bind_group = bind_group; - state.y_pad_w = y_pad_w; - state.uv_pad_w = uv_pad_w; - state.dims = (shared.width, shared.height); - } - - if shared.dirty { - let y_bytes_per_row = align_up(shared.width, 256); - let uv_w = (shared.width + 1) / 2; - let uv_h = (shared.height + 1) / 2; - let uv_bytes_per_row = align_up(uv_w, 256); - - // Pack and upload Y - if shared.stride_y >= shared.width { - let mut packed = vec![0u8; (y_bytes_per_row * shared.height) as usize]; - for row in 0..shared.height { - let src = &shared.y[(row * shared.stride_y) as usize..][..shared.width as usize]; - let dst_off = (row * y_bytes_per_row) as usize; - packed[dst_off..dst_off + shared.width as usize].copy_from_slice(src); - } - queue.write_texture( - wgpu::ImageCopyTexture { - texture: &state.y_tex, - mip_level: 0, - origin: wgpu::Origin3d::ZERO, - aspect: wgpu::TextureAspect::All, - }, - &packed, - wgpu::ImageDataLayout { - offset: 0, - bytes_per_row: Some(y_bytes_per_row), - rows_per_image: Some(shared.height), - }, - wgpu::Extent3d { width: state.y_pad_w, height: 
shared.height, depth_or_array_layers: 1 }, - ); - } - - // Pack and upload U,V - if shared.stride_u >= uv_w && shared.stride_v >= uv_w { - let mut packed_u = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; - let mut packed_v = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; - for row in 0..uv_h { - let src_u = &shared.u[(row * shared.stride_u) as usize..][..uv_w as usize]; - let src_v = &shared.v[(row * shared.stride_v) as usize..][..uv_w as usize]; - let dst_off = (row * uv_bytes_per_row) as usize; - packed_u[dst_off..dst_off + uv_w as usize].copy_from_slice(src_u); - packed_v[dst_off..dst_off + uv_w as usize].copy_from_slice(src_v); - } - queue.write_texture( - wgpu::ImageCopyTexture { texture: &state.u_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All }, - &packed_u, - wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some(uv_bytes_per_row), rows_per_image: Some(uv_h) }, - wgpu::Extent3d { width: state.uv_pad_w, height: uv_h, depth_or_array_layers: 1 }, - ); - queue.write_texture( - wgpu::ImageCopyTexture { texture: &state.v_tex, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All }, - &packed_v, - wgpu::ImageDataLayout { offset: 0, bytes_per_row: Some(uv_bytes_per_row), rows_per_image: Some(uv_h) }, - wgpu::Extent3d { width: state.uv_pad_w, height: uv_h, depth_or_array_layers: 1 }, - ); - } - - // Update params uniform - let params = ParamsUniform { src_w: shared.width, src_h: shared.height, y_tex_w: state.y_pad_w, uv_tex_w: state.uv_pad_w }; - queue.write_buffer(&state.params_buf, 0, bytemuck::bytes_of(¶ms)); - - shared.dirty = false; - } - - Vec::new() - } - - fn paint(&self, _info: egui::PaintCallbackInfo, render_pass: &mut wgpu::RenderPass<'static>, resources: &egui_wgpu_backend::CallbackResources) { - // Acquire device/queue via screen_descriptor? Not available; use resources to fetch our state - let shared = self.shared.lock(); - if shared.width == 0 || shared.height == 0 { - return; - } - - // Build pipeline and textures on first paint or on resize - let Some(state) = resources.get::() else { - // prepare may not have created the state yet (race with first frame); skip this paint - return; - }; - - if state.dims != (shared.width, shared.height) { - // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild - return; - } - - render_pass.set_pipeline(&state.pipeline); - render_pass.set_bind_group(0, &state.bind_group, &[]); - // Fullscreen triangle without vertex buffer - render_pass.draw(0..3, 0..1); - } -} - -// Build or rebuild GPU state. This helper is intended to be called from prepare, but we lack device there in current API constraints. -// Note: eframe/egui-wgpu provides device in paint via RenderPass context; however, to keep this example concise, we set up the state once externally. - diff --git a/examples/local_video/src/yuv_viewer.rs b/examples/local_video/src/yuv_viewer.rs new file mode 100644 index 000000000..75323e16f --- /dev/null +++ b/examples/local_video/src/yuv_viewer.rs @@ -0,0 +1,479 @@ +use eframe::egui; +use egui_wgpu as egui_wgpu_backend; +use egui_wgpu_backend::CallbackTrait; +use eframe::wgpu::{self, util::DeviceExt}; +use parking_lot::Mutex; +use std::sync::Arc; + +/// Shared I420 YUV frame storage for GPU rendering. 
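+///
+/// I420 layout: one full-resolution Y plane plus quarter-resolution U and V
+/// planes. Strides may exceed the visible width, so rows are repacked to the
+/// 256-byte-aligned row pitch that wgpu texture uploads require.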
+pub struct SharedYuv {
+    pub width: u32,
+    pub height: u32,
+    pub stride_y: u32,
+    pub stride_u: u32,
+    pub stride_v: u32,
+    pub y: Vec<u8>,
+    pub u: Vec<u8>,
+    pub v: Vec<u8>,
+    pub dirty: bool,
+    /// Optional sensor timestamp in microseconds since UNIX epoch.
+    pub sensor_timestamp: Option<i64>,
+}
+
+/// egui-wgpu callback that renders a fullscreen quad from a `SharedYuv` buffer.
+pub struct YuvPaintCallback {
+    pub shared: Arc<Mutex<SharedYuv>>,
+}
+
+struct YuvGpuState {
+    pipeline: wgpu::RenderPipeline,
+    sampler: wgpu::Sampler,
+    bind_layout: wgpu::BindGroupLayout,
+    y_tex: wgpu::Texture,
+    u_tex: wgpu::Texture,
+    v_tex: wgpu::Texture,
+    y_view: wgpu::TextureView,
+    u_view: wgpu::TextureView,
+    v_view: wgpu::TextureView,
+    bind_group: wgpu::BindGroup,
+    params_buf: wgpu::Buffer,
+    y_pad_w: u32,
+    uv_pad_w: u32,
+    dims: (u32, u32),
+}
+
+impl YuvGpuState {
+    fn create_textures(
+        device: &wgpu::Device,
+        _width: u32,
+        height: u32,
+        y_pad_w: u32,
+        uv_pad_w: u32,
+    ) -> (
+        wgpu::Texture,
+        wgpu::Texture,
+        wgpu::Texture,
+        wgpu::TextureView,
+        wgpu::TextureView,
+        wgpu::TextureView,
+    ) {
+        let y_size = wgpu::Extent3d { width: y_pad_w, height, depth_or_array_layers: 1 };
+        let uv_size = wgpu::Extent3d {
+            width: uv_pad_w,
+            height: (height + 1) / 2,
+            depth_or_array_layers: 1,
+        };
+        let usage = wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING;
+        let desc = |size: wgpu::Extent3d| wgpu::TextureDescriptor {
+            label: Some("yuv_plane"),
+            size,
+            mip_level_count: 1,
+            sample_count: 1,
+            dimension: wgpu::TextureDimension::D2,
+            format: wgpu::TextureFormat::R8Unorm,
+            usage,
+            view_formats: &[],
+        };
+        let y_tex = device.create_texture(&desc(y_size));
+        let u_tex = device.create_texture(&desc(uv_size));
+        let v_tex = device.create_texture(&desc(uv_size));
+        let y_view = y_tex.create_view(&wgpu::TextureViewDescriptor::default());
+        let u_view = u_tex.create_view(&wgpu::TextureViewDescriptor::default());
+        let v_view = v_tex.create_view(&wgpu::TextureViewDescriptor::default());
+        (y_tex, u_tex, v_tex, y_view, u_view, v_view)
+    }
+}
+
+fn align_up(value: u32, alignment: u32) -> u32 {
+    ((value + alignment - 1) / alignment) * alignment
+}
+
+#[repr(C)]
+#[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)]
+struct ParamsUniform {
+    src_w: u32,
+    src_h: u32,
+    y_tex_w: u32,
+    uv_tex_w: u32,
+}
+
+impl CallbackTrait for YuvPaintCallback {
+    fn prepare(
+        &self,
+        device: &wgpu::Device,
+        queue: &wgpu::Queue,
+        _screen_desc: &egui_wgpu_backend::ScreenDescriptor,
+        _encoder: &mut wgpu::CommandEncoder,
+        resources: &mut egui_wgpu_backend::CallbackResources,
+    ) -> Vec<wgpu::CommandBuffer> {
+        // Initialize or update GPU state lazily based on current frame
+        let mut shared = self.shared.lock();
+
+        // Nothing to draw yet
+        if shared.width == 0 || shared.height == 0 {
+            return Vec::new();
+        }
+
+        // Fetch or create our GPU state
+        if resources.get::<YuvGpuState>().is_none() {
+            // Build pipeline and initial small textures; will be recreated on first upload
+            let shader_src = include_str!("yuv_shader.wgsl");
+            let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
+                label: Some("yuv_shader"),
+                source: wgpu::ShaderSource::Wgsl(shader_src.into()),
+            });
+
+            let bind_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+                label: Some("yuv_bind_layout"),
+                entries: &[
+                    wgpu::BindGroupLayoutEntry {
+                        binding: 0,
+                        visibility: wgpu::ShaderStages::FRAGMENT,
+                        ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
+                        count: None,
+                    },
+                    wgpu::BindGroupLayoutEntry {
+                        binding: 1,
+                        visibility:
wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: true }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }, + wgpu::BindGroupLayoutEntry { + binding: 2, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: true }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }, + wgpu::BindGroupLayoutEntry { + binding: 3, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: true }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }, + wgpu::BindGroupLayoutEntry { + binding: 4, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Uniform, + has_dynamic_offset: false, + min_binding_size: Some( + std::num::NonZeroU64::new( + std::mem::size_of::() as u64, + ) + .unwrap(), + ), + }, + count: None, + }, + ], + }); + + let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { + label: Some("yuv_pipeline_layout"), + bind_group_layouts: &[&bind_layout], + push_constant_ranges: &[], + }); + + let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor { + label: Some("yuv_pipeline"), + layout: Some(&pipeline_layout), + vertex: wgpu::VertexState { + module: &shader, + entry_point: Some("vs_main"), + buffers: &[], + compilation_options: wgpu::PipelineCompilationOptions::default(), + }, + fragment: Some(wgpu::FragmentState { + module: &shader, + entry_point: Some("fs_main"), + targets: &[Some(wgpu::ColorTargetState { + format: wgpu::TextureFormat::Bgra8Unorm, + blend: Some(wgpu::BlendState::ALPHA_BLENDING), + write_mask: wgpu::ColorWrites::ALL, + })], + compilation_options: wgpu::PipelineCompilationOptions::default(), + }), + primitive: wgpu::PrimitiveState { + topology: wgpu::PrimitiveTopology::TriangleList, + strip_index_format: None, + front_face: wgpu::FrontFace::Ccw, + cull_mode: None, + unclipped_depth: false, + polygon_mode: wgpu::PolygonMode::Fill, + conservative: false, + }, + depth_stencil: None, + multisample: wgpu::MultisampleState { + count: 1, + mask: !0, + alpha_to_coverage_enabled: false, + }, + multiview: None, + cache: None, + }); + + let sampler = device.create_sampler(&wgpu::SamplerDescriptor { + label: Some("yuv_sampler"), + address_mode_u: wgpu::AddressMode::ClampToEdge, + address_mode_v: wgpu::AddressMode::ClampToEdge, + address_mode_w: wgpu::AddressMode::ClampToEdge, + mag_filter: wgpu::FilterMode::Linear, + min_filter: wgpu::FilterMode::Linear, + mipmap_filter: wgpu::FilterMode::Nearest, + ..Default::default() + }); + + let params_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { + label: Some("yuv_params"), + contents: bytemuck::bytes_of(&ParamsUniform { + src_w: 1, + src_h: 1, + y_tex_w: 1, + uv_tex_w: 1, + }), + usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, + }); + + // Initial tiny textures + let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = + YuvGpuState::create_textures(device, 1, 1, 256, 256); + let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("yuv_bind_group"), + layout: &bind_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: wgpu::BindingResource::Sampler(&sampler), + }, + wgpu::BindGroupEntry { + 
binding: 1, + resource: wgpu::BindingResource::TextureView(&y_view), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: wgpu::BindingResource::TextureView(&u_view), + }, + wgpu::BindGroupEntry { + binding: 3, + resource: wgpu::BindingResource::TextureView(&v_view), + }, + wgpu::BindGroupEntry { + binding: 4, + resource: params_buf.as_entire_binding(), + }, + ], + }); + + let new_state = YuvGpuState { + pipeline: render_pipeline, + sampler, + bind_layout, + y_tex, + u_tex, + v_tex, + y_view, + u_view, + v_view, + bind_group, + params_buf, + y_pad_w: 256, + uv_pad_w: 256, + dims: (0, 0), + }; + resources.insert(new_state); + } + let state = resources.get_mut::().unwrap(); + + // Upload planes when marked dirty + // Recreate textures/bind group on size change + if state.dims != (shared.width, shared.height) { + let y_pad_w = align_up(shared.width, 256); + let uv_w = (shared.width + 1) / 2; + let uv_pad_w = align_up(uv_w, 256); + let (y_tex, u_tex, v_tex, y_view, u_view, v_view) = + YuvGpuState::create_textures(device, shared.width, shared.height, y_pad_w, uv_pad_w); + let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("yuv_bind_group"), + layout: &state.bind_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: wgpu::BindingResource::Sampler(&state.sampler), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: wgpu::BindingResource::TextureView(&y_view), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: wgpu::BindingResource::TextureView(&u_view), + }, + wgpu::BindGroupEntry { + binding: 3, + resource: wgpu::BindingResource::TextureView(&v_view), + }, + wgpu::BindGroupEntry { + binding: 4, + resource: state.params_buf.as_entire_binding(), + }, + ], + }); + state.y_tex = y_tex; + state.u_tex = u_tex; + state.v_tex = v_tex; + state.y_view = y_view; + state.u_view = u_view; + state.v_view = v_view; + state.bind_group = bind_group; + state.y_pad_w = y_pad_w; + state.uv_pad_w = uv_pad_w; + state.dims = (shared.width, shared.height); + } + + if shared.dirty { + let y_bytes_per_row = align_up(shared.width, 256); + let uv_w = (shared.width + 1) / 2; + let uv_h = (shared.height + 1) / 2; + let uv_bytes_per_row = align_up(uv_w, 256); + + // Pack and upload Y + if shared.stride_y >= shared.width { + let mut packed = vec![0u8; (y_bytes_per_row * shared.height) as usize]; + for row in 0..shared.height { + let src = + &shared.y[(row * shared.stride_y) as usize..][..shared.width as usize]; + let dst_off = (row * y_bytes_per_row) as usize; + packed[dst_off..dst_off + shared.width as usize].copy_from_slice(src); + } + queue.write_texture( + wgpu::ImageCopyTexture { + texture: &state.y_tex, + mip_level: 0, + origin: wgpu::Origin3d::ZERO, + aspect: wgpu::TextureAspect::All, + }, + &packed, + wgpu::ImageDataLayout { + offset: 0, + bytes_per_row: Some(y_bytes_per_row), + rows_per_image: Some(shared.height), + }, + wgpu::Extent3d { + width: state.y_pad_w, + height: shared.height, + depth_or_array_layers: 1, + }, + ); + } + + // Pack and upload U,V + if shared.stride_u >= uv_w && shared.stride_v >= uv_w { + let mut packed_u = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; + let mut packed_v = vec![0u8; (uv_bytes_per_row * uv_h) as usize]; + for row in 0..uv_h { + let src_u = + &shared.u[(row * shared.stride_u) as usize..][..uv_w as usize]; + let src_v = + &shared.v[(row * shared.stride_v) as usize..][..uv_w as usize]; + let dst_off = (row * uv_bytes_per_row) as usize; + packed_u[dst_off..dst_off + uv_w as usize].copy_from_slice(src_u); + 
packed_v[dst_off..dst_off + uv_w as usize].copy_from_slice(src_v); + } + queue.write_texture( + wgpu::ImageCopyTexture { + texture: &state.u_tex, + mip_level: 0, + origin: wgpu::Origin3d::ZERO, + aspect: wgpu::TextureAspect::All, + }, + &packed_u, + wgpu::ImageDataLayout { + offset: 0, + bytes_per_row: Some(uv_bytes_per_row), + rows_per_image: Some(uv_h), + }, + wgpu::Extent3d { + width: state.uv_pad_w, + height: uv_h, + depth_or_array_layers: 1, + }, + ); + queue.write_texture( + wgpu::ImageCopyTexture { + texture: &state.v_tex, + mip_level: 0, + origin: wgpu::Origin3d::ZERO, + aspect: wgpu::TextureAspect::All, + }, + &packed_v, + wgpu::ImageDataLayout { + offset: 0, + bytes_per_row: Some(uv_bytes_per_row), + rows_per_image: Some(uv_h), + }, + wgpu::Extent3d { + width: state.uv_pad_w, + height: uv_h, + depth_or_array_layers: 1, + }, + ); + } + + // Update params uniform + let params = ParamsUniform { + src_w: shared.width, + src_h: shared.height, + y_tex_w: state.y_pad_w, + uv_tex_w: state.uv_pad_w, + }; + queue.write_buffer(&state.params_buf, 0, bytemuck::bytes_of(¶ms)); + + shared.dirty = false; + } + + Vec::new() + } + + fn paint( + &self, + _info: egui::PaintCallbackInfo, + render_pass: &mut wgpu::RenderPass<'static>, + resources: &egui_wgpu_backend::CallbackResources, + ) { + // Acquire current frame + let shared = self.shared.lock(); + if shared.width == 0 || shared.height == 0 { + return; + } + + // Build pipeline and textures on first paint or on resize + let Some(state) = resources.get::() else { + // prepare may not have created the state yet (race with first frame); skip this paint + return; + }; + + if state.dims != (shared.width, shared.height) { + // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild + return; + } + + render_pass.set_pipeline(&state.pipeline); + render_pass.set_bind_group(0, &state.bind_group, &[]); + // Fullscreen triangle without vertex buffer + render_pass.draw(0..3, 0..1); + } +} + + From c00496140d6bf488c2c8714a752d18a8498dc813 Mon Sep 17 00:00:00 2001 From: David Chen Date: Wed, 10 Dec 2025 16:01:37 -0800 Subject: [PATCH 39/39] add timestamp render on the video --- examples/local_video/src/publisher.rs | 177 +++++++++++++++++++++---- examples/local_video/src/subscriber.rs | 155 +++++++++++++++++++--- examples/local_video/src/yuv_viewer.rs | 2 +- 3 files changed, 286 insertions(+), 48 deletions(-) diff --git a/examples/local_video/src/publisher.rs b/examples/local_video/src/publisher.rs index 1b6b6c050..8fe05490e 100644 --- a/examples/local_video/src/publisher.rs +++ b/examples/local_video/src/publisher.rs @@ -35,6 +35,13 @@ fn format_sensor_timestamp(ts_micros: i64) -> Option { dt.format(&format).ok() } +fn now_unix_timestamp_micros() -> i64 { + std::time::SystemTime::now() + .duration_since(std::time::SystemTime::UNIX_EPOCH) + .expect("SystemTime before UNIX EPOCH") + .as_micros() as i64 +} + #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] struct Args { @@ -90,6 +97,10 @@ struct Args { #[arg(long, default_value_t = false)] sensor_timestamp: bool, + /// Show system time and delta vs sensor timestamp in the preview overlay + #[arg(long, default_value_t = false)] + show_sys_time: bool, + /// Use H.265/HEVC encoding if supported (falls back to H.264 on failure) #[arg(long, default_value_t = false)] h265: bool, @@ -317,6 +328,19 @@ async fn main() -> Result<()> { ticker.tick().await; let iter_start = Instant::now(); + // Capture a sensor timestamp at the beginning of the loop 
+        // Capture a sensor timestamp at the beginning of the loop so it reflects
+        // the scheduled capture time rather than the later capture_frame call.
+        let loop_sensor_ts = if show_sensor_ts {
+            Some(
+                std::time::SystemTime::now()
+                    .duration_since(std::time::SystemTime::UNIX_EPOCH)
+                    .expect("SystemTime before UNIX EPOCH")
+                    .as_micros() as i64,
+            )
+        } else {
+            None
+        };
+
         // Get frame as RGB24 (decoded by nokhwa if needed)
         let t0 = Instant::now();
         let frame_buf = camera.frame()?;
@@ -443,14 +467,10 @@
         // Update RTP timestamp (monotonic, microseconds since start)
         frame.timestamp_us = start_ts.elapsed().as_micros() as i64;
 
-        // Optionally attach a sensor timestamp and push it into the shared queue
-        // used by the sensor timestamp transformer.
+        // Optionally attach a sensor timestamp captured at the top of the loop and
+        // push it into the shared queue used by the sensor timestamp transformer.
         if show_sensor_ts {
-            if let Some(store) = track.sensor_timestamp_store() {
-                let sensor_ts = std::time::SystemTime::now()
-                    .duration_since(std::time::SystemTime::UNIX_EPOCH)
-                    .expect("SystemTime before UNIX EPOCH")
-                    .as_micros() as i64;
+            if let (Some(store), Some(sensor_ts)) = (track.sensor_timestamp_store(), loop_sensor_ts) {
                 frame.sensor_timestamp_us = Some(sensor_ts);
                 store.store(frame.timestamp_us, sensor_ts);
                 last_sensor_ts = Some(sensor_ts);
@@ -553,6 +573,9 @@
     if let Some(shared) = shared_preview {
         struct PreviewApp {
             shared: Arc>,
+            show_sys_time: bool,
+            last_latency_ms: Option<i32>,
+            last_latency_update: Option<Instant>,
         }
 
         impl eframe::App for PreviewApp {
@@ -572,32 +595,132 @@
                     ui.painter().add(cb);
                 });
 
-                // Sensor timestamp overlay: top-left, same style as subscriber.
-                let sensor_timestamp_text = {
-                    let shared = self.shared.lock();
-                    shared
-                        .sensor_timestamp
-                        .and_then(format_sensor_timestamp)
-                };
-                if let Some(ts_text) = sensor_timestamp_text {
-                    egui::Area::new("publisher_sensor_timestamp_overlay".into())
-                        .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
-                        .interactable(false)
-                        .show(ctx, |ui| {
-                            ui.label(
-                                egui::RichText::new(ts_text)
-                                    .monospace()
-                                    .size(22.0)
-                                    .color(egui::Color32::WHITE),
-                            );
-                        });
+                // Sensor timestamp / system time overlay for the local preview.
+                //
+                // When `show_sys_time` is false, we only render the user (sensor) timestamp, if present.
+                //
+                // When `show_sys_time` is true:
+                // - If there is a sensor timestamp, we render up to three rows:
+                //   1) "usr ts: yyyy-mm-dd hh:mm:ss:nnn" (sensor timestamp)
+                //   2) "sys ts: yyyy-mm-dd hh:mm:ss:nnn" (system timestamp)
+                //   3) "latency: xxxxms" (delta in ms, 4 digits, updated at 2 Hz)
+                // - If there is no sensor timestamp, we render a single row:
+                //   "sys ts: yyyy-mm-dd hh:mm:ss:nnn"
+                if self.show_sys_time {
+                    let (sensor_raw, sensor_text, sys_raw, sys_text_opt) = {
+                        let shared = self.shared.lock();
+                        let sensor_raw = shared.sensor_timestamp;
+                        let sensor_text = sensor_raw.and_then(format_sensor_timestamp);
+                        let sys_raw = now_unix_timestamp_micros();
+                        let sys_text = format_sensor_timestamp(sys_raw);
+                        (sensor_raw, sensor_text, sys_raw, sys_text)
+                    };
+
+                    if let Some(sys_text) = sys_text_opt {
+                        // Latency: throttle updates to 2 Hz to reduce jitter in the display.
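+                        // last_latency_ms is cached between updates so the row
+                        // does not flicker at the repaint rate.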
+                        let latency_to_show = if let Some(sensor) = sensor_raw {
+                            let now = Instant::now();
+                            let needs_update = self
+                                .last_latency_update
+                                .map(|prev| now.duration_since(prev) >= Duration::from_millis(500))
+                                .unwrap_or(true);
+                            if needs_update {
+                                let delta_micros = sys_raw - sensor;
+                                let delta_ms = delta_micros as f64 / 1000.0;
+                                // Clamp to [0, 9999] ms to keep formatting consistent.
+                                let clamped = delta_ms.round().clamp(0.0, 9_999.0) as i32;
+                                self.last_latency_ms = Some(clamped);
+                                self.last_latency_update = Some(now);
+                            }
+                            self.last_latency_ms
+                        } else {
+                            self.last_latency_ms = None;
+                            self.last_latency_update = None;
+                            None
+                        };
+
+                        egui::Area::new("publisher_sensor_sys_timestamp_overlay".into())
+                            .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                            .interactable(false)
+                            .show(ctx, |ui| {
+                                ui.vertical(|ui| {
+                                    if let Some(ts_text) = sensor_text {
+                                        // First row: user (sensor) timestamp
+                                        let usr_line = format!("usr ts: {ts_text}");
+                                        ui.label(
+                                            egui::RichText::new(usr_line)
+                                                .monospace()
+                                                .size(22.0)
+                                                .color(egui::Color32::WHITE),
+                                        );
+
+                                        // Second row: system timestamp.
+                                        let sys_line = format!("sys ts: {sys_text}");
+                                        ui.label(
+                                            egui::RichText::new(sys_line)
+                                                .monospace()
+                                                .size(22.0)
+                                                .color(egui::Color32::WHITE),
+                                        );
+
+                                        // Third row: latency in milliseconds (if available).
+                                        if let Some(latency_ms) = latency_to_show {
+                                            let latency_line =
+                                                format!("latency: {:04}ms", latency_ms.max(0));
+                                            ui.label(
+                                                egui::RichText::new(latency_line)
+                                                    .monospace()
+                                                    .size(22.0)
+                                                    .color(egui::Color32::WHITE),
+                                            );
+                                        }
+                                    } else {
+                                        // No sensor timestamp: only show system timestamp.
+                                        let sys_line = format!("sys ts: {sys_text}");
+                                        ui.label(
+                                            egui::RichText::new(sys_line)
+                                                .monospace()
+                                                .size(22.0)
+                                                .color(egui::Color32::WHITE),
+                                        );
+                                    }
+                                });
+                            });
+                    }
+                } else {
+                    // Original behavior: render only the user (sensor) timestamp, if present.
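+                    // Without --show-sys-time there is no system-time row and no
+                    // latency row; the overlay stays a single line.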
+                    let sensor_timestamp_text = {
+                        let shared = self.shared.lock();
+                        shared
+                            .sensor_timestamp
+                            .and_then(format_sensor_timestamp)
+                    };
+                    if let Some(ts_text) = sensor_timestamp_text {
+                        let usr_line = format!("usr ts: {ts_text}");
+                        egui::Area::new("publisher_sensor_timestamp_overlay".into())
+                            .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                            .interactable(false)
+                            .show(ctx, |ui| {
+                                ui.label(
+                                    egui::RichText::new(usr_line)
+                                        .monospace()
+                                        .size(22.0)
+                                        .color(egui::Color32::WHITE),
+                                );
+                            });
+                    }
+                }
 
                 ctx.request_repaint_after(Duration::from_millis(16));
             }
         }
 
-        let app = PreviewApp { shared };
+        let app = PreviewApp {
+            shared,
+            show_sys_time: args.show_sys_time,
+            last_latency_ms: None,
+            last_latency_update: None,
+        };
         let native_options = eframe::NativeOptions::default();
         eframe::run_native(
             "LiveKit Camera Publisher Preview",
diff --git a/examples/local_video/src/subscriber.rs b/examples/local_video/src/subscriber.rs
index 15b3c227f..b78ee30de 100644
--- a/examples/local_video/src/subscriber.rs
+++ b/examples/local_video/src/subscriber.rs
@@ -49,6 +49,10 @@ struct Args {
     /// Only subscribe to video from this participant identity
     #[arg(long)]
     participant: Option<String>,
+
+    /// Show system time and delta vs sensor timestamp in the YUV viewer overlay
+    #[arg(long, default_value_t = false)]
+    show_sys_time: bool,
 }
 
 #[derive(Clone)]
@@ -112,9 +116,19 @@ fn format_sensor_timestamp(ts_micros: i64) -> Option<String> {
     dt.format(&format).ok()
 }
 
+fn now_unix_timestamp_micros() -> i64 {
+    std::time::SystemTime::now()
+        .duration_since(std::time::SystemTime::UNIX_EPOCH)
+        .expect("SystemTime before UNIX EPOCH")
+        .as_micros() as i64
+}
+
 struct VideoApp {
     shared: Arc>,
     simulcast: Arc>,
+    show_sys_time: bool,
+    last_latency_ms: Option<i32>,
+    last_latency_update: Option<Instant>,
 }
 
 impl eframe::App for VideoApp {
@@ -134,25 +148,120 @@
             ui.painter().add(cb);
         });
 
-        // Sensor timestamp overlay: top-left. Show nothing if no sensor timestamp parsed.
-        let sensor_timestamp_text = {
-            let shared = self.shared.lock();
-            shared
-                .sensor_timestamp
-                .and_then(format_sensor_timestamp)
-        };
-        if let Some(ts_text) = sensor_timestamp_text {
-            egui::Area::new("sensor_timestamp_overlay".into())
-                .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
-                .interactable(false)
-                .show(ctx, |ui| {
-                    ui.label(
-                        egui::RichText::new(ts_text)
-                            .monospace()
-                            .size(22.0)
-                            .color(egui::Color32::WHITE),
-                    );
-                });
+        // Sensor timestamp / system time overlay: top-left.
+        //
+        // When `show_sys_time` is false, we only render the user (sensor) timestamp, if present.
+        //
+        // When `show_sys_time` is true:
+        // - If there is a sensor timestamp, we render up to three rows:
+        //   1) "usr ts: yyyy-mm-dd hh:mm:ss:nnn" (sensor timestamp)
+        //   2) "sys ts: yyyy-mm-dd hh:mm:ss:nnn" (system timestamp)
+        //   3) "latency: xxxxms" (delta in ms, 4 digits, updated at 2 Hz)
+        // - If there is no sensor timestamp, we render a single row:
+        //   "sys ts: yyyy-mm-dd hh:mm:ss:nnn"
+        if self.show_sys_time {
+            let (sensor_raw, sensor_text, sys_raw, sys_text_opt) = {
+                let shared = self.shared.lock();
+                let sensor_raw = shared.sensor_timestamp;
+                let sensor_text = sensor_raw.and_then(format_sensor_timestamp);
+                let sys_raw = now_unix_timestamp_micros();
+                let sys_text = format_sensor_timestamp(sys_raw);
+                (sensor_raw, sensor_text, sys_raw, sys_text)
+            };
+
+            if let Some(sys_text) = sys_text_opt {
+                // Latency: throttle updates to 2 Hz to reduce jitter in the display.
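+                // On the subscriber side, sys - sensor approximates end-to-end
+                // capture-to-display latency, assuming the publisher and
+                // subscriber clocks are synchronized (e.g. via NTP).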
+                let latency_to_show = if let Some(sensor) = sensor_raw {
+                    let now = Instant::now();
+                    let needs_update = self
+                        .last_latency_update
+                        .map(|prev| now.duration_since(prev) >= Duration::from_millis(500))
+                        .unwrap_or(true);
+                    if needs_update {
+                        let delta_micros = sys_raw - sensor;
+                        let delta_ms = delta_micros as f64 / 1000.0;
+                        // Clamp to [0, 9999] ms to keep formatting consistent.
+                        let clamped = delta_ms.round().clamp(0.0, 9_999.0) as i32;
+                        self.last_latency_ms = Some(clamped);
+                        self.last_latency_update = Some(now);
+                    }
+                    self.last_latency_ms
+                } else {
+                    self.last_latency_ms = None;
+                    self.last_latency_update = None;
+                    None
+                };
+
+                egui::Area::new("sensor_sys_timestamp_overlay".into())
+                    .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                    .interactable(false)
+                    .show(ctx, |ui| {
+                        ui.vertical(|ui| {
+                            if let Some(ts_text) = sensor_text {
+                                // First row: user (sensor) timestamp
+                                let usr_line = format!("usr ts: {ts_text}");
+                                ui.label(
+                                    egui::RichText::new(usr_line)
+                                        .monospace()
+                                        .size(22.0)
+                                        .color(egui::Color32::WHITE),
+                                );
+
+                                // Second row: system timestamp.
+                                let sys_line = format!("sys ts: {sys_text}");
+                                ui.label(
+                                    egui::RichText::new(sys_line)
+                                        .monospace()
+                                        .size(22.0)
+                                        .color(egui::Color32::WHITE),
+                                );
+
+                                // Third row: latency in milliseconds (if available).
+                                if let Some(latency_ms) = latency_to_show {
+                                    let latency_line =
+                                        format!("latency: {:04}ms", latency_ms.max(0));
+                                    ui.label(
+                                        egui::RichText::new(latency_line)
+                                            .monospace()
+                                            .size(22.0)
+                                            .color(egui::Color32::WHITE),
+                                    );
+                                }
+                            } else {
+                                // No sensor timestamp: only show system timestamp.
+                                let sys_line = format!("sys ts: {sys_text}");
+                                ui.label(
+                                    egui::RichText::new(sys_line)
+                                        .monospace()
+                                        .size(22.0)
+                                        .color(egui::Color32::WHITE),
+                                );
+                            }
+                        });
+                    });
+            }
+        } else {
+            // Original behavior: render only the user (sensor) timestamp, if present.
+            let sensor_timestamp_text = {
+                let shared = self.shared.lock();
+                shared
+                    .sensor_timestamp
+                    .and_then(format_sensor_timestamp)
+            };
+            if let Some(ts_text) = sensor_timestamp_text {
+                let usr_line = format!("usr ts: {ts_text}");
+                egui::Area::new("sensor_timestamp_overlay".into())
+                    .anchor(egui::Align2::LEFT_TOP, egui::vec2(20.0, 20.0))
+                    .interactable(false)
+                    .show(ctx, |ui| {
+                        ui.label(
+                            egui::RichText::new(usr_line)
+                                .monospace()
+                                .size(22.0)
+                                .color(egui::Color32::WHITE),
+                        );
+                    });
+            }
+        }
 
         // Simulcast layer controls: bottom-left overlay
@@ -504,7 +613,13 @@
     });
 
     // Start UI
-    let app = VideoApp { shared, simulcast };
+    let app = VideoApp {
+        shared,
+        simulcast,
+        show_sys_time: args.show_sys_time,
+        last_latency_ms: None,
+        last_latency_update: None,
+    };
     let native_options = eframe::NativeOptions::default();
     eframe::run_native("LiveKit Video Subscriber", native_options, Box::new(|_| Ok::<Box<dyn eframe::App>, _>(Box::new(app))))?;
 
diff --git a/examples/local_video/src/yuv_viewer.rs b/examples/local_video/src/yuv_viewer.rs
index 75323e16f..dc4c9c3c9 100644
--- a/examples/local_video/src/yuv_viewer.rs
+++ b/examples/local_video/src/yuv_viewer.rs
@@ -463,7 +463,7 @@ impl CallbackTrait for YuvPaintCallback {
             // prepare may not have created the state yet (race with first frame); skip this paint
             return;
         };
-        
+
         if state.dims != (shared.width, shared.height) {
             // We cannot rebuild here (no device access); skip drawing until next frame where prepare will rebuild
             return;