Fix SSE event filtering and add Picture-in-Picture support (#25)

* Fix SSE event filtering race condition in mission views

Events were being filtered out during mission load because of a race condition:
viewingMissionId was set before currentMission finished loading. Events are now
filtered only when both IDs are set and different, so streaming updates still
display while a mission is loading.
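
A minimal sketch of the corrected check, using the viewingId/currentMissionId
names from the web dashboard diff below (the helper wrapper is hypothetical):

    // Skip main-session events only when BOTH IDs are known and differ.
    // While currentMissionId is still null (mission loading), events pass through.
    function shouldSkipMainSessionEvent(
      viewingId: string | null,
      currentMissionId: string | null
    ): boolean {
      return Boolean(currentMissionId) && viewingId !== currentMissionId;
    }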

* Improve desktop stream UX with auto-open and auto-close

- Auto-extract display ID from desktop_start_session tool result
- Auto-open desktop stream when agent starts a desktop session
- Auto-close desktop stream when agent finishes (status becomes idle)
- Apply same improvements to both web and iOS dashboards

* Fix desktop display extraction from JSON string results

Tool results may be returned as JSON strings rather than parsed objects.
Handle both cases when extracting the display ID from desktop_start_session.
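
A sketch of the dual handling (hypothetical helper name; the same logic appears
inline in the web dashboard diff below):

    // Tool results arrive either as parsed objects or as JSON strings.
    function normalizeToolResult(result: unknown): unknown {
      if (typeof result === "string") {
        try {
          return JSON.parse(result); // JSON string case
        } catch {
          return result; // not valid JSON, leave as-is
        }
      }
      return result; // already a parsed object
    }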

* Fix desktop stream staying open when status=idle during loading

The event filtering was updated to allow events through when currentMissionId
is null (during initial load), but the status application logic wasn't updated
to match. This created a window where tool_result could open the desktop stream
but status=idle wouldn't close it because shouldApplyStatus was false.

Now both the event filter and status application logic use consistent conditions:
allow when currentMissionId hasn't loaded yet.
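
The shared condition, sketched as a hypothetical helper (the web dashboard
applies it inline, as shown in the diff below):

    // Main-session status applies when viewing the main mission, when no
    // specific mission is selected, or before currentMissionId has loaded.
    function shouldApplyMainSessionStatus(
      viewingId: string | null,
      currentMissionId: string | null
    ): boolean {
      return !viewingId || viewingId === currentMissionId || !currentMissionId;
    }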

* Fix desktop auto-open and add Picture-in-Picture support

- Use tool_result event's name field directly for desktop_start_session detection
  (fixes auto-open when tool_call event was filtered or missed)
- Add native Picture-in-Picture button to desktop stream
  - Converts canvas to video stream for OS-level floating window
  - Works outside the browser tab
  - Shows PiP button only when browser supports it
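
A minimal sketch of the canvas-to-PiP path (hypothetical function; the real
handler in DesktopStream below also reuses the video element and tracks state):

    // Turn the canvas into a MediaStream, play it in a hidden <video>,
    // then hand that video to the browser's Picture-in-Picture API.
    async function enterCanvasPip(canvas: HTMLCanvasElement, fps: number): Promise<void> {
      if (!document.pictureInPictureEnabled) return; // feature-detect first
      const stream = canvas.captureStream(fps);
      const video = document.createElement("video");
      video.muted = true;
      video.playsInline = true;
      video.srcObject = stream;
      await video.play(); // must be playing before requesting PiP
      await video.requestPictureInPicture(); // OS-level floating window
    }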

* Add iOS Picture-in-Picture support for desktop stream

- Implement AVSampleBufferDisplayLayer-based PiP for iOS (wiring sketched after this list)
- Convert JPEG frames to CMSampleBuffer for PiP playback
- Add PiP buttons to desktop stream header and controls
- Fix web dashboard auto-open to use tool name from event data directly
- Add audio background mode to Info.plist for PiP support
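
A condensed sketch of the PiP wiring (hypothetical free function; the full
setup lives in DesktopStreamService.setupPip in the diff below):

    import AVKit

    // Build a PiP controller backed by an AVSampleBufferDisplayLayer;
    // decoded frames are enqueued on this layer as CMSampleBuffers.
    func makePipController(
        layer: AVSampleBufferDisplayLayer,
        delegate: AVPictureInPictureControllerDelegate & AVPictureInPictureSampleBufferPlaybackDelegate
    ) -> AVPictureInPictureController? {
        guard AVPictureInPictureController.isPictureInPictureSupported() else { return nil }
        let source = AVPictureInPictureController.ContentSource(
            sampleBufferDisplayLayer: layer,
            playbackDelegate: delegate
        )
        let controller = AVPictureInPictureController(contentSource: source)
        controller.delegate = delegate
        return controller
    }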

* Fix React anti-patterns flagged by Bugbot

- Use itemsRef for synchronous read instead of calling state setters
  inside setItems updater callback (React strict mode safe; pattern
  sketched after this list)
- Attach PiP event listeners directly to video element instead of
  document, since these events don't bubble
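
The ref-mirror pattern, sketched as a hypothetical generic hook (ControlClient
below applies it directly to its items state):

    import { useEffect, useRef, useState } from "react";

    // Mirror state into a ref so event handlers can read the latest value
    // synchronously, without side effects inside a setState updater
    // (which React Strict Mode may invoke twice).
    function useStateWithRef<T>(initial: T) {
      const [value, setValue] = useState<T>(initial);
      const valueRef = useRef<T>(initial);
      useEffect(() => {
        valueRef.current = value;
      }, [value]);
      return { value, setValue, valueRef };
    }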

* Fix PiP issues flagged by Bugbot

- iOS: Only disconnect stream onDisappear if PiP is not active,
  allowing stream to continue in PiP mode after sheet is dismissed
- Web: Stop existing stream tracks before creating new ones to
  prevent resource leaks on repeated PiP toggle
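
A sketch of the track cleanup on toggle (hypothetical helper; the handler in
DesktopStream below does this before each captureStream call):

    // Stop the previous capture's tracks before creating a new stream,
    // so repeated PiP toggles don't leak MediaStreamTracks.
    function replacePipStream(
      prev: MediaStream | null,
      canvas: HTMLCanvasElement,
      fps: number
    ): MediaStream {
      prev?.getTracks().forEach((track) => track.stop());
      return canvas.captureStream(fps);
    }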

* Fix iOS PiP cleanup when stopped after view dismissal

- Add shouldDisconnectAfterPip flag to track deferred cleanup
- Set flag in onDisappear when PiP is active
- Clean up WebSocket and PiP resources when PiP stops if flag is set
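
The handshake, sketched with a minimal stand-in type (the real flag lives on
DesktopStreamService, as shown in the diff below):

    // Deferred cleanup: the view defers teardown while PiP is active;
    // the PiP did-stop callback performs it later if the flag was set.
    final class StreamLifecycle {
        var isPipActive = false
        var shouldDisconnectAfterPip = false

        // Called from the view's onDisappear.
        func viewDismissed() {
            if isPipActive {
                shouldDisconnectAfterPip = true // defer until PiP stops
            } else {
                teardown()
            }
        }

        // Called when PiP reports it has stopped.
        func pipDidStop() {
            isPipActive = false
            if shouldDisconnectAfterPip {
                shouldDisconnectAfterPip = false
                teardown()
            }
        }

        private func teardown() { /* close WebSocket, release PiP layer/controller */ }
    }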

* Fix additional PiP issues flagged by Bugbot

- iOS: Return actual isPaused state in PiP delegate using MainActor.assumeIsolated (sketched after this list)
- iOS: Add isPipReady flag and disable PiP button until setup completes
- Web: Don't forcibly exit PiP on unmount to match iOS behavior
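
A sketch of the assumeIsolated read (stand-in type; the real delegate method is
pictureInPictureControllerIsPlaybackPaused in DesktopStreamService below):

    // The playback delegate callback is nonisolated, but AVKit delivers it on
    // the main thread, so assumeIsolated can read @MainActor state synchronously.
    // It traps at runtime if that assumption is ever violated.
    @MainActor
    final class StreamState {
        var isPaused = false

        nonisolated func playbackPausedForPip() -> Bool {
            MainActor.assumeIsolated { isPaused }
        }
    }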
Author: Thomas Marchand
Date: 2026-01-03 22:16:02 +00:00
Committed by: GitHub
Parent: 0c2344b74d
Commit: 269958f0a9
6 changed files with 504 additions and 20 deletions

File: ControlClient (web dashboard)

@@ -584,6 +584,7 @@ export default function ControlClient() {
const router = useRouter();
const [items, setItems] = useState<ChatItem[]>([]);
const itemsRef = useRef<ChatItem[]>([]);
const [draftInput, setDraftInput] = useLocalStorage("control-draft", "");
const [input, setInput] = useState(draftInput);
@@ -691,6 +692,10 @@ export default function ControlClient() {
viewingMissionRef.current = viewingMission;
}, [viewingMission]);
useEffect(() => {
itemsRef.current = items;
}, [items]);
// Smart auto-scroll
const { containerRef, endRef, isAtBottom, scrollToBottom } =
useScrollToBottom();
@@ -1205,8 +1210,9 @@ export default function ControlClient() {
}
} else {
// Event has NO mission_id (from main session)
- // Only show if we're viewing the current/main mission
- if (viewingId !== currentMissionId) {
+ // Only show if we're viewing the current/main mission OR if currentMission
+ // hasn't been loaded yet (to handle race condition during initial load)
+ if (currentMissionId && viewingId !== currentMissionId) {
// We're viewing a parallel mission, skip main session events
if (event.type !== "status") {
return;
@@ -1248,16 +1254,20 @@ export default function ControlClient() {
// Status for a specific mission - only apply if viewing that mission
shouldApplyStatus = statusMissionId === viewingId;
} else {
- // Status for main session - only apply if viewing main mission or no specific mission
- shouldApplyStatus = !viewingId || viewingId === currentMissionId;
+ // Status for main session - only apply if viewing main mission, no specific mission,
+ // or currentMissionId hasn't loaded yet (to match event filter logic and avoid
+ // desktop stream staying open when status=idle comes during loading)
+ shouldApplyStatus = !viewingId || viewingId === currentMissionId || !currentMissionId;
}
if (shouldApplyStatus) {
setQueueLen(typeof q === "number" ? q : 0);
- // Clear progress when idle
+ // Clear progress and auto-close desktop stream when idle
if (newState === "idle") {
setProgress(null);
+ // Auto-close desktop stream when agent finishes
+ setShowDesktopStream(false);
}
// If we reconnected and agent is already running, add a visual indicator
@@ -1395,6 +1405,59 @@ export default function ControlClient() {
if (event.type === "tool_result" && isRecord(data)) {
const toolCallId = String(data["tool_call_id"] ?? "");
const endTime = Date.now();
// Extract display ID from desktop_start_session tool result
// Get tool name from the event data (preferred) or fall back to stored tool item
const eventToolName = typeof data["name"] === "string" ? data["name"] : null;
// Check for desktop_start_session right away using event data
// This handles the case where tool_call events might be filtered or missed
if (eventToolName === "desktop_start_session" || eventToolName === "desktop_desktop_start_session") {
let result = data["result"];
// Handle case where result is a JSON string that needs parsing
if (typeof result === "string") {
try {
result = JSON.parse(result);
} catch {
// Not valid JSON, leave as-is
}
}
if (isRecord(result) && typeof result["display"] === "string") {
const display = result["display"];
setDesktopDisplayId(display);
// Auto-open desktop stream when session starts
setShowDesktopStream(true);
}
}
// If eventToolName wasn't available, check stored items for desktop_start_session
// Use itemsRef for synchronous read to avoid side effects in state updaters
if (!eventToolName) {
const toolItem = itemsRef.current.find(
(it) => it.kind === "tool" && it.toolCallId === toolCallId
);
if (toolItem && toolItem.kind === "tool") {
const toolName = toolItem.name;
// Check for desktop_start_session (with or without desktop_ prefix from MCP)
if (toolName === "desktop_start_session" || toolName === "desktop_desktop_start_session") {
let result = data["result"];
// Handle case where result is a JSON string that needs parsing
if (typeof result === "string") {
try {
result = JSON.parse(result);
} catch {
// Not valid JSON, leave as-is
}
}
if (isRecord(result) && typeof result["display"] === "string") {
const display = result["display"];
setDesktopDisplayId(display);
setShowDesktopStream(true);
}
}
}
}
setItems((prev) =>
prev.map((it) =>
it.kind === "tool" && it.toolCallId === toolCallId

File: DesktopStream (web dashboard)

@@ -13,6 +13,7 @@ import {
Settings,
Maximize2,
Minimize2,
PictureInPicture2,
} from "lucide-react";
interface DesktopStreamProps {
@@ -41,10 +42,14 @@ export function DesktopStream({
const [fps, setFps] = useState(initialFps);
const [quality, setQuality] = useState(initialQuality);
const [isFullscreen, setIsFullscreen] = useState(false);
const [isPipActive, setIsPipActive] = useState(false);
const [isPipSupported, setIsPipSupported] = useState(false);
const wsRef = useRef<WebSocket | null>(null);
const canvasRef = useRef<HTMLCanvasElement>(null);
const containerRef = useRef<HTMLDivElement>(null);
const pipVideoRef = useRef<HTMLVideoElement | null>(null);
const pipStreamRef = useRef<MediaStream | null>(null);
const connectionIdRef = useRef(0); // Guard against stale callbacks from old connections
// Refs to store current values without triggering reconnection on slider changes
@@ -215,6 +220,75 @@ export function DesktopStream({
}
}, [isFullscreen]);
// Picture-in-Picture handler
const handlePip = useCallback(async () => {
if (!canvasRef.current) return;
if (isPipActive && document.pictureInPictureElement) {
// Exit PiP
try {
await document.exitPictureInPicture();
} catch {
// Ignore errors
}
return;
}
try {
// Stop any existing stream tracks to prevent resource leaks
if (pipStreamRef.current) {
pipStreamRef.current.getTracks().forEach((track) => track.stop());
}
// Create a video element from canvas stream
const canvas = canvasRef.current;
const stream = canvas.captureStream(fps);
pipStreamRef.current = stream;
// Create or reuse video element
if (!pipVideoRef.current) {
const video = document.createElement("video");
video.muted = true;
video.autoplay = true;
video.playsInline = true;
// Attach PiP event listeners directly to the video element
// These events fire on the video, not document, so we need to listen here
video.addEventListener("enterpictureinpicture", () => setIsPipActive(true));
video.addEventListener("leavepictureinpicture", () => setIsPipActive(false));
pipVideoRef.current = video;
}
pipVideoRef.current.srcObject = stream;
await pipVideoRef.current.play();
// Request PiP
await pipVideoRef.current.requestPictureInPicture();
} catch (err) {
console.error("Failed to enter Picture-in-Picture:", err);
}
}, [isPipActive, fps]);
// Check PiP support on mount
useEffect(() => {
setIsPipSupported(
"pictureInPictureEnabled" in document && document.pictureInPictureEnabled
);
}, []);
// Cleanup PiP resources on unmount
// Note: We don't forcibly exit PiP here to match iOS behavior where
// PiP continues when the sheet is dismissed. The PiP will naturally
// close when the WebSocket disconnects and the stream ends.
useEffect(() => {
return () => {
// Only stop stream tracks if PiP is not active
// This allows PiP to continue showing the last frame briefly
if (!document.pictureInPictureElement && pipStreamRef.current) {
pipStreamRef.current.getTracks().forEach((track) => track.stop());
}
};
}, []);
// Connect on mount
useEffect(() => {
connect();
@@ -291,6 +365,23 @@ export function DesktopStream({
</div>
<div className="flex items-center gap-2">
{isPipSupported && (
<button
onClick={handlePip}
disabled={connectionState !== "connected"}
className={cn(
"p-1.5 rounded-lg transition-colors",
connectionState === "connected"
? isPipActive
? "bg-indigo-500/30 text-indigo-400 hover:bg-indigo-500/40"
: "hover:bg-white/10 text-white/60 hover:text-white"
: "text-white/30 cursor-not-allowed"
)}
title={isPipActive ? "Exit Picture-in-Picture" : "Picture-in-Picture"}
>
<PictureInPicture2 className="w-4 h-4" />
</button>
)}
<button
onClick={handleFullscreen}
className="p-1.5 rounded-lg hover:bg-white/10 text-white/60 hover:text-white transition-colors"

File: Info.plist (iOS)

@@ -56,5 +56,9 @@
</dict>
<key>CFBundleIconName</key>
<string>AppIcon</string>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
</dict>
</plist>

File: DesktopStreamService.swift (iOS)

@@ -2,18 +2,21 @@
// DesktopStreamService.swift
// OpenAgentDashboard
//
- // WebSocket client for MJPEG desktop streaming
+ // WebSocket client for MJPEG desktop streaming with Picture-in-Picture support
//
import Foundation
import Observation
import UIKit
import AVKit
import CoreMedia
import VideoToolbox
@MainActor
@Observable
- final class DesktopStreamService {
+ final class DesktopStreamService: NSObject {
static let shared = DesktopStreamService()
- nonisolated init() {}
+ override nonisolated init() { super.init() }
// Stream state
var isConnected = false
@@ -24,6 +27,21 @@ final class DesktopStreamService {
var fps: Int = 10
var quality: Int = 70
// Picture-in-Picture state
var isPipSupported: Bool { AVPictureInPictureController.isPictureInPictureSupported() }
var isPipActive = false
/// Whether PiP has been set up and is ready to use
var isPipReady = false
/// When true, disconnect and cleanup when PiP stops (set when view is dismissed while PiP is active)
var shouldDisconnectAfterPip = false
private(set) var pipController: AVPictureInPictureController?
private(set) var sampleBufferDisplayLayer: AVSampleBufferDisplayLayer?
// For PiP content source
private var pipContentSource: AVPictureInPictureController.ContentSource?
private var lastFrameTime: CMTime = .zero
private var frameTimeScale: CMTimeScale = 600
private var webSocket: URLSessionWebSocketTask?
private var displayId: String?
// Connection ID to prevent stale callbacks from corrupting state
@@ -164,6 +182,11 @@ final class DesktopStreamService {
currentFrame = image
frameCount += 1
errorMessage = nil
// Feed frame to PiP layer if active
if isPipActive || pipController != nil {
feedFrameToPipLayer(image)
}
}
case .string(let text):
@@ -179,4 +202,206 @@ final class DesktopStreamService {
break
}
}
// MARK: - Picture-in-Picture
/// Set up the PiP layer and controller
func setupPip(in view: UIView) {
guard isPipSupported else { return }
// Create the sample buffer display layer
let layer = AVSampleBufferDisplayLayer()
layer.videoGravity = .resizeAspect
layer.frame = view.bounds
view.layer.addSublayer(layer)
sampleBufferDisplayLayer = layer
// Create PiP content source using the sample buffer layer
let contentSource = AVPictureInPictureController.ContentSource(
sampleBufferDisplayLayer: layer,
playbackDelegate: self
)
pipContentSource = contentSource
// Create PiP controller
let controller = AVPictureInPictureController(contentSource: contentSource)
controller.delegate = self
pipController = controller
isPipReady = true
}
/// Clean up PiP resources
func cleanupPip() {
stopPip()
sampleBufferDisplayLayer?.removeFromSuperlayer()
sampleBufferDisplayLayer = nil
pipController = nil
pipContentSource = nil
isPipReady = false
}
/// Start Picture-in-Picture
func startPip() {
guard isPipSupported,
let controller = pipController,
controller.isPictureInPicturePossible else { return }
controller.startPictureInPicture()
}
/// Stop Picture-in-Picture
func stopPip() {
pipController?.stopPictureInPicture()
}
/// Toggle Picture-in-Picture
func togglePip() {
if isPipActive {
stopPip()
} else {
startPip()
}
}
/// Feed a UIImage frame to the sample buffer layer for PiP display
private func feedFrameToPipLayer(_ image: UIImage) {
guard let cgImage = image.cgImage,
let layer = sampleBufferDisplayLayer else { return }
// Create pixel buffer from CGImage
let width = cgImage.width
let height = cgImage.height
var pixelBuffer: CVPixelBuffer?
let attrs: [CFString: Any] = [
kCVPixelBufferCGImageCompatibilityKey: true,
kCVPixelBufferCGBitmapContextCompatibilityKey: true,
kCVPixelBufferIOSurfacePropertiesKey: [:] as CFDictionary
]
let status = CVPixelBufferCreate(
kCFAllocatorDefault,
width, height,
kCVPixelFormatType_32BGRA,
attrs as CFDictionary,
&pixelBuffer
)
guard status == kCVReturnSuccess, let buffer = pixelBuffer else { return }
CVPixelBufferLockBaseAddress(buffer, [])
defer { CVPixelBufferUnlockBaseAddress(buffer, []) }
guard let context = CGContext(
data: CVPixelBufferGetBaseAddress(buffer),
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
space: CGColorSpaceCreateDeviceRGB(),
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue
) else { return }
context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
// Create format description
var formatDescription: CMFormatDescription?
CMVideoFormatDescriptionCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
formatDescriptionOut: &formatDescription
)
guard let format = formatDescription else { return }
// Calculate timing
let frameDuration = CMTime(value: 1, timescale: CMTimeScale(fps))
let presentationTime = CMTimeAdd(lastFrameTime, frameDuration)
lastFrameTime = presentationTime
var timingInfo = CMSampleTimingInfo(
duration: frameDuration,
presentationTimeStamp: presentationTime,
decodeTimeStamp: .invalid
)
// Create sample buffer
var sampleBuffer: CMSampleBuffer?
CMSampleBufferCreateReadyWithImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: buffer,
formatDescription: format,
sampleTiming: &timingInfo,
sampleBufferOut: &sampleBuffer
)
guard let sample = sampleBuffer else { return }
// Enqueue to layer
if layer.status == .failed {
layer.flush()
}
layer.enqueue(sample)
}
}
// MARK: - AVPictureInPictureControllerDelegate
extension DesktopStreamService: AVPictureInPictureControllerDelegate {
nonisolated func pictureInPictureControllerWillStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
Task { @MainActor in
isPipActive = true
}
}
nonisolated func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
Task { @MainActor in
isPipActive = false
// If the view was dismissed while PiP was active, clean up now
if shouldDisconnectAfterPip {
shouldDisconnectAfterPip = false
cleanupPip()
disconnect()
}
}
}
nonisolated func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, failedToStartPictureInPictureWithError error: Error) {
Task { @MainActor in
errorMessage = "PiP failed: \(error.localizedDescription)"
isPipActive = false
}
}
}
// MARK: - AVPictureInPictureSampleBufferPlaybackDelegate
extension DesktopStreamService: AVPictureInPictureSampleBufferPlaybackDelegate {
nonisolated func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, setPlaying playing: Bool) {
Task { @MainActor in
if playing {
resume()
} else {
pause()
}
}
}
nonisolated func pictureInPictureControllerTimeRangeForPlayback(_ pictureInPictureController: AVPictureInPictureController) -> CMTimeRange {
// Live stream - return a large range
return CMTimeRange(start: .zero, duration: CMTime(value: 3600, timescale: 1))
}
nonisolated func pictureInPictureControllerIsPlaybackPaused(_ pictureInPictureController: AVPictureInPictureController) -> Bool {
// This is called on the main thread, so we can safely access MainActor-isolated state
return MainActor.assumeIsolated { isPaused }
}
nonisolated func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, didTransitionToRenderSize newRenderSize: CMVideoDimensions) {
// Handle render size change if needed
}
nonisolated func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, skipByInterval skipInterval: CMTime) async {
// Not applicable for live stream
}
}

File: ControlView (iOS)

@@ -1059,9 +1059,11 @@ struct ControlView: View {
}
// If both viewingId and currentId are nil, accept the event
// This handles the case where a new mission was just created
- } else if viewingId != nil && viewingId != currentId {
+ } else if let vId = viewingId, let cId = currentId, vId != cId {
// Event has NO mission_id (from main session)
// Skip if we're viewing a different (parallel) mission
+ // Note: We only skip if BOTH viewingId and currentId are set and different
+ // If currentId is nil (not loaded yet), we accept the event
return
}
}
@@ -1078,9 +1080,10 @@ struct ControlView: View {
// Status for a specific mission - only apply if we're viewing that mission
shouldApply = statusId == viewingId
} else {
- // Status for main session - only apply if viewing the current (main) mission
- // or if we don't have a current mission yet
- shouldApply = viewingId == nil || viewingId == currentId
+ // Status for main session - only apply if viewing the current (main) mission,
+ // no specific mission, or currentId hasn't loaded yet (to match event filter
+ // logic and avoid desktop stream staying open when status=idle comes during loading)
+ shouldApply = viewingId == nil || viewingId == currentId || currentId == nil
}
if shouldApply {
@@ -1088,9 +1091,11 @@ struct ControlView: View {
let newState = ControlRunState(rawValue: state) ?? .idle
runState = newState
- // Clear progress when idle
+ // Clear progress and auto-close desktop stream when idle
if newState == .idle {
progress = nil
+ // Auto-close desktop stream when agent finishes
+ showDesktopStream = false
}
}
if let queue = data["queue_len"] as? Int {
@@ -1233,7 +1238,26 @@ struct ControlView: View {
messages.append(message)
}
}
case "tool_result":
if let name = data["name"] as? String {
// Extract display ID from desktop_start_session tool result
if name == "desktop_start_session" || name == "desktop_desktop_start_session" {
// Handle result as either a dictionary or a JSON string
var resultDict: [String: Any]? = data["result"] as? [String: Any]
if resultDict == nil, let resultString = data["result"] as? String,
let jsonData = resultString.data(using: .utf8),
let parsed = try? JSONSerialization.jsonObject(with: jsonData) as? [String: Any] {
resultDict = parsed
}
if let display = resultDict?["display"] as? String {
desktopDisplayId = display
// Auto-open desktop stream when session starts
showDesktopStream = true
}
}
}
default:
break
}

File: DesktopStreamView (iOS)

@@ -3,10 +3,11 @@
// OpenAgentDashboard
//
// Real-time desktop stream viewer with controls
- // Designed to be shown in a bottom sheet
+ // Designed to be shown in a bottom sheet, with Picture-in-Picture support
//
import SwiftUI
import AVKit
struct DesktopStreamView: View {
@State private var streamService = DesktopStreamService.shared
@@ -28,9 +29,17 @@ struct DesktopStreamView: View {
// Header bar
headerView
- // Stream content
- streamContent
- .frame(maxWidth: .infinity, maxHeight: .infinity)
+ // Stream content with PiP layer
+ ZStack {
+ // PiP-enabled layer (hidden, used for feeding frames to PiP)
+ PipLayerView(streamService: streamService)
+ .frame(width: 1, height: 1)
+ .opacity(0)
+ // Visible stream content
+ streamContent
+ }
+ .frame(maxWidth: .infinity, maxHeight: .infinity)
// Controls (when visible)
if showControls {
@@ -43,7 +52,15 @@ struct DesktopStreamView: View {
streamService.connect(displayId: displayId)
}
.onDisappear {
- streamService.disconnect()
+ // Only disconnect if PiP is not active
+ // This allows the stream to continue in PiP mode after the sheet is dismissed
+ if streamService.isPipActive {
+ // Mark for cleanup when PiP stops
+ streamService.shouldDisconnectAfterPip = true
+ } else {
+ streamService.cleanupPip()
+ streamService.disconnect()
+ }
}
.onTapGesture {
withAnimation(.easeInOut(duration: 0.2)) {
@@ -87,6 +104,23 @@ struct DesktopStreamView: View {
.font(.caption2.monospaced())
.foregroundStyle(Theme.textMuted)
// PiP button (if supported)
if streamService.isPipSupported {
Button {
streamService.togglePip()
HapticService.lightTap()
} label: {
Image(systemName: streamService.isPipActive ? "pip.exit" : "pip.enter")
.font(.system(size: 14, weight: .medium))
.foregroundStyle(streamService.isPipActive ? Theme.accent : Theme.textSecondary)
.frame(width: 28, height: 28)
.background(streamService.isPipActive ? Theme.accent.opacity(0.2) : Theme.backgroundSecondary)
.clipShape(Circle())
}
.disabled(!streamService.isConnected || !streamService.isPipReady)
.opacity(streamService.isConnected && streamService.isPipReady ? 1 : 0.5)
}
// Close button
Button {
dismiss()
@@ -158,7 +192,7 @@ struct DesktopStreamView: View {
private var controlsView: some View {
VStack(spacing: 16) {
- // Play/Pause and reconnect buttons
+ // Play/Pause, PiP, and reconnect buttons
HStack(spacing: 16) {
// Play/Pause
Button {
@@ -191,6 +225,23 @@ struct DesktopStreamView: View {
.background(Theme.backgroundSecondary)
.clipShape(Circle())
}
// PiP button in controls (larger, more visible)
if streamService.isPipSupported {
Button {
streamService.togglePip()
HapticService.lightTap()
} label: {
Image(systemName: streamService.isPipActive ? "pip.exit" : "pip.enter")
.font(.system(size: 16, weight: .medium))
.foregroundStyle(streamService.isPipActive ? .white : Theme.textPrimary)
.frame(width: 44, height: 44)
.background(streamService.isPipActive ? Theme.accent : Theme.backgroundSecondary)
.clipShape(Circle())
}
.disabled(!streamService.isConnected || !streamService.isPipReady)
.opacity(streamService.isConnected && streamService.isPipReady ? 1 : 0.5)
}
}
// Quality and FPS sliders
@@ -240,6 +291,32 @@ struct DesktopStreamView: View {
}
}
// MARK: - PiP Layer View (UIViewRepresentable)
/// A UIView wrapper that sets up the AVSampleBufferDisplayLayer for PiP
struct PipLayerView: UIViewRepresentable {
let streamService: DesktopStreamService
func makeUIView(context: Context) -> UIView {
let view = UIView()
view.backgroundColor = .clear
// Set up PiP on the main actor
Task { @MainActor in
streamService.setupPip(in: view)
}
return view
}
func updateUIView(_ uiView: UIView, context: Context) {
// Update layer frame if needed
if let layer = streamService.sampleBufferDisplayLayer {
layer.frame = uiView.bounds
}
}
}
// MARK: - Preview
#Preview {