
Commit 8fb61a0

[Enhancement] Implement static frame pipeline for PiP
1 parent 090f32e commit 8fb61a0

11 files changed: +387 −26 lines

Sources/StreamVideoSwiftUI/CallView/CallParticipantImageView.swift

Lines changed: 24 additions & 0 deletions
@@ -48,3 +48,27 @@ public struct CallParticipantImageView<Factory: ViewFactory>: View {
         )
     }
 }
+
+public struct PictureInPictureParticipantImageView: View {
+
+    @Injected(\.colors) var colors
+
+    var imageURL: URL?
+    var content: AnyView
+
+    public init(
+        imageURL: URL? = nil,
+        @ViewBuilder contentProvider: () -> some View
+    ) {
+        self.imageURL = imageURL
+        content = AnyView(contentProvider())
+    }
+
+    public var body: some View {
+        StreamLazyImage(imageURL: imageURL) {
+            Color(colors.participantBackground)
+        }
+        .blur(radius: 8)
+        .overlay(content)
+    }
+}
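A minimal usage sketch of the new view follows; it is not part of the commit. It assumes `CallParticipant` exposes `name` and `profileImageURL`, and the wrapper type below is purely illustrative:

import StreamVideo
import StreamVideoSwiftUI
import SwiftUI

// Illustrative wrapper, not part of the commit: shows a participant's name
// over the blurred avatar background provided by the new view.
struct ParticipantFallbackSketchView: View {
    var participant: CallParticipant

    var body: some View {
        PictureInPictureParticipantImageView(imageURL: participant.profileImageURL) {
            Text(participant.name)
                .font(.headline)
                .foregroundColor(.white)
        }
    }
}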

Sources/StreamVideoSwiftUI/CallingViews/CallingGroupView.swift

Lines changed: 24 additions & 10 deletions
@@ -148,16 +148,30 @@ struct CircledTitleView: View {
     var size: CGFloat = .expandedAvatarSize
 
     var body: some View {
-        ZStack {
-            Circle()
-                .foregroundColor(colors.tintColor)
-            Text(title)
-                .foregroundColor(.white)
-                .font(fonts.title)
-                .minimumScaleFactor(0.4)
-                .padding()
+        if size > 0 {
+            ZStack {
+                Circle()
+                    .foregroundColor(colors.tintColor)
+                Text(title)
+                    .foregroundColor(.white)
+                    .font(fonts.title)
+                    .minimumScaleFactor(0.4)
+                    .padding()
+            }
+            .frame(maxWidth: size, maxHeight: size)
+            .modifier(ShadowModifier())
+        } else {
+            ZStack {
+                Circle()
+                    .foregroundColor(colors.tintColor)
+                Text(title)
+                    .foregroundColor(.white)
+                    .font(fonts.title)
+                    .minimumScaleFactor(0.4)
+                    .padding()
+            }
+            .padding()
+            .modifier(ShadowModifier())
         }
-        .frame(maxWidth: size, maxHeight: size)
-        .modifier(ShadowModifier())
     }
 }
Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+#if canImport(UIKit)
+import Foundation
+import UIKit
+
+extension CVPixelBuffer {
+    static func build(from uiImage: UIImage) -> CVPixelBuffer? {
+        let width = Int(uiImage.size.width)
+        let height = Int(uiImage.size.height)
+
+        var pixelBuffer: CVPixelBuffer?
+        let status = CVPixelBufferCreate(
+            kCFAllocatorDefault,
+            width,
+            height,
+            kCVPixelFormatType_32BGRA,
+            [
+                kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
+                kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue
+            ] as CFDictionary,
+            &pixelBuffer
+        )
+
+        guard
+            let cgImage = uiImage.cgImage,
+            status == kCVReturnSuccess,
+            let pixelBuffer = pixelBuffer
+        else {
+            return nil
+        }
+
+        CVPixelBufferLockBaseAddress(pixelBuffer, [])
+        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }
+
+        let context = CGContext(
+            data: CVPixelBufferGetBaseAddress(pixelBuffer),
+            width: width,
+            height: height,
+            bitsPerComponent: 8,
+            bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
+            space: CGColorSpaceCreateDeviceRGB(),
+            bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue
+        )
+
+        context?.draw(
+            cgImage,
+            in: CGRect(x: 0, y: 0, width: width, height: height)
+        )
+
+        return pixelBuffer
+    }
+}
+#endif
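A short sketch of how the new helper could be exercised from code in the same module (the extension has no access modifier, so it is internal). The placeholder image below is illustrative only; the resulting buffer uses the 32BGRA format set up by the `CVPixelBufferCreate` call above:

import CoreVideo
import UIKit

// Illustrative only, not part of the commit: rasterize a solid-colour
// placeholder and push it through the helper above.
func makePlaceholderBuffer(size: CGSize = .init(width: 640, height: 480)) -> CVPixelBuffer? {
    let placeholder = UIGraphicsImageRenderer(size: size).image { context in
        UIColor.darkGray.setFill()
        context.fill(CGRect(origin: .zero, size: size))
    }
    return CVPixelBuffer.build(from: placeholder)
}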
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import SwiftUI
+
+extension View {
+
+    @MainActor
+    func toPixelBuffer(contentSize: CGSize) -> CVPixelBuffer? {
+        guard #available(iOS 16.0, *) else {
+            return nil
+        }
+        let renderer = ImageRenderer(content: self)
+        renderer.proposedSize = .init(contentSize)
+        if let image = renderer.uiImage {
+            return .build(from: image)
+        } else {
+            return nil
+        }
+    }
+}
+
+extension CVBuffer: @unchecked @retroactive Sendable {}
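A sketch of how this rendering path might be driven from same-module code: rasterize a simple SwiftUI placeholder at the Picture-in-Picture content size. `makeStaticFrame` is a hypothetical helper, not part of the commit, and it returns nil on iOS versions earlier than 16, mirroring the availability check above:

import CoreVideo
import SwiftUI

// Hypothetical same-module helper, not part of the commit: rasterizes a
// label-style view at the requested size using the extension above.
@MainActor
func makeStaticFrame(named name: String, contentSize: CGSize) -> CVPixelBuffer? {
    Text(name)
        .frame(width: contentSize.width, height: contentSize.height)
        .background(Color.black)
        .toPixelBuffer(contentSize: contentSize)
}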

Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamAVPictureInPictureVideoCallViewController.swift

Lines changed: 0 additions & 2 deletions
@@ -52,8 +52,6 @@ final class StreamAVPictureInPictureVideoCallViewController: AVPictureInPictureV
     @available(*, unavailable)
     required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }
 
-    /// Initializes a new instance and sets the `preferredContentSize` to `Self.defaultPreferredContentSize`
-    /// value.
     required init() {
         super.init(nibName: nil, bundle: nil)
         contentView.pictureInPictureWindowSizePolicy.controller = self

Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureAdapter.swift

Lines changed: 65 additions & 4 deletions
@@ -12,6 +12,8 @@ import UIKit
 /// Picture display.
 public final class StreamPictureInPictureAdapter: @unchecked Sendable {
 
+    @Injected(\.staticVideoSource) private var staticVideoSource
+
     /// The active call.
     public var call: Call? {
         didSet {
@@ -36,6 +38,7 @@ public final class StreamPictureInPictureAdapter: @unchecked Sendable {
         didSet {
             Task { @MainActor in
                 pictureInPictureController?.onSizeUpdate = { [weak self] size in
+                    self?.staticVideoSource.contentSize = size
                     if let activeParticipant = self?.activeParticipant {
                         self?.onSizeUpdate?(size, activeParticipant)
                     }
@@ -89,16 +92,74 @@ public final class StreamPictureInPictureAdapter: @unchecked Sendable {
         let sessionId = call?.state.sessionId
         let otherParticipants = participants.filter { $0.sessionId != sessionId }
 
-        if let session = call?.state.screenSharingSession, call?.state.isCurrentUserScreensharing == false,
-           let track = session.track {
+        if
+            let session = call?.state.screenSharingSession, call?.state.isCurrentUserScreensharing == false,
+            let track = session.track {
             pictureInPictureController?.track = track
+            staticVideoSource.participant = nil
             activeParticipant = nil
-        } else if let participant = otherParticipants.first(where: { $0.track != nil }), let track = participant.track {
+
+            log.debug(
+                "Active participant:\(session.participant.name) with screensharing will be used.",
+                subsystems: .pictureInPicture
+            )
+        } else if
+            let participant = otherParticipants.first(where: { $0.hasVideo && $0.track != nil }),
+            let track = participant.track {
+            if participant.trackSize != .zero {
+                pictureInPictureController?.preferredContentSize = participant.trackSize
+            }
             pictureInPictureController?.track = track
+            staticVideoSource.participant = nil
             activeParticipant = participant
-        } else if let localParticipant = call?.state.localParticipant, let track = localParticipant.track {
+
+            log.debug(
+                "Active participant:\(participant.name) will be used.",
+                subsystems: .pictureInPicture
+            )
+
+        } else if
+            let participant = call?.state.dominantSpeaker {
+            if participant.trackSize != .zero {
+                pictureInPictureController?.preferredContentSize = participant.trackSize
+            }
+            pictureInPictureController?.track = nil
+            staticVideoSource.participant = participant
+            activeParticipant = participant
+
+            log.debug(
+                "Dominant speaker participant:\(participant.name) will be used.",
+                subsystems: .pictureInPicture
+            )
+
+        } else if
+            let localParticipant = call?.state.localParticipant,
+            localParticipant.hasVideo,
+            let track = localParticipant.track {
+            if localParticipant.trackSize != .zero {
+                pictureInPictureController?.preferredContentSize = localParticipant.trackSize
+            }
             pictureInPictureController?.track = track
+            staticVideoSource.participant = nil
             activeParticipant = localParticipant
+
+            log.debug(
+                "Local participant:\(localParticipant.name) will be used.",
+                subsystems: .pictureInPicture
+            )
+
+        } else {
+            if let trackSize = otherParticipants.first?.trackSize, trackSize != .zero {
+                pictureInPictureController?.preferredContentSize = trackSize
+            }
+            pictureInPictureController?.track = nil
+            staticVideoSource.participant = otherParticipants.first
+            activeParticipant = otherParticipants.first
+
+            log.debug(
+                "No active participant found. Will use the first participant:\(otherParticipants.first?.name ?? "n/a") in the list.",
+                subsystems: .pictureInPicture
+            )
         }
     }
 }

Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureController.swift

Lines changed: 8 additions & 0 deletions
@@ -33,6 +33,14 @@ final class StreamPictureInPictureController: NSObject, AVPictureInPictureContro
         }
     }
 
+    var preferredContentSize: CGSize = .init(width: 640, height: 480) {
+        didSet {
+            Task { @MainActor in
+                contentViewController?.preferredContentSize = preferredContentSize
+            }
+        }
+    }
+
     /// A closure called when the picture-in-picture view's size changes.
     @MainActor
     var onSizeUpdate: (@Sendable(CGSize) -> Void)? {

Sources/StreamVideoSwiftUI/Utils/PictureInPicture/StreamPictureInPictureVideoRenderer.swift

Lines changed: 17 additions & 7 deletions
@@ -10,6 +10,8 @@ import StreamWebRTC
 /// A view that can be used to render an instance of `RTCVideoTrack`
 final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
 
+    @Injected(\.staticVideoSource) private var staticVideoSource
+
     /// The rendering track.
     var track: RTCVideoTrack? {
         didSet {
@@ -47,6 +49,7 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
 
     /// The cancellable used to control the bufferPublisher stream.
     private var bufferUpdatesCancellable: AnyCancellable?
+    private var staticFrameGenerationCancellable: AnyCancellable?
 
     /// The view's size.
     /// - Note: We are using this property instead for `frame.size` or `bounds.size` so we can
@@ -185,18 +188,16 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
     private func process(_ buffer: CMSampleBuffer) {
         guard
             bufferUpdatesCancellable != nil,
-            let trackId = track?.trackId,
             buffer.isValid
         else {
             contentView.renderingComponent.flush()
             logMessage(.debug, message: "🔥 Display layer flushed.")
             return
         }
 
-        logMessage(
-            .debug,
-            message: "⚙️ Processing buffer for trackId:\(trackId)."
-        )
+        let trackId = track?.trackId ?? "-"
+
+        logMessage(.debug, message: "⚙️ Processing buffer for trackId:\(trackId).")
         if #available(iOS 14.0, *) {
             if contentView.renderingComponent.requiresFlushToResumeDecoding == true {
                 contentView.renderingComponent.flush()
@@ -217,13 +218,22 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
         for track: RTCVideoTrack?,
         on window: UIWindow?
     ) {
-        guard window != nil, let track else { return }
+        guard window != nil else { return }
+
+        staticFrameGenerationCancellable?.cancel()
+        staticVideoSource.isEnabled = track == nil
+        if let track {
+            track.add(self)
+        } else {
+            staticFrameGenerationCancellable = staticVideoSource
+                .renderingPublisher
+                .sink { [weak self] in self?.renderFrame($0) }
+        }
 
         bufferUpdatesCancellable = bufferPublisher
             .receive(on: DispatchQueue.main)
            .sink { [weak self] in self?.process($0) }
 
-        track.add(self)
        logMessage(
            .debug,
            message: "⏳ Frame streaming for Picture-in-Picture started."
