Skip to content

Commit

Permalink
Add capture configuration overlay (inclusive/exclusive app filter) and stabilize the virtual camera sink
Browse files Browse the repository at this point in the history
  • Loading branch information
jcm committed Feb 17, 2024
1 parent c93fe5b commit 03bf47b
Show file tree
Hide file tree
Showing 8 changed files with 204 additions and 130 deletions.
7 changes: 7 additions & 0 deletions CaptureSample/CaptureEngine.swift
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,9 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {
var dstData: UnsafeMutableRawPointer!
private let frameHandlerQueue = DispatchQueue(label: "com.jcm.Record.FrameHandlerQueue")

var onehundredthframetest: CMSampleBuffer!
var counttest = 0

var sink: RecordCameraStreamSink! = RecordCameraStreamSink()
var sinkInitialized = false

Expand Down Expand Up @@ -157,6 +160,10 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {
switch outputType {
case .screen:
if let frame = self.createFrame(for: sampleBuffer) {
self.counttest += 1
if self.counttest == 100 {
self.onehundredthframetest = frame.encodedFrame
}
IOSurfaceLock(frame.surface!, [], nil)
self.capturedFrameHandler?(frame)
self.sink.enqueue(frame.surface!)
Expand Down
4 changes: 2 additions & 2 deletions CaptureSample/Enums.swift
Original file line number Diff line number Diff line change
Expand Up @@ -192,9 +192,9 @@ public enum CapturePixelFormat: Int, Codable, CaseIterable {
case .l10r:
return kCVPixelFormatType_ARGB2101010LEPacked
case .biplanarpartial420v:
return kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange
return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
case .biplanarfull420f:
return kCVPixelFormatType_420YpCbCr10BiPlanarFullRange
return kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
}
}
func stringValue() -> String {
Expand Down
14 changes: 9 additions & 5 deletions CaptureSample/RecordCameraStreamSink.swift
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ class RecordCameraStreamSink: NSObject {

CVPixelBufferPoolCreate(kCFAllocatorDefault, nil, pixelBufferAttributes, &_bufferPool)

let pointerQueue = UnsafeMutablePointer<Unmanaged<CMSimpleQueue>?>.allocate(capacity: 1)
let pointerQueue = UnsafeMutablePointer<Unmanaged<CMSimpleQueue>?>.allocate(capacity: 8)
let pointerRef = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
let result = CMIOStreamCopyBufferQueue(sinkStream, {
(sinkStream: CMIOStreamID, buf: UnsafeMutableRawPointer?, refcon: UnsafeMutableRawPointer?) in
Expand Down Expand Up @@ -163,24 +163,28 @@ class RecordCameraStreamSink: NSObject {
}

func enqueue(_ image: IOSurfaceRef) {
guard sinkQueue != nil else { return }
guard CMSimpleQueueGetCount(sinkQueue!) < CMSimpleQueueGetCapacity(sinkQueue!) else {
print("error enqueuing")
//print("error enqueuing")
return
}
var err: OSStatus = 0
var pixelBuffer: Unmanaged<CVPixelBuffer>?
CVPixelBufferCreateWithIOSurface(kCFAllocatorDefault, image, self._bufferAuxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {

var sbuf: CMSampleBuffer!
var sbuf: CMSampleBuffer?
var timingInfo = CMSampleTimingInfo()
timingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())
err = CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pixelBuffer.takeRetainedValue(), dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: self._videoDescription, sampleTiming: &timingInfo, sampleBufferOut: &sbuf)
if err == 0 {
if let sbuf = sbuf {
let pointerRef = UnsafeMutableRawPointer(Unmanaged.passRetained(sbuf).toOpaque())
CMSimpleQueueEnqueue(self.sinkQueue!, element: pointerRef)
self.setTestProperty(streamId: self.sourceStream!, newValue: "a")
let queueError = CMSimpleQueueEnqueue(self.sinkQueue!, element: pointerRef)
if queueError != 0 {
//print(queueError)
}
//self.setTestProperty(streamId: self.sourceStream!, newValue: "a")
}
}
} else {
Expand Down
8 changes: 6 additions & 2 deletions CaptureSample/ScreenRecorder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,10 @@ class ScreenRecorder: ObservableObject {
}
}

@Published var applicationFilterIsInclusive = false {
didSet { updateEngine() }
}

@Published var errorText = ""
@Published var isShowingError = false

Expand Down Expand Up @@ -588,7 +592,7 @@ class ScreenRecorder: ObservableObject {
// If a user chooses to exclude the app from the stream,
// exclude it by matching its bundle identifier.
excludedApps = availableApps.filter { app in
!self.selectedApplications.contains(app)
self.selectedApplications.contains(app) == self.applicationFilterIsInclusive
}
// Create a content filter with excluded apps.
filter = SCContentFilter(display: display,
Expand Down Expand Up @@ -642,7 +646,7 @@ class ScreenRecorder: ObservableObject {

// Increase the depth of the frame queue to ensure high fps at the expense of increasing
// the memory footprint of WindowServer.
streamConfig.queueDepth = 5
streamConfig.queueDepth = 15

return streamConfig
}
Expand Down
148 changes: 89 additions & 59 deletions CaptureSample/Views/CaptureConfigurationOverlay.swift
Original file line number Diff line number Diff line change
Expand Up @@ -20,80 +20,80 @@ struct CaptureConfigurationOverlay: View {

switch screenRecorder.captureType {
case .display:
GroupBox {
LazyVGrid(columns: columns) {
ForEach(screenRecorder.availableApps, id: \.self) { app in
VStack {
HStack {
Toggle("butt", isOn: Binding( get: {
return screenRecorder.selectedApplications.contains(app)
}, set: { isOn in
if isOn { screenRecorder.selectedApplications.insert(app) }
else { screenRecorder.selectedApplications.remove(app) }
UserDefaults.standard.setValue(isOn, forKey: app.bundleIdentifier)
}))
.controlSize(.large)
Text(app.applicationName)
.font(.title2)
.tag(app)
.fontWeight(.regular)
.opacity(0.8)
Spacer(minLength: 1)
Rectangle()
.fill(.quinary)
//.padding(EdgeInsets(top: -20, leading: 0, bottom: -20, trailing: 0))
.frame(width: 1, height: 200)
VStack {
//GroupBox {
Picker("Capture", selection: $screenRecorder.applicationFilterIsInclusive) {
Text("Inclusive")
.tag(true)
.font(.title)
Text("Exclusive")
.tag(false)
.font(.title)
}
.pickerStyle(.radioGroup)
.horizontalRadioGroupLayout()
.controlSize(.large)
//.background(.clear)
//}
.frame(width: 440, height: 100)
.labelsHidden()
.background(.thickMaterial)
.cornerRadius(20.0)
GroupBox {
LazyVGrid(columns: columns) {
ForEach(screenRecorder.availableApps, id: \.self) { app in
var dumb = false
VStack {
HStack {
Toggle("butt", isOn: Binding( get: {
return screenRecorder.selectedApplications.contains(app)
}, set: { isOn in
if isOn { screenRecorder.selectedApplications.insert(app) }
else { screenRecorder.selectedApplications.remove(app) }
UserDefaults.standard.setValue(isOn, forKey: app.bundleIdentifier)
}))
.controlSize(.large)
.toggleStyle(OtherCheckboxToggleStyle())
Text(app.applicationName)
.font(.title2)
.tag(app)
.fontWeight(.regular)
.opacity(0.8)
Spacer(minLength: 1)
//Rectangle()
//.fill(.quinary)
//.frame(width: 1, height: 200)
}
.frame(height: 25)
/*Rectangle()
.fill(.quinary)
.frame(width: 1000, height: 1)
.gridCellColumns(2)*/
}
.frame(height: 25)
Rectangle()
.fill(.quinary)
.frame(width: 1000, height: 1)
//.padding(EdgeInsets(top: 0, leading: -20, bottom: 0, trailing: -20))
.gridCellColumns(2)
}
}
.padding(EdgeInsets(top: 20, leading: 0, bottom: 20, trailing: -32))
}
.padding(EdgeInsets(top: 20, leading: 0, bottom: 20, trailing: -32))
.frame(width: 440)
.labelsHidden()
.background(.thickMaterial)
.cornerRadius(20.0)
}
/*Grid {
List(screenRecorder.availableApps, selection: $screenRecorder.selectedApplications) { app in
HStack {
Toggle("butt", isOn: Binding( get: {
return screenRecorder.selectedApplications.contains(app)
}, set: { isOn in
if isOn { screenRecorder.selectedApplications.insert(app) }
else { screenRecorder.selectedApplications.remove(app) }
}))
.controlSize(.large)
Text(app.applicationName)
.font(.title2)
.frame(height: 30)
.tag(app)
}
}
}*/
.frame(width: 440)
.labelsHidden()
.background(OverlayMaterialView())
.cornerRadius(20.0)
//.padding(EdgeInsets(top: 50, leading: 0, bottom: 50, trailing: 0))
//.opacity(0.6)




case .window:
Picker("Window", selection: $screenRecorder.selectedWindow) {
EmptyView()
/*Picker("Window", selection: $screenRecorder.selectedWindow) {
ForEach(screenRecorder.availableWindows, id: \.self) { window in
Text(window.displayName)
.tag(SCWindow?.some(window))
}
}
.controlSize(.large)
.frame(width: 500)
.onHover(perform: { hovering in
Task {
await self.screenRecorder.refreshAvailableContent()
}
})
})*/
}

}
Expand All @@ -105,3 +105,33 @@ struct ApplicationProxy: Identifiable {
var isToggled = false
var application: SCRunningApplication
}

/// A custom checkbox-style toggle: a stroked rounded square that shows a
/// checkmark while on. Used by the capture-configuration overlay in place of
/// the standard macOS checkbox.
///
/// Note: the original attached two identical tap gestures — one to the
/// rectangle's background (via a clear `contentShape`) and one to the outer
/// `HStack` — both toggling the binding. They are consolidated here into a
/// single gesture on the row, with `contentShape` keeping the whole row
/// (including transparent areas) tappable.
struct OtherCheckboxToggleStyle: ToggleStyle {
    func makeBody(configuration: Configuration) -> some View {
        HStack {
            RoundedRectangle(cornerRadius: 5.0)
                .stroke(lineWidth: 1)
                .frame(width: 22, height: 22)
                .cornerRadius(5.0)
                .overlay {
                    // Checkmark is drawn only in the "on" state.
                    if configuration.isOn {
                        Image(systemName: "checkmark")
                    }
                }
        }
        // Make the entire row hit-testable so taps on the (clear) box
        // interior register, matching the original background tap target.
        .contentShape(Rectangle())
        .onTapGesture {
            withAnimation(.snappy(duration: 0.1)) {
                configuration.isOn.toggle()
            }
        }
    }
}
5 changes: 4 additions & 1 deletion CaptureSample/Views/CapturePreview.swift
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,12 @@ struct CaptureSplitViewPreview: NSViewRepresentable {

// Called by ScreenRecorder as it receives new video frames.
/// Displays a newly captured frame in the preview layer and forwards its
/// encoded counterpart (when present) to the renderer.
/// The frame's IOSurface is locked for the duration of the update —
/// presumably to avoid the capture pipeline mutating the surface while it
/// is being attached to the layer; NOTE(review): confirm the producer also
/// respects this lock.
/// - Parameter frame: the captured frame; `surface` is force-unwrapped, so a
///   frame without a surface is treated as a programmer error here — TODO
///   confirm the upstream capture path guarantees a non-nil surface.
func updateFrame(_ frame: CapturedFrame) {
IOSurfaceLock(frame.surface!, [], nil)
// Attach the IOSurface directly as layer contents (zero-copy display path).
contentLayer.contents = frame.surface
// Shadows the outer `frame` with the optional encoded frame, if one exists.
if let frame = frame.encodedFrame {
self.renderer.enqueue(frame)
}

IOSurfaceUnlock(frame.surface!, [], nil)
//encodedContentLayer.contents = frame.encodedSurface
}

Expand Down Expand Up @@ -93,8 +94,10 @@ struct CaptureSplitViewPreview: NSViewRepresentable {

override func viewDidEndLiveResize() {
super.viewDidEndLiveResize()
IOSurfaceLock(firstView.layer!.contents as! IOSurface, [], nil)
let scale = CGFloat(IOSurfaceGetHeight(firstView.layer!.contents as! IOSurface)) / self.frame.height
firstView.layer?.contentsScale = scale
IOSurfaceUnlock(firstView.layer!.contents as! IOSurface, [], nil)
if let surface = secondView.layer?.contents as? IOSurface {
let otherScale = CGFloat(IOSurfaceGetHeight(surface)) / self.frame.height
secondView.layer?.contentsScale = otherScale
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,13 @@ struct AudioConfigurationView: View {
var body: some View {
GroupBox {
VStack(alignment: .imageTitleAlignmentGuide) {
Toggle("Capture audio", isOn: $screenRecorder.isAudioCaptureEnabled)
.padding(EdgeInsets(top: 0, leading: 48, bottom: 0, trailing: 0))
.controlSize(.small)
HStack {
Spacer()
Toggle("Capture audio", isOn: $screenRecorder.isAudioCaptureEnabled)
.padding(EdgeInsets(top: 0, leading: 48, bottom: 0, trailing: 0))
.controlSize(.small)
Spacer()
}
}
}
.modifier(ConfigurationSubViewStyle())
Expand Down
Loading

0 comments on commit 03bf47b

Please sign in to comment.