application picker, miscellaneous changes
jcm committed Apr 29, 2024
1 parent 03bf47b commit b9881ac
Showing 9 changed files with 49 additions and 33 deletions.
5 changes: 4 additions & 1 deletion CaptureSample/CaptureEngine.swift
@@ -127,6 +127,7 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {

var sink: RecordCameraStreamSink! = RecordCameraStreamSink()
var sinkInitialized = false
var virtualCameraIsActive = false

var framesWritten = 0

@@ -166,7 +167,9 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {
}
IOSurfaceLock(frame.surface!, [], nil)
self.capturedFrameHandler?(frame)
self.sink.enqueue(frame.surface!)
if self.virtualCameraIsActive {
self.sink.enqueue(frame.surface!)
}
IOSurfaceUnlock(frame.surface!, [], nil)
}
case .audio:
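The change above only forwards captured frames to the camera-extension sink while the virtual camera is marked active. A minimal sketch of that gating pattern, with a hypothetical `FrameSink` protocol standing in for `RecordCameraStreamSink`:

```swift
import IOSurface

// Hypothetical stand-in for RecordCameraStreamSink; not part of the repo.
protocol FrameSink {
    func enqueue(_ surface: IOSurfaceRef)
}

final class GatedStreamOutput {
    var virtualCameraIsActive = false   // flipped from the UI / ScreenRecorder layer
    var sink: FrameSink?

    /// Locks the surface and only enqueues it for the virtual camera while the camera is active.
    func handle(_ surface: IOSurfaceRef) {
        IOSurfaceLock(surface, [], nil)
        defer { IOSurfaceUnlock(surface, [], nil) }
        if virtualCameraIsActive {
            sink?.enqueue(surface)
        }
    }
}
```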
8 changes: 8 additions & 0 deletions CaptureSample/CaptureSampleApp.swift
@@ -109,6 +109,14 @@ struct CaptureSampleApp: App {
}
}
}
CommandMenu("Camera Extension") {
Button("Install Camera Extension...") {
self.screenRecorder.installExtension()
}
Button("Uninstall Camera Extension...") {
self.screenRecorder.uninstallExtension()
}
}
}
Window("Test Pattern", id: "testpattern") {
TestPatternView(fps: $screenRecorder.framesPerSecond)
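`CommandMenu` adds a custom top-level menu to the menu bar from SwiftUI, which is how the install/uninstall actions are exposed here. A self-contained sketch of the same pattern (the app type and print actions are placeholders, not the real `ScreenRecorder` calls):

```swift
import SwiftUI

@main
struct MenuDemoApp: App {   // placeholder app, for illustration only
    var body: some Scene {
        WindowGroup {
            Text("Main window")
        }
        .commands {
            // Appears in the menu bar alongside the standard menus.
            CommandMenu("Camera Extension") {
                Button("Install Camera Extension...") {
                    print("install requested")   // stand-in for screenRecorder.installExtension()
                }
                Button("Uninstall Camera Extension...") {
                    print("uninstall requested") // stand-in for screenRecorder.uninstallExtension()
                }
            }
        }
    }
}
```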
8 changes: 7 additions & 1 deletion CaptureSample/ScreenRecorder.swift
@@ -146,6 +146,11 @@ class ScreenRecorder: ObservableObject {
@Published var isRunning = false
@Published var isRecording = false
@Published var isEncoding = false
@Published var virtualCameraIsActive = true {
didSet {
self.captureEngine.streamOutput.virtualCameraIsActive = virtualCameraIsActive
}
}

@Published var captureWidth: Int = 0
@Published var captureHeight: Int = 0
@@ -616,7 +621,7 @@ class ScreenRecorder: ObservableObject {
streamConfig.excludesCurrentProcessAudio = isAppAudioExcluded
if #available(macOS 14.0, *) {
//streamConfig.capturesShadowsOnly = true
//streamConfig.ignoreGlobalClipDisplay = true
streamConfig.ignoreGlobalClipDisplay = true
} else {
// Fallback on earlier versions
}
@@ -647,6 +652,7 @@ class ScreenRecorder: ObservableObject {
// Increase the depth of the frame queue to ensure high fps at the expense of increasing
// the memory footprint of WindowServer.
streamConfig.queueDepth = 15
streamConfig.backgroundColor = CGColor.clear

return streamConfig
}
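Two things happen in this file: the new `@Published` flag mirrors itself into the capture engine's stream output via `didSet`, and the stream configuration now requests a clear background, a deeper frame queue, and (on macOS 14+) `ignoreGlobalClipDisplay`. A rough sketch of both patterns, with a simplified stand-in for the engine's stream output:

```swift
import Combine
import ScreenCaptureKit

final class RecorderModel: ObservableObject {
    // Simplified stand-in for CaptureEngineStreamOutput.
    final class StreamOutput { var virtualCameraIsActive = false }
    let streamOutput = StreamOutput()

    // Forward every change of the published flag into the capture pipeline.
    @Published var virtualCameraIsActive = true {
        didSet { streamOutput.virtualCameraIsActive = virtualCameraIsActive }
    }

    func makeStreamConfiguration() -> SCStreamConfiguration {
        let config = SCStreamConfiguration()
        config.queueDepth = 15                     // deeper queue, at the cost of WindowServer memory
        config.backgroundColor = CGColor.clear     // transparent instead of the default background
        if #available(macOS 14.0, *) {
            config.ignoreGlobalClipDisplay = true  // macOS 14+ only
        }
        return config
    }
}
```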
4 changes: 2 additions & 2 deletions CaptureSample/Views/CaptureConfigurationOverlay.swift
@@ -23,10 +23,10 @@ struct CaptureConfigurationOverlay: View {
VStack {
//GroupBox {
Picker("Capture", selection: $screenRecorder.applicationFilterIsInclusive) {
Text("Inclusive")
Text("Exclude:")
.tag(true)
.font(.title)
Text("Exclusive")
Text("Include:")
.tag(false)
.font(.title)
}
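The relabeled picker drives the `applicationFilterIsInclusive` flag directly by tagging each option with a `Bool`. A small sketch of that binding pattern, using local `@State` instead of the app's `ScreenRecorder`:

```swift
import SwiftUI

struct FilterModePicker: View {
    // Local state for the sketch; the app binds to screenRecorder.applicationFilterIsInclusive.
    @State private var filterIsInclusive = true

    var body: some View {
        Picker("Capture", selection: $filterIsInclusive) {
            Text("Exclude:").tag(true)   // selecting this sets the Bool to true
            Text("Include:").tag(false)  // selecting this sets the Bool to false
        }
    }
}
```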
4 changes: 3 additions & 1 deletion CaptureSample/Views/CapturePreview.swift
@@ -44,13 +44,14 @@ struct CaptureSplitViewPreview: NSViewRepresentable {
init() {
//contentLayer.contentsGravity = .resizeAspect
contentLayer.contentsGravity = .topLeft
contentLayer.backgroundColor = CGColor.clear
encodedContentLayer.contentsGravity = .topRight
self.renderer = encodedContentLayer
}

func makeNSView(context: Context) -> CaptureSplitViewPreview {
//CaptureVideoPreview(layer: contentLayer)
CaptureSplitViewPreview(firstLayer: contentLayer, secondLayer: encodedContentLayer)
return CaptureSplitViewPreview(firstLayer: contentLayer, secondLayer: encodedContentLayer)
}

// Called by ScreenRecorder as it receives new video frames.
@@ -79,6 +80,7 @@ struct CaptureSplitViewPreview: NSViewRepresentable {
init(firstLayer: CALayer, secondLayer: CALayer) {
self.firstView = CaptureVideoPreview(layer: firstLayer)
self.secondView = CaptureVideoPreview(layer: secondLayer)
self.firstView.layer?.backgroundColor = CGColor.clear
super.init(frame: .zero)
self.isVertical = true
self.addSubview(self.firstView)
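The preview layers now get explicit clear backgrounds and `makeNSView` returns the constructed view explicitly. A minimal sketch of an `NSViewRepresentable` that hosts a transparent `CALayer` (type and property names here are illustrative, not the app's real ones):

```swift
import SwiftUI
import AppKit

struct LayerPreview: NSViewRepresentable {
    let contentLayer = CALayer()

    init() {
        contentLayer.contentsGravity = .topLeft
        contentLayer.backgroundColor = CGColor.clear   // transparent behind the video frames
    }

    func makeNSView(context: Context) -> NSView {
        let view = NSView(frame: .zero)
        view.layer = contentLayer   // layer-hosting: supply the custom layer first...
        view.wantsLayer = true      // ...then opt the view into layer hosting
        return view
    }

    func updateNSView(_ nsView: NSView, context: Context) {
        // Nothing to update; frames are pushed into the layer elsewhere.
    }
}
```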
@@ -41,25 +41,25 @@ struct AppControlsConfigurationView: View {
}
Spacer(minLength: 10)
HStack {
if !screenRecorder.isRecording {
if screenRecorder.usesReplayBuffer && screenRecorder.isRecording {
Button {
Task { await screenRecorder.record() }
Task { screenRecorder.saveReplayBuffer() }
} label: {
Text("Start Recording")
Text("Save Replay Buffer")
}
.controlSize(.large)
.buttonStyle(.borderedProminent)
.disabled(screenRecorder.isRecording || !screenRecorder.isRunning)
.disabled(!screenRecorder.isRecording || !screenRecorder.isRunning)
}
if screenRecorder.usesReplayBuffer && screenRecorder.isRecording {
if !screenRecorder.isRecording {
Button {
Task { screenRecorder.saveReplayBuffer() }
Task { await screenRecorder.record() }
} label: {
Text("Save Replay Buffer")
Text("Start Recording")
}
.controlSize(.large)
.buttonStyle(.borderedProminent)
.disabled(!screenRecorder.isRecording || !screenRecorder.isRunning)
.disabled(screenRecorder.isRecording || !screenRecorder.isRunning)
}
if screenRecorder.isRecording {
Button {
@@ -74,30 +74,19 @@
}
.frame(maxWidth: .infinity)
.padding(EdgeInsets(top: 0, leading: 0, bottom: 15, trailing: 0))
HStack {
if !screenRecorder.virtualCameraIsActive {
Button {
Task { await screenRecorder.uninstallExtension() }
Task { screenRecorder.virtualCameraIsActive = true }
} label: {
Text("Remove Extension")
Text("Start Virtual Camera")
}
.controlSize(.large)
.buttonStyle(.borderedProminent)
Button {
Task { screenRecorder.installExtension() }
} label: {
Text("Install Extension")
}
.controlSize(.large)
.buttonStyle(.borderedProminent)
}
if screenRecorder.virtualCameraIsActive {
Button {
Task { screenRecorder.testSetProperty() }
Task { screenRecorder.virtualCameraIsActive = false }
} label: {
Text("Test Set Property")
Text("Stop Virtual Camera")
}
.controlSize(.large)
.buttonStyle(.borderedProminent)
}
.frame(maxWidth: .infinity)
.padding(EdgeInsets(top: 0, leading: 0, bottom: 15, trailing: 0))
}
}
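With the install/uninstall buttons moved to the new Camera Extension menu, this view now just toggles the published `virtualCameraIsActive` flag. A condensed sketch of the one-of-two-buttons pattern, against a hypothetical observable model:

```swift
import SwiftUI

final class CameraToggleModel: ObservableObject {
    @Published var virtualCameraIsActive = false
}

struct VirtualCameraControls: View {
    @ObservedObject var model: CameraToggleModel

    var body: some View {
        HStack {
            if model.virtualCameraIsActive {
                Button("Stop Virtual Camera") { model.virtualCameraIsActive = false }
            } else {
                Button("Start Virtual Camera") { model.virtualCameraIsActive = true }
            }
        }
        .controlSize(.large)
        .buttonStyle(.borderedProminent)
        .frame(maxWidth: .infinity)
    }
}
```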
1 change: 1 addition & 0 deletions CaptureSample/Views/ContentView.swift
@@ -103,6 +103,7 @@ struct ContentView: View {
overlayOpacity = hover == true ? 1.0 : 0.0
}
}
.background(.white)
}
}
.navigationTitle("Record")
4 changes: 3 additions & 1 deletion README.md
@@ -21,9 +21,11 @@ Record is in active development and uses rolling releases. Download the most rec
<video src="https://github.com/jcm93/Record/assets/6864788/53d590c5-e4da-45e7-adf2-8b54d585175b" controls="controls" style="max-width: 730px;"></video>

# Roadmap
* fixup virtual camera, implement install flow for CMIO extension
* menu bar mode / run in background
* better test pattern
* stable builds / unbreak CI
* audio settings / audio metering / audio only
* advanced capture options (include some apps, not others)
* compositing?
* streaming?

7 changes: 6 additions & 1 deletion RecordCameraExtension/RecordCameraExtensionProvider.swift
@@ -233,7 +233,12 @@ class RecordCameraExtensionDeviceSource: NSObject, CMIOExtensionDeviceSource {
self.lastTimingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())
let output: CMIOExtensionScheduledOutput = CMIOExtensionScheduledOutput(sequenceNumber: seq, hostTimeInNanoseconds: UInt64(self.lastTimingInfo.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
if self._streamingCounter > 0 {
self._streamSource.stream.send(sbuf!, discontinuity: [], hostTimeInNanoseconds: UInt64(sbuf!.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
var newSbuf: CMSampleBuffer?
var timingInfo = CMSampleTimingInfo()
timingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())
var err: OSStatus = 0
err = CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sbuf!, sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleBufferOut: &newSbuf)
self._streamSource.stream.send(newSbuf!, discontinuity: [], hostTimeInNanoseconds: UInt64(newSbuf!.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
}
self._streamSink.stream.notifyScheduledOutputChanged(output)
if let surface = CVPixelBufferGetIOSurface(sbuf?.imageBuffer)?.takeUnretainedValue() {
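Rather than forwarding the sink's buffer with its original timestamps, the provider now copies it with a fresh host-clock presentation time before sending it to the stream. A stripped-down sketch of that retiming step (error handling simplified):

```swift
import CoreMedia

/// Returns a copy of `buffer` restamped with the current host time, or nil if the copy fails.
func retimedToHostClock(_ buffer: CMSampleBuffer) -> CMSampleBuffer? {
    var timingInfo = CMSampleTimingInfo()
    timingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())

    var copy: CMSampleBuffer?
    let status = CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault,
                                                       sampleBuffer: buffer,
                                                       sampleTimingEntryCount: 1,
                                                       sampleTimingArray: &timingInfo,
                                                       sampleBufferOut: &copy)
    return status == 0 ? copy : nil   // 0 == noErr
}
```

In the provider, the retimed copy is what gets passed to `stream.send(_:discontinuity:hostTimeInNanoseconds:)` in place of the original buffer.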
