From c93fe5bd045abac5d49c1c033d1b4fef2dddb827 Mon Sep 17 00:00:00 2001 From: jcm Date: Thu, 25 Jan 2024 12:51:23 -0600 Subject: [PATCH] overlay app picker --- CaptureSample/CaptureEngine.swift | 2 + CaptureSample/RecordCameraStreamSink.swift | 52 ++- CaptureSample/ScreenRecorder.swift | 10 +- .../Views/CaptureConfigurationOverlay.swift | 107 ++++++ .../AudioConfigurationView.swift | 10 +- .../ConfigurationSubViewModifier.swift | 6 +- .../EncoderConfigurationView.swift | 204 ++++++------ .../OutputConfigurationView.swift | 26 +- .../VideoCaptureConfigurationView.swift | 311 +++++++++--------- CaptureSample/Views/ContentView.swift | 16 + CaptureSample/Views/MaterialView.swift | 13 + Record.xcodeproj/project.pbxproj | 4 + .../RecordCameraExtensionProvider.swift | 30 +- 13 files changed, 504 insertions(+), 287 deletions(-) create mode 100644 CaptureSample/Views/CaptureConfigurationOverlay.swift diff --git a/CaptureSample/CaptureEngine.swift b/CaptureSample/CaptureEngine.swift index 27e65cf..4e224bb 100644 --- a/CaptureSample/CaptureEngine.swift +++ b/CaptureSample/CaptureEngine.swift @@ -157,8 +157,10 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate { switch outputType { case .screen: if let frame = self.createFrame(for: sampleBuffer) { + IOSurfaceLock(frame.surface!, [], nil) self.capturedFrameHandler?(frame) self.sink.enqueue(frame.surface!) + IOSurfaceUnlock(frame.surface!, [], nil) } case .audio: if let copy = self.createAudioFrame(for: sampleBuffer) { diff --git a/CaptureSample/RecordCameraStreamSink.swift b/CaptureSample/RecordCameraStreamSink.swift index 79c2946..adbd6bf 100644 --- a/CaptureSample/RecordCameraStreamSink.swift +++ b/CaptureSample/RecordCameraStreamSink.swift @@ -29,8 +29,8 @@ class RecordCameraStreamSink: NSObject { private var timer: Timer? private var propTimer: Timer? - func getJustProperty(streamId: CMIOStreamID) -> String? 
{ - let selector = "just".convertedToCMIOObjectPropertySelectorName() + func getTestProperty(streamId: CMIOStreamID) -> String? { + let selector = FourCharCode("just") var address = CMIOObjectPropertyAddress(selector, .global, .main) let exists = CMIOObjectHasProperty(streamId, &address) if exists { @@ -45,8 +45,8 @@ class RecordCameraStreamSink: NSObject { } } - func setJustProperty(streamId: CMIOStreamID, newValue: String) { - let selector = "just".convertedToCMIOObjectPropertySelectorName() + func setTestProperty(streamId: CMIOStreamID, newValue: String) { + let selector = FourCharCode("just") var address = CMIOObjectPropertyAddress(selector, .global, .main) let exists = CMIOObjectHasProperty(streamId, &address) if exists { @@ -59,6 +59,7 @@ class RecordCameraStreamSink: NSObject { CMIOObjectGetPropertyDataSize(streamId, &address, 0, nil, &dataSize) var newName: CFString = newValue as NSString CMIOObjectSetPropertyData(streamId, &address, 0, nil, dataSize, &newName) + print("setting test property") } } @@ -179,6 +180,7 @@ class RecordCameraStreamSink: NSObject { if let sbuf = sbuf { let pointerRef = UnsafeMutableRawPointer(Unmanaged.passRetained(sbuf).toOpaque()) CMSimpleQueueEnqueue(self.sinkQueue!, element: pointerRef) + self.setTestProperty(streamId: self.sourceStream!, newValue: "a") } } } else { @@ -188,6 +190,48 @@ class RecordCameraStreamSink: NSObject { } +extension FourCharCode: ExpressibleByStringLiteral { + + public init(stringLiteral value: StringLiteralType) { + var code: FourCharCode = 0 + // Value has to consist of 4 printable ASCII characters, e.g. '420v'. + // Note: This implementation does not enforce printable range (32-126) + if value.count == 4 && value.utf8.count == 4 { + for byte in value.utf8 { + code = code << 8 + FourCharCode(byte) + } + } + else { + print("FourCharCode: Can't initialize with '\(value)', only printable ASCII allowed. Setting to '????'.") + code = 0x3F3F3F3F // = '????' 
+ } + self = code + } + + public init(extendedGraphemeClusterLiteral value: String) { + self = FourCharCode(stringLiteral: value) + } + + public init(unicodeScalarLiteral value: String) { + self = FourCharCode(stringLiteral: value) + } + + public init(_ value: String) { + self = FourCharCode(stringLiteral: value) + } + + public var string: String? { + let cString: [CChar] = [ + CChar(self >> 24 & 0xFF), + CChar(self >> 16 & 0xFF), + CChar(self >> 8 & 0xFF), + CChar(self & 0xFF), + 0 + ] + return String(cString: cString) + } +} + extension String { func convertedToCMIOObjectPropertySelectorName() -> CMIOObjectPropertySelector { let noName: CMIOObjectPropertySelector = 0 diff --git a/CaptureSample/ScreenRecorder.swift b/CaptureSample/ScreenRecorder.swift index 2caa181..cded3a8 100644 --- a/CaptureSample/ScreenRecorder.swift +++ b/CaptureSample/ScreenRecorder.swift @@ -11,7 +11,6 @@ import Combine import OSLog import SwiftUI import AVFoundation -import com_jcm_record_RecordVirtualCam import SystemExtensions /// A provider of audio levels from the captured samples. @@ -199,7 +198,7 @@ class ScreenRecorder: ObservableObject { didSet { updateEngine() } } - @Published var selectedApplications = Set() { + @Published var selectedApplications = Set() { willSet { print("setting selected applications \(newValue)") } @@ -209,6 +208,8 @@ class ScreenRecorder: ObservableObject { } } + @State var bindingBool = [String : Binding]() + @AppStorage("excludeSelf") var isAppExcluded = true { didSet { updateEngine() } } @@ -574,7 +575,7 @@ class ScreenRecorder: ObservableObject { } func testSetProperty() { - print("poop") + print("a") } /// - Tag: UpdateFilter @@ -587,7 +588,7 @@ class ScreenRecorder: ObservableObject { // If a user chooses to exclude the app from the stream, // exclude it by matching its bundle identifier. 
excludedApps = availableApps.filter { app in - self.selectedApplications.contains(app.id) + !self.selectedApplications.contains(app) } // Create a content filter with excluded apps. filter = SCContentFilter(display: display, @@ -674,6 +675,7 @@ class ScreenRecorder: ObservableObject { availableWindows = windows } availableApps = availableContent.applications + selectedApplications = Set(availableApps.filter({UserDefaults.standard.bool(forKey: $0.bundleIdentifier) == true})) if selectedDisplay == nil { selectedDisplay = availableDisplays.first diff --git a/CaptureSample/Views/CaptureConfigurationOverlay.swift b/CaptureSample/Views/CaptureConfigurationOverlay.swift new file mode 100644 index 0000000..c102f8d --- /dev/null +++ b/CaptureSample/Views/CaptureConfigurationOverlay.swift @@ -0,0 +1,107 @@ +// +// CaptureConfigurationOverlay.swift +// Record +// +// Created by John Moody on 1/21/24. +// Copyright © 2024 jcm. All rights reserved. +// + +import SwiftUI +import ScreenCaptureKit + +struct CaptureConfigurationOverlay: View { + @ObservedObject var screenRecorder: ScreenRecorder + + var availableApps = [SCRunningApplication]() + + var columns = [GridItem(.flexible(minimum: 100, maximum: 200)), GridItem(.flexible(minimum: 100, maximum: 200))] + + var body: some View { + + switch screenRecorder.captureType { + case .display: + GroupBox { + LazyVGrid(columns: columns) { + ForEach(screenRecorder.availableApps, id: \.self) { app in + VStack { + HStack { + Toggle("butt", isOn: Binding( get: { + return screenRecorder.selectedApplications.contains(app) + }, set: { isOn in + if isOn { screenRecorder.selectedApplications.insert(app) } + else { screenRecorder.selectedApplications.remove(app) } + UserDefaults.standard.setValue(isOn, forKey: app.bundleIdentifier) + })) + .controlSize(.large) + Text(app.applicationName) + .font(.title2) + .tag(app) + .fontWeight(.regular) + .opacity(0.8) + Spacer(minLength: 1) + Rectangle() + .fill(.quinary) + //.padding(EdgeInsets(top: -20, 
leading: 0, bottom: -20, trailing: 0)) + .frame(width: 1, height: 200) + } + .frame(height: 25) + Rectangle() + .fill(.quinary) + .frame(width: 1000, height: 1) + //.padding(EdgeInsets(top: 0, leading: -20, bottom: 0, trailing: -20)) + .gridCellColumns(2) + } + } + } + .padding(EdgeInsets(top: 20, leading: 0, bottom: 20, trailing: -32)) + } + /*Grid { + List(screenRecorder.availableApps, selection: $screenRecorder.selectedApplications) { app in + HStack { + Toggle("butt", isOn: Binding( get: { + return screenRecorder.selectedApplications.contains(app) + }, set: { isOn in + if isOn { screenRecorder.selectedApplications.insert(app) } + else { screenRecorder.selectedApplications.remove(app) } + })) + .controlSize(.large) + Text(app.applicationName) + .font(.title2) + .frame(height: 30) + .tag(app) + } + } + }*/ + .frame(width: 440) + .labelsHidden() + .background(OverlayMaterialView()) + .cornerRadius(20.0) + //.padding(EdgeInsets(top: 50, leading: 0, bottom: 50, trailing: 0)) + //.opacity(0.6) + + + + + case .window: + Picker("Window", selection: $screenRecorder.selectedWindow) { + ForEach(screenRecorder.availableWindows, id: \.self) { window in + Text(window.displayName) + .tag(SCWindow?.some(window)) + } + } + .onHover(perform: { hovering in + Task { + await self.screenRecorder.refreshAvailableContent() + } + }) + } + + } +} + +struct ApplicationProxy: Identifiable { + var id: ObjectIdentifier + + var isToggled = false + var application: SCRunningApplication +} diff --git a/CaptureSample/Views/Configuration View/AudioConfigurationView.swift b/CaptureSample/Views/Configuration View/AudioConfigurationView.swift index bea55dc..ff45c40 100644 --- a/CaptureSample/Views/Configuration View/AudioConfigurationView.swift +++ b/CaptureSample/Views/Configuration View/AudioConfigurationView.swift @@ -11,10 +11,12 @@ import SwiftUI struct AudioConfigurationView: View { @ObservedObject var screenRecorder: ScreenRecorder var body: some View { - VStack(alignment: .trailing) { - 
Toggle("Capture audio", isOn: $screenRecorder.isAudioCaptureEnabled) - .padding(EdgeInsets(top: 0, leading: 48, bottom: 0, trailing: 0)) - .controlSize(.small) + GroupBox { + VStack(alignment: .imageTitleAlignmentGuide) { + Toggle("Capture audio", isOn: $screenRecorder.isAudioCaptureEnabled) + .padding(EdgeInsets(top: 0, leading: 48, bottom: 0, trailing: 0)) + .controlSize(.small) + } } .modifier(ConfigurationSubViewStyle()) } diff --git a/CaptureSample/Views/Configuration View/ConfigurationSubViewModifier.swift b/CaptureSample/Views/Configuration View/ConfigurationSubViewModifier.swift index 47e0cd1..7bd49ca 100644 --- a/CaptureSample/Views/Configuration View/ConfigurationSubViewModifier.swift +++ b/CaptureSample/Views/Configuration View/ConfigurationSubViewModifier.swift @@ -15,10 +15,10 @@ struct ConfigurationSubViewStyle: ViewModifier { .frame(width: 260) .padding(EdgeInsets(top: 13, leading: 15, bottom: 13, trailing: 15)) .controlSize(.small) - .background(.quinary, in: RoundedRectangle(cornerRadius: 5)) - .overlay( + //.background(.quinary, in: RoundedRectangle(cornerRadius: 5)) + /*.overlay( RoundedRectangle(cornerRadius: 5) .stroke(Color(.quinaryLabel), lineWidth: 1) - ) + )*/ } } diff --git a/CaptureSample/Views/Configuration View/Encoder Configuration View/EncoderConfigurationView.swift b/CaptureSample/Views/Configuration View/Encoder Configuration View/EncoderConfigurationView.swift index 5112332..9dc0d68 100644 --- a/CaptureSample/Views/Configuration View/Encoder Configuration View/EncoderConfigurationView.swift +++ b/CaptureSample/Views/Configuration View/Encoder Configuration View/EncoderConfigurationView.swift @@ -14,61 +14,38 @@ struct EncoderConfigurationView: View { @State var currentTab: Int = 0 var body: some View { - VStack(alignment: .imageTitleAlignmentGuide) { - Group { - HStack { - Text("Codec:") - Picker("Codec", selection: $screenRecorder.encoderSetting) { - ForEach(EncoderSetting.allCases, id: \.self) { format in - 
Text(format.stringValue()) - .tag(format) + GroupBox { + VStack(alignment: .imageTitleAlignmentGuide) { + Group { + HStack { + Text("Codec:") + Picker("Codec", selection: $screenRecorder.encoderSetting) { + ForEach(EncoderSetting.allCases, id: \.self) { format in + Text(format.stringValue()) + .tag(format) + } + } + //.pickerStyle(.radioGroup) + .frame(width: 150) + .horizontalRadioGroupLayout() + .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] } } - //.pickerStyle(.radioGroup) - .frame(width: 150) - .horizontalRadioGroupLayout() - .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } + } + .controlSize(.small) + .labelsHidden() - } - .controlSize(.small) - .labelsHidden() - - Group { - HStack { - Text("Container:") - Picker("Container", selection: $screenRecorder.containerSetting) { - Text(".mp4") - .tag(ContainerSetting.mp4) - Text(".mov") - .tag(ContainerSetting.mov) - } - .frame(width: 150) - //.pickerStyle(.radioGroup) - .horizontalRadioGroupLayout() - .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - } - } - .controlSize(.small) - .labelsHidden() - if (self.screenRecorder.encoderSetting == .H264 || self.screenRecorder.encoderSetting == .H265) { Group { HStack { - Spacer(minLength: 20) - Text("Rate Control:") - Picker("Rate Control", selection: $screenRecorder.rateControlSetting) { - Text("CBR") - .tag(RateControlSetting.cbr) - Text("ABR") - .tag(RateControlSetting.abr) - Text("CRF") - .tag(RateControlSetting.crf) + Text("Container:") + Picker("Container", selection: $screenRecorder.containerSetting) { + Text(".mp4") + .tag(ContainerSetting.mp4) + Text(".mov") + .tag(ContainerSetting.mov) } .frame(width: 150) //.pickerStyle(.radioGroup) @@ -81,79 +58,104 @@ struct 
EncoderConfigurationView: View { } .controlSize(.small) .labelsHidden() - - - if (screenRecorder.rateControlSetting != .crf) { + if (self.screenRecorder.encoderSetting == .H264 || self.screenRecorder.encoderSetting == .H265) { Group { HStack { - Text("Bitrate:") + Spacer(minLength: 20) + Text("Rate Control:") + Picker("Rate Control", selection: $screenRecorder.rateControlSetting) { + Text("CBR") + .tag(RateControlSetting.cbr) + Text("ABR") + .tag(RateControlSetting.abr) + Text("CRF") + .tag(RateControlSetting.crf) + } + .frame(width: 150) + //.pickerStyle(.radioGroup) + .horizontalRadioGroupLayout() + .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + } + } + .controlSize(.small) + .labelsHidden() + + + if (screenRecorder.rateControlSetting != .crf) { + Group { + HStack { + Text("Bitrate:") + HStack { + TextField("", value: $screenRecorder.bitRate, format: .number) + .frame(width: 100) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + Text("kbps") + .frame(width: 40) + } + } + } + .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) + .controlSize(.small) + .labelsHidden() + } else { + Group { + //Text("Quality") + HStack { + Text("Quality:") + Slider( + value: $screenRecorder.crfValue, + in: 0.0...1.00 + ) { + Text("Values from 0 to 1.00") + } + .frame(width: 150)/*minimumValueLabel: { + Text("Poor") + } maximumValueLabel: { + Text("'Lossless'") + }*/ + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + } HStack { - TextField("", value: $screenRecorder.bitRate, format: .number) - .frame(width: 100) + Text("CRF:") + TextField("CRF", value: $screenRecorder.crfValue, format: .number) + .frame(width: 70) .alignmentGuide(.imageTitleAlignmentGuide) { dimension in dimension[.leading] } - Text("kbps") - .frame(width: 40) } + .frame(maxWidth: .infinity, alignment: .center) } + 
.padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) + .controlSize(.small) + .labelsHidden() } - .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) - .controlSize(.small) - .labelsHidden() } else { Group { - //Text("Quality") HStack { - Text("Quality:") - Slider( - value: $screenRecorder.crfValue, - in: 0.0...1.00 - ) { - Text("Values from 0 to 1.00") + Text("ProRes Setting:") + Picker("ProRes Setting", selection: $screenRecorder.proResSetting) { + ForEach(ProResSetting.allCases, id: \.self) { format in + Text(format.stringValue()) + .tag(format) + } } - .frame(width: 150)/*minimumValueLabel: { - Text("Poor") - } maximumValueLabel: { - Text("'Lossless'") - }*/ + .frame(width: 150) .alignmentGuide(.imageTitleAlignmentGuide) { dimension in dimension[.leading] } } - HStack { - Text("CRF:") - TextField("CRF", value: $screenRecorder.crfValue, format: .number) - .frame(width: 70) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - } - .frame(maxWidth: .infinity, alignment: .center) + .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) } - .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) .controlSize(.small) .labelsHidden() } - } else { - Group { - HStack { - Text("ProRes Setting:") - Picker("ProRes Setting", selection: $screenRecorder.proResSetting) { - ForEach(ProResSetting.allCases, id: \.self) { format in - Text(format.stringValue()) - .tag(format) - } - } - .frame(width: 150) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - } - .padding(EdgeInsets(top: 0, leading: 0, bottom: 4, trailing: 0)) - } - .controlSize(.small) - .labelsHidden() } } .modifier(ConfigurationSubViewStyle()) diff --git a/CaptureSample/Views/Configuration View/OutputConfigurationView.swift b/CaptureSample/Views/Configuration View/OutputConfigurationView.swift index 4789cec..9a2e390 100644 --- a/CaptureSample/Views/Configuration View/OutputConfigurationView.swift +++ 
b/CaptureSample/Views/Configuration View/OutputConfigurationView.swift @@ -19,20 +19,22 @@ struct OutputConfigurationView: View { var logger = Logger.application var body: some View { - VStack(alignment: .imageTitleAlignmentGuide) { - HStack { - Text("Output folder:") - .padding(EdgeInsets(top: 0, leading: 20, bottom: 0, trailing: 0)) + GroupBox { + VStack(alignment: .imageTitleAlignmentGuide) { HStack { - TextField("Path", text: $screenRecorder.filePath) - .disabled(true) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] + Text("Output folder:") + .padding(EdgeInsets(top: 0, leading: 20, bottom: 0, trailing: 0)) + HStack { + TextField("Path", text: $screenRecorder.filePath) + .disabled(true) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + Button { + Task { await self.selectFolder() } + } label: { + Image(systemName: "folder") } - Button { - Task { await self.selectFolder() } - } label: { - Image(systemName: "folder") } } } diff --git a/CaptureSample/Views/Configuration View/VideoCaptureConfigurationView.swift b/CaptureSample/Views/Configuration View/VideoCaptureConfigurationView.swift index 8ae80be..de0ee53 100644 --- a/CaptureSample/Views/Configuration View/VideoCaptureConfigurationView.swift +++ b/CaptureSample/Views/Configuration View/VideoCaptureConfigurationView.swift @@ -12,32 +12,51 @@ import ScreenCaptureKit struct VideoCaptureConfigurationView: View { @ObservedObject var screenRecorder: ScreenRecorder var body: some View { - VStack(alignment: .imageTitleAlignmentGuide) { - Group { - HStack { - Text("Capture Type:") - Picker("Capture", selection: $screenRecorder.captureType) { - Text("Display") - .tag(CaptureType.display) - Text("Window") - .tag(CaptureType.window) + GroupBox { + VStack(alignment: .imageTitleAlignmentGuide) { + Group { + HStack { + Text("Capture Type:") + Picker("Capture", selection: $screenRecorder.captureType) { + Text("Display") + .tag(CaptureType.display) + 
Text("Window") + .tag(CaptureType.window) + } + .pickerStyle(.radioGroup) + .horizontalRadioGroupLayout() + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } //.padding([.trailing]) } - .pickerStyle(.radioGroup) - .horizontalRadioGroupLayout() - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } //.padding([.trailing]) - } - .labelsHidden() - HStack { - Text("Screen Content:") - switch screenRecorder.captureType { - case .display: - VStack { - Picker("Display", selection: $screenRecorder.selectedDisplay) { - ForEach(screenRecorder.availableDisplays, id: \.self) { display in - Text(display.displayName) - .tag(SCDisplay?.some(display)) + .labelsHidden() + HStack { + Text("Screen Content:") + switch screenRecorder.captureType { + case .display: + VStack { + Picker("Display", selection: $screenRecorder.selectedDisplay) { + ForEach(screenRecorder.availableDisplays, id: \.self) { display in + Text(display.displayName) + .tag(SCDisplay?.some(display)) + } + } + .onHover(perform: { hovering in + Task { + await self.screenRecorder.refreshAvailableContent() + } + }) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + .frame(width: 150) + } + + case .window: + Picker("Window", selection: $screenRecorder.selectedWindow) { + ForEach(screenRecorder.availableWindows, id: \.self) { window in + Text(window.displayName) + .tag(SCWindow?.some(window)) } } .onHover(perform: { hovering in @@ -49,51 +68,14 @@ struct VideoCaptureConfigurationView: View { dimension[.leading] } .frame(width: 150) - - List(screenRecorder.availableApps, selection: $screenRecorder.selectedApplications) { - Text($0.applicationName) - } - .frame(height: 100) - } - - case .window: - Picker("Window", selection: $screenRecorder.selectedWindow) { - ForEach(screenRecorder.availableWindows, id: \.self) { window in - Text(window.displayName) - .tag(SCWindow?.some(window)) - } } - .onHover(perform: { hovering in - 
Task { - await self.screenRecorder.refreshAvailableContent() - } - }) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - .frame(width: 150) } - } - .labelsHidden() - Group { - HStack { - Text("Pixel Format:") - Picker("Pixel Format", selection: $screenRecorder.capturePixelFormat) { - ForEach(CapturePixelFormat.allCases, id: \.self) { format in - Text(format.stringValue()) - .tag(format) - } - } - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - .frame(width: 150) - } - if (self.screenRecorder.capturePixelFormat == .biplanarfull420f || self.screenRecorder.capturePixelFormat == .biplanarpartial420v) { + .labelsHidden() + Group { HStack { - Text("Transfer Function:") - Picker("Transfer Function", selection: $screenRecorder.captureYUVMatrix) { - ForEach(CaptureYUVMatrix.allCases, id: \.self) { format in + Text("Pixel Format:") + Picker("Pixel Format", selection: $screenRecorder.capturePixelFormat) { + ForEach(CapturePixelFormat.allCases, id: \.self) { format in Text(format.stringValue()) .tag(format) } @@ -103,116 +85,131 @@ struct VideoCaptureConfigurationView: View { } .frame(width: 150) } - } - HStack { - Text("Color Space:") - Picker("Color Space", selection: $screenRecorder.captureColorSpace) { - ForEach(CaptureColorSpace.allCases, id: \.self) { format in - Text(String(format.cfString())) - .tag(format) + if (self.screenRecorder.capturePixelFormat == .biplanarfull420f || self.screenRecorder.capturePixelFormat == .biplanarpartial420v) { + HStack { + Text("Transfer Function:") + Picker("Transfer Function", selection: $screenRecorder.captureYUVMatrix) { + ForEach(CaptureYUVMatrix.allCases, id: \.self) { format in + Text(format.stringValue()) + .tag(format) + } + } + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + .frame(width: 150) } } - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - .frame(width: 150) - } - } - 
.labelsHidden() - .controlSize(.small) - HStack { - Text("Dimensions:") - HStack { - TextField("Width", value: $screenRecorder.captureWidth, formatter: NumberFormatter()) - .disabled(true) + HStack { + Text("Color Space:") + Picker("Color Space", selection: $screenRecorder.captureColorSpace) { + ForEach(CaptureColorSpace.allCases, id: \.self) { format in + Text(String(format.cfString())) + .tag(format) + } + } .alignmentGuide(.imageTitleAlignmentGuide) { dimension in dimension[.leading] } - .fixedSize() - //.background(Color(red: 0.086, green: 0.086, blue: 0.086)) - } - HStack { - TextField("Height", value: $screenRecorder.captureHeight, formatter: NumberFormatter()) - .disabled(true) - .fixedSize() - //.background(Color(red: 0.086, green: 0.086, blue: 0.086)) + .frame(width: 150) + } } - } - .controlSize(.small) - .labelsHidden() - Group { + .labelsHidden() + .controlSize(.small) HStack { - Text("Scaled Dimensions:") + Text("Dimensions:") HStack { - TextField("Width", value: $screenRecorder.scaleWidth, formatter: NumberFormatter(), onEditingChanged: { value in - if !value { - self.screenRecorder.dimensionsChanged(width: screenRecorder.scaleWidth, height: 0) + TextField("Width", value: $screenRecorder.captureWidth, formatter: NumberFormatter()) + .disabled(true) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] } - }) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - .fixedSize() + .fixedSize() //.background(Color(red: 0.086, green: 0.086, blue: 0.086)) - .disabled(!screenRecorder.doesScale) } HStack { - TextField("Height", value: $screenRecorder.scaleHeight, formatter: NumberFormatter(), onEditingChanged: { value in - if !value { - self.screenRecorder.dimensionsChanged(width: 0, height: screenRecorder.scaleHeight) - } - }) - .fixedSize() + TextField("Height", value: $screenRecorder.captureHeight, formatter: NumberFormatter()) + .disabled(true) + .fixedSize() //.background(Color(red: 0.086, 
green: 0.086, blue: 0.086)) - .disabled(!screenRecorder.doesScale) } } - } - .controlSize(.small) - .labelsHidden() - Group { - //Text("Quality") - HStack { - Text("Target frame rate:") - TextField("Value", value: $screenRecorder.framesPerSecond, format: .number) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] + .controlSize(.small) + .labelsHidden() + Group { + HStack { + Text("Scaled Dimensions:") + HStack { + TextField("Width", value: $screenRecorder.scaleWidth, formatter: NumberFormatter(), onEditingChanged: { value in + if !value { + self.screenRecorder.dimensionsChanged(width: screenRecorder.scaleWidth, height: 0) + } + }) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + .fixedSize() + //.background(Color(red: 0.086, green: 0.086, blue: 0.086)) + .disabled(!screenRecorder.doesScale) } - } - } - .controlSize(.small) - .labelsHidden() - .disabled(!screenRecorder.usesTargetFPS) - .help("Establishes a target frame rate for ScreenCaptureKit. Even with a target frame rate, frame times and rates are variable with the screen content refresh interval. Encoded FPS may be lower if the screen content contains many idle (duplicated) frames.") - - Toggle("Use target frame rate", isOn: $screenRecorder.usesTargetFPS) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - .help("If not targeting a frame rate, the system will make as many frames available as it can, up to the maximum supported frame rate.") - - Toggle("Scale Output", isOn: $screenRecorder.doesScale) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - - Toggle("Exclude self from stream", isOn: $screenRecorder.isAppExcluded) - .disabled(screenRecorder.captureType == .window) - .onChange(of: screenRecorder.isAppExcluded) { _ in - // Capturing app audio is only possible when the sample is included in the stream. 
- // Ensure the audio stops playing if the user enables the "Exclude app from stream" checkbox. + HStack { + TextField("Height", value: $screenRecorder.scaleHeight, formatter: NumberFormatter(), onEditingChanged: { value in + if !value { + self.screenRecorder.dimensionsChanged(width: 0, height: screenRecorder.scaleHeight) + } + }) + .fixedSize() + //.background(Color(red: 0.086, green: 0.086, blue: 0.086)) + .disabled(!screenRecorder.doesScale) + } + } } .controlSize(.small) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] - } - Toggle("Live encode preview", isOn: $screenRecorder.showsEncodePreview) - .alignmentGuide(.imageTitleAlignmentGuide) { dimension in - dimension[.leading] + .labelsHidden() + Group { + //Text("Quality") + HStack { + Text("Target frame rate:") + TextField("Value", value: $screenRecorder.framesPerSecond, format: .number) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + } } + .controlSize(.small) + .labelsHidden() + .disabled(!screenRecorder.usesTargetFPS) + .help("Establishes a target frame rate for ScreenCaptureKit. Even with a target frame rate, frame times and rates are variable with the screen content refresh interval. 
Encoded FPS may be lower if the screen content contains many idle (duplicated) frames.") + + Toggle("Use target frame rate", isOn: $screenRecorder.usesTargetFPS) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + .help("If not targeting a frame rate, the system will make as many frames available as it can, up to the maximum supported frame rate.") + + Toggle("Scale Output", isOn: $screenRecorder.doesScale) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + + Toggle("Exclude self from stream", isOn: $screenRecorder.isAppExcluded) + .disabled(screenRecorder.captureType == .window) + .onChange(of: screenRecorder.isAppExcluded) { _ in + // Capturing app audio is only possible when the sample is included in the stream. + // Ensure the audio stops playing if the user enables the "Exclude app from stream" checkbox. + } + .controlSize(.small) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + Toggle("Live encode preview", isOn: $screenRecorder.showsEncodePreview) + .alignmentGuide(.imageTitleAlignmentGuide) { dimension in + dimension[.leading] + } + } + .padding(EdgeInsets(top: 4, leading: -2, bottom: 0, trailing: -2)) } - .padding(EdgeInsets(top: 4, leading: -2, bottom: 0, trailing: -2)) + .modifier(ConfigurationSubViewStyle()) } - .modifier(ConfigurationSubViewStyle()) } } diff --git a/CaptureSample/Views/ContentView.swift b/CaptureSample/Views/ContentView.swift index c26f320..af0d1ad 100644 --- a/CaptureSample/Views/ContentView.swift +++ b/CaptureSample/Views/ContentView.swift @@ -15,6 +15,8 @@ struct ContentView: View { @State var userStopped = false @State var disableInput = false @State var isUnauthorized = false + @State var mouseCursorHovering = false + @State var overlayOpacity = 1.0 @EnvironmentObject var screenRecorder: ScreenRecorder @@ -87,6 +89,20 @@ struct ContentView: View { } } } + .overlay(alignment: .leading) { + VStack() { + 
CaptureConfigurationOverlay(screenRecorder: self.screenRecorder) + Spacer() + } + .padding(EdgeInsets(top: 75, leading: 75, bottom: 0, trailing: 0)) + .opacity(overlayOpacity) + } + .onHover { hover in + mouseCursorHovering = hover + withAnimation(.easeInOut(duration: 0.25)) { + overlayOpacity = hover == true ? 1.0 : 0.0 + } + } } } .navigationTitle("Record") diff --git a/CaptureSample/Views/MaterialView.swift b/CaptureSample/Views/MaterialView.swift index 7b80bb1..e5e2b34 100644 --- a/CaptureSample/Views/MaterialView.swift +++ b/CaptureSample/Views/MaterialView.swift @@ -30,3 +30,16 @@ struct SheetMaterialView: NSViewRepresentable { func updateNSView(_ nsView: NSVisualEffectView, context: Context) {} } + +struct OverlayMaterialView: NSViewRepresentable { + @Environment(\.colorScheme) var colorScheme + + func makeNSView(context: Context) -> NSVisualEffectView { + let view = NSVisualEffectView() + view.material = .fullScreenUI + view.blendingMode = .withinWindow + return view + } + + func updateNSView(_ nsView: NSVisualEffectView, context: Context) {} +} diff --git a/Record.xcodeproj/project.pbxproj b/Record.xcodeproj/project.pbxproj index 3fc9cee..d8167d6 100644 --- a/Record.xcodeproj/project.pbxproj +++ b/Record.xcodeproj/project.pbxproj @@ -35,6 +35,7 @@ 0D6933082B34B1850019368E /* com.jcm.Record.RecordCameraExtension.systemextension in Embed System Extensions */ = {isa = PBXBuildFile; fileRef = 0D6932FE2B34B1850019368E /* com.jcm.Record.RecordCameraExtension.systemextension */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; 0D6933B02B35322B0019368E /* RecordCameraExtensionSink.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D6933AF2B35322B0019368E /* RecordCameraExtensionSink.swift */; }; 0D6933B22B3688FD0019368E /* RecordCameraStreamSink.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0D6933B12B3688FD0019368E /* RecordCameraStreamSink.swift */; }; + 0D85BC012B5E30B400EC424B /* CaptureConfigurationOverlay.swift in Sources */ = {isa = 
PBXBuildFile; fileRef = 0D85BC002B5E30B400EC424B /* CaptureConfigurationOverlay.swift */; }; 0DE5C82F2A95EDC60054AC23 /* PickerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0DE5C82E2A95EDC60054AC23 /* PickerView.swift */; }; C470F0812811C5CB00D29309 /* ScreenRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = C470F0802811C5CB00D29309 /* ScreenRecorder.swift */; }; C471DFFB2809F440001D24C9 /* PowerMeter.swift in Sources */ = {isa = PBXBuildFile; fileRef = C471DFF92809F440001D24C9 /* PowerMeter.swift */; }; @@ -105,6 +106,7 @@ 0D6933052B34B1850019368E /* RecordCameraExtension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = RecordCameraExtension.entitlements; sourceTree = ""; }; 0D6933AF2B35322B0019368E /* RecordCameraExtensionSink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordCameraExtensionSink.swift; sourceTree = ""; }; 0D6933B12B3688FD0019368E /* RecordCameraStreamSink.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordCameraStreamSink.swift; sourceTree = ""; }; + 0D85BC002B5E30B400EC424B /* CaptureConfigurationOverlay.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureConfigurationOverlay.swift; sourceTree = ""; }; 0DE5C82E2A95EDC60054AC23 /* PickerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PickerView.swift; sourceTree = ""; }; 0DF11FDD2A6ECBA500B45306 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.markdown; }; 7C6C99F1D4B6E3EBA3A7B7DF /* LICENSE.txt */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = LICENSE.txt; sourceTree = ""; }; @@ -203,6 +205,7 @@ 0D152E9E2AB9FF2700FEB9CE /* HotkeysPreferencesView.swift */, 0DE5C82E2A95EDC60054AC23 /* PickerView.swift */, 
C4B0DABB276BA4B50015082A /* ContentView.swift */, + 0D85BC002B5E30B400EC424B /* CaptureConfigurationOverlay.swift */, C4B0DABC276BA4B50015082A /* CapturePreview.swift */, 0D164AA32A97EACB003F2F7E /* Configuration View */, C471DFFD280A0968001D24C9 /* AudioLevelsView.swift */, @@ -418,6 +421,7 @@ 0D3065CA2A94667E00247474 /* TestPatternView.swift in Sources */, 0D164AA62A97EB30003F2F7E /* ColorTabItem.swift in Sources */, 0D164A9C2A97E783003F2F7E /* AudioConfigurationView.swift in Sources */, + 0D85BC012B5E30B400EC424B /* CaptureConfigurationOverlay.swift in Sources */, 0D152E9F2AB9FF2700FEB9CE /* HotkeysPreferencesView.swift in Sources */, 0D164AA02A97E894003F2F7E /* OutputConfigurationView.swift in Sources */, 0D164AAA2A97EC32003F2F7E /* ReplayBufferTabItem.swift in Sources */, diff --git a/RecordCameraExtension/RecordCameraExtensionProvider.swift b/RecordCameraExtension/RecordCameraExtensionProvider.swift index de6299e..6d123b0 100644 --- a/RecordCameraExtension/RecordCameraExtensionProvider.swift +++ b/RecordCameraExtension/RecordCameraExtensionProvider.swift @@ -6,7 +6,7 @@ import AppKit // MARK: - -let customExtensionPropertyTest: CMIOExtensionProperty = CMIOExtensionProperty(rawValue: "4cc_test_glob_0000") +let customExtensionPropertyTest: CMIOExtensionProperty = CMIOExtensionProperty(rawValue: "4cc_just_glob_0000") let kFrameRate: Int = 60 class RecordCameraExtensionDeviceSource: NSObject, CMIOExtensionDeviceSource { @@ -46,6 +46,8 @@ class RecordCameraExtensionDeviceSource: NSObject, CMIOExtensionDeviceSource { private var _whiteStripeIsAscending: Bool = false + private var client: CMIOExtensionClient! 
+    var stupidCount = 0
    
    init(localizedName: String) {
    
@@ -220,10 +222,31 @@ class RecordCameraExtensionDeviceSource: NSObject, CMIOExtensionDeviceSource {
         }
     }
     
+    func otherConsumeBuffer() {
+        guard let client = self.client else { return }
+        os_log("dequeue called")
+        self._streamSink.stream.consumeSampleBuffer(from: client) { sbuf, seq, discontinuity, hasMoreSampleBuffers, err in
+            if sbuf != nil {
+                self.lastTimingInfo.presentationTimeStamp = CMClockGetTime(CMClockGetHostTimeClock())
+                let output: CMIOExtensionScheduledOutput = CMIOExtensionScheduledOutput(sequenceNumber: seq, hostTimeInNanoseconds: UInt64(self.lastTimingInfo.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
+                os_log("streamingCounter is \(self._streamingCounter)")
+                if self._streamingCounter > 0 {
+                    os_log("sending buffer")
+                    self._streamSource.stream.send(sbuf!, discontinuity: [], hostTimeInNanoseconds: UInt64(sbuf!.presentationTimeStamp.seconds * Double(NSEC_PER_SEC)))
+                }
+                self._streamSink.stream.notifyScheduledOutputChanged(output)
+            }
+            if err != nil {
+                os_log("consumeSampleBuffer returned an error")
+                os_log("\(err!.localizedDescription)")
+            }
+        }
+    }
+
     func startStreamingSink(client: CMIOExtensionClient) {
         _streamingSinkCounter += 1
         self.sinkStarted = true
-        consumeBuffer(client)
+        self.client = client
     }
     
     func stopStreamingSink() {
@@ -303,6 +326,9 @@ class RecordCameraExtensionStreamSource: NSObject, CMIOExtensionStreamSource {
             if let newValue = state.value as? String {
                 self.test = newValue
                 os_log("test is \(self.test, privacy: .public)")
+                if let deviceSource = device.source as? RecordCameraExtensionDeviceSource {
+                    deviceSource.otherConsumeBuffer()
+                }
             }
         }
     }