Skip to content

Commit

Permalink
miscellaneous piled up changes; may break CI
Browse files Browse the repository at this point in the history
  • Loading branch information
jcm committed Dec 11, 2023
1 parent 1d30f52 commit 5e701b9
Show file tree
Hide file tree
Showing 8 changed files with 185 additions and 101 deletions.
14 changes: 11 additions & 3 deletions CaptureSample/CaptureEngine.swift
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,18 @@ class CaptureEngine: @unchecked Sendable {
powerMeter.processSilence()
}

func startRecording(options: Options) async throws {
/// Builds a fresh encoding session (preview/encode pipeline) without
/// starting a file recording. Replaces any encoder already attached to
/// the stream output.
func startEncodingSession(options: Options) async throws {
self.streamOutput.encoder = try await VTEncoder(options: options)
}

/// Starts a recording pass. Lazily constructs the encoder on first use;
/// otherwise re-activates the existing one.
/// NOTE(review): assumes `VTEncoder.init` leaves the encoder in the
/// recording-active state — confirm against VTEncoder's initializer.
func startRecording(options: Options) async throws {
    guard self.streamOutput.encoder != nil else {
        // No encoder yet: build the full encoding session from scratch.
        self.streamOutput.encoder = try await VTEncoder(options: options)
        return
    }
    // An encoder already exists (e.g. created by startEncodingSession):
    // just resume writing frames.
    self.streamOutput.encoder.startRecording()
}

func stopRecording() async throws {
try await self.streamOutput.stopReplayBuffer()
try await self.streamOutput.encoder.stopEncoding()
Expand Down Expand Up @@ -169,9 +177,9 @@ class CaptureEngineStreamOutput: NSObject, SCStreamOutput, SCStreamDelegate {
}
}

func stopReplayBuffer() {
/// Flushes and stops the encoder's replay buffer, forwarding any
/// failure to the registered error handler.
func stopReplayBuffer() async {
    do {
        try await self.encoder.videoSink.stopReplayBuffer()
    } catch {
        // Fix: was `self.errorHandler!(error)` — a force-unwrap that
        // crashes if no handler was registered. Optional chaining keeps
        // the same behavior when a handler exists and degrades
        // gracefully when it does not.
        self.errorHandler?(error)
    }
}
Expand Down
6 changes: 4 additions & 2 deletions CaptureSample/CaptureSample.entitlements
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,12 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.app-sandbox</key>
<key>com.apple.developer.system-extension.install</key>
<true/>
<key>com.apple.security.assets.movies.read-write</key>
<key>com.apple.security.app-sandbox</key>
<true/>
<key>com.apple.security.application-groups</key>
<array/>
<key>com.apple.security.files.user-selected.read-write</key>
<true/>
</dict>
Expand Down
7 changes: 5 additions & 2 deletions CaptureSample/Encoder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ enum EncoderError: Error {
case unknownFrameType
}

class VTEncoder: NSObject {
class VTEncoder {

var session: VTCompressionSession!
var decodeSession: VTDecompressionSession!
Expand All @@ -45,7 +45,6 @@ class VTEncoder: NSObject {
init?(options: Options) async throws {
self.destWidth = options.destWidth
self.destHeight = options.destHeight
super.init()
let sourceImageBufferAttributes = [kCVPixelBufferPixelFormatTypeKey: options.pixelFormat as CFNumber] as CFDictionary

let err = VTCompressionSessionCreate(allocator: kCFAllocatorDefault,
Expand Down Expand Up @@ -226,6 +225,10 @@ class VTEncoder: NSObject {
}
}

/// Re-activates the encoder's video sink so frames are written out
/// again (used when a recording restarts on an existing encoding
/// session instead of rebuilding the encoder).
func startRecording() {
self.videoSink.makeActive()
}

/// Decompression-session output callback: stashes the most recently
/// decoded image buffer on the sink for the encode-preview UI.
/// NOTE(review): `status` and `flags` are ignored, so a decode failure
/// silently replaces the preview buffer with nil — confirm intended.
func outputHandler(_ status: OSStatus, flags: VTDecodeInfoFlags, buffer: CVImageBuffer?, uhtime: CMTime, uhothertime: CMTime) {
self.videoSink.mostRecentImageBuffer = buffer
}
Expand Down
75 changes: 67 additions & 8 deletions CaptureSample/ScreenRecorder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ import Combine
import OSLog
import SwiftUI
import AVFoundation
import com_jcm_record_RecordVirtualCam
import SystemExtensions

/// A provider of audio levels from the captured samples.
class AudioLevelsProvider: ObservableObject {
Expand All @@ -19,9 +21,15 @@ class AudioLevelsProvider: ObservableObject {

@MainActor
class ScreenRecorder: ObservableObject {



private var extensionActivated = false

//MARK: event tap

//private var providerSource: RecordVirtualCamProviderSource!

var eventTap: RecordEventTap! = nil

private let logger = Logger.capture
Expand Down Expand Up @@ -136,6 +144,7 @@ class ScreenRecorder: ObservableObject {

@Published var isRunning = false
@Published var isRecording = false
@Published var isEncoding = false

@Published var captureWidth: Int = 0
@Published var captureHeight: Int = 0
Expand Down Expand Up @@ -188,6 +197,16 @@ class ScreenRecorder: ObservableObject {
didSet { updateEngine() }
}

// Bundle identifiers of apps the user chose to exclude from capture;
// any change rebuilds the stream's content filter via updateEngine().
// TODO(review): the print statements look like leftover debug output —
// remove or route through `logger` before shipping.
@Published var selectedApplications = Set<String>() {
willSet {
print("setting selected applications \(newValue)")
}
didSet {
print("set selected applications \(selectedApplications)")
updateEngine()
}
}

@AppStorage("excludeSelf") var isAppExcluded = true {
didSet { updateEngine() }
}
Expand Down Expand Up @@ -307,7 +326,7 @@ class ScreenRecorder: ObservableObject {
CaptureSplitViewPreview()
}()

private var availableApps = [SCRunningApplication]()
@Published var availableApps = [SCRunningApplication]()
@Published private(set) var availableDisplays = [SCDisplay]()
@Published private(set) var availableWindows = [SCWindow]()

Expand Down Expand Up @@ -424,17 +443,32 @@ class ScreenRecorder: ObservableObject {
isRunning = false
}

/// Starts an encode/preview session without recording to disk.
/// No-op while capture is stopped or a recording/encode is already
/// in flight. On failure, surfaces the error through the alert state.
func startEncodingSession() async {
    guard isRunning else { return }
    guard !isRecording else { return }
    guard !isEncoding else { return }
    self.options.logStart(logger)
    do {
        try await captureEngine.startEncodingSession(options: self.options)
        // Fix: the `!isEncoding` guard above was dead code because this
        // flag was never set anywhere on success.
        self.isEncoding = true
    } catch {
        // Fix: reset the flag this method owns. The original cleared
        // `isRecording`, which the guard already proved false.
        self.isEncoding = false
        self.errorText = error.localizedDescription
        self.isShowingError = true
    }
}

func record() async {
guard isRunning else { return }
guard !isRecording else { return }
self.options.logStart(logger)
guard self.filePath != "" else {
self.isRecording = false
self.errorText = "No output folder selected."
//todo add an alert
self.isShowingError = true
return
}
self.options.logStart(logger)
do {
try await captureEngine.startRecording(options: self.options)
self.isRecording = true
Expand Down Expand Up @@ -487,6 +521,13 @@ class ScreenRecorder: ObservableObject {
audioLevelsProvider.audioLevels = AudioLevels.zero
}*/

/// Tears down any in-progress recording, then restarts a bare encoding
/// session so the encode preview reflects the current options.
/// NOTE(review): stop/start are not awaited by the caller — a second
/// invocation could interleave with this Task; confirm acceptable.
func updateEncodePreview() {
Task {
await self.stopRecord()
await self.startEncodingSession()
}
}

/// - Tag: UpdateCaptureConfig
private func updateEngine() {
guard isRunning else { return }
Expand All @@ -503,8 +544,17 @@ class ScreenRecorder: ObservableObject {
}
self.eventTap?.callback = self.saveReplayBuffer
if self.showsEncodePreview {

self.updateEncodePreview()
}
/*if !self.extensionActivated {
let identifier = "com.example.apple-samplecode.CustomCamera.CameraExtension"
// Submit an activation request.
let activationRequest = OSSystemExtensionRequest.activationRequest(forExtensionWithIdentifier: identifier, queue: .main)
activationRequest.delegate = self
OSSystemExtensionManager.shared.submitRequest(activationRequest)
}*/
}

/// - Tag: UpdateFilter
Expand All @@ -513,13 +563,11 @@ class ScreenRecorder: ObservableObject {
switch captureType {
case .display:
guard let display = selectedDisplay else { fatalError("No display selected.") }
var excludedApps = [SCRunningApplication]()
var excludedApps = self.availableApps
// If a user chooses to exclude the app from the stream,
// exclude it by matching its bundle identifier.
if isAppExcluded {
excludedApps = availableApps.filter { app in
Bundle.main.bundleIdentifier == app.bundleIdentifier
}
excludedApps = availableApps.filter { app in
self.selectedApplications.contains(app.id)
}
// Create a content filter with excluded apps.
filter = SCContentFilter(display: display,
Expand All @@ -541,6 +589,12 @@ class ScreenRecorder: ObservableObject {
// Configure audio capture.
streamConfig.capturesAudio = isAudioCaptureEnabled
streamConfig.excludesCurrentProcessAudio = isAppAudioExcluded
if #available(macOS 14.0, *) {
//streamConfig.capturesShadowsOnly = true
//streamConfig.ignoreGlobalClipDisplay = true
} else {
// Fallback on earlier versions
}

// Configure the display content width and height.
if captureType == .display, let display = selectedDisplay {
Expand Down Expand Up @@ -643,3 +697,8 @@ extension SCDisplay {
"Display: \(width) x \(height)"
}
}
// Lets SCRunningApplication be used directly in SwiftUI lists and in
// the `selectedApplications` Set<String> of bundle identifiers.
// NOTE(review): bundleIdentifier can be the empty string for some
// processes, which would give several rows the same id — verify.
extension SCRunningApplication: Identifiable {
public var id: String {
self.bundleIdentifier
}
}
92 changes: 51 additions & 41 deletions CaptureSample/VideoSink.swift
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ public class VideoSink {
private let isRealTime: Bool
private let usesReplayBuffer: Bool
private let replayBufferDuration: Int
private var isActive: Bool

var accessingBookmarkURL = false

Expand Down Expand Up @@ -59,6 +60,11 @@ public class VideoSink {
self.audioReplayBuffer = ReplayBuffer(buffer: [], maxLengthInSeconds: Double(replayBufferDuration))
}
self.isStopping = false
self.isActive = true
}

/// Marks the sink active so appended frames are written to the output
/// file again (counterpart to whatever pauses writing when inactive).
public func makeActive() {
self.isActive = true
}

/// Appends a video frame to the destination movie file.
Expand Down Expand Up @@ -97,60 +103,64 @@ public class VideoSink {
}

func initializeAssetWriters() throws {
//pretty ugly still
logger.notice("Initializing file asset writers.")
let bookmarkedData = UserDefaults.standard.data(forKey: "mostRecentSinkURL")
var isStale = false
do {
let bookmarkedData = UserDefaults.standard.data(forKey: "mostRecentSinkURL")
var isStale = false
if bookmarkedData != nil {
//self.bookmarkedURL = try URL(resolvingBookmarkData: bookmarkedData!, bookmarkDataIsStale: &isStale)
self.bookmarkedURL = try URL(resolvingBookmarkData: bookmarkedData!, options: .withSecurityScope, relativeTo: nil, bookmarkDataIsStale: &isStale)
}
if bookmarkedURL?.path() == outputFolder.path() {
self.accessingBookmarkURL = true
bookmarkedURL?.startAccessingSecurityScopedResource()
}
let fileExtension = self.fileType == .mov ? "mov" : "mp4"
let sinkURL = outputFolder.appendingRecordFilename(fileExtension: fileExtension)
let bookmarkData = try outputFolder.bookmarkData(options: .withSecurityScope, includingResourceValuesForKeys: nil, relativeTo: nil)
UserDefaults.standard.setValue(bookmarkData, forKey: "mostRecentSinkURL")
assetWriter = try AVAssetWriter(outputURL: sinkURL, fileType: fileType)

let videoFormatDesc = try CMFormatDescription(videoCodecType: CMFormatDescription.MediaSubType(rawValue: codec), width: width, height: height)

assetWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: nil, sourceFormatHint: videoFormatDesc)
let audioFormatDescription = AudioStreamBasicDescription(mSampleRate: 48000.0, mFormatID: kAudioFormatLinearPCM, mFormatFlags: 0x29, mBytesPerPacket: 4, mFramesPerPacket: 1, mBytesPerFrame: 4, mChannelsPerFrame: 2, mBitsPerChannel: 32, mReserved: 0)
let outputSettings = [
AVFormatIDKey: UInt(kAudioFormatLinearPCM),
AVSampleRateKey: 48000,
AVNumberOfChannelsKey: 2,
//AVChannelLayoutKey: NSData(bytes:&channelLayout, length:MemoryLayout<AudioChannelLayout>.size),
AVLinearPCMBitDepthKey: 16,
AVLinearPCMIsNonInterleaved: false,
AVLinearPCMIsFloatKey: false,
AVLinearPCMIsBigEndianKey: false
] as [String : Any]
let cmFormat = try CMFormatDescription(audioStreamBasicDescription: audioFormatDescription)
assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings, sourceFormatHint: cmFormat)


assetWriterInput.expectsMediaDataInRealTime = true
assetWriterAudioInput.expectsMediaDataInRealTime = true

assetWriter.add(assetWriterInput)
assetWriter.add(assetWriterAudioInput)
guard assetWriter.startWriting() else {
throw assetWriter.error!
if bookmarkedURL?.path() == outputFolder.path() {
self.accessingBookmarkURL = true
bookmarkedURL?.startAccessingSecurityScopedResource()
}
}
} catch {
logger.fault("Critical error initializing asset writers: \(error, privacy: .public)")
logger.fault("Failed to create bookmark URL from serialized NSData with security scope: \(error, privacy: .public)")

}
let fileExtension = self.fileType == .mov ? "mov" : "mp4"
let sinkURL = outputFolder.appendingRecordFilename(fileExtension: fileExtension)
let bookmarkData = try outputFolder.bookmarkData(options: .withSecurityScope, includingResourceValuesForKeys: nil, relativeTo: nil)
UserDefaults.standard.setValue(bookmarkData, forKey: "mostRecentSinkURL")
assetWriter = try AVAssetWriter(outputURL: sinkURL, fileType: fileType)

let videoFormatDesc = try CMFormatDescription(videoCodecType: CMFormatDescription.MediaSubType(rawValue: codec), width: width, height: height)

assetWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: nil, sourceFormatHint: videoFormatDesc)
let audioFormatDescription = AudioStreamBasicDescription(mSampleRate: 48000.0, mFormatID: kAudioFormatLinearPCM, mFormatFlags: 0x29, mBytesPerPacket: 4, mFramesPerPacket: 1, mBytesPerFrame: 4, mChannelsPerFrame: 2, mBitsPerChannel: 32, mReserved: 0)
let outputSettings = [
AVFormatIDKey: UInt(kAudioFormatLinearPCM),
AVSampleRateKey: 48000,
AVNumberOfChannelsKey: 2,
//AVChannelLayoutKey: NSData(bytes:&channelLayout, length:MemoryLayout<AudioChannelLayout>.size),
AVLinearPCMBitDepthKey: 16,
AVLinearPCMIsNonInterleaved: false,
AVLinearPCMIsFloatKey: false,
AVLinearPCMIsBigEndianKey: false
] as [String : Any]
let cmFormat = try CMFormatDescription(audioStreamBasicDescription: audioFormatDescription)
assetWriterAudioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: outputSettings, sourceFormatHint: cmFormat)


assetWriterInput.expectsMediaDataInRealTime = true
assetWriterAudioInput.expectsMediaDataInRealTime = true

assetWriter.add(assetWriterInput)
assetWriter.add(assetWriterAudioInput)
guard assetWriter.startWriting() else {
throw assetWriter.error!
}
/*catch {
logger.fault("Failed to create bookmark URL from serialized NSData with security scope: \(error, privacy: .public)")
if self.accessingBookmarkURL {
self.bookmarkedURL?.stopAccessingSecurityScopedResource()
self.accessingBookmarkURL = false
}
self.assetWriter?.cancelWriting()
//this should be all the cleanup we need. everything else with `try`
//shouldn't have any side effects, unlike AVAssetWriter and security-scoped bookmark
}
}*/
}

public func sendAudioBuffer(_ buffer: CMSampleBuffer) {
Expand Down
3 changes: 1 addition & 2 deletions CaptureSample/Views/CapturePreview.swift
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ struct CaptureSplitViewPreview: NSViewRepresentable {
if let frame = frame.encodedFrame {
self.renderer.enqueue(frame)
}

//encodedContentLayer.contents = frame.encodedSurface
}

Expand All @@ -81,8 +82,6 @@ struct CaptureSplitViewPreview: NSViewRepresentable {
self.isVertical = true
self.addSubview(self.firstView)
self.addSubview(self.secondView)
//let scale = CGFloat(IOSurfaceGetHeight(firstView.layer!.contents as! IOSurface)) / self.frame.height
//firstView.layer?.contentsScale = scale
//secondView.layer?.contentsScale = scale
self.wantsLayer = true
if #available(macOS 14.0, *) {
Expand Down
Loading

0 comments on commit 5e701b9

Please sign in to comment.