feat: Create TrackTimeline to properly synchronize pause/resume/start/stop video/audio buffers (#2948)

* feat: Create `ClockSession`

* prepare?

* https

* feat: Create `TrackTimeline`

* Update TrackTimeline.swift

* Update TrackTimeline.swift

* Update TrackTimeline.swift

* Update TrackTimeline.swift

* Create `Track`

* finish recording?

* Fix typos

* chore: Lint

* fix: Start asset writer

* Log track type

* Update RecordingSession.swift

* feat: Add `addFollowingTrack`

* Update TrackTimeline.swift

* Update TrackTimeline.swift

* fix: Always update `firstTimestamp` and `lastTimestamp`

* Update TrackTimeline.swift

* fix: Fix actual duration

* Remove now unneeded following track logic

* fix: More logs

* Log better

* fix: Fix `isPaused` not respected

* fix: Subtract `pauseDuration`

* fix: Also support consecutive pauses

* fix: Use `CMTime`

* fix: Remove logs

* Create CMTime+invert.swift

* Create CMSampleBuffer+copyWithTimestampOffset.swift

* fix: Also account for open pauses

* feat: Shift off Buffer timestamp if needed

* fix: Lint

* fix: Fix adjusting timestamp

* fix: Fix pauses being left open

* fix: Properly offset buffer inside write block

* `inverted`

* Log timeline after finish

* Update CMSampleBuffer+copyWithTimestampOffset.swift
mrousavy committed Jun 10, 2024
1 parent bd955b5 commit d477246
Showing 11 changed files with 627 additions and 167 deletions.
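Several of the commits above ("fix: Use `CMTime`", "fix: Subtract `pauseDuration`", "fix: Also support consecutive pauses") move the pause accounting from second-based math to `CMTime`. As a small illustrative snippet (not part of the diff) of why rational `CMTime` arithmetic suits accumulating pause durations:

import CoreMedia

// Two consecutive pauses, expressed as rational CMTime values.
let firstPause = CMTime(value: 2100, timescale: 600) // 3.5s at the common 600-tick video timescale
let secondPause = CMTime(seconds: 1.25, preferredTimescale: 600) // 1.25s

// CMTime addition is exact, so no floating-point drift accumulates
// across many pause/resume cycles.
let totalPauseDuration = firstPause + secondPause
print(totalPauseDuration.seconds) // 4.75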
2 changes: 1 addition & 1 deletion package/VisionCamera.podspec
@@ -5,7 +5,7 @@ package = JSON.parse(File.read(File.join(__dir__, "package.json")))
 nodeModules = File.join(File.dirname(`cd "#{Pod::Config.instance.installation_root.to_s}" && node --print "require.resolve('react-native/package.json')"`), '..')
 
 Pod::UI.puts "[VisionCamera] Thank you for using VisionCamera ❤️"
-Pod::UI.puts "[VisionCamera] If you enjoy using VisionCamera, please consider sponsoring this project: http://github.com/sponsors/mrousavy"
+Pod::UI.puts "[VisionCamera] If you enjoy using VisionCamera, please consider sponsoring this project: https://github.com/sponsors/mrousavy"
 
 enableLocation = true
 if defined?($VCEnableLocation)
2 changes: 1 addition & 1 deletion package/android/build.gradle
@@ -71,7 +71,7 @@ static def findNodeModules(baseDir) {
 }
 
 logger.warn("[VisionCamera] Thank you for using VisionCamera ❤️")
-logger.warn("[VisionCamera] If you enjoy using VisionCamera, please consider sponsoring this project: http://github.com/sponsors/mrousavy")
+logger.warn("[VisionCamera] If you enjoy using VisionCamera, please consider sponsoring this project: https://github.com/sponsors/mrousavy")
 
 def nodeModules = findNodeModules(projectDir)
 logger.warn("[VisionCamera] node_modules found at $nodeModules")
1 change: 1 addition & 0 deletions package/ios/Core/CameraSession+Audio.swift
@@ -79,6 +79,7 @@ extension CameraSession {
       let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
       if options.contains(.shouldResume) {
         // Try resuming if possible
+        let isRecording = recordingSession != nil
         if isRecording {
           CameraQueues.audioQueue.async {
             VisionLogger.log(level: .info, message: "Resuming interrupted Audio Session...")
26 changes: 10 additions & 16 deletions package/ios/Core/CameraSession+Video.swift
@@ -47,7 +47,6 @@ extension CameraSession {
       }
     }
 
-    self.isRecording = false
     self.recordingSession = nil
 
     if self.didCancelRecording {
@@ -96,6 +95,7 @@
     let recordingSession = try RecordingSession(url: tempURL,
                                                 fileType: options.fileType,
                                                 metadataProvider: self.metadataProvider,
+                                                clock: self.captureSession.clock,
                                                 orientation: self.videoFileOrientation,
                                                 completion: onFinish)

@@ -115,20 +115,18 @@
 
       // Initialize audio asset writer
       let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: options.fileType)
-      recordingSession.initializeAudioWriter(withSettings: audioSettings,
-                                             format: audioInput.device.activeFormat.formatDescription)
+      try recordingSession.initializeAudioTrack(withSettings: audioSettings,
+                                                format: audioInput.device.activeFormat.formatDescription)
     }
 
     // Init Video
     let videoSettings = try videoOutput.recommendedVideoSettings(forOptions: options)
-    recordingSession.initializeVideoWriter(withSettings: videoSettings)
+    try recordingSession.initializeVideoTrack(withSettings: videoSettings)
 
     // start recording session with or without audio.
-    // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
-    try recordingSession.start(clock: self.captureSession.clock)
+    try recordingSession.start()
     self.didCancelRecording = false
     self.recordingSession = recordingSession
-    self.isRecording = true
 
     let end = DispatchTime.now()
     VisionLogger.log(level: .info, message: "RecordingSession started in \(Double(end.uptimeNanoseconds - start.uptimeNanoseconds) / 1_000_000)ms!")
@@ -149,9 +147,7 @@
         guard let recordingSession = self.recordingSession else {
           throw CameraError.capture(.noRecordingInProgress)
         }
-        // Use Video [AVCaptureSession] clock as a timebase - all other sessions (here; audio) have to be synced to that Clock.
-        recordingSession.stop(clock: self.captureSession.clock)
-        // There might be late frames, so maybe we need to still provide more Frames to the RecordingSession. Let's keep isRecording true for now.
+        recordingSession.stop()
         return nil
       }
     }
@@ -171,11 +167,10 @@
   func pauseRecording(promise: Promise) {
     CameraQueues.cameraQueue.async {
       withPromise(promise) {
-        guard self.recordingSession != nil else {
-          // there's no active recording!
+        guard let recordingSession = self.recordingSession else {
           throw CameraError.capture(.noRecordingInProgress)
         }
-        self.isRecording = false
+        recordingSession.pause()
         return nil
       }
     }
@@ -187,11 +182,10 @@
   func resumeRecording(promise: Promise) {
     CameraQueues.cameraQueue.async {
       withPromise(promise) {
-        guard self.recordingSession != nil else {
-          // there's no active recording!
+        guard let recordingSession = self.recordingSession else {
           throw CameraError.capture(.noRecordingInProgress)
         }
-        self.isRecording = true
+        recordingSession.resume()
         return nil
       }
     }
28 changes: 20 additions & 8 deletions package/ios/Core/CameraSession.swift
@@ -31,7 +31,6 @@ class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVC
   var metadataProvider = MetadataProvider()
   var recordingSession: RecordingSession?
   var didCancelRecording = false
-  var isRecording = false
   var orientationManager = OrientationManager()
 
   // Callbacks
@@ -276,9 +275,15 @@
   }
 
   private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation) {
-    if let recordingSession, isRecording {
-      // Write the Video Buffer to the .mov/.mp4 file, this is the first timestamp if nothing has been recorded yet
-      recordingSession.appendBuffer(sampleBuffer, clock: captureSession.clock, type: .video)
+    if let recordingSession {
+      do {
+        // Write the Video Buffer to the .mov/.mp4 file
+        try recordingSession.append(buffer: sampleBuffer, ofType: .video)
+      } catch let error as CameraError {
+        delegate?.onError(error)
+      } catch {
+        delegate?.onError(.capture(.unknown(message: error.localizedDescription)))
+      }
     }
 
     if let delegate {
@@ -289,10 +294,17 @@
   }
 
   private final func onAudioFrame(sampleBuffer: CMSampleBuffer) {
-    if let recordingSession, isRecording {
-      // Synchronize the Audio Buffer with the Video Session's time because it's two separate AVCaptureSessions
-      audioCaptureSession.synchronizeBuffer(sampleBuffer, toSession: captureSession)
-      recordingSession.appendBuffer(sampleBuffer, clock: audioCaptureSession.clock, type: .audio)
+    if let recordingSession {
+      do {
+        // Synchronize the Audio Buffer with the Video Session's time because it's two separate
+        // AVCaptureSessions, then write it to the .mov/.mp4 file
+        audioCaptureSession.synchronizeBuffer(sampleBuffer, toSession: captureSession)
+        try recordingSession.append(buffer: sampleBuffer, ofType: .audio)
+      } catch let error as CameraError {
+        delegate?.onError(error)
+      } catch {
+        delegate?.onError(.capture(.unknown(message: error.localizedDescription)))
+      }
     }
   }
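With the `isRecording` flag removed, pause/resume gating now happens inside `RecordingSession` and its per-track timelines. Those files (`RecordingSession.swift`, `TrackTimeline.swift`) are among the 11 changed files but are not rendered in this excerpt, so the following is only a rough sketch of the gating idea; `PauseAwareTrack` and its members are illustrative stand-ins, not the actual implementation. It uses the two extensions introduced below:

import AVFoundation
import CoreMedia

// Hedged sketch only: gate incoming buffers on pause state and re-time them.
final class PauseAwareTrack {
  private let input: AVAssetWriterInput
  private var pauseStartedAt: CMTime?
  private var totalPauseDuration: CMTime = .zero

  init(input: AVAssetWriterInput) {
    self.input = input
  }

  func pause(at time: CMTime) {
    // Ignore duplicate pauses; keep the earliest pause timestamp.
    if pauseStartedAt == nil {
      pauseStartedAt = time
    }
  }

  func resume(at time: CMTime) {
    if let start = pauseStartedAt {
      totalPauseDuration = totalPauseDuration + (time - start)
      pauseStartedAt = nil
    }
  }

  func append(_ buffer: CMSampleBuffer) throws {
    // Drop buffers that arrive while the track is paused.
    guard pauseStartedAt == nil else {
      return
    }
    // Shift the buffer back by the accumulated pause time so the written
    // file plays without gaps (uses the CMSampleBuffer/CMTime extensions below).
    let adjusted = try buffer.copyWithTimestampOffset(totalPauseDuration.inverted())
    if !input.append(adjusted) {
      // A real implementation would surface the asset writer's error here.
    }
  }
}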
77 changes: 77 additions & 0 deletions package/ios/Core/Extensions/CMSampleBuffer+copyWithTimestampOffset.swift
@@ -0,0 +1,77 @@
//
// CMSampleBuffer+copyWithTimestampOffset.swift
// VisionCamera
//
// Created by Marc Rousavy on 08.06.24.
//

import CoreMedia
import Foundation

// MARK: - TimestampAdjustmentError

enum TimestampAdjustmentError: Error {
  case failedToCopySampleBuffer(status: OSStatus)
  case sampleBufferCopyIsNil
  case failedToGetTimingInfo(status: OSStatus)
  case noTimingEntriesFound
}

private let kSampleBufferError_NoError: OSStatus = 0

extension CMSampleBuffer {
  private func getTimingInfos() throws -> [CMSampleTimingInfo] {
    var count: CMItemCount = 0
    let getCountStatus = CMSampleBufferGetSampleTimingInfoArray(self,
                                                                entryCount: 0,
                                                                arrayToFill: nil,
                                                                entriesNeededOut: &count)
    guard getCountStatus == kSampleBufferError_NoError else {
      throw TimestampAdjustmentError.failedToGetTimingInfo(status: getCountStatus)
    }

    let emptyTimingInfo = CMSampleTimingInfo(duration: .invalid,
                                             presentationTimeStamp: .invalid,
                                             decodeTimeStamp: .invalid)
    var infos = [CMSampleTimingInfo](repeating: emptyTimingInfo, count: count)
    let getArrayStatus = CMSampleBufferGetSampleTimingInfoArray(self,
                                                                entryCount: count,
                                                                arrayToFill: &infos,
                                                                entriesNeededOut: nil)
    guard getArrayStatus == kSampleBufferError_NoError else {
      throw TimestampAdjustmentError.failedToGetTimingInfo(status: getArrayStatus)
    }
    guard !infos.isEmpty else {
      throw TimestampAdjustmentError.noTimingEntriesFound
    }
    return infos
  }

  /**
   Returns a copy of the current CMSampleBuffer with the timing info adjusted by the given offset.
   The decode and presentation timestamps will be adjusted by the given offset (+).
   */
  func copyWithTimestampOffset(_ offset: CMTime) throws -> CMSampleBuffer {
    let timingInfos = try getTimingInfos()
    let newTimingInfos = timingInfos.map { timingInfo in
      return CMSampleTimingInfo(duration: timingInfo.duration,
                                presentationTimeStamp: timingInfo.presentationTimeStamp + offset,
                                decodeTimeStamp: timingInfo.decodeTimeStamp + offset)
    }

    var newBuffer: CMSampleBuffer?
    let copyResult = CMSampleBufferCreateCopyWithNewTiming(allocator: nil,
                                                           sampleBuffer: self,
                                                           sampleTimingEntryCount: newTimingInfos.count,
                                                           sampleTimingArray: newTimingInfos,
                                                           sampleBufferOut: &newBuffer)
    guard copyResult == kSampleBufferError_NoError else {
      throw TimestampAdjustmentError.failedToCopySampleBuffer(status: copyResult)
    }
    guard let newBuffer else {
      throw TimestampAdjustmentError.sampleBufferCopyIsNil
    }

    return newBuffer
  }
}
19 changes: 19 additions & 0 deletions package/ios/Core/Extensions/CMTime+inverted.swift
@@ -0,0 +1,19 @@
//
// CMTime+inverted.swift
// VisionCamera
//
// Created by Marc Rousavy on 08.06.24.
//

import CoreMedia
import Foundation

extension CMTime {
  /**
   Inverts the time.
   e.g. 3.5 seconds -> -3.5 seconds
   */
  func inverted() -> CMTime {
    return CMTime(value: value * -1, timescale: timescale)
  }
}
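Together with `copyWithTimestampOffset` above, this enables the "Shift off Buffer timestamp" commits: the recorder can move a buffer back by the total time spent paused. A minimal usage sketch, assuming `totalPauseDuration` has been accumulated from the track's pause/resume events:

import CoreMedia

// Shift a captured buffer back by the accumulated pause time, e.g. a 2s
// pause becomes a -2s offset on the presentation and decode timestamps.
func retime(_ buffer: CMSampleBuffer, totalPauseDuration: CMTime) throws -> CMSampleBuffer {
  return try buffer.copyWithTimestampOffset(totalPauseDuration.inverted())
}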
38 changes: 38 additions & 0 deletions package/ios/Core/Recording/TimelineEvent.swift
@@ -0,0 +1,38 @@
//
// TimelineEvent.swift
// VisionCamera
//
// Created by Marc Rousavy on 08.06.24.
//

import CoreMedia
import Foundation

/**
Represents an Event inside a track timeline.
Each event has a timestamp.
*/
struct TimelineEvent {
  let type: EventType
  let timestamp: CMTime

  var description: String {
    switch type {
    case .start:
      return "\(timestamp.seconds): ⏺️ Started"
    case .pause:
      return "\(timestamp.seconds): ⏸️ Paused"
    case .resume:
      return "\(timestamp.seconds): ▶️ Resumed"
    case .stop:
      return "\(timestamp.seconds): ⏹️ Stopped"
    }
  }

  enum EventType {
    case start
    case pause
    case resume
    case stop
  }
}
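`TrackTimeline` itself is not rendered on this page, but judging from the commit messages above ("fix: Subtract `pauseDuration`", "fix: Also support consecutive pauses", "fix: Also account for open pauses"), its duration math over these `TimelineEvent` values plausibly reduces to something like the following sketch. The type and helper names here are illustrative, not the actual implementation:

import CoreMedia

// Hedged sketch of pause-aware duration math over TimelineEvent values.
struct TimelineDurationSketch {
  var events: [TimelineEvent] = []

  /// Sum of all (pause -> resume) intervals; a pause that was never
  /// resumed (an "open" pause) runs until the given end timestamp.
  func pauseDuration(until end: CMTime) -> CMTime {
    var total = CMTime.zero
    var pauseStart: CMTime?
    for event in events {
      switch event.type {
      case .pause:
        // Ignore a second .pause while already paused (consecutive pauses).
        if pauseStart == nil {
          pauseStart = event.timestamp
        }
      case .resume:
        if let start = pauseStart {
          total = total + (event.timestamp - start)
          pauseStart = nil
        }
      case .start, .stop:
        break
      }
    }
    if let start = pauseStart {
      total = total + (end - start) // close an open pause at the end
    }
    return total
  }

  /// Actual recorded duration: (stop - start) minus the time spent paused.
  var duration: CMTime? {
    guard let start = events.first(where: { $0.type == .start })?.timestamp,
          let stop = events.last(where: { $0.type == .stop })?.timestamp else {
      return nil
    }
    return (stop - start) - pauseDuration(until: stop)
  }
}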