
Commit 20ab871

Exposed camera properties to allow exposure locking, added transforms to movie outputs.

BradLarson committed Oct 16, 2018
1 parent d0f8f5d commit 20ab871
Showing 4 changed files with 28 additions and 4 deletions.
framework/Source/Mac/Camera.swift (2 changes: 1 addition & 1 deletion)
@@ -24,7 +24,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate {

     public let targets = TargetContainer()
     let captureSession:AVCaptureSession
-    let inputCamera:AVCaptureDevice
+    public let inputCamera:AVCaptureDevice
     let videoInput:AVCaptureDeviceInput!
     let videoOutput:AVCaptureVideoDataOutput!
     var microphone:AVCaptureDevice?
framework/Source/Mac/MovieOutput.swift (14 changes: 13 additions & 1 deletion)
@@ -22,6 +22,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
     private var previousAudioTime = kCMTimeNegativeInfinity
     private var encodingLiveVideo:Bool
 
+    var transform:CGAffineTransform {
+        get {
+            return assetWriterVideoInput.transform
+        }
+        set {
+            assetWriterVideoInput.transform = newValue
+        }
+    }
+
     public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws {
         self.size = size
         assetWriter = try AVAssetWriter(url:URL, fileType:fileType)
@@ -52,7 +61,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
         assetWriter.add(assetWriterVideoInput)
     }
 
-    public func startRecording() {
+    public func startRecording(transform:CGAffineTransform? = nil) {
+        if let transform = transform {
+            assetWriterVideoInput.transform = transform
+        }
         startTime = nil
         sharedImageProcessingContext.runOperationSynchronously{
             self.isRecording = self.assetWriter.startWriting()
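The optional transform argument lets a caller bake a rotation or flip into the movie file at the moment recording starts. Below is a minimal usage sketch, assuming a standard GPUImage camera-to-movie pipeline; the session preset, output path, and 90° rotation are illustrative, not part of this commit:

```swift
import AVFoundation
import GPUImage

// Hypothetical setup: capture from the default camera and write a QuickTime file.
let camera = try Camera(sessionPreset:.hd1280x720)
let movieURL = URL(fileURLWithPath:"/tmp/recording.mov") // illustrative output path
let movieOutput = try MovieOutput(URL:movieURL, size:Size(width:1280, height:720), liveVideo:true)
camera --> movieOutput

// Rotate the written movie 90° so players display it in portrait orientation.
movieOutput.startRecording(transform:CGAffineTransform(rotationAngle:.pi / 2))
camera.startCapture()
```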
framework/Source/iOS/Camera.swift (2 changes: 1 addition & 1 deletion)
@@ -66,7 +66,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate {
     public let targets = TargetContainer()
     public var delegate: CameraDelegate?
     public let captureSession:AVCaptureSession
-    let inputCamera:AVCaptureDevice!
+    public let inputCamera:AVCaptureDevice!
     let videoInput:AVCaptureDeviceInput!
     let videoOutput:AVCaptureVideoDataOutput!
     var microphone:AVCaptureDevice?
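Making inputCamera public is what enables the exposure locking named in the commit message: application code can now reach the underlying AVCaptureDevice directly. A minimal sketch using standard AVCaptureDevice calls (the camera instance and error handling are illustrative):

```swift
import AVFoundation
import GPUImage

// Hypothetical: camera is an already-configured GPUImage Camera.
let camera = try Camera(sessionPreset:.hd1280x720)
let device: AVCaptureDevice = camera.inputCamera

do {
    // The device must be locked before changing capture configuration.
    try device.lockForConfiguration()
    if device.isExposureModeSupported(.locked) {
        device.exposureMode = .locked // freeze exposure at its current value
    }
    device.unlockForConfiguration()
} catch {
    print("Could not lock camera configuration: \(error)")
}
```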
framework/Source/iOS/MovieOutput.swift (14 changes: 13 additions & 1 deletion)
@@ -26,6 +26,15 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
     var pixelBuffer:CVPixelBuffer? = nil
     var renderFramebuffer:Framebuffer!
 
+    var transform:CGAffineTransform {
+        get {
+            return assetWriterVideoInput.transform
+        }
+        set {
+            assetWriterVideoInput.transform = newValue
+        }
+    }
+
     public init(URL:Foundation.URL, size:Size, fileType:AVFileType = AVFileType.mov, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws {
         if sharedImageProcessingContext.supportsTextureCaches() {
             self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader
@@ -62,7 +71,10 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
         assetWriter.add(assetWriterVideoInput)
     }
 
-    public func startRecording() {
+    public func startRecording(transform:CGAffineTransform? = nil) {
+        if let transform = transform {
+            assetWriterVideoInput.transform = transform
+        }
         startTime = nil
         sharedImageProcessingContext.runOperationSynchronously{
             self.isRecording = self.assetWriter.startWriting()
