
Timestamps are now passed through the filters, so movies are now filtered and recorded at the correct framerate.
1 parent d1d3586 · commit a553d17a1fc8f96c131f169fb4c9afac7cac5aec · @BradLarson committed Apr 3, 2012
@@ -154,7 +154,6 @@ For example, an application that takes in live video from the camera, converts t
- *pixelSize*: The fractional pixel size, split into width and height components. The default is (0.05, 0.05)
- **GPUImageSobelEdgeDetectionFilter**: Sobel edge detection, with edges highlighted in white
- - *intensity*: The degree to which the original image colors are replaced by the detected edges (0.0 - 1.0, with 1.0 as the default)
- *imageWidthFactor*:
- *imageHeightFactor*: These parameters affect the visibility of the detected edges
@@ -4,7 +4,7 @@
@interface SimpleVideoFileFilterViewController : UIViewController
{
GPUImageMovie *movieFile;
- GPUImagePixellateFilter *pixellateFilter;
+ GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
}
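
Declaring the ivar as GPUImageOutput<GPUImageInput> * rather than as a concrete filter class lets the controller hold any filter that both consumes and produces frames, which is what allows the commented-out alternatives in the diff below. A minimal illustration (the sepia filter here is a hypothetical stand-in, not part of this commit):

// Any class that is both a GPUImageOutput and a GPUImageInput now fits:
filter = [[GPUImageSepiaFilter alloc] init];
[rotationFilter addTarget:filter];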
@@ -24,32 +24,40 @@ - (void)viewDidLoad
NSURL *sampleURL = [[NSBundle mainBundle] URLForResource:@"sample_iPod" withExtension:@"m4v"];
movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
- pixellateFilter = [[GPUImagePixellateFilter alloc] init];
+// filter = [[GPUImagePixellateFilter alloc] init];
+ filter = [[GPUImageAdaptiveThresholdFilter alloc] init];
GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
[movieFile addTarget:rotationFilter];
- [rotationFilter addTarget:pixellateFilter];
+ [rotationFilter addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
- [pixellateFilter addTarget:filterView];
+ [filter addTarget:filterView];
// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
- [pixellateFilter addTarget:movieWriter];
+ [filter addTarget:movieWriter];
[movieWriter startRecording];
[movieFile startProcessing];
+ [movieWriter setCompletionBlock:^{
+ [filter removeTarget:movieWriter];
+ [movieWriter finishRecording];
+ }];
+
+ /*
double delayInSeconds = 5.0;
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
- [pixellateFilter removeTarget:movieWriter];
+ [filter removeTarget:movieWriter];
[movieWriter finishRecording];
NSLog(@"Done recording");
});
+ */
}
- (void)viewDidUnload
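
The timed five-second teardown is retired here in favor of the writer's completion block, so recording stops exactly when the movie file finishes processing rather than after a guessed delay. Note the ordering inside the block: the writer is detached from the filter before finishRecording is called, so no further frames can arrive once the file is being closed.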
@@ -64,7 +72,7 @@ - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interface
- (IBAction)updatePixelWidth:(id)sender
{
- pixellateFilter.fractionalWidthOfAPixel = [(UISlider *)sender value];
+// pixellateFilter.fractionalWidthOfAPixel = [(UISlider *)sender value];
}
@end
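
Since the generically typed filter may not respond to fractionalWidthOfAPixel, the slider action above is simply commented out. A hedged alternative (my suggestion, not part of this commit) would gate the call on the filter's actual class:

- (IBAction)updatePixelWidth:(id)sender
{
    // Forward the slider value only when the active filter really is a pixellate filter
    if ([filter isKindOfClass:[GPUImagePixellateFilter class]])
    {
        [(GPUImagePixellateFilter *)filter setFractionalWidthOfAPixel:[(UISlider *)sender value]];
    }
}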
@@ -4,7 +4,7 @@
@interface SimpleVideoFilterViewController : UIViewController
{
GPUImageVideoCamera *videoCamera;
- GPUImageFilter *filter;
+ GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
}
@@ -24,8 +24,9 @@ - (void)viewDidLoad
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1920x1080 cameraPosition:AVCaptureDevicePositionBack];
-// filter = [[GPUImagePixellateFilter alloc] init];
- filter = [[GPUImageSketchFilter alloc] init];
+ filter = [[GPUImagePixellateFilter alloc] init];
+// filter = [[GPUImageSketchFilter alloc] init];
+// filter = [[GPUImageAdaptiveThresholdFilter alloc] init];
GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
[videoCamera addTarget:rotationFilter];
@@ -46,11 +47,12 @@ - (void)viewDidLoad
[movieWriter startRecording];
[videoCamera startCameraCapture];
- double delayInSeconds = 20.0;
+ double delayInSeconds = 10.0;
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
[filter removeTarget:movieWriter];
[movieWriter finishRecording];
+ NSLog(@"Movie completed");
});
}
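
Worth noting: a fixed dispatch_after delay remains workable here because a live camera delivers frames in real time; it was only the faster-than-real-time movie-file pipeline above that needed the completion block. The shorter 10-second delay simply makes the test recording quicker.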
@@ -43,8 +43,11 @@ - (id)init;
[self addFilter:adaptiveThresholdFilter];
[luminanceFilter addTarget:boxBlurFilter];
- [luminanceFilter addTarget:adaptiveThresholdFilter];
+
[boxBlurFilter addTarget:adaptiveThresholdFilter];
+ // To prevent double updating of this filter, disable updates from the sharp luminance image side
+ adaptiveThresholdFilter.shouldIgnoreUpdatesToThisTarget = YES;
+ [luminanceFilter addTarget:adaptiveThresholdFilter];
self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
self.terminalFilter = adaptiveThresholdFilter;
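
This reordering fixes a double render inside the filter group: the luminance output feeds the adaptive threshold filter both directly and through the box blur, so each source frame would otherwise trigger the two-input filter twice. A sketch of my reading of the graph built above (not a diagram from the commit):

//  luminanceFilter ──► boxBlurFilter ──► adaptiveThresholdFilter   (blurred input)
//         └───────────────────────────► adaptiveThresholdFilter   (sharp input, updates ignored)

Because shouldIgnoreUpdatesToThisTarget is set before the direct edge is added, only the blurred branch propagates newFrameReadyAtTime:, and the filter renders once per frame with both textures bound.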
@@ -53,7 +53,7 @@ - (id)init;
// inputTextureSize = croppedSize;
//}
//
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
static const GLfloat cropSquareVertices[] = {
-1.0f, -1.0f,
@@ -71,7 +71,7 @@ - (void)newFrameReady;
[self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates sourceTexture:filterSourceTexture];
- [self informTargetsAboutNewFrame];
+ [self informTargetsAboutNewFrameAtTime:frameTime];
}
@end
@@ -55,7 +55,7 @@ typedef struct GPUMatrix4x4 GPUMatrix4x4;
// Rendering
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
-- (void)informTargetsAboutNewFrame;
+- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
// Input parameters
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
@@ -207,7 +207,7 @@ - (void)createFilterFBOofSize:(CGSize)currentFBOSize;
glGenFramebuffers(1, &filterFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
- NSLog(@"Filter size: %f, %f for filter: %@", currentFBOSize.width, currentFBOSize.height, self);
+// NSLog(@"Filter size: %f, %f for filter: %@", currentFBOSize.width, currentFBOSize.height, self);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
@@ -281,12 +281,15 @@ - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
-- (void)informTargetsAboutNewFrame;
-{
+- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
+{
for (id<GPUImageInput> currentTarget in targets)
{
- [currentTarget setInputSize:inputTextureSize];
- [currentTarget newFrameReady];
+ if (currentTarget != targetToIgnoreForUpdates)
+ {
+ [currentTarget setInputSize:inputTextureSize];
+ [currentTarget newFrameReadyAtTime:frameTime];
+ }
}
}
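
The targetToIgnoreForUpdates ivar consulted here is never assigned in the visible diff; presumably addTarget: records any target that answers YES to shouldIgnoreUpdatesToThisTarget. A speculative sketch of that counterpart (an assumption — the real addTarget: is not shown):

- (void)addTarget:(id<GPUImageInput>)newTarget;
{
    // Assumed: remember targets that opt out of per-frame updates, such as
    // the doubly-fed adaptive threshold filter above
    if ([newTarget shouldIgnoreUpdatesToThisTarget])
    {
        targetToIgnoreForUpdates = newTarget;
    }
    [targets addObject:newTarget];
}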
@@ -370,7 +373,7 @@ - (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSStrin
#pragma mark -
#pragma mark GPUImageInput
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
@@ -387,7 +390,7 @@ - (void)newFrameReady;
};
[self renderToTextureWithVertices:squareVertices textureCoordinates:squareTextureCoordinates sourceTexture:filterSourceTexture];
- [self informTargetsAboutNewFrame];
+ [self informTargetsAboutNewFrameAtTime:frameTime];
}
- (NSInteger)nextAvailableTextureIndex;
@@ -76,11 +76,11 @@ - (void)removeAllTargets;
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
{
- [currentFilter newFrameReady];
+ [currentFilter newFrameReadyAtTime:frameTime];
}
}
@@ -1,17 +1,18 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
-#import <CoreMedia/CoreMedia.h>
#import "GPUImageOpenGLESContext.h"
#import "GPUImageOutput.h"
-@interface GPUImageMovie : GPUImageOutput {
- CVPixelBufferRef _currentBuffer;
-}
+@interface GPUImageMovie : GPUImageOutput
@property (readwrite, retain) NSURL *url;
--(id)initWithURL:(NSURL *)url;
--(void)startProcessing;
--(void)endProcessing;
+// Initialization and teardown
+- (id)initWithURL:(NSURL *)url;
+
+// Movie processing
+- (void)startProcessing;
+- (void)endProcessing;
+- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
@@ -4,7 +4,10 @@ @implementation GPUImageMovie
@synthesize url = _url;
-- (id)initWithURL:(NSURL *)url
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)initWithURL:(NSURL *)url;
{
if (!(self = [super init]))
{
@@ -16,7 +19,10 @@ - (id)initWithURL:(NSURL *)url
return self;
}
-- (void)startProcessing
+#pragma mark -
+#pragma mark Movie processing
+
+- (void)startProcessing;
{
// AVURLAsset to read input movie (i.e. mov recorded to local storage)
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
@@ -51,9 +57,9 @@ - (void)startProcessing
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
- CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
- _currentBuffer = pixelBuffer;
- [self performSelectorOnMainThread:@selector(processFrame) withObject:nil waitUntilDone:YES];
+ runOnMainQueueWithoutDeadlocking(^{
+ [self processMovieFrame:sampleBufferRef];
+ });
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
@@ -65,27 +71,30 @@ - (void)startProcessing
}];
}
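
runOnMainQueueWithoutDeadlocking() is not defined anywhere in this diff. A typical shape for such a helper (my assumption) executes the block inline when already on the main thread and falls back to a synchronous dispatch otherwise, avoiding the deadlock that a bare dispatch_sync to the main queue would cause from the main thread:

void runOnMainQueueWithoutDeadlocking(void (^block)(void))
{
    if ([NSThread isMainThread])
    {
        // Already on the main thread; dispatch_sync here would deadlock
        block();
    }
    else
    {
        dispatch_sync(dispatch_get_main_queue(), block);
    }
}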
-- (void)processFrame
+- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
+ CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
+ CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
+
// Upload to texture
- CVPixelBufferLockBaseAddress(_currentBuffer, 0);
- int bufferHeight = CVPixelBufferGetHeight(_currentBuffer);
- int bufferWidth = CVPixelBufferGetWidth(_currentBuffer);
+ CVPixelBufferLockBaseAddress(movieFrame, 0);
+ int bufferHeight = CVPixelBufferGetHeight(movieFrame);
+ int bufferWidth = CVPixelBufferGetWidth(movieFrame);
glBindTexture(GL_TEXTURE_2D, outputTexture);
// Using BGRA extension to pull in video frame data directly
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(_currentBuffer));
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(movieFrame));
CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight);
for (id<GPUImageInput> currentTarget in targets)
{
[currentTarget setInputSize:currentSize];
- [currentTarget newFrameReady];
+ [currentTarget newFrameReadyAtTime:currentSampleTime];
}
- CVPixelBufferUnlockBaseAddress(_currentBuffer, 0);
+ CVPixelBufferUnlockBaseAddress(movieFrame, 0);
}
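
The crucial addition in processMovieFrame: is its first line: the sample buffer's output presentation timestamp is captured and handed to every target, so downstream filters and the movie writer all see the source movie's own clock rather than wall-clock time. For inspection, the CMTime converts to seconds straightforwardly (illustrative only):

CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
NSLog(@"Source frame at %.3f s", CMTimeGetSeconds(currentSampleTime));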
-- (void)endProcessing
+- (void)endProcessing;
{
for (id<GPUImageInput> currentTarget in targets)
{
@@ -29,7 +29,7 @@ @interface GPUImageMovieWriter ()
GLubyte *frameData;
- NSDate *startTime;
+ CMTime startTime;
}
// Movie recording
@@ -61,6 +61,7 @@ - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
videoSize = newSize;
movieURL = newMovieURL;
+ startTime = kCMTimeInvalid;
[GPUImageOpenGLESContext useImageProcessingContext];
@@ -183,7 +184,7 @@ - (void)initializeMovie;
- (void)startRecording;
{
- startTime = [NSDate date];
+ startTime = kCMTimeInvalid;
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
}
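
Re-initializing startTime to kCMTimeInvalid (instead of stamping [NSDate date]) is the heart of this commit. Wall-clock timing breaks offline processing: an AVAssetReader decodes frames from disk much faster than real time, so NSDate-derived timestamps bunch the frames together and the recorded movie plays back at the wrong framerate. Deferring startTime until the first frame arrives lets the writer rebase the source's own timestamps. A worked example with made-up numbers:

// Hypothetical values: first source frame at 2.0 s, a later frame at 10.0 s
CMTime startTime = CMTimeMakeWithSeconds(2.0, 600);
CMTime frameTime = CMTimeMakeWithSeconds(10.0, 600);
CMTime written   = CMTimeSubtract(frameTime, startTime);
// written is 8.0 s: the frame lands 8 seconds into the output movie,
// no matter how quickly the reader actually decoded it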
@@ -313,13 +314,19 @@ - (void)renderAtInternalSize;
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
if (!assetWriterVideoInput.readyForMoreMediaData)
{
// NSLog(@"Had to drop a frame");
return;
}
+
+ if (CMTIME_IS_INVALID(frameTime))
+ {
+ // Drop frames forced by still images and other sources that carry no timestamps
+ return;
+ }
// Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
[GPUImageOpenGLESContext useImageProcessingContext];
@@ -349,15 +356,23 @@ - (void)newFrameReady;
}
// May need to add a check here, because if two consecutive times with the same value are added to the movie, it aborts recording
- CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
- if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:currentTime])
+// CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
+
+ if (CMTIME_IS_INVALID(startTime))
{
- NSLog(@"Problem appending pixel buffer at time: %lld", currentTime.value);
+ startTime = frameTime;
+ }
+
+ if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)])
+ {
+ NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value);
}
else
{
-// NSLog(@"Recorded pixel buffer at time: %lld", currentTime.value);
+// CMTime testTime = CMTimeSubtract(frameTime, startTime);
+//
+// NSLog(@"Recorded pixel buffer at time: %lld, %lld", frameTime.value, testTime.value);
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
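
The earlier comment about two consecutive identical presentation times aborting the recording is left unaddressed by this commit. One possible guard (purely an assumption, not in the commit) would track the last appended time in a new CMTime ivar, say previousFrameTime, initialized to kCMTimeInvalid:

CMTime presentationTime = CMTimeSubtract(frameTime, startTime);
if (CMTIME_IS_VALID(previousFrameTime) &&
    CMTIME_COMPARE_INLINE(presentationTime, ==, previousFrameTime))
{
    CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);   // release the lock taken earlier in this method
    return;   // drop the duplicate instead of letting AVAssetWriter abort
}
previousFrameTime = presentationTime;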
@@ -398,4 +413,9 @@ - (void)endProcessing
}
}
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+ return NO;
+}
+
@end
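
Returning NO here gives GPUImageMovieWriter a sensible default for the message introduced by this change; only the doubly-fed adaptive threshold input opts in with YES. Presumably the GPUImageInput protocol itself gained the declaration (the protocol header is not in the visible diff):

// Assumed addition to the GPUImageInput protocol, not shown in this diff:
- (BOOL)shouldIgnoreUpdatesToThisTarget;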