Comparing changes

base fork: mclark4386/GPUImage
base: d1d3586732
head fork: mclark4386/GPUImage
compare: 44ec275345
  • 3 commits
  • 27 files changed
  • 0 commit comments
  • 1 contributor
Showing with 364 additions and 79 deletions.
  1. +0 −1  README.md
  2. +9 −0 examples/MultiViewFilterExample/MultiViewFilterExample/MultiViewViewController.m
  3. +1 −1  examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.h
  4. +15 −6 examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.m
  5. +1 −1  examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.h
  6. +5 −3 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
  7. +4 −1 framework/Source/GPUImageAdaptiveThresholdFilter.m
  8. +2 −2 framework/Source/GPUImageCropFilter.m
  9. +1 −1  framework/Source/GPUImageFilter.h
  10. +29 −7 framework/Source/GPUImageFilter.m
  11. +11 −2 framework/Source/GPUImageFilterGroup.m
  12. +12 −7 framework/Source/GPUImageMovie.h
  13. +93 −20 framework/Source/GPUImageMovie.m
  14. +2 −0  framework/Source/GPUImageMovieWriter.h
  15. +60 −12 framework/Source/GPUImageMovieWriter.m
  16. +3 −1 framework/Source/GPUImageOpenGLESContext.h
  17. +7 −0 framework/Source/GPUImageOutput.h
  18. +27 −0 framework/Source/GPUImageOutput.m
  19. +1 −1  framework/Source/GPUImagePicture.m
  20. +9 −2 framework/Source/GPUImageRawData.m
  21. +2 −2 framework/Source/GPUImageRotationFilter.m
  22. +9 −2 framework/Source/GPUImageTextureOutput.m
  23. +3 −0  framework/Source/GPUImageUnsharpMaskFilter.m
  24. +3 −0  framework/Source/GPUImageVideoCamera.h
  25. +19 −3 framework/Source/GPUImageVideoCamera.m
  26. +4 −0 framework/Source/GPUImageView.h
  27. +32 −4 framework/Source/GPUImageView.m
1  README.md
@@ -154,7 +154,6 @@ For example, an application that takes in live video from the camera, converts t
- *pixelSize*: The fractional pixel size, split into width and height components. The default is (0.05, 0.05)
- **GPUImageSobelEdgeDetectionFilter**: Sobel edge detection, with edges highlighted in white
- - *intensity*: The degree to which the original image colors are replaced by the detected edges (0.0 - 1.0, with 1.0 as the default)
- *imageWidthFactor*:
- *imageHeightFactor*: These parameters affect the visibility of the detected edges
9 examples/MultiViewFilterExample/MultiViewFilterExample/MultiViewViewController.m
@@ -41,6 +41,15 @@ - (void)loadView
GPUImageFilter *filter1 = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Shader1"];
GPUImageFilter *filter2 = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Shader2"];
GPUImageSepiaFilter *filter3 = [[GPUImageSepiaFilter alloc] init];
+
+ // For thumbnails smaller than the input video size, we currently need to make them render at a smaller size.
+ // This is to avoid wasting processing time on larger frames than will be displayed.
+ // You'll need to use -forceProcessingAtSize: with a zero size to re-enable full frame processing of video.
+
+ [rotationFilter forceProcessingAtSize:view1.sizeInPixels];
+ [filter1 forceProcessingAtSize:view2.sizeInPixels];
+ [filter2 forceProcessingAtSize:view3.sizeInPixels];
+ [filter3 forceProcessingAtSize:view4.sizeInPixels];
[videoCamera addTarget:rotationFilter];
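
As the comment above notes, passing a zero size through -forceProcessingAtSize: re-enables full-frame processing (the GPUImageFilter.m hunk further down shows the overrideInputSize flag that implements this). A minimal sketch of the toggle, reusing this example's names purely as an illustration:

[filter3 forceProcessingAtSize:view4.sizeInPixels]; // render only as many pixels as the thumbnail shows
// ... later, restore processing at the full incoming frame size:
[filter3 forceProcessingAtSize:CGSizeZero];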
2  examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.h
@@ -4,7 +4,7 @@
@interface SimpleVideoFileFilterViewController : UIViewController
{
GPUImageMovie *movieFile;
- GPUImagePixellateFilter *pixellateFilter;
+ GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
}
21 examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.m
@@ -24,13 +24,15 @@ - (void)viewDidLoad
NSURL *sampleURL = [[NSBundle mainBundle] URLForResource:@"sample_iPod" withExtension:@"m4v"];
movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
- pixellateFilter = [[GPUImagePixellateFilter alloc] init];
+ movieFile.runBenchmark = YES;
+// filter = [[GPUImagePixellateFilter alloc] init];
+ filter = [[GPUImageAdaptiveThresholdFilter alloc] init];
GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
[movieFile addTarget:rotationFilter];
- [rotationFilter addTarget:pixellateFilter];
+ [rotationFilter addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
- [pixellateFilter addTarget:filterView];
+ [filter addTarget:filterView];
// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
@@ -38,18 +40,25 @@ - (void)viewDidLoad
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
- [pixellateFilter addTarget:movieWriter];
+ [filter addTarget:movieWriter];
[movieWriter startRecording];
[movieFile startProcessing];
+ [movieWriter setCompletionBlock:^{
+ [filter removeTarget:movieWriter];
+ [movieWriter finishRecording];
+ }];
+
+ /*
double delayInSeconds = 5.0;
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
- [pixellateFilter removeTarget:movieWriter];
+ [filter removeTarget:movieWriter];
[movieWriter finishRecording];
NSLog(@"Done recording");
});
+ */
}
- (void)viewDidUnload
@@ -64,7 +73,7 @@ - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interface
- (IBAction)updatePixelWidth:(id)sender
{
- pixellateFilter.fractionalWidthOfAPixel = [(UISlider *)sender value];
+// pixellateFilter.fractionalWidthOfAPixel = [(UISlider *)sender value];
}
@end
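
Teardown is now driven by the writer's completion block instead of the old fixed five-second delay (left above as a commented-out reference), so recording runs until the movie file itself finishes: GPUImageMovie calls -endProcessing on its targets at end of file, GPUImageMovieWriter's -endProcessing fires CompletionBlock, and the block detaches and finalizes the writer. The pattern, sketched with this example's names:

[movieWriter startRecording];
[movieFile startProcessing];
[movieWriter setCompletionBlock:^{
    [filter removeTarget:movieWriter];
    [movieWriter finishRecording];
}];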
2  examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.h
@@ -4,7 +4,7 @@
@interface SimpleVideoFilterViewController : UIViewController
{
GPUImageVideoCamera *videoCamera;
- GPUImageFilter *filter;
+ GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
}
8 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
@@ -24,8 +24,9 @@ - (void)viewDidLoad
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1920x1080 cameraPosition:AVCaptureDevicePositionBack];
-// filter = [[GPUImagePixellateFilter alloc] init];
- filter = [[GPUImageSketchFilter alloc] init];
+ filter = [[GPUImagePixellateFilter alloc] init];
+// filter = [[GPUImageSketchFilter alloc] init];
+// filter = [[GPUImageAdaptiveThresholdFilter alloc] init];
GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
[videoCamera addTarget:rotationFilter];
@@ -46,11 +47,12 @@ - (void)viewDidLoad
[movieWriter startRecording];
[videoCamera startCameraCapture];
- double delayInSeconds = 20.0;
+ double delayInSeconds = 10.0;
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
[filter removeTarget:movieWriter];
[movieWriter finishRecording];
+ NSLog(@"Movie completed");
});
}
5 framework/Source/GPUImageAdaptiveThresholdFilter.m
@@ -43,8 +43,11 @@ - (id)init;
[self addFilter:adaptiveThresholdFilter];
[luminanceFilter addTarget:boxBlurFilter];
- [luminanceFilter addTarget:adaptiveThresholdFilter];
+
[boxBlurFilter addTarget:adaptiveThresholdFilter];
+ // To prevent double updating of this filter, disable updates from the sharp luminance image side
+ adaptiveThresholdFilter.shouldIgnoreUpdatesToThisTarget = YES;
+ [luminanceFilter addTarget:adaptiveThresholdFilter];
self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
self.terminalFilter = adaptiveThresholdFilter;
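
Order matters in the hunk above: -addTarget: is what records the target to ignore (see the GPUImageOutput.m changes below), so the flag is set after the blurred side is attached and before the sharp luminance side is. Each incoming frame then triggers exactly one render of the threshold filter. Schematically, for any two-input filter (the names here are illustrative):

[blurredSide addTarget:twoInputFilter];          // this edge drives rendering
twoInputFilter.shouldIgnoreUpdatesToThisTarget = YES;
[sharpSide addTarget:twoInputFilter];            // this edge only supplies a texture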
4 framework/Source/GPUImageCropFilter.m
@@ -53,7 +53,7 @@ - (id)init;
// inputTextureSize = croppedSize;
//}
//
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
static const GLfloat cropSquareVertices[] = {
-1.0f, -1.0f,
@@ -71,7 +71,7 @@ - (void)newFrameReady;
[self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates sourceTexture:filterSourceTexture];
- [self informTargetsAboutNewFrame];
+ [self informTargetsAboutNewFrameAtTime:frameTime];
}
@end
2  framework/Source/GPUImageFilter.h
@@ -55,7 +55,7 @@ typedef struct GPUMatrix4x4 GPUMatrix4x4;
// Rendering
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
-- (void)informTargetsAboutNewFrame;
+- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
// Input parameters
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
36 framework/Source/GPUImageFilter.m
@@ -207,7 +207,7 @@ - (void)createFilterFBOofSize:(CGSize)currentFBOSize;
glGenFramebuffers(1, &filterFramebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer);
- NSLog(@"Filter size: %f, %f for filter: %@", currentFBOSize.width, currentFBOSize.height, self);
+// NSLog(@"Filter size: %f, %f for filter: %@", currentFBOSize.width, currentFBOSize.height, self);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
@@ -281,12 +281,15 @@ - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
-- (void)informTargetsAboutNewFrame;
-{
+- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
+{
for (id<GPUImageInput> currentTarget in targets)
{
- [currentTarget setInputSize:inputTextureSize];
- [currentTarget newFrameReady];
+ if (currentTarget != targetToIgnoreForUpdates)
+ {
+ [currentTarget setInputSize:inputTextureSize];
+ [currentTarget newFrameReadyAtTime:frameTime];
+ }
}
}
@@ -370,7 +373,7 @@ - (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSStrin
#pragma mark -
#pragma mark GPUImageInput
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
@@ -387,7 +390,7 @@ - (void)newFrameReady;
};
[self renderToTextureWithVertices:squareVertices textureCoordinates:squareTextureCoordinates sourceTexture:filterSourceTexture];
- [self informTargetsAboutNewFrame];
+ [self informTargetsAboutNewFrameAtTime:frameTime];
}
- (NSInteger)nextAvailableTextureIndex;
@@ -426,6 +429,11 @@ - (void)recreateFilterFBO
- (void)setInputSize:(CGSize)newSize;
{
+ if (overrideInputSize)
+ {
+ return;
+ }
+
if ( (CGSizeEqualToSize(inputTextureSize, CGSizeZero)) || (CGSizeEqualToSize(newSize, CGSizeZero)) )
{
inputTextureSize = newSize;
@@ -437,6 +445,20 @@ - (void)setInputSize:(CGSize)newSize;
}
}
+- (void)forceProcessingAtSize:(CGSize)frameSize;
+{
+ if (CGSizeEqualToSize(frameSize, CGSizeZero))
+ {
+ overrideInputSize = NO;
+ }
+ else
+ {
+ overrideInputSize = YES;
+ inputTextureSize = frameSize;
+ }
+}
+
+
- (CGSize)maximumOutputSize;
{
// I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
13 framework/Source/GPUImageFilterGroup.m
@@ -76,11 +76,11 @@ - (void)removeAllTargets;
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
{
- [currentFilter newFrameReady];
+ [currentFilter newFrameReadyAtTime:frameTime];
}
}
@@ -110,6 +110,15 @@ - (void)setInputSize:(CGSize)newSize;
}
}
+- (void)forceProcessingAtSize:(CGSize)frameSize;
+{
+ for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
+ {
+ [currentFilter forceProcessingAtSize:frameSize];
+ }
+}
+
+
- (CGSize)maximumOutputSize;
{
// I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
19 framework/Source/GPUImageMovie.h
@@ -1,17 +1,22 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
-#import <CoreMedia/CoreMedia.h>
#import "GPUImageOpenGLESContext.h"
#import "GPUImageOutput.h"
-@interface GPUImageMovie : GPUImageOutput {
- CVPixelBufferRef _currentBuffer;
+@interface GPUImageMovie : GPUImageOutput
+{
+ CVOpenGLESTextureCacheRef coreVideoTextureCache;
}
-@property (readwrite, retain) NSURL *url;
+@property(readwrite, retain) NSURL *url;
+@property(readwrite, nonatomic) BOOL runBenchmark;
--(id)initWithURL:(NSURL *)url;
--(void)startProcessing;
--(void)endProcessing;
+// Initialization and teardown
+- (id)initWithURL:(NSURL *)url;
+
+// Movie processing
+- (void)startProcessing;
+- (void)endProcessing;
+- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
113 framework/Source/GPUImageMovie.m
@@ -3,20 +3,40 @@
@implementation GPUImageMovie
@synthesize url = _url;
+@synthesize runBenchmark = _runBenchmark;
-- (id)initWithURL:(NSURL *)url
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)initWithURL:(NSURL *)url;
{
if (!(self = [super init]))
{
return nil;
}
+ if ([GPUImageOpenGLESContext supportsFastTextureUpload])
+ {
+ [GPUImageOpenGLESContext useImageProcessingContext];
+ CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
+ if (err)
+ {
+ NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
+ }
+
+ // Need to remove the initially created texture
+ [self deleteOutputTexture];
+ }
+
self.url = url;
return self;
}
-- (void)startProcessing
+#pragma mark -
+#pragma mark Movie processing
+
+- (void)startProcessing;
{
// AVURLAsset to read input movie (i.e. mov recorded to local storage)
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
@@ -51,9 +71,9 @@ - (void)startProcessing
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
- CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
- _currentBuffer = pixelBuffer;
- [self performSelectorOnMainThread:@selector(processFrame) withObject:nil waitUntilDone:YES];
+ runOnMainQueueWithoutDeadlocking(^{
+ [self processMovieFrame:sampleBufferRef];
+ });
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
@@ -65,27 +85,80 @@ - (void)startProcessing
}];
}
-- (void)processFrame
+- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
- // Upload to texture
- CVPixelBufferLockBaseAddress(_currentBuffer, 0);
- int bufferHeight = CVPixelBufferGetHeight(_currentBuffer);
- int bufferWidth = CVPixelBufferGetWidth(_currentBuffer);
-
- glBindTexture(GL_TEXTURE_2D, outputTexture);
- // Using BGRA extension to pull in video frame data directly
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(_currentBuffer));
+ CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
+ CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
+
+ int bufferHeight = CVPixelBufferGetHeight(movieFrame);
+ int bufferWidth = CVPixelBufferGetWidth(movieFrame);
+
+ CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
+
+ if ([GPUImageOpenGLESContext supportsFastTextureUpload])
+ {
+ CVPixelBufferLockBaseAddress(movieFrame, 0);
+
+ [GPUImageOpenGLESContext useImageProcessingContext];
+ CVOpenGLESTextureRef texture = NULL;
+ CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
+
+ if (!texture || err) {
+ NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
+ return;
+ }
+
+ outputTexture = CVOpenGLESTextureGetName(texture);
+ // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
+ glBindTexture(GL_TEXTURE_2D, outputTexture);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+ for (id<GPUImageInput> currentTarget in targets)
+ {
+ [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight)];
+
+ NSInteger indexOfObject = [targets indexOfObject:currentTarget];
+ [currentTarget setInputTexture:outputTexture atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
+
+ [currentTarget newFrameReadyAtTime:currentSampleTime];
+ }
+
+ CVPixelBufferUnlockBaseAddress(movieFrame, 0);
+
+ // Flush the CVOpenGLESTexture cache and release the texture
+ CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
+ CFRelease(texture);
+ outputTexture = 0;
+ }
+ else
+ {
+ // Upload to texture
+ CVPixelBufferLockBaseAddress(movieFrame, 0);
+
+ glBindTexture(GL_TEXTURE_2D, outputTexture);
+ // Using BGRA extension to pull in video frame data directly
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(movieFrame));
+
+ CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight);
+ for (id<GPUImageInput> currentTarget in targets)
+ {
+ [currentTarget setInputSize:currentSize];
+ [currentTarget newFrameReadyAtTime:currentSampleTime];
+ }
+ CVPixelBufferUnlockBaseAddress(movieFrame, 0);
+ }
- CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight);
- for (id<GPUImageInput> currentTarget in targets)
+ if (_runBenchmark)
{
- [currentTarget setInputSize:currentSize];
- [currentTarget newFrameReady];
+ CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
+ NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
- CVPixelBufferUnlockBaseAddress(_currentBuffer, 0);
}
-- (void)endProcessing
+- (void)endProcessing;
{
for (id<GPUImageInput> currentTarget in targets)
{
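
Two changes stand out in this file: frames are handed to targets with their real presentation timestamps via CMSampleBufferGetOutputPresentationTimeStamp(), and the new runBenchmark flag logs per-frame processing time. Enabling the benchmark from client code is a one-liner, as the SimpleVideoFileFilter example above does (sampleURL is assumed to point at a local movie):

GPUImageMovie *movie = [[GPUImageMovie alloc] initWithURL:sampleURL];
movie.runBenchmark = YES; // logs "Current frame time : ... ms" for each frame
[movie startProcessing];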
2  framework/Source/GPUImageMovieWriter.h
@@ -28,6 +28,7 @@
CGSize videoSize;
}
+@property (readwrite, nonatomic) BOOL hasAudioTrack;
@property (nonatomic, copy) void(^CompletionBlock)(void);
@property (nonatomic, copy) void(^FailureBlock)(NSError*);
@property (nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;
@@ -38,5 +39,6 @@
// Movie recording
- (void)startRecording;
- (void)finishRecording;
+- (void)newAudioSampleReadyAtTime:(CMTime)frameTime;
@end
72 framework/Source/GPUImageMovieWriter.m
@@ -29,7 +29,7 @@ @interface GPUImageMovieWriter ()
GLubyte *frameData;
- NSDate *startTime;
+ CMTime startTime;
}
// Movie recording
@@ -45,9 +45,11 @@ - (void)renderAtInternalSize;
@end
@implementation GPUImageMovieWriter
+
+@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize CompletionBlock;
@synthesize FailureBlock;
-@synthesize delegate;
+@synthesize delegate = _delegate;
#pragma mark -
#pragma mark Initialization and teardown
@@ -61,6 +63,7 @@ - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
videoSize = newSize;
movieURL = newMovieURL;
+ startTime = kCMTimeInvalid;
[GPUImageOpenGLESContext useImageProcessingContext];
@@ -183,7 +186,7 @@ - (void)initializeMovie;
- (void)startRecording;
{
- startTime = [NSDate date];
+ startTime = kCMTimeInvalid;
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
}
@@ -194,6 +197,11 @@ - (void)finishRecording;
[assetWriter finishWriting];
}
+- (void)newAudioSampleReadyAtTime:(CMTime)frameTime;
+{
+
+}
+
#pragma mark -
#pragma mark Frame rendering
@@ -313,13 +321,19 @@ - (void)renderAtInternalSize;
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
if (!assetWriterVideoInput.readyForMoreMediaData)
{
// NSLog(@"Had to drop a frame");
return;
}
+
+ if (CMTIME_IS_INVALID(frameTime))
+ {
+ // Drop frames forced by images and other things with no time constants
+ return;
+ }
// Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
[GPUImageOpenGLESContext useImageProcessingContext];
@@ -349,15 +363,23 @@ - (void)newFrameReady;
}
// May need to add a check here, because if two consecutive times with the same value are added to the movie, it aborts recording
- CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
- if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:currentTime])
+// CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
+
+ if (CMTIME_IS_INVALID(startTime))
+ {
+ startTime = frameTime;
+ }
+
+ if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)])
{
- NSLog(@"Problem appending pixel buffer at time: %lld", currentTime.value);
+ NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value);
}
else
{
-// NSLog(@"Recorded pixel buffer at time: %lld", currentTime.value);
+// CMTime testTime = CMTimeSubtract(frameTime, startTime);
+//
+// NSLog(@"Recorded pixel buffer at time: %lld, %lld", frameTime.value, testTime.value);
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
@@ -388,14 +410,40 @@ - (CGSize)maximumOutputSize;
- (void)endProcessing
{
- if (CompletionBlock) {
+ if (CompletionBlock)
+ {
CompletionBlock();
}
- else {
- if(self.delegate&&[delegate respondsToSelector:@selector(Completed)]){
- [self.delegate Completed];
+ else
+ {
+ if (_delegate && [_delegate respondsToSelector:@selector(Completed)])
+ {
+ [_delegate Completed];
}
}
}
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+ return NO;
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (void)setHasAudioTrack:(BOOL)newValue
+{
+ _hasAudioTrack = newValue;
+
+ if (_hasAudioTrack)
+ {
+ // Add audio track
+ }
+ else
+ {
+ // Remove audio track if it exists
+ }
+}
+
+
@end
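
Recording now uses the source's CMTime stamps instead of wall-clock NSDate math: the first frame's timestamp is latched into startTime, and each frame is appended at CMTimeSubtract(frameTime, startTime), so the written movie always begins at zero, matching startSessionAtSourceTime:kCMTimeZero. A worked example of the arithmetic (values are illustrative):

CMTime firstFrame = CMTimeMake(240, 120);                    // first frame arrives 2.0 s into capture
CMTime laterFrame = CMTimeMake(300, 120);                    // a frame from 2.5 s into capture
CMTime written    = CMTimeSubtract(laterFrame, firstFrame);  // {60, 120} = 0.5 s into the output movie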
4 framework/Source/GPUImageOpenGLESContext.h
@@ -3,6 +3,7 @@
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
+#import <CoreMedia/CoreMedia.h>
@interface GPUImageOpenGLESContext : NSObject
{
@@ -24,10 +25,11 @@
@end
@protocol GPUImageInput
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
- (NSInteger)nextAvailableTextureIndex;
- (void)setInputSize:(CGSize)newSize;
- (CGSize)maximumOutputSize;
- (void)endProcessing;
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
@end
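
Every GPUImageInput conformer now has to implement the timed frame callback plus the new shouldIgnoreUpdatesToThisTarget query (most answer NO, as the GPUImageRawData, GPUImageTextureOutput, and GPUImageView hunks below show). A minimal conforming stub, offered as a sketch rather than a class from the framework:

#import "GPUImageOpenGLESContext.h"

@interface MyPassthroughTarget : NSObject <GPUImageInput>
@end

@implementation MyPassthroughTarget
- (void)newFrameReadyAtTime:(CMTime)frameTime { /* consume the frame */ }
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex { }
- (NSInteger)nextAvailableTextureIndex { return 0; }
- (void)setInputSize:(CGSize)newSize { }
- (CGSize)maximumOutputSize { return CGSizeZero; }
- (void)endProcessing { }
- (BOOL)shouldIgnoreUpdatesToThisTarget { return NO; }
@end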
7 framework/Source/GPUImageOutput.h
@@ -3,15 +3,21 @@
#import "GPUImageOpenGLESContext.h"
#import "GLProgram.h"
+void runOnMainQueueWithoutDeadlocking(void (^block)(void));
+
@interface GPUImageOutput : NSObject
{
NSMutableArray *targets, *targetTextureIndices;
GLuint outputTexture;
CGSize inputTextureSize, cachedMaximumOutputSize;
+ id<GPUImageInput> targetToIgnoreForUpdates;
+
+ BOOL overrideInputSize;
}
@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
+@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
// Managing targets
- (void)setInputTextureForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
@@ -23,6 +29,7 @@
// Manage the output texture
- (void)initializeOutputTexture;
- (void)deleteOutputTexture;
+- (void)forceProcessingAtSize:(CGSize)frameSize;
// Still image processing
- (UIImage *)imageFromCurrentlyProcessedOutput;
27 framework/Source/GPUImageOutput.m
@@ -1,8 +1,21 @@
#import "GPUImageOutput.h"
+void runOnMainQueueWithoutDeadlocking(void (^block)(void))
+{
+ if ([NSThread isMainThread])
+ {
+ block();
+ }
+ else
+ {
+ dispatch_sync(dispatch_get_main_queue(), block);
+ }
+}
+
@implementation GPUImageOutput
@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
+@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;
#pragma mark -
#pragma mark Initialization and teardown
@@ -40,6 +53,10 @@ - (void)addTarget:(id<GPUImageInput>)newTarget;
{
NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
[self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];
+ if ([newTarget shouldIgnoreUpdatesToThisTarget])
+ {
+ targetToIgnoreForUpdates = newTarget;
+ }
}
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
@@ -62,6 +79,11 @@ - (void)removeTarget:(id<GPUImageInput>)targetToRemove;
return;
}
+ if (targetToIgnoreForUpdates == targetToRemove)
+ {
+ targetToIgnoreForUpdates = nil;
+ }
+
cachedMaximumOutputSize = CGSizeZero;
[targetToRemove setInputSize:CGSizeZero];
@@ -112,6 +134,11 @@ - (void)deleteOutputTexture;
}
}
+- (void)forceProcessingAtSize:(CGSize)frameSize;
+{
+
+}
+
#pragma mark -
#pragma mark Still image processing
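
runOnMainQueueWithoutDeadlocking() exists because a bare dispatch_sync() onto the main queue never returns when the caller is already on the main thread; the helper runs the block inline in that case and only dispatches synchronously otherwise. GPUImageMovie uses it above to push frame processing onto the main queue from AVAssetReader's background thread; in sketch form:

runOnMainQueueWithoutDeadlocking(^{
    // Safe whether invoked from the main thread or a background reader thread
    [self processMovieFrame:sampleBufferRef];
});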
2  framework/Source/GPUImagePicture.m
@@ -77,7 +77,7 @@ - (void)processImage;
for (id<GPUImageInput> currentTarget in targets)
{
[currentTarget setInputSize:pixelSizeOfImage];
- [currentTarget newFrameReady];
+ [currentTarget newFrameReadyAtTime:kCMTimeInvalid];
}
}
11 framework/Source/GPUImageRawData.m
@@ -196,7 +196,7 @@ - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
hasReadFromTheCurrentFrame = NO;
@@ -222,7 +222,14 @@ - (CGSize)maximumOutputSize;
return imageSize;
}
--(void) endProcessing{}
+- (void)endProcessing;
+{
+}
+
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+ return NO;
+}
#pragma mark -
#pragma mark Accessors
4 framework/Source/GPUImageRotationFilter.m
@@ -46,7 +46,7 @@ - (void)setInputSize:(CGSize)newSize;
[super setInputSize:processedSize];
}
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
static const GLfloat rotationSquareVertices[] = {
-1.0f, -1.0f,
@@ -99,7 +99,7 @@ - (void)newFrameReady;
case kGPUImageFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:horizontalFlipTextureCoordinates sourceTexture:filterSourceTexture]; break;
case kGPUImageRotateRightFlipVertical: [self renderToTextureWithVertices:rotationSquareVertices textureCoordinates:rotateRightVerticalFlipTextureCoordinates sourceTexture:filterSourceTexture]; break;
}
- [self informTargetsAboutNewFrame];
+ [self informTargetsAboutNewFrameAtTime:frameTime];
}
@end
11 framework/Source/GPUImageTextureOutput.m
@@ -8,7 +8,7 @@ @implementation GPUImageTextureOutput
#pragma mark -
#pragma mark GPUImageInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
[_delegate newFrameReadyFromTextureOutput:self];
}
@@ -32,6 +32,13 @@ - (CGSize)maximumOutputSize;
return CGSizeZero;
}
--(void) endProcessing{}
+- (void)endProcessing
+{
+}
+
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+ return NO;
+}
@end
3  framework/Source/GPUImageUnsharpMaskFilter.m
@@ -45,6 +45,9 @@ - (id)init;
// Texture location 0 needs to be the sharp image for both the blur and the second stage processing
[blurFilter addTarget:unsharpMaskFilter atTextureLocation:1];
+ // To prevent double updating of this filter, disable updates from the sharp image side
+ unsharpMaskFilter.shouldIgnoreUpdatesToThisTarget = YES;
+
self.initialFilters = [NSArray arrayWithObjects:blurFilter, unsharpMaskFilter, nil];
self.terminalFilter = unsharpMaskFilter;
3  framework/Source/GPUImageVideoCamera.h
@@ -8,6 +8,8 @@
// "In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight."
// Currently, rotation is needed to handle each camera
+@class GPUImageMovieWriter;
+
@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate>
{
CVOpenGLESTextureCacheRef coreVideoTextureCache;
@@ -21,6 +23,7 @@
@property(readonly, retain) AVCaptureSession *captureSession;
@property(readwrite, nonatomic) BOOL runBenchmark;
+@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
// Use this property to manage camera settings. Focus point, exposure point, etc.
@property(readonly) AVCaptureDevice *inputCamera;
22 framework/Source/GPUImageVideoCamera.m
@@ -1,4 +1,5 @@
#import "GPUImageVideoCamera.h"
+#import "GPUImageMovieWriter.h"
#pragma mark -
#pragma mark Private methods and instance variables
@@ -7,6 +8,7 @@ @interface GPUImageVideoCamera ()
{
AVCaptureDeviceInput *videoInput;
AVCaptureVideoDataOutput *videoOutput;
+ NSDate *startingCaptureTime;
}
@end
@@ -16,7 +18,7 @@ @implementation GPUImageVideoCamera
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
-
+@synthesize audioEncodingTarget = _audioEncodingTarget;
#pragma mark -
#pragma mark Initialization and teardown
@@ -130,6 +132,7 @@ - (void)startCameraCapture;
{
if (![_captureSession isRunning])
{
+ startingCaptureTime = [NSDate date];
[_captureSession startRunning];
};
}
@@ -149,9 +152,13 @@ - (void)rotateCamera
AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
if(currentCameraPosition == AVCaptureDevicePositionBack)
+ {
currentCameraPosition = AVCaptureDevicePositionFront;
+ }
else
+ {
currentCameraPosition = AVCaptureDevicePositionBack;
+ }
AVCaptureDevice *backFacingCamera = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
@@ -202,6 +209,8 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
int bufferWidth = CVPixelBufferGetWidth(cameraFrame);
int bufferHeight = CVPixelBufferGetHeight(cameraFrame);
+ CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startingCaptureTime], 120);
+
if ([GPUImageOpenGLESContext supportsFastTextureUpload])
{
CVPixelBufferLockBaseAddress(cameraFrame, 0);
@@ -230,7 +239,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
[currentTarget setInputTexture:outputTexture atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
- [currentTarget newFrameReady];
+ [currentTarget newFrameReadyAtTime:currentTime];
}
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
@@ -266,7 +275,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
for (id<GPUImageInput> currentTarget in targets)
{
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight)];
- [currentTarget newFrameReady];
+ [currentTarget newFrameReadyAtTime:currentTime];
}
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
@@ -285,4 +294,11 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
#pragma mark -
#pragma mark Accessors
+- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
+{
+ _audioEncodingTarget = newValue;
+
+ _audioEncodingTarget.hasAudioTrack = YES;
+}
+
@end
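
The new audioEncodingTarget property is the hook for routing audio into a movie writer, though for now its setter only flips the writer's hasAudioTrack flag; actual audio capture is still stubbed out (see the empty -newAudioSampleReadyAtTime: and the placeholder -setHasAudioTrack: bodies above). Wiring it up would look like this sketch:

videoCamera.audioEncodingTarget = movieWriter; // also sets movieWriter.hasAudioTrack = YES
[movieWriter startRecording];
[videoCamera startCameraCapture];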
4 framework/Source/GPUImageView.h
@@ -2,5 +2,9 @@
#import "GPUImageOpenGLESContext.h"
@interface GPUImageView : UIView <GPUImageInput>
+{
+}
+
+@property(readonly, nonatomic) CGSize sizeInPixels;
@end
36 framework/Source/GPUImageView.m
@@ -10,7 +10,6 @@
@interface GPUImageView ()
{
GLuint inputTextureForDisplay;
- GLint backingWidth, backingHeight;
GLuint displayRenderbuffer, displayFramebuffer;
GLProgram *displayProgram;
@@ -29,6 +28,8 @@ - (void)destroyDisplayFramebuffer;
@implementation GPUImageView
+@synthesize sizeInPixels = _sizeInPixels;
+
#pragma mark -
#pragma mark Initialization and teardown
@@ -131,8 +132,13 @@ - (void)createDisplayFramebuffer;
[[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
+ GLint backingWidth, backingHeight;
+
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
+ _sizeInPixels.width = (CGFloat)backingWidth;
+ _sizeInPixels.height = (CGFloat)backingHeight;
+
// NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);
@@ -165,7 +171,7 @@ - (void)setDisplayFramebuffer;
glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);
- glViewport(0, 0, backingWidth, backingHeight);
+ glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);
}
- (void)presentFramebuffer;
@@ -177,7 +183,7 @@ - (void)presentFramebuffer;
#pragma mark -
#pragma mark GPUInput protocol
-- (void)newFrameReady;
+- (void)newFrameReadyAtTime:(CMTime)frameTime;
{
[GPUImageOpenGLESContext useImageProcessingContext];
[self setDisplayFramebuffer];
@@ -241,6 +247,28 @@ - (CGSize)maximumOutputSize;
}
}
--(void) endProcessing{}
+- (void)endProcessing
+{
+}
+
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+ return NO;
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (CGSize)sizeInPixels;
+{
+ if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero))
+ {
+ return [self maximumOutputSize];
+ }
+ else
+ {
+ return _sizeInPixels;
+ }
+}
@end
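
sizeInPixels is what the MultiViewFilterExample above feeds to -forceProcessingAtSize:. Note the fallback in the accessor: before the display framebuffer exists, _sizeInPixels is still zero, so the getter answers with -maximumOutputSize instead, letting callers read it without worrying about initialization order:

// smallView is an illustrative GPUImageView; safe to query at any point after creation
[sepiaFilter forceProcessingAtSize:smallView.sizeInPixels];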
