Permalink
Browse files

Added Play Audio support in GPUImageMovie

  • Loading branch information...
tuo committed Oct 18, 2013
1 parent 1106703 commit b96d2d1ab43d99b018066d86834cce914cf02171
Showing with 66 additions and 19 deletions.
  1. +2 −0 framework/Source/GPUImageMovie.h
  2. +64 −19 framework/Source/GPUImageMovie.m
@@ -30,6 +30,8 @@
*/
@property(readwrite, nonatomic) BOOL shouldRepeat;
+@property(readwrite, nonatomic) BOOL playSound;
+
/** This is used to notify the delegate that the movie did complete playing
*/
@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate>delegate;
@@ -5,7 +5,7 @@
@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
- BOOL audioEncodingIsFinished, videoEncodingIsFinished;
+ BOOL audioEncodingIsFinished, videoEncodingIsFinished, hasAudioTrack;
GPUImageMovieWriter *synchronizedMovieWriter;
CVOpenGLESTextureCacheRef coreVideoTextureCache;
AVAssetReader *reader;
@@ -25,6 +25,11 @@ @interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
const GLfloat *_preferredConversion;
int imageBufferWidth, imageBufferHeight;
+
+ AVAudioPlayer *audioPlayer;
+ CFAbsoluteTime startActualFrameTime;
+ CGFloat currentVideoTime;
+
}
- (void)processAsset;
@@ -168,6 +173,7 @@ - (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieW
- (void)startProcessing
{
+ currentVideoTime = 0.0f;
if( self.playerItem ) {
[self processPlayerItem];
return;
@@ -185,7 +191,10 @@ - (void)startProcessing
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
-
+ if (self.playSound)
+ {
+ [self setupSound];
+ }
GPUImageMovie __block *blockSelf = self;
[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
@@ -197,47 +206,54 @@ - (void)startProcessing
return;
}
blockSelf.asset = inputAsset;
+ startActualFrameTime = CFAbsoluteTimeGetCurrent() - currentVideoTime;
[blockSelf processAsset];
blockSelf = nil;
//});
}];
}
-- (AVAssetReader*)createAssetReader
+- (void)setupSound {
+ NSError *error;
+ audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.url error:&error];
+
+ if (error) {
+ NSLog(@"Failed to initialise sound with error:%@",error);
+ }
+ [audioPlayer prepareToPlay];
+}
+
+
+
+- (void)processAsset
{
NSError *error = nil;
- AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
+ reader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
+ CMTimeRange timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(currentVideoTime, 1000), kCMTimePositiveInfinity);
NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
// Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
readerVideoTrackOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerVideoTrackOutput];
+ [reader addOutput:readerVideoTrackOutput];
+ [reader setTimeRange:timeRange];
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
+ hasAudioTrack = [audioTracks count] > 0;
AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
if (shouldRecordAudioTrack)
{
[self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
-
+
// This might need to be extended to handle movies with more than one audio track
AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
readerAudioTrackOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerAudioTrackOutput];
+ [reader addOutput:readerAudioTrackOutput];
}
- return assetReader;
-}
-
-- (void)processAsset
-{
- reader = [self createAssetReader];
-
- AVAssetReaderOutput *readerVideoTrackOutput = nil;
- AVAssetReaderOutput *readerAudioTrackOutput = nil;
audioEncodingIsFinished = YES;
for( AVAssetReaderOutput *output in reader.outputs ) {
@@ -256,6 +272,12 @@ - (void)processAsset
return;
}
+ if (self.playSound && hasAudioTrack)
+ {
+ [audioPlayer setCurrentTime:currentVideoTime];
+ [audioPlayer play];
+ }
+
__unsafe_unretained GPUImageMovie *weakSelf = self;
if (synchronizedMovieWriter != nil)
@@ -353,6 +375,7 @@ - (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutp
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
+ CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
//NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
if (_playAtActualSpeed)
{
@@ -363,16 +386,33 @@ - (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutp
CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
-
- if (frameTimeDifference > actualTimeDifference)
+
+
+ CGFloat frameTimeOffset= CMTimeGetSeconds(currentSampleTime);
+ CGFloat actualTimeOffset = currentActualTime - startActualFrameTime;
+
+
+ if (self.playSound && hasAudioTrack)
{
- usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
+ actualTimeOffset = [audioPlayer currentTime];
+ }
+
+
+// if (frameTimeDifference > actualTimeDifference)
+// {
+// usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
+// }
+ if (frameTimeOffset - actualTimeOffset > 0.0f)
+ {
+ usleep(1000000.0 * (frameTimeOffset - actualTimeOffset));
}
previousFrameTime = currentSampleTime;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
}
+ currentVideoTime = CMTimeGetSeconds(currentSampleTime);
+
__unsafe_unretained GPUImageMovie *weakSelf = self;
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:sampleBufferRef];
@@ -633,6 +673,11 @@ - (void)endProcessing;
[synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
}
+ if (audioPlayer != nil)
+ {
+ [audioPlayer stop];
+ }
+
if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
[self.delegate didCompletePlayingMovie];
}

0 comments on commit b96d2d1

Please sign in to comment.