Merge pull request #1244 from julapy/bugfix-ios-moviePlaye

movie player fixes / optimisations
commit d9c0304ef9232debd9795fd959224ec84489f8c0 (2 parents: 565752e + 591787b)
Theodore Watson authored May 23, 2012
3  libs/openFrameworks/video/ofiPhoneVideoPlayer.h
@@ -69,6 +69,9 @@ class ofiPhoneVideoPlayer : public ofBaseVideoPlayer {
 	int width;
 	int height;
 	float playbackSpeed;
+    
+    bool bFrameNew;
+    bool bUpdatePixels;
 	
 	int vol;
 		
226  libs/openFrameworks/video/ofiPhoneVideoPlayer.mm
@@ -2,12 +2,20 @@
 #import "ofxiPhoneExtras.h"
 #import "AVFoundationVideoPlayer.h"
 
+#ifdef __IPHONE_5_0
+CVOpenGLESTextureCacheRef _videoTextureCache;
+CVOpenGLESTextureRef _videoTextureRef;
+#endif
+
 ofiPhoneVideoPlayer::ofiPhoneVideoPlayer() {
 	videoPlayer=NULL;
 	pixels = NULL;
 	pixelsTmp = NULL;
 	
 	videoWasStopped=false;
+
+    bUpdatePixels = false;
+    bFrameNew = false;
 	
 	width = 0;
 	height = 0;
@@ -31,6 +39,17 @@
 	
 	videoPath = name;
 	initWithPath(videoPath);
+    
+#ifdef __IPHONE_5_0
+    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, 
+                                                NULL, 
+                                                (__bridge void *)ofxiPhoneGetGLView().context,
+                                                NULL, 
+                                                &_videoTextureCache);
+    if(err) {
+        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
+    }    
+#endif
 	
 	if(videoPlayer != NULL)
 		if(! [(AVFoundationVideoPlayer *)videoPlayer isInErrorState])
@@ -66,6 +85,17 @@
 		width = height = 0;
 		
 		[(AVFoundationVideoPlayer *)videoPlayer release];
+        
+#ifdef __IPHONE_5_0
+        if(_videoTextureRef) {
+            CFRelease(_videoTextureRef);
+            _videoTextureRef = NULL;
+        }
+        if(_videoTextureCache) {
+            CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
+            CFRelease(_videoTextureCache);
+        }
+#endif
 	}
 	videoPlayer = NULL;
 }
@@ -103,7 +133,7 @@
 
 bool ofiPhoneVideoPlayer::isFrameNew() {
 	if(videoPlayer != NULL) {
-		return [(AVFoundationVideoPlayer *)videoPlayer hasNewFrame];
+		return bFrameNew;
 	}	
 	return false;
 }
@@ -114,6 +144,10 @@
 
 	if(videoPlayer != NULL && isPlaying())
 	{
+        if(!bUpdatePixels) { // if pixels have not changed, return the already calculated pixels.
+            return pixels;
+        }
+        
 		CGImageRef currentFrameRef;
 		
 		NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
@@ -187,6 +221,8 @@
 			CGImageRelease(currentFrameRef);
 		}
 		
+        bUpdatePixels = false;
+        
 		return pixels;
 	}
 	
@@ -195,42 +231,139 @@
 
 ofTexture * ofiPhoneVideoPlayer::getTexture()
 {
-	
-	if(videoPlayer != NULL)
-	{
-		CVImageBufferRef imageBuffer = [(AVFoundationVideoPlayer *)videoPlayer getCurrentFrame]; 
-
-		CVPixelBufferLockBaseAddress(imageBuffer,0); 
+	if(videoPlayer != NULL) {
 
-		uint8_t *bufferPixels = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
+		CVImageBufferRef imageBuffer = [(AVFoundationVideoPlayer *)videoPlayer getCurrentFrame]; 
 		
-		if(width != min(size_t(1024),CVPixelBufferGetWidth(imageBuffer))) {
-			
-			if(videoTexture.bAllocated())
-				videoTexture.clear();
-				
-			int widthIn = min(size_t(1024),CVPixelBufferGetWidth(imageBuffer)); 
-			int heightIn = min(size_t(1024),CVPixelBufferGetHeight(imageBuffer));
-			
-			if( width==0 && widthIn != 0  && pixels == NULL) {
-								
-				if(internalGLFormat == GL_RGB)
-					pixels = (GLubyte *) malloc(widthIn * heightIn * 3);
-				else
-					pixels = (GLubyte *) malloc(widthIn * heightIn * 4);
+        CVPixelBufferLockBaseAddress(imageBuffer,0); 
+        
+        size_t widthIn = CVPixelBufferGetWidth(imageBuffer);
+        size_t heightIn = CVPixelBufferGetHeight(imageBuffer);
+        
+#ifdef __IPHONE_5_0        
+        if(_videoTextureCache) {
+            /**
+             *  video texture cache is available.
+             *  this means we don't have to copy any pixels,
+             *  and we can reuse the already existing video texture.
+             *  this is very fast! :)
+             */
+            
+            if(_videoTextureRef) {
+                CFRelease(_videoTextureRef);
+                _videoTextureRef = NULL;
+            }
+            CVOpenGLESTextureCacheFlush(_videoTextureCache, 0); // Periodic texture cache flush every frame
+
+            /**
+             *  CVOpenGLESTextureCache does this operation for us.
+             *  it automatically returns a texture reference which means we don't have to create the texture ourselves.
+             *  but we do want to return an ofTexture object...
+             *  this creates a slight problem because when we create an ofTexture object, it also creates an OpenGL texture for us,
+             *  which is unnecessary in this case because the texture already exists.
+             *  so... the below is somewhat of a hack to reuse an existing texture and give it to an ofTexture object.
+             */
+            
+            ofTextureData texData;
+            texData.tex_w = texData.width = widthIn;
+            texData.tex_h = texData.height = heightIn;
+            texData.tex_w = ofNextPow2(texData.tex_w);
+            texData.tex_h = ofNextPow2(texData.tex_h);
+            texData.tex_t = 1.0f;
+            texData.tex_u = 1.0f;
+            texData.textureTarget = GL_TEXTURE_2D;
+            texData.glTypeInternal = GL_RGBA; // opengl format
+            texData.glType = GL_BGRA; // native iOS format
+            texData.pixelType = GL_UNSIGNED_BYTE;
+            texData.bAllocated = true;
+            
+            glActiveTexture(GL_TEXTURE0);
+            
+            /**
+             *  create video texture from video image.
+             *  inside this function, iOS is creating the texture for us.
+             *  a video texture reference is returned.
+             */
+            CVReturn err;
+            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,     // CFAllocatorRef allocator
+                                                               _videoTextureCache,      // CVOpenGLESTextureCacheRef textureCache
+                                                               imageBuffer,             // CVImageBufferRef sourceImage
+                                                               NULL,                    // CFDictionaryRef textureAttributes
+                                                               texData.textureTarget,   // GLenum target
+                                                               texData.glTypeInternal,  // GLint internalFormat
+                                                               texData.width,           // GLsizei width
+                                                               texData.height,          // GLsizei height
+                                                               texData.glType,          // GLenum format
+                                                               texData.pixelType,       // GLenum type
+                                                               0,                       // size_t planeIndex
+                                                               &_videoTextureRef);      // CVOpenGLESTextureRef *textureOut
+            
+            /**
+             *  get the generated textureID and textureTarget.
+             *  configure the texture (this is from inside ofTexture::allocate).
+             *  and give it to an ofTexture object.
+             */
+            texData.textureID = CVOpenGLESTextureGetName(_videoTextureRef);
+            texData.textureTarget = CVOpenGLESTextureGetTarget(_videoTextureRef);
+            
+            glEnable(texData.textureTarget);
+            glBindTexture(texData.textureTarget, (GLuint)texData.textureID);
+            glTexParameterf(texData.textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+            glTexParameterf(texData.textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+            glTexParameterf(texData.textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+            glTexParameterf(texData.textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+            glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+            glDisable(texData.textureTarget);
+            
+            videoTexture.texData = texData;
+
+            if(err) {
+                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
+            }            
+        } else 
+#endif            
+        {
+            /**
+             *  no video texture cache.
+             *  load texture from pixels.
+             *  this method is the slower alternative.
+             */
+            
+            int maxTextureSize = 0;
+            glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+            
+            if((int)widthIn > maxTextureSize || (int)heightIn > maxTextureSize) {
+                ofLog(OF_LOG_WARNING, "ofiPhoneVideoPlayer::getTexture() - video image is bigger than the supported texture size");
+            }
+            
+            widthIn = min(size_t(maxTextureSize), widthIn);
+            heightIn = min(size_t(maxTextureSize), heightIn);
+            
+            if((width != widthIn) || (height != heightIn)) {
+                
+                if(videoTexture.bAllocated()) {
+                    videoTexture.clear();
+                }
 				
-				pixelsTmp	= (GLubyte *) malloc(widthIn * heightIn * 4);
-			}				
+                if(width == 0 && widthIn != 0 && pixels == NULL) {
+                    
+                    if(internalGLFormat == GL_RGB) {
+                        pixels = (GLubyte *)malloc(widthIn * heightIn * 3);
+                    } else {
+                        pixels = (GLubyte *)malloc(widthIn * heightIn * 4);
+                    }
+                    pixelsTmp = (GLubyte *)malloc(widthIn * heightIn * 4);
+                }				
 				
-			width	= widthIn; 
-			height	= heightIn;
-			videoTexture.allocate(width, height, internalGLFormat);
-		}
-		
-		videoTexture.loadData((unsigned char *)imageBuffer, width, height, internalGLFormat);
-		
-		// unlock the image buffer
-		CVPixelBufferUnlockBaseAddress(imageBuffer,0);
+                width = widthIn;
+                height = heightIn;
+                videoTexture.allocate(width, height, internalGLFormat);
+            }
+            
+            videoTexture.loadData(getPixels(), width, height, internalGLFormat);
+        }
+        
+        CVPixelBufferUnlockBaseAddress(imageBuffer,0); // unlock the image buffer
 		
 		return &videoTexture;
 	}
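
Because the iOS 5 path wraps the CoreVideo-cached texture in the returned ofTexture, a caller can stay on the GPU and never touch getPixels(). A minimal sketch of that consumption (hypothetical ofApp snippet; the player member and the getWidth()/getHeight() accessors from ofBaseVideoPlayer are assumptions, not part of this commit):

    // Hypothetical ofApp::draw() snippet -- "player" is assumed to be an ofiPhoneVideoPlayer member.
    ofTexture * tex = player.getTexture();   // iOS 5+: reuses the texture-cache texture, no pixel copy
    if(tex != NULL && tex->bAllocated()) {
        tex->draw(0, 0, player.getWidth(), player.getHeight());
    }
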
@@ -290,11 +423,28 @@
 }
 
 void ofiPhoneVideoPlayer::update() {
-	if(videoPlayer != NULL) {
-		float t = ofGetElapsedTimef();
-		[(AVFoundationVideoPlayer *)videoPlayer updateWithElapsedTime:(t-lastUpdateTime)*playbackSpeed];
-		lastUpdateTime=t;
-	}
+    
+    bFrameNew = false; // default.
+    
+    if(videoPlayer == NULL) {
+        return;
+    }
+    
+    float t = ofGetElapsedTimef();
+    [(AVFoundationVideoPlayer *)videoPlayer updateWithElapsedTime:(t-lastUpdateTime)*playbackSpeed];
+    lastUpdateTime=t;
+    
+    bFrameNew = [(AVFoundationVideoPlayer *)videoPlayer hasNewFrame]; // check for new frame straight after the call to update.
+    
+    if(bFrameNew) {
+        /**
+         *  mark pixels to be updated.
+         *  pixels are then only updated if the getPixels() method is called,
+         *  internally or externally to this class.
+         *  this ensures the pixels are updated only once per frame.
+         */
+        bUpdatePixels = true;
+    }
 }
 
 float ofiPhoneVideoPlayer::getPosition() {
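
For reference, the caller-side flow these changes expect might look like the following (hypothetical test-app sketch; loadMovie()/play() are assumed from the ofBaseVideoPlayer interface, the movie path is a placeholder, and drawing would use getTexture() as sketched above):

    // Hypothetical usage sketch exercising the changed code paths (not part of this commit).
    // testApp is assumed to be declared in testApp.h, as usual in an openFrameworks project.
    ofiPhoneVideoPlayer player;

    void testApp::setup() {
        player.loadMovie("movies/sample.m4v");   // placeholder path
        player.play();
    }

    void testApp::update() {
        player.update();                         // resets/latches bFrameNew, marks pixels stale on a new frame
        if(player.isFrameNew()) {
            unsigned char * pix = player.getPixels();  // converts at most once per new frame
            // ... analyse pix here only if pixel access is actually needed ...
        }
    }
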
