Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

updates to address changes from pull request discussion

Synchronous scrubbing now on by default
Synchronous scrubbing of RGB videos now correctly handles ARGB frames with vImagePermuteChannels_ARGB8888
Objective-C grabber now has destructor
changed video event to use abstract ofBaseVideoGrabber type for sender to support other sender types using the same event in the future
  • Loading branch information...
commit 5a30c64b688f12e5f1415f7428440d00424c79c5 1 parent a362e26
@obviousjim authored
View
75 libs/openFrameworks/video/QTKitMovieRenderer.m
@@ -234,6 +234,7 @@ - (BOOL) update
NSError* nsError = nil;
if(self.usePixels){
//pull the frame
+
NSDictionary *dict = [NSDictionary
dictionaryWithObjectsAndKeys:QTMovieFrameImageTypeCVPixelBufferRef, QTMovieFrameImageType,
[NSValue valueWithSize:movieSize], QTMovieFrameImageSize,
@@ -256,9 +257,9 @@ - (BOOL) update
CVPixelBufferRetain(theImage);
//DEBUG timecode
- CVAttachmentMode mode = kCVAttachmentMode_ShouldPropagate;
- CFDictionaryRef timeDictionary = CVBufferGetAttachment (theImage, kCVBufferMovieTimeKey, &mode);
- // NSLog(@"Image One current movie time: %f incoming frame time: %f", 1.0*_movie.currentTime.timeValue/_movie.currentTime.timeScale, [[(NSDictionary*)timeDictionary valueForKey:@"TimeValue"] floatValue] / [[(NSDictionary*)timeDictionary valueForKey:@"TimeScale"] floatValue]);
+// CVAttachmentMode mode = kCVAttachmentMode_ShouldPropagate;
+// CFDictionaryRef timeDictionary = CVBufferGetAttachment (theImage, kCVBufferMovieTimeKey, &mode);
+// NSLog(@"Image One current movie time: %f incoming frame time: %f", 1.0*_movie.currentTime.timeValue/_movie.currentTime.timeScale, [[(NSDictionary*)timeDictionary valueForKey:@"TimeValue"] floatValue] / [[(NSDictionary*)timeDictionary valueForKey:@"TimeScale"] floatValue]);
if(self.useTexture){
if(_latestTextureFrame != NULL){
@@ -270,7 +271,6 @@ - (BOOL) update
OSErr err = CVOpenGLTextureCacheCreateTextureFromImage(NULL, _textureCache, _latestPixelFrame, NULL, &_latestTextureFrame);
if(err != noErr){
NSLog(@"Error creating OpenGL texture %d ", err);
-
return NO;
}
}
@@ -291,7 +291,6 @@ - (BOOL) update
return NO;
}
-
if(_latestTextureFrame != NULL){
CVOpenGLTextureRelease(_latestTextureFrame);
}
@@ -324,8 +323,8 @@ - (BOOL) update
}
//DEBUG timecode
- CVAttachmentMode mode = kCVAttachmentMode_ShouldPropagate;
- CFDictionaryRef timeDictionary = CVBufferGetAttachment (_latestPixelFrame, kCVBufferMovieTimeKey, &mode);
+// CVAttachmentMode mode = kCVAttachmentMode_ShouldPropagate;
+// CFDictionaryRef timeDictionary = CVBufferGetAttachment (_latestPixelFrame, kCVBufferMovieTimeKey, &mode);
// NSLog(@"movie time: %f incoming frame time: %f", 1.0*_movie.currentTime.timeValue/_movie.currentTime.timeScale, [[(NSDictionary*)timeDictionary valueForKey:@"TimeValue"] floatValue] / [[(NSDictionary*)timeDictionary valueForKey:@"TimeScale"] floatValue]);
//if we are using a texture, create one from the texture cache
@@ -415,11 +414,12 @@ - (void) pixels:(unsigned char*) outbuf
//CoreVideo creates alpha video in the format ARGB, and openFrameworks expects RGBA,
//so we need to swap the alpha around using a vImage permutation
if(self.useAlpha){
- vImage_Buffer src = { CVPixelBufferGetBaseAddress(_latestPixelFrame),
- CVPixelBufferGetHeight(_latestPixelFrame),
- CVPixelBufferGetWidth(_latestPixelFrame),
- CVPixelBufferGetBytesPerRow(_latestPixelFrame)
- };
+ vImage_Buffer src = {
+ CVPixelBufferGetBaseAddress(_latestPixelFrame),
+ CVPixelBufferGetHeight(_latestPixelFrame),
+ CVPixelBufferGetWidth(_latestPixelFrame),
+ CVPixelBufferGetBytesPerRow(_latestPixelFrame)
+ };
vImage_Buffer dest = { outbuf, movieSize.height, movieSize.width, movieSize.width*4 };
uint8_t permuteMap[4] = { 1, 2, 3, 0 }; //swizzle the alpha around to the end to make ARGB -> RGBA
vImage_Error err = vImagePermuteChannels_ARGB8888(&src, &dest, permuteMap, 0);
@@ -431,22 +431,39 @@ - (void) pixels:(unsigned char*) outbuf
//and the ofQTKitPlayer will have created a buffer of size movieSize.width * movieSize.height * 3
//so we can just copy them straight into the outbuffer
else {
- size_t dstBytesPerRow = movieSize.width * 3;
- if (CVPixelBufferGetBytesPerRow(_latestPixelFrame) == dstBytesPerRow) {
- memcpy(outbuf, CVPixelBufferGetBaseAddress(_latestPixelFrame), dstBytesPerRow*CVPixelBufferGetHeight(_latestPixelFrame));
- }
- else {
- unsigned char *dst = outbuf;
- unsigned char *src = (unsigned char*)CVPixelBufferGetBaseAddress(_latestPixelFrame);
- size_t srcBytesPerRow = CVPixelBufferGetBytesPerRow(_latestPixelFrame);
- size_t copyBytesPerRow = MIN(dstBytesPerRow, srcBytesPerRow); // should always be dstBytesPerRow but be safe
- int y;
- for(y = 0; y < movieSize.height; y++){
- memcpy(dst, src, copyBytesPerRow);
- dst += dstBytesPerRow;
- src += srcBytesPerRow;
- }
- }
+ //NSLog(@"incoming frame is %ld RGBA is %ld RGB is %ld", CVPixelBufferGetPixelFormatType(_latestPixelFrame), kCVPixelFormatType_32ARGB, kCVPixelFormatType_24RGB);
+ //with frameImageAtTime: in synchronous scrub mode the frames come in 32ARGB even if 24RGB is enforced, so prepare for this case
+ if(CVPixelBufferGetPixelFormatType(_latestPixelFrame) == kCVPixelFormatType_32ARGB){
+ vImage_Buffer src = {
+ CVPixelBufferGetBaseAddress(_latestPixelFrame),
+ CVPixelBufferGetHeight(_latestPixelFrame),
+ CVPixelBufferGetWidth(_latestPixelFrame),
+ CVPixelBufferGetBytesPerRow(_latestPixelFrame)
+ };
+ vImage_Buffer dest = { outbuf, movieSize.height, movieSize.width, movieSize.width*3 };
+ vImageConvert_ARGB8888toRGB888(&src, &dest, 0);
+ }
+ else{
+ if (CVPixelBufferGetPixelFormatType(_latestPixelFrame) != kCVPixelFormatType_24RGB){
+ NSLog(@"QTKitMovieRenderer - Frame pixelformat not kCVPixelFormatType_24RGB: %d, instead %ld",kCVPixelFormatType_24RGB,CVPixelBufferGetPixelFormatType(_latestPixelFrame));
+ }
+ size_t dstBytesPerRow = movieSize.width * 3;
+ if (CVPixelBufferGetBytesPerRow(_latestPixelFrame) == dstBytesPerRow) {
+ memcpy(outbuf, CVPixelBufferGetBaseAddress(_latestPixelFrame), dstBytesPerRow*CVPixelBufferGetHeight(_latestPixelFrame));
+ }
+ else {
+ unsigned char *dst = outbuf;
+ unsigned char *src = (unsigned char*)CVPixelBufferGetBaseAddress(_latestPixelFrame);
+ size_t srcBytesPerRow = CVPixelBufferGetBytesPerRow(_latestPixelFrame);
+ size_t copyBytesPerRow = MIN(dstBytesPerRow, srcBytesPerRow); // should always be dstBytesPerRow but be safe
+ int y;
+ for(y = 0; y < movieSize.height; y++){
+ memcpy(dst, src, copyBytesPerRow);
+ dst += dstBytesPerRow;
+ src += srcBytesPerRow;
+ }
+ }
+ }
}
CVPixelBufferUnlockBaseAddress(_latestPixelFrame, kCVPixelBufferLock_ReadOnly);
}
@@ -492,7 +509,6 @@ - (void) unbindTexture
- (void) setRate:(float) rate
{
-
[_movie setRate:rate];
}
@@ -508,7 +524,6 @@ - (void) setVolume:(float) volume
- (float) volume
{
- [_movie stop];
return [_movie volume];
}
View
3  libs/openFrameworks/video/ofQTKitGrabber.h
@@ -10,11 +10,10 @@
@class QTKitVideoGrabber;
#endif
-class ofQTKitGrabber; //quick forward declare for the event reference
class ofVideoSavedEventArgs : public ofEventArgs {
public:
string videoPath;
- ofQTKitGrabber* grabber;
+ ofBaseVideoGrabber* grabber;
string error; //"" if there is no error
};
View
26 libs/openFrameworks/video/ofQTKitGrabber.mm
@@ -27,11 +27,9 @@ @interface QTKitVideoGrabber : QTCaptureVideoPreviewOutput
ofQTKitGrabber* grabber;
BOOL hasNewFrame;
- BOOL isRunning;
BOOL isFrameNew;
BOOL isRecording;
BOOL isRecordReady;
- BOOL verbose;
BOOL useAudio;
}
@@ -47,7 +45,6 @@ @interface QTKitVideoGrabber : QTCaptureVideoPreviewOutput
@property(readonly) BOOL isFrameNew;
@property(readonly) BOOL isRecording;
@property(readonly) BOOL isRecordReady;
-@property(nonatomic, readwrite) BOOL verbose;
@property(nonatomic, readwrite) BOOL useAudio;
@property(nonatomic, readwrite) ofQTKitGrabber* grabber; //for event reference setting
@@ -111,7 +108,6 @@ @implementation QTKitVideoGrabber
@synthesize isFrameNew;
@synthesize isRecording;
@synthesize isRecordReady;
-@synthesize verbose;
@synthesize useAudio;
@synthesize grabber;
@@ -135,7 +131,6 @@ + (NSInteger) getIndexofStringInArray:(NSArray*)someArray stringToFind:(NSString
return index;
}
}
-
return -1;
}
@@ -144,7 +139,6 @@ + (NSArray*) listVideoDevices
NSArray* videoDevices = [[QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]
arrayByAddingObjectsFromArray:[QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed]];
- NSLog(@"ofQTKitGrabber listing video devices");
[self enumerateArray:videoDevices];
return videoDevices;
@@ -155,7 +149,6 @@ + (NSArray*) listAudioDevices
{
NSArray* audioDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeSound];
- NSLog(@"ofQTKitGrabber listing audio devices");
[self enumerateArray:audioDevices];
return audioDevices;
@@ -194,7 +187,6 @@ - (id) initWithWidth:(NSInteger)_width
forKey:(NSString*)kCVPixelBufferWidthKey];
[pixelBufferAttributes setValue:[NSNumber numberWithInt:height]
forKey:(NSString*)kCVPixelBufferHeightKey];
- cout << "Setting width and height to " << width << " " << height << endl;
}
if(pixelBufferAttributes.count > 0){
@@ -572,6 +564,20 @@ - (BOOL) isRunning
return self.session && self.session.isRunning;
}
+- (void) dealloc
+{
+ if(self.isRunning){
+ [self stop];
+ }
+
+ if(cvFrame != NULL){
+ CVPixelBufferRelease(cvFrame);
+ cvFrame = NULL;
+ }
+
+ [super dealloc];
+}
+
@end
//C++ Wrapper class:
@@ -874,9 +880,7 @@ - (BOOL) isRunning
}
void ofQTKitGrabber::setVerbose(bool bTalkToMe){
- if(confirmInit()){
- grabber.verbose = bTalkToMe;
- }
+ //Now handled by ofLogVerbose()
}
void ofQTKitGrabber::videoSettings(){
View
2  libs/openFrameworks/video/ofQTKitPlayer.h
@@ -97,7 +97,7 @@ class ofQTKitPlayer : public ofBaseVideoPlayer {
float speed;
ofQTKitDecodeMode decodeMode;
string moviePath;
-
+ bool bSynchronousScrubbing;
//pulls texture data from the movie renderer into our ofTexture
void updateTexture();
void reallocatePixels();
View
36 libs/openFrameworks/video/ofQTKitPlayer.mm
@@ -6,6 +6,8 @@
bNewFrame = false;
duration = 0;
speed = 0;
+ //default this to true so the player update behavior matches ofQuicktimePlayer
+ bSynchronousScrubbing = true;
//ofQTKitPlayer supports RGB and RGBA
pixelFormat = OF_PIXELS_RGB;
}
@@ -41,8 +43,9 @@
allowTexture:useTexture
allowPixels:usePixels
allowAlpha:useAlpha];
-
+
if(success){
+ moviePlayer.synchronousScrub = bSynchronousScrubbing;
reallocatePixels();
moviePath = movieFilePath;
duration = moviePlayer.duration;
@@ -111,24 +114,31 @@
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
[moviePlayer gotoBeginning];
-
+
[pool release];
+ if(bSynchronousScrubbing){
+ update();
+ }
+
}
void ofQTKitPlayer::nextFrame(){
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
[moviePlayer stepForward];
-
+
[pool release];
-
+ if(bSynchronousScrubbing){
+ update();
+ }
+
}
void ofQTKitPlayer::previousFrame(){
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
[moviePlayer stepBackward];
-
+ bNewFrame = bHavePixelsChanged = bSynchronousScrubbing;
[pool release];
}
@@ -238,6 +248,10 @@
moviePlayer.position = pct;
[pool release];
+
+ if(bSynchronousScrubbing){
+ update();
+ }
}
void ofQTKitPlayer::setVolume(float volume) {
@@ -264,11 +278,11 @@
if(moviePlayer == NULL) return;
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
-
moviePlayer.frame = frame % moviePlayer.frameCount;
-
[pool release];
-
+ if(bSynchronousScrubbing){
+ update();
+ }
}
int ofQTKitPlayer::getCurrentFrame() {
@@ -395,18 +409,16 @@
}
void ofQTKitPlayer::setSynchronousScrubbing(bool synchronous){
+ bSynchronousScrubbing = synchronous;
if(moviePlayer != nil){
moviePlayer.synchronousScrub = synchronous;
}
}
bool ofQTKitPlayer::getSynchronousScrubbing(){
- if(moviePlayer != nil){
- return moviePlayer.synchronousScrub;
- }
+ return bSynchronousScrubbing;
}
-
void ofQTKitPlayer::reallocatePixels(){
if(pixelFormat == OF_PIXELS_RGBA){
pixels.allocate(moviePlayer.movieSize.width, moviePlayer.movieSize.height, OF_IMAGE_COLOR_ALPHA);
Please sign in to comment.
Something went wrong with that request. Please try again.