
updates corresponding to HapQ+A

updated QT and hap test app to support HapQ + A :: updated VVBufferPool and its sample additions to include support for HapQ + A :: updated the arbitrary GL texture to VVBuffer test app to compensate for premultiplied image data
1 parent 8ea9a08 commit f7f9220c92baafc3a8db8294c2a2fbd3749bf626 @mrRay committed Apr 9, 2016
@@ -70,7 +70,8 @@ - (void) loadTheImageIntoATexture {
hasAlpha:YES
isPlanar:NO
colorSpaceName:NSCalibratedRGBColorSpace
- bitmapFormat:0
+ bitmapFormat:0 // premultiplied, but alpha is last
+ //bitmapFormat:NSAlphaNonpremultipliedBitmapFormat // can't use this- graphics contexts can't use non-premultiplied bitmap reps as a backing
bytesPerRow:32 * (long)imgSize.width / 8
bitsPerPixel:32];
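
For context, the rep created above has to stay premultiplied because it's about to be used as the backing for a graphics context. A minimal sketch of that drawing step, using only APIs visible in the surrounding context (illustrative- not the file's exact code):

	NSGraphicsContext *origCtx = [NSGraphicsContext currentContext];
	//	graphicsContextWithBitmapImageRep: won't accept a non-premultiplied rep,
	//	which is why bitmapFormat above can't be NSAlphaNonpremultipliedBitmapFormat
	NSGraphicsContext *ctx = [NSGraphicsContext graphicsContextWithBitmapImageRep:imgRep];
	[NSGraphicsContext setCurrentContext:ctx];
	//	...draw the source image into the rep here...
	[NSGraphicsContext setCurrentContext:origCtx];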
if (imgRep==nil) {
@@ -94,6 +95,9 @@ - (void) loadTheImageIntoATexture {
[NSGraphicsContext setCurrentContext:origCtx];
uint8_t *rdPtr = [imgRep bitmapData];
size_t rdBytesPerRow = [imgRep bytesPerRow];
+
+ // the rep we just drew into was premultiplied, and we have to fix that before uploading it
+ [imgRep unpremultiply];
// upload the bytes on the bitmap to a GL texture
textureSize = imgSize;
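
The unpremultiply call above is presumably a category method on NSBitmapImageRep added by this framework (AppKit doesn't ship one). A rough sketch of what such a pass does to 8-bit RGBA data- an illustration of the technique, not the category's actual implementation:

	//	divide each color channel by alpha to undo premultiplication
	static void UnpremultiplyRGBA8(uint8_t *base, size_t bytesPerRow, NSSize size)	{
		for (NSUInteger row=0; row<(NSUInteger)size.height; ++row)	{
			uint8_t		*px = base + row*bytesPerRow;
			for (NSUInteger col=0; col<(NSUInteger)size.width; ++col, px+=4)	{
				uint8_t		a = px[3];
				if (a!=0 && a!=255)	{
					px[0] = (uint8_t)MIN(255, ((int)px[0]*255)/a);
					px[1] = (uint8_t)MIN(255, ((int)px[1]*255)/a);
					px[2] = (uint8_t)MIN(255, ((int)px[2]*255)/a);
				}
			}
		}
	}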
@@ -6,7 +6,8 @@
-@interface AppDelegate : NSObject <NSApplicationDelegate> {
+@interface QTAndHapTestAppDelegate : NSObject <NSApplicationDelegate> {
+ IBOutlet NSWindow *window;
IBOutlet VVBufferGLView *glView;
NSOpenGLContext *sharedContext;
CVDisplayLinkRef displayLink;
@@ -1,9 +1,9 @@
-#import "AppDelegate.h"
+#import "QTAndHapTestAppDelegate.h"
-@implementation AppDelegate
+@implementation QTAndHapTestAppDelegate
- (id) init {
@@ -64,7 +64,7 @@ - (IBAction) openDocument:(id)sender {
[op setDirectoryURL:[NSURL fileURLWithPath:importDir]];
[op
- beginSheetModalForWindow:nil
+ beginSheetModalForWindow:window
completionHandler:^(NSInteger result) {
if (result == NSFileHandlingPanelOKButton) {
// get the inspected object
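
The diff context cuts off here- a typical completion handler for this panel would just pull the URL off the panel and hand it off, roughly like the sketch below (the receiver and its loadFileAtPath: wiring are assumptions, not this file's actual code):

	//	sketch- retrieve the chosen path & hand it off (movieSource is a hypothetical name)
	NSString *selectedPath = [[op URL] path];
	if (selectedPath != nil)
		[movieSource loadFileAtPath:selectedPath];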
@@ -108,7 +108,7 @@ CVReturn displayLinkCallback(CVDisplayLinkRef displayLink,
{
//NSLog(@"%s",__func__);
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
- [(AppDelegate *)displayLinkContext renderCallback];
+ [(QTAndHapTestAppDelegate *)displayLinkContext renderCallback];
[pool release];
return kCVReturnSuccess;
}
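
For context, a callback with this signature gets installed on the display link at creation time- a sketch of the usual CoreVideo wiring, assuming setup happens during app launch:

	//	typical CVDisplayLink setup for the callback above
	CVDisplayLinkCreateWithActiveCGDisplays(&displayLink);
	CVDisplayLinkSetOutputCallback(displayLink, displayLinkCallback, self);
	CVDisplayLinkStart(displayLink);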
@@ -39,7 +39,7 @@ - (void) loadFileAtPath:(NSString *)p {
glContext = [[NSOpenGLContext alloc] initWithFormat:pf shareContext:[[VVBufferPool globalVVBufferPool] sharedContext]];
// make a new hapQ swizzler
hapQSwizzler = [[ISFGLScene alloc] initWithSharedContext:[[VVBufferPool globalVVBufferPool] sharedContext]];
- [hapQSwizzler useFile:[[NSBundle mainBundle] pathForResource:@"ScaledCoCgYtoRGBA" ofType:@"fs"]];
+ //[hapQSwizzler useFile:[[NSBundle mainBundle] pathForResource:@"ScaledCoCgYtoRGBA" ofType:@"fs"]];
// make a movie
OSStatus err = noErr;
NSURL *pathURL = [NSURL fileURLWithPath:p];
@@ -57,6 +57,20 @@ - (void) loadFileAtPath:(NSString *)p {
NSLog(@"\t\terr %ld at A in %s",err,__func__);
return;
}
+
+ // get the hap codec type- if it's HapQ or HapQ alpha, we're going to need to load a shader to convert the image...
+ switch (HapCodecType([movie quickTimeMovie])) {
+ case kHapCodecSubType:
+ case kHapAlphaCodecSubType:
+ case kHapAOnlyCodecSubType:
+ break;
+ case kHapYCoCgCodecSubType:
+ [hapQSwizzler useFile:[[NSBundle mainBundle] pathForResource:@"ScaledCoCgYtoRGBA" ofType:@"fs"]];
+ break;
+ case kHapYCoCgACodecSubType:
+ [hapQSwizzler useFile:[[NSBundle mainBundle] pathForResource:@"ScaledCoCgYplusAtoRGBA" ofType:@"fs"]];
+ break;
+ }
}
// else this movie doesn't have a hap video track- we're going to use a standard visual context
else {
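
For reference, the subtypes in the switch above map to the Hap variants like so (fourCCs as listed in the Hap QuickTime codec headers, to the best of my recollection- worth double-checking against those headers):

	//	kHapCodecSubType        'Hap1'	Hap (DXT1)- no shader needed
	//	kHapAlphaCodecSubType   'Hap5'	Hap Alpha (DXT5)- no shader needed
	//	kHapAOnlyCodecSubType   'HapA'	Hap Alpha-Only (RGTC1)- no shader needed
	//	kHapYCoCgCodecSubType   'HapY'	Hap Q (scaled YCoCg in DXT5)- needs the swizzle shader
	//	kHapYCoCgACodecSubType  'HapM'	Hap Q + Alpha (YCoCg DXT5 plus an RGTC1 alpha plane)- needs the two-input shader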
@@ -95,23 +109,32 @@ - (VVBuffer *) allocNewFrame {
// if there's an image buffer
if (imgRef != NULL) {
CFTypeID imgRefType = CFGetTypeID(imgRef);
+ OSType imgPixelFormat = CVPixelBufferGetPixelFormatType(imgRef);
// if the image buffer is already a GL texture...
if (imgRefType == CVOpenGLTextureGetTypeID()) {
// just wrap the CoreVideo GL texture with VVBuffer. AVFoundation rendering is handled the same way- use apple APIs to get a CV GL texture, then create a VVBuffer from it.
returnMe = [[VVBufferPool globalVVBufferPool] allocBufferForCVGLTex:imgRef];
}
// else if the image buffer is a pixel buffer (hap)
else if (imgRefType == CVPixelBufferGetTypeID()) {
- // if this is hapQ, we have to convert the image from YCoCg to RGBA. we do this with an ISF file, because that's really easy.
- OSType imgPixelFormat = CVPixelBufferGetPixelFormatType(imgRef);
if (imgPixelFormat == kHapPixelFormatTypeYCoCg_DXT5) {
- VVBuffer *yCoCg = [[VVBufferPool globalVVBufferPool] allocTexRangeForHapCVImageBuffer:imgRef];
+ VVBuffer *yCoCg = [[VVBufferPool globalVVBufferPool] allocTexRangeForPlane:0 ofHapCVImageBuffer:imgRef];
[hapQSwizzler setFilterInputImageBuffer:yCoCg];
returnMe = [hapQSwizzler allocAndRenderToBufferSized:[yCoCg srcRect].size];
VVRELEASE(yCoCg);
}
- else
- returnMe = [[VVBufferPool globalVVBufferPool] allocTexRangeForHapCVImageBuffer:imgRef];
+ else if (imgPixelFormat == kHapPixelFormatType_YCoCg_DXT5_A_RGTC1) {
+ VVBuffer *yCoCg = [[VVBufferPool globalVVBufferPool] allocTexRangeForPlane:0 ofHapCVImageBuffer:imgRef];
+ VVBuffer *alpha = [[VVBufferPool globalVVBufferPool] allocTexRangeForPlane:1 ofHapCVImageBuffer:imgRef];
+ [hapQSwizzler setBuffer:alpha forInputImageKey:@"alphaImage"];
+ [hapQSwizzler setFilterInputImageBuffer:yCoCg];
+ returnMe = [hapQSwizzler allocAndRenderToBufferSized:[yCoCg srcRect].size];
+ VVRELEASE(alpha);
+ VVRELEASE(yCoCg);
+ }
+ else {
+ returnMe = [[VVBufferPool globalVVBufferPool] allocTexRangeForPlane:0 ofHapCVImageBuffer:imgRef];
+ }
}
// else- dunno
else {
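
The HapQ+A path above uploads each plane of the pixel buffer as its own compressed GL texture, binds the alpha plane to the shader's "alphaImage" input, and renders the swizzle into an ordinary RGBA buffer. A sketch of a typical call site, assuming the renderCallback drives it (movieSource is a hypothetical name for the object implementing allocNewFrame; drawBuffer: is VVBufferGLView's draw method):

	//	sketch- pull a frame, push it at the view, release it
	VVBuffer *newFrame = [movieSource allocNewFrame];
	if (newFrame != nil)	{
		[glView drawBuffer:newFrame];
		VVRELEASE(newFrame);
	}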
@@ -0,0 +1,37 @@
+/*{
+ "DESCRIPTION": "swizzles CoCgY (Hap Q) plus an additional A tex to RGBA",
+ "CREDIT": "by zoidberg",
+ "CATEGORIES": [
+ "TEST-GLSL FX"
+ ],
+ "INPUTS": [
+ {
+ "NAME": "inputImage",
+ "TYPE": "image"
+ },
+ {
+ "NAME": "alphaImage",
+ "TYPE": "image"
+ }
+ ]
+}*/
+
+
+
+const vec4 offsets = vec4(-0.50196078431373, -0.50196078431373, 0.0, 0.0);
+
+void main()
+{
+ vec4 cocgsy = IMG_THIS_NORM_PIXEL(inputImage);
+ vec4 theAlpha = IMG_THIS_NORM_PIXEL(alphaImage);
+
+ cocgsy += offsets;
+
+ float scale = ( cocgsy.z * ( 255.0 / 8.0 ) ) + 1.0;
+
+ float Co = cocgsy.x / scale;
+ float Cg = cocgsy.y / scale;
+ float Y = cocgsy.w;
+
+ gl_FragColor = vec4(Y + Co - Cg, Y + Cg, Y - Co - Cg, theAlpha.r);
+}
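
A note on the math in this new shader (ScaledCoCgYplusAtoRGBA.fs, the file loaded by the switch statement earlier): this is the standard scaled-YCoCg decode for DXT5-packed YCoCg data. The first texture stores Co, Cg, a per-block scale, and Y in its r, g, b, and a channels. Co and Cg are biased by 128/255 (about 0.50196- hence the offsets vector), the scale is quantized as (s - 1) * 8/255 (hence s = z * (255/8) + 1 recovers it), and the final conversion is:

	R = Y + Co - Cg
	G = Y + Cg
	B = Y - Co - Cg

with alpha sampled from the red channel of the second (RGTC1) texture.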