Trying to address camera distortion again. Mostly in ARCam
- remove zoom level; turns out iOS 11 GM is mostly normalized, with only a minimal difference that can mostly be corrected in the shader.
- set new zoom level in the shader. Still probably needs a device-independent value (see the sketch below).
- add `updatePlaneTexCoords` to try to address distortion.
- try building the FBO with a width and height scaled according to the camera capture dimensions.
Joseph Chow committed Sep 21, 2017
1 parent 8d1e2f2 commit 6eb4873
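
A minimal sketch of how the device-independent value mentioned above might be derived, in place of the 0.05 hardcoded in the shader further down. The scaling rule here is an assumption (treat 0.05 as the value measured on one device and scale it by aspect-ratio deviation); only `ARCommon::getNativeAspectRatio()` comes from the existing codebase:

    // hypothetical sketch, not part of this commit: derive the shader's
    // zoom ratio from the device instead of hardcoding 0.05.
    float deviceIndependentZoomRatio(){
        float captureAspect = 1280.0f / 720.0f;                 // ARKit's capture aspect
        float deviceAspect  = ARCommon::getNativeAspectRatio(); // existing helper
        // assumption: the correction scales with how far the device aspect
        // deviates from the capture aspect.
        return 0.05f * (deviceAspect / captureAspect);
    }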
Showing 4 changed files with 53 additions and 20 deletions.
Binary file not shown.
8 changes: 8 additions & 0 deletions src/ARCam.h
@@ -21,6 +21,12 @@ namespace ARCore {

//! This class manages dealing with the camera image coming in from ARKit.
class ARCam {
ofVec2f nativeDimensions;
ofRectangle cam,screen;

int cFboWidth;
int cFboHeight;
float captureRatio;

bool debugMode;

@@ -120,6 +126,8 @@ namespace ARCore {
//! draws the camera frame.
void draw();

void updatePlaneTexCoords();

//! retrieves the current lighting conditions that ARKit is seeing.
ARLightEstimate* getLightingConditions();

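The three members added above (`cFboWidth`, `cFboHeight`, `captureRatio`) are declared but not yet populated anywhere in this commit's .mm changes. A minimal sketch of how they might be wired up, assuming the aspect-fit rectangle computed in the ARCam.mm constructor below is the intended source:

    // hypothetical sketch: fill the new members from the aspect-fit rect.
    captureRatio = 1280.0f / 720.0f;      // ARKit capture aspect
    cFboWidth    = (int) cam.getWidth();  // 'cam' after scaleTo(screen, OF_ASPECT_RATIO_KEEP)
    cFboHeight   = (int) cam.getHeight();
    cameraFbo.allocate(cFboWidth, cFboHeight, GL_RGBA);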
52 changes: 35 additions & 17 deletions src/ARCam.mm
@@ -40,25 +40,21 @@

// ========== CAMERA CORRECTION ============= //

ofVec2f nativeDimensions = ARCommon::getDeviceNativeDimensions();

// this plays into adjusting the camera image to fit the correct perspective.
// this should THEORETICALLY be your device's aspect ratio, which is what the default is.

zoomLevel = ARCommon::getNativeAspectRatio();

// get the name of the current device
deviceType = [[UIDevice currentDevice] model];

// setup zooming if we're not on an iPhone
// TODO how does this affect things on a smaller-than-standard iPhone, i.e. the SE?
// TODO maybe we should try to re-orient in the shader world.
if([deviceType isEqualToString:@"iPad"]){
needsPerspectiveAdjustment = true;
}

rotation.makeRotationMatrix(-90, ofVec3f(0,0,1));

// try to fit the camera capture width within the device's viewport.
cam = ofRectangle(0,0,1280,720);
screen = ofRectangle(0,0,ofGetWindowWidth(),ofGetWindowHeight());
cam.scaleTo(screen,OF_ASPECT_RATIO_KEEP);

// ========== SHADER SETUP ============= //
// setup plane and shader in order to draw the camera feed
cameraPlane = ofMesh::plane(ofGetWindowWidth(),ofGetWindowHeight());
@@ -67,10 +63,10 @@
cameraConvertShader.setupShaderFromSource(GL_FRAGMENT_SHADER, ARShaders::camera_convert_fragment);
cameraConvertShader.linkProgram();


cameraFbo.allocate(cam.getWidth(),cam.getHeight(), GL_RGBA);


// going with a default of 1280x720, as that seems to be a consistent value that ARKit captures at
// regardless of device; also, a contributor suggested POT textures are better.
cameraFbo.allocate(1280,720, GL_RGBA);
}

void ARCam::setCameraNearClip(float near){
@@ -137,6 +133,25 @@
return currentFrame.camera.trackingState;
}

void ARCam::updatePlaneTexCoords(){

// see https://developer.apple.com/documentation/arkit/arframe/2923543-displaytransformfororientation?language=objc
// this is more or less taken from Apple's default project example.
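// displayTransformForOrientation returns the affine transform that maps the
// camera image's normalized coordinates into normalized viewport coordinates;
// inverting it maps viewport space back into image space, which is what the
// plane's tex coords need so the capture isn't stretched at this orientation.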

CGAffineTransform displayToCameraTransform = CGAffineTransformInvert([currentFrame displayTransformForOrientation:orientation viewportSize:viewportSize]);

for (int index = 0; index < 4; index++) {
ofVec2f texCoord = cameraPlane.getTexCoords()[index];

CGPoint textureCoord = CGPointMake(texCoord.x, texCoord.y);
CGPoint transformedCoord = CGPointApplyAffineTransform(textureCoord, displayToCameraTransform);
cameraPlane.setTexCoord(index, ofVec2f(transformedCoord.x, transformedCoord.y));
}
}

void ARCam::logTrackingState(){

if(debugMode){
@@ -171,9 +186,9 @@


currentFrame = session.currentFrame;

trackingState = currentFrame.camera.trackingState;


if(debugMode){
// update state and reason
trackingStateReason = currentFrame.camera.trackingStateReason;
@@ -187,7 +202,10 @@

// only act if we have the current frame
if(currentFrame){


// update tex coords to try to better scale the image coming from the camera.

updatePlaneTexCoords();

// do light estimates
if (currentFrame.lightEstimate) {
@@ -260,7 +278,7 @@
// ========= ROTATE IMAGES ================= //

cameraConvertShader.begin();
cameraConvertShader.setUniformMatrix4f("rotationMatrix", rotation);

cameraConvertShader.end();

@@ -269,7 +287,7 @@

int width = (int) CVPixelBufferGetWidth(pixelBuffer);
int height = (int) CVPixelBufferGetHeight(pixelBuffer);

CbCrTexture = createTextureFromPixelBuffer(pixelBuffer, 1, GL_LUMINANCE_ALPHA, width / 2, height / 2);


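The CbCr texture above is created at half the luma width and height because ARKit delivers 4:2:0 biplanar YCbCr frames, where the chroma plane has half the resolution of the luma plane in each dimension. Rather than halving by hand, the plane sizes can be queried from CoreVideo directly; a sketch, assuming the usual biplanar format ARKit provides:

    // sketch: query the chroma plane's size instead of dividing by hand.
    size_t chromaWidth  = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);  // == width / 2
    size_t chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); // == height / 2
    CbCrTexture = createTextureFromPixelBuffer(pixelBuffer, 1, GL_LUMINANCE_ALPHA,
                                               (int)chromaWidth, (int)chromaHeight);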
13 changes: 10 additions & 3 deletions src/ARShaders.h
@@ -34,10 +34,17 @@ const std::string camera_convert_vertex = STRINGIFY(
// if we need to correct perspective distortion,
if(needsCorrection){

// fix scaling?
// https://stackoverflow.com/questions/24651369/blend-textures-of-different-size-coordinates-in-glsl/24654919#24654919

/**
this method didn't really work -
https://stackoverflow.com/questions/24651369/blend-textures-of-different-size-coordinates-in-glsl/24654919#24654919
hardcoding 0.05 for now.
Note that with the iOS 11 Golden Master, the image is almost
the same as it is without this correction to the UVs.
*/
vec2 fromCenter = vUv - scale;
vec2 scaleFromCenter = fromCenter * vec2(zoomRatio);
vec2 scaleFromCenter = fromCenter * vec2(0.05);

vUv -= scaleFromCenter;

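To see what the hardcoded 0.05 actually does, trace one corner of the plane through the correction. This assumes the `scale` uniform holds the texture center, (0.5, 0.5), which is what the subtraction implies:

    // worked check of the shader math on the bottom-left corner, in C++ for clarity.
    ofVec2f vUv(0.0f, 0.0f);
    ofVec2f fromCenter      = vUv - ofVec2f(0.5f, 0.5f); // (-0.5, -0.5)
    ofVec2f scaleFromCenter = fromCenter * 0.05f;        // (-0.025, -0.025)
    vUv -= scaleFromCenter;                              // (0.025, 0.025)

Every UV moves 5% of its distance from the center inward, so the plane samples the middle 95% of the camera image: a slight zoom that crops the edges rather than a true distortion correction.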
