Skip to content

Commit

Permalink
Release version 1.4 revision 3
Browse files Browse the repository at this point in the history
Using tarball (with SHA1):
9bf41ca0da1949738e9b3d82e3a93f951f7cea0d *SARndbox-1.4.tar.gz
  • Loading branch information
Doc-Ok authored and melanopsis committed Jun 17, 2019
1 parent 1134a84 commit 3b3342c
Show file tree
Hide file tree
Showing 33 changed files with 858 additions and 566 deletions.
1,026 changes: 502 additions & 524 deletions CalibrateProjector.cpp

Large diffs are not rendered by default.

224 changes: 224 additions & 0 deletions CalibrateProjector.h
@@ -0,0 +1,224 @@
/***********************************************************************
CalibrateProjector - Utility to calculate the calibration transformation
of a projector into a Kinect-captured 3D space.
Copyright (c) 2012-2013 Oliver Kreylos
This file is part of the Augmented Reality Sandbox (SARndbox).
The Augmented Reality Sandbox is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
The Augmented Reality Sandbox is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with the Augmented Reality Sandbox; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***********************************************************************/

#ifndef CALIBRATEPROJECTOR_INCLUDED
#define CALIBRATEPROJECTOR_INCLUDED

#include <vector>
#include <Threads/Mutex.h>
#include <Threads/Cond.h>
#include <Threads/TripleBuffer.h>
#include <USB/Context.h>
#include <Math/Matrix.h>
#include <Geometry/Point.h>
#include <Geometry/AffineCombiner.h>
#include <Geometry/Plane.h>
#include <GL/gl.h>
#include <GL/GLColor.h>
#include <GL/GLObject.h>
#include <Images/ExtractBlobs.h>
#include <Vrui/Application.h>
#include <Vrui/Tool.h>
#include <Vrui/GenericToolFactory.h>
#include <Kinect/FrameBuffer.h>
#include <Kinect/Camera.h>

class CalibrateProjector:public Vrui::Application,public GLObject
	{
	/* Embedded classes: */
	private:
	typedef Kinect::FrameSource::DepthPixel DepthPixel; // Type for depth image pixels
	typedef Kinect::FrameSource::DepthCorrection::PixelCorrection PixelDepthCorrection; // Type for per-pixel depth correction factors
	typedef double Scalar; // Scalar type for points
	typedef Geometry::Point<Scalar,2> PPoint; // Point in 2D projection space
	typedef Geometry::Point<Scalar,3> OPoint; // Point in 3D object space
	typedef Geometry::Plane<Scalar,3> OPlane; // Plane in 3D object space
	
	struct DepthCentroidBlob:public Images::BboxBlob<Images::Blob<DepthPixel> > // Structure to calculate 3D centroids of blobs in depth image space
		{
		/* Embedded classes: */
		public:
		typedef DepthPixel Pixel;
		typedef Images::BboxBlob<Images::Blob<DepthPixel> > Base;
		
		struct Creator:public Base::Creator
			{
			/* Elements: */
			public:
			unsigned int frameSize[2]; // Size of depth images
			PixelDepthCorrection* pixelDepthCorrection; // Buffer of per-pixel depth correction coefficients
			Kinect::FrameSource::IntrinsicParameters::PTransform depthProjection; // Transformation from depth image space to camera space
			};
		
		/* Elements: */
		Kinect::FrameSource::IntrinsicParameters::PTransform::HVector c; // Accumulated centroid components (x, y, z) and total weight
		
		/* Private methods: */
		private:
		void accumulatePixel(unsigned int x,unsigned int y,const Pixel& pixel,const Creator& creator) // Adds one depth pixel's weighted position to the centroid accumulator
			{
			/* Calculate the pixel's center position and corrected depth value: */
			double px=double(x)+0.5;
			double py=double(y)+0.5;
			double pz=creator.pixelDepthCorrection[y*creator.frameSize[0]+x].correct(float(pixel));
			
			/* Unproject the pixel to calculate its centroid accumulation weight as camera-space z coordinate to the fourth: */
			const Kinect::FrameSource::IntrinsicParameters::PTransform::Matrix& m=creator.depthProjection.getMatrix();
			double weight=Math::sqr(Math::sqr((m(2,0)*px+m(2,1)*py+m(2,2)*pz+m(2,3))/(m(3,0)*px+m(3,1)*py+m(3,2)*pz+m(3,3))));
			
			/* Accumulate the weighted depth-space position and the total weight: */
			c[0]+=px*weight;
			c[1]+=py*weight;
			c[2]+=pz*weight;
			c[3]+=weight;
			}
		
		/* Constructors and destructors: */
		public:
		DepthCentroidBlob(unsigned int x,unsigned int y,const Pixel& pixel,const Creator& creator) // Creates a new blob from its first pixel
			:Base(x,y,pixel,creator)
			{
			/* Start a fresh accumulation with the blob's first pixel: */
			for(int i=0;i<4;++i)
				c[i]=0.0;
			accumulatePixel(x,y,pixel,creator);
			}
		
		/* Methods: */
		void addPixel(unsigned int x,unsigned int y,const Pixel& pixel,const Creator& creator) // Adds another pixel to the blob
			{
			Base::addPixel(x,y,pixel,creator);
			
			/* Accumulate the pixel into the centroid: */
			accumulatePixel(x,y,pixel,creator);
			}
		void merge(const DepthCentroidBlob& other,const Creator& creator) // Merges another blob into this one
			{
			Base::merge(other,creator);
			
			/* Combine the two blobs' centroid accumulators: */
			for(int i=0;i<4;++i)
				c[i]+=other.c[i];
			}
		OPoint getCentroid(const Kinect::FrameSource::IntrinsicParameters::PTransform& depthProjection) const // Returns the blob's centroid in camera space
			{
			return depthProjection.transform(c).toPoint();
			}
		};
	
	struct TiePoint // Tie point between 3D object space and 2D projector space
		{
		/* Elements: */
		public:
		PPoint p; // Projection-space point
		OPoint o; // Object-space point
		};
	
	class CaptureTool;
	typedef Vrui::GenericToolFactory<CaptureTool> CaptureToolFactory; // Tool class uses the generic factory class
	
	class CaptureTool:public Vrui::Tool,public Vrui::Application::Tool<CalibrateProjector> // Tool class to trigger tie point or background capture from a button press
		{
		friend class Vrui::GenericToolFactory<CaptureTool>;
		
		/* Elements: */
		private:
		static CaptureToolFactory* factory; // Pointer to the factory object for this class
		
		/* Constructors and destructors: */
		public:
		CaptureTool(const Vrui::ToolFactory* factory,const Vrui::ToolInputAssignment& inputAssignment);
		virtual ~CaptureTool(void);
		
		/* Methods from class Vrui::Tool: */
		virtual const Vrui::ToolFactory* getFactory(void) const;
		virtual void buttonCallback(int buttonSlotIndex,Vrui::InputDevice::ButtonCallbackData* cbData);
		};
	
	struct DataItem:public GLObject::DataItem // Per-OpenGL-context state
		{
		/* Elements: */
		public:
		GLuint blobImageTextureId; // ID of texture object holding the blob image
		GLfloat texMin[2],texMax[2]; // Texture coordinate rectangle to render the blob image texture
		unsigned int blobImageVersion; // Version number of blob image currently in texture object
		
		/* Constructors and destructors: */
		DataItem(void);
		virtual ~DataItem(void);
		};
	
	/* Elements: */
	private:
	int imageSize[2]; // Size of projector image
	int numTiePoints[2]; // Number of tie points in x and y
	OPlane basePlane; // Base plane of the configured sandbox area
	OPoint basePlaneCorners[4]; // Corners of the configured sandbox area
	unsigned int numTiePointFrames; // Number of frames to capture per tie point
	unsigned int numBackgroundFrames; // Number of frames to capture for background removal
	int blobMergeDepth; // Maximum depth difference between neighboring pixels in the same blob
	USB::Context usbContext; // USB device context
	Kinect::Camera* camera; // Pointer to Kinect camera defining the object space
	unsigned int frameSize[2]; // Size of the Kinect camera's depth frames in pixels
	PixelDepthCorrection* pixelDepthCorrection; // Buffer of per-pixel depth correction coefficients
	Kinect::FrameSource::IntrinsicParameters cameraIps; // Intrinsic parameters of the Kinect camera
	
	bool capturingBackground; // Flag if the Kinect camera is currently capturing a background frame
	
	Threads::TripleBuffer<Kinect::FrameBuffer> rawFrames; // Triple buffer for raw depth frames from the Kinect camera
	unsigned int* blobIdImage; // An image of blob IDs
	GLColor<GLubyte,3>* blobImage; // A texture image visualizing the current target tracking state
	unsigned int blobImageVersion; // Version counter for the blob image
	DepthCentroidBlob* currentBlob; // The currently selected target blob
	OPoint currentCentroid; // Centroid of the currently selected target blob in camera space
	
	bool capturingTiePoint; // Flag whether the main thread is currently capturing a tie point
	unsigned int numCaptureFrames; // Number of tie point frames still to capture
	Geometry::AffineCombiner<double,3> tiePointCombiner; // Combiner to average multiple tie point frames
	
	std::vector<TiePoint> tiePoints; // List of already captured tie points
	bool haveProjection; // Flag if a projection matrix has been computed
	Math::Matrix projection; // The current projection matrix
	
	/* Private methods: */
	void depthStreamingCallback(const Kinect::FrameBuffer& frameBuffer); // Callback receiving depth frames from the Kinect camera
	void backgroundCaptureCompleteCallback(Kinect::Camera& camera); // Callback when the Kinect camera is done capturing a background image
	
	/* Constructors and destructors: */
	public:
	CalibrateProjector(int& argc,char**& argv);
	virtual ~CalibrateProjector(void);
	
	/* Methods from Vrui::Application: */
	virtual void frame(void);
	virtual void display(GLContextData& contextData) const;
	
	/* Methods from GLObject: */
	virtual void initContext(GLContextData& contextData) const;
	
	/* New methods: */
	void startBackgroundCapture(void); // Starts capturing a background frame
	void startTiePointCapture(void); // Starts capturing an averaged depth frame
	void calcCalibration(void); // Calculates the calibration transformation after all tie points have been collected
	};

#endif
16 changes: 16 additions & 0 deletions FindBlobs.icpp
Expand Up @@ -2,6 +2,22 @@
FindBlobs - Helper function to extract all eight-connected blobs of
pixels from a frame that match an arbitrary property.
Copyright (c) 2010-2013 Oliver Kreylos

This file is part of the Augmented Reality Sandbox (SARndbox).

The Augmented Reality Sandbox is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.

The Augmented Reality Sandbox is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License along
with the Augmented Reality Sandbox; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***********************************************************************/

#define FINDBLOBS_IMPLEMENTATION
Expand Down
15 changes: 15 additions & 0 deletions HISTORY
Expand Up @@ -10,3 +10,18 @@ SARndbox-1.1:
SARndbox-1.2:
- Adapted code to APIs of Vrui-2.7 and Kinect-2.5.
- Changed types of all size variables to unsigned int.

SARndbox-1.3:
- Fixed projector calibration code.
- Fixed normalization of calibration homography.
- Fixed calculation of projected z value for z buffering.
- Now properly handles non-orthogonal Kinect views.

SARndbox-1.4:
- Bumped Vrui version requirement to Vrui-3.0-001.
- Bumped Kinect version requirement to Kinect-2.7.
- Fixed a mistake in step 5 of the installation instructions in the
README file; measurement points must be taken in zig-zag order, not
counter-clockwise. It's still wrong in the calibration video.
- Updated instructions in README file according to new calibration
utility.
82 changes: 54 additions & 28 deletions README
@@ -1,5 +1,5 @@
========================================================================
README for Augmented Reality Sandbox (SARndbox) version 1.2
README for Augmented Reality Sandbox (SARndbox) version 1.4
Copyright (c) 2012-2013 Oliver Kreylos
========================================================================

Expand All @@ -15,9 +15,8 @@ surface using a calibrated projector.
Requirements
============

The Augmented Reality Sandbox version 1.2 requires Vrui version 2.7
build 001 or newer, and the Kinect 3D Video Capture Project version 2.5
or newer.
The Augmented Reality Sandbox requires Vrui version 3.0 build 001 or
newer, and the Kinect 3D Video Capture Project version 2.7 or newer.

Installation Guide
==================
Expand Down Expand Up @@ -49,8 +48,8 @@ be used from multiple user accounts.
2. Change into the Augmented Reality Sandbox's base directory:
> cd SARndbox-<version>

3. If the Vrui version installed in step 0 was not 2.7, or Vrui's
installation directory was changed from the default of ~/Vrui-2.7,
3. If the Vrui version installed in step 0 was not 3.0, or Vrui's
installation directory was changed from the default of ~/Vrui-3.0,
adapt the makefile using a text editor. Change the value of
VRUI_MAKEDIR close to the beginning of the file as follows:
VRUI_MAKEDIR := <Vrui install dir>/share/make
Expand Down Expand Up @@ -326,12 +325,13 @@ Start KinectViewer, and create a 3D measurement tool by assigning a
"Mouse -> Screen Projector" tool to some button, and then a "Measurement
Tool" tool to the same button. Then measure the 3D positions of the four
corners of the flattened sand surface in the order lower left, lower
right, upper right, upper left. To measure the positions correctly,
change the view until the sand surface is aligned to the screen plane,
i.e., until the crosshairs displayed while the viewpoint is changed
exactly touch the virtual sand surface along its entire extent. Then
move the mouse to the four corners of the virtual sand surface, and
click the "Measurement Tool" tool's button once each time.
right, upper left, upper right; in other words, form a mirrored Z
starting in the lower left. To measure the positions correctly, change
the view until the sand surface is aligned to the screen plane, i.e.,
until the crosshairs displayed while the viewpoint is changed exactly
touch the virtual sand surface along its entire extent. Then move the
mouse to the four corners of the virtual sand surface, and click the
"Measurement Tool" tool's button once each time.

The measurement tool will save its measurements in a file in the current
directory, with a name of MeasurementToolXXXX.dat. This file will
Expand Down Expand Up @@ -430,32 +430,52 @@ are in the same plane, the calibration procedure will fail.
The exact procedure is as follows:

1. Start CalibrateProjector and wait for it to collect a background
frame. It is essential to run CalibrateProjector in full-screen mode
on the projector, or the resulting calibration will be defective. See
the Vrui user's manual on how to force Vrui applications to run at
the proper position and size.
frame. Background capture is active while the screen is red. It is
essential to run CalibrateProjector in full-screen mode on the
projector, or the resulting calibration will be defective. See the
Vrui user's manual on how to force Vrui applications to run at the
proper position and size.
When started, CalibrateProjector must be told the exact pixel size of
the projector's image using the -s <width> <height> command line
option. Using a wrong pixel size will result in a defective
calibration.

2. Create a "Capture" tool.
calibration. The recommended BenQ short-throw projector has 1024x768
pixels, which is also the default in the software. In other words,
when using an XGA-resolution projector, the -s option is not
required.

2. Create a "Capture" tool and bind it to two keys (here "1" and "2").
Press and hold "1" and move the mouse to highlight the "Capture" item
in the tool selection menu that pops up. Then release "1" to select
the highlighted item. This will open a dialog box prompting to press
a second key; press and release "2". This will close the dialog box.
Do not press "1" again when the dialog box is still open; that will
cancel the tool creation process.
This process binds functions to two keys: "1" will capture a tie
point, and "2" will re-capture the background sand surface. "2"
should only be pressed if the sand surface changes during the
calibration procedure, for example if a hole is dug to capture a
lower tie point. After any change to the sand surface, remove the
calibration object and any other objects, press "2", and wait for the
screen to turn black again.

3. Place the disk target at some random elevation above or below the
flattened average sand surface such that the intersection of the
projected white lines exactly coincides with the target's center
point.

4. Remove your hands from the disk target and confirm that the target
is seen by the Kinect camera. CalibrateProjector will display green
boxes of various sizes surrounding non-background blobs in the depth
image. Because there is no calibration yet, the green box
corresponding to the disk target will not appear over the target;
simply ensure that there is a big green box for the disk target, and
that the box is stable.

5. Press the "Capture" tool's button, and wait until the tie point is
captured.
is seen by the Kinect camera. CalibrateProjector will display all
non-background objects as dark green blobs, and the object it
identified as the calibration target as a light green blob.
Because there is no calibration yet, the light green blob
corresponding to the disk target will not be aligned with the target;
simply ensure that there is a light green blob, that it is circular
and stable, and that it corresponds with the actual calibration
target.

5. Press the "Capture" tool's first button ("1"), and wait until the tie
point is captured. Do not move the calibration target or hold any
objects above the sand surface while a tie point is captured.

6. CalibrateProjector will move on to the next tie point position, and
display a new set of white lines. Repeat from step 3 until all tie
Expand All @@ -466,6 +486,12 @@ The exact procedure is as follows:
continue to capture more tie points to improve calibration as
desired; the calibration file will be updated after every additional
tie point. Simply close the application window when satisfied.
Additionally, after the first round of tie points has been collected,
CalibrateProjector will track the calibration target in real-time and
indicate its position with red crosshairs. To check calibration
quality, place the target anywhere in or above the sandbox, remove
your hands, and ensure that the red crosshairs intersect in the
target's center.

Step 8: Run the Augmented Reality Sandbox
-----------------------------------------
Expand Down

0 comments on commit 3b3342c

Please sign in to comment.