fixup! ovr: Add initial example code.
Squareys committed Jun 5, 2015
1 parent 4939b6d commit fd78343
Showing 1 changed file with 105 additions and 25 deletions.
130 changes: 105 additions & 25 deletions src/ovr/OVRExample.cpp
@@ -22,9 +22,6 @@
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#define OVR_DLL_IMPORT
#include "OVR/OVR_CAPI_0_6_0.h"
#include <OVR/OVR_ErrorCode.h>

#include <memory>
#include <Corrade/Containers/Array.h>
@@ -49,11 +46,20 @@
#include <Magnum/Math/Quaternion.h>
#include <Magnum/SceneGraph/Scene.h>
#include <Magnum/SceneGraph/Drawable.h>
#include <Magnum/Framebuffer.h>
#include <Magnum/Renderbuffer.h>
#include <Magnum/Texture.h>
#include <Magnum/Context.h>

#include "Types.h"
#include "HMDCamera.h"
#include "CubeDrawable.h"

#define OVR_DLL_IMPORT
#include "OVR/OVR_CAPI_0_6_0.h"
#include "OVR/OVR_CAPI_GL.h"
#include <OVR/OVR_ErrorCode.h>

namespace Magnum {

namespace Examples {
@@ -80,10 +86,14 @@ class OVRExample: public Platform::Application {
Scene3D _scene;
Object3D _cameraObject;
SceneGraph::DrawableGroup3D _drawables;
std::unique_ptr<HMDCamera> _camera;
HMDCamera* _cameras[2];
Object3D* _cubes[1]; // maybe more later.
CubeDrawable* _cubeDrawables[1]; // maybe more later.
Framebuffer* _mirrorFramebuffer;
Texture2D* _mirrorTexture;
ovrGLTexture* _ovrMirrorTexture;

Vector2i _resolution;
Vector2i _previousMousePosition;

UnsignedInt _frame;
@@ -97,7 +107,7 @@ class OVRExample: public Platform::Application {
OVRExample::OVRExample(const Arguments& arguments)
: Platform::Application(arguments, nullptr),
_indexBuffer(nullptr), _vertexBuffer(nullptr), _mesh(nullptr),
_shader(nullptr), _scene(), _cameraObject(&_scene), _camera(nullptr) {
_shader(nullptr), _scene(), _cameraObject(&_scene) {

// initialize OVR
ovrResult result = ovr_Initialize(nullptr);
@@ -115,31 +125,41 @@ OVRExample::OVRExample(const Arguments& arguments)
}
} else {
// create debug hmd instead
ovrHmd_CreateDebug(ovrHmd_DK1, &_hmd);
ovrHmd_CreateDebug(ovrHmd_DK2, &_hmd);
}

// get the hmd display resolution
ovrSizei r = _hmd->Resolution;
Vector2i resolution = {r.w, r.h};
_resolution = {r.w / 2, r.h / 2};

// create a context with the hmd display resolution
Configuration conf;
conf.setTitle("Magnum OculusVR Example").setSize(resolution).setSampleCount(16);
conf.setTitle("Magnum OculusVR Example")
.setSize(_resolution)
.setSampleCount(16);
if(!tryCreateContext(conf))
createContext(conf.setSampleCount(0));

ovrHmd_ConfigureTracking(
_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position,
0);
// The Oculus SDK compositor does its own latency-reducing optimizations. For
// those to work, vsync needs to be turned off.
if (!setSwapInterval(0))
Error() << "Could not turn off vsync.";

// setup camera
_camera.reset(new HMDCamera(_cameraObject));
_camera->setPerspective(Deg(90.0f), Vector2(resolution).aspectRatio(), 0.001f, 100)
.setViewport(defaultFramebuffer.viewport().size());
// setup cameras
for(int eye = 0; eye < 2; ++eye) {
// The projection matrix is set inside the camera, since it requires the HMD-specific per-eye FOV.
_cameras[eye] = new HMDCamera(_hmd, (ovrEyeType) eye, _cameraObject);
_cameras[eye]->setViewport(_resolution);
}

ovrHmd_SetEnabledCaps(_hmd, ovrHmdCap_LowPersistence|ovrHmdCap_DynamicPrediction);
ovrHmd_ConfigureTracking(_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position,
0);

Renderer::enable(Renderer::Feature::DepthTest);
Renderer::enable(Renderer::Feature::FaceCulling);

// setup cube mesh
const Trade::MeshData3D cube = Primitives::Cube::solid();

_vertexBuffer.reset(new Buffer());
@@ -162,34 +182,94 @@ OVRExample::OVRExample(const Arguments& arguments)
Shaders::Phong::Normal {}).setIndexBuffer(*_indexBuffer, 0, indexType,
indexStart, indexEnd);

// setup shader
_shader.reset(new Shaders::Phong());

// setup scene
_cubes[0] = new Object3D(&_scene);
_cubeDrawables[0] = new CubeDrawable(_mesh.get(), _shader.get(), Color3::fromHSV(35.0_degf, 1.0f, 1.0f), _cubes[0], &_drawables);

// Set up mirroring of the SDK-composited result to a texture which can later be blitted onto the defaultFramebuffer.
if(ovrHmd_CreateMirrorTextureGL(_hmd, GL_RGBA, _resolution.x(), _resolution.y(), (ovrTexture**)&_ovrMirrorTexture) != ovrSuccess) {
ovrErrorInfo info;
ovr_GetLastErrorInfo(&info);
Error() << info.ErrorString;
}

_mirrorTexture = new Texture2D(Texture2D::wrap(_ovrMirrorTexture->OGL.TexId));
_mirrorFramebuffer = new Framebuffer(Range2Di::fromSize({}, _resolution));
_mirrorFramebuffer->attachTexture(Framebuffer::ColorAttachment(0), *_mirrorTexture, 0)
.mapForRead(Framebuffer::ColorAttachment(0));

ovrEyeRenderDesc EyeRenderDesc[2];
EyeRenderDesc[0] = ovrHmd_GetRenderDesc(_hmd, ovrEye_Left, _hmd->DefaultEyeFov[0]);
EyeRenderDesc[1] = ovrHmd_GetRenderDesc(_hmd, ovrEye_Right, _hmd->DefaultEyeFov[1]);

_hmdToEyeViewOffset[0] = EyeRenderDesc[0].HmdToEyeViewOffset;
_hmdToEyeViewOffset[1] = EyeRenderDesc[1].HmdToEyeViewOffset;
}
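
The per-eye render targets that the compositor later consumes (handed over below via cam->getTextureSet()) are created inside HMDCamera, whose implementation is not part of this diff. As orientation only, here is a minimal sketch of how such a swap texture set could be allocated with libOVR 0.6 — the helper name createEyeTextureSet and its placement inside HMDCamera are assumptions, not code from this commit:

// Sketch only, not part of this commit. Assumes the same includes as
// OVRExample.cpp, in particular "OVR/OVR_CAPI_GL.h" for
// ovrHmd_CreateSwapTextureSetGL(). The SDK owns the resulting GL textures;
// the application renders into whichever texture of the set is current.
ovrSwapTextureSet* createEyeTextureSet(ovrHmd hmd, ovrEyeType eye) {
    // Recommended render target size for this eye at 1.0 pixel density.
    const ovrSizei size = ovrHmd_GetFovTextureSize(hmd, eye, hmd->DefaultEyeFov[eye], 1.0f);

    ovrSwapTextureSet* textureSet = nullptr;
    if(ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.w, size.h, &textureSet) != ovrSuccess) {
        ovrErrorInfo info;
        ovr_GetLastErrorInfo(&info);
        Error() << info.ErrorString;
    }
    return textureSet;
}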

OVRExample::~OVRExample() {
delete _mirrorTexture;
delete _cameras[0];
delete _cameras[1];

ovrHmd_Destroy(_hmd);
ovr_Shutdown();
}
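
Note that the destructor above releases the cameras and the wrapping Texture2D, but not the SDK-side mirror texture or the mirror framebuffer. A hedged sketch of the additional cleanup, assuming libOVR 0.6's ovrHmd_DestroyMirrorTexture() and placed before the ovrHmd_Destroy() call:

// Assumed additional cleanup, not part of this commit:
ovrHmd_DestroyMirrorTexture(_hmd, reinterpret_cast<ovrTexture*>(_ovrMirrorTexture));
delete _mirrorFramebuffer;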

void OVRExample::drawEvent() {
defaultFramebuffer.clear(FramebufferClear::Color | FramebufferClear::Depth);

// get orientation and position of the hmd
ovrHmd_GetEyePoses(_hmd, _frame++, _hmdToEyeViewOffset, _poses, &_trackingState);
_cameraObject.setTransformation(Matrix4::from(
reinterpret_cast<Quaternion*>(&_poses[0].Orientation)->toMatrix(),
Vector3::from(reinterpret_cast<float*>(&_poses[0].Position)) + Vector3{0.0f, 0.0f, 3.0f}
));
ovrHmd_GetEyePoses(_hmd, 0, _hmdToEyeViewOffset, _poses, &_trackingState);

// draw the scene for both cameras
for(int eye = 0; eye < 2; ++eye) {
// set the transformation according to rift trackers
_cameraObject.setTransformation(Matrix4::from(
reinterpret_cast<Quaternion*>(&_poses[eye].Orientation)->toMatrix(),
Vector3::from(reinterpret_cast<float*>(&_poses[eye].Position)) + Vector3{0.0f, 0.0f, 3.0f}
));
_cameras[eye]->draw(_drawables);
}

_camera->draw(_drawables);
/*
* render distortion
*/

ovrViewScaleDesc viewScaleDesc; // TODO: doesn't need to be in render loop
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = _hmdToEyeViewOffset[0];
viewScaleDesc.HmdToEyeViewOffset[1] = _hmdToEyeViewOffset[1];

// create simple distortion layer for the Oculus SDK compositor
ovrLayerEyeFov eyeLayer; // TODO: doesn't need to be in render loop
eyeLayer.Header.Type = ovrLayerType_EyeFov;
eyeLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;

for(int eye = 0; eye < 2; ++eye) {
const HMDCamera* cam = _cameras[eye];
eyeLayer.ColorTexture[eye] = cam->getTextureSet();
eyeLayer.Viewport[eye] = cam->getViewport();
eyeLayer.Fov[eye] = _hmd->DefaultEyeFov[eye];
eyeLayer.RenderPose[eye] = _poses[eye];
}

swapBuffers();
// let the compositor begin
ovrLayerHeader* layers = &eyeLayer.Header;
ovrHmd_SubmitFrame(_hmd, 0, &viewScaleDesc, &layers, 1);

Utility::sleep(40);
// blit mirror texture to defaultFramebuffer
Int w = _ovrMirrorTexture->OGL.Header.TextureSize.w;
Int h = _ovrMirrorTexture->OGL.Header.TextureSize.h;
Framebuffer::blit(*_mirrorFramebuffer,
                  defaultFramebuffer,
                  Range2Di{{0, h}, {w, 0}}, // source range flipped vertically, the mirror texture has its origin at the bottom left
                  Range2Di::fromSize({}, {w, h}),
                  FramebufferBlit::Color, FramebufferBlitFilter::Nearest);
// glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);

swapBuffers();
redraw();
}
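
For reference, the HMDCamera interface that the constructor and drawEvent() above rely on is only included, not shown, in this diff. A hypothetical reconstruction of the members this commit uses — names, signatures and the base class are inferred from the call sites, not taken from the actual HMDCamera.h:

// Hypothetical reconstruction, not from this commit; the actual
// declarations in HMDCamera.h may differ.
class HMDCamera: public SceneGraph::Camera3D {
    public:
        // One camera per eye; the projection matrix is computed internally
        // from the HMD's per-eye default FOV.
        explicit HMDCamera(ovrHmd hmd, ovrEyeType eye, Object3D& object);

        // Swap texture set the compositor reads from (eyeLayer.ColorTexture).
        ovrSwapTextureSet* getTextureSet() const;

        // Eye viewport within that texture set (eyeLayer.Viewport).
        ovrRecti getViewport() const;
};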

