Permalink
Cannot retrieve contributors at this time
676 lines (511 sloc)
19.3 KB
var canvas;              // The <canvas> element we render into
var gl;                  // The WebGL context

// Cube geometry buffers
var cubeVerticesBuffer;              // Vertex positions
var cubeVerticesTextureCoordBuffer;  // Texture coordinates
var cubeVerticesIndexBuffer;         // Face indices (duplicate declaration removed)

var cubeRotation = 0.0;      // Current cube rotation angle, in degrees
var lastCubeUpdateTime = 0;  // Timestamp (ms) of the last animation update
var cubeImage;               // The texture image
var cubeTexture;             // The WebGLTexture built from cubeImage
var mvMatrix;                // Model-view matrix (sylvester Matrix)
var shaderProgram;           // The WebGLProgram we will create, which will render the cube
var vertexPositionAttribute;
var textureCoordAttribute;
var perspectiveMatrix;

// WebVR variables
// Guard the constructor: in browsers without WebVR 1.1, VRFrameData is
// undefined and calling it unconditionally would throw a ReferenceError
// here, before the feature check in start() ever runs.
var frameData = (typeof VRFrameData !== 'undefined') ? new VRFrameData() : null;
var vrDisplay;
var btn = document.querySelector('.stop-start');
var normalSceneFrame; // window.requestAnimationFrame id for the normal loop
var vrSceneFrame;     // VRDisplay.requestAnimationFrame id for the VR loop

// Pose stats readout elements
var poseStatsBtn = document.querySelector('.pose-stats');
var poseStatsSection = document.querySelector('section');
poseStatsSection.style.visibility = 'hidden'; // hide it initially
var posStats = document.querySelector('.pos');
var orientStats = document.querySelector('.orient');
var linVelStats = document.querySelector('.lin-vel');
var linAccStats = document.querySelector('.lin-acc');
var angVelStats = document.querySelector('.ang-vel');
var angAccStats = document.querySelector('.ang-acc');
var poseStatsDisplayed = false;

//
// start
//
// Called when the body has loaded to get the ball rolling.
document.body.onload = start;
// Entry point, run from body.onload. Sets up WebGL, builds the scene
// resources, starts the normal (non-VR) render loop, and — when the
// WebVR 1.1 API is available — wires the button up to enter and exit
// VR presentation.
function start() {
  canvas = document.getElementById("glcanvas");
  initWebGL(canvas); // Initialize the GL context (sets the `gl` global)

  // Only continue if WebGL is available and working
  if (gl) {
    gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
    gl.clearDepth(1.0); // Clear everything
    gl.enable(gl.DEPTH_TEST); // Enable depth testing
    gl.depthFunc(gl.LEQUAL); // Near things obscure far things

    // Initialize the shaders; this is where all the lighting for the
    // vertices and so forth is established.
    initShaders();

    // Here's where we call the routine that builds all the objects
    // we'll be drawing.
    initBuffers();

    // Next, load and set up the textures we'll be using.
    initTextures();

    // Draw the scene normally, without WebVR — for those who don't have
    // it and want to see the scene in their browser.
    canvas.width = window.innerWidth;
    canvas.height = window.innerHeight;
    drawScene();

    // WebVR: navigator.getVRDisplays is the WebVR 1.1 feature-detection point.
    if(navigator.getVRDisplays) {
      console.log('WebVR 1.1 supported');
      // Then get the displays attached to the computer
      navigator.getVRDisplays().then(function(displays) {
        // If a display is available, use it to present the scene
        if(displays.length > 0) {
          vrDisplay = displays[0];
          console.log('Display found');
          // Starting the presentation when the button is clicked: it can
          // only be called in response to a user gesture.
          btn.addEventListener('click', function() {
            // The button doubles as a toggle; its label tracks the current state.
            if(btn.textContent === 'Start VR display') {
              vrDisplay.requestPresent([{ source: canvas }]).then(function() {
                console.log('Presenting to WebVR display');

                // Set the canvas size to the size of the vrDisplay viewport.
                // Both eyes are rendered side by side, hence width * 2.
                var leftEye = vrDisplay.getEyeParameters('left');
                var rightEye = vrDisplay.getEyeParameters('right');

                canvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
                canvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);

                // Stop the normal presentation, and start the VR presentation
                window.cancelAnimationFrame(normalSceneFrame);
                drawVRScene();

                btn.textContent = 'Exit VR display';
              });
            } else {
              vrDisplay.exitPresent();
              console.log('Stopped presenting to WebVR display');

              btn.textContent = 'Start VR display';

              // Stop the VR presentation, and start the normal presentation
              vrDisplay.cancelAnimationFrame(vrSceneFrame);
              drawScene();
            }
          });
        }
      });
    } else {
      console.log('WebVR API not supported by this browser.');
    }
  }
}
| // | |
| // initWebGL | |
| // | |
| // Initialize WebGL, returning the GL context or null if | |
| // WebGL isn't available or could not be initialized. | |
| // | |
//
// initWebGL
//
// Initialize WebGL, storing the context in the `gl` global, or leaving
// it null (after alerting the user) if WebGL isn't available or could
// not be initialized.
//
function initWebGL() {
  gl = null;

  try {
    // Prefer the standard "webgl" context name; fall back to the legacy
    // "experimental-webgl" alias used by older browsers.
    gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
  }
  catch(e) {
  }

  // If we don't have a GL context, give up now
  if (!gl) {
    alert("Unable to initialize WebGL. Your browser may not support it.");
  }
}
| // | |
| // initBuffers | |
| // | |
| // Initialize the buffers we'll need. For this demo, we just have | |
| // one object -- a simple two-dimensional cube. | |
| // | |
//
// initBuffers
//
// Build the GL buffers for the one object in this demo: a unit cube.
// Creates and fills three buffers — vertex positions, texture
// coordinates, and the element indices that stitch each face out of
// two triangles.
//
function initBuffers() {
  // Four corners per face, six faces, all on the unit cube.
  var cubeCorners = [
    // Front face
    -1.0, -1.0,  1.0,   1.0, -1.0,  1.0,   1.0,  1.0,  1.0,  -1.0,  1.0,  1.0,
    // Back face
    -1.0, -1.0, -1.0,  -1.0,  1.0, -1.0,   1.0,  1.0, -1.0,   1.0, -1.0, -1.0,
    // Top face
    -1.0,  1.0, -1.0,  -1.0,  1.0,  1.0,   1.0,  1.0,  1.0,   1.0,  1.0, -1.0,
    // Bottom face
    -1.0, -1.0, -1.0,   1.0, -1.0, -1.0,   1.0, -1.0,  1.0,  -1.0, -1.0,  1.0,
    // Right face
     1.0, -1.0, -1.0,   1.0,  1.0, -1.0,   1.0,  1.0,  1.0,   1.0, -1.0,  1.0,
    // Left face
    -1.0, -1.0, -1.0,  -1.0, -1.0,  1.0,  -1.0,  1.0,  1.0,  -1.0,  1.0, -1.0
  ];

  // Create the position buffer, make it current, and upload the data.
  cubeVerticesBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeCorners), gl.STATIC_DRAW);

  // Every face maps the full texture the same way, so repeat one set of
  // UV corners six times.
  var faceUV = [0.0, 0.0,  1.0, 0.0,  1.0, 1.0,  0.0, 1.0];
  var texCoords = [];
  for (var face = 0; face < 6; face++) {
    texCoords = texCoords.concat(faceUV);
  }

  cubeVerticesTextureCoordBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesTextureCoordBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);

  // Each face is two triangles over its four corners:
  // (0,1,2) and (0,2,3), offset by 4 per face.
  var faceIndices = [];
  for (var f = 0; f < 6; f++) {
    var base = f * 4;
    faceIndices.push(base, base + 1, base + 2, base, base + 2, base + 3);
  }

  cubeVerticesIndexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVerticesIndexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(faceIndices), gl.STATIC_DRAW);
}
| // | |
| // initTextures | |
| // | |
| // Initialize the textures we'll be using, then initiate a load of | |
| // the texture images. The handleTextureLoaded() callback will finish | |
| // the job; it gets called each time a texture finishes loading. | |
| // | |
//
// initTextures
//
// Create the cube's texture object and kick off an asynchronous load
// of its image; handleTextureLoaded() finishes the GL-side setup once
// the pixels arrive.
//
function initTextures() {
  cubeTexture = gl.createTexture();

  cubeImage = new Image();
  cubeImage.onload = function() {
    handleTextureLoaded(cubeImage, cubeTexture);
  };
  // Assigning src starts the download.
  cubeImage.src = "metal003.png";
}
//
// handleTextureLoaded
//
// onload callback for the texture image: uploads the pixels into the
// texture object, configures filtering, and builds the mipmap chain.
// NOTE(review): generateMipmap in WebGL 1 requires a power-of-two
// image — presumably metal003.png satisfies this; confirm.
//
function handleTextureLoaded(img, tex) {
  console.log("handleTextureLoaded, image = " + img);

  gl.bindTexture(gl.TEXTURE_2D, tex);

  // Upload the image into the bound texture.
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);

  // Linear magnification; mipmapped minification.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
  gl.generateMipmap(gl.TEXTURE_2D);

  // Unbind so later texture operations don't accidentally touch this one.
  gl.bindTexture(gl.TEXTURE_2D, null);
}
| // | |
| // drawScene | |
//
// drawScene
//
// Renders one frame of the normal (non-VR) presentation and schedules
// the next one via window.requestAnimationFrame. Also advances the
// cube's rotation based on the time elapsed since the previous frame.
//
function drawScene() {
  // Request the next frame of the animation
  normalSceneFrame = window.requestAnimationFrame(drawScene);

  // Clear the canvas before we start drawing on it.
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  gl.viewport(0, 0, canvas.width, canvas.height);

  // Establish the perspective with which we want to view the scene: a
  // 45 degree field of view, showing objects between 0.1 and 100 units
  // from the camera. Use the canvas's actual aspect ratio — the canvas
  // is sized to the window in start(), so the previous hard-coded
  // 640:480 ratio distorted the scene whenever the window wasn't 4:3.
  perspectiveMatrix = makePerspective(45, canvas.width / canvas.height, 0.1, 100.0);

  // Set the drawing position to the "identity" point, which is
  // the center of the scene.
  loadIdentity();

  // Now move the drawing position a bit to where we want to start
  // drawing the cube.
  mvTranslate([-0.0, 0.0, -9.0]);

  // Save the current matrix, then rotate before we draw.
  mvPushMatrix();
  mvRotate(cubeRotation, [0.25, 0, 0.25]);

  // Draw the cube by binding the array buffer to the cube's vertices
  // array, setting attributes, and pushing it to GL.
  gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesBuffer);
  gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);

  // Set the texture coordinates attribute for the vertices.
  gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesTextureCoordBuffer);
  gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

  // Specify the texture to map onto the faces.
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
  gl.uniform1i(gl.getUniformLocation(shaderProgram, "uSampler"), 0);

  // Draw the cube.
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVerticesIndexBuffer);
  setMatrixUniforms();
  gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);

  // Restore the original matrix
  mvPopMatrix();

  // Update the rotation for the next draw, if it's time to do so:
  // 30 degrees per second, scaled by the elapsed time.
  var currentTime = Date.now();
  if (lastCubeUpdateTime) {
    var delta = currentTime - lastCubeUpdateTime;
    cubeRotation += (30 * delta) / 1000.0;
  }
  lastCubeUpdateTime = currentTime;
}
| // | |
| // WebVR: Draw the scene for the WebVR display. | |
| // | |
//
// WebVR: Draw the scene for the WebVR display.
//
// Renders one stereo frame: fetches the current frame's pose and eye
// matrices from the VRDisplay, draws the cube once into each half of
// the canvas, and submits the frame. Schedules itself with the
// display's own requestAnimationFrame, which runs at the headset's
// refresh rate rather than the browser window's.
//
function drawVRScene() {
  // WebVR: Request the next frame of the animation
  vrSceneFrame = vrDisplay.requestAnimationFrame(drawVRScene);

  // Populate frameData with the data of the next frame to display
  vrDisplay.getFrameData(frameData);

  // You can get the position, orientation, etc. of the display from the
  // current frame's pose. NOTE(review): VRPose members can be null on
  // displays that don't report them — this code assumes they are
  // present; confirm on the target hardware.
  var curFramePose = frameData.pose;
  var curPos = curFramePose.position;
  var curOrient = curFramePose.orientation;

  // Update the pose-stats panel only while it is shown.
  if(poseStatsDisplayed) {
    displayPoseStats(curFramePose);
  }

  // Clear the canvas before we start drawing on it.
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

  // WebVR: Create the required projection and view matrix locations needed
  // for passing into the uniformMatrix4fv methods below
  var projectionMatrixLocation = gl.getUniformLocation(shaderProgram, "projMatrix");
  var viewMatrixLocation = gl.getUniformLocation(shaderProgram, "viewMatrix");

  // WebVR: Render the left eye’s view to the left half of the canvas
  gl.viewport(0, 0, canvas.width * 0.5, canvas.height);
  gl.uniformMatrix4fv(projectionMatrixLocation, false, frameData.leftProjectionMatrix);
  gl.uniformMatrix4fv(viewMatrixLocation, false, frameData.leftViewMatrix);
  drawGeometry();

  // WebVR: Render the right eye’s view to the right half of the canvas
  gl.viewport(canvas.width * 0.5, 0, canvas.width * 0.5, canvas.height);
  gl.uniformMatrix4fv(projectionMatrixLocation, false, frameData.rightProjectionMatrix);
  gl.uniformMatrix4fv(viewMatrixLocation, false, frameData.rightViewMatrix);
  drawGeometry();

  // Draws the cube once. Called twice above — once per eye — after the
  // per-eye viewport and matrix uniforms have been set.
  function drawGeometry() {
    // Establish the perspective with which we want to view the
    // scene. Our field of view is 45 degrees, with a width/height
    // ratio of 640:480, and we only want to see objects between 0.1 units
    // and 100 units away from the camera.
    perspectiveMatrix = makePerspective(45, 640.0/480.0, 0.1, 100.0);

    // Set the drawing position to the "identity" point, which is
    // the center of the scene.
    loadIdentity();

    // Now move the drawing position a bit to where we want to start
    // drawing the cube. The pose values are small, so they are
    // multiplied by 25 — presumably so the headset's movement produces
    // a clearly visible translation of the scene.
    mvTranslate([
      0.0 - (curPos[0] * 25) + (curOrient[1] * 25),
      5.0 - (curPos[1] * 25) - (curOrient[0] * 25),
      -15.0 - (curPos[2] * 25)
    ]);

    // Save the current matrix, then rotate before we draw.
    mvPushMatrix();
    mvRotate(cubeRotation, [0.25, 0, 0.25 - curOrient[2] * 0.5]);

    // Draw the cube by binding the array buffer to the cube's vertices
    // array, setting attributes, and pushing it to GL.
    gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesBuffer);
    gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);

    // Set the texture coordinates attribute for the vertices.
    gl.bindBuffer(gl.ARRAY_BUFFER, cubeVerticesTextureCoordBuffer);
    gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

    // Specify the texture to map onto the faces.
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
    gl.uniform1i(gl.getUniformLocation(shaderProgram, "uSampler"), 0);

    // Draw the cube.
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVerticesIndexBuffer);
    setMatrixUniforms();
    gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);

    // Restore the original matrix
    mvPopMatrix();
  }

  // Update the rotation for the next draw, if it's time to do so.
  var currentTime = (new Date).getTime();
  if (lastCubeUpdateTime) {
    var delta = currentTime - lastCubeUpdateTime;
    cubeRotation += (30 * delta) / 1000.0;
  }
  lastCubeUpdateTime = currentTime;

  // WebVR: Indicate that we are ready to present the rendered frame to the VR display
  vrDisplay.submitFrame();
}
| // | |
| // initShaders | |
| // | |
| // Initialize the shaders, so WebGL knows how to light our scene. | |
| // | |
//
// initShaders
//
// Compile and link the vertex and fragment shaders from the document's
// script elements, activate the resulting program, and look up the
// vertex attributes the draw routines use.
//
function initShaders() {
  var fragmentShader = getShader(gl, "shader-fs");
  var vertexShader = getShader(gl, "shader-vs");

  // Create the shader program
  shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);

  // If linking the shader program failed, alert with the program's log.
  // (Bug fix: this previously passed an undefined `shader` variable to
  // getProgramInfoLog, which threw a ReferenceError instead of
  // reporting the link error.)
  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(shaderProgram));
  }

  gl.useProgram(shaderProgram); // Specify the WebGL program we want to use for the rendering

  vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
  gl.enableVertexAttribArray(vertexPositionAttribute);

  textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
  gl.enableVertexAttribArray(textureCoordAttribute);
}
| // | |
| // getShader | |
| // | |
| // Loads a shader program by scouring the current document, | |
| // looking for a script with the specified ID. | |
| // | |
//
// getShader
//
// Load a shader from the document: find the script element with the
// given id, gather its text content as source, create a shader of the
// kind indicated by the element's MIME type, and compile it. Returns
// the compiled WebGLShader, or null if the element is missing, the
// type is unknown, or compilation fails.
//
function getShader(gl, id) {
  var shaderScript = document.getElementById(id);

  // No such script element: nothing to compile.
  if (!shaderScript) {
    return null;
  }

  // Concatenate the element's text-node children to recover the source.
  var source = "";
  for (var node = shaderScript.firstChild; node; node = node.nextSibling) {
    if (node.nodeType == 3) {
      source += node.textContent;
    }
  }

  // The element's MIME type tells us which kind of shader to create.
  var shader;
  switch (shaderScript.type) {
    case "x-shader/x-fragment":
      shader = gl.createShader(gl.FRAGMENT_SHADER);
      break;
    case "x-shader/x-vertex":
      shader = gl.createShader(gl.VERTEX_SHADER);
      break;
    default:
      return null; // Unknown shader type
  }

  // Hand the source to the shader object and compile it.
  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  // Report a compile failure rather than returning a broken shader.
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
    return null;
  }

  return shader;
}
| // | |
| // Matrix utility functions | |
| // | |
// Reset the model-view matrix to the 4x4 identity (sylvester Matrix).
function loadIdentity() {
  mvMatrix = Matrix.I(4);
}
// Right-multiply the model-view matrix by m (sylvester's `x` method
// is matrix multiplication).
function multMatrix(m) {
  mvMatrix = mvMatrix.x(m);
}
// Translate the model-view matrix by the vector v ([x, y, z]).
function mvTranslate(v) {
  var translation = Matrix.Translation($V([v[0], v[1], v[2]])).ensure4x4();
  multMatrix(translation);
}
// Upload the current projection and model-view matrices to the shader
// program's uPMatrix / uMVMatrix uniforms.
function setMatrixUniforms() {
  var projectionLoc = gl.getUniformLocation(shaderProgram, "uPMatrix");
  gl.uniformMatrix4fv(projectionLoc, false, new Float32Array(perspectiveMatrix.flatten()));

  var modelViewLoc = gl.getUniformLocation(shaderProgram, "uMVMatrix");
  gl.uniformMatrix4fv(modelViewLoc, false, new Float32Array(mvMatrix.flatten()));
}
// Stack used by mvPushMatrix/mvPopMatrix to save and restore mvMatrix.
var mvMatrixStack = [];

// Save a copy of the current model-view matrix on the stack. If a
// matrix m is given, a copy of m is saved instead and another copy of
// m becomes the new current matrix.
function mvPushMatrix(m) {
  var saved = m ? m.dup() : mvMatrix.dup();
  mvMatrixStack.push(saved);
  if (m) {
    mvMatrix = m.dup();
  }
}

// Restore (and return) the most recently saved model-view matrix.
// Throws when the stack is empty.
function mvPopMatrix() {
  if (mvMatrixStack.length === 0) {
    throw("Can't pop from an empty matrix stack.");
  }
  mvMatrix = mvMatrixStack.pop();
  return mvMatrix;
}
// Rotate the model-view matrix by `angle` degrees about the axis v.
function mvRotate(angle, v) {
  var radians = angle * Math.PI / 180.0;
  multMatrix(Matrix.Rotation(radians, $V([v[0], v[1], v[2]])).ensure4x4());
}
// WebVR: log presentation state changes reported by the browser.
window.addEventListener('vrdisplaypresentchange', function(e) {
  var message = 'Display ' + e.display.displayId +
    ' presentation has changed. Reason given: ' + e.reason + '.';
  console.log(message);
});
// WebVR: toggle the pose stats panel and the button label on clicks.
poseStatsBtn.addEventListener('click', function() {
  poseStatsDisplayed = !poseStatsDisplayed;
  poseStatsSection.style.visibility = poseStatsDisplayed ? 'visible' : 'hidden';
  poseStatsBtn.textContent = poseStatsDisplayed ? 'Hide pose stats' : 'Show pose stats';
});
//
// displayPoseStats
//
// Write the members of a VRPose into the stats readout elements. Per
// the WebVR spec, any VRPose member may be null when the display
// cannot report it (e.g. position on an orientation-only headset), so
// every member is guarded before use — previously only the two
// acceleration members were, and a null position crashed this function.
//
function displayPoseStats(pose) {
  var pos = pose.position;
  var orient = pose.orientation;
  var linVel = pose.linearVelocity;
  var linAcc = pose.linearAcceleration;
  var angVel = pose.angularVelocity;
  var angAcc = pose.angularAcceleration;

  if(pos) {
    posStats.textContent = 'Position: x ' + pos[0].toFixed(3) + ', y ' + pos[1].toFixed(3) + ', z ' + pos[2].toFixed(3);
  } else {
    posStats.textContent = 'Position not reported';
  }

  if(orient) {
    orientStats.textContent = 'Orientation: x ' + orient[0].toFixed(3) + ', y ' + orient[1].toFixed(3) + ', z ' + orient[2].toFixed(3);
  } else {
    orientStats.textContent = 'Orientation not reported';
  }

  if(linVel) {
    linVelStats.textContent = 'Linear velocity: x ' + linVel[0].toFixed(3) + ', y ' + linVel[1].toFixed(3) + ', z ' + linVel[2].toFixed(3);
  } else {
    linVelStats.textContent = 'Linear velocity not reported';
  }

  if(angVel) {
    angVelStats.textContent = 'Angular velocity: x ' + angVel[0].toFixed(3) + ', y ' + angVel[1].toFixed(3) + ', z ' + angVel[2].toFixed(3);
  } else {
    angVelStats.textContent = 'Angular velocity not reported';
  }

  if(linAcc) {
    linAccStats.textContent = 'Linear acceleration: x ' + linAcc[0].toFixed(3) + ', y ' + linAcc[1].toFixed(3) + ', z ' + linAcc[2].toFixed(3);
  } else {
    linAccStats.textContent = 'Linear acceleration not reported';
  }

  if(angAcc) {
    angAccStats.textContent = 'Angular acceleration: x ' + angAcc[0].toFixed(3) + ', y ' + angAcc[1].toFixed(3) + ', z ' + angAcc[2].toFixed(3);
  } else {
    angAccStats.textContent = 'Angular acceleration not reported';
  }
}