From cff676f37ce1f0fbca54823e11382c3275f68619 Mon Sep 17 00:00:00 2001
From: Dimitrios Ververidis
Date: Fri, 4 Feb 2022 15:39:58 +0200
Subject: [PATCH] Address some of Vincent's comments; the main comment about
 the use of the Shader is not yet addressed.

---
 dist/networked-aframe.js                      |   2 +-
 dist/networked-aframe.min.js                  |   2 +-
 docs/getting-started-local.md                 |  32 +-
 examples/advanced-video-green-screen.html     |  21 +-
 examples/advanced-video-green-screen_dev.html | 350 ++++++++++++++++++
 examples/index.html                           |  28 +-
 server/easyrtc-server.js                      |   4 +-
 .../networked-video-source-green-screen.js    |  26 +-
 8 files changed, 396 insertions(+), 69 deletions(-)
 create mode 100644 examples/advanced-video-green-screen_dev.html

diff --git a/dist/networked-aframe.js b/dist/networked-aframe.js
index 6362fd86..6f96ceb0 100644
--- a/dist/networked-aframe.js
+++ b/dist/networked-aframe.js
@@ -298,7 +298,7 @@ eval("\n\n/* global AFRAME, NAF */\nAFRAME.registerComponent('networked-scene',
 /***/ (function(module, exports, __webpack_require__) {

 "use strict";
-eval("\n\n/* global AFRAME, NAF, THREE */\nvar naf = __webpack_require__(/*! ../NafIndex */ \"./src/NafIndex.js\");\n\nAFRAME.registerComponent('networked-video-source-green-screen', {\n schema: {\n streamName: {\n \"default\": 'video'\n },\n GreenThresholdIn: {\n \"default\": 0.02\n },\n event: {\n type: 'string',\n \"default\": ''\n }\n },\n dependencies: ['material'],\n update: function update() {\n var _this = this;\n\n var data = this.data; // Component property values.\n\n var el = this.el; // Reference to the component's entity.\n\n if (data.event) {\n // This will log the `message` when the entity emits the `event`.\n el.addEventListener(data.event, function () {});\n } else {// `event` not specified, just log the message.\n }\n\n this.videoTexture = null;\n this.video = null;\n this.stream = null;\n this._setMediaStream = this._setMediaStream.bind(this);\n NAF.utils.getNetworkedEntity(this.el).then(function (networkedEl) {\n var ownerId = networkedEl.components.networked.data.owner;\n\n if (ownerId) {\n NAF.connection.adapter.getMediaStream(ownerId, _this.data.streamName).then(_this._setMediaStream)[\"catch\"](function (e) {\n return naf.log.error(\"Error getting media stream for \".concat(ownerId), e);\n });\n } else {// Correctly configured local entity, perhaps do something here for enabling debug audio loopback\n }\n });\n },\n _setMediaStream: function _setMediaStream(newStream) {\n if (!this.video) {\n this.setupVideo();\n }\n\n if (newStream != this.stream) {\n if (this.stream) {\n this._clearMediaStream();\n }\n\n if (newStream) {\n this.video.srcObject = newStream;\n var playResult = this.video.play();\n\n if (playResult instanceof Promise) {\n playResult[\"catch\"](function (e) {\n return naf.log.error(\"Error play video stream\", e);\n });\n }\n\n if (this.videoTexture) {\n this.videoTexture.dispose();\n }\n\n this.videoTexture = new THREE.VideoTexture(this.video);\n this.videoTexture.format = THREE.RGBAFormat;\n var mesh = this.el.getObject3D('mesh'); //--------- begin replace green with transparent ---------\n\n this.uniforms = {};\n this.uniforms.uMap = {\n type: 't',\n value: this.videoTexture\n };\n this.uniforms.GreenThresholdIn = {\n type: 'float',\n value: this.data.GreenThresholdIn\n };\n this.uniforms = THREE.UniformsUtils.merge([this.uniforms, THREE.UniformsLib['lights']]);\n this.materialIncoming = new THREE.ShaderMaterial({\n uniforms: this.uniforms\n });\n this.materialIncoming.vertexShader = \"\\n varying vec2 vUv;\\n\\n void 
main() {\\n vec4 worldPosition = modelViewMatrix * vec4( position, 1.0 );\\n vec3 vWorldPosition = worldPosition.xyz;\\n vUv = uv;\\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\\n }\\n \";\n this.materialIncoming.fragmentShader = \"\\n varying vec2 vUv;\\n uniform sampler2D uMap;\\n uniform float GreenThresholdIn;\\n \\n void main() {\\n vec2 uv = vUv;\\n vec4 tex1 = texture2D(uMap, uv * 1.0);\\n if (tex1.g - tex1.r > GreenThresholdIn)\\n gl_FragColor = vec4(0,0,0,0);\\n else\\n gl_FragColor = vec4(tex1.r,tex1.g,tex1.b,1.0);\\n }\\n \";\n this.materialIncoming.transparent = true;\n this.materialIncoming.side = THREE.BackSide;\n mesh.material = this.materialIncoming; //---------- end of replace -----------------\n //mesh.material.map = this.videoTexture;\n\n mesh.material.needsUpdate = true;\n }\n\n this.stream = newStream;\n }\n },\n _clearMediaStream: function _clearMediaStream() {\n this.stream = null;\n\n if (this.videoTexture) {\n if (this.videoTexture.image instanceof HTMLVideoElement) {\n // Note: this.videoTexture.image === this.video\n var video = this.videoTexture.image;\n video.pause();\n video.srcObject = null;\n video.load();\n }\n\n this.videoTexture.dispose();\n this.videoTexture = null;\n }\n },\n remove: function remove() {\n this._clearMediaStream();\n },\n setupVideo: function setupVideo() {\n if (!this.video) {\n var video = document.createElement('video');\n video.setAttribute('autoplay', true);\n video.setAttribute('playsinline', true);\n video.setAttribute('muted', true);\n this.video = video;\n }\n }\n});\n\n//# sourceURL=webpack:///./src/components/networked-video-source-green-screen.js?");
+eval("\n\n/* global AFRAME, NAF, THREE */\nvar naf = __webpack_require__(/*! ../NafIndex */ \"./src/NafIndex.js\");\n\nAFRAME.registerComponent('networked-video-source-green-screen', {\n schema: {\n streamName: {\n \"default\": 'video'\n },\n gthreshold: {\n \"default\": 0.02\n }\n },\n dependencies: ['material'],\n update: function update() {\n var _this = this;\n\n this.videoTexture = null;\n this.video = null;\n this.stream = null;\n this._setMediaStream = this._setMediaStream.bind(this);\n NAF.utils.getNetworkedEntity(this.el).then(function (networkedEl) {\n var ownerId = networkedEl.components.networked.data.owner;\n\n if (ownerId) {\n NAF.connection.adapter.getMediaStream(ownerId, _this.data.streamName).then(_this._setMediaStream)[\"catch\"](function (e) {\n return naf.log.error(\"Error getting media stream for \".concat(ownerId), e);\n });\n } else {// Correctly configured local entity, perhaps do something here for enabling debug audio loopback\n }\n });\n },\n _setMediaStream: function _setMediaStream(newStream) {\n if (!this.video) {\n this.setupVideo();\n }\n\n if (newStream != this.stream) {\n if (this.stream) {\n this._clearMediaStream();\n }\n\n if (newStream) {\n this.video.srcObject = newStream;\n var playResult = this.video.play();\n\n if (playResult instanceof Promise) {\n playResult[\"catch\"](function (e) {\n return naf.log.error(\"Error play video stream\", e);\n });\n }\n\n if (this.videoTexture) {\n this.videoTexture.dispose();\n }\n\n this.videoTexture = new THREE.VideoTexture(this.video);\n var mesh = this.el.getObject3D('mesh'); //--------- begin replace green with transparent ---------\n\n this.uniforms = {};\n this.uniforms.uMap = {\n type: 't',\n value: this.videoTexture\n };\n this.uniforms.gthreshold = {\n type: 'float',\n value: this.data.gthreshold\n };\n this.uniforms = THREE.UniformsUtils.merge([this.uniforms, 
THREE.UniformsLib['lights']]);\n this.materialIncoming = new THREE.ShaderMaterial({\n uniforms: this.uniforms\n });\n this.materialIncoming.vertexShader = \"\\n varying vec2 vUv;\\n\\n void main() {\\n vec4 worldPosition = modelViewMatrix * vec4( position, 1.0 );\\n vec3 vWorldPosition = worldPosition.xyz;\\n vUv = uv;\\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\\n }\\n \";\n this.materialIncoming.fragmentShader = \"\\n varying vec2 vUv;\\n uniform sampler2D uMap;\\n uniform float gthreshold;\\n \\n void main() {\\n vec2 uv = vUv;\\n vec4 tex1 = texture2D(uMap, uv * 1.0);\\n if (tex1.g - tex1.r > gthreshold)\\n discard; // better than gl_FragColor = vec4(0,0,0,0);\\n else\\n gl_FragColor = vec4(tex1.r,tex1.g,tex1.b,1.0);\\n }\\n \";\n this.materialIncoming.transparent = true;\n this.materialIncoming.side = THREE.DoubleSide;\n mesh.material = this.materialIncoming; //---------- end of replace -----------------\n //mesh.material.map = this.videoTexture;\n\n mesh.material.needsUpdate = true;\n }\n\n this.stream = newStream;\n }\n },\n _clearMediaStream: function _clearMediaStream() {\n this.stream = null;\n\n if (this.videoTexture) {\n if (this.videoTexture.image instanceof HTMLVideoElement) {\n // Note: this.videoTexture.image === this.video\n var video = this.videoTexture.image;\n video.pause();\n video.srcObject = null;\n video.load();\n }\n\n this.videoTexture.dispose();\n this.videoTexture = null;\n }\n },\n remove: function remove() {\n this._clearMediaStream();\n },\n setupVideo: function setupVideo() {\n if (!this.video) {\n var video = document.createElement('video');\n video.setAttribute('autoplay', true);\n video.setAttribute('playsinline', true);\n video.setAttribute('muted', true);\n this.video = video;\n }\n }\n});\n\n//# sourceURL=webpack:///./src/components/networked-video-source-green-screen.js?");

 /***/ }),

diff --git a/dist/networked-aframe.min.js b/dist/networked-aframe.min.js
index bc506525..4b62a731 100644
--- a/dist/networked-aframe.min.js
+++ b/dist/networked-aframe.min.js
@@ -1 +1 @@
[one-line minified bundle diff omitted: regenerated build output]
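Note for reviewers: the bundled +eval line in the dist/networked-aframe.js hunk above is hard to read, so here is a readable sketch of the material setup it compiles from src/components/networked-video-source-green-screen.js. It is reconstructed from the dist output rather than copied from the source file, and the helper name makeGreenScreenMaterial is only illustrative:

    /* Sketch of the chroma-key material built in _setMediaStream.
       Assumes a THREE global and a playing <video> element. */
    function makeGreenScreenMaterial(video, gthreshold) {
      var videoTexture = new THREE.VideoTexture(video);
      // Merge the video texture and threshold with the standard light
      // uniforms, mirroring what the component does.
      var uniforms = THREE.UniformsUtils.merge([
        {
          uMap: { type: 't', value: videoTexture },
          gthreshold: { type: 'float', value: gthreshold }
        },
        THREE.UniformsLib['lights']
      ]);
      var material = new THREE.ShaderMaterial({
        uniforms: uniforms,
        vertexShader: [
          'varying vec2 vUv;',
          'void main() {',
          '  vUv = uv;',
          '  gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);',
          '}'
        ].join('\n'),
        fragmentShader: [
          'varying vec2 vUv;',
          'uniform sampler2D uMap;',
          'uniform float gthreshold;',
          'void main() {',
          '  vec4 tex1 = texture2D(uMap, vUv);',
          '  if (tex1.g - tex1.r > gthreshold)',
          '    discard; // drop green pixels instead of writing transparent black',
          '  else',
          '    gl_FragColor = vec4(tex1.rgb, 1.0);',
          '}'
        ].join('\n')
      });
      material.transparent = true;
      material.side = THREE.DoubleSide; // was BackSide; DoubleSide renders the front face too
      return material;
    }

Discarding keyed pixels avoids the depth-write and blending artifacts that writing vec4(0,0,0,0) with transparency can cause, which is why the new fragment shader uses discard.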
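Since the schema property was renamed from GreenThresholdIn to gthreshold (and the unused event property was dropped), any markup or script that sets the old name must be updated. A hypothetical usage sketch, with the entity selector as a placeholder:

    // Hypothetical usage after this patch; '#remote-avatar' is a placeholder id.
    var el = document.querySelector('#remote-avatar');
    el.setAttribute('networked-video-source-green-screen', {
      streamName: 'video', // schema default
      gthreshold: 0.02     // discard where g - r exceeds this; lower keys out more, higher keeps more
    });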