/*
 * Tencent is pleased to support the open source community by making vap available.
 *
 * Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the MIT License (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://opensource.org/licenses/MIT
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
import VapFrameParser from './vap-frame-parser'
import * as glUtil from './gl-util'
import VapVideo from './video'

// Module-level cache: one { canvas, gl, vertexShader, fragmentShader, program }
// bucket per options.type, shared across renderer instances so replays reuse
// the already-compiled WebGL program. Evicted by clearMemoryCache().
let clearTimer = null
let instances = {}

// Each merged-frame entry packed into the `image_pos` uniform occupies 9 floats:
// [srcIndex, x1, x2, y1, y2, mx1, mx2, my1, my2]
const PER_SIZE = 9

/**
 * Convert a pixel-space rectangle (x, y, w, h) inside a vw x vh video into
 * normalized texture coordinates.
 * @returns {number[]} [leftX, rightX, bottomY, topY], each in [0, 1]
 */
function computeCoord(x, y, w, h, vw, vh) {
  // leftX rightX bottomY topY
  return [x / vw, (x + w) / vw, (vh - y - h) / vh, (vh - y) / vh]
}

/**
 * WebGL renderer for VAP alpha-videos. The source video carries the RGB frame
 * and the alpha mask in separate regions; the fragment shader recombines them
 * and optionally composites dynamic "merged" images (fonts/avatars) described
 * by the per-frame config.
 */
export default class WebglRenderVap extends VapVideo {
  constructor(options) {
    super(options)
    this.insType = this.options.type
    // Reuse the cached canvas/GL bucket for this type when available.
    if (instances[this.insType]) {
      this.instance = instances[this.insType]
    } else {
      this.instance = instances[this.insType] = {}
    }
    this.textures = []
    this.buffers = []
    this.shaders = []
    this.init()
  }

  /**
   * Parse the VAP config (if any), set up the canvas + WebGL pipeline and
   * start playback. Parse failures are logged and playback continues without
   * merged resources.
   */
  async init() {
    this.setCanvas()
    if (this.options.config) {
      try {
        this.vapFrameParser = await new VapFrameParser(this.options.config, this.options).init()
        this.resources = this.vapFrameParser.srcData
      } catch (e) {
        console.error('[Alpha video] parse vap frame error.', e)
      }
    }
    this.resources = this.resources || {}
    this.initWebGL()
    this.play()
  }

  /**
   * Create (or reuse) the shared canvas, size it from options and attach it
   * to the container. Note the canvas is re-appended on every init because a
   * previous destroy() removes it from the DOM.
   */
  setCanvas() {
    let canvas = this.instance.canvas
    const { width, height } = this.options
    if (!canvas) {
      canvas = this.instance.canvas = document.createElement('canvas')
    }
    canvas.width = width
    canvas.height = height
    this.container.appendChild(canvas)
  }

  /**
   * Lazily create the GL context, shaders and program (all cached on the
   * shared instance bucket), then upload textures and vertex data.
   * @returns {WebGLRenderingContext|undefined} the context, if available
   */
  initWebGL() {
    const { canvas } = this.instance
    let { gl, vertexShader, fragmentShader, program } = this.instance
    if (!canvas) {
      return
    }
    if (!gl) {
      this.instance.gl = gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl')
      gl.enable(gl.BLEND)
      // Premultiplied-style blending: color weighted by src alpha, alpha accumulated.
      gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA)
      // Video frames arrive top-left origin; flip to GL's bottom-left.
      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true)
    }
    if (gl) {
      gl.viewport(0, 0, canvas.width, canvas.height)
      if (!vertexShader) {
        vertexShader = this.instance.vertexShader = this.initVertexShader()
      }
      if (!fragmentShader) {
        fragmentShader = this.instance.fragmentShader = this.initFragmentShader()
      }
      if (!program) {
        program = this.instance.program = glUtil.createProgram(gl, vertexShader, fragmentShader)
      }
      this.program = program
      this.initTexture()
      this.initVideoTexture()
      return gl
    }
  }

  /**
   * 顶点着色器 (vertex shader): passes through clip-space position plus the
   * RGB and alpha texture coordinates for each corner of the quad.
   */
  initVertexShader() {
    const { gl } = this.instance
    return glUtil.createShader(
      gl,
      gl.VERTEX_SHADER,
      `attribute vec2 a_position; // 接受顶点坐标
       attribute vec2 a_texCoord; // 接受纹理坐标
       attribute vec2 a_alpha_texCoord; // 接受纹理坐标
       varying vec2 v_alpha_texCoord; // 接受纹理坐标
       varying vec2 v_texcoord; // 传递纹理坐标给片元着色器
       void main(void){
          gl_Position = vec4(a_position, 0.0, 1.0); // 设置坐标
          v_texcoord = a_texCoord; // 设置纹理坐标
          v_alpha_texCoord = a_alpha_texCoord; // 设置纹理坐标
       }`
    )
  }

  /**
   * 片元着色器 (fragment shader): samples RGB from the color region and alpha
   * from the mask region of the video texture, then overlays any merged
   * source images whose placement arrives per-frame via the `image_pos`
   * uniform (PER_SIZE floats per slot).
   *
   * NOTE(review): the tail of this method (the merged-image loop body and the
   * final shader assembly) was reconstructed — the source text was corrupted
   * (angle-bracketed spans stripped, truncating the template literal). Verify
   * against the project's canonical copy.
   */
  initFragmentShader() {
    const { gl } = this.instance
    const bgColor = `vec4(texture2D(u_image_video, v_texcoord).rgb, texture2D(u_image_video,v_alpha_texCoord).r);`
    // Reserve one unit for the video texture itself; the rest hold merged images.
    const textureSize = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS) - 1
    // const textureSize =0
    let sourceTexure = ''
    let sourceUniform = ''
    if (textureSize > 0) {
      // GLSL ES 1.0 forbids dynamic sampler-array indexing, hence the
      // unrolled if/else chain selecting the sampler by index.
      const imgColor = []
      for (let i = 0; i < textureSize; i++) {
        imgColor.push(`if(ndx == ${i}){
                color = texture2D(textures[${i}],uv);
            }`)
      }
      sourceUniform = `
        uniform sampler2D u_image[${textureSize}];
        uniform float image_pos[${textureSize * PER_SIZE}];
        vec4 getSampleFromArray(sampler2D textures[${textureSize}], int ndx, vec2 uv) {
            vec4 color;
            ${imgColor.join(' else ')}
            return color;
        }
        `
      sourceTexure = `
        vec4 srcColor,maskColor;
        vec2 srcTexcoord,maskTexcoord;
        int srcIndex;
        float x1,x2,y1,y2,mx1,mx2,my1,my2; //显示的区域

        for(int i=0;i<${textureSize * PER_SIZE};i+= ${PER_SIZE}){
            if ((int(image_pos[i]) > 0)) {
              srcIndex = int(image_pos[i]);

              x1 = image_pos[i+1];
              x2 = image_pos[i+2];
              y1 = image_pos[i+3];
              y2 = image_pos[i+4];

              mx1 = image_pos[i+5];
              mx2 = image_pos[i+6];
              my1 = image_pos[i+7];
              my2 = image_pos[i+8];

              if (v_texcoord.s>x1 && v_texcoord.s<x2 && v_texcoord.t>y1 && v_texcoord.t<y2) {
                  srcTexcoord = vec2((v_texcoord.s-x1)/(x2-x1), (v_texcoord.t-y1)/(y2-y1));
                  maskTexcoord = vec2(mx1+srcTexcoord.s*(mx2-mx1), my1+srcTexcoord.t*(my2-my1));
                  srcColor = getSampleFromArray(u_image, srcIndex-1, srcTexcoord);
                  maskColor = texture2D(u_image_video, maskTexcoord);
                  srcColor.a = srcColor.a*(maskColor.r);
                  bgColor = vec4(srcColor.rgb*srcColor.a, srcColor.a) + (1.0-srcColor.a)*bgColor;
              }
            }
        }
        `
    }
    const fragmentShader = `
      precision lowp float;
      varying vec2 v_texcoord;
      varying vec2 v_alpha_texCoord;
      uniform sampler2D u_image_video;
      ${sourceUniform}

      void main(void) {
          vec4 bgColor = ${bgColor}
          ${sourceTexure}
          gl_FragColor = bgColor;
      }
      `
    return glUtil.createShader(gl, gl.FRAGMENT_SHADER, fragmentShader)
  }

  /**
   * Upload each merged-source image to its own texture unit (units 1..n),
   * record its shader slot in the parser's textureMap, then bind the video
   * texture to the next free unit.
   *
   * NOTE(review): reconstructed — this method was entirely missing from the
   * corrupted source text; usage of `textureMap` and `u_image_video` is
   * inferred from the surviving drawFrame/shader fragments. Verify against
   * the project's canonical copy.
   */
  initTexture() {
    const { gl } = this.instance
    let i = 1
    const resources = this.resources
    for (const key in resources) {
      const resource = resources[key]
      this.textures.push(glUtil.createTexture(gl, i, resource.img))
      const sampler = gl.getUniformLocation(this.program, `u_image[${i - 1}]`)
      gl.uniform1i(sampler, i)
      i++
      if (this.vapFrameParser) {
        // Shader-side index is 1-based; drawFrame subtracts nothing, the
        // shader does (srcIndex-1) when indexing u_image.
        this.vapFrameParser.textureMap[key] = i - 1
      }
    }
    // The video frame itself goes on the last unit.
    this.videoTexture = glUtil.createTexture(gl, i)
    const sampler = gl.getUniformLocation(this.program, 'u_image_video')
    gl.uniform1i(sampler, i)
  }

  /**
   * Build the static 4-vertex triangle-strip quad: for each corner, the
   * clip-space position plus the RGB-region and alpha-region texture
   * coordinates (6 floats per vertex), and wire up the three attributes.
   *
   * NOTE(review): reconstructed — this method was entirely missing from the
   * corrupted source text; the attribute names and computeCoord usage come
   * from the surviving vertex shader and helpers. Verify against the
   * project's canonical copy.
   */
  initVideoTexture() {
    const { gl } = this.instance
    const vertexBuffer = gl.createBuffer()
    this.buffers.push(vertexBuffer)
    if (!this.vapFrameParser || !this.vapFrameParser.config || !this.vapFrameParser.config.info) {
      return
    }
    const info = this.vapFrameParser.config.info
    const { videoW: vW, videoH: vH } = info
    const [rgbX, rgbY, rgbW, rgbH] = info.rgbFrame
    const [aX, aY, aW, aH] = info.aFrame
    const rgbCoord = computeCoord(rgbX, rgbY, rgbW, rgbH, vW, vH)
    const aCoord = computeCoord(aX, aY, aW, aH, vW, vH)
    const ver = []
    // Order matches TRIANGLE_STRIP: TL, TR, BL, BR.
    ver.push(-1, 1, rgbCoord[0], rgbCoord[3], aCoord[0], aCoord[3])
    ver.push(1, 1, rgbCoord[1], rgbCoord[3], aCoord[1], aCoord[3])
    ver.push(-1, -1, rgbCoord[0], rgbCoord[2], aCoord[0], aCoord[2])
    ver.push(1, -1, rgbCoord[1], rgbCoord[2], aCoord[1], aCoord[2])
    const view = new Float32Array(ver)
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer)
    gl.bufferData(gl.ARRAY_BUFFER, view, gl.STATIC_DRAW)
    const size = view.BYTES_PER_ELEMENT
    const aPosition = gl.getAttribLocation(this.program, 'a_position')
    gl.vertexAttribPointer(aPosition, 2, gl.FLOAT, false, size * 6, 0)
    gl.enableVertexAttribArray(aPosition)
    const aTexCoord = gl.getAttribLocation(this.program, 'a_texCoord')
    gl.vertexAttribPointer(aTexCoord, 2, gl.FLOAT, false, size * 6, size * 2)
    gl.enableVertexAttribArray(aTexCoord)
    const aAlphaTexCoord = gl.getAttribLocation(this.program, 'a_alpha_texCoord')
    gl.vertexAttribPointer(aAlphaTexCoord, 2, gl.FLOAT, false, size * 6, size * 4)
    gl.enableVertexAttribArray(aAlphaTexCoord)
  }

  /**
   * Per-frame render: pack the current frame's merged-image rects into the
   * `image_pos` uniform, upload the current video frame as the texture, and
   * draw the quad. Always chains to super.drawFrame() to keep the render
   * loop running.
   *
   * NOTE(review): the head of this method (up to the forEach over
   * frameData.obj) was reconstructed from the surviving tail — in particular
   * how the current frame index is derived is an assumption (video time *
   * fps); confirm against the project's canonical copy.
   */
  drawFrame() {
    const { gl } = this.instance
    gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT)
    if (this.vapFrameParser) {
      // presumably frame index = currentTime * fps — TODO confirm
      const frameIndex = Math.round(this.video.currentTime * this.options.fps)
      const frameData = this.vapFrameParser.getFrame(frameIndex)
      let posArr = []
      // Total uniform slots must match the shader's image_pos array length.
      const size = (gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS) - 1) * PER_SIZE
      if (frameData && frameData.obj) {
        frameData.obj.forEach((frame) => {
          posArr[posArr.length] = +this.vapFrameParser.textureMap[frame.srcId]
          const info = this.vapFrameParser.config.info
          const { videoW: vW, videoH: vH } = info
          const [x, y, w, h] = frame.frame
          const [mX, mY, mW, mH] = frame.mFrame
          const coord = computeCoord(x, y, w, h, vW, vH)
          const mCoord = computeCoord(mX, mY, mW, mH, vW, vH)
          posArr = posArr.concat(coord).concat(mCoord)
        })
      }
      // Zero-fill unused slots; srcIndex 0 is skipped by the shader.
      posArr = posArr.concat(new Array(size - posArr.length).fill(0))
      this._imagePos = this._imagePos || gl.getUniformLocation(this.program, 'image_pos')
      gl.uniform1fv(this._imagePos, new Float32Array(posArr))
    }
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.video) // upload current video frame to the 2D texture
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)
    super.drawFrame()
  }

  /**
   * Detach the shared canvas from the DOM and schedule the module-level
   * instance cache for eviction. The GL resources themselves are kept so a
   * replay of the same type can reuse them (see the commented cleanWebGL call).
   */
  destroy() {
    const { canvas } = this.instance
    if (canvas) {
      canvas.parentNode && canvas.parentNode.removeChild(canvas)
    }
    // glUtil.cleanWebGL(gl, this.shaders, this.program, this.textures, this.buffers)
    super.destroy()
    this.clearMemoryCache()
  }

  /**
   * Drop all cached canvases/GL programs 30 minutes after the last destroy;
   * any destroy in the meantime resets the timer.
   */
  clearMemoryCache() {
    if (clearTimer) {
      clearTimeout(clearTimer)
    }
    clearTimer = setTimeout(() => {
      instances = {}
    }, 30 * 60 * 1000)
  }
}