// webgl-render-vap.js
/*
 * Tencent is pleased to support the open source community by making vap available.
 *
 * Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the MIT License (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://opensource.org/licenses/MIT
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
  16. import VapFrameParser from './vap-frame-parser'
  17. import * as glUtil from './gl-util'
  18. import VapVideo from './video'
  19. let clearTimer = null
  20. let instances = {}
  21. const PER_SIZE = 9
  22. function computeCoord(x, y, w, h, vw, vh) {
  23. // leftX rightX bottomY topY
  24. return [x / vw, (x + w) / vw, (vh - y - h) / vh, (vh - y) / vh]
  25. }
  26. export default class WebglRenderVap extends VapVideo {
  27. constructor(options) {
  28. super(options)
  29. this.insType = this.options.type
  30. if (instances[this.insType]) {
  31. this.instance = instances[this.insType]
  32. } else {
  33. this.instance = instances[this.insType] = {}
  34. }
  35. this.textures = []
  36. this.buffers = []
  37. this.shaders = []
  38. this.init()
  39. }
  40. async init() {
  41. this.setCanvas()
  42. if (this.options.config) {
  43. try {
  44. this.vapFrameParser = await new VapFrameParser(this.options.config, this.options).init()
  45. this.resources = this.vapFrameParser.srcData
  46. } catch (e) {
  47. console.error('[Alpha video] parse vap frame error.', e)
  48. }
  49. }
  50. this.resources = this.resources || {}
  51. this.initWebGL()
  52. this.play()
  53. }
  54. setCanvas() {
  55. let canvas = this.instance.canvas
  56. const { width, height } = this.options
  57. if (!canvas) {
  58. canvas = this.instance.canvas = document.createElement('canvas')
  59. }
  60. canvas.width = width
  61. canvas.height = height
  62. this.container.appendChild(canvas)
  63. }
  64. initWebGL() {
  65. const { canvas } = this.instance
  66. let { gl, vertexShader, fragmentShader, program } = this.instance
  67. if (!canvas) {
  68. return
  69. }
  70. if (!gl) {
  71. this.instance.gl = gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl')
  72. gl.enable(gl.BLEND)
  73. gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA)
  74. gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true)
  75. }
  76. if (gl) {
  77. gl.viewport(0, 0, canvas.width, canvas.height)
  78. if (!vertexShader) {
  79. vertexShader = this.instance.vertexShader = this.initVertexShader()
  80. }
  81. if (!fragmentShader) {
  82. fragmentShader = this.instance.fragmentShader = this.initFragmentShader()
  83. }
  84. if (!program) {
  85. program = this.instance.program = glUtil.createProgram(gl, vertexShader, fragmentShader)
  86. }
  87. this.program = program
  88. this.initTexture()
  89. this.initVideoTexture()
  90. return gl
  91. }
  92. }
  93. /**
  94. * 顶点着色器
  95. */
  96. initVertexShader() {
  97. const { gl } = this.instance
  98. return glUtil.createShader(
  99. gl,
  100. gl.VERTEX_SHADER,
  101. `attribute vec2 a_position; // 接受顶点坐标
  102. attribute vec2 a_texCoord; // 接受纹理坐标
  103. attribute vec2 a_alpha_texCoord; // 接受纹理坐标
  104. varying vec2 v_alpha_texCoord; // 接受纹理坐标
  105. varying vec2 v_texcoord; // 传递纹理坐标给片元着色器
  106. void main(void){
  107. gl_Position = vec4(a_position, 0.0, 1.0); // 设置坐标
  108. v_texcoord = a_texCoord; // 设置纹理坐标
  109. v_alpha_texCoord = a_alpha_texCoord; // 设置纹理坐标
  110. }`
  111. )
  112. }
  113. /**
  114. * 片元着色器
  115. */
  116. initFragmentShader() {
  117. const { gl } = this.instance
  118. const bgColor = `vec4(texture2D(u_image_video, v_texcoord).rgb, texture2D(u_image_video,v_alpha_texCoord).r);`
  119. const textureSize = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS) - 1
  120. // const textureSize =0
  121. let sourceTexure = ''
  122. let sourceUniform = ''
  123. if (textureSize > 0) {
  124. const imgColor = []
  125. for (let i = 0; i < textureSize; i++) {
  126. imgColor.push(
  127. `if(ndx == ${i}){
  128. color = texture2D(textures[${i}],uv);
  129. }`
  130. )
  131. }
  132. sourceUniform = `
  133. uniform sampler2D u_image[${textureSize}];
  134. uniform float image_pos[${textureSize * PER_SIZE}];
  135. vec4 getSampleFromArray(sampler2D textures[${textureSize}], int ndx, vec2 uv) {
  136. vec4 color;
  137. ${imgColor.join(' else ')}
  138. return color;
  139. }
  140. `
  141. sourceTexure = `
  142. vec4 srcColor,maskColor;
  143. vec2 srcTexcoord,maskTexcoord;
  144. int srcIndex;
  145. float x1,x2,y1,y2,mx1,mx2,my1,my2; //显示的区域
  146. for(int i=0;i<${textureSize * PER_SIZE};i+= ${PER_SIZE}){
  147. if ((int(image_pos[i]) > 0)) {
  148. srcIndex = int(image_pos[i]);
  149. x1 = image_pos[i+1];
  150. x2 = image_pos[i+2];
  151. y1 = image_pos[i+3];
  152. y2 = image_pos[i+4];
  153. mx1 = image_pos[i+5];
  154. mx2 = image_pos[i+6];
  155. my1 = image_pos[i+7];
  156. my2 = image_pos[i+8];
  157. if (v_texcoord.s>x1 && v_texcoord.s<x2 && v_texcoord.t>y1 && v_texcoord.t<y2) {
  158. srcTexcoord = vec2((v_texcoord.s-x1)/(x2-x1),(v_texcoord.t-y1)/(y2-y1));
  159. maskTexcoord = vec2(mx1+srcTexcoord.s*(mx2-mx1),my1+srcTexcoord.t*(my2-my1));
  160. srcColor = getSampleFromArray(u_image,srcIndex,srcTexcoord);
  161. maskColor = texture2D(u_image_video, maskTexcoord);
  162. srcColor.a = srcColor.a*(maskColor.r);
  163. bgColor = vec4(srcColor.rgb*srcColor.a,srcColor.a) + (1.0-srcColor.a)*bgColor;
  164. }
  165. }
  166. }
  167. `
  168. }
  169. const fragmentSharder = `
  170. precision lowp float;
  171. varying vec2 v_texcoord;
  172. varying vec2 v_alpha_texCoord;
  173. uniform sampler2D u_image_video;
  174. ${sourceUniform}
  175. void main(void) {
  176. vec4 bgColor = ${bgColor}
  177. ${sourceTexure}
  178. // bgColor = texture2D(u_image[0], v_texcoord);
  179. gl_FragColor = bgColor;
  180. }
  181. `
  182. return glUtil.createShader(gl, gl.FRAGMENT_SHADER, fragmentSharder)
  183. }
  184. initTexture() {
  185. const { gl } = this.instance
  186. let i = 1
  187. if (!this.vapFrameParser || !this.vapFrameParser.srcData) {
  188. return
  189. }
  190. const resources = this.vapFrameParser.srcData
  191. for (const key in resources) {
  192. const resource = resources[key]
  193. this.textures.push(glUtil.createTexture(gl, i, resource.img))
  194. const sampler = gl.getUniformLocation(this.program, `u_image[${i}]`)
  195. gl.uniform1i(sampler, i)
  196. this.vapFrameParser.textureMap[resource.srcId] = i++
  197. }
  198. this.videoTexture = glUtil.createTexture(gl, i)
  199. const sampler = gl.getUniformLocation(this.program, `u_image_video`)
  200. gl.uniform1i(sampler, i)
  201. }
  202. initVideoTexture() {
  203. const { gl } = this.instance
  204. const vertexBuffer = gl.createBuffer()
  205. this.buffers.push(vertexBuffer)
  206. if (!this.vapFrameParser || !this.vapFrameParser.config || !this.vapFrameParser.config.info) {
  207. return
  208. }
  209. const info = this.vapFrameParser.config.info
  210. const ver = []
  211. const { videoW: vW, videoH: vH } = info
  212. const [rgbX, rgbY, rgbW, rgbH] = info.rgbFrame
  213. const [aX, aY, aW, aH] = info.aFrame
  214. const rgbCoord = computeCoord(rgbX, rgbY, rgbW, rgbH, vW, vH)
  215. const aCoord = computeCoord(aX, aY, aW, aH, vW, vH)
  216. ver.push(...[-1, 1, rgbCoord[0], rgbCoord[3], aCoord[0], aCoord[3]])
  217. ver.push(...[1, 1, rgbCoord[1], rgbCoord[3], aCoord[1], aCoord[3]])
  218. ver.push(...[-1, -1, rgbCoord[0], rgbCoord[2], aCoord[0], aCoord[2]])
  219. ver.push(...[1, -1, rgbCoord[1], rgbCoord[2], aCoord[1], aCoord[2]])
  220. const view = new Float32Array(ver)
  221. gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer)
  222. gl.bufferData(gl.ARRAY_BUFFER, view, gl.STATIC_DRAW)
  223. this.aPosition = gl.getAttribLocation(this.program, 'a_position')
  224. gl.enableVertexAttribArray(this.aPosition)
  225. this.aTexCoord = gl.getAttribLocation(this.program, 'a_texCoord')
  226. gl.enableVertexAttribArray(this.aTexCoord)
  227. this.aAlphaTexCoord = gl.getAttribLocation(this.program, 'a_alpha_texCoord')
  228. gl.enableVertexAttribArray(this.aAlphaTexCoord)
  229. // 将缓冲区对象分配给a_position变量、a_texCoord变量
  230. const size = view.BYTES_PER_ELEMENT
  231. gl.vertexAttribPointer(this.aPosition, 2, gl.FLOAT, false, size * 6, 0) // 顶点着色器位置
  232. gl.vertexAttribPointer(this.aTexCoord, 2, gl.FLOAT, false, size * 6, size * 2) // rgb像素位置
  233. gl.vertexAttribPointer(this.aAlphaTexCoord, 2, gl.FLOAT, false, size * 6, size * 4) // rgb像素位置
  234. }
  235. drawFrame() {
  236. const gl = this.instance.gl
  237. if (!gl) {
  238. super.drawFrame()
  239. return
  240. }
  241. gl.clear(gl.COLOR_BUFFER_BIT)
  242. if (this.vapFrameParser) {
  243. const frame = Math.floor(this.video.currentTime * this.options.fps)
  244. const frameData = this.vapFrameParser.getFrame(frame)
  245. let posArr = []
  246. if (frameData && frameData.obj) {
  247. frameData.obj.forEach((frame, index) => {
  248. posArr[posArr.length] = +this.vapFrameParser.textureMap[frame.srcId]
  249. const info = this.vapFrameParser.config.info
  250. const { videoW: vW, videoH: vH } = info
  251. const [x, y, w, h] = frame.frame
  252. const [mX, mY, mW, mH] = frame.mFrame
  253. const coord = computeCoord(x, y, w, h, vW, vH)
  254. const mCoord = computeCoord(mX, mY, mW, mH, vW, vH)
  255. posArr = posArr.concat(coord).concat(mCoord)
  256. })
  257. }
  258. //
  259. const size = (gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS) - 1) * PER_SIZE
  260. posArr = posArr.concat(new Array(size - posArr.length).fill(0))
  261. this._imagePos = this._imagePos || gl.getUniformLocation(this.program, 'image_pos')
  262. gl.uniform1fv(this._imagePos, new Float32Array(posArr))
  263. }
  264. gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.video) // 指定二维纹理方式
  265. gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)
  266. super.drawFrame()
  267. }
  268. destroy() {
  269. const { canvas } = this.instance
  270. if (canvas) {
  271. canvas.parentNode && canvas.parentNode.removeChild(canvas)
  272. }
  273. // glUtil.cleanWebGL(gl, this.shaders, this.program, this.textures, this.buffers)
  274. super.destroy()
  275. this.clearMemoryCache()
  276. }
  277. clearMemoryCache() {
  278. if (clearTimer) {
  279. clearTimeout(clearTimer)
  280. }
  281. clearTimer = setTimeout(() => {
  282. instances = {}
  283. }, 30 * 60 * 1000)
  284. }
  285. }