webgl-render-vap.ts 13 KB

  1. /*
  2. * Tencent is pleased to support the open source community by making vap available.
  3. *
  4. * Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
  5. *
  6. * Licensed under the MIT License (the "License"); you may not use this file except in
  7. * compliance with the License. You may obtain a copy of the License at
  8. *
  9. * http://opensource.org/licenses/MIT
  10. *
  11. * Unless required by applicable law or agreed to in writing, software distributed under the License is
  12. * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
  13. * either express or implied. See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. import { VapConfig } from './type';
  17. import VapFrameParser from './vap-frame-parser';
  18. import * as glUtil from './gl-util';
  19. import VapVideo from './video';
  20. const PER_SIZE = 9;
  21. function computeCoord(x: number, y: number, w: number, h: number, vw: number, vh: number) {
  22. // leftX rightX bottomY topY
  23. return [x / vw, (x + w) / vw, (vh - y - h) / vh, (vh - y) / vh];
  24. }
/**
 * WebGL renderer for "vap" alpha-channel video: the source mp4 packs an RGB
 * area and a grayscale alpha area into each frame, and the fragment shader
 * recombines them (plus optional user-supplied overlay images) into a
 * transparent animation drawn onto a canvas.
 */
export default class WebglRenderVap extends VapVideo {
  private canvas: HTMLCanvasElement;
  private gl: WebGLRenderingContext;
  private vertexShader: WebGLShader;
  private fragmentShader: WebGLShader;
  private program: WebGLProgram;
  // One texture per user-supplied source image; texture unit 0 is reserved for the video frame.
  private textures: WebGLTexture[] = [];
  private videoTexture: WebGLTexture;
  private vertexBuffer: WebGLBuffer;
  private vapFrameParser: VapFrameParser;
  // Cached location of the `image_pos` uniform; reset whenever the program is rebuilt.
  private imagePosLoc: WebGLUniformLocation;

  constructor(options?: VapConfig) {
    super();
    // Passing options to the constructor starts playback immediately.
    if (options) {
      this.play(options);
    }
  }

  /**
   * Starts (or restarts) playback.
   *
   * With `options`: stores them, (re)creates the video element, re-parses the
   * vap config and rebuilds all GL resources before delegating to the base
   * class. Without `options`: simply resumes via the base class.
   * Returns `this` for chaining; config-parse failures are logged, not thrown.
   */
  play(options?: VapConfig) {
    if (options) {
      this.setOptions(options);
    }
    if (!this.options?.config) {
      console.error(`options.config cannot be empty.`);
      return this;
    }
    if (options) {
      this.initVideo();
      // Re-parse the config (new options imply a new animation).
      this.vapFrameParser = new VapFrameParser(this.options.config, this.options);
      this.vapFrameParser
        .init()
        .then(() => {
          this.initWebGL();
          this.initTexture();
          this.initVideoTexture();
          this.options.fps = this.vapFrameParser.config.info.fps || 30;
          super.play();
        })
        .catch((e) => {
          this.vapFrameParser = null;
          console.error('[Alpha video] parse vap frame error.', e);
          return this;
        });
    } else {
      super.play();
    }
    return this;
  }

  /**
   * (Re)creates the canvas, GL context, shaders and program, sized from the
   * parsed vap config. The canvas, context and vertex shader are reused across
   * plays; the fragment shader and program are rebuilt each time because the
   * fragment source depends on the number of source images in the config.
   */
  initWebGL() {
    let { canvas, gl, vertexShader, fragmentShader, program } = this;
    if (!canvas) {
      canvas = document.createElement('canvas');
    }
    const { vapFrameParser } = this;
    const { w, h } = vapFrameParser.config.info;
    canvas.width = w;
    canvas.height = h;
    this.container.appendChild(canvas);
    if (!gl) {
      gl = canvas.getContext('webgl') || (canvas.getContext('experimental-webgl') as WebGLRenderingContext);
      // Pipeline blending is disabled; the fragment shader composites
      // overlay images over the video color itself.
      gl.disable(gl.BLEND);
      gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
    }
    gl.viewport(0, 0, w, h);
    if (!vertexShader) {
      vertexShader = this.initVertexShader(gl);
    }
    // Release the previous program/fragment shader before rebuilding them.
    if (fragmentShader && program) {
      glUtil.cleanWebGL(gl, { program, shaders: [fragmentShader] });
    }
    const { srcData } = vapFrameParser;
    fragmentShader = this.initFragmentShader(gl, Object.keys(srcData).length);
    program = glUtil.createProgram(gl, vertexShader, fragmentShader);
    this.canvas = canvas;
    this.gl = gl;
    this.vertexShader = vertexShader;
    this.fragmentShader = fragmentShader;
    this.program = program;
    // Uniform locations from the old program are invalid for the new one.
    this.imagePosLoc = null;
    return gl;
  }

  /**
   * Vertex shader: passes through clip-space position plus two sets of
   * texture coordinates (rgb area and alpha area of the packed video frame).
   */
  initVertexShader(gl: WebGLRenderingContext) {
    return glUtil.createShader(
      gl,
      gl.VERTEX_SHADER,
      `attribute vec2 a_position; // 接受顶点坐标
       attribute vec2 a_texCoord; // 接受纹理坐标
       attribute vec2 a_alpha_texCoord; // 接受纹理坐标
       varying vec2 v_alpha_texCoord; // 接受纹理坐标
       varying vec2 v_texcoord; // 传递纹理坐标给片元着色器
       void main(void){
         gl_Position = vec4(a_position, 0.0, 1.0); // 设置坐标
         v_texcoord = a_texCoord; // 设置纹理坐标
         v_alpha_texCoord = a_alpha_texCoord; // 设置纹理坐标
       }`
    );
  }

  /**
   * Fragment shader, generated per config. `textureSize` is the number of
   * user-supplied source images; each gets its own sampler uniform and a
   * PER_SIZE (9) float slot in the `image_pos` uniform array: texture index,
   * src rect x1/x2/y1/y2 and mask rect mx1/mx2/my1/my2 (filled per frame by
   * drawFrame). The base color combines video rgb with the alpha-area red
   * channel, then each overlay is alpha-composited on top.
   */
  initFragmentShader(gl: WebGLRenderingContext, textureSize) {
    const bgColor = `vec4(texture2D(u_image_video, v_texcoord).rgb, texture2D(u_image_video,v_alpha_texCoord).r);`;
    let sourceTexure = '';
    let sourceUniform = '';
    if (textureSize > 0) {
      const bufferSize = textureSize * PER_SIZE;
      const imgColor = [];
      const samplers = [];
      // GLSL ES 1.0 cannot index a sampler array dynamically, so emit an
      // if/else chain that dispatches on the texture index instead.
      for (let i = 0; i < textureSize; i++) {
        imgColor.push(
          `if(ndx == ${i + 1}){
             color = texture2D(u_image${i + 1},uv);
           }`
        );
        samplers.push(`uniform sampler2D u_image${i + 1};`);
      }
      sourceUniform = `
        ${samplers.join('\n')}
        uniform float image_pos[${bufferSize}];
        vec4 getSampleFromArray(int ndx, vec2 uv) {
          vec4 color;
          ${imgColor.join(' else ')}
          return color;
        }
      `;
      sourceTexure = `
        vec4 srcColor,maskColor;
        vec2 srcTexcoord,maskTexcoord;
        int srcIndex;
        float x1,x2,y1,y2,mx1,mx2,my1,my2; //显示的区域
        for(int i=0;i<${bufferSize};i+= ${PER_SIZE}){
          if ((int(image_pos[i]) > 0)) {
            srcIndex = int(image_pos[i]);
            x1 = image_pos[i+1];
            x2 = image_pos[i+2];
            y1 = image_pos[i+3];
            y2 = image_pos[i+4];
            mx1 = image_pos[i+5];
            mx2 = image_pos[i+6];
            my1 = image_pos[i+7];
            my2 = image_pos[i+8];
            if (v_texcoord.s>x1 && v_texcoord.s<x2 && v_texcoord.t>y1 && v_texcoord.t<y2) {
              srcTexcoord = vec2((v_texcoord.s-x1)/(x2-x1),(v_texcoord.t-y1)/(y2-y1));
              maskTexcoord = vec2(mx1+srcTexcoord.s*(mx2-mx1),my1+srcTexcoord.t*(my2-my1));
              srcColor = getSampleFromArray(srcIndex,srcTexcoord);
              maskColor = texture2D(u_image_video, maskTexcoord);
              srcColor.a = srcColor.a*(maskColor.r);
              bgColor = vec4(srcColor.rgb*srcColor.a,srcColor.a) + (1.0-srcColor.a)*bgColor;
            }
          }
        }
      `;
    }
    const fragmentShader = `
      precision lowp float;
      varying vec2 v_texcoord;
      varying vec2 v_alpha_texCoord;
      uniform sampler2D u_image_video;
      ${sourceUniform}
      void main(void) {
        vec4 bgColor = ${bgColor}
        ${sourceTexure}
        gl_FragColor = bgColor;
      }
    `;
    return glUtil.createShader(gl, gl.FRAGMENT_SHADER, fragmentShader);
  }

  /**
   * Creates (or refreshes) one texture per source image from the vap config,
   * binds each to its own texture unit starting at 1, wires the matching
   * `u_image{i}` sampler uniform, and records srcId -> unit in the parser's
   * textureMap for use by drawFrame.
   */
  initTexture() {
    const { gl, vapFrameParser, textures } = this;
    if (!vapFrameParser || !vapFrameParser.srcData) {
      return;
    }
    const resources = vapFrameParser.srcData;
    // Unit 0 is allocated to the video.
    let i = 1;
    for (const key in resources) {
      const resource = resources[key];
      const texture = textures[i - 1];
      if (texture) {
        // Reuse the existing texture object: just upload the new image.
        // NOTE(review): assumes the texture is still bound to TEXTURE_2D on
        // this unit from the earlier createTexture call — verify in gl-util.
        gl.activeTexture(gl.TEXTURE0 + i);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, resource.img);
      } else {
        this.textures.push(glUtil.createTexture(gl, i, resource.img));
      }
      const sampler = gl.getUniformLocation(this.program, `u_image${i}`);
      gl.uniform1i(sampler, i);
      this.vapFrameParser.textureMap[resource.srcId] = i++;
    }
  }

  /**
   * Sets up the video texture on unit 0 and the static vertex buffer: a
   * 4-vertex triangle strip whose interleaved layout per vertex is
   * [x, y, rgbU, rgbV, alphaU, alphaV] (stride of 6 floats), mapping the
   * rgb and alpha areas of the packed frame onto the full canvas.
   */
  initVideoTexture() {
    const { gl, vapFrameParser, program } = this;
    if (!vapFrameParser || !vapFrameParser.config || !vapFrameParser.config.info) {
      return;
    }
    // video texture
    if (!this.videoTexture) {
      this.videoTexture = glUtil.createTexture(gl, 0);
    }
    const sampler = gl.getUniformLocation(program, `u_image_video`);
    gl.uniform1i(sampler, 0);
    gl.activeTexture(gl.TEXTURE0);
    const info = vapFrameParser.config.info;
    const { videoW: vW, videoH: vH } = info;
    // rgbFrame / aFrame are the pixel rects of the color and alpha areas
    // inside the packed video frame.
    const [rgbX, rgbY, rgbW, rgbH] = info.rgbFrame;
    const [aX, aY, aW, aH] = info.aFrame;
    const rgbCoord = computeCoord(rgbX, rgbY, rgbW, rgbH, vW, vH);
    const aCoord = computeCoord(aX, aY, aW, aH, vW, vH);
    const view = new Float32Array([
      ...[-1, 1, rgbCoord[0], rgbCoord[3], aCoord[0], aCoord[3]],
      ...[1, 1, rgbCoord[1], rgbCoord[3], aCoord[1], aCoord[3]],
      ...[-1, -1, rgbCoord[0], rgbCoord[2], aCoord[0], aCoord[2]],
      ...[1, -1, rgbCoord[1], rgbCoord[2], aCoord[1], aCoord[2]],
    ]);
    if (!this.vertexBuffer) {
      this.vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
      // NOTE(review): on the reuse path the buffer is not re-bound before
      // bufferData below — relies on it still being the bound ARRAY_BUFFER.
    }
    gl.bufferData(gl.ARRAY_BUFFER, view, gl.STATIC_DRAW);
    // Point a_position / a_texCoord / a_alpha_texCoord at the interleaved buffer.
    const size = view.BYTES_PER_ELEMENT;
    const aPosition = gl.getAttribLocation(program, 'a_position');
    gl.enableVertexAttribArray(aPosition);
    gl.vertexAttribPointer(aPosition, 2, gl.FLOAT, false, size * 6, 0); // vertex position
    const aTexCoord = gl.getAttribLocation(program, 'a_texCoord');
    gl.enableVertexAttribArray(aTexCoord);
    gl.vertexAttribPointer(aTexCoord, 2, gl.FLOAT, false, size * 6, size * 2); // rgb-area texcoord
    const aAlphaTexCoord = gl.getAttribLocation(program, 'a_alpha_texCoord');
    gl.enableVertexAttribArray(aAlphaTexCoord);
    gl.vertexAttribPointer(aAlphaTexCoord, 2, gl.FLOAT, false, size * 6, size * 4); // alpha-area texcoord
  }

  /**
   * Per-frame callback: determines the current frame index, uploads that
   * frame's overlay rectangles to the `image_pos` uniform, uploads the video
   * frame to texture unit 0, and draws the strip. Falls through to the base
   * class when GL is not initialized. Signature mirrors the base callback;
   * `info.presentedFrames` presumably comes from requestVideoFrameCallback —
   * verify against video.ts.
   */
  drawFrame(_, info) {
    const { gl, vapFrameParser, video, options } = this;
    if (!gl) {
      super.drawFrame(_, info);
      return;
    }
    // Prefer the compositor's presented-frame counter when not looping;
    // otherwise derive the index from currentTime, fps and a caller offset.
    const frame =
      !options.loop && info?.presentedFrames > 0
        ? info.presentedFrames - 1
        : Math.round(video.currentTime * options.fps) + options.offset;
    // console.info('frame:', info.presentedFrames - 1, Math.round(this.video.currentTime * this.options.fps));
    const frameData = vapFrameParser.getFrame(frame);
    if (frameData?.obj) {
      let posArr = [];
      const { videoW: vW, videoH: vH, rgbFrame } = vapFrameParser.config.info;
      frameData.obj.forEach((frame) => {
        // The user may not have supplied a src for this element.
        const imgIndex = vapFrameParser.textureMap[frame.srcId];
        if (imgIndex > 0) {
          posArr[posArr.length] = imgIndex;
          // frame coords are final display coords; the shader works in video
          // coords, so offset by the rgb area's origin before normalizing.
          const [rgbX, rgbY] = rgbFrame;
          const [x, y, w, h] = frame.frame;
          const [mX, mY, mW, mH] = frame.mFrame;
          const coord = computeCoord(x + rgbX, y + rgbY, w, h, vW, vH);
          const mCoord = computeCoord(mX, mY, mW, mH, vW, vH);
          posArr = posArr.concat(coord).concat(mCoord);
        }
      });
      if (posArr.length) {
        // Lazily (re)fetch the uniform location; initWebGL nulls the cache.
        this.imagePosLoc = this.imagePosLoc || gl.getUniformLocation(this.program, 'image_pos');
        gl.uniform1fv(this.imagePosLoc, new Float32Array(posArr));
      }
    }
    this.trigger('frame', frame + 1, frameData, vapFrameParser.config);
    gl.clear(gl.COLOR_BUFFER_BIT);
    // Upload the current video frame as a 2D texture (unit 0 is active).
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    super.drawFrame(_, info);
  }

  // Clears state in preparation for the next play.
  clear() {
    super.clear();
    const { gl } = this;
    // Clear the canvas so that, on back-to-back plays, the first frame is not
    // the previous mp4's last frame.
    gl.clear(gl.COLOR_BUFFER_BIT);
  }

  // Destroys and releases all WebGL resources; calling play afterwards
  // re-initializes them from scratch.
  destroy() {
    super.destroy();
    const { canvas, gl, vertexShader, fragmentShader, program, textures, videoTexture, vertexBuffer } = this;
    if (canvas) {
      canvas.parentNode && canvas.parentNode.removeChild(canvas);
      this.canvas = null;
    }
    if (gl) {
      glUtil.cleanWebGL(gl, {
        program,
        shaders: [vertexShader, fragmentShader],
        textures: [...textures, videoTexture],
        buffers: [vertexBuffer],
      });
    }
    this.gl = null;
    this.vertexShader = null;
    this.fragmentShader = null;
    this.program = null;
    this.imagePosLoc = null;
    this.vertexBuffer = null;
    this.videoTexture = null;
    this.textures = [];
  }
}