Angular Learning Notes for a WebAssembly-Based H265 Video Player, Part 5: Rendering Decoded H265 Data on a Canvas with WebGL

  • Decoding H265 with wasm
    • Creating the component and route
    • Getting decoded H265 data
    • Rewriting webgl.js
    • Rendering video frames on the canvas
    • Test results

This article builds on the open-source project decoder_wasm, with the front end developed in Angular. I reworked the overall project structure and rewrote most of the JS source in TypeScript to make it easier to maintain.
The focus is on learning how to use Workers and Wasm under the Angular framework. I am not a front-end developer by trade, so please excuse the bare-bones pages.

Decoding H265 with wasm

Creating the component and route

Create the component

$ ng generate component WebGLTest --skipTests=true
CREATE src/app/web-gltest/web-gltest.component.scss (0 bytes)
CREATE src/app/web-gltest/web-gltest.component.html (25 bytes)
CREATE src/app/web-gltest/web-gltest.component.ts (291 bytes)
UPDATE src/app/app.module.ts (886 bytes)

Slightly awkward: the directory name was generated as web-gltest, which reads oddly, so I manually renamed it to webgl-test.

Set up the route

{ path: 'webgl', component: WebGLTestComponent },
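For reference, a minimal routing setup could look like the sketch below (assuming the renamed component class is WebGLTestComponent and it lives under the renamed webgl-test directory; adjust the import path to your actual layout):

import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { WebGLTestComponent } from './webgl-test/webgl-test.component';

const routes: Routes = [
  { path: 'webgl', component: WebGLTestComponent },
];

@NgModule({
  imports: [RouterModule.forRoot(routes)],
  exports: [RouterModule],
})
export class AppRoutingModule { }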

Getting decoded H265 data

See the previous note in this series for details:
Angular Learning Notes for a WebAssembly-Based H265 Video Player, Part 4: Decoding H265 Data with wasm
Copy the relevant logic over into the new test component.

Give the canvas a size

<canvas #playCanvas width="540" height="360"></canvas>
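Note that the width and height attributes of a canvas are plain pixel counts for the drawing buffer; a px suffix is not valid there, and CSS controls the displayed size separately. If you want the drawing buffer to track the element's CSS size, a minimal sketch (run once the view is ready, using the playCanvas ViewChild introduced below):

// Sketch: make the drawing buffer match the CSS layout size.
const canvas = this.playCanvas.nativeElement;
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;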

Rewriting webgl.js

Rewrite the webgl code from decoder_wasm in TypeScript to make it easier to call.
webgl-texture.ts

export class WebGLTexture {
    private texture: globalThis.WebGLTexture;
    constructor(private gl: WebGLRenderingContext) {
        this.texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, this.texture);

        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    }
    // Bind this texture to texture unit n and point the sampler uniform `name` at it.
    public bind(n: number, program: WebGLProgram, name: string) {
        const gl = this.gl;
        gl.activeTexture([gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2][n]);
        gl.bindTexture(gl.TEXTURE_2D, this.texture);
        gl.uniform1i(gl.getUniformLocation(program, name), n);
    }
    // Upload a width x height single-channel (LUMINANCE) plane into this texture.
    public fill(width: number, height: number, data: ArrayBufferView) {
        const gl = this.gl;
        gl.bindTexture(gl.TEXTURE_2D, this.texture);
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
    }
}
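For a YUV420P frame of width W and height H, the Y plane is W×H bytes and the U and V planes are (W/2)×(H/2) bytes each, which is why the three textures are filled with different dimensions. A quick usage sketch (assuming gl, program, and the plane subarrays are already set up; the names mirror the player below):

// One single-channel LUMINANCE texture per YUV420P plane.
const y = new WebGLTexture(gl);
const u = new WebGLTexture(gl);
const v = new WebGLTexture(gl);
y.bind(0, program, 'YTexture'); // texture unit 0 -> uniform YTexture
u.bind(1, program, 'UTexture'); // texture unit 1 -> uniform UTexture
v.bind(2, program, 'VTexture'); // texture unit 2 -> uniform VTexture
y.fill(width, height, yPlane);           // W x H bytes
u.fill(width >> 1, height >> 1, uPlane); // (W/2) x (H/2) bytes
v.fill(width >> 1, height >> 1, vPlane); // (W/2) x (H/2) bytes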

webgl-player.ts

import { WebGLTexture } from './webgl-texture';

// All fields are optional here: the test code below constructs the player with an
// empty options object, and the options are not yet read by the player itself.
interface WebGLPlayerOptions {
    pixFmt?: number;
    videoWidth?: number;
    videoHeight?: number;
    yLength?: number;
    uvLength?: number;
}

export class WebGLPlayer {
    private gl: WebGLRenderingContext;
    private webglY: WebGLTexture;
    private webglU: WebGLTexture;
    private webglV: WebGLTexture;
    constructor(private canvas: HTMLCanvasElement, private options: WebGLPlayerOptions) {
        // The 'experimental-webgl' fallback from the original JS is no longer needed:
        // this.gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
        this.gl = canvas.getContext('webgl');
        this.initGL();
    }

    private initGL(): void {
        if (!this.gl) {
            console.log('[ER] WebGL not supported.');
            return;
        }

        const gl: WebGLRenderingContext = this.gl;
        gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
        const program = gl.createProgram();
        const vertexShaderSource = [
            'attribute highp vec4 aVertexPosition;',
            'attribute vec2 aTextureCoord;',
            'varying highp vec2 vTextureCoord;',
            'void main(void) {',
            ' gl_Position = aVertexPosition;',
            ' vTextureCoord = aTextureCoord;',
            '}'
        ].join('\n');
        const vertexShader = gl.createShader(gl.VERTEX_SHADER);
        gl.shaderSource(vertexShader, vertexShaderSource);
        gl.compileShader(vertexShader);
        const fragmentShaderSource = [
            'precision highp float;',
            'varying lowp vec2 vTextureCoord;',
            'uniform sampler2D YTexture;',
            'uniform sampler2D UTexture;',
            'uniform sampler2D VTexture;',
            'const mat4 YUV2RGB = mat4',
            '(',
            ' 1.1643828125, 0, 1.59602734375, -.87078515625,',
            ' 1.1643828125, -.39176171875, -.81296875, .52959375,',
            ' 1.1643828125, 2.017234375, 0, -1.081390625,',
            ' 0, 0, 0, 1',
            ');',
            'void main(void) {',
            ' gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;',
            '}'
        ].join('\n');

        const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
        gl.shaderSource(fragmentShader, fragmentShaderSource);
        gl.compileShader(fragmentShader);
        gl.attachShader(program, vertexShader);
        gl.attachShader(program, fragmentShader);
        gl.linkProgram(program);
        gl.useProgram(program);
        if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
            console.log('[ER] Shader link failed.');
        }
        const vertexPositionAttribute = gl.getAttribLocation(program, 'aVertexPosition');
        gl.enableVertexAttribArray(vertexPositionAttribute);
        const textureCoordAttribute = gl.getAttribLocation(program, 'aTextureCoord');
        gl.enableVertexAttribArray(textureCoordAttribute);

        const verticesBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]), gl.STATIC_DRAW);
        gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
        const texCoordBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]), gl.STATIC_DRAW);
        gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

        // const tempGL = this.gl as any;
        // tempGL.y = new WebGLTexture(gl);
        // tempGL.u = new WebGLTexture(gl);
        // tempGL.v = new WebGLTexture(gl);
        // tempGL.y.bind(0, program, 'YTexture');
        // tempGL.u.bind(1, program, 'UTexture');
        // tempGL.v.bind(2, program, 'VTexture');

        this.webglY = new WebGLTexture(gl);
        this.webglU = new WebGLTexture(gl);
        this.webglV = new WebGLTexture(gl);
        this.webglY.bind(0, program, 'YTexture');
        this.webglU.bind(1, program, 'UTexture');
        this.webglV.bind(2, program, 'VTexture');
    }
    /**
     * Render a YUV frame stretched to fill the whole canvas.
     * @param videoFrame YUV420P data as a Uint8Array
     * @param width frame width
     * @param height frame height
     * @param uOffset byte offset of the U plane (= length of the Y plane)
     * @param vOffset length in bytes of the U plane (the V plane starts at uOffset + vOffset)
     */
    public renderFrame(videoFrame: Uint8Array, width: number, height: number, uOffset: number, vOffset: number) {
        if (!this.gl) {
            console.log('[ER] Render frame failed due to WebGL not supported.');
            return;
        }
        const gl = this.gl;
        gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
        gl.clearColor(0.0, 0.0, 0.0, 0.0);
        gl.clear(gl.COLOR_BUFFER_BIT);

        this.webglY.fill(width, height, videoFrame.subarray(0, uOffset));
        this.webglU.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset, uOffset + vOffset));
        this.webglV.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset + vOffset, videoFrame.length));

        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    }

    /**
     * Render a YUV frame preserving its aspect ratio (letterboxed).
     * @param videoFrame YUV420P data as a Uint8Array
     * @param width frame width
     * @param height frame height
     * @param uOffset byte offset of the U plane (= length of the Y plane)
     * @param vOffset length in bytes of the U plane (the V plane starts at uOffset + vOffset)
     */
    public renderSrcFrame(videoFrame: Uint8Array, width: number, height: number, uOffset: number, vOffset: number) {
        if (!this.gl) {
            console.log('[ER] Render frame failed due to WebGL not supported.');
            return;
        }

        const gl = this.gl;

        // Clear to an opaque black background (tweak the RGB values for a different color).
        gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
        gl.clearColor(0.0, 0.0, 0.0, 1);
        gl.clear(gl.COLOR_BUFFER_BIT);

        // Or clear to a transparent background instead:
        // gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
        // gl.clearColor(0.0, 0.0, 0.0, 0.0);
        // gl.clear(gl.COLOR_BUFFER_BIT);


        let tempX = 0;
        let tempY = 0;
        let tempWidth = gl.canvas.width;
        let tempHeight = gl.canvas.height;
        // Compare aspect ratios: a frame wider than the canvas gets bars top and
        // bottom; a taller frame gets bars left and right.
        if (width / height >= gl.canvas.width / gl.canvas.height) {
            tempHeight = gl.canvas.width * height / width;
            tempY = (gl.canvas.height - tempHeight) / 2;
        } else {
            tempWidth = gl.canvas.height * width / height;
            tempX = (gl.canvas.width - tempWidth) / 2;
        }
        gl.viewport(tempX, tempY, tempWidth, tempHeight);

        this.webglY.fill(width, height, videoFrame.subarray(0, uOffset));
        this.webglU.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset, uOffset + vOffset));
        this.webglV.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset + vOffset, videoFrame.length));

        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    }

    public fullscreen() {
        const canvas = this.canvas as any;
        if (canvas.requestFullscreen) {
            canvas.requestFullscreen();
        } else if (canvas.webkitRequestFullScreen) {
            canvas.webkitRequestFullScreen();
        } else if (canvas.mozRequestFullScreen) {
            canvas.mozRequestFullScreen();
        } else if (canvas.msRequestFullscreen) {
            canvas.msRequestFullscreen();
        } else {
            alert('This browser doesn\'t support fullscreen');
        }
    }

    public exitfullscreen() {
        const tempDocument = document as any;
        if (tempDocument.exitFullscreen) {
            tempDocument.exitFullscreen();
        } else if (tempDocument.webkitExitFullscreen) {
            tempDocument.webkitExitFullscreen();
        } else if (tempDocument.mozCancelFullScreen) {
            tempDocument.mozCancelFullScreen();
        } else if (tempDocument.msExitFullscreen) {
            tempDocument.msExitFullscreen();
        } else {
            alert('Exit fullscreen doesn\'t work');
        }
    }

}
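The YUV2RGB matrix in the fragment shader is the standard BT.601 limited-range ("TV range") conversion, where Y spans [16, 235] and U/V are centered at 128 (all normalized to [0, 1] in the shader). Since GLSL's vec4 * mat4 multiplies a row vector by the matrix columns, the matrix expands per channel to the reference sketch below (CPU-side, purely to make the shader constants readable; it is not used by the player):

// BT.601 limited-range YUV -> RGB; y, u, v are normalized to [0, 1].
// Matches the shader expression vec4(y, u, v, 1) * YUV2RGB.
function yuv2rgb(y: number, u: number, v: number): [number, number, number] {
  const clamp = (x: number) => Math.min(1, Math.max(0, x));
  const r = 1.1643828125 * y + 1.59602734375 * v - 0.87078515625;
  const g = 1.1643828125 * y - 0.39176171875 * u - 0.81296875 * v + 0.52959375;
  const b = 1.1643828125 * y + 2.017234375 * u - 1.081390625;
  return [clamp(r), clamp(g), clamp(b)];
}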

While rewriting I also added some comments and extras such as the aspect-ratio-preserving rendering; if anything is unclear, message me on WeChat.

Rendering video frames on the canvas

Import webgl-player:

import { WebGLPlayer } from './webgl-player';

Add member variables:

private webglPlayer?: WebGLPlayer;
@ViewChild('playCanvas', { static: true }) playCanvas: ElementRef<HTMLCanvasElement>;

Initialize webglPlayer in ngAfterViewInit:

ngAfterViewInit(): void {
    console.log(this.playCanvas);
    if (this.playCanvas) {
      this.webglPlayer = new WebGLPlayer(
        this.playCanvas.nativeElement, {}
      );
      console.log(this.webglPlayer);
    } else {
      console.error('canvas error');
    }
  }

Add the render method (this uses my rewritten aspect-ratio-preserving rendering):

private displayVideoFrame(obj: { data: Uint8Array, width: number, height: number }): void {
    const data = new Uint8Array(obj.data);
    const width = obj.width;
    const height = obj.height;
    const yLength = width * height;
    const uvLength = (width / 2) * (height / 2);
    this.webglPlayer?.renderSrcFrame(data, width, height, yLength, uvLength);
  }
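The two values follow directly from the YUV420P memory layout: the Y plane comes first (width * height bytes), followed by the U plane and then the V plane (width/2 * height/2 bytes each). As a quick sanity check with a hypothetical 640×360 frame:

// Hypothetical 640x360 YUV420P frame, for illustration only.
const width = 640, height = 360;
const yLength = width * height;              // 230400 bytes of Y
const uvLength = (width / 2) * (height / 2); // 57600 bytes each for U and V
// data[0 .. yLength)                  -> Y plane
// data[yLength .. yLength + uvLength) -> U plane
// data[yLength + uvLength .. total)   -> V plane
const total = yLength + 2 * uvLength;        // 345600 = width * height * 1.5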

Modify the wasm data callback to assemble and render each decoded frame:

const callback = this.wasm.addFunction((
      addrY: number,
      addrU: number,
      addrV: number,
      strideY: number,
      strideU: number,
      strideV: number,
      width: number,
      height: number,
      pts: number
    ) => {
      console.log('[%d]In video callback, size = %d * %d, pts = %d', ++this.videoSize, width, height, pts);
      const size = width * height + (width / 2) * (height / 2) + (width / 2) * (height / 2);
      const data = new Uint8Array(size);
      let pos = 0;
      for (let i = 0; i < height; i++) {
        const src = addrY + i * strideY;
        // subarray returns a view into the wasm heap (no copy); data.set() copies it out.
        const u8Tmp = this.wasm.HEAPU8.subarray(src, src + width);
        data.set(u8Tmp, pos);
        pos += u8Tmp.length;
      }
      for (let i = 0; i < height / 2; i++) {
        const src = addrU + i * strideU;
        const u8Tmp = this.wasm.HEAPU8.subarray(src, src + width / 2);
        data.set(u8Tmp, pos);
        pos += u8Tmp.length;
      }
      for (let i = 0; i < height / 2; i++) {
        const src = addrV + i * strideV;
        const u8Tmp = this.wasm.HEAPU8.subarray(src, src + width / 2);
        data.set(u8Tmp, pos);
        pos += u8Tmp.length;
      }
      const obj = {
        data,
        width,
        height
      };
      this.displayVideoFrame(obj);
    });
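The row-by-row copying is necessary because FFmpeg typically pads each plane: the stride (linesize) can be larger than the visible width, so a plane is not always one contiguous block in the wasm heap. The three loops above all follow the same pattern; a generic helper would look like this sketch (heap is the Emscripten HEAPU8 view; the other names match the callback):

// Copy `rows` rows of `cols` visible bytes from a strided plane in the wasm
// heap into `dst` starting at `pos`; returns the offset after the copied plane.
function copyPlane(heap: Uint8Array, dst: Uint8Array, pos: number,
                   addr: number, stride: number, cols: number, rows: number): number {
  for (let i = 0; i < rows; i++) {
    const src = addr + i * stride;
    dst.set(heap.subarray(src, src + cols), pos); // subarray is a view, set() copies
    pos += cols;
  }
  return pos;
}

One caveat: depending on the Emscripten version and build flags, addFunction may require a type signature as a second argument (for this callback it would be 'viiiiiiiii': void return, nine numeric parameters); if the call throws an assertion about a missing signature, that is the likely cause.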

Test results

[Screenshot: decoded H265 video rendered on the canvas via WebGL]
The H265 data is now decoded and displayed. The decoder_wasm repo doesn't do any timestamp handling either, so I'll skip that here for now as well.
In the next note I'll show how to move the decoding into a Worker, which makes multi-screen playback possible under the Angular framework.

WeChat: yjkhtddx
