A Plugin-Free H.264/H.265 Stream Player Based on WebAssembly

国服卤鸡蛋 2024-09-09 13:03:01


I previously came across these articles: "Plugin-free web decoding and playback of H.264/H.265 (WebAssembly decoding, HTML5 playback)"

and "H.265/HEVC in practice for web video playback".

Contents

1. Installing the emsdk toolchain
2. Building FFmpeg with emsdk
3. The FFmpeg decoding library
4. Run the Makefile.sh script to generate the ffmpeg.js / ffmpeg.wasm decoding library
5. Implement the WebSocket server and push the stream
6. The web client
7. Results

Following the approach in those articles, I reproduced plugin-free H.265 decoding in the browser. First, a few words about my environment and framework.

I ported the nginx web server to a HiSilicon board so the PC can request the page from it. The application then runs two threads: one does the HiSilicon hardware encoding, and the other runs a WebSocket server that pushes the H.265 stream to the browser. On the web side, the received stream is software-decoded with the FFmpeg-built JS library, and the resulting YUV data is rendered with WebGL.

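On the board side, that layout boils down to two worker threads. A minimal startup sketch follows (my illustration, not the original code: encoder_thread_fun is a placeholder name for the HiSilicon encode loop, while server_thread_fun is the function shown in section 5):

#include <pthread.h>

extern void *encoder_thread_fun(void *arg); /* placeholder: HiSilicon H.265 encode loop */
extern void server_thread_fun(void *arg);   /* WebSocket push server, see section 5 */

int start_streaming(void *wss /* WebSocket_server* */)
{
    pthread_t enc_tid, ws_tid;

    if (pthread_create(&enc_tid, NULL, encoder_thread_fun, NULL) != 0)
        return -1;
    /* cast needed only because server_thread_fun returns void, not void* */
    if (pthread_create(&ws_tid, NULL, (void *(*)(void *))server_thread_fun, wss) != 0)
        return -1;

    pthread_detach(enc_tid);
    pthread_detach(ws_tid);
    return 0;
}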

1. Installing the emsdk toolchain

Installing Emscripten on Windows: https://www.cnblogs.com/tonghaolang/p/9253719.html

Installing emsdk on Windows and Linux:

https://blog.csdn.net/qq_34754747/article/details/103815349?depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-11&utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-11

Install the emsdk tool (the portable tarball below was the method at the time; these days emsdk is usually installed by cloning https://github.com/emscripten-core/emsdk and running ./emsdk install latest followed by ./emsdk activate latest):

wget https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz

2. Building FFmpeg with emsdk

Download the FFmpeg source and build the static library files on Linux with the command below. The flags disable everything except the H.264/HEVC decoders and the file protocol, which keeps the WASM build small; after configuring, run emmake make and emmake make install to produce the static libraries under the prefix.

emconfigure ./configure --cc="emcc" --prefix=/opt/tar-download/emccffmpeg \

--enable-cross-compile --target-os=none --arch=x86_32 --cpu=generic --disable-ffplay --disable-ffprobe \

--disable-asm --disable-doc --disable-devices --disable-pthreads --disable-w32threads --disable-network \

--disable-hwaccels --disable-parsers --disable-bsfs --disable-debug --disable-protocols --disable-indevs \

--disable-outdevs --enable-protocol=file --enable-decoder=hevc --enable-decoder=h264

3. The FFmpeg decoding library

ffmpeg.h

#ifndef _FFMPEG_H_

#define _FFMPEG_H_

#include <stdio.h>

#include <stdlib.h>

#include <string.h>

#include <unistd.h>

#include <sys/stat.h>

#include <sys/types.h>

#include <fcntl.h>

#include "libswscale/swscale.h"

#include "libavutil/opt.h"

#include "libavutil/imgutils.h"

#include "libavcodec/avcodec.h"

#define QUEUE_SIZE 20

//decoded picture data

typedef struct _imgdata{

int width;

int height;

uint8_t* data;

}Imgdata;

//解码API

extern int init_decoder(int codeid);//initialize the decoder

extern Imgdata* decoder_raw(char* framedata, int framesize );//decode one buffer of raw stream

extern uint8_t* process(AVFrame* frame, uint8_t* buff);//copy the frame's YUV data into buff

extern void free_decoder();//close the decoder and free memory

#endif
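One detail worth calling out: the JavaScript side (section 6) reads this struct straight out of the WASM heap as three consecutive 32-bit words (width, height, data pointer). That only works because on the wasm32 target both int and pointers are 4 bytes wide. A compile-time check like the following, placed after the typedef in ffmpeg.h, would document the assumption (my addition, not in the original):

#include <assert.h>  /* static_assert (C11) */
#include <stddef.h>  /* offsetof */

/* video.js reads HEAPU32[ptr/4], HEAPU32[ptr/4+1], HEAPU32[ptr/4+2];
   that requires the fields to sit at byte offsets 0, 4 and 8 */
static_assert(offsetof(Imgdata, width)  == 0, "width must be at offset 0");
static_assert(offsetof(Imgdata, height) == 4, "height must be at offset 4");
static_assert(offsetof(Imgdata, data)   == 8, "data must be at offset 8");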

ffmpeg.c

#include "ffmpeg.h"

AVCodec* codec; //codec descriptor

AVCodecParserContext* parser = NULL;

AVCodecContext* c = NULL; //codec context for the stream

AVPacket* pkt = NULL;

AVFrame* frame = NULL; //holds a decoded frame

Imgdata *imgbuff = NULL;

static int codeId = 0;

/**********************************************************
*Function: initialize the decoder
*Param:    codeid ---> 1 = open the H.265 decoder
*                      0 = open the H.264 decoder
*
*Return:   0 = initialized successfully
*          -1 = initialization failed
*
* *******************************************************/

int init_decoder(int codeid)

{

codeId = codeid;

switch(codeid)

{

case 0:{

codec = avcodec_find_decoder(AV_CODEC_ID_H264);

break;

}

case 1:{

codec = avcodec_find_decoder(AV_CODEC_ID_H265);

break;

}

default:{

printf("In C language no choose h.264/h.265 decoder\n");

return -1;

}

}

if(NULL == codec)

{

printf("codec find decoder failed\n");

return -1;

}

parser = av_parser_init(codec->id);

if(NULL == parser)

{

printf("can't jsparser init!\n");

return -1;

}

c = avcodec_alloc_context3(codec);

if(NULL == c)

{

printf("can't allocate video jscodec context!\n");

return -1;

}

if(avcodec_open2(c, codec, NULL) < 0)

{

printf("can't open codec\n");

return -1;

}

frame = av_frame_alloc();

if(NULL == frame)

{

printf("can't allocate video frame\n");

return -1;

}

pkt = av_packet_alloc();

if(NULL == pkt)

{

printf("can't allocate video packet\n");

return -1;

}

imgbuff = (Imgdata*)malloc(sizeof(Imgdata));

if(NULL == imgbuff)

{

printf("malloc imgbuff failed\n");

return -1;

}

imgbuff->data = NULL;//so free_decoder() never frees an uninitialized pointer

printf("In C language init decoder success!\n");

return 0;

}

/**********************************************************
*Function: decode a raw H.265/H.264 stream into YUV420P data
*Params:   framedata ----> raw stream data
*          framesize ----> raw stream length
*
*Return:   Imgdata struct: Imgdata->width  video width
*                          Imgdata->height video height
*                          Imgdata->data   YUV420P data
*
* *******************************************************/

Imgdata* decoder_raw(char* framedata, int framesize )

{

if(NULL == framedata || framesize <= 0)

{

printf("In C language framedata is null or framesize is zero\n");

return NULL;

}

/*****************************************************
*Filter: until the first VPS/SPS is seen, drop every
*buffer without decoding it; once a buffer starts with
*VPS/SPS, set flag = 0 --> flag = 1 and skip this
*check from then on.
* **************************************************/

int nut = 0;

static int flag = 0;

if(codeId == 0)//raw H.264 stream

{

nut = (char)framedata[4]& 0x1f; //nal_unit_type, after the 4-byte start code

while(!flag)

{

if( 7==nut ){ //SPS

flag = 1;

break;

}

else if( 8==nut )//PPS

{

return NULL;

}

else if( 5==nut )//I

{

return NULL;

}

else if( 1==nut )//P

{

return NULL;

}

else if( 9==nut )//AUD

{

return NULL;

}

else if( 6==nut )//SEI

{

return NULL;

}

else//Unknown

{

return NULL;

}

}

}

else

{

//raw H.265 stream

nut = (char)(framedata[4]>>1) &0x3f; //HEVC nal_unit_type, after the 4-byte start code

while(!flag)

{

if(nut == 32)//VPS

{

flag = 1;

break;

}

else if(nut == 33)//SPS

{

return NULL;

}

else if(nut == 34)//PPS

{

return NULL;

}

else if(nut == 19)//I frame

{

return NULL;

}

else if(nut == 1)//P frame

{

return NULL;

}

else if(nut == 35)//AUD

{

return NULL;

}

else if(nut == 39)//SEI

{

return NULL;

}

else //Unknown

{

return NULL;

}

}

}

//start decoding

int ret = 0;

int size = 0;

while(framesize > 0)

{

size = av_parser_parse2(parser, c, &pkt->data, &pkt->size, (const uint8_t *)framedata, framesize,

AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);//pts, dts, pos

if(size < 0)

{

printf("Error while parsing\n");

return NULL;

}

framedata += size;

framesize -= size;

if(pkt->size)

{

ret = avcodec_send_packet(c, pkt);

if (ret < 0)

{

printf("In C language error sending a packet for decoding\n");

return NULL;

}

else

{

while(ret >=0 )

{

ret = avcodec_receive_frame(c, frame);

if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)

{

break;

}

else if(ret < 0)

{

printf("Error during decoding\n");

return NULL;

}

//process the YUV data: copy it into buff; the page hands the YUV420P data to WebGL for rendering

int yuvBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);

uint8_t *buff = (uint8_t*)malloc(yuvBytes);

if(buff == NULL)

{

printf("malloc yuvBytes failed!\n");

return NULL;

}

if(imgbuff->data != NULL)
free(imgbuff->data);//release the previous frame's buffer

imgbuff->width = frame->width;

imgbuff->height = frame->height;

imgbuff->data = process(frame, buff);//buff is now owned by imgbuff; freeing it here would leave imgbuff->data dangling

}

}

}

}

return imgbuff;

}

/******************************************************
*Function: copy one frame of YUV data into a flat buffer
*Params:   frame ----> decoded YUV frame
*          buff  ----> destination buffer
*
*Return:   buff, holding YUV420P data
* ***************************************************/

uint8_t* process(AVFrame* frame, uint8_t* buff)

{

int i, j, k;

//Y plane: copy row by row, stripping linesize padding
for(i=0; i<frame->height; i++)

{

memcpy(buff + frame->width*i, frame->data[0]+frame->linesize[0]*i, frame->width);

}

//U plane
for(j=0; j<frame->height/2; j++)

{

memcpy(buff + frame->width*i + frame->width/2*j, frame->data[1] + frame->linesize[1]*j, frame->width/2);

}

//V plane
for(k=0; k<frame->height/2; k++)

{

memcpy(buff + frame->width*i +frame->width/2*j + frame->width/2*k, frame->data[2]+frame->linesize[2]*k, frame->width/2);

}

return buff;

}

/*****************************************************
*Function: close the decoder and free memory
*Params:   none
*
*Return:   none
* **************************************************/

void free_decoder()

{

av_parser_close(parser);

avcodec_free_context(&c);

av_frame_free(&frame);

av_packet_free(&pkt);

if(imgbuff !=NULL)

{

if(imgbuff->data != NULL)

{

free(imgbuff->data);

imgbuff->data = NULL;

}

free(imgbuff);

imgbuff = NULL;

}

printf("In C language free all requested memory! \n");

}

test.c

#include <stdio.h>

#include "ffmpeg.h"

//emcc wants a main() entry point; the real API is driven from JavaScript
int main(int argc, const char **argv)
{

return 0;

}
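To sanity-check the decoder outside the browser, the same API can be driven from a native build. A rough sketch (my illustration, not part of the original post; the file name is made up, error handling is elided, and it assumes the stream begins with VPS/SPS so the first buffer passes the header filter; in the real system each WebSocket message carries one complete frame starting at a start code):

#include <stdio.h>
#include "ffmpeg.h"

int main(void)
{
    if (init_decoder(1) != 0)                /* 1 = H.265 */
        return -1;

    FILE *fp = fopen("test.hevc", "rb");     /* hypothetical Annex-B file */
    if (fp == NULL) {
        free_decoder();
        return -1;
    }

    char chunk[4096];
    size_t n;
    while ((n = fread(chunk, 1, sizeof(chunk), fp)) > 0) {
        Imgdata *img = decoder_raw(chunk, (int)n);
        if (img != NULL)
            printf("decoded frame %dx%d\n", img->width, img->height);
    }

    fclose(fp);
    free_decoder();
    return 0;
}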

4. Run the Makefile.sh script to generate the ffmpeg.js / ffmpeg.wasm decoding library

Makefile.sh

echo "start building"

export TOTAL_MEMORY=67108864

export EXPORTED_FUNCTIONS="[ \

'_init_decoder', \

'_decoder_raw', \

'_free_decoder', \

'_main'

]"code>

emcc test.c ffmpeg.c ../lib/libavcodec.a ../lib/libavutil.a ../lib/libswscale.a \

-O2 \

-I "../inc" \

-s WASM=1 \

-s USE_PTHREADS=1 \

-s TOTAL_MEMORY=${TOTAL_MEMORY} \

-s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" \

-s EXTRA_EXPORTED_RUNTIME_METHODS="['addFunction']" \

-s RESERVED_FUNCTION_POINTERS=14 \

-s FORCE_FILESYSTEM=1 \

-o ../js/libffmpeg.js

echo "finish building"

5. Implement the WebSocket server and push the stream

Reference articles: "A C++ server-side WebSocket implementation"

and "WebSocket protocol analysis".

/*******************************************************************************
* Name:   server_thread_fun
* Role:   thread that runs the WebSocket server
* Params: arg: WebSocket_server struct
*
* Return: none
* Notes:  none
******************************************************************************/

void server_thread_fun(void *arg)

{

int ret, j;

int cfd;

char ip[16] = "";

struct sockaddr_in serverAddr;

struct sockaddr_in clientAddr;

socklen_t client_len = sizeof(clientAddr);

WebSocket_server *wss = (WebSocket_server *)arg;

memset(&serverAddr, 0, sizeof(serverAddr));

serverAddr.sin_family = AF_INET;

serverAddr.sin_port = htons(wss->port);

serverAddr.sin_addr.s_addr = INADDR_ANY;

wss->fd = socket(AF_INET, SOCK_STREAM, 0);

if(wss->fd < 0)

{

printf("server can't create socket\n");

exit(1);

}

//enable port reuse, then bind and listen

int opt = 1;

setsockopt(wss->fd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));

bind(wss->fd, (struct sockaddr*)&serverAddr, sizeof(serverAddr));

listen(wss->fd, 128);

//create an epoll instance

int epfd = epoll_create(1);

if(epfd < 0)

{

perror("");

exit(1);

}

struct epoll_event ev, evs[1024];

ev.data.fd = wss->fd;

ev.events = EPOLLIN | EPOLLET;

epoll_ctl(epfd, EPOLL_CTL_ADD, wss->fd, &ev);

while(1)

{

int n = epoll_wait(epfd, evs, 1024, -1); //wait for events

if( n < 0)

{

printf("server start epoll_wait failed\n");

exit(1);

}

for(j=0; j<n; j++)

{

if((evs[j].events & EPOLLERR) || (evs[j].events & EPOLLHUP)) //connection error

{

//printf("epoll error accept close:%d \n",evs[j].data.fd);

ev.data.fd = evs[j].data.fd;

if(epoll_ctl(epfd, EPOLL_CTL_DEL, evs[j].data.fd,&ev) < 0)

{

printf("server epoll_ctl:EPOLL_CTL_DEL failed\n");

close(epfd);

exit(1);

}

arrayRemoveItem(wss->client_fd_array, EPOLL_RESPOND_NUM, evs[j].data.fd);//client closed: remove the fd from the array

close(evs[j].data.fd);

}

else if(evs[j].data.fd == wss->fd) //new client connection

{

cfd = accept(wss->fd, (struct sockaddr *)&clientAddr, &client_len); //accept the new connection

if(cfd >=0 )

{

ev.data.fd = cfd;

epoll_ctl(epfd, EPOLL_CTL_ADD, cfd, &ev);

}

//printf("websocketV2 client ip=%s port=%d fd=%d\n",inet_ntop(AF_INET, &clientAddr.sin_addr.s_addr, ip, 16),ntohs(clientAddr.sin_port),cfd);

}

else if(evs[j].events & EPOLLIN)//data ready to read

{

memset(wss->buf, 0, sizeof(wss->buf));

ret = wss->callback(wss, evs[j].data.fd, wss->buf, sizeof(wss->buf));//invoke the receive callback

if(ret <= 0) //on ret <= 0, inspect errno to decide whether to drop the connection

{

if( errno >0 && (errno == EAGAIN || errno == EINTR))

;

else

{

//printf("accept close=%d errno =%d\n",evs[j].data.fd,errno);

ev.data.fd = evs[j].data.fd;

if(epoll_ctl(epfd, EPOLL_CTL_DEL, evs[j].data.fd, &ev) < 0)

{

printf("server epoll_ctl:EPOLL_CTL_DEL failed\n");

close(epfd);

exit(1);

}

arrayRemoveItem(wss->client_fd_array, EPOLL_RESPOND_NUM, evs[j].data.fd);//client closed: remove the fd from the array

close(evs[j].data.fd);

}

}

}

else if(evs[j].events & EPOLLOUT) //writable event, unused

;

}

}

close(epfd); //close the epoll instance

close(wss->fd);

}

/*******************************************************************************
* Name:   server_callback
* Role:   per-connection receive callback
* Params: wss: WebSocket_server struct
*         fd:  client socket descriptor
*         buf: receive buffer
*         buflen: buffer size
*
* Return: >0 bytes received, <=0 on error/handshake paths
* Notes:  none
******************************************************************************/

int ret1=0;

int server_callback(WebSocket_server *wss, int fd, char *buf, unsigned int buflen)

{

ret1 = webSocket_recv(fd, buf, buflen, NULL); //receive data from the client

if(ret1 > 0)

{

#if 0

printf(" client->server recv msg fd=%d mgs=%s\n", fd, buf);

if(strstr(buf, "hi~") != NULL)

ret1 = webSocket_send(fd, "I am server", strlen("I am server"), false, WDT_TXTDATA);

#endif

}

else if( ret1 < 0)

{

if(strncmp(buf, "GET", 3) == 0)

{

ret1 = webSocket_serverLinkToClient(fd, buf, ret1); //perform the WebSocket handshake with the client

if(ret1 > 0)

{

arrayAddItem(wss->client_fd_array,EPOLL_RESPOND_NUM, fd );//handshake succeeded: record the fd in the array

}

}

}

return ret1;

}
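webSocket_serverLinkToClient comes from the referenced article and performs the HTTP upgrade handshake. The heart of that handshake is computing the Sec-WebSocket-Accept header defined by RFC 6455: base64(SHA-1(client key + fixed GUID)). A sketch of just that step, using OpenSSL (my illustration, not the original helper):

#include <stdio.h>
#include <string.h>
#include <openssl/sha.h>
#include <openssl/evp.h>

/* accept_out receives the base64 accept token (28 chars + NUL) */
static void ws_compute_accept(const char *client_key, char accept_out[32])
{
    static const char guid[] = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
    unsigned char sha[SHA_DIGEST_LENGTH];
    char cat[128];

    snprintf(cat, sizeof(cat), "%s%s", client_key, guid);  /* key + GUID */
    SHA1((const unsigned char *)cat, strlen(cat), sha);    /* 20-byte digest */
    EVP_EncodeBlock((unsigned char *)accept_out, sha, SHA_DIGEST_LENGTH);
}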

/*******************************************************************************
* Name:   arrayAddItem
* Role:   add a cfd to the array
* Params: array: 2-D array
*         arraySize: number of slots
*         value: cfd
* Return: 0 on success, -1 if the array is full
* Notes:  none
******************************************************************************/

int arrayAddItem(int array[][2], int arraySize, int value)

{

int i;

for(i=0; i<arraySize; i++)

{

if(array[i][1] ==0 )

{

array[i][0] = value;

array[i][1] = 1;

return 0;

}

}

return -1;

}

/*******************************************************************************
* Name:   arrayRemoveItem
* Role:   remove a cfd from the array
* Params: array: 2-D array
*         arraySize: number of slots
*         value: cfd
* Return: 0 on success, -1 if not found
* Notes:  none
******************************************************************************/

int arrayRemoveItem(int array[][2], int arraySize, int value)

{

int i = 0;

for(i=0; i<arraySize; i++)

{

if(array[i][0] == value)

{

array[i][0] = 0;

array[i][1] = 0;

return 0;

}

}

return -1;

}
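webSocket_send is likewise from the referenced article. The part that matters for video push is the framing: the server wraps each H.265 buffer in a single unmasked binary frame (opcode 0x2; server-to-client frames are never masked). A minimal sketch of that framing (my illustration, assuming a blocking socket and payloads under 4 GiB; a production version would also loop on short writes):

#include <stdint.h>
#include <string.h>
#include <sys/socket.h>

static int ws_send_binary(int fd, const uint8_t *payload, size_t len)
{
    uint8_t hdr[10];
    size_t hlen;

    hdr[0] = 0x82;                     /* FIN=1, opcode=0x2 (binary) */
    if (len <= 125) {
        hdr[1] = (uint8_t)len;         /* 7-bit length */
        hlen = 2;
    } else if (len <= 0xFFFF) {
        hdr[1] = 126;                  /* 16-bit extended length, big-endian */
        hdr[2] = (uint8_t)(len >> 8);
        hdr[3] = (uint8_t)(len & 0xFF);
        hlen = 4;
    } else {
        hdr[1] = 127;                  /* 64-bit extended length, big-endian */
        memset(&hdr[2], 0, 4);         /* high 32 bits are zero below 4 GiB */
        hdr[6] = (uint8_t)(len >> 24);
        hdr[7] = (uint8_t)(len >> 16);
        hdr[8] = (uint8_t)(len >> 8);
        hdr[9] = (uint8_t)(len & 0xFF);
        hlen = 10;
    }
    if (send(fd, hdr, hlen, 0) < 0)
        return -1;
    return send(fd, payload, len, 0) < 0 ? -1 : 0;
}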

6. The web client

webgl.js

function Texture(gl) {

this.gl = gl;

this.texture = gl.createTexture();

gl.bindTexture(gl.TEXTURE_2D, this.texture);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

}

Texture.prototype.bind = function (n, program, name) {

var gl = this.gl;

gl.activeTexture([gl.TEXTURE0, gl.TEXTURE1, gl.TEXTURE2][n]);

gl.bindTexture(gl.TEXTURE_2D, this.texture);

gl.uniform1i(gl.getUniformLocation(program, name), n);

};

Texture.prototype.fill = function (width, height, data) {

var gl = this.gl;

gl.bindTexture(gl.TEXTURE_2D, this.texture);

gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, width, height, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);

};

function WebGLPlayer(canvas, options) {

this.canvas = canvas;

this.gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");

this.initGL(options);

}

WebGLPlayer.prototype.initGL = function (options) {

if (!this.gl) {

console.log("[ER] WebGL not supported.");

return;

}

var gl = this.gl;

gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);

var program = gl.createProgram();

var vertexShaderSource = [

"attribute highp vec4 aVertexPosition;",

"attribute vec2 aTextureCoord;",

"varying highp vec2 vTextureCoord;",

"void main(void) {",

" gl_Position = aVertexPosition;",

" vTextureCoord = aTextureCoord;",

"}"

].join("\n");

var vertexShader = gl.createShader(gl.VERTEX_SHADER);

gl.shaderSource(vertexShader, vertexShaderSource);

gl.compileShader(vertexShader);

var fragmentShaderSource = [

"precision highp float;",

"varying lowp vec2 vTextureCoord;",

"uniform sampler2D YTexture;",

"uniform sampler2D UTexture;",

"uniform sampler2D VTexture;",

"const mat4 YUV2RGB = mat4",

"(",

" 1.1643828125, 0, 1.59602734375, -.87078515625,",

" 1.1643828125, -.39176171875, -.81296875, .52959375,",

" 1.1643828125, 2.017234375, 0, -1.081390625,",

" 0, 0, 0, 1",

");",

"void main(void) {",

" gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;",

"}"

].join("\n");

var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);

gl.shaderSource(fragmentShader, fragmentShaderSource);

gl.compileShader(fragmentShader);

gl.attachShader(program, vertexShader);

gl.attachShader(program, fragmentShader);

gl.linkProgram(program);

gl.useProgram(program);

if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {

console.log("[ER] Shader link failed.");

}

var vertexPositionAttribute = gl.getAttribLocation(program, "aVertexPosition");

gl.enableVertexAttribArray(vertexPositionAttribute);

var textureCoordAttribute = gl.getAttribLocation(program, "aTextureCoord");

gl.enableVertexAttribArray(textureCoordAttribute);

var verticesBuffer = gl.createBuffer();

gl.bindBuffer(gl.ARRAY_BUFFER, verticesBuffer);

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, -1.0, -1.0, 0.0]), gl.STATIC_DRAW);

gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);

var texCoordBuffer = gl.createBuffer();

gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0]), gl.STATIC_DRAW);

gl.vertexAttribPointer(textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

gl.y = new Texture(gl);

gl.u = new Texture(gl);

gl.v = new Texture(gl);

gl.y.bind(0, program, "YTexture");

gl.u.bind(1, program, "UTexture");

gl.v.bind(2, program, "VTexture");

}

WebGLPlayer.prototype.renderFrame = function (videoFrame, width, height, uOffset, vOffset) {

if (!this.gl) {

console.log("[ER] Render frame failed due to WebGL not supported.");

return;

}

var gl = this.gl;

gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

gl.clearColor(0.0, 0.0, 0.0, 0.0);

gl.clear(gl.COLOR_BUFFER_BIT);

gl.y.fill(width, height, videoFrame.subarray(0, uOffset));

gl.u.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset, uOffset + vOffset));

gl.v.fill(width >> 1, height >> 1, videoFrame.subarray(uOffset + vOffset, videoFrame.length));

gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

};

WebGLPlayer.prototype.fullscreen = function () {

var canvas = this.canvas;

if (canvas.requestFullscreen) {

canvas.requestFullscreen();

} else if (canvas.webkitRequestFullScreen) {

canvas.webkitRequestFullScreen();

} else if (canvas.mozRequestFullScreen) {

canvas.mozRequestFullScreen();

} else if (canvas.msRequestFullscreen) {

canvas.msRequestFullscreen();

} else {

alert("This browser doesn't supporter fullscreen");

}

};

WebGLPlayer.prototype.exitfullscreen = function (){

if (document.exitFullscreen) {

document.exitFullscreen();

} else if (document.webkitExitFullscreen) {

document.webkitExitFullscreen();

} else if (document.mozCancelFullScreen) {

document.mozCancelFullScreen();

} else if (document.msExitFullscreen) {

document.msExitFullscreen();

} else {

alert("Exit fullscreen doesn't work");

}

}

video.js //WebSocket receiving side

var canvas;

var webglPlayer;

var width = 0;

var height = 0;

var socketurl = "ws://192.168.1.213:8000"; //WebSocket server ip:port

//codeid=1: decode a raw H.265 stream

function playH265()

{

play(1);

}

//codeid=0: decode a raw H.264 stream

function playH264()

{

play(0);

}

function play(codeid)

{

//create the WebSocket object

var websocket = new WebSocket(socketurl);

websocket.binaryType = "arraybuffer";//receive binary data as ArrayBuffer

//browser --> WebSocket server: connection opened

websocket.onopen = function()

{

//check the connection state

if(websocket.readyState == 0){

alert("browser connect websocket server failed,please reconnect");

return;

}

else if(websocket.readyState == 1){

console.log("In javascript language connect websocket server success!");

}

websocket.send("connect websocket server");//向服务器发送数据

//initialize the decoder: 1 = H.265, 0 = H.264

var ret = Module._init_decoder(codeid);

if(ret != 0)

{

console.log("init decoder failed!");

return ;

}

};

//receive data from the WebSocket server

let ptr;

websocket.onmessage = function(evt)

{

let buff = null;

let size = 0;

size = evt.data.byteLength;

buff = new Uint8Array(evt.data);//evt.data holds the raw stream as an ArrayBuffer

let buf = Module._malloc(size);//this did not work on mobile browsers (WebAssembly compatibility)

Module.HEAPU8.set(buff, buf);//copy the raw stream into the WASM heap at buf

ptr = Module._decoder_raw(buf, size);//decode with FFmpeg

if(ptr == 0)//frame was dropped by the VPS/SPS filter, or failed to decode
{
Module._free(buf);
return;
}

width = Module.HEAPU32[ptr/4];//read the Imgdata fields for WebGL: width, height, data pointer

height = Module.HEAPU32[ptr/4 +1];

let imgbuffptr = Module.HEAPU32[ptr/4 +2];

let imgbuff = Module.HEAPU8.subarray(imgbuffptr, imgbuffptr + width*height*3/2);

showYuv420pdata(width, height, imgbuff);

if(buf!=null)//free the heap buffer

{

Module._free(buf);

buf=null;

}

};

//the WebSocket connection closed

websocket.onclose = function()

{

//close the decoder and free its memory

Module._free_decoder();

console.log("In javascript language websocket client close !");

};

//a WebSocket communication error occurred

websocket.onerror = function()

{

alert("websocket 服务器未启动!");

return ;

}

}

/***************************************************************
Render YUV420P data with WebGL
**************************************************************/

function showYuv420pdata(width, height, data)

{

let ylength = width*height;

let uvlength = width*height/4;

if(!webglPlayer)

{

const canvasId = "canvas";

canvas = document.getElementById(canvasId);

webglPlayer = new WebGLPlayer(canvas, {

preserveDrawingBuffer:false

});

}

webglPlayer.renderFrame(data, width, height, ylength, uvlength);

}

/***************************************************************
Fullscreen via the WebGL player
**************************************************************/

function fullscreen()

{

if(!webglPlayer)

{

alert("没有渲染webgl暂时不能全屏");

return;

}

webglPlayer.fullscreen();

}

7. Results

I built this in June 2020 and forgot to take screenshots at the time, so there are no images. Here is how the approach performed in testing:

Decoding a 1080p 30 fps H.265 stream: latency of roughly 300-450 ms.

Decoding a 4K 30 fps H.265 stream: the latency keeps growing the longer it plays, which is unusable. The approach is suitable for resolutions up to 1080p.


