如何在 WebGL 中将 HTML5 播放器中的视频绘制并投影到 3D 平面上?
我正在使用 Unity 将我的应用程序构建到 WebGL,我发现自己需要从 HTML 播放器中抓取视频并将其绘制在 3D 空间中的平面上。
我知道您可以在 CanvasRenderingContext2D
上调用 drawImage()
并传入对视频播放器的引用,当函数运行时,当前帧将被绘制到画布上。
我能够为该函数找到的最接近的 3D 等效项是 WebGL2RenderingContext.texImage3D()
。但是,我并不完全理解它是如何工作的,当我尝试对其进行测试时,我收到以下异常:Uncaught DOMException: The operation is insecure.
我使用的是自己的本地视频文件,因此这不应该是 CORS 问题,但我不知道原因到底是什么。
您可以在 this GitHub repo 中看到测试项目。
我发现了一个类似的问题 here,但不幸的是,答案显示了如何绘制我认为是预加载的纹理。我不知道如何从视频播放器中抓取并传递它们,或者这是否足够快以完成每一帧。
为了提供一些上下文,我正在尝试在我的 Unity/WebGL 应用程序中显示 HLS 实时流。我可以下载 .ts
(MPEG-2 传输流)视频片段并将它们拼接成连贯的视频流,但 Unity 的内置视频播放器不支持这种格式。
作为一种解决方案,我想我可以在 HTML5 播放器中抓取视频(如有必要,使用 hls.js)并将纹理注入 WebGL 应用程序,使用 JavaScript,每一帧。
Unity 允许您从其 C# 脚本内部运行 JavaScript 代码,因此调用时机应该不成问题,获取目标平面的世界缩放/位置也不难。我只需要编写一个 JavaScript 函数来以某种方式绘制纹理。
<!DOCTYPE html>
<html lang="en">
<head>
<!-- NOTE(review): tag names lowercased — "<Meta>" was a scrape artifact -->
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<title>WebGL</title>
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
<style>
body {
background-color: aquamarine;
}
</style>
</head>
<body>
<video muted autoplay width="480" height="270">
<source src="./test.mp4" type="video/mp4" />
</video>
<br>
<canvas width="1080" height="720"></canvas>
<button onclick="takeScreenshot()">Capture</button>
<script>
// NOTE: this is the failing attempt described in the question, kept as-is to
// illustrate the problem. texImage3D uploads to a 3D texture; a video frame is
// 2D, so texImage2D on a TEXTURE_2D binding (plus a shader program, buffers and
// a draw call) is what the accepted answer below uses instead.
function takeScreenshot() {
  var video = document.querySelector("video");
  var canvas = document.querySelector("canvas");
  var gl = canvas.getContext("webgl2");
  gl.texImage3D(
    gl.TEXTURE_3D,    // target (enum)
    0,                // level of detail
    gl.RGBA,          // internalFormat
    1920,             // width of texture
    1080,             // height of texture
    1,                // depth
    0,                // border
    gl.RGBA,          // format
    gl.UNSIGNED_BYTE, // type
    video             // source
  );
}
</script>
</body>
</html>
解决方法
这是设置可以接收视频像素的 webGL 对象的示例代码。希望对你有用。
基本上,您使用两个三角形创建一个长方体/矩形形状,然后将视频投影到该矩形上。
0-------1
| |
3-------2
//# two sets of... connected 3-points of a triangle
//# FIX: restored the 6 indices (two triangles, 0-1-2 and 0-2-3) — the list had
//# been truncated to [0,1,2,3] by the scrape; the comment above and the later
//# gl.drawElements(gl.TRIANGLES, 6, ...) call both require 6 indices.
var vertexIndices = [ 0, 1, 2,   0, 2, 3 ];
下面的示例代码还创建了一些必需的 GPU 着色器和程序。尝试一下
如果任何编码人员只想做 GPU 像素效果,那么在 Fragment 着色器中编写您的效果代码。
(参见代码部分://# example of basic colour effect
)。
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<title>WebGL</title>
<!--
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
-->
<style> body {background-color: aquamarine; } </style>
</head>
<body>
<video id="myVideo" controls muted autoplay width="480" height="270">
<source src="video.mp4" type="video/mp4" />
</video>
<br>
<button id="capture" onclick="takeScreenshot()"> Capture </button>
<br><br>
<!--
<canvas id="myCanvas" width="1080" height="720"></canvas>
-->
<canvas id="myCanvas" width="480" height="270"></canvas>
<!-- ########## Shader code ###### -->
<!-- ### Shader code here -->
<!-- Fragment shader program -->
<!-- GLSL source is kept in a non-JS script tag; getShaderByName() reads and compiles it -->
<script id="shader-fs" type="x-shader/x-fragment">
//<!-- //## code for pixel effects goes here if needed -->
//# these two vars will access
varying mediump vec2 vDirection;
uniform sampler2D uSampler;
void main(void)
{
//# get current video pixel's color (no FOR-loops needed like in JS Canvas)
//# vDirection is in [-1,1]; * 0.5 + 0.5 remaps it to [0,1] texture coordinates
gl_FragColor = texture2D(uSampler,vec2(vDirection.x * 0.5 + 0.5,vDirection.y * 0.5 + 0.5));
/*
//# example of basic colour effect
gl_FragColor.r = ( gl_FragColor.r * 1.15 );
gl_FragColor.g = ( gl_FragColor.g * 0.8 );
gl_FragColor.b = ( gl_FragColor.b * 0.45 );
*/
}
</script>
<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">
attribute mediump vec2 aVertexPosition;
varying mediump vec2 vDirection;
void main( void )
{
//# (2x, 2y, 2, 2): after the divide by w=2 this is (x, y, 1) — the quad's
//# corner positions pass through unchanged to clip space
gl_Position = vec4(aVertexPosition,1.0,1.0) * 2.0;
vDirection = aVertexPosition;
}
</script>
<!-- ### END Shader code... -->
<script>
//# Grab the page elements and create the rendering context.
var video = document.getElementById('myVideo');
const glcanvas = document.getElementById('myCanvas');
const gl = glcanvas.getContext("webgl") || glcanvas.getContext("experimental-webgl");

//# Report whether a usable WebGL context was obtained.
if (gl && gl instanceof WebGLRenderingContext) {
    console.log( "WebGL is available");
} else {
    console.log( "WebGL is NOT available" ); //# use regular JS canvas functions if this happens...
}

//# Filled in by attachShader(): the linked program plus attribute/uniform location maps.
var attributes, uniforms, program;
//# Compiles and links the two shader <script> tags into a GL program, makes it
//# current, and caches locations in the shared `attributes`/`uniforms` maps.
//# params: { fragmentShaderName, vertexShaderName, attributes: [names], uniforms: [names] }
function attachShader( params )
{
    //# FIX: declared locally — these were implicit globals
    var fragmentShader = getShaderByName(params.fragmentShaderName);
    var vertexShader = getShaderByName(params.vertexShaderName);

    program = gl.createProgram();
    gl.attachShader(program,vertexShader);
    gl.attachShader(program,fragmentShader);
    gl.linkProgram(program);
    if (!gl.getProgramParameter(program,gl.LINK_STATUS))
    { alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(program)); }
    gl.useProgram(program);

    // get the location of attributes and uniforms
    attributes = {};
    for (var i = 0; i < params.attributes.length; i++)
    {
        var attributeName = params.attributes[i];
        attributes[attributeName] = gl.getAttribLocation(program,attributeName);
        gl.enableVertexAttribArray(attributes[attributeName]);
    }

    uniforms = {};
    for (i = 0; i < params.uniforms.length; i++)
    {
        var uniformName = params.uniforms[i];
        uniforms[uniformName] = gl.getUniformLocation(program,uniformName);
        //# FIX: removed gl.enableVertexAttribArray(attributes[uniformName]) here —
        //# uniform names are not keys of `attributes`, so it passed `undefined`
        //# to a call that expects an attribute index (a GL error on every uniform).
    }
}
//# Looks up a <script> tag by id, compiles its text content as a shader and
//# returns the shader object. The tag's `type` attribute selects fragment vs.
//# vertex; on a compile error the info log is alerted and null is returned.
function getShaderByName( id )
{
    var tag = document.getElementById(id);

    //# concatenate the tag's text nodes — this is the GLSL source
    var source = "";
    for (var node = tag.firstChild; node; node = node.nextSibling)
    {
        if (node.nodeType === 3) { source += node.textContent; }
    }

    var shader = (tag.type === "x-shader/x-fragment")
        ? gl.createShader(gl.FRAGMENT_SHADER)
        : gl.createShader(gl.VERTEX_SHADER);

    gl.shaderSource(shader, source);
    gl.compileShader(shader);

    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
    {
        alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
        return null;
    }
    return shader;
}
//# attach shader
attachShader({
    fragmentShaderName: 'shader-fs',
    vertexShaderName: 'shader-vs',
    attributes: ['aVertexPosition'],
    uniforms: ['someVal','uSampler'],
});

// some webGL initialization
//# FIX: clearColor takes 4 components (RGBA) — the alpha argument had been lost
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clearDepth(1.0);
gl.disable(gl.DEPTH_TEST);

//# quad corner positions, matching the diagram:  0-------1
//#                                               |       |
//#                                               3-------2
//# FIX: restored the full 4 x (x,y) list — it had been truncated to one pair
var positionsBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
var positions = [
    -1.0,  1.0,   // 0: top-left
     1.0,  1.0,   // 1: top-right
     1.0, -1.0,   // 2: bottom-right
    -1.0, -1.0,   // 3: bottom-left
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);

//# NOTE(review): leftover per-vertex colour data — created but never used below
var vertexColors = [0xff00ff88,0xffffffff];
var cBuffer = gl.createBuffer();

//# two triangles (0-1-2 and 0-2-3) covering the quad
//# FIX: restored the 6 indices — the list had been truncated to [0],
//# while gl.drawElements() below draws 6 of them
var verticesIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
var vertexIndices = [ 0, 1, 2,   0, 2, 3 ];
gl.bufferData(
    gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(vertexIndices), gl.STATIC_DRAW
);

//# texture that receives the video frames (updated every frame by updateTexture)
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
//# must be LINEAR to avoid subtle pixelation (double-check this... test other options like NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
// update the texture from the video
//# Uploads the video element's current frame into `texture`.
var updateTexture = function()
{
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true); //# video rows are top-down; GL expects bottom-up
    //# next line fails in Safari if input video is NOT from same domain/server as this html code
    //# FIX: the HTMLVideoElement overload of texImage2D takes
    //# (target, level, internalformat, format, type, source) — the level and
    //# format arguments had been lost, making the call invalid.
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);
    gl.bindTexture(gl.TEXTURE_2D, null);
};
</script>
<script>
//# Vars for video frame grabbing when system/browser provides a new frame
//# (standard API first, then vendor-prefixed fallbacks for older browsers)
var requestAnimationFrame =
    window.requestAnimationFrame ||
    window.mozRequestAnimationFrame ||
    window.webkitRequestAnimationFrame ||
    window.msRequestAnimationFrame;
var cancelAnimationFrame =
    window.cancelAnimationFrame ||
    window.mozCancelAnimationFrame;
///////////////////////////////////////////////
//# Draws the current video frame onto the WebGL quad, then schedules itself
//# via requestAnimationFrame (so after the first click it keeps re-drawing).
function takeScreenshot( )
{
    //# video is ready (can display pixels)
    if( video.readyState >= 3 )
    {
        updateTexture(); //# update pixels with current video frame's pixels...
        gl.useProgram(program); //# apply our program

        gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
        //# FIX: vertexAttribPointer is (index, size, type, normalized, stride, offset);
        //# the size argument (2 floats per vertex) and the offset had been lost.
        gl.vertexAttribPointer(attributes['aVertexPosition'], 2, gl.FLOAT, false, 0, 0);

        //# Specify the texture to map onto the faces.
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.uniform1i(uniforms['uSampler'], 0);

        //# Draw GPU
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
        gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
    }
    //# re-capture the next frame... basically the function loops itself
    //# consider adding event listener for video pause to set value as... cancelAnimationFrame( takeScreenshot );
    requestAnimationFrame( takeScreenshot );
}
//////////////////////////////////////
//# NOTE(review): kept only as a record of the question's original (failing)
//# approach — it is never called. As written it would also throw, because no
//# variable named `canvas` exists on this page (the canvas element is held in
//# `glcanvas`), and that canvas already holds a "webgl" context, so a later
//# getContext("webgl2") on it would return null. Do not use; see takeScreenshot().
function takeScreenshot_old()
{
var gl = canvas.getContext("webgl2");
gl.texImage3D(
gl.TEXTURE_3D,// target (enum)
0,// level of detail
gl.RGBA,// internalFormat
1920,// width of texture
1080,// height of texture
1,// depth
0,// border
gl.RGBA,// format
gl.UNSIGNED_BYTE,// type
video,// source
);
}
</script>
</body>
</html>
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。