I'd like to take a WebGLTexture object that I've already rendered to and use it to create an HTML image element. The goal is to display the result of an off-screen render pass for debugging. It should be much easier than rendering the texture to a full-screen quad (my current debugging approach).
In WebGL, creating a texture from an image element is straightforward:
var image = new Image();
image.src = "myImg.jpg";
// image loads...
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
The image loading and decoding is all handled for you.
Is there a similarly simple way to go in the reverse direction? That is:
// This doesn't work
var img = new Image(texture);
// But maybe this could
var img = createImageFromTexture(texture);
function createImageFromTexture(texture) {
    // ... some combination of tricks ...
}
If there is a way to do this, I'm sure it would be useful in contexts beyond debugging as well. I'll keep looking to see if I can find a way, but I have a feeling someone must have tried this before.
Answer (nkr*_*ron, 19 votes):
You can create a framebuffer backed by the texture and then read the raw pixel data out of it with gl.readPixels(). Once you have the pixel data, you can copy it into a 2D canvas using an ImageData object. You can then construct an Image by setting its src attribute to the canvas's toDataURL().
function createImageFromTexture(gl, texture, width, height) {
    // Create a framebuffer backed by the texture
    var framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);

    // Read the contents of the framebuffer
    var data = new Uint8Array(width * height * 4);
    gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, data);

    gl.deleteFramebuffer(framebuffer);

    // Create a 2D canvas to store the result
    var canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    var context = canvas.getContext('2d');

    // Copy the pixels to a 2D canvas
    var imageData = context.createImageData(width, height);
    imageData.data.set(data);
    context.putImageData(imageData, 0, 0);

    var img = new Image();
    img.src = canvas.toDataURL();
    return img;
}
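A quick usage note, as a rough sketch rather than part of the answer above: it assumes gl is the same WebGLRenderingContext the texture belongs to and that you already know the texture's dimensions. Also keep in mind that gl.readPixels() returns rows bottom-to-top (WebGL's origin is the lower-left corner) while ImageData expects rows top-to-bottom, so the resulting image will typically appear vertically flipped; the flipRows helper below is one hypothetical way to reorder the rows before calling imageData.data.set().

// Hypothetical usage: gl and texture come from your own rendering code,
// and 256x256 is whatever size the texture actually is.
var debugImg = createImageFromTexture(gl, texture, 256, 256);
document.body.appendChild(debugImg);

// Optional helper (my own addition, not part of the answer above):
// reverse the row order returned by gl.readPixels() so the final
// image is not upside down.
function flipRows(data, width, height) {
    var bytesPerRow = width * 4;
    var flipped = new Uint8Array(data.length);
    for (var y = 0; y < height; y++) {
        var srcOffset = y * bytesPerRow;
        var dstOffset = (height - y - 1) * bytesPerRow;
        flipped.set(data.subarray(srcOffset, srcOffset + bytesPerRow), dstOffset);
    }
    return flipped;
}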