Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
WebGL video upload: patch commenting out the GPU–GPU fast path (CopyTextureCHROMIUM / accelerated ImageBuffer copy) in WebGLRenderingContextBase::texImage2D for HTMLVideoElement sources, forcing the normal software upload path.
+++ b/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
@@ -4266,30 +4266,30 @@ void WebGLRenderingContextBase::texImage2D(GLenum target, GLint level, GLint int
// Go through the fast path doing a GPU-GPU textures copy without a readback to system memory if possible.
// Otherwise, it will fall back to the normal SW path.
- if (GL_TEXTURE_2D == target) {
- if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
- && video->copyVideoTextureToPlatformTexture(contextGL(), texture->object(), internalformat, type, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
- return;
- }
-
- // Try using an accelerated image buffer, this allows YUV conversion to be done on the GPU.
- OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
- if (surface->isValid()) {
- OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(std::move(surface)));
- if (imageBuffer) {
- // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
- // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
- // may still do a CPU conversion and upload the results).
- video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
-
- // This is a straight GPU-GPU copy, any necessary color space conversion was handled in the paintCurrentFrameInContext() call.
- if (imageBuffer->copyToPlatformTexture(contextGL(), texture->object(), internalformat, type,
- level, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
- return;
- }
- }
- }
- }
+// if (GL_TEXTURE_2D == target) {
+// if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
+// && video->copyVideoTextureToPlatformTexture(contextGL(), texture->object(), internalformat, type, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
+// return;
+// }
+//
+// // Try using an accelerated image buffer, this allows YUV conversion to be done on the GPU.
+// OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
+// if (surface->isValid()) {
+// OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(std::move(surface)));
+// if (imageBuffer) {
+// // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
+// // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
+// // may still do a CPU conversion and upload the results).
+// video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
+//
+// // This is a straight GPU-GPU copy, any necessary color space conversion was handled in the paintCurrentFrameInContext() call.
+// if (imageBuffer->copyToPlatformTexture(contextGL(), texture->object(), internalformat, type,
+// level, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
+// return;
+// }
+// }
+// }
+// }
// Normal pure SW path.
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.