EmuGL: set texture info from EGLImage
The EGL->GL translator implementation of glEGLImageTargetTexture2DOES was properly replacing uses of the target texture object with the texture object associated with the EGLImage in the host GL library, but it was not updating its own info about the texture (dimensions, etc.). This broke code paths where the translator relied on knowing this, e.g. when implementing the GLES glDrawTexi call on top of GL glDrawArrays.

Change-Id: Ia4aefd89852a2609221c56da76bfac927464c0b2
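For context, a minimal guest-side sequence that exercises this path might look like the sketch below. It is illustrative only: the native-buffer source, the 256x256 size, and the specific extensions (EGL_ANDROID_image_native_buffer, GL_OES_EGL_image, GL_OES_draw_texture) are assumptions, and the extension prototypes are presumed to be declared (EGL_EGLEXT_PROTOTYPES / GL_GLEXT_PROTOTYPES) or resolved via eglGetProcAddress.

/* Illustrative sketch only, not part of the patch.  Assumes the extensions
 * named above are available and their prototypes are declared. */
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>

static void drawEglImageBackedTexture(EGLDisplay dpy, EGLClientBuffer nativeBuf)
{
    /* Wrap an existing native buffer in an EGLImage (assumed 256x256). */
    EGLImageKHR img = eglCreateImageKHR(dpy, EGL_NO_CONTEXT,
                                        EGL_NATIVE_BUFFER_ANDROID,
                                        nativeBuf, NULL);

    /* Attach the EGLImage as the texture's storage.  This is the call the
     * translator handles in glEGLImageTargetTexture2DOES below. */
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)img);

    /* GL_OES_draw_texture expresses the crop rectangle in texels, so the
     * translator must know the texture's real dimensions to emulate this
     * call with glDrawArrays. */
    GLint crop[4] = { 0, 0, 256, 256 };
    glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, crop);
    glDrawTexiOES(0, 0, 0, 256, 256);
}

On the host side, the translator replaces the texture's storage correctly, but before this change it did not refresh its own TextureData record, so emulated calls that depend on the texture's dimensions saw stale values. The patch adds the missing bookkeeping: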
@@ -1673,6 +1673,10 @@ GL_API void GL_APIENTRY glEGLImageTargetTexture2DOES(GLenum target, GLeglImageOE
     ctx->dispatcher().glBindTexture(GL_TEXTURE_2D, img->globalTexName);
     TextureData *texData = getTextureTargetData(target);
     SET_ERROR_IF(texData==NULL,GL_INVALID_OPERATION);
+    texData->width = img->width;
+    texData->height = img->height;
+    texData->border = img->border;
+    texData->internalFormat = img->internalFormat;
     texData->sourceEGLImage = (unsigned int)image;
     texData->eglImageDetach = s_eglIface->eglDetachEGLImage;
 }
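Why the dimensions matter: glDrawTexi is emulated on top of glDrawArrays, and that emulation has to convert the texel-space crop rectangle into normalized texture coordinates. The following is a rough, hypothetical sketch of that kind of computation (the struct and function names are made up; this is not the translator's real code):

// Hypothetical sketch, not the translator's actual implementation.
struct TexInfo {
    int width;
    int height;
    int crop[4];   // Ux, Uy, Uw, Uh from GL_TEXTURE_CROP_RECT_OES
};

static void cropRectToTexCoords(const TexInfo& t, float out[8])
{
    // Normalize the texel-space crop rectangle by the texture dimensions.
    const float u0 = (float)t.crop[0] / (float)t.width;
    const float v0 = (float)t.crop[1] / (float)t.height;
    const float u1 = u0 + (float)t.crop[2] / (float)t.width;
    const float v1 = v0 + (float)t.crop[3] / (float)t.height;

    // Texture coordinates for the four corners of the screen-space quad
    // that glDrawArrays would draw (triangle-strip order).
    out[0] = u0; out[1] = v0;
    out[2] = u1; out[3] = v0;
    out[4] = u0; out[5] = v1;
    out[6] = u1; out[7] = v1;
}

If the EGLImage's width and height are never copied into the translator's texture record, the divisors above are stale or zero, which is exactly the breakage this commit fixes.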