diff --git a/Userland/Libraries/LibGL/GL/gl.h b/Userland/Libraries/LibGL/GL/gl.h
index fe8e90d248..94c6a6b242 100644
--- a/Userland/Libraries/LibGL/GL/gl.h
+++ b/Userland/Libraries/LibGL/GL/gl.h
@@ -79,7 +79,14 @@ extern "C" {
 #define GL_SHADING_LANGUAGE_VERSION 0x8B8C
 
 // Get parameters
+#define GL_DOUBLEBUFFER 0x0C32
 #define GL_MAX_TEXTURE_SIZE 0x0D33
+#define GL_RED_BITS 0x0D52
+#define GL_GREEN_BITS 0x0D53
+#define GL_BLUE_BITS 0x0D54
+#define GL_ALPHA_BITS 0x0D55
+#define GL_DEPTH_BITS 0x0D56
+#define GL_STENCIL_BITS 0x0D57
 #define GL_MAX_TEXTURE_UNITS 0x84E2
 
 // Blend factors
diff --git a/Userland/Libraries/LibGL/SoftwareGLContext.cpp b/Userland/Libraries/LibGL/SoftwareGLContext.cpp
index 7792d20fd2..6ee5292b23 100644
--- a/Userland/Libraries/LibGL/SoftwareGLContext.cpp
+++ b/Userland/Libraries/LibGL/SoftwareGLContext.cpp
@@ -1582,6 +1582,9 @@ void SoftwareGLContext::gl_get_booleanv(GLenum pname, GLboolean* data)
     case GL_DEPTH_TEST:
         *data = m_depth_test_enabled ? GL_TRUE : GL_FALSE;
         break;
+    case GL_DOUBLEBUFFER:
+        *data = GL_TRUE;
+        break;
     case GL_CULL_FACE:
         *data = m_cull_faces ? GL_TRUE : GL_FALSE;
         break;
@@ -1683,6 +1686,14 @@ void SoftwareGLContext::gl_get_integerv(GLenum pname, GLint* data)
     case GL_UNPACK_ROW_LENGTH:
         *data = m_unpack_row_length;
         break;
+    case GL_RED_BITS:
+    case GL_GREEN_BITS:
+    case GL_BLUE_BITS:
+    case GL_ALPHA_BITS:
+    case GL_DEPTH_BITS:
+    case GL_STENCIL_BITS:
+        *data = sizeof(float) * 8;
+        break;
     default:
         // According to the Khronos docs, we always return GL_INVALID_ENUM if we encounter a non-accepted value
        // for `pname`
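
A minimal sketch of how a client could exercise the parameters this patch implements, assuming a current LibGL context; the dump_framebuffer_config helper is hypothetical and not part of the patch:

#include <GL/gl.h>
#include <stdio.h>

// Query the framebuffer configuration parameters added by this patch.
static void dump_framebuffer_config(void)
{
    GLboolean double_buffered = GL_FALSE;
    glGetBooleanv(GL_DOUBLEBUFFER, &double_buffered);

    GLint red = 0, green = 0, blue = 0, alpha = 0, depth = 0, stencil = 0;
    glGetIntegerv(GL_RED_BITS, &red);
    glGetIntegerv(GL_GREEN_BITS, &green);
    glGetIntegerv(GL_BLUE_BITS, &blue);
    glGetIntegerv(GL_ALPHA_BITS, &alpha);
    glGetIntegerv(GL_DEPTH_BITS, &depth);
    glGetIntegerv(GL_STENCIL_BITS, &stencil);

    printf("double-buffered: %s\n", double_buffered == GL_TRUE ? "yes" : "no");
    printf("R/G/B/A bits: %d/%d/%d/%d, depth: %d, stencil: %d\n",
        red, green, blue, alpha, depth, stencil);
}

With this patch applied, the sketch above would report a double-buffered context with 32 bits for every channel, since the patch answers all six bit-depth queries with `sizeof(float) * 8`, i.e. the context stores each component as a float rather than tracking per-channel depths separately.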