From 9e90aed5de82732cc9921f01388d3063a41a053b Mon Sep 17 00:00:00 2001 From: "commit-bot@chromium.org" Date: Thu, 16 Jan 2014 16:35:09 +0000 Subject: [PATCH] Rename GrGLBinding->GrGLStandard, no longer a bitfield BUG=skia:2042 R=jvanverth@google.com Author: bsalomon@google.com Review URL: https://codereview.chromium.org/133413003 git-svn-id: http://skia.googlecode.com/svn/trunk@13108 2bbb7eff-a529-9590-31e7-b0007b416f81 --- include/gpu/gl/GrGLExtensions.h | 10 +++-- include/gpu/gl/GrGLInterface.h | 36 ++++++++--------- src/gpu/gl/GrGLCaps.cpp | 46 +++++++++++----------- src/gpu/gl/GrGLContext.cpp | 15 ++++--- src/gpu/gl/GrGLContext.h | 6 +-- src/gpu/gl/GrGLCreateNullInterface.cpp | 2 +- src/gpu/gl/GrGLExtensions.cpp | 2 +- src/gpu/gl/GrGLInterface.cpp | 32 +++++++-------- src/gpu/gl/GrGLSL.cpp | 20 +++++----- src/gpu/gl/GrGLSL.h | 3 +- src/gpu/gl/GrGLShaderBuilder.cpp | 8 ++-- src/gpu/gl/GrGLShaderVar.h | 6 +-- src/gpu/gl/GrGLUtil.cpp | 18 +++------ src/gpu/gl/GrGLUtil.h | 3 +- src/gpu/gl/GrGpuGL.cpp | 18 ++++----- src/gpu/gl/GrGpuGL.h | 2 +- src/gpu/gl/SkGLContextHelper.cpp | 15 +++---- .../android/GrGLCreateNativeInterface_android.cpp | 12 +++--- src/gpu/gl/android/SkNativeGLContext_android.cpp | 8 ++-- src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp | 2 +- src/gpu/gl/debug/GrGLCreateDebugInterface.cpp | 2 +- src/gpu/gl/iOS/GrGLCreateNativeInterface_iOS.cpp | 2 +- src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp | 4 +- src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp | 4 +- src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp | 4 +- src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp | 4 +- tests/GLInterfaceValidation.cpp | 8 +--- 27 files changed, 134 insertions(+), 158 deletions(-) diff --git a/include/gpu/gl/GrGLExtensions.h b/include/gpu/gl/GrGLExtensions.h index ffb6733..45f698e 100644 --- a/include/gpu/gl/GrGLExtensions.h +++ b/include/gpu/gl/GrGLExtensions.h @@ -19,16 +19,18 @@ */ class GrGLExtensions { public: - bool init(GrGLBinding binding, const GrGLInterface* iface) { - SkASSERT(binding & iface->fBindingsExported); - return this->init(binding, iface->fGetString, iface->fGetStringi, iface->fGetIntegerv); + bool init(const GrGLInterface* iface) { + return this->init(iface->fStandard, + iface->fGetString, + iface->fGetStringi, + iface->fGetIntegerv); } /** * We sometimes need to use this class without having yet created a GrGLInterface. This version * of init expects that getString is always non-NULL while getIntegerv and getStringi are non- * NULL if on desktop GL with version 3.0 or higher. Otherwise it will fail. */ - bool init(GrGLBinding binding, + bool init(GrGLStandard standard, GrGLGetStringProc getString, GrGLGetStringiProc getStringi, GrGLGetIntegervProc getIntegerv); diff --git a/include/gpu/gl/GrGLInterface.h b/include/gpu/gl/GrGLInterface.h index 588a3a2..2471da9 100644 --- a/include/gpu/gl/GrGLInterface.h +++ b/include/gpu/gl/GrGLInterface.h @@ -14,22 +14,17 @@ //////////////////////////////////////////////////////////////////////////////// /** - * Classifies GL contexts (currently as Desktop vs. ES2). This is a bitfield. - * A GrGLInterface (defined below) may support multiple bindings. + * Classifies GL contexts by which standard they implement (currently as Desktop + * vs. ES). 
*/ -enum GrGLBinding { - kNone_GrGLBinding = 0x0, - - kDesktop_GrGLBinding = 0x01, - kES_GrGLBinding = 0x02, // ES2+ only - - // for iteration of GrGLBindings - kFirstGrGLBinding = kDesktop_GrGLBinding, - kLastGrGLBinding = kES_GrGLBinding +enum GrGLStandard { + kNone_GrGLStandard, + kGL_GrGLStandard, + kGLES_GrGLStandard, }; // Temporary alias until Chromium can be updated. -static const GrGLBinding kES2_GrGLBinding = kES_GrGLBinding; +static const GrGLStandard kES2_GrGLBinding = kGLES_GrGLStandard; //////////////////////////////////////////////////////////////////////////////// @@ -121,15 +116,16 @@ public: GrGLInterface(); - // Validates that the GrGLInterface supports a binding. This means that - // the GrGLinterface advertises the binding in fBindingsExported and all - // the necessary function pointers have been initialized. The interface is - // validated for the current OpenGL context. - bool validate(GrGLBinding binding) const; + // Validates that the GrGLInterface supports its advertised standard. This means the necessary + // function pointers have been initialized for both the GL version and any advertised + // extensions. + bool validate() const; - // Indicator variable specifying the type of GL implementation - // exported: GLES2 and/or Desktop. - GrGLBinding fBindingsExported; + // Indicates the type of GL implementation + union { + GrGLStandard fStandard; + GrGLStandard fBindingsExported; // Legacy name, will be removed when Chromium is updated. + }; GLPtr fActiveTexture; GLPtr fAttachShader; diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp index beee39d..236aeac 100644 --- a/src/gpu/gl/GrGLCaps.cpp +++ b/src/gpu/gl/GrGLCaps.cpp @@ -95,18 +95,18 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { return; } - GrGLBinding binding = ctxInfo.binding(); + GrGLStandard standard = ctxInfo.standard(); GrGLVersion version = ctxInfo.version(); /************************************************************************** * Caps specific to GrGLCaps **************************************************************************/ - if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == standard) { GR_GL_GetIntegerv(gli, GR_GL_MAX_FRAGMENT_UNIFORM_VECTORS, &fMaxFragmentUniformVectors); } else { - SkASSERT(kDesktop_GrGLBinding == binding); + SkASSERT(kGL_GrGLStandard == standard); GrGLint max; GR_GL_GetIntegerv(gli, GR_GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, &max); fMaxFragmentUniformVectors = max / 4; @@ -125,7 +125,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { GR_GL_GetIntegerv(gli, GR_GL_MAX_VERTEX_ATTRIBS, &fMaxVertexAttributes); GR_GL_GetIntegerv(gli, GR_GL_MAX_TEXTURE_IMAGE_UNITS, &fMaxFragmentTextureUnits); - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fRGBA8RenderbufferSupport = true; } else { fRGBA8RenderbufferSupport = version >= GR_GL_VER(3,0) || ctxInfo.hasExtension("GL_ARM_rgba8"); } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fBGRAFormatSupport = version >= GR_GL_VER(1,2) || ctxInfo.hasExtension("GL_EXT_bgra"); } else { @@ -147,14 +147,14 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { kSkia8888_GrPixelConfig != kBGRA_8888_GrPixelConfig); } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fTextureSwizzleSupport = version >= GR_GL_VER(3,3) ||
ctxInfo.hasExtension("GL_ARB_texture_swizzle"); } else { fTextureSwizzleSupport = version >= GR_GL_VER(3,0); } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fUnpackRowLengthSupport = true; fUnpackFlipYSupport = false; fPackRowLengthSupport = true; @@ -169,10 +169,10 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { ctxInfo.hasExtension("GL_ANGLE_pack_reverse_row_order"); } - fTextureUsageSupport = (kES_GrGLBinding == binding) && + fTextureUsageSupport = (kGLES_GrGLStandard == standard) && ctxInfo.hasExtension("GL_ANGLE_texture_usage"); - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { // The EXT version can apply to either GL or GLES. fTexStorageSupport = version >= GR_GL_VER(4,2) || ctxInfo.hasExtension("GL_ARB_texture_storage") || @@ -186,7 +186,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { // ARB_texture_rg is part of OpenGL 3.0, but mesa doesn't support it if // it doesn't have ARB_texture_rg extension. - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { if (ctxInfo.isMesa()) { fTextureRedSupport = ctxInfo.hasExtension("GL_ARB_texture_rg"); } else { @@ -198,13 +198,13 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { ctxInfo.hasExtension("GL_EXT_texture_rg"); } - fImagingSupport = kDesktop_GrGLBinding == binding && + fImagingSupport = kGL_GrGLStandard == standard && ctxInfo.hasExtension("GL_ARB_imaging"); // ES 2 only guarantees RGBA/uchar + one other format/type combo for // ReadPixels. The other format has to checked at run-time since it // can change based on which render target is bound - fTwoFormatLimit = kES_GrGLBinding == binding; + fTwoFormatLimit = kGLES_GrGLStandard == standard; // Known issue on at least some Intel platforms: // http://code.google.com/p/skia/issues/detail?id=946 @@ -228,7 +228,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { fFullClearIsFree = true; } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fVertexArrayObjectSupport = version >= GR_GL_VER(3, 0) || ctxInfo.hasExtension("GL_ARB_vertex_array_object"); } else { @@ -236,7 +236,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { ctxInfo.hasExtension("GL_OES_vertex_array_object"); } - if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == standard) { if (ctxInfo.hasExtension("GL_EXT_shader_framebuffer_fetch")) { fFBFetchType = kEXT_FBFetchType; } else if (ctxInfo.hasExtension("GL_NV_shader_framebuffer_fetch")) { @@ -263,7 +263,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { } } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { // we could also look for GL_ATI_separate_stencil extension or // GL_EXT_stencil_two_side but they use different function signatures // than GL2.0+ (and than each other). @@ -277,14 +277,14 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { fStencilWrapOpsSupport = true; } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fBufferLockSupport = true; // we require VBO support and the desktop VBO extension includes // glMapBuffer. 
} else { fBufferLockSupport = ctxInfo.hasExtension("GL_OES_mapbuffer"); } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { SkASSERT(ctxInfo.version() >= GR_GL_VER(2,0) || ctxInfo.hasExtension("GL_ARB_texture_non_power_of_two")); fNPOTTextureTileSupport = true; @@ -301,7 +301,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { fMipMapSupport = fNPOTTextureTileSupport || ctxInfo.hasExtension("GL_IMG_texture_npot"); } - fHWAALineSupport = (kDesktop_GrGLBinding == binding); + fHWAALineSupport = (kGL_GrGLStandard == standard); GR_GL_GetIntegerv(gli, GR_GL_MAX_TEXTURE_SIZE, &fMaxTextureSize); GR_GL_GetIntegerv(gli, GR_GL_MAX_RENDERBUFFER_SIZE, &fMaxRenderTargetSize); @@ -320,7 +320,7 @@ void GrGLCaps::init(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { kQualcomm_GrGLVendor != ctxInfo.vendor(); // Enable supported shader-related caps - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == standard) { fDualSourceBlendingSupport = ctxInfo.version() >= GR_GL_VER(3,3) || ctxInfo.hasExtension("GL_ARB_blend_func_extended"); fShaderDerivativeSupport = true; @@ -375,7 +375,7 @@ void GrGLCaps::initConfigRenderableTable(const GrGLContextInfo& ctxInfo) { kYes_MSAA = 1, }; - if (kDesktop_GrGLBinding == ctxInfo.binding()) { + if (kGL_GrGLStandard == ctxInfo.standard()) { // Post 3.0 we will get R8 // Prior to 3.0 we will get ALPHA8 (with GL_ARB_framebuffer_object) if (ctxInfo.version() >= GR_GL_VER(3,0) || @@ -389,7 +389,7 @@ void GrGLCaps::initConfigRenderableTable(const GrGLContextInfo& ctxInfo) { fConfigRenderSupport[kAlpha_8_GrPixelConfig][kYes_MSAA] = fTextureRedSupport; } - if (kDesktop_GrGLBinding != ctxInfo.binding()) { + if (kGL_GrGLStandard != ctxInfo.standard()) { // only available in ES fConfigRenderSupport[kRGB_565_GrPixelConfig][kNo_MSAA] = true; fConfigRenderSupport[kRGB_565_GrPixelConfig][kYes_MSAA] = true; @@ -458,7 +458,7 @@ bool GrGLCaps::readPixelsSupported(const GrGLInterface* intf, void GrGLCaps::initFSAASupport(const GrGLContextInfo& ctxInfo, const GrGLInterface* gli) { fMSFBOType = kNone_MSFBOType; - if (kDesktop_GrGLBinding != ctxInfo.binding()) { + if (kGL_GrGLStandard != ctxInfo.standard()) { // We prefer the EXT/IMG extension over ES3 MSAA because we've observed // ES3 driver bugs on at least one device with a tiled GPU (N10). 
if (ctxInfo.hasExtension("GL_EXT_multisampled_render_to_texture")) { @@ -506,7 +506,7 @@ void GrGLCaps::initStencilFormats(const GrGLContextInfo& ctxInfo) { // gS = {GR_GL_STENCIL_INDEX, kUnknownBitCount, kUnknownBitCount, false}, gDS = {GR_GL_DEPTH_STENCIL, kUnknownBitCount, kUnknownBitCount, true }; - if (kDesktop_GrGLBinding == ctxInfo.binding()) { + if (kGL_GrGLStandard == ctxInfo.standard()) { bool supportsPackedDS = ctxInfo.version() >= GR_GL_VER(3,0) || ctxInfo.hasExtension("GL_EXT_packed_depth_stencil") || diff --git a/src/gpu/gl/GrGLContext.cpp b/src/gpu/gl/GrGLContext.cpp index 7c99a1c..fad885b 100644 --- a/src/gpu/gl/GrGLContext.cpp +++ b/src/gpu/gl/GrGLContext.cpp @@ -9,7 +9,7 @@ //////////////////////////////////////////////////////////////////////////////// GrGLContextInfo& GrGLContextInfo::operator= (const GrGLContextInfo& ctxInfo) { - fBindingInUse = ctxInfo.fBindingInUse; + fStandard = ctxInfo.fStandard; fGLVersion = ctxInfo.fGLVersion; fGLSLGeneration = ctxInfo.fGLSLGeneration; fVendor = ctxInfo.fVendor; @@ -34,14 +34,11 @@ bool GrGLContextInfo::initialize(const GrGLInterface* interface) { GR_GL_CALL_RET(interface, rendererUByte, GetString(GR_GL_RENDERER)); const char* renderer = reinterpret_cast(rendererUByte); - GrGLBinding binding = GrGLGetBindingInUseFromString(ver); - - if (0 != binding && interface->validate(binding) && fExtensions.init(binding, interface)) { - fBindingInUse = binding; + if (interface->validate() && fExtensions.init(interface)) { fGLVersion = GrGLGetVersionFromString(ver); - fGLSLGeneration = GrGetGLSLGeneration(fBindingInUse, interface); + fGLSLGeneration = GrGetGLSLGeneration(interface); fVendor = GrGLGetVendor(interface); @@ -52,6 +49,8 @@ bool GrGLContextInfo::initialize(const GrGLInterface* interface) { fIsChromium = GrGLIsChromiumFromRendererString(renderer); fGLCaps->init(*this, interface); + + fStandard = interface->fStandard; return true; } } @@ -59,11 +58,11 @@ bool GrGLContextInfo::initialize(const GrGLInterface* interface) { } bool GrGLContextInfo::isInitialized() const { - return kNone_GrGLBinding != fBindingInUse; + return kNone_GrGLStandard != fStandard; } void GrGLContextInfo::reset() { - fBindingInUse = kNone_GrGLBinding; + fStandard = kNone_GrGLStandard; fGLVersion = GR_GL_VER(0, 0); fGLSLGeneration = static_cast(0); fVendor = kOther_GrGLVendor; diff --git a/src/gpu/gl/GrGLContext.h b/src/gpu/gl/GrGLContext.h index d2174c5..a418121 100644 --- a/src/gpu/gl/GrGLContext.h +++ b/src/gpu/gl/GrGLContext.h @@ -19,7 +19,7 @@ /** * Encapsulates information about an OpenGL context including the OpenGL - * version, the GrGLBinding type of the context, and GLSL version. + * version, the GrGLStandard type of the context, and GLSL version. 
*/ class GrGLContextInfo { public: @@ -43,7 +43,7 @@ public: bool initialize(const GrGLInterface* interface); bool isInitialized() const; - GrGLBinding binding() const { return fBindingInUse; } + GrGLStandard standard() const { return fStandard; } GrGLVersion version() const { return fGLVersion; } GrGLSLGeneration glslGeneration() const { return fGLSLGeneration; } GrGLVendor vendor() const { return fVendor; } @@ -75,7 +75,7 @@ public: private: - GrGLBinding fBindingInUse; + GrGLStandard fStandard; GrGLVersion fGLVersion; GrGLSLGeneration fGLSLGeneration; GrGLVendor fVendor; diff --git a/src/gpu/gl/GrGLCreateNullInterface.cpp b/src/gpu/gl/GrGLCreateNullInterface.cpp index c169b1c..d1d851d 100644 --- a/src/gpu/gl/GrGLCreateNullInterface.cpp +++ b/src/gpu/gl/GrGLCreateNullInterface.cpp @@ -266,7 +266,7 @@ const GrGLInterface* GrGLCreateNullInterface() { if (!glInterface.get()) { GrGLInterface* interface = SkNEW(GrGLInterface); glInterface.reset(interface); - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; interface->fActiveTexture = nullGLActiveTexture; interface->fAttachShader = nullGLAttachShader; interface->fBeginQuery = nullGLBeginQuery; diff --git a/src/gpu/gl/GrGLExtensions.cpp b/src/gpu/gl/GrGLExtensions.cpp index 5ebab14..dcfa4f7 100644 --- a/src/gpu/gl/GrGLExtensions.cpp +++ b/src/gpu/gl/GrGLExtensions.cpp @@ -18,7 +18,7 @@ inline bool extension_compare(const SkString& a, const SkString& b) { } } -bool GrGLExtensions::init(GrGLBinding binding, +bool GrGLExtensions::init(GrGLStandard standard, GrGLGetStringProc getString, GrGLGetStringiProc getStringi, GrGLGetIntegervProc getIntegerv) { diff --git a/src/gpu/gl/GrGLInterface.cpp b/src/gpu/gl/GrGLInterface.cpp index e1c69e1..21a6649 100644 --- a/src/gpu/gl/GrGLInterface.cpp +++ b/src/gpu/gl/GrGLInterface.cpp @@ -19,7 +19,7 @@ void GrGLDefaultInterfaceCallback(const GrGLInterface*) {} #endif GrGLInterface::GrGLInterface() { - fBindingsExported = kNone_GrGLBinding; + fStandard = kNone_GrGLStandard; #if GR_GL_PER_GL_FUNC_CALLBACK fCallback = GrGLDefaultInterfaceCallback; @@ -27,18 +27,14 @@ GrGLInterface::GrGLInterface() { #endif } -bool GrGLInterface::validate(GrGLBinding binding) const { +bool GrGLInterface::validate() const { - // kNone must be 0 so that the check we're about to do can never succeed if - // binding == kNone. - GR_STATIC_ASSERT(kNone_GrGLBinding == 0); - - if (0 == (binding & fBindingsExported)) { + if (kNone_GrGLStandard == fStandard) { return false; } GrGLExtensions extensions; - if (!extensions.init(binding, this)) { + if (!extensions.init(this)) { return false; } @@ -141,7 +137,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { GrGLVersion glVer = GrGLGetVersion(this); bool isCoreProfile = false; - if (kDesktop_GrGLBinding == binding && glVer >= GR_GL_VER(3,2)) { + if (kGL_GrGLStandard == fStandard && glVer >= GR_GL_VER(3,2)) { GrGLint profileMask; GR_GL_GetIntegerv(this, GR_GL_CONTEXT_PROFILE_MASK, &profileMask); isCoreProfile = SkToBool(profileMask & GR_GL_CONTEXT_CORE_PROFILE_BIT); @@ -154,13 +150,13 @@ bool GrGLInterface::validate(GrGLBinding binding) const { // these functions are part of ES2, we assume they are available // On the desktop we assume they are available if the extension // is present or GL version is high enough. 
- if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == fStandard) { if (NULL == fStencilFuncSeparate || NULL == fStencilMaskSeparate || NULL == fStencilOpSeparate) { return false; } - } else if (kDesktop_GrGLBinding == binding) { + } else if (kGL_GrGLStandard == fStandard) { if (glVer >= GR_GL_VER(2,0)) { if (NULL == fStencilFuncSeparate || @@ -272,7 +268,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { } // optional function on desktop before 1.3 - if (kDesktop_GrGLBinding != binding || + if (kGL_GrGLStandard != fStandard || (glVer >= GR_GL_VER(1,3)) || extensions.has("GL_ARB_texture_compression")) { if (NULL == fCompressedTexImage2D) { @@ -281,7 +277,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { } // part of desktop GL, but not ES - if (kDesktop_GrGLBinding == binding && + if (kGL_GrGLStandard == fStandard && (NULL == fGetTexLevelParameteriv || NULL == fDrawBuffer || NULL == fReadBuffer)) { @@ -290,7 +286,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { // GL_EXT_texture_storage is part of desktop 4.2 // There is a desktop ARB extension and an ES+desktop EXT extension - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == fStandard) { if (glVer >= GR_GL_VER(4,2) || extensions.has("GL_ARB_texture_storage") || extensions.has("GL_EXT_texture_storage")) { @@ -314,7 +310,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { } // FBO MSAA - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == fStandard) { // GL 3.0 and the ARB extension have multisample + blit if (glVer >= GR_GL_VER(3,0) || extensions.has("GL_ARB_framebuffer_object")) { if (NULL == fRenderbufferStorageMultisample || @@ -376,7 +372,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { // On ES buffer mapping is an extension. On Desktop // buffer mapping was part of original VBO extension // which we require. 
- if (kDesktop_GrGLBinding == binding || extensions.has("GL_OES_mapbuffer")) { + if (kGL_GrGLStandard == fStandard || extensions.has("GL_OES_mapbuffer")) { if (NULL == fMapBuffer || NULL == fUnmapBuffer) { return false; @@ -384,7 +380,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { } // Dual source blending - if (kDesktop_GrGLBinding == binding && + if (kGL_GrGLStandard == fStandard && (glVer >= GR_GL_VER(3,3) || extensions.has("GL_ARB_blend_func_extended"))) { if (NULL == fBindFragDataLocationIndexed) { return false; @@ -398,7 +394,7 @@ bool GrGLInterface::validate(GrGLBinding binding) const { } } - if (kDesktop_GrGLBinding == binding) { + if (kGL_GrGLStandard == fStandard) { if (glVer >= GR_GL_VER(3, 0) || extensions.has("GL_ARB_vertex_array_object")) { if (NULL == fBindVertexArray || NULL == fDeleteVertexArrays || diff --git a/src/gpu/gl/GrGLSL.cpp b/src/gpu/gl/GrGLSL.cpp index dce44f3..a7948cc 100644 --- a/src/gpu/gl/GrGLSL.cpp +++ b/src/gpu/gl/GrGLSL.cpp @@ -9,10 +9,10 @@ #include "GrGLShaderVar.h" #include "SkString.h" -GrGLSLGeneration GrGetGLSLGeneration(GrGLBinding binding, const GrGLInterface* gl) { +GrGLSLGeneration GrGetGLSLGeneration(const GrGLInterface* gl) { GrGLSLVersion ver = GrGLGetGLSLVersion(gl); - switch (binding) { - case kDesktop_GrGLBinding: + switch (gl->fStandard) { + case kGL_GrGLStandard: SkASSERT(ver >= GR_GLSL_VER(1,10)); if (ver >= GR_GLSL_VER(1,50)) { return k150_GrGLSLGeneration; @@ -23,12 +23,12 @@ GrGLSLGeneration GrGetGLSLGeneration(GrGLBinding binding, const GrGLInterface* g } else { return k110_GrGLSLGeneration; } - case kES_GrGLBinding: + case kGLES_GrGLStandard: // version 1.00 of ES GLSL based on ver 1.20 of desktop GLSL SkASSERT(ver >= GR_GL_VER(1,00)); return k110_GrGLSLGeneration; default: - GrCrash("Unknown GL Binding"); + GrCrash("Unknown GL Standard"); return k110_GrGLSLGeneration; // suppress warning } } @@ -36,22 +36,22 @@ GrGLSLGeneration GrGetGLSLGeneration(GrGLBinding binding, const GrGLInterface* g const char* GrGetGLSLVersionDecl(const GrGLContextInfo& info) { switch (info.glslGeneration()) { case k110_GrGLSLGeneration: - if (kES_GrGLBinding == info.binding()) { + if (kGLES_GrGLStandard == info.standard()) { // ES2s shader language is based on version 1.20 but is version // 1.00 of the ES language. return "#version 100\n"; } else { - SkASSERT(kDesktop_GrGLBinding == info.binding()); + SkASSERT(kGL_GrGLStandard == info.standard()); return "#version 110\n"; } case k130_GrGLSLGeneration: - SkASSERT(kDesktop_GrGLBinding == info.binding()); + SkASSERT(kGL_GrGLStandard == info.standard()); return "#version 130\n"; case k140_GrGLSLGeneration: - SkASSERT(kDesktop_GrGLBinding == info.binding()); + SkASSERT(kGL_GrGLStandard == info.standard()); return "#version 140\n"; case k150_GrGLSLGeneration: - SkASSERT(kDesktop_GrGLBinding == info.binding()); + SkASSERT(kGL_GrGLStandard == info.standard()); if (info.caps()->isCoreProfile()) { return "#version 150\n"; } else { diff --git a/src/gpu/gl/GrGLSL.h b/src/gpu/gl/GrGLSL.h index 8c5da51..5c0a170 100644 --- a/src/gpu/gl/GrGLSL.h +++ b/src/gpu/gl/GrGLSL.h @@ -40,8 +40,7 @@ enum GrGLSLGeneration { /** * Gets the most recent GLSL Generation compatible with the OpenGL context. 
*/ -GrGLSLGeneration GrGetGLSLGeneration(GrGLBinding binding, - const GrGLInterface* gl); +GrGLSLGeneration GrGetGLSLGeneration(const GrGLInterface* gl); /** * Returns a string to include at the beginning of a shader to declare the GLSL diff --git a/src/gpu/gl/GrGLShaderBuilder.cpp b/src/gpu/gl/GrGLShaderBuilder.cpp index 961cad7..3d01ba5 100644 --- a/src/gpu/gl/GrGLShaderBuilder.cpp +++ b/src/gpu/gl/GrGLShaderBuilder.cpp @@ -181,7 +181,7 @@ bool GrGLShaderBuilder::enableFeature(GLSLFeature feature) { if (!fGpu->glCaps().shaderDerivativeSupport()) { return false; } - if (kES_GrGLBinding == fGpu->glBinding()) { + if (kGLES_GrGLStandard == fGpu->glStandard()) { this->addFSFeature(1 << kStandardDerivatives_GLSLFeature, "GL_OES_standard_derivatives"); } @@ -471,10 +471,10 @@ void GrGLShaderBuilder::fsEmitFunction(GrSLType returnType, namespace { inline void append_default_precision_qualifier(GrGLShaderVar::Precision p, - GrGLBinding binding, + GrGLStandard standard, SkString* str) { // Desktop GLSL has added precision qualifiers but they don't do anything. - if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == standard) { switch (p) { case GrGLShaderVar::kHigh_Precision: str->append("precision highp float;\n"); @@ -689,7 +689,7 @@ bool GrGLShaderBuilder::compileAndAttachShaders(GrGLuint programId) const { SkString fragShaderSrc(GrGetGLSLVersionDecl(this->ctxInfo())); fragShaderSrc.append(fFSExtensions); append_default_precision_qualifier(kDefaultFragmentPrecision, - fGpu->glBinding(), + fGpu->glStandard(), &fragShaderSrc); this->appendUniformDecls(kFragment_Visibility, &fragShaderSrc); this->appendDecls(fFSInputs, &fragShaderSrc); diff --git a/src/gpu/gl/GrGLShaderVar.h b/src/gpu/gl/GrGLShaderVar.h index acbcef3..7862abd 100644 --- a/src/gpu/gl/GrGLShaderVar.h +++ b/src/gpu/gl/GrGLShaderVar.h @@ -267,7 +267,7 @@ public: ctxInfo.glslGeneration())); out->append(" "); } - out->append(PrecisionString(fPrecision, ctxInfo.binding())); + out->append(PrecisionString(fPrecision, ctxInfo.standard())); GrSLType effectiveType = this->getType(); if (this->isArray()) { if (this->isUnsizedArray()) { @@ -302,9 +302,9 @@ public: fUseUniformFloatArrays ? "" : ".x"); } - static const char* PrecisionString(Precision p, GrGLBinding binding) { + static const char* PrecisionString(Precision p, GrGLStandard standard) { // Desktop GLSL has added precision qualifiers but they don't do anything. 
- if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == standard) { switch (p) { case kLow_Precision: return "lowp "; diff --git a/src/gpu/gl/GrGLUtil.cpp b/src/gpu/gl/GrGLUtil.cpp index 96679fc..4ff6452 100644 --- a/src/gpu/gl/GrGLUtil.cpp +++ b/src/gpu/gl/GrGLUtil.cpp @@ -93,10 +93,10 @@ bool get_gl_version_for_mesa(int mesaMajorVersion, int* major, int* minor) { /////////////////////////////////////////////////////////////////////////////// -GrGLBinding GrGLGetBindingInUseFromString(const char* versionString) { +GrGLStandard GrGLGetStandardInUseFromString(const char* versionString) { if (NULL == versionString) { SkDEBUGFAIL("NULL GL version string."); - return kNone_GrGLBinding; + return kNone_GrGLStandard; } int major, minor; @@ -104,7 +104,7 @@ GrGLBinding GrGLGetBindingInUseFromString(const char* versionString) { // check for desktop int n = sscanf(versionString, "%d.%d", &major, &minor); if (2 == n) { - return kDesktop_GrGLBinding; + return kGL_GrGLStandard; } // check for ES 1 @@ -112,15 +112,15 @@ GrGLBinding GrGLGetBindingInUseFromString(const char* versionString) { n = sscanf(versionString, "OpenGL ES-%c%c %d.%d", profile, profile+1, &major, &minor); if (4 == n) { // we no longer support ES1. - return kNone_GrGLBinding; + return kNone_GrGLStandard; } // check for ES2 n = sscanf(versionString, "OpenGL ES %d.%d", &major, &minor); if (2 == n) { - return kES_GrGLBinding; + return kGLES_GrGLStandard; } - return kNone_GrGLBinding; + return kNone_GrGLStandard; } bool GrGLIsMesaFromVersionString(const char* versionString) { @@ -228,12 +228,6 @@ GrGLRenderer GrGLGetRendererFromString(const char* rendererString) { return kOther_GrGLRenderer; } -GrGLBinding GrGLGetBindingInUse(const GrGLInterface* gl) { - const GrGLubyte* v; - GR_GL_CALL_RET(gl, v, GetString(GR_GL_VERSION)); - return GrGLGetBindingInUseFromString((const char*) v); -} - GrGLVersion GrGLGetVersion(const GrGLInterface* gl) { const GrGLubyte* v; GR_GL_CALL_RET(gl, v, GetString(GR_GL_VERSION)); diff --git a/src/gpu/gl/GrGLUtil.h b/src/gpu/gl/GrGLUtil.h index a4fb0e5..8d3f580 100644 --- a/src/gpu/gl/GrGLUtil.h +++ b/src/gpu/gl/GrGLUtil.h @@ -78,7 +78,7 @@ enum GrGLRenderer { // these variants assume caller already has a string from glGetString() GrGLVersion GrGLGetVersionFromString(const char* versionString); -GrGLBinding GrGLGetBindingInUseFromString(const char* versionString); +GrGLStandard GrGLGetStandardInUseFromString(const char* versionString); GrGLSLVersion GrGLGetGLSLVersionFromString(const char* versionString); bool GrGLIsMesaFromVersionString(const char* versionString); GrGLVendor GrGLGetVendorFromString(const char* vendorString); @@ -86,7 +86,6 @@ GrGLRenderer GrGLGetRendererFromString(const char* rendererString); bool GrGLIsChromiumFromRendererString(const char* rendererString); // these variants call glGetString() -GrGLBinding GrGLGetBindingInUse(const GrGLInterface*); GrGLVersion GrGLGetVersion(const GrGLInterface*); GrGLSLVersion GrGLGetGLSLVersion(const GrGLInterface*); GrGLVendor GrGLGetVendor(const GrGLInterface*); diff --git a/src/gpu/gl/GrGpuGL.cpp b/src/gpu/gl/GrGpuGL.cpp index 7ab4bc1..4e83b05 100644 --- a/src/gpu/gl/GrGpuGL.cpp +++ b/src/gpu/gl/GrGpuGL.cpp @@ -203,7 +203,7 @@ bool GrGpuGL::canWriteTexturePixels(const GrTexture* texture, GrPixelConfig srcC if (kIndex_8_GrPixelConfig == srcConfig || kIndex_8_GrPixelConfig == texture->config()) { return false; } - if (srcConfig != texture->config() && kES_GrGLBinding == this->glBinding()) { + if (srcConfig != texture->config() && kGLES_GrGLStandard == 
this->glStandard()) { // In general ES2 requires the internal format of the texture and the format of the src // pixels to match. However, It may or may not be possible to upload BGRA data to a RGBA // texture. It depends upon which extension added BGRA. The Apple extension allows it @@ -235,7 +235,7 @@ void GrGpuGL::onResetContext(uint32_t resetBits) { fHWDrawFace = GrDrawState::kInvalid_DrawFace; fHWDitherEnabled = kUnknown_TriState; - if (kDesktop_GrGLBinding == this->glBinding()) { + if (kGL_GrGLStandard == this->glStandard()) { // Desktop-only state that we never change if (!this->glCaps().isCoreProfile()) { GL_CALL(Disable(GR_GL_POINT_SMOOTH)); @@ -561,7 +561,7 @@ bool GrGpuGL::uploadTexData(const GrGLTexture::Desc& desc, desc.fConfig != kIndex_8_GrPixelConfig && this->glCaps().texStorageSupport(); - if (useTexStorage && kDesktop_GrGLBinding == this->glBinding()) { + if (useTexStorage && kGL_GrGLStandard == this->glStandard()) { // 565 is not a sized internal format on desktop GL. So on desktop with // 565 we always use an unsized internal format to let the system pick // the best sized format to convert the 565 data to. Since TexStorage @@ -787,7 +787,7 @@ bool GrGpuGL::createRenderTargetObjects(int width, int height, !desc->fMSColorRenderbufferID || !this->configToGLFormats(desc->fConfig, // ES2 and ES3 require sized internal formats for rb storage. - kES_GrGLBinding == this->glBinding(), + kGLES_GrGLStandard == this->glStandard(), &msColorFormat, NULL, NULL)) { @@ -1868,7 +1868,7 @@ void GrGpuGL::flushAAState(DrawType type) { #endif const GrRenderTarget* rt = this->getDrawState().getRenderTarget(); - if (kDesktop_GrGLBinding == this->glBinding()) { + if (kGL_GrGLStandard == this->glStandard()) { // ES doesn't support toggling GL_MULTISAMPLE and doesn't have // smooth lines. // we prefer smooth lines over multisampled lines @@ -2075,7 +2075,7 @@ void GrGpuGL::bindTexture(int unitIdx, const GrTextureParams& params, GrGLTextur oldTexParams.fSwizzleRGBA, sizeof(newTexParams.fSwizzleRGBA)))) { this->setTextureUnit(unitIdx); - if (this->glBinding() == kES_GrGLBinding) { + if (this->glStandard() == kGLES_GrGLStandard) { // ES3 added swizzle support but not GL_TEXTURE_SWIZZLE_RGBA. const GrGLenum* swizzle = newTexParams.fSwizzleRGBA; GL_CALL(TexParameteri(GR_GL_TEXTURE_2D, GR_GL_TEXTURE_SWIZZLE_R, swizzle[0])); @@ -2334,7 +2334,7 @@ bool GrGpuGL::configToGLFormats(GrPixelConfig config, *internalFormat = GR_GL_RGB; *externalFormat = GR_GL_RGB; if (getSizedInternalFormat) { - if (this->glBinding() == kDesktop_GrGLBinding) { + if (this->glStandard() == kGL_GrGLStandard) { return false; } else { *internalFormat = GR_GL_RGB565; @@ -2445,7 +2445,7 @@ inline bool can_copy_texsubimage(const GrSurface* dst, // Table 3.9 of the ES2 spec indicates the supported formats with CopyTexSubImage // and BGRA isn't in the spec. There doesn't appear to be any extension that adds it. Perhaps // many drivers would allow it to work, but ANGLE does not. 
- if (kES_GrGLBinding == gpu->glBinding() && gpu->glCaps().bgraIsInternalFormat() && + if (kGLES_GrGLStandard == gpu->glStandard() && gpu->glCaps().bgraIsInternalFormat() && (kBGRA_8888_GrPixelConfig == dst->config() || kBGRA_8888_GrPixelConfig == src->config())) { return false; } @@ -2508,7 +2508,7 @@ inline GrGLuint bind_surface_as_fbo(const GrGLInterface* gl, void GrGpuGL::initCopySurfaceDstDesc(const GrSurface* src, GrTextureDesc* desc) { // Check for format issues with glCopyTexSubImage2D - if (kES_GrGLBinding == this->glBinding() && this->glCaps().bgraIsInternalFormat() && + if (kGLES_GrGLStandard == this->glStandard() && this->glCaps().bgraIsInternalFormat() && kBGRA_8888_GrPixelConfig == src->config()) { // glCopyTexSubImage2D doesn't work with this config. We'll want to make it a render target // in order to call glBlitFramebuffer or to copy to it by rendering. diff --git a/src/gpu/gl/GrGpuGL.h b/src/gpu/gl/GrGpuGL.h index 1a3f1a2..13243f3 100644 --- a/src/gpu/gl/GrGpuGL.h +++ b/src/gpu/gl/GrGpuGL.h @@ -35,7 +35,7 @@ public: const GrGLInterface* glInterface() const { return fGLContext.interface(); } const GrGLContextInfo& ctxInfo() const { return fGLContext.info(); } - GrGLBinding glBinding() const { return fGLContext.info().binding(); } + GrGLStandard glStandard() const { return fGLContext.info().standard(); } GrGLVersion glVersion() const { return fGLContext.info().version(); } GrGLSLGeneration glslGeneration() const { return fGLContext.info().glslGeneration(); } diff --git a/src/gpu/gl/SkGLContextHelper.cpp b/src/gpu/gl/SkGLContextHelper.cpp index da446be..4d31487 100644 --- a/src/gpu/gl/SkGLContextHelper.cpp +++ b/src/gpu/gl/SkGLContextHelper.cpp @@ -37,9 +37,7 @@ bool SkGLContextHelper::init(int width, int height) { if (fGL) { const GrGLubyte* temp; - GrGLBinding bindingInUse = GrGLGetBindingInUse(this->gl()); - - if (!fGL->validate(bindingInUse) || !fExtensions.init(bindingInUse, fGL)) { + if (!fGL->validate() || !fExtensions.init(fGL)) { fGL = NULL; this->destroyGLContext(); return false; @@ -59,7 +57,7 @@ bool SkGLContextHelper::init(int width, int height) { SK_GL(*this, BindFramebuffer(GR_GL_FRAMEBUFFER, fFBO)); SK_GL(*this, GenRenderbuffers(1, &fColorBufferID)); SK_GL(*this, BindRenderbuffer(GR_GL_RENDERBUFFER, fColorBufferID)); - if (kES_GrGLBinding == bindingInUse) { + if (kGLES_GrGLStandard == this->gl()->fStandard) { SK_GL(*this, RenderbufferStorage(GR_GL_RENDERBUFFER, GR_GL_RGBA8, width, height)); @@ -79,7 +77,7 @@ bool SkGLContextHelper::init(int width, int height) { // in binding a packed format an FBO. However, we can't rely on packed // depth stencil being available. bool supportsPackedDepthStencil; - if (kES_GrGLBinding == bindingInUse) { + if (kGLES_GrGLStandard == this->gl()->fStandard) { supportsPackedDepthStencil = version >= GR_GL_VER(3,0) || this->hasExtension("GL_OES_packed_depth_stencil"); } else { @@ -91,7 +89,7 @@ bool SkGLContextHelper::init(int width, int height) { if (supportsPackedDepthStencil) { // ES2 requires sized internal formats for RenderbufferStorage // On Desktop we let the driver decide. - GrGLenum format = kES_GrGLBinding == bindingInUse ? + GrGLenum format = kGLES_GrGLStandard == this->gl()->fStandard ? GR_GL_DEPTH24_STENCIL8 : GR_GL_DEPTH_STENCIL; SK_GL(*this, RenderbufferStorage(GR_GL_RENDERBUFFER, @@ -102,9 +100,8 @@ bool SkGLContextHelper::init(int width, int height) { GR_GL_RENDERBUFFER, fDepthStencilBufferID)); } else { - GrGLenum format = kES_GrGLBinding == bindingInUse ? 
- GR_GL_STENCIL_INDEX8 : - GR_GL_STENCIL_INDEX; + GrGLenum format = kGLES_GrGLStandard == this->gl()->fStandard ? GR_GL_STENCIL_INDEX8 : + GR_GL_STENCIL_INDEX; SK_GL(*this, RenderbufferStorage(GR_GL_RENDERBUFFER, format, width, height)); diff --git a/src/gpu/gl/android/GrGLCreateNativeInterface_android.cpp b/src/gpu/gl/android/GrGLCreateNativeInterface_android.cpp index ed1c834..72311c4 100644 --- a/src/gpu/gl/android/GrGLCreateNativeInterface_android.cpp +++ b/src/gpu/gl/android/GrGLCreateNativeInterface_android.cpp @@ -24,7 +24,7 @@ static const GrGLInterface* create_es_interface(GrGLVersion version, } GrGLInterface* interface = SkNEW(GrGLInterface); - interface->fBindingsExported = kES_GrGLBinding; + interface->fStandard = kGLES_GrGLStandard; interface->fActiveTexture = glActiveTexture; interface->fAttachShader = glAttachShader; @@ -222,7 +222,7 @@ static const GrGLInterface* create_desktop_interface(GrGLVersion version, } GrGLInterface* interface = SkNEW(GrGLInterface); - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; interface->fActiveTexture = (GrGLActiveTextureProc) eglGetProcAddress("glActiveTexture"); interface->fAttachShader = (GrGLAttachShaderProc) eglGetProcAddress("glAttachShader"); @@ -417,16 +417,16 @@ const GrGLInterface* GrGLCreateNativeInterface() { const char* verStr = reinterpret_cast(glGetString(GR_GL_VERSION)); GrGLVersion version = GrGLGetVersionFromString(verStr); - GrGLBinding binding = GrGLGetBindingInUseFromString(verStr); + GrGLStandard standard = GrGLGetStandardInUseFromString(verStr); GrGLExtensions extensions; - if (!extensions.init(binding, glGetString, getStringi, glGetIntegerv)) { + if (!extensions.init(standard, glGetString, getStringi, glGetIntegerv)) { return NULL; } - if (kES_GrGLBinding == binding) { + if (kGLES_GrGLStandard == standard) { return create_es_interface(version, extensions); - } else if (kDesktop_GrGLBinding == binding) { + } else if (kGL_GrGLStandard == standard) { return create_desktop_interface(version, extensions); } else { return NULL; diff --git a/src/gpu/gl/android/SkNativeGLContext_android.cpp b/src/gpu/gl/android/SkNativeGLContext_android.cpp index dda7d9d..462109a 100644 --- a/src/gpu/gl/android/SkNativeGLContext_android.cpp +++ b/src/gpu/gl/android/SkNativeGLContext_android.cpp @@ -65,19 +65,19 @@ const GrGLInterface* SkNativeGLContext::createGLContext() { const EGLint* fContextAttribs; EGLenum fAPI; EGLint fRenderableTypeBit; - GrGLBinding fBinding; + GrGLStandard fStandard; } kAPIs[] = { { // OpenGL kEGLContextAttribsForOpenGL, EGL_OPENGL_API, EGL_OPENGL_BIT, - kDesktop_GrGLBinding + kGL_GrGLStandard }, { // OpenGL ES. This seems to work for both ES2 and 3 (when available). 
kEGLContextAttribsForOpenGLES, EGL_OPENGL_ES_API, EGL_OPENGL_ES2_BIT, - kES_GrGLBinding + kGLES_GrGLStandard }, }; @@ -150,7 +150,7 @@ const GrGLInterface* SkNativeGLContext::createGLContext() { continue; } - if (!interface->validate(kAPIs[api].fBinding)) { + if (!interface->validate()) { interface->unref(); interface = NULL; this->destroyGLContext(); diff --git a/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp b/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp index e509b92..62f7608 100644 --- a/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp +++ b/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp @@ -34,7 +34,7 @@ const GrGLInterface* GrGLCreateANGLEInterface() { } GrGLInterface* interface = SkNEW(GrGLInterface); - interface->fBindingsExported = kES_GrGLBinding; + interface->fStandard = kGLES_GrGLStandard; GET_PROC(ActiveTexture); GET_PROC(AttachShader); diff --git a/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp b/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp index 1a0e7ac..bab5f06 100644 --- a/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp +++ b/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp @@ -790,7 +790,7 @@ private: const GrGLInterface* GrGLCreateDebugInterface() { GrGLInterface* interface = SkNEW(GrDebugGLInterface); - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; interface->fActiveTexture = debugGLActiveTexture; interface->fAttachShader = debugGLAttachShader; interface->fBeginQuery = debugGLBeginQuery; diff --git a/src/gpu/gl/iOS/GrGLCreateNativeInterface_iOS.cpp b/src/gpu/gl/iOS/GrGLCreateNativeInterface_iOS.cpp index 9fb8241..c13e358 100644 --- a/src/gpu/gl/iOS/GrGLCreateNativeInterface_iOS.cpp +++ b/src/gpu/gl/iOS/GrGLCreateNativeInterface_iOS.cpp @@ -142,7 +142,7 @@ const GrGLInterface* GrGLCreateNativeInterface() { interface->fGenVertexArrays = glGenVertexArraysOES; #endif - interface->fBindingsExported = kES_GrGLBinding; + interface->fStandard = kGLES_GrGLStandard; return interface; } diff --git a/src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp b/src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp index 423edfb..ca5194d 100644 --- a/src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp +++ b/src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp @@ -51,12 +51,12 @@ const GrGLInterface* GrGLCreateNativeInterface() { const char* verStr = (const char*) glGetString(GR_GL_VERSION); GrGLVersion ver = GrGLGetVersionFromString(verStr); GrGLExtensions extensions; - if (!extensions.init(kDesktop_GrGLBinding, glGetString, glGetStringi, glGetIntegerv)) { + if (!extensions.init(kGL_GrGLStandard, glGetString, glGetStringi, glGetIntegerv)) { return NULL; } GrGLInterface* interface = SkNEW(GrGLInterface); - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; GET_PROC(ActiveTexture); GET_PROC(AttachShader); diff --git a/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp b/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp index 7a00078..524ab09 100644 --- a/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp +++ b/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp @@ -30,7 +30,7 @@ const GrGLInterface* GrGLCreateMesaInterface() { (GrGLGetIntegervProc) OSMesaGetProcAddress("glGetIntegerv"); GrGLExtensions extensions; - if (!extensions.init(kDesktop_GrGLBinding, getString, getStringi, getIntegerv)) { + if (!extensions.init(kGL_GrGLStandard, getString, getStringi, getIntegerv)) { return NULL; } @@ -220,7 +220,7 @@ const GrGLInterface* GrGLCreateMesaInterface() { return NULL; } GR_GL_GET_PROC(BindFragDataLocationIndexed); - interface->fBindingsExported = 
kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; return interface; } else { return NULL; diff --git a/src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp b/src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp index 5e8c6bc..d8c1d76 100644 --- a/src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp +++ b/src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp @@ -32,7 +32,7 @@ const GrGLInterface* GrGLCreateNativeInterface() { (GrGLGetStringiProc) glXGetProcAddress(reinterpret_cast("glGetStringi")); GrGLExtensions extensions; - if (!extensions.init(kDesktop_GrGLBinding, glGetString, glGetStringi, glGetIntegerv)) { + if (!extensions.init(kGL_GrGLStandard, glGetString, glGetStringi, glGetIntegerv)) { return NULL; } @@ -275,7 +275,7 @@ const GrGLInterface* GrGLCreateNativeInterface() { GR_GL_GET_PROC_SUFFIX(PointAlongPath, NV); } - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; return interface; } else { diff --git a/src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp b/src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp index 26695b3..81f080e 100644 --- a/src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp +++ b/src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp @@ -61,7 +61,7 @@ const GrGLInterface* GrGLCreateNativeInterface() { GrGLGetStringiProc glGetStringi = (GrGLGetStringiProc) wglGetProcAddress("glGetStringi"); GrGLExtensions extensions; - if (!extensions.init(kDesktop_GrGLBinding, glGetString, glGetStringi, glGetIntegerv)) { + if (!extensions.init(kGL_GrGLStandard, glGetString, glGetStringi, glGetIntegerv)) { return NULL; } const char* versionString = (const char*) glGetString(GR_GL_VERSION); @@ -307,7 +307,7 @@ const GrGLInterface* GrGLCreateNativeInterface() { WGL_SET_PROC_SUFFIX(PointAlongPath, NV); } - interface->fBindingsExported = kDesktop_GrGLBinding; + interface->fStandard = kGL_GrGLStandard; return interface; } else { diff --git a/tests/GLInterfaceValidation.cpp b/tests/GLInterfaceValidation.cpp index d47a66d..37a83df 100755 --- a/tests/GLInterfaceValidation.cpp +++ b/tests/GLInterfaceValidation.cpp @@ -21,13 +21,7 @@ DEF_GPUTEST(GLInterfaceValidation, reporter, factory) { REPORTER_ASSERT(reporter, NULL != glCtxHelper); if (NULL != glCtxHelper) { const GrGLInterface* interface = glCtxHelper->gl(); - for (GrGLBinding binding = kFirstGrGLBinding; - binding <= kLastGrGLBinding; - binding = static_cast(binding << 1)) { - if (interface->fBindingsExported & binding) { - REPORTER_ASSERT(reporter, interface->validate(binding)); - } - } + REPORTER_ASSERT(reporter, interface->validate()); } } } -- 2.7.4
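
A minimal caller-side sketch of the migration this change implies, assuming Skia's public GPU headers are on the include path. The helper name setup_for_desktop_gl and the include paths are illustrative assumptions; GrGLStandard, kGL_GrGLStandard, fStandard, validate(), and GrGLExtensions::init(const GrGLInterface*) are taken from the diff above.

    #include "gl/GrGLInterface.h"   // include paths are assumptions; adjust to the project's setup
    #include "gl/GrGLExtensions.h"

    // Hypothetical helper replacing the old pattern of testing fBindingsExported
    // against kDesktop_GrGLBinding and then calling validate(binding).
    static bool setup_for_desktop_gl(const GrGLInterface* iface) {
        // The interface now advertises exactly one standard instead of a bitfield.
        if (kGL_GrGLStandard != iface->fStandard) {
            return false;
        }
        // validate() no longer takes a binding; it checks the function pointers
        // required by the advertised standard and its extensions.
        if (!iface->validate()) {
            return false;
        }
        GrGLExtensions extensions;
        // GrGLExtensions::init() likewise drops the binding parameter.
        return extensions.init(iface);
    }

Because the patch keeps fBindingsExported as a union alias of fStandard and retains the kES2_GrGLBinding constant, callers such as Chromium can move to the new names incrementally before the legacy spellings are removed.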