Properly detect integer support on WebGL 2.0

WebGL 2.0 is based on OpenGL ES 3.0, which guarantees shader integer
types, so fIntegerSupport can be enabled whenever a WebGL context
reports version 2.0 or newer.

Change-Id: Ib0461dd3be8e71537a76c87ca1d4a6501879381f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/279092
Reviewed-by: Kevin Lubick <kjlubick@google.com>
Commit-Queue: Chris Dalton <csmartdalton@google.com>
diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp
index 4799d25..18c9657 100644
--- a/src/gpu/gl/GrGLCaps.cpp
+++ b/src/gpu/gl/GrGLCaps.cpp
@@ -397,6 +397,7 @@
         shaderCaps->fShaderDerivativeSupport = version >= GR_GL_VER(2, 0) ||
                                                ctxInfo.hasExtension("GL_OES_standard_derivatives") ||
                                                ctxInfo.hasExtension("OES_standard_derivatives");
+        shaderCaps->fIntegerSupport = (version >= GR_GL_VER(2, 0));
     }
 
     if (ctxInfo.hasExtension("GL_NV_conservative_raster")) {
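
For context, a minimal self-contained sketch of the decision the patch adds, under the assumption that GR_GL_VER packs the major/minor version into a single comparable integer (the glVer and webglIntegerSupport helpers below are hypothetical illustrations, not Skia's API):

    // Illustrative stand-in for GR_GL_VER: pack major/minor so versions
    // compare correctly as one integer. The real macro may pack differently;
    // only the ordering matters for this sketch.
    #include <cstdint>
    #include <cstdio>

    static constexpr uint32_t glVer(uint32_t major, uint32_t minor) {
        return (major << 16) | minor;
    }

    // Hypothetical helper mirroring the patched logic: WebGL 2.0 is based on
    // OpenGL ES 3.0, so shader integer types are available from 2.0 onward.
    static bool webglIntegerSupport(uint32_t contextVersion) {
        return contextVersion >= glVer(2, 0);
    }

    int main() {
        std::printf("WebGL 1.0 integer support: %d\n",
                    webglIntegerSupport(glVer(1, 0)));  // 0
        std::printf("WebGL 2.0 integer support: %d\n",
                    webglIntegerSupport(glVer(2, 0)));  // 1
        return 0;
    }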