Bug 671259 - Disable OpenGL layers on linux when we don't have texture_from_pixmap. r=bjacob

Matt Woodrow committed on 2011-08-05 13:13:25 +12:00
Parent 1f117151c5
Commit e0e10d35a2
3 changed files with 28 additions and 3 deletions

View file

@@ -138,6 +138,9 @@ static void glxtest()
typedef GLubyte* (* PFNGLGETSTRING) (GLenum);
PFNGLGETSTRING glGetString = cast<PFNGLGETSTRING>(dlsym(libgl, "glGetString"));
+ typedef void* (* PFNGLXGETPROCADDRESS) (const char *);
+ PFNGLXGETPROCADDRESS glXGetProcAddress = cast<PFNGLXGETPROCADDRESS>(dlsym(libgl, "glXGetProcAddress"));
if (!glXQueryExtension ||
!glXChooseFBConfig ||
!glXGetVisualFromFBConfig ||
@@ -146,7 +149,8 @@ static void glxtest()
!glXMakeCurrent ||
!glXDestroyPixmap ||
!glXDestroyContext ||
- !glGetString)
+ !glGetString ||
+ !glXGetProcAddress)
{
fatal_error("Unable to find required symbols in libGL.so.1");
}
@@ -183,6 +187,9 @@ static void glxtest()
GLXContext context = glXCreateNewContext(dpy, fbConfigs[0], GLX_RGBA_TYPE, NULL, True);
glXMakeCurrent(dpy, glxpixmap, context);
+ ///// Look for this symbol to determine texture_from_pixmap support /////
+ void* glXBindTexImageEXT = glXGetProcAddress("glXBindTexImageEXT");
///// Get GL vendor/renderer/versions strings /////
enum { bufsize = 1024 };
char buf[bufsize];
@@ -194,10 +201,11 @@ static void glxtest()
fatal_error("glGetString returned null");
int length = snprintf(buf, bufsize,
- "VENDOR\n%s\nRENDERER\n%s\nVERSION\n%s\n",
+ "VENDOR\n%s\nRENDERER\n%s\nVERSION\n%s\nTFP\n%s\n",
vendorString,
rendererString,
- versionString);
+ versionString,
+ glXBindTexImageEXT ? "TRUE" : "FALSE");
if (length >= bufsize)
fatal_error("GL strings length too large for buffer size");
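For reference, the probe added above amounts to resolving glXGetProcAddress from libGL and asking it for glXBindTexImageEXT, the entry point introduced by GLX_EXT_texture_from_pixmap. Below is a minimal standalone sketch of that check; it skips the X display and GLX context setup that glxtest performs before this point, and the file name and build command are assumptions rather than part of the patch.

// tfp_probe.cpp: minimal sketch of the texture_from_pixmap probe.
// Build command is an assumption: g++ tfp_probe.cpp -ldl -o tfp_probe
#include <cstdio>
#include <dlfcn.h>

int main()
{
  void *libgl = dlopen("libGL.so.1", RTLD_LAZY);
  if (!libgl) {
    std::fprintf(stderr, "Unable to load libGL.so.1\n");
    return 1;
  }

  // Resolve glXGetProcAddress through dlsym, mirroring the patch.
  typedef void* (* PFNGLXGETPROCADDRESS) (const char *);
  PFNGLXGETPROCADDRESS glXGetProcAddress =
      reinterpret_cast<PFNGLXGETPROCADDRESS>(dlsym(libgl, "glXGetProcAddress"));
  if (!glXGetProcAddress) {
    std::fprintf(stderr, "Unable to find glXGetProcAddress\n");
    return 1;
  }

  // If the driver exposes glXBindTexImageEXT, treat texture_from_pixmap
  // as available and emit the same TFP record that GfxInfo::GetData() parses.
  void *glXBindTexImageEXT = glXGetProcAddress("glXBindTexImageEXT");
  std::printf("TFP\n%s\n", glXBindTexImageEXT ? "TRUE" : "FALSE");

  dlclose(libgl);
  return 0;
}

glxtest itself writes this TFP record alongside the vendor, renderer, and version strings, which GfxInfo::GetData() parses in the next file of this commit.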

View file

@@ -67,6 +67,7 @@ GfxInfo::Init()
mIsMesa = false;
mIsNVIDIA = false;
mIsFGLRX = false;
+ mHasTextureFromPixmap = false;
return GfxInfoBase::Init();
}
@@ -119,6 +120,7 @@ GfxInfo::GetData()
bool error = waiting_for_glxtest_process_failed || exited_with_error_code || received_signal;
+ nsCString textureFromPixmap;
nsCString *stringToFill = nsnull;
char *bufptr = buf;
if (!error) {
@@ -136,9 +138,14 @@ GfxInfo::GetData()
stringToFill = &mRenderer;
else if(!strcmp(line, "VERSION"))
stringToFill = &mVersion;
+ else if(!strcmp(line, "TFP"))
+ stringToFill = &textureFromPixmap;
}
}
+ if (!strcmp(textureFromPixmap.get(), "TRUE"))
+ mHasTextureFromPixmap = true;
const char *spoofedVendor = PR_GetEnv("MOZ_GFX_SPOOF_GL_VENDOR");
if (spoofedVendor)
mVendor.Assign(spoofedVendor);
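On the parsing side the format is a flat sequence of KEY line then VALUE line pairs, and GetData() simply remembers which string the next line should fill. The sketch below restates that framing in isolation; ProbeResult, ParseGlxtestOutput, and the strtok-based line splitting are illustrative stand-ins, not the actual GfxInfo code.

#include <cstdio>
#include <cstring>
#include <string>

// Illustrative stand-ins for the GfxInfo members that GetData() fills.
struct ProbeResult {
  std::string vendor, renderer, version;
  bool hasTextureFromPixmap;
};

// Parse the newline-delimited KEY\nVALUE\n stream written by glxtest.
// The buffer is modified in place by strtok.
static ProbeResult ParseGlxtestOutput(char *buf)
{
  ProbeResult result;
  result.hasTextureFromPixmap = false;
  std::string textureFromPixmap;
  std::string *stringToFill = 0;

  for (char *line = std::strtok(buf, "\n"); line; line = std::strtok(0, "\n")) {
    if (stringToFill) {
      *stringToFill = line;   // the previous line was a key; this one is its value
      stringToFill = 0;
    } else if (!std::strcmp(line, "VENDOR")) {
      stringToFill = &result.vendor;
    } else if (!std::strcmp(line, "RENDERER")) {
      stringToFill = &result.renderer;
    } else if (!std::strcmp(line, "VERSION")) {
      stringToFill = &result.version;
    } else if (!std::strcmp(line, "TFP")) {
      stringToFill = &textureFromPixmap;
    }
  }

  result.hasTextureFromPixmap = (textureFromPixmap == "TRUE");
  return result;
}

int main()
{
  // Sample input only; real data comes from the glxtest child process.
  char sample[] = "VENDOR\nExample\nRENDERER\nExample GL\nVERSION\n2.1\nTFP\nTRUE\n";
  ProbeResult r = ParseGlxtestOutput(sample);
  std::printf("renderer=%s tfp=%s\n", r.renderer.c_str(),
              r.hasTextureFromPixmap ? "TRUE" : "FALSE");
  return 0;
}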
@@ -181,6 +188,8 @@ GfxInfo::GetData()
note.Append(mAdapterDescription);
note.Append(" -- ");
note.Append(mVersion);
+ if (mHasTextureFromPixmap)
+ note.Append(" -- texture_from_pixmap");
note.Append("\n");
#ifdef MOZ_CRASHREPORTER
CrashReporter::AppendAppNotesToCrashReport(note);
@@ -249,6 +258,13 @@ GfxInfo::GetFeatureStatusImpl(PRInt32 aFeature, PRInt32 *aStatus, nsAString & aSuggestedDriverVersion)
return NS_OK;
#endif
+ // Disable OpenGL layers when we don't have texture_from_pixmap because it regresses performance.
+ if (aFeature == nsIGfxInfo::FEATURE_OPENGL_LAYERS && !mHasTextureFromPixmap) {
+ *aStatus = nsIGfxInfo::FEATURE_BLOCKED_DRIVER_VERSION;
+ aSuggestedDriverVersion.AssignLiteral("<Anything with EXT_texture_from_pixmap support>");
+ return NS_OK;
+ }
// whitelist the linux test slaves' current configuration.
// this is necessary as they're still using the slightly outdated 190.42 driver.
// this isn't a huge risk, as at least this is the exact setting in which we do continuous testing,
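The policy itself is the single check added above: when texture_from_pixmap is missing, OpenGL layers are reported as blocked and the suggested driver version field carries a human-readable hint instead of a real version number. A standalone restatement of that decision follows; the enum values and function name are illustrative, not the nsIGfxInfo API.

#include <cstdio>
#include <string>

// Loose stand-ins for the nsIGfxInfo status codes used by the patch.
enum FeatureStatus {
  FEATURE_NO_INFO,                 // no blocklist rule applies
  FEATURE_BLOCKED_DRIVER_VERSION   // blocked; a different driver could help
};

// Decide whether OpenGL layers should be allowed for this configuration.
static FeatureStatus OpenGLLayersStatus(bool hasTextureFromPixmap,
                                        std::string *suggestedDriverVersion)
{
  if (!hasTextureFromPixmap) {
    // Without EXT_texture_from_pixmap, GL layers regress performance, so
    // block them and surface the missing extension through the hint string.
    *suggestedDriverVersion = "<Anything with EXT_texture_from_pixmap support>";
    return FEATURE_BLOCKED_DRIVER_VERSION;
  }
  return FEATURE_NO_INFO;
}

int main()
{
  std::string suggestion;
  FeatureStatus status = OpenGLLayersStatus(false, &suggestion);
  std::printf("status=%d suggestion=%s\n", status, suggestion.c_str());
  return 0;
}

Reusing an existing blocked status rather than introducing a new one keeps the calling side unchanged; the cost is that the hint shows up wherever a suggested driver version would normally appear.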

View file

@@ -79,6 +79,7 @@ private:
nsCString mVersion;
nsCString mAdapterDescription;
bool mIsMesa, mIsNVIDIA, mIsFGLRX;
+ bool mHasTextureFromPixmap;
int mMajorVersion, mMinorVersion, mRevisionVersion;
void AddCrashReportAnnotations();