Bug 1081125 - WebGL2 3D textures - Part 0: stop using plain ints for jsArrayTypes - r=jgilbert

This commit is contained in:
Benoit Jacob 2014-10-13 19:42:11 -04:00
Parent 59ce535b8f
Commit 8bd31a6f65
3 changed files: 35 additions and 21 deletions

View file

@ -520,7 +520,7 @@ public:
return TexImage2D_base(texImageTarget, level, internalformat,
size.width, size.height, data->Stride(),
0, format, type, data->GetData(), byteLength,
-1, srcFormat, mPixelStorePremultiplyAlpha);
js::Scalar::TypeMax, srcFormat, mPixelStorePremultiplyAlpha);
}
void TexParameterf(GLenum target, GLenum pname, GLfloat param) {
@ -552,8 +552,11 @@ public:
if (IsContextLost())
return;
if (!ValidateTexImageTarget(2, rawTexImageTarget, WebGLTexImageFunc::TexSubImage))
if (!ValidateTexImageTarget(2, rawTexImageTarget,
WebGLTexImageFunc::TexSubImage))
{
return ErrorInvalidEnumInfo("texSubImage2D: target", rawTexImageTarget);
}
const TexImageTarget texImageTarget(rawTexImageTarget);
@ -592,7 +595,7 @@ public:
size.width, size.height,
data->Stride(), format, type,
data->GetData(), byteLength,
-1, srcFormat, mPixelStorePremultiplyAlpha);
js::Scalar::TypeMax, srcFormat, mPixelStorePremultiplyAlpha);
}
@ -1093,7 +1096,9 @@ protected:
bool ValidateComparisonEnum(GLenum target, const char *info);
bool ValidateStencilOpEnum(GLenum action, const char *info);
bool ValidateFaceEnum(GLenum face, const char *info);
bool ValidateTexInputData(GLenum type, int jsArrayType, WebGLTexImageFunc func);
bool ValidateTexInputData(GLenum type,
js::Scalar::Type jsArrayType,
WebGLTexImageFunc func);
bool ValidateDrawModeEnum(GLenum mode, const char *info);
bool ValidateAttribIndex(GLuint index, const char *info);
bool ValidateStencilParamsForDrawCall();
@ -1144,6 +1149,7 @@ protected:
void MakeContextCurrent() const;
// helpers
void TexImage2D_base(TexImageTarget target,
GLint level,
GLenum internalformat,
@ -1151,7 +1157,7 @@ protected:
GLenum format,
GLenum type,
void *data, uint32_t byteLength,
int jsArrayType,
js::Scalar::Type jsArrayType, // special value TypeMax used to mean no array
WebGLTexelFormat srcFormat, bool srcPremultiplied);
void TexSubImage2D_base(TexImageTarget target, GLint level,
GLint xoffset, GLint yoffset,
@ -1159,7 +1165,7 @@ protected:
GLenum format,
GLenum type,
void *pixels, uint32_t byteLength,
int jsArrayType,
js::Scalar::Type jsArrayType, // special value TypeMax used to mean no array
WebGLTexelFormat srcFormat, bool srcPremultiplied);
void TexParameter_base(GLenum target, GLenum pname,
GLint *intParamPtr, GLfloat *floatParamPtr);

View file

@ -3648,7 +3648,7 @@ WebGLContext::TexImage2D_base(TexImageTarget texImageTarget, GLint level,
GLenum format,
GLenum type,
void* data, uint32_t byteLength,
int jsArrayType, // a TypedArray format enum, or -1 if not relevant
js::Scalar::Type jsArrayType,
WebGLTexelFormat srcFormat, bool srcPremultiplied)
{
const WebGLTexImageFunc func = WebGLTexImageFunc::TexImage;
@ -3802,18 +3802,18 @@ WebGLContext::TexImage2D(GLenum rawTarget, GLint level,
void* data;
uint32_t length;
int jsArrayType;
js::Scalar::Type jsArrayType;
if (pixels.IsNull()) {
data = nullptr;
length = 0;
jsArrayType = -1;
jsArrayType = js::Scalar::TypeMax;
} else {
const ArrayBufferView& view = pixels.Value();
view.ComputeLengthAndData();
data = view.Data();
length = view.Length();
jsArrayType = int(JS_GetArrayBufferViewType(view.Obj()));
jsArrayType = JS_GetArrayBufferViewType(view.Obj());
}
if (!ValidateTexImageTarget(2, rawTarget, WebGLTexImageFunc::TexImage))
@ -3850,7 +3850,7 @@ WebGLContext::TexImage2D(GLenum rawTarget, GLint level,
return TexImage2D_base(rawTarget, level, internalformat, pixels->Width(),
pixels->Height(), 4*pixels->Width(), 0,
format, type, pixelData, pixelDataLength, -1,
format, type, pixelData, pixelDataLength, js::Scalar::TypeMax,
WebGLTexelFormat::RGBA8, false);
}
@ -3861,7 +3861,7 @@ WebGLContext::TexSubImage2D_base(TexImageTarget texImageTarget, GLint level,
GLsizei width, GLsizei height, GLsizei srcStrideOrZero,
GLenum format, GLenum type,
void* data, uint32_t byteLength,
int jsArrayType,
js::Scalar::Type jsArrayType,
WebGLTexelFormat srcFormat, bool srcPremultiplied)
{
const WebGLTexImageFunc func = WebGLTexImageFunc::TexSubImage;
@ -4027,7 +4027,7 @@ WebGLContext::TexSubImage2D(GLenum target, GLint level,
pixels->Width(), pixels->Height(),
4*pixels->Width(), format, type,
arr.Data(), arr.Length(),
-1,
js::Scalar::TypeMax,
WebGLTexelFormat::RGBA8, false);
}

View file

@ -1009,19 +1009,27 @@ WebGLContext::ValidateCopyTexImageInternalFormat(GLenum format,
* It is assumed that type has previously been validated.
*/
bool
WebGLContext::ValidateTexInputData(GLenum type, int jsArrayType, WebGLTexImageFunc func)
WebGLContext::ValidateTexInputData(GLenum type,
js::Scalar::Type jsArrayType,
WebGLTexImageFunc func)
{
bool validInput = false;
const char invalidTypedArray[] = "%s: invalid typed array type for given texture data type";
// We're using js::Scalar::TypeMax as dummy value when the tex source wasn't a
// typed array.
if (jsArrayType == js::Scalar::TypeMax) {
return true;
}
// First, we check for packed types
switch (type) {
case LOCAL_GL_UNSIGNED_BYTE:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Uint8);
validInput = jsArrayType == js::Scalar::Uint8;
break;
case LOCAL_GL_BYTE:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Int8);
validInput = jsArrayType == js::Scalar::Int8;
break;
case LOCAL_GL_HALF_FLOAT:
@ -1029,11 +1037,11 @@ WebGLContext::ValidateTexInputData(GLenum type, int jsArrayType, WebGLTexImageFu
case LOCAL_GL_UNSIGNED_SHORT_4_4_4_4:
case LOCAL_GL_UNSIGNED_SHORT_5_5_5_1:
case LOCAL_GL_UNSIGNED_SHORT_5_6_5:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Uint16);
validInput = jsArrayType == js::Scalar::Uint16;
break;
case LOCAL_GL_SHORT:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Int16);
validInput = jsArrayType == js::Scalar::Int16;
break;
case LOCAL_GL_UNSIGNED_INT:
@ -1041,15 +1049,15 @@ WebGLContext::ValidateTexInputData(GLenum type, int jsArrayType, WebGLTexImageFu
case LOCAL_GL_UNSIGNED_INT_2_10_10_10_REV:
case LOCAL_GL_UNSIGNED_INT_10F_11F_11F_REV:
case LOCAL_GL_UNSIGNED_INT_5_9_9_9_REV:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Uint32);
validInput = jsArrayType == js::Scalar::Uint32;
break;
case LOCAL_GL_INT:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Int32);
validInput = jsArrayType == js::Scalar::Int32;
break;
case LOCAL_GL_FLOAT:
validInput = (jsArrayType == -1 || jsArrayType == js::Scalar::Float32);
validInput = jsArrayType == js::Scalar::Float32;
break;
default: