Merge mozilla-central to autoland. a=merge CLOSED TREE

Tiberius Oros 2018-10-08 19:21:26 +03:00
Parents 3764388b3e 0f1b35f329
Commit 2f11f4a742
24 changed files with 304 additions and 289 deletions

View file

@ -176,11 +176,8 @@ async function getImageSizeFromClipboard() {
// Due to the differences in how images could be stored in the clipboard the
// checks below are needed. The clipboard could already provide the image as
// byte streams, but also as a pointer, or as an image container. If it's not
// possible to obtain a byte stream, the function returns `null`.
if (image instanceof Ci.nsISupportsInterfacePointer) {
image = image.data;
}
// byte streams or as an image container. If it's not possible to obtain a
// byte stream, the function throws.
if (image instanceof Ci.imgIContainer) {
image = Cc["@mozilla.org/image/tools;1"]

View file

@ -183,15 +183,13 @@ function saveToClipboard(base64URI) {
const base64Data = base64URI.replace("data:image/png;base64,", "");
const image = atob(base64Data);
const imgPtr = Cc["@mozilla.org/supports-interface-pointer;1"]
.createInstance(Ci.nsISupportsInterfacePointer);
imgPtr.data = imageTools.decodeImageFromBuffer(image, image.length, "image/png");
const img = imageTools.decodeImageFromBuffer(image, image.length, "image/png");
const transferable = Cc["@mozilla.org/widget/transferable;1"]
.createInstance(Ci.nsITransferable);
transferable.init(null);
transferable.addDataFlavor("image/png");
transferable.setTransferData("image/png", imgPtr, -1);
transferable.setTransferData("image/png", img, -1);
Services.clipboard.setData(transferable, null, Services.clipboard.kGlobalClipboard);
return L10N.getStr("screenshotCopied");

View file

@ -7753,14 +7753,7 @@ nsContentUtils::IPCTransferableToTransferable(const IPCDataTransfer& aDataTransf
getter_AddRefs(imageContainer));
NS_ENSURE_SUCCESS(rv, rv);
nsCOMPtr<nsISupportsInterfacePointer> imgPtr =
do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID);
NS_ENSURE_TRUE(imgPtr, NS_ERROR_FAILURE);
rv = imgPtr->SetData(imageContainer);
NS_ENSURE_SUCCESS(rv, rv);
aTransferable->SetTransferData(item.flavor().get(), imgPtr, sizeof(nsISupports*));
aTransferable->SetTransferData(item.flavor().get(), imageContainer, sizeof(nsISupports*));
} else {
nsCOMPtr<nsISupportsCString> dataWrapper =
do_CreateInstance(NS_SUPPORTS_CSTRING_CONTRACTID, &rv);
@ -8015,12 +8008,6 @@ nsContentUtils::TransferableToIPCTransferable(nsITransferable* aTransferable,
item->data() = dataAsShmem;
} else {
nsCOMPtr<nsISupportsInterfacePointer> sip =
do_QueryInterface(data);
if (sip) {
sip->GetData(getter_AddRefs(data));
}
// Images to be pasted on the clipboard are nsIInputStreams
nsCOMPtr<nsIInputStream> stream(do_QueryInterface(data));
if (stream) {

View file

@ -421,15 +421,8 @@ nsCopySupport::ImageCopy(nsIImageLoadingContent* aImageElement,
NS_ENSURE_SUCCESS(rv, rv);
#endif
nsCOMPtr<nsISupportsInterfacePointer>
imgPtr(do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID, &rv));
NS_ENSURE_SUCCESS(rv, rv);
rv = imgPtr->SetData(image);
NS_ENSURE_SUCCESS(rv, rv);
// copy the image data onto the transferable
rv = trans->SetTransferData(kNativeImageMime, imgPtr,
rv = trans->SetTransferData(kNativeImageMime, image,
sizeof(nsISupports*));
NS_ENSURE_SUCCESS(rv, rv);
}
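The change above is the pattern this merge applies throughout: the imgIContainer (or nsIFile) is placed on the nsITransferable directly instead of being wrapped in an nsISupportsInterfacePointer, and consumers QI the returned nsISupports straight away. A minimal sketch of the new round trip (the function name and variables are illustrative, not from the patch):

#include "imgIContainer.h"
#include "nsCOMPtr.h"
#include "nsComponentManagerUtils.h"
#include "nsITransferable.h"

static nsresult CopyImageToClipboardSketch(imgIContainer* aImage)
{
  nsCOMPtr<nsITransferable> trans =
    do_CreateInstance("@mozilla.org/widget/transferable;1");
  NS_ENSURE_TRUE(trans, NS_ERROR_FAILURE);
  trans->Init(nullptr);
  trans->AddDataFlavor(kNativeImageMime);
  // Store the image container directly; no nsISupportsInterfacePointer hop.
  nsresult rv = trans->SetTransferData(kNativeImageMime, aImage,
                                       sizeof(nsISupports*));
  NS_ENSURE_SUCCESS(rv, rv);

  // Consumer side: QI the transferred nsISupports straight to imgIContainer.
  nsCOMPtr<nsISupports> data;
  uint32_t len = 0;
  rv = trans->GetTransferData(kNativeImageMime, getter_AddRefs(data), &len);
  NS_ENSURE_SUCCESS(rv, rv);
  nsCOMPtr<imgIContainer> image = do_QueryInterface(data);
  return image ? NS_OK : NS_ERROR_FAILURE;
}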

View file

@ -1161,19 +1161,9 @@ DataTransfer::ConvertFromVariant(nsIVariant* aVariant,
// value 0) as the length.
fdp.forget(aSupports);
*aLength = nsITransferable::kFlavorHasDataProvider;
}
else {
// wrap the item in an nsISupportsInterfacePointer
nsCOMPtr<nsISupportsInterfacePointer> ptrSupports =
do_CreateInstance(NS_SUPPORTS_INTERFACE_POINTER_CONTRACTID);
if (!ptrSupports) {
return false;
}
ptrSupports->SetData(data);
ptrSupports.forget(aSupports);
*aLength = sizeof(nsISupportsInterfacePointer *);
} else {
data.forget(aSupports);
*aLength = sizeof(nsISupports *);
}
return true;

View file

@ -323,14 +323,18 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
#if XP_WIN
// We disable this code path on Windows versions earlier than Windows 8 due to
// intermittent crashes with old drivers. See bug 1405110.
if (IsWin8OrLater() && !XRE_IsParentProcess() &&
aAllocator && aAllocator->SupportsD3D11()) {
// D3D11YCbCrImage can only handle YCbCr images using 3 non-interleaved planes;
// a non-zero mSkip value indicates that one of the planes would be interleaved.
if (IsWin8OrLater() && !XRE_IsParentProcess() && aAllocator &&
aAllocator->SupportsD3D11() && aBuffer.mPlanes[0].mSkip == 0 &&
aBuffer.mPlanes[1].mSkip == 0 && aBuffer.mPlanes[2].mSkip == 0) {
RefPtr<layers::D3D11YCbCrImage> d3d11Image = new layers::D3D11YCbCrImage();
PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
if (d3d11Image->SetData(layers::ImageBridgeChild::GetSingleton()
? layers::ImageBridgeChild::GetSingleton().get()
: aAllocator,
aContainer, data)) {
? layers::ImageBridgeChild::GetSingleton().get()
: aAllocator,
aContainer,
data)) {
v->mImage = d3d11Image;
return v.forget();
}
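The extended guard above encodes the constraint from the comment: D3D11YCbCrImage only accepts three non-interleaved planes, and a non-zero mSkip on any plane means interleaving. A minimal sketch of that check as a standalone helper (the helper name is hypothetical; the YCbCrBuffer layout is the one used in this file):

#include "MediaData.h"

// True only when all three planes are laid out without interleaving, i.e.
// every plane's mSkip is zero; otherwise the D3D11 path must be skipped.
static bool PlanesAreNonInterleaved(const VideoData::YCbCrBuffer& aBuffer)
{
  return aBuffer.mPlanes[0].mSkip == 0 &&
         aBuffer.mPlanes[1].mSkip == 0 &&
         aBuffer.mPlanes[2].mSkip == 0;
}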

View file

@ -620,10 +620,13 @@ public:
const gfx::IntRect& aRegion,
Image** aOutImage) override;
virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
ID3D11Texture2D** aOutTexture);
HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
const GUID& aSubType,
ID3D11Texture2D** aOutTexture) override;
HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight) override;
HRESULT ConfigureForSize(IMFMediaType* aInputType,
uint32_t aWidth,
uint32_t aHeight) override;
bool IsD3D11() override { return true; }
@ -652,7 +655,8 @@ private:
uint32_t mWidth = 0;
uint32_t mHeight = 0;
UINT mDeviceManagerToken = 0;
bool mConfiguredForSize = false;
RefPtr<IMFMediaType> mInputType;
GUID mInputSubType;
};
bool
@ -919,18 +923,37 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
NS_ENSURE_TRUE(aOutImage, E_POINTER);
MOZ_ASSERT(mTextureClientAllocator);
RefPtr<D3D11ShareHandleImage> image =
new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion);
RefPtr<D3D11ShareHandleImage> image = new D3D11ShareHandleImage(
gfx::IntSize(mWidth, mHeight), aRegion, mInputSubType);
// Retrieve the DXGI_FORMAT for the current video sample.
RefPtr<IMFMediaBuffer> buffer;
HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFDXGIBuffer> dxgiBuf;
hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<ID3D11Texture2D> tex;
hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
D3D11_TEXTURE2D_DESC inDesc;
tex->GetDesc(&inDesc);
bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice);
NS_ENSURE_TRUE(ok, E_FAIL);
RefPtr<TextureClient> client = image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
RefPtr<TextureClient> client =
image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
NS_ENSURE_TRUE(client, E_FAIL);
RefPtr<IDXGIKeyedMutex> mutex;
HRESULT hr = S_OK;
RefPtr<ID3D11Texture2D> texture = image->GetTexture();
D3D11_TEXTURE2D_DESC outDesc;
texture->GetDesc(&outDesc);
RefPtr<IDXGIKeyedMutex> mutex;
texture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
{
@ -943,28 +966,15 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
NS_ENSURE_TRUE(mSyncObject, E_FAIL);
}
if (client && client->GetFormat() == SurfaceFormat::NV12) {
if (outDesc.Format == inDesc.Format) {
// Our video frame is stored in a non-sharable ID3D11Texture2D. We need
// to create a copy of that frame as a sharable resource, save its share
// handle, and put that handle into the rendering pipeline.
RefPtr<IMFMediaBuffer> buffer;
hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFDXGIBuffer> dxgiBuf;
hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<ID3D11Texture2D> tex;
hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
UINT index;
dxgiBuf->GetSubresourceIndex(&index);
mContext->CopySubresourceRegion(texture, 0, 0, 0, 0, tex, index, nullptr);
} else {
// Our video sample is in NV12 format but our output texture is in BGRA.
// Use MFT to do color conversion.
hr = E_FAIL;
mozilla::mscom::EnsureMTA(
@ -997,7 +1007,8 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
}
HRESULT
D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D *aInTexture,
D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
const GUID& aSubType,
ID3D11Texture2D** aOutTexture)
{
NS_ENSURE_TRUE(aInTexture, E_POINTER);
@ -1010,8 +1021,28 @@ D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D *aInTexture,
CD3D11_TEXTURE2D_DESC desc;
aInTexture->GetDesc(&desc);
hr = ConfigureForSize(desc.Width, desc.Height);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
if (!mInputType || desc.Width != mWidth || desc.Height != mHeight) {
RefPtr<IMFMediaType> inputType;
hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_SUBTYPE, aSubType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr =
inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = ConfigureForSize(inputType, desc.Width, desc.Height);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
}
RefPtr<IDXGIKeyedMutex> mutex;
inTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
@ -1073,47 +1104,28 @@ D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D *aInTexture,
return S_OK;
}
HRESULT ConfigureOutput(IMFMediaType* aOutput, void* aData)
{
HRESULT hr =
aOutput->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
gfx::IntSize* size = reinterpret_cast<gfx::IntSize*>(aData);
hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, size->width, size->height);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
}
HRESULT
D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType,
uint32_t aWidth,
uint32_t aHeight)
{
if (mConfiguredForSize && aWidth == mWidth && aHeight == mHeight) {
// If the size hasn't changed, don't reconfigure.
GUID subType = { 0 };
HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
if (subType == mInputSubType && aWidth == mWidth && aHeight == mHeight) {
// If the media type hasn't changed, don't reconfigure.
return S_OK;
}
mWidth = aWidth;
mHeight = aHeight;
// Create a copy of our input type.
RefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = aInputType->CopyAllItems(inputType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFAttributes> attr;
@ -1127,9 +1139,6 @@ D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -1140,15 +1149,27 @@ D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
gfx::IntSize size(mWidth, mHeight);
hr = E_FAIL;
mozilla::mscom::EnsureMTA([&]() -> void {
hr =
mTransform->SetMediaTypes(inputType, outputType, ConfigureOutput, &size);
hr = mTransform->SetMediaTypes(
inputType, outputType, [aWidth, aHeight](IMFMediaType* aOutput) {
HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE,
MFVideoInterlace_Progressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, aWidth, aHeight);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
});
});
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
mConfiguredForSize = true;
mWidth = aWidth;
mHeight = aHeight;
mInputType = inputType;
mInputSubType = subType;
return S_OK;
}
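Throughout this file the decision between a plain CopySubresourceRegion and the MFT color-conversion path hinges on whether the sample texture's DXGI format matches the output texture's (the inDesc/outDesc comparison in CopyToImage above). The MF subtypes involved correspond one-to-one to DXGI texture formats; a hedged sketch of that mapping (the helper name is hypothetical, not part of the patch):

#include <dxgiformat.h>
#include <mfapi.h>

// NV12/P010/P016 samples arrive in textures of the matching DXGI format;
// anything else is routed through the MFT and ends up as BGRA.
static DXGI_FORMAT DXGIFormatForSubType(const GUID& aSubType)
{
  if (aSubType == MFVideoFormat_NV12) {
    return DXGI_FORMAT_NV12;
  }
  if (aSubType == MFVideoFormat_P010) {
    return DXGI_FORMAT_P010;
  }
  if (aSubType == MFVideoFormat_P016) {
    return DXGI_FORMAT_P016;
  }
  return DXGI_FORMAT_B8G8R8A8_UNORM;
}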

View file

@ -46,7 +46,8 @@ public:
const gfx::IntRect& aRegion,
layers::Image** aOutImage) = 0;
virtual HRESULT CopyToBGRATexture(ID3D11Texture2D *aInTexture,
virtual HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
const GUID& aSubType,
ID3D11Texture2D** aOutTexture)
{
// Not implemented!
@ -54,7 +55,9 @@ public:
return E_FAIL;
}
virtual HRESULT ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
virtual HRESULT ConfigureForSize(IMFMediaType* aInputType,
uint32_t aWidth,
uint32_t aHeight)
{
return S_OK;
}

View file

@ -85,17 +85,21 @@ MFTDecoder::Create(HMODULE aDecoderDLL, const GUID& aMFTClsID)
HRESULT
MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
IMFMediaType* aOutputType,
ConfigureOutputCallback aCallback,
void* aData)
std::function<HRESULT(IMFMediaType*)>&& aCallback)
{
MOZ_ASSERT(mscom::IsCurrentThreadMTA());
mOutputType = aOutputType;
// Set the input type to the one the caller gave us...
HRESULT hr = mDecoder->SetInputType(0, aInputType, 0);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = SetDecoderOutputType(true /* match all attributes */, aCallback, aData);
GUID currentSubtype = {0};
hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = SetDecoderOutputType(currentSubtype,
aOutputType,
std::move(aCallback));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo);
@ -121,16 +125,33 @@ MFTDecoder::GetAttributes()
}
HRESULT
MFTDecoder::SetDecoderOutputType(bool aMatchAllAttributes,
ConfigureOutputCallback aCallback,
void* aData)
MFTDecoder::FindDecoderOutputType()
{
MOZ_ASSERT(mscom::IsCurrentThreadMTA());
MOZ_ASSERT(mOutputType, "SetDecoderTypes must have been called once");
return FindDecoderOutputTypeWithSubtype(mOutputSubType);
}
HRESULT
MFTDecoder::FindDecoderOutputTypeWithSubtype(const GUID& aSubType)
{
return SetDecoderOutputType(
aSubType, nullptr, [](IMFMediaType*) { return S_OK; });
}
HRESULT
MFTDecoder::SetDecoderOutputType(
const GUID& aSubType,
IMFMediaType* aTypeToUse,
std::function<HRESULT(IMFMediaType*)>&& aCallback)
{
MOZ_ASSERT(mscom::IsCurrentThreadMTA());
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
GUID currentSubtype = {0};
HRESULT hr = mOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
if (!aTypeToUse) {
aTypeToUse = mOutputType;
}
// Iterate over the enumerated output types until we find one compatible
// with what we need.
@ -139,21 +160,13 @@ MFTDecoder::SetDecoderOutputType(bool aMatchAllAttributes,
while (SUCCEEDED(mDecoder->GetOutputAvailableType(
0, typeIndex++, getter_AddRefs(outputType)))) {
GUID outSubtype = {0};
hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
HRESULT hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
BOOL resultMatch = currentSubtype == outSubtype;
if (resultMatch && aMatchAllAttributes) {
hr = mOutputType->Compare(outputType, MF_ATTRIBUTES_MATCH_OUR_ITEMS,
&resultMatch);
if (aSubType == outSubtype) {
hr = aCallback(outputType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
}
if (resultMatch == TRUE) {
if (aCallback) {
hr = aCallback(outputType, aData);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
}
hr = mDecoder->SetOutputType(0, outputType, 0);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -163,6 +176,9 @@ MFTDecoder::SetDecoderOutputType(bool aMatchAllAttributes,
mMFTProvidesOutputSamples = IsFlagSet(mOutputStreamInfo.dwFlags,
MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
mOutputType = outputType;
mOutputSubType = outSubtype;
return S_OK;
}
outputType = nullptr;

View file

@ -38,12 +38,10 @@ public:
// - aOutputType needs at least major and minor types set.
// This is used to select the matching output type out
// of all the available output types of the MFT.
typedef HRESULT (*ConfigureOutputCallback)(IMFMediaType* aOutputType,
void* aData);
HRESULT SetMediaTypes(IMFMediaType* aInputType,
IMFMediaType* aOutputType,
ConfigureOutputCallback aCallback = nullptr,
void* aData = nullptr);
std::function<HRESULT(IMFMediaType*)>&& aCallback =
[](IMFMediaType* aOutput) { return S_OK; });
// Returns the MFT's IMFAttributes object.
already_AddRefed<IMFAttributes> GetAttributes();
@ -51,6 +49,7 @@ public:
// Retrieves the media type being output. This may not be valid until
// the first sample is decoded.
HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
const GUID& GetOutputMediaSubType() const { return mOutputSubType; }
// Submits data into the MFT for processing.
//
@ -88,10 +87,15 @@ public:
// Sends a message to the MFT.
HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
HRESULT SetDecoderOutputType(bool aMatchAllAttributes,
ConfigureOutputCallback aCallback,
void* aData);
HRESULT FindDecoderOutputTypeWithSubtype(const GUID& aSubType);
HRESULT FindDecoderOutputType();
private:
// Will search for a suitable MediaType using aTypeToUse if set; if not, it
// will use the current mOutputType.
HRESULT SetDecoderOutputType(
const GUID& aSubType,
IMFMediaType* aTypeToUse,
std::function<HRESULT(IMFMediaType*)>&& aCallback);
HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
MFT_INPUT_STREAM_INFO mInputStreamInfo;
@ -100,6 +104,7 @@ private:
RefPtr<IMFTransform> mDecoder;
RefPtr<IMFMediaType> mOutputType;
GUID mOutputSubType;
// True if the IMFTransform allocates the samples that it returns.
bool mMFTProvidesOutputSamples = false;
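The C-style ConfigureOutputCallback/void* pair becomes a std::function, so call sites can pass a capturing lambda. A minimal sketch of a caller under the new signature (assuming an already-created decoder and prepared IMFMediaType instances; the variable names are illustrative, the real callers are in D3D11DXVA2Manager and WMFVideoMFTManager):

// Must run on an MTA thread, after MFTDecoder::Create() has loaded the MFT.
RefPtr<MFTDecoder> decoder = new MFTDecoder();
// ... Create() and input/output IMFMediaType setup elided ...
HRESULT hr = decoder->SetMediaTypes(
  inputType, outputType, [](IMFMediaType* aOutput) {
    // Adjust the selected output type before it is set on the MFT.
    return aOutput->SetUINT32(MF_MT_INTERLACE_MODE,
                              MFVideoInterlace_Progressive);
  });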

View file

@ -237,9 +237,8 @@ WMFAudioMFTManager::Output(int64_t aStreamOffset,
return hr;
}
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
hr = mDecoder->SetDecoderOutputType(true /* check all attribute */,
nullptr,
nullptr);
hr = mDecoder->FindDecoderOutputType();
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = UpdateOutputType();
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// Catch infinite loops, but some decoders perform at least 2 stream

View file

@ -686,8 +686,8 @@ WMFVideoMFTManager::InitInternal()
(mUseHwAccel ? "Yes" : "No"));
if (mUseHwAccel) {
hr = mDXVA2Manager->ConfigureForSize(mVideoInfo.ImageRect().width,
mVideoInfo.ImageRect().height);
hr = mDXVA2Manager->ConfigureForSize(
outputType, mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
NS_ENSURE_TRUE(SUCCEEDED(hr),
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
RESULT_DETAIL("Fail to configure image size for "
@ -900,8 +900,18 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
stride = mVideoStride;
}
// YV12, planar format: [YYYY....][VVVV....][UUUU....]
const GUID& subType = mDecoder->GetOutputMediaSubType();
MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 ||
subType == MFVideoFormat_P010 ||
subType == MFVideoFormat_P016);
const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12
? gfx::ColorDepth::COLOR_8
: gfx::ColorDepth::COLOR_16;
// YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....]
// i.e., Y, then V, then U.
// P010, P016 planar format (2 planes) [YYYY....][UVUV...]
// See https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats
VideoData::YCbCrBuffer b;
uint32_t videoWidth = mImageSize.width;
@ -923,24 +933,43 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
uint32_t halfHeight = (videoHeight + 1) / 2;
uint32_t halfWidth = (videoWidth + 1) / 2;
// U plane (Cb)
b.mPlanes[1].mData = data + y_size + v_size;
b.mPlanes[1].mStride = halfStride;
b.mPlanes[1].mHeight = halfHeight;
b.mPlanes[1].mWidth = halfWidth;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
if (subType == MFVideoFormat_YV12) {
// U plane (Cb)
b.mPlanes[1].mData = data + y_size + v_size;
b.mPlanes[1].mStride = halfStride;
b.mPlanes[1].mHeight = halfHeight;
b.mPlanes[1].mWidth = halfWidth;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 0;
// V plane (Cr)
b.mPlanes[2].mData = data + y_size;
b.mPlanes[2].mStride = halfStride;
b.mPlanes[2].mHeight = halfHeight;
b.mPlanes[2].mWidth = halfWidth;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
// V plane (Cr)
b.mPlanes[2].mData = data + y_size;
b.mPlanes[2].mStride = halfStride;
b.mPlanes[2].mHeight = halfHeight;
b.mPlanes[2].mWidth = halfWidth;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 0;
} else {
// U plane (Cb)
b.mPlanes[1].mData = data + y_size;
b.mPlanes[1].mStride = stride;
b.mPlanes[1].mHeight = halfHeight;
b.mPlanes[1].mWidth = halfWidth;
b.mPlanes[1].mOffset = 0;
b.mPlanes[1].mSkip = 1;
// V plane (Cr)
b.mPlanes[2].mData = data + y_size + sizeof(short);
b.mPlanes[2].mStride = stride;
b.mPlanes[2].mHeight = halfHeight;
b.mPlanes[2].mWidth = halfWidth;
b.mPlanes[2].mOffset = 0;
b.mPlanes[2].mSkip = 1;
}
// YuvColorSpace
b.mYUVColorSpace = mYUVColorSpace;
b.mColorDepth = colorDepth;
TimeUnit pts = GetSampleTime(aSample);
NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
@ -949,7 +978,8 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
gfx::IntRect pictureRegion =
mVideoInfo.ScaledImageRect(videoWidth, videoHeight);
if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor ||
!mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
RefPtr<VideoData> v =
VideoData::CreateAndCopyData(mVideoInfo,
mImageContainer,
@ -1060,21 +1090,37 @@ WMFVideoMFTManager::Output(int64_t aStreamOffset,
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
MOZ_ASSERT(!sample);
// Video stream output type change, probably geometric aperture change.
// Video stream output type change, probably a geometric aperture change or
// a pixel format change.
// We must reconfigure the decoder output type.
hr = mDecoder->SetDecoderOutputType(false /* check all attribute */,
nullptr,
nullptr);
// Attempt to find an appropriate OutputType, trying in order:
// if HW accelerated: NV12, P010, P016
// if SW: YV12, P010, P016
if (FAILED((hr = (mDecoder->FindDecoderOutputTypeWithSubtype(
mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12)))) &&
FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
MFVideoFormat_P010))) &&
FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
MFVideoFormat_P016)))) {
LOG("No suitable output format found");
return hr;
}
RefPtr<IMFMediaType> outputType;
hr = mDecoder->GetOutputMediaType(outputType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
if (!mUseHwAccel) {
// The stride may have changed, recheck for it.
RefPtr<IMFMediaType> outputType;
hr = mDecoder->GetOutputMediaType(outputType);
if (mUseHwAccel) {
hr = mDXVA2Manager->ConfigureForSize(outputType,
mVideoInfo.ImageRect().width,
mVideoInfo.ImageRect().height);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
} else {
// The stride may have changed, recheck for it.
mYUVColorSpace = GetYUVColorSpace(outputType);
hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width,
&mVideoStride);
hr = GetDefaultStride(
outputType, mVideoInfo.ImageRect().width, &mVideoStride);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
UINT32 width = 0, height = 0;
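The CreateBasicVideoFrame changes above map the semi-planar P010/P016 layout ([YYYY....][UVUV....], 16 bits per sample) onto the existing three-plane YCbCrBuffer by pointing Cb and Cr into the same physical plane, one sample apart, with mSkip = 1. A condensed sketch of that pointer arithmetic (hypothetical helper mirroring the assignments above; width/height/offset setup is as in the surrounding code):

#include "MediaData.h"

// aData .. aData+aYSize-1 : Y plane, full stride
// aData+aYSize ..         : interleaved Cb/Cr, 16-bit samples
static void MapSemiPlanarChroma(uint8_t* aData, size_t aYSize, int32_t aStride,
                                VideoData::YCbCrBuffer& aBuffer)
{
  aBuffer.mPlanes[1].mData = aData + aYSize;                 // first Cb sample
  aBuffer.mPlanes[2].mData = aData + aYSize + sizeof(short); // first Cr sample
  aBuffer.mPlanes[1].mStride = aBuffer.mPlanes[2].mStride = aStride;
  aBuffer.mPlanes[1].mSkip = aBuffer.mPlanes[2].mSkip = 1;   // skip the other channel
}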

View file

@ -2820,14 +2820,24 @@ ScriptLoader::ConvertToUTF16(nsIChannel* aChannel, const uint8_t* aData,
unicodeDecoder = WINDOWS_1252_ENCODING->NewDecoderWithoutBOMHandling();
}
CheckedInt<size_t> unicodeLength =
unicodeDecoder->MaxUTF16BufferLength(aLength);
if (!unicodeLength.isValid()) {
CheckedInt<size_t> maxLength = unicodeDecoder->MaxUTF16BufferLength(aLength);
if (!maxLength.isValid()) {
aBufOut = nullptr;
aLengthOut = 0;
return NS_ERROR_OUT_OF_MEMORY;
}
aBufOut =
static_cast<char16_t*>(js_malloc(unicodeLength.value() * sizeof(char16_t)));
size_t unicodeLength = maxLength.value();
maxLength *= sizeof(char16_t);
if (!maxLength.isValid()) {
aBufOut = nullptr;
aLengthOut = 0;
return NS_ERROR_OUT_OF_MEMORY;
}
aBufOut = static_cast<char16_t*>(js_malloc(maxLength.value()));
if (!aBufOut) {
aLengthOut = 0;
return NS_ERROR_OUT_OF_MEMORY;
@ -2837,11 +2847,11 @@ ScriptLoader::ConvertToUTF16(nsIChannel* aChannel, const uint8_t* aData,
size_t read;
size_t written;
bool hadErrors;
Tie(result, read, written, hadErrors) = unicodeDecoder->DecodeToUTF16(
data, MakeSpan(aBufOut, unicodeLength.value()), true);
Tie(result, read, written, hadErrors) =
unicodeDecoder->DecodeToUTF16(data, MakeSpan(aBufOut, unicodeLength), true);
MOZ_ASSERT(result == kInputEmpty);
MOZ_ASSERT(read == aLength);
MOZ_ASSERT(written <= unicodeLength.value());
MOZ_ASSERT(written <= unicodeLength);
Unused << hadErrors;
aLengthOut = written;
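The reshuffled checks above make both the worst-case UTF-16 code-unit count and the resulting byte count overflow-safe before allocating. A condensed sketch of the pattern, using mozilla::CheckedInt as the function above does (unicodeDecoder, aLength, and js_malloc come from the surrounding function):

#include "mozilla/CheckedInt.h"

// Each step is validated separately: first the worst-case UTF-16 length,
// then the multiplication into bytes, either of which may overflow.
mozilla::CheckedInt<size_t> maxLength =
  unicodeDecoder->MaxUTF16BufferLength(aLength);
if (!maxLength.isValid()) {
  return NS_ERROR_OUT_OF_MEMORY;
}
size_t unicodeLength = maxLength.value();  // code units, known to fit
maxLength *= sizeof(char16_t);             // bytes; re-check after multiplying
if (!maxLength.isValid()) {
  return NS_ERROR_OUT_OF_MEMORY;
}
char16_t* buf = static_cast<char16_t*>(js_malloc(maxLength.value()));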

View file

@ -118,6 +118,7 @@ SurfaceFormatForColorDepth(ColorDepth aColorDepth)
break;
case ColorDepth::COLOR_10:
case ColorDepth::COLOR_12:
case ColorDepth::COLOR_16:
format = SurfaceFormat::A16;
break;
case ColorDepth::UNKNOWN:
@ -139,6 +140,9 @@ BitDepthForColorDepth(ColorDepth aColorDepth)
case ColorDepth::COLOR_12:
depth = 12;
break;
case ColorDepth::COLOR_16:
depth = 16;
break;
case ColorDepth::UNKNOWN:
MOZ_ASSERT_UNREACHABLE("invalid color depth value");
}
@ -158,6 +162,9 @@ ColorDepthForBitDepth(uint8_t aBitDepth)
case 12:
depth = ColorDepth::COLOR_12;
break;
case 16:
depth = ColorDepth::COLOR_16;
break;
default:
MOZ_ASSERT_UNREACHABLE("invalid color depth value");
}
@ -179,6 +186,8 @@ RescalingFactorForColorDepth(ColorDepth aColorDepth)
case ColorDepth::COLOR_12:
factor = 16;
break;
case ColorDepth::COLOR_16:
break;
case ColorDepth::UNKNOWN:
MOZ_ASSERT_UNREACHABLE("invalid color depth value");
}
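For the depths stored in 16-bit surfaces the rescaling factor scales samples up to the full 16-bit range, i.e. factor = 1 << (16 - bit depth): 16 for 12-bit (as above) and 1 for the new 16-bit depth, which is why the COLOR_16 case is a bare `break`. A hedged one-line sketch of that relationship, reusing the helper defined in this file:

// 1 << (16 - 16) == 1: 16-bit samples already span the target range,
// so no rescaling is applied for ColorDepth::COLOR_16.
uint32_t factor = 1u << (16 - BitDepthForColorDepth(ColorDepth::COLOR_16));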

View file

@ -106,6 +106,7 @@ enum class ColorDepth : uint8_t {
COLOR_8,
COLOR_10,
COLOR_12,
COLOR_16,
UNKNOWN
};

View file

@ -24,22 +24,29 @@ namespace layers {
using namespace gfx;
D3D11ShareHandleImage::D3D11ShareHandleImage(const gfx::IntSize& aSize,
const gfx::IntRect& aRect)
: Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE),
mSize(aSize),
mPictureRect(aRect)
const gfx::IntRect& aRect,
const GUID& aSourceFormat)
: Image(nullptr, ImageFormat::D3D11_SHARE_HANDLE_TEXTURE)
, mSize(aSize)
, mPictureRect(aRect)
, mSourceFormat(aSourceFormat)
{
}
bool
D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice)
D3D11ShareHandleImage::AllocateTexture(D3D11RecycleAllocator* aAllocator,
ID3D11Device* aDevice)
{
if (aAllocator) {
if (gfxPrefs::PDMWMFUseNV12Format() &&
if (mSourceFormat == MFVideoFormat_NV12 &&
gfxPrefs::PDMWMFUseNV12Format() &&
gfx::DeviceManagerDx::Get()->CanUseNV12()) {
mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
mTextureClient =
aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::NV12, mSize);
} else {
mTextureClient = aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
mTextureClient =
aAllocator->CreateOrRecycleClient(gfx::SurfaceFormat::B8G8R8A8, mSize);
}
if (mTextureClient) {
mTexture = static_cast<D3D11TextureData*>(mTextureClient->GetInternalData())->GetD3D11Texture();
@ -87,7 +94,7 @@ D3D11ShareHandleImage::GetAsSourceSurface()
HRESULT hr;
if (desc.Format == DXGI_FORMAT_NV12) {
if (desc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) {
nsAutoCString error;
std::unique_ptr<DXVA2Manager> manager(DXVA2Manager::CreateD3D11DXVA(nullptr, error, device));
@ -98,10 +105,11 @@ D3D11ShareHandleImage::GetAsSourceSurface()
RefPtr<ID3D11Texture2D> outTexture;
hr = manager->CopyToBGRATexture(texture, getter_AddRefs(outTexture));
hr = manager->CopyToBGRATexture(
texture, mSourceFormat, getter_AddRefs(outTexture));
if (FAILED(hr)) {
gfxWarning() << "Failed to copy NV12 to BGRA texture.";
gfxWarning() << "Failed to copy to BGRA texture.";
return nullptr;
}

View file

@ -52,10 +52,12 @@ protected:
class D3D11ShareHandleImage final : public Image {
public:
D3D11ShareHandleImage(const gfx::IntSize& aSize,
const gfx::IntRect& aRect);
const gfx::IntRect& aRect,
const GUID& aSourceFormat);
virtual ~D3D11ShareHandleImage() {}
bool AllocateTexture(D3D11RecycleAllocator* aAllocator, ID3D11Device* aDevice);
bool AllocateTexture(D3D11RecycleAllocator* aAllocator,
ID3D11Device* aDevice);
gfx::IntSize GetSize() const override;
already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
@ -67,6 +69,7 @@ public:
private:
gfx::IntSize mSize;
gfx::IntRect mPictureRect;
const GUID mSourceFormat;
RefPtr<TextureClient> mTextureClient;
RefPtr<ID3D11Texture2D> mTexture;
};

View file

@ -1895,13 +1895,6 @@ MappedYCbCrChannelData::CopyInto(MappedYCbCrChannelData& aDst)
if (bytesPerPixel == 1) {
copyData(aDst.data, aDst, data, *this);
} else if (bytesPerPixel == 2) {
if (skip != 0) {
// The skip value definition doesn't specify if it's in bytes, or in
// "pixels". We will assume the latter. There are currently no decoders
// returning HDR content with a skip value different from zero anyway.
NS_WARNING("skip value non zero for HDR content, please verify code "
"(see bug 1421187)");
}
copyData(reinterpret_cast<uint16_t*>(aDst.data),
aDst,
reinterpret_cast<uint16_t*>(data),

View file

@ -864,6 +864,8 @@ static inline wr::WrColorDepth ToWrColorDepth(gfx::ColorDepth aColorDepth) {
return wr::WrColorDepth::Color10;
case gfx::ColorDepth::COLOR_12:
return wr::WrColorDepth::Color12;
case gfx::ColorDepth::COLOR_16:
return wr::WrColorDepth::Color16;
default:
MOZ_ASSERT_UNREACHABLE("Tried to convert invalid color depth value.");
}

View file

@ -5,8 +5,6 @@
XPCOMUtils.defineLazyServiceGetter(this, "imgTools",
"@mozilla.org/image/tools;1", "imgITools");
const SupportsInterfacePointer = Components.Constructor(
"@mozilla.org/supports-interface-pointer;1", "nsISupportsInterfacePointer");
const Transferable = Components.Constructor(
"@mozilla.org/widget/transferable;1", "nsITransferable");
@ -19,9 +17,9 @@ this.clipboard = class extends ExtensionAPI {
return Promise.reject({message: "Writing images to the clipboard is not supported on Android"});
}
let mimeType = `image/${imageType}`;
let container;
let img;
try {
container = imgTools.decodeImageFromArrayBuffer(imageData, mimeType);
img = imgTools.decodeImageFromArrayBuffer(imageData, mimeType);
} catch (e) {
return Promise.reject({message: `Data is not a valid ${imageType} image`});
}
@ -34,13 +32,10 @@ this.clipboard = class extends ExtensionAPI {
//
// The common protocol for exporting a nsITransferable as an image is:
// - Use nsITransferable::GetTransferData to fetch the stored data.
// - QI a nsISupportsInterfacePointer and get the underlying pointer.
// - QI imgIContainer on the pointer.
// - Convert the image to the native clipboard format.
//
// Below we create a nsITransferable in the above format.
let imgPtr = new SupportsInterfacePointer();
imgPtr.data = container;
let transferable = new Transferable();
transferable.init(null);
transferable.addDataFlavor(mimeType);
@ -53,7 +48,7 @@ this.clipboard = class extends ExtensionAPI {
// On macOS, nsClipboard::GetNativeClipboardData (nsClipboard.mm) uses
// a cached copy of nsITransferable if available, e.g. when the copy
// was initiated by the same browser instance. Consequently, the
// transferable still holds a nsISupportsInterfacePointer pointer
// transferable still holds an imgIContainer pointer
// instead of a nsIInputStream, and logic that assumes the data to be
// a nsIInputStream instance fails.
// For example HTMLEditor::InsertObject (HTMLEditorDataTransfer.cpp)
@ -68,7 +63,7 @@ this.clipboard = class extends ExtensionAPI {
//
// Note that the length itself is not really used if the data is not
// a string type, so the actual value does not matter.
transferable.setTransferData(mimeType, imgPtr, 0);
transferable.setTransferData(mimeType, img, 0);
Services.clipboard.setData(
transferable, null, Services.clipboard.kGlobalClipboard);

View file

@ -587,14 +587,8 @@ nsClipboard::PasteboardDictFromTransferable(nsITransferable* aTransferable)
uint32_t dataSize = 0;
nsCOMPtr<nsISupports> transferSupports;
aTransferable->GetTransferData(flavorStr.get(), getter_AddRefs(transferSupports), &dataSize);
nsCOMPtr<nsISupportsInterfacePointer> ptrPrimitive(do_QueryInterface(transferSupports));
if (!ptrPrimitive)
continue;
nsCOMPtr<nsISupports> primitiveData;
ptrPrimitive->GetData(getter_AddRefs(primitiveData));
nsCOMPtr<imgIContainer> image(do_QueryInterface(primitiveData));
nsCOMPtr<imgIContainer> image(do_QueryInterface(transferSupports));
if (!image) {
NS_WARNING("Image isn't an imgIContainer in transferable");
continue;
@ -647,15 +641,6 @@ nsClipboard::PasteboardDictFromTransferable(nsITransferable* aTransferable)
}
nsCOMPtr<nsIFile> file(do_QueryInterface(genericFile));
if (!file) {
nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericFile));
if (ptr) {
ptr->GetData(getter_AddRefs(genericFile));
file = do_QueryInterface(genericFile);
}
}
if (!file) {
continue;
}

View file

@ -536,19 +536,18 @@ nsClipboard::SelectionGetEvent(GtkClipboard *aClipboard,
static const char* const imageMimeTypes[] = {
kNativeImageMime, kPNGImageMime, kJPEGImageMime, kJPGImageMime, kGIFImageMime };
nsCOMPtr<nsISupports> imageItem;
nsCOMPtr<nsISupportsInterfacePointer> ptrPrimitive;
for (uint32_t i = 0; !ptrPrimitive && i < ArrayLength(imageMimeTypes); i++) {
nsCOMPtr<imgIContainer> image;
for (uint32_t i = 0; i < ArrayLength(imageMimeTypes); i++) {
rv = trans->GetTransferData(imageMimeTypes[i], getter_AddRefs(imageItem), &len);
ptrPrimitive = do_QueryInterface(imageItem);
image = do_QueryInterface(imageItem);
if (image) {
break;
}
}
if (!ptrPrimitive)
return;
nsCOMPtr<nsISupports> primitiveData;
ptrPrimitive->GetData(getter_AddRefs(primitiveData));
nsCOMPtr<imgIContainer> image(do_QueryInterface(primitiveData));
if (!image) // Not getting an image for an image mime type!?
if (!image) { // Not getting an image for an image mime type!?
return;
}
GdkPixbuf* pixbuf = nsImageToPixbuf::ImageToPixbuf(image);
if (!pixbuf)

View file

@ -1591,20 +1591,7 @@ CreateUriList(nsIArray *items, gchar **text, gint *length)
getter_AddRefs(data),
&tmpDataLen);
if (NS_SUCCEEDED(rv)) {
nsCOMPtr<nsIFile> file = do_QueryInterface(data);
if (!file) {
// Sometimes the file is wrapped in a
// nsISupportsInterfacePointer. See bug 1310193 for
// removing this distinction.
nsCOMPtr<nsISupportsInterfacePointer> ptr =
do_QueryInterface(data);
if (ptr) {
ptr->GetData(getter_AddRefs(data));
file = do_QueryInterface(data);
}
}
if (file) {
if (nsCOMPtr<nsIFile> file = do_QueryInterface(data)) {
nsCOMPtr<nsIURI> fileURI;
NS_NewFileURI(getter_AddRefs(fileURI), file);
if (fileURI) {

View file

@ -901,18 +901,6 @@ nsDataObj::GetDib(const nsACString& inFlavor,
nsCOMPtr<nsISupports> genericDataWrapper;
mTransferable->GetTransferData(PromiseFlatCString(inFlavor).get(), getter_AddRefs(genericDataWrapper), &len);
nsCOMPtr<imgIContainer> image ( do_QueryInterface(genericDataWrapper) );
if ( !image ) {
// Check if the image was put in an nsISupportsInterfacePointer wrapper.
// This might not be necessary any more, but could be useful for backwards
// compatibility.
nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
if ( ptr ) {
nsCOMPtr<nsISupports> supports;
ptr->GetData(getter_AddRefs(supports));
image = do_QueryInterface(supports);
}
}
if ( image ) {
// use the |nsImageToClipboard| helper class to build up a bitmap. We now own
// the bits, and pass them back to the OS in |aSTG|.
@ -1466,17 +1454,6 @@ HRESULT nsDataObj::DropFile(FORMATETC& aFE, STGMEDIUM& aSTG)
mTransferable->GetTransferData(kFileMime, getter_AddRefs(genericDataWrapper),
&len);
nsCOMPtr<nsIFile> file ( do_QueryInterface(genericDataWrapper) );
if (!file)
{
nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
if (ptr) {
nsCOMPtr<nsISupports> supports;
ptr->GetData(getter_AddRefs(supports));
file = do_QueryInterface(supports);
}
}
if (!file)
return E_FAIL;
@ -1531,19 +1508,6 @@ HRESULT nsDataObj::DropImage(FORMATETC& aFE, STGMEDIUM& aSTG)
mTransferable->GetTransferData(kNativeImageMime, getter_AddRefs(genericDataWrapper), &len);
nsCOMPtr<imgIContainer> image(do_QueryInterface(genericDataWrapper));
if (!image) {
// Check if the image was put in an nsISupportsInterfacePointer wrapper.
// This might not be necessary any more, but could be useful for backwards
// compatibility.
nsCOMPtr<nsISupportsInterfacePointer> ptr(do_QueryInterface(genericDataWrapper));
if (ptr) {
nsCOMPtr<nsISupports> supports;
ptr->GetData(getter_AddRefs(supports));
image = do_QueryInterface(supports);
}
}
if (!image)
return E_FAIL;