[mac] ImageBuffer should create accelerated buffers for small canvases, but we should...
diff --git a/Source/WebCore/platform/graphics/cg/ImageBufferCG.cpp b/Source/WebCore/platform/graphics/cg/ImageBufferCG.cpp
index 02ba8cf..7e0434c 100644
@@ -28,7 +28,6 @@
 #include "config.h"
 #include "ImageBuffer.h"
 
-#include "Base64.h"
 #include "BitmapImage.h"
 #include "GraphicsContext.h"
 #include "GraphicsContextCG.h"
@@ -42,6 +41,7 @@
 #include <wtf/OwnArrayPtr.h>
 #include <wtf/RetainPtr.h>
 #include <wtf/UnusedParam.h>
+#include <wtf/text/Base64.h>
 #include <wtf/text/WTFString.h>
 
 #if PLATFORM(MAC) || PLATFORM(CHROMIUM)
@@ -52,7 +52,7 @@
 #include <IOSurface/IOSurface.h>
 #endif
 
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
 #include <wtf/CurrentTime.h>
 #endif
 
@@ -62,7 +62,6 @@ namespace WebCore {
 
 #if USE(IOSURFACE_CANVAS_BACKING_STORE)
 static const int maxIOSurfaceDimension = 4096;
-static const int minIOSurfaceArea = 50 * 100;
 
 static RetainPtr<IOSurfaceRef> createIOSurface(const IntSize& size)
 {
@@ -107,26 +106,36 @@ static void releaseImageData(void*, const void* data, size_t)
     fastFree(const_cast<void*>(data));
 }
 
-ImageBuffer::ImageBuffer(const IntSize& size, ColorSpace imageColorSpace, RenderingMode renderingMode, DeferralMode, bool& success)
-    : m_data(size)
-    , m_size(size)
+ImageBuffer::ImageBuffer(const IntSize& size, float resolutionScale, ColorSpace imageColorSpace, RenderingMode renderingMode, DeferralMode, bool& success)
+    : m_data(size) // NOTE: The input here isn't important as ImageBufferDataCG's constructor just ignores it.
+    , m_logicalSize(size)
+    , m_resolutionScale(resolutionScale)
 {
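+    // The backing store is allocated at resolutionScale times the requested logical
+    // size, rounded up so fractional scales never drop a partial row or column.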
+    float scaledWidth = ceilf(resolutionScale * size.width());
+    float scaledHeight = ceilf(resolutionScale * size.height());
+
+    // FIXME: Should we automatically use a lower resolution?
+    if (!FloatSize(scaledWidth, scaledHeight).isExpressibleAsIntSize())
+        return;
+
+    m_size = IntSize(scaledWidth, scaledHeight);
+
     success = false;  // Make early return mean failure.
     bool accelerateRendering = renderingMode == Accelerated;
-    if (size.width() <= 0 || size.height() <= 0)
+    if (m_size.width() <= 0 || m_size.height() <= 0)
         return;
 
-    Checked<int, RecordOverflow> width = size.width();
-    Checked<int, RecordOverflow> height = size.height();
+    Checked<int, RecordOverflow> width = m_size.width();
+    Checked<int, RecordOverflow> height = m_size.height();
 
     // Prevent integer overflows
     m_data.m_bytesPerRow = 4 * width;
-    Checked<size_t, RecordOverflow> dataSize = height * m_data.m_bytesPerRow;
-    if (dataSize.hasOverflowed())
+    Checked<size_t, RecordOverflow> numBytes = height * m_data.m_bytesPerRow;
+    if (numBytes.hasOverflowed())
         return;
 
 #if USE(IOSURFACE_CANVAS_BACKING_STORE)
-    if (width.unsafeGet() >= maxIOSurfaceDimension || height.unsafeGet() >= maxIOSurfaceDimension || (width * height).unsafeGet() < minIOSurfaceArea)
+    if (width.unsafeGet() >= maxIOSurfaceDimension || height.unsafeGet() >= maxIOSurfaceDimension)
         accelerateRendering = false;
 #else
     ASSERT(renderingMode == Unaccelerated);
@@ -147,7 +156,7 @@ ImageBuffer::ImageBuffer(const IntSize& size, ColorSpace imageColorSpace, Render
     RetainPtr<CGContextRef> cgContext;
     if (accelerateRendering) {
 #if USE(IOSURFACE_CANVAS_BACKING_STORE)
-        m_data.m_surface = createIOSurface(size);
+        m_data.m_surface = createIOSurface(m_size);
         cgContext.adoptCF(wkIOSurfaceContextCreate(m_data.m_surface.get(), width.unsafeGet(), height.unsafeGet(), m_data.m_colorSpace));
 #endif
         if (!cgContext)
@@ -162,17 +171,18 @@ ImageBuffer::ImageBuffer(const IntSize& size, ColorSpace imageColorSpace, Render
         m_data.m_bitmapInfo = kCGImageAlphaPremultipliedLast;
         cgContext.adoptCF(CGBitmapContextCreate(m_data.m_data, width.unsafeGet(), height.unsafeGet(), 8, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo));
         // Create a live image that wraps the data.
-        m_data.m_dataProvider.adoptCF(CGDataProviderCreateWithData(0, m_data.m_data, dataSize.unsafeGet(), releaseImageData));
+        m_data.m_dataProvider.adoptCF(CGDataProviderCreateWithData(0, m_data.m_data, numBytes.unsafeGet(), releaseImageData));
     }
 
     if (!cgContext)
         return;
 
     m_context = adoptPtr(new GraphicsContext(cgContext.get()));
+    m_context->applyDeviceScaleFactor(m_resolutionScale);
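+    // With the device scale factor applied to the CTM, the transforms below are
+    // expressed in logical coordinates, so the flip translates by the logical height.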
     m_context->scale(FloatSize(1, -1));
-    m_context->translate(0, -height.unsafeGet());
+    m_context->translate(0, -size.height());
     m_context->setIsAcceleratedContext(accelerateRendering);
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
     m_data.m_lastFlushTime = currentTimeMS();
 #endif
     success = true;
@@ -182,14 +192,9 @@ ImageBuffer::~ImageBuffer()
 {
 }
 
-size_t ImageBuffer::dataSize() const
-{
-    return m_size.height() * m_data.m_bytesPerRow.unsafeGet();
-}
-
 GraphicsContext* ImageBuffer::context() const
 {
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
     // Force a flush if last flush was more than 20ms ago
     if (m_context->isAcceleratedContext()) {
         double elapsedTime = currentTimeMS() - m_data.m_lastFlushTime;
@@ -206,9 +211,18 @@ GraphicsContext* ImageBuffer::context() const
     return m_context.get();
 }
 
-PassRefPtr<Image> ImageBuffer::copyImage(BackingStoreCopy copyBehavior) const
+PassRefPtr<Image> ImageBuffer::copyImage(BackingStoreCopy copyBehavior, ScaleBehavior scaleBehavior) const
 {
-    RetainPtr<CGImageRef> image = copyNativeImage(copyBehavior);
+    RetainPtr<CGImageRef> image;
+    if (m_resolutionScale == 1 || scaleBehavior == Unscaled)
+        image.adoptCF(copyNativeImage(copyBehavior));
+    else {
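+        // The native image is at backing-store resolution; redraw it into a context
+        // of the logical size to return a copy scaled back to logical pixels.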
+        image.adoptCF(copyNativeImage(DontCopyBackingStore));
+        RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
+        CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
+        CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
+        image.adoptCF(CGBitmapContextCreateImage(context.get()));
+    }
 
     if (!image)
         return 0;
@@ -216,13 +230,18 @@ PassRefPtr<Image> ImageBuffer::copyImage(BackingStoreCopy copyBehavior) const
     return BitmapImage::create(image.get());
 }
 
+BackingStoreCopy ImageBuffer::fastCopyImageMode()
+{
+    return DontCopyBackingStore;
+}
+
 NativeImagePtr ImageBuffer::copyNativeImage(BackingStoreCopy copyBehavior) const
 {
     CGImageRef image = 0;
     if (!m_context->isAcceleratedContext()) {
         switch (copyBehavior) {
         case DontCopyBackingStore:
-            image = CGImageCreate(m_size.width(), m_size.height(), 8, 32, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo, m_data.m_dataProvider.get(), 0, true, kCGRenderingIntentDefault);
+            image = CGImageCreate(internalSize().width(), internalSize().height(), 8, 32, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo, m_data.m_dataProvider.get(), 0, true, kCGRenderingIntentDefault);
             break;
         case CopyBackingStore:
             image = CGBitmapContextCreateImage(context()->platformContext());
@@ -235,7 +254,7 @@ NativeImagePtr ImageBuffer::copyNativeImage(BackingStoreCopy copyBehavior) const
 #if USE(IOSURFACE_CANVAS_BACKING_STORE)
     else {
         image = wkIOSurfaceContextCreateImage(context()->platformContext());
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
         m_data.m_lastFlushTime = currentTimeMS();
 #endif
     }
@@ -244,7 +263,7 @@ NativeImagePtr ImageBuffer::copyNativeImage(BackingStoreCopy copyBehavior) const
     return image;
 }
 
-void ImageBuffer::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, bool useLowQualityScale)
+void ImageBuffer::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, BlendMode blendMode, bool useLowQualityScale)
 {
     UNUSED_PARAM(useLowQualityScale);
     ColorSpace colorSpace = (destContext == m_context) ? ColorSpaceDeviceRGB : styleColorSpace;
@@ -255,28 +274,34 @@ void ImageBuffer::draw(GraphicsContext* destContext, ColorSpace styleColorSpace,
     else
         image.adoptCF(copyNativeImage(DontCopyBackingStore));
 
-    destContext->drawNativeImage(image.get(), m_size, colorSpace, destRect, srcRect, op);
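+    // The source rect is in logical coordinates; scale it to backing-store pixels
+    // before drawing.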
+    FloatRect adjustedSrcRect = srcRect;
+    adjustedSrcRect.scale(m_resolutionScale, m_resolutionScale);
+    destContext->drawNativeImage(image.get(), internalSize(), colorSpace, destRect, adjustedSrcRect, op, blendMode);
 }
 
 void ImageBuffer::drawPattern(GraphicsContext* destContext, const FloatRect& srcRect, const AffineTransform& patternTransform, const FloatPoint& phase, ColorSpace styleColorSpace, CompositeOperator op, const FloatRect& destRect)
 {
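+    // As in draw(), map the logical source rect onto backing-store pixels.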
+    FloatRect adjustedSrcRect = srcRect;
+    adjustedSrcRect.scale(m_resolutionScale, m_resolutionScale);
+
     if (!m_context->isAcceleratedContext()) {
         if (destContext == m_context || destContext->isAcceleratedContext()) {
             RefPtr<Image> copy = copyImage(CopyBackingStore); // Drawing into our own buffer, need to deep copy.
-            copy->drawPattern(destContext, srcRect, patternTransform, phase, styleColorSpace, op, destRect);
+            copy->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
         } else {
             RefPtr<Image> imageForRendering = copyImage(DontCopyBackingStore);
-            imageForRendering->drawPattern(destContext, srcRect, patternTransform, phase, styleColorSpace, op, destRect);
+            imageForRendering->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
         }
     } else {
         RefPtr<Image> copy = copyImage(CopyBackingStore);
-        copy->drawPattern(destContext, srcRect, patternTransform, phase, styleColorSpace, op, destRect);
+        copy->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
     }
 }
 
 void ImageBuffer::clip(GraphicsContext* contextToClip, const FloatRect& rect) const
 {
     CGContextRef platformContextToClip = contextToClip->platformContext();
+    // FIXME: This image needs to be grayscale to be used as an alpha mask here.
     RetainPtr<CGImageRef> image(AdoptCF, copyNativeImage(DontCopyBackingStore));
     CGContextTranslateCTM(platformContextToClip, rect.x(), rect.y() + rect.height());
     CGContextScaleCTM(platformContextToClip, 1, -1);
@@ -285,48 +310,64 @@ void ImageBuffer::clip(GraphicsContext* contextToClip, const FloatRect& rect) co
     CGContextTranslateCTM(platformContextToClip, -rect.x(), -rect.y() - rect.height());
 }
 
-PassRefPtr<ByteArray> ImageBuffer::getUnmultipliedImageData(const IntRect& rect) const
+PassRefPtr<Uint8ClampedArray> ImageBuffer::getUnmultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
 {
     if (m_context->isAcceleratedContext()) {
         CGContextFlush(context()->platformContext());
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
         m_data.m_lastFlushTime = currentTimeMS();
 #endif
     }
-    return m_data.getData(rect, m_size, m_context->isAcceleratedContext(), true);
+    return m_data.getData(rect, internalSize(), m_context->isAcceleratedContext(), true, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
 }
 
-PassRefPtr<ByteArray> ImageBuffer::getPremultipliedImageData(const IntRect& rect) const
+PassRefPtr<Uint8ClampedArray> ImageBuffer::getPremultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
 {
     if (m_context->isAcceleratedContext()) {
         CGContextFlush(context()->platformContext());
-#if defined(BUILDING_ON_LION)
+#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
         m_data.m_lastFlushTime = currentTimeMS();
 #endif
     }
-    return m_data.getData(rect, m_size, m_context->isAcceleratedContext(), false);
+    return m_data.getData(rect, internalSize(), m_context->isAcceleratedContext(), false, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
 }
 
-void ImageBuffer::putUnmultipliedImageData(ByteArray* source, const IntSize& sourceSize, const IntRect& sourceRect, const IntPoint& destPoint)
+void ImageBuffer::putByteArray(Multiply multiplied, Uint8ClampedArray* source, const IntSize& sourceSize, const IntRect& sourceRect, const IntPoint& destPoint, CoordinateSystem coordinateSystem)
 {
-    if (m_context->isAcceleratedContext()) {
-        CGContextFlush(context()->platformContext());
-#if defined(BUILDING_ON_LION)
-        m_data.m_lastFlushTime = currentTimeMS();
-#endif
+    if (!m_context->isAcceleratedContext()) {
+        m_data.putData(source, sourceSize, sourceRect, destPoint, internalSize(), m_context->isAcceleratedContext(), multiplied == Unmultiplied, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
+        return;
     }
-    m_data.putData(source, sourceSize, sourceRect, destPoint, m_size, m_context->isAcceleratedContext(), true);
-}
 
-void ImageBuffer::putPremultipliedImageData(ByteArray* source, const IntSize& sourceSize, const IntRect& sourceRect, const IntPoint& destPoint)
-{
-    if (m_context->isAcceleratedContext()) {
-        CGContextFlush(context()->platformContext());
-#if defined(BUILDING_ON_LION)
-        m_data.m_lastFlushTime = currentTimeMS();
+#if USE(IOSURFACE_CANVAS_BACKING_STORE)
+    // Make a copy of the source to ensure the bits don't change before being drawn
+    IntSize sourceCopySize(sourceRect.width(), sourceRect.height());
+    OwnPtr<ImageBuffer> sourceCopy = ImageBuffer::create(sourceCopySize, 1, ColorSpaceDeviceRGB, Unaccelerated);
+    if (!sourceCopy)
+        return;
+
+    sourceCopy->m_data.putData(source, sourceSize, sourceRect, IntPoint(-sourceRect.x(), -sourceRect.y()), sourceCopy->internalSize(), sourceCopy->context()->isAcceleratedContext(), multiplied == Unmultiplied, 1);
+
+    // Set up context for using drawImage as a direct bit copy
+    CGContextRef destContext = context()->platformContext();
+    CGContextSaveGState(destContext);
+    if (coordinateSystem == LogicalCoordinateSystem)
+        CGContextConcatCTM(destContext, AffineTransform(wkGetUserToBaseCTM(destContext)).inverse());
+    else
+        CGContextConcatCTM(destContext, AffineTransform(CGContextGetCTM(destContext)).inverse());
+    wkCGContextResetClip(destContext);
+    CGContextSetInterpolationQuality(destContext, kCGInterpolationNone);
+    CGContextSetAlpha(destContext, 1.0);
+    CGContextSetBlendMode(destContext, kCGBlendModeCopy);
+    CGContextSetShadowWithColor(destContext, CGSizeZero, 0, 0);
+
+    // Draw the image in CG coordinate space
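+    // CoreGraphics has its origin at the bottom-left, so flip the y coordinate of
+    // the destination point within the buffer's height.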
+    IntPoint destPointInCGCoords(destPoint.x() + sourceRect.x(), (coordinateSystem == LogicalCoordinateSystem ? logicalSize() : internalSize()).height() - (destPoint.y() + sourceRect.y()) - sourceRect.height());
+    IntRect destRectInCGCoords(destPointInCGCoords, sourceCopySize);
+    RetainPtr<CGImageRef> sourceCopyImage(AdoptCF, sourceCopy->copyNativeImage());
+    CGContextDrawImage(destContext, destRectInCGCoords, sourceCopyImage.get());
+    CGContextRestoreGState(destContext);
 #endif
-    }
-    m_data.putData(source, sourceSize, sourceRect, destPoint, m_size, m_context->isAcceleratedContext(), false);
 }
 
 static inline CFStringRef jpegUTI()
@@ -340,8 +381,7 @@ static inline CFStringRef jpegUTI()
 static RetainPtr<CFStringRef> utiFromMIMEType(const String& mimeType)
 {
 #if PLATFORM(MAC)
-    RetainPtr<CFStringRef> mimeTypeCFString(AdoptCF, mimeType.createCFString());
-    return RetainPtr<CFStringRef>(AdoptCF, UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, mimeTypeCFString.get(), 0));
+    return adoptCF(UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, mimeType.createCFString().get(), 0));
 #else
     ASSERT(isMainThread()); // It is unclear if CFSTR is threadsafe.
 
@@ -362,72 +402,79 @@ static RetainPtr<CFStringRef> utiFromMIMEType(const String& mimeType)
 #endif
 }
 
-static String CGImageToDataURL(CGImageRef image, const String& mimeType, const double* quality)
+static bool CGImageEncodeToData(CGImageRef image, CFStringRef uti, const double* quality, CFMutableDataRef data)
 {
-    RetainPtr<CFMutableDataRef> data(AdoptCF, CFDataCreateMutable(kCFAllocatorDefault, 0));
-    if (!data)
-        return "data:,";
-
-    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
-    ASSERT(uti);
+    if (!image || !uti || !data)
+        return false;
 
-    RetainPtr<CGImageDestinationRef> destination(AdoptCF, CGImageDestinationCreateWithData(data.get(), uti.get(), 1, 0));
+    RetainPtr<CGImageDestinationRef> destination(AdoptCF, CGImageDestinationCreateWithData(data, uti, 1, 0));
     if (!destination)
-        return "data:,";
+        return false;
 
     RetainPtr<CFDictionaryRef> imageProperties = 0;
-    if (CFEqual(uti.get(), jpegUTI()) && quality && *quality >= 0.0 && *quality <= 1.0) {
-        // Apply the compression quality to the image destination.
+    if (CFEqual(uti, jpegUTI()) && quality && *quality >= 0.0 && *quality <= 1.0) {
+        // Apply the compression quality to the JPEG image destination.
         RetainPtr<CFNumberRef> compressionQuality(AdoptCF, CFNumberCreate(kCFAllocatorDefault, kCFNumberDoubleType, quality));
         const void* key = kCGImageDestinationLossyCompressionQuality;
         const void* value = compressionQuality.get();
         imageProperties.adoptCF(CFDictionaryCreate(0, &key, &value, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
     }
 
-    // Setting kCGImageDestinationBackgroundColor to black in imageProperties would allow saving some math in the
-    // calling functions, but it doesn't seem to work.
+    // Setting kCGImageDestinationBackgroundColor to black for JPEG images in imageProperties would save some math
+    // in the calling functions, but it doesn't seem to work.
 
     CGImageDestinationAddImage(destination.get(), image, imageProperties.get());
-    CGImageDestinationFinalize(destination.get());
+    return CGImageDestinationFinalize(destination.get());
+}
+
+static String CGImageToDataURL(CGImageRef image, const String& mimeType, const double* quality)
+{
+    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
+    ASSERT(uti);
+
+    RetainPtr<CFMutableDataRef> data(AdoptCF, CFDataCreateMutable(kCFAllocatorDefault, 0));
+    if (!CGImageEncodeToData(image, uti.get(), quality, data.get()))
+        return "data:,";
 
-    Vector<char> out;
-    base64Encode(reinterpret_cast<const char*>(CFDataGetBytePtr(data.get())), CFDataGetLength(data.get()), out);
+    Vector<char> base64Data;
+    base64Encode(reinterpret_cast<const char*>(CFDataGetBytePtr(data.get())), CFDataGetLength(data.get()), base64Data);
 
-    return "data:" + mimeType + ";base64," + out;
+    return "data:" + mimeType + ";base64," + base64Data;
 }
 
-String ImageBuffer::toDataURL(const String& mimeType, const double* quality) const
+String ImageBuffer::toDataURL(const String& mimeType, const double* quality, CoordinateSystem) const
 {
     ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));
-    RetainPtr<CGImageRef> image;
+
     RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
     ASSERT(uti);
-    RefPtr<ByteArray> arr;
+
+    RefPtr<Uint8ClampedArray> premultipliedData;
+    RetainPtr<CGImageRef> image;
 
     if (CFEqual(uti.get(), jpegUTI())) {
         // JPEGs don't have an alpha channel, so we have to manually composite on top of black.
-        arr = getPremultipliedImageData(IntRect(IntPoint(0, 0), m_size));
-
-        unsigned char *data = arr->data();
-        for (int i = 0; i < width() * height(); i++)
-            data[i * 4 + 3] = 255; // The image is already premultiplied, so we just need to make it opaque.
+        premultipliedData = getPremultipliedImageData(IntRect(IntPoint(0, 0), logicalSize()));
+        if (!premultipliedData)
+            return "data:,";
 
         RetainPtr<CGDataProviderRef> dataProvider;
-    
-        dataProvider.adoptCF(CGDataProviderCreateWithData(0, data,
-                                                          4 * width() * height(), 0));
-    
+        dataProvider.adoptCF(CGDataProviderCreateWithData(0, premultipliedData->data(), 4 * logicalSize().width() * logicalSize().height(), 0));
         if (!dataProvider)
             return "data:,";
 
-        image.adoptCF(CGImageCreate(width(), height(), 8, 32, 4 * width(),
-                                    CGColorSpaceCreateDeviceRGB(), kCGBitmapByteOrderDefault | kCGImageAlphaLast,
+        image.adoptCF(CGImageCreate(logicalSize().width(), logicalSize().height(), 8, 32, 4 * logicalSize().width(),
+                                    deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast,
                                     dataProvider.get(), 0, false, kCGRenderingIntentDefault));
-    } else
+    } else if (m_resolutionScale == 1)
         image.adoptCF(copyNativeImage(CopyBackingStore));
-
-    if (!image)
-        return "data:,";
+    else {
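+        // As in copyImage(), downsample the scaled backing store to the logical size
+        // before encoding.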
+        image.adoptCF(copyNativeImage(DontCopyBackingStore));
+        RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
+        CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
+        CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
+        image.adoptCF(CGBitmapContextCreateImage(context.get()));
+    }
 
     return CGImageToDataURL(image.get(), mimeType, quality);
 }
@@ -435,51 +482,49 @@ String ImageBuffer::toDataURL(const String& mimeType, const double* quality) con
 String ImageDataToDataURL(const ImageData& source, const String& mimeType, const double* quality)
 {
     ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));
-        
-    RetainPtr<CGImageRef> image;
-    RetainPtr<CGDataProviderRef> dataProvider;
 
-    unsigned char* data = source.data()->data()->data();
     RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
     ASSERT(uti);
-    Vector<uint8_t> dataVector;
+
+    CGImageAlphaInfo dataAlphaInfo = kCGImageAlphaLast;
+    unsigned char* data = source.data()->data();
+    Vector<uint8_t> premultipliedData;
+
     if (CFEqual(uti.get(), jpegUTI())) {
         // JPEGs don't have an alpha channel, so we have to manually composite on top of black.
-        dataVector.resize(4 * source.width() * source.height());
-        unsigned char *out = dataVector.data();
-        
-        for (int i = 0; i < source.width() * source.height(); i++) {
-            // Multiply color data by alpha, and set alpha to 255.
-            int alpha = data[4 * i + 3];
+        size_t size = 4 * source.width() * source.height();
+        if (!premultipliedData.tryReserveCapacity(size))
+            return "data:,";
+
+        premultipliedData.grow(size);
+        unsigned char* buffer = premultipliedData.data();
+        for (size_t i = 0; i < size; i += 4) {
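+            // Premultiply each color component by its alpha; the alpha byte itself is
+            // left untouched and ignored via kCGImageAlphaNoneSkipLast below.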
+            unsigned alpha = data[i + 3];
             if (alpha != 255) {
-                out[4 * i + 0] = data[4 * i + 0] * alpha / 255;
-                out[4 * i + 1] = data[4 * i + 1] * alpha / 255;
-                out[4 * i + 2] = data[4 * i + 2] * alpha / 255;
+                buffer[i + 0] = data[i + 0] * alpha / 255;
+                buffer[i + 1] = data[i + 1] * alpha / 255;
+                buffer[i + 2] = data[i + 2] * alpha / 255;
             } else {
-                out[4 * i + 0] = data[4 * i + 0];
-                out[4 * i + 1] = data[4 * i + 1];
-                out[4 * i + 2] = data[4 * i + 2];
+                buffer[i + 0] = data[i + 0];
+                buffer[i + 1] = data[i + 1];
+                buffer[i + 2] = data[i + 2];
             }
-            out[4 * i + 3] = 255;
         }
 
-        data = out;
+        dataAlphaInfo = kCGImageAlphaNoneSkipLast; // Ignore the alpha channel.
+        data = premultipliedData.data();
     }
-    
-    dataProvider.adoptCF(CGDataProviderCreateWithData(0, data,
-                                                      4 * source.width() * source.height(), 0));
-    
+
+    RetainPtr<CGDataProviderRef> dataProvider;
+    dataProvider.adoptCF(CGDataProviderCreateWithData(0, data, 4 * source.width() * source.height(), 0));
     if (!dataProvider)
         return "data:,";
 
+    RetainPtr<CGImageRef> image;
     image.adoptCF(CGImageCreate(source.width(), source.height(), 8, 32, 4 * source.width(),
-                                CGColorSpaceCreateDeviceRGB(), kCGBitmapByteOrderDefault | kCGImageAlphaLast,
+                                deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | dataAlphaInfo,
                                 dataProvider.get(), 0, false, kCGRenderingIntentDefault));
-                                
-        
-    if (!image)
-        return "data:,";
 
     return CGImageToDataURL(image.get(), mimeType, quality);
 }
+
 } // namespace WebCore