Add cookies to FastMalloc spans
author oliver@apple.com <oliver@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 26 Feb 2013 02:13:31 +0000 (02:13 +0000)
committer oliver@apple.com <oliver@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Tue, 26 Feb 2013 02:13:31 +0000 (02:13 +0000)
https://bugs.webkit.org/show_bug.cgi?id=110827

Reviewed by Michael Saboff.

Add a cookie to spans, and move some of the hardening around so
that it's more useful.
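
In outline: each Span now carries a small per-boot cookie derived from
boot-time entropy and the Span's own address. DeleteSpan and the slow
path of do_free verify the cookie before trusting a Span, so a stale or
forged Span descriptor trips a RELEASE_ASSERT instead of being freed.
(A simplified standalone sketch of the scheme appears after the Span
hunk in the diff below.)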

* wtf/FastMalloc.cpp:
(WTF::internalEntropyValue):
(WTF::spanInitializerCookie):
(WTF::Span::initCookie):
(WTF::Span::clearCookie):
(WTF::Span::isValid):
(Span):
(WTF::NewSpan):
(WTF::DeleteSpan):
(WTF::TCMalloc_ThreadCache_FreeList::Validate):
(WTF::TCMalloc_Central_FreeList::Populate):
(WTF::TCMalloc_ThreadCache::Deallocate):
(WTF::TCMalloc_ThreadCache::CreateCacheIfNecessary):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@143996 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/WTF/ChangeLog
Source/WTF/wtf/FastMalloc.cpp

diff --git a/Source/WTF/ChangeLog b/Source/WTF/ChangeLog
index 8308f17..8423682 100644
--- a/Source/WTF/ChangeLog
+++ b/Source/WTF/ChangeLog
@@ -1,3 +1,27 @@
+2013-02-25  Oliver Hunt  <oliver@apple.com>
+
+        Add cookies to FastMalloc spans
+        https://bugs.webkit.org/show_bug.cgi?id=110827
+
+        Reviewed by Michael Saboff.
+
+        Add a cookie to spans, and move some of the hardening around so
+        that it's more useful.
+
+        * wtf/FastMalloc.cpp:
+        (WTF::internalEntropyValue):
+        (WTF::spanInitializerCookie):
+        (WTF::Span::initCookie):
+        (WTF::Span::clearCookie):
+        (WTF::Span::isValid):
+        (Span):
+        (WTF::NewSpan):
+        (WTF::DeleteSpan):
+        (WTF::TCMalloc_ThreadCache_FreeList::Validate):
+        (WTF::TCMalloc_Central_FreeList::Populate):
+        (WTF::TCMalloc_ThreadCache::Deallocate):
+        (WTF::TCMalloc_ThreadCache::CreateCacheIfNecessary):
+
 2013-02-25  Jer Noble  <jer.noble@apple.com>
 
         Normalize ENABLE_ENCRYPTED_MEDIA definitions across all FeatureDefines files.
diff --git a/Source/WTF/wtf/FastMalloc.cpp b/Source/WTF/wtf/FastMalloc.cpp
index 771e3dd..c4e9a7e 100644
--- a/Source/WTF/wtf/FastMalloc.cpp
+++ b/Source/WTF/wtf/FastMalloc.cpp
@@ -512,19 +512,6 @@ namespace WTF {
 #define MESSAGE LOG_ERROR
 #define CHECK_CONDITION ASSERT
 
-#if ENABLE(TCMALLOC_HARDENING)
-/*
- * To make it harder to exploit use-after free style exploits
- * we mask the addresses we put into our linked lists with the
- * address of kLLHardeningMask.  Due to ASLR the address of
- * kLLHardeningMask should be sufficiently randomized to make direct
- * freelist manipulation much more difficult.
- */
-static const char kLLHardeningMask = 0;
-enum {
-    MaskKeyShift = 13
-};
-
 template <unsigned> struct EntropySource;
 template <> struct EntropySource<4> {
     static uint32_t value()
@@ -544,9 +531,22 @@ template <> struct EntropySource<8> {
     }
 };
 
+#if ENABLE(TCMALLOC_HARDENING)
+/*
+ * To make it harder to exploit use-after free style exploits
+ * we mask the addresses we put into our linked lists with the
+ * address of kLLHardeningMask.  Due to ASLR the address of
+ * kLLHardeningMask should be sufficiently randomized to make direct
+ * freelist manipulation much more difficult.
+ */
+static const char kLLHardeningMask = 0;
+enum {
+    MaskKeyShift = 13
+};
+
 static ALWAYS_INLINE uintptr_t internalEntropyValue() 
 {
-    static uintptr_t value = EntropySource<sizeof(uintptr_t)>::value();
+    static uintptr_t value = EntropySource<sizeof(uintptr_t)>::value() | 1;
     ASSERT(value);
     return value;
 }
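
ORing the entropy with 1 is the functional change in this hunk. Freelist
pointers are stored XOR-masked with this entropy value, so an entropy of
zero would store them unmasked and silently disable the hardening; forcing
the low bit on guarantees a nonzero mask at the cost of one predictable
bit. A minimal sketch of the idea, using a hypothetical helper rather than
WebKit's real HardenedSLL plumbing:

    #include <cstdint>

    // Sketch only: why the entropy must be nonzero. With entropy == 0 the
    // XOR is the identity and the "masked" pointer is just the raw pointer.
    static uintptr_t maskPointer(void* p, uintptr_t entropy)
    {
        return reinterpret_cast<uintptr_t>(p) ^ entropy;
    }
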
@@ -573,23 +573,27 @@ static ALWAYS_INLINE uint32_t freedObjectEndPoison()
 #define PTR_TO_UINT32(ptr) static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr))
 #define END_POISON_INDEX(allocationSize) (((allocationSize) - sizeof(uint32_t)) / sizeof(uint32_t))
 #define POISON_ALLOCATION(allocation, allocationSize) do { \
-    reinterpret_cast<uint32_t*>(allocation)[0] = 1; \
-    reinterpret_cast<uint32_t*>(allocation)[1] = 1; \
-    if (allocationSize < 4 * sizeof(uint32_t)) \
+    ASSERT((allocationSize) >= 2 * sizeof(uint32_t)); \
+    reinterpret_cast<uint32_t*>(allocation)[0] = 0xbadbeef1; \
+    reinterpret_cast<uint32_t*>(allocation)[1] = 0xbadbeef3; \
+    if ((allocationSize) < 4 * sizeof(uint32_t)) \
         break; \
-    reinterpret_cast<uint32_t*>(allocation)[2] = 1; \
-    reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = 1; \
+    reinterpret_cast<uint32_t*>(allocation)[2] = 0xbadbeef5; \
+    reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = 0xbadbeef7; \
 } while (false);
 
 #define POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, startPoison, endPoison) do { \
-    if (allocationSize < 4 * sizeof(uint32_t)) \
+    ASSERT((allocationSize) >= 2 * sizeof(uint32_t)); \
+    reinterpret_cast<uint32_t*>(allocation)[0] = 0xbadbeef9; \
+    reinterpret_cast<uint32_t*>(allocation)[1] = 0xbadbeefb; \
+    if ((allocationSize) < 4 * sizeof(uint32_t)) \
         break; \
     reinterpret_cast<uint32_t*>(allocation)[2] = (startPoison) ^ PTR_TO_UINT32(allocation); \
     reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = (endPoison) ^ PTR_TO_UINT32(allocation); \
 } while (false)
 
 #define POISON_DEALLOCATION(allocation, allocationSize) \
-    POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, freedObjectStartPoison(), freedObjectEndPoison())
+    POISON_DEALLOCATION_EXPLICIT(allocation, (allocationSize), freedObjectStartPoison(), freedObjectEndPoison())
 
 #define MAY_BE_POISONED(allocation, allocationSize) (((allocationSize) >= 4 * sizeof(uint32_t)) && ( \
     (reinterpret_cast<uint32_t*>(allocation)[2] == (freedObjectStartPoison() ^ PTR_TO_UINT32(allocation))) || \
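
Two things change in this hunk. First, the poison words written at
allocation and deallocation time are now distinct odd sentinel values
(0xbadbeef1 through 0xbadbeefb) rather than 1: odd values can never be
valid aligned pointers, and a crash dump now shows which path last
poisoned the block and at which slot. Second, both macros gain an ASSERT
that the allocation holds at least two words, and their size argument is
parenthesized so that expressions such as ByteSizeForClass(cl) expand
safely. For reference, MAY_BE_POISONED (whose definition is cut off at
the end of this hunk) reads as the following function; the second clause
is an assumption based on the visible first one:

    // Assumed function-style reading of the MAY_BE_POISONED macro above;
    // the end-poison clause is inferred, as the macro is truncated here.
    static bool mayBePoisoned(void* allocation, size_t allocationSize)
    {
        if (allocationSize < 4 * sizeof(uint32_t))
            return false;
        uint32_t* words = reinterpret_cast<uint32_t*>(allocation);
        uint32_t key = PTR_TO_UINT32(allocation);
        return words[2] == (freedObjectStartPoison() ^ key)
            || words[END_POISON_INDEX(allocationSize)] == (freedObjectEndPoison() ^ key);
    }
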
@@ -1150,6 +1154,20 @@ static size_t AllocationSize(size_t bytes) {
   }
 }
 
+enum {
+    kSpanCookieBits = 10,
+    kSpanCookieMask = (1 << 10) - 1,
+    kSpanThisShift = 7
+};
+
+static uint32_t spanValidationCookie;
+static uint32_t spanInitializerCookie()
+{
+    static uint32_t value = EntropySource<sizeof(uint32_t)>::value() & kSpanCookieMask;
+    spanValidationCookie = value;
+    return value;
+}
+
 // Information kept for a span (a contiguous run of pages).
 struct Span {
   PageID        start;          // Starting page number
@@ -1172,6 +1190,17 @@ public:
   unsigned int  sizeclass : 8;  // Size-class for small objects (or 0)
   unsigned int  refcount : 11;  // Number of non-free objects
   bool decommitted : 1;
+  void initCookie()
+  {
+      m_cookie = ((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask) ^ spanInitializerCookie();
+  }
+  void clearCookie() { m_cookie = 0; }
+  bool isValid() const
+  {
+      return (((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask) ^ m_cookie) == spanValidationCookie;
+  }
+private:
+  uint32_t m_cookie : kSpanCookieBits;
 
 #undef SPAN_HISTORY
 #ifdef SPAN_HISTORY
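
Taken together, the two cookie hunks work like this: spanInitializerCookie()
latches a random 10-bit value into spanValidationCookie the first time a
Span is initialized; initCookie() stores that value XORed with 10 bits of
the Span's own address; and isValid() re-derives the address bits and
checks that XORing them with the stored cookie recovers the latched value.
A cookie copied into a Span at a different address therefore fails
validation. A simplified, self-contained sketch (the entropy source is
replaced by a fixed stand-in; everything else mirrors the patch):

    #include <cstdint>
    #include <cstdio>

    enum {
        kSpanCookieBits = 10,
        kSpanCookieMask = (1 << kSpanCookieBits) - 1,
        kSpanThisShift = 7
    };

    static uint32_t spanValidationCookie;
    static uint32_t spanInitializerCookie()
    {
        static uint32_t value = 0x2a5 & kSpanCookieMask; // stand-in for EntropySource
        spanValidationCookie = value;
        return value;
    }

    struct Span {
        void initCookie()
        {
            m_cookie = ((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask)
                ^ spanInitializerCookie();
        }
        void clearCookie() { m_cookie = 0; }
        bool isValid() const
        {
            return (((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask)
                ^ m_cookie) == spanValidationCookie;
        }
        uint32_t m_cookie : kSpanCookieBits;
    };

    int main()
    {
        Span s;
        s.initCookie();
        std::printf("fresh span valid: %d\n", s.isValid());   // prints 1
        s.clearCookie();
        std::printf("cleared span valid: %d\n", s.isValid()); // 0, with probability 1023/1024
    }

Note that the check is only 10 bits wide, so a forged or stale cookie
still passes about one time in 1024; the goal is cheap probabilistic
detection, not a cryptographic guarantee.
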
@@ -1202,6 +1231,7 @@ static Span* NewSpan(PageID p, Length len) {
   memset(result, 0, sizeof(*result));
   result->start = p;
   result->length = len;
+  result->initCookie();
 #ifdef SPAN_HISTORY
   result->nexthistory = 0;
 #endif
@@ -1209,10 +1239,12 @@ static Span* NewSpan(PageID p, Length len) {
 }
 
 static inline void DeleteSpan(Span* span) {
+  RELEASE_ASSERT(span->isValid());
 #ifndef NDEBUG
   // In debug mode, trash the contents of deleted Spans
   memset(span, 0x3f, sizeof(*span));
 #endif
+  span->clearCookie();
   span_allocator.Delete(span);
 }
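
The ordering in DeleteSpan matters: the cookie is validated first, the
debug-only memset then trashes the contents, and clearCookie() runs last
so the cookie is zeroed in release builds as well. A second DeleteSpan on
a span that has not been handed out again (a straight double free)
therefore fails the RELEASE_ASSERT with probability about 1023/1024, the
cookie being only 10 bits wide.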
 
@@ -2587,10 +2619,12 @@ class TCMalloc_ThreadCache_FreeList {
     // Runs through the linked list to ensure that
     // we can do that, and ensures that 'missing'
     // is not present
-    NEVER_INLINE void Validate(HardenedSLL missing) {
+    NEVER_INLINE void Validate(HardenedSLL missing, size_t size) {
         HardenedSLL node = list_;
+        UNUSED_PARAM(size);
         while (node) {
             RELEASE_ASSERT(node != missing);
+            RELEASE_ASSERT(IS_DEFINITELY_POISONED(node.value(), size));
             node = SLL_Next(node, entropy_);
         }
     }
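
Validate() now takes the allocation size so that, besides confirming that
the node being freed is not already on the list, it can RELEASE_ASSERT
that every node on the list still carries its deallocation poison; a node
whose poison has been overwritten is evidence of a write-after-free. The
UNUSED_PARAM(size) presumably covers configurations in which
IS_DEFINITELY_POISONED expands to something that ignores its arguments
and would otherwise leave size unreferenced.
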
@@ -3121,7 +3155,15 @@ ALWAYS_INLINE void TCMalloc_Central_FreeList::Populate() {
   }
   ASSERT(ptr == start);
   ASSERT(ptr == head.value());
-  POISON_DEALLOCATION_EXPLICIT(ptr, size, startPoison, endPoison);
+#ifndef NDEBUG
+    {
+        HardenedSLL node = head;
+        while (node) {
+            ASSERT(IS_DEFINITELY_POISONED(node.value(), size));
+            node = SLL_Next(node, entropy_);
+        }
+    }
+#endif
   span->objects = head;
   ASSERT(span->objects.value() == head.value());
   span->refcount = 0; // No sub-object in use yet
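
In Populate(), the explicit re-poisoning of the head object is dropped
(each object is presumably already poisoned as the freelist is threaded
together earlier in this function) and replaced with a debug-only sweep
that walks the completed list and asserts that every object is definitely
poisoned before the span is published to the central cache.
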
@@ -3200,7 +3242,7 @@ inline void TCMalloc_ThreadCache::Deallocate(HardenedSLL ptr, size_t cl) {
   size_ += allocationSize;
   FreeList* list = &list_[cl];
   if (MAY_BE_POISONED(ptr.value(), allocationSize))
-      list->Validate(ptr);
+      list->Validate(ptr, allocationSize);
 
   POISON_DEALLOCATION(ptr.value(), allocationSize);
   list->Push(ptr);
@@ -3954,6 +3996,7 @@ static ALWAYS_INLINE void do_free(void* ptr) {
 
   if (cl == 0) {
     span = pageheap->GetDescriptor(p);
+    RELEASE_ASSERT(span->isValid());
     cl = span->sizeclass;
     pageheap->CacheSizeClass(p, cl);
   }
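
On the do_free() slow path, the Span fetched from the page heap is now
validated before its sizeclass is trusted and cached, turning a stale or
forged page-to-span mapping into a near-certain RELEASE_ASSERT crash
rather than a type-confused free.
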
@@ -3966,8 +4009,7 @@ static ALWAYS_INLINE void do_free(void* ptr) {
       heap->Deallocate(HardenedSLL::create(ptr), cl);
     } else {
       // Delete directly into central cache
-      size_t allocationSize = ByteSizeForClass(cl);
-      POISON_DEALLOCATION(ptr, allocationSize);
+      POISON_DEALLOCATION(ptr, ByteSizeForClass(cl));
       SLL_SetNext(HardenedSLL::create(ptr), HardenedSLL::null(), central_cache[cl].entropy());
       central_cache[cl].InsertRange(HardenedSLL::create(ptr), HardenedSLL::create(ptr), 1);
     }
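
The final hunk is a cosmetic simplification: the allocationSize temporary
is folded into the POISON_DEALLOCATION call; with the macro's arguments
now parenthesized at every expansion, passing the expression directly is
safe.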