+2002-11-26 Maciej Stachowiak <mjs@apple.com>
+
+ Change ActivationImp to be allocated via the garbage collector
+ again instead of on the stack. This fixes the following four
+ regressions but sadly it causes a 6% performance hit. It's
+ probably possible to reduce the hit a bit by being smarter about
+ inlining and the way the marking list variant is implemented, but
+ I'll look into that later.
+
+ - fixed 3111500 - REGRESSION: crash in "KJS::ScopeChain::mark()" on www.posci.com
+ - fixed 3111145 - REGRESSION: reproducible crash in KJS hashtable lookup at time.com
+ - fixed 3110897 - REGRESSION: javascript crasher on http://bmwgallery.tripod.com/
+ - fixed 3109987 - REGRESSION: Reproducible crash in KJS ObjectImp at live365.com
+
+ Also:
+
+ - improved DEBUG_COLLECTOR mode a bit by never giving memory back
+ to the system.
+
+ * kjs/collector.cpp:
+ * kjs/context.h:
+ * kjs/function.cpp:
+ (ActivationImp::ActivationImp):
+ (ActivationImp::mark):
+ (ActivationImp::createArgumentsObject):
+ * kjs/function.h:
+ * kjs/internal.cpp:
+ (ContextImp::ContextImp):
+ (ContextImp::mark):
+ * kjs/list.cpp:
+ * kjs/list.h:
+ * kjs/value.cpp:
+ (Value::Value):
+
2002-11-26 Darin Adler <darin@apple.com>
* kjs/property_map.cpp:
+2002-11-26 Maciej Stachowiak <mjs@apple.com>
+
+ Change ActivationImp to be allocated via the garbage collector
+ again instead of on the stack. This fixes the following four
+ regressions but sadly it causes a 6% performance hit. It's
+ probably possible to reduce the hit a bit by being smarter about
+ inlining and the way the marking list variant is implemented, but
+ I'll look into that later.
+
+ - fixed 3111500 - REGRESSION: crash in "KJS::ScopeChain::mark()" on www.posci.com
+ - fixed 3111145 - REGRESSION: reproducible crash in KJS hashtable lookup at time.com
+ - fixed 3110897 - REGRESSION: javascript crasher on http://bmwgallery.tripod.com/
+ - fixed 3109987 - REGRESSION: Reproducible crash in KJS ObjectImp at live365.com
+
+ Also:
+
+ - improved DEBUG_COLLECTOR mode a bit by never giving memory back
+ to the system.
+
+ * kjs/collector.cpp:
+ * kjs/context.h:
+ * kjs/function.cpp:
+ (ActivationImp::ActivationImp):
+ (ActivationImp::mark):
+ (ActivationImp::createArgumentsObject):
+ * kjs/function.h:
+ * kjs/internal.cpp:
+ (ContextImp::ContextImp):
+ (ContextImp::mark):
+ * kjs/list.cpp:
+ * kjs/list.h:
+ * kjs/value.cpp:
+ (Value::Value):
+
2002-11-26 Darin Adler <darin@apple.com>
* kjs/property_map.cpp:
+2002-11-26 Maciej Stachowiak <mjs@apple.com>
+
+ Change ActivationImp to be allocated via the garbage collector
+ again instead of on the stack. This fixes the following four
+ regressions but sadly it causes a 6% performance hit. It's
+ probably possible to reduce the hit a bit by being smarter about
+ inlining and the way the marking list variant is implemented, but
+ I'll look into that later.
+
+ - fixed 3111500 - REGRESSION: crash in "KJS::ScopeChain::mark()" on www.posci.com
+ - fixed 3111145 - REGRESSION: reproducible crash in KJS hashtable lookup at time.com
+ - fixed 3110897 - REGRESSION: javascript crasher on http://bmwgallery.tripod.com/
+ - fixed 3109987 - REGRESSION: Reproducible crash in KJS ObjectImp at live365.com
+
+ Also:
+
+ - improved DEBUG_COLLECTOR mode a bit by never giving memory back
+ to the system.
+
+ * kjs/collector.cpp:
+ * kjs/context.h:
+ * kjs/function.cpp:
+ (ActivationImp::ActivationImp):
+ (ActivationImp::mark):
+ (ActivationImp::createArgumentsObject):
+ * kjs/function.h:
+ * kjs/internal.cpp:
+ (ContextImp::ContextImp):
+ (ContextImp::mark):
+ * kjs/list.cpp:
+ * kjs/list.h:
+ * kjs/value.cpp:
+ (Value::Value):
+
2002-11-26 Darin Adler <darin@apple.com>
* kjs/property_map.cpp:
bool Collector::collect()
{
+ puts("COLLECT");
bool deleted = false;
// MARK: first mark all referenced objects recursively
if (heap.blocks[block]->usedCells == 0) {
emptyBlocks++;
if (emptyBlocks > SPARE_EMPTY_BLOCKS) {
- delete heap.blocks[block];
-
+#if !DEBUG_COLLECTOR
+ free(heap.blocks[block]);
+#endif
// swap with the last block so we compact as we go
heap.blocks[block] = heap.blocks[heap.usedBlocks - 1];
heap.usedBlocks--;
imp->_flags == (ValueImp::VI_GCALLOWED | ValueImp::VI_CREATED)) {
imp->~ValueImp();
+#if DEBUG_COLLECTOR
+ heap.oversizeCells[cell]->u.freeCell.zeroIfFree = 0;
+#else
free((void *)imp);
+#endif
// swap with the last oversize cell so we compact as we go
heap.oversizeCells[cell] = heap.oversizeCells[heap.usedOversizeCells - 1];
private:
InterpreterImp *_interpreter;
ContextImp *_callingContext;
- ActivationImp _activationImp;
FunctionImp *_function;
const List *_arguments;
Object activation;
const ClassInfo ActivationImp::info = {"Activation", 0, 0, 0};
// ECMA 10.1.6
-ActivationImp::ActivationImp(ContextImp *context)
- : _context(context), _argumentsObject(0)
+ActivationImp::ActivationImp(FunctionImp *function, const List &arguments)
+ : _function(function), _arguments(true), _argumentsObject(0)
{
+ _arguments = arguments.copy();
// FIXME: Do we need to support enumerating the arguments property?
}
void ActivationImp::mark()
{
+ if (_function && !_function->marked())
+ _function->mark();
+ _arguments.mark();
if (_argumentsObject && !_argumentsObject->marked())
_argumentsObject->mark();
ObjectImp::mark();
void ActivationImp::createArgumentsObject(ExecState *exec) const
{
- FunctionImp *function = _context->function();
- const List *arguments = _context->arguments();
- if (arguments)
- _argumentsObject = new ArgumentsImp(exec, function, *arguments);
- else
- _argumentsObject = new ArgumentsImp(exec, function);
+ _argumentsObject = new ArgumentsImp(exec, _function, _arguments);
}
// ------------------------------ GlobalFunc -----------------------------------
class ActivationImp : public ObjectImp {
public:
- ActivationImp(ContextImp *);
+ ActivationImp(FunctionImp *function, const List &arguments);
virtual Value get(ExecState *exec, const Identifier &propertyName) const;
virtual void put(ExecState *exec, const Identifier &propertyName, const Value &value, int attr = None);
private:
void createArgumentsObject(ExecState *exec) const;
- const ContextImp *_context;
+ FunctionImp *_function;
+ List _arguments;
mutable ArgumentsImp *_argumentsObject;
};
// ECMA 10.2
ContextImp::ContextImp(Object &glob, InterpreterImp *interpreter, Object &thisV, CodeType type,
ContextImp *callingCon, FunctionImp *func, const List *args)
- : _interpreter(interpreter), _activationImp(this), _function(func), _arguments(args)
+ : _interpreter(interpreter), _function(func), _arguments(args)
{
codeType = type;
_callingContext = callingCon;
// create and initialize activation object (ECMA 10.1.6)
if (type == FunctionCode || type == AnonymousCode ) {
- activation = Object(&_activationImp);
+ activation = Object(new ActivationImp(func, *args));
variable = activation;
} else {
activation = Object();
{
for (ContextImp *context = this; context; context = context->_callingContext) {
context->scope.mark();
- context->_activationImp.mark();
-#if DEBUG_COLLECTOR
- context->_activationImp._flags &= ~ValueImp::VI_MARKED;
-#endif
}
}
namespace KJS {
// tunable parameters
-const int poolSize = 16; // must be a power of 2
+const int poolSize = 32; // must be a power of 2
const int inlineValuesSize = 4;
// derived constants
delete imp;
}
-List::List() : _impBase(allocateListImp())
+List::List() : _impBase(allocateListImp()), _needsMarking(false)
{
ListImp *imp = static_cast<ListImp *>(_impBase);
imp->size = 0;
imp->capacity = 0;
imp->overflow = 0;
+ if (!_needsMarking) {
+ imp->valueRefCount = 1;
+ }
+#if DUMP_STATISTICS
+ if (++numLists > numListsHighWaterMark)
+ numListsHighWaterMark = numLists;
+ imp->sizeHighWaterMark = 0;
+#endif
+}
+
+List::List(bool needsMarking) : _impBase(allocateListImp()), _needsMarking(needsMarking)
+{
+ ListImp *imp = static_cast<ListImp *>(_impBase);
+ imp->size = 0;
+ imp->refCount = 1;
+ imp->capacity = 0;
+ imp->overflow = 0;
+
+ if (!_needsMarking) {
+ imp->valueRefCount = 1;
+ }
+
#if DUMP_STATISTICS
if (++numLists > numListsHighWaterMark)
numListsHighWaterMark = numLists;
#endif
}
-inline void List::derefValues()
+void List::derefValues()
{
ListImp *imp = static_cast<ListImp *>(_impBase);
overflow[i]->deref();
}
+void List::refValues()
+{
+ ListImp *imp = static_cast<ListImp *>(_impBase);
+
+ int size = imp->size;
+
+ int inlineSize = MIN(size, inlineValuesSize);
+ for (int i = 0; i != inlineSize; ++i)
+ imp->values[i]->ref();
+
+ int overflowSize = size - inlineSize;
+ ValueImp **overflow = imp->overflow;
+ for (int i = 0; i != overflowSize; ++i)
+ overflow[i]->ref();
+}
+
+void List::markValues()
+{
+ ListImp *imp = static_cast<ListImp *>(_impBase);
+
+ int size = imp->size;
+
+ int inlineSize = MIN(size, inlineValuesSize);
+ for (int i = 0; i != inlineSize; ++i) {
+ if (!imp->values[i]->marked()) {
+ imp->values[i]->mark();
+ }
+ }
+
+ int overflowSize = size - inlineSize;
+ ValueImp **overflow = imp->overflow;
+ for (int i = 0; i != overflowSize; ++i) {
+ if (!overflow[i]->marked()) {
+ overflow[i]->mark();
+ }
+ }
+}
+
void List::release()
{
ListImp *imp = static_cast<ListImp *>(_impBase);
++numListsBiggerThan[i];
#endif
- derefValues();
delete [] imp->overflow;
deallocateListImp(imp);
}
void List::clear()
{
- derefValues();
+ if (_impBase->valueRefCount > 0) {
+ derefValues();
+ }
_impBase->size = 0;
}
listSizeHighWaterMark = imp->size;
#endif
- v->ref();
+ if (imp->valueRefCount > 0) {
+ v->ref();
+ }
if (i < inlineValuesSize) {
imp->values[i] = v;
imp->overflow[i - inlineValuesSize] = v;
}
-List List::copyTail() const
+List List::copy() const
{
List copy;
return copy;
}
+
+List List::copyTail() const
+{
+ List copy;
+
+ ListImp *imp = static_cast<ListImp *>(_impBase);
+
+ int size = imp->size;
+
+ int inlineSize = MIN(size, inlineValuesSize);
+ for (int i = 1; i != inlineSize; ++i)
+ copy.append(imp->values[i]);
+
+ ValueImp **overflow = imp->overflow;
+ int overflowSize = size - inlineSize;
+ for (int i = 0; i != overflowSize; ++i)
+ copy.append(overflow[i]);
+
+ return copy;
+}
+
const List &List::empty()
{
static List emptyList;
struct ListImpBase {
int size;
int refCount;
+ int valueRefCount;
};
class ListIterator;
class List {
public:
List();
+ List(bool needsMarking);
~List() { deref(); }
- List(const List &b) : _impBase(b._impBase) { ++_impBase->refCount; }
+ List(const List &b) : _impBase(b._impBase), _needsMarking(false) {
+ ++_impBase->refCount;
+ if (!_impBase->valueRefCount) refValues();
+ ++_impBase->valueRefCount;
+ }
List &operator=(const List &);
/**
* Remove all elements from the list.
*/
void clear();
+
+ /**
+ * Make a copy of the list
+ */
+ List copy() const;
+
/**
* Make a copy of the list, omitting the first element.
*/
*/
static const List &empty();
+ void mark() { if (_impBase->valueRefCount == 0) markValues(); }
private:
ListImpBase *_impBase;
+ bool _needsMarking;
- void deref() { if (--_impBase->refCount == 0) release(); }
+ void deref() { if (!_needsMarking && --_impBase->valueRefCount == 0) derefValues(); if (--_impBase->refCount == 0) release(); }
void release();
+ void refValues();
void derefValues();
+ void markValues();
};
/**
++bImpBase->refCount;
deref();
_impBase = bImpBase;
+ if (!_needsMarking) {
+ if (!_impBase->valueRefCount) {
+ refValues();
+ }
+ _impBase->valueRefCount++;
+ }
+
return *this;
}
rep = v;
#if DEBUG_COLLECTOR
assert (!(rep && !SimpleNumber::is(rep) && *((uint32_t *)rep) == 0 ));
- assert (!(rep && !SimpleNumber::is(rep) && rep->_flags & VI_MARKED));
+ assert (!(rep && !SimpleNumber::is(rep) && rep->_flags & ValueImp::VI_MARKED));
#endif
if (v)
{
rep = v.imp();
#if DEBUG_COLLECTOR
assert (!(rep && !SimpleNumber::is(rep) && *((uint32_t *)rep) == 0 ));
- assert (!(rep && !SimpleNumber::is(rep) && rep->_flags & VI_MARKED));
+ assert (!(rep && !SimpleNumber::is(rep) && rep->_flags & ValueImp::VI_MARKED));
#endif
if (rep)
{