Index: test/cctest/test-accessors.cc
|
===================================================================
|
--- test/cctest/test-accessors.cc (revision 3237)
|
+++ test/cctest/test-accessors.cc Sun Nov 15 12:35:57 MSK 2009
|
@@ -43,6 +43,7 @@
|
using ::v8::Function;
|
using ::v8::AccessorInfo;
|
using ::v8::Extension;
|
+using ::v8::v8_context;
|
|
namespace i = ::v8::internal;
|
|
@@ -223,7 +224,7 @@
|
LocalContext context;
|
v8::Handle<v8::Object> inst = obj->NewInstance();
|
context->Global()->Set(v8::String::New("obj"), inst);
|
- int count_before = i::HandleScope::NumberOfHandles();
|
+ int count_before = v8_context()->handle_scope_implementer_.NumberOfHandles();
|
{
|
v8::HandleScope scope;
|
CompileRun(
|
@@ -232,7 +233,7 @@
|
" obj.many;"
|
"}");
|
}
|
- int count_after = i::HandleScope::NumberOfHandles();
|
+ int count_after = v8_context()->handle_scope_implementer_.NumberOfHandles();
|
CHECK_EQ(count_before, count_after);
|
}
|
|
@@ -243,7 +244,7 @@
|
ApiTestFuzzer::Fuzz();
|
CHECK(info.This() == info.Holder());
|
CHECK(info.Data()->Equals(v8::String::New("data")));
|
- i::Heap::CollectAllGarbage(true);
|
+ v8_context()->heap_.CollectAllGarbage(true);
|
CHECK(info.This() == info.Holder());
|
CHECK(info.Data()->Equals(v8::String::New("data")));
|
return v8::Integer::New(17);
|
Index: src/serialize.cc
|
===================================================================
|
--- src/serialize.cc (revision 3238)
|
+++ src/serialize.cc Sat Nov 14 01:43:01 MSK 2009
|
@@ -266,7 +266,7 @@
|
// A SimulatedHeapSpace simulates the allocation of objects in a page in
|
// the heap. It uses linear allocation - that is, it doesn't simulate the
|
// use of a free list. This simulated
|
-// allocation must exactly match that done by Heap.
|
+// allocation must exactly match that done by heap.
|
|
class SimulatedHeapSpace {
|
public:
|
@@ -310,6 +310,7 @@
|
|
|
void SimulatedHeapSpace::InitCurrentHeap(AllocationSpace space) {
|
+ Heap& heap = v8_context()->heap_;
|
switch (space) {
|
case MAP_SPACE:
|
case CELL_SPACE:
|
@@ -318,16 +319,16 @@
|
case CODE_SPACE: {
|
PagedSpace* ps;
|
if (space == MAP_SPACE) {
|
- ps = Heap::map_space();
|
+ ps = heap.map_space();
|
} else if (space == CELL_SPACE) {
|
- ps = Heap::cell_space();
|
+ ps = heap.cell_space();
|
} else if (space == OLD_POINTER_SPACE) {
|
- ps = Heap::old_pointer_space();
|
+ ps = heap.old_pointer_space();
|
} else if (space == OLD_DATA_SPACE) {
|
- ps = Heap::old_data_space();
|
+ ps = heap.old_data_space();
|
} else {
|
ASSERT(space == CODE_SPACE);
|
- ps = Heap::code_space();
|
+ ps = heap.code_space();
|
}
|
Address top = ps->top();
|
Page* top_page = Page::FromAllocationTop(top);
|
@@ -345,11 +346,11 @@
|
case NEW_SPACE:
|
current_ = RelativeAddress(space,
|
0,
|
- Heap::NewSpaceTop() - Heap::NewSpaceStart());
|
+ heap.NewSpaceTop() - heap.NewSpaceStart());
|
break;
|
case LO_SPACE:
|
int page_index = 0;
|
- for (LargeObjectIterator it(Heap::lo_space()); it.has_next(); it.next()) {
|
+ for (LargeObjectIterator it(heap.lo_space()); it.has_next(); it.next()) {
|
page_index++;
|
}
|
current_ = RelativeAddress(space, page_index, 0);
|
@@ -411,8 +412,9 @@
|
class ExternalReferenceTable {
|
public:
|
static ExternalReferenceTable* instance() {
|
- if (!instance_) instance_ = new ExternalReferenceTable();
|
- return instance_;
|
+ ExternalReferenceTable*& instance = v8_context()->external_reference_table_;
|
+ if (!instance) instance = new ExternalReferenceTable();
|
+ return instance;
|
}
|
|
int size() const { return refs_.length(); }
|
@@ -426,8 +428,6 @@
|
int max_id(int code) { return max_id_[code]; }
|
|
private:
|
- static ExternalReferenceTable* instance_;
|
-
|
ExternalReferenceTable() : refs_(64) { PopulateTable(); }
|
~ExternalReferenceTable() { }
|
|
@@ -449,10 +449,6 @@
|
int max_id_[kTypeCodeCount];
|
};
|
|
-
|
-ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL;
|
-
|
-
|
void ExternalReferenceTable::AddFromId(TypeCode type,
|
uint16_t id,
|
const char* name) {
|
@@ -594,9 +590,9 @@
|
const char* name;
|
};
|
|
- static const StatsRefTableEntry stats_ref_table[] = {
|
+ const StatsRefTableEntry stats_ref_table[] = {
|
#define COUNTER_ENTRY(name, caption) \
|
- { &Counters::name, \
|
+ { &v8_context()->counters_.name, \
|
Counters::k_##name, \
|
"Counters::" #name },
|
|
@@ -631,7 +627,7 @@
|
Vector<char>::New(top_format_length + strlen(address_name) + 1);
|
const char* chars = name.start();
|
OS::SNPrintF(name, top_address_format, address_name);
|
- Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
|
+ Add(v8_context()->top_.get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
|
}
|
|
// Extensions
|
@@ -1090,11 +1086,12 @@
|
|
|
void Serializer::Serialize() {
|
+ V8Context* const v8context = v8_context();
|
// No active threads.
|
- CHECK_EQ(NULL, ThreadState::FirstInUse());
|
+ CHECK_EQ(NULL, v8context->thread_manager_.FirstInUse());
|
// No active or weak handles.
|
- CHECK(HandleScopeImplementer::instance()->blocks()->is_empty());
|
- CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles());
|
+ CHECK(v8context->handle_scope_implementer_.blocks()->is_empty());
|
+ CHECK_EQ(0, v8context->global_handles_.NumberOfWeakHandles());
|
// We need a counter function during serialization to resolve the
|
// references to counters in the code on the heap.
|
CHECK(StatsTable::HasCounterFunction());
|
@@ -1102,7 +1099,7 @@
|
InitializeAllocators();
|
reference_encoder_ = new ExternalReferenceEncoder();
|
PutHeader();
|
- Heap::IterateRoots(this, VISIT_ONLY_STRONG);
|
+ v8context->heap_.IterateRoots(this, VISIT_ONLY_STRONG);
|
PutLog();
|
PutContextStack();
|
Disable();
|
@@ -1174,6 +1171,7 @@
|
|
|
void Serializer::PutHeader() {
|
+ Heap& heap = v8_context()->heap_;
|
PutFlags();
|
writer_->PutC('D');
|
#ifdef DEBUG
|
@@ -1190,22 +1188,22 @@
|
// and code spaces, because objects in new space will be promoted to them.
|
writer_->PutC('S');
|
writer_->PutC('[');
|
- writer_->PutInt(Heap::old_pointer_space()->Size() +
|
- Heap::new_space()->Size());
|
+ writer_->PutInt(heap.old_pointer_space()->Size() +
|
+ heap.new_space()->Size());
|
writer_->PutC('|');
|
- writer_->PutInt(Heap::old_data_space()->Size() + Heap::new_space()->Size());
|
+ writer_->PutInt(heap.old_data_space()->Size() + heap.new_space()->Size());
|
writer_->PutC('|');
|
- writer_->PutInt(Heap::code_space()->Size() + Heap::new_space()->Size());
|
+ writer_->PutInt(heap.code_space()->Size() + heap.new_space()->Size());
|
writer_->PutC('|');
|
- writer_->PutInt(Heap::map_space()->Size());
|
+ writer_->PutInt(heap.map_space()->Size());
|
writer_->PutC('|');
|
- writer_->PutInt(Heap::cell_space()->Size());
|
+ writer_->PutInt(heap.cell_space()->Size());
|
writer_->PutC(']');
|
// Write global handles.
|
writer_->PutC('G');
|
writer_->PutC('[');
|
GlobalHandlesRetriever ghr(&global_handles_);
|
- GlobalHandles::IterateStrongRoots(&ghr);
|
+ v8_context()->global_handles_.IterateStrongRoots(&ghr);
|
for (int i = 0; i < global_handles_.length(); i++) {
|
writer_->PutC('N');
|
}
|
@@ -1216,7 +1214,7 @@
|
void Serializer::PutLog() {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
if (FLAG_log_code) {
|
- Logger::TearDown();
|
+ v8_context()->logger_.TearDown();
|
int pos = writer_->InsertC('L', flags_end_);
|
bool exists;
|
Vector<const char> log = ReadFile(FLAG_logfile, &exists);
|
@@ -1250,13 +1248,13 @@
|
|
void Serializer::PutContextStack() {
|
List<Context*> contexts(2);
|
- while (HandleScopeImplementer::instance()->HasSavedContexts()) {
|
- Context* context =
|
- HandleScopeImplementer::instance()->RestoreContext();
|
+ HandleScopeImplementer& handle_scope_implementer = v8_context()->handle_scope_implementer_;
|
+ while (handle_scope_implementer.HasSavedContexts()) {
|
+ Context* context = handle_scope_implementer.RestoreContext();
|
contexts.Add(context);
|
}
|
for (int i = contexts.length() - 1; i >= 0; i--) {
|
- HandleScopeImplementer::instance()->SaveContext(contexts[i]);
|
+ handle_scope_implementer.SaveContext(contexts[i]);
|
}
|
writer_->PutC('C');
|
writer_->PutC('[');
|
@@ -1341,23 +1339,24 @@
|
|
|
RelativeAddress Serializer::Allocate(HeapObject* obj) {
|
+ Heap& heap = v8_context()->heap_;
|
// Find out which AllocationSpace 'obj' is in.
|
AllocationSpace s;
|
bool found = false;
|
for (int i = FIRST_SPACE; !found && i <= LAST_SPACE; i++) {
|
s = static_cast<AllocationSpace>(i);
|
- found = Heap::InSpace(obj, s);
|
+ found = heap.InSpace(obj, s);
|
}
|
CHECK(found);
|
int size = obj->Size();
|
if (s == NEW_SPACE) {
|
- if (size > Heap::MaxObjectSizeInPagedSpace()) {
|
+ if (size > heap.MaxObjectSizeInPagedSpace()) {
|
s = LO_SPACE;
|
} else {
|
- OldSpace* space = Heap::TargetSpace(obj);
|
- ASSERT(space == Heap::old_pointer_space() ||
|
- space == Heap::old_data_space());
|
- s = (space == Heap::old_pointer_space()) ?
|
+ OldSpace* space = heap.TargetSpace(obj);
|
+ ASSERT(space == heap.old_pointer_space() ||
|
+ space == heap.old_data_space());
|
+ s = (space == heap.old_pointer_space()) ?
|
OLD_POINTER_SPACE :
|
OLD_DATA_SPACE;
|
}
|
@@ -1432,8 +1431,9 @@
|
|
class GlobalHandleDestroyer : public ObjectVisitor {
|
void VisitPointers(Object**start, Object**end) {
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
while (start < end) {
|
- GlobalHandles::Destroy(start++);
|
+ global_handles.Destroy(start++);
|
}
|
}
|
};
|
@@ -1442,21 +1442,22 @@
|
void Deserializer::Deserialize() {
|
// No global handles.
|
NoGlobalHandlesChecker checker;
|
- GlobalHandles::IterateStrongRoots(&checker);
|
+ V8Context * v8context = v8_context();
|
+ v8context->global_handles_.IterateStrongRoots(&checker);
|
// No active threads.
|
- ASSERT_EQ(NULL, ThreadState::FirstInUse());
|
+ ASSERT_EQ(NULL, v8context->thread_manager_.FirstInUse());
|
// No active handles.
|
- ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty());
|
+ ASSERT(v8context->handle_scope_implementer_.blocks()->is_empty());
|
reference_decoder_ = new ExternalReferenceDecoder();
|
// By setting linear allocation only, we forbid the use of free list
|
// allocation which is not predicted by SimulatedAddress.
|
GetHeader();
|
- Heap::IterateRoots(this, VISIT_ONLY_STRONG);
|
+ v8context->heap_.IterateRoots(this, VISIT_ONLY_STRONG);
|
GetContextStack();
|
// Any global handles that have been set up by deserialization are leaked
|
// since noone is keeping track of them. So we discard them now.
|
GlobalHandleDestroyer destroyer;
|
- GlobalHandles::IterateStrongRoots(&destroyer);
|
+ v8context->global_handles_.IterateStrongRoots(&destroyer);
|
}
|
|
|
@@ -1557,7 +1558,7 @@
|
int capacity,
|
List<Page*>* page_list) {
|
if (!space->EnsureCapacity(capacity)) {
|
- V8::FatalProcessOutOfMemory("InitPagedSpace");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("InitPagedSpace");
|
}
|
PageIterator it(space, PageIterator::ALL_PAGES);
|
while (it.has_next()) page_list->Add(it.next());
|
@@ -1565,6 +1566,7 @@
|
|
|
void Deserializer::GetHeader() {
|
+ Heap& heap = v8_context()->heap_;
|
reader_.ExpectC('D');
|
#ifdef DEBUG
|
expect_debug_information_ = reader_.GetC() == '1';
|
@@ -1582,17 +1584,17 @@
|
// during deserialization.
|
reader_.ExpectC('S');
|
reader_.ExpectC('[');
|
- InitPagedSpace(Heap::old_pointer_space(),
|
+ InitPagedSpace(heap.old_pointer_space(),
|
reader_.GetInt(),
|
&old_pointer_pages_);
|
reader_.ExpectC('|');
|
- InitPagedSpace(Heap::old_data_space(), reader_.GetInt(), &old_data_pages_);
|
+ InitPagedSpace(heap.old_data_space(), reader_.GetInt(), &old_data_pages_);
|
reader_.ExpectC('|');
|
- InitPagedSpace(Heap::code_space(), reader_.GetInt(), &code_pages_);
|
+ InitPagedSpace(heap.code_space(), reader_.GetInt(), &code_pages_);
|
reader_.ExpectC('|');
|
- InitPagedSpace(Heap::map_space(), reader_.GetInt(), &map_pages_);
|
+ InitPagedSpace(heap.map_space(), reader_.GetInt(), &map_pages_);
|
reader_.ExpectC('|');
|
- InitPagedSpace(Heap::cell_space(), reader_.GetInt(), &cell_pages_);
|
+ InitPagedSpace(heap.cell_space(), reader_.GetInt(), &cell_pages_);
|
reader_.ExpectC(']');
|
// Create placeholders for global handles later to be fill during
|
// IterateRoots.
|
@@ -1601,7 +1603,7 @@
|
int c = reader_.GetC();
|
while (c != ']') {
|
ASSERT(c == 'N');
|
- global_handles_.Add(GlobalHandles::Create(NULL).location());
|
+ global_handles_.Add(v8_context()->global_handles_.Create(NULL).location());
|
c = reader_.GetC();
|
}
|
}
|
@@ -1629,8 +1631,9 @@
|
VisitPointers(start, start + count);
|
}
|
reader_.ExpectC(']');
|
+ HandleScopeImplementer& handle_scope_implementer = v8_context()->handle_scope_implementer_;
|
for (int i = 0; i < count; i++) {
|
- HandleScopeImplementer::instance()->SaveContext(entered_contexts[i]);
|
+ handle_scope_implementer.SaveContext(entered_contexts[i]);
|
}
|
}
|
|
@@ -1649,16 +1652,17 @@
|
|
// Get a raw object of the right size in the right space.
|
AllocationSpace space = GetSpace(a);
|
+ Heap& heap = v8_context()->heap_;
|
Object* o;
|
if (IsLargeExecutableObject(a)) {
|
- o = Heap::lo_space()->AllocateRawCode(size);
|
+ o = heap.lo_space()->AllocateRawCode(size);
|
} else if (IsLargeFixedArray(a)) {
|
- o = Heap::lo_space()->AllocateRawFixedArray(size);
|
+ o = heap.lo_space()->AllocateRawFixedArray(size);
|
} else {
|
AllocationSpace retry_space = (space == NEW_SPACE)
|
- ? Heap::TargetSpaceId(type)
|
+ ? heap.TargetSpaceId(type)
|
: space;
|
- o = Heap::AllocateRaw(size, space, retry_space);
|
+ o = heap.AllocateRaw(size, space, retry_space);
|
}
|
ASSERT(!o->IsFailure());
|
// Check that the simulation of heap allocation was correct.
|
@@ -1720,6 +1724,7 @@
|
|
// Encoded addresses of HeapObjects always have 'HeapObject' tags.
|
ASSERT(o->IsHeapObject());
|
+ Heap& heap = v8_context()->heap_;
|
switch (GetSpace(encoded)) {
|
// For Map space and Old space, we cache the known Pages in map_pages,
|
// old_pointer_pages and old_data_pages. Even though MapSpace keeps a list
|
@@ -1727,30 +1732,30 @@
|
// and that appears not to update the page list.
|
case MAP_SPACE:
|
return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
|
- Heap::map_space(), &map_pages_);
|
+ heap.map_space(), &map_pages_);
|
case CELL_SPACE:
|
return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
|
- Heap::cell_space(), &cell_pages_);
|
+ heap.cell_space(), &cell_pages_);
|
case OLD_POINTER_SPACE:
|
return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
|
- Heap::old_pointer_space(), &old_pointer_pages_);
|
+ heap.old_pointer_space(), &old_pointer_pages_);
|
case OLD_DATA_SPACE:
|
return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
|
- Heap::old_data_space(), &old_data_pages_);
|
+ heap.old_data_space(), &old_data_pages_);
|
case CODE_SPACE:
|
return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
|
- Heap::code_space(), &code_pages_);
|
+ heap.code_space(), &code_pages_);
|
case NEW_SPACE:
|
- return HeapObject::FromAddress(Heap::NewSpaceStart() +
|
+ return HeapObject::FromAddress(heap.NewSpaceStart() +
|
NewSpaceOffset(encoded));
|
case LO_SPACE:
|
// Cache the known large_objects, allocated one per 'page'
|
int index = LargeObjectIndex(encoded);
|
if (index >= large_objects_.length()) {
|
int new_object_count =
|
- Heap::lo_space()->PageCount() - large_objects_.length();
|
+ heap.lo_space()->PageCount() - large_objects_.length();
|
List<Object*> new_objects(new_object_count);
|
- LargeObjectIterator it(Heap::lo_space());
|
+ LargeObjectIterator it(heap.lo_space());
|
for (int i = 0; i < new_object_count; i++) {
|
new_objects.Add(it.next());
|
}
|
@@ -1852,14 +1857,15 @@
|
// Don't GC while deserializing - just expand the heap.
|
AlwaysAllocateScope always_allocate;
|
// Don't use the free lists while deserializing.
|
+ V8Context * v8context = v8_context();
|
LinearAllocationScope allocate_linearly;
|
// No active threads.
|
- ASSERT_EQ(NULL, ThreadState::FirstInUse());
|
+ ASSERT_EQ(NULL, v8context->thread_manager_.FirstInUse());
|
// No active handles.
|
- ASSERT(HandleScopeImplementer::instance()->blocks()->is_empty());
|
+ ASSERT(v8context->handle_scope_implementer_.blocks()->is_empty());
|
ASSERT(external_reference_decoder_ == NULL);
|
external_reference_decoder_ = new ExternalReferenceDecoder();
|
- Heap::IterateRoots(this, VISIT_ONLY_STRONG);
|
+ v8context->heap_.IterateRoots(this, VISIT_ONLY_STRONG);
|
ASSERT(source_->AtEOF());
|
delete external_reference_decoder_;
|
external_reference_decoder_ = NULL;
|
@@ -1909,6 +1915,7 @@
|
Object** limit,
|
int space,
|
Address address) {
|
+ Heap& heap = v8_context()->heap_;
|
while (current < limit) {
|
int data = source_->Get();
|
switch (data) {
|
@@ -1929,41 +1936,41 @@
|
break;
|
}
|
case OBJECT_SERIALIZATION + NEW_SPACE: {
|
- ReadObject(NEW_SPACE, Heap::new_space(), current);
|
+ ReadObject(NEW_SPACE, heap.new_space(), current);
|
if (space != NEW_SPACE) {
|
- Heap::RecordWrite(address,
|
+ heap.RecordWrite(address,
|
reinterpret_cast<Address>(current) - address);
|
}
|
current++;
|
break;
|
}
|
case OBJECT_SERIALIZATION + OLD_DATA_SPACE:
|
- ReadObject(OLD_DATA_SPACE, Heap::old_data_space(), current++);
|
+ ReadObject(OLD_DATA_SPACE, heap.old_data_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + OLD_POINTER_SPACE:
|
- ReadObject(OLD_POINTER_SPACE, Heap::old_pointer_space(), current++);
|
+ ReadObject(OLD_POINTER_SPACE, heap.old_pointer_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + MAP_SPACE:
|
- ReadObject(MAP_SPACE, Heap::map_space(), current++);
|
+ ReadObject(MAP_SPACE, heap.map_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + CODE_SPACE:
|
- ReadObject(CODE_SPACE, Heap::code_space(), current++);
|
+ ReadObject(CODE_SPACE, heap.code_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + CELL_SPACE:
|
- ReadObject(CELL_SPACE, Heap::cell_space(), current++);
|
+ ReadObject(CELL_SPACE, heap.cell_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + kLargeData:
|
- ReadObject(kLargeData, Heap::lo_space(), current++);
|
+ ReadObject(kLargeData, heap.lo_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + kLargeCode:
|
- ReadObject(kLargeCode, Heap::lo_space(), current++);
|
+ ReadObject(kLargeCode, heap.lo_space(), current++);
|
break;
|
case OBJECT_SERIALIZATION + kLargeFixedArray:
|
- ReadObject(kLargeFixedArray, Heap::lo_space(), current++);
|
+ ReadObject(kLargeFixedArray, heap.lo_space(), current++);
|
break;
|
case CODE_OBJECT_SERIALIZATION + kLargeCode: {
|
Object* new_code_object = NULL;
|
- ReadObject(kLargeCode, Heap::lo_space(), &new_code_object);
|
+ ReadObject(kLargeCode, heap.lo_space(), &new_code_object);
|
Code* code_object = reinterpret_cast<Code*>(new_code_object);
|
// Setting a branch/call to another code object from code.
|
Address location_of_branch_data = reinterpret_cast<Address>(current);
|
@@ -1975,7 +1982,7 @@
|
}
|
case CODE_OBJECT_SERIALIZATION + CODE_SPACE: {
|
Object* new_code_object = NULL;
|
- ReadObject(CODE_SPACE, Heap::code_space(), &new_code_object);
|
+ ReadObject(CODE_SPACE, heap.code_space(), &new_code_object);
|
Code* code_object = reinterpret_cast<Code*>(new_code_object);
|
// Setting a branch/call to another code object from code.
|
Address location_of_branch_data = reinterpret_cast<Address>(current);
|
@@ -1989,7 +1996,7 @@
|
// Write a backreference to an object we unpacked earlier.
|
int backref_space = (data & kSpaceMask);
|
if (backref_space == NEW_SPACE && space != NEW_SPACE) {
|
- Heap::RecordWrite(address,
|
+ heap.RecordWrite(address,
|
reinterpret_cast<Address>(current) - address);
|
}
|
*current++ = GetAddressFromEnd(backref_space);
|
@@ -1999,7 +2006,7 @@
|
// Write a reference to an object we unpacked earlier.
|
int reference_space = (data & kSpaceMask);
|
if (reference_space == NEW_SPACE && space != NEW_SPACE) {
|
- Heap::RecordWrite(address,
|
+ heap.RecordWrite(address,
|
reinterpret_cast<Address>(current) - address);
|
}
|
*current++ = GetAddressFromStart(reference_space);
|
@@ -2132,13 +2139,14 @@
|
|
void Serializer2::Serialize() {
|
// No active threads.
|
- CHECK_EQ(NULL, ThreadState::FirstInUse());
|
+ V8Context* v8context = v8_context();
|
+ CHECK_EQ(NULL, v8context->thread_manager_.FirstInUse());
|
// No active or weak handles.
|
- CHECK(HandleScopeImplementer::instance()->blocks()->is_empty());
|
- CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles());
|
+ CHECK(v8context->handle_scope_implementer_.blocks()->is_empty());
|
+ CHECK_EQ(0, v8context->global_handles_.NumberOfWeakHandles());
|
ASSERT(external_reference_encoder_ == NULL);
|
external_reference_encoder_ = new ExternalReferenceEncoder();
|
- Heap::IterateRoots(this, VISIT_ONLY_STRONG);
|
+ v8context->heap_.IterateRoots(this, VISIT_ONLY_STRONG);
|
delete external_reference_encoder_;
|
external_reference_encoder_ = NULL;
|
}
|
@@ -2311,11 +2319,12 @@
|
|
void Serializer2::ObjectSerializer::VisitExternalAsciiString(
|
v8::String::ExternalAsciiStringResource** resource_pointer) {
|
+ V8Context* const v8context = v8_context();
|
Address references_start = reinterpret_cast<Address>(resource_pointer);
|
OutputRawData(references_start);
|
for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
|
// Use raw_unchecked when maps are munged.
|
- Object* source = Heap::raw_unchecked_natives_source_cache()->get(i);
|
+ Object* source = v8context->heap_.raw_unchecked_natives_source_cache()->get(i);
|
if (!source->IsUndefined()) {
|
// Don't use cast when maps are munged.
|
ExternalAsciiString* string =
|
@@ -2365,9 +2374,10 @@
|
|
|
int Serializer2::SpaceOfObject(HeapObject* object) {
|
+ Heap& heap = v8_context()->heap_;
|
for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
|
AllocationSpace s = static_cast<AllocationSpace>(i);
|
- if (Heap::InSpace(object, s)) {
|
+ if (heap.InSpace(object, s)) {
|
if (i == LO_SPACE) {
|
if (object->IsCode()) {
|
return kLargeCode;
|
@@ -2386,9 +2396,10 @@
|
|
|
int Serializer2::SpaceOfAlreadySerializedObject(HeapObject* object) {
|
+ Heap& heap = v8_context()->heap_;
|
for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
|
AllocationSpace s = static_cast<AllocationSpace>(i);
|
- if (Heap::InSpace(object, s)) {
|
+ if (heap.InSpace(object, s)) {
|
return i;
|
}
|
}
|
Index: src/disasm.h
|
===================================================================
|
--- src/disasm.h (revision 1389)
|
+++ src/disasm.h Sat Nov 14 01:43:21 MSK 2009
|
@@ -72,6 +72,12 @@
|
DISALLOW_IMPLICIT_CONSTRUCTORS(Disassembler);
|
};
|
|
+class DisassemblerData {
|
+public:
|
+ v8::internal::EmbeddedVector<char, 128> buffer_;
|
+ v8::internal::EmbeddedVector<char, 32> tmp_buffer_;
|
+};
|
+
|
} // namespace disasm
|
|
#endif // V8_DISASM_H_
|
Index: src/code-stubs.cc
|
===================================================================
|
--- src/code-stubs.cc (revision 3209)
|
+++ src/code-stubs.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -40,6 +40,8 @@
|
|
int index = 0;
|
uint32_t key = 0;
|
+ V8Context* const v8context = v8_context();
|
+
|
if (custom_cache) {
|
Code* cached;
|
if (GetCustomCache(&cached)) {
|
@@ -49,9 +51,9 @@
|
}
|
} else {
|
key = GetKey();
|
- index = Heap::code_stubs()->FindEntry(key);
|
+ index = v8context->heap_.code_stubs()->FindEntry(key);
|
if (index != NumberDictionary::kNotFound)
|
- return Handle<Code>(Code::cast(Heap::code_stubs()->ValueAt(index)));
|
+ return Handle<Code>(Code::cast(v8context->heap_.code_stubs()->ValueAt(index)));
|
}
|
|
Code* result;
|
@@ -59,7 +61,7 @@
|
v8::HandleScope scope;
|
|
// Update the static counter each time a new code stub is generated.
|
- Counters::code_stubs.Increment();
|
+ v8context->counters_.code_stubs.Increment();
|
|
// Generate the new code.
|
MacroAssembler masm(NULL, 256);
|
@@ -81,10 +83,10 @@
|
code->set_major_key(MajorKey());
|
|
// Add unresolved entries in the code to the fixup list.
|
- Bootstrapper::AddFixup(*code, &masm);
|
+ v8context->bootstrapper_.AddFixup(*code, &masm);
|
|
LOG(CodeCreateEvent(Logger::STUB_TAG, *code, GetName()));
|
- Counters::total_stubs_code_size.Increment(code->instruction_size());
|
+ v8context->counters_.total_stubs_code_size.Increment(code->instruction_size());
|
|
#ifdef ENABLE_DISASSEMBLER
|
if (FLAG_print_code_stubs) {
|
@@ -102,10 +104,10 @@
|
// Update the dictionary and the root in Heap.
|
Handle<NumberDictionary> dict =
|
Factory::DictionaryAtNumberPut(
|
- Handle<NumberDictionary>(Heap::code_stubs()),
|
+ Handle<NumberDictionary>(v8_context()->heap_.code_stubs()),
|
key,
|
code);
|
- Heap::public_set_code_stubs(*dict);
|
+ v8_context()->heap_.public_set_code_stubs(*dict);
|
}
|
result = *code;
|
}
|
Index: src/log-inl.h
|
===================================================================
|
--- src/log-inl.h (revision 2855)
|
+++ src/log-inl.h Sat Nov 14 01:43:23 MSK 2009
|
@@ -55,8 +55,8 @@
|
}
|
}
|
|
-VMState::VMState(StateTag state) : disabled_(true) {
|
- if (!Logger::is_logging()) {
|
+VMState::VMState(StateTag state, Logger& logger) : disabled_(true) {
|
+ if (!logger.is_logging()) {
|
return;
|
}
|
|
@@ -69,8 +69,8 @@
|
if (state == EXTERNAL) state = OTHER;
|
#endif
|
state_ = state;
|
- previous_ = Logger::current_state_;
|
- Logger::current_state_ = this;
|
+ previous_ = logger.current_state_;
|
+ logger.current_state_ = this;
|
|
if (FLAG_log_state_changes) {
|
LOG(UncheckedStringEvent("Entering", StateToString(state_)));
|
@@ -84,10 +84,10 @@
|
if (state_ == EXTERNAL) {
|
// We are leaving V8.
|
ASSERT(previous_->state_ != EXTERNAL);
|
- Heap::Protect();
|
+ v8_context()->heap_.Protect();
|
} else if (previous_->state_ == EXTERNAL) {
|
// We are entering V8.
|
- Heap::Unprotect();
|
+ v8_context()->heap_.Unprotect();
|
}
|
}
|
#endif
|
@@ -96,7 +96,7 @@
|
|
VMState::~VMState() {
|
if (disabled_) return;
|
- Logger::current_state_ = previous_;
|
+ v8_context()->logger_.current_state_ = previous_;
|
|
if (FLAG_log_state_changes) {
|
LOG(UncheckedStringEvent("Leaving", StateToString(state_)));
|
@@ -110,10 +110,10 @@
|
if (state_ == EXTERNAL) {
|
// We are reentering V8.
|
ASSERT(previous_->state_ != EXTERNAL);
|
- Heap::Unprotect();
|
+ v8_context()->heap_.Unprotect();
|
} else if (previous_->state_ == EXTERNAL) {
|
// We are leaving V8.
|
- Heap::Protect();
|
+ v8_context()->heap_.Protect();
|
}
|
}
|
#endif
|
Index: src/top.cc
|
===================================================================
|
--- src/top.cc (revision 3228)
|
+++ src/top.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -38,20 +38,155 @@
|
namespace v8 {
|
namespace internal {
|
|
-ThreadLocalTop Top::thread_local_;
|
-Mutex* Top::break_access_ = OS::CreateMutex();
|
+// Create a dummy thread that will wait forever on a semaphore. The only
|
+// purpose for this thread is to have some stack area to save essential data
|
+// into for use by a stacks only core dump (aka minidump).
|
+class PreallocatedMemoryThread: public Thread {
|
+ public:
|
+ PreallocatedMemoryThread() : keep_running_(true),
|
+ wait_for_ever_semaphore_(OS::CreateSemaphore(0)), data_ready_semaphore_ (OS::CreateSemaphore(0)),
|
+ data_(NULL), length_(0) {
|
+ }
|
|
-NoAllocationStringAllocator* preallocated_message_space = NULL;
|
+ // When the thread starts running it will allocate a fixed number of bytes
|
+ // on the stack and publish the location of this memory for others to use.
|
+ void Run() {
|
+ EmbeddedVector<char, 15 * 1024> local_buffer;
|
|
-Address top_addresses[] = {
|
-#define C(name) reinterpret_cast<Address>(Top::name()),
|
+ // Initialize the buffer with a known good value.
|
+ OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
|
+ local_buffer.length());
|
+
|
+ // Publish the local buffer and signal its availability.
|
+ data_ = local_buffer.start();
|
+ length_ = local_buffer.length();
|
+ data_ready_semaphore_->Signal();
|
+
|
+ while (keep_running_) {
|
+ // This thread will wait here until the end of time.
|
+ wait_for_ever_semaphore_->Wait();
|
+ }
|
+
|
+ // Make sure we access the buffer after the wait to remove all possibility
|
+ // of it being optimized away.
|
+ OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
|
+ local_buffer.length());
|
+ }
|
+
|
+ void RequestEnd() {
|
+ keep_running_ = false;
|
+ wait_for_ever_semaphore_->Signal();
|
+
|
+ // Wait for the thread to terminate.
|
+ Join();
|
+
|
+ if (data_ready_semaphore_ != NULL) {
|
+ delete data_ready_semaphore_;
|
+ data_ready_semaphore_ = NULL;
|
+ }
|
+
|
+ delete wait_for_ever_semaphore_;
|
+ wait_for_ever_semaphore_ = NULL;
|
+ }
|
+
|
+ char* data() {
|
+ if (data_ready_semaphore_ != NULL) {
|
+ // Initial access is guarded until the data has been published.
|
+ data_ready_semaphore_->Wait();
|
+ delete data_ready_semaphore_;
|
+ data_ready_semaphore_ = NULL;
|
+ }
|
+ return data_;
|
+ }
|
+
|
+ unsigned length() {
|
+ if (data_ready_semaphore_ != NULL) {
|
+ // Initial access is guarded until the data has been published.
|
+ data_ready_semaphore_->Wait();
|
+ delete data_ready_semaphore_;
|
+ data_ready_semaphore_ = NULL;
|
+ }
|
+ return length_;
|
+ }
|
+
|
+ private:
|
+ // Used to make sure that the thread keeps looping even for spurious wakeups.
|
+ bool keep_running_;
|
+
|
+ DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
|
+
|
+ // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
|
+ Semaphore* wait_for_ever_semaphore_;
|
+ // Semaphore to signal that the data has been initialized.
|
+ Semaphore* data_ready_semaphore_;
|
+
|
+ // Location and size of the preallocated memory block.
|
+ char* data_;
|
+ unsigned length_;
|
+};
|
+
|
+class Top::TopImpl {
|
+public:
|
+ // Debug.
|
+ // Mutex for serializing access to break control structures.
|
+ Mutex* break_access_;
|
+
|
+ NoAllocationStringAllocator* preallocated_message_space;
|
+ Address top_addresses[k_top_address_count];
|
+
|
+ // The preallocated memory thread singleton.
|
+ PreallocatedMemoryThread* the_thread_;
|
+ int stack_trace_nesting_level;
|
+ StringStream* incomplete_message;
|
+
|
+ bool initialized;
|
+
|
+ TopImpl(Top* top): initialized(false), break_access_(OS::CreateMutex()), preallocated_message_space (NULL),
|
+ incomplete_message(NULL), stack_trace_nesting_level(NULL), the_thread_(NULL) {
|
+ Address _top_addresses[] = {
|
+ #define C(name) reinterpret_cast<Address>(top->name()),
|
- TOP_ADDRESS_LIST(C)
|
- TOP_ADDRESS_LIST_PROF(C)
|
-#undef C
|
- NULL
|
-};
|
+ TOP_ADDRESS_LIST(C)
|
+ TOP_ADDRESS_LIST_PROF(C)
|
+ #undef C
|
+ NULL
|
+ };
|
+ for(int i = 0; i < k_top_address_count; ++i) top_addresses[i] = _top_addresses[i];
|
+ }
|
+ ~TopImpl() {
|
+ delete break_access_;
|
+ }
|
|
+ void StartThread() {
|
+ if (the_thread_ != NULL) return;
|
|
+ the_thread_ = new PreallocatedMemoryThread();
|
+ the_thread_->Start();
|
+ }
|
+
|
+ // Stop the PreallocatedMemoryThread and release its resources.
|
+ void StopThread() {
|
+ if (the_thread_ == NULL) return;
|
+ the_thread_->RequestEnd();
|
+
|
+ // Done with the thread entirely.
|
+ delete the_thread_;
|
+ the_thread_ = NULL;
|
+ }
|
+
|
+};
|
+
|
+Top::Top():top_impl(new TopImpl(this)) {}
|
+Top::~Top() { delete top_impl; }
|
+
|
+
|
+ThreadLocalTop::ThreadLocalTop():
|
+ context_(NULL),
|
+ has_pending_message_(false),
|
+ external_caught_exception_(false),
|
+ try_catch_handler_address_(NULL)
|
+{
|
+}
|
+
|
v8::TryCatch* ThreadLocalTop::TryCatchHandler() {
|
return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address());
|
}
|
@@ -66,7 +201,7 @@
|
stack_is_cooked_ = false;
|
try_catch_handler_address_ = NULL;
|
context_ = NULL;
|
- int id = ThreadManager::CurrentId();
|
+ int id = v8_context()->thread_manager_.CurrentId();
|
thread_id_ = (id == 0) ? ThreadManager::kInvalidId : id;
|
external_caught_exception_ = false;
|
failed_access_check_callback_ = NULL;
|
@@ -76,7 +211,7 @@
|
|
|
Address Top::get_address_from_id(Top::AddressId id) {
|
- return top_addresses[id];
|
+ return top_impl->top_addresses[id];
|
}
|
|
|
@@ -122,146 +257,34 @@
|
clear_scheduled_exception();
|
}
|
|
-
|
-// Create a dummy thread that will wait forever on a semaphore. The only
|
-// purpose for this thread is to have some stack area to save essential data
|
-// into for use by a stacks only core dump (aka minidump).
|
-class PreallocatedMemoryThread: public Thread {
|
- public:
|
- PreallocatedMemoryThread() : keep_running_(true) {
|
- wait_for_ever_semaphore_ = OS::CreateSemaphore(0);
|
- data_ready_semaphore_ = OS::CreateSemaphore(0);
|
- }
|
-
|
- // When the thread starts running it will allocate a fixed number of bytes
|
- // on the stack and publish the location of this memory for others to use.
|
- void Run() {
|
- EmbeddedVector<char, 15 * 1024> local_buffer;
|
-
|
- // Initialize the buffer with a known good value.
|
- OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
|
- local_buffer.length());
|
-
|
- // Publish the local buffer and signal its availability.
|
- data_ = local_buffer.start();
|
- length_ = local_buffer.length();
|
- data_ready_semaphore_->Signal();
|
-
|
- while (keep_running_) {
|
- // This thread will wait here until the end of time.
|
- wait_for_ever_semaphore_->Wait();
|
- }
|
-
|
- // Make sure we access the buffer after the wait to remove all possibility
|
- // of it being optimized away.
|
- OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
|
- local_buffer.length());
|
- }
|
-
|
- static char* data() {
|
- if (data_ready_semaphore_ != NULL) {
|
- // Initial access is guarded until the data has been published.
|
- data_ready_semaphore_->Wait();
|
- delete data_ready_semaphore_;
|
- data_ready_semaphore_ = NULL;
|
- }
|
- return data_;
|
- }
|
-
|
- static unsigned length() {
|
- if (data_ready_semaphore_ != NULL) {
|
- // Initial access is guarded until the data has been published.
|
- data_ready_semaphore_->Wait();
|
- delete data_ready_semaphore_;
|
- data_ready_semaphore_ = NULL;
|
- }
|
- return length_;
|
- }
|
-
|
- static void StartThread() {
|
- if (the_thread_ != NULL) return;
|
-
|
- the_thread_ = new PreallocatedMemoryThread();
|
- the_thread_->Start();
|
- }
|
-
|
- // Stop the PreallocatedMemoryThread and release its resources.
|
- static void StopThread() {
|
- if (the_thread_ == NULL) return;
|
-
|
- the_thread_->keep_running_ = false;
|
- wait_for_ever_semaphore_->Signal();
|
-
|
- // Wait for the thread to terminate.
|
- the_thread_->Join();
|
-
|
- if (data_ready_semaphore_ != NULL) {
|
- delete data_ready_semaphore_;
|
- data_ready_semaphore_ = NULL;
|
- }
|
-
|
- delete wait_for_ever_semaphore_;
|
- wait_for_ever_semaphore_ = NULL;
|
-
|
- // Done with the thread entirely.
|
- delete the_thread_;
|
- the_thread_ = NULL;
|
- }
|
-
|
- private:
|
- // Used to make sure that the thread keeps looping even for spurious wakeups.
|
- bool keep_running_;
|
-
|
- // The preallocated memory thread singleton.
|
- static PreallocatedMemoryThread* the_thread_;
|
- // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
|
- static Semaphore* wait_for_ever_semaphore_;
|
- // Semaphore to signal that the data has been initialized.
|
- static Semaphore* data_ready_semaphore_;
|
-
|
- // Location and size of the preallocated memory block.
|
- static char* data_;
|
- static unsigned length_;
|
-
|
- DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
|
-};
|
-
|
-PreallocatedMemoryThread* PreallocatedMemoryThread::the_thread_ = NULL;
|
-Semaphore* PreallocatedMemoryThread::wait_for_ever_semaphore_ = NULL;
|
-Semaphore* PreallocatedMemoryThread::data_ready_semaphore_ = NULL;
|
-char* PreallocatedMemoryThread::data_ = NULL;
|
-unsigned PreallocatedMemoryThread::length_ = 0;
|
-
|
-static bool initialized = false;
|
-
|
void Top::Initialize() {
|
- CHECK(!initialized);
|
+ CHECK(!top_impl->initialized);
|
|
InitializeThreadLocal();
|
|
// Only preallocate on the first initialization.
|
- if (FLAG_preallocate_message_memory && (preallocated_message_space == NULL)) {
|
+ if (FLAG_preallocate_message_memory && (top_impl->preallocated_message_space == NULL)) {
|
// Start the thread which will set aside some memory.
|
- PreallocatedMemoryThread::StartThread();
|
- preallocated_message_space =
|
- new NoAllocationStringAllocator(PreallocatedMemoryThread::data(),
|
- PreallocatedMemoryThread::length());
|
- PreallocatedStorage::Init(PreallocatedMemoryThread::length() / 4);
|
+ top_impl->StartThread();
|
+ top_impl->preallocated_message_space =
|
+ new NoAllocationStringAllocator(top_impl->the_thread_->data(),
|
+ top_impl->the_thread_->length());
|
+ PreallocatedStorage::Init(top_impl->the_thread_->length() / 4);
|
}
|
- initialized = true;
|
+ top_impl->initialized = true;
|
}
|
|
|
void Top::TearDown() {
|
- if (initialized) {
|
+ if (top_impl->initialized) {
|
// Remove the external reference to the preallocated stack memory.
|
- if (preallocated_message_space != NULL) {
|
- delete preallocated_message_space;
|
- preallocated_message_space = NULL;
|
+ if (top_impl->preallocated_message_space != NULL) {
|
+ delete top_impl->preallocated_message_space;
|
+ top_impl->preallocated_message_space = NULL;
|
}
|
|
- PreallocatedMemoryThread::StopThread();
|
- initialized = false;
|
+ top_impl->StopThread();
|
+ top_impl->initialized = false;
|
}
|
}
|
|
@@ -321,29 +344,25 @@
|
}
|
|
|
-static int stack_trace_nesting_level = 0;
|
-static StringStream* incomplete_message = NULL;
|
-
|
-
|
Handle<String> Top::StackTrace() {
|
- if (stack_trace_nesting_level == 0) {
|
- stack_trace_nesting_level++;
|
+ if (top_impl->stack_trace_nesting_level == 0) {
|
+ top_impl->stack_trace_nesting_level++;
|
HeapStringAllocator allocator;
|
StringStream::ClearMentionedObjectCache();
|
StringStream accumulator(&allocator);
|
- incomplete_message = &accumulator;
|
+ top_impl->incomplete_message = &accumulator;
|
PrintStack(&accumulator);
|
Handle<String> stack_trace = accumulator.ToString();
|
- incomplete_message = NULL;
|
- stack_trace_nesting_level = 0;
|
+ top_impl->incomplete_message = NULL;
|
+ top_impl->stack_trace_nesting_level = 0;
|
return stack_trace;
|
- } else if (stack_trace_nesting_level == 1) {
|
- stack_trace_nesting_level++;
|
+ } else if (top_impl->stack_trace_nesting_level == 1) {
|
+ top_impl->stack_trace_nesting_level++;
|
OS::PrintError(
|
"\n\nAttempt to print stack while printing stack (double fault)\n");
|
OS::PrintError(
|
"If you are lucky you may find a partial stack dump on stdout.\n\n");
|
- incomplete_message->OutputToStdOut();
|
+ top_impl->incomplete_message->OutputToStdOut();
|
return Factory::empty_symbol();
|
} else {
|
OS::Abort();
|
@@ -354,14 +373,14 @@
|
|
|
void Top::PrintStack() {
|
- if (stack_trace_nesting_level == 0) {
|
- stack_trace_nesting_level++;
|
+ if (top_impl->stack_trace_nesting_level == 0) {
|
+ top_impl->stack_trace_nesting_level++;
|
|
StringAllocator* allocator;
|
- if (preallocated_message_space == NULL) {
|
+ if (top_impl->preallocated_message_space == NULL) {
|
allocator = new HeapStringAllocator();
|
} else {
|
- allocator = preallocated_message_space;
|
+ allocator = top_impl->preallocated_message_space;
|
}
|
|
NativeAllocationChecker allocation_checker(
|
@@ -371,23 +390,23 @@
|
|
StringStream::ClearMentionedObjectCache();
|
StringStream accumulator(allocator);
|
- incomplete_message = &accumulator;
|
+ top_impl->incomplete_message = &accumulator;
|
PrintStack(&accumulator);
|
accumulator.OutputToStdOut();
|
accumulator.Log();
|
- incomplete_message = NULL;
|
- stack_trace_nesting_level = 0;
|
- if (preallocated_message_space == NULL) {
|
+ top_impl->incomplete_message = NULL;
|
+ top_impl->stack_trace_nesting_level = 0;
|
+ if (top_impl->preallocated_message_space == NULL) {
|
// Remove the HeapStringAllocator created above.
|
delete allocator;
|
}
|
- } else if (stack_trace_nesting_level == 1) {
|
- stack_trace_nesting_level++;
|
+ } else if (top_impl->stack_trace_nesting_level == 1) {
|
+ top_impl->stack_trace_nesting_level++;
|
OS::PrintError(
|
"\n\nAttempt to print stack while printing stack (double fault)\n");
|
OS::PrintError(
|
"If you are lucky you may find a partial stack dump on stdout.\n\n");
|
- incomplete_message->OutputToStdOut();
|
+ top_impl->incomplete_message->OutputToStdOut();
|
}
|
}
|
|
@@ -439,10 +458,10 @@
|
// Get the data object from access check info.
|
JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
|
Object* info = constructor->shared()->function_data();
|
- if (info == Heap::undefined_value()) return;
|
+ if (info == v8_context()->heap_.undefined_value()) return;
|
|
Object* data_obj = FunctionTemplateInfo::cast(info)->access_check_info();
|
- if (data_obj == Heap::undefined_value()) return;
|
+ if (data_obj == v8_context()->heap_.undefined_value()) return;
|
|
HandleScope scope;
|
Handle<JSObject> receiver_handle(receiver);
|
@@ -462,7 +481,7 @@
|
static MayAccessDecision MayAccessPreCheck(JSObject* receiver,
|
v8::AccessType type) {
|
// During bootstrapping, callback functions are not enabled yet.
|
- if (Bootstrapper::IsActive()) return YES;
|
+ if (v8_context()->bootstrapper_.IsActive()) return YES;
|
|
if (receiver->IsJSGlobalProxy()) {
|
Object* receiver_context = JSGlobalProxy::cast(receiver)->context();
|
@@ -470,7 +489,7 @@
|
|
// Get the global context of current top context.
|
// avoid using Top::global_context() because it uses Handle.
|
- Context* global_context = Top::context()->global()->global_context();
|
+ Context* global_context = v8_context()->top_.context()->global()->global_context();
|
if (receiver_context == global_context) return YES;
|
|
if (Context::cast(receiver_context)->security_token() ==
|
@@ -490,7 +509,7 @@
|
|
// Skip checks for hidden properties access. Note, we do not
|
// require existence of a context in this case.
|
- if (key == Heap::hidden_symbol()) return true;
|
+ if (key == v8_context()->heap_.hidden_symbol()) return true;
|
|
// Check for compatibility between the security tokens in the
|
// current lexical context and the accessed object.
|
@@ -502,10 +521,10 @@
|
// Get named access check callback
|
JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
|
Object* info = constructor->shared()->function_data();
|
- if (info == Heap::undefined_value()) return false;
|
+ if (info == v8_context()->heap_.undefined_value()) return false;
|
|
Object* data_obj = FunctionTemplateInfo::cast(info)->access_check_info();
|
- if (data_obj == Heap::undefined_value()) return false;
|
+ if (data_obj == v8_context()->heap_.undefined_value()) return false;
|
|
Object* fun_obj = AccessCheckInfo::cast(data_obj)->named_callback();
|
v8::NamedSecurityCallback callback =
|
@@ -547,10 +566,10 @@
|
// Get indexed access check callback
|
JSFunction* constructor = JSFunction::cast(receiver->map()->constructor());
|
Object* info = constructor->shared()->function_data();
|
- if (info == Heap::undefined_value()) return false;
|
+ if (info == v8_context()->heap_.undefined_value()) return false;
|
|
Object* data_obj = FunctionTemplateInfo::cast(info)->access_check_info();
|
- if (data_obj == Heap::undefined_value()) return false;
|
+ if (data_obj == v8_context()->heap_.undefined_value()) return false;
|
|
Object* fun_obj = AccessCheckInfo::cast(data_obj)->indexed_callback();
|
v8::IndexedSecurityCallback callback =
|
@@ -598,7 +617,7 @@
|
|
|
Failure* Top::TerminateExecution() {
|
- DoThrow(Heap::termination_exception(), NULL, NULL);
|
+ DoThrow(v8_context()->heap_.termination_exception(), NULL, NULL);
|
return Failure::Exception();
|
}
|
|
@@ -617,7 +636,7 @@
|
|
|
Failure* Top::ThrowIllegalOperation() {
|
- return Throw(Heap::illegal_access_symbol());
|
+ return Throw(v8_context()->heap_.illegal_access_symbol());
|
}
|
|
|
@@ -689,7 +708,7 @@
|
MessageLocation* location,
|
Handle<String> stack_trace) {
|
Handle<Object> message;
|
- if (!Bootstrapper::IsActive()) {
|
+ if (!v8_context()->bootstrapper_.IsActive()) {
|
// It's not safe to try to make message objects while the bootstrapper
|
// is active since the infrastructure may not have been properly
|
// initialized.
|
@@ -741,11 +760,12 @@
|
|
HandleScope scope;
|
Handle<Object> exception_handle(exception);
|
+ V8Context * const v8context = v8_context();
|
|
// Determine reporting and whether the exception is caught externally.
|
bool is_caught_externally = false;
|
bool is_out_of_memory = exception == Failure::OutOfMemoryException();
|
- bool is_termination_exception = exception == Heap::termination_exception();
|
+ bool is_termination_exception = exception == v8context->heap_.termination_exception();
|
bool catchable_by_javascript = !is_termination_exception && !is_out_of_memory;
|
bool should_return_exception =
|
ShouldReturnException(&is_caught_externally, catchable_by_javascript);
|
@@ -754,7 +774,7 @@
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Notify debugger of exception.
|
if (catchable_by_javascript) {
|
- Debugger::OnException(exception_handle, report_exception);
|
+ v8context->debug_.debugger()->OnException(exception_handle, report_exception);
|
}
|
#endif
|
|
@@ -770,7 +790,7 @@
|
ComputeLocation(&potential_computed_location);
|
location = &potential_computed_location;
|
}
|
- if (!Bootstrapper::IsActive()) {
|
+ if (!v8context->bootstrapper_.IsActive()) {
|
// It's not safe to try to make message objects or collect stack
|
// traces while the bootstrapper is active since the infrastructure
|
// may not have been properly initialized.
|
@@ -816,10 +836,10 @@
|
if (thread_local_.pending_exception_ == Failure::OutOfMemoryException()) {
|
context()->mark_out_of_memory();
|
} else if (thread_local_.pending_exception_ ==
|
- Heap::termination_exception()) {
|
+ v8_context()->heap_.termination_exception()) {
|
if (external_caught) {
|
thread_local_.TryCatchHandler()->can_continue_ = false;
|
- thread_local_.TryCatchHandler()->exception_ = Heap::null_value();
|
+ thread_local_.TryCatchHandler()->exception_ = v8_context()->heap_.null_value();
|
}
|
} else {
|
Handle<Object> exception(pending_exception());
|
@@ -865,7 +885,7 @@
|
// Allways reschedule out of memory exceptions.
|
if (!is_out_of_memory()) {
|
bool is_termination_exception =
|
- pending_exception() == Heap::termination_exception();
|
+ pending_exception() == v8_context()->heap_.termination_exception();
|
|
// Do not reschedule the exception if this is the bottom call.
|
bool clear_exception = is_bottom_call;
|
@@ -948,7 +968,7 @@
|
}
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -966,12 +986,12 @@
|
|
|
ExecutionAccess::ExecutionAccess() {
|
- Top::break_access_->Lock();
|
+ v8_context()->top_.top_impl->break_access_->Lock();
|
}
|
|
|
ExecutionAccess::~ExecutionAccess() {
|
- Top::break_access_->Unlock();
|
+ v8_context()->top_.top_impl->break_access_->Unlock();
|
}
|
|
|
Index: src/snapshot-common.cc
|
===================================================================
|
--- src/snapshot-common.cc (revision 3184)
|
+++ src/snapshot-common.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -40,14 +40,14 @@
|
bool Snapshot::Deserialize(const byte* content, int len) {
|
Deserializer des(content, len);
|
des.GetFlags();
|
- return V8::Initialize(&des);
|
+ return v8_context()->v8_.Initialize(&des);
|
}
|
|
|
bool Snapshot::Deserialize2(const byte* content, int len) {
|
SnapshotByteSource source(content, len);
|
Deserializer2 deserializer(&source);
|
- return V8::Initialize(&deserializer);
|
+ return v8_context()->v8_.Initialize(&deserializer);
|
}
|
|
|
Index: src/ia32/debug-ia32.cc
|
===================================================================
|
--- src/ia32/debug-ia32.cc (revision 3072)
|
+++ src/ia32/debug-ia32.cc Sat Nov 14 01:42:56 MSK 2009
|
@@ -37,7 +37,7 @@
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
|
bool BreakLocationIterator::IsDebugBreakAtReturn() {
|
- return Debug::IsDebugBreakAtReturn(rinfo());
|
+ return v8_context()->debug_.IsDebugBreakAtReturn(rinfo());
|
}
|
|
|
@@ -47,7 +47,7 @@
|
void BreakLocationIterator::SetDebugBreakAtReturn() {
|
ASSERT(Debug::kIa32JSReturnSequenceLength >=
|
Debug::kIa32CallInstructionLength);
|
- rinfo()->PatchCodeWithCall(Debug::debug_break_return()->entry(),
|
+ rinfo()->PatchCodeWithCall(v8_context()->debug_.debug_break_return()->entry(),
|
Debug::kIa32JSReturnSequenceLength - Debug::kIa32CallInstructionLength);
|
}
|
|
Index: src/zone.h
|
===================================================================
|
--- src/zone.h (revision 2939)
|
+++ src/zone.h Sat Nov 14 01:42:55 MSK 2009
|
@@ -72,7 +72,12 @@
|
static inline void adjust_segment_bytes_allocated(int delta);
|
|
private:
|
+ // Expand the Zone to hold at least 'size' more bytes and allocate
|
+ // the bytes. Returns the address of the newly allocated chunk of
|
+ // memory in the Zone. Should only be called if there isn't enough
|
+ // room in the Zone already.
|
+ static Address NewExpand(int size);
|
-
|
+
|
// All pointers returned from New() have this alignment.
|
static const int kAlignment = kPointerSize;
|
|
@@ -85,34 +90,41 @@
|
// Never keep segments larger than this size in bytes around.
|
static const int kMaximumKeptSegmentSize = 64 * KB;
|
|
- // Report zone excess when allocation exceeds this limit.
|
- static int zone_excess_limit_;
|
-
|
- // The number of bytes allocated in segments. Note that this number
|
- // includes memory allocated from the OS but not yet allocated from
|
- // the zone.
|
- static int segment_bytes_allocated_;
|
-
|
// The Zone is intentionally a singleton; you should not try to
|
// allocate instances of the class.
|
Zone() { UNREACHABLE(); }
|
+};
|
|
+class Segment;
|
+class ZoneData {
|
+ friend class Zone;
|
|
- // Expand the Zone to hold at least 'size' more bytes and allocate
|
- // the bytes. Returns the address of the newly allocated chunk of
|
- // memory in the Zone. Should only be called if there isn't enough
|
- // room in the Zone already.
|
- static Address NewExpand(int size);
|
+ // Report zone excess when allocation exceeds this limit.
|
+ int zone_excess_limit_;
|
|
+ // The number of bytes allocated in segments. Note that this number
|
+ // includes memory allocated from the OS but not yet allocated from
|
+ // the zone.
|
+ int segment_bytes_allocated_;
|
|
// The free region in the current (front) segment is represented as
|
// the half-open interval [position, limit). The 'position' variable
|
// is guaranteed to be aligned as dictated by kAlignment.
|
- static Address position_;
|
- static Address limit_;
|
-};
|
+ Address position_;
|
+ Address limit_;
|
|
+ Segment* head_;
|
+ int bytes_allocated_;
|
+ int nesting_;
|
|
+ bool allow_allocation_;
|
+ ZoneData();
|
+ friend class V8Context;
|
+ friend class ZoneScope;
|
+ friend class AssertNoZoneAllocation;
|
+ friend class Zone;
|
+ friend class Segment;
|
+};
|
// ZoneObject is an abstraction that helps define classes of objects
|
// allocated in the Zone. Use it as a base class; see ast.h.
|
class ZoneObject {
|
@@ -134,14 +146,15 @@
|
|
class AssertNoZoneAllocation {
|
public:
|
- AssertNoZoneAllocation() : prev_(allow_allocation_) {
|
- allow_allocation_ = false;
|
+ AssertNoZoneAllocation() {
|
+ V8Context* const v8context = v8_context();
|
+ prev_ = v8context->zone_data_.allow_allocation_;
|
+ v8context->zone_data_.allow_allocation_ = false;
|
}
|
- ~AssertNoZoneAllocation() { allow_allocation_ = prev_; }
|
- static bool allow_allocation() { return allow_allocation_; }
|
+ ~AssertNoZoneAllocation() { v8_context()->zone_data_.allow_allocation_ = prev_; }
|
+ static bool allow_allocation(V8Context* v8context) { return v8context->zone_data_.allow_allocation_; }
|
private:
|
bool prev_;
|
- static bool allow_allocation_;
|
};
|
|
|
@@ -178,16 +191,16 @@
|
class ZoneScope BASE_EMBEDDED {
|
public:
|
explicit ZoneScope(ZoneScopeMode mode) : mode_(mode) {
|
- nesting_++;
|
+ v8_context()->zone_data_.nesting_++;
|
}
|
|
virtual ~ZoneScope() {
|
if (ShouldDeleteOnExit()) Zone::DeleteAll();
|
- --nesting_;
|
+ --v8_context()->zone_data_.nesting_;
|
}
|
|
bool ShouldDeleteOnExit() {
|
- return nesting_ == 1 && mode_ == DELETE_ON_EXIT;
|
+ return v8_context()->zone_data_.nesting_ == 1 && mode_ == DELETE_ON_EXIT;
|
}
|
|
// For ZoneScopes that do not delete on exit by default, call this
|
@@ -196,11 +209,10 @@
|
mode_ = DELETE_ON_EXIT;
|
}
|
|
- static int nesting() { return nesting_; }
|
+ static int nesting(V8Context* v8context) { return v8context->zone_data_.nesting_; }
|
|
private:
|
ZoneScopeMode mode_;
|
- static int nesting_;
|
};
|
|
|
Index: src/usage-analyzer.cc
|
===================================================================
|
--- src/usage-analyzer.cc (revision 3048)
|
+++ src/usage-analyzer.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -419,7 +419,7 @@
|
|
bool AnalyzeVariableUsage(FunctionLiteral* lit) {
|
if (!FLAG_usage_computation) return true;
|
- HistogramTimerScope timer(&Counters::usage_analysis);
|
+ HistogramTimerScope timer(&v8_context()->counters_.usage_analysis);
|
return UsageComputer::Traverse(lit);
|
}
|
|
Index: src/platform-linux.cc
|
===================================================================
|
--- src/platform-linux.cc (revision 2848)
|
+++ src/platform-linux.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -583,8 +583,8 @@
|
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
|
-static Sampler* active_sampler_ = NULL;
|
-static pthread_t vm_thread_ = 0;
|
+static Sampler* active_sampler_ = NULL;  // TODO: still a process-global static — confirm whether it should move into V8Context
|
+static pthread_t vm_thread_ = 0;  // TODO: still a process-global static — confirm whether it should move into V8Context
|
|
|
#if !defined(__GLIBC__) && (defined(__arm__) || defined(__thumb__))
|
Index: src/bootstrapper.cc
|
===================================================================
|
--- src/bootstrapper.cc (revision 3239)
|
+++ src/bootstrapper.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -41,6 +41,27 @@
|
namespace v8 {
|
namespace internal {
|
|
+// Pending fixups are code positions that refer to builtin code
|
+// objects that were not available at the time the code was generated.
|
+// The pending list is processed whenever an environment has been
|
+// created.
|
+class PendingFixups {
|
+ public:
|
+ void Add(Code* code, MacroAssembler* masm);
|
+ bool Process(Handle<JSBuiltinsObject> builtins);
|
+
|
+ void Iterate(ObjectVisitor* v);
|
+
|
+ PendingFixups():code_(0),name_(0), pc_(0), flags_(0) {}
|
+ private:
|
+ List<Object*> code_;
|
+ List<const char*> name_;
|
+ List<int> pc_;
|
+ List<uint32_t> flags_;
|
+
|
+ void Clear();
|
+};
|
+
|
// A SourceCodeCache uses a FixedArray to store pairs of
|
// (AsciiString*, JSFunction*), mapping names of native code files
|
// (runtime.js, etc.) to precompiled functions. Instead of mapping
|
@@ -51,7 +72,7 @@
|
explicit SourceCodeCache(Script::Type type): type_(type), cache_(NULL) { }
|
|
void Initialize(bool create_heap_objects) {
|
- cache_ = create_heap_objects ? Heap::empty_fixed_array() : NULL;
|
+ cache_ = create_heap_objects ? v8_context()->heap_.empty_fixed_array() : NULL;
|
}
|
|
void Iterate(ObjectVisitor* v) {
|
@@ -91,18 +112,102 @@
|
DISALLOW_COPY_AND_ASSIGN(SourceCodeCache);
|
};
|
|
-static SourceCodeCache natives_cache(Script::TYPE_NATIVE);
|
-static SourceCodeCache extensions_cache(Script::TYPE_EXTENSION);
|
+class Genesis BASE_EMBEDDED {
|
+ public:
|
+ Genesis(Handle<Object> global_object,
|
+ v8::Handle<v8::ObjectTemplate> global_template,
|
+ v8::ExtensionConfiguration* extensions,
|
+ Bootstrapper::BootstrapperImpl* bootstrapper_impl
|
+ );
|
+ ~Genesis();
|
+
|
+ Handle<Context> result() { return result_; }
|
+
|
+ // Support for thread preemption.
|
+ static int ArchiveSpacePerThread(Bootstrapper::BootstrapperImpl* bootstrapper_impl);
|
+ static char* ArchiveState(char* to, Bootstrapper::BootstrapperImpl* bootstrapper_impl);
|
+ static char* RestoreState(char* from, Bootstrapper::BootstrapperImpl* bootstrapper_impl);
|
+
|
+ private:
|
+ Bootstrapper::BootstrapperImpl *bootstrapper_impl_;
|
+ Handle<Context> global_context_;
|
+
|
+ // There may be more than one active genesis object: When GC is
|
+ // triggered during environment creation there may be weak handle
|
+ // processing callbacks which may create new environments.
|
+ Genesis* previous_;
|
+
|
+ Handle<Context> global_context() { return global_context_; }
|
+
|
+ void CreateRoots(v8::Handle<v8::ObjectTemplate> global_template,
|
+ Handle<Object> global_object);
|
+ void InstallNativeFunctions();
|
+ bool InstallNatives();
|
+ bool InstallExtensions(v8::ExtensionConfiguration* extensions);
|
+ bool InstallExtension(const char* name);
|
+ bool InstallExtension(v8::RegisteredExtension* current);
|
+ bool InstallSpecialObjects();
|
+ bool ConfigureApiObject(Handle<JSObject> object,
|
+ Handle<ObjectTemplateInfo> object_template);
|
+ bool ConfigureGlobalObjects(v8::Handle<v8::ObjectTemplate> global_template);
|
+
|
+ // Migrates all properties from the 'from' object to the 'to'
|
+ // object and overrides the prototype in 'to' with the one from
|
+ // 'from'.
|
+ void TransferObject(Handle<JSObject> from, Handle<JSObject> to);
|
+ void TransferNamedProperties(Handle<JSObject> from, Handle<JSObject> to);
|
+ void TransferIndexedProperties(Handle<JSObject> from, Handle<JSObject> to);
|
+
|
+ Handle<DescriptorArray> ComputeFunctionInstanceDescriptor(
|
+ bool make_prototype_read_only,
|
+ bool make_prototype_enumerable = false);
|
+ void MakeFunctionInstancePrototypeWritable();
|
+
|
+ void AddSpecialFunction(Handle<JSObject> prototype,
|
+ const char* name,
|
+ Handle<Code> code);
|
+
|
+ void BuildSpecialFunctionTable();
|
+
|
+ static bool CompileBuiltin(int index, Bootstrapper::BootstrapperImpl* bootstrapper_impl);
|
+ static bool CompileNative(Vector<const char> name, Handle<String> source,
|
+ Bootstrapper::BootstrapperImpl* bootstrapper_impl);
|
+ static bool CompileScriptCached(Vector<const char> name,
|
+ Handle<String> source,
|
+ SourceCodeCache* cache,
|
+ v8::Extension* extension,
|
+ bool use_runtime_context,
|
+ Bootstrapper::BootstrapperImpl* bootstrapper_impl
|
+ );
|
+
|
+ Handle<Context> result_;
|
+};
|
+
|
+class Bootstrapper::BootstrapperImpl {
|
+public:
|
+ SourceCodeCache natives_cache;
|
+ SourceCodeCache extensions_cache;
|
+ PendingFixups pending_fixups;
|
+ Genesis* current_;
|
-// This is for delete, not delete[].
|
+ // This is for delete, not delete[].
|
-static List<char*>* delete_these_non_arrays_on_tear_down = NULL;
|
+ List<char*>* delete_these_non_arrays_on_tear_down;
|
|
|
+ BootstrapperImpl(): natives_cache(Script::TYPE_NATIVE), extensions_cache(Script::TYPE_EXTENSION),
|
+ current_(NULL), delete_these_non_arrays_on_tear_down(NULL) {}
|
+};
|
+
|
+Bootstrapper::Bootstrapper(): bootstrapper_impl(new BootstrapperImpl() ) {}
|
+Bootstrapper::~Bootstrapper() {
|
+ delete bootstrapper_impl;
|
+}
|
+
|
Handle<String> Bootstrapper::NativesSourceLookup(int index) {
|
ASSERT(0 <= index && index < Natives::GetBuiltinsCount());
|
- if (Heap::natives_source_cache()->get(index)->IsUndefined()) {
|
+ if (v8_context()->heap_.natives_source_cache()->get(index)->IsUndefined()) {
|
if (!Snapshot::IsEnabled() || FLAG_new_snapshot) {
|
- if (delete_these_non_arrays_on_tear_down == NULL) {
|
- delete_these_non_arrays_on_tear_down = new List<char*>(2);
|
+ if (bootstrapper_impl->delete_these_non_arrays_on_tear_down == NULL) {
|
+ bootstrapper_impl->delete_these_non_arrays_on_tear_down = new List<char*>(2);
|
}
|
// We can use external strings for the natives.
|
NativesExternalStringResource* resource =
|
@@ -110,84 +215,55 @@
|
Natives::GetScriptSource(index).start());
|
// The resources are small objects and we only make a fixed number of
|
// them, but lets clean them up on exit for neatness.
|
- delete_these_non_arrays_on_tear_down->
|
+ bootstrapper_impl->delete_these_non_arrays_on_tear_down->
|
Add(reinterpret_cast<char*>(resource));
|
Handle<String> source_code =
|
Factory::NewExternalStringFromAscii(resource);
|
- Heap::natives_source_cache()->set(index, *source_code);
|
+ v8_context()->heap_.natives_source_cache()->set(index, *source_code);
|
} else {
|
// Old snapshot code can't cope with external strings at all.
|
Handle<String> source_code =
|
Factory::NewStringFromAscii(Natives::GetScriptSource(index));
|
- Heap::natives_source_cache()->set(index, *source_code);
|
+ v8_context()->heap_.natives_source_cache()->set(index, *source_code);
|
}
|
}
|
- Handle<Object> cached_source(Heap::natives_source_cache()->get(index));
|
+ Handle<Object> cached_source(v8_context()->heap_.natives_source_cache()->get(index));
|
return Handle<String>::cast(cached_source);
|
}
|
|
|
bool Bootstrapper::NativesCacheLookup(Vector<const char> name,
|
Handle<JSFunction>* handle) {
|
- return natives_cache.Lookup(name, handle);
|
+ return bootstrapper_impl->natives_cache.Lookup(name, handle);
|
}
|
|
|
void Bootstrapper::NativesCacheAdd(Vector<const char> name,
|
Handle<JSFunction> fun) {
|
- natives_cache.Add(name, fun);
|
+ bootstrapper_impl->natives_cache.Add(name, fun);
|
}
|
|
|
void Bootstrapper::Initialize(bool create_heap_objects) {
|
- natives_cache.Initialize(create_heap_objects);
|
- extensions_cache.Initialize(create_heap_objects);
|
+ bootstrapper_impl->natives_cache.Initialize(create_heap_objects);
|
+ bootstrapper_impl->extensions_cache.Initialize(create_heap_objects);
|
}
|
|
|
void Bootstrapper::TearDown() {
|
- if (delete_these_non_arrays_on_tear_down != NULL) {
|
- int len = delete_these_non_arrays_on_tear_down->length();
|
+ if (bootstrapper_impl->delete_these_non_arrays_on_tear_down != NULL) {
|
+ int len = bootstrapper_impl->delete_these_non_arrays_on_tear_down->length();
|
ASSERT(len < 20); // Don't use this mechanism for unbounded allocations.
|
for (int i = 0; i < len; i++) {
|
- delete delete_these_non_arrays_on_tear_down->at(i);
|
+ delete bootstrapper_impl->delete_these_non_arrays_on_tear_down->at(i);
|
}
|
- delete delete_these_non_arrays_on_tear_down;
|
- delete_these_non_arrays_on_tear_down = NULL;
|
+ delete bootstrapper_impl->delete_these_non_arrays_on_tear_down;
|
+ bootstrapper_impl->delete_these_non_arrays_on_tear_down = NULL;
|
}
|
-
|
- natives_cache.Initialize(false); // Yes, symmetrical
|
- extensions_cache.Initialize(false);
|
+ bootstrapper_impl->natives_cache.Initialize(false); // Yes, symmetrical
|
+ bootstrapper_impl->extensions_cache.Initialize(false);
|
}
|
|
-
|
-// Pending fixups are code positions that refer to builtin code
|
-// objects that were not available at the time the code was generated.
|
-// The pending list is processed whenever an environment has been
|
-// created.
|
-class PendingFixups : public AllStatic {
|
- public:
|
- static void Add(Code* code, MacroAssembler* masm);
|
- static bool Process(Handle<JSBuiltinsObject> builtins);
|
-
|
- static void Iterate(ObjectVisitor* v);
|
-
|
- private:
|
- static List<Object*> code_;
|
- static List<const char*> name_;
|
- static List<int> pc_;
|
- static List<uint32_t> flags_;
|
-
|
- static void Clear();
|
-};
|
-
|
-
|
-List<Object*> PendingFixups::code_(0);
|
-List<const char*> PendingFixups::name_(0);
|
-List<int> PendingFixups::pc_(0);
|
-List<uint32_t> PendingFixups::flags_(0);
|
-
|
-
|
void PendingFixups::Add(Code* code, MacroAssembler* masm) {
|
// Note this code is not only called during bootstrapping.
|
List<MacroAssembler::Unresolved>* unresolved = masm->unresolved();
|
@@ -243,13 +319,14 @@
|
}
|
Clear();
|
|
+ Builtins& global_builtins = v8_context()->builtins_;
|
// TODO(1240818): We should probably try to avoid doing this for all
|
// the V8 builtin JS files. It should only happen after running
|
// runtime.js - just like there shouldn't be any fixups left after
|
// that.
|
- for (int i = 0; i < Builtins::NumberOfJavaScriptBuiltins(); i++) {
|
+ for (int i = 0; i < global_builtins.NumberOfJavaScriptBuiltins(); i++) {
|
Builtins::JavaScript id = static_cast<Builtins::JavaScript>(i);
|
- Handle<String> name = Factory::LookupAsciiSymbol(Builtins::GetName(id));
|
+ Handle<String> name = Factory::LookupAsciiSymbol(global_builtins.GetName(id));
|
JSFunction* function = JSFunction::cast(builtins->GetProperty(*name));
|
builtins->set_javascript_builtin(id, function);
|
}
|
@@ -272,85 +349,12 @@
|
}
|
}
|
|
-
|
-class Genesis BASE_EMBEDDED {
|
- public:
|
- Genesis(Handle<Object> global_object,
|
- v8::Handle<v8::ObjectTemplate> global_template,
|
- v8::ExtensionConfiguration* extensions);
|
- ~Genesis();
|
-
|
- Handle<Context> result() { return result_; }
|
-
|
- Genesis* previous() { return previous_; }
|
- static Genesis* current() { return current_; }
|
-
|
- // Support for thread preemption.
|
- static int ArchiveSpacePerThread();
|
- static char* ArchiveState(char* to);
|
- static char* RestoreState(char* from);
|
-
|
- private:
|
- Handle<Context> global_context_;
|
-
|
- // There may be more than one active genesis object: When GC is
|
- // triggered during environment creation there may be weak handle
|
- // processing callbacks which may create new environments.
|
- Genesis* previous_;
|
- static Genesis* current_;
|
-
|
- Handle<Context> global_context() { return global_context_; }
|
-
|
- void CreateRoots(v8::Handle<v8::ObjectTemplate> global_template,
|
- Handle<Object> global_object);
|
- void InstallNativeFunctions();
|
- bool InstallNatives();
|
- bool InstallExtensions(v8::ExtensionConfiguration* extensions);
|
- bool InstallExtension(const char* name);
|
- bool InstallExtension(v8::RegisteredExtension* current);
|
- bool InstallSpecialObjects();
|
- bool ConfigureApiObject(Handle<JSObject> object,
|
- Handle<ObjectTemplateInfo> object_template);
|
- bool ConfigureGlobalObjects(v8::Handle<v8::ObjectTemplate> global_template);
|
-
|
- // Migrates all properties from the 'from' object to the 'to'
|
- // object and overrides the prototype in 'to' with the one from
|
- // 'from'.
|
- void TransferObject(Handle<JSObject> from, Handle<JSObject> to);
|
- void TransferNamedProperties(Handle<JSObject> from, Handle<JSObject> to);
|
- void TransferIndexedProperties(Handle<JSObject> from, Handle<JSObject> to);
|
-
|
- Handle<DescriptorArray> ComputeFunctionInstanceDescriptor(
|
- bool make_prototype_read_only,
|
- bool make_prototype_enumerable = false);
|
- void MakeFunctionInstancePrototypeWritable();
|
-
|
- void AddSpecialFunction(Handle<JSObject> prototype,
|
- const char* name,
|
- Handle<Code> code);
|
-
|
- void BuildSpecialFunctionTable();
|
-
|
- static bool CompileBuiltin(int index);
|
- static bool CompileNative(Vector<const char> name, Handle<String> source);
|
- static bool CompileScriptCached(Vector<const char> name,
|
- Handle<String> source,
|
- SourceCodeCache* cache,
|
- v8::Extension* extension,
|
- bool use_runtime_context);
|
-
|
- Handle<Context> result_;
|
-};
|
-
|
-Genesis* Genesis::current_ = NULL;
|
-
|
-
|
void Bootstrapper::Iterate(ObjectVisitor* v) {
|
- natives_cache.Iterate(v);
|
+ bootstrapper_impl->natives_cache.Iterate(v);
|
v->Synchronize("NativesCache");
|
- extensions_cache.Iterate(v);
|
+ bootstrapper_impl->extensions_cache.Iterate(v);
|
v->Synchronize("Extensions");
|
- PendingFixups::Iterate(v);
|
+ bootstrapper_impl->pending_fixups.Iterate(v);
|
v->Synchronize("PendingFixups");
|
}
|
|
@@ -358,12 +362,12 @@
|
// While setting up the environment, we collect code positions that
|
// need to be patched before we can run any code in the environment.
|
void Bootstrapper::AddFixup(Code* code, MacroAssembler* masm) {
|
- PendingFixups::Add(code, masm);
|
+ bootstrapper_impl->pending_fixups.Add(code, masm);
|
}
|
|
|
bool Bootstrapper::IsActive() {
|
- return Genesis::current() != NULL;
|
+ return bootstrapper_impl->current_ != NULL;
|
}
|
|
|
@@ -371,7 +375,7 @@
|
Handle<Object> global_object,
|
v8::Handle<v8::ObjectTemplate> global_template,
|
v8::ExtensionConfiguration* extensions) {
|
- Genesis genesis(global_object, global_template, extensions);
|
+ Genesis genesis(global_object, global_template, extensions, bootstrapper_impl);
|
return genesis.result();
|
}
|
|
@@ -395,8 +399,8 @@
|
|
|
Genesis::~Genesis() {
|
- ASSERT(current_ == this);
|
- current_ = previous_;
|
+ ASSERT(bootstrapper_impl_->current_ == this);
|
+ bootstrapper_impl_->current_ = previous_;
|
}
|
|
|
@@ -408,7 +412,7 @@
|
Builtins::Name call,
|
bool is_ecma_native) {
|
Handle<String> symbol = Factory::LookupAsciiSymbol(name);
|
- Handle<Code> call_code = Handle<Code>(Builtins::builtin(call));
|
+ Handle<Code> call_code = Handle<Code>(v8_context()->builtins_.builtin(call));
|
Handle<JSFunction> function =
|
Factory::NewFunctionWithPrototype(symbol,
|
type,
|
@@ -488,8 +492,9 @@
|
// global context).
|
global_context_ =
|
Handle<Context>::cast(
|
- GlobalHandles::Create(*Factory::NewGlobalContext()));
|
- Top::set_context(*global_context());
|
+ v8_context()->global_handles_.Create(*Factory::NewGlobalContext()));
|
+ Top& top = v8_context()->top_;
|
+ top.set_context(*global_context());
|
|
// Allocate the message listeners object.
|
v8::NeanderArray listeners;
|
@@ -510,7 +515,8 @@
|
function_map_descriptors = ComputeFunctionInstanceDescriptor(true);
|
fm->set_instance_descriptors(*function_map_descriptors);
|
|
- Handle<String> object_name = Handle<String>(Heap::Object_symbol());
|
+ Heap& heap = v8_context()->heap_;
|
+ Handle<String> object_name = Handle<String>(heap.Object_symbol());
|
|
{ // --- O b j e c t ---
|
Handle<JSFunction> object_fun =
|
@@ -523,13 +529,13 @@
|
global_context()->set_object_function(*object_fun);
|
|
// Allocate a new prototype for the object function.
|
- Handle<JSObject> prototype = Factory::NewJSObject(Top::object_function(),
|
+ Handle<JSObject> prototype = Factory::NewJSObject(top.object_function(),
|
TENURED);
|
|
global_context()->set_initial_object_prototype(*prototype);
|
SetPrototype(object_fun, prototype);
|
object_function_map->
|
- set_instance_descriptors(Heap::empty_descriptor_array());
|
+ set_instance_descriptors(heap.empty_descriptor_array());
|
}
|
|
// Allocate the empty function as the prototype for function ECMAScript
|
@@ -538,9 +544,11 @@
|
Handle<JSFunction> empty_function =
|
Factory::NewFunction(symbol, Factory::null_value());
|
|
+ Builtins& builtins = v8_context()->builtins_;
|
+
|
{ // --- E m p t y ---
|
Handle<Code> code =
|
- Handle<Code>(Builtins::builtin(Builtins::EmptyFunction));
|
+ Handle<Code>(builtins.builtin(Builtins::EmptyFunction));
|
empty_function->set_code(*code);
|
Handle<String> source = Factory::NewStringFromAscii(CStrVector("() {}"));
|
Handle<Script> script = Factory::NewScript(source);
|
@@ -580,8 +588,8 @@
|
}
|
|
if (js_global_template.is_null()) {
|
- Handle<String> name = Handle<String>(Heap::empty_symbol());
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
|
+ Handle<String> name = Handle<String>(heap.empty_symbol());
|
+ Handle<Code> code = Handle<Code>(builtins.builtin(Builtins::Illegal));
|
js_global_function =
|
Factory::NewFunction(name, JS_GLOBAL_OBJECT_TYPE,
|
JSGlobalObject::kSize, code, true);
|
@@ -591,7 +599,7 @@
|
Handle<JSObject>(
|
JSObject::cast(js_global_function->instance_prototype()));
|
SetProperty(prototype, Factory::constructor_symbol(),
|
- Top::object_function(), NONE);
|
+ top.object_function(), NONE);
|
} else {
|
Handle<FunctionTemplateInfo> js_global_constructor(
|
FunctionTemplateInfo::cast(js_global_template->constructor()));
|
@@ -612,8 +620,8 @@
|
{
|
Handle<JSFunction> global_proxy_function;
|
if (global_template.IsEmpty()) {
|
- Handle<String> name = Handle<String>(Heap::empty_symbol());
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
|
+ Handle<String> name = Handle<String>(heap.empty_symbol());
|
+ Handle<Code> code = Handle<Code>(builtins.builtin(Builtins::Illegal));
|
global_proxy_function =
|
Factory::NewFunction(name, JS_GLOBAL_PROXY_TYPE,
|
JSGlobalProxy::kSize, code, true);
|
@@ -667,7 +675,7 @@
|
}
|
|
Handle<JSObject> global = Handle<JSObject>(global_context()->global());
|
- SetProperty(global, object_name, Top::object_function(), DONT_ENUM);
|
+ SetProperty(global, object_name, top.object_function(), DONT_ENUM);
|
}
|
|
Handle<JSObject> global = Handle<JSObject>(global_context()->global());
|
@@ -679,10 +687,10 @@
|
{ // --- A r r a y ---
|
Handle<JSFunction> array_function =
|
InstallFunction(global, "Array", JS_ARRAY_TYPE, JSArray::kSize,
|
- Top::initial_object_prototype(), Builtins::ArrayCode,
|
+ top.initial_object_prototype(), Builtins::ArrayCode,
|
true);
|
array_function->shared()->set_construct_stub(
|
- Builtins::builtin(Builtins::ArrayConstructCode));
|
+ builtins.builtin(Builtins::ArrayConstructCode));
|
array_function->shared()->DontAdaptArguments();
|
|
// This seems a bit hackish, but we need to make sure Array.length
|
@@ -709,7 +717,7 @@
|
{ // --- N u m b e r ---
|
Handle<JSFunction> number_fun =
|
InstallFunction(global, "Number", JS_VALUE_TYPE, JSValue::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
true);
|
global_context()->set_number_function(*number_fun);
|
}
|
@@ -717,7 +725,7 @@
|
{ // --- B o o l e a n ---
|
Handle<JSFunction> boolean_fun =
|
InstallFunction(global, "Boolean", JS_VALUE_TYPE, JSValue::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
true);
|
global_context()->set_boolean_function(*boolean_fun);
|
}
|
@@ -725,7 +733,7 @@
|
{ // --- S t r i n g ---
|
Handle<JSFunction> string_fun =
|
InstallFunction(global, "String", JS_VALUE_TYPE, JSValue::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
true);
|
global_context()->set_string_function(*string_fun);
|
// Add 'length' property to strings.
|
@@ -747,7 +755,7 @@
|
// Builtin functions for Date.prototype.
|
Handle<JSFunction> date_fun =
|
InstallFunction(global, "Date", JS_VALUE_TYPE, JSValue::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
true);
|
|
global_context()->set_date_function(*date_fun);
|
@@ -758,7 +766,7 @@
|
// Builtin functions for RegExp.prototype.
|
Handle<JSFunction> regexp_fun =
|
InstallFunction(global, "RegExp", JS_REGEXP_TYPE, JSRegExp::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
true);
|
|
global_context()->set_regexp_function(*regexp_fun);
|
@@ -782,7 +790,7 @@
|
// This is done by introducing an anonymous function with
|
// class_name equals 'Arguments'.
|
Handle<String> symbol = Factory::LookupAsciiSymbol("Arguments");
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
|
+ Handle<Code> code = Handle<Code>(builtins.builtin(Builtins::Illegal));
|
Handle<JSObject> prototype =
|
Handle<JSObject>(
|
JSObject::cast(global_context()->object_function()->prototype()));
|
@@ -811,11 +819,11 @@
|
|
#ifdef DEBUG
|
LookupResult lookup;
|
- result->LocalLookup(Heap::callee_symbol(), &lookup);
|
+ result->LocalLookup(heap.callee_symbol(), &lookup);
|
ASSERT(lookup.IsValid() && (lookup.type() == FIELD));
|
ASSERT(lookup.GetFieldIndex() == Heap::arguments_callee_index);
|
|
- result->LocalLookup(Heap::length_symbol(), &lookup);
|
+ result->LocalLookup(heap.length_symbol(), &lookup);
|
ASSERT(lookup.IsValid() && (lookup.type() == FIELD));
|
ASSERT(lookup.GetFieldIndex() == Heap::arguments_length_index);
|
|
@@ -830,7 +838,7 @@
|
|
{ // --- context extension
|
// Create a function for the context extension objects.
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
|
+ Handle<Code> code = Handle<Code>(builtins.builtin(Builtins::Illegal));
|
Handle<JSFunction> context_extension_fun =
|
Factory::NewFunction(Factory::empty_symbol(),
|
JS_CONTEXT_EXTENSION_OBJECT_TYPE,
|
@@ -847,7 +855,7 @@
|
{
|
// Setup the call-as-function delegate.
|
Handle<Code> code =
|
- Handle<Code>(Builtins::builtin(Builtins::HandleApiCallAsFunction));
|
+ Handle<Code>(builtins.builtin(Builtins::HandleApiCallAsFunction));
|
Handle<JSFunction> delegate =
|
Factory::NewFunction(Factory::empty_symbol(), JS_OBJECT_TYPE,
|
JSObject::kHeaderSize, code, true);
|
@@ -858,7 +866,7 @@
|
{
|
// Setup the call-as-constructor delegate.
|
Handle<Code> code =
|
- Handle<Code>(Builtins::builtin(Builtins::HandleApiCallAsConstructor));
|
+ Handle<Code>(builtins.builtin(Builtins::HandleApiCallAsConstructor));
|
Handle<JSFunction> delegate =
|
Factory::NewFunction(Factory::empty_symbol(), JS_OBJECT_TYPE,
|
JSObject::kHeaderSize, code, true);
|
@@ -866,34 +874,36 @@
|
delegate->shared()->DontAdaptArguments();
|
}
|
|
- global_context()->set_special_function_table(Heap::empty_fixed_array());
|
+ global_context()->set_special_function_table(heap.empty_fixed_array());
|
|
// Initialize the out of memory slot.
|
- global_context()->set_out_of_memory(Heap::false_value());
|
+ global_context()->set_out_of_memory(heap.false_value());
|
|
// Initialize the data slot.
|
- global_context()->set_data(Heap::undefined_value());
|
+ global_context()->set_data(heap.undefined_value());
|
}
|
|
|
-bool Genesis::CompileBuiltin(int index) {
|
+bool Genesis::CompileBuiltin(int index, Bootstrapper::BootstrapperImpl* bootstrapper_impl) {
|
Vector<const char> name = Natives::GetScriptName(index);
|
- Handle<String> source_code = Bootstrapper::NativesSourceLookup(index);
|
- return CompileNative(name, source_code);
|
+ Handle<String> source_code = v8_context()->bootstrapper_.NativesSourceLookup(index);
|
+ return CompileNative(name, source_code, bootstrapper_impl);
|
}
|
|
|
-bool Genesis::CompileNative(Vector<const char> name, Handle<String> source) {
|
+bool Genesis::CompileNative(Vector<const char> name, Handle<String> source, Bootstrapper::BootstrapperImpl* bootstrapper_impl) {
|
HandleScope scope;
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debugger::set_compiling_natives(true);
|
+ Debugger* const debugger = v8_context()->debug_.debugger();
|
+ debugger->set_compiling_natives(true);
|
#endif
|
bool result =
|
- CompileScriptCached(name, source, &natives_cache, NULL, true);
|
- ASSERT(Top::has_pending_exception() != result);
|
- if (!result) Top::clear_pending_exception();
|
+ CompileScriptCached(name, source, &bootstrapper_impl->natives_cache, NULL, true, bootstrapper_impl);
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(top.has_pending_exception() != result);
|
+ if (!result) top.clear_pending_exception();
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debugger::set_compiling_natives(false);
|
+ debugger->set_compiling_natives(false);
|
#endif
|
return result;
|
}
|
@@ -903,7 +913,9 @@
|
Handle<String> source,
|
SourceCodeCache* cache,
|
v8::Extension* extension,
|
- bool use_runtime_context) {
|
+ bool use_runtime_context,
|
+ Bootstrapper::BootstrapperImpl* bootstrapper_impl
|
+ ) {
|
HandleScope scope;
|
Handle<JSFunction> boilerplate;
|
|
@@ -913,19 +925,20 @@
|
ASSERT(source->IsAsciiRepresentation());
|
Handle<String> script_name = Factory::NewStringFromUtf8(name);
|
boilerplate =
|
- Compiler::Compile(source, script_name, 0, 0, extension, NULL);
|
+ v8_context()->compiler_.Compile(source, script_name, 0, 0, extension, NULL);
|
if (boilerplate.is_null()) return false;
|
cache->Add(name, boilerplate);
|
}
|
|
+ Top& top = v8_context()->top_;
|
// Setup the function context. Conceptually, we should clone the
|
// function before overwriting the context but since we're in a
|
// single-threaded environment it is not strictly necessary.
|
- ASSERT(Top::context()->IsGlobalContext());
|
+ ASSERT(top.context()->IsGlobalContext());
|
Handle<Context> context =
|
Handle<Context>(use_runtime_context
|
- ? Top::context()->runtime_context()
|
- : Top::context());
|
+ ? top.context()->runtime_context()
|
+ : top.context());
|
Handle<JSFunction> fun =
|
Factory::NewFunctionFromBoilerplate(boilerplate, context);
|
|
@@ -933,14 +946,14 @@
|
// object as the receiver. Provide no parameters.
|
Handle<Object> receiver =
|
Handle<Object>(use_runtime_context
|
- ? Top::context()->builtins()
|
- : Top::context()->global());
|
+ ? top.context()->builtins()
|
+ : top.context()->global());
|
bool has_pending_exception;
|
Handle<Object> result =
|
Execution::Call(fun, receiver, 0, NULL, &has_pending_exception);
|
if (has_pending_exception) return false;
|
- return PendingFixups::Process(
|
- Handle<JSBuiltinsObject>(Top::context()->builtins()));
|
+ return bootstrapper_impl->pending_fixups.Process(
|
+ Handle<JSBuiltinsObject>(top.context()->builtins()));
|
}
|
|
|
@@ -978,7 +991,7 @@
|
// Create a function for the builtins object. Allocate space for the
|
// JavaScript builtins, a reference to the builtins object
|
// (itself) and a reference to the global_context directly in the object.
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::Illegal));
|
+ Handle<Code> code = Handle<Code>(v8_context()->builtins_.builtin(Builtins::Illegal));
|
Handle<JSFunction> builtins_fun =
|
Factory::NewFunction(Factory::empty_symbol(), JS_BUILTINS_OBJECT_TYPE,
|
JSBuiltinsObject::kSize, code, true);
|
@@ -1008,7 +1021,8 @@
|
// Create a bridge function that has context in the global context.
|
Handle<JSFunction> bridge =
|
Factory::NewFunction(Factory::empty_symbol(), Factory::undefined_value());
|
- ASSERT(bridge->context() == *Top::global_context());
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(bridge->context() == *top.global_context());
|
|
// Allocate the builtins context.
|
Handle<Context> context =
|
@@ -1021,10 +1035,10 @@
|
// Builtin functions for Script.
|
Handle<JSFunction> script_fun =
|
InstallFunction(builtins, "Script", JS_VALUE_TYPE, JSValue::kSize,
|
- Top::initial_object_prototype(), Builtins::Illegal,
|
+ top.initial_object_prototype(), Builtins::Illegal,
|
false);
|
Handle<JSObject> prototype =
|
- Factory::NewJSObject(Top::object_function(), TENURED);
|
+ Factory::NewJSObject(top.object_function(), TENURED);
|
SetPrototype(script_fun, prototype);
|
global_context()->set_script_function(*script_fun);
|
|
@@ -1137,22 +1151,22 @@
|
for (int i = Natives::GetDelayCount();
|
i < Natives::GetBuiltinsCount();
|
i++) {
|
- if (!CompileBuiltin(i)) return false;
|
+ if (!CompileBuiltin(i, bootstrapper_impl_)) return false;
|
}
|
|
// Setup natives with lazy loading.
|
SetupLazy(Handle<JSFunction>(global_context()->date_function()),
|
Natives::GetIndex("date"),
|
- Top::global_context(),
|
- Handle<Context>(Top::context()->runtime_context()));
|
+ top.global_context(),
|
+ Handle<Context>(top.context()->runtime_context()));
|
SetupLazy(Handle<JSFunction>(global_context()->regexp_function()),
|
Natives::GetIndex("regexp"),
|
- Top::global_context(),
|
- Handle<Context>(Top::context()->runtime_context()));
|
+ top.global_context(),
|
+ Handle<Context>(top.context()->runtime_context()));
|
SetupLazy(Handle<JSObject>(global_context()->json_object()),
|
Natives::GetIndex("json"),
|
- Top::global_context(),
|
- Handle<Context>(Top::context()->runtime_context()));
|
+ top.global_context(),
|
+ Handle<Context>(top.context()->runtime_context()));
|
|
} else if (strlen(FLAG_natives_file) != 0) {
|
// Otherwise install natives from natives file if file exists and
|
@@ -1161,7 +1175,7 @@
|
Vector<const char> source = ReadFile(FLAG_natives_file, &exists);
|
Handle<String> source_string = Factory::NewStringFromAscii(source);
|
if (source.is_empty()) return false;
|
- bool result = CompileNative(CStrVector(FLAG_natives_file), source_string);
|
+ bool result = CompileNative(CStrVector(FLAG_natives_file), source_string, bootstrapper_impl_);
|
if (!result) return false;
|
|
} else {
|
@@ -1175,19 +1189,19 @@
|
// Install Function.prototype.call and apply.
|
{ Handle<String> key = Factory::function_class_symbol();
|
Handle<JSFunction> function =
|
- Handle<JSFunction>::cast(GetProperty(Top::global(), key));
|
+ Handle<JSFunction>::cast(GetProperty(top.global(), key));
|
Handle<JSObject> proto =
|
Handle<JSObject>(JSObject::cast(function->instance_prototype()));
|
|
// Install the call and the apply functions.
|
Handle<JSFunction> call =
|
InstallFunction(proto, "call", JS_OBJECT_TYPE, JSObject::kHeaderSize,
|
- Factory::NewJSObject(Top::object_function(), TENURED),
|
+ Factory::NewJSObject(top.object_function(), TENURED),
|
Builtins::FunctionCall,
|
false);
|
Handle<JSFunction> apply =
|
InstallFunction(proto, "apply", JS_OBJECT_TYPE, JSObject::kHeaderSize,
|
- Factory::NewJSObject(Top::object_function(), TENURED),
|
+ Factory::NewJSObject(top.object_function(), TENURED),
|
Builtins::FunctionApply,
|
false);
|
|
@@ -1236,20 +1250,21 @@
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Expose the debug global object in global if a name for it is specified.
|
if (FLAG_expose_debug_as != NULL && strlen(FLAG_expose_debug_as) != 0) {
|
+ Debug& debug = v8_context()->debug_;
|
// If loading fails we just bail out without installing the
|
// debugger but without tanking the whole context.
|
- if (!Debug::Load())
|
+ if (!debug.Load())
|
return true;
|
// Set the security token for the debugger context to the same as
|
// the shell global context to allow calling between these (otherwise
|
// exposing debug global object doesn't make much sense).
|
- Debug::debug_context()->set_security_token(
|
+ debug.debug_context()->set_security_token(
|
global_context()->security_token());
|
|
Handle<String> debug_string =
|
Factory::LookupAsciiSymbol(FLAG_expose_debug_as);
|
SetProperty(js_global, debug_string,
|
- Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM);
|
+ Handle<Object>(debug.debug_context()->global_proxy()), DONT_ENUM);
|
}
|
#endif
|
|
@@ -1328,11 +1343,12 @@
|
Handle<String> source_code = Factory::NewStringFromAscii(source);
|
bool result = CompileScriptCached(CStrVector(extension->name()),
|
source_code,
|
- &extensions_cache, extension,
|
- false);
|
- ASSERT(Top::has_pending_exception() != result);
|
+ &bootstrapper_impl_->extensions_cache, extension,
|
+ false, bootstrapper_impl_);
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(top.has_pending_exception() != result);
|
if (!result) {
|
- Top::clear_pending_exception();
|
+ top.clear_pending_exception();
|
v8::Utils::ReportApiFailure(
|
"v8::Context::New()", "Error installing extension");
|
}
|
@@ -1378,8 +1394,9 @@
|
Handle<JSObject> obj =
|
Execution::InstantiateObject(object_template, &pending_exception);
|
if (pending_exception) {
|
- ASSERT(Top::has_pending_exception());
|
- Top::clear_pending_exception();
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(top.has_pending_exception());
|
+ top.clear_pending_exception();
|
return false;
|
}
|
TransferObject(obj, object);
|
@@ -1501,9 +1518,10 @@
|
|
Handle<DescriptorArray> function_map_descriptors =
|
ComputeFunctionInstanceDescriptor(false);
|
- Handle<Map> fm = Factory::CopyMapDropDescriptors(Top::function_map());
|
+ Top& top = v8_context()->top_;
|
+ Handle<Map> fm = Factory::CopyMapDropDescriptors(top.function_map());
|
fm->set_instance_descriptors(*function_map_descriptors);
|
- Top::context()->global_context()->set_function_map(*fm);
|
+ top.global_context()->set_function_map(*fm);
|
}
|
|
|
@@ -1539,7 +1557,7 @@
|
// Add special versions for Array.prototype.pop and push.
|
Handle<JSFunction> function =
|
Handle<JSFunction>(
|
- JSFunction::cast(global->GetProperty(Heap::Array_symbol())));
|
+ JSFunction::cast(global->GetProperty(v8_context()->heap_.Array_symbol())));
|
Handle<JSObject> visible_prototype =
|
Handle<JSObject>(JSObject::cast(function->prototype()));
|
// Remember to put push and pop on the hidden prototype if it's there.
|
@@ -1552,24 +1570,27 @@
|
push_and_pop_prototype = visible_prototype;
|
}
|
AddSpecialFunction(push_and_pop_prototype, "pop",
|
- Handle<Code>(Builtins::builtin(Builtins::ArrayPop)));
|
+ Handle<Code>(v8_context()->builtins_.builtin(Builtins::ArrayPop)));
|
AddSpecialFunction(push_and_pop_prototype, "push",
|
- Handle<Code>(Builtins::builtin(Builtins::ArrayPush)));
|
+ Handle<Code>(v8_context()->builtins_.builtin(Builtins::ArrayPush)));
|
}
|
|
|
Genesis::Genesis(Handle<Object> global_object,
|
v8::Handle<v8::ObjectTemplate> global_template,
|
- v8::ExtensionConfiguration* extensions) {
|
+ v8::ExtensionConfiguration* extensions,
|
+ Bootstrapper::BootstrapperImpl* bootstrapper_impl
|
+ ): bootstrapper_impl_(bootstrapper_impl) {
|
// Link this genesis object into the stacked genesis chain. This
|
// must be done before any early exits because the destructor
|
// will always do unlinking.
|
- previous_ = current_;
|
- current_ = this;
|
+ previous_ = bootstrapper_impl->current_;
|
+ bootstrapper_impl->current_ = this;
|
result_ = Handle<Context>::null();
|
|
// If V8 isn't running and cannot be initialized, just return.
|
- if (!V8::IsRunning() && !V8::Initialize(NULL)) return;
|
+ i::V8& v8 = v8_context()->v8_;
|
+ if (!v8.IsRunning() && !v8.Initialize(NULL)) return;
|
|
// Before creating the roots we must save the context and restore it
|
// on all function exits.
|
@@ -1597,46 +1618,46 @@
|
|
// Reserve space for statics needing saving and restoring.
|
int Bootstrapper::ArchiveSpacePerThread() {
|
- return Genesis::ArchiveSpacePerThread();
|
+ return Genesis::ArchiveSpacePerThread(bootstrapper_impl);
|
}
|
|
|
// Archive statics that are thread local.
|
char* Bootstrapper::ArchiveState(char* to) {
|
- return Genesis::ArchiveState(to);
|
+ return Genesis::ArchiveState(to, bootstrapper_impl);
|
}
|
|
|
// Restore statics that are thread local.
|
char* Bootstrapper::RestoreState(char* from) {
|
- return Genesis::RestoreState(from);
|
+ return Genesis::RestoreState(from, bootstrapper_impl);
|
}
|
|
|
// Called when the top-level V8 mutex is destroyed.
|
void Bootstrapper::FreeThreadResources() {
|
- ASSERT(Genesis::current() == NULL);
|
+ ASSERT(bootstrapper_impl->current_ == NULL);
|
}
|
|
|
// Reserve space for statics needing saving and restoring.
|
-int Genesis::ArchiveSpacePerThread() {
|
- return sizeof(current_);
|
+int Genesis::ArchiveSpacePerThread(Bootstrapper::BootstrapperImpl* bootstrapper_impl) {
|
+ return sizeof(bootstrapper_impl->current_);
|
}
|
|
|
// Archive statics that are thread local.
|
-char* Genesis::ArchiveState(char* to) {
|
- *reinterpret_cast<Genesis**>(to) = current_;
|
- current_ = NULL;
|
- return to + sizeof(current_);
|
+char* Genesis::ArchiveState(char* to, Bootstrapper::BootstrapperImpl* bootstrapper_impl) {
|
+ *reinterpret_cast<Genesis**>(to) = bootstrapper_impl->current_;
|
+ bootstrapper_impl->current_ = NULL;
|
+ return to + sizeof(bootstrapper_impl->current_);
|
}
|
|
|
// Restore statics that are thread local.
|
-char* Genesis::RestoreState(char* from) {
|
- current_ = *reinterpret_cast<Genesis**>(from);
|
- return from + sizeof(current_);
|
+char* Genesis::RestoreState(char* from, Bootstrapper::BootstrapperImpl* bootstrapper_impl) {
|
+ bootstrapper_impl->current_ = *reinterpret_cast<Genesis**>(from);
|
+ return from + sizeof(bootstrapper_impl->current_);
|
}
|
|
} } // namespace v8::internal
|
Index: src/spaces.cc
|
===================================================================
|
--- src/spaces.cc (revision 3142)
|
+++ src/spaces.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -140,19 +140,11 @@
|
// -----------------------------------------------------------------------------
|
// Page
|
|
-#ifdef DEBUG
|
-Page::RSetState Page::rset_state_ = Page::IN_USE;
|
-#endif
|
-
|
// -----------------------------------------------------------------------------
|
// CodeRange
|
|
-List<CodeRange::FreeBlock> CodeRange::free_list_(0);
|
-List<CodeRange::FreeBlock> CodeRange::allocation_list_(0);
|
-int CodeRange::current_allocation_block_index_ = 0;
|
-VirtualMemory* CodeRange::code_range_ = NULL;
|
+CodeRange::CodeRange(): free_list_(0), current_allocation_block_index_ (0), code_range_ (NULL) {}
|
|
-
|
bool CodeRange::Setup(const size_t requested) {
|
ASSERT(code_range_ == NULL);
|
|
@@ -219,7 +211,7 @@
|
}
|
|
// Code range is full or too fragmented.
|
- V8::FatalProcessOutOfMemory("CodeRange::GetNextAllocationBlock");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("CodeRange::GetNextAllocationBlock");
|
}
|
|
|
@@ -269,21 +261,15 @@
|
// -----------------------------------------------------------------------------
|
// MemoryAllocator
|
//
|
-int MemoryAllocator::capacity_ = 0;
|
-int MemoryAllocator::size_ = 0;
|
-
|
-VirtualMemory* MemoryAllocator::initial_chunk_ = NULL;
|
-
|
// 270 is an estimate based on the static default heap size of a pair of 256K
|
// semispaces and a 64M old generation.
|
const int kEstimatedNumberOfChunks = 270;
|
-List<MemoryAllocator::ChunkInfo> MemoryAllocator::chunks_(
|
- kEstimatedNumberOfChunks);
|
-List<int> MemoryAllocator::free_chunk_ids_(kEstimatedNumberOfChunks);
|
-int MemoryAllocator::max_nof_chunks_ = 0;
|
-int MemoryAllocator::top_ = 0;
|
|
+MemoryAllocator::MemoryAllocator():capacity_(0), size_(0), initial_chunk_(NULL),
|
+ max_nof_chunks_ (0), top_ (0), free_chunk_ids_(kEstimatedNumberOfChunks) {
|
|
+}
|
+
|
void MemoryAllocator::Push(int free_chunk_id) {
|
ASSERT(max_nof_chunks_ > 0);
|
ASSERT(top_ < max_nof_chunks_);
|
@@ -348,26 +334,31 @@
|
size_t* allocated,
|
Executability executable) {
|
if (size_ + static_cast<int>(requested) > capacity_) return NULL;
|
+ V8Context * const v8context = v8_context();
|
+ CodeRange* const code_range = v8context->heap_.code_range();
|
void* mem;
|
- if (executable == EXECUTABLE && CodeRange::exists()) {
|
- mem = CodeRange::AllocateRawMemory(requested, allocated);
|
+ if (executable == EXECUTABLE && code_range->exists()) {
|
+ mem = code_range->AllocateRawMemory(requested, allocated);
|
} else {
|
mem = OS::Allocate(requested, allocated, (executable == EXECUTABLE));
|
}
|
int alloced = *allocated;
|
size_ += alloced;
|
- Counters::memory_allocated.Increment(alloced);
|
+ v8context->counters_.memory_allocated.Increment(alloced);
|
return mem;
|
}
|
|
|
void MemoryAllocator::FreeRawMemory(void* mem, size_t length) {
|
- if (CodeRange::contains(static_cast<Address>(mem))) {
|
- CodeRange::FreeRawMemory(mem, length);
|
+ V8Context * const v8context = v8_context();
|
+ CodeRange* const code_range = v8context->heap_.code_range();
|
+
|
+ if (code_range->contains(static_cast<Address>(mem))) {
|
+ code_range->FreeRawMemory(mem, length);
|
} else {
|
OS::Free(mem, length);
|
}
|
- Counters::memory_allocated.Decrement(length);
|
+ v8context->counters_.memory_allocated.Decrement(length);
|
size_ -= length;
|
ASSERT(size_ >= 0);
|
}
|
@@ -445,7 +436,7 @@
|
if (!initial_chunk_->Commit(start, size, owner->executable() == EXECUTABLE)) {
|
return Page::FromAddress(NULL);
|
}
|
- Counters::memory_allocated.Increment(size);
|
+ v8_context()->counters_.memory_allocated.Increment(size);
|
|
// So long as we correctly overestimated the number of chunks we should not
|
// run out of chunk ids.
|
@@ -466,7 +457,7 @@
|
ASSERT(InInitialChunk(start + size - 1));
|
|
if (!initial_chunk_->Commit(start, size, executable)) return false;
|
- Counters::memory_allocated.Increment(size);
|
+ v8_context()->counters_.memory_allocated.Increment(size);
|
return true;
|
}
|
|
@@ -478,7 +469,7 @@
|
ASSERT(InInitialChunk(start + size - 1));
|
|
if (!initial_chunk_->Uncommit(start, size)) return false;
|
- Counters::memory_allocated.Decrement(size);
|
+ v8_context()->counters_.memory_allocated.Decrement(size);
|
return true;
|
}
|
|
@@ -558,7 +549,7 @@
|
// TODO(1240712): VirtualMemory::Uncommit has a return value which
|
// is ignored here.
|
initial_chunk_->Uncommit(c.address(), c.size());
|
- Counters::memory_allocated.Decrement(c.size());
|
+ v8_context()->counters_.memory_allocated.Decrement(c.size());
|
} else {
|
LOG(DeleteEvent("PagedChunk", c.address()));
|
FreeRawMemory(c.address(), c.size());
|
@@ -605,8 +596,8 @@
|
|
PagedSpace::PagedSpace(int max_capacity,
|
AllocationSpace id,
|
- Executability executable)
|
- : Space(id, executable) {
|
+ Executability executable, MemoryAllocator* memory_allocator)
|
+ : Space(id, executable), memory_allocator_(memory_allocator) {
|
max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
|
* Page::kObjectAreaSize;
|
accounting_stats_.Clear();
|
@@ -627,14 +618,14 @@
|
// contain at least one page, ignore it and allocate instead.
|
int pages_in_chunk = PagesInChunk(start, size);
|
if (pages_in_chunk > 0) {
|
- first_page_ = MemoryAllocator::CommitPages(RoundUp(start, Page::kPageSize),
|
+ first_page_ = memory_allocator_->CommitPages(RoundUp(start, Page::kPageSize),
|
Page::kPageSize * pages_in_chunk,
|
this, &num_pages);
|
} else {
|
int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
|
max_capacity_ / Page::kObjectAreaSize);
|
first_page_ =
|
- MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
|
+ memory_allocator_->AllocatePages(requested_pages, &num_pages, this);
|
if (!first_page_->is_valid()) return false;
|
}
|
|
@@ -665,7 +656,7 @@
|
|
|
void PagedSpace::TearDown() {
|
- first_page_ = MemoryAllocator::FreePages(first_page_);
|
+ first_page_ = memory_allocator_->FreePages(first_page_);
|
ASSERT(!first_page_->is_valid());
|
|
accounting_stats_.Clear();
|
@@ -705,7 +696,7 @@
|
Object* PagedSpace::FindObject(Address addr) {
|
// Note: this function can only be called before or after mark-compact GC
|
// because it accesses map pointers.
|
- ASSERT(!MarkCompactCollector::in_use());
|
+ ASSERT(!v8_context()->mark_compact_collector_.in_use());
|
|
if (!Contains(addr)) return Failure::Exception();
|
|
@@ -820,13 +811,13 @@
|
if (available_pages <= 0) return false;
|
|
int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
|
- Page* p = MemoryAllocator::AllocatePages(desired_pages, &desired_pages, this);
|
+ Page* p = memory_allocator_->AllocatePages(desired_pages, &desired_pages, this);
|
if (!p->is_valid()) return false;
|
|
accounting_stats_.ExpandSpace(desired_pages * Page::kObjectAreaSize);
|
ASSERT(Capacity() <= max_capacity_);
|
|
- MemoryAllocator::SetNextPage(last_page, p);
|
+ memory_allocator_->SetNextPage(last_page, p);
|
|
// Sequentially clear remembered set of new pages and and cache the
|
// new last page in the space.
|
@@ -863,8 +854,8 @@
|
}
|
|
// Free pages after top_page.
|
- Page* p = MemoryAllocator::FreePages(top_page->next_page());
|
- MemoryAllocator::SetNextPage(top_page, p);
|
+ Page* p = memory_allocator_->FreePages(top_page->next_page());
|
+ memory_allocator_->SetNextPage(top_page, p);
|
|
// Find out how many pages we failed to free and update last_page_.
|
// Please note pages can only be freed in whole chunks.
|
@@ -886,7 +877,7 @@
|
Page* last_page = AllocationTopPage();
|
Page* next_page = last_page->next_page();
|
while (next_page->is_valid()) {
|
- last_page = MemoryAllocator::FindLastPageInSameChunk(next_page);
|
+ last_page = memory_allocator_->FindLastPageInSameChunk(next_page);
|
next_page = last_page->next_page();
|
}
|
|
@@ -895,7 +886,7 @@
|
if (!Expand(last_page)) return false;
|
ASSERT(last_page->next_page()->is_valid());
|
last_page =
|
- MemoryAllocator::FindLastPageInSameChunk(last_page->next_page());
|
+ memory_allocator_->FindLastPageInSameChunk(last_page->next_page());
|
} while (Capacity() < capacity);
|
|
return true;
|
@@ -915,11 +906,12 @@
|
// space.
|
ASSERT(allocation_info_.VerifyPagedAllocation());
|
Page* top_page = Page::FromAllocationTop(allocation_info_.top);
|
- ASSERT(MemoryAllocator::IsPageInSpace(top_page, this));
|
+ ASSERT(memory_allocator_->IsPageInSpace(top_page, this));
|
|
// Loop over all the pages.
|
bool above_allocation_top = false;
|
Page* current_page = first_page_;
|
+ Heap& heap = v8_context()->heap_;
|
while (current_page->is_valid()) {
|
if (above_allocation_top) {
|
// We don't care what's above the allocation top.
|
@@ -945,7 +937,7 @@
|
// be in map space.
|
Map* map = object->map();
|
ASSERT(map->IsMap());
|
- ASSERT(Heap::map_space()->Contains(map));
|
+ ASSERT(heap.map_space()->Contains(map));
|
|
// Perform space-specific object verification.
|
VerifyObject(object);
|
@@ -977,12 +969,13 @@
|
|
|
bool NewSpace::Setup(Address start, int size) {
|
+ Heap& heap = v8_context()->heap_;
|
// Setup new space based on the preallocated memory block defined by
|
// start and size. The provided space is divided into two semi-spaces.
|
// To support fast containment testing in the new space, the size of
|
// this chunk must be a power of two and it must be aligned to its size.
|
- int initial_semispace_capacity = Heap::InitialSemiSpaceSize();
|
- int maximum_semispace_capacity = Heap::MaxSemiSpaceSize();
|
+ int initial_semispace_capacity = heap.InitialSemiSpaceSize();
|
+ int maximum_semispace_capacity = heap.MaxSemiSpaceSize();
|
|
ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
|
ASSERT(IsPowerOf2(maximum_semispace_capacity));
|
@@ -998,7 +991,7 @@
|
#undef SET_NAME
|
#endif
|
|
- ASSERT(size == 2 * Heap::ReservedSemiSpaceSize());
|
+ ASSERT(size == 2 * heap.ReservedSemiSpaceSize());
|
ASSERT(IsAddressAligned(start, size, 0));
|
|
if (!to_space_.Setup(start,
|
@@ -1085,7 +1078,7 @@
|
if (!to_space_.ShrinkTo(from_space_.Capacity())) {
|
// We are in an inconsistent state because we could not
|
// commit/uncommit memory from new space.
|
- V8::FatalProcessOutOfMemory("Failed to grow new space.");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Failed to grow new space.");
|
}
|
}
|
}
|
@@ -1106,7 +1099,7 @@
|
if (!to_space_.GrowTo(from_space_.Capacity())) {
|
// We are in an inconsistent state because we could not
|
// commit/uncommit memory from new space.
|
- V8::FatalProcessOutOfMemory("Failed to shrink new space.");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Failed to shrink new space.");
|
}
|
}
|
}
|
@@ -1148,6 +1141,7 @@
|
// There should be objects packed in from the low address up to the
|
// allocation pointer.
|
Address current = to_space_.low();
|
+ Heap& heap = v8_context()->heap_;
|
while (current < top()) {
|
HeapObject* object = HeapObject::FromAddress(current);
|
|
@@ -1155,7 +1149,7 @@
|
// be in map space.
|
Map* map = object->map();
|
ASSERT(map->IsMap());
|
- ASSERT(Heap::map_space()->Contains(map));
|
+ ASSERT(heap.map_space()->Contains(map));
|
|
// The object should not be code or a map.
|
ASSERT(!object->IsMap());
|
@@ -1180,7 +1174,7 @@
|
|
bool SemiSpace::Commit() {
|
ASSERT(!is_committed());
|
- if (!MemoryAllocator::CommitBlock(start_, capacity_, executable())) {
|
+ if (!v8_context()->heap_.memory_allocator()->CommitBlock(start_, capacity_, executable())) {
|
return false;
|
}
|
committed_ = true;
|
@@ -1190,7 +1184,7 @@
|
|
bool SemiSpace::Uncommit() {
|
ASSERT(is_committed());
|
- if (!MemoryAllocator::UncommitBlock(start_, capacity_)) {
|
+ if (!v8_context()->heap_.memory_allocator()->UncommitBlock(start_, capacity_)) {
|
return false;
|
}
|
committed_ = false;
|
@@ -1236,7 +1230,7 @@
|
int maximum_extra = maximum_capacity_ - capacity_;
|
int extra = Min(RoundUp(capacity_, OS::AllocateAlignment()),
|
maximum_extra);
|
- if (!MemoryAllocator::CommitBlock(high(), extra, executable())) {
|
+ if (!v8_context()->heap_.memory_allocator()->CommitBlock(high(), extra, executable())) {
|
return false;
|
}
|
capacity_ += extra;
|
@@ -1249,7 +1243,7 @@
|
ASSERT(new_capacity > capacity_);
|
size_t delta = new_capacity - capacity_;
|
ASSERT(IsAligned(delta, OS::AllocateAlignment()));
|
- if (!MemoryAllocator::CommitBlock(high(), delta, executable())) {
|
+ if (!v8_context()->heap_.memory_allocator()->CommitBlock(high(), delta, executable())) {
|
return false;
|
}
|
capacity_ = new_capacity;
|
@@ -1262,7 +1256,7 @@
|
ASSERT(new_capacity < capacity_);
|
size_t delta = capacity_ - new_capacity;
|
ASSERT(IsAligned(delta, OS::AllocateAlignment()));
|
- if (!MemoryAllocator::UncommitBlock(high() - delta, delta)) {
|
+ if (!v8_context()->heap_.memory_allocator()->UncommitBlock(high() - delta, delta)) {
|
return false;
|
}
|
capacity_ = new_capacity;
|
@@ -1311,8 +1305,8 @@
|
|
#ifdef DEBUG
|
// A static array of histogram info for each type.
|
-static HistogramInfo heap_histograms[LAST_TYPE+1];
|
-static JSObject::SpillInformation js_spill_information;
|
+static HistogramInfo heap_histograms[LAST_TYPE+1]; ///static
|
+static JSObject::SpillInformation js_spill_information; ///static
|
|
// heap_histograms is shared, always clear it before using it.
|
static void ClearHistograms() {
|
@@ -1329,7 +1323,7 @@
|
}
|
|
|
-static int code_kind_statistics[Code::NUMBER_OF_KINDS];
|
+static int code_kind_statistics[Code::NUMBER_OF_KINDS]; ///static
|
|
|
static void ClearCodeKindStatistics() {
|
@@ -1517,6 +1511,7 @@
|
void FreeListNode::set_size(int size_in_bytes) {
|
ASSERT(size_in_bytes > 0);
|
ASSERT(IsAligned(size_in_bytes, kPointerSize));
|
+ Heap& heap = v8_context()->heap_;
|
|
// We write a map and possibly size information to the block. If the block
|
// is big enough to be a ByteArray with at least one extra word (the next
|
@@ -1526,14 +1521,14 @@
|
// field and a next pointer, we give it a filler map that gives it the
|
// correct size.
|
if (size_in_bytes > ByteArray::kAlignedSize) {
|
- set_map(Heap::raw_unchecked_byte_array_map());
|
+ set_map(heap.raw_unchecked_byte_array_map());
|
// Can't use ByteArray::cast because it fails during deserialization.
|
ByteArray* this_as_byte_array = reinterpret_cast<ByteArray*>(this);
|
this_as_byte_array->set_length(ByteArray::LengthFor(size_in_bytes));
|
} else if (size_in_bytes == kPointerSize) {
|
- set_map(Heap::raw_unchecked_one_pointer_filler_map());
|
+ set_map(heap.raw_unchecked_one_pointer_filler_map());
|
} else if (size_in_bytes == 2 * kPointerSize) {
|
- set_map(Heap::raw_unchecked_two_pointer_filler_map());
|
+ set_map(heap.raw_unchecked_two_pointer_filler_map());
|
} else {
|
UNREACHABLE();
|
}
|
@@ -1544,7 +1539,7 @@
|
|
Address FreeListNode::next() {
|
ASSERT(IsFreeListNode(this));
|
- if (map() == Heap::raw_unchecked_byte_array_map()) {
|
+ if (map() == v8_context()->heap_.raw_unchecked_byte_array_map()) {
|
ASSERT(Size() >= kNextOffset + kPointerSize);
|
return Memory::Address_at(address() + kNextOffset);
|
} else {
|
@@ -1555,7 +1550,7 @@
|
|
void FreeListNode::set_next(Address next) {
|
ASSERT(IsFreeListNode(this));
|
- if (map() == Heap::raw_unchecked_byte_array_map()) {
|
+ if (map() == v8_context()->heap_.raw_unchecked_byte_array_map()) {
|
ASSERT(Size() >= kNextOffset + kPointerSize);
|
Memory::Address_at(address() + kNextOffset) = next;
|
} else {
|
@@ -1831,9 +1826,10 @@
|
return AllocateInNextPage(current_page, size_in_bytes);
|
}
|
|
+ Heap& heap = v8_context()->heap_;
|
// There is no next page in this space. Try free list allocation unless that
|
// is currently forbidden.
|
- if (!Heap::linear_allocation()) {
|
+ if (!heap.linear_allocation()) {
|
int wasted_bytes;
|
Object* result = free_list_.Allocate(size_in_bytes, &wasted_bytes);
|
accounting_stats_.WasteBytes(wasted_bytes);
|
@@ -1846,7 +1842,7 @@
|
// Free list allocation failed and there is no next page. Fail if we have
|
// hit the old generation size limit that should cause a garbage
|
// collection.
|
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
|
+ if (!heap.always_allocate() && heap.OldGenerationAllocationLimitReached()) {
|
return NULL;
|
}
|
|
@@ -1893,7 +1889,7 @@
|
|
// must be small, since an iteration is used for lookup
|
const int kMaxComments = 64;
|
-static CommentStatistic comments_statistics[kMaxComments+1];
|
+static CommentStatistic comments_statistics[kMaxComments+1]; ///static
|
|
|
void PagedSpace::ReportCodeStatistics() {
|
@@ -2023,6 +2019,7 @@
|
int rset_marked_array_elements = 0;
|
int cross_gen_pointers = 0;
|
int cross_gen_array_elements = 0;
|
+ Heap& heap = v8_context()->heap_;
|
|
PageIterator page_it(this, PageIterator::PAGES_IN_USE);
|
while (page_it.has_next()) {
|
@@ -2041,7 +2038,7 @@
|
int bitpos = intoff*kBitsPerByte + bitoff;
|
Address slot = p->OffsetToAddress(bitpos << kObjectAlignmentBits);
|
Object** obj = reinterpret_cast<Object**>(slot);
|
- if (*obj == Heap::raw_unchecked_fixed_array_map()) {
|
+ if (*obj == heap.raw_unchecked_fixed_array_map()) {
|
rset_marked_arrays++;
|
FixedArray* fa = FixedArray::cast(HeapObject::FromAddress(slot));
|
|
@@ -2053,12 +2050,12 @@
|
elm_addr < elm_stop; elm_addr += kPointerSize) {
|
// Filter non-heap-object pointers
|
Object** elm_p = reinterpret_cast<Object**>(elm_addr);
|
- if (Heap::InNewSpace(*elm_p))
|
+ if (heap.InNewSpace(*elm_p))
|
cross_gen_array_elements++;
|
}
|
} else {
|
rset_marked_pointers++;
|
- if (Heap::InNewSpace(*obj))
|
+ if (heap.InNewSpace(*obj))
|
cross_gen_pointers++;
|
}
|
}
|
@@ -2108,6 +2105,7 @@
|
PrintF(" ");
|
}
|
|
+ Heap& heap = v8_context()->heap_;
|
// Loop over all the words in the range.
|
while (rset_address < end) {
|
uint32_t rset_word = Memory::uint32_at(rset_address);
|
@@ -2124,7 +2122,7 @@
|
} else if ((rset_word & (1 << bit_position)) == 0) {
|
// Print a dot for zero bits.
|
PrintF(".");
|
- } else if (Heap::InNewSpace(*object_p)) {
|
+ } else if (heap.InNewSpace(*object_p)) {
|
// Print an X for one bits for pointers to new space.
|
PrintF("X");
|
} else {
|
@@ -2236,10 +2234,11 @@
|
return AllocateInNextPage(current_page, size_in_bytes);
|
}
|
|
+ Heap& heap = v8_context()->heap_;
|
// There is no next page in this space. Try free list allocation unless
|
// that is currently forbidden. The fixed space free list implicitly assumes
|
// that all free blocks are of the fixed size.
|
- if (!Heap::linear_allocation()) {
|
+ if (!heap.linear_allocation()) {
|
Object* result = free_list_.Allocate();
|
if (!result->IsFailure()) {
|
accounting_stats_.AllocateBytes(size_in_bytes);
|
@@ -2250,7 +2249,7 @@
|
// Free list allocation failed and there is no next page. Fail if we have
|
// hit the old generation size limit that should cause a garbage
|
// collection.
|
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
|
+ if (!heap.always_allocate() && heap.OldGenerationAllocationLimitReached()) {
|
return NULL;
|
}
|
|
@@ -2289,6 +2288,7 @@
|
int rset_marked_pointers = 0;
|
int cross_gen_pointers = 0;
|
|
+ Heap& heap = v8_context()->heap_;
|
PageIterator page_it(this, PageIterator::PAGES_IN_USE);
|
while (page_it.has_next()) {
|
Page* p = page_it.next();
|
@@ -2307,7 +2307,7 @@
|
Address slot = p->OffsetToAddress(bitpos << kObjectAlignmentBits);
|
Object** obj = reinterpret_cast<Object**>(slot);
|
rset_marked_pointers++;
|
- if (Heap::InNewSpace(*obj))
|
+ if (heap.InNewSpace(*obj))
|
cross_gen_pointers++;
|
}
|
}
|
@@ -2369,7 +2369,7 @@
|
void CellSpace::VerifyObject(HeapObject* object) {
|
// The object should be a global object property cell or a free-list node.
|
ASSERT(object->IsJSGlobalPropertyCell() ||
|
- object->map() == Heap::two_pointer_filler_map());
|
+ object->map() == v8_context()->heap_.two_pointer_filler_map());
|
}
|
#endif
|
|
@@ -2405,13 +2405,14 @@
|
size_t* chunk_size,
|
Executability executable) {
|
size_t requested = ChunkSizeFor(size_in_bytes);
|
- void* mem = MemoryAllocator::AllocateRawMemory(requested,
|
+ MemoryAllocator* const memory_allocator = v8_context()->heap_.memory_allocator();
|
+ void* mem = memory_allocator->AllocateRawMemory(requested,
|
chunk_size,
|
executable);
|
if (mem == NULL) return NULL;
|
LOG(NewEvent("LargeObjectChunk", mem, *chunk_size));
|
if (*chunk_size < requested) {
|
- MemoryAllocator::FreeRawMemory(mem, *chunk_size);
|
+ memory_allocator->FreeRawMemory(mem, *chunk_size);
|
LOG(DeleteEvent("LargeObjectChunk", mem));
|
return NULL;
|
}
|
@@ -2429,11 +2430,13 @@
|
// -----------------------------------------------------------------------------
|
// LargeObjectSpace
|
|
-LargeObjectSpace::LargeObjectSpace(AllocationSpace id)
|
+LargeObjectSpace::LargeObjectSpace(AllocationSpace id, MemoryAllocator* memory_allocator)
|
: Space(id, NOT_EXECUTABLE), // Managed on a per-allocation basis
|
first_chunk_(NULL),
|
size_(0),
|
- page_count_(0) {}
|
+ page_count_(0),
|
+ memory_allocator_(memory_allocator)
|
+ {}
|
|
|
bool LargeObjectSpace::Setup() {
|
@@ -2449,7 +2452,7 @@
|
LargeObjectChunk* chunk = first_chunk_;
|
first_chunk_ = first_chunk_->next();
|
LOG(DeleteEvent("LargeObjectChunk", chunk->address()));
|
- MemoryAllocator::FreeRawMemory(chunk->address(), chunk->size());
|
+ memory_allocator_->FreeRawMemory(chunk->address(), chunk->size());
|
}
|
|
size_ = 0;
|
@@ -2462,7 +2465,7 @@
|
void LargeObjectSpace::Protect() {
|
LargeObjectChunk* chunk = first_chunk_;
|
while (chunk != NULL) {
|
- MemoryAllocator::Protect(chunk->address(), chunk->size());
|
+ memory_allocator_->Protect(chunk->address(), chunk->size());
|
chunk = chunk->next();
|
}
|
}
|
@@ -2472,7 +2475,7 @@
|
LargeObjectChunk* chunk = first_chunk_;
|
while (chunk != NULL) {
|
bool is_code = chunk->GetObject()->IsCode();
|
- MemoryAllocator::Unprotect(chunk->address(), chunk->size(),
|
+ memory_allocator_->Unprotect(chunk->address(), chunk->size(),
|
is_code ? EXECUTABLE : NOT_EXECUTABLE);
|
chunk = chunk->next();
|
}
|
@@ -2485,10 +2488,11 @@
|
int object_size,
|
Executability executable) {
|
ASSERT(0 < object_size && object_size <= requested_size);
|
+ Heap& heap = v8_context()->heap_;
|
|
// Check if we want to force a GC before growing the old space further.
|
// If so, fail the allocation.
|
- if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) {
|
+ if (!heap.always_allocate() && heap.OldGenerationAllocationLimitReached()) {
|
return Failure::RetryAfterGC(requested_size, identity());
|
}
|
|
@@ -2588,7 +2592,7 @@
|
|
void LargeObjectSpace::IterateRSet(ObjectSlotCallback copy_object_func) {
|
ASSERT(Page::is_rset_in_use());
|
-
|
+ ///static
|
static void* lo_rset_histogram = StatsTable::CreateHistogram(
|
"V8.RSetLO",
|
0,
|
@@ -2597,6 +2601,7 @@
|
30);
|
|
LargeObjectIterator it(this);
|
+ Heap& heap = v8_context()->heap_;
|
while (it.has_next()) {
|
// We only have code, sequential strings, or fixed arrays in large
|
// object space, and only fixed arrays can possibly contain pointers to
|
@@ -2606,14 +2611,14 @@
|
// Iterate the normal page remembered set range.
|
Page* page = Page::FromAddress(object->address());
|
Address object_end = object->address() + object->Size();
|
- int count = Heap::IterateRSetRange(page->ObjectAreaStart(),
|
+ int count = heap.IterateRSetRange(page->ObjectAreaStart(),
|
Min(page->ObjectAreaEnd(), object_end),
|
page->RSetStart(),
|
copy_object_func);
|
|
// Iterate the extra array elements.
|
if (object_end > page->ObjectAreaEnd()) {
|
- count += Heap::IterateRSetRange(page->ObjectAreaEnd(), object_end,
|
+ count += heap.IterateRSetRange(page->ObjectAreaEnd(), object_end,
|
object_end, copy_object_func);
|
}
|
if (lo_rset_histogram != NULL) {
|
@@ -2631,7 +2636,7 @@
|
HeapObject* object = current->GetObject();
|
if (object->IsMarked()) {
|
object->ClearMark();
|
- MarkCompactCollector::tracer()->decrement_marked_count();
|
+ v8_context()->mark_compact_collector_.tracer()->decrement_marked_count();
|
previous = current;
|
current = current->next();
|
} else {
|
@@ -2652,7 +2657,7 @@
|
}
|
size_ -= chunk_size;
|
page_count_--;
|
- MemoryAllocator::FreeRawMemory(chunk_address, chunk_size);
|
+ memory_allocator_->FreeRawMemory(chunk_address, chunk_size);
|
LOG(DeleteEvent("LargeObjectChunk", chunk_address));
|
}
|
}
|
@@ -2674,6 +2679,7 @@
|
// We do not assume that the large object iterator works, because it depends
|
// on the invariants we are checking during verification.
|
void LargeObjectSpace::Verify() {
|
+ Heap& heap = v8_context()->heap_;
|
for (LargeObjectChunk* chunk = first_chunk_;
|
chunk != NULL;
|
chunk = chunk->next()) {
|
@@ -2687,7 +2693,7 @@
|
// in map space.
|
Map* map = object->map();
|
ASSERT(map->IsMap());
|
- ASSERT(Heap::map_space()->Contains(map));
|
+ ASSERT(heap.map_space()->Contains(map));
|
|
// We have only code, sequential strings, external strings
|
// (sequential strings that have been morphed into external
|
@@ -2714,9 +2720,9 @@
|
Object* element = array->get(j);
|
if (element->IsHeapObject()) {
|
HeapObject* element_object = HeapObject::cast(element);
|
- ASSERT(Heap::Contains(element_object));
|
+ ASSERT(heap.Contains(element_object));
|
ASSERT(element_object->map()->IsMap());
|
- if (Heap::InNewSpace(element_object)) {
|
+ if (heap.InNewSpace(element_object)) {
|
ASSERT(Page::IsRSetSet(object->address(),
|
FixedArray::kHeaderSize + j * kPointerSize));
|
}
|
Index: src/handles.cc
|
===================================================================
|
--- src/handles.cc (revision 3218)
|
+++ src/handles.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -43,19 +43,21 @@
|
namespace internal {
|
|
|
-v8::ImplementationUtilities::HandleScopeData HandleScope::current_ =
|
- { -1, NULL, NULL };
|
+HandleScopeImplementer::HandleScopeImplementer():
|
+ blocks_(0), entered_contexts_(0), saved_contexts_(0), spare_(NULL), ignore_out_of_memory_(false), call_depth_(0) {
|
+ v8::ImplementationUtilities::HandleScopeData current = { -1, NULL, NULL };
|
+ current_ = current;
|
+}
|
|
-
|
-int HandleScope::NumberOfHandles() {
|
- int n = HandleScopeImplementer::instance()->blocks()->length();
|
+int HandleScopeImplementer::NumberOfHandles() {
|
+ int n = v8_context()->handle_scope_implementer_.blocks()->length();
|
if (n == 0) return 0;
|
return ((n - 1) * kHandleBlockSize) +
|
- (current_.next - HandleScopeImplementer::instance()->blocks()->last());
|
+ (current_.next - v8_context()->handle_scope_implementer_.blocks()->last());
|
}
|
|
|
-Object** HandleScope::Extend() {
|
+Object** HandleScopeImplementer::Extend() {
|
Object** result = current_.next;
|
|
ASSERT(result == current_.limit);
|
@@ -66,11 +68,11 @@
|
"Cannot create a handle without a HandleScope");
|
return NULL;
|
}
|
- HandleScopeImplementer* impl = HandleScopeImplementer::instance();
|
+ HandleScopeImplementer& impl = v8_context()->handle_scope_implementer_;
|
// If there's more room in the last block, we use that. This is used
|
// for fast creation of scopes after scope barriers.
|
- if (!impl->blocks()->is_empty()) {
|
- Object** limit = &impl->blocks()->last()[kHandleBlockSize];
|
+ if (!impl.blocks()->is_empty()) {
|
+ Object** limit = &impl.blocks()->last()[kHandleBlockSize];
|
if (current_.limit != limit) {
|
current_.limit = limit;
|
}
|
@@ -80,10 +82,10 @@
|
// current handle scope by allocating a new handle block.
|
if (result == current_.limit) {
|
// If there's a spare block, use it for growing the current scope.
|
- result = impl->GetSpareOrNewBlock();
|
+ result = impl.GetSpareOrNewBlock();
|
// Add the extension to the global list of blocks, but count the
|
// extension as part of the current scope.
|
- impl->blocks()->Add(result);
|
+ impl.blocks()->Add(result);
|
current_.extensions++;
|
current_.limit = &result[kHandleBlockSize];
|
}
|
@@ -92,13 +94,13 @@
|
}
|
|
|
-void HandleScope::DeleteExtensions() {
|
+void HandleScopeImplementer::DeleteExtensions() {
|
ASSERT(current_.extensions != 0);
|
- HandleScopeImplementer::instance()->DeleteExtensions(current_.extensions);
|
+ v8_context()->handle_scope_implementer_.DeleteExtensions(current_.extensions);
|
}
|
|
|
-void HandleScope::ZapRange(Object** start, Object** end) {
|
+void HandleScopeImplementer::ZapRange(Object** start, Object** end) {
|
if (start == NULL) return;
|
for (Object** p = start; p < end; p++) {
|
*reinterpret_cast<Address*>(p) = v8::internal::kHandleZapValue;
|
@@ -106,17 +108,17 @@
|
}
|
|
|
-Address HandleScope::current_extensions_address() {
|
+Address HandleScopeImplementer::current_extensions_address() {
|
  return reinterpret_cast<Address>(&current_.extensions);
|
}
|
|
|
-Address HandleScope::current_next_address() {
|
+Address HandleScopeImplementer::current_next_address() {
|
  return reinterpret_cast<Address>(&current_.next);
|
}
|
|
|
-Address HandleScope::current_limit_address() {
|
+Address HandleScopeImplementer::current_limit_address() {
|
  return reinterpret_cast<Address>(&current_.limit);
|
}
|
|
@@ -136,7 +138,7 @@
|
Handle<JSGlobalProxy> ReinitializeJSGlobalProxy(
|
Handle<JSFunction> constructor,
|
Handle<JSGlobalProxy> global) {
|
- CALL_HEAP_FUNCTION(Heap::ReinitializeJSGlobalProxy(*constructor, *global),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.ReinitializeJSGlobalProxy(*constructor, *global),
|
JSGlobalProxy);
|
}
|
|
@@ -324,7 +326,7 @@
|
// Hidden properties object not found. Allocate a new hidden properties
|
// object if requested. Otherwise return the undefined value.
|
if (create_if_needed) {
|
- Handle<Object> hidden_obj = Factory::NewJSObject(Top::object_function());
|
+ Handle<Object> hidden_obj = Factory::NewJSObject(v8_context()->top_.object_function());
|
return SetProperty(obj, key, hidden_obj, DONT_ENUM);
|
} else {
|
return Factory::undefined_value();
|
@@ -349,7 +351,7 @@
|
|
|
Handle<Object> LookupSingleCharacterStringFromCode(uint32_t index) {
|
- CALL_HEAP_FUNCTION(Heap::LookupSingleCharacterStringFromCode(index), Object);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.LookupSingleCharacterStringFromCode(index), Object);
|
}
|
|
|
@@ -374,7 +376,7 @@
|
|
|
Handle<JSObject> Copy(Handle<JSObject> obj) {
|
- CALL_HEAP_FUNCTION(Heap::CopyJSObject(*obj), JSObject);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.CopyJSObject(*obj), JSObject);
|
}
|
|
|
@@ -395,8 +397,8 @@
|
Proxy* proxy = Script::cast(wrapper->value())->wrapper();
|
ASSERT(proxy->proxy() == reinterpret_cast<Address>(cache.location()));
|
proxy->set_proxy(0);
|
- GlobalHandles::Destroy(cache.location());
|
- Counters::script_wrappers.Decrement();
|
+ v8_context()->global_handles_.Destroy(cache.location());
|
+ v8_context()->counters_.script_wrappers.Decrement();
|
}
|
|
|
@@ -408,8 +410,8 @@
|
}
|
|
// Construct a new script wrapper.
|
- Counters::script_wrappers.Increment();
|
- Handle<JSFunction> constructor = Top::script_function();
|
+ v8_context()->counters_.script_wrappers.Increment();
|
+ Handle<JSFunction> constructor = v8_context()->top_.script_function();
|
Handle<JSValue> result =
|
Handle<JSValue>::cast(Factory::NewJSObject(constructor));
|
result->set_value(*script);
|
@@ -417,8 +419,9 @@
|
// Create a new weak global handle and use it to cache the wrapper
|
// for future use. The cache will automatically be cleared by the
|
// garbage collector when it is not used anymore.
|
- Handle<Object> handle = GlobalHandles::Create(*result);
|
- GlobalHandles::MakeWeak(handle.location(), NULL, &ClearWrapperCache);
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ Handle<Object> handle = global_handles.Create(*result);
|
+ global_handles.MakeWeak(handle.location(), NULL, &ClearWrapperCache);
|
script->wrapper()->set_proxy(reinterpret_cast<Address>(handle.location()));
|
return result;
|
}
|
@@ -546,18 +549,20 @@
|
Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
|
KeyCollectionType type) {
|
Handle<FixedArray> content = Factory::empty_fixed_array();
|
+ Heap& heap = v8_context()->heap_;
|
+ Top& top = v8_context()->top_;
|
|
// Only collect keys if access is permitted.
|
for (Handle<Object> p = object;
|
- *p != Heap::null_value();
|
+ *p != heap.null_value();
|
p = Handle<Object>(p->GetPrototype())) {
|
Handle<JSObject> current(JSObject::cast(*p));
|
|
// Check access rights if required.
|
if (current->IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(*current, Heap::undefined_value(),
|
+ !top.MayNamedAccess(*current, heap.undefined_value(),
|
v8::ACCESS_KEYS)) {
|
- Top::ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
|
+ top.ReportFailedAccessCheck(*current, v8::ACCESS_KEYS);
|
break;
|
}
|
|
@@ -596,7 +601,7 @@
|
|
|
Handle<JSArray> GetKeysFor(Handle<JSObject> object) {
|
- Counters::for_in.Increment();
|
+ v8_context()->counters_.for_in.Increment();
|
Handle<FixedArray> elements = GetKeysInFixedArrayFor(object,
|
INCLUDE_PROTOS);
|
return Factory::NewJSArrayWithElements(elements);
|
@@ -607,11 +612,11 @@
|
int index = 0;
|
if (object->HasFastProperties()) {
|
if (object->map()->instance_descriptors()->HasEnumCache()) {
|
- Counters::enum_cache_hits.Increment();
|
+ v8_context()->counters_.enum_cache_hits.Increment();
|
DescriptorArray* desc = object->map()->instance_descriptors();
|
return Handle<FixedArray>(FixedArray::cast(desc->GetEnumCache()));
|
}
|
- Counters::enum_cache_misses.Increment();
|
+ v8_context()->counters_.enum_cache_misses.Increment();
|
int num_enum = object->NumberOfEnumProperties();
|
Handle<FixedArray> storage = Factory::NewFixedArray(num_enum);
|
Handle<FixedArray> sort_array = Factory::NewFixedArray(num_enum);
|
@@ -647,9 +652,10 @@
|
int loop_nesting) {
|
// Compile the source information to a code object.
|
ASSERT(!shared->is_compiled());
|
- bool result = Compiler::CompileLazy(shared, loop_nesting);
|
- ASSERT(result != Top::has_pending_exception());
|
- if (!result && flag == CLEAR_EXCEPTION) Top::clear_pending_exception();
|
+ V8Context * const v8context = v8_context();
|
+ bool result = v8context->compiler_.CompileLazy(shared, loop_nesting);
|
+ ASSERT(result != v8context->top_.has_pending_exception());
|
+ if (!result && flag == CLEAR_EXCEPTION) v8context->top_.clear_pending_exception();
|
return result;
|
}
|
|
@@ -689,7 +695,7 @@
|
|
|
Handle<Code> ComputeLazyCompile(int argc) {
|
- CALL_HEAP_FUNCTION(StubCache::ComputeLazyCompile(argc), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeLazyCompile(argc), Code);
|
}
|
|
|
@@ -715,12 +721,13 @@
|
|
Handle<JSFunction> boilerplate;
|
|
- if (!Bootstrapper::NativesCacheLookup(name, &boilerplate)) {
|
- Handle<String> source_code = Bootstrapper::NativesSourceLookup(index);
|
+ V8Context * const v8context = v8_context();
|
+ if (!v8context->bootstrapper_.NativesCacheLookup(name, &boilerplate)) {
|
+ Handle<String> source_code = v8context->bootstrapper_.NativesSourceLookup(index);
|
Handle<String> script_name = Factory::NewStringFromAscii(name);
|
bool allow_natives_syntax = FLAG_allow_natives_syntax;
|
FLAG_allow_natives_syntax = true;
|
- boilerplate = Compiler::Compile(source_code, script_name, 0, 0, NULL, NULL);
|
+ boilerplate = v8context->compiler_.Compile(source_code, script_name, 0, 0, NULL, NULL);
|
FLAG_allow_natives_syntax = allow_natives_syntax;
|
// If the compilation failed (possibly due to stack overflows), we
|
// should never enter the result in the natives cache. Instead we
|
@@ -730,7 +737,7 @@
|
*pending_exception = true;
|
return;
|
}
|
- Bootstrapper::NativesCacheAdd(name, boilerplate);
|
+ v8context->bootstrapper_.NativesCacheAdd(name, boilerplate);
|
}
|
|
// We shouldn't get here if compiling the script failed.
|
@@ -741,9 +748,11 @@
|
// functions loading can be triggered. In that case ensure that the
|
// execution of the boilerplate is in the correct context.
|
SaveContext save;
|
- if (!Debug::debug_context().is_null() &&
|
- Top::context() == *Debug::debug_context()) {
|
- Top::set_context(*compile_context);
|
+ Top& top = v8_context()-> top_;
|
+ Debug& debug = v8_context()-> debug_;
|
+ if (!debug.debug_context().is_null() &&
|
+ top.context() == *debug.debug_context()) {
|
+ top.set_context(*compile_context);
|
}
|
#endif
|
|
Index: src/contexts.h
|
===================================================================
|
--- src/contexts.h (revision 2038)
|
+++ src/contexts.h Sat Nov 14 01:42:55 MSK 2009
|
@@ -262,12 +262,12 @@
|
|
// Tells whether the global context is marked with out of memory.
|
bool has_out_of_memory() {
|
- return global_context()->out_of_memory() == Heap::true_value();
|
+ return global_context()->out_of_memory() == v8_context()->heap_.true_value();
|
}
|
|
// Mark the global context with out of memory.
|
void mark_out_of_memory() {
|
- global_context()->set_out_of_memory(Heap::true_value());
|
+ global_context()->set_out_of_memory(v8_context()->heap_.true_value());
|
}
|
|
// The exception holder is the object used as a with object in
|
Index: src/runtime.h
|
===================================================================
|
--- src/runtime.h (revision 3209)
|
+++ src/runtime.h Sat Nov 14 01:43:14 MSK 2009
|
@@ -408,6 +408,9 @@
|
|
// Helper functions used stubs.
|
static void PerformGC(Object* result);
|
+
|
+ static void Setup();
|
+ static void TearDown();
|
};
|
|
|
Index: src/ia32/fast-codegen-ia32.cc
|
===================================================================
|
--- src/ia32/fast-codegen-ia32.cc (revision 3234)
|
+++ src/ia32/fast-codegen-ia32.cc Sat Nov 14 01:43:08 MSK 2009
|
@@ -393,7 +393,7 @@
|
|
// Build the function boilerplate and instantiate it.
|
Handle<JSFunction> boilerplate =
|
- Compiler::BuildBoilerplate(expr, script_, this);
|
+ v8_context()->compiler_.BuildBoilerplate(expr, script_, this);
|
if (HasStackOverflow()) return;
|
|
ASSERT(boilerplate->IsBoilerplate());
|
@@ -415,7 +415,7 @@
|
// object on the stack.
|
__ push(CodeGenerator::GlobalObject());
|
__ mov(ecx, expr->name());
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
|
// By emitting a nop we make sure that we do not have a test eax
|
// instruction after the call it is treated specially by the LoadIC code
|
@@ -514,7 +514,7 @@
|
ASSERT_EQ(Expression::kValue, value->context());
|
__ pop(eax);
|
__ mov(ecx, Immediate(key->handle()));
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// StoreIC leaves the receiver on the stack.
|
break;
|
@@ -687,7 +687,7 @@
|
__ pop(eax);
|
__ mov(ecx, var->name());
|
__ push(CodeGenerator::GlobalObject());
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// Overwrite the receiver on the stack with the result if needed.
|
DropAndMove(expr->context(), eax);
|
@@ -752,7 +752,7 @@
|
|
__ pop(eax);
|
__ mov(ecx, prop->key()->AsLiteral()->handle());
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
|
// If the assignment ends an initialization block, revert to fast case.
|
@@ -780,7 +780,7 @@
|
}
|
|
__ pop(eax);
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// This nop signals to the IC that there is no inlined code at the call
|
// site for it to patch.
|
@@ -817,7 +817,7 @@
|
// Do a NAMED property load.
|
// The IC expects the property name in ecx and the receiver on the stack.
|
__ mov(ecx, Immediate(key->AsLiteral()->handle()));
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// By emitting a nop we make sure that we do not have a test eax
|
// instruction after the call it is treated specially by the LoadIC code.
|
@@ -825,7 +825,7 @@
|
} else {
|
// Do a KEYED property load.
|
Visit(expr->key());
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// By emitting a nop we make sure that we do not have a "test eax,..."
|
// instruction after the call it is treated specially by the LoadIC code.
|
@@ -910,7 +910,7 @@
|
Visit(prop->key());
|
// Record source code position for IC call.
|
SetSourcePosition(prop->position());
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// By emitting a nop we make sure that we do not have a "test eax,..."
|
// instruction after the call it is treated specially by the LoadIC code.
|
@@ -935,7 +935,7 @@
|
// also use the fast code generator.
|
FunctionLiteral* lit = fun->AsFunctionLiteral();
|
if (lit != NULL &&
|
- lit->name()->Equals(Heap::empty_string()) &&
|
+ lit->name()->Equals(v8_context()->heap_.empty_string()) &&
|
loop_depth() == 0) {
|
lit->set_try_fast_codegen(true);
|
}
|
@@ -980,7 +980,7 @@
|
// Function is in esp[arg_count + 1].
|
__ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
|
|
- Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
|
+ Handle<Code> construct_builtin(v8_context()->builtins_.builtin(Builtins::JSConstructCall));
|
__ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
|
|
// Replace function on TOS with result in eax, or pop it.
|
@@ -1107,7 +1107,7 @@
|
Comment cmnt(masm_, "Global variable");
|
__ push(CodeGenerator::GlobalObject());
|
__ mov(ecx, Immediate(proxy->name()));
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize));
|
// Use a regular load, not a contextual load, to avoid a reference
|
// error.
|
__ call(ic, RelocInfo::CODE_TARGET);
|
@@ -1169,7 +1169,7 @@
|
// Call Store IC.
|
__ mov(ecx, proxy->AsVariable()->name());
|
__ push(CodeGenerator::GlobalObject());
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// Restore up stack after store IC.
|
__ add(Operand(esp), Immediate(kPointerSize));
|
Index: src/jump-target.cc
|
===================================================================
|
--- src/jump-target.cc (revision 2855)
|
+++ src/jump-target.cc Sat Nov 14 01:43:00 MSK 2009
|
@@ -37,9 +37,7 @@
|
// -------------------------------------------------------------------------
|
// JumpTarget implementation.
|
|
-bool JumpTarget::compiling_deferred_code_ = false;
|
|
-
|
void JumpTarget::Unuse() {
|
reaching_frames_.Clear();
|
merge_labels_.Clear();
|
@@ -53,9 +51,10 @@
|
// the directionality of the block. Compute: an entry frame for the
|
// block.
|
|
- Counters::compute_entry_frame.Increment();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->counters_.compute_entry_frame.Increment();
|
#ifdef DEBUG
|
- if (compiling_deferred_code_) {
|
+ if (v8context->code_generator_data_.compiling_deferred_code_) {
|
ASSERT(reaching_frames_.length() > 1);
|
VirtualFrame* frame = reaching_frames_[0];
|
bool all_identical = true;
|
Index: src/ia32/regexp-macro-assembler-ia32.cc
|
===================================================================
|
--- src/ia32/regexp-macro-assembler-ia32.cc (revision 3229)
|
+++ src/ia32/regexp-macro-assembler-ia32.cc Sat Nov 14 01:43:16 MSK 2009
|
@@ -934,8 +934,8 @@
|
int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
|
Code* re_code,
|
Address re_frame) {
|
- if (StackGuard::IsStackOverflow()) {
|
- Top::StackOverflow();
|
+ if (v8_context()->stack_guard_.IsStackOverflow()) {
|
+ v8_context()->top_.StackOverflow();
|
return EXCEPTION;
|
}
|
|
Index: src/mksnapshot.cc
|
===================================================================
|
--- src/mksnapshot.cc (revision 3208)
|
+++ src/mksnapshot.cc Sat Nov 14 01:42:56 MSK 2009
|
@@ -86,13 +86,13 @@
|
|
// We statically allocate a set of local counters to be used if we
|
// don't want to store the stats in a memory-mapped file
|
-static CounterCollection local_counters;
|
-static CounterCollection* counters = &local_counters;
|
+static CounterCollection local_counters; ///static
|
+static CounterCollection* counters = &local_counters; ///static
|
|
|
typedef std::map<std::string, int*> CounterMap;
|
typedef std::map<std::string, int*>::iterator CounterMapIterator;
|
-static CounterMap counter_table_;
|
+static CounterMap counter_table_; ///static
|
|
// Callback receiver when v8 has a counter to track.
|
static int* counter_callback(const char* name) {
|
@@ -185,10 +185,11 @@
|
int main2(int argc, char** argv) {
|
i::Serializer::Enable();
|
Persistent<Context> context = v8::Context::New();
|
+ V8Context * const v8context = v8_context();
|
// Make sure all builtin scripts are cached.
|
{ HandleScope scope;
|
for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
|
- i::Bootstrapper::NativesSourceLookup(i);
|
+ v8context->bootstrapper_.NativesSourceLookup(i);
|
}
|
}
|
context.Dispose();
|
@@ -196,7 +197,7 @@
|
i::Serializer2 ser(&sink);
|
// This results in a somewhat smaller snapshot, probably because it gets rid
|
// of some things that are cached between garbage collections.
|
- i::Heap::CollectAllGarbage(true);
|
+ v8context->heap_.CollectAllGarbage(true);
|
ser.Serialize();
|
return 0;
|
}
|
@@ -230,14 +231,15 @@
|
i::Serializer::Enable();
|
v8::Context::New(&extensions);
|
|
+ V8Context * const v8context = v8_context();
|
// Make sure all builtin scripts are cached.
|
{ HandleScope scope;
|
for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
|
- i::Bootstrapper::NativesSourceLookup(i);
|
+ v8context->bootstrapper_.NativesSourceLookup(i);
|
}
|
}
|
// Get rid of unreferenced scripts with a global GC.
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
i::Serializer ser;
|
ser.Serialize();
|
v8::internal::byte* bytes;
|
Index: src/debug.cc
|
===================================================================
|
--- src/debug.cc (revision 3077)
|
+++ src/debug.cc Sat Nov 14 01:43:11 MSK 2009
|
@@ -52,7 +52,7 @@
|
v8::Local<v8::String> s = value->ToString();
|
char* data = NewArray<char>(s->Length() + 1);
|
if (data == NULL) {
|
- V8::FatalProcessOutOfMemory("PrintLn");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("PrintLn");
|
return;
|
}
|
s->WriteAscii(data);
|
@@ -62,12 +62,12 @@
|
|
|
static Handle<Code> ComputeCallDebugBreak(int argc) {
|
- CALL_HEAP_FUNCTION(StubCache::ComputeCallDebugBreak(argc), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeCallDebugBreak(argc), Code);
|
}
|
|
|
static Handle<Code> ComputeCallDebugPrepareStepIn(int argc) {
|
- CALL_HEAP_FUNCTION(StubCache::ComputeCallDebugPrepareStepIn(argc), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeCallDebugPrepareStepIn(argc), Code);
|
}
|
|
|
@@ -95,6 +95,7 @@
|
void BreakLocationIterator::Next() {
|
AssertNoAllocation nogc;
|
ASSERT(!RinfoDone());
|
+ Debug& debug = v8_context()->debug_;
|
|
// Iterate through reloc info for code and original code stopping at each
|
// breakable code target.
|
@@ -134,13 +135,13 @@
|
return;
|
}
|
if (type_ == ALL_BREAK_LOCATIONS) {
|
- if (Debug::IsBreakStub(code)) {
|
+ if (debug.IsBreakStub(code)) {
|
break_point_++;
|
return;
|
}
|
} else {
|
ASSERT(type_ == SOURCE_BREAK_LOCATIONS);
|
- if (Debug::IsSourceBreakStub(code)) {
|
+ if (debug.IsSourceBreakStub(code)) {
|
break_point_++;
|
return;
|
}
|
@@ -404,7 +405,7 @@
|
if (RelocInfo::IsJSReturn(rmode())) {
|
return IsDebugBreakAtReturn();
|
} else {
|
- return Debug::IsDebugBreak(rinfo()->target_address());
|
+ return v8_context()->debug_.IsDebugBreak(rinfo()->target_address());
|
}
|
}
|
|
@@ -421,7 +422,7 @@
|
|
// Patch the code to invoke the builtin debug break function matching the
|
// calling convention used by the call site.
|
- Handle<Code> dbgbrk_code(Debug::FindDebugBreak(code, mode));
|
+ Handle<Code> dbgbrk_code(v8_context()->debug_.FindDebugBreak(code, mode));
|
rinfo()->set_target_address(dbgbrk_code->entry());
|
|
// For stubs that refer back to an inlined version clear the cached map for
|
@@ -502,12 +503,24 @@
|
#endif
|
}
|
|
+Debug::Debug():
|
+ has_break_points_(false),
|
+ script_cache_(NULL),
|
+ debug_info_list_(NULL),
|
+ debugger_(new Debugger(this)),
|
+ disable_break_(false),
|
+ break_on_exception_(false),
|
+ break_on_uncaught_exception_(true),
|
+ debug_context_(Handle<Context>()),
|
+ debug_break_return_(NULL),
|
+ message_handler_(NULL)
|
+{
|
+}
|
|
-bool Debug::has_break_points_ = false;
|
-ScriptCache* Debug::script_cache_ = NULL;
|
-DebugInfoListNode* Debug::debug_info_list_ = NULL;
|
+Debug::~Debug() {
|
+ delete debugger_;
|
+}
|
|
-
|
// Threading support.
|
void Debug::ThreadInit() {
|
thread_local_.break_count_ = 0;
|
@@ -524,11 +537,6 @@
|
thread_local_.pending_interrupts_ = 0;
|
}
|
|
-
|
-JSCallerSavedBuffer Debug::registers_;
|
-Debug::ThreadLocal Debug::thread_local_;
|
-
|
-
|
char* Debug::ArchiveDebug(char* storage) {
|
char* to = storage;
|
memcpy(to, reinterpret_cast<char*>(&thread_local_), sizeof(ThreadLocal));
|
@@ -554,18 +562,6 @@
|
return sizeof(ThreadLocal) + sizeof(registers_);
|
}
|
|
-
|
-// Default break enabled.
|
-bool Debug::disable_break_ = false;
|
-
|
-// Default call debugger on uncaught exception.
|
-bool Debug::break_on_exception_ = false;
|
-bool Debug::break_on_uncaught_exception_ = true;
|
-
|
-Handle<Context> Debug::debug_context_ = Handle<Context>();
|
-Code* Debug::debug_break_return_ = NULL;
|
-
|
-
|
void ScriptCache::Add(Handle<Script> script) {
|
// Create an entry in the hash map for the script.
|
int id = Smi::cast(script->id())->value();
|
@@ -578,9 +574,10 @@
|
|
// Globalize the script object, make it weak and use the location of the
|
// global handle as the value in the hash map.
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
Handle<Script> script_ =
|
- Handle<Script>::cast((GlobalHandles::Create(*script)));
|
- GlobalHandles::MakeWeak(reinterpret_cast<Object**>(script_.location()),
|
+ Handle<Script>::cast((global_handles.Create(*script)));
|
+ global_handles.MakeWeak(reinterpret_cast<Object**>(script_.location()),
|
this, ScriptCache::HandleWeakScript);
|
entry->value = script_.location();
|
}
|
@@ -601,21 +598,23 @@
|
|
|
void ScriptCache::ProcessCollectedScripts() {
|
+ Debugger* const debugger = v8_context()->debug_.debugger();
|
for (int i = 0; i < collected_scripts_.length(); i++) {
|
- Debugger::OnScriptCollected(collected_scripts_[i]);
|
+ debugger->OnScriptCollected(collected_scripts_[i]);
|
}
|
collected_scripts_.Clear();
|
}
|
|
|
void ScriptCache::Clear() {
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
// Iterate the script cache to get rid of all the weak handles.
|
for (HashMap::Entry* entry = Start(); entry != NULL; entry = Next(entry)) {
|
ASSERT(entry != NULL);
|
Object** location = reinterpret_cast<Object**>(entry->value);
|
ASSERT((*location)->IsScript());
|
- GlobalHandles::ClearWeakness(location);
|
- GlobalHandles::Destroy(location);
|
+ global_handles.ClearWeakness(location);
|
+ global_handles.Destroy(location);
|
}
|
// Clear the content of the hash map.
|
HashMap::Clear();
|
@@ -645,7 +644,7 @@
|
if (create_heap_objects) {
|
// Get code to handle debug break on return.
|
debug_break_return_ =
|
- Builtins::builtin(Builtins::Return_DebugBreak);
|
+ v8_context()->builtins_.builtin(Builtins::Return_DebugBreak);
|
ASSERT(debug_break_return_->IsCode());
|
}
|
}
|
@@ -653,9 +652,10 @@
|
|
void Debug::HandleWeakDebugInfo(v8::Persistent<v8::Value> obj, void* data) {
|
DebugInfoListNode* node = reinterpret_cast<DebugInfoListNode*>(data);
|
- RemoveDebugInfo(node->debug_info());
|
+ Debug & debug = v8_context()->debug_;
|
+ debug.RemoveDebugInfo(node->debug_info());
|
#ifdef DEBUG
|
- node = Debug::debug_info_list_;
|
+ node = debug.debug_info_list_;
|
while (node != NULL) {
|
ASSERT(node != reinterpret_cast<DebugInfoListNode*>(data));
|
node = node->next();
|
@@ -666,14 +666,15 @@
|
|
DebugInfoListNode::DebugInfoListNode(DebugInfo* debug_info): next_(NULL) {
|
// Globalize the request debug info object and make it weak.
|
- debug_info_ = Handle<DebugInfo>::cast((GlobalHandles::Create(debug_info)));
|
- GlobalHandles::MakeWeak(reinterpret_cast<Object**>(debug_info_.location()),
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ debug_info_ = Handle<DebugInfo>::cast((global_handles.Create(debug_info)));
|
+ global_handles.MakeWeak(reinterpret_cast<Object**>(debug_info_.location()),
|
this, Debug::HandleWeakDebugInfo);
|
}
|
|
|
DebugInfoListNode::~DebugInfoListNode() {
|
- GlobalHandles::Destroy(reinterpret_cast<Object**>(debug_info_.location()));
|
+ v8_context()->global_handles_.Destroy(reinterpret_cast<Object**>(debug_info_.location()));
|
}
|
|
|
@@ -686,26 +687,28 @@
|
}
|
|
// Find source and name for the requested script.
|
- Handle<String> source_code = Bootstrapper::NativesSourceLookup(index);
|
+ Handle<String> source_code = v8_context()->bootstrapper_.NativesSourceLookup(index);
|
Vector<const char> name = Natives::GetScriptName(index);
|
Handle<String> script_name = Factory::NewStringFromAscii(name);
|
|
// Compile the script.
|
- bool allow_natives_syntax = FLAG_allow_natives_syntax;
|
+ bool allow_natives_syntax = FLAG_allow_natives_syntax;
|
FLAG_allow_natives_syntax = true;
|
Handle<JSFunction> boilerplate;
|
- boilerplate = Compiler::Compile(source_code, script_name, 0, 0, NULL, NULL);
|
+ boilerplate = v8_context()->compiler_.Compile(source_code, script_name, 0, 0, NULL, NULL);
|
FLAG_allow_natives_syntax = allow_natives_syntax;
|
|
+ Top& top = v8_context()->top_;
|
+
|
// Silently ignore stack overflows during compilation.
|
if (boilerplate.is_null()) {
|
- ASSERT(Top::has_pending_exception());
|
- Top::clear_pending_exception();
|
+ ASSERT(top.has_pending_exception());
|
+ top.clear_pending_exception();
|
return false;
|
}
|
|
// Execute the boilerplate function in the debugger context.
|
- Handle<Context> context = Top::global_context();
|
+ Handle<Context> context = top.global_context();
|
bool caught_exception = false;
|
Handle<JSFunction> function =
|
Factory::NewFunctionFromBoilerplate(boilerplate, context);
|
@@ -735,9 +738,9 @@
|
|
// Bail out if we're already in the process of compiling the native
|
// JavaScript source code for the debugger.
|
- if (Debugger::compiling_natives() || Debugger::is_loading_debugger())
|
+ if (debugger_->compiling_natives() || debugger_->is_loading_debugger())
|
return false;
|
- Debugger::set_loading_debugger(true);
|
+ debugger_->set_loading_debugger(true);
|
|
// Disable breakpoints and interrupts while compiling and running the
|
// debugger scripts including the context creation code.
|
@@ -747,13 +750,13 @@
|
// Create the debugger context.
|
HandleScope scope;
|
Handle<Context> context =
|
- Bootstrapper::CreateEnvironment(Handle<Object>::null(),
|
+ v8_context()->bootstrapper_.CreateEnvironment(Handle<Object>::null(),
|
v8::Handle<ObjectTemplate>(),
|
NULL);
|
|
// Use the debugger context.
|
SaveContext save;
|
- Top::set_context(*context);
|
+ v8_context()->top_.set_context(*context);
|
|
// Expose the builtins object in the debugger context.
|
Handle<String> key = Factory::LookupAsciiSymbol("builtins");
|
@@ -761,21 +764,21 @@
|
SetProperty(global, key, Handle<Object>(global->builtins()), NONE);
|
|
// Compile the JavaScript for the debugger in the debugger context.
|
- Debugger::set_compiling_natives(true);
|
+ debugger_->set_compiling_natives(true);
|
bool caught_exception =
|
!CompileDebuggerScript(Natives::GetIndex("mirror")) ||
|
!CompileDebuggerScript(Natives::GetIndex("debug"));
|
- Debugger::set_compiling_natives(false);
|
+ debugger_->set_compiling_natives(false);
|
|
// Make sure we mark the debugger as not loading before we might
|
// return.
|
- Debugger::set_loading_debugger(false);
|
+ debugger_->set_loading_debugger(false);
|
|
// Check for caught exceptions.
|
if (caught_exception) return false;
|
|
// Debugger loaded.
|
- debug_context_ = Handle<Context>::cast(GlobalHandles::Create(*context));
|
+ debug_context_ = Handle<Context>::cast(v8_context()->global_handles_.Create(*context));
|
|
return true;
|
}
|
@@ -791,7 +794,7 @@
|
DestroyScriptCache();
|
|
// Clear debugger context global handle.
|
- GlobalHandles::Destroy(reinterpret_cast<Object**>(debug_context_.location()));
|
+ v8_context()->global_handles_.Destroy(reinterpret_cast<Object**>(debug_context_.location()));
|
debug_context_ = Handle<Context>();
|
}
|
|
@@ -815,17 +818,20 @@
|
// Get the top-most JavaScript frame.
|
JavaScriptFrameIterator it;
|
JavaScriptFrame* frame = it.frame();
|
+ V8Context* const v8context = v8_context();
|
+ Heap& heap = v8context->heap_;
|
+ Debug & debug = v8context->debug_;
|
|
// Just continue if breaks are disabled or debugger cannot be loaded.
|
- if (disable_break() || !Load()) {
|
- SetAfterBreakTarget(frame);
|
- return Heap::undefined_value();
|
+ if (debug.disable_break() || !debug.Load()) {
|
+ debug.SetAfterBreakTarget(frame);
|
+ return heap.undefined_value();
|
}
|
|
// Enter the debugger.
|
EnterDebugger debugger;
|
if (debugger.FailedToEnter()) {
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
|
// Postpone interrupt during breakpoint processing.
|
@@ -834,7 +840,7 @@
|
// Get the debug info (create it if it does not exist).
|
Handle<SharedFunctionInfo> shared =
|
Handle<SharedFunctionInfo>(JSFunction::cast(frame->function())->shared());
|
- Handle<DebugInfo> debug_info = GetDebugInfo(shared);
|
+ Handle<DebugInfo> debug_info = debug.GetDebugInfo(shared);
|
|
// Find the break point where execution has stopped.
|
BreakLocationIterator break_location_iterator(debug_info,
|
@@ -842,56 +848,56 @@
|
break_location_iterator.FindBreakLocationFromAddress(frame->pc());
|
|
// Check whether step next reached a new statement.
|
- if (!StepNextContinue(&break_location_iterator, frame)) {
|
+ if (!debug.StepNextContinue(&break_location_iterator, frame)) {
|
// Decrease steps left if performing multiple steps.
|
- if (thread_local_.step_count_ > 0) {
|
- thread_local_.step_count_--;
|
+ if (debug.thread_local_.step_count_ > 0) {
|
+ debug.thread_local_.step_count_--;
|
}
|
}
|
|
// If there is one or more real break points check whether any of these are
|
// triggered.
|
- Handle<Object> break_points_hit(Heap::undefined_value());
|
+ Handle<Object> break_points_hit(heap.undefined_value());
|
if (break_location_iterator.HasBreakPoint()) {
|
Handle<Object> break_point_objects =
|
Handle<Object>(break_location_iterator.BreakPointObjects());
|
- break_points_hit = CheckBreakPoints(break_point_objects);
|
+ break_points_hit = debug.CheckBreakPoints(break_point_objects);
|
}
|
|
// If step out is active skip everything until the frame where we need to step
|
// out to is reached, unless real breakpoint is hit.
|
- if (Debug::StepOutActive() && frame->fp() != Debug::step_out_fp() &&
|
+ if (debug.StepOutActive() && frame->fp() != debug.step_out_fp() &&
|
break_points_hit->IsUndefined() ) {
|
// Step count should always be 0 for StepOut.
|
- ASSERT(thread_local_.step_count_ == 0);
|
+ ASSERT(debug.thread_local_.step_count_ == 0);
|
} else if (!break_points_hit->IsUndefined() ||
|
- (thread_local_.last_step_action_ != StepNone &&
|
- thread_local_.step_count_ == 0)) {
|
+ (debug.thread_local_.last_step_action_ != StepNone &&
|
+ debug.thread_local_.step_count_ == 0)) {
|
// Notify debugger if a real break point is triggered or if performing
|
// single stepping with no more steps to perform. Otherwise do another step.
|
|
// Clear all current stepping setup.
|
- ClearStepping();
|
+ debug.ClearStepping();
|
|
// Notify the debug event listeners.
|
- Debugger::OnDebugBreak(break_points_hit, false);
|
- } else if (thread_local_.last_step_action_ != StepNone) {
|
+ debug.debugger()->OnDebugBreak(break_points_hit, false);
|
+ } else if (debug.thread_local_.last_step_action_ != StepNone) {
|
// Hold on to last step action as it is cleared by the call to
|
// ClearStepping.
|
- StepAction step_action = thread_local_.last_step_action_;
|
- int step_count = thread_local_.step_count_;
|
+ StepAction step_action = debug.thread_local_.last_step_action_;
|
+ int step_count = debug.thread_local_.step_count_;
|
|
// Clear all current stepping setup.
|
- ClearStepping();
|
+ debug.ClearStepping();
|
|
// Set up for the remaining steps.
|
- PrepareStep(step_action, step_count);
|
+ debug.PrepareStep(step_action, step_count);
|
}
|
|
// Install jump to the call address which was overwritten.
|
- SetAfterBreakTarget(frame);
|
+ debug.SetAfterBreakTarget(frame);
|
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
|
|
@@ -951,7 +957,7 @@
|
reinterpret_cast<Object**>(break_point_object.location())
|
};
|
Handle<Object> result = Execution::TryCall(check_break_point,
|
- Top::builtins(), argc, argv,
|
+ v8_context()->top_.builtins(), argc, argv,
|
&caught_exception);
|
|
// If exception or non boolean result handle as not triggered
|
@@ -960,7 +966,7 @@
|
}
|
|
// Return whether the break point is triggered.
|
- return *result == Heap::true_value();
|
+ return *result == v8_context()->heap_.true_value();
|
}
|
|
|
@@ -1232,9 +1238,10 @@
|
// Find out number of arguments from the stub minor key.
|
// Reverse lookup required as the minor key cannot be retrieved
|
// from the code object.
|
+ Heap& heap = v8_context()->heap_;
|
Handle<Object> obj(
|
- Heap::code_stubs()->SlowReverseLookup(*call_function_stub));
|
- ASSERT(*obj != Heap::undefined_value());
|
+ heap.code_stubs()->SlowReverseLookup(*call_function_stub));
|
+ ASSERT(*obj != heap.undefined_value());
|
ASSERT(obj->IsSmi());
|
// Get the STUB key and extract major and minor key.
|
uint32_t key = Smi::cast(*obj)->value();
|
@@ -1342,6 +1349,7 @@
|
|
// Find the builtin to use for invoking the debug break
|
Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
|
+ Builtins& builtins = v8_context()->builtins_;
|
// Find the builtin debug break function matching the calling convention
|
// used by the call site.
|
if (code->is_inline_cache_stub()) {
|
@@ -1349,32 +1357,32 @@
|
return ComputeCallDebugBreak(code->arguments_count());
|
}
|
if (code->is_load_stub()) {
|
- return Handle<Code>(Builtins::builtin(Builtins::LoadIC_DebugBreak));
|
+ return Handle<Code>(builtins.builtin(Builtins::LoadIC_DebugBreak));
|
}
|
if (code->is_store_stub()) {
|
- return Handle<Code>(Builtins::builtin(Builtins::StoreIC_DebugBreak));
|
+ return Handle<Code>(builtins.builtin(Builtins::StoreIC_DebugBreak));
|
}
|
if (code->is_keyed_load_stub()) {
|
Handle<Code> result =
|
- Handle<Code>(Builtins::builtin(Builtins::KeyedLoadIC_DebugBreak));
|
+ Handle<Code>(builtins.builtin(Builtins::KeyedLoadIC_DebugBreak));
|
return result;
|
}
|
if (code->is_keyed_store_stub()) {
|
Handle<Code> result =
|
- Handle<Code>(Builtins::builtin(Builtins::KeyedStoreIC_DebugBreak));
|
+ Handle<Code>(builtins.builtin(Builtins::KeyedStoreIC_DebugBreak));
|
return result;
|
}
|
}
|
if (RelocInfo::IsConstructCall(mode)) {
|
Handle<Code> result =
|
- Handle<Code>(Builtins::builtin(Builtins::ConstructCall_DebugBreak));
|
+ Handle<Code>(builtins.builtin(Builtins::ConstructCall_DebugBreak));
|
return result;
|
}
|
if (code->kind() == Code::STUB) {
|
ASSERT(code->major_key() == CodeStub::CallFunction ||
|
code->major_key() == CodeStub::StackCheck);
|
Handle<Code> result =
|
- Handle<Code>(Builtins::builtin(Builtins::StubNoRegisters_DebugBreak));
|
+ Handle<Code>(builtins.builtin(Builtins::StubNoRegisters_DebugBreak));
|
return result;
|
}
|
|
@@ -1386,10 +1394,10 @@
|
// Simple function for returning the source positions for active break points.
|
Handle<Object> Debug::GetSourceBreakLocations(
|
Handle<SharedFunctionInfo> shared) {
|
- if (!HasDebugInfo(shared)) return Handle<Object>(Heap::undefined_value());
|
+ if (!HasDebugInfo(shared)) return Handle<Object>(v8_context()->heap_.undefined_value());
|
Handle<DebugInfo> debug_info = GetDebugInfo(shared);
|
if (debug_info->GetBreakPointCount() == 0) {
|
- return Handle<Object>(Heap::undefined_value());
|
+ return Handle<Object>(v8_context()->heap_.undefined_value());
|
}
|
Handle<FixedArray> locations =
|
Factory::NewFixedArray(debug_info->GetBreakPointCount());
|
@@ -1441,10 +1449,12 @@
|
if (fp == Debug::step_in_fp()) {
|
// Don't allow step into functions in the native context.
|
if (!function->IsBuiltin()) {
|
+ Builtins& builtins = v8_context()->builtins_;
|
+
|
if (function->shared()->code() ==
|
- Builtins::builtin(Builtins::FunctionApply) ||
|
+ builtins.builtin(Builtins::FunctionApply) ||
|
function->shared()->code() ==
|
- Builtins::builtin(Builtins::FunctionCall)) {
|
+ builtins.builtin(Builtins::FunctionCall)) {
|
// Handle function.apply and function.call separately to flood the
|
// function to be called and not the code for Builtins::FunctionApply or
|
// Builtins::FunctionCall. The receiver of call/apply is the target
|
@@ -1565,7 +1575,7 @@
|
} else {
|
prev->set_next(current->next());
|
}
|
- current->debug_info()->shared()->set_debug_info(Heap::undefined_value());
|
+ current->debug_info()->shared()->set_debug_info(v8_context()->heap_.undefined_value());
|
delete current;
|
|
// If there are no more debug info objects there are not more break
|
@@ -1662,12 +1672,13 @@
|
|
void Debug::ClearMirrorCache() {
|
HandleScope scope;
|
- ASSERT(Top::context() == *Debug::debug_context());
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(top.context() == *Debug::debug_context());
|
|
// Clear the mirror cache.
|
Handle<String> function_name =
|
Factory::LookupSymbol(CStrVector("ClearMirrorCache"));
|
- Handle<Object> fun(Top::global()->GetProperty(*function_name));
|
+ Handle<Object> fun(top.global()->GetProperty(*function_name));
|
ASSERT(fun->IsJSFunction());
|
bool caught_exception;
|
Handle<Object> js_object = Execution::TryCall(
|
@@ -1683,8 +1694,9 @@
|
// Perform two GCs to get rid of all unreferenced scripts. The first GC gets
|
// rid of all the cached script wrappers and the second gets rid of the
|
// scripts which is no longer referenced.
|
- Heap::CollectAllGarbage(false);
|
- Heap::CollectAllGarbage(false);
|
+ Heap& heap = v8_context()->heap_;
|
+ heap.CollectAllGarbage(false);
|
+ heap.CollectAllGarbage(false);
|
|
ASSERT(script_cache_ == NULL);
|
script_cache_ = new ScriptCache();
|
@@ -1734,7 +1746,7 @@
|
|
// Perform GC to get unreferenced scripts evicted from the cache before
|
// returning the content.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
// Get the scripts from the cache.
|
return script_cache_->GetScripts();
|
@@ -1749,29 +1761,35 @@
|
}
|
|
|
-Mutex* Debugger::debugger_access_ = OS::CreateMutex();
|
-Handle<Object> Debugger::event_listener_ = Handle<Object>();
|
-Handle<Object> Debugger::event_listener_data_ = Handle<Object>();
|
-bool Debugger::compiling_natives_ = false;
|
-bool Debugger::is_loading_debugger_ = false;
|
-bool Debugger::never_unload_debugger_ = false;
|
-v8::Debug::MessageHandler2 Debugger::message_handler_ = NULL;
|
-bool Debugger::debugger_unload_pending_ = false;
|
-v8::Debug::HostDispatchHandler Debugger::host_dispatch_handler_ = NULL;
|
-int Debugger::host_dispatch_micros_ = 100 * 1000;
|
-DebuggerAgent* Debugger::agent_ = NULL;
|
-LockingCommandMessageQueue Debugger::command_queue_(kQueueInitialSize);
|
-Semaphore* Debugger::command_received_ = OS::CreateSemaphore(0);
|
+Debugger::Debugger(Debug* debug) :
|
+ debugger_access_(OS::CreateMutex()),
|
+ event_listener_ (Handle<Object>()),
|
+ event_listener_data_(Handle<Object>()),
|
+ compiling_natives_(false),
|
+ is_loading_debugger_ (false),
|
+ never_unload_debugger_(false),
|
+ message_handler_(NULL),
|
+ debugger_unload_pending_(false),
|
+ host_dispatch_handler_(NULL),
|
+ host_dispatch_micros_(100 * 1000),
|
+ agent_(NULL),
|
+ command_queue_(kQueueInitialSize),
|
+ command_received_(OS::CreateSemaphore(0)),
|
+ debug_(debug)
|
+{
|
+}
|
|
+Debugger::~Debugger() {}
|
|
Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name,
|
int argc, Object*** argv,
|
bool* caught_exception) {
|
- ASSERT(Top::context() == *Debug::debug_context());
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(top.context() == *debug_->debug_context());
|
|
// Create the execution state object.
|
Handle<String> constructor_str = Factory::LookupSymbol(constructor_name);
|
- Handle<Object> constructor(Top::global()->GetProperty(*constructor_str));
|
+ Handle<Object> constructor(top.global()->GetProperty(*constructor_str));
|
ASSERT(constructor->IsJSFunction());
|
if (!constructor->IsJSFunction()) {
|
*caught_exception = true;
|
@@ -1779,7 +1797,7 @@
|
}
|
Handle<Object> js_object = Execution::TryCall(
|
Handle<JSFunction>::cast(constructor),
|
- Handle<JSObject>(Debug::debug_context()->global()), argc, argv,
|
+ Handle<JSObject>(debug_->debug_context()->global()), argc, argv,
|
caught_exception);
|
return js_object;
|
}
|
@@ -1787,7 +1805,7 @@
|
|
Handle<Object> Debugger::MakeExecutionState(bool* caught_exception) {
|
// Create the execution state object.
|
- Handle<Object> break_id = Factory::NewNumberFromInt(Debug::break_id());
|
+ Handle<Object> break_id = Factory::NewNumberFromInt(debug_->break_id());
|
const int argc = 1;
|
Object** argv[argc] = { break_id.location() };
|
return MakeJSObject(CStrVector("MakeExecutionState"),
|
@@ -1872,17 +1890,17 @@
|
HandleScope scope;
|
|
// Bail out based on state or if there is no listener for this event
|
- if (Debug::InDebugger()) return;
|
+ if (debug_->InDebugger()) return;
|
if (!Debugger::EventActive(v8::Exception)) return;
|
|
// Bail out if exception breaks are not active
|
if (uncaught) {
|
// Uncaught exceptions are reported by either flags.
|
- if (!(Debug::break_on_uncaught_exception() ||
|
- Debug::break_on_exception())) return;
|
+ if (!(debug_->break_on_uncaught_exception() ||
|
+ debug_->break_on_exception())) return;
|
} else {
|
// Caught exceptions are reported is activated.
|
- if (!Debug::break_on_exception()) return;
|
+ if (!debug_->break_on_exception()) return;
|
}
|
|
// Enter the debugger.
|
@@ -1890,7 +1908,7 @@
|
if (debugger.FailedToEnter()) return;
|
|
// Clear all current stepping setup.
|
- Debug::ClearStepping();
|
+ debug_->ClearStepping();
|
// Create the event data object.
|
bool caught_exception = false;
|
Handle<Object> exec_state = MakeExecutionState(&caught_exception);
|
@@ -1914,14 +1932,15 @@
|
bool auto_continue) {
|
HandleScope scope;
|
|
+ V8Context* const v8context = v8_context();
|
// Debugger has already been entered by caller.
|
- ASSERT(Top::context() == *Debug::debug_context());
|
+ ASSERT(v8context->top_.context() == *debug_->debug_context());
|
|
// Bail out if there is no listener for this event
|
if (!Debugger::EventActive(v8::Break)) return;
|
|
// Debugger must be entered in advance.
|
- ASSERT(Top::context() == *Debug::debug_context());
|
+ ASSERT(v8context->top_.context() == *debug_->debug_context());
|
|
// Create the event data object.
|
bool caught_exception = false;
|
@@ -1947,7 +1966,7 @@
|
HandleScope scope;
|
|
// Bail out based on state or if there is no listener for this event
|
- if (Debug::InDebugger()) return;
|
+ if (debug_->InDebugger()) return;
|
if (compiling_natives()) return;
|
if (!EventActive(v8::BeforeCompile)) return;
|
|
@@ -1975,7 +1994,7 @@
|
HandleScope scope;
|
|
// Add the newly compiled script to the script cache.
|
- Debug::AddScriptToScriptCache(script);
|
+ debug_->AddScriptToScriptCache(script);
|
|
// No more to do if not debugging.
|
if (!IsDebuggerActive()) return;
|
@@ -1984,7 +2003,7 @@
|
if (compiling_natives()) return;
|
|
// Store whether in debugger before entering debugger.
|
- bool in_debugger = Debug::InDebugger();
|
+ bool in_debugger = debug_->InDebugger();
|
|
// Enter the debugger.
|
EnterDebugger debugger;
|
@@ -1995,7 +2014,7 @@
|
|
// Get the function UpdateScriptBreakPoints (defined in debug-delay.js).
|
Handle<Object> update_script_break_points =
|
- Handle<Object>(Debug::debug_context()->global()->GetProperty(
|
+ Handle<Object>(debug_->debug_context()->global()->GetProperty(
|
*Factory::LookupAsciiSymbol("UpdateScriptBreakPoints")));
|
if (!update_script_break_points->IsJSFunction()) {
|
return;
|
@@ -2012,7 +2031,7 @@
|
Object** argv[argc] = { reinterpret_cast<Object**>(wrapper.location()) };
|
Handle<Object> result = Execution::TryCall(
|
Handle<JSFunction>::cast(update_script_break_points),
|
- Top::builtins(), argc, argv,
|
+ v8_context()->top_.builtins(), argc, argv,
|
&caught_exception);
|
if (caught_exception) {
|
return;
|
@@ -2041,7 +2060,7 @@
|
HandleScope scope;
|
|
// Bail out based on state or if there is no listener for this event
|
- if (Debug::InDebugger()) return;
|
+ if (debug_->InDebugger()) return;
|
if (compiling_natives()) return;
|
if (!Debugger::EventActive(v8::NewFunction)) return;
|
|
@@ -2095,7 +2114,7 @@
|
|
// Clear any pending debug break if this is a real break.
|
if (!auto_continue) {
|
- Debug::clear_interrupt_pending(DEBUGBREAK);
|
+ debug_->clear_interrupt_pending(DEBUGBREAK);
|
}
|
|
// Create the execution state.
|
@@ -2134,7 +2153,7 @@
|
exec_state.location(),
|
Handle<Object>::cast(event_data).location(),
|
event_listener_data_.location() };
|
- Handle<Object> result = Execution::TryCall(fun, Top::global(),
|
+ Handle<Object> result = Execution::TryCall(fun, v8_context()->top_.global(),
|
argc, argv, &caught_exception);
|
// Silently ignore exceptions from debug event listeners.
|
}
|
@@ -2144,11 +2163,11 @@
|
|
void Debugger::UnloadDebugger() {
|
// Make sure that there are no breakpoints left.
|
- Debug::ClearAllBreakPoints();
|
+ debug_->ClearAllBreakPoints();
|
|
// Unload the debugger if feasible.
|
if (!never_unload_debugger_) {
|
- Debug::Unload();
|
+ debug_->Unload();
|
}
|
|
// Clear the flag indicating that the debugger should be unloaded.
|
@@ -2162,7 +2181,7 @@
|
bool auto_continue) {
|
HandleScope scope;
|
|
- if (!Debug::Load()) return;
|
+ if (!debug_->Load()) return;
|
|
// Process the individual events.
|
bool sendEventMessage = false;
|
@@ -2190,8 +2209,8 @@
|
// The debug command interrupt flag might have been set when the command was
|
// added. It should be enough to clear the flag only once while we are in the
|
// debugger.
|
- ASSERT(Debug::InDebugger());
|
- StackGuard::Continue(DEBUGCOMMAND);
|
+ ASSERT(debug_->InDebugger());
|
+ v8_context()->stack_guard_.Continue(DEBUGCOMMAND);
|
|
// Notify the debugger that a debug event has occurred unless auto continue is
|
// active in which case no event is send.
|
@@ -2254,7 +2273,7 @@
|
|
// Get the command from the queue.
|
CommandMessage command = command_queue_.Get();
|
- Logger::DebugTag("Got request from command queue, in interactive loop.");
|
+ v8_context()->logger_.DebugTag("Got request from command queue, in interactive loop.");
|
if (!Debugger::IsDebuggerActive()) {
|
// Delete command text and user data.
|
command.Dispose();
|
@@ -2329,16 +2348,16 @@
|
void Debugger::SetEventListener(Handle<Object> callback,
|
Handle<Object> data) {
|
HandleScope scope;
|
-
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
// Clear the global handles for the event listener and the event listener data
|
// object.
|
if (!event_listener_.is_null()) {
|
- GlobalHandles::Destroy(
|
+ global_handles.Destroy(
|
reinterpret_cast<Object**>(event_listener_.location()));
|
event_listener_ = Handle<Object>();
|
}
|
if (!event_listener_data_.is_null()) {
|
- GlobalHandles::Destroy(
|
+ global_handles.Destroy(
|
reinterpret_cast<Object**>(event_listener_data_.location()));
|
event_listener_data_ = Handle<Object>();
|
}
|
@@ -2346,11 +2365,11 @@
|
// If there is a new debug event listener register it together with its data
|
// object.
|
if (!callback->IsUndefined() && !callback->IsNull()) {
|
- event_listener_ = Handle<Object>::cast(GlobalHandles::Create(*callback));
|
+ event_listener_ = Handle<Object>::cast(global_handles.Create(*callback));
|
if (data.is_null()) {
|
data = Factory::undefined_value();
|
}
|
- event_listener_data_ = Handle<Object>::cast(GlobalHandles::Create(*data));
|
+ event_listener_data_ = Handle<Object>::cast(global_handles.Create(*data));
|
}
|
|
ListenersChanged();
|
@@ -2365,7 +2384,7 @@
|
if (handler == NULL) {
|
// Send an empty command to the debugger if in a break to make JavaScript
|
// run again if the debugger is closed.
|
- if (Debug::InDebugger()) {
|
+ if (debug_->InDebugger()) {
|
ProcessCommand(Vector<const uint16_t>::empty());
|
}
|
}
|
@@ -2373,14 +2392,15 @@
|
|
|
void Debugger::ListenersChanged() {
|
+ V8Context * v8context = v8_context();
|
if (IsDebuggerActive()) {
|
// Disable the compilation cache when the debugger is active.
|
- CompilationCache::Disable();
|
+ v8context->compilation_cache_.Disable();
|
} else {
|
- CompilationCache::Enable();
|
+ v8context->compilation_cache_.Enable();
|
|
// Unload the debugger if event listener and message handler cleared.
|
- if (Debug::InDebugger()) {
|
+ if (v8context->debug_.InDebugger()) {
|
// If we are in debugger set the flag to unload the debugger when last
|
// EnterDebugger on the current stack is destroyed.
|
debugger_unload_pending_ = true;
|
@@ -2420,13 +2440,13 @@
|
Vector<uint16_t>(const_cast<uint16_t*>(command.start()),
|
command.length()),
|
client_data);
|
- Logger::DebugTag("Put command on command_queue.");
|
+ v8_context()->logger_.DebugTag("Put command on command_queue.");
|
command_queue_.Put(message);
|
command_received_->Signal();
|
|
// Set the debug command break flag to have the command processed.
|
- if (!Debug::InDebugger()) {
|
- StackGuard::DebugCommand();
|
+ if (!debug_->InDebugger()) {
|
+ v8_context()->stack_guard_.DebugCommand();
|
}
|
}
|
|
@@ -2588,7 +2608,7 @@
|
|
|
v8::Handle<v8::Context> MessageImpl::GetEventContext() const {
|
- Handle<Context> context = Debug::debugger_entry()->GetContext();
|
+ Handle<Context> context = v8_context()->debug_.debugger_entry()->GetContext();
|
// Top::context() may have been NULL when "script collected" event occured.
|
if (*context == NULL) {
|
ASSERT(event_ == v8::ScriptCollected);
|
@@ -2699,7 +2719,7 @@
|
CommandMessage LockingCommandMessageQueue::Get() {
|
ScopedLock sl(lock_);
|
CommandMessage result = queue_.Get();
|
- Logger::DebugEvent("Get", result.text());
|
+ v8_context()->logger_.DebugEvent("Get", result.text());
|
return result;
|
}
|
|
@@ -2707,7 +2727,7 @@
|
void LockingCommandMessageQueue::Put(const CommandMessage& message) {
|
ScopedLock sl(lock_);
|
queue_.Put(message);
|
- Logger::DebugEvent("Put", message.text());
|
+ v8_context()->logger_.DebugEvent("Put", message.text());
|
}
|
|
|
Index: src/regexp-stack.cc
|
===================================================================
|
--- src/regexp-stack.cc (revision 2977)
|
+++ src/regexp-stack.cc Sat Nov 14 01:43:17 MSK 2009
|
@@ -32,24 +32,26 @@
|
namespace v8 {
|
namespace internal {
|
|
-RegExpStack::RegExpStack() {
|
+RegExpStackControl::RegExpStackControl() {
|
// Initialize, if not already initialized.
|
- RegExpStack::EnsureCapacity(0);
|
+ v8_context()->reg_exp_stack_.EnsureCapacity(0);
|
}
|
|
|
-RegExpStack::~RegExpStack() {
|
+RegExpStackControl::~RegExpStackControl() {
|
// Reset the buffer if it has grown.
|
- RegExpStack::Reset();
|
+ v8_context()->reg_exp_stack_.Reset();
|
}
|
|
+RegExpStack::RegExpStack():thread_local_(this), static_offsets_vector_(NULL) {
|
+}
|
|
char* RegExpStack::ArchiveStack(char* to) {
|
size_t size = sizeof(thread_local_);
|
memcpy(reinterpret_cast<void*>(to),
|
&thread_local_,
|
size);
|
- thread_local_ = ThreadLocal();
|
+ thread_local_ = ThreadLocal(this);
|
return to + size;
|
}
|
|
@@ -64,15 +66,15 @@
|
void RegExpStack::Reset() {
|
if (thread_local_.memory_size_ > kMinimumStackSize) {
|
DeleteArray(thread_local_.memory_);
|
- thread_local_ = ThreadLocal();
|
+ thread_local_ = ThreadLocal(this);
|
}
|
}
|
|
|
void RegExpStack::ThreadLocal::Free() {
|
- if (thread_local_.memory_size_ > 0) {
|
- DeleteArray(thread_local_.memory_);
|
- thread_local_ = ThreadLocal();
|
+ if (stack_->thread_local_.memory_size_ > 0) {
|
+ DeleteArray(stack_->thread_local_.memory_);
|
+ stack_->thread_local_ = ThreadLocal(stack_);
|
}
|
}
|
|
@@ -96,8 +98,4 @@
|
}
|
return thread_local_.memory_ + thread_local_.memory_size_;
|
}
|
-
|
-
|
-RegExpStack::ThreadLocal RegExpStack::thread_local_;
|
-
|
}} // namespace v8::internal
|
Index: src/rewriter.cc
|
===================================================================
|
--- src/rewriter.cc (revision 3048)
|
+++ src/rewriter.cc Sat Nov 14 01:43:22 MSK 2009
|
@@ -241,7 +241,7 @@
|
}
|
|
if (!var->is_this() &&
|
- !Heap::result_symbol()->Equals(*var->name())) {
|
+ !v8_context()->heap_.result_symbol()->Equals(*var->name())) {
|
func_name_inferrer_.PushName(var->name());
|
}
|
}
|
@@ -254,7 +254,7 @@
|
node->type()->SetAsLikelySmi();
|
} else if (literal->IsString()) {
|
Handle<String> lit_str(Handle<String>::cast(literal));
|
- if (!Heap::prototype_symbol()->Equals(*lit_str)) {
|
+ if (!v8_context()->heap_.prototype_symbol()->Equals(*lit_str)) {
|
func_name_inferrer_.PushName(lit_str);
|
}
|
}
|
@@ -820,7 +820,7 @@
|
|
|
bool Rewriter::Process(FunctionLiteral* function) {
|
- HistogramTimerScope timer(&Counters::rewriting);
|
+ HistogramTimerScope timer(&v8_context()->counters_.rewriting);
|
Scope* scope = function->scope();
|
if (scope->is_function_scope()) return true;
|
|
@@ -841,7 +841,7 @@
|
ZoneList<Statement*>* body = function->body();
|
|
if (FLAG_optimize_ast && !body->is_empty()) {
|
- HistogramTimerScope timer(&Counters::ast_optimization);
|
+ HistogramTimerScope timer(&v8_context()->counters_.ast_optimization);
|
AstOptimizer optimizer(function->name());
|
optimizer.Optimize(body);
|
if (optimizer.HasStackOverflow()) {
|
Index: test/cctest/test-log-stack-tracer.cc
|
===================================================================
|
--- test/cctest/test-log-stack-tracer.cc (revision 3209)
|
+++ test/cctest/test-log-stack-tracer.cc Sun Nov 15 12:42:13 MSK 2009
|
@@ -21,6 +21,7 @@
|
using v8::Script;
|
using v8::String;
|
using v8::Value;
|
+using v8::v8_context;
|
|
using v8::internal::byte;
|
using v8::internal::Address;
|
@@ -58,11 +59,11 @@
|
// Hide c_entry_fp to emulate situation when sampling is done while
|
// pure JS code is being executed
|
static void DoTraceHideCEntryFPAddress(Address fp) {
|
- v8::internal::Address saved_c_frame_fp = *(Top::c_entry_fp_address());
|
+ v8::internal::Address saved_c_frame_fp = *(v8_context()->top_.c_entry_fp_address());
|
CHECK(saved_c_frame_fp);
|
- *(Top::c_entry_fp_address()) = 0;
|
+ *(v8_context()->top_.c_entry_fp_address()) = 0;
|
DoTrace(fp);
|
- *(Top::c_entry_fp_address()) = saved_c_frame_fp;
|
+ *(v8_context()->top_.c_entry_fp_address()) = saved_c_frame_fp;
|
}
|
|
|
@@ -152,8 +153,8 @@
|
|
|
static Address GetJsEntrySp() {
|
- CHECK_NE(NULL, Top::GetCurrentThread());
|
- return Top::js_entry_sp(Top::GetCurrentThread());
|
+ CHECK_NE(NULL, v8_context()->top_.GetCurrentThread());
|
+ return v8_context()->top_.js_entry_sp(v8_context()->top_.GetCurrentThread());
|
}
|
|
|
Index: src/v8.h
|
===================================================================
|
--- src/v8.h (revision 3142)
|
+++ src/v8.h Sat Nov 14 01:43:03 MSK 2009
|
@@ -72,7 +72,7 @@
|
namespace v8 {
|
namespace internal {
|
|
-class V8 : public AllStatic {
|
+class V8 {
|
public:
|
// Global actions.
|
|
@@ -80,38 +80,58 @@
|
// created from scratch. If a non-null Deserializer is given, the
|
// initial state is created by reading the deserialized data into an
|
// empty heap.
|
- static bool Initialize(GenericDeserializer* des);
|
- static void TearDown();
|
- static bool IsRunning() { return is_running_; }
|
+ bool Initialize(GenericDeserializer* des);
|
+ void TearDown();
|
+ bool IsRunning() { return is_running_; }
|
// To be dead you have to have lived
|
- static bool IsDead() { return has_fatal_error_ || has_been_disposed_; }
|
- static void SetFatalError();
|
+ bool IsDead() { return has_fatal_error_ || has_been_disposed_; }
|
+ void SetFatalError();
|
|
// Report process out of memory. Implementation found in api.cc.
|
- static void FatalProcessOutOfMemory(const char* location);
|
+ void FatalProcessOutOfMemory(const char* location);
|
|
// Random number generation support. Not cryptographically safe.
|
static uint32_t Random();
|
static Smi* RandomPositiveSmi();
|
|
// Idle notification directly from the API.
|
- static bool IdleNotification();
|
+ bool IdleNotification();
|
|
+ FatalErrorCallback get_exception_behavior() const {
|
+ return exception_behavior;
|
+ }
|
+
|
+ void set_exception_behavior(FatalErrorCallback _exception_behavior) {
|
+ exception_behavior = _exception_behavior;
|
+ }
|
private:
|
+ // Track whether this V8 instance has ever called v8::Locker. This allows the
|
+ // API code to verify that the lock is always held when V8 is being entered.
|
+
|
+ bool active_;
|
// True if engine is currently running
|
- static bool is_running_;
|
+ bool is_running_;
|
// True if V8 has ever been run
|
- static bool has_been_setup_;
|
+ bool has_been_setup_;
|
// True if error has been signaled for current engine
|
// (reset to false if engine is restarted)
|
- static bool has_fatal_error_;
|
+ bool has_fatal_error_;
|
// True if engine has been shut down
|
// (reset if engine is restarted)
|
- static bool has_been_disposed_;
|
+ bool has_been_disposed_;
|
+
|
+ FatalErrorCallback exception_behavior;
|
+ V8();
|
+ DISALLOW_COPY_AND_ASSIGN(V8);
|
+
|
+ friend class V8Context;
|
+ friend class Locker;
|
};
|
|
-} } // namespace v8::internal
|
+}
|
+} // namespace v8::internal
|
|
namespace i = v8::internal;
|
|
+#include "v8-global-context.h"
|
#endif // V8_V8_H_
|
Index: src/top.h
|
===================================================================
|
--- src/top.h (revision 3228)
|
+++ src/top.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -35,7 +35,7 @@
|
|
|
#define RETURN_IF_SCHEDULED_EXCEPTION() \
|
- if (Top::has_scheduled_exception()) return Top::PromoteScheduledException()
|
+ if (v8_context()->top_.has_scheduled_exception()) return v8_context()->top_.PromoteScheduledException()
|
|
// Top has static variables used for JavaScript execution.
|
|
@@ -43,6 +43,7 @@
|
|
class ThreadLocalTop BASE_EMBEDDED {
|
public:
|
+ ThreadLocalTop();
|
// Initialize the thread data.
|
void Initialize();
|
|
@@ -142,84 +143,84 @@
|
k_top_address_count
|
};
|
|
- static Address get_address_from_id(AddressId id);
|
+ Address get_address_from_id(AddressId id);
|
|
// Access to top context (where the current function object was created).
|
- static Context* context() { return thread_local_.context_; }
|
- static void set_context(Context* context) {
|
+ Context* context() { return thread_local_.context_; }
|
+ void set_context(Context* context) {
|
thread_local_.context_ = context;
|
}
|
- static Context** context_address() { return &thread_local_.context_; }
|
+ Context** context_address() { return &thread_local_.context_; }
|
|
- static SaveContext* save_context() {return thread_local_.save_context_; }
|
- static void set_save_context(SaveContext* save) {
|
+ SaveContext* save_context() {return thread_local_.save_context_; }
|
+ void set_save_context(SaveContext* save) {
|
thread_local_.save_context_ = save;
|
}
|
|
// Access to current thread id.
|
- static int thread_id() { return thread_local_.thread_id_; }
|
- static void set_thread_id(int id) { thread_local_.thread_id_ = id; }
|
+ int thread_id() { return thread_local_.thread_id_; }
|
+ void set_thread_id(int id) { thread_local_.thread_id_ = id; }
|
|
// Interface to pending exception.
|
- static Object* pending_exception() {
|
+ Object* pending_exception() {
|
ASSERT(has_pending_exception());
|
return thread_local_.pending_exception_;
|
}
|
- static bool external_caught_exception() {
|
+ bool external_caught_exception() {
|
return thread_local_.external_caught_exception_;
|
}
|
- static void set_pending_exception(Object* exception) {
|
+ void set_pending_exception(Object* exception) {
|
thread_local_.pending_exception_ = exception;
|
}
|
- static void clear_pending_exception() {
|
- thread_local_.pending_exception_ = Heap::the_hole_value();
|
+ void clear_pending_exception() {
|
+ thread_local_.pending_exception_ = v8_context()->heap_.the_hole_value();
|
}
|
|
- static Object** pending_exception_address() {
|
+ Object** pending_exception_address() {
|
return &thread_local_.pending_exception_;
|
}
|
- static bool has_pending_exception() {
|
+ bool has_pending_exception() {
|
return !thread_local_.pending_exception_->IsTheHole();
|
}
|
- static void clear_pending_message() {
|
+ void clear_pending_message() {
|
thread_local_.has_pending_message_ = false;
|
thread_local_.pending_message_ = NULL;
|
- thread_local_.pending_message_obj_ = Heap::the_hole_value();
|
+ thread_local_.pending_message_obj_ = v8_context()->heap_.the_hole_value();
|
thread_local_.pending_message_script_ = NULL;
|
}
|
- static v8::TryCatch* try_catch_handler() {
|
+ v8::TryCatch* try_catch_handler() {
|
return thread_local_.TryCatchHandler();
|
}
|
- static Address try_catch_handler_address() {
|
+ Address try_catch_handler_address() {
|
return thread_local_.try_catch_handler_address();
|
}
|
// This method is called by the api after operations that may throw
|
// exceptions. If an exception was thrown and not handled by an external
|
// handler the exception is scheduled to be rethrown when we return to running
|
// JavaScript code. If an exception is scheduled true is returned.
|
- static bool OptionalRescheduleException(bool is_bottom_call);
|
+ bool OptionalRescheduleException(bool is_bottom_call);
|
|
|
- static bool* external_caught_exception_address() {
|
+ bool* external_caught_exception_address() {
|
return &thread_local_.external_caught_exception_;
|
}
|
|
- static Object** scheduled_exception_address() {
|
+ Object** scheduled_exception_address() {
|
return &thread_local_.scheduled_exception_;
|
}
|
|
- static Object* scheduled_exception() {
|
+ Object* scheduled_exception() {
|
ASSERT(has_scheduled_exception());
|
return thread_local_.scheduled_exception_;
|
}
|
- static bool has_scheduled_exception() {
|
+ bool has_scheduled_exception() {
|
return !thread_local_.scheduled_exception_->IsTheHole();
|
}
|
- static void clear_scheduled_exception() {
|
- thread_local_.scheduled_exception_ = Heap::the_hole_value();
|
+ void clear_scheduled_exception() {
|
+ thread_local_.scheduled_exception_ = v8_context()->heap_.the_hole_value();
|
}
|
|
- static void setup_external_caught() {
|
+ void setup_external_caught() {
|
thread_local_.external_caught_exception_ =
|
has_pending_exception() &&
|
(thread_local_.catcher_ != NULL) &&
|
@@ -228,161 +229,164 @@
|
|
// Tells whether the current context has experienced an out of memory
|
// exception.
|
- static bool is_out_of_memory();
|
+ bool is_out_of_memory();
|
|
// JS execution stack (see frames.h).
|
- static Address c_entry_fp(ThreadLocalTop* thread) {
|
+ Address c_entry_fp(ThreadLocalTop* thread) {
|
return thread->c_entry_fp_;
|
}
|
- static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
|
+ Address handler(ThreadLocalTop* thread) { return thread->handler_; }
|
|
- static inline Address* c_entry_fp_address() {
|
+ inline Address* c_entry_fp_address() {
|
return &thread_local_.c_entry_fp_;
|
}
|
- static inline Address* handler_address() { return &thread_local_.handler_; }
|
+ inline Address* handler_address() { return &thread_local_.handler_; }
|
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
// Bottom JS entry (see StackTracer::Trace in log.cc).
|
- static Address js_entry_sp(ThreadLocalTop* thread) {
|
+ Address js_entry_sp(ThreadLocalTop* thread) {
|
return thread->js_entry_sp_;
|
}
|
- static inline Address* js_entry_sp_address() {
|
+ inline Address* js_entry_sp_address() {
|
return &thread_local_.js_entry_sp_;
|
}
|
#endif
|
|
// Generated code scratch locations.
|
- static void* formal_count_address() { return &thread_local_.formal_count_; }
|
+ void* formal_count_address() { return &thread_local_.formal_count_; }
|
|
- static void MarkCompactPrologue(bool is_compacting);
|
- static void MarkCompactEpilogue(bool is_compacting);
|
- static void MarkCompactPrologue(bool is_compacting,
|
+ void MarkCompactPrologue(bool is_compacting);
|
+ void MarkCompactEpilogue(bool is_compacting);
|
+ void MarkCompactPrologue(bool is_compacting,
|
char* archived_thread_data);
|
- static void MarkCompactEpilogue(bool is_compacting,
|
+ void MarkCompactEpilogue(bool is_compacting,
|
char* archived_thread_data);
|
- static void PrintCurrentStackTrace(FILE* out);
|
- static void PrintStackTrace(FILE* out, char* thread_data);
|
- static void PrintStack(StringStream* accumulator);
|
- static void PrintStack();
|
- static Handle<String> StackTrace();
|
+ void PrintCurrentStackTrace(FILE* out);
|
+ void PrintStackTrace(FILE* out, char* thread_data);
|
+ void PrintStack(StringStream* accumulator);
|
+ void PrintStack();
|
+ Handle<String> StackTrace();
|
|
// Returns if the top context may access the given global object. If
|
// the result is false, the pending exception is guaranteed to be
|
// set.
|
- static bool MayNamedAccess(JSObject* receiver,
|
+ bool MayNamedAccess(JSObject* receiver,
|
Object* key,
|
v8::AccessType type);
|
- static bool MayIndexedAccess(JSObject* receiver,
|
+ bool MayIndexedAccess(JSObject* receiver,
|
uint32_t index,
|
v8::AccessType type);
|
|
- static void SetFailedAccessCheckCallback(
|
+ void SetFailedAccessCheckCallback(
|
v8::FailedAccessCheckCallback callback);
|
- static void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
|
+ void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
|
|
// Exception throwing support. The caller should use the result
|
// of Throw() as its return value.
|
- static Failure* Throw(Object* exception, MessageLocation* location = NULL);
|
+ Failure* Throw(Object* exception, MessageLocation* location = NULL);
|
// Re-throw an exception. This involves no error reporting since
|
// error reporting was handled when the exception was thrown
|
// originally.
|
- static Failure* ReThrow(Object* exception, MessageLocation* location = NULL);
|
- static void ScheduleThrow(Object* exception);
|
- static void ReportPendingMessages();
|
- static Failure* ThrowIllegalOperation();
|
+ Failure* ReThrow(Object* exception, MessageLocation* location = NULL);
|
+ void ScheduleThrow(Object* exception);
|
+ void ReportPendingMessages();
|
+ Failure* ThrowIllegalOperation();
|
|
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
|
- static Object* PromoteScheduledException();
|
- static void DoThrow(Object* exception,
|
+ Object* PromoteScheduledException();
|
+ void DoThrow(Object* exception,
|
MessageLocation* location,
|
const char* message);
|
- static bool ShouldReturnException(bool* is_caught_externally,
|
+ bool ShouldReturnException(bool* is_caught_externally,
|
bool catchable_by_javascript);
|
- static void ReportUncaughtException(Handle<Object> exception,
|
+ void ReportUncaughtException(Handle<Object> exception,
|
MessageLocation* location,
|
Handle<String> stack_trace);
|
|
// Attempts to compute the current source location, storing the
|
// result in the target out parameter.
|
- static void ComputeLocation(MessageLocation* target);
|
+ void ComputeLocation(MessageLocation* target);
|
|
// Override command line flag.
|
- static void TraceException(bool flag);
|
+ void TraceException(bool flag);
|
|
// Out of resource exception helpers.
|
- static Failure* StackOverflow();
|
- static Failure* TerminateExecution();
|
+ Failure* StackOverflow();
|
+ Failure* TerminateExecution();
|
|
// Administration
|
- static void Initialize();
|
- static void TearDown();
|
- static void Iterate(ObjectVisitor* v);
|
- static void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
|
- static char* Iterate(ObjectVisitor* v, char* t);
|
+ void Initialize();
|
+ void TearDown();
|
+ void Iterate(ObjectVisitor* v);
|
+ void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
|
+ char* Iterate(ObjectVisitor* v, char* t);
|
|
// Returns the global object of the current context. It could be
|
// a builtin object, or a js global object.
|
- static Handle<GlobalObject> global() {
|
+ Handle<GlobalObject> global() {
|
return Handle<GlobalObject>(context()->global());
|
}
|
|
// Returns the global proxy object of the current context.
|
- static Object* global_proxy() {
|
+ Object* global_proxy() {
|
return context()->global_proxy();
|
}
|
|
// Returns the current global context.
|
- static Handle<Context> global_context();
|
+ Handle<Context> global_context();
|
|
// Returns the global context of the calling JavaScript code. That
|
// is, the global context of the top-most JavaScript frame.
|
- static Handle<Context> GetCallingGlobalContext();
|
+ Handle<Context> GetCallingGlobalContext();
|
|
- static Handle<JSBuiltinsObject> builtins() {
|
+ Handle<JSBuiltinsObject> builtins() {
|
return Handle<JSBuiltinsObject>(thread_local_.context_->builtins());
|
}
|
|
- static Object* LookupSpecialFunction(JSObject* receiver,
|
+ Object* LookupSpecialFunction(JSObject* receiver,
|
JSObject* prototype,
|
JSFunction* value);
|
|
- static void RegisterTryCatchHandler(v8::TryCatch* that);
|
- static void UnregisterTryCatchHandler(v8::TryCatch* that);
|
+ void RegisterTryCatchHandler(v8::TryCatch* that);
|
+ void UnregisterTryCatchHandler(v8::TryCatch* that);
|
|
#define TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR(index, type, name) \
|
- static Handle<type> name() { \
|
+ Handle<type> name() { \
|
return Handle<type>(context()->global_context()->name()); \
|
}
|
GLOBAL_CONTEXT_FIELDS(TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR)
|
#undef TOP_GLOBAL_CONTEXT_FIELD_ACCESSOR
|
|
- static inline ThreadLocalTop* GetCurrentThread() { return &thread_local_; }
|
- static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
|
- static char* ArchiveThread(char* to);
|
- static char* RestoreThread(char* from);
|
- static void FreeThreadResources() { thread_local_.Free(); }
|
+ inline ThreadLocalTop* GetCurrentThread() { return &thread_local_; }
|
+ int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
|
+ char* ArchiveThread(char* to);
|
+ char* RestoreThread(char* from);
|
+ void FreeThreadResources() { thread_local_.Free(); }
|
|
static const char* kStackOverflowMessage;
|
|
private:
|
// The context that initiated this JS execution.
|
- static ThreadLocalTop thread_local_;
|
- static void InitializeThreadLocal();
|
- static void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
|
- static void MarkCompactPrologue(bool is_compacting,
|
+ ThreadLocalTop thread_local_;
|
+ void InitializeThreadLocal();
|
+ void PrintStackTrace(FILE* out, ThreadLocalTop* thread);
|
+ void MarkCompactPrologue(bool is_compacting,
|
ThreadLocalTop* archived_thread_data);
|
- static void MarkCompactEpilogue(bool is_compacting,
|
+ void MarkCompactEpilogue(bool is_compacting,
|
ThreadLocalTop* archived_thread_data);
|
|
- // Debug.
|
- // Mutex for serializing access to break control structures.
|
- static Mutex* break_access_;
|
+ class TopImpl;
|
+ TopImpl* const top_impl;
|
|
friend class SaveContext;
|
friend class AssertNoContextChange;
|
friend class ExecutionAccess;
|
+ friend class V8Context;
|
|
- static void FillCache();
|
+ void FillCache();
|
+ Top();
|
+ ~Top();
|
+ DISALLOW_COPY_AND_ASSIGN(Top);
|
};
|
|
|
@@ -392,12 +396,12 @@
|
class SaveContext BASE_EMBEDDED {
|
public:
|
SaveContext()
|
- : context_(Top::context()),
|
+ : context_(v8_context()->top_.context()),
|
#if __GNUC_VERSION__ >= 40100 && __GNUC_VERSION__ < 40300
|
dummy_(Top::context()),
|
#endif
|
- prev_(Top::save_context()) {
|
- Top::set_save_context(this);
|
+ prev_(v8_context()->top_.save_context()) {
|
+ v8_context()->top_.set_save_context(this);
|
|
// If there is no JS frame under the current C frame, use the value 0.
|
JavaScriptFrameIterator it;
|
@@ -405,8 +409,8 @@
|
}
|
|
~SaveContext() {
|
- Top::set_context(*context_);
|
- Top::set_save_context(prev_);
|
+ v8_context()->top_.set_context(*context_);
|
+ v8_context()->top_.set_save_context(prev_);
|
}
|
|
Handle<Context> context() { return context_; }
|
@@ -431,11 +435,11 @@
|
#ifdef DEBUG
|
public:
|
AssertNoContextChange() :
|
- context_(Top::context()) {
|
+ context_(v8_context()->top_.context()) {
|
}
|
|
~AssertNoContextChange() {
|
- ASSERT(Top::context() == *context_);
|
+ ASSERT(v8_context()->top_.context() == *context_);
|
}
|
|
private:
|
Index: src/v8threads.cc
|
===================================================================
|
--- src/v8threads.cc (revision 3008)
|
+++ src/v8threads.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -36,88 +36,88 @@
|
|
namespace v8 {
|
|
-static internal::Thread::LocalStorageKey thread_state_key =
|
- internal::Thread::CreateThreadLocalKey();
|
-static internal::Thread::LocalStorageKey thread_id_key =
|
- internal::Thread::CreateThreadLocalKey();
|
-
|
-
|
-// Track whether this V8 instance has ever called v8::Locker. This allows the
|
-// API code to verify that the lock is always held when V8 is being entered.
|
-bool Locker::active_ = false;
|
-
|
-
|
// Constructor for the Locker object. Once the Locker is constructed the
|
// current thread will be guaranteed to have the big V8 lock.
|
-Locker::Locker() : has_lock_(false), top_level_(true) {
|
+Locker::Locker() :
|
+ has_lock_(false),
|
+ top_level_(true)
|
+{
|
// Record that the Locker has been used at least once.
|
- active_ = true;
|
+ V8Context* const v8_context_ = v8_context();
|
+ v8_context_->v8_.active_ = true;
|
// Get the big lock if necessary.
|
- if (!internal::ThreadManager::IsLockedByCurrentThread()) {
|
- internal::ThreadManager::Lock();
|
+ if (!v8_context_->thread_manager_.IsLockedByCurrentThread()) {
|
+ v8_context_->thread_manager_.Lock();
|
has_lock_ = true;
|
// Make sure that V8 is initialized. Archiving of threads interferes
|
// with deserialization by adding additional root pointers, so we must
|
// initialize here, before anyone can call ~Locker() or Unlocker().
|
- if (!internal::V8::IsRunning()) {
|
+ if (!v8_context_->v8_.IsRunning()) {
|
V8::Initialize();
|
}
|
// This may be a locker within an unlocker in which case we have to
|
// get the saved state for this thread and restore it.
|
- if (internal::ThreadManager::RestoreThread()) {
|
+ if (v8_context_->thread_manager_.RestoreThread()) {
|
top_level_ = false;
|
} else {
|
internal::ExecutionAccess access;
|
- internal::StackGuard::ClearThread(access);
|
- internal::StackGuard::InitThread(access);
|
+ v8_context_->stack_guard_.ClearThread(access);
|
+ v8_context_->stack_guard_.InitThread(access);
|
}
|
}
|
- ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
|
+ ASSERT(v8_context_->thread_manager_.IsLockedByCurrentThread());
|
|
// Make sure this thread is assigned a thread id.
|
- internal::ThreadManager::AssignId();
|
+ v8_context_->thread_manager_.AssignId();
|
}
|
|
|
-bool Locker::IsLocked() {
|
- return internal::ThreadManager::IsLockedByCurrentThread();
|
+bool Locker::IsLocked()
|
+{
|
+ return v8_context()->thread_manager_.IsLockedByCurrentThread();
|
}
|
|
+bool Locker::IsActive() {
|
+ return v8_context()->v8_.active_;
|
+}
|
|
Locker::~Locker() {
|
- ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
|
+ V8Context* const v8_context_ = v8_context();
|
+ ASSERT(v8_context_->thread_manager_.IsLockedByCurrentThread());
|
if (has_lock_) {
|
if (top_level_) {
|
- internal::ThreadManager::FreeThreadResources();
|
+ v8_context_->thread_manager_.FreeThreadResources();
|
} else {
|
- internal::ThreadManager::ArchiveThread();
|
+ v8_context_->thread_manager_.ArchiveThread();
|
}
|
- internal::ThreadManager::Unlock();
|
+ v8_context_->thread_manager_.Unlock();
|
}
|
}
|
|
|
Unlocker::Unlocker() {
|
- ASSERT(internal::ThreadManager::IsLockedByCurrentThread());
|
- internal::ThreadManager::ArchiveThread();
|
- internal::ThreadManager::Unlock();
|
+ V8Context* const context = v8_context();
|
+ ASSERT(context->thread_manager_.IsLockedByCurrentThread());
|
+ context->thread_manager_.ArchiveThread();
|
+ context->thread_manager_.Unlock();
|
}
|
|
|
Unlocker::~Unlocker() {
|
- ASSERT(!internal::ThreadManager::IsLockedByCurrentThread());
|
- internal::ThreadManager::Lock();
|
- internal::ThreadManager::RestoreThread();
|
+ internal::ThreadManager& thread_manager = v8_context()->thread_manager_;
|
+ ASSERT(!thread_manager.IsLockedByCurrentThread());
|
+ thread_manager.Lock();
|
+ thread_manager.RestoreThread();
|
}
|
|
|
void Locker::StartPreemption(int every_n_ms) {
|
- v8::internal::ContextSwitcher::StartPreemption(every_n_ms);
|
+ v8_context()->thread_manager_.StartPreemption(every_n_ms);
|
}
|
|
|
void Locker::StopPreemption() {
|
- v8::internal::ContextSwitcher::StopPreemption();
|
+ v8_context()->thread_manager_.StopPreemption();
|
}
|
|
|
@@ -148,26 +148,28 @@
|
if (lazily_archived_thread_.IsValid()) {
|
EagerlyArchiveThread();
|
}
|
+
|
+ V8Context * const v8context = v8_context();
|
ThreadState* state =
|
reinterpret_cast<ThreadState*>(Thread::GetThreadLocal(thread_state_key));
|
if (state == NULL) {
|
// This is a new thread.
|
- StackGuard::InitThread(access);
|
+ v8context->stack_guard_.InitThread(access);
|
return false;
|
}
|
char* from = state->data();
|
- from = HandleScopeImplementer::RestoreThread(from);
|
- from = Top::RestoreThread(from);
|
- from = Relocatable::RestoreState(from);
|
+ from = v8context->handle_scope_implementer_.RestoreThread(from);
|
+ from = v8context->top_.RestoreThread(from);
|
+ from = v8context->relocatable_data_.RestoreState(from);
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- from = Debug::RestoreDebug(from);
|
+ from = v8context->debug_.RestoreDebug(from);
|
#endif
|
- from = StackGuard::RestoreStackGuard(from);
|
- from = RegExpStack::RestoreStack(from);
|
- from = Bootstrapper::RestoreState(from);
|
+ from = v8context->stack_guard_.RestoreStackGuard(from);
|
+ from = v8context->reg_exp_stack_.RestoreStack(from);
|
+ from = v8context->bootstrapper_.RestoreState(from);
|
Thread::SetThreadLocal(thread_state_key, NULL);
|
if (state->terminate_on_restore()) {
|
- StackGuard::TerminateExecution();
|
+ v8context->stack_guard_.TerminateExecution();
|
state->set_terminate_on_restore(false);
|
}
|
state->set_id(kInvalidId);
|
@@ -191,25 +193,23 @@
|
|
|
static int ArchiveSpacePerThread() {
|
- return HandleScopeImplementer::ArchiveSpacePerThread() +
|
- Top::ArchiveSpacePerThread() +
|
+ V8Context* const v8context = v8_context();
|
+ return v8context->handle_scope_implementer_.ArchiveSpacePerThread() +
|
+ v8context->top_.ArchiveSpacePerThread() +
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debug::ArchiveSpacePerThread() +
|
+ v8context->debug_.ArchiveSpacePerThread() +
|
#endif
|
- StackGuard::ArchiveSpacePerThread() +
|
- RegExpStack::ArchiveSpacePerThread() +
|
- Bootstrapper::ArchiveSpacePerThread() +
|
- Relocatable::ArchiveSpacePerThread();
|
+ v8context->stack_guard_.ArchiveSpacePerThread() +
|
+ v8context->reg_exp_stack_.ArchiveSpacePerThread() +
|
+ v8context->bootstrapper_.ArchiveSpacePerThread() +
|
+ v8context->relocatable_data_.ArchiveSpacePerThread();
|
}
|
|
|
-ThreadState* ThreadState::free_anchor_ = new ThreadState();
|
-ThreadState* ThreadState::in_use_anchor_ = new ThreadState();
|
-
|
-
|
-ThreadState::ThreadState() : id_(ThreadManager::kInvalidId),
|
+ThreadState::ThreadState(ThreadManager* thread_manager) : id_(ThreadManager::kInvalidId),
|
terminate_on_restore_(false),
|
- next_(this), previous_(this) {
|
+ next_(this), previous_(this),
|
+ thread_manager_(thread_manager) {
|
}
|
|
|
@@ -226,7 +226,7 @@
|
|
void ThreadState::LinkInto(List list) {
|
ThreadState* flying_anchor =
|
- list == FREE_LIST ? free_anchor_ : in_use_anchor_;
|
+ list == FREE_LIST ? thread_manager_->free_anchor_ : thread_manager_->in_use_anchor_;
|
next_ = flying_anchor->next_;
|
previous_ = flying_anchor;
|
flying_anchor->next_ = this;
|
@@ -234,10 +234,10 @@
|
}
|
|
|
-ThreadState* ThreadState::GetFree() {
|
+ThreadState* ThreadManager::GetFree() {
|
ThreadState* gotten = free_anchor_->next_;
|
if (gotten == free_anchor_) {
|
- ThreadState* new_thread_state = new ThreadState();
|
+ ThreadState* new_thread_state = new ThreadState(this);
|
new_thread_state->AllocateSpace();
|
return new_thread_state;
|
}
|
@@ -246,13 +246,13 @@
|
|
|
// Gets the first in the list of archived threads.
|
-ThreadState* ThreadState::FirstInUse() {
|
+ThreadState* ThreadManager::FirstInUse() {
|
return in_use_anchor_->Next();
|
}
|
|
|
ThreadState* ThreadState::Next() {
|
- if (next_ == in_use_anchor_) return NULL;
|
+ if (next_ == thread_manager_->in_use_anchor_) return NULL;
|
return next_;
|
}
|
|
@@ -260,17 +260,21 @@
|
// Thread ids must start with 1, because in TLS having thread id 0 can't
|
// be distinguished from not having a thread id at all (since NULL is
|
// defined as 0.)
|
-int ThreadManager::last_id_ = 0;
|
-Mutex* ThreadManager::mutex_ = OS::CreateMutex();
|
-ThreadHandle ThreadManager::mutex_owner_(ThreadHandle::INVALID);
|
-ThreadHandle ThreadManager::lazily_archived_thread_(ThreadHandle::INVALID);
|
-ThreadState* ThreadManager::lazily_archived_thread_state_ = NULL;
|
+ThreadManager::ThreadManager():
|
+ last_id_(0), mutex_(OS::CreateMutex()), mutex_owner_(ThreadHandle::INVALID),
|
+ lazily_archived_thread_(ThreadHandle::INVALID), lazily_archived_thread_state_ (NULL),
|
+ free_anchor_(new ThreadState(this)), in_use_anchor_(new ThreadState(this)),
|
+ thread_state_key (Thread::CreateThreadLocalKey()),
|
+ thread_id_key (Thread::CreateThreadLocalKey())
|
+{
|
+}
|
|
+ThreadManager::~ThreadManager() {}
|
|
void ThreadManager::ArchiveThread() {
|
ASSERT(!lazily_archived_thread_.IsValid());
|
ASSERT(!IsArchived());
|
- ThreadState* state = ThreadState::GetFree();
|
+ ThreadState* state = GetFree();
|
state->Unlink();
|
Thread::SetThreadLocal(thread_state_key, reinterpret_cast<void*>(state));
|
lazily_archived_thread_.Initialize(ThreadHandle::SELF);
|
@@ -285,31 +289,33 @@
|
ThreadState* state = lazily_archived_thread_state_;
|
state->LinkInto(ThreadState::IN_USE_LIST);
|
char* to = state->data();
|
+ V8Context * const v8context = v8_context();
|
// Ensure that data containing GC roots are archived first, and handle them
|
// in ThreadManager::Iterate(ObjectVisitor*).
|
- to = HandleScopeImplementer::ArchiveThread(to);
|
- to = Top::ArchiveThread(to);
|
- to = Relocatable::ArchiveState(to);
|
+ to = v8context->handle_scope_implementer_.ArchiveThread(to);
|
+ to = v8context->top_.ArchiveThread(to);
|
+ to = v8context->relocatable_data_.ArchiveState(to);
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- to = Debug::ArchiveDebug(to);
|
+ to = v8context->debug_.ArchiveDebug(to);
|
#endif
|
- to = StackGuard::ArchiveStackGuard(to);
|
- to = RegExpStack::ArchiveStack(to);
|
- to = Bootstrapper::ArchiveState(to);
|
+ to = v8context->stack_guard_.ArchiveStackGuard(to);
|
+ to = v8context->reg_exp_stack_.ArchiveStack(to);
|
+ to = v8context->bootstrapper_.ArchiveState(to);
|
lazily_archived_thread_.Initialize(ThreadHandle::INVALID);
|
lazily_archived_thread_state_ = NULL;
|
}
|
|
|
void ThreadManager::FreeThreadResources() {
|
- HandleScopeImplementer::FreeThreadResources();
|
- Top::FreeThreadResources();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->handle_scope_implementer_.FreeThreadResources();
|
+ v8context->top_.FreeThreadResources();
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debug::FreeThreadResources();
|
+ v8context->debug_.FreeThreadResources();
|
#endif
|
- StackGuard::FreeThreadResources();
|
- RegExpStack::FreeThreadResources();
|
- Bootstrapper::FreeThreadResources();
|
+ v8context->stack_guard_.FreeThreadResources();
|
+ v8context->reg_exp_stack_.FreeThreadResources();
|
+ v8context->bootstrapper_.FreeThreadResources();
|
}
|
|
|
@@ -320,35 +326,38 @@
|
|
void ThreadManager::Iterate(ObjectVisitor* v) {
|
// Expecting no threads during serialization/deserialization
|
- for (ThreadState* state = ThreadState::FirstInUse();
|
+ V8Context * const v8context = v8_context();
|
+ for (ThreadState* state = FirstInUse();
|
state != NULL;
|
state = state->Next()) {
|
char* data = state->data();
|
- data = HandleScopeImplementer::Iterate(v, data);
|
- data = Top::Iterate(v, data);
|
- data = Relocatable::Iterate(v, data);
|
+ data = v8context->handle_scope_implementer_.Iterate(v, data);
|
+ data = v8context->top_.Iterate(v, data);
|
+ data = v8context->relocatable_data_.Iterate(v, data);
|
}
|
}
|
|
|
void ThreadManager::MarkCompactPrologue(bool is_compacting) {
|
- for (ThreadState* state = ThreadState::FirstInUse();
|
+ V8Context * const v8context = v8_context();
|
+ for (ThreadState* state = FirstInUse();
|
state != NULL;
|
state = state->Next()) {
|
char* data = state->data();
|
- data += HandleScopeImplementer::ArchiveSpacePerThread();
|
- Top::MarkCompactPrologue(is_compacting, data);
|
+ data += v8context->handle_scope_implementer_.ArchiveSpacePerThread();
|
+ v8context->top_.MarkCompactPrologue(is_compacting, data);
|
}
|
}
|
|
|
void ThreadManager::MarkCompactEpilogue(bool is_compacting) {
|
- for (ThreadState* state = ThreadState::FirstInUse();
|
+ V8Context * const v8context = v8_context();
|
+ for (ThreadState* state = FirstInUse();
|
state != NULL;
|
state = state->Next()) {
|
char* data = state->data();
|
- data += HandleScopeImplementer::ArchiveSpacePerThread();
|
- Top::MarkCompactEpilogue(is_compacting, data);
|
+ data += v8context->handle_scope_implementer_.ArchiveSpacePerThread();
|
+ v8context->top_.MarkCompactEpilogue(is_compacting, data);
|
}
|
}
|
|
@@ -364,7 +373,7 @@
|
int thread_id = ++last_id_;
|
ASSERT(thread_id > 0); // see the comment near last_id_ definition.
|
Thread::SetThreadLocalInt(thread_id_key, thread_id);
|
- Top::set_thread_id(thread_id);
|
+ v8_context()->top_.set_thread_id(thread_id);
|
}
|
}
|
|
@@ -375,7 +384,7 @@
|
|
|
void ThreadManager::TerminateExecution(int thread_id) {
|
- for (ThreadState* state = ThreadState::FirstInUse();
|
+ for (ThreadState* state = FirstInUse();
|
state != NULL;
|
state = state->Next()) {
|
if (thread_id == state->id()) {
|
@@ -385,11 +394,6 @@
|
}
|
|
|
-// This is the ContextSwitcher singleton. There is at most a single thread
|
-// running which delivers preemption events to V8 threads.
|
-ContextSwitcher* ContextSwitcher::singleton_ = NULL;
|
-
|
-
|
ContextSwitcher::ContextSwitcher(int every_n_ms)
|
: keep_going_(true),
|
sleep_ms_(every_n_ms) {
|
@@ -398,7 +402,7 @@
|
|
// Set the scheduling interval of V8 threads. This function starts the
|
// ContextSwitcher thread if needed.
|
-void ContextSwitcher::StartPreemption(int every_n_ms) {
|
+void ThreadManager::StartPreemption(int every_n_ms) {
|
ASSERT(Locker::IsLocked());
|
if (singleton_ == NULL) {
|
// If the ContextSwitcher thread is not running at the moment start it now.
|
@@ -414,7 +418,7 @@
|
|
// Disable preemption of V8 threads. If multiple threads want to use V8 they
|
// must cooperatively schedule amongst them from this point on.
|
-void ContextSwitcher::StopPreemption() {
|
+void ThreadManager::StopPreemption() {
|
ASSERT(Locker::IsLocked());
|
if (singleton_ != NULL) {
|
// The ContextSwitcher thread is running. We need to stop it and release
|
@@ -431,15 +435,16 @@
|
// Main loop of the ContextSwitcher thread: Preempt the currently running V8
|
// thread at regular intervals.
|
void ContextSwitcher::Run() {
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
while (keep_going_) {
|
OS::Sleep(sleep_ms_);
|
- StackGuard::Preempt();
|
+ stack_guard.Preempt();
|
}
|
}
|
|
|
// Acknowledge the preemption by the receiving thread.
|
-void ContextSwitcher::PreemptionReceived() {
|
+void ThreadManager::PreemptionReceived() {
|
ASSERT(Locker::IsLocked());
|
// There is currently no accounting being done for this. But could be in the
|
// future, which is why we leave this in.
|
Index: src/register-allocator.cc
|
===================================================================
|
--- src/register-allocator.cc (revision 2855)
|
+++ src/register-allocator.cc Sat Nov 14 01:42:56 MSK 2009
|
@@ -45,8 +45,7 @@
|
|
|
Result::ZoneObjectList* Result::ConstantList() {
|
- static ZoneObjectList list(10);
|
- return &list;
|
+ return v8_context()->code_generator_data_.result_constants_list();
|
}
|
|
|
Index: src/log.h
|
===================================================================
|
--- src/log.h (revision 3069)
|
+++ src/log.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -80,8 +80,9 @@
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
#define LOG(Call) \
|
do { \
|
- if (v8::internal::Logger::is_logging()) \
|
- v8::internal::Logger::Call; \
|
+ internal::Logger & logger = v8_context()->logger_; \
|
+ if (logger.is_logging()) \
|
+ logger.Call; \
|
} while (false)
|
#else
|
#define LOG(Call) ((void) 0)
|
@@ -91,7 +92,7 @@
|
class VMState BASE_EMBEDDED {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
public:
|
- inline explicit VMState(StateTag state);
|
+ inline explicit VMState(StateTag state, Logger& logger = v8_context()->logger_);
|
inline ~VMState();
|
|
StateTag state() { return state_; }
|
@@ -143,131 +144,135 @@
|
#undef DECLARE_ENUM
|
|
// Acquires resources for logging if the right flags are set.
|
- static bool Setup();
|
+ bool Setup();
|
|
// Frees resources acquired in Setup.
|
- static void TearDown();
|
+ void TearDown();
|
|
// Enable the computation of a sliding window of states.
|
- static void EnableSlidingStateWindow();
|
+ void EnableSlidingStateWindow();
|
|
// Write a raw string to the log to be used as a preamble.
|
// No check is made that the 'preamble' is actually at the beginning
|
// of the log. The preample is used to write code events saved in the
|
// snapshot.
|
- static void Preamble(const char* content);
|
+ void Preamble(const char* content);
|
|
// Emits an event with a string value -> (name, value).
|
- static void StringEvent(const char* name, const char* value);
|
+ void StringEvent(const char* name, const char* value);
|
|
// Emits an event with an int value -> (name, value).
|
- static void IntEvent(const char* name, int value);
|
+ void IntEvent(const char* name, int value);
|
|
// Emits an event with an handle value -> (name, location).
|
- static void HandleEvent(const char* name, Object** location);
|
+ void HandleEvent(const char* name, Object** location);
|
|
// Emits memory management events for C allocated structures.
|
- static void NewEvent(const char* name, void* object, size_t size);
|
- static void DeleteEvent(const char* name, void* object);
|
+ void NewEvent(const char* name, void* object, size_t size);
|
+ void DeleteEvent(const char* name, void* object);
|
|
// Emits an event with a tag, and some resource usage information.
|
// -> (name, tag, <rusage information>).
|
// Currently, the resource usage information is a process time stamp
|
// and a real time timestamp.
|
- static void ResourceEvent(const char* name, const char* tag);
|
+ void ResourceEvent(const char* name, const char* tag);
|
|
// Emits an event that an undefined property was read from an
|
// object.
|
- static void SuspectReadEvent(String* name, Object* obj);
|
+ void SuspectReadEvent(String* name, Object* obj);
|
|
// Emits an event when a message is put on or read from a debugging queue.
|
// DebugTag lets us put a call-site specific label on the event.
|
- static void DebugTag(const char* call_site_tag);
|
- static void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
|
+ void DebugTag(const char* call_site_tag);
|
+ void DebugEvent(const char* event_type, Vector<uint16_t> parameter);
|
|
|
// ==== Events logged by --log-api. ====
|
- static void ApiNamedSecurityCheck(Object* key);
|
- static void ApiIndexedSecurityCheck(uint32_t index);
|
- static void ApiNamedPropertyAccess(const char* tag,
|
+ void ApiNamedSecurityCheck(Object* key);
|
+ void ApiIndexedSecurityCheck(uint32_t index);
|
+ void ApiNamedPropertyAccess(const char* tag,
|
JSObject* holder,
|
Object* name);
|
- static void ApiIndexedPropertyAccess(const char* tag,
|
+ void ApiIndexedPropertyAccess(const char* tag,
|
JSObject* holder,
|
uint32_t index);
|
- static void ApiObjectAccess(const char* tag, JSObject* obj);
|
- static void ApiEntryCall(const char* name);
|
+ void ApiObjectAccess(const char* tag, JSObject* obj);
|
+ void ApiEntryCall(const char* name);
|
|
|
// ==== Events logged by --log-code. ====
|
// Emits a code create event.
|
- static void CodeCreateEvent(LogEventsAndTags tag,
|
+ void CodeCreateEvent(LogEventsAndTags tag,
|
Code* code, const char* source);
|
- static void CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name);
|
- static void CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name,
|
+ void CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name);
|
+ void CodeCreateEvent(LogEventsAndTags tag, Code* code, String* name,
|
String* source, int line);
|
- static void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
|
+ void CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count);
|
// Emits a code create event for a RegExp.
|
- static void RegExpCodeCreateEvent(Code* code, String* source);
|
+ void RegExpCodeCreateEvent(Code* code, String* source);
|
// Emits a code move event.
|
- static void CodeMoveEvent(Address from, Address to);
|
+ void CodeMoveEvent(Address from, Address to);
|
// Emits a code delete event.
|
- static void CodeDeleteEvent(Address from);
|
+ void CodeDeleteEvent(Address from);
|
|
// ==== Events logged by --log-gc. ====
|
// Heap sampling events: start, end, and individual types.
|
- static void HeapSampleBeginEvent(const char* space, const char* kind);
|
- static void HeapSampleEndEvent(const char* space, const char* kind);
|
- static void HeapSampleItemEvent(const char* type, int number, int bytes);
|
- static void HeapSampleJSConstructorEvent(const char* constructor,
|
+ void HeapSampleBeginEvent(const char* space, const char* kind);
|
+ void HeapSampleEndEvent(const char* space, const char* kind);
|
+ void HeapSampleItemEvent(const char* type, int number, int bytes);
|
+ void HeapSampleJSConstructorEvent(const char* constructor,
|
int number, int bytes);
|
- static void HeapSampleJSRetainersEvent(const char* constructor,
|
+ void HeapSampleJSRetainersEvent(const char* constructor,
|
const char* event);
|
- static void HeapSampleJSProducerEvent(const char* constructor,
|
+ void HeapSampleJSProducerEvent(const char* constructor,
|
Address* stack);
|
- static void HeapSampleStats(const char* space, const char* kind,
|
+ void HeapSampleStats(const char* space, const char* kind,
|
int capacity, int used);
|
|
- static void SharedLibraryEvent(const char* library_path,
|
+ void SharedLibraryEvent(const char* library_path,
|
uintptr_t start,
|
uintptr_t end);
|
- static void SharedLibraryEvent(const wchar_t* library_path,
|
+ void SharedLibraryEvent(const wchar_t* library_path,
|
uintptr_t start,
|
uintptr_t end);
|
|
// ==== Events logged by --log-regexp ====
|
// Regexp compilation and execution events.
|
|
- static void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
|
+ void RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache);
|
|
// Log an event reported from generated code
|
- static void LogRuntime(Vector<const char> format, JSArray* args);
|
+ void LogRuntime(Vector<const char> format, JSArray* args);
|
|
+private:
|
+ friend class V8Context;
|
+ Logger();
|
+ DISALLOW_COPY_AND_ASSIGN(Logger);
|
+public:
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- static StateTag state() {
|
+ StateTag state() {
|
return current_state_ ? current_state_->state() : OTHER;
|
}
|
|
- static bool is_logging() {
|
+ bool is_logging() {
|
return is_logging_;
|
}
|
|
// Pause/Resume collection of profiling data.
|
// When data collection is paused, CPU Tick events are discarded until
|
// data collection is Resumed.
|
- static void PauseProfiler(int flags);
|
- static void ResumeProfiler(int flags);
|
- static int GetActiveProfilerModules();
|
+ void PauseProfiler(int flags);
|
+ void ResumeProfiler(int flags);
|
+ int GetActiveProfilerModules();
|
|
// If logging is performed into a memory buffer, allows to
|
// retrieve previously written messages. See v8.h.
|
- static int GetLogLines(int from_pos, char* dest_buf, int max_size);
|
+ int GetLogLines(int from_pos, char* dest_buf, int max_size);
|
|
// Logs all compiled functions found in the heap.
|
- static void LogCompiledFunctions();
|
+ void LogCompiledFunctions();
|
|
private:
|
-
|
// Profiler's sampling interval (in milliseconds).
|
static const int kSamplingIntervalMs = 1;
|
|
@@ -275,51 +280,51 @@
|
static const int kCompressionWindowSize = 4;
|
|
// Emits the profiler's first message.
|
- static void ProfilerBeginEvent();
|
+ void ProfilerBeginEvent();
|
|
// Emits aliases for compressed messages.
|
- static void LogAliases();
|
+ void LogAliases();
|
|
// Emits the source code of a regexp. Used by regexp events.
|
- static void LogRegExpSource(Handle<JSRegExp> regexp);
|
+ void LogRegExpSource(Handle<JSRegExp> regexp);
|
|
// Emits a profiler tick event. Used by the profiler thread.
|
- static void TickEvent(TickSample* sample, bool overflow);
|
+ void TickEvent(TickSample* sample, bool overflow);
|
|
- static void ApiEvent(const char* name, ...);
|
+ void ApiEvent(const char* name, ...);
|
|
// Logs a StringEvent regardless of whether FLAG_log is true.
|
- static void UncheckedStringEvent(const char* name, const char* value);
|
+ void UncheckedStringEvent(const char* name, const char* value);
|
|
// Stops logging and profiling in case of insufficient resources.
|
static void StopLoggingAndProfiling();
|
|
// Returns whether profiler's sampler is active.
|
- static bool IsProfilerSamplerActive();
|
+ bool IsProfilerSamplerActive();
|
|
// The sampler used by the profiler and the sliding state window.
|
- static Ticker* ticker_;
|
+ Ticker* ticker_;
|
|
// When the statistical profile is active, profiler_
|
// points to a Profiler, that handles collection
|
// of samples.
|
- static Profiler* profiler_;
|
+ Profiler* profiler_;
|
|
// A stack of VM states.
|
- static VMState* current_state_;
|
+ VMState* current_state_;
|
|
// Singleton bottom or default vm state.
|
- static VMState bottom_state_;
|
+ VMState bottom_state_;
|
|
// SlidingStateWindow instance keeping a sliding window of the most
|
// recent VM states.
|
- static SlidingStateWindow* sliding_state_window_;
|
+ SlidingStateWindow* sliding_state_window_;
|
|
// An array of log events names.
|
- static const char** log_events_;
|
+ const char** log_events_;
|
|
// An instance of helper created if log compression is enabled.
|
- static CompressionHelper* compression_helper_;
|
+ CompressionHelper* compression_helper_;
|
|
// Internal implementation classes with access to
|
// private members.
|
@@ -332,9 +337,9 @@
|
|
friend class LoggerTestHelper;
|
|
- static bool is_logging_;
|
+ bool is_logging_;
|
#else
|
- static bool is_logging() { return false; }
|
+ bool is_logging() { return false; }
|
#endif
|
};
|
|
Index: src/contexts.cc
|
===================================================================
|
--- src/contexts.cc (revision 2230)
|
+++ src/contexts.cc Sat Nov 14 01:43:05 MSK 2009
|
@@ -152,11 +152,11 @@
|
// slot found.
|
int index =
|
ScopeInfo<>::ContextSlotIndex(*code,
|
- Heap::arguments_shadow_symbol(),
|
+ v8_context()->heap_.arguments_shadow_symbol(),
|
NULL);
|
ASSERT(index >= 0); // arguments must exist and be in the heap context
|
Handle<JSObject> arguments(JSObject::cast(context->get(index)));
|
- ASSERT(arguments->HasLocalProperty(Heap::length_symbol()));
|
+ ASSERT(arguments->HasLocalProperty(v8_context()->heap_.length_symbol()));
|
if (FLAG_trace_contexts) {
|
PrintF("=> found parameter %d in arguments object\n", param_index);
|
}
|
@@ -239,14 +239,14 @@
|
bool Context::IsBootstrappingOrContext(Object* object) {
|
// During bootstrapping we allow all objects to pass as
|
// contexts. This is necessary to fix circular dependencies.
|
- return Bootstrapper::IsActive() || object->IsContext();
|
+ return v8_context()->bootstrapper_.IsActive() || object->IsContext();
|
}
|
|
|
bool Context::IsBootstrappingOrGlobalObject(Object* object) {
|
// During bootstrapping we allow all objects to pass as global
|
// objects. This is necessary to fix circular dependencies.
|
- return Bootstrapper::IsActive() || object->IsGlobalObject();
|
+ return v8_context()->bootstrapper_.IsActive() || object->IsGlobalObject();
|
}
|
#endif
|
|
Index: src/handles-inl.h
|
===================================================================
|
--- src/handles-inl.h (revision 2692)
|
+++ src/handles-inl.h Sat Nov 14 01:43:01 MSK 2009
|
@@ -36,10 +36,20 @@
|
namespace v8 {
|
namespace internal {
|
|
+inline HandleScope::HandleScope() {
|
+ HandleScopeImplementer& handle_scope_implementer = v8_context()->handle_scope_implementer_;
|
+ previous_ = handle_scope_implementer.current_;
|
+ handle_scope_implementer.current_.extensions = 0;
|
+}
|
+
|
+inline HandleScope::~HandleScope() {
|
+ v8_context()->handle_scope_implementer_.Leave(&previous_);
|
+}
|
+
|
template<class T>
|
Handle<T>::Handle(T* obj) {
|
ASSERT(!obj->IsFailure());
|
- location_ = HandleScope::CreateHandle(obj);
|
+ location_ = v8_context()->handle_scope_implementer_.CreateHandle(obj);
|
}
|
|
|
Index: src/compiler.h
|
===================================================================
|
--- src/compiler.h (revision 3218)
|
+++ src/compiler.h Sat Nov 14 01:43:03 MSK 2009
|
@@ -46,7 +46,7 @@
|
// This means you need to call Factory::NewFunctionFromBoilerplate
|
// before you have a real function with context.
|
|
-class Compiler : public AllStatic {
|
+class Compiler {
|
public:
|
enum ValidationState { VALIDATE_JSON, DONT_VALIDATE_JSON };
|
|
@@ -55,14 +55,14 @@
|
// the return handle contains NULL.
|
|
// Compile a String source within a context.
|
- static Handle<JSFunction> Compile(Handle<String> source,
|
+ Handle<JSFunction> Compile(Handle<String> source,
|
Handle<Object> script_name,
|
int line_offset, int column_offset,
|
v8::Extension* extension,
|
ScriptDataImpl* script_Data);
|
|
// Compile a String source within a context for Eval.
|
- static Handle<JSFunction> CompileEval(Handle<String> source,
|
+ Handle<JSFunction> CompileEval(Handle<String> source,
|
Handle<Context> context,
|
bool is_global,
|
ValidationState validation);
|
@@ -70,20 +70,25 @@
|
// Compile from function info (used for lazy compilation). Returns
|
// true on success and false if the compilation resulted in a stack
|
// overflow.
|
- static bool CompileLazy(Handle<SharedFunctionInfo> shared, int loop_nesting);
|
+ bool CompileLazy(Handle<SharedFunctionInfo> shared, int loop_nesting);
|
|
// Compile a function boilerplate object (the function is possibly
|
// lazily compiled). Called recursively from a backend code
|
// generator 'caller' to build the boilerplate.
|
- static Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node,
|
+ Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node,
|
Handle<Script> script,
|
AstVisitor* caller);
|
|
// Set the function info for a newly compiled function.
|
- static void SetFunctionInfo(Handle<JSFunction> fun,
|
+ void SetFunctionInfo(Handle<JSFunction> fun,
|
FunctionLiteral* lit,
|
bool is_toplevel,
|
Handle<Script> script);
|
+private:
|
+ Compiler();
|
+ DISALLOW_COPY_AND_ASSIGN(Compiler);
|
+ friend class V8Context;
|
+ StaticResource<SafeStringInputBuffer> safe_string_input_buffer;
|
};
|
|
|
Index: test/cctest/test-regexp.cc
|
===================================================================
|
--- test/cctest/test-regexp.cc (revision 2785)
|
+++ test/cctest/test-regexp.cc Sun Nov 15 12:50:47 MSK 2009
|
@@ -56,10 +56,11 @@
|
#endif
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
|
static SmartPointer<const char> Parse(const char* input) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
v8::HandleScope scope;
|
ZoneScope zone_scope(DELETE_ON_EXIT);
|
FlatStringReader reader(CStrVector(input));
|
@@ -72,7 +73,7 @@
|
}
|
|
static bool CheckSimple(const char* input) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
v8::HandleScope scope;
|
unibrow::Utf8InputBuffer<> buffer(input, strlen(input));
|
ZoneScope zone_scope(DELETE_ON_EXIT);
|
@@ -90,7 +91,7 @@
|
};
|
|
static MinMaxPair CheckMinMaxMatch(const char* input) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
v8::HandleScope scope;
|
unibrow::Utf8InputBuffer<> buffer(input, strlen(input));
|
ZoneScope zone_scope(DELETE_ON_EXIT);
|
@@ -116,7 +117,7 @@
|
}
|
|
TEST(Parser) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
CHECK_PARSE_EQ("abc", "'abc'");
|
CHECK_PARSE_EQ("", "%");
|
CHECK_PARSE_EQ("abc|def", "(| 'abc' 'def')");
|
@@ -347,7 +348,7 @@
|
|
static void ExpectError(const char* input,
|
const char* expected) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
v8::HandleScope scope;
|
ZoneScope zone_scope(DELETE_ON_EXIT);
|
FlatStringReader reader(CStrVector(input));
|
@@ -361,7 +362,7 @@
|
|
|
TEST(Errors) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
const char* kEndBackslash = "\\ at end of pattern";
|
ExpectError("\\", kEndBackslash);
|
const char* kUnterminatedGroup = "Unterminated group";
|
@@ -457,7 +458,7 @@
|
|
|
static RegExpNode* Compile(const char* input, bool multiline, bool is_ascii) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
FlatStringReader reader(CStrVector(input));
|
RegExpCompileData compile_data;
|
if (!v8::internal::ParseRegExp(&reader, multiline, &compile_data))
|
@@ -1188,8 +1189,8 @@
|
true);
|
|
CHECK_EQ(NativeRegExpMacroAssembler::EXCEPTION, result);
|
- CHECK(Top::has_pending_exception());
|
- Top::clear_pending_exception();
|
+ CHECK(v8_context()->top_.has_pending_exception());
|
+ v8_context()->top_.clear_pending_exception();
|
}
|
|
|
@@ -1237,7 +1238,7 @@
|
CHECK_EQ(0, captures[0]);
|
CHECK_EQ(42, captures[1]);
|
|
- Top::clear_pending_exception();
|
+ v8_context()->top_.clear_pending_exception();
|
}
|
|
#else // ! V8_REGEX_NATIVE
|
@@ -1551,6 +1552,6 @@
|
|
|
TEST(Graph) {
|
- V8::Initialize(NULL);
|
+ v8_context()->v8_.Initialize(NULL);
|
Execute("(?:(?:x(.))?\1)+$", false, true, true);
|
}
|
Index: test/cctest/test-decls.cc
|
===================================================================
|
--- test/cctest/test-decls.cc (revision 2708)
|
+++ test/cctest/test-decls.cc Sun Nov 15 12:30:09 MSK 2009
|
@@ -130,7 +130,7 @@
|
InitializeIfNeeded();
|
// A retry after a GC may pollute the counts, so perform gc now
|
// to avoid that.
|
- v8::internal::Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
|
+ v8_context()->heap_.CollectGarbage(0, v8::internal::NEW_SPACE);
|
HandleScope scope;
|
TryCatch catcher;
|
catcher.SetVerbose(true);
|
Index: src/execution.h
|
===================================================================
|
--- src/execution.h (revision 3229)
|
+++ src/execution.h Sun Nov 15 12:58:09 MSK 2009
|
@@ -137,87 +137,87 @@
|
static Handle<Object> GetConstructorDelegate(Handle<Object> object);
|
};
|
|
-
|
+class ExecutionData;
|
class ExecutionAccess;
|
|
|
// StackGuard contains the handling of the limits that are used to limit the
|
// number of nested invocations of JavaScript and the stack size used in each
|
// invocation.
|
-class StackGuard : public AllStatic {
|
+class StackGuard {
|
public:
|
// Pass the address beyond which the stack should not grow. The stack
|
// is assumed to grow downwards.
|
- static void SetStackLimit(uintptr_t limit);
|
+ void SetStackLimit(uintptr_t limit);
|
|
// Threading support.
|
- static char* ArchiveStackGuard(char* to);
|
- static char* RestoreStackGuard(char* from);
|
- static int ArchiveSpacePerThread();
|
- static void FreeThreadResources();
|
+ char* ArchiveStackGuard(char* to);
|
+ char* RestoreStackGuard(char* from);
|
+ int ArchiveSpacePerThread();
|
+ void FreeThreadResources();
|
// Sets up the default stack guard for this thread if it has not
|
// already been set up.
|
- static void InitThread(const ExecutionAccess& lock);
|
+ void InitThread(const ExecutionAccess& lock);
|
// Clears the stack guard for this thread so it does not look as if
|
// it has been set up.
|
- static void ClearThread(const ExecutionAccess& lock);
|
+ void ClearThread(const ExecutionAccess& lock);
|
|
- static bool IsStackOverflow();
|
- static bool IsPreempted();
|
- static void Preempt();
|
- static bool IsInterrupted();
|
- static void Interrupt();
|
- static bool IsTerminateExecution();
|
- static void TerminateExecution();
|
+ bool IsStackOverflow();
|
+ bool IsPreempted();
|
+ void Preempt();
|
+ bool IsInterrupted();
|
+ void Interrupt();
|
+ bool IsTerminateExecution();
|
+ void TerminateExecution();
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- static bool IsDebugBreak();
|
- static void DebugBreak();
|
- static bool IsDebugCommand();
|
- static void DebugCommand();
|
+ bool IsDebugBreak();
|
+ void DebugBreak();
|
+ bool IsDebugCommand();
|
+ void DebugCommand();
|
#endif
|
- static void Continue(InterruptFlag after_what);
|
+ void Continue(InterruptFlag after_what);
|
|
// This provides an asynchronous read of the stack limits for the current
|
// thread. There are no locks protecting this, but it is assumed that you
|
// have the global V8 lock if you are using multiple V8 threads.
|
- static uintptr_t climit() {
|
+ uintptr_t climit() {
|
return thread_local_.climit_;
|
}
|
- static uintptr_t jslimit() {
|
+ uintptr_t jslimit() {
|
return thread_local_.jslimit_;
|
}
|
- static uintptr_t real_jslimit() {
|
+ uintptr_t real_jslimit() {
|
return thread_local_.real_jslimit_;
|
}
|
- static Address address_of_jslimit() {
|
+ Address address_of_jslimit() {
|
return reinterpret_cast<Address>(&thread_local_.jslimit_);
|
}
|
- static Address address_of_real_jslimit() {
|
+ Address address_of_real_jslimit() {
|
return reinterpret_cast<Address>(&thread_local_.real_jslimit_);
|
}
|
-
|
+ StackGuard() {} // public for tests
|
private:
|
// You should hold the ExecutionAccess lock when calling this method.
|
- static bool IsSet(const ExecutionAccess& lock);
|
+ bool IsSet(const ExecutionAccess& lock);
|
|
// You should hold the ExecutionAccess lock when calling this method.
|
- static void set_limits(uintptr_t value, const ExecutionAccess& lock) {
|
+ void set_limits(uintptr_t value, const ExecutionAccess& lock) {
|
thread_local_.jslimit_ = value;
|
thread_local_.climit_ = value;
|
- Heap::SetStackLimits();
|
+ v8_context()->heap_.SetStackLimits();
|
}
|
|
// Reset limits to actual values. For example after handling interrupt.
|
// You should hold the ExecutionAccess lock when calling this method.
|
- static void reset_limits(const ExecutionAccess& lock) {
|
+ void reset_limits(const ExecutionAccess& lock) {
|
thread_local_.jslimit_ = thread_local_.real_jslimit_;
|
thread_local_.climit_ = thread_local_.real_climit_;
|
- Heap::SetStackLimits();
|
+ v8_context()->heap_.SetStackLimits();
|
}
|
|
// Enable or disable interrupts.
|
- static void EnableInterrupts();
|
- static void DisableInterrupts();
|
+ void EnableInterrupts();
|
+ void DisableInterrupts();
|
|
static const uintptr_t kLimitSize = kPointerSize * 128 * KB;
|
|
@@ -231,7 +231,6 @@
|
|
class ThreadLocal {
|
public:
|
- ThreadLocal() { Clear(); }
|
// You should hold the ExecutionAccess lock when you call Initialize or
|
// Clear.
|
void Initialize();
|
@@ -256,10 +255,13 @@
|
int interrupt_flags_;
|
};
|
|
- static ThreadLocal thread_local_;
|
-
|
+ ThreadLocal thread_local_;
|
+ static void Setup();
|
+ static void TearDown();
|
+ DISALLOW_COPY_AND_ASSIGN(StackGuard);
|
friend class StackLimitCheck;
|
friend class PostponeInterruptsScope;
|
+ friend class V8Context;
|
};
|
|
|
@@ -267,12 +269,13 @@
|
class StackLimitCheck BASE_EMBEDDED {
|
public:
|
bool HasOverflowed() const {
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
// Stack has overflowed in C++ code only if stack pointer exceeds the C++
|
// stack guard and the limits are not set to interrupt values.
|
// TODO(214): Stack overflows are ignored if a interrupt is pending. This
|
// code should probably always use the initial C++ limit.
|
- return (reinterpret_cast<uintptr_t>(this) < StackGuard::climit()) &&
|
- StackGuard::IsStackOverflow();
|
+ return (reinterpret_cast<uintptr_t>(this) < stack_guard.climit()) &&
|
+ stack_guard.IsStackOverflow();
|
}
|
};
|
|
@@ -284,13 +287,15 @@
|
class PostponeInterruptsScope BASE_EMBEDDED {
|
public:
|
PostponeInterruptsScope() {
|
- StackGuard::thread_local_.postpone_interrupts_nesting_++;
|
- StackGuard::DisableInterrupts();
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
+ stack_guard.thread_local_.postpone_interrupts_nesting_++;
|
+ stack_guard.DisableInterrupts();
|
}
|
|
~PostponeInterruptsScope() {
|
- if (--StackGuard::thread_local_.postpone_interrupts_nesting_ == 0) {
|
- StackGuard::EnableInterrupts();
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
+ if (--stack_guard.thread_local_.postpone_interrupts_nesting_ == 0) {
|
+ stack_guard.EnableInterrupts();
|
}
|
}
|
};
|
Index: src/api.h
|
===================================================================
|
--- src/api.h (revision 3209)
|
+++ src/api.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -148,7 +148,20 @@
|
UNVISITED, VISITED, INSTALLED
|
};
|
|
+class RegisteredExtension;
|
+class internal::ApiData {
|
+public:
|
+ StringInputBuffer write_input_buffer;
|
+ // To distinguish the function templates, so that we can find them in the
|
+ // function cache of the global context.
|
+ int next_serial_number;
|
|
+ EmbeddedVector<char, 128> buffer;
|
+ RegisteredExtension* first_extension_;
|
+ RegisteredExtension* first_auto_extension_;
|
+ ApiData() : next_serial_number(0), first_extension_(NULL), first_auto_extension_(NULL) {}
|
+};
|
+
|
class RegisteredExtension {
|
public:
|
explicit RegisteredExtension(Extension* extension);
|
@@ -158,14 +171,12 @@
|
RegisteredExtension* next_auto() { return next_auto_; }
|
ExtensionTraversalState state() { return state_; }
|
void set_state(ExtensionTraversalState value) { state_ = value; }
|
- static RegisteredExtension* first_extension() { return first_extension_; }
|
+ static RegisteredExtension* first_extension() { return v8_context()->api_data.first_extension_; }
|
private:
|
Extension* extension_;
|
RegisteredExtension* next_;
|
RegisteredExtension* next_auto_;
|
ExtensionTraversalState state_;
|
- static RegisteredExtension* first_extension_;
|
- static RegisteredExtension* first_auto_extension_;
|
};
|
|
|
@@ -318,26 +329,40 @@
|
// data.
|
class HandleScopeImplementer {
|
public:
|
+ // Counts the number of allocated handles.
|
+ int NumberOfHandles();
|
|
- HandleScopeImplementer()
|
- : blocks_(0),
|
- entered_contexts_(0),
|
- saved_contexts_(0),
|
- spare_(NULL),
|
- ignore_out_of_memory_(false),
|
- call_depth_(0) { }
|
+ // Creates a new handle with the given value.
|
+ template <typename T>
|
+ inline T** CreateHandle(T* value) {
|
+ internal::Object** cur = current_.next;
|
+ if (cur == current_.limit) cur = Extend();
|
+ // Update the current next field, set the value in the created
|
+ // handle, and return the result.
|
+ ASSERT(cur < current_.limit);
|
+ current_.next = cur + 1;
|
|
- static HandleScopeImplementer* instance();
|
+ T** result = reinterpret_cast<T**>(cur);
|
+ *result = value;
|
+ return result;
|
+ }
|
|
+ // Deallocates any extensions used by the current scope.
|
+ void DeleteExtensions();
|
+
|
+ Address current_extensions_address();
|
+ Address current_next_address();
|
+ Address current_limit_address();
|
+
|
// Threading support for handle data.
|
- static int ArchiveSpacePerThread();
|
- static char* RestoreThread(char* from);
|
- static char* ArchiveThread(char* to);
|
- static void FreeThreadResources();
|
+ int ArchiveSpacePerThread();
|
+ char* RestoreThread(char* from);
|
+ char* ArchiveThread(char* to);
|
+ void FreeThreadResources();
|
|
// Garbage collection support.
|
- static void Iterate(v8::internal::ObjectVisitor* v);
|
- static char* Iterate(v8::internal::ObjectVisitor* v, char* data);
|
+ void Iterate(v8::internal::ObjectVisitor* v);
|
+ char* Iterate(v8::internal::ObjectVisitor* v, char* data);
|
|
|
inline internal::Object** GetSpareOrNewBlock();
|
@@ -404,6 +429,39 @@
|
char* ArchiveThreadHelper(char* to);
|
|
DISALLOW_COPY_AND_ASSIGN(HandleScopeImplementer);
|
+
|
+ v8::ImplementationUtilities::HandleScopeData current_;
|
+ // Pushes a fresh handle scope to be used when allocating new handles.
|
+ void Enter(
|
+ v8::ImplementationUtilities::HandleScopeData* previous) {
|
+ *previous = current_;
|
+ current_.extensions = 0;
|
+ }
|
+
|
+ // Re-establishes the previous scope state. Should be called only
|
+ // once, and only for the current scope.
|
+ void Leave(
|
+ const v8::ImplementationUtilities::HandleScopeData* previous) {
|
+ if (current_.extensions > 0) {
|
+ DeleteExtensions();
|
+ }
|
+ current_ = *previous;
|
+#ifdef DEBUG
|
+ ZapRange(current_.next, current_.limit);
|
+#endif
|
+ }
|
+
|
+ HandleScopeImplementer();
|
+ // Extend the handle scope making room for more handles.
|
+ internal::Object** Extend();
|
+
|
+ // Zaps the handles in the half-open interval [start, end).
|
+ static void ZapRange(internal::Object** start, internal::Object** end);
|
+
|
+ friend class v8::HandleScope;
|
+ friend class HandleScope;
|
+ friend class V8Context;
|
+ friend class v8::ImplementationUtilities;
|
};
|
|
|
Index: src/spaces-inl.h
|
===================================================================
|
--- src/spaces-inl.h (revision 3098)
|
+++ src/spaces-inl.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -82,12 +82,12 @@
|
// Page
|
|
Page* Page::next_page() {
|
- return MemoryAllocator::GetNextPage(this);
|
+ return v8_context()->heap_.memory_allocator()->GetNextPage(this);
|
}
|
|
|
Address Page::AllocationTop() {
|
- PagedSpace* owner = MemoryAllocator::PageOwner(this);
|
+ PagedSpace* owner = v8_context()->heap_.memory_allocator()->PageOwner(this);
|
return owner->PageAllocationTop(this);
|
}
|
|
@@ -272,7 +272,7 @@
|
Page* p = Page::FromAddress(addr);
|
ASSERT(p->is_valid());
|
|
- return MemoryAllocator::IsPageInSpace(p, this);
|
+ return v8_context()->heap_.memory_allocator()->IsPageInSpace(p, this);
|
}
|
|
|
@@ -362,9 +362,10 @@
|
|
|
bool FreeListNode::IsFreeListNode(HeapObject* object) {
|
- return object->map() == Heap::raw_unchecked_byte_array_map()
|
- || object->map() == Heap::raw_unchecked_one_pointer_filler_map()
|
- || object->map() == Heap::raw_unchecked_two_pointer_filler_map();
|
+ Heap& heap = v8_context()->heap_;
|
+ return object->map() == heap.raw_unchecked_byte_array_map()
|
+ || object->map() == heap.raw_unchecked_one_pointer_filler_map()
|
+ || object->map() == heap.raw_unchecked_two_pointer_filler_map();
|
}
|
|
} } // namespace v8::internal
|
Index: src/api.cc
|
===================================================================
|
--- src/api.cc (revision 3228)
|
+++ src/api.cc Sun Nov 15 12:53:54 MSK 2009
|
@@ -62,30 +62,31 @@
|
|
|
#define EXCEPTION_PREAMBLE() \
|
- thread_local.IncrementCallDepth(); \
|
- ASSERT(!i::Top::external_caught_exception()); \
|
+ v8_context()->handle_scope_implementer_.IncrementCallDepth(); \
|
+ ASSERT(!v8_context()->top_.external_caught_exception()); \
|
bool has_pending_exception = false
|
|
|
#define EXCEPTION_BAILOUT_CHECK(value) \
|
do { \
|
- thread_local.DecrementCallDepth(); \
|
+ V8Context* const v8context = v8_context(); \
|
+ v8context->handle_scope_implementer_.DecrementCallDepth(); \
|
if (has_pending_exception) { \
|
- if (thread_local.CallDepthIsZero() && i::Top::is_out_of_memory()) { \
|
- if (!thread_local.ignore_out_of_memory()) \
|
- i::V8::FatalProcessOutOfMemory(NULL); \
|
+ if (v8context->handle_scope_implementer_.CallDepthIsZero() && v8context->top_.is_out_of_memory()) { \
|
+ if (!v8context->handle_scope_implementer_.ignore_out_of_memory()) \
|
+ v8context->v8_.FatalProcessOutOfMemory(NULL); \
|
} \
|
- bool call_depth_is_zero = thread_local.CallDepthIsZero(); \
|
- i::Top::OptionalRescheduleException(call_depth_is_zero); \
|
+ bool call_depth_is_zero = v8context->handle_scope_implementer_.CallDepthIsZero(); \
|
+ v8context->top_.OptionalRescheduleException(call_depth_is_zero); \
|
return value; \
|
} \
|
} while (false)
|
|
|
-#define API_ENTRY_CHECK(msg) \
|
+#define API_ENTRY_CHECK(msg, context) \
|
do { \
|
if (v8::Locker::IsActive()) { \
|
- ApiCheck(i::ThreadManager::IsLockedByCurrentThread(), \
|
+ ApiCheck(context->thread_manager_.IsLockedByCurrentThread(), \
|
msg, \
|
"Entering the V8 API without proper locking in place"); \
|
} \
|
@@ -94,13 +95,10 @@
|
// --- D a t a t h a t i s s p e c i f i c t o a t h r e a d ---
|
|
|
-static i::HandleScopeImplementer thread_local;
|
|
-
|
// --- E x c e p t i o n B e h a v i o r ---
|
|
|
-static FatalErrorCallback exception_behavior = NULL;
|
int i::Internals::kJSObjectType = JS_OBJECT_TYPE;
|
int i::Internals::kFirstNonstringType = FIRST_NONSTRING_TYPE;
|
int i::Internals::kProxyType = PROXY_TYPE;
|
@@ -113,11 +111,12 @@
|
|
|
|
-static FatalErrorCallback& GetFatalErrorHandler() {
|
- if (exception_behavior == NULL) {
|
- exception_behavior = DefaultFatalErrorHandler;
|
+static FatalErrorCallback GetFatalErrorHandler() {
|
+ i::V8& impl = v8_context()->v8_;
|
+ if (impl.get_exception_behavior() == NULL) {
|
+ impl.set_exception_behavior(DefaultFatalErrorHandler);
|
}
|
- return exception_behavior;
|
+ return impl.get_exception_behavior();
|
}
|
|
|
@@ -137,20 +136,20 @@
|
|
|
void V8::SetFatalErrorHandler(FatalErrorCallback that) {
|
- exception_behavior = that;
|
+ v8_context()->v8_.set_exception_behavior(that);
|
}
|
|
|
bool Utils::ReportApiFailure(const char* location, const char* message) {
|
FatalErrorCallback callback = GetFatalErrorHandler();
|
callback(location, message);
|
- i::V8::SetFatalError();
|
+ v8_context()->v8_.SetFatalError();
|
return false;
|
}
|
|
|
bool V8::IsDead() {
|
- return i::V8::IsDead();
|
+ return v8_context()->v8_.IsDead();
|
}
|
|
|
@@ -188,8 +187,8 @@
|
* yet been done.
|
*/
|
static inline bool IsDeadCheck(const char* location) {
|
- return !i::V8::IsRunning()
|
- && i::V8::IsDead() ? ReportV8Dead(location) : false;
|
+ internal::V8& v8 = v8_context()->v8_;
|
+ return !v8.IsRunning() && v8.IsDead() ? ReportV8Dead(location) : false;
|
}
|
|
|
@@ -203,32 +202,28 @@
|
}
|
|
// --- S t a t i c s ---
|
-
|
-
|
-static i::StringInputBuffer write_input_buffer;
|
-
|
-
|
-static inline bool EnsureInitialized(const char* location) {
|
- if (i::V8::IsRunning()) {
|
+static inline bool EnsureInitialized(const char* location, V8Context* const v8context = v8_context()) {
|
+ if (v8context->v8_.IsRunning()) {
|
return true;
|
}
|
if (IsDeadCheck(location)) {
|
return false;
|
}
|
- return ApiCheck(v8::V8::Initialize(), location, "Error initializing V8");
|
+
|
+ return ApiCheck(V8::Initialize(), location, "Error initializing V8");
|
}
|
|
|
ImplementationUtilities::HandleScopeData*
|
ImplementationUtilities::CurrentHandleScope() {
|
- return &i::HandleScope::current_;
|
+ return &v8_context()->handle_scope_implementer_.current_;
|
}
|
|
|
#ifdef DEBUG
|
void ImplementationUtilities::ZapHandleRange(i::Object** begin,
|
i::Object** end) {
|
- i::HandleScope::ZapRange(begin, end);
|
+ v8_context()->handle_scope_implementer_.ZapRange(begin, end);
|
}
|
#endif
|
|
@@ -270,27 +265,25 @@
|
v8::Handle<Value> ThrowException(v8::Handle<v8::Value> value) {
|
if (IsDeadCheck("v8::ThrowException()")) return v8::Handle<Value>();
|
ENTER_V8;
|
+ V8Context * const v8context = v8_context();
|
// If we're passed an empty handle, we throw an undefined exception
|
// to deal more gracefully with out of memory situations.
|
if (value.IsEmpty()) {
|
- i::Top::ScheduleThrow(i::Heap::undefined_value());
|
+ v8context->top_.ScheduleThrow(v8context->heap_.undefined_value());
|
} else {
|
- i::Top::ScheduleThrow(*Utils::OpenHandle(*value));
|
+ v8context->top_.ScheduleThrow(*Utils::OpenHandle(*value));
|
}
|
return v8::Undefined();
|
}
|
|
|
-RegisteredExtension* RegisteredExtension::first_extension_ = NULL;
|
-
|
-
|
RegisteredExtension::RegisteredExtension(Extension* extension)
|
: extension_(extension), state_(UNVISITED) { }
|
|
|
void RegisteredExtension::Register(RegisteredExtension* that) {
|
- that->next_ = RegisteredExtension::first_extension_;
|
- RegisteredExtension::first_extension_ = that;
|
+ that->next_ = v8_context()->api_data.first_extension_;
|
+ v8_context()->api_data.first_extension_ = that;
|
}
|
|
|
@@ -345,12 +338,12 @@
|
int young_space_size = constraints->max_young_space_size();
|
int old_gen_size = constraints->max_old_space_size();
|
if (young_space_size != 0 || old_gen_size != 0) {
|
- bool result = i::Heap::ConfigureHeap(young_space_size / 2, old_gen_size);
|
+ bool result = v8_context()->heap_.ConfigureHeap(young_space_size / 2, old_gen_size);
|
if (!result) return false;
|
}
|
if (constraints->stack_limit() != NULL) {
|
uintptr_t limit = reinterpret_cast<uintptr_t>(constraints->stack_limit());
|
- i::StackGuard::SetStackLimit(limit);
|
+ v8_context()->stack_guard_.SetStackLimit(limit);
|
}
|
return true;
|
}
|
@@ -359,8 +352,7 @@
|
i::Object** V8::GlobalizeReference(i::Object** obj) {
|
if (IsDeadCheck("V8::Persistent::New")) return NULL;
|
LOG_API("Persistent::New");
|
- i::Handle<i::Object> result =
|
- i::GlobalHandles::Create(*obj);
|
+ i::Handle<i::Object> result = v8_context()->global_handles_.Create(*obj);
|
return result.location();
|
}
|
|
@@ -368,60 +360,65 @@
|
void V8::MakeWeak(i::Object** object, void* parameters,
|
WeakReferenceCallback callback) {
|
LOG_API("MakeWeak");
|
- i::GlobalHandles::MakeWeak(object, parameters, callback);
|
+ v8_context()->global_handles_.MakeWeak(object, parameters, callback);
|
}
|
|
|
void V8::ClearWeak(i::Object** obj) {
|
LOG_API("ClearWeak");
|
- i::GlobalHandles::ClearWeakness(obj);
|
+ v8_context()->global_handles_.ClearWeakness(obj);
|
}
|
|
|
bool V8::IsGlobalNearDeath(i::Object** obj) {
|
LOG_API("IsGlobalNearDeath");
|
- if (!i::V8::IsRunning()) return false;
|
- return i::GlobalHandles::IsNearDeath(obj);
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return false;
|
+ return v8context->global_handles_.IsNearDeath(obj);
|
}
|
|
|
bool V8::IsGlobalWeak(i::Object** obj) {
|
LOG_API("IsGlobalWeak");
|
- if (!i::V8::IsRunning()) return false;
|
- return i::GlobalHandles::IsWeak(obj);
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return false;
|
+ return v8context->global_handles_.IsWeak(obj);
|
}
|
|
|
void V8::DisposeGlobal(i::Object** obj) {
|
LOG_API("DisposeGlobal");
|
- if (!i::V8::IsRunning()) return;
|
- if ((*obj)->IsGlobalContext()) i::Heap::NotifyContextDisposed();
|
- i::GlobalHandles::Destroy(obj);
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ if ((*obj)->IsGlobalContext()) v8context->heap_.NotifyContextDisposed();
|
+ v8context->global_handles_.Destroy(obj);
|
}
|
|
// --- H a n d l e s ---
|
|
|
-HandleScope::HandleScope() : is_closed_(false) {
|
- API_ENTRY_CHECK("HandleScope::HandleScope");
|
- i::HandleScope::Enter(&previous_);
|
+HandleScope::HandleScope() :
|
+ is_closed_(false) {
|
+ V8Context* const v8context = v8_context();
|
+ API_ENTRY_CHECK("HandleScope::HandleScope", v8context);
|
+ v8_context()->handle_scope_implementer_.Enter(&previous_);
|
}
|
|
|
HandleScope::~HandleScope() {
|
if (!is_closed_) {
|
- i::HandleScope::Leave(&previous_);
|
+ v8_context()->handle_scope_implementer_.Leave(&previous_);
|
}
|
}
|
|
|
int HandleScope::NumberOfHandles() {
|
- return i::HandleScope::NumberOfHandles();
|
+ return v8_context()->handle_scope_implementer_.NumberOfHandles();
|
}
|
|
|
i::Object** v8::HandleScope::CreateHandle(i::Object* value) {
|
- return i::HandleScope::CreateHandle(value);
|
+ return v8_context()->handle_scope_implementer_.CreateHandle(value);
|
}
|
|
|
@@ -429,24 +426,25 @@
|
if (IsDeadCheck("v8::Context::Enter()")) return;
|
ENTER_V8;
|
i::Handle<i::Context> env = Utils::OpenHandle(this);
|
- thread_local.EnterContext(env);
|
-
|
- thread_local.SaveContext(i::Top::context());
|
- i::Top::set_context(*env);
|
+ V8Context* const v8context = v8_context();
|
+ v8context->handle_scope_implementer_.EnterContext(env);
|
+ v8context->handle_scope_implementer_.SaveContext(v8context->top_.context());
|
+ v8context->top_.set_context(*env);
|
}
|
|
|
void Context::Exit() {
|
- if (!i::V8::IsRunning()) return;
|
- if (!ApiCheck(thread_local.LeaveLastContext(),
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ if (!ApiCheck(v8context->handle_scope_implementer_.LeaveLastContext(),
|
"v8::Context::Exit()",
|
"Cannot exit non-entered context")) {
|
return;
|
}
|
|
// Content of 'last_context' could be NULL.
|
- i::Context* last_context = thread_local.RestoreContext();
|
- i::Top::set_context(last_context);
|
+ i::Context* last_context = v8context->handle_scope_implementer_.RestoreContext();
|
+ v8context->top_.set_context(last_context);
|
}
|
|
|
@@ -495,7 +493,7 @@
|
// Read the result before popping the handle block.
|
i::Object* result = *value;
|
is_closed_ = true;
|
- i::HandleScope::Leave(&previous_);
|
+ v8_context()->handle_scope_implementer_.Leave(&previous_);
|
|
// Allocate a new handle on the previous handle block.
|
i::Handle<i::Object> handle(result);
|
@@ -620,12 +618,6 @@
|
Utils::OpenHandle(this)->set_parent_template(*Utils::OpenHandle(*value));
|
}
|
|
-
|
-// To distinguish the function templates, so that we can find them in the
|
-// function cache of the global context.
|
-static int next_serial_number = 0;
|
-
|
-
|
Local<FunctionTemplate> FunctionTemplate::New(InvocationCallback callback,
|
v8::Handle<Value> data, v8::Handle<Signature> signature) {
|
EnsureInitialized("v8::FunctionTemplate::New()");
|
@@ -636,7 +628,7 @@
|
i::Handle<i::FunctionTemplateInfo> obj =
|
i::Handle<i::FunctionTemplateInfo>::cast(struct_obj);
|
InitializeFunctionTemplate(obj);
|
- obj->set_serial_number(i::Smi::FromInt(next_serial_number++));
|
+ obj->set_serial_number(i::Smi::FromInt(v8_context()->api_data.next_serial_number++));
|
if (callback != 0) {
|
if (data.IsEmpty()) data = v8::Undefined();
|
Utils::ToLocal(obj)->SetCallHandler(callback, data);
|
@@ -1091,7 +1083,7 @@
|
if (pre_data != NULL && !pre_data->SanityCheck()) {
|
pre_data = NULL;
|
}
|
- i::Handle<i::JSFunction> boilerplate = i::Compiler::Compile(str,
|
+ i::Handle<i::JSFunction> boilerplate = v8_context()->compiler_.Compile(str,
|
name_obj,
|
line_offset,
|
column_offset,
|
@@ -1122,7 +1114,7 @@
|
i::Handle<i::JSFunction> boilerplate = Utils::OpenHandle(*generic);
|
i::Handle<i::JSFunction> result =
|
i::Factory::NewFunctionFromBoilerplate(boilerplate,
|
- i::Top::global_context());
|
+ v8_context()->top_.global_context());
|
return Local<Script>(ToApi<Script>(result));
|
}
|
|
@@ -1142,12 +1134,13 @@
|
{
|
HandleScope scope;
|
i::Handle<i::JSFunction> fun = Utils::OpenHandle(this);
|
+ V8Context* const v8context = v8_context();
|
if (fun->IsBoilerplate()) {
|
fun = i::Factory::NewFunctionFromBoilerplate(fun,
|
- i::Top::global_context());
|
+ v8context->top_.global_context());
|
}
|
EXCEPTION_PREAMBLE();
|
- i::Handle<i::Object> receiver(i::Top::context()->global_proxy());
|
+ i::Handle<i::Object> receiver(v8context->top_.context()->global_proxy());
|
i::Handle<i::Object> result =
|
i::Execution::Call(fun, receiver, 0, NULL, &has_pending_exception);
|
EXCEPTION_BAILOUT_CHECK(Local<Value>());
|
@@ -1191,14 +1184,14 @@
|
|
|
v8::TryCatch::TryCatch()
|
- : next_(i::Top::try_catch_handler_address()),
|
- exception_(i::Heap::the_hole_value()),
|
+ : next_(v8_context()->top_.try_catch_handler()),
|
+ exception_(v8_context()->heap_.the_hole_value()),
|
message_(i::Smi::FromInt(0)),
|
is_verbose_(false),
|
can_continue_(true),
|
capture_message_(true),
|
rethrow_(false) {
|
- i::Top::RegisterTryCatchHandler(this);
|
+ v8_context()->top_.RegisterTryCatchHandler(this);
|
}
|
|
|
@@ -1206,10 +1199,10 @@
|
if (rethrow_) {
|
v8::HandleScope scope;
|
v8::Local<v8::Value> exc = v8::Local<v8::Value>::New(Exception());
|
- i::Top::UnregisterTryCatchHandler(this);
|
+ v8_context()->top_.UnregisterTryCatchHandler(this);
|
v8::ThrowException(exc);
|
} else {
|
- i::Top::UnregisterTryCatchHandler(this);
|
+ v8_context()->top_.UnregisterTryCatchHandler(this);
|
}
|
}
|
|
@@ -1269,7 +1262,7 @@
|
|
|
void v8::TryCatch::Reset() {
|
- exception_ = i::Heap::the_hole_value();
|
+ exception_ = v8_context()->heap_.the_hole_value();
|
message_ = i::Smi::FromInt(0);
|
}
|
|
@@ -1336,7 +1329,7 @@
|
i::Object** argv[],
|
bool* has_pending_exception) {
|
i::Handle<i::String> fmt_str = i::Factory::LookupAsciiSymbol(name);
|
- i::Object* object_fun = i::Top::builtins()->GetProperty(*fmt_str);
|
+ i::Object* object_fun = v8_context()->top_.builtins()->GetProperty(*fmt_str);
|
i::Handle<i::JSFunction> fun =
|
i::Handle<i::JSFunction>(i::JSFunction::cast(object_fun));
|
i::Handle<i::Object> value =
|
@@ -1350,7 +1343,7 @@
|
bool* has_pending_exception) {
|
i::Object** argv[1] = { data.location() };
|
return CallV8HeapFunction(name,
|
- i::Top::builtins(),
|
+ v8_context()->top_.builtins(),
|
1,
|
argv,
|
has_pending_exception);
|
@@ -1441,7 +1434,7 @@
|
void Message::PrintCurrentStackTrace(FILE* out) {
|
if (IsDeadCheck("v8::Message::PrintCurrentStackTrace()")) return;
|
ENTER_V8;
|
- i::Top::PrintCurrentStackTrace(out);
|
+ v8_context()->top_.PrintCurrentStackTrace(out);
|
}
|
|
|
@@ -1530,7 +1523,7 @@
|
bool Value::IsDate() const {
|
if (IsDeadCheck("v8::Value::IsDate()")) return false;
|
i::Handle<i::Object> obj = Utils::OpenHandle(this);
|
- return obj->HasSpecificClassOf(i::Heap::Date_symbol());
|
+ return obj->HasSpecificClassOf(v8_context()->heap_.Date_symbol());
|
}
|
|
|
@@ -1699,7 +1692,7 @@
|
void v8::Date::CheckCast(v8::Value* that) {
|
if (IsDeadCheck("v8::Date::Cast()")) return;
|
i::Handle<i::Object> obj = Utils::OpenHandle(that);
|
- ApiCheck(obj->HasSpecificClassOf(i::Heap::Date_symbol()),
|
+ ApiCheck(obj->HasSpecificClassOf(v8_context()->heap_.Date_symbol()),
|
"v8::Date::Cast()",
|
"Could not convert to date");
|
}
|
@@ -2231,7 +2224,7 @@
|
do {
|
// Generate a random 32-bit hash value but limit range to fit
|
// within a smi.
|
- hash_value = i::V8::Random() & i::Smi::kMaxValue;
|
+ hash_value = v8_context()->v8_.Random() & i::Smi::kMaxValue;
|
attempts++;
|
} while (hash_value == 0 && attempts < 30);
|
hash_value = hash_value != 0 ? hash_value : 1; // never return 0
|
@@ -2419,6 +2412,7 @@
|
LOG_API("String::WriteUtf8");
|
ENTER_V8;
|
i::Handle<i::String> str = Utils::OpenHandle(this);
|
+ i::StringInputBuffer& write_input_buffer = v8_context()->api_data.write_input_buffer;
|
write_input_buffer.Reset(0, *str);
|
int len = str->length();
|
// Encode the first K - 3 bytes directly into the buffer since we
|
@@ -2469,6 +2463,8 @@
|
if ( (length == -1) || (length > str->length() - start) )
|
end = str->length() - start;
|
if (end < 0) return 0;
|
+ i::StringInputBuffer& write_input_buffer = v8_context()->api_data.write_input_buffer;
|
+
|
write_input_buffer.Reset(start, *str);
|
int i;
|
for (i = 0; i < end; i++) {
|
@@ -2632,7 +2628,9 @@
|
// --- E n v i r o n m e n t ---
|
|
bool v8::V8::Initialize() {
|
- if (i::V8::IsRunning()) return true;
|
+ V8Context* const v8context = v8_context();
|
+ i::V8& v8 = v8context->v8_;
|
+ if (v8.IsRunning()) return true;
|
ENTER_V8;
|
HandleScope scope;
|
if (i::FLAG_new_snapshot) {
|
@@ -2640,12 +2638,12 @@
|
} else {
|
if (i::Snapshot::Initialize()) return true;
|
}
|
- return i::V8::Initialize(NULL);
|
+ return v8.Initialize(NULL);
|
}
|
|
|
bool v8::V8::Dispose() {
|
- i::V8::TearDown();
|
+ v8_context()->v8_.TearDown();
|
return true;
|
}
|
|
@@ -2654,27 +2652,30 @@
|
|
|
void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
|
- heap_statistics->set_total_heap_size(i::Heap::CommittedMemory());
|
- heap_statistics->set_used_heap_size(i::Heap::SizeOfObjects());
|
+ i::Heap& heap = v8_context()->heap_;
|
+ heap_statistics->set_total_heap_size(heap.CommittedMemory());
|
+ heap_statistics->set_used_heap_size(heap.SizeOfObjects());
|
}
|
|
|
bool v8::V8::IdleNotification() {
|
+ i::V8& v8 = v8_context()->v8_;
|
// Returning true tells the caller that it need not
|
// continue to call IdleNotification.
|
- if (!i::V8::IsRunning()) return true;
|
- return i::V8::IdleNotification();
|
+ if (!v8.IsRunning()) return true;
|
+ return v8.IdleNotification();
|
}
|
|
|
void v8::V8::LowMemoryNotification() {
|
- if (!i::V8::IsRunning()) return;
|
- i::Heap::CollectAllGarbage(true);
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ v8context->heap_.CollectAllGarbage(true);
|
}
|
|
|
const char* v8::V8::GetVersion() {
|
- static v8::internal::EmbeddedVector<char, 128> buffer;
|
+ i::EmbeddedVector<char, 128> & buffer = v8_context()->api_data.buffer;
|
v8::internal::Version::GetString(buffer);
|
return buffer.start();
|
}
|
@@ -2704,12 +2705,13 @@
|
i::Handle<i::Context> env;
|
{
|
ENTER_V8;
|
+ i::Heap& heap = v8_context()->heap_;
|
#if defined(ANDROID)
|
// On mobile device, full GC is expensive, leave it to the system to
|
// decide when should make a full GC.
|
#else
|
// Give the heap a chance to cleanup if we've disposed contexts.
|
- i::Heap::CollectAllGarbageIfContextDisposed();
|
+ heap.CollectAllGarbageIfContextDisposed();
|
#endif
|
v8::Handle<ObjectTemplate> proxy_template = global_template;
|
i::Handle<i::FunctionTemplateInfo> proxy_constructor;
|
@@ -2739,12 +2741,12 @@
|
proxy_constructor->set_needs_access_check(
|
global_constructor->needs_access_check());
|
global_constructor->set_needs_access_check(false);
|
- global_constructor->set_access_check_info(i::Heap::undefined_value());
|
+ global_constructor->set_access_check_info(heap.undefined_value());
|
}
|
}
|
|
// Create the environment.
|
- env = i::Bootstrapper::CreateEnvironment(
|
+ env = v8_context()->bootstrapper_.CreateEnvironment(
|
Utils::OpenHandle(*global_object),
|
proxy_template,
|
extensions);
|
@@ -2800,13 +2802,13 @@
|
|
|
bool Context::InContext() {
|
- return i::Top::context() != NULL;
|
+ return v8_context()->top_.context() != NULL;
|
}
|
|
|
v8::Local<v8::Context> Context::GetEntered() {
|
if (IsDeadCheck("v8::Context::GetEntered()")) return Local<Context>();
|
- i::Handle<i::Object> last = thread_local.LastEnteredContext();
|
+ i::Handle<i::Object> last = v8_context()->handle_scope_implementer_.LastEnteredContext();
|
if (last.is_null()) return Local<Context>();
|
i::Handle<i::Context> context = i::Handle<i::Context>::cast(last);
|
return Utils::ToLocal(context);
|
@@ -2815,7 +2817,7 @@
|
|
v8::Local<v8::Context> Context::GetCurrent() {
|
if (IsDeadCheck("v8::Context::GetCurrent()")) return Local<Context>();
|
- i::Handle<i::Object> current = i::Top::global_context();
|
+ i::Handle<i::Object> current = v8_context()->top_.global_context();
|
if (current.is_null()) return Local<Context>();
|
i::Handle<i::Context> context = i::Handle<i::Context>::cast(current);
|
return Utils::ToLocal(context);
|
@@ -2824,7 +2826,7 @@
|
|
v8::Local<v8::Context> Context::GetCalling() {
|
if (IsDeadCheck("v8::Context::GetCalling()")) return Local<Context>();
|
- i::Handle<i::Object> calling = i::Top::GetCallingGlobalContext();
|
+ i::Handle<i::Object> calling = v8_context()->top_.GetCallingGlobalContext();
|
if (calling.is_null()) return Local<Context>();
|
i::Handle<i::Context> context = i::Handle<i::Context>::cast(calling);
|
return Utils::ToLocal(context);
|
@@ -2847,7 +2849,7 @@
|
i::Object** ctx = reinterpret_cast<i::Object**>(this);
|
i::Handle<i::Context> context =
|
i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
|
- i::Bootstrapper::DetachGlobal(context);
|
+ v8_context()->bootstrapper_.DetachGlobal(context);
|
}
|
|
|
@@ -3057,7 +3059,7 @@
|
reinterpret_cast<v8::String::ExternalStringResource*>(parameter);
|
if (resource != NULL) {
|
const size_t total_size = resource->length() * sizeof(*resource->data());
|
- i::Counters::total_external_string_memory.Decrement(total_size);
|
+ v8_context()->counters_.total_external_string_memory.Decrement(total_size);
|
|
// The object will continue to live in the JavaScript heap until the
|
// handle is entirely cleaned out by the next GC. For example the
|
@@ -3087,7 +3089,7 @@
|
reinterpret_cast<v8::String::ExternalAsciiStringResource*>(parameter);
|
if (resource != NULL) {
|
const size_t total_size = resource->length() * sizeof(*resource->data());
|
- i::Counters::total_external_string_memory.Decrement(total_size);
|
+ v8_context()->counters_.total_external_string_memory.Decrement(total_size);
|
|
// The object will continue to live in the JavaScript heap until the
|
// handle is entirely cleaned out by the next GC. For example the
|
@@ -3109,10 +3111,11 @@
|
LOG_API("String::NewExternal");
|
ENTER_V8;
|
const size_t total_size = resource->length() * sizeof(*resource->data());
|
- i::Counters::total_external_string_memory.Increment(total_size);
|
+ v8_context()->counters_.total_external_string_memory.Increment(total_size);
|
i::Handle<i::String> result = NewExternalStringHandle(resource);
|
- i::Handle<i::Object> handle = i::GlobalHandles::Create(*result);
|
- i::GlobalHandles::MakeWeak(handle.location(),
|
+ i::GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ i::Handle<i::Object> handle = global_handles.Create(*result);
|
+ global_handles.MakeWeak(handle.location(),
|
resource,
|
&DisposeExternalString);
|
return Utils::ToLocal(result);
|
@@ -3129,8 +3132,9 @@
|
// Operation was successful and the string is not a symbol. In this case
|
// we need to make sure that the we call the destructor for the external
|
// resource when no strong references to the string remain.
|
- i::Handle<i::Object> handle = i::GlobalHandles::Create(*obj);
|
- i::GlobalHandles::MakeWeak(handle.location(),
|
+ i::GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ i::Handle<i::Object> handle = global_handles.Create(*obj);
|
+ global_handles.MakeWeak(handle.location(),
|
resource,
|
&DisposeExternalString);
|
}
|
@@ -3144,10 +3148,11 @@
|
LOG_API("String::NewExternal");
|
ENTER_V8;
|
const size_t total_size = resource->length() * sizeof(*resource->data());
|
- i::Counters::total_external_string_memory.Increment(total_size);
|
+ v8_context()->counters_.total_external_string_memory.Increment(total_size);
|
i::Handle<i::String> result = NewExternalAsciiStringHandle(resource);
|
- i::Handle<i::Object> handle = i::GlobalHandles::Create(*result);
|
- i::GlobalHandles::MakeWeak(handle.location(),
|
+ i::GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ i::Handle<i::Object> handle = global_handles.Create(*result);
|
+ global_handles.MakeWeak(handle.location(),
|
resource,
|
&DisposeExternalAsciiString);
|
return Utils::ToLocal(result);
|
@@ -3165,8 +3170,9 @@
|
// Operation was successful and the string is not a symbol. In this case
|
// we need to make sure that the we call the destructor for the external
|
// resource when no strong references to the string remain.
|
- i::Handle<i::Object> handle = i::GlobalHandles::Create(*obj);
|
- i::GlobalHandles::MakeWeak(handle.location(),
|
+ i::GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ i::Handle<i::Object> handle = global_handles.Create(*obj);
|
+ global_handles.MakeWeak(handle.location(),
|
resource,
|
&DisposeExternalAsciiString);
|
}
|
@@ -3190,7 +3196,7 @@
|
LOG_API("Object::New");
|
ENTER_V8;
|
i::Handle<i::JSObject> obj =
|
- i::Factory::NewJSObject(i::Top::object_function());
|
+ i::Factory::NewJSObject(v8_context()->top_.object_function());
|
return Utils::ToLocal(obj);
|
}
|
|
@@ -3307,7 +3313,7 @@
|
|
|
void V8::IgnoreOutOfMemoryException() {
|
- thread_local.set_ignore_out_of_memory(true);
|
+ v8_context()->handle_scope_implementer_.set_ignore_out_of_memory(true);
|
}
|
|
|
@@ -3319,8 +3325,7 @@
|
NeanderArray listeners(i::Factory::message_listeners());
|
NeanderObject obj(2);
|
obj.set(0, *i::Factory::NewProxy(FUNCTION_ADDR(that)));
|
- obj.set(1, data.IsEmpty() ?
|
- i::Heap::undefined_value() :
|
+ obj.set(1, data.IsEmpty() ?v8_context()->heap_.undefined_value() :
|
*Utils::OpenHandle(*data));
|
listeners.add(obj.value());
|
return true;
|
@@ -3333,13 +3338,15 @@
|
ENTER_V8;
|
HandleScope scope;
|
NeanderArray listeners(i::Factory::message_listeners());
|
+ i::Heap& heap = v8_context()->heap_;
|
+
|
for (int i = 0; i < listeners.length(); i++) {
|
if (listeners.get(i)->IsUndefined()) continue; // skip deleted ones
|
|
NeanderObject listener(i::JSObject::cast(listeners.get(i)));
|
i::Handle<i::Proxy> callback_obj(i::Proxy::cast(listener.get(0)));
|
if (callback_obj->proxy() == FUNCTION_ADDR(that)) {
|
- listeners.set(i, i::Heap::undefined_value());
|
+ listeners.set(i, heap.undefined_value());
|
}
|
}
|
}
|
@@ -3362,59 +3369,59 @@
|
|
void V8::EnableSlidingStateWindow() {
|
if (IsDeadCheck("v8::V8::EnableSlidingStateWindow()")) return;
|
- i::Logger::EnableSlidingStateWindow();
|
+ v8_context()->logger_.EnableSlidingStateWindow();
|
}
|
|
|
void V8::SetFailedAccessCheckCallbackFunction(
|
FailedAccessCheckCallback callback) {
|
if (IsDeadCheck("v8::V8::SetFailedAccessCheckCallbackFunction()")) return;
|
- i::Top::SetFailedAccessCheckCallback(callback);
|
+ v8_context()->top_.SetFailedAccessCheckCallback(callback);
|
}
|
|
|
void V8::AddObjectGroup(Persistent<Value>* objects, size_t length) {
|
if (IsDeadCheck("v8::V8::AddObjectGroup()")) return;
|
STATIC_ASSERT(sizeof(Persistent<Value>) == sizeof(i::Object**));
|
- i::GlobalHandles::AddGroup(reinterpret_cast<i::Object***>(objects), length);
|
+ v8_context()->global_handles_.AddGroup(reinterpret_cast<i::Object***>(objects), length);
|
}
|
|
|
int V8::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
|
if (IsDeadCheck("v8::V8::AdjustAmountOfExternalAllocatedMemory()")) return 0;
|
- return i::Heap::AdjustAmountOfExternalAllocatedMemory(change_in_bytes);
|
+ return v8_context()->heap_.AdjustAmountOfExternalAllocatedMemory(change_in_bytes);
|
}
|
|
|
void V8::SetGlobalGCPrologueCallback(GCCallback callback) {
|
if (IsDeadCheck("v8::V8::SetGlobalGCPrologueCallback()")) return;
|
- i::Heap::SetGlobalGCPrologueCallback(callback);
|
+ v8_context()->heap_.SetGlobalGCPrologueCallback(callback);
|
}
|
|
|
void V8::SetGlobalGCEpilogueCallback(GCCallback callback) {
|
if (IsDeadCheck("v8::V8::SetGlobalGCEpilogueCallback()")) return;
|
- i::Heap::SetGlobalGCEpilogueCallback(callback);
|
+ v8_context()->heap_.SetGlobalGCEpilogueCallback(callback);
|
}
|
|
|
void V8::PauseProfiler() {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- i::Logger::PauseProfiler(PROFILER_MODULE_CPU);
|
+ v8_context()->logger_.PauseProfiler(PROFILER_MODULE_CPU);
|
#endif
|
}
|
|
|
void V8::ResumeProfiler() {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- i::Logger::ResumeProfiler(PROFILER_MODULE_CPU);
|
+ v8_context()->logger_.ResumeProfiler(PROFILER_MODULE_CPU);
|
#endif
|
}
|
|
|
bool V8::IsProfilerPaused() {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- return i::Logger::GetActiveProfilerModules() & PROFILER_MODULE_CPU;
|
+ return v8_context()->logger_.GetActiveProfilerModules() & PROFILER_MODULE_CPU;
|
#else
|
return true;
|
#endif
|
@@ -3423,6 +3430,7 @@
|
|
void V8::ResumeProfilerEx(int flags) {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
+ i::Logger& logger = v8_context()->logger_;
|
if (flags & PROFILER_MODULE_HEAP_SNAPSHOT) {
|
// Snapshot mode: resume modules, perform GC, then pause only
|
// those modules which haven't been started prior to making a
|
@@ -3430,12 +3438,12 @@
|
|
// Reset snapshot flag and CPU module flags.
|
flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
|
- const int current_flags = i::Logger::GetActiveProfilerModules();
|
- i::Logger::ResumeProfiler(flags);
|
- i::Heap::CollectAllGarbage(false);
|
- i::Logger::PauseProfiler(~current_flags & flags);
|
+ const int current_flags = logger.GetActiveProfilerModules();
|
+ logger.ResumeProfiler(flags);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
+ logger.PauseProfiler(~current_flags & flags);
|
} else {
|
- i::Logger::ResumeProfiler(flags);
|
+ logger.ResumeProfiler(flags);
|
}
|
#endif
|
}
|
@@ -3443,14 +3451,14 @@
|
|
void V8::PauseProfilerEx(int flags) {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- i::Logger::PauseProfiler(flags);
|
+ v8_context()->logger_.PauseProfiler(flags);
|
#endif
|
}
|
|
|
int V8::GetActiveProfilerModules() {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- return i::Logger::GetActiveProfilerModules();
|
+ return v8_context()->logger_.GetActiveProfilerModules();
|
#else
|
return PROFILER_MODULE_NONE;
|
#endif
|
@@ -3459,36 +3467,39 @@
|
|
int V8::GetLogLines(int from_pos, char* dest_buf, int max_size) {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- return i::Logger::GetLogLines(from_pos, dest_buf, max_size);
|
+ return v8_context()->logger_.GetLogLines(from_pos, dest_buf, max_size);
|
#endif
|
return 0;
|
}
|
|
|
int V8::GetCurrentThreadId() {
|
- API_ENTRY_CHECK("V8::GetCurrentThreadId()");
|
+ V8Context* const v8context = v8_context();
|
+ API_ENTRY_CHECK("V8::GetCurrentThreadId()", v8context);
|
EnsureInitialized("V8::GetCurrentThreadId()");
|
- return i::Top::thread_id();
|
+ return v8context->top_.thread_id();
|
}
|
|
|
void V8::TerminateExecution(int thread_id) {
|
- if (!i::V8::IsRunning()) return;
|
- API_ENTRY_CHECK("V8::GetCurrentThreadId()");
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ API_ENTRY_CHECK("V8::GetCurrentThreadId()", v8context);
|
// If the thread_id identifies the current thread just terminate
|
// execution right away. Otherwise, ask the thread manager to
|
// terminate the thread with the given id if any.
|
- if (thread_id == i::Top::thread_id()) {
|
- i::StackGuard::TerminateExecution();
|
+ if (thread_id == v8context->top_.thread_id()) {
|
+ v8context->stack_guard_.TerminateExecution();
|
} else {
|
- i::ThreadManager::TerminateExecution(thread_id);
|
+ v8context->thread_manager_.TerminateExecution(thread_id);
|
}
|
}
|
|
|
void V8::TerminateExecution() {
|
- if (!i::V8::IsRunning()) return;
|
- i::StackGuard::TerminateExecution();
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ v8context->stack_guard_.TerminateExecution();
|
}
|
|
|
@@ -3652,7 +3663,8 @@
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
bool Debug::SetDebugEventListener(EventCallback that, Handle<Value> data) {
|
- EnsureInitialized("v8::Debug::SetDebugEventListener()");
|
+ V8Context* const v8context = v8_context();
|
+ EnsureInitialized("v8::Debug::SetDebugEventListener()", v8context);
|
ON_BAILOUT("v8::Debug::SetDebugEventListener()", return false);
|
ENTER_V8;
|
HandleScope scope;
|
@@ -3660,7 +3672,7 @@
|
if (that != NULL) {
|
proxy = i::Factory::NewProxy(FUNCTION_ADDR(that));
|
}
|
- i::Debugger::SetEventListener(proxy, Utils::OpenHandle(*data));
|
+ v8context->debug_.debugger()->SetEventListener(proxy, Utils::OpenHandle(*data));
|
return true;
|
}
|
|
@@ -3669,21 +3681,20 @@
|
Handle<Value> data) {
|
ON_BAILOUT("v8::Debug::SetDebugEventListener()", return false);
|
ENTER_V8;
|
- i::Debugger::SetEventListener(Utils::OpenHandle(*that),
|
+ v8_context()->debug_.debugger()->SetEventListener(Utils::OpenHandle(*that),
|
Utils::OpenHandle(*data));
|
return true;
|
}
|
|
|
void Debug::DebugBreak() {
|
- if (!i::V8::IsRunning()) return;
|
- i::StackGuard::DebugBreak();
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ v8context->stack_guard_.DebugBreak();
|
}
|
|
-
|
-static v8::Debug::MessageHandler message_handler = NULL;
|
-
|
static void MessageHandlerWrapper(const v8::Debug::Message& message) {
|
+ v8::Debug::MessageHandler message_handler = v8_context()->debug_.message_handler_;
|
if (message_handler) {
|
v8::String::Value json(message.GetJSON());
|
message_handler(*json, json.length(), message.GetClientData());
|
@@ -3693,59 +3704,65 @@
|
|
void Debug::SetMessageHandler(v8::Debug::MessageHandler handler,
|
bool message_handler_thread) {
|
- EnsureInitialized("v8::Debug::SetMessageHandler");
|
+ V8Context* const v8context = v8_context();
|
+ EnsureInitialized("v8::Debug::SetMessageHandler", v8context);
|
ENTER_V8;
|
// Message handler thread not supported any more. Parameter temporally left in
|
// the API for client compatability reasons.
|
CHECK(!message_handler_thread);
|
|
// TODO(sgjesse) support the old message handler API through a simple wrapper.
|
+ v8::Debug::MessageHandler &message_handler = v8_context()->debug_.message_handler_;
|
message_handler = handler;
|
if (message_handler != NULL) {
|
- i::Debugger::SetMessageHandler(MessageHandlerWrapper);
|
+ v8context->debug_.debugger()->SetMessageHandler(MessageHandlerWrapper);
|
} else {
|
- i::Debugger::SetMessageHandler(NULL);
|
+ v8context->debug_.debugger()->SetMessageHandler(NULL);
|
}
|
}
|
|
|
void Debug::SetMessageHandler2(v8::Debug::MessageHandler2 handler) {
|
- EnsureInitialized("v8::Debug::SetMessageHandler");
|
+ V8Context* const v8context = v8_context();
|
+ EnsureInitialized("v8::Debug::SetMessageHandler", v8context);
|
ENTER_V8;
|
HandleScope scope;
|
- i::Debugger::SetMessageHandler(handler);
|
+ v8context->debug_.debugger()->SetMessageHandler(handler);
|
}
|
|
|
void Debug::SendCommand(const uint16_t* command, int length,
|
ClientData* client_data) {
|
- if (!i::V8::IsRunning()) return;
|
- i::Debugger::ProcessCommand(i::Vector<const uint16_t>(command, length),
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return;
|
+ v8context->debug_.debugger()->ProcessCommand(i::Vector<const uint16_t>(command, length),
|
client_data);
|
}
|
|
|
void Debug::SetHostDispatchHandler(HostDispatchHandler handler,
|
int period) {
|
- EnsureInitialized("v8::Debug::SetHostDispatchHandler");
|
+ V8Context* const v8context = v8_context();
|
+ EnsureInitialized("v8::Debug::SetHostDispatchHandler", v8context);
|
ENTER_V8;
|
- i::Debugger::SetHostDispatchHandler(handler, period);
|
+ v8context->debug_.debugger()->SetHostDispatchHandler(handler, period);
|
}
|
|
|
Local<Value> Debug::Call(v8::Handle<v8::Function> fun,
|
v8::Handle<v8::Value> data) {
|
- if (!i::V8::IsRunning()) return Local<Value>();
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return Local<Value>();
|
ON_BAILOUT("v8::Debug::Call()", return Local<Value>());
|
ENTER_V8;
|
i::Handle<i::Object> result;
|
EXCEPTION_PREAMBLE();
|
if (data.IsEmpty()) {
|
- result = i::Debugger::Call(Utils::OpenHandle(*fun),
|
+ result = v8context->debug_.debugger()->Call(Utils::OpenHandle(*fun),
|
i::Factory::undefined_value(),
|
&has_pending_exception);
|
} else {
|
- result = i::Debugger::Call(Utils::OpenHandle(*fun),
|
+ result = v8context->debug_.debugger()->Call(Utils::OpenHandle(*fun),
|
Utils::OpenHandle(*data),
|
&has_pending_exception);
|
}
|
@@ -3755,12 +3772,13 @@
|
|
|
Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) {
|
- if (!i::V8::IsRunning()) return Local<Value>();
|
+ V8Context* const v8context = v8_context();
|
+ if (!v8context->v8_.IsRunning()) return Local<Value>();
|
ON_BAILOUT("v8::Debug::GetMirror()", return Local<Value>());
|
ENTER_V8;
|
v8::HandleScope scope;
|
- i::Debug::Load();
|
- i::Handle<i::JSObject> debug(i::Debug::debug_context()->global());
|
+ v8context->debug_.Load();
|
+ i::Handle<i::JSObject> debug(v8context->debug_.debug_context()->global());
|
i::Handle<i::String> name = i::Factory::LookupAsciiSymbol("MakeMirror");
|
i::Handle<i::Object> fun_obj = i::GetProperty(debug, name);
|
i::Handle<i::JSFunction> fun = i::Handle<i::JSFunction>::cast(fun_obj);
|
@@ -3777,25 +3795,21 @@
|
|
|
bool Debug::EnableAgent(const char* name, int port) {
|
- return i::Debugger::StartAgent(name, port);
|
+ V8Context* const v8context = v8_context();
|
+ return v8context->debug_.debugger()->StartAgent(name, port);
|
}
|
#endif // ENABLE_DEBUGGER_SUPPORT
|
|
namespace internal {
|
|
|
-HandleScopeImplementer* HandleScopeImplementer::instance() {
|
- return &thread_local;
|
-}
|
-
|
-
|
void HandleScopeImplementer::FreeThreadResources() {
|
- thread_local.Free();
|
+ v8_context()->handle_scope_implementer_.Free();
|
}
|
|
|
char* HandleScopeImplementer::ArchiveThread(char* storage) {
|
- return thread_local.ArchiveThreadHelper(storage);
|
+ return v8_context()->handle_scope_implementer_.ArchiveThreadHelper(storage);
|
}
|
|
|
@@ -3813,12 +3827,12 @@
|
|
|
int HandleScopeImplementer::ArchiveSpacePerThread() {
|
- return sizeof(thread_local);
|
+ return sizeof(HandleScopeImplementer);
|
}
|
|
|
char* HandleScopeImplementer::RestoreThread(char* storage) {
|
- return thread_local.RestoreThreadHelper(storage);
|
+ return v8_context()->handle_scope_implementer_.RestoreThreadHelper(storage);
|
}
|
|
|
@@ -3851,8 +3865,8 @@
|
void HandleScopeImplementer::Iterate(ObjectVisitor* v) {
|
v8::ImplementationUtilities::HandleScopeData* current =
|
v8::ImplementationUtilities::CurrentHandleScope();
|
- thread_local.handle_scope_data_ = *current;
|
- thread_local.IterateThis(v);
|
+ v8_context()->handle_scope_implementer_.handle_scope_data_ = *current;
|
+ v8_context()->handle_scope_implementer_.IterateThis(v);
|
}
|
|
|
Index: src/log.cc
|
===================================================================
|
--- src/log.cc (revision 3154)
|
+++ src/log.cc Sat Nov 14 01:43:04 MSK 2009
|
@@ -59,12 +59,12 @@
|
|
|
void IncrementStateCounter(StateTag state) {
|
- Counters::state_counters[state].Increment();
|
+ v8_context()->counters_.state_counters[state].Increment();
|
}
|
|
|
void DecrementStateCounter(StateTag state) {
|
- Counters::state_counters[state].Decrement();
|
+ v8_context()->counters_.state_counters[state].Decrement();
|
}
|
};
|
|
@@ -132,7 +132,7 @@
|
bool running_;
|
|
// Tells whether we are currently recording tick samples.
|
- static bool paused_;
|
+ static bool paused_; ///static
|
};
|
|
bool Profiler::paused_ = false;
|
@@ -147,7 +147,7 @@
|
return;
|
}
|
|
- const Address js_entry_sp = Top::js_entry_sp(Top::GetCurrentThread());
|
+ const Address js_entry_sp = v8_context()->top_.js_entry_sp(v8_context()->top_.GetCurrentThread());
|
if (js_entry_sp == 0) {
|
// Not executing JS now.
|
sample->frames_count = 0;
|
@@ -221,12 +221,12 @@
|
for (int i = 0; i < kBufferSize; i++) {
|
buffer_[i] = static_cast<byte>(OTHER);
|
}
|
- Logger::ticker_->SetWindow(this);
|
+ v8_context()->logger_.ticker_->SetWindow(this);
|
}
|
|
|
SlidingStateWindow::~SlidingStateWindow() {
|
- Logger::ticker_->ClearWindow();
|
+ v8_context()->logger_.ticker_->ClearWindow();
|
}
|
|
|
@@ -271,10 +271,11 @@
|
Start();
|
|
// Register to get ticks.
|
- Logger::ticker_->SetProfiler(this);
|
+ Logger & logger = v8_context()->logger_;
|
+ logger.ticker_->SetProfiler(this);
|
|
- Logger::ProfilerBeginEvent();
|
- Logger::LogAliases();
|
+ logger.ProfilerBeginEvent();
|
+ logger.LogAliases();
|
}
|
|
|
@@ -282,7 +283,7 @@
|
if (!engaged_) return;
|
|
// Stop receiving ticks.
|
- Logger::ticker_->ClearProfiler();
|
+ v8_context()->logger_.ticker_->ClearProfiler();
|
|
// Terminate the worker thread by setting running_ to false,
|
// inserting a fake element in the queue and then wait for
|
@@ -300,10 +301,11 @@
|
|
void Profiler::Run() {
|
TickSample sample;
|
- bool overflow = Logger::profiler_->Remove(&sample);
|
+ Logger & logger = v8_context()->logger_;
|
+ bool overflow = logger.profiler_->Remove(&sample);
|
while (running_) {
|
LOG(TickEvent(&sample, overflow));
|
- overflow = Logger::profiler_->Remove(&sample);
|
+ overflow = logger.profiler_->Remove(&sample);
|
}
|
}
|
|
@@ -311,14 +313,14 @@
|
//
|
// Logger class implementation.
|
//
|
-Ticker* Logger::ticker_ = NULL;
|
-Profiler* Logger::profiler_ = NULL;
|
-VMState* Logger::current_state_ = NULL;
|
-VMState Logger::bottom_state_(EXTERNAL);
|
-SlidingStateWindow* Logger::sliding_state_window_ = NULL;
|
-const char** Logger::log_events_ = NULL;
|
-CompressionHelper* Logger::compression_helper_ = NULL;
|
-bool Logger::is_logging_ = false;
|
+Logger::Logger():
|
+ ticker_(NULL),
|
+ profiler_(NULL),
|
+ current_state_(NULL),
|
+ bottom_state_(EXTERNAL,*this),
|
+ sliding_state_window_(NULL), log_events_ (NULL), compression_helper_ (NULL), is_logging_ (false)
|
+{
|
+}
|
|
#define DECLARE_LONG_EVENT(ignore1, long_name, ignore2) long_name,
|
const char* kLongLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
|
@@ -658,7 +660,7 @@
|
return msg->RetrieveCompressedPrevious(&compressor_);
|
}
|
OS::SNPrintF(prefix_, "%s,%d,",
|
- Logger::log_events_[Logger::REPEAT_META_EVENT],
|
+ v8_context()->logger_.log_events_[Logger::REPEAT_META_EVENT],
|
repeat_count_ + 1);
|
repeat_count_ = 0;
|
return msg->RetrieveCompressedPrevious(&compressor_, prefix_.start());
|
@@ -781,7 +783,7 @@
|
|
void Logger::CodeMoveEvent(Address from, Address to) {
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
- static Address prev_to_ = NULL;
|
+ static Address prev_to_ = NULL; ///static
|
if (!Log::IsEnabled() || !FLAG_log_code) return;
|
LogMessageBuilder msg;
|
msg.Append("%s,", log_events_[CODE_MOVE_EVENT]);
|
@@ -839,7 +841,7 @@
|
LogMessageBuilder msg;
|
String* class_name = obj->IsJSObject()
|
? JSObject::cast(obj)->class_name()
|
- : Heap::empty_string();
|
+ : v8_context()->heap_.empty_string();
|
msg.Append("suspect-read,");
|
msg.Append(class_name);
|
msg.Append(',');
|
@@ -994,7 +996,7 @@
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
void Logger::TickEvent(TickSample* sample, bool overflow) {
|
if (!Log::IsEnabled() || !FLAG_prof) return;
|
- static Address prev_sp = NULL;
|
+ static Address prev_sp = NULL; ///static
|
LogMessageBuilder msg;
|
msg.Append("%s,", log_events_[TICK_EVENT]);
|
Address prev_addr = reinterpret_cast<Address>(sample->pc);
|
@@ -1085,7 +1087,7 @@
|
// either from main or Profiler's thread.
|
void Logger::StopLoggingAndProfiling() {
|
Log::stop();
|
- PauseProfiler(PROFILER_MODULE_CPU);
|
+ v8_context()->logger_.PauseProfiler(PROFILER_MODULE_CPU);
|
}
|
|
|
Index: src/v8-global-context.cc
|
===================================================================
|
--- src/v8-global-context.cc Sat Nov 14 01:43:00 MSK 2009
|
+++ src/v8-global-context.cc Sat Nov 14 01:43:00 MSK 2009
|
@@ -0,0 +1,166 @@
|
+// Copyright 2009 the V8 project authors. All rights reserved.
|
+// Redistribution and use in source and binary forms, with or without
|
+// modification, are permitted provided that the following conditions are
|
+// met:
|
+//
|
+// * Redistributions of source code must retain the above copyright
|
+// notice, this list of conditions and the following disclaimer.
|
+// * Redistributions in binary form must reproduce the above
|
+// copyright notice, this list of conditions and the following
|
+// disclaimer in the documentation and/or other materials provided
|
+// with the distribution.
|
+// * Neither the name of Google Inc. nor the names of its
|
+// contributors may be used to endorse or promote products derived
|
+// from this software without specific prior written permission.
|
+//
|
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
+
|
+#include "v8.h"
|
+
|
+#include "api.h"
|
+#include "bootstrapper.h"
|
+#include "debug.h"
|
+#include "execution.h"
|
+#include "heap.h"
|
+#include "v8threads.h"
|
+#include "regexp-stack.h"
|
+#include "compilation-cache.h"
|
+#include "global-handles.h"
|
+#include "scopeinfo.h"
|
+#include "stub-cache.h"
|
+#include "mark-compact.h"
|
+#include "builtins.h"
|
+#include "debug.h"
|
+#include "scanner.h"
|
+#include "compiler.h"
|
+#include "disassembler.h"
|
+
|
+namespace v8 {
|
+
|
+V8ContextProvider::V8ContextProvider():v8context(new V8Context()) {}
|
+V8ContextProvider::~V8ContextProvider() { delete v8context; }
|
+
|
+#ifndef V8_SINGLE_THREADED
|
+internal::Thread::LocalStorageKey default_context = internal::Thread::CreateThreadLocalKey();
|
+
|
+void BindContext(V8Context* context) {
|
+ ASSERT(v8_context() != context);
|
+ internal::Thread::SetThreadLocal(default_context, context);
|
+}
|
+#else
|
+V8Context* default_context;
|
+#endif
|
+
|
+V8Context v8context; // not needed for !V8_SINGLE_THREADED but left for compatibility
|
+
|
+V8Context::V8Context():
|
+ thread_manager_(*new i::ThreadManager()),
|
+ v8_(*new i::V8()),
|
+ top_(*new i::Top()),
|
+ keyed_lookup_cache_(*new i::KeyedLookupCache()),
|
+ descriptor_lookup_cache_(*new i::DescriptorLookupCache()),
|
+ context_slot_cache_(*new i::ContextSlotCache()),
|
+ compilation_cache_(*new i::CompilationCache()),
|
+ global_handles_(*new i::GlobalHandles()),
|
+ transcendental_caches_(*new i::TranscendentalCaches()),
|
+ counters_(*new i::Counters()),
|
+ heap_(*new i::Heap()),
|
+ stack_guard_(*new i::StackGuard()),
|
+ mark_compact_collector_(*new i::MarkCompactCollector()),
|
+ stub_cache_(*new i::StubCache()),
|
+ handle_scope_implementer_(*new i::HandleScopeImplementer()),
|
+ logger_(*new i::Logger()),
|
+ bootstrapper_(*new i::Bootstrapper()),
|
+ builtins_(*new i::Builtins()),
|
+ #ifdef ENABLE_DEBUGGER_SUPPORT
|
+ debug_(*new i::Debug()),
|
+ debugger_agent_(NULL),
|
+ #endif
|
+ relocatable_data_(*new i::RelocatableData()),
|
+ reg_exp_stack_(*new i::RegExpStack()),
|
+ compiler_(*new i::Compiler()),
|
+ scanner_(*new i::Scanner()),
|
+ storage_data_(*new i::StorageData()) ,
|
+ zone_data_(*new i::ZoneData()),
|
+ code_generator_data_(*new i::CodeGeneratorData()),
|
+ api_data(*new i::ApiData()),
|
+ objects_data(new i::ObjectsData()),
|
+ runtime_data_(NULL),
|
+ external_reference_table_(NULL),
|
+ stats_table_data_(*new i::StatsTableData()),
|
+ #ifdef ENABLE_DISASSEMBLER
|
+ disassembler_data_(NULL),
|
+ #endif
|
+ assembler_data_(NULL)
|
+{
|
+ #ifndef V8_SINGLE_THREADED
|
+ BindContext(this);
|
+ #endif
|
+ i::StackGuard::Setup();
|
+ i::Runtime::Setup();
|
+ i::Assembler::Setup();
|
+ #ifdef ENABLE_DISASSEMBLER
|
+ i::Disassembler::Setup();
|
+ #endif
|
+}
|
+
|
+V8Context::~V8Context() {
|
+ #ifndef V8_SINGLE_THREADED
|
+ thread_manager_.FreeThreadResources();
|
+ #endif
|
+
|
+ delete &thread_manager_;
|
+ delete &v8_;
|
+ delete &top_;
|
+ delete &keyed_lookup_cache_;
|
+ delete &descriptor_lookup_cache_;
|
+ delete &context_slot_cache_;
|
+ delete &compilation_cache_;
|
+ delete &global_handles_;
|
+ delete &transcendental_caches_;
|
+ delete &counters_;
|
+ delete &stack_guard_;
|
+ delete &heap_;
|
+ delete &mark_compact_collector_;
|
+ delete &stub_cache_;
|
+ delete &handle_scope_implementer_;
|
+ delete &logger_;
|
+ delete &bootstrapper_;
|
+ delete &builtins_;
|
+ delete &code_generator_data_;
|
+
|
+ #ifdef ENABLE_DEBUGGER_SUPPORT
|
+ delete &debug_;
|
+ #endif
|
+ delete &relocatable_data_;
|
+ delete ®_exp_stack_;
|
+ delete &compiler_;
|
+ delete &scanner_;
|
+ delete &storage_data_;
|
+ delete &zone_data_;
|
+ delete objects_data;
|
+ delete &api_data;
|
+ delete &stats_table_data_;
|
+ i::Runtime::TearDown();
|
+ i::StackGuard::TearDown();
|
+
|
+ i::Assembler::TearDown();
|
+ #ifdef ENABLE_DISASSEMBLER
|
+ i::Disassembler::TearDown();
|
+ #endif
|
+
|
+ #ifndef V8_SINGLE_THREADED
|
+ BindContext(NULL);
|
+ #endif
|
+}
|
+}
|
Index: test/cctest/test-assembler-ia32.cc
|
===================================================================
|
--- test/cctest/test-assembler-ia32.cc (revision 1368)
|
+++ test/cctest/test-assembler-ia32.cc Sun Nov 15 12:39:10 MSK 2009
|
@@ -37,8 +37,8 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
-
|
typedef int (*F0)();
|
typedef int (*F1)(int x);
|
typedef int (*F2)(int x, int y);
|
@@ -69,10 +69,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -107,10 +107,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -149,10 +149,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -182,10 +182,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
// don't print the code - our disassembler can't handle cvttss2si
|
// instead print bytes
|
Disassembler::Dump(stdout,
|
@@ -215,10 +215,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
// don't print the code - our disassembler can't handle cvttsd2si
|
// instead print bytes
|
Disassembler::Dump(stdout,
|
@@ -245,10 +245,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
F0 f = FUNCTION_CAST<F0>(code->entry());
|
int res = f();
|
CHECK_EQ(42, res);
|
@@ -281,10 +281,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
#ifdef DEBUG
|
::printf("\n---\n");
|
// don't print the code - our disassembler can't handle SSE instructions
|
@@ -320,10 +320,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -376,10 +376,10 @@
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
Code* code =
|
- Code::cast(Heap::CreateCode(desc,
|
+ Code::cast(v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value())));
|
+ Handle<Object>(v8_context()->heap_.undefined_value())));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
Index: src/counters.cc
|
===================================================================
|
--- src/counters.cc (revision 2038)
|
+++ src/counters.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -33,9 +33,11 @@
|
namespace v8 {
|
namespace internal {
|
|
-CounterLookupCallback StatsTable::lookup_function_ = NULL;
|
-CreateHistogramCallback StatsTable::create_histogram_function_ = NULL;
|
-AddHistogramSampleCallback StatsTable::add_histogram_sample_function_ = NULL;
|
+StatsTableData::StatsTableData():
|
+ lookup_function_(NULL),
|
+ create_histogram_function_ (NULL),
|
+ add_histogram_sample_function_(NULL)
|
+{}
|
|
// Start the timer.
|
void StatsCounterTimer::Start() {
|
Index: test/cctest/test-serialize.cc
|
===================================================================
|
--- test/cctest/test-serialize.cc (revision 3229)
|
+++ test/cctest/test-serialize.cc Sun Nov 15 13:01:56 MSK 2009
|
@@ -39,6 +39,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static const unsigned kCounters = 256;
|
static int local_counters[kCounters];
|
@@ -101,7 +102,7 @@
|
|
TEST(ExternalReferenceEncoder) {
|
StatsTable::SetCounterFunction(counter_function);
|
- Heap::Setup(false);
|
+ v8_context()->heap_.Setup(false);
|
ExternalReferenceEncoder encoder;
|
CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode),
|
Encode(encoder, Builtins::ArrayCode));
|
@@ -112,7 +113,7 @@
|
CHECK_EQ(make_code(DEBUG_ADDRESS, register_code(3)),
|
Encode(encoder, Debug_Address(Debug::k_register_address, 3)));
|
ExternalReference keyed_load_function_prototype =
|
- ExternalReference(&Counters::keyed_load_function_prototype);
|
+ ExternalReference(&v8_context()->counters_.keyed_load_function_prototype);
|
CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype),
|
encoder.Encode(keyed_load_function_prototype.address()));
|
ExternalReference passed_function =
|
@@ -142,7 +143,7 @@
|
|
TEST(ExternalReferenceDecoder) {
|
StatsTable::SetCounterFunction(counter_function);
|
- Heap::Setup(false);
|
+ v8_context()->heap_.Setup(false);
|
ExternalReferenceDecoder decoder;
|
CHECK_EQ(AddressOf(Builtins::ArrayCode),
|
decoder.Decode(make_code(BUILTIN, Builtins::ArrayCode)));
|
@@ -153,7 +154,7 @@
|
CHECK_EQ(AddressOf(Debug_Address(Debug::k_register_address, 3)),
|
decoder.Decode(make_code(DEBUG_ADDRESS, register_code(3))));
|
ExternalReference keyed_load_function =
|
- ExternalReference(&Counters::keyed_load_function_prototype);
|
+ ExternalReference(&v8_context()->counters_.keyed_load_function_prototype);
|
CHECK_EQ(keyed_load_function.address(),
|
decoder.Decode(
|
make_code(STATS_COUNTER,
|
@@ -254,12 +255,12 @@
|
static void SanityCheck() {
|
v8::HandleScope scope;
|
#ifdef DEBUG
|
- Heap::Verify();
|
+ v8_context()->heap_.Verify();
|
#endif
|
- CHECK(Top::global()->IsJSObject());
|
- CHECK(Top::global_context()->IsContext());
|
- CHECK(Top::special_function_table()->IsFixedArray());
|
- CHECK(Heap::symbol_table()->IsSymbolTable());
|
+ CHECK(v8_context()->top_.global()->IsJSObject());
|
+ CHECK(v8_context()->top_.global_context()->IsContext());
|
+ CHECK(v8_context()->top_.special_function_table()->IsFixedArray());
|
+ CHECK(v8_context()->heap_.symbol_table()->IsSymbolTable());
|
CHECK(!Factory::LookupAsciiSymbol("Empty")->IsFailure());
|
}
|
|
Index: src/allocation.h
|
===================================================================
|
--- src/allocation.h (revision 2038)
|
+++ src/allocation.h Sat Nov 14 01:43:15 MSK 2009
|
@@ -28,42 +28,11 @@
|
#ifndef V8_ALLOCATION_H_
|
#define V8_ALLOCATION_H_
|
|
+#include "v8-global-context.h"
|
+
|
namespace v8 {
|
namespace internal {
|
|
-
|
-// A class that controls whether allocation is allowed. This is for
|
-// the C++ heap only!
|
-class NativeAllocationChecker {
|
- public:
|
- typedef enum { ALLOW, DISALLOW } NativeAllocationAllowed;
|
- explicit inline NativeAllocationChecker(NativeAllocationAllowed allowed)
|
- : allowed_(allowed) {
|
-#ifdef DEBUG
|
- if (allowed == DISALLOW) {
|
- allocation_disallowed_++;
|
- }
|
-#endif
|
- }
|
- ~NativeAllocationChecker() {
|
-#ifdef DEBUG
|
- if (allowed_ == DISALLOW) {
|
- allocation_disallowed_--;
|
- }
|
-#endif
|
- ASSERT(allocation_disallowed_ >= 0);
|
- }
|
- static inline bool allocation_allowed() {
|
- return allocation_disallowed_ == 0;
|
- }
|
- private:
|
- // This static counter ensures that NativeAllocationCheckers can be nested.
|
- static int allocation_disallowed_;
|
- // This flag applies to this particular instance.
|
- NativeAllocationAllowed allowed_;
|
-};
|
-
|
-
|
// Superclass for classes managed with new & delete.
|
class Malloced {
|
public:
|
@@ -143,6 +112,7 @@
|
public:
|
explicit PreallocatedStorage(size_t size);
|
size_t size() { return size_; }
|
+
|
static void* New(size_t size);
|
static void Delete(void* p);
|
|
@@ -153,17 +123,64 @@
|
size_t size_;
|
PreallocatedStorage* previous_;
|
PreallocatedStorage* next_;
|
- static bool preallocated_;
|
|
- static PreallocatedStorage in_use_list_;
|
- static PreallocatedStorage free_list_;
|
-
|
void LinkTo(PreallocatedStorage* other);
|
void Unlink();
|
DISALLOW_IMPLICIT_CONSTRUCTORS(PreallocatedStorage);
|
};
|
|
+class StorageData {
|
+private:
|
+ // This static counter ensures that NativeAllocationCheckers can be nested.
|
+ int allocation_disallowed_;
|
+ bool preallocated_;
|
|
+ PreallocatedStorage in_use_list_;
|
+ PreallocatedStorage free_list_;
|
+ #ifdef DEBUG
|
+ private:
|
+ bool rset_used_; // state of the remembered set
|
+ #endif
|
+
|
+
|
+ DISALLOW_COPY_AND_ASSIGN(StorageData);
|
+ StorageData();
|
+ friend class V8Context;
|
+ friend class PreallocatedStorage;
|
+ friend class NativeAllocationChecker;
|
+ friend class Page;
|
+};
|
+
|
+// A class that controls whether allocation is allowed. This is for
|
+// the C++ heap only!
|
+class NativeAllocationChecker {
|
+ public:
|
+ typedef enum { ALLOW, DISALLOW } NativeAllocationAllowed;
|
+ explicit inline NativeAllocationChecker(NativeAllocationAllowed allowed)
|
+ : allowed_(allowed) {
|
+#ifdef DEBUG
|
+ if (allowed == DISALLOW) {
|
+ v8_context()->storage_data_.allocation_disallowed_++;
|
+ }
|
+#endif
|
+ }
|
+ ~NativeAllocationChecker() {
|
+#ifdef DEBUG
|
+ if (allowed_ == DISALLOW) {
|
+ v8_context()->storage_data_.allocation_disallowed_--;
|
+ }
|
+#endif
|
+ ASSERT(v8_context()->storage_data_.allocation_disallowed_ >= 0);
|
+ }
|
+ static inline bool allocation_allowed() {
|
+ v8::V8Context * const v8context = v8_context();
|
+ return v8context ? v8context->storage_data_.allocation_disallowed_ == 0 : true;
|
+ }
|
+ private:
|
+ // This flag applies to this particular instance.
|
+ NativeAllocationAllowed allowed_;
|
+};
|
+
|
} } // namespace v8::internal
|
|
#endif // V8_ALLOCATION_H_
|
Index: src/interpreter-irregexp.cc
|
===================================================================
|
--- src/interpreter-irregexp.cc (revision 2855)
|
+++ src/interpreter-irregexp.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -40,7 +40,7 @@
|
namespace internal {
|
|
|
-static unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize;
|
+static unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize;///static
|
|
|
static bool BackRefMatchesNoCase(int from,
|
@@ -179,7 +179,7 @@
|
static const int kBacktrackStackSize = 10000;
|
|
int* data_;
|
- static int* cache_;
|
+ static int* cache_;///static
|
|
DISALLOW_COPY_AND_ASSIGN(BacktrackStack);
|
};
|
Index: src/prettyprinter.h
|
===================================================================
|
--- src/prettyprinter.h (revision 3051)
|
+++ src/prettyprinter.h Sat Nov 14 01:43:21 MSK 2009
|
@@ -108,7 +108,7 @@
|
void inc_indent() { indent_++; }
|
void dec_indent() { indent_--; }
|
|
- static int indent_;
|
+ static int indent_; ///static
|
};
|
|
|
Index: src/bootstrapper.h
|
===================================================================
|
--- src/bootstrapper.h (revision 3238)
|
+++ src/bootstrapper.h Sat Nov 14 01:43:16 MSK 2009
|
@@ -34,46 +34,53 @@
|
|
// The Boostrapper is the public interface for creating a JavaScript global
|
// context.
|
-class Bootstrapper : public AllStatic {
|
+class Bootstrapper {
|
public:
|
// Requires: Heap::Setup has been called.
|
- static void Initialize(bool create_heap_objects);
|
- static void TearDown();
|
+ void Initialize(bool create_heap_objects);
|
+ void TearDown();
|
|
// Creates a JavaScript Global Context with initial object graph.
|
// The returned value is a global handle casted to V8Environment*.
|
- static Handle<Context> CreateEnvironment(
|
+ Handle<Context> CreateEnvironment(
|
Handle<Object> global_object,
|
v8::Handle<v8::ObjectTemplate> global_template,
|
v8::ExtensionConfiguration* extensions);
|
|
// Detach the environment from its outer global object.
|
- static void DetachGlobal(Handle<Context> env);
|
+ void DetachGlobal(Handle<Context> env);
|
|
// Traverses the pointers for memory management.
|
- static void Iterate(ObjectVisitor* v);
|
+ void Iterate(ObjectVisitor* v);
|
|
// Accessors for the native scripts cache. Used in lazy loading.
|
- static Handle<String> NativesSourceLookup(int index);
|
- static bool NativesCacheLookup(Vector<const char> name,
|
+ Handle<String> NativesSourceLookup(int index);
|
+ bool NativesCacheLookup(Vector<const char> name,
|
Handle<JSFunction>* handle);
|
- static void NativesCacheAdd(Vector<const char> name, Handle<JSFunction> fun);
|
+ void NativesCacheAdd(Vector<const char> name, Handle<JSFunction> fun);
|
|
// Append code that needs fixup at the end of boot strapping.
|
- static void AddFixup(Code* code, MacroAssembler* masm);
|
+ void AddFixup(Code* code, MacroAssembler* masm);
|
|
// Tells whether bootstrapping is active.
|
- static bool IsActive();
|
+ bool IsActive();
|
|
// Encoding/decoding support for fixup flags.
|
class FixupFlagsUseCodeObject: public BitField<bool, 0, 1> {};
|
class FixupFlagsArgumentsCount: public BitField<uint32_t, 1, 32-1> {};
|
|
// Support for thread preemption.
|
- static int ArchiveSpacePerThread();
|
- static char* ArchiveState(char* to);
|
- static char* RestoreState(char* from);
|
- static void FreeThreadResources();
|
+ int ArchiveSpacePerThread();
|
+ char* ArchiveState(char* to);
|
+ char* RestoreState(char* from);
|
+ void FreeThreadResources();
|
+ class BootstrapperImpl;
|
+private:
|
+ BootstrapperImpl* const bootstrapper_impl;
|
+ friend class V8Context;
|
+ Bootstrapper();
|
+ ~Bootstrapper();
|
+ DISALLOW_COPY_AND_ASSIGN(Bootstrapper);
|
};
|
|
|
Index: src/heap.cc
|
===================================================================
|
--- src/heap.cc (revision 3230)
|
+++ src/heap.cc Sat Nov 14 01:43:05 MSK 2009
|
@@ -49,81 +49,149 @@
|
namespace v8 {
|
namespace internal {
|
|
+// A queue of pointers and maps of to-be-promoted objects during a
|
+// scavenge collection.
|
+class PromotionQueue {
|
+ public:
|
+ void Initialize(Address start_address) {
|
+ front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
|
+ }
|
|
-String* Heap::hidden_symbol_;
|
-Object* Heap::roots_[Heap::kRootListLength];
|
+ bool is_empty() { return front_ <= rear_; }
|
|
+ void insert(HeapObject* object, Map* map) {
|
+ *(--rear_) = object;
|
+ *(--rear_) = map;
|
+ // Assert no overflow into live objects.
|
+ ASSERT(reinterpret_cast<Address>(rear_) >= v8_context()->heap_.new_space()->top());
|
+ }
|
|
-NewSpace Heap::new_space_;
|
-OldSpace* Heap::old_pointer_space_ = NULL;
|
-OldSpace* Heap::old_data_space_ = NULL;
|
-OldSpace* Heap::code_space_ = NULL;
|
-MapSpace* Heap::map_space_ = NULL;
|
-CellSpace* Heap::cell_space_ = NULL;
|
-LargeObjectSpace* Heap::lo_space_ = NULL;
|
+ void remove(HeapObject** object, Map** map) {
|
+ *object = *(--front_);
|
+ *map = Map::cast(*(--front_));
|
+ // Assert no underflow.
|
+ ASSERT(front_ >= rear_);
|
+ }
|
|
-static const int kMinimumPromotionLimit = 2*MB;
|
-static const int kMinimumAllocationLimit = 8*MB;
|
+ private:
|
+ // The front of the queue is higher in memory than the rear.
|
+ HeapObject** front_;
|
+ HeapObject** rear_;
|
+};
|
|
-int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
|
-int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
|
+class Heap::HeapImpl {
|
+public:
|
+ // Shared state read by the scavenge collector and set by ScavengeObject.
|
+ PromotionQueue promotion_queue;
|
+ int number_idle_notifications;
|
+ int last_gc_count;
|
+ void* paged_rset_histogram;
|
|
-int Heap::old_gen_exhausted_ = false;
|
+#ifdef DEBUG
|
+ bool search_for_any_global;
|
+ Object* search_target;
|
+ bool found_target;
|
+ List<Object*> object_stack;
|
+#endif
|
|
-int Heap::amount_of_external_allocated_memory_ = 0;
|
-int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
|
+ HeapImpl():
|
+ number_idle_notifications(0),
|
+ last_gc_count(0),
|
+ paged_rset_histogram(NULL)
|
+ #ifdef DEBUG
|
+ ,search_for_any_global(false),
|
+ search_target(NULL),
|
+ found_target(false),
|
+ object_stack(20)
|
+ #endif
|
+ {
|
+ }
|
|
+#ifdef DEBUG
|
+ void MarkRootObjectRecursively(Object** root);
|
+#endif
|
+};
|
+
|
+static const int kMinimumPromotionLimit = 2*MB;
|
+static const int kMinimumAllocationLimit = 8*MB;
|
+
|
// semispace_size_ should be a power of 2 and old_generation_size_ should be
|
// a multiple of Page::kPageSize.
|
#if defined(ANDROID)
|
-int Heap::max_semispace_size_ = 512*KB;
|
-int Heap::max_old_generation_size_ = 128*MB;
|
-int Heap::initial_semispace_size_ = 128*KB;
|
-size_t Heap::code_range_size_ = 0;
|
+static const int kMaxSemispaceSize = 512*KB;
|
+static const int kMaxOldGenerationSize = 128*MB;
|
+static const int kInitialSemispaceSize = 128*KB;
|
+static const size_t kCodeRangeSize = 0;
|
#elif defined(V8_TARGET_ARCH_X64)
|
-int Heap::max_semispace_size_ = 16*MB;
|
-int Heap::max_old_generation_size_ = 1*GB;
|
-int Heap::initial_semispace_size_ = 1*MB;
|
-size_t Heap::code_range_size_ = 512*MB;
|
+static const int kMaxSemispaceSize = 16*MB;
|
+static const int kMaxOldGenerationSize = 1*GB;
|
+static const int kInitialSemispaceSize = 1*MB;
|
+static const size_t kCodeRangeSize = 512*MB;
|
#else
|
-int Heap::max_semispace_size_ = 8*MB;
|
-int Heap::max_old_generation_size_ = 512*MB;
|
-int Heap::initial_semispace_size_ = 512*KB;
|
-size_t Heap::code_range_size_ = 0;
|
+static const int kMaxSemispaceSize = 8*MB;
|
+static const int kMaxOldGenerationSize = 512*MB;
|
+static const int kInitialSemispaceSize = 512*KB;
|
+static const size_t kCodeRangeSize = 0;
|
#endif
|
|
+
|
+Heap::Heap():
|
+ old_pointer_space_(NULL),
|
+ old_data_space_(NULL),
|
+ code_space_(NULL),
|
+ map_space_(NULL),
|
+ cell_space_(NULL),
|
+ lo_space_(NULL),
|
+ old_gen_promotion_limit_(kMinimumPromotionLimit),
|
+ old_gen_allocation_limit_(kMinimumAllocationLimit),
|
+ old_gen_exhausted_(false),
|
+ amount_of_external_allocated_memory_(0),
|
+ amount_of_external_allocated_memory_at_last_global_gc_(0),
|
+ memory_allocator_(new MemoryAllocator()),
|
+ max_semispace_size_(kMaxSemispaceSize),
|
+ max_old_generation_size_(kMaxOldGenerationSize),
|
+ initial_semispace_size_(kInitialSemispaceSize),
|
+ code_range_size_(kCodeRangeSize),
|
+
|
-// The snapshot semispace size will be the default semispace size if
|
-// snapshotting is used and will be the requested semispace size as
|
-// set up by ConfigureHeap otherwise.
|
+ // The snapshot semispace size will be the default semispace size if
|
+ // snapshotting is used and will be the requested semispace size as
|
+ // set up by ConfigureHeap otherwise.
|
-int Heap::reserved_semispace_size_ = Heap::max_semispace_size_;
|
-
|
-GCCallback Heap::global_gc_prologue_callback_ = NULL;
|
-GCCallback Heap::global_gc_epilogue_callback_ = NULL;
|
-
|
+ reserved_semispace_size_(kMaxSemispaceSize),
|
+ global_gc_prologue_callback_(NULL),
|
+ global_gc_epilogue_callback_(NULL),
|
-// Variables set based on semispace_size_ and old_generation_size_ in
|
-// ConfigureHeap.
|
+ // Variables set based on semispace_size_ and old_generation_size_ in
|
+ // ConfigureHeap.
|
|
-// Will be 4 * reserved_semispace_size_ to ensure that young
|
-// generation can be aligned to its size.
|
+ // Will be 4 * reserved_semispace_size_ to ensure that young
|
+ // generation can be aligned to its size.
|
-int Heap::survived_since_last_expansion_ = 0;
|
-int Heap::external_allocation_limit_ = 0;
|
-
|
+
|
-Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
|
-
|
-int Heap::mc_count_ = 0;
|
-int Heap::gc_count_ = 0;
|
-
|
-int Heap::always_allocate_scope_depth_ = 0;
|
-int Heap::linear_allocation_scope_depth_ = 0;
|
-bool Heap::context_disposed_pending_ = false;
|
-
|
+ survived_since_last_expansion_(0),
|
+ external_allocation_limit_(0),
|
+ gc_state_(NOT_IN_GC),
|
+ mc_count_ (0),
|
+ gc_count_ (0),
|
+ always_allocate_scope_depth_(0),
|
+ linear_allocation_scope_depth_(0),
|
+ context_disposed_pending_(false)
|
-#ifdef DEBUG
|
+ #ifdef DEBUG
|
-bool Heap::allocation_allowed_ = true;
|
-
|
-int Heap::allocation_timeout_ = 0;
|
-bool Heap::disallow_allocation_failure_ = false;
|
+ ,
|
+ allocation_allowed_ (true),
|
+ allocation_timeout_ (0),
|
+ disallow_allocation_failure_(false)
|
-#endif // DEBUG
|
+ #endif // DEBUG
|
+ ,heap_configured(false),
|
+ code_range_(new CodeRange()),
|
+ heap_impl_(new HeapImpl())
|
+{
|
+ for(int i = 0; i < kRootListLength; ++i) {
|
+ roots_[i] = NULL;
|
+ }
|
+}
|
|
+Heap::~Heap() {
|
+ delete memory_allocator_;
|
+ delete code_range_;
|
+}
|
|
int Heap::Capacity() {
|
if (!HasBeenSetup()) return 0;
|
@@ -173,21 +241,22 @@
|
|
|
GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
|
+ V8Context * const v8context = v8_context();
|
// Is global GC requested?
|
if (space != NEW_SPACE || FLAG_gc_global) {
|
- Counters::gc_compactor_caused_by_request.Increment();
|
+ v8context->counters_.gc_compactor_caused_by_request.Increment();
|
return MARK_COMPACTOR;
|
}
|
|
// Is enough data promoted to justify a global GC?
|
if (OldGenerationPromotionLimitReached()) {
|
- Counters::gc_compactor_caused_by_promoted_data.Increment();
|
+ v8context->counters_.gc_compactor_caused_by_promoted_data.Increment();
|
return MARK_COMPACTOR;
|
}
|
|
// Have allocation in OLD and LO failed?
|
if (old_gen_exhausted_) {
|
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
|
+ v8context->counters_.gc_compactor_caused_by_oldspace_exhaustion.Increment();
|
return MARK_COMPACTOR;
|
}
|
|
@@ -200,8 +269,8 @@
|
// and does not count available bytes already in the old space or code
|
// space. Undercounting is safe---we may get an unrequested full GC when
|
// a scavenge would have succeeded.
|
- if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
|
- Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
|
+ if (memory_allocator_->MaxAvailable() <= new_space_.Size()) {
|
+ v8context->counters_.gc_compactor_caused_by_oldspace_exhaustion.Increment();
|
return MARK_COMPACTOR;
|
}
|
|
@@ -244,9 +313,10 @@
|
#if defined(ENABLE_LOGGING_AND_PROFILING)
|
void Heap::PrintShortHeapStatistics() {
|
if (!FLAG_trace_gc_verbose) return;
|
+
|
PrintF("Memory allocator, used: %8d, available: %8d\n",
|
- MemoryAllocator::Size(),
|
- MemoryAllocator::Available());
|
+ memory_allocator_->Size(),
|
+ memory_allocator_->Available());
|
PrintF("New space, used: %8d, available: %8d\n",
|
Heap::new_space_.Size(),
|
new_space_.Available());
|
@@ -299,7 +369,7 @@
|
|
|
void Heap::GarbageCollectionPrologue() {
|
- TranscendentalCache::Clear();
|
+ v8_context()->transcendental_caches_.Clear();
|
gc_count_++;
|
#ifdef DEBUG
|
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
|
@@ -334,6 +404,7 @@
|
}
|
|
void Heap::GarbageCollectionEpilogue() {
|
+ V8Context* const v8context = v8_context();
|
#ifdef DEBUG
|
allow_allocation(true);
|
ZapFromSpace();
|
@@ -342,21 +413,20 @@
|
Verify();
|
}
|
|
- if (FLAG_print_global_handles) GlobalHandles::Print();
|
+ if (FLAG_print_global_handles) v8context->global_handles_.Print();
|
if (FLAG_print_handles) PrintHandles();
|
if (FLAG_gc_verbose) Print();
|
if (FLAG_code_stats) ReportCodeStatistics("After GC");
|
#endif
|
|
- Counters::alive_after_last_gc.Set(SizeOfObjects());
|
-
|
- Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
|
- Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
|
+ v8context->counters_.alive_after_last_gc.Set(SizeOfObjects());
|
+ v8context->counters_.symbol_table_capacity.Set(symbol_table()->Capacity());
|
+ v8context->counters_.number_of_symbols.Set(symbol_table()->NumberOfElements());
|
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
|
ReportStatisticsAfterGC();
|
#endif
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debug::AfterGarbageCollection();
|
+ v8context->debug_.AfterGarbageCollection();
|
#endif
|
}
|
|
@@ -365,9 +435,9 @@
|
// Since we are ignoring the return value, the exact choice of space does
|
// not matter, so long as we do not specify NEW_SPACE, which would not
|
// cause a full GC.
|
- MarkCompactCollector::SetForceCompaction(force_compaction);
|
+ v8_context()->mark_compact_collector_.SetForceCompaction(force_compaction);
|
CollectGarbage(0, OLD_POINTER_SPACE);
|
- MarkCompactCollector::SetForceCompaction(false);
|
+ v8_context()->mark_compact_collector_.SetForceCompaction(false);
|
}
|
|
|
@@ -377,7 +447,7 @@
|
// contexts are disposed and leave it to the embedder to make
|
// informed decisions about when to force a collection.
|
if (!FLAG_expose_gc && context_disposed_pending_) {
|
- HistogramTimerScope scope(&Counters::gc_context);
|
+ HistogramTimerScope scope(&v8_context()->counters_.gc_context);
|
CollectAllGarbage(false);
|
}
|
context_disposed_pending_ = false;
|
@@ -413,8 +483,8 @@
|
tracer.set_collector(collector);
|
|
HistogramTimer* rate = (collector == SCAVENGER)
|
- ? &Counters::gc_scavenger
|
- : &Counters::gc_compactor;
|
+ ? &v8_context()->counters_.gc_scavenger
|
+ : &v8_context()->counters_.gc_compactor;
|
rate->Start();
|
PerformGarbageCollection(space, collector, &tracer);
|
rate->Stop();
|
@@ -474,7 +544,7 @@
|
static void VerifySymbolTable() {
|
#ifdef DEBUG
|
SymbolTableVerifier verifier;
|
- Heap::symbol_table()->IterateElements(&verifier);
|
+ v8_context()->heap_.symbol_table()->IterateElements(&verifier);
|
#endif // DEBUG
|
}
|
|
@@ -489,7 +559,7 @@
|
|
// Committing memory to from space failed again.
|
// Memory is exhausted and we will die.
|
- V8::FatalProcessOutOfMemory("Committing semi space failed.");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Committing semi space failed.");
|
}
|
|
|
@@ -514,15 +584,16 @@
|
}
|
Scavenge();
|
|
- Counters::objs_since_last_young.Set(0);
|
+ V8Context * const v8context = v8_context();
|
+ v8context->counters_.objs_since_last_young.Set(0);
|
|
if (collector == MARK_COMPACTOR) {
|
DisableAssertNoAllocation allow_allocation;
|
- GlobalHandles::PostGarbageCollectionProcessing();
|
+ v8context->global_handles_.PostGarbageCollectionProcessing();
|
}
|
|
// Update relocatables.
|
- Relocatable::PostGarbageCollectionProcessing();
|
+ v8context->relocatable_data_.PostGarbageCollectionProcessing();
|
|
if (collector == MARK_COMPACTOR) {
|
// Register the amount of external allocated memory.
|
@@ -544,13 +615,14 @@
|
tracer->set_full_gc_count(mc_count_);
|
LOG(ResourceEvent("markcompact", "begin"));
|
|
- MarkCompactCollector::Prepare(tracer);
|
+ MarkCompactCollector & mark_compact_collector = v8_context()->mark_compact_collector_;
|
+ mark_compact_collector.Prepare(tracer);
|
|
- bool is_compacting = MarkCompactCollector::IsCompacting();
|
+ bool is_compacting = mark_compact_collector.IsCompacting();
|
|
MarkCompactPrologue(is_compacting);
|
|
- MarkCompactCollector::CollectGarbage();
|
+ mark_compact_collector.CollectGarbage();
|
|
MarkCompactEpilogue(is_compacting);
|
|
@@ -560,7 +632,7 @@
|
|
Shrink();
|
|
- Counters::objs_since_last_full.Set(0);
|
+ v8_context()->counters_.objs_since_last_full.Set(0);
|
context_disposed_pending_ = false;
|
}
|
|
@@ -568,20 +640,21 @@
|
void Heap::MarkCompactPrologue(bool is_compacting) {
|
// At any old GC clear the keyed lookup cache to enable collection of unused
|
// maps.
|
- KeyedLookupCache::Clear();
|
- ContextSlotCache::Clear();
|
- DescriptorLookupCache::Clear();
|
+ V8Context* v8context = v8_context();
|
+ v8context->keyed_lookup_cache_.Clear();
|
+ v8context->context_slot_cache_.Clear();
|
+ v8context->descriptor_lookup_cache_.Clear();
|
|
- CompilationCache::MarkCompactPrologue();
|
+ v8context->compilation_cache_.MarkCompactPrologue();
|
|
- Top::MarkCompactPrologue(is_compacting);
|
- ThreadManager::MarkCompactPrologue(is_compacting);
|
+ v8context->top_.MarkCompactPrologue(is_compacting);
|
+ v8context->thread_manager_.MarkCompactPrologue(is_compacting);
|
}
|
|
|
void Heap::MarkCompactEpilogue(bool is_compacting) {
|
- Top::MarkCompactEpilogue(is_compacting);
|
- ThreadManager::MarkCompactEpilogue(is_compacting);
|
+ v8_context()->top_.MarkCompactEpilogue(is_compacting);
|
+ v8_context()->thread_manager_.MarkCompactEpilogue(is_compacting);
|
}
|
|
|
@@ -609,48 +682,13 @@
|
private:
|
void ScavengePointer(Object** p) {
|
Object* object = *p;
|
- if (!Heap::InNewSpace(object)) return;
|
- Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
|
+ Heap& heap = v8_context()->heap_;
|
+ if (!heap.InNewSpace(object)) return;
|
+ heap.ScavengeObject(reinterpret_cast<HeapObject**>(p),
|
reinterpret_cast<HeapObject*>(object));
|
}
|
};
|
|
-
|
-// A queue of pointers and maps of to-be-promoted objects during a
|
-// scavenge collection.
|
-class PromotionQueue {
|
- public:
|
- void Initialize(Address start_address) {
|
- front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
|
- }
|
-
|
- bool is_empty() { return front_ <= rear_; }
|
-
|
- void insert(HeapObject* object, Map* map) {
|
- *(--rear_) = object;
|
- *(--rear_) = map;
|
- // Assert no overflow into live objects.
|
- ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
|
- }
|
-
|
- void remove(HeapObject** object, Map** map) {
|
- *object = *(--front_);
|
- *map = Map::cast(*(--front_));
|
- // Assert no underflow.
|
- ASSERT(front_ >= rear_);
|
- }
|
-
|
- private:
|
- // The front of the queue is higher in memory than the rear.
|
- HeapObject** front_;
|
- HeapObject** rear_;
|
-};
|
-
|
-
|
-// Shared state read by the scavenge collector and set by ScavengeObject.
|
-static PromotionQueue promotion_queue;
|
-
|
-
|
#ifdef DEBUG
|
// Visitor class to verify pointers in code or data space do not point into
|
// new space.
|
@@ -659,7 +697,7 @@
|
void VisitPointers(Object** start, Object**end) {
|
for (Object** current = start; current < end; current++) {
|
if ((*current)->IsHeapObject()) {
|
- ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
|
+ ASSERT(!heap_.InNewSpace(HeapObject::cast(*current)));
|
}
|
}
|
}
|
@@ -670,13 +708,14 @@
|
// Verify that there are no pointers to new space in spaces where we
|
// do not expect them.
|
VerifyNonPointerSpacePointersVisitor v;
|
- HeapObjectIterator code_it(Heap::code_space());
|
+ Heap& heap = v8_context()->heap_;
|
+ HeapObjectIterator code_it(heap.code_space());
|
while (code_it.has_next()) {
|
HeapObject* object = code_it.next();
|
object->Iterate(&v);
|
}
|
|
- HeapObjectIterator data_it(Heap::old_data_space());
|
+ HeapObjectIterator data_it(heap.old_data_space());
|
while (data_it.has_next()) data_it.next()->Iterate(&v);
|
}
|
#endif
|
@@ -693,7 +732,7 @@
|
LOG(ResourceEvent("scavenge", "begin"));
|
|
// Clear descriptor cache.
|
- DescriptorLookupCache::Clear();
|
+ v8_context()->descriptor_lookup_cache_.Clear();
|
|
// Used for updating survived_since_last_expansion_ at function end.
|
int survived_watermark = PromotedSpaceSize();
|
@@ -729,7 +768,7 @@
|
// frees up its size in bytes from the top of the new space, and
|
// objects are at least one pointer in size.
|
Address new_space_front = new_space_.ToSpaceLow();
|
- promotion_queue.Initialize(new_space_.ToSpaceHigh());
|
+ heap_impl_->promotion_queue.Initialize(new_space_.ToSpaceHigh());
|
|
ScavengeVisitor scavenge_visitor;
|
// Copy roots.
|
@@ -766,10 +805,10 @@
|
}
|
|
// Promote and process all the to-be-promoted objects.
|
- while (!promotion_queue.is_empty()) {
|
+ while (!heap_impl_->promotion_queue.is_empty()) {
|
HeapObject* source;
|
Map* map;
|
- promotion_queue.remove(&source, &map);
|
+ heap_impl_->promotion_queue.remove(&source, &map);
|
// Copy the from-space object to its new location (given by the
|
// forwarding address) and fix its map.
|
HeapObject* target = source->map_word().ToForwardingAddress();
|
@@ -854,7 +893,7 @@
|
// newly copied to old space, and it is cleared before rebuilding in the
|
// mark-compact collector.
|
ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
|
- if (Heap::InNewSpace(*p)) {
|
+ if (v8_context()->heap_.InNewSpace(*p)) {
|
Page::SetRSet(reinterpret_cast<Address>(p), 0);
|
}
|
}
|
@@ -960,7 +999,7 @@
|
InstanceType type = map->instance_type();
|
if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
|
ASSERT(object->IsString() && !object->IsSymbol());
|
- return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
|
+ return ConsString::cast(object)->unchecked_second() == v8_context()->heap_.empty_string();
|
}
|
|
|
@@ -1000,7 +1039,7 @@
|
// forwarding address over the map word of the from-space
|
// object.
|
HeapObject* target = HeapObject::cast(result);
|
- promotion_queue.insert(object, first_word.ToMap());
|
+ heap_impl_->promotion_queue.insert(object, first_word.ToMap());
|
object->set_map_word(MapWord::FromForwardingAddress(target));
|
|
// Give the space allocated for the result a proper map by
|
@@ -1024,7 +1063,7 @@
|
// top of the to space to be swept and copied later. Write the
|
// forwarding address over the map word of the from-space
|
// object.
|
- promotion_queue.insert(object, first_word.ToMap());
|
+ heap_impl_->promotion_queue.insert(object, first_word.ToMap());
|
object->set_map_word(MapWord::FromForwardingAddress(target));
|
|
// Give the space allocated for the result a proper map by
|
@@ -1058,7 +1097,7 @@
|
|
|
void Heap::ScavengePointer(HeapObject** p) {
|
- ScavengeObject(p, *p);
|
+ v8_context()->heap_.ScavengeObject(p, *p);
|
}
|
|
|
@@ -1559,17 +1598,18 @@
|
// Handling of script id generation is in Factory::NewScript.
|
set_last_script_id(undefined_value());
|
|
+ V8Context* const v8context = v8_context();
|
// Initialize keyed lookup cache.
|
- KeyedLookupCache::Clear();
|
+ v8context->keyed_lookup_cache_.Clear();
|
|
// Initialize context slot cache.
|
- ContextSlotCache::Clear();
|
+ v8context->context_slot_cache_.Clear();
|
|
// Initialize descriptor cache.
|
- DescriptorLookupCache::Clear();
|
+ v8context->descriptor_lookup_cache_.Clear();
|
|
// Initialize compilation cache.
|
- CompilationCache::Clear();
|
+ v8context->compilation_cache_.Clear();
|
|
return true;
|
}
|
@@ -1741,9 +1781,9 @@
|
|
SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
|
share->set_name(name);
|
- Code* illegal = Builtins::builtin(Builtins::Illegal);
|
+ Code* illegal = v8_context()->builtins_.builtin(Builtins::Illegal);
|
share->set_code(illegal);
|
- Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
|
+ Code* construct_stub = v8_context()->builtins_.builtin(Builtins::JSConstructStubGeneric);
|
share->set_construct_stub(construct_stub);
|
share->set_expected_nof_properties(0);
|
share->set_length(0);
|
@@ -1775,7 +1815,7 @@
|
// Make sure that an out of memory exception is thrown if the length
|
// of the new cons string is too large.
|
if (length > String::kMaxLength || length < 0) {
|
- Top::context()->mark_out_of_memory();
|
+ v8_context()->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
|
@@ -2081,7 +2121,7 @@
|
// Initialize the object
|
HeapObject::cast(result)->set_map(code_map());
|
Code* code = Code::cast(result);
|
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
|
+ ASSERT(!code_range_->exists() || code_range_->contains(code->address()));
|
code->set_instruction_size(desc.instr_size);
|
code->set_relocation_size(desc.reloc_size);
|
code->set_sinfo_size(sinfo_size);
|
@@ -2126,7 +2166,7 @@
|
obj_size);
|
// Relocate the copy.
|
Code* new_code = Code::cast(result);
|
- ASSERT(!CodeRange::exists() || CodeRange::contains(code->address()));
|
+ ASSERT(!code_range_->exists() || code_range_->contains(code->address()));
|
new_code->Relocate(new_addr - old_addr);
|
return new_code;
|
}
|
@@ -2198,7 +2238,7 @@
|
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
|
|
JSObject* boilerplate =
|
- Top::context()->global_context()->arguments_boilerplate();
|
+ v8_context()->top_.context()->global_context()->arguments_boilerplate();
|
|
// Make the clone.
|
Map* map = boilerplate->map();
|
@@ -2519,7 +2559,7 @@
|
PretenureFlag pretenure) {
|
// Count the number of characters in the UTF-8 string and check if
|
// it is an ASCII string.
|
- Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
|
+ Access<Scanner::Utf8Decoder> decoder(v8_context()->scanner_.utf8_decoder());
|
decoder->Reset(string.start(), string.length());
|
int chars = 0;
|
bool is_ascii = true;
|
@@ -2967,33 +3007,31 @@
|
static const int kIdlesBeforeScavenge = 4;
|
static const int kIdlesBeforeMarkSweep = 7;
|
static const int kIdlesBeforeMarkCompact = 8;
|
- static int number_idle_notifications = 0;
|
- static int last_gc_count = gc_count_;
|
|
bool finished = false;
|
|
- if (last_gc_count == gc_count_) {
|
- number_idle_notifications++;
|
+ if (heap_impl_->last_gc_count == gc_count_) {
|
+ heap_impl_->number_idle_notifications++;
|
} else {
|
- number_idle_notifications = 0;
|
- last_gc_count = gc_count_;
|
+ heap_impl_->number_idle_notifications = 0;
|
+ heap_impl_->last_gc_count = gc_count_;
|
}
|
|
- if (number_idle_notifications == kIdlesBeforeScavenge) {
|
+ if (heap_impl_->number_idle_notifications == kIdlesBeforeScavenge) {
|
CollectGarbage(0, NEW_SPACE);
|
new_space_.Shrink();
|
- last_gc_count = gc_count_;
|
+ heap_impl_->last_gc_count = gc_count_;
|
|
- } else if (number_idle_notifications == kIdlesBeforeMarkSweep) {
|
+ } else if (heap_impl_->number_idle_notifications == kIdlesBeforeMarkSweep) {
|
CollectAllGarbage(false);
|
new_space_.Shrink();
|
- last_gc_count = gc_count_;
|
+ heap_impl_->last_gc_count = gc_count_;
|
|
- } else if (number_idle_notifications == kIdlesBeforeMarkCompact) {
|
+ } else if (heap_impl_->number_idle_notifications == kIdlesBeforeMarkCompact) {
|
CollectAllGarbage(true);
|
new_space_.Shrink();
|
- last_gc_count = gc_count_;
|
- number_idle_notifications = 0;
|
+ heap_impl_->last_gc_count = gc_count_;
|
+ heap_impl_->number_idle_notifications = 0;
|
finished = true;
|
}
|
|
@@ -3007,7 +3045,7 @@
|
|
void Heap::Print() {
|
if (!HasBeenSetup()) return;
|
- Top::PrintStack();
|
+ v8_context()->top_.PrintStack();
|
AllSpaces spaces;
|
while (Space* space = spaces.next()) space->Print();
|
}
|
@@ -3036,12 +3074,13 @@
|
PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
|
|
PrintF("\n");
|
- PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
|
- GlobalHandles::PrintStats();
|
+ V8Context * const v8context = v8_context();
|
+ PrintF("Number of handles : %d\n", v8context->handle_scope_implementer_.NumberOfHandles());
|
+ v8context->global_handles_.PrintStats();
|
PrintF("\n");
|
|
PrintF("Heap statistics : ");
|
- MemoryAllocator::ReportStatistics();
|
+ memory_allocator_->ReportStatistics();
|
PrintF("To space : ");
|
new_space_.ReportStatistics();
|
PrintF("Old pointer space : ");
|
@@ -3223,19 +3262,13 @@
|
ASSERT(Page::is_rset_in_use());
|
ASSERT(space == old_pointer_space_ || space == map_space_);
|
|
- static void* paged_rset_histogram = StatsTable::CreateHistogram(
|
- "V8.RSetPaged",
|
- 0,
|
- Page::kObjectAreaSize / kPointerSize,
|
- 30);
|
-
|
PageIterator it(space, PageIterator::PAGES_IN_USE);
|
while (it.has_next()) {
|
Page* page = it.next();
|
int count = IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
|
page->RSetStart(), copy_object_func);
|
- if (paged_rset_histogram != NULL) {
|
- StatsTable::AddHistogramSample(paged_rset_histogram, count);
|
+ if (heap_impl_->paged_rset_histogram != NULL) {
|
+ StatsTable::AddHistogramSample(heap_impl_->paged_rset_histogram, count);
|
}
|
}
|
}
|
@@ -3255,49 +3288,45 @@
|
v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_));
|
v->Synchronize("symbol");
|
|
- Bootstrapper::Iterate(v);
|
+ v8::V8Context * const v8context = v8_context();
|
+ v8context->bootstrapper_.Iterate(v);
|
v->Synchronize("bootstrapper");
|
- Top::Iterate(v);
|
+ v8context->top_.Iterate(v);
|
v->Synchronize("top");
|
- Relocatable::Iterate(v);
|
+ v8context->relocatable_data_.Iterate(v);
|
v->Synchronize("relocatable");
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debug::Iterate(v);
|
+ v8context->debug_.Iterate(v);
|
#endif
|
v->Synchronize("debug");
|
- CompilationCache::Iterate(v);
|
+ v8context->compilation_cache_.Iterate(v);
|
v->Synchronize("compilationcache");
|
|
// Iterate over local handles in handle scopes.
|
- HandleScopeImplementer::Iterate(v);
|
+ v8context->handle_scope_implementer_.Iterate(v);
|
v->Synchronize("handlescope");
|
|
// Iterate over the builtin code objects and code stubs in the heap. Note
|
// that it is not strictly necessary to iterate over code objects on
|
// scavenge collections. We still do it here because this same function
|
// is used by the mark-sweep collector and the deserializer.
|
- Builtins::IterateBuiltins(v);
|
+ v8context->builtins_.IterateBuiltins(v);
|
v->Synchronize("builtins");
|
|
// Iterate over global handles.
|
if (mode == VISIT_ONLY_STRONG) {
|
- GlobalHandles::IterateStrongRoots(v);
|
+ v8context->global_handles_.IterateStrongRoots(v);
|
} else {
|
- GlobalHandles::IterateAllRoots(v);
|
+ v8context->global_handles_.IterateAllRoots(v);
|
}
|
v->Synchronize("globalhandles");
|
|
// Iterate over pointers being held by inactive threads.
|
- ThreadManager::Iterate(v);
|
+ v8context->thread_manager_.Iterate(v);
|
v->Synchronize("threadmanager");
|
}
|
|
-
|
-// Flag is set when the heap has been configured. The heap can be repeatedly
|
-// configured through the API until it is setup.
|
-static bool heap_configured = false;
|
-
|
// TODO(1236194): Since the heap size is configurable on the command line
|
// and through the API, we should gracefully handle the case that the heap
|
// size is not big enough to fit all the initial objects.
|
@@ -3378,9 +3407,9 @@
|
// space. The chunk is double the size of the requested reserved
|
// new space size to ensure that we can find a pair of semispaces that
|
// are contiguous and aligned to their size.
|
- if (!MemoryAllocator::Setup(MaxReserved())) return false;
|
+ if (!memory_allocator_->Setup(MaxReserved())) return false;
|
void* chunk =
|
- MemoryAllocator::ReserveInitialChunk(4 * reserved_semispace_size_);
|
+ memory_allocator_->ReserveInitialChunk(4 * reserved_semispace_size_);
|
if (chunk == NULL) return false;
|
|
// Align the pair of semispaces to their size, which must be a power
|
@@ -3393,13 +3422,13 @@
|
|
// Initialize old pointer space.
|
old_pointer_space_ =
|
- new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
|
+ new OldSpace(max_old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE, memory_allocator_);
|
if (old_pointer_space_ == NULL) return false;
|
if (!old_pointer_space_->Setup(NULL, 0)) return false;
|
|
// Initialize old data space.
|
old_data_space_ =
|
- new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
|
+ new OldSpace(max_old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE, memory_allocator_);
|
if (old_data_space_ == NULL) return false;
|
if (!old_data_space_->Setup(NULL, 0)) return false;
|
|
@@ -3408,30 +3437,30 @@
|
// On 64-bit platform(s), we put all code objects in a 2 GB range of
|
// virtual address space, so that they can call each other with near calls.
|
if (code_range_size_ > 0) {
|
- if (!CodeRange::Setup(code_range_size_)) {
|
+ if (!code_range_->Setup(code_range_size_)) {
|
return false;
|
}
|
}
|
|
code_space_ =
|
- new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE);
|
+ new OldSpace(max_old_generation_size_, CODE_SPACE, EXECUTABLE, memory_allocator_);
|
if (code_space_ == NULL) return false;
|
if (!code_space_->Setup(NULL, 0)) return false;
|
|
// Initialize map space.
|
- map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
|
+ map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE, memory_allocator_);
|
if (map_space_ == NULL) return false;
|
if (!map_space_->Setup(NULL, 0)) return false;
|
|
// Initialize global property cell space.
|
- cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE);
|
+ cell_space_ = new CellSpace(max_old_generation_size_, CELL_SPACE, memory_allocator_);
|
if (cell_space_ == NULL) return false;
|
if (!cell_space_->Setup(NULL, 0)) return false;
|
|
// The large object code space may contain code or data. We set the memory
|
// to be non-executable here for safety, but this means we need to enable it
|
// explicitly when allocating large code objects.
|
- lo_space_ = new LargeObjectSpace(LO_SPACE);
|
+ lo_space_ = new LargeObjectSpace(LO_SPACE, memory_allocator_);
|
if (lo_space_ == NULL) return false;
|
if (!lo_space_->Setup()) return false;
|
|
@@ -3452,6 +3481,11 @@
|
ProducerHeapProfile::Setup();
|
#endif
|
|
+ heap_impl_->paged_rset_histogram = StatsTable::CreateHistogram(
|
+ "V8.RSetPaged",
|
+ 0,
|
+ Page::kObjectAreaSize / kPointerSize,
|
+ 30);
|
return true;
|
}
|
|
@@ -3464,15 +3498,15 @@
|
// These are actually addresses, but the tag makes the GC ignore it.
|
roots_[kStackLimitRootIndex] =
|
reinterpret_cast<Object*>(
|
- (StackGuard::jslimit() & ~kSmiTagMask) | kSmiTag);
|
+ (v8_context()->stack_guard_.jslimit() & ~kSmiTagMask) | kSmiTag);
|
roots_[kRealStackLimitRootIndex] =
|
reinterpret_cast<Object*>(
|
- (StackGuard::real_jslimit() & ~kSmiTagMask) | kSmiTag);
|
+ (v8_context()->stack_guard_.real_jslimit() & ~kSmiTagMask) | kSmiTag);
|
}
|
|
|
void Heap::TearDown() {
|
- GlobalHandles::TearDown();
|
+ v8_context()->global_handles_.TearDown();
|
|
new_space_.TearDown();
|
|
@@ -3512,7 +3546,7 @@
|
lo_space_ = NULL;
|
}
|
|
- MemoryAllocator::TearDown();
|
+ memory_allocator_->TearDown();
|
}
|
|
|
@@ -3556,7 +3590,7 @@
|
void Heap::PrintHandles() {
|
PrintF("Handles:\n");
|
PrintHandleVisitor v;
|
- HandleScopeImplementer::Iterate(&v);
|
+ v8_context()->handle_scope_implementer_.Iterate(&v);
|
}
|
|
#endif
|
@@ -3565,19 +3599,19 @@
|
Space* AllSpaces::next() {
|
switch (counter_++) {
|
case NEW_SPACE:
|
- return Heap::new_space();
|
+ return v8_context()->heap_.new_space();
|
case OLD_POINTER_SPACE:
|
- return Heap::old_pointer_space();
|
+ return v8_context()->heap_.old_pointer_space();
|
case OLD_DATA_SPACE:
|
- return Heap::old_data_space();
|
+ return v8_context()->heap_.old_data_space();
|
case CODE_SPACE:
|
- return Heap::code_space();
|
+ return v8_context()->heap_.code_space();
|
case MAP_SPACE:
|
- return Heap::map_space();
|
+ return v8_context()->heap_.map_space();
|
case CELL_SPACE:
|
- return Heap::cell_space();
|
+ return v8_context()->heap_.cell_space();
|
case LO_SPACE:
|
- return Heap::lo_space();
|
+ return v8_context()->heap_.lo_space();
|
default:
|
return NULL;
|
}
|
@@ -3587,15 +3621,15 @@
|
PagedSpace* PagedSpaces::next() {
|
switch (counter_++) {
|
case OLD_POINTER_SPACE:
|
- return Heap::old_pointer_space();
|
+ return v8_context()->heap_.old_pointer_space();
|
case OLD_DATA_SPACE:
|
- return Heap::old_data_space();
|
+ return v8_context()->heap_.old_data_space();
|
case CODE_SPACE:
|
- return Heap::code_space();
|
+ return v8_context()->heap_.code_space();
|
case MAP_SPACE:
|
- return Heap::map_space();
|
+ return v8_context()->heap_.map_space();
|
case CELL_SPACE:
|
- return Heap::cell_space();
|
+ return v8_context()->heap_.cell_space();
|
default:
|
return NULL;
|
}
|
@@ -3606,11 +3640,11 @@
|
OldSpace* OldSpaces::next() {
|
switch (counter_++) {
|
case OLD_POINTER_SPACE:
|
- return Heap::old_pointer_space();
|
+ return v8_context()->heap_.old_pointer_space();
|
case OLD_DATA_SPACE:
|
- return Heap::old_data_space();
|
+ return v8_context()->heap_.old_data_space();
|
case CODE_SPACE:
|
- return Heap::code_space();
|
+ return v8_context()->heap_.code_space();
|
default:
|
return NULL;
|
}
|
@@ -3655,25 +3689,25 @@
|
|
switch (current_space_) {
|
case NEW_SPACE:
|
- iterator_ = new SemiSpaceIterator(Heap::new_space());
|
+ iterator_ = new SemiSpaceIterator(v8_context()->heap_.new_space());
|
break;
|
case OLD_POINTER_SPACE:
|
- iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
|
+ iterator_ = new HeapObjectIterator(v8_context()->heap_.old_pointer_space());
|
break;
|
case OLD_DATA_SPACE:
|
- iterator_ = new HeapObjectIterator(Heap::old_data_space());
|
+ iterator_ = new HeapObjectIterator(v8_context()->heap_.old_data_space());
|
break;
|
case CODE_SPACE:
|
- iterator_ = new HeapObjectIterator(Heap::code_space());
|
+ iterator_ = new HeapObjectIterator(v8_context()->heap_.code_space());
|
break;
|
case MAP_SPACE:
|
- iterator_ = new HeapObjectIterator(Heap::map_space());
|
+ iterator_ = new HeapObjectIterator(v8_context()->heap_.map_space());
|
break;
|
case CELL_SPACE:
|
- iterator_ = new HeapObjectIterator(Heap::cell_space());
|
+ iterator_ = new HeapObjectIterator(v8_context()->heap_.cell_space());
|
break;
|
case LO_SPACE:
|
- iterator_ = new LargeObjectIterator(Heap::lo_space());
|
+ iterator_ = new LargeObjectIterator(v8_context()->heap_.lo_space());
|
break;
|
}
|
|
@@ -3747,19 +3781,14 @@
|
|
|
#ifdef DEBUG
|
-
|
-static bool search_for_any_global;
|
-static Object* search_target;
|
-static bool found_target;
|
-static List<Object*> object_stack(20);
|
-
|
-
|
// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
|
static const int kMarkTag = 2;
|
|
-static void MarkObjectRecursively(Object** p);
|
class MarkObjectVisitor : public ObjectVisitor {
|
+ Heap::HeapImpl* const heap_impl_;
|
public:
|
+ MarkObjectVisitor(Heap::HeapImpl * heap_impl):heap_impl_(heap_impl) {}
|
+
|
void VisitPointers(Object** start, Object** end) {
|
// Copy all HeapObject pointers in [start, end)
|
for (Object** p = start; p < end; p++) {
|
@@ -3767,11 +3796,11 @@
|
MarkObjectRecursively(p);
|
}
|
}
|
+
|
+ void MarkObjectRecursively(Object** p);
|
};
|
|
-static MarkObjectVisitor mark_visitor;
|
-
|
-static void MarkObjectRecursively(Object** p) {
|
+void MarkObjectVisitor::MarkObjectRecursively(Object** p) {
|
if (!(*p)->IsHeapObject()) return;
|
|
HeapObject* obj = HeapObject::cast(*p);
|
@@ -3780,11 +3809,11 @@
|
|
if (!map->IsHeapObject()) return; // visited before
|
|
- if (found_target) return; // stop if target found
|
- object_stack.Add(obj);
|
- if ((search_for_any_global && obj->IsJSGlobalObject()) ||
|
- (!search_for_any_global && (obj == search_target))) {
|
- found_target = true;
|
+ if (heap_impl_->found_target) return; // stop if target found
|
+ heap_impl_->object_stack.Add(obj);
|
+ if ((heap_impl_->search_for_any_global && obj->IsJSGlobalObject()) ||
|
+ (!heap_impl_->search_for_any_global && (obj == heap_impl_->search_target))) {
|
+ heap_impl_->found_target = true;
|
return;
|
}
|
|
@@ -3798,14 +3827,13 @@
|
MarkObjectRecursively(&map);
|
|
obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
|
- &mark_visitor);
|
+ this);
|
|
- if (!found_target) // don't pop if found the target
|
- object_stack.RemoveLast();
|
+ if (!heap_impl_->found_target) // don't pop if found the target
|
+ heap_impl_->object_stack.RemoveLast();
|
}
|
|
|
-static void UnmarkObjectRecursively(Object** p);
|
class UnmarkObjectVisitor : public ObjectVisitor {
|
public:
|
void VisitPointers(Object** start, Object** end) {
|
@@ -3815,11 +3843,10 @@
|
UnmarkObjectRecursively(p);
|
}
|
}
|
+ void UnmarkObjectRecursively(Object** p);
|
};
|
|
-static UnmarkObjectVisitor unmark_visitor;
|
-
|
-static void UnmarkObjectRecursively(Object** p) {
|
+void UnmarkObjectVisitor::UnmarkObjectRecursively(Object** p) {
|
if (!(*p)->IsHeapObject()) return;
|
|
HeapObject* obj = HeapObject::cast(*p);
|
@@ -3842,11 +3869,11 @@
|
|
obj->IterateBody(Map::cast(map_p)->instance_type(),
|
obj->SizeFromMap(Map::cast(map_p)),
|
- &unmark_visitor);
|
+ this);
|
}
|
|
|
-static void MarkRootObjectRecursively(Object** root) {
|
+void Heap::HeapImpl::MarkRootObjectRecursively(Object** root) {
|
if (search_for_any_global) {
|
ASSERT(search_target == NULL);
|
} else {
|
@@ -3855,8 +3882,10 @@
|
found_target = false;
|
object_stack.Clear();
|
|
- MarkObjectRecursively(root);
|
- UnmarkObjectRecursively(root);
|
+ MarkObjectVisitor mark_object_visitor(this);
|
+ mark_object_visitor.MarkObjectRecursively(root);
|
+ UnmarkObjectVisitor unmark_visitor;
|
+ unmark_visitor.UnmarkObjectRecursively(root);
|
|
if (found_target) {
|
PrintF("=====================================\n");
|
@@ -3876,12 +3905,15 @@
|
|
// Helper class for visiting HeapObjects recursively.
|
class MarkRootVisitor: public ObjectVisitor {
|
+ Heap::HeapImpl* const heap_impl_;
|
public:
|
+ MarkRootVisitor(Heap::HeapImpl* heap_impl):heap_impl_(heap_impl) {}
|
+
|
void VisitPointers(Object** start, Object** end) {
|
// Visit all HeapObject pointers in [start, end)
|
for (Object** p = start; p < end; p++) {
|
if ((*p)->IsHeapObject())
|
- MarkRootObjectRecursively(p);
|
+ heap_impl_->MarkRootObjectRecursively(p);
|
}
|
}
|
};
|
@@ -3890,10 +3922,10 @@
|
// Triggers a depth-first traversal of reachable objects from roots
|
// and finds a path to a specific heap object and prints it.
|
void Heap::TracePathToObject() {
|
- search_target = NULL;
|
- search_for_any_global = false;
|
+ heap_impl_->search_target = NULL;
|
+ heap_impl_->search_for_any_global = false;
|
|
- MarkRootVisitor root_visitor;
|
+ MarkRootVisitor root_visitor(heap_impl_);
|
IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
|
}
|
|
@@ -3902,10 +3934,10 @@
|
// and finds a path to any global object and prints it. Useful for
|
// determining the source for leaks of global objects.
|
void Heap::TracePathToGlobal() {
|
- search_target = NULL;
|
- search_for_any_global = true;
|
+ heap_impl_->search_target = NULL;
|
+ heap_impl_->search_for_any_global = true;
|
|
- MarkRootVisitor root_visitor;
|
+ MarkRootVisitor root_visitor(heap_impl_);
|
IterateRoots(&root_visitor, VISIT_ONLY_STRONG);
|
}
|
#endif
|
@@ -3920,8 +3952,8 @@
|
marked_count_(0) {
|
// These two fields reflect the state of the previous full collection.
|
// Set them before they are changed by the collector.
|
- previous_has_compacted_ = MarkCompactCollector::HasCompacted();
|
- previous_marked_count_ = MarkCompactCollector::previous_marked_count();
|
+ previous_has_compacted_ = v8_context()->mark_compact_collector_.HasCompacted();
|
+ previous_marked_count_ = v8_context()->mark_compact_collector_.previous_marked_count();
|
if (!FLAG_trace_gc) return;
|
start_time_ = OS::TimeCurrentMillis();
|
start_size_ = SizeOfHeapObjects();
|
@@ -3937,7 +3969,7 @@
|
static_cast<int>(OS::TimeCurrentMillis() - start_time_));
|
|
#if defined(ENABLE_LOGGING_AND_PROFILING)
|
- Heap::PrintShortHeapStatistics();
|
+ v8_context()->heap_.PrintShortHeapStatistics();
|
#endif
|
}
|
|
@@ -3947,7 +3979,7 @@
|
case SCAVENGER:
|
return "Scavenge";
|
case MARK_COMPACTOR:
|
- return MarkCompactCollector::HasCompacted() ? "Mark-compact"
|
+ return v8_context()->mark_compact_collector_.HasCompacted() ? "Mark-compact"
|
: "Mark-sweep";
|
}
|
return "Unknown GC";
|
@@ -3974,7 +4006,7 @@
|
|
void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
|
String* symbol;
|
- if (Heap::LookupSymbolIfExists(name, &symbol)) {
|
+ if (v8_context()->heap_.LookupSymbolIfExists(name, &symbol)) {
|
int index = Hash(map, symbol);
|
Key& key = keys_[index];
|
key.map = map;
|
@@ -3988,28 +4020,14 @@
|
for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
|
}
|
|
-
|
-KeyedLookupCache::Key KeyedLookupCache::keys_[KeyedLookupCache::kLength];
|
-
|
-
|
-int KeyedLookupCache::field_offsets_[KeyedLookupCache::kLength];
|
-
|
-
|
void DescriptorLookupCache::Clear() {
|
for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
|
}
|
|
-
|
-DescriptorLookupCache::Key
|
-DescriptorLookupCache::keys_[DescriptorLookupCache::kLength];
|
-
|
-int DescriptorLookupCache::results_[DescriptorLookupCache::kLength];
|
-
|
-
|
#ifdef DEBUG
|
bool Heap::GarbageCollectionGreedyCheck() {
|
ASSERT(FLAG_gc_greedy);
|
- if (Bootstrapper::IsActive()) return true;
|
+ if (v8_context()->bootstrapper_.IsActive()) return true;
|
if (disallow_allocation_failure()) return true;
|
return CollectGarbage(0, NEW_SPACE);
|
}
|
@@ -4028,11 +4046,8 @@
|
}
|
|
|
-TranscendentalCache* TranscendentalCache::caches_[kNumberOfCaches];
|
-
|
-
|
-void TranscendentalCache::Clear() {
|
- for (int i = 0; i < kNumberOfCaches; i++) {
|
+void TranscendentalCaches::Clear() {
|
+ for (int i = 0; i < TranscendentalCache::kNumberOfCaches; i++) {
|
if (caches_[i] != NULL) {
|
delete caches_[i];
|
caches_[i] = NULL;
|
Index: src/ic-inl.h
|
===================================================================
|
--- src/ic-inl.h (revision 2876)
|
+++ src/ic-inl.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -43,11 +43,12 @@
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// First check if any break points are active if not just return the address
|
// of the call.
|
- if (!Debug::has_break_points()) return result;
|
+ Debug& debug = v8_context()->debug_;
|
+ if (!debug.has_break_points()) return result;
|
|
// At least one break point is active perform additional test to ensure that
|
// break point locations are updated correctly.
|
- if (Debug::IsDebugBreak(Assembler::target_address_at(result))) {
|
+ if (debug.IsDebugBreak(Assembler::target_address_at(result))) {
|
// If the call site is a call to debug break then return the address in
|
// the original code instead of the address in the running code. This will
|
// cause the original code to be updated and keeps the breakpoint active in
|
Index: src/d8.cc
|
===================================================================
|
--- src/d8.cc (revision 2838)
|
+++ src/d8.cc Sat Nov 14 02:12:34 MSK 2009
|
@@ -442,8 +442,8 @@
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Install the debugger object in the utility scope
|
- i::Debug::Load();
|
- i::JSObject* debug = i::Debug::debug_context()->global();
|
+ v8_context()->debug_.Load();
|
+ i::JSObject* debug = v8_context()->debug_.debug_context()->global();
|
utility_context_->Global()->Set(String::New("$debug"),
|
Utils::ToLocal(&debug));
|
#endif
|
@@ -474,7 +474,7 @@
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Set the security token of the debug context to allow access.
|
- i::Debug::debug_context()->set_security_token(i::Heap::undefined_value());
|
+ v8_context()->debug_.debug_context()->set_security_token(v8_context()->heap_.undefined_value());
|
|
// Start the debugger agent if requested.
|
if (i::FLAG_debugger_agent) {
|
Index: src/objects-inl.h
|
===================================================================
|
--- src/objects-inl.h (revision 3238)
|
+++ src/objects-inl.h Sat Nov 14 01:42:55 MSK 2009
|
@@ -464,22 +464,23 @@
|
|
|
bool Object::IsContext() {
|
+ Heap& heap = v8_context()->heap_;
|
return Object::IsHeapObject()
|
- && (HeapObject::cast(this)->map() == Heap::context_map() ||
|
- HeapObject::cast(this)->map() == Heap::catch_context_map() ||
|
- HeapObject::cast(this)->map() == Heap::global_context_map());
|
+ && (HeapObject::cast(this)->map() == heap.context_map() ||
|
+ HeapObject::cast(this)->map() == heap.catch_context_map() ||
|
+ HeapObject::cast(this)->map() == heap.global_context_map());
|
}
|
|
|
bool Object::IsCatchContext() {
|
return Object::IsHeapObject()
|
- && HeapObject::cast(this)->map() == Heap::catch_context_map();
|
+ && HeapObject::cast(this)->map() == v8_context()->heap_.catch_context_map();
|
}
|
|
|
bool Object::IsGlobalContext() {
|
return Object::IsHeapObject()
|
- && HeapObject::cast(this)->map() == Heap::global_context_map();
|
+ && HeapObject::cast(this)->map() == v8_context()->heap_.global_context_map();
|
}
|
|
|
@@ -561,17 +562,17 @@
|
|
bool Object::IsHashTable() {
|
return Object::IsHeapObject()
|
- && HeapObject::cast(this)->map() == Heap::hash_table_map();
|
+ && HeapObject::cast(this)->map() == v8_context()->heap_.hash_table_map();
|
}
|
|
|
bool Object::IsDictionary() {
|
- return IsHashTable() && this != Heap::symbol_table();
|
+ return IsHashTable() && this != v8_context()->heap_.symbol_table();
|
}
|
|
|
bool Object::IsSymbolTable() {
|
- return IsHashTable() && this == Heap::raw_unchecked_symbol_table();
|
+ return IsHashTable() && this == v8_context()->heap_.raw_unchecked_symbol_table();
|
}
|
|
|
@@ -655,27 +656,27 @@
|
|
|
bool Object::IsUndefined() {
|
- return this == Heap::undefined_value();
|
+ return this == v8_context()->heap_.undefined_value();
|
}
|
|
|
bool Object::IsTheHole() {
|
- return this == Heap::the_hole_value();
|
+ return this == v8_context()->heap_.the_hole_value();
|
}
|
|
|
bool Object::IsNull() {
|
- return this == Heap::null_value();
|
+ return this == v8_context()->heap_.null_value();
|
}
|
|
|
bool Object::IsTrue() {
|
- return this == Heap::true_value();
|
+ return this == v8_context()->heap_.true_value();
|
}
|
|
|
bool Object::IsFalse() {
|
- return this == Heap::false_value();
|
+ return this == v8_context()->heap_.false_value();
|
}
|
|
|
@@ -733,17 +734,18 @@
|
|
|
#define WRITE_BARRIER(object, offset) \
|
- Heap::RecordWrite(object->address(), offset);
|
+ v8_context()->heap_.RecordWrite(object->address(), offset);
|
|
// CONDITIONAL_WRITE_BARRIER must be issued after the actual
|
// write due to the assert validating the written value.
|
#define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
|
if (mode == UPDATE_WRITE_BARRIER) { \
|
- Heap::RecordWrite(object->address(), offset); \
|
+ v8_context()->heap_.RecordWrite(object->address(), offset); \
|
} else { \
|
ASSERT(mode == SKIP_WRITE_BARRIER); \
|
- ASSERT(Heap::InNewSpace(object) || \
|
- !Heap::InNewSpace(READ_FIELD(object, offset))); \
|
+ Heap & heap = v8_context()->heap_; \
|
+ ASSERT(heap.InNewSpace(object) || \
|
+ !heap.InNewSpace(READ_FIELD(object, offset))); \
|
}
|
|
#define READ_DOUBLE_FIELD(p, offset) \
|
@@ -1170,14 +1172,16 @@
|
|
|
void JSObject::initialize_properties() {
|
- ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
|
- WRITE_FIELD(this, kPropertiesOffset, Heap::empty_fixed_array());
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.empty_fixed_array()));
|
+ WRITE_FIELD(this, kPropertiesOffset, heap.empty_fixed_array());
|
}
|
|
|
void JSObject::initialize_elements() {
|
- ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
|
- WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.empty_fixed_array()));
|
+ WRITE_FIELD(this, kElementsOffset, heap.empty_fixed_array());
|
}
|
|
|
@@ -1311,7 +1315,7 @@
|
|
|
void JSObject::InitializeBody(int object_size) {
|
- Object* value = Heap::undefined_value();
|
+ Object* value = v8_context()->heap_.undefined_value();
|
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
WRITE_FIELD(this, offset, value);
|
}
|
@@ -1319,7 +1323,7 @@
|
|
|
void Struct::InitializeBody(int object_size) {
|
- Object* value = Heap::undefined_value();
|
+ Object* value = v8_context()->heap_.undefined_value();
|
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
|
WRITE_FIELD(this, offset, value);
|
}
|
@@ -1385,7 +1389,7 @@
|
|
|
WriteBarrierMode HeapObject::GetWriteBarrierMode() {
|
- if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
+ if (v8_context()->heap_.InNewSpace(this)) return SKIP_WRITE_BARRIER;
|
return UPDATE_WRITE_BARRIER;
|
}
|
|
@@ -1408,30 +1412,34 @@
|
|
void FixedArray::set_undefined(int index) {
|
ASSERT(index >= 0 && index < this->length());
|
- ASSERT(!Heap::InNewSpace(Heap::undefined_value()));
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.undefined_value()));
|
WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
|
- Heap::undefined_value());
|
+ heap.undefined_value());
|
}
|
|
|
void FixedArray::set_null(int index) {
|
ASSERT(index >= 0 && index < this->length());
|
- ASSERT(!Heap::InNewSpace(Heap::null_value()));
|
- WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::null_value());
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.null_value()));
|
+ WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap.null_value());
|
}
|
|
|
void FixedArray::set_the_hole(int index) {
|
ASSERT(index >= 0 && index < this->length());
|
- ASSERT(!Heap::InNewSpace(Heap::the_hole_value()));
|
- WRITE_FIELD(this, kHeaderSize + index * kPointerSize, Heap::the_hole_value());
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.the_hole_value()));
|
+ WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap.the_hole_value());
|
}
|
|
|
bool DescriptorArray::IsEmpty() {
|
- ASSERT(this == Heap::empty_descriptor_array() ||
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(this == heap.empty_descriptor_array() ||
|
this->length() > 2);
|
- return this == Heap::empty_descriptor_array();
|
+ return this == heap.empty_descriptor_array();
|
}
|
|
|
@@ -1539,9 +1547,10 @@
|
// Range check.
|
ASSERT(descriptor_number < number_of_descriptors());
|
|
+ Heap & heap = v8_context()->heap_;
|
// Make sure non of the elements in desc are in new space.
|
- ASSERT(!Heap::InNewSpace(desc->GetKey()));
|
- ASSERT(!Heap::InNewSpace(desc->GetValue()));
|
+ ASSERT(!heap.InNewSpace(desc->GetKey()));
|
+ ASSERT(!heap.InNewSpace(desc->GetValue()));
|
|
fast_set(this, ToKeyIndex(descriptor_number), desc->GetKey());
|
FixedArray* content_array = GetContentArray();
|
@@ -1899,12 +1908,13 @@
|
Map* ExternalAsciiString::StringMap(int length) {
|
Map* map;
|
// Number of characters: determines the map.
|
+ Heap & heap = v8_context()->heap_;
|
if (length <= String::kMaxShortSize) {
|
- map = Heap::short_external_ascii_string_map();
|
+ map = heap.short_external_ascii_string_map();
|
} else if (length <= String::kMaxMediumSize) {
|
- map = Heap::medium_external_ascii_string_map();
|
+ map = heap.medium_external_ascii_string_map();
|
} else {
|
- map = Heap::long_external_ascii_string_map();
|
+ map = heap.long_external_ascii_string_map();
|
}
|
return map;
|
}
|
@@ -1912,13 +1922,14 @@
|
|
Map* ExternalAsciiString::SymbolMap(int length) {
|
Map* map;
|
+ Heap & heap = v8_context()->heap_;
|
// Number of characters: determines the map.
|
if (length <= String::kMaxShortSize) {
|
- map = Heap::short_external_ascii_symbol_map();
|
+ map = heap.short_external_ascii_symbol_map();
|
} else if (length <= String::kMaxMediumSize) {
|
- map = Heap::medium_external_ascii_symbol_map();
|
+ map = heap.medium_external_ascii_symbol_map();
|
} else {
|
- map = Heap::long_external_ascii_symbol_map();
|
+ map = heap.long_external_ascii_symbol_map();
|
}
|
return map;
|
}
|
@@ -1937,13 +1948,14 @@
|
|
Map* ExternalTwoByteString::StringMap(int length) {
|
Map* map;
|
+ Heap & heap = v8_context()->heap_;
|
// Number of characters: determines the map.
|
if (length <= String::kMaxShortSize) {
|
- map = Heap::short_external_string_map();
|
+ map = heap.short_external_string_map();
|
} else if (length <= String::kMaxMediumSize) {
|
- map = Heap::medium_external_string_map();
|
+ map = heap.medium_external_string_map();
|
} else {
|
- map = Heap::long_external_string_map();
|
+ map = heap.long_external_string_map();
|
}
|
return map;
|
}
|
@@ -1951,13 +1963,14 @@
|
|
Map* ExternalTwoByteString::SymbolMap(int length) {
|
Map* map;
|
+ Heap & heap = v8_context()->heap_;
|
// Number of characters: determines the map.
|
if (length <= String::kMaxShortSize) {
|
- map = Heap::short_external_symbol_map();
|
+ map = heap.short_external_symbol_map();
|
} else if (length <= String::kMaxMediumSize) {
|
- map = Heap::medium_external_symbol_map();
|
+ map = heap.medium_external_symbol_map();
|
} else {
|
- map = Heap::long_external_symbol_map();
|
+ map = heap.long_external_symbol_map();
|
}
|
return map;
|
}
|
@@ -2601,7 +2614,7 @@
|
|
|
bool JSFunction::IsBoilerplate() {
|
- return map() == Heap::boilerplate_function_map();
|
+ return map() == v8_context()->heap_.boilerplate_function_map();
|
}
|
|
|
@@ -2636,7 +2649,7 @@
|
|
|
void JSFunction::set_context(Object* value) {
|
- ASSERT(value == Heap::undefined_value() || value->IsContext());
|
+ ASSERT(value == v8_context()->heap_.undefined_value() || value->IsContext());
|
WRITE_FIELD(this, kContextOffset, value);
|
WRITE_BARRIER(this, kContextOffset);
|
}
|
@@ -2834,7 +2847,7 @@
|
Array* array = elements();
|
if (array->IsFixedArray()) {
|
// FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
|
- if (array->map() == Heap::fixed_array_map()) {
|
+ if (array->map() == v8_context()->heap_.fixed_array_map()) {
|
return FAST_ELEMENTS;
|
}
|
ASSERT(array->IsDictionary());
|
@@ -3107,8 +3120,9 @@
|
// No write barrier is needed since empty_fixed_array is not in new space.
|
// Please note this function is used during marking:
|
// - MarkCompactCollector::MarkUnmarkedObject
|
- ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array()));
|
- WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array());
|
+ Heap & heap = v8_context()->heap_;
|
+ ASSERT(!heap.InNewSpace(heap.raw_unchecked_empty_fixed_array()));
|
+ WRITE_FIELD(this, kCodeCacheOffset, heap.raw_unchecked_empty_fixed_array());
|
}
|
|
|
@@ -3127,7 +3141,7 @@
|
|
Object* FixedArray::Copy() {
|
if (length() == 0) return this;
|
- return Heap::CopyFixedArray(this);
|
+ return v8_context()->heap_.CopyFixedArray(this);
|
}
|
|
|
Index: src/ia32/assembler-ia32.cc
|
===================================================================
|
--- src/ia32/assembler-ia32.cc (revision 3113)
|
+++ src/ia32/assembler-ia32.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -47,17 +47,15 @@
|
// Implementation of CpuFeatures
|
|
// Safe default is no features.
|
-uint64_t CpuFeatures::supported_ = 0;
|
-uint64_t CpuFeatures::enabled_ = 0;
|
-
|
-
|
-// The Probe method needs executable memory, so it uses Heap::CreateCode.
|
+// The Probe method needs executable memory, so it uses v8_context()->heap_.CreateCode.
|
// Allocation failure is silent and leads to safe default.
|
void CpuFeatures::Probe() {
|
- ASSERT(Heap::HasBeenSetup());
|
- ASSERT(supported_ == 0);
|
+ V8Context* const v8context = v8_context();
|
+ ASSERT(v8context->heap_.HasBeenSetup());
|
+ ASSERT(v8context->assembler_data_->supported_ == 0);
|
if (Serializer::enabled()) return; // No features if we might serialize.
|
|
+
|
Assembler assm(NULL, 0);
|
Label cpuid, done;
|
#define __ assm.
|
@@ -90,11 +88,11 @@
|
// safe here.
|
__ bind(&cpuid);
|
__ mov(eax, 1);
|
- supported_ = (1 << CPUID);
|
+ v8context->assembler_data_->supported_ = (1 << CPUID);
|
{ Scope fscope(CPUID);
|
__ cpuid();
|
}
|
- supported_ = 0;
|
+ v8context->assembler_data_->supported_ = 0;
|
|
// Move the result from ecx:edx to edx:eax and make sure to mark the
|
// CPUID feature as supported.
|
@@ -114,7 +112,7 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
Handle<Code>::null());
|
@@ -123,7 +121,7 @@
|
Code::cast(code), "CpuFeatures::Probe"));
|
typedef uint64_t (*F0)();
|
F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry());
|
- supported_ = probe();
|
+ v8context->assembler_data_->supported_ = probe();
|
}
|
|
|
@@ -270,10 +268,8 @@
|
static void InitCoverageLog();
|
#endif
|
|
-// spare_buffer_
|
-byte* Assembler::spare_buffer_ = NULL;
|
-
|
Assembler::Assembler(void* buffer, int buffer_size) {
|
+ byte*& spare_buffer_ = v8_context()->assembler_data_->spare_buffer_;
|
if (buffer == NULL) {
|
// do our own buffer management
|
if (buffer_size <= kMinimalBufferSize) {
|
@@ -326,6 +322,7 @@
|
|
Assembler::~Assembler() {
|
if (own_buffer_) {
|
+ byte*& spare_buffer_ = v8_context()->assembler_data_->spare_buffer_;
|
if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
|
spare_buffer_ = buffer_;
|
} else {
|
@@ -347,7 +344,7 @@
|
desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
|
desc->origin = this;
|
|
- Counters::reloc_info_size.Increment(desc->reloc_size);
|
+ v8_context()->counters_.reloc_info_size.Increment(desc->reloc_size);
|
}
|
|
|
@@ -2125,8 +2122,8 @@
|
// Some internal data structures overflow for very large buffers,
|
// they must ensure that kMaximalBufferSize is not too large.
|
if ((desc.buffer_size > kMaximalBufferSize) ||
|
- (desc.buffer_size > Heap::MaxOldGenerationSize())) {
|
- V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
|
+ (desc.buffer_size > v8_context()->heap_.MaxOldGenerationSize())) {
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Assembler::GrowBuffer");
|
}
|
|
// setup new buffer
|
@@ -2148,6 +2145,7 @@
|
reloc_info_writer.pos(), desc.reloc_size);
|
|
// switch buffers
|
+ byte*& spare_buffer_ = v8_context()->assembler_data_->spare_buffer_;
|
if (spare_buffer_ == NULL && buffer_size_ == kMinimalBufferSize) {
|
spare_buffer_ = buffer_;
|
} else {
|
@@ -2254,9 +2252,16 @@
|
reloc_info_writer.Write(&rinfo);
|
}
|
|
+void Assembler::Setup() {
|
+ v8_context()->assembler_data_ = new AssemblerData();
|
+}
|
|
+void Assembler::TearDown() {
|
+ delete v8_context()->assembler_data_;
|
+}
|
+
|
#ifdef GENERATED_CODE_COVERAGE
|
-static FILE* coverage_log = NULL;
|
+static FILE* coverage_log = NULL; ///static
|
|
|
static void InitCoverageLog() {
|
Index: src/regexp-stack.h
|
===================================================================
|
--- src/regexp-stack.h (revision 2977)
|
+++ src/regexp-stack.h Sat Nov 14 01:42:57 MSK 2009
|
@@ -31,6 +31,13 @@
|
namespace v8 {
|
namespace internal {
|
|
+class RegExpStackControl {
|
+public:
|
+ // Create and delete an instance to control the life-time of a growing stack.
|
+ RegExpStackControl(); // Initializes the stack memory area if necessary.
|
+ ~RegExpStackControl(); // Releases the stack if it has grown.
|
+};
|
+
|
// Maintains a per-v8thread stack area that can be used by irregexp
|
// implementation for its backtracking stack.
|
// Since there is only one stack area, the Irregexp implementation is not
|
@@ -43,36 +50,37 @@
|
// check.
|
static const int kStackLimitSlack = 32;
|
|
- // Create and delete an instance to control the life-time of a growing stack.
|
- RegExpStack(); // Initializes the stack memory area if necessary.
|
- ~RegExpStack(); // Releases the stack if it has grown.
|
-
|
// Gives the top of the memory used as stack.
|
- static Address stack_base() {
|
+ Address stack_base() {
|
ASSERT(thread_local_.memory_size_ != 0);
|
return thread_local_.memory_ + thread_local_.memory_size_;
|
}
|
|
// The total size of the memory allocated for the stack.
|
- static size_t stack_capacity() { return thread_local_.memory_size_; }
|
+ size_t stack_capacity() { return thread_local_.memory_size_; }
|
|
// If the stack pointer gets below the limit, we should react and
|
// either grow the stack or report an out-of-stack exception.
|
// There is only a limited number of locations below the stack limit,
|
// so users of the stack should check the stack limit during any
|
// sequence of pushes longer that this.
|
- static Address* limit_address() { return &(thread_local_.limit_); }
|
+ Address* limit_address() { return &(thread_local_.limit_); }
|
|
// Ensures that there is a memory area with at least the specified size.
|
// If passing zero, the default/minimum size buffer is allocated.
|
- static Address EnsureCapacity(size_t size);
|
+ Address EnsureCapacity(size_t size);
|
|
// Thread local archiving.
|
- static size_t ArchiveSpacePerThread() { return sizeof(thread_local_); }
|
- static char* ArchiveStack(char* to);
|
- static char* RestoreStack(char* from);
|
- static void FreeThreadResources() { thread_local_.Free(); }
|
+ size_t ArchiveSpacePerThread() { return sizeof(thread_local_); }
|
+ char* ArchiveStack(char* to);
|
+ char* RestoreStack(char* from);
|
+ void FreeThreadResources() { thread_local_.Free(); }
|
|
+ unibrow::Mapping<unibrow::Ecma262Canonicalize> canonicalize_;
|
+ unibrow::Mapping<unibrow::Ecma262UnCanonicalize> uncanonicalize_;
|
+ unibrow::Mapping<unibrow::CanonicalizationRange> canonrange_;
|
+ int* static_offsets_vector_;
|
+
|
private:
|
// Artificial limit used when no memory has been allocated.
|
static const uintptr_t kMemoryTop = static_cast<uintptr_t>(-1);
|
@@ -85,8 +93,9 @@
|
|
// Structure holding the allocated memory, size and limit.
|
struct ThreadLocal {
|
- ThreadLocal()
|
+ ThreadLocal(RegExpStack* stack)
|
: memory_(NULL),
|
+ stack_(stack),
|
memory_size_(0),
|
limit_(reinterpret_cast<Address>(kMemoryTop)) {}
|
// If memory_size_ > 0 then memory_ must be non-NULL.
|
@@ -94,16 +103,24 @@
|
size_t memory_size_;
|
Address limit_;
|
void Free();
|
+ private:
|
+ RegExpStack* stack_;
|
};
|
|
// Resets the buffer if it has grown beyond the default/minimum size.
|
// After this, the buffer is either the default size, or it is empty, so
|
// you have to call EnsureCapacity before using it again.
|
- static void Reset();
|
+ void Reset();
|
|
- static ThreadLocal thread_local_;
|
-};
|
+ ThreadLocal thread_local_;
|
|
+ RegExpStack();
|
+ DISALLOW_COPY_AND_ASSIGN(RegExpStack);
|
+
|
+ friend struct ThreadLocal;
|
+ friend class V8Context;
|
+ friend class RegExpStackControl;
|
+};
|
}} // namespace v8::internal
|
|
#endif // V8_REGEXP_STACK_H_
|
Index: src/heap-inl.h
|
===================================================================
|
--- src/heap-inl.h (revision 2748)
|
+++ src/heap-inl.h Sun Nov 15 12:30:08 MSK 2009
|
@@ -55,14 +55,15 @@
|
ASSERT(space != NEW_SPACE ||
|
retry_space == OLD_POINTER_SPACE ||
|
retry_space == OLD_DATA_SPACE);
|
+ V8Context* const v8context = v8_context();
|
#ifdef DEBUG
|
if (FLAG_gc_interval >= 0 &&
|
!disallow_allocation_failure_ &&
|
- Heap::allocation_timeout_-- <= 0) {
|
+ v8context->heap_.allocation_timeout_-- <= 0) {
|
return Failure::RetryAfterGC(size_in_bytes, space);
|
}
|
- Counters::objs_since_last_full.Increment();
|
- Counters::objs_since_last_young.Increment();
|
+ v8context->counters_.objs_since_last_full.Increment();
|
+ v8context->counters_.objs_since_last_young.Increment();
|
#endif
|
Object* result;
|
if (NEW_SPACE == space) {
|
@@ -111,8 +112,8 @@
|
|
Object* Heap::AllocateRawMap() {
|
#ifdef DEBUG
|
- Counters::objs_since_last_full.Increment();
|
- Counters::objs_since_last_young.Increment();
|
+ v8_context()->counters_.objs_since_last_full.Increment();
|
+ v8_context()->counters_.objs_since_last_young.Increment();
|
#endif
|
Object* result = map_space_->AllocateRaw(Map::kSize);
|
if (result->IsFailure()) old_gen_exhausted_ = true;
|
@@ -122,8 +123,8 @@
|
|
Object* Heap::AllocateRawCell() {
|
#ifdef DEBUG
|
- Counters::objs_since_last_full.Increment();
|
- Counters::objs_since_last_young.Increment();
|
+ v8_context()->counters_.objs_since_last_full.Increment();
|
+ v8_context()->counters_.objs_since_last_young.Increment();
|
#endif
|
Object* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
|
if (result->IsFailure()) old_gen_exhausted_ = true;
|
@@ -257,7 +258,7 @@
|
|
|
#define GC_GREEDY_CHECK() \
|
- ASSERT(!FLAG_gc_greedy || v8::internal::Heap::GarbageCollectionGreedyCheck())
|
+ ASSERT(!FLAG_gc_greedy || v8_context()->heap_.GarbageCollectionGreedyCheck())
|
|
|
// Calls the FUNCTION_CALL function and retries it up to three times
|
@@ -271,21 +272,22 @@
|
do { \
|
GC_GREEDY_CHECK(); \
|
Object* __object__ = FUNCTION_CALL; \
|
+ V8Context* const v8context = v8_context(); \
|
if (!__object__->IsFailure()) RETURN_VALUE; \
|
if (__object__->IsOutOfMemoryFailure()) { \
|
- v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \
|
+ v8context->v8_.FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \
|
} \
|
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
|
- Heap::CollectGarbage(Failure::cast(__object__)->requested(), \
|
+ v8context->heap_.CollectGarbage(Failure::cast(__object__)->requested(), \
|
Failure::cast(__object__)->allocation_space()); \
|
__object__ = FUNCTION_CALL; \
|
if (!__object__->IsFailure()) RETURN_VALUE; \
|
if (__object__->IsOutOfMemoryFailure()) { \
|
- v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \
|
+ v8context->v8_.FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \
|
} \
|
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
|
- Counters::gc_last_resort_from_handles.Increment(); \
|
- Heap::CollectAllGarbage(false); \
|
+ v8context->counters_.gc_last_resort_from_handles.Increment(); \
|
+ v8context->heap_.CollectAllGarbage(false); \
|
{ \
|
AlwaysAllocateScope __scope__; \
|
__object__ = FUNCTION_CALL; \
|
@@ -294,7 +296,7 @@
|
if (__object__->IsOutOfMemoryFailure() || \
|
__object__->IsRetryAfterGC()) { \
|
/* TODO(1181417): Fix this. */ \
|
- v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \
|
+ v8context->v8_.FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \
|
} \
|
RETURN_EMPTY; \
|
} while (false)
|
Index: src/scopes.cc
|
===================================================================
|
--- src/scopes.cc (revision 3225)
|
+++ src/scopes.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -49,7 +49,7 @@
|
};
|
|
|
-static ZoneAllocator LocalsMapAllocator;
|
+static ZoneAllocator LocalsMapAllocator; ///static
|
|
|
// ----------------------------------------------------------------------------
|
Index: src/parser.cc
|
===================================================================
|
--- src/parser.cc (revision 3198)
|
+++ src/parser.cc Sat Nov 14 01:43:01 MSK 2009
|
@@ -1034,7 +1034,7 @@
|
Statement* AstBuildingParserFactory::EmptyStatement() {
|
// Use a statically allocated empty statement singleton to avoid
|
// allocating lots and lots of empty statements.
|
- static v8::internal::EmptyStatement empty;
|
+ static v8::internal::EmptyStatement empty;///static
|
return ∅
|
}
|
|
@@ -1173,7 +1173,7 @@
|
|
bool Parser::PreParseProgram(Handle<String> source,
|
unibrow::CharacterStream* stream) {
|
- HistogramTimerScope timer(&Counters::pre_parse);
|
+ HistogramTimerScope timer(&v8_context()->counters_.pre_parse);
|
AssertNoZoneAllocation assert_no_zone_allocation;
|
AssertNoAllocation assert_no_allocation;
|
NoHandleAllocation no_handle_allocation;
|
@@ -1195,8 +1195,8 @@
|
bool in_global_context) {
|
CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
|
|
- HistogramTimerScope timer(&Counters::parse);
|
- Counters::total_parse_size.Increment(source->length());
|
+ HistogramTimerScope timer(&v8_context()->counters_.parse);
|
+ v8_context()->counters_.total_parse_size.Increment(source->length());
|
|
// Initialize parser state.
|
source->TryFlattenIfNotFlat();
|
@@ -1235,7 +1235,7 @@
|
source->length(),
|
false));
|
} else if (scanner().stack_overflow()) {
|
- Top::StackOverflow();
|
+ v8_context()->top_.StackOverflow();
|
}
|
}
|
|
@@ -1254,9 +1254,9 @@
|
int start_position,
|
bool is_expression) {
|
CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
|
- HistogramTimerScope timer(&Counters::parse_lazy);
|
+ HistogramTimerScope timer(&v8_context()->counters_.parse_lazy);
|
source->TryFlattenIfNotFlat();
|
- Counters::total_parse_size.Increment(source->length());
|
+ v8_context()->counters_.total_parse_size.Increment(source->length());
|
SafeStringInputBuffer buffer(source.location());
|
|
// Initialize parser state.
|
@@ -1290,7 +1290,7 @@
|
// If there was a stack overflow we have to get rid of AST and it is
|
// not safe to do before scope has been deleted.
|
if (result == NULL) {
|
- Top::StackOverflow();
|
+ v8_context()->top_.StackOverflow();
|
zone_scope.DeleteOnExit();
|
}
|
return result;
|
@@ -1313,7 +1313,7 @@
|
SetElement(array, i, Factory::NewStringFromUtf8(CStrVector(args[i])));
|
}
|
Handle<Object> result = Factory::NewSyntaxError(type, array);
|
- Top::Throw(*result, &location);
|
+ v8_context()->top_.Throw(*result, &location);
|
}
|
|
|
@@ -3630,7 +3630,7 @@
|
if (is_lazily_compiled && pre_data() != NULL) {
|
FunctionEntry entry = pre_data()->GetFunctionEnd(start_pos);
|
int end_pos = entry.end_pos();
|
- Counters::total_preparse_skipped.Increment(end_pos - start_pos);
|
+ v8_context()->counters_.total_preparse_skipped.Increment(end_pos - start_pos);
|
scanner_.SeekForward(end_pos);
|
materialized_literal_count = entry.literal_count();
|
expected_property_count = entry.property_count();
|
@@ -4326,7 +4326,7 @@
|
};
|
|
|
-static unibrow::Predicate<SourceCharacter> source_character;
|
+static unibrow::Predicate<SourceCharacter> source_character;///static
|
|
|
static inline bool IsSourceCharacter(uc32 c) {
|
@@ -4701,7 +4701,7 @@
|
// MakeAST() is just a wrapper for the corresponding Parser calls
|
// so we don't have to expose the entire Parser class in the .h file.
|
|
-static bool always_allow_natives_syntax = false;
|
+static bool always_allow_natives_syntax = false; ///static
|
|
|
ParserMessage::~ParserMessage() {
|
@@ -4733,7 +4733,7 @@
|
bool allow_natives_syntax =
|
always_allow_natives_syntax ||
|
FLAG_allow_natives_syntax ||
|
- Bootstrapper::IsActive();
|
+ v8_context()->bootstrapper_.IsActive();
|
PreParser parser(no_script, allow_natives_syntax, extension);
|
if (!parser.PreParseProgram(source, stream)) return NULL;
|
// The list owns the backing store so we need to clone the vector.
|
@@ -4773,7 +4773,7 @@
|
bool allow_natives_syntax =
|
always_allow_natives_syntax ||
|
FLAG_allow_natives_syntax ||
|
- Bootstrapper::IsActive();
|
+ v8_context()->bootstrapper_.IsActive();
|
AstBuildingParser parser(script, allow_natives_syntax, extension, pre_data);
|
if (pre_data != NULL && pre_data->has_error()) {
|
Scanner::Location loc = pre_data->MessageLocation();
|
Index: src/dateparser.h
|
===================================================================
|
--- src/dateparser.h (revision 2038)
|
+++ src/dateparser.h Sat Nov 14 01:43:02 MSK 2009
|
@@ -101,7 +101,7 @@
|
bool Skip(uint32_t c) { return ch_ == c ? (Next(), true) : false; }
|
|
bool SkipWhiteSpace() {
|
- return Scanner::kIsWhiteSpace.get(ch_) ? (Next(), true) : false;
|
+ return v8_context()->scanner_.kIsWhiteSpace.get(ch_) ? (Next(), true) : false;
|
}
|
|
bool SkipParentheses() {
|
Index: src/objects-debug.cc
|
===================================================================
|
--- src/objects-debug.cc (revision 3209)
|
+++ src/objects-debug.cc Sat Nov 14 01:43:09 MSK 2009
|
@@ -298,7 +298,7 @@
|
|
void HeapObject::VerifyHeapPointer(Object* p) {
|
ASSERT(p->IsHeapObject());
|
- ASSERT(Heap::Contains(HeapObject::cast(p)));
|
+ ASSERT(v8_context()->heap_.Contains(HeapObject::cast(p)));
|
}
|
|
|
@@ -672,10 +672,10 @@
|
|
|
void Map::MapVerify() {
|
- ASSERT(!Heap::InNewSpace(this));
|
+ ASSERT(!v8_context()->heap_.InNewSpace(this));
|
ASSERT(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
|
ASSERT(kPointerSize <= instance_size()
|
- && instance_size() < Heap::Capacity());
|
+ && instance_size() < v8_context()->heap_.Capacity());
|
VerifyHeapPointer(prototype());
|
VerifyHeapPointer(instance_descriptors());
|
}
|
@@ -739,7 +739,7 @@
|
CHECK(IsString());
|
CHECK(length() >= 0 && length() <= Smi::kMaxValue);
|
if (IsSymbol()) {
|
- CHECK(!Heap::InNewSpace(this));
|
+ CHECK(!v8_context()->heap_.InNewSpace(this));
|
}
|
}
|
|
@@ -877,7 +877,7 @@
|
VerifyHeapPointer(to_string());
|
Object* number = to_number();
|
if (number->IsHeapObject()) {
|
- ASSERT(number == Heap::nan_value());
|
+ ASSERT(number == v8_context()->heap_.nan_value());
|
} else {
|
ASSERT(number->IsSmi());
|
int value = Smi::cast(number)->value();
|
@@ -1240,8 +1240,9 @@
|
int holes = 0;
|
FixedArray* e = FixedArray::cast(elements());
|
int len = e->length();
|
+ Heap& heap = v8_context()->heap_;
|
for (int i = 0; i < len; i++) {
|
- if (e->get(i) == Heap::the_hole_value()) holes++;
|
+ if (e->get(i) == heap.the_hole_value()) holes++;
|
}
|
info->number_of_fast_used_elements_ += len - holes;
|
info->number_of_fast_unused_elements_ += holes;
|
Index: src/codegen.h
|
===================================================================
|
--- src/codegen.h (revision 3218)
|
+++ src/codegen.h Sat Nov 14 01:43:18 MSK 2009
|
@@ -100,21 +100,22 @@
|
class CodeGeneratorScope BASE_EMBEDDED {
|
public:
|
explicit CodeGeneratorScope(CodeGenerator* cgen) {
|
- previous_ = top_;
|
- top_ = cgen;
|
+ CodeGeneratorData& code_generator_data = v8_context()->code_generator_data_;
|
+ previous_ = code_generator_data.top_;
|
+ code_generator_data.top_ = cgen;
|
}
|
|
~CodeGeneratorScope() {
|
- top_ = previous_;
|
+ v8_context()->code_generator_data_.top_ = previous_;
|
}
|
|
static CodeGenerator* Current() {
|
- ASSERT(top_ != NULL);
|
- return top_;
|
+ CodeGeneratorData& code_generator_data = v8_context()->code_generator_data_;
|
+ ASSERT(code_generator_data.top_ != NULL);
|
+ return code_generator_data.top_;
|
}
|
|
private:
|
- static CodeGenerator* top_;
|
CodeGenerator* previous_;
|
};
|
|
Index: src/assembler.h
|
===================================================================
|
--- src/assembler.h (revision 3229)
|
+++ src/assembler.h Sat Nov 14 01:42:53 MSK 2009
|
@@ -363,6 +363,12 @@
|
|
typedef void* ExternalReferenceRedirector(void* original, bool fp_return);
|
|
+class BasicAssemblerData {
|
+public:
|
+ ExternalReferenceRedirector* redirector_;
|
+protected:
|
+ BasicAssemblerData():redirector_(NULL) {}
|
+};
|
|
// An ExternalReference represents a C++ address used in the generated
|
// code. All references to C++ functions and variables must be encapsulated in
|
@@ -459,24 +465,28 @@
|
// This lets you register a function that rewrites all external references.
|
// Used by the ARM simulator to catch calls to external references.
|
static void set_redirector(ExternalReferenceRedirector* redirector) {
|
- ASSERT(redirector_ == NULL); // We can't stack them.
|
- redirector_ = redirector;
|
+ ExternalReferenceRedirector* & current_redirector =
|
+ reinterpret_cast<BasicAssemblerData*>(v8_context()->assembler_data_)->redirector_;
|
+ ASSERT(current_redirector == NULL); // We can't stack them.
|
+ current_redirector = redirector;
|
}
|
|
private:
|
explicit ExternalReference(void* address)
|
: address_(address) {}
|
|
- static ExternalReferenceRedirector* redirector_;
|
-
|
static void* Redirect(void* address, bool fp_return = false) {
|
- if (redirector_ == NULL) return address;
|
- return (*redirector_)(address, fp_return);
|
+ ExternalReferenceRedirector* redirector =
|
+ reinterpret_cast<BasicAssemblerData*>(v8_context()->assembler_data_)->redirector_;
|
+ if (redirector == NULL) return address;
|
+ return (*redirector)(address, fp_return);
|
}
|
|
static void* Redirect(Address address_arg, bool fp_return = false) {
|
void* address = reinterpret_cast<void*>(address_arg);
|
- return redirector_ == NULL ? address : (*redirector_)(address, fp_return);
|
+ ExternalReferenceRedirector* redirector =
|
+ reinterpret_cast<BasicAssemblerData*>(v8_context()->assembler_data_)->redirector_;
|
+ return redirector == NULL ? address : (*redirector)(address, fp_return);
|
}
|
|
void* address_;
|
Index: src/ia32/assembler-ia32.h
|
===================================================================
|
--- src/ia32/assembler-ia32.h (revision 3184)
|
+++ src/ia32/assembler-ia32.h Sat Nov 14 01:42:56 MSK 2009
|
@@ -40,6 +40,18 @@
|
namespace v8 {
|
namespace internal {
|
|
+class AssemblerData:public BasicAssemblerData {
|
+public:
|
+ // A previously allocated buffer of kMinimalBufferSize bytes, or NULL.
|
+ byte* spare_buffer_;
|
+ uint64_t supported_;
|
+ uint64_t enabled_;
|
+private:
|
+ AssemblerData():BasicAssemblerData(),spare_buffer_(NULL),supported_(0), enabled_(0) {}
|
+ ~AssemblerData() { delete spare_buffer_; }
|
+ friend class Assembler;
|
+};
|
+
|
// CPU Registers.
|
//
|
// 1) We would prefer to use an enum, but enum values are assignment-
|
@@ -371,11 +383,11 @@
|
if (f == SSE3 && !FLAG_enable_sse3) return false;
|
if (f == CMOV && !FLAG_enable_cmov) return false;
|
if (f == RDTSC && !FLAG_enable_rdtsc) return false;
|
- return (supported_ & (static_cast<uint64_t>(1) << f)) != 0;
|
+ return (v8_context()->assembler_data_->supported_ & (static_cast<uint64_t>(1) << f)) != 0;
|
}
|
// Check whether a feature is currently enabled.
|
static bool IsEnabled(Feature f) {
|
- return (enabled_ & (static_cast<uint64_t>(1) << f)) != 0;
|
+ return (v8_context()->assembler_data_->enabled_ & (static_cast<uint64_t>(1) << f)) != 0;
|
}
|
// Enable a specified feature within a scope.
|
class Scope BASE_EMBEDDED {
|
@@ -383,10 +395,11 @@
|
public:
|
explicit Scope(Feature f) {
|
ASSERT(CpuFeatures::IsSupported(f));
|
- old_enabled_ = CpuFeatures::enabled_;
|
- CpuFeatures::enabled_ |= (static_cast<uint64_t>(1) << f);
|
+ uint64_t & enabled = v8_context()->assembler_data_->enabled_;
|
+ old_enabled_ = enabled;
|
+ enabled |= (static_cast<uint64_t>(1) << f);
|
}
|
- ~Scope() { CpuFeatures::enabled_ = old_enabled_; }
|
+ ~Scope() { v8_context()->assembler_data_->enabled_ = old_enabled_; }
|
private:
|
uint64_t old_enabled_;
|
#else
|
@@ -394,9 +407,6 @@
|
explicit Scope(Feature f) {}
|
#endif
|
};
|
- private:
|
- static uint64_t supported_;
|
- static uint64_t enabled_;
|
};
|
|
|
@@ -851,8 +861,6 @@
|
int buffer_size_;
|
// True if the assembler owns the buffer, false if buffer is external.
|
bool own_buffer_;
|
- // A previously allocated buffer of kMinimalBufferSize bytes, or NULL.
|
- static byte* spare_buffer_;
|
|
// code generation
|
byte* pc_; // the program counter; moves forward
|
@@ -866,6 +874,10 @@
|
int current_position_;
|
int written_statement_position_;
|
int written_position_;
|
+
|
+ static void Setup();
|
+ static void TearDown();
|
+ friend class V8Context;
|
};
|
|
|
Index: src/property.h
|
===================================================================
|
--- src/property.h (revision 2428)
|
+++ src/property.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -47,7 +47,7 @@
|
|
Object* KeyToSymbol() {
|
if (!StringShape(key_).IsSymbol()) {
|
- Object* result = Heap::LookupSymbol(key_);
|
+ Object* result = v8_context()->heap_.LookupSymbol(key_);
|
if (result->IsFailure()) return result;
|
key_ = String::cast(result);
|
}
|
@@ -295,7 +295,7 @@
|
Object* GetCallbackObject() {
|
if (lookup_type_ == CONSTANT_TYPE) {
|
// For now we only have the __proto__ as constant type.
|
- return Heap::prototype_accessors();
|
+ return v8_context()->heap_.prototype_accessors();
|
}
|
return GetValue();
|
}
|
Index: src/scopeinfo.h
|
===================================================================
|
--- src/scopeinfo.h (revision 2230)
|
+++ src/scopeinfo.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -171,25 +171,25 @@
|
public:
|
// Lookup context slot index for (code, name).
|
// If absent, kNotFound is returned.
|
- static int Lookup(Code* code,
|
+ int Lookup(Code* code,
|
String* name,
|
Variable::Mode* mode);
|
|
// Update an element in the cache.
|
- static void Update(Code* code,
|
+ void Update(Code* code,
|
String* name,
|
Variable::Mode mode,
|
int slot_index);
|
|
// Clear the cache.
|
- static void Clear();
|
+ void Clear();
|
|
static const int kNotFound = -2;
|
private:
|
inline static int Hash(Code* code, String* name);
|
|
#ifdef DEBUG
|
- static void ValidateEntry(Code* code,
|
+ void ValidateEntry(Code* code,
|
String* name,
|
Variable::Mode mode,
|
int slot_index);
|
@@ -226,8 +226,12 @@
|
uint32_t value_;
|
};
|
|
- static Key keys_[kLength];
|
- static uint32_t values_[kLength];
|
+ Key keys_[kLength];
|
+ uint32_t values_[kLength];
|
+
|
+ ContextSlotCache() {}
|
+ DISALLOW_COPY_AND_ASSIGN(ContextSlotCache);
|
+ friend class V8Context;
|
};
|
|
|
Index: src/ia32/macro-assembler-ia32.cc
|
===================================================================
|
--- src/ia32/macro-assembler-ia32.cc (revision 3237)
|
+++ src/ia32/macro-assembler-ia32.cc Sat Nov 14 01:43:24 MSK 2009
|
@@ -44,7 +44,7 @@
|
unresolved_(0),
|
generating_stub_(false),
|
allow_stub_calls_(true),
|
- code_object_(Heap::undefined_value()) {
|
+ code_object_(v8_context()->heap_.undefined_value()) {
|
}
|
|
|
@@ -160,14 +160,14 @@
|
if (Serializer::enabled()) {
|
// Can't do arithmetic on external references if it might get serialized.
|
mov(value, Operand(object));
|
- and_(value, Heap::NewSpaceMask());
|
+ and_(value, v8_context()->heap_.NewSpaceMask());
|
cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
|
j(equal, &done);
|
} else {
|
int32_t new_space_start = reinterpret_cast<int32_t>(
|
ExternalReference::new_space_start().address());
|
lea(value, Operand(object, -new_space_start));
|
- and_(value, Heap::NewSpaceMask());
|
+ and_(value, v8_context()->heap_.NewSpaceMask());
|
j(equal, &done);
|
}
|
|
@@ -519,7 +519,7 @@
|
ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
|
|
JSObject* prototype = JSObject::cast(object->GetPrototype());
|
- if (Heap::InNewSpace(prototype)) {
|
+ if (v8_context()->heap_.InNewSpace(prototype)) {
|
// Get the map of the current object.
|
mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
|
cmp(Operand(scratch), Immediate(Handle<Map>(object->map())));
|
@@ -1053,7 +1053,7 @@
|
|
if (!definitely_matches) {
|
Handle<Code> adaptor =
|
- Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
|
+ Handle<Code>(v8_context()->builtins_.builtin(Builtins::ArgumentsAdaptorTrampoline));
|
if (!code_constant.is_null()) {
|
mov(edx, Immediate(code_constant));
|
add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
|
@@ -1181,7 +1181,7 @@
|
mov(edi, FieldOperand(edx, builtins_offset));
|
|
|
- return Builtins::GetCode(id, resolved);
|
+ return v8_context()->builtins_.GetCode(id, resolved);
|
}
|
|
|
Index: src/compilation-cache.h
|
===================================================================
|
--- src/compilation-cache.h (revision 2233)
|
+++ src/compilation-cache.h Sat Nov 14 01:43:19 MSK 2009
|
@@ -40,7 +40,7 @@
|
// Finds the script function boilerplate for a source
|
// string. Returns an empty handle if the cache doesn't contain a
|
// script for the given source string with the right origin.
|
- static Handle<JSFunction> LookupScript(Handle<String> source,
|
+ Handle<JSFunction> LookupScript(Handle<String> source,
|
Handle<Object> name,
|
int line_offset,
|
int column_offset);
|
@@ -48,48 +48,57 @@
|
// Finds the function boilerplate for a source string for eval in a
|
// given context. Returns an empty handle if the cache doesn't
|
// contain a script for the given source string.
|
- static Handle<JSFunction> LookupEval(Handle<String> source,
|
+ Handle<JSFunction> LookupEval(Handle<String> source,
|
Handle<Context> context,
|
bool is_global);
|
|
// Returns the regexp data associated with the given regexp if it
|
// is in cache, otherwise an empty handle.
|
- static Handle<FixedArray> LookupRegExp(Handle<String> source,
|
+ Handle<FixedArray> LookupRegExp(Handle<String> source,
|
JSRegExp::Flags flags);
|
|
// Associate the (source, kind) pair to the boilerplate. This may
|
// overwrite an existing mapping.
|
- static void PutScript(Handle<String> source,
|
+ void PutScript(Handle<String> source,
|
Handle<JSFunction> boilerplate);
|
|
// Associate the (source, context->closure()->shared(), kind) triple
|
// with the boilerplate. This may overwrite an existing mapping.
|
- static void PutEval(Handle<String> source,
|
+ void PutEval(Handle<String> source,
|
Handle<Context> context,
|
bool is_global,
|
Handle<JSFunction> boilerplate);
|
|
// Associate the (source, flags) pair to the given regexp data.
|
// This may overwrite an existing mapping.
|
- static void PutRegExp(Handle<String> source,
|
+ void PutRegExp(Handle<String> source,
|
JSRegExp::Flags flags,
|
Handle<FixedArray> data);
|
|
// Clear the cache - also used to initialize the cache at startup.
|
- static void Clear();
|
+ void Clear();
|
|
// GC support.
|
- static void Iterate(ObjectVisitor* v);
|
+ void Iterate(ObjectVisitor* v);
|
|
// Notify the cache that a mark-sweep garbage collection is about to
|
// take place. This is used to retire entries from the cache to
|
// avoid keeping them alive too long without using them.
|
- static void MarkCompactPrologue();
|
+ void MarkCompactPrologue();
|
|
// Enable/disable compilation cache. Used by debugger to disable compilation
|
// cache during debugging to make sure new scripts are always compiled.
|
- static void Enable();
|
- static void Disable();
|
+ void Enable();
|
+ void Disable();
|
+
|
+private:
|
+ inline bool IsEnabled();
|
+ CompilationCache();
|
+ ~CompilationCache();
|
+ DISALLOW_COPY_AND_ASSIGN(CompilationCache);
|
+ struct CompilationCacheImpl;
|
+ CompilationCacheImpl * const compilation_cache_impl;
|
+ friend class V8Context;
|
};
|
|
|
Index: src/v8.cc
|
===================================================================
|
--- src/v8.cc (revision 3229)
|
+++ src/v8.cc Sat Nov 14 01:42:55 MSK 2009
|
@@ -37,10 +37,15 @@
|
namespace v8 {
|
namespace internal {
|
|
-bool V8::is_running_ = false;
|
-bool V8::has_been_setup_ = false;
|
-bool V8::has_been_disposed_ = false;
|
-bool V8::has_fatal_error_ = false;
|
+V8::V8():
|
+ is_running_(false),
|
+ has_been_setup_(false),
|
+ has_been_disposed_(false),
|
+ has_fatal_error_(false),
|
+ exception_behavior(NULL),
|
+ active_(false)
|
+{
|
+}
|
|
bool V8::Initialize(GenericDeserializer *des) {
|
bool create_heap_objects = des == NULL;
|
@@ -57,7 +62,8 @@
|
#endif
|
|
// Enable logging before setting up the heap
|
- Logger::Setup();
|
+ v8::V8Context * const v8context = v8_context();
|
+ v8context->logger_.Setup();
|
if (des) des->GetLog();
|
|
// Setup the platform OS support.
|
@@ -73,19 +79,19 @@
|
// will ensure this too, but we don't have to use lockers if we are only
|
// using one thread.
|
ExecutionAccess lock;
|
- StackGuard::InitThread(lock);
|
+ v8context->stack_guard_.InitThread(lock);
|
}
|
|
// Setup the object heap
|
- ASSERT(!Heap::HasBeenSetup());
|
- if (!Heap::Setup(create_heap_objects)) {
|
+ ASSERT(!v8context->heap_.HasBeenSetup());
|
+ if (!v8context->heap_.Setup(create_heap_objects)) {
|
SetFatalError();
|
return false;
|
}
|
|
- Bootstrapper::Initialize(create_heap_objects);
|
- Builtins::Setup(create_heap_objects);
|
- Top::Initialize();
|
+ v8context->bootstrapper_.Initialize(create_heap_objects);
|
+ v8context->builtins_.Setup(create_heap_objects);
|
+ v8context->top_.Initialize();
|
|
if (FLAG_preemption) {
|
v8::Locker locker;
|
@@ -93,19 +99,19 @@
|
}
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- Debug::Setup(create_heap_objects);
|
+ v8context->debug_.Setup(create_heap_objects);
|
#endif
|
- StubCache::Initialize(create_heap_objects);
|
+ v8context->stub_cache_.Initialize(create_heap_objects);
|
|
// If we are deserializing, read the state into the now-empty heap.
|
if (des != NULL) {
|
des->Deserialize();
|
- StubCache::Clear();
|
+ v8context->stub_cache_.Clear();
|
}
|
|
// Deserializing may put strange things in the root array's copy of the
|
// stack guard.
|
- Heap::SetStackLimits();
|
+ v8context->heap_.SetStackLimits();
|
|
// Setup the CPU support. Must be done after heap setup and after
|
// any deserialization because we have to have the initial heap
|
@@ -129,18 +135,20 @@
|
|
OProfileAgent::TearDown();
|
|
+ v8::V8Context * const v8context = v8_context();
|
+
|
if (FLAG_preemption) {
|
v8::Locker locker;
|
v8::Locker::StopPreemption();
|
}
|
|
- Builtins::TearDown();
|
- Bootstrapper::TearDown();
|
+ v8context->builtins_.TearDown();
|
+ v8context->bootstrapper_.TearDown();
|
|
- Top::TearDown();
|
+ v8context->top_.TearDown();
|
|
- Heap::TearDown();
|
- Logger::TearDown();
|
+ v8context->heap_.TearDown();
|
+ v8context->logger_.TearDown();
|
|
is_running_ = false;
|
has_been_disposed_ = true;
|
@@ -149,7 +157,7 @@
|
|
uint32_t V8::Random() {
|
// Random number generator using George Marsaglia's MWC algorithm.
|
- static uint32_t hi = 0;
|
+ static uint32_t hi = 0; // TODO(multi-context): still static — presumably left unmigrated to V8Context; confirm
|
static uint32_t lo = 0;
|
|
// Initialize seed using the system random(). If one of the seeds
|
@@ -172,7 +180,7 @@
|
if (!FLAG_use_idle_notification) return true;
|
|
// Tell the heap that it may want to adjust.
|
- return Heap::IdleNotification();
|
+ return v8_context()->heap_.IdleNotification();
|
}
|
|
static const uint32_t kRandomPositiveSmiMax = 0x3fffffff;
|
Index: src/factory.h
|
===================================================================
|
--- src/factory.h (revision 3117)
|
+++ src/factory.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -316,7 +316,7 @@
|
#define ROOT_ACCESSOR(type, name, camel_name) \
|
static inline Handle<type> name() { \
|
return Handle<type>(bit_cast<type**, Object**>( \
|
- &Heap::roots_[Heap::k##camel_name##RootIndex])); \
|
+ &v8_context()->heap_.roots_[Heap::k##camel_name##RootIndex])); \
|
}
|
ROOT_LIST(ROOT_ACCESSOR)
|
#undef ROOT_ACCESSOR_ACCESSOR
|
@@ -324,13 +324,13 @@
|
#define SYMBOL_ACCESSOR(name, str) \
|
static inline Handle<String> name() { \
|
return Handle<String>(bit_cast<String**, Object**>( \
|
- &Heap::roots_[Heap::k##name##RootIndex])); \
|
+ &v8_context()->heap_.roots_[Heap::k##name##RootIndex])); \
|
}
|
SYMBOL_LIST(SYMBOL_ACCESSOR)
|
#undef SYMBOL_ACCESSOR
|
|
static Handle<String> hidden_symbol() {
|
- return Handle<String>(&Heap::hidden_symbol_);
|
+ return Handle<String>(&v8_context()->heap_.hidden_symbol_);
|
}
|
|
static Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
|
Index: src/ia32/virtual-frame-ia32.cc
|
===================================================================
|
--- src/ia32/virtual-frame-ia32.cc (revision 3036)
|
+++ src/ia32/virtual-frame-ia32.cc Sat Nov 14 01:42:58 MSK 2009
|
@@ -865,7 +865,7 @@
|
// Name and receiver are on the top of the frame. The IC expects
|
// name in ecx and receiver on the stack. It does not drop the
|
// receiver.
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize));
|
Result name = Pop();
|
PrepareForCall(1, 0); // One stack arg, not callee-dropped.
|
name.ToRegister(ecx);
|
@@ -877,7 +877,7 @@
|
Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
|
// Key and receiver are on top of the frame. The IC expects them on
|
// the stack. It does not drop them.
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Initialize));
|
PrepareForCall(2, 0); // Two stack args, neither callee-dropped.
|
return RawCallCodeObject(ic, mode);
|
}
|
@@ -887,7 +887,7 @@
|
// Name, value, and receiver are on top of the frame. The IC
|
// expects name in ecx, value in eax, and receiver on the stack. It
|
// does not drop the receiver.
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize));
|
Result name = Pop();
|
Result value = Pop();
|
PrepareForCall(1, 0); // One stack arg, not callee-dropped.
|
@@ -918,7 +918,7 @@
|
// Value, key, and receiver are on the top of the frame. The IC
|
// expects value in eax and key and receiver on the stack. It does
|
// not drop the key and receiver.
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Initialize));
|
// TODO(1222589): Make the IC grab the values from the stack.
|
Result value = Pop();
|
PrepareForCall(2, 0); // Two stack args, neither callee-dropped.
|
@@ -947,7 +947,7 @@
|
// Arguments, receiver, and function are on top of the frame. The
|
// IC expects arg count in eax, function in edi, and the arguments
|
// and receiver on the stack.
|
- Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::JSConstructCall));
|
// Duplicate the function before preparing the frame.
|
PushElementAt(arg_count + 1);
|
Result function = Pop();
|
Index: src/conversions.cc
|
===================================================================
|
--- src/conversions.cc (revision 3116)
|
+++ src/conversions.cc Sat Nov 14 01:43:10 MSK 2009
|
@@ -102,12 +102,12 @@
|
|
static inline bool IsSpace(const char* str, int index) {
|
ASSERT(index >= 0 && index < static_cast<int>(strlen(str)));
|
- return Scanner::kIsWhiteSpace.get(str[index]);
|
+ return v8_context()->scanner_.kIsWhiteSpace.get(str[index]);
|
}
|
|
|
static inline bool IsSpace(String* str, int index) {
|
- return Scanner::kIsWhiteSpace.get(str->Get(index));
|
+ return v8_context()->scanner_.kIsWhiteSpace.get(str->Get(index));
|
}
|
|
|
@@ -352,7 +352,24 @@
|
return InternalStringToDouble(str, flags, empty_string_val);
|
}
|
|
+#ifndef V8_SINGLE_THREADED
|
+static Mutex* dtoa_lock = OS::CreateMutex();
|
+#endif
|
|
+class DtoaLocker {
|
+public:
|
+ DtoaLocker() {
|
+ #ifndef V8_SINGLE_THREADED
|
+ dtoa_lock->Lock();
|
+ #endif
|
+ }
|
+ ~DtoaLocker() {
|
+ #ifndef V8_SINGLE_THREADED
|
+ dtoa_lock->Unlock();
|
+ #endif
|
+ }
|
+};
|
+
|
extern "C" char* dtoa(double d, int mode, int ndigits,
|
int* decpt, int* sign, char** rve);
|
|
@@ -381,7 +398,7 @@
|
default: {
|
int decimal_point;
|
int sign;
|
-
|
+ DtoaLocker dtoa_locker;
|
char* decimal_rep = dtoa(v, 0, 0, &decimal_point, &sign, NULL);
|
int length = strlen(decimal_rep);
|
|
@@ -464,6 +481,7 @@
|
// Find a sufficiently precise decimal representation of n.
|
int decimal_point;
|
int sign;
|
+ DtoaLocker dtoa_locker;
|
char* decimal_rep = dtoa(abs_value, 3, f, &decimal_point, &sign, NULL);
|
int decimal_rep_length = strlen(decimal_rep);
|
|
@@ -551,6 +569,7 @@
|
int decimal_point;
|
int sign;
|
char* decimal_rep = NULL;
|
+ DtoaLocker dtoa_locker;
|
if (f == -1) {
|
decimal_rep = dtoa(value, 0, 0, &decimal_point, &sign, NULL);
|
f = strlen(decimal_rep) - 1;
|
@@ -584,6 +603,7 @@
|
// Find a sufficiently precise decimal representation of n.
|
int decimal_point;
|
int sign;
|
+ DtoaLocker dtoa_locker;
|
char* decimal_rep = dtoa(value, 2, p, &decimal_point, &sign, NULL);
|
int decimal_rep_length = strlen(decimal_rep);
|
ASSERT(decimal_rep_length <= p);
|
Index: src/ia32/frames-ia32.cc
|
===================================================================
|
--- src/ia32/frames-ia32.cc (revision 3209)
|
+++ src/ia32/frames-ia32.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -74,7 +74,7 @@
|
|
Address JavaScriptFrame::GetCallerStackPointer() const {
|
int arguments;
|
- if (Heap::gc_state() != Heap::NOT_IN_GC || disable_heap_access_) {
|
+ if (v8_context()->heap_.gc_state() != Heap::NOT_IN_GC || disable_heap_access_) {
|
// The arguments for cooked frames are traversed as if they were
|
// expression stack elements of the calling frame. The reason for
|
// this rather strange decision is that we cannot access the
|
Index: src/builtins.cc
|
===================================================================
|
--- src/builtins.cc (revision 3096)
|
+++ src/builtins.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -67,7 +67,7 @@
|
|
|
#define BUILTIN_END \
|
- return Heap::undefined_value(); \
|
+ return v8_context()->heap_.undefined_value(); \
|
}
|
|
|
@@ -82,7 +82,8 @@
|
StackFrame* frame = it.frame();
|
bool reference_result = frame->is_construct();
|
#endif
|
- Address fp = Top::c_entry_fp(Top::GetCurrentThread());
|
+ Top& top = v8_context()->top_;
|
+ Address fp = top.c_entry_fp(top.GetCurrentThread());
|
// Because we know fp points to an exit frame we can use the relevant
|
// part of ExitFrame::ComputeCallerState directly.
|
const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
|
@@ -105,9 +106,10 @@
|
Handle<Code> Builtins::GetCode(JavaScript id, bool* resolved) {
|
Code* code = Builtins::builtin(Builtins::Illegal);
|
*resolved = false;
|
+ Top& top = v8_context()->top_;
|
|
- if (Top::context() != NULL) {
|
- Object* object = Top::builtins()->javascript_builtin(id);
|
+ if (top.context() != NULL) {
|
+ Object* object = top.builtins()->javascript_builtin(id);
|
if (object->IsJSFunction()) {
|
Handle<JSFunction> function(JSFunction::cast(object));
|
// Make sure the number of parameters match the formal parameter count.
|
@@ -136,16 +138,18 @@
|
|
|
BUILTIN(ArrayCodeGeneric) {
|
- Counters::array_function_runtime.Increment();
|
+ V8Context* const v8context = v8_context();
|
+ v8context->counters_.array_function_runtime.Increment();
|
|
JSArray* array;
|
+
|
if (CalledAsConstructor()) {
|
array = JSArray::cast(*receiver);
|
} else {
|
// Allocate the JS Array
|
JSFunction* constructor =
|
- Top::context()->global_context()->array_function();
|
- Object* obj = Heap::AllocateJSObject(constructor);
|
+ v8context->top_.context()->global_context()->array_function();
|
+ Object* obj = v8context->heap_.AllocateJSObject(constructor);
|
if (obj->IsFailure()) return obj;
|
array = JSArray::cast(obj);
|
}
|
@@ -159,7 +163,7 @@
|
if (obj->IsSmi()) {
|
int len = Smi::cast(obj)->value();
|
if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(len);
|
+ Object* obj = v8context->heap_.AllocateFixedArrayWithHoles(len);
|
if (obj->IsFailure()) return obj;
|
array->SetContent(FixedArray::cast(obj));
|
return array;
|
@@ -179,7 +183,7 @@
|
// Take the arguments as elements.
|
int number_of_elements = args.length() - 1;
|
Smi* len = Smi::FromInt(number_of_elements);
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(len->value());
|
+ Object* obj = v8context->heap_.AllocateFixedArrayWithHoles(len->value());
|
if (obj->IsFailure()) return obj;
|
FixedArray* elms = FixedArray::cast(obj);
|
WriteBarrierMode mode = elms->GetWriteBarrierMode();
|
@@ -216,7 +220,7 @@
|
} else {
|
// New backing storage is needed.
|
int capacity = new_length + (new_length >> 1) + 16;
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(capacity);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArrayWithHoles(capacity);
|
if (obj->IsFailure()) return obj;
|
FixedArray* new_elms = FixedArray::cast(obj);
|
WriteBarrierMode mode = new_elms->GetWriteBarrierMode();
|
@@ -239,7 +243,7 @@
|
BUILTIN(ArrayPop) {
|
JSArray* array = JSArray::cast(*receiver);
|
ASSERT(array->HasFastElements());
|
- Object* undefined = Heap::undefined_value();
|
+ Object* undefined = v8_context()->heap_.undefined_value();
|
|
int len = Smi::cast(array->length())->value();
|
if (len == 0) return undefined;
|
@@ -259,7 +263,7 @@
|
|
// Remember to check the prototype chain.
|
JSFunction* array_function =
|
- Top::context()->global_context()->array_function();
|
+ v8_context()->top_.context()->global_context()->array_function();
|
JSObject* prototype = JSObject::cast(array_function->prototype());
|
top = prototype->GetElement(len - 1);
|
|
@@ -287,15 +291,16 @@
|
SignatureInfo* sig = SignatureInfo::cast(sig_obj);
|
// If necessary, check the receiver
|
Object* recv_type = sig->receiver();
|
+ Heap& heap = v8_context()->heap_;
|
|
Object* holder = recv;
|
if (!recv_type->IsUndefined()) {
|
- for (; holder != Heap::null_value(); holder = holder->GetPrototype()) {
|
+ for (; holder != heap.null_value(); holder = holder->GetPrototype()) {
|
if (holder->IsInstanceOf(FunctionTemplateInfo::cast(recv_type))) {
|
break;
|
}
|
}
|
- if (holder == Heap::null_value()) return holder;
|
+ if (holder == heap.null_value()) return holder;
|
}
|
Object* args_obj = sig->args();
|
// If there is no argument signature we're done
|
@@ -308,13 +313,13 @@
|
if (argtype->IsUndefined()) continue;
|
Object** arg = &argv[-1 - i];
|
Object* current = *arg;
|
- for (; current != Heap::null_value(); current = current->GetPrototype()) {
|
+ for (; current != heap.null_value(); current = current->GetPrototype()) {
|
if (current->IsInstanceOf(FunctionTemplateInfo::cast(argtype))) {
|
*arg = current;
|
break;
|
}
|
}
|
- if (current == Heap::null_value()) *arg = Heap::undefined_value();
|
+ if (current == heap.null_value()) *arg = heap.undefined_value();
|
}
|
return holder;
|
}
|
@@ -326,7 +331,7 @@
|
|
// TODO(428): Remove use of static variable, handle API callbacks directly.
|
Handle<JSFunction> function =
|
- Handle<JSFunction>(JSFunction::cast(Builtins::builtin_passed_function));
|
+ Handle<JSFunction>(JSFunction::cast(v8_context()->builtins_.builtin_passed_function));
|
|
if (is_construct) {
|
Handle<FunctionTemplateInfo> desc =
|
@@ -335,7 +340,7 @@
|
bool pending_exception = false;
|
Factory::ConfigureInstance(desc, Handle<JSObject>::cast(receiver),
|
&pending_exception);
|
- ASSERT(Top::has_pending_exception() == pending_exception);
|
+ ASSERT(v8_context()->top_.has_pending_exception() == pending_exception);
|
if (pending_exception) return Failure::Exception();
|
}
|
|
@@ -347,7 +352,7 @@
|
// This function cannot be called with the given receiver. Abort!
|
Handle<Object> obj =
|
Factory::NewTypeError("illegal_invocation", HandleVector(&function, 1));
|
- return Top::Throw(*obj);
|
+ return v8_context()->top_.Throw(*obj);
|
}
|
|
Object* raw_call_data = fun_data->call_code();
|
@@ -383,7 +388,7 @@
|
value = callback(new_args);
|
}
|
if (value.IsEmpty()) {
|
- result = Heap::undefined_value();
|
+ result = v8_context()->heap_.undefined_value();
|
} else {
|
result = *reinterpret_cast<Object**>(*value);
|
}
|
@@ -449,7 +454,7 @@
|
value = callback(new_args);
|
}
|
if (value.IsEmpty()) {
|
- result = Heap::undefined_value();
|
+ result = v8_context()->heap_.undefined_value();
|
} else {
|
result = *reinterpret_cast<Object**>(*value);
|
}
|
@@ -475,14 +480,6 @@
|
}
|
BUILTIN_END
|
|
-
|
-// TODO(1238487): This is a nasty hack. We need to improve the way we
|
-// call builtins considerable to get rid of this and the hairy macros
|
-// in builtins.cc.
|
-Object* Builtins::builtin_passed_function;
|
-
|
-
|
-
|
static void Generate_LoadIC_ArrayLength(MacroAssembler* masm) {
|
LoadIC::GenerateArrayLength(masm);
|
}
|
@@ -694,15 +691,6 @@
|
}
|
#endif
|
|
-Object* Builtins::builtins_[builtin_count] = { NULL, };
|
-const char* Builtins::names_[builtin_count] = { NULL, };
|
-
|
-#define DEF_ENUM_C(name) FUNCTION_ADDR(Builtin_##name),
|
- Address Builtins::c_functions_[cfunction_count] = {
|
- BUILTIN_LIST_C(DEF_ENUM_C)
|
- };
|
-#undef DEF_ENUM_C
|
-
|
#define DEF_JS_NAME(name, ignore) #name,
|
#define DEF_JS_ARGC(ignore, argc) argc,
|
const char* Builtins::javascript_names_[id_count] = {
|
@@ -712,10 +700,31 @@
|
int Builtins::javascript_argc_[id_count] = {
|
BUILTINS_LIST_JS(DEF_JS_ARGC)
|
};
|
+
|
#undef DEF_JS_NAME
|
#undef DEF_JS_ARGC
|
|
-static bool is_initialized = false;
|
+Builtins::Builtins():is_initialized(false), builtin_passed_function(NULL) {
|
+ for (int i = 0; i < builtin_count; ++i) {
|
+ builtins_[i] = NULL;
|
+ names_[i] = NULL;
|
+ }
|
+
|
+ // TODO(1238487): This is a nasty hack. We need to improve the way we
|
+ // call builtins considerable to get rid of this and the hairy macros
|
+ // in builtins.cc.
|
+
|
+ #define DEF_ENUM_C(name) FUNCTION_ADDR(Builtin_##name),
|
+ Address c_functions[cfunction_count] = {
|
+ BUILTIN_LIST_C(DEF_ENUM_C)
|
+ };
|
+ #undef DEF_ENUM_C
|
+
|
+ for (int i = 0; i < cfunction_count; ++i) {
|
+ c_functions_[i] = c_functions[i];
|
+ }
|
+}
|
+
|
void Builtins::Setup(bool create_heap_objects) {
|
ASSERT(!is_initialized);
|
|
@@ -747,7 +756,7 @@
|
},
|
|
// Define array of pointers to generators and C builtin functions.
|
- static BuiltinDesc functions[] = {
|
+ BuiltinDesc functions[] = {
|
BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
|
BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
|
BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
|
@@ -762,6 +771,8 @@
|
// buffer, before copying it into individual code objects.
|
byte buffer[4*KB];
|
|
+ V8Context* const v8context = v8_context();
|
+
|
// Traverse the list of builtins and generate an adaptor in a
|
// separate code object for each one.
|
for (int i = 0; i < builtin_count; i++) {
|
@@ -783,14 +794,14 @@
|
// During startup it's OK to always allocate and defer GC to later.
|
// This simplifies things because we don't need to retry.
|
AlwaysAllocateScope __scope__;
|
- code = Heap::CreateCode(desc, NULL, flags, masm.CodeObject());
|
+ code = v8context->heap_.CreateCode(desc, NULL, flags, masm.CodeObject());
|
if (code->IsFailure()) {
|
- v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
|
+ v8context->v8_.FatalProcessOutOfMemory("CreateCode");
|
}
|
}
|
// Add any unresolved jumps or calls to the fixup list in the
|
// bootstrapper.
|
- Bootstrapper::AddFixup(Code::cast(code), &masm);
|
+ v8context->bootstrapper_.AddFixup(Code::cast(code), &masm);
|
// Log the event and add the code to the builtins array.
|
LOG(CodeCreateEvent(Logger::BUILTIN_TAG,
|
Code::cast(code), functions[i].s_name));
|
Index: src/global-handles.h
|
===================================================================
|
--- src/global-handles.h (revision 3230)
|
+++ src/global-handles.h Sat Nov 14 01:43:13 MSK 2009
|
@@ -56,13 +56,13 @@
|
|
typedef void (*WeakReferenceGuest)(Object* object, void* parameter);
|
|
-class GlobalHandles : public AllStatic {
|
+class GlobalHandles {
|
public:
|
// Creates a new global handle that is alive until Destroy is called.
|
- static Handle<Object> Create(Object* value);
|
+ Handle<Object> Create(Object* value);
|
|
// Destroy a global handle.
|
- static void Destroy(Object** location);
|
+ void Destroy(Object** location);
|
|
// Make the global handle weak and set the callback parameter for the
|
// handle. When the garbage collector recognizes that only weak global
|
@@ -70,65 +70,65 @@
|
// function is invoked (for each handle) with the handle and corresponding
|
// parameter as arguments. Note: cleared means set to Smi::FromInt(0). The
|
// reason is that Smi::FromInt(0) does not change during garage collection.
|
- static void MakeWeak(Object** location,
|
+ void MakeWeak(Object** location,
|
void* parameter,
|
WeakReferenceCallback callback);
|
|
// Returns the current number of weak handles.
|
- static int NumberOfWeakHandles() { return number_of_weak_handles_; }
|
+ int NumberOfWeakHandles() { return number_of_weak_handles_; }
|
|
// Returns the current number of weak handles to global objects.
|
// These handles are also included in NumberOfWeakHandles().
|
- static int NumberOfGlobalObjectWeakHandles() {
|
+ int NumberOfGlobalObjectWeakHandles() {
|
return number_of_global_object_weak_handles_;
|
}
|
|
// Clear the weakness of a global handle.
|
- static void ClearWeakness(Object** location);
|
+ void ClearWeakness(Object** location);
|
|
// Tells whether global handle is near death.
|
- static bool IsNearDeath(Object** location);
|
+ bool IsNearDeath(Object** location);
|
|
// Tells whether global handle is weak.
|
- static bool IsWeak(Object** location);
|
+ bool IsWeak(Object** location);
|
|
// Process pending weak handles.
|
- static void PostGarbageCollectionProcessing();
|
+ void PostGarbageCollectionProcessing();
|
|
// Iterates over all strong handles.
|
- static void IterateStrongRoots(ObjectVisitor* v);
|
+ void IterateStrongRoots(ObjectVisitor* v);
|
|
// Iterates over all handles.
|
- static void IterateAllRoots(ObjectVisitor* v);
|
+ void IterateAllRoots(ObjectVisitor* v);
|
|
// Iterates over all weak roots in heap.
|
- static void IterateWeakRoots(ObjectVisitor* v);
|
+ void IterateWeakRoots(ObjectVisitor* v);
|
|
// Iterates over weak roots that are bound to a given callback.
|
- static void IterateWeakRoots(WeakReferenceGuest f,
|
+ void IterateWeakRoots(WeakReferenceGuest f,
|
WeakReferenceCallback callback);
|
|
// Find all weak handles satisfying the callback predicate, mark
|
// them as pending.
|
- static void IdentifyWeakHandles(WeakSlotCallback f);
|
+ void IdentifyWeakHandles(WeakSlotCallback f);
|
|
// Add an object group.
|
// Should only used in GC callback function before a collection.
|
// All groups are destroyed after a mark-compact collection.
|
- static void AddGroup(Object*** handles, size_t length);
|
+ void AddGroup(Object*** handles, size_t length);
|
|
// Returns the object groups.
|
- static List<ObjectGroup*>* ObjectGroups();
|
+ List<ObjectGroup*>* ObjectGroups();
|
|
// Remove bags, this should only happen after GC.
|
- static void RemoveObjectGroups();
|
+ void RemoveObjectGroups();
|
|
// Tear down the global handle structure.
|
- static void TearDown();
|
+ void TearDown();
|
|
#ifdef DEBUG
|
- static void PrintStats();
|
- static void Print();
|
+ void PrintStats();
|
+ void Print();
|
#endif
|
class Pool;
|
private:
|
@@ -136,22 +136,22 @@
|
class Node;
|
|
// Field always containing the number of weak and near-death handles.
|
- static int number_of_weak_handles_;
|
+ int number_of_weak_handles_;
|
|
// Field always containing the number of weak and near-death handles
|
// to global objects. These objects are also included in
|
// number_of_weak_handles_.
|
- static int number_of_global_object_weak_handles_;
|
+ int number_of_global_object_weak_handles_;
|
|
// Global handles are kept in a single linked list pointed to by head_.
|
- static Node* head_;
|
- static Node* head() { return head_; }
|
- static void set_head(Node* value) { head_ = value; }
|
+ Node* head_;
|
+ Node* head() { return head_; }
|
+ void set_head(Node* value) { head_ = value; }
|
|
// Free list for DESTROYED global handles not yet deallocated.
|
- static Node* first_free_;
|
- static Node* first_free() { return first_free_; }
|
- static void set_first_free(Node* value) { first_free_ = value; }
|
+ Node* first_free_;
|
+ Node* first_free() { return first_free_; }
|
+ void set_first_free(Node* value) { first_free_ = value; }
|
|
// List of deallocated nodes.
|
// Deallocated nodes form a prefix of all the nodes and
|
@@ -164,11 +164,19 @@
|
// node node ... node node
|
// .next -> .next -> .next ->
|
// <- .next_free <- .next_free <- .next_free
|
- static Node* first_deallocated_;
|
- static Node* first_deallocated() { return first_deallocated_; }
|
- static void set_first_deallocated(Node* value) {
|
+ Node* first_deallocated_;
|
+ Node* first_deallocated() { return first_deallocated_; }
|
+ void set_first_deallocated(Node* value) {
|
first_deallocated_ = value;
|
}
|
+
|
+ List<ObjectGroup*>* object_groups_;
|
+ GlobalHandles();
|
+ ~GlobalHandles();
|
+ DISALLOW_COPY_AND_ASSIGN(GlobalHandles);
|
+ friend class V8Context;
|
+ class GlobalHandlesImpl;
|
+ GlobalHandlesImpl* const global_handles_impl;
|
};
|
|
|
Index: src/objects.h
|
===================================================================
|
--- src/objects.h (revision 3238)
|
+++ src/objects.h Sat Nov 14 01:43:19 MSK 2009
|
@@ -35,6 +35,7 @@
|
#if V8_TARGET_ARCH_ARM
|
#include "arm/constants-arm.h"
|
#endif
|
+#include "v8-global-context.h"
|
|
//
|
// All object types in the V8 JavaScript are described in this file.
|
@@ -4489,29 +4490,46 @@
|
DISALLOW_IMPLICIT_CONSTRUCTORS(ExternalTwoByteString);
|
};
|
|
+class Relocatable;
|
|
+class RelocatableData {
|
+public:
|
+ inline void save(Relocatable* new_top, Relocatable*& saved_old_top) {
|
+ saved_old_top = top_;
|
+ top_ = new_top;
|
+ }
|
+ inline void restore(Relocatable* top, Relocatable* saved_old_top) {
|
+ ASSERT_EQ(top_, top);
|
+ top_ = saved_old_top;
|
+ }
|
+ void PostGarbageCollectionProcessing();
|
+ int ArchiveSpacePerThread();
|
+ char* ArchiveState(char* to);
|
+ char* RestoreState(char* from);
|
+ void Iterate(ObjectVisitor* v);
|
+ void Iterate(ObjectVisitor* v, Relocatable* top);
|
+ char* Iterate(ObjectVisitor* v, char* t);
|
+private:
|
+ friend class V8Context;
|
+ friend class Relocatable;
|
+ Relocatable * top_;
|
+ RelocatableData();
|
+ DISALLOW_COPY_AND_ASSIGN(RelocatableData);
|
+};
|
+
|
// Utility superclass for stack-allocated objects that must be updated
|
// on gc. It provides two ways for the gc to update instances, either
|
// iterating or updating after gc.
|
class Relocatable BASE_EMBEDDED {
|
public:
|
- inline Relocatable() : prev_(top_) { top_ = this; }
|
+ inline Relocatable() { v8_context()->relocatable_data_.save(this, prev_); }
|
virtual ~Relocatable() {
|
- ASSERT_EQ(top_, this);
|
- top_ = prev_;
|
+ v8_context()->relocatable_data_.restore(this, prev_);
|
}
|
virtual void IterateInstance(ObjectVisitor* v) { }
|
virtual void PostGarbageCollection() { }
|
-
|
- static void PostGarbageCollectionProcessing();
|
- static int ArchiveSpacePerThread();
|
- static char* ArchiveState(char* to);
|
- static char* RestoreState(char* from);
|
- static void Iterate(ObjectVisitor* v);
|
- static void Iterate(ObjectVisitor* v, Relocatable* top);
|
- static char* Iterate(ObjectVisitor* v, char* t);
|
private:
|
- static Relocatable* top_;
|
+ friend class RelocatableData;
|
Relocatable* prev_;
|
};
|
|
@@ -5092,7 +5110,10 @@
|
// Abstract base class for visiting, and optionally modifying, the
|
// pointers contained in Objects. Used in GC and serialization/deserialization.
|
class ObjectVisitor BASE_EMBEDDED {
|
+protected:
|
+ Heap& heap_;
|
public:
|
+ ObjectVisitor():heap_(v8_context()->heap_) {}
|
virtual ~ObjectVisitor() {}
|
|
// Visits a contiguous arrays of pointers in the half-open range
|
@@ -5165,6 +5186,13 @@
|
}
|
};
|
|
+class ObjectsData {
|
+public:
|
+ StringInputBuffer string_compare_buffer_a;
|
+ StringInputBuffer string_compare_buffer_b;
|
+ StaticResource<StringInputBuffer> string_input_buffer;
|
+};
|
+
|
} } // namespace v8::internal
|
|
#endif // V8_OBJECTS_H_
|
Index: test/cctest/test-assembler-arm.cc
|
===================================================================
|
--- test/cctest/test-assembler-arm.cc (revision 2853)
|
+++ test/cctest/test-assembler-arm.cc Sun Nov 15 12:39:10 MSK 2009
|
@@ -34,8 +34,8 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
-
|
// Define these function prototypes to match JSEntryFunction in execution.cc.
|
typedef Object* (*F1)(int x, int p1, int p2, int p3, int p4);
|
typedef Object* (*F2)(int x, int y, int p2, int p3, int p4);
|
@@ -72,10 +72,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -109,10 +109,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -155,10 +155,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
@@ -203,10 +203,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
Index: src/codegen.cc
|
===================================================================
|
--- src/codegen.cc (revision 3218)
|
+++ src/codegen.cc Sat Nov 14 01:43:13 MSK 2009
|
@@ -42,10 +42,39 @@
|
namespace v8 {
|
namespace internal {
|
|
+CodeGeneratorData::CodeGeneratorData():
|
+ top_(NULL),
|
+ compiling_deferred_code_(false),
|
+ frame_element_constants_list_(NULL),
|
+ result_constants_list_(NULL)
|
+{
|
+}
|
|
-CodeGenerator* CodeGeneratorScope::top_ = NULL;
|
+CodeGeneratorData::~CodeGeneratorData() {
|
+ delete result_constants_list_;
|
+ delete frame_element_constants_list_;
|
+}
|
|
+// we cannot allocate ZoneObjectList because it goes to some zone unexpectedly, and after that to unexpected death
|
+// so we create ZoneObjectList as member of malloced class and take the only field
|
+struct NonZoneObjectListHolder {
|
+ ZoneObjectList zone_list;
|
+ NonZoneObjectListHolder():zone_list(10) {
|
+ ASSERT(sizeof(NonZoneObjectListHolder) == sizeof(zone_list));
|
+ ASSERT(reinterpret_cast<int>(this) == reinterpret_cast<int>(&zone_list));
|
+ }
|
+};
|
|
+ZoneObjectList* CodeGeneratorData::result_constants_list() {
|
+ if (!result_constants_list_) result_constants_list_ = &((new NonZoneObjectListHolder())->zone_list);
|
+ return result_constants_list_;
|
+}
|
+
|
+ZoneObjectList* CodeGeneratorData::frame_element_constants_list() {
|
+ if (!frame_element_constants_list_) frame_element_constants_list_ = &((new NonZoneObjectListHolder())->zone_list);
|
+ return frame_element_constants_list_;
|
+}
|
+
|
DeferredCode::DeferredCode()
|
: masm_(CodeGeneratorScope::Current()->masm()),
|
statement_position_(masm_->current_statement_position()),
|
@@ -133,7 +162,7 @@
|
bool print_json_ast = false;
|
const char* ftype;
|
|
- if (Bootstrapper::IsActive()) {
|
+ if (v8_context()->bootstrapper_.IsActive()) {
|
print_source = FLAG_print_builtin_source;
|
print_ast = FLAG_print_builtin_ast;
|
print_json_ast = FLAG_print_builtin_json_ast;
|
@@ -179,10 +208,10 @@
|
Factory::NewCode(desc, &sinfo, flags, masm->CodeObject());
|
|
// Add unresolved entries in the code to the fixup list.
|
- Bootstrapper::AddFixup(*code, masm);
|
+ v8_context()->bootstrapper_.AddFixup(*code, masm);
|
|
#ifdef ENABLE_DISASSEMBLER
|
- bool print_code = Bootstrapper::IsActive()
|
+ bool print_code = v8_context()->bootstrapper_.IsActive()
|
? FLAG_print_builtin_code
|
: FLAG_print_code;
|
if (print_code) {
|
@@ -205,7 +234,7 @@
|
#endif // ENABLE_DISASSEMBLER
|
|
if (!code.is_null()) {
|
- Counters::total_compiled_code_size.Increment(code->instruction_size());
|
+ v8_context()->counters_.total_compiled_code_size.Increment(code->instruction_size());
|
}
|
return code;
|
}
|
@@ -224,7 +253,7 @@
|
CodeGeneratorScope scope(&cgen);
|
cgen.GenCode(fun);
|
if (cgen.HasStackOverflow()) {
|
- ASSERT(!Top::has_pending_exception());
|
+ ASSERT(!v8_context()->top_.has_pending_exception());
|
return Handle<Code>::null();
|
}
|
|
@@ -238,10 +267,10 @@
|
|
bool CodeGenerator::ShouldGenerateLog(Expression* type) {
|
ASSERT(type != NULL);
|
- if (!Logger::is_logging()) return false;
|
+ if (!v8_context()->logger_.is_logging()) return false;
|
Handle<String> name = Handle<String>::cast(type->AsLiteral()->handle());
|
if (FLAG_log_regexp) {
|
- static Vector<const char> kRegexp = CStrVector("regexp");
|
+ static Vector<const char> kRegexp = CStrVector("regexp"); ///static
|
if (name->IsEqualTo(kRegexp))
|
return true;
|
}
|
@@ -262,7 +291,7 @@
|
// that it needs so we need to ensure it is generated already.
|
ComputeCallInitialize(argc, NOT_IN_LOOP);
|
}
|
- CALL_HEAP_FUNCTION(StubCache::ComputeCallInitialize(argc, in_loop), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeCallInitialize(argc, in_loop), Code);
|
}
|
|
|
@@ -308,7 +337,7 @@
|
}
|
} else {
|
Handle<JSFunction> function =
|
- Compiler::BuildBoilerplate(node->fun(), script(), this);
|
+ v8_context()->compiler_.BuildBoilerplate(node->fun(), script(), this);
|
// Check for stack-overflow exception.
|
if (HasStackOverflow()) return;
|
array->set(j++, *function);
|
Index: src/oprofile-agent.h
|
===================================================================
|
--- src/oprofile-agent.h (revision 2855)
|
+++ src/oprofile-agent.h Sat Nov 14 01:42:58 MSK 2009
|
@@ -56,7 +56,7 @@
|
static bool is_enabled() { return handle_ != NULL; }
|
|
private:
|
- static op_agent_t handle_;
|
+ static op_agent_t handle_;///static
|
|
// Size of the buffer that is used for composing code areas names.
|
static const int kFormattingBufSize = 256;
|
Index: src/stub-cache.h
|
===================================================================
|
--- src/stub-cache.h (revision 2764)
|
+++ src/stub-cache.h Sat Nov 14 01:43:04 MSK 2009
|
@@ -44,7 +44,7 @@
|
|
class SCTableReference;
|
|
-class StubCache : public AllStatic {
|
+class StubCache {
|
public:
|
struct Entry {
|
String* key;
|
@@ -52,33 +52,33 @@
|
};
|
|
|
- static void Initialize(bool create_heap_objects);
|
+ void Initialize(bool create_heap_objects);
|
|
// Computes the right stub matching. Inserts the result in the
|
// cache before returning. This might compile a stub if needed.
|
- static Object* ComputeLoadField(String* name,
|
+ Object* ComputeLoadField(String* name,
|
JSObject* receiver,
|
JSObject* holder,
|
int field_index);
|
|
- static Object* ComputeLoadCallback(String* name,
|
+ Object* ComputeLoadCallback(String* name,
|
JSObject* receiver,
|
JSObject* holder,
|
AccessorInfo* callback);
|
|
- static Object* ComputeLoadConstant(String* name,
|
+ Object* ComputeLoadConstant(String* name,
|
JSObject* receiver,
|
JSObject* holder,
|
Object* value);
|
|
- static Object* ComputeLoadInterceptor(String* name,
|
+ Object* ComputeLoadInterceptor(String* name,
|
JSObject* receiver,
|
JSObject* holder);
|
|
- static Object* ComputeLoadNormal(String* name, JSObject* receiver);
|
+ Object* ComputeLoadNormal(String* name, JSObject* receiver);
|
|
|
- static Object* ComputeLoadGlobal(String* name,
|
+ Object* ComputeLoadGlobal(String* name,
|
JSObject* receiver,
|
GlobalObject* holder,
|
JSGlobalPropertyCell* cell,
|
@@ -87,82 +87,82 @@
|
|
// ---
|
|
- static Object* ComputeKeyedLoadField(String* name,
|
+ Object* ComputeKeyedLoadField(String* name,
|
JSObject* receiver,
|
JSObject* holder,
|
int field_index);
|
|
- static Object* ComputeKeyedLoadCallback(String* name,
|
+ Object* ComputeKeyedLoadCallback(String* name,
|
JSObject* receiver,
|
JSObject* holder,
|
AccessorInfo* callback);
|
|
- static Object* ComputeKeyedLoadConstant(String* name, JSObject* receiver,
|
+ Object* ComputeKeyedLoadConstant(String* name, JSObject* receiver,
|
JSObject* holder, Object* value);
|
|
- static Object* ComputeKeyedLoadInterceptor(String* name,
|
+ Object* ComputeKeyedLoadInterceptor(String* name,
|
JSObject* receiver,
|
JSObject* holder);
|
|
- static Object* ComputeKeyedLoadArrayLength(String* name, JSArray* receiver);
|
+ Object* ComputeKeyedLoadArrayLength(String* name, JSArray* receiver);
|
|
- static Object* ComputeKeyedLoadStringLength(String* name,
|
+ Object* ComputeKeyedLoadStringLength(String* name,
|
String* receiver);
|
|
- static Object* ComputeKeyedLoadFunctionPrototype(String* name,
|
+ Object* ComputeKeyedLoadFunctionPrototype(String* name,
|
JSFunction* receiver);
|
|
// ---
|
|
- static Object* ComputeStoreField(String* name,
|
+ Object* ComputeStoreField(String* name,
|
JSObject* receiver,
|
int field_index,
|
Map* transition = NULL);
|
|
- static Object* ComputeStoreGlobal(String* name,
|
+ Object* ComputeStoreGlobal(String* name,
|
GlobalObject* receiver,
|
JSGlobalPropertyCell* cell);
|
|
- static Object* ComputeStoreCallback(String* name,
|
+ Object* ComputeStoreCallback(String* name,
|
JSObject* receiver,
|
AccessorInfo* callback);
|
|
- static Object* ComputeStoreInterceptor(String* name, JSObject* receiver);
|
+ Object* ComputeStoreInterceptor(String* name, JSObject* receiver);
|
|
// ---
|
|
- static Object* ComputeKeyedStoreField(String* name,
|
+ Object* ComputeKeyedStoreField(String* name,
|
JSObject* receiver,
|
int field_index,
|
Map* transition = NULL);
|
|
// ---
|
|
- static Object* ComputeCallField(int argc,
|
+ Object* ComputeCallField(int argc,
|
InLoopFlag in_loop,
|
String* name,
|
Object* object,
|
JSObject* holder,
|
int index);
|
|
- static Object* ComputeCallConstant(int argc,
|
+ Object* ComputeCallConstant(int argc,
|
InLoopFlag in_loop,
|
String* name,
|
Object* object,
|
JSObject* holder,
|
JSFunction* function);
|
|
- static Object* ComputeCallNormal(int argc,
|
+ Object* ComputeCallNormal(int argc,
|
InLoopFlag in_loop,
|
String* name,
|
JSObject* receiver);
|
|
- static Object* ComputeCallInterceptor(int argc,
|
+ Object* ComputeCallInterceptor(int argc,
|
String* name,
|
Object* object,
|
JSObject* holder);
|
|
- static Object* ComputeCallGlobal(int argc,
|
+ Object* ComputeCallGlobal(int argc,
|
InLoopFlag in_loop,
|
String* name,
|
JSObject* receiver,
|
@@ -172,35 +172,35 @@
|
|
// ---
|
|
- static Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
|
- static Object* ComputeCallPreMonomorphic(int argc, InLoopFlag in_loop);
|
- static Object* ComputeCallNormal(int argc, InLoopFlag in_loop);
|
- static Object* ComputeCallMegamorphic(int argc, InLoopFlag in_loop);
|
- static Object* ComputeCallMiss(int argc);
|
+ Object* ComputeCallInitialize(int argc, InLoopFlag in_loop);
|
+ Object* ComputeCallPreMonomorphic(int argc, InLoopFlag in_loop);
|
+ Object* ComputeCallNormal(int argc, InLoopFlag in_loop);
|
+ Object* ComputeCallMegamorphic(int argc, InLoopFlag in_loop);
|
+ Object* ComputeCallMiss(int argc);
|
|
// Finds the Code object stored in the Heap::non_monomorphic_cache().
|
- static Code* FindCallInitialize(int argc, InLoopFlag in_loop);
|
+ Code* FindCallInitialize(int argc, InLoopFlag in_loop);
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- static Object* ComputeCallDebugBreak(int argc);
|
- static Object* ComputeCallDebugPrepareStepIn(int argc);
|
+ Object* ComputeCallDebugBreak(int argc);
|
+ Object* ComputeCallDebugPrepareStepIn(int argc);
|
#endif
|
|
- static Object* ComputeLazyCompile(int argc);
|
+ Object* ComputeLazyCompile(int argc);
|
|
|
// Update cache for entry hash(name, map).
|
- static Code* Set(String* name, Map* map, Code* code);
|
+ Code* Set(String* name, Map* map, Code* code);
|
|
// Clear the lookup table (@ mark compact collection).
|
- static void Clear();
|
+ void Clear();
|
|
// Functions for generating stubs at startup.
|
- static void GenerateMiss(MacroAssembler* masm);
|
+ void GenerateMiss(MacroAssembler* masm);
|
|
// Generate code for probing the stub cache table.
|
// If extra != no_reg it might be used as am extra scratch register.
|
- static void GenerateProbe(MacroAssembler* masm,
|
+ void GenerateProbe(MacroAssembler* masm,
|
Code::Flags flags,
|
Register receiver,
|
Register name,
|
@@ -216,11 +216,15 @@
|
friend class SCTableReference;
|
static const int kPrimaryTableSize = 2048;
|
static const int kSecondaryTableSize = 512;
|
- static Entry primary_[];
|
- static Entry secondary_[];
|
+ Entry primary_[kPrimaryTableSize];
|
+ Entry secondary_[kSecondaryTableSize];
|
|
+ friend class V8Context;
|
+ StubCache() {}
|
+ DISALLOW_COPY_AND_ASSIGN(StubCache);
|
+
|
// Computes the hashed offsets for primary and secondary caches.
|
- static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
|
+ int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
|
// This works well because the heap object tag size and the hash
|
// shift are equal. Shifting down the length field to get the
|
// hash code would effectively throw away two bits of the hash
|
@@ -243,7 +247,7 @@
|
return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
|
}
|
|
- static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
|
+ int SecondaryOffset(String* name, Code::Flags flags, int seed) {
|
// Use the seed from the primary cache in the secondary cache.
|
uint32_t string_low32bits =
|
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
|
@@ -260,7 +264,7 @@
|
// ends in String::kHashShift 0s. Then we shift it so it is a multiple
|
// of sizeof(Entry). This makes it easier to avoid making mistakes
|
// in the hashed offset computations.
|
- static Entry* entry(Entry* table, int offset) {
|
+ Entry* entry(Entry* table, int offset) {
|
const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
|
return reinterpret_cast<Entry*>(
|
reinterpret_cast<Address>(table) + (offset << shift_amount));
|
@@ -288,8 +292,8 @@
|
|
static StubCache::Entry* first_entry(StubCache::Table table) {
|
switch (table) {
|
- case StubCache::kPrimary: return StubCache::primary_;
|
- case StubCache::kSecondary: return StubCache::secondary_;
|
+ case StubCache::kPrimary: return v8_context()->stub_cache_.primary_;
|
+ case StubCache::kSecondary: return v8_context()->stub_cache_.secondary_;
|
}
|
UNREACHABLE();
|
return NULL;
|
Index: src/stub-cache.cc
|
===================================================================
|
--- src/stub-cache.cc (revision 3209)
|
+++ src/stub-cache.cc Sat Nov 14 01:43:24 MSK 2009
|
@@ -39,9 +39,6 @@
|
// StubCache implementation.
|
|
|
-StubCache::Entry StubCache::primary_[StubCache::kPrimaryTableSize];
|
-StubCache::Entry StubCache::secondary_[StubCache::kSecondaryTableSize];
|
-
|
void StubCache::Initialize(bool create_heap_objects) {
|
ASSERT(IsPowerOf2(kPrimaryTableSize));
|
ASSERT(IsPowerOf2(kSecondaryTableSize));
|
@@ -58,7 +55,7 @@
|
|
// Validate that the name does not move on scavenge, and that we
|
// can use identity checks instead of string equality checks.
|
- ASSERT(!Heap::InNewSpace(name));
|
+ ASSERT(!v8_context()->heap_.InNewSpace(name));
|
ASSERT(name->IsSymbol());
|
|
// The state bits are not important to the hash function because
|
@@ -78,7 +75,7 @@
|
|
// If the primary entry has useful data in it, we retire it to the
|
// secondary cache before overwriting it.
|
- if (hit != Builtins::builtin(Builtins::Illegal)) {
|
+ if (hit != v8_context()->builtins_.builtin(Builtins::Illegal)) {
|
Code::Flags primary_flags = Code::RemoveTypeFromFlags(hit->flags());
|
int secondary_offset =
|
SecondaryOffset(primary->key, primary_flags, primary_offset);
|
@@ -167,7 +164,7 @@
|
|
|
Object* StubCache::ComputeLoadNormal(String* name, JSObject* receiver) {
|
- Code* code = Builtins::builtin(Builtins::LoadIC_Normal);
|
+ Code* code = v8_context()->builtins_.builtin(Builtins::LoadIC_Normal);
|
return Set(name, receiver->map(), code);
|
}
|
|
@@ -437,7 +434,7 @@
|
if (code->IsUndefined()) {
|
if (object->IsJSObject()) {
|
Object* opt =
|
- Top::LookupSpecialFunction(JSObject::cast(object), holder, function);
|
+ v8_context()->top_.LookupSpecialFunction(JSObject::cast(object), holder, function);
|
if (opt->IsJSFunction()) {
|
check = StubCompiler::JSARRAY_HAS_FAST_ELEMENTS_CHECK;
|
function = JSFunction::cast(opt);
|
@@ -569,24 +566,24 @@
|
|
static Object* GetProbeValue(Code::Flags flags) {
|
// Use raw_unchecked... so we don't get assert failures during GC.
|
- NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache();
|
+ NumberDictionary* dictionary = v8_context()->heap_.raw_unchecked_non_monomorphic_cache();
|
int entry = dictionary->FindEntry(flags);
|
if (entry != -1) return dictionary->ValueAt(entry);
|
- return Heap::raw_unchecked_undefined_value();
|
+ return v8_context()->heap_.raw_unchecked_undefined_value();
|
}
|
|
|
static Object* ProbeCache(Code::Flags flags) {
|
Object* probe = GetProbeValue(flags);
|
- if (probe != Heap::undefined_value()) return probe;
|
+ if (probe != v8_context()->heap_.undefined_value()) return probe;
|
// Seed the cache with an undefined value to make sure that any
|
// generated code object can always be inserted into the cache
|
// without causing allocation failures.
|
Object* result =
|
- Heap::non_monomorphic_cache()->AtNumberPut(flags,
|
- Heap::undefined_value());
|
+ v8_context()->heap_.non_monomorphic_cache()->AtNumberPut(flags,
|
+ v8_context()->heap_.undefined_value());
|
if (result->IsFailure()) return result;
|
- Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result));
|
+ v8_context()->heap_.public_set_non_monomorphic_cache(NumberDictionary::cast(result));
|
return probe;
|
}
|
|
@@ -594,13 +591,13 @@
|
static Object* FillCache(Object* code) {
|
if (code->IsCode()) {
|
int entry =
|
- Heap::non_monomorphic_cache()->FindEntry(
|
+ v8_context()->heap_.non_monomorphic_cache()->FindEntry(
|
Code::cast(code)->flags());
|
// The entry must be present see comment in ProbeCache.
|
ASSERT(entry != -1);
|
- ASSERT(Heap::non_monomorphic_cache()->ValueAt(entry) ==
|
- Heap::undefined_value());
|
- Heap::non_monomorphic_cache()->ValueAtPut(entry, code);
|
+ ASSERT(v8_context()->heap_.non_monomorphic_cache()->ValueAt(entry) ==
|
+ v8_context()->heap_.undefined_value());
|
+ v8_context()->heap_.non_monomorphic_cache()->ValueAtPut(entry, code);
|
CHECK(GetProbeValue(Code::cast(code)->flags()) == code);
|
}
|
return code;
|
@@ -712,13 +709,15 @@
|
|
|
void StubCache::Clear() {
|
+ Builtins& builtins = v8_context()->builtins_;
|
+ Heap& heap = v8_context()->heap_;
|
for (int i = 0; i < kPrimaryTableSize; i++) {
|
- primary_[i].key = Heap::empty_string();
|
- primary_[i].value = Builtins::builtin(Builtins::Illegal);
|
+ primary_[i].key = heap.empty_string();
|
+ primary_[i].value = builtins.builtin(Builtins::Illegal);
|
}
|
for (int j = 0; j < kSecondaryTableSize; j++) {
|
- secondary_[j].key = Heap::empty_string();
|
- secondary_[j].value = Builtins::builtin(Builtins::Illegal);
|
+ secondary_[j].key = heap.empty_string();
|
+ secondary_[j].value = builtins.builtin(Builtins::Illegal);
|
}
|
}
|
|
@@ -729,7 +728,7 @@
|
|
// Support function for computing call IC miss stubs.
|
Handle<Code> ComputeCallMiss(int argc) {
|
- CALL_HEAP_FUNCTION(StubCache::ComputeCallMiss(argc), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeCallMiss(argc), Code);
|
}
|
|
|
@@ -753,7 +752,7 @@
|
result = fun(v8::Utils::ToLocal(args.at<String>(4)), info);
|
}
|
RETURN_IF_SCHEDULED_EXCEPTION();
|
- if (result.IsEmpty()) return Heap::undefined_value();
|
+ if (result.IsEmpty()) return v8_context()->heap_.undefined_value();
|
return *v8::Utils::OpenHandle(*result);
|
}
|
|
@@ -815,7 +814,7 @@
|
}
|
}
|
|
- return Heap::no_interceptor_result_sentinel();
|
+ return v8_context()->heap_.no_interceptor_result_sentinel();
|
}
|
|
|
@@ -825,7 +824,7 @@
|
// can't use either LoadIC or KeyedLoadIC constructors.
|
IC ic(IC::NO_EXTRA_FRAME);
|
ASSERT(ic.target()->is_load_stub() || ic.target()->is_keyed_load_stub());
|
- if (!ic.is_contextual()) return Heap::undefined_value();
|
+ if (!ic.is_contextual()) return v8_context()->heap_.undefined_value();
|
|
// Throw a reference error.
|
HandleScope scope;
|
@@ -833,7 +832,7 @@
|
Handle<Object> error =
|
Factory::NewReferenceError("not_defined",
|
HandleVector(&name_handle, 1));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
|
@@ -920,7 +919,7 @@
|
CallIC::GenerateInitialize(masm(), argc);
|
Object* result = GetCodeWithFlags(flags, "CompileCallInitialize");
|
if (!result->IsFailure()) {
|
- Counters::call_initialize_stubs.Increment();
|
+ v8_context()->counters_.call_initialize_stubs.Increment();
|
Code* code = Code::cast(result);
|
USE(code);
|
LOG(CodeCreateEvent(Logger::CALL_INITIALIZE_TAG,
|
@@ -938,7 +937,7 @@
|
CallIC::GenerateInitialize(masm(), argc);
|
Object* result = GetCodeWithFlags(flags, "CompileCallPreMonomorphic");
|
if (!result->IsFailure()) {
|
- Counters::call_premonomorphic_stubs.Increment();
|
+ v8_context()->counters_.call_premonomorphic_stubs.Increment();
|
Code* code = Code::cast(result);
|
USE(code);
|
LOG(CodeCreateEvent(Logger::CALL_PRE_MONOMORPHIC_TAG,
|
@@ -954,7 +953,7 @@
|
CallIC::GenerateNormal(masm(), argc);
|
Object* result = GetCodeWithFlags(flags, "CompileCallNormal");
|
if (!result->IsFailure()) {
|
- Counters::call_normal_stubs.Increment();
|
+ v8_context()->counters_.call_normal_stubs.Increment();
|
Code* code = Code::cast(result);
|
USE(code);
|
LOG(CodeCreateEvent(Logger::CALL_NORMAL_TAG,
|
@@ -970,7 +969,7 @@
|
CallIC::GenerateMegamorphic(masm(), argc);
|
Object* result = GetCodeWithFlags(flags, "CompileCallMegamorphic");
|
if (!result->IsFailure()) {
|
- Counters::call_megamorphic_stubs.Increment();
|
+ v8_context()->counters_.call_megamorphic_stubs.Increment();
|
Code* code = Code::cast(result);
|
USE(code);
|
LOG(CodeCreateEvent(Logger::CALL_MEGAMORPHIC_TAG,
|
@@ -986,7 +985,7 @@
|
CallIC::GenerateMiss(masm(), argc);
|
Object* result = GetCodeWithFlags(flags, "CompileCallMiss");
|
if (!result->IsFailure()) {
|
- Counters::call_megamorphic_stubs.Increment();
|
+ v8_context()->counters_.call_megamorphic_stubs.Increment();
|
Code* code = Code::cast(result);
|
USE(code);
|
LOG(CodeCreateEvent(Logger::CALL_MISS_TAG, code, code->arguments_count()));
|
@@ -1035,7 +1034,7 @@
|
// Create code object in the heap.
|
CodeDesc desc;
|
masm_.GetCode(&desc);
|
- Object* result = Heap::CreateCode(desc, NULL, flags, masm_.CodeObject());
|
+ Object* result = v8_context()->heap_.CreateCode(desc, NULL, flags, masm_.CodeObject());
|
#ifdef ENABLE_DISASSEMBLER
|
if (FLAG_print_code_stubs && !result->IsFailure()) {
|
Code::cast(result)->Disassemble(name);
|
Index: src/spaces.h
|
===================================================================
|
--- src/spaces.h (revision 3098)
|
+++ src/spaces.h Sat Nov 14 01:43:16 MSK 2009
|
@@ -208,8 +208,8 @@
|
// Use a state to mark whether remembered set space can be used for other
|
// purposes.
|
enum RSetState { IN_USE, NOT_IN_USE };
|
- static bool is_rset_in_use() { return rset_state_ == IN_USE; }
|
- static void set_rset_state(RSetState state) { rset_state_ = state; }
|
+ static bool is_rset_in_use() { return v8_context()->storage_data_.rset_used_; }
|
+ static void set_rset_state(RSetState state) { v8_context()->storage_data_.rset_used_ = state == IN_USE; }
|
#endif
|
|
// 8K bytes per page.
|
@@ -280,10 +280,6 @@
|
// The forwarding address of the first live object in this page.
|
Address mc_first_forwarded;
|
|
-#ifdef DEBUG
|
- private:
|
- static RSetState rset_state_; // state of the remembered set
|
-#endif
|
};
|
|
|
@@ -321,19 +317,19 @@
|
// displacements cover the entire 4GB virtual address space. On 64-bit
|
// platforms, we support this using the CodeRange object, which reserves and
|
// manages a range of virtual memory.
|
-class CodeRange : public AllStatic {
|
+class CodeRange {
|
public:
|
// Reserves a range of virtual memory, but does not commit any of it.
|
// Can only be called once, at heap initialization time.
|
// Returns false on failure.
|
- static bool Setup(const size_t requested_size);
|
+ bool Setup(const size_t requested_size);
|
|
// Frees the range of virtual memory, and frees the data structures used to
|
// manage it.
|
- static void TearDown();
|
+ void TearDown();
|
|
- static bool exists() { return code_range_ != NULL; }
|
- static bool contains(Address address) {
|
+ bool exists() { return code_range_ != NULL; }
|
+ bool contains(Address address) {
|
if (code_range_ == NULL) return false;
|
Address start = static_cast<Address>(code_range_->address());
|
return start <= address && address < start + code_range_->size();
|
@@ -342,12 +338,12 @@
|
// Allocates a chunk of memory from the large-object portion of
|
// the code range. On platforms with no separate code range, should
|
// not be called.
|
- static void* AllocateRawMemory(const size_t requested, size_t* allocated);
|
- static void FreeRawMemory(void* buf, size_t length);
|
+ void* AllocateRawMemory(const size_t requested, size_t* allocated);
|
+ void FreeRawMemory(void* buf, size_t length);
|
|
private:
|
// The reserved range of virtual memory that all code objects are put in.
|
- static VirtualMemory* code_range_;
|
+ VirtualMemory* code_range_;
|
// Plain old data class, just a struct plus a constructor.
|
class FreeBlock {
|
public:
|
@@ -363,20 +359,24 @@
|
// Freed blocks of memory are added to the free list. When the allocation
|
// list is exhausted, the free list is sorted and merged to make the new
|
// allocation list.
|
- static List<FreeBlock> free_list_;
|
+ List<FreeBlock> free_list_;
|
// Memory is allocated from the free blocks on the allocation list.
|
// The block at current_allocation_block_index_ is the current block.
|
- static List<FreeBlock> allocation_list_;
|
- static int current_allocation_block_index_;
|
+ List<FreeBlock> allocation_list_;
|
+ int current_allocation_block_index_;
|
|
// Finds a block on the allocation list that contains at least the
|
// requested amount of memory. If none is found, sorts and merges
|
// the existing free memory blocks, and searches again.
|
// If none can be found, terminates V8 with FatalProcessOutOfMemory.
|
- static void GetNextAllocationBlock(size_t requested);
|
+ void GetNextAllocationBlock(size_t requested);
|
// Compares the start addresses of two free blocks.
|
static int CompareFreeBlockAddress(const FreeBlock* left,
|
const FreeBlock* right);
|
+
|
+ friend class Heap;
|
+ CodeRange();
|
+ DISALLOW_COPY_AND_ASSIGN(CodeRange);
|
};
|
|
|
@@ -397,14 +397,14 @@
|
// The memory allocator also allocates chunks for the large object space, but
|
// they are managed by the space itself. The new space does not expand.
|
|
-class MemoryAllocator : public AllStatic {
|
+class MemoryAllocator {
|
public:
|
// Initializes its internal bookkeeping structures.
|
// Max capacity of the total space.
|
- static bool Setup(int max_capacity);
|
+ bool Setup(int max_capacity);
|
|
// Deletes valid chunks.
|
- static void TearDown();
|
+ void TearDown();
|
|
// Reserves an initial address range of virtual memory to be split between
|
// the two new space semispaces, the old space, and the map space. The
|
@@ -415,7 +415,7 @@
|
// address of the initial chunk if successful, with the side effect of
|
// setting the initial chunk, or else NULL if unsuccessful and leaves the
|
// initial chunk NULL.
|
- static void* ReserveInitialChunk(const size_t requested);
|
+ void* ReserveInitialChunk(const size_t requested);
|
|
// Commits pages from an as-yet-unmanaged block of virtual memory into a
|
// paged space. The block should be part of the initial chunk reserved via
|
@@ -424,21 +424,21 @@
|
// address is non-null and that it is big enough to hold at least one
|
// page-aligned page. The call always succeeds, and num_pages is always
|
// greater than zero.
|
- static Page* CommitPages(Address start, size_t size, PagedSpace* owner,
|
+ Page* CommitPages(Address start, size_t size, PagedSpace* owner,
|
int* num_pages);
|
|
// Commit a contiguous block of memory from the initial chunk. Assumes that
|
// the address is not NULL, the size is greater than zero, and that the
|
// block is contained in the initial chunk. Returns true if it succeeded
|
// and false otherwise.
|
- static bool CommitBlock(Address start, size_t size, Executability executable);
|
+ bool CommitBlock(Address start, size_t size, Executability executable);
|
|
|
// Uncommit a contiguous block of memory [start..(start+size)[.
|
// start is not NULL, the size is greater than zero, and the
|
// block is contained in the initial chunk. Returns true if it succeeded
|
// and false otherwise.
|
- static bool UncommitBlock(Address start, size_t size);
|
+ bool UncommitBlock(Address start, size_t size);
|
|
// Attempts to allocate the requested (non-zero) number of pages from the
|
// OS. Fewer pages might be allocated than requested. If it fails to
|
@@ -449,7 +449,7 @@
|
// number of allocated pages is returned in the output parameter
|
// allocated_pages. If the PagedSpace owner is executable and there is
|
// a code range, the pages are allocated from the code range.
|
- static Page* AllocatePages(int requested_pages, int* allocated_pages,
|
+ Page* AllocatePages(int requested_pages, int* allocated_pages,
|
PagedSpace* owner);
|
|
// Frees pages from a given page and after. If 'p' is the first page
|
@@ -457,7 +457,7 @@
|
// invalid page pointer. Otherwise, the function searches a page
|
// after 'p' that is the first page of a chunk. Pages after the
|
// found page are freed and the function returns 'p'.
|
- static Page* FreePages(Page* p);
|
+ Page* FreePages(Page* p);
|
|
// Allocates and frees raw memory of certain size.
|
// These are just thin wrappers around OS::Allocate and OS::Free,
|
@@ -465,52 +465,52 @@
|
// If the flag is EXECUTABLE and a code range exists, the requested
|
// memory is allocated from the code range. If a code range exists
|
// and the freed memory is in it, the code range manages the freed memory.
|
- static void* AllocateRawMemory(const size_t requested,
|
+ void* AllocateRawMemory(const size_t requested,
|
size_t* allocated,
|
Executability executable);
|
- static void FreeRawMemory(void* buf, size_t length);
|
+ void FreeRawMemory(void* buf, size_t length);
|
|
// Returns the maximum available bytes of heaps.
|
- static int Available() { return capacity_ < size_ ? 0 : capacity_ - size_; }
|
+ int Available() { return capacity_ < size_ ? 0 : capacity_ - size_; }
|
|
// Returns allocated spaces in bytes.
|
- static int Size() { return size_; }
|
+ int Size() { return size_; }
|
|
// Returns maximum available bytes that the old space can have.
|
- static int MaxAvailable() {
|
+ int MaxAvailable() {
|
return (Available() / Page::kPageSize) * Page::kObjectAreaSize;
|
}
|
|
// Links two pages.
|
- static inline void SetNextPage(Page* prev, Page* next);
|
+ inline void SetNextPage(Page* prev, Page* next);
|
|
// Returns the next page of a given page.
|
- static inline Page* GetNextPage(Page* p);
|
+ inline Page* GetNextPage(Page* p);
|
|
// Checks whether a page belongs to a space.
|
- static inline bool IsPageInSpace(Page* p, PagedSpace* space);
|
+ inline bool IsPageInSpace(Page* p, PagedSpace* space);
|
|
// Returns the space that owns the given page.
|
- static inline PagedSpace* PageOwner(Page* page);
|
+ inline PagedSpace* PageOwner(Page* page);
|
|
// Finds the first/last page in the same chunk as a given page.
|
- static Page* FindFirstPageInSameChunk(Page* p);
|
- static Page* FindLastPageInSameChunk(Page* p);
|
+ Page* FindFirstPageInSameChunk(Page* p);
|
+ Page* FindLastPageInSameChunk(Page* p);
|
|
#ifdef ENABLE_HEAP_PROTECTION
|
// Protect/unprotect a block of memory by marking it read-only/writable.
|
- static inline void Protect(Address start, size_t size);
|
- static inline void Unprotect(Address start, size_t size,
|
+ inline void Protect(Address start, size_t size);
|
+ inline void Unprotect(Address start, size_t size,
|
Executability executable);
|
|
// Protect/unprotect a chunk given a page in the chunk.
|
- static inline void ProtectChunkFromPage(Page* page);
|
- static inline void UnprotectChunkFromPage(Page* page);
|
+ inline void ProtectChunkFromPage(Page* page);
|
+ inline void UnprotectChunkFromPage(Page* page);
|
#endif
|
|
#ifdef DEBUG
|
// Reports statistic info of the space.
|
- static void ReportStatistics();
|
+ void ReportStatistics();
|
#endif
|
|
// Due to encoding limitation, we can only have 8K chunks.
|
@@ -526,13 +526,13 @@
|
|
private:
|
// Maximum space size in bytes.
|
- static int capacity_;
|
+ int capacity_;
|
|
// Allocated space size in bytes.
|
- static int size_;
|
+ int size_;
|
|
// The initial chunk of virtual memory.
|
- static VirtualMemory* initial_chunk_;
|
+ VirtualMemory* initial_chunk_;
|
|
// Allocated chunk info: chunk start address, chunk size, and owning space.
|
class ChunkInfo BASE_EMBEDDED {
|
@@ -554,37 +554,41 @@
|
};
|
|
// Chunks_, free_chunk_ids_ and top_ act as a stack of free chunk ids.
|
- static List<ChunkInfo> chunks_;
|
- static List<int> free_chunk_ids_;
|
- static int max_nof_chunks_;
|
- static int top_;
|
+ List<ChunkInfo> chunks_;
|
+ List<int> free_chunk_ids_;
|
+ int max_nof_chunks_;
|
+ int top_;
|
|
// Push/pop a free chunk id onto/from the stack.
|
- static void Push(int free_chunk_id);
|
- static int Pop();
|
- static bool OutOfChunkIds() { return top_ == 0; }
|
+ void Push(int free_chunk_id);
|
+ int Pop();
|
+ bool OutOfChunkIds() { return top_ == 0; }
|
|
// Frees a chunk.
|
- static void DeleteChunk(int chunk_id);
|
+ void DeleteChunk(int chunk_id);
|
|
// Basic check whether a chunk id is in the valid range.
|
- static inline bool IsValidChunkId(int chunk_id);
|
+ inline bool IsValidChunkId(int chunk_id);
|
|
// Checks whether a chunk id identifies an allocated chunk.
|
- static inline bool IsValidChunk(int chunk_id);
|
+ inline bool IsValidChunk(int chunk_id);
|
|
// Returns the chunk id that a page belongs to.
|
- static inline int GetChunkId(Page* p);
|
+ inline int GetChunkId(Page* p);
|
|
// True if the address lies in the initial chunk.
|
- static inline bool InInitialChunk(Address address);
|
+ inline bool InInitialChunk(Address address);
|
|
// Initializes pages in a chunk. Returns the first page address.
|
// This function and GetChunkId() are provided for the mark-compact
|
// collector to rebuild page headers in the from space, which is
|
// used as a marking stack and its page headers are destroyed.
|
- static Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
|
+ Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
|
PagedSpace* owner);
|
+
|
+ MemoryAllocator();
|
+ DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
|
+ friend class Heap;
|
};
|
|
|
@@ -818,9 +822,10 @@
|
|
|
class PagedSpace : public Space {
|
+ MemoryAllocator* const memory_allocator_;
|
public:
|
// Creates a space with a maximum capacity, and an id.
|
- PagedSpace(int max_capacity, AllocationSpace id, Executability executable);
|
+ PagedSpace(int max_capacity, AllocationSpace id, Executability executable, MemoryAllocator* memory_allocator);
|
|
virtual ~PagedSpace() {}
|
|
@@ -1604,8 +1609,10 @@
|
// The constructor does not allocate pages from OS.
|
explicit OldSpace(int max_capacity,
|
AllocationSpace id,
|
- Executability executable)
|
- : PagedSpace(max_capacity, id, executable), free_list_(id) {
|
+ Executability executable,
|
+ MemoryAllocator* memory_allocator
|
+ )
|
+ : PagedSpace(max_capacity, id, executable, memory_allocator), free_list_(id) {
|
page_extra_ = 0;
|
}
|
|
@@ -1666,8 +1673,10 @@
|
FixedSpace(int max_capacity,
|
AllocationSpace id,
|
int object_size_in_bytes,
|
- const char* name)
|
- : PagedSpace(max_capacity, id, NOT_EXECUTABLE),
|
+ const char* name,
|
+ MemoryAllocator* memory_allocator
|
+ )
|
+ : PagedSpace(max_capacity, id, NOT_EXECUTABLE, memory_allocator),
|
object_size_in_bytes_(object_size_in_bytes),
|
name_(name),
|
free_list_(id, object_size_in_bytes) {
|
@@ -1729,8 +1738,8 @@
|
class MapSpace : public FixedSpace {
|
public:
|
// Creates a map space object with a maximum capacity.
|
- MapSpace(int max_capacity, AllocationSpace id)
|
- : FixedSpace(max_capacity, id, Map::kSize, "map") {}
|
+ MapSpace(int max_capacity, AllocationSpace id, MemoryAllocator* memory_allocator)
|
+ : FixedSpace(max_capacity, id, Map::kSize, "map", memory_allocator) {}
|
|
// Prepares for a mark-compact GC.
|
virtual void PrepareForMarkCompact(bool will_compact);
|
@@ -1761,8 +1770,8 @@
|
class CellSpace : public FixedSpace {
|
public:
|
// Creates a property cell space object with a maximum capacity.
|
- CellSpace(int max_capacity, AllocationSpace id)
|
- : FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell") {}
|
+ CellSpace(int max_capacity, AllocationSpace id, MemoryAllocator* memory_allocator)
|
+ : FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell", memory_allocator) {}
|
|
protected:
|
#ifdef DEBUG
|
@@ -1840,8 +1849,9 @@
|
|
|
class LargeObjectSpace : public Space {
|
+ MemoryAllocator* const memory_allocator_;
|
public:
|
- explicit LargeObjectSpace(AllocationSpace id);
|
+ explicit LargeObjectSpace(AllocationSpace id, MemoryAllocator* memory_allocator);
|
virtual ~LargeObjectSpace() {}
|
|
// Initializes internal data structures.
|
@@ -1860,7 +1870,7 @@
|
// Available bytes for objects in this space, not including any extra
|
// remembered set words.
|
int Available() {
|
- return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available());
|
+ return LargeObjectChunk::ObjectSizeFor(memory_allocator_->Available());
|
}
|
|
virtual int Size() {
|
Index: src/jsregexp.cc
|
===================================================================
|
--- src/jsregexp.cc (revision 3236)
|
+++ src/jsregexp.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -105,7 +105,7 @@
|
SetElement(array, 0, pattern);
|
SetElement(array, 1, error_text);
|
Handle<Object> regexp_err = Factory::NewSyntaxError(message, array);
|
- Top::Throw(*regexp_err);
|
+ v8_context()->top_.Throw(*regexp_err);
|
}
|
|
|
@@ -119,7 +119,9 @@
|
if (offsets_vector_length_ > kStaticOffsetsVectorSize) {
|
vector_ = NewArray<int>(offsets_vector_length_);
|
} else {
|
- vector_ = static_offsets_vector_;
|
+ int* & static_offsets_vector = v8_context()->reg_exp_stack_.static_offsets_vector_;
|
+ if (!static_offsets_vector) static_offsets_vector = new int[kStaticOffsetsVectorSize];
|
+ vector_ = static_offsets_vector;
|
}
|
}
|
inline ~OffsetsVector() {
|
@@ -135,19 +137,39 @@
|
int* vector_;
|
int offsets_vector_length_;
|
static const int kStaticOffsetsVectorSize = 50;
|
- static int static_offsets_vector_[kStaticOffsetsVectorSize];
|
};
|
|
+class RegExpImpl::Impl {
|
+public:
|
+ static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii);
|
+ static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
|
|
-int OffsetsVector::static_offsets_vector_[
|
- OffsetsVector::kStaticOffsetsVectorSize];
|
|
+ // Set the subject cache. The previous string buffer is not deleted, so the
|
+ // caller should ensure that it doesn't leak.
|
+ static void SetSubjectCache(String* subject,
|
+ char* utf8_subject,
|
+ int utf8_length,
|
+ int character_position,
|
+ int utf8_position);
|
+private:
|
+ String* last_ascii_string_;
|
+ String* two_byte_cached_string_;
|
|
+ // A one element cache of the last utf8_subject string and its length. The
|
+ // subject JS String object is cached in the heap. We also cache a
|
+ // translation between position and utf8 position.
|
+ char* utf8_subject_cache_;
|
+ int utf8_length_cache_;
|
+ int utf8_position_;
|
+ int character_position_;
|
+};
|
+
|
Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
|
Handle<String> pattern,
|
Handle<String> flag_str) {
|
JSRegExp::Flags flags = RegExpFlagsFromString(flag_str);
|
- Handle<FixedArray> cached = CompilationCache::LookupRegExp(pattern, flags);
|
+ Handle<FixedArray> cached = v8_context()->compilation_cache_.LookupRegExp(pattern, flags);
|
bool in_cache = !cached.is_null();
|
LOG(RegExpCompileEvent(re, in_cache));
|
|
@@ -186,7 +208,7 @@
|
// Compilation succeeded so the data is set on the regexp
|
// and we can store it in the cache.
|
Handle<FixedArray> data(FixedArray::cast(re->data()));
|
- CompilationCache::PutRegExp(pattern, flags, data);
|
+ v8_context()->compilation_cache_.PutRegExp(pattern, flags, data);
|
|
return re;
|
}
|
@@ -202,7 +224,7 @@
|
case JSRegExp::IRREGEXP: {
|
Handle<Object> result =
|
IrregexpExec(regexp, subject, index, last_match_info);
|
- ASSERT(!result.is_null() || Top::has_pending_exception());
|
+ ASSERT(!result.is_null() || v8_context()->top_.has_pending_exception());
|
return result;
|
}
|
default:
|
@@ -269,7 +291,7 @@
|
// from the source pattern.
|
// If compilation fails, an exception is thrown and this function
|
// returns false.
|
-bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
|
+bool RegExpImpl::Impl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
|
Object* compiled_code = re->DataAt(JSRegExp::code_index(is_ascii));
|
#ifdef V8_NATIVE_REGEXP
|
if (compiled_code->IsCode()) return true;
|
@@ -280,14 +302,14 @@
|
}
|
|
|
-bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
|
+bool RegExpImpl::Impl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
|
// Compile the RegExp.
|
CompilationZoneScope zone_scope(DELETE_ON_EXIT);
|
Object* entry = re->DataAt(JSRegExp::code_index(is_ascii));
|
if (entry->IsJSObject()) {
|
// If it's a JSObject, a previous compilation failed and threw this object.
|
// Re-throw the object without trying again.
|
- Top::Throw(entry);
|
+ v8_context()->top_.Throw(entry);
|
return false;
|
}
|
ASSERT(entry->IsTheHole());
|
@@ -325,7 +347,7 @@
|
Factory::NewStringFromUtf8(CStrVector(result.error_message)));
|
Handle<Object> regexp_err =
|
Factory::NewSyntaxError("malformed_regexp", array);
|
- Top::Throw(*regexp_err);
|
+ v8_context()->top_.Throw(*regexp_err);
|
re->SetDataAt(JSRegExp::code_index(is_ascii), *regexp_err);
|
return false;
|
}
|
@@ -423,7 +445,7 @@
|
NativeRegExpMacroAssembler::Result res;
|
do {
|
bool is_ascii = subject->IsAsciiRepresentation();
|
- if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) {
|
+ if (!RegExpImpl::Impl::EnsureCompiledIrregexp(jsregexp, is_ascii)) {
|
return Handle<Object>::null();
|
}
|
Handle<Code> code(RegExpImpl::IrregexpNativeCode(*regexp, is_ascii));
|
@@ -436,7 +458,7 @@
|
// must restart from scratch.
|
} while (res == NativeRegExpMacroAssembler::RETRY);
|
if (res == NativeRegExpMacroAssembler::EXCEPTION) {
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(v8_context()->top_.has_pending_exception());
|
return Handle<Object>::null();
|
}
|
ASSERT(res == NativeRegExpMacroAssembler::SUCCESS
|
@@ -1247,17 +1269,12 @@
|
}
|
}
|
|
-
|
-static unibrow::Mapping<unibrow::Ecma262UnCanonicalize> uncanonicalize;
|
-static unibrow::Mapping<unibrow::CanonicalizationRange> canonrange;
|
-
|
-
|
// Returns the number of characters in the equivalence class, omitting those
|
// that cannot occur in the source string because it is ASCII.
|
static int GetCaseIndependentLetters(uc16 character,
|
bool ascii_subject,
|
unibrow::uchar* letters) {
|
- int length = uncanonicalize.get(character, '\0', letters);
|
+ int length = v8_context()->reg_exp_stack_.uncanonicalize_.get(character, '\0', letters);
|
// Unibrow returns 0 or 1 for characters where case independependence is
|
// trivial.
|
if (length == 0) {
|
@@ -3914,6 +3931,7 @@
|
|
void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges) {
|
unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
|
+ unibrow::Mapping<unibrow::Ecma262UnCanonicalize>& uncanonicalize = v8_context()->reg_exp_stack_.uncanonicalize_;
|
if (IsSingleton()) {
|
// If this is a singleton we just expand the one character.
|
int length = uncanonicalize.get(from(), '\0', chars);
|
@@ -3943,6 +3961,7 @@
|
// completely contained in a block we do this for all the blocks
|
// covered by the range.
|
unibrow::uchar range[unibrow::Ecma262UnCanonicalize::kMaxWidth];
|
+ unibrow::Mapping<unibrow::CanonicalizationRange>& canonrange = v8_context()->reg_exp_stack_.canonrange_;
|
// First, look up the block that contains the 'from' character.
|
int length = canonrange.get(from(), '\0', range);
|
if (length == 0) {
|
Index: src/accessors.cc
|
===================================================================
|
--- src/accessors.cc (revision 2256)
|
+++ src/accessors.cc Sat Nov 14 01:43:22 MSK 2009
|
@@ -42,7 +42,7 @@
|
static C* FindInPrototypeChain(Object* obj, bool* found_it) {
|
ASSERT(!*found_it);
|
while (!Is<C>(obj)) {
|
- if (obj == Heap::null_value()) return NULL;
|
+ if (obj == v8_context()->heap_.null_value()) return NULL;
|
obj = obj->GetPrototype();
|
}
|
*found_it = true;
|
@@ -89,9 +89,9 @@
|
if (value->IsNumber() || !value->IsJSValue()) return value;
|
JSValue* wrapper = JSValue::cast(value);
|
ASSERT(
|
- Top::context()->global_context()->number_function()->has_initial_map());
|
+ v8_context()->top_.context()->global_context()->number_function()->has_initial_map());
|
Map* number_map =
|
- Top::context()->global_context()->number_function()->initial_map();
|
+ v8_context()->top_.context()->global_context()->number_function()->initial_map();
|
if (wrapper->map() == number_map) return wrapper->value();
|
return value;
|
}
|
@@ -124,11 +124,11 @@
|
// This means one of the object's prototypes is a JSArray and
|
// the object does not have a 'length' property.
|
// Calling SetProperty causes an infinite loop.
|
- return object->IgnoreAttributesAndSetLocalProperty(Heap::length_symbol(),
|
+ return object->IgnoreAttributesAndSetLocalProperty(v8_context()->heap_.length_symbol(),
|
value, NONE);
|
}
|
}
|
- return Top::Throw(*Factory::NewRangeError("invalid_array_length",
|
+ return v8_context()->top_.Throw(*Factory::NewRangeError("invalid_array_length",
|
HandleVector<Object>(NULL, 0)));
|
}
|
|
@@ -374,7 +374,7 @@
|
// If this is not a script compiled through eval there is no eval position.
|
int compilation_type = Smi::cast(script->compilation_type())->value();
|
if (compilation_type != Script::COMPILATION_TYPE_EVAL) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
// Get the function from where eval was called and find the source position
|
@@ -400,9 +400,9 @@
|
Object* Accessors::FunctionGetPrototype(Object* object, void*) {
|
bool found_it = false;
|
JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
|
- if (!found_it) return Heap::undefined_value();
|
+ if (!found_it) return v8_context()->heap_.undefined_value();
|
if (!function->has_prototype()) {
|
- Object* prototype = Heap::AllocateFunctionPrototype(function);
|
+ Object* prototype = v8_context()->heap_.AllocateFunctionPrototype(function);
|
if (prototype->IsFailure()) return prototype;
|
Object* result = function->SetPrototype(prototype);
|
if (result->IsFailure()) return result;
|
@@ -416,7 +416,7 @@
|
void*) {
|
bool found_it = false;
|
JSFunction* function = FindInPrototypeChain<JSFunction>(object, &found_it);
|
- if (!found_it) return Heap::undefined_value();
|
+ if (!found_it) return v8_context()->heap_.undefined_value();
|
if (function->has_initial_map()) {
|
// If the function has allocated the initial map
|
// replace it with a copy containing the new prototype.
|
@@ -478,7 +478,7 @@
|
Object* Accessors::FunctionGetName(Object* object, void*) {
|
bool found_it = false;
|
JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
|
- if (!found_it) return Heap::undefined_value();
|
+ if (!found_it) return v8_context()->heap_.undefined_value();
|
return holder->shared()->name();
|
}
|
|
@@ -499,7 +499,8 @@
|
HandleScope scope;
|
bool found_it = false;
|
JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
|
- if (!found_it) return Heap::undefined_value();
|
+ Heap& heap = v8_context()->heap_;
|
+ if (!found_it) return heap.undefined_value();
|
Handle<JSFunction> function(holder);
|
|
// Find the top invocation of the function by traversing frames.
|
@@ -510,7 +511,7 @@
|
|
// If there is an arguments variable in the stack, we return that.
|
int index = ScopeInfo<>::StackSlotIndex(frame->code(),
|
- Heap::arguments_symbol());
|
+ heap.arguments_symbol());
|
if (index >= 0) {
|
Handle<Object> arguments = Handle<Object>(frame->GetExpression(index));
|
if (!arguments->IsTheHole()) return *arguments;
|
@@ -538,7 +539,7 @@
|
}
|
|
// No frame corresponding to the given function found. Return null.
|
- return Heap::null_value();
|
+ return heap.null_value();
|
}
|
|
|
@@ -558,7 +559,8 @@
|
HandleScope scope;
|
bool found_it = false;
|
JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
|
- if (!found_it) return Heap::undefined_value();
|
+ Heap& heap = v8_context()->heap_;
|
+ if (!found_it) return heap.undefined_value();
|
Handle<JSFunction> function(holder);
|
|
// Find the top invocation of the function by traversing frames.
|
@@ -570,14 +572,14 @@
|
// frames, e.g. frames for scripts not functions.
|
while (true) {
|
it.Advance();
|
- if (it.done()) return Heap::null_value();
|
+ if (it.done()) return heap.null_value();
|
JSFunction* caller = JSFunction::cast(it.frame()->function());
|
if (!caller->shared()->is_toplevel()) return caller;
|
}
|
}
|
|
// No frame corresponding to the given function found. Return null.
|
- return Heap::null_value();
|
+ return heap.null_value();
|
}
|
|
|
@@ -614,12 +616,12 @@
|
// Silently ignore the change if value is not a JSObject or null.
|
// SpiderMonkey behaves this way.
|
if (!value->IsJSObject() && !value->IsNull()) return value;
|
-
|
- for (Object* pt = value; pt != Heap::null_value(); pt = pt->GetPrototype()) {
|
+ Heap& heap = v8_context()->heap_;
|
+ for (Object* pt = value; pt != heap.null_value(); pt = pt->GetPrototype()) {
|
if (JSObject::cast(pt) == receiver) {
|
// Cycle detected.
|
HandleScope scope;
|
- return Top::Throw(*Factory::NewError("cyclic_proto",
|
+ return v8_context()->top_.Throw(*Factory::NewError("cyclic_proto",
|
HandleVector<Object>(NULL, 0)));
|
}
|
}
|
Index: src/messages.cc
|
===================================================================
|
--- src/messages.cc (revision 2723)
|
+++ src/messages.cc Sat Nov 14 01:43:20 MSK 2009
|
@@ -73,7 +73,7 @@
|
for (int i = 0; i < args.length(); i++)
|
SetElement(Handle<JSArray>::cast(array), i, args[i]);
|
|
- Handle<JSFunction> fun(Top::global_context()->make_message_fun());
|
+ Handle<JSFunction> fun(v8_context()->top_.global_context()->make_message_fun());
|
int start, end;
|
Handle<Object> script;
|
if (loc) {
|
@@ -147,12 +147,12 @@
|
Handle<String> fmt_str = Factory::LookupAsciiSymbol("FormatMessage");
|
Handle<JSFunction> fun =
|
Handle<JSFunction>(
|
- JSFunction::cast(Top::builtins()->GetProperty(*fmt_str)));
|
+ JSFunction::cast(v8_context()->top_.builtins()->GetProperty(*fmt_str)));
|
Object** argv[1] = { data.location() };
|
|
bool caught_exception;
|
Handle<Object> result =
|
- Execution::TryCall(fun, Top::builtins(), 1, argv, &caught_exception);
|
+ Execution::TryCall(fun, v8_context()->top_.builtins(), 1, argv, &caught_exception);
|
|
if (caught_exception || !result->IsString()) {
|
return Factory::LookupAsciiSymbol("<error>");
|
Index: test/cctest/test-spaces.cc
|
===================================================================
|
--- test/cctest/test-spaces.cc (revision 3106)
|
+++ test/cctest/test-spaces.cc Sun Nov 15 13:16:39 MSK 2009
|
@@ -31,6 +31,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static void VerifyRSet(Address page_start) {
|
#ifdef DEBUG
|
@@ -98,37 +99,38 @@
|
|
|
TEST(MemoryAllocator) {
|
- CHECK(Heap::ConfigureHeapDefault());
|
- CHECK(MemoryAllocator::Setup(Heap::MaxReserved()));
|
+ CHECK(v8_context()->heap_.ConfigureHeapDefault());
|
+ MemoryAllocator* const memory_allocator = v8_context()->heap_.memory_allocator();
|
+ CHECK(memory_allocator->Setup(v8_context()->heap_.MaxReserved()));
|
|
- OldSpace faked_space(Heap::MaxReserved(), OLD_POINTER_SPACE, NOT_EXECUTABLE);
|
+ OldSpace faked_space(v8_context()->heap_.MaxReserved(), OLD_POINTER_SPACE, NOT_EXECUTABLE, memory_allocator);
|
int total_pages = 0;
|
int requested = 2;
|
int allocated;
|
// If we request two pages, we should get one or two.
|
Page* first_page =
|
- MemoryAllocator::AllocatePages(requested, &allocated, &faked_space);
|
+ memory_allocator->AllocatePages(requested, &allocated, &faked_space);
|
CHECK(first_page->is_valid());
|
CHECK(allocated > 0 && allocated <= 2);
|
total_pages += allocated;
|
|
Page* last_page = first_page;
|
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
|
- CHECK(MemoryAllocator::IsPageInSpace(p, &faked_space));
|
+ CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
|
last_page = p;
|
}
|
|
// Again, we should get one or two pages.
|
Page* others =
|
- MemoryAllocator::AllocatePages(requested, &allocated, &faked_space);
|
+ memory_allocator->AllocatePages(requested, &allocated, &faked_space);
|
CHECK(others->is_valid());
|
CHECK(allocated > 0 && allocated <= 2);
|
total_pages += allocated;
|
|
- MemoryAllocator::SetNextPage(last_page, others);
|
+ memory_allocator->SetNextPage(last_page, others);
|
int page_count = 0;
|
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
|
- CHECK(MemoryAllocator::IsPageInSpace(p, &faked_space));
|
+ CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
|
page_count++;
|
}
|
CHECK(total_pages == page_count);
|
@@ -140,31 +142,32 @@
|
// should free the entire second chunk. It will return the last page in the
|
// first chunk (if the second page was in the first chunk) or else an
|
// invalid page (if the second page was the start of the second chunk).
|
- Page* free_return = MemoryAllocator::FreePages(second_page);
|
+ Page* free_return = memory_allocator->FreePages(second_page);
|
CHECK(free_return == last_page || !free_return->is_valid());
|
- MemoryAllocator::SetNextPage(first_page, free_return);
|
+ memory_allocator->SetNextPage(first_page, free_return);
|
|
// Freeing pages in the first chunk starting at the first page should free
|
// the first chunk and return an invalid page.
|
- Page* invalid_page = MemoryAllocator::FreePages(first_page);
|
+ Page* invalid_page = memory_allocator->FreePages(first_page);
|
CHECK(!invalid_page->is_valid());
|
|
- MemoryAllocator::TearDown();
|
+ memory_allocator->TearDown();
|
}
|
|
|
TEST(NewSpace) {
|
- CHECK(Heap::ConfigureHeapDefault());
|
- CHECK(MemoryAllocator::Setup(Heap::MaxReserved()));
|
+ CHECK(v8_context()->heap_.ConfigureHeapDefault());
|
+ MemoryAllocator* const memory_allocator = v8_context()->heap_.memory_allocator();
|
+ CHECK(memory_allocator->Setup(v8_context()->heap_.MaxReserved()));
|
|
NewSpace new_space;
|
|
void* chunk =
|
- MemoryAllocator::ReserveInitialChunk(4 * Heap::ReservedSemiSpaceSize());
|
+ memory_allocator->ReserveInitialChunk(4 * v8_context()->heap_.ReservedSemiSpaceSize());
|
CHECK(chunk != NULL);
|
Address start = RoundUp(static_cast<Address>(chunk),
|
- 2 * Heap::ReservedSemiSpaceSize());
|
- CHECK(new_space.Setup(start, 2 * Heap::ReservedSemiSpaceSize()));
|
+ 2 * v8_context()->heap_.ReservedSemiSpaceSize());
|
+ CHECK(new_space.Setup(start, 2 * v8_context()->heap_.ReservedSemiSpaceSize()));
|
CHECK(new_space.HasBeenSetup());
|
|
while (new_space.Available() >= Page::kMaxHeapObjectSize) {
|
@@ -174,24 +177,26 @@
|
}
|
|
new_space.TearDown();
|
- MemoryAllocator::TearDown();
|
+ memory_allocator->TearDown();
|
}
|
|
|
TEST(OldSpace) {
|
- CHECK(Heap::ConfigureHeapDefault());
|
- CHECK(MemoryAllocator::Setup(Heap::MaxReserved()));
|
+ CHECK(v8_context()->heap_.ConfigureHeapDefault());
|
+ MemoryAllocator* const memory_allocator = v8_context()->heap_.memory_allocator();
|
+ CHECK(memory_allocator->Setup(v8_context()->heap_.MaxReserved()));
|
|
- OldSpace* s = new OldSpace(Heap::MaxOldGenerationSize(),
|
+ OldSpace* s = new OldSpace(v8_context()->heap_.MaxOldGenerationSize(),
|
OLD_POINTER_SPACE,
|
- NOT_EXECUTABLE);
|
+ NOT_EXECUTABLE,
|
+ memory_allocator);
|
CHECK(s != NULL);
|
|
void* chunk =
|
- MemoryAllocator::ReserveInitialChunk(4 * Heap::ReservedSemiSpaceSize());
|
+ memory_allocator->ReserveInitialChunk(4 * v8_context()->heap_.ReservedSemiSpaceSize());
|
CHECK(chunk != NULL);
|
Address start = static_cast<Address>(chunk);
|
- size_t size = RoundUp(start, 2 * Heap::ReservedSemiSpaceSize()) - start;
|
+ size_t size = RoundUp(start, 2 * v8_context()->heap_.ReservedSemiSpaceSize()) - start;
|
|
CHECK(s->Setup(start, size));
|
|
@@ -202,14 +207,14 @@
|
|
s->TearDown();
|
delete s;
|
- MemoryAllocator::TearDown();
|
+ memory_allocator->TearDown();
|
}
|
|
|
TEST(LargeObjectSpace) {
|
- CHECK(Heap::Setup(false));
|
+ CHECK(v8_context()->heap_.Setup(false));
|
|
- LargeObjectSpace* lo = Heap::lo_space();
|
+ LargeObjectSpace* lo = v8_context()->heap_.lo_space();
|
CHECK(lo != NULL);
|
|
Map* faked_map = reinterpret_cast<Map*>(HeapObject::FromAddress(0));
|
@@ -244,5 +249,5 @@
|
lo->TearDown();
|
delete lo;
|
|
- MemoryAllocator::TearDown();
|
+ v8_context()->heap_.memory_allocator()->TearDown();
|
}
|
Index: src/ia32/assembler-ia32-inl.h
|
===================================================================
|
--- src/ia32/assembler-ia32-inl.h (revision 3072)
|
+++ src/ia32/assembler-ia32-inl.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -174,7 +174,7 @@
|
Immediate::Immediate(Handle<Object> handle) {
|
// Verify all Objects referred by code are NOT in new space.
|
Object* obj = *handle;
|
- ASSERT(!Heap::InNewSpace(obj));
|
+ ASSERT(!v8_context()->heap_.InNewSpace(obj));
|
if (obj->IsHeapObject()) {
|
x_ = reinterpret_cast<intptr_t>(handle.location());
|
rmode_ = RelocInfo::EMBEDDED_OBJECT;
|
@@ -201,7 +201,7 @@
|
void Assembler::emit(Handle<Object> handle) {
|
// Verify all Objects referred by code are NOT in new space.
|
Object* obj = *handle;
|
- ASSERT(!Heap::InNewSpace(obj));
|
+ ASSERT(!v8_context()->heap_.InNewSpace(obj));
|
if (obj->IsHeapObject()) {
|
emit(reinterpret_cast<intptr_t>(handle.location()),
|
RelocInfo::EMBEDDED_OBJECT);
|
Index: src/ast.h
|
===================================================================
|
--- src/ast.h (revision 3233)
|
+++ src/ast.h Sat Nov 14 01:43:14 MSK 2009
|
@@ -111,7 +111,6 @@
|
// Typedef only introduced to avoid unreadable code.
|
// Please do appreciate the required space in "> >".
|
typedef ZoneList<Handle<String> > ZoneStringList;
|
-typedef ZoneList<Handle<Object> > ZoneObjectList;
|
|
|
class AstNode: public ZoneObject {
|
@@ -1298,7 +1297,7 @@
|
is_expression_(is_expression),
|
loop_nesting_(0),
|
function_token_position_(RelocInfo::kNoPosition),
|
- inferred_name_(Heap::empty_string()),
|
+ inferred_name_(v8_context()->heap_.empty_string()),
|
try_fast_codegen_(false) {
|
#ifdef DEBUG
|
already_compiled_ = false;
|
@@ -1737,7 +1736,7 @@
|
virtual int max_match() { return 0; }
|
static RegExpEmpty* GetInstance() { return &kInstance; }
|
private:
|
- static RegExpEmpty kInstance;
|
+ static RegExpEmpty kInstance; ///static
|
};
|
|
|
Index: src/v8-counters.h
|
===================================================================
|
--- src/v8-counters.h (revision 3096)
|
+++ src/v8-counters.h Sat Nov 14 01:43:01 MSK 2009
|
@@ -157,15 +157,15 @@
|
|
|
// This file contains all the v8 counters that are in use.
|
-class Counters : AllStatic {
|
+class Counters {
|
public:
|
#define HT(name, caption) \
|
- static HistogramTimer name;
|
+ HistogramTimer name;
|
HISTOGRAM_TIMER_LIST(HT)
|
#undef HT
|
|
#define SC(name, caption) \
|
- static StatsCounter name;
|
+ StatsCounter name;
|
STATS_COUNTER_LIST_1(SC)
|
STATS_COUNTER_LIST_2(SC)
|
#undef SC
|
@@ -185,7 +185,11 @@
|
};
|
|
// Sliding state window counters.
|
- static StatsCounter state_counters[];
|
+ StatsCounter state_counters[state_tag_count];
|
+private:
|
+ Counters();
|
+ DISALLOW_COPY_AND_ASSIGN(Counters);
|
+ friend class V8Context;
|
};
|
|
} } // namespace v8::internal
|
Index: src/debug-agent.cc
|
===================================================================
|
--- src/debug-agent.cc (revision 2968)
|
+++ src/debug-agent.cc Sat Nov 14 01:43:01 MSK 2009
|
@@ -36,12 +36,9 @@
|
// Public V8 debugger API message handler function. This function just delegates
|
// to the debugger agent through it's data parameter.
|
void DebuggerAgentMessageHandler(const v8::Debug::Message& message) {
|
- DebuggerAgent::instance_->DebuggerMessage(message);
|
+ v8_context()->debugger_agent_->DebuggerMessage(message);
|
}
|
|
-// static
|
-DebuggerAgent* DebuggerAgent::instance_ = NULL;
|
-
|
// Debugger agent main thread.
|
void DebuggerAgent::Run() {
|
const int kOneSecondInMicros = 1000000;
|
Index: src/debug-agent.h
|
===================================================================
|
--- src/debug-agent.h (revision 2968)
|
+++ src/debug-agent.h Sat Nov 14 01:42:56 MSK 2009
|
@@ -49,11 +49,11 @@
|
session_access_(OS::CreateMutex()), session_(NULL),
|
terminate_now_(OS::CreateSemaphore(0)),
|
listening_(OS::CreateSemaphore(0)) {
|
- ASSERT(instance_ == NULL);
|
- instance_ = this;
|
+ ASSERT(v8_context()->debugger_agent_ == NULL);
|
+ v8_context()->debugger_agent_ = this;
|
}
|
~DebuggerAgent() {
|
- instance_ = NULL;
|
+ v8_context()->debugger_agent_ = NULL;
|
delete server_;
|
}
|
|
@@ -76,8 +76,6 @@
|
Semaphore* terminate_now_; // Semaphore to signal termination.
|
Semaphore* listening_;
|
|
- static DebuggerAgent* instance_;
|
-
|
friend class DebuggerAgentSession;
|
friend void DebuggerAgentMessageHandler(const v8::Debug::Message& message);
|
|
Index: test/cctest/test-log.cc
|
===================================================================
|
--- test/cctest/test-log.cc (revision 3154)
|
+++ test/cctest/test-log.cc Sun Nov 15 12:46:48 MSK 2009
|
@@ -19,74 +19,74 @@
|
using v8::internal::Address;
|
using v8::internal::EmbeddedVector;
|
using v8::internal::Logger;
|
-
|
+using v8::v8_context;
|
namespace i = v8::internal;
|
|
static void SetUp() {
|
// Log to memory buffer.
|
i::FLAG_logfile = "*";
|
i::FLAG_log = true;
|
- Logger::Setup();
|
+ v8_context()->logger_.Setup();
|
}
|
|
static void TearDown() {
|
- Logger::TearDown();
|
+ v8_context()->logger_.TearDown();
|
}
|
|
|
TEST(EmptyLog) {
|
SetUp();
|
- CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
|
- CHECK_EQ(0, Logger::GetLogLines(100, NULL, 0));
|
- CHECK_EQ(0, Logger::GetLogLines(0, NULL, 100));
|
- CHECK_EQ(0, Logger::GetLogLines(100, NULL, 100));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, NULL, 0));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(100, NULL, 0));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, NULL, 100));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(100, NULL, 100));
|
TearDown();
|
}
|
|
|
TEST(GetMessages) {
|
SetUp();
|
- Logger::StringEvent("aaa", "bbb");
|
- Logger::StringEvent("cccc", "dddd");
|
- CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
|
+ v8_context()->logger_.StringEvent("aaa", "bbb");
|
+ v8_context()->logger_.StringEvent("cccc", "dddd");
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, NULL, 0));
|
char log_lines[100];
|
memset(log_lines, 0, sizeof(log_lines));
|
// Requesting data size which is smaller than first log message length.
|
- CHECK_EQ(0, Logger::GetLogLines(0, log_lines, 3));
|
- // See Logger::StringEvent.
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, log_lines, 3));
|
+  // See Logger::StringEvent.
|
const char* line_1 = "aaa,\"bbb\"\n";
|
const int line_1_len = strlen(line_1);
|
// Still smaller than log message length.
|
- CHECK_EQ(0, Logger::GetLogLines(0, log_lines, line_1_len - 1));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, log_lines, line_1_len - 1));
|
// The exact size.
|
- CHECK_EQ(line_1_len, Logger::GetLogLines(0, log_lines, line_1_len));
|
+ CHECK_EQ(line_1_len, v8_context()->logger_.GetLogLines(0, log_lines, line_1_len));
|
CHECK_EQ(line_1, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
// A bit more than the first line length.
|
- CHECK_EQ(line_1_len, Logger::GetLogLines(0, log_lines, line_1_len + 3));
|
+ CHECK_EQ(line_1_len, v8_context()->logger_.GetLogLines(0, log_lines, line_1_len + 3));
|
log_lines[line_1_len] = '\0';
|
CHECK_EQ(line_1, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
const char* line_2 = "cccc,\"dddd\"\n";
|
const int line_2_len = strlen(line_2);
|
// Now start with line_2 beginning.
|
- CHECK_EQ(0, Logger::GetLogLines(line_1_len, log_lines, 0));
|
- CHECK_EQ(0, Logger::GetLogLines(line_1_len, log_lines, 3));
|
- CHECK_EQ(0, Logger::GetLogLines(line_1_len, log_lines, line_2_len - 1));
|
- CHECK_EQ(line_2_len, Logger::GetLogLines(line_1_len, log_lines, line_2_len));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(line_1_len, log_lines, 0));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(line_1_len, log_lines, 3));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(line_1_len, log_lines, line_2_len - 1));
|
+ CHECK_EQ(line_2_len, v8_context()->logger_.GetLogLines(line_1_len, log_lines, line_2_len));
|
CHECK_EQ(line_2, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
CHECK_EQ(line_2_len,
|
- Logger::GetLogLines(line_1_len, log_lines, line_2_len + 3));
|
+ v8_context()->logger_.GetLogLines(line_1_len, log_lines, line_2_len + 3));
|
CHECK_EQ(line_2, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
// Now get entire buffer contents.
|
const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
|
const int all_lines_len = strlen(all_lines);
|
- CHECK_EQ(all_lines_len, Logger::GetLogLines(0, log_lines, all_lines_len));
|
+ CHECK_EQ(all_lines_len, v8_context()->logger_.GetLogLines(0, log_lines, all_lines_len));
|
CHECK_EQ(all_lines, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
- CHECK_EQ(all_lines_len, Logger::GetLogLines(0, log_lines, all_lines_len + 3));
|
+ CHECK_EQ(all_lines_len, v8_context()->logger_.GetLogLines(0, log_lines, all_lines_len + 3));
|
CHECK_EQ(all_lines, log_lines);
|
memset(log_lines, 0, sizeof(log_lines));
|
TearDown();
|
@@ -94,26 +94,26 @@
|
|
|
static int GetLogLines(int start_pos, i::Vector<char>* buffer) {
|
- return Logger::GetLogLines(start_pos, buffer->start(), buffer->length());
|
+ return v8_context()->logger_.GetLogLines(start_pos, buffer->start(), buffer->length());
|
}
|
|
|
TEST(BeyondWritePosition) {
|
SetUp();
|
- Logger::StringEvent("aaa", "bbb");
|
- Logger::StringEvent("cccc", "dddd");
|
- // See Logger::StringEvent.
|
+ v8_context()->logger_.StringEvent("aaa", "bbb");
|
+ v8_context()->logger_.StringEvent("cccc", "dddd");
|
+  // See Logger::StringEvent.
|
const char* all_lines = "aaa,\"bbb\"\ncccc,\"dddd\"\n";
|
const int all_lines_len = strlen(all_lines);
|
EmbeddedVector<char, 100> buffer;
|
const int beyond_write_pos = all_lines_len;
|
- CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos, buffer.start(), 1));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(beyond_write_pos, buffer.start(), 1));
|
CHECK_EQ(0, GetLogLines(beyond_write_pos, &buffer));
|
- CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos + 1, buffer.start(), 1));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(beyond_write_pos + 1, buffer.start(), 1));
|
CHECK_EQ(0, GetLogLines(beyond_write_pos + 1, &buffer));
|
- CHECK_EQ(0, Logger::GetLogLines(beyond_write_pos + 100, buffer.start(), 1));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(beyond_write_pos + 100, buffer.start(), 1));
|
CHECK_EQ(0, GetLogLines(beyond_write_pos + 100, &buffer));
|
- CHECK_EQ(0, Logger::GetLogLines(10 * 1024 * 1024, buffer.start(), 1));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(10 * 1024 * 1024, buffer.start(), 1));
|
CHECK_EQ(0, GetLogLines(10 * 1024 * 1024, &buffer));
|
TearDown();
|
}
|
@@ -123,12 +123,12 @@
|
// Log to stdout
|
i::FLAG_logfile = "-";
|
i::FLAG_log = true;
|
- Logger::Setup();
|
- CHECK_EQ(0, Logger::GetLogLines(0, NULL, 0));
|
- CHECK_EQ(0, Logger::GetLogLines(100, NULL, 0));
|
- CHECK_EQ(0, Logger::GetLogLines(0, NULL, 100));
|
- CHECK_EQ(0, Logger::GetLogLines(100, NULL, 100));
|
- Logger::TearDown();
|
+ v8_context()->logger_.Setup();
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, NULL, 0));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(100, NULL, 0));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(0, NULL, 100));
|
+ CHECK_EQ(0, v8_context()->logger_.GetLogLines(100, NULL, 100));
|
+ v8_context()->logger_.TearDown();
|
}
|
|
|
@@ -142,7 +142,7 @@
|
|
class LoggerTestHelper : public AllStatic {
|
public:
|
- static bool IsSamplerActive() { return Logger::IsProfilerSamplerActive(); }
|
+ static bool IsSamplerActive() { return v8_context()->logger_.IsProfilerSamplerActive(); }
|
};
|
|
} // namespace v8::internal
|
@@ -170,7 +170,7 @@
|
|
|
static int CheckThatProfilerWorks(int log_pos) {
|
- Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU);
|
+ v8_context()->logger_.ResumeProfiler(v8::PROFILER_MODULE_CPU);
|
CHECK(LoggerTestHelper::IsSamplerActive());
|
|
// Verify that the current map of compiled functions has been logged.
|
@@ -212,7 +212,7 @@
|
i::OS::Sleep(1);
|
}
|
|
- Logger::PauseProfiler(v8::PROFILER_MODULE_CPU);
|
+ v8_context()->logger_.PauseProfiler(v8::PROFILER_MODULE_CPU);
|
CHECK(!LoggerTestHelper::IsSamplerActive());
|
|
// Wait 50 msecs to allow Profiler thread to process the last
|
@@ -247,10 +247,10 @@
|
|
// If tests are being run manually, V8 will be already initialized
|
// by the test below.
|
- const bool need_to_set_up_logger = i::V8::IsRunning();
|
+ const bool need_to_set_up_logger = v8_context()->v8_.IsRunning();
|
v8::HandleScope scope;
|
v8::Handle<v8::Context> env = v8::Context::New();
|
- if (need_to_set_up_logger) Logger::Setup();
|
+ if (need_to_set_up_logger) v8_context()->logger_.Setup();
|
env->Enter();
|
|
// No sampling should happen prior to resuming profiler.
|
@@ -277,7 +277,7 @@
|
CheckThatProfilerWorks(log_pos);
|
|
env->Exit();
|
- Logger::TearDown();
|
+ v8_context()->logger_.TearDown();
|
i::FLAG_prof_lazy = saved_prof_lazy;
|
i::FLAG_prof = saved_prof;
|
i::FLAG_prof_auto = saved_prof_auto;
|
@@ -469,7 +469,7 @@
|
i_source->set_resource(NULL);
|
|
// Must not crash.
|
- i::Logger::LogCompiledFunctions();
|
+ v8_context()->logger_.LogCompiledFunctions();
|
}
|
|
|
@@ -854,7 +854,7 @@
|
// are using V8.
|
//
|
// P.S. No, V8 can't be re-initialized after disposal, see include/v8.h.
|
- CHECK(!i::V8::IsRunning());
|
+ CHECK(!v8_context()->v8_.IsRunning());
|
|
i::FLAG_logfile = "*";
|
i::FLAG_log = true;
|
@@ -878,7 +878,7 @@
|
" obj.test =\n"
|
" (function a(j) { return function b() { return j; } })(100);\n"
|
"})(this);");
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
EmbeddedVector<char, 204800> buffer;
|
int log_size;
|
@@ -898,9 +898,9 @@
|
}
|
|
// Iterate heap to find compiled functions, will write to log.
|
- i::Logger::LogCompiledFunctions();
|
+ v8_context()->logger_.LogCompiledFunctions();
|
char* new_log_start = buffer.start() + log_size;
|
- const int new_log_size = Logger::GetLogLines(
|
+ const int new_log_size = v8_context()->logger_.GetLogLines(
|
log_size, new_log_start, buffer.length() - log_size);
|
CHECK_GT(new_log_size, 0);
|
CHECK_GT(buffer.length(), log_size + new_log_size);
|
@@ -934,7 +934,7 @@
|
CHECK(results_equal);
|
|
env->Exit();
|
- Logger::TearDown();
|
+ v8_context()->logger_.TearDown();
|
i::FLAG_always_compact = saved_always_compact;
|
}
|
|
Index: test/cctest/test-mark-compact.cc
|
===================================================================
|
--- test/cctest/test-mark-compact.cc (revision 3106)
|
+++ test/cctest/test-mark-compact.cc Sun Nov 15 12:46:00 MSK 2009
|
@@ -35,7 +35,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
-
|
+using v8::v8_context;
|
static v8::Persistent<v8::Context> env;
|
|
static void InitializeVM() {
|
@@ -75,7 +75,7 @@
|
// from new space.
|
FLAG_gc_global = true;
|
FLAG_always_compact = true;
|
- Heap::ConfigureHeap(2*256*KB, 4*MB);
|
+ v8_context()->heap_.ConfigureHeap(2*256*KB, 4*MB);
|
|
InitializeVM();
|
|
@@ -83,26 +83,26 @@
|
|
// Allocate a fixed array in the new space.
|
int array_size =
|
- (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
|
+ (v8_context()->heap_.MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
|
(kPointerSize * 4);
|
- Object* obj = Heap::AllocateFixedArray(array_size);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(array_size);
|
CHECK(!obj->IsFailure());
|
|
Handle<FixedArray> array(FixedArray::cast(obj));
|
|
// Array should be in the new space.
|
- CHECK(Heap::InSpace(*array, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.InSpace(*array, NEW_SPACE));
|
|
// Call the m-c collector, so array becomes an old object.
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// Array now sits in the old space
|
- CHECK(Heap::InSpace(*array, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.InSpace(*array, OLD_POINTER_SPACE));
|
}
|
|
|
TEST(NoPromotion) {
|
- Heap::ConfigureHeap(2*256*KB, 4*MB);
|
+ v8_context()->heap_.ConfigureHeap(2*256*KB, 4*MB);
|
|
// Test the situation that some objects in new space are promoted to
|
// the old space
|
@@ -111,22 +111,22 @@
|
v8::HandleScope sc;
|
|
// Do a mark compact GC to shrink the heap.
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// Allocate a big Fixed array in the new space.
|
- int size = (Heap::MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
|
+ int size = (v8_context()->heap_.MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
|
kPointerSize;
|
- Object* obj = Heap::AllocateFixedArray(size);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(size);
|
|
Handle<FixedArray> array(FixedArray::cast(obj));
|
|
// Array still stays in the new space.
|
- CHECK(Heap::InSpace(*array, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.InSpace(*array, NEW_SPACE));
|
|
// Allocate objects in the old space until out of memory.
|
FixedArray* host = *array;
|
while (true) {
|
- Object* obj = Heap::AllocateFixedArray(100, TENURED);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(100, TENURED);
|
if (obj->IsFailure()) break;
|
|
host->set(0, obj);
|
@@ -134,10 +134,10 @@
|
}
|
|
// Call mark compact GC, and it should pass.
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// array should not be promoted because the old space is full.
|
- CHECK(Heap::InSpace(*array, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.InSpace(*array, NEW_SPACE));
|
}
|
|
|
@@ -146,63 +146,63 @@
|
|
v8::HandleScope sc;
|
// call mark-compact when heap is empty
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// keep allocating garbage in new space until it fails
|
const int ARRAY_SIZE = 100;
|
Object* array;
|
do {
|
- array = Heap::AllocateFixedArray(ARRAY_SIZE);
|
+ array = v8_context()->heap_.AllocateFixedArray(ARRAY_SIZE);
|
} while (!array->IsFailure());
|
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, NEW_SPACE));
|
|
- array = Heap::AllocateFixedArray(ARRAY_SIZE);
|
+ array = v8_context()->heap_.AllocateFixedArray(ARRAY_SIZE);
|
CHECK(!array->IsFailure());
|
|
// keep allocating maps until it fails
|
Object* mapp;
|
do {
|
- mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
|
+ mapp = v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
|
} while (!mapp->IsFailure());
|
- CHECK(Heap::CollectGarbage(0, MAP_SPACE));
|
- mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, MAP_SPACE));
|
+ mapp = v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
|
CHECK(!mapp->IsFailure());
|
|
// allocate a garbage
|
- String* func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
+ String* func_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
SharedFunctionInfo* function_share =
|
- SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(func_name));
|
+ SharedFunctionInfo::cast(v8_context()->heap_.AllocateSharedFunctionInfo(func_name));
|
JSFunction* function =
|
- JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
|
+ JSFunction::cast(v8_context()->heap_.AllocateFunction(*v8_context()->top_.function_map(),
|
function_share,
|
- Heap::undefined_value()));
|
+ v8_context()->heap_.undefined_value()));
|
Map* initial_map =
|
- Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
+ Map::cast(v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
function->set_initial_map(initial_map);
|
- Top::context()->global()->SetProperty(func_name, function, NONE);
|
+ v8_context()->top_.context()->global()->SetProperty(func_name, function, NONE);
|
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
- func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
- CHECK(Top::context()->global()->HasLocalProperty(func_name));
|
- Object* func_value = Top::context()->global()->GetProperty(func_name);
|
+ func_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
+ CHECK(v8_context()->top_.context()->global()->HasLocalProperty(func_name));
|
+ Object* func_value = v8_context()->top_.context()->global()->GetProperty(func_name);
|
CHECK(func_value->IsJSFunction());
|
function = JSFunction::cast(func_value);
|
|
- obj = JSObject::cast(Heap::AllocateJSObject(function));
|
- String* obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
|
- Top::context()->global()->SetProperty(obj_name, obj, NONE);
|
- String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
+ obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
+ String* obj_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theObject"));
|
+ v8_context()->top_.context()->global()->SetProperty(obj_name, obj, NONE);
|
+ String* prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
|
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
- obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
|
- CHECK(Top::context()->global()->HasLocalProperty(obj_name));
|
- CHECK(Top::context()->global()->GetProperty(obj_name)->IsJSObject());
|
- obj = JSObject::cast(Top::context()->global()->GetProperty(obj_name));
|
- prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
+ obj_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theObject"));
|
+ CHECK(v8_context()->top_.context()->global()->HasLocalProperty(obj_name));
|
+ CHECK(v8_context()->top_.context()->global()->GetProperty(obj_name)->IsJSObject());
|
+ obj = JSObject::cast(v8_context()->top_.context()->global()->GetProperty(obj_name));
|
+ prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
|
}
|
|
@@ -225,16 +225,16 @@
|
TEST(GCCallback) {
|
InitializeVM();
|
|
- Heap::SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
|
- Heap::SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);
|
+ v8_context()->heap_.SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
|
+ v8_context()->heap_.SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);
|
|
// Scavenge does not call GC callback functions.
|
- Heap::PerformScavenge();
|
+ v8_context()->heap_.PerformScavenge();
|
|
CHECK_EQ(0, gc_starts);
|
CHECK_EQ(gc_ends, gc_starts);
|
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
CHECK_EQ(1, gc_starts);
|
CHECK_EQ(gc_ends, gc_starts);
|
}
|
@@ -252,28 +252,28 @@
|
v8::HandleScope handle_scope;
|
|
Handle<Object> g1s1 =
|
- GlobalHandles::Create(Heap::AllocateFixedArray(1));
|
+ v8_context()->global_handles_.Create(v8_context()->heap_.AllocateFixedArray(1));
|
Handle<Object> g1s2 =
|
- GlobalHandles::Create(Heap::AllocateFixedArray(1));
|
- GlobalHandles::MakeWeak(g1s1.location(),
|
+ v8_context()->global_handles_.Create(v8_context()->heap_.AllocateFixedArray(1));
|
+ v8_context()->global_handles_.MakeWeak(g1s1.location(),
|
reinterpret_cast<void*>(1234),
|
&WeakPointerCallback);
|
- GlobalHandles::MakeWeak(g1s2.location(),
|
+ v8_context()->global_handles_.MakeWeak(g1s2.location(),
|
reinterpret_cast<void*>(1234),
|
&WeakPointerCallback);
|
|
Handle<Object> g2s1 =
|
- GlobalHandles::Create(Heap::AllocateFixedArray(1));
|
+ v8_context()->global_handles_.Create(v8_context()->heap_.AllocateFixedArray(1));
|
Handle<Object> g2s2 =
|
- GlobalHandles::Create(Heap::AllocateFixedArray(1));
|
- GlobalHandles::MakeWeak(g2s1.location(),
|
+ v8_context()->global_handles_.Create(v8_context()->heap_.AllocateFixedArray(1));
|
+ v8_context()->global_handles_.MakeWeak(g2s1.location(),
|
reinterpret_cast<void*>(1234),
|
&WeakPointerCallback);
|
- GlobalHandles::MakeWeak(g2s2.location(),
|
+ v8_context()->global_handles_.MakeWeak(g2s2.location(),
|
reinterpret_cast<void*>(1234),
|
&WeakPointerCallback);
|
|
- Handle<Object> root = GlobalHandles::Create(*g1s1); // make a root.
|
+ Handle<Object> root = v8_context()->global_handles_.Create(*g1s1); // make a root.
|
|
// Connect group 1 and 2, make a cycle.
|
Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
|
@@ -282,17 +282,17 @@
|
{
|
Object** g1_objects[] = { g1s1.location(), g1s2.location() };
|
Object** g2_objects[] = { g2s1.location(), g2s2.location() };
|
- GlobalHandles::AddGroup(g1_objects, 2);
|
- GlobalHandles::AddGroup(g2_objects, 2);
|
+ v8_context()->global_handles_.AddGroup(g1_objects, 2);
|
+ v8_context()->global_handles_.AddGroup(g2_objects, 2);
|
}
|
// Do a full GC
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// All object should be alive.
|
CHECK_EQ(0, NumberOfWeakCalls);
|
|
// Weaken the root.
|
- GlobalHandles::MakeWeak(root.location(),
|
+ v8_context()->global_handles_.MakeWeak(root.location(),
|
reinterpret_cast<void*>(1234),
|
&WeakPointerCallback);
|
|
@@ -300,11 +300,11 @@
|
{
|
Object** g1_objects[] = { g1s1.location(), g1s2.location() };
|
Object** g2_objects[] = { g2s1.location(), g2s2.location() };
|
- GlobalHandles::AddGroup(g1_objects, 2);
|
- GlobalHandles::AddGroup(g2_objects, 2);
|
+ v8_context()->global_handles_.AddGroup(g1_objects, 2);
|
+ v8_context()->global_handles_.AddGroup(g2_objects, 2);
|
}
|
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
// All objects should be gone. 5 global handles in total.
|
CHECK_EQ(5, NumberOfWeakCalls);
|
Index: src/jsregexp.h
|
===================================================================
|
--- src/jsregexp.h (revision 3074)
|
+++ src/jsregexp.h Sat Nov 14 01:43:08 MSK 2009
|
@@ -149,28 +149,8 @@
|
static Code* IrregexpNativeCode(FixedArray* re, bool is_ascii);
|
|
private:
|
- static String* last_ascii_string_;
|
- static String* two_byte_cached_string_;
|
-
|
- static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii);
|
- static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
|
-
|
-
|
- // Set the subject cache. The previous string buffer is not deleted, so the
|
- // caller should ensure that it doesn't leak.
|
- static void SetSubjectCache(String* subject,
|
- char* utf8_subject,
|
- int uft8_length,
|
- int character_position,
|
- int utf8_position);
|
-
|
- // A one element cache of the last utf8_subject string and its length. The
|
- // subject JS String object is cached in the heap. We also cache a
|
- // translation between position and utf8 position.
|
- static char* utf8_subject_cache_;
|
- static int utf8_length_cache_;
|
- static int utf8_position_;
|
- static int character_position_;
|
+ class Impl;
|
+ Impl* impl;
|
};
|
|
|
@@ -1259,7 +1239,7 @@
|
struct CompilationResult {
|
explicit CompilationResult(const char* error_message)
|
: error_message(error_message),
|
- code(Heap::the_hole_value()),
|
+ code(v8_context()->heap_.the_hole_value()),
|
num_registers(0) {}
|
CompilationResult(Object* code, int registers)
|
: error_message(NULL),
|
Index: src/compiler.cc
|
===================================================================
|
--- src/compiler.cc (revision 3234)
|
+++ src/compiler.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -98,14 +98,14 @@
|
// Compute top scope and allocate variables. For lazy compilation
|
// the top scope only contains the single lazily compiled function,
|
// so this doesn't re-allocate variables repeatedly.
|
- HistogramTimerScope timer(&Counters::variable_allocation);
|
+ HistogramTimerScope timer(&v8_context()->counters_.variable_allocation);
|
Scope* top = literal->scope();
|
while (top->outer_scope() != NULL) top = top->outer_scope();
|
top->AllocateVariables(context);
|
}
|
|
#ifdef DEBUG
|
- if (Bootstrapper::IsActive() ?
|
+ if (v8_context()->bootstrapper_.IsActive() ?
|
FLAG_print_builtin_scopes :
|
FLAG_print_scopes) {
|
literal->scope()->Print();
|
@@ -163,8 +163,10 @@
|
|
PostponeInterruptsScope postpone;
|
|
- ASSERT(!i::Top::global_context().is_null());
|
- script->set_context_data((*i::Top::global_context())->data());
|
+ V8Context * const v8context = v8_context();
|
+ Top& top = v8context->top_;
|
+ ASSERT(!top.global_context().is_null());
|
+ script->set_context_data((*top.global_context())->data());
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
bool is_json = (validate == Compiler::VALIDATE_JSON);
|
@@ -183,7 +185,7 @@
|
}
|
|
// Notify debugger
|
- Debugger::OnBeforeCompile(script);
|
+ v8context->debug_.debugger()->OnBeforeCompile(script);
|
#endif
|
|
// Only allow non-global compiles for eval.
|
@@ -194,7 +196,7 @@
|
|
// Check for parse errors.
|
if (lit == NULL) {
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(top.has_pending_exception());
|
return Handle<JSFunction>::null();
|
}
|
|
@@ -207,7 +209,7 @@
|
Handle<Object> source(script->source());
|
SetElement(args, 0, source);
|
Handle<Object> result = Factory::NewSyntaxError("invalid_json", args);
|
- Top::Throw(*result, NULL);
|
+ top.Throw(*result, NULL);
|
return Handle<JSFunction>::null();
|
}
|
|
@@ -215,8 +217,8 @@
|
// rest of the function into account to avoid overlap with the
|
// parsing statistics.
|
HistogramTimer* rate = is_eval
|
- ? &Counters::compile_eval
|
- : &Counters::compile;
|
+ ? &v8context->counters_.compile_eval
|
+ : &v8context->counters_.compile;
|
HistogramTimerScope timer(rate);
|
|
// Compile the code.
|
@@ -225,14 +227,14 @@
|
|
// Check for stack-overflow exceptions.
|
if (code.is_null()) {
|
- Top::StackOverflow();
|
+ top.StackOverflow();
|
return Handle<JSFunction>::null();
|
}
|
|
#if defined ENABLE_LOGGING_AND_PROFILING || defined ENABLE_OPROFILE_AGENT
|
// Log the code generation for the script. Check explicit whether logging is
|
// to avoid allocating when not required.
|
- if (Logger::is_logging() || OProfileAgent::is_enabled()) {
|
+ if (v8context->logger_.is_logging() || OProfileAgent::is_enabled()) {
|
if (script->name()->IsString()) {
|
SmartPointer<char> data =
|
String::cast(script->name())->ToCString(DISALLOW_NULLS);
|
@@ -258,7 +260,7 @@
|
code);
|
|
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
|
- Compiler::SetFunctionInfo(fun, lit, true, script);
|
+ v8context->compiler_.SetFunctionInfo(fun, lit, true, script);
|
|
// Hint to the runtime system used when allocating space for initial
|
// property space by setting the expected number of properties for
|
@@ -267,24 +269,23 @@
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Notify debugger
|
- Debugger::OnAfterCompile(script, fun);
|
+ v8context->debug_.debugger()->OnAfterCompile(script, fun);
|
#endif
|
|
return fun;
|
}
|
|
-
|
-static StaticResource<SafeStringInputBuffer> safe_string_input_buffer;
|
-
|
-
|
+Compiler::Compiler() {}
|
Handle<JSFunction> Compiler::Compile(Handle<String> source,
|
Handle<Object> script_name,
|
int line_offset, int column_offset,
|
v8::Extension* extension,
|
ScriptDataImpl* input_pre_data) {
|
int source_length = source->length();
|
- Counters::total_load_size.Increment(source_length);
|
- Counters::total_compile_size.Increment(source_length);
|
+ V8Context * const v8context = v8_context();
|
+ Counters& counters = v8context->counters_;
|
+ counters.total_load_size.Increment(source_length);
|
+ counters.total_compile_size.Increment(source_length);
|
|
// The VM is in the COMPILER state until exiting this function.
|
VMState state(COMPILER);
|
@@ -292,7 +293,7 @@
|
// Do a lookup in the compilation cache but not for extensions.
|
Handle<JSFunction> result;
|
if (extension == NULL) {
|
- result = CompilationCache::LookupScript(source,
|
+ result = v8context->compilation_cache_.LookupScript(source,
|
script_name,
|
line_offset,
|
column_offset);
|
@@ -324,7 +325,7 @@
|
extension,
|
pre_data);
|
if (extension == NULL && !result.is_null()) {
|
- CompilationCache::PutScript(source, result);
|
+ v8context->compilation_cache_.PutScript(source, result);
|
}
|
|
// Get rid of the pre-parsing data (if necessary).
|
@@ -333,7 +334,7 @@
|
}
|
}
|
|
- if (result.is_null()) Top::ReportPendingMessages();
|
+ if (result.is_null()) v8context->top_.ReportPendingMessages();
|
return result;
|
}
|
|
@@ -347,8 +348,9 @@
|
// the input is legal json.
|
|
int source_length = source->length();
|
- Counters::total_eval_size.Increment(source_length);
|
- Counters::total_compile_size.Increment(source_length);
|
+ V8Context * const v8context = v8_context();
|
+ v8context->counters_.total_eval_size.Increment(source_length);
|
+ v8context->counters_.total_compile_size.Increment(source_length);
|
|
// The VM is in the COMPILER state until exiting this function.
|
VMState state(COMPILER);
|
@@ -359,7 +361,7 @@
|
// potential value in the cache has been validated.
|
Handle<JSFunction> result;
|
if (validate == DONT_VALIDATE_JSON)
|
- result = CompilationCache::LookupEval(source, context, is_global);
|
+ result = v8context->compilation_cache_.LookupEval(source, context, is_global);
|
|
if (result.is_null()) {
|
// Create a script object describing the script to be compiled.
|
@@ -374,7 +376,7 @@
|
if (!result.is_null() && validate != VALIDATE_JSON) {
|
// For json it's unlikely that we'll ever see exactly the same
|
// string again so we don't use the compilation cache.
|
- CompilationCache::PutEval(source, context, is_global, result);
|
+ v8context->compilation_cache_.PutEval(source, context, is_global, result);
|
}
|
}
|
|
@@ -398,7 +400,8 @@
|
int start_position = shared->start_position();
|
int end_position = shared->end_position();
|
bool is_expression = shared->is_expression();
|
- Counters::total_compile_size.Increment(end_position - start_position);
|
+ V8Context * const v8context = v8_context();
|
+ v8context->counters_.total_compile_size.Increment(end_position - start_position);
|
|
// Generate the AST for the lazily compiled function. The AST may be
|
// NULL in case of parser stack overflow.
|
@@ -409,7 +412,7 @@
|
|
// Check for parse errors.
|
if (lit == NULL) {
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(v8context->top_.has_pending_exception());
|
return false;
|
}
|
|
@@ -419,7 +422,7 @@
|
// Measure how long it takes to do the lazy compilation; only take
|
// the rest of the function into account to avoid overlap with the
|
// lazy parsing statistics.
|
- HistogramTimerScope timer(&Counters::compile_lazy);
|
+ HistogramTimerScope timer(&v8context->counters_.compile_lazy);
|
|
// Compile the code.
|
Handle<Code> code = MakeCode(lit, script, Handle<Context>::null(), false,
|
@@ -427,7 +430,7 @@
|
|
// Check for stack-overflow exception.
|
if (code.is_null()) {
|
- Top::StackOverflow();
|
+ v8context->top_.StackOverflow();
|
return false;
|
}
|
|
@@ -435,7 +438,7 @@
|
// Log the code generation. If source information is available include script
|
// name and line number. Check explicit whether logging is enabled as finding
|
// the line number is not for free.
|
- if (Logger::is_logging() || OProfileAgent::is_enabled()) {
|
+ if (v8context->logger_.is_logging() || OProfileAgent::is_enabled()) {
|
Handle<String> func_name(name->length() > 0 ?
|
*name : shared->inferred_name());
|
if (script->name()->IsString()) {
|
@@ -547,7 +550,7 @@
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Notify debugger that a new function has been added.
|
- Debugger::OnNewFunction(function);
|
+ v8_context()->debug_.debugger()->OnNewFunction(function);
|
#endif
|
|
// Set the expected number of properties for instances and return
|
Index: src/string-stream.cc
|
===================================================================
|
--- src/string-stream.cc (revision 3162)
|
+++ src/string-stream.cc Sat Nov 14 01:42:53 MSK 2009
|
@@ -34,8 +34,8 @@
|
namespace internal {
|
|
static const int kMentionedObjectCacheMaxSize = 256;
|
-static List<HeapObject*, PreallocatedStorage>* debug_object_cache = NULL;
|
-static Object* current_security_token = NULL;
|
+static List<HeapObject*, PreallocatedStorage>* debug_object_cache = NULL; ///static
|
+static Object* current_security_token = NULL; ///static
|
|
|
char* HeapStringAllocator::allocate(unsigned bytes) {
|
@@ -338,7 +338,7 @@
|
|
void StringStream::PrintUsingMap(JSObject* js_object) {
|
Map* map = js_object->map();
|
- if (!Heap::Contains(map) ||
|
+ if (!v8_context()->heap_.Contains(map) ||
|
!map->IsHeapObject() ||
|
!map->IsMap()) {
|
Add("<Invalid map>\n");
|
@@ -377,7 +377,7 @@
|
void StringStream::PrintFixedArray(FixedArray* array, unsigned int limit) {
|
for (unsigned int i = 0; i < 10 && i < limit; i++) {
|
Object* element = array->get(i);
|
- if (element != Heap::the_hole_value()) {
|
+ if (element != v8_context()->heap_.the_hole_value()) {
|
for (int len = 1; len < 18; len++)
|
Put(' ');
|
Add("%d: %o\n", i, array->get(i));
|
@@ -444,12 +444,12 @@
|
|
|
void StringStream::PrintSecurityTokenIfChanged(Object* f) {
|
- if (!f->IsHeapObject() || !Heap::Contains(HeapObject::cast(f))) {
|
+ if (!f->IsHeapObject() || !v8_context()->heap_.Contains(HeapObject::cast(f))) {
|
return;
|
}
|
Map* map = HeapObject::cast(f)->map();
|
if (!map->IsHeapObject() ||
|
- !Heap::Contains(map) ||
|
+ !v8_context()->heap_.Contains(map) ||
|
!map->IsMap() ||
|
!f->IsJSFunction()) {
|
return;
|
@@ -458,10 +458,10 @@
|
JSFunction* fun = JSFunction::cast(f);
|
Object* perhaps_context = fun->unchecked_context();
|
if (perhaps_context->IsHeapObject() &&
|
- Heap::Contains(HeapObject::cast(perhaps_context)) &&
|
+ v8_context()->heap_.Contains(HeapObject::cast(perhaps_context)) &&
|
perhaps_context->IsContext()) {
|
Context* context = fun->context();
|
- if (!Heap::Contains(context)) {
|
+ if (!v8_context()->heap_.Contains(context)) {
|
Add("(Function context is outside heap)\n");
|
return;
|
}
|
@@ -478,8 +478,8 @@
|
|
void StringStream::PrintFunction(Object* f, Object* receiver, Code** code) {
|
if (f->IsHeapObject() &&
|
- Heap::Contains(HeapObject::cast(f)) &&
|
- Heap::Contains(HeapObject::cast(f)->map()) &&
|
+ v8_context()->heap_.Contains(HeapObject::cast(f)) &&
|
+ v8_context()->heap_.Contains(HeapObject::cast(f)->map()) &&
|
HeapObject::cast(f)->map()->IsMap()) {
|
if (f->IsJSFunction()) {
|
JSFunction* fun = JSFunction::cast(f);
|
@@ -506,11 +506,11 @@
|
Add("/* warning: 'function' was not a heap object */ ");
|
return;
|
}
|
- if (!Heap::Contains(HeapObject::cast(f))) {
|
+ if (!v8_context()->heap_.Contains(HeapObject::cast(f))) {
|
Add("/* warning: 'function' was not on the heap */ ");
|
return;
|
}
|
- if (!Heap::Contains(HeapObject::cast(f)->map())) {
|
+ if (!v8_context()->heap_.Contains(HeapObject::cast(f)->map())) {
|
Add("/* warning: function's map was not on the heap */ ");
|
return;
|
}
|
@@ -526,10 +526,10 @@
|
void StringStream::PrintPrototype(JSFunction* fun, Object* receiver) {
|
Object* name = fun->shared()->name();
|
bool print_name = false;
|
- for (Object* p = receiver; p != Heap::null_value(); p = p->GetPrototype()) {
|
+ for (Object* p = receiver; p != v8_context()->heap_.null_value(); p = p->GetPrototype()) {
|
if (p->IsJSObject()) {
|
Object* key = JSObject::cast(p)->SlowReverseLookup(fun);
|
- if (key != Heap::undefined_value()) {
|
+ if (key != v8_context()->heap_.undefined_value()) {
|
if (!name->IsString() ||
|
!key->IsString() ||
|
!String::cast(name)->Equals(String::cast(key))) {
|
Index: src/ia32/builtins-ia32.cc
|
===================================================================
|
--- src/ia32/builtins-ia32.cc (revision 3229)
|
+++ src/ia32/builtins-ia32.cc Sat Nov 14 01:43:20 MSK 2009
|
@@ -76,7 +76,7 @@
|
// Set expected number of arguments to zero (not changing eax).
|
__ Set(ebx, Immediate(0));
|
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
|
- __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
|
+ __ jmp(Handle<Code>(v8_context()->builtins_.builtin(ArgumentsAdaptorTrampoline)),
|
RelocInfo::CODE_TARGET);
|
}
|
|
@@ -314,7 +314,7 @@
|
__ pop(ecx);
|
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
|
__ push(ecx);
|
- __ IncrementCounter(&Counters::constructed_objects, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.constructed_objects, 1);
|
__ ret(0);
|
}
|
|
@@ -359,7 +359,7 @@
|
|
// Invoke the code.
|
if (is_construct) {
|
- __ call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
|
+ __ call(Handle<Code>(v8_context()->builtins_.builtin(Builtins::JSConstructCall)),
|
RelocInfo::CODE_TARGET);
|
} else {
|
ParameterCount actual(eax);
|
@@ -488,6 +488,7 @@
|
__ push(ebx);
|
__ dec(eax);
|
|
+ Builtins& builtins = v8_context()->builtins_;
|
// 6. Check that function really was a function and get the code to
|
// call from the function and check that the number of expected
|
// arguments matches what we're providing.
|
@@ -496,7 +497,7 @@
|
__ j(not_zero, &invoke, taken);
|
__ xor_(ebx, Operand(ebx));
|
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
|
- __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
|
+ __ jmp(Handle<Code>(builtins.builtin(ArgumentsAdaptorTrampoline)),
|
RelocInfo::CODE_TARGET);
|
|
__ bind(&invoke);
|
@@ -506,7 +507,7 @@
|
__ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
|
__ lea(edx, FieldOperand(edx, Code::kHeaderSize));
|
__ cmp(eax, Operand(ebx));
|
- __ j(not_equal, Handle<Code>(builtin(ArgumentsAdaptorTrampoline)));
|
+ __ j(not_equal, Handle<Code>(builtins.builtin(ArgumentsAdaptorTrampoline)));
|
}
|
|
// 7. Jump (tail-call) to the code in register edx without checking arguments.
|
@@ -609,7 +610,7 @@
|
__ push(eax);
|
|
// Use inline caching to speed up access to arguments.
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// It is important that we do not have a test instruction after the
|
// call. A test instruction after the call is used to indicate that
|
@@ -907,7 +908,7 @@
|
edi,
|
kPreallocatedArrayElements,
|
&prepare_generic_code_call);
|
- __ IncrementCounter(&Counters::array_function_native, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.array_function_native, 1);
|
__ pop(ebx);
|
if (construct_call) {
|
__ pop(edi);
|
@@ -947,7 +948,7 @@
|
edi,
|
true,
|
&prepare_generic_code_call);
|
- __ IncrementCounter(&Counters::array_function_native, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.array_function_native, 1);
|
__ pop(ebx);
|
if (construct_call) {
|
__ pop(edi);
|
@@ -973,7 +974,7 @@
|
edi,
|
false,
|
&prepare_generic_code_call);
|
- __ IncrementCounter(&Counters::array_function_native, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.array_function_native, 1);
|
__ mov(eax, ebx);
|
__ pop(ebx);
|
if (construct_call) {
|
@@ -1059,7 +1060,7 @@
|
// Jump to the generic array code in case the specialized code cannot handle
|
// the construction.
|
__ bind(&generic_array_code);
|
- Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
|
+ Code* code = v8_context()->builtins_.builtin(Builtins::ArrayCodeGeneric);
|
Handle<Code> array_code(code);
|
__ jmp(array_code, RelocInfo::CODE_TARGET);
|
}
|
@@ -1095,7 +1096,7 @@
|
// Jump to the generic construct code in case the specialized code cannot
|
// handle the construction.
|
__ bind(&generic_constructor);
|
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
|
+ Code* code = v8_context()->builtins_.builtin(Builtins::JSConstructStubGeneric);
|
Handle<Code> generic_construct_stub(code);
|
__ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
|
}
|
@@ -1143,7 +1144,7 @@
|
// -----------------------------------
|
|
Label invoke, dont_adapt_arguments;
|
- __ IncrementCounter(&Counters::arguments_adaptors, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.arguments_adaptors, 1);
|
|
Label enough, too_few;
|
__ cmp(eax, Operand(ebx));
|
Index: src/mark-compact.h
|
===================================================================
|
--- src/mark-compact.h (revision 2841)
|
+++ src/mark-compact.h Sat Nov 14 01:42:55 MSK 2009
|
@@ -36,75 +36,47 @@
|
// to the first live object in the page (only used for old and map objects).
|
typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);
|
|
-// Callback function for non-live blocks in the old generation.
|
-typedef void (*DeallocateFunction)(Address start, int size_in_bytes);
|
-
|
-
|
// Forward declarations.
|
class RootMarkingVisitor;
|
class MarkingVisitor;
|
+class UpdatingVisitor;
|
|
-
|
// -------------------------------------------------------------------------
|
// Mark-Compact collector
|
//
|
-// All methods are static.
|
|
-class MarkCompactCollector: public AllStatic {
|
+class MarkCompactCollector {
|
public:
|
- // Type of functions to compute forwarding addresses of objects in
|
- // compacted spaces. Given an object and its size, return a (non-failure)
|
- // Object* that will be the object after forwarding. There is a separate
|
- // allocation function for each (compactable) space based on the location
|
- // of the object before compaction.
|
- typedef Object* (*AllocationFunction)(HeapObject* object, int object_size);
|
-
|
- // Type of functions to encode the forwarding address for an object.
|
- // Given the object, its size, and the new (non-failure) object it will be
|
- // forwarded to, encode the forwarding address. For paged spaces, the
|
- // 'offset' input/output parameter contains the offset of the forwarded
|
- // object from the forwarding address of the previous live object in the
|
- // page as input, and is updated to contain the offset to be used for the
|
- // next live object in the same page. For spaces using a different
|
- // encoding (ie, contiguous spaces), the offset parameter is ignored.
|
- typedef void (*EncodingFunction)(HeapObject* old_object,
|
- int object_size,
|
- Object* new_object,
|
- int* offset);
|
-
|
- // Type of functions to process non-live objects.
|
- typedef void (*ProcessNonLiveFunction)(HeapObject* object);
|
-
|
// Set the global force_compaction flag, it must be called before Prepare
|
// to take effect.
|
- static void SetForceCompaction(bool value) {
|
+ void SetForceCompaction(bool value) {
|
force_compaction_ = value;
|
}
|
|
// Prepares for GC by resetting relocation info in old and map spaces and
|
// choosing spaces to compact.
|
- static void Prepare(GCTracer* tracer);
|
+ void Prepare(GCTracer* tracer);
|
|
// Performs a global garbage collection.
|
- static void CollectGarbage();
|
+ void CollectGarbage();
|
|
// True if the last full GC performed heap compaction.
|
- static bool HasCompacted() { return compacting_collection_; }
|
+ bool HasCompacted() { return compacting_collection_; }
|
|
// True after the Prepare phase if the compaction is taking place.
|
- static bool IsCompacting() { return compacting_collection_; }
|
+ bool IsCompacting() { return compacting_collection_; }
|
|
// The count of the number of objects left marked at the end of the last
|
// completed full GC (expected to be zero).
|
- static int previous_marked_count() { return previous_marked_count_; }
|
+ int previous_marked_count() { return previous_marked_count_; }
|
|
// During a full GC, there is a stack-allocated GCTracer that is used for
|
// bookkeeping information. Return a pointer to that tracer.
|
- static GCTracer* tracer() { return tracer_; }
|
+ GCTracer* tracer() { return tracer_; }
|
|
#ifdef DEBUG
|
// Checks whether performing mark-compact collection.
|
- static bool in_use() { return state_ > PREPARE_GC; }
|
+ bool in_use() { return state_ > PREPARE_GC; }
|
#endif
|
|
private:
|
@@ -121,314 +93,70 @@
|
};
|
|
// The current stage of the collector.
|
- static CollectorState state_;
|
+ CollectorState state_;
|
#endif
|
|
// Global flag that forces a compaction.
|
- static bool force_compaction_;
|
+ bool force_compaction_;
|
|
// Global flag indicating whether spaces were compacted on the last GC.
|
- static bool compacting_collection_;
|
+ bool compacting_collection_;
|
|
// Global flag indicating whether spaces will be compacted on the next GC.
|
- static bool compact_on_next_gc_;
|
+ bool compact_on_next_gc_;
|
|
// The number of objects left marked at the end of the last completed full
|
// GC (expected to be zero).
|
- static int previous_marked_count_;
|
+ int previous_marked_count_;
|
|
// A pointer to the current stack-allocated GC tracer object during a full
|
// collection (NULL before and after).
|
- static GCTracer* tracer_;
|
+ GCTracer* tracer_;
|
|
- // Finishes GC, performs heap verification if enabled.
|
- static void Finish();
|
-
|
- // -----------------------------------------------------------------------
|
- // Phase 1: Marking live objects.
|
- //
|
- // Before: The heap has been prepared for garbage collection by
|
- // MarkCompactCollector::Prepare() and is otherwise in its
|
- // normal state.
|
- //
|
- // After: Live objects are marked and non-live objects are unmarked.
|
-
|
-
|
- friend class RootMarkingVisitor;
|
- friend class MarkingVisitor;
|
-
|
- // Marking operations for objects reachable from roots.
|
- static void MarkLiveObjects();
|
-
|
- static void MarkUnmarkedObject(HeapObject* obj);
|
-
|
- static inline void MarkObject(HeapObject* obj) {
|
- if (!obj->IsMarked()) MarkUnmarkedObject(obj);
|
- }
|
-
|
- static inline void SetMark(HeapObject* obj) {
|
- tracer_->increment_marked_count();
|
#ifdef DEBUG
|
- UpdateLiveObjectCount(obj);
|
-#endif
|
- obj->SetMark();
|
- }
|
-
|
- // Creates back pointers for all map transitions, stores them in
|
- // the prototype field. The original prototype pointers are restored
|
- // in ClearNonLiveTransitions(). All JSObject maps
|
- // connected by map transitions have the same prototype object, which
|
- // is why we can use this field temporarily for back pointers.
|
- static void CreateBackPointers();
|
-
|
- // Mark a Map and its DescriptorArray together, skipping transitions.
|
- static void MarkMapContents(Map* map);
|
- static void MarkDescriptorArray(DescriptorArray* descriptors);
|
-
|
- // Mark the heap roots and all objects reachable from them.
|
- static void MarkRoots(RootMarkingVisitor* visitor);
|
-
|
- // Mark the symbol table specially. References to symbols from the
|
- // symbol table are weak.
|
- static void MarkSymbolTable();
|
-
|
- // Mark objects in object groups that have at least one object in the
|
- // group marked.
|
- static void MarkObjectGroups();
|
-
|
- // Mark all objects in an object group with at least one marked
|
- // object, then all objects reachable from marked objects in object
|
- // groups, and repeat.
|
- static void ProcessObjectGroups(MarkingVisitor* visitor);
|
-
|
- // Mark objects reachable (transitively) from objects in the marking stack
|
- // or overflowed in the heap.
|
- static void ProcessMarkingStack(MarkingVisitor* visitor);
|
-
|
- // Mark objects reachable (transitively) from objects in the marking
|
- // stack. This function empties the marking stack, but may leave
|
- // overflowed objects in the heap, in which case the marking stack's
|
- // overflow flag will be set.
|
- static void EmptyMarkingStack(MarkingVisitor* visitor);
|
-
|
- // Refill the marking stack with overflowed objects from the heap. This
|
- // function either leaves the marking stack full or clears the overflow
|
- // flag on the marking stack.
|
- static void RefillMarkingStack();
|
-
|
- // Callback function for telling whether the object *p is an unmarked
|
- // heap object.
|
- static bool IsUnmarkedHeapObject(Object** p);
|
-
|
-#ifdef DEBUG
|
- static void UpdateLiveObjectCount(HeapObject* obj);
|
-#endif
|
-
|
- // We sweep the large object space in the same way whether we are
|
- // compacting or not, because the large object space is never compacted.
|
- static void SweepLargeObjectSpace();
|
-
|
- // Test whether a (possibly marked) object is a Map.
|
- static inline bool SafeIsMap(HeapObject* object);
|
-
|
- // Map transitions from a live map to a dead map must be killed.
|
- // We replace them with a null descriptor, with the same key.
|
- static void ClearNonLiveTransitions();
|
-
|
// -----------------------------------------------------------------------
|
- // Phase 2: Sweeping to clear mark bits and free non-live objects for
|
- // a non-compacting collection, or else computing and encoding
|
- // forwarding addresses for a compacting collection.
|
- //
|
- // Before: Live objects are marked and non-live objects are unmarked.
|
- //
|
- // After: (Non-compacting collection.) Live objects are unmarked,
|
- // non-live regions have been added to their space's free
|
- // list.
|
- //
|
- // After: (Compacting collection.) The forwarding address of live
|
- // objects in the paged spaces is encoded in their map word
|
- // along with their (non-forwarded) map pointer.
|
- //
|
- // The forwarding address of live objects in the new space is
|
- // written to their map word's offset in the inactive
|
- // semispace.
|
- //
|
- // Bookkeeping data is written to the remembered-set are of
|
- // eached paged-space page that contains live objects after
|
- // compaction:
|
- //
|
- // The 3rd word of the page (first word of the remembered
|
- // set) contains the relocation top address, the address of
|
- // the first word after the end of the last live object in
|
- // the page after compaction.
|
- //
|
- // The 4th word contains the zero-based index of the page in
|
- // its space. This word is only used for map space pages, in
|
- // order to encode the map addresses in 21 bits to free 11
|
- // bits per map word for the forwarding address.
|
- //
|
- // The 5th word contains the (nonencoded) forwarding address
|
- // of the first live object in the page.
|
- //
|
- // In both the new space and the paged spaces, a linked list
|
- // of live regions is constructructed (linked through
|
- // pointers in the non-live region immediately following each
|
- // live region) to speed further passes of the collector.
|
-
|
- // Encodes forwarding addresses of objects in compactable parts of the
|
- // heap.
|
- static void EncodeForwardingAddresses();
|
-
|
- // Encodes the forwarding addresses of objects in new space.
|
- static void EncodeForwardingAddressesInNewSpace();
|
-
|
- // Function template to encode the forwarding addresses of objects in
|
- // paged spaces, parameterized by allocation and non-live processing
|
- // functions.
|
- template<AllocationFunction Alloc, ProcessNonLiveFunction ProcessNonLive>
|
- static void EncodeForwardingAddressesInPagedSpace(PagedSpace* space);
|
-
|
- // Iterates live objects in a space, passes live objects
|
- // to a callback function which returns the heap size of the object.
|
- // Returns the number of live objects iterated.
|
- static int IterateLiveObjects(NewSpace* space, HeapObjectCallback size_f);
|
- static int IterateLiveObjects(PagedSpace* space, HeapObjectCallback size_f);
|
-
|
- // Iterates the live objects between a range of addresses, returning the
|
- // number of live objects.
|
- static int IterateLiveObjectsInRange(Address start, Address end,
|
- HeapObjectCallback size_func);
|
-
|
- // Callback functions for deallocating non-live blocks in the old
|
- // generation.
|
- static void DeallocateOldPointerBlock(Address start, int size_in_bytes);
|
- static void DeallocateOldDataBlock(Address start, int size_in_bytes);
|
- static void DeallocateCodeBlock(Address start, int size_in_bytes);
|
- static void DeallocateMapBlock(Address start, int size_in_bytes);
|
- static void DeallocateCellBlock(Address start, int size_in_bytes);
|
-
|
- // If we are not compacting the heap, we simply sweep the spaces except
|
- // for the large object space, clearing mark bits and adding unmarked
|
- // regions to each space's free list.
|
- static void SweepSpaces();
|
-
|
- // -----------------------------------------------------------------------
|
- // Phase 3: Updating pointers in live objects.
|
- //
|
- // Before: Same as after phase 2 (compacting collection).
|
- //
|
- // After: All pointers in live objects, including encoded map
|
- // pointers, are updated to point to their target's new
|
- // location. The remembered set area of each paged-space
|
- // page containing live objects still contains bookkeeping
|
- // information.
|
-
|
- friend class UpdatingVisitor; // helper for updating visited objects
|
-
|
- // Updates pointers in all spaces.
|
- static void UpdatePointers();
|
-
|
- // Updates pointers in an object in new space.
|
- // Returns the heap size of the object.
|
- static int UpdatePointersInNewObject(HeapObject* obj);
|
-
|
- // Updates pointers in an object in old spaces.
|
- // Returns the heap size of the object.
|
- static int UpdatePointersInOldObject(HeapObject* obj);
|
-
|
- // Calculates the forwarding address of an object in an old space.
|
- static Address GetForwardingAddressInOldSpace(HeapObject* obj);
|
-
|
- // -----------------------------------------------------------------------
|
- // Phase 4: Relocating objects.
|
- //
|
- // Before: Pointers to live objects are updated to point to their
|
- // target's new location. The remembered set area of each
|
- // paged-space page containing live objects still contains
|
- // bookkeeping information.
|
- //
|
- // After: Objects have been moved to their new addresses. The
|
- // remembered set area of each paged-space page containing
|
- // live objects still contains bookkeeping information.
|
-
|
- // Relocates objects in all spaces.
|
- static void RelocateObjects();
|
-
|
- // Converts a code object's inline target to addresses, convention from
|
- // address to target happens in the marking phase.
|
- static int ConvertCodeICTargetToAddress(HeapObject* obj);
|
-
|
- // Relocate a map object.
|
- static int RelocateMapObject(HeapObject* obj);
|
-
|
- // Relocates an old object.
|
- static int RelocateOldPointerObject(HeapObject* obj);
|
- static int RelocateOldDataObject(HeapObject* obj);
|
-
|
- // Relocate a property cell object.
|
- static int RelocateCellObject(HeapObject* obj);
|
-
|
- // Helper function.
|
- static inline int RelocateOldNonCodeObject(HeapObject* obj,
|
- PagedSpace* space);
|
-
|
- // Relocates an object in the code space.
|
- static int RelocateCodeObject(HeapObject* obj);
|
-
|
- // Copy a new object.
|
- static int RelocateNewObject(HeapObject* obj);
|
-
|
- // -----------------------------------------------------------------------
|
- // Phase 5: Rebuilding remembered sets.
|
- //
|
- // Before: The heap is in a normal state except that remembered sets
|
- // in the paged spaces are not correct.
|
- //
|
- // After: The heap is in a normal state.
|
-
|
- // Rebuild remembered set in old and map spaces.
|
- static void RebuildRSets();
|
-
|
-#ifdef DEBUG
|
- // -----------------------------------------------------------------------
|
// Debugging variables, functions and classes
|
// Counters used for debugging the marking phase of mark-compact or
|
// mark-sweep collection.
|
|
// Number of live objects in Heap::to_space_.
|
- static int live_young_objects_;
|
+ int live_young_objects_;
|
|
// Number of live objects in Heap::old_pointer_space_.
|
- static int live_old_pointer_objects_;
|
+ int live_old_pointer_objects_;
|
|
// Number of live objects in Heap::old_data_space_.
|
- static int live_old_data_objects_;
|
+ int live_old_data_objects_;
|
|
// Number of live objects in Heap::code_space_.
|
- static int live_code_objects_;
|
+ int live_code_objects_;
|
|
// Number of live objects in Heap::map_space_.
|
- static int live_map_objects_;
|
+ int live_map_objects_;
|
|
// Number of live objects in Heap::cell_space_.
|
- static int live_cell_objects_;
|
+ int live_cell_objects_;
|
|
// Number of live objects in Heap::lo_space_.
|
- static int live_lo_objects_;
|
+ int live_lo_objects_;
|
|
// Number of live bytes in this collection.
|
- static int live_bytes_;
|
+ int live_bytes_;
|
+#endif
|
+ class MarkCompactCollectorImpl;
|
+ friend class MarkCompactCollectorImpl;
|
+ friend class RootMarkingVisitor;
|
+ friend class MarkingVisitor;
|
+ friend class UpdatingVisitor;
|
+ friend class V8Context;
|
|
- friend class MarkObjectVisitor;
|
- static void VisitObject(HeapObject* obj);
|
+ MarkCompactCollectorImpl* const mark_compact_collector_impl;
|
|
- friend class UnmarkObjectVisitor;
|
- static void UnmarkObject(HeapObject* obj);
|
-#endif
|
+ MarkCompactCollector();
|
+ ~MarkCompactCollector();
|
+ DISALLOW_COPY_AND_ASSIGN(MarkCompactCollector);
|
};
|
|
-
|
} } // namespace v8::internal
|
|
#endif // V8_MARK_COMPACT_H_
|
Index: test/cctest/test-compiler.cc
|
===================================================================
|
--- test/cctest/test-compiler.cc (revision 1481)
|
+++ test/cctest/test-compiler.cc Sun Nov 15 12:39:47 MSK 2009
|
@@ -37,6 +37,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static v8::Persistent<v8::Context> env;
|
|
@@ -99,14 +100,14 @@
|
|
static Object* GetGlobalProperty(const char* name) {
|
Handle<String> symbol = Factory::LookupAsciiSymbol(name);
|
- return Top::context()->global()->GetProperty(*symbol);
|
+ return v8_context()->top_.context()->global()->GetProperty(*symbol);
|
}
|
|
|
static void SetGlobalProperty(const char* name, Object* value) {
|
Handle<Object> object(value);
|
Handle<String> symbol = Factory::LookupAsciiSymbol(name);
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
SetProperty(global, symbol, object, NONE);
|
}
|
|
@@ -114,9 +115,9 @@
|
static Handle<JSFunction> Compile(const char* source) {
|
Handle<String> source_code(Factory::NewStringFromUtf8(CStrVector(source)));
|
Handle<JSFunction> boilerplate =
|
- Compiler::Compile(source_code, Handle<String>(), 0, 0, NULL, NULL);
|
+ v8_context()->compiler_.Compile(source_code, Handle<String>(), 0, 0, NULL, NULL);
|
return Factory::NewFunctionFromBoilerplate(boilerplate,
|
- Top::global_context());
|
+ v8_context()->top_.global_context());
|
}
|
|
|
@@ -129,7 +130,7 @@
|
if (fun.is_null()) return -1;
|
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
return GetGlobalProperty("result")->Number();
|
@@ -150,7 +151,7 @@
|
SetGlobalProperty("x", Smi::FromInt(x));
|
SetGlobalProperty("y", Smi::FromInt(y));
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
return GetGlobalProperty("result")->Number();
|
@@ -170,7 +171,7 @@
|
|
SetGlobalProperty("x", Smi::FromInt(x));
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
return GetGlobalProperty("result")->Number();
|
@@ -191,7 +192,7 @@
|
|
SetGlobalProperty("n", Smi::FromInt(n));
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
return GetGlobalProperty("result")->Number();
|
@@ -212,7 +213,7 @@
|
Handle<JSFunction> fun = Compile(source);
|
if (fun.is_null()) return;
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
}
|
@@ -245,7 +246,7 @@
|
Handle<JSFunction> fun = Compile(source);
|
CHECK(!fun.is_null());
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
CHECK_EQ(511.0, GetGlobalProperty("r")->Number());
|
@@ -260,11 +261,11 @@
|
Handle<JSFunction> fun = Compile(source);
|
CHECK(!fun.is_null());
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Handle<Object> result =
|
Execution::Call(fun, global, 0, NULL, &has_pending_exception);
|
CHECK(has_pending_exception);
|
- CHECK_EQ(42.0, Top::pending_exception()->Number());
|
+ CHECK_EQ(42.0, v8_context()->top_.pending_exception()->Number());
|
}
|
|
|
@@ -285,13 +286,13 @@
|
|
// Run the generated code to populate the global object with 'foo'.
|
bool has_pending_exception;
|
- Handle<JSObject> global(Top::context()->global());
|
+ Handle<JSObject> global(v8_context()->top_.context()->global());
|
Execution::Call(fun0, global, 0, NULL, &has_pending_exception);
|
CHECK(!has_pending_exception);
|
|
Handle<Object> fun1 =
|
Handle<Object>(
|
- Top::context()->global()->GetProperty(
|
+ v8_context()->top_.context()->global()->GetProperty(
|
*Factory::LookupAsciiSymbol("foo")));
|
CHECK(fun1->IsJSFunction());
|
|
@@ -311,7 +312,7 @@
|
v8::HandleScope scope;
|
|
Handle<Script> script = Factory::NewScript(Factory::empty_string());
|
- script->set_source(Heap::undefined_value());
|
+ script->set_source(v8_context()->heap_.undefined_value());
|
CHECK_EQ(-1, GetScriptLineNumber(script, 0));
|
CHECK_EQ(-1, GetScriptLineNumber(script, 100));
|
CHECK_EQ(-1, GetScriptLineNumber(script, -1));
|
Index: src/allocation.cc
|
===================================================================
|
--- src/allocation.cc (revision 2038)
|
+++ src/allocation.cc Sat Nov 14 01:43:03 MSK 2009
|
@@ -36,7 +36,9 @@
|
void* Malloced::New(size_t size) {
|
ASSERT(NativeAllocationChecker::allocation_allowed());
|
void* result = malloc(size);
|
- if (result == NULL) V8::FatalProcessOutOfMemory("Malloced operator new");
|
+ if (result == NULL) {
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Malloced operator new");
|
+ }
|
return result;
|
}
|
|
@@ -47,7 +49,7 @@
|
|
|
void Malloced::FatalProcessOutOfMemory() {
|
- V8::FatalProcessOutOfMemory("Out of memory");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Out of memory");
|
}
|
|
|
@@ -97,16 +99,18 @@
|
return result;
|
}
|
|
+StorageData::StorageData():
|
in_use_list_(0), free_list_(0), preallocated_(false), allocation_disallowed_(0)
|
+ #ifdef DEBUG
|
+ , rset_used_(true)
|
+ #endif
|
+{
|
+}
|
|
-int NativeAllocationChecker::allocation_disallowed_ = 0;
|
-
|
-
|
-PreallocatedStorage PreallocatedStorage::in_use_list_(0);
|
-PreallocatedStorage PreallocatedStorage::free_list_(0);
|
-bool PreallocatedStorage::preallocated_ = false;
|
-
|
-
|
void PreallocatedStorage::Init(size_t size) {
|
+ StorageData& storage_data = v8_context()->storage_data_;
|
+ PreallocatedStorage & free_list_ = storage_data.free_list_;
|
+
|
ASSERT(free_list_.next_ == &free_list_);
|
ASSERT(free_list_.previous_ == &free_list_);
|
PreallocatedStorage* free_chunk =
|
@@ -114,14 +118,20 @@
|
free_list_.next_ = free_list_.previous_ = free_chunk;
|
free_chunk->next_ = free_chunk->previous_ = &free_list_;
|
free_chunk->size_ = size - sizeof(PreallocatedStorage);
|
- preallocated_ = true;
|
+ storage_data.preallocated_ = true;
|
}
|
|
|
void* PreallocatedStorage::New(size_t size) {
|
- if (!preallocated_) {
|
+ StorageData& storage_data = v8_context()->storage_data_;
|
+
|
+ if (!storage_data.preallocated_) {
|
return FreeStoreAllocationPolicy::New(size);
|
}
|
+
|
+ PreallocatedStorage & free_list_ = storage_data.free_list_;
|
+ PreallocatedStorage & in_use_list_ = storage_data.in_use_list_;
|
+
|
ASSERT(free_list_.next_ != &free_list_);
|
ASSERT(free_list_.previous_ != &free_list_);
|
size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
|
@@ -164,7 +174,8 @@
|
if (p == NULL) {
|
return;
|
}
|
- if (!preallocated_) {
|
+ StorageData& storage_data = v8_context()->storage_data_;
|
+ if (!storage_data.preallocated_) {
|
FreeStoreAllocationPolicy::Delete(p);
|
return;
|
}
|
@@ -172,7 +183,7 @@
|
ASSERT(storage->next_->previous_ == storage);
|
ASSERT(storage->previous_->next_ == storage);
|
storage->Unlink();
|
- storage->LinkTo(&free_list_);
|
+ storage->LinkTo(&storage_data.free_list_);
|
}
|
|
|
Index: src/frame-element.cc
|
===================================================================
|
--- src/frame-element.cc (revision 2492)
|
+++ src/frame-element.cc Sat Nov 14 01:43:17 MSK 2009
|
@@ -37,8 +37,7 @@
|
|
|
FrameElement::ZoneObjectList* FrameElement::ConstantList() {
|
- static ZoneObjectList list(10);
|
- return &list;
|
+ return v8_context()->code_generator_data_.frame_element_constants_list();
|
}
|
|
|
Index: src/log-utils.cc
|
===================================================================
|
--- src/log-utils.cc (revision 3010)
|
+++ src/log-utils.cc Sat Nov 14 01:43:00 MSK 2009
|
@@ -254,7 +254,7 @@
|
|
|
void LogMessageBuilder::AppendAddress(Address addr) {
|
- static Address last_address_ = NULL;
|
+ static Address last_address_ = NULL; ///static
|
AppendAddress(addr, last_address_);
|
last_address_ = addr;
|
}
|
Index: src/prettyprinter.cc
|
===================================================================
|
--- src/prettyprinter.cc (revision 3233)
|
+++ src/prettyprinter.cc Sat Nov 14 01:43:03 MSK 2009
|
@@ -507,6 +507,7 @@
|
|
void PrettyPrinter::PrintLiteral(Handle<Object> value, bool quote) {
|
Object* object = *value;
|
+ Heap& heap = v8_context()->heap_;
|
if (object->IsString()) {
|
String* string = String::cast(object);
|
if (quote) Print("\"");
|
@@ -514,13 +515,13 @@
|
Print("%c", string->Get(i));
|
}
|
if (quote) Print("\"");
|
- } else if (object == Heap::null_value()) {
|
+ } else if (object == heap.null_value()) {
|
Print("null");
|
- } else if (object == Heap::true_value()) {
|
+ } else if (object == heap.true_value()) {
|
Print("true");
|
- } else if (object == Heap::false_value()) {
|
+ } else if (object == heap.false_value()) {
|
Print("false");
|
- } else if (object == Heap::undefined_value()) {
|
+ } else if (object == heap.undefined_value()) {
|
Print("undefined");
|
} else if (object->IsNumber()) {
|
Print("%g", object->Number());
|
@@ -612,7 +613,7 @@
|
static void SetAstPrinter(AstPrinter* a) { ast_printer_ = a; }
|
|
private:
|
- static AstPrinter* ast_printer_;
|
+ static AstPrinter* ast_printer_; ///static
|
};
|
|
|
Index: src/dateparser.cc
|
===================================================================
|
--- src/dateparser.cc (revision 2038)
|
+++ src/dateparser.cc Sat Nov 14 01:43:15 MSK 2009
|
@@ -126,7 +126,7 @@
|
SKIP_WRITE_BARRIER);
|
} else {
|
output->set(UTC_OFFSET,
|
- Heap::null_value(),
|
+ v8_context()->heap_.null_value(),
|
SKIP_WRITE_BARRIER);
|
}
|
return true;
|
Index: src/platform-freebsd.cc
|
===================================================================
|
--- src/platform-freebsd.cc (revision 2974)
|
+++ src/platform-freebsd.cc Sat Nov 14 01:42:55 MSK 2009
|
@@ -540,7 +540,7 @@
|
|
#ifdef ENABLE_LOGGING_AND_PROFILING
|
|
-static Sampler* active_sampler_ = NULL;
|
+static Sampler* active_sampler_ = NULL; ///static
|
|
static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
|
USE(info);
|
Index: src/compilation-cache.cc
|
===================================================================
|
--- src/compilation-cache.cc (revision 3065)
|
+++ src/compilation-cache.cc Sat Nov 14 01:43:12 MSK 2009
|
@@ -144,23 +144,43 @@
|
DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheRegExp);
|
};
|
|
+struct CompilationCache::CompilationCacheImpl {
|
+ // Allocate all the sub-caches.
|
+ CompilationCacheScript script;
|
+ CompilationCacheEval eval_global;
|
+ CompilationCacheEval eval_contextual;
|
+ CompilationCacheRegExp reg_exp;
|
+ CompilationSubCache* subcaches[kSubCacheCount];
|
|
-// Statically allocate all the sub-caches.
|
-static CompilationCacheScript script(kScriptGenerations);
|
-static CompilationCacheEval eval_global(kEvalGlobalGenerations);
|
-static CompilationCacheEval eval_contextual(kEvalContextualGenerations);
|
-static CompilationCacheRegExp reg_exp(kRegExpGenerations);
|
-static CompilationSubCache* subcaches[kSubCacheCount] =
|
- {&script, &eval_global, &eval_contextual, ®_exp};
|
+ // Current enable state of the compilation cache.
|
+ bool enabled;
|
-
|
+
|
+ CompilationCacheImpl():
|
+ script(kScriptGenerations),
|
+ eval_global(kEvalGlobalGenerations),
|
+ eval_contextual(kEvalContextualGenerations),
|
+ reg_exp(kRegExpGenerations),
|
+ enabled(true)
|
+ {
|
+ CompilationSubCache* _subcaches[kSubCacheCount] = {&script, &eval_global, &eval_contextual, ®_exp};
|
+ for(int i = 0; i < kSubCacheCount; ++i) subcaches[i] = _subcaches[i];
|
+ }
|
+};
|
|
-// Current enable state of the compilation cache.
|
-static bool enabled = true;
|
-static inline bool IsEnabled() {
|
- return FLAG_compilation_cache && enabled;
|
+CompilationCache::CompilationCache():
|
+ compilation_cache_impl(new CompilationCacheImpl())
|
+{
|
}
|
|
+CompilationCache::~CompilationCache() {
|
+ delete compilation_cache_impl;
|
+}
|
|
+inline bool CompilationCache::IsEnabled() {
|
+ return FLAG_compilation_cache && compilation_cache_impl->enabled;
|
+}
|
+
|
+
|
static Handle<CompilationCacheTable> AllocateTable(int size) {
|
CALL_HEAP_FUNCTION(CompilationCacheTable::Allocate(size),
|
CompilationCacheTable);
|
@@ -189,7 +209,7 @@
|
}
|
|
// Set the first generation as unborn.
|
- tables_[0] = Heap::undefined_value();
|
+ tables_[0] = v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -200,7 +220,7 @@
|
|
void CompilationSubCache::Clear() {
|
for (int i = 0; i < generations_; i++) {
|
- tables_[i] = Heap::undefined_value();
|
+ tables_[i] = v8_context()->heap_.undefined_value();
|
}
|
}
|
|
@@ -278,10 +298,10 @@
|
// If the script was found in a later generation, we promote it to
|
// the first generation to let it survive longer in the cache.
|
if (generation != 0) Put(source, boilerplate);
|
- Counters::compilation_cache_hits.Increment();
|
+ v8_context()->counters_.compilation_cache_hits.Increment();
|
return boilerplate;
|
} else {
|
- Counters::compilation_cache_misses.Increment();
|
+ v8_context()->counters_.compilation_cache_misses.Increment();
|
return Handle<JSFunction>::null();
|
}
|
}
|
@@ -317,10 +337,10 @@
|
if (generation != 0) {
|
Put(source, context, boilerplate);
|
}
|
- Counters::compilation_cache_hits.Increment();
|
+ v8_context()->counters_.compilation_cache_hits.Increment();
|
return boilerplate;
|
} else {
|
- Counters::compilation_cache_misses.Increment();
|
+ v8_context()->counters_.compilation_cache_misses.Increment();
|
return Handle<JSFunction>::null();
|
}
|
}
|
@@ -357,10 +377,10 @@
|
if (generation != 0) {
|
Put(source, flags, data);
|
}
|
- Counters::compilation_cache_hits.Increment();
|
+ v8_context()->counters_.compilation_cache_hits.Increment();
|
return data;
|
} else {
|
- Counters::compilation_cache_misses.Increment();
|
+ v8_context()->counters_.compilation_cache_misses.Increment();
|
return Handle<FixedArray>::null();
|
}
|
}
|
@@ -383,7 +403,7 @@
|
return Handle<JSFunction>::null();
|
}
|
|
- return script.Lookup(source, name, line_offset, column_offset);
|
+ return compilation_cache_impl->script.Lookup(source, name, line_offset, column_offset);
|
}
|
|
|
@@ -396,9 +416,9 @@
|
|
Handle<JSFunction> result;
|
if (is_global) {
|
- result = eval_global.Lookup(source, context);
|
+ result = compilation_cache_impl->eval_global.Lookup(source, context);
|
} else {
|
- result = eval_contextual.Lookup(source, context);
|
+ result = compilation_cache_impl->eval_contextual.Lookup(source, context);
|
}
|
return result;
|
}
|
@@ -410,7 +430,7 @@
|
return Handle<FixedArray>::null();
|
}
|
|
- return reg_exp.Lookup(source, flags);
|
+ return compilation_cache_impl->reg_exp.Lookup(source, flags);
|
}
|
|
|
@@ -421,7 +441,7 @@
|
}
|
|
ASSERT(boilerplate->IsBoilerplate());
|
- script.Put(source, boilerplate);
|
+ compilation_cache_impl->script.Put(source, boilerplate);
|
}
|
|
|
@@ -436,9 +456,9 @@
|
HandleScope scope;
|
ASSERT(boilerplate->IsBoilerplate());
|
if (is_global) {
|
- eval_global.Put(source, context, boilerplate);
|
+ compilation_cache_impl->eval_global.Put(source, context, boilerplate);
|
} else {
|
- eval_contextual.Put(source, context, boilerplate);
|
+ compilation_cache_impl->eval_contextual.Put(source, context, boilerplate);
|
}
|
}
|
|
@@ -451,38 +471,38 @@
|
return;
|
}
|
|
- reg_exp.Put(source, flags, data);
|
+ compilation_cache_impl->reg_exp.Put(source, flags, data);
|
}
|
|
|
void CompilationCache::Clear() {
|
for (int i = 0; i < kSubCacheCount; i++) {
|
- subcaches[i]->Clear();
|
+ compilation_cache_impl->subcaches[i]->Clear();
|
}
|
}
|
|
|
void CompilationCache::Iterate(ObjectVisitor* v) {
|
for (int i = 0; i < kSubCacheCount; i++) {
|
- subcaches[i]->Iterate(v);
|
+ compilation_cache_impl->subcaches[i]->Iterate(v);
|
}
|
}
|
|
|
void CompilationCache::MarkCompactPrologue() {
|
for (int i = 0; i < kSubCacheCount; i++) {
|
- subcaches[i]->Age();
|
+ compilation_cache_impl->subcaches[i]->Age();
|
}
|
}
|
|
|
void CompilationCache::Enable() {
|
- enabled = true;
|
+ compilation_cache_impl->enabled = true;
|
}
|
|
|
void CompilationCache::Disable() {
|
- enabled = false;
|
+ compilation_cache_impl->enabled = false;
|
Clear();
|
}
|
|
Index: src/execution.cc
|
===================================================================
|
--- src/execution.cc (revision 3229)
|
+++ src/execution.cc Sat Nov 14 01:42:55 MSK 2009
|
@@ -99,12 +99,13 @@
|
|
// Update the pending exception flag and return the value.
|
*has_pending_exception = value->IsException();
|
- ASSERT(*has_pending_exception == Top::has_pending_exception());
|
+ Top& top = v8_context()->top_;
|
+ ASSERT(*has_pending_exception == top.has_pending_exception());
|
if (*has_pending_exception) {
|
- Top::ReportPendingMessages();
|
+ top.ReportPendingMessages();
|
return Handle<Object>();
|
} else {
|
- Top::clear_pending_message();
|
+ top.clear_pending_message();
|
}
|
|
return Handle<Object>(value);
|
@@ -122,7 +123,7 @@
|
|
Handle<Object> Execution::New(Handle<JSFunction> func, int argc,
|
Object*** args, bool* pending_exception) {
|
- return Invoke(true, func, Top::global(), argc, args, pending_exception);
|
+ return Invoke(true, func, v8_context()->top_.global(), argc, args, pending_exception);
|
}
|
|
|
@@ -142,20 +143,22 @@
|
Handle<Object> result = Invoke(false, func, receiver, argc, args,
|
caught_exception);
|
|
+ Top& top = v8_context()->top_;
|
+
|
if (*caught_exception) {
|
ASSERT(catcher.HasCaught());
|
- ASSERT(Top::has_pending_exception());
|
- ASSERT(Top::external_caught_exception());
|
- if (Top::pending_exception() == Heap::termination_exception()) {
|
+ ASSERT(top.has_pending_exception());
|
+ ASSERT(top.external_caught_exception());
|
+ if (top.pending_exception() == v8_context()->heap_.termination_exception()) {
|
result = Factory::termination_exception();
|
} else {
|
result = v8::Utils::OpenHandle(*catcher.Exception());
|
}
|
- Top::OptionalRescheduleException(true);
|
+ top.OptionalRescheduleException(true);
|
}
|
|
- ASSERT(!Top::has_pending_exception());
|
- ASSERT(!Top::external_caught_exception());
|
+ ASSERT(!top.has_pending_exception());
|
+ ASSERT(!top.external_caught_exception());
|
return result;
|
}
|
|
@@ -178,7 +181,7 @@
|
if (object->IsHeapObject() &&
|
HeapObject::cast(*object)->map()->has_instance_call_handler()) {
|
return Handle<JSFunction>(
|
- Top::global_context()->call_as_function_delegate());
|
+ v8_context()->top_.global_context()->call_as_function_delegate());
|
}
|
|
return Factory::undefined_value();
|
@@ -196,15 +199,13 @@
|
if (object->IsHeapObject() &&
|
HeapObject::cast(*object)->map()->has_instance_call_handler()) {
|
return Handle<JSFunction>(
|
- Top::global_context()->call_as_constructor_delegate());
|
+ v8_context()->top_.global_context()->call_as_constructor_delegate());
|
}
|
|
return Factory::undefined_value();
|
}
|
|
|
-// Static state for stack guards.
|
-StackGuard::ThreadLocal StackGuard::thread_local_;
|
|
|
bool StackGuard::IsStackOverflow() {
|
@@ -335,6 +336,7 @@
|
ExecutionAccess access;
|
memcpy(to, reinterpret_cast<char*>(&thread_local_), sizeof(ThreadLocal));
|
ThreadLocal blank;
|
+ blank.Clear();
|
thread_local_ = blank;
|
return to + sizeof(ThreadLocal);
|
}
|
@@ -343,22 +345,38 @@
|
char* StackGuard::RestoreStackGuard(char* from) {
|
ExecutionAccess access;
|
memcpy(reinterpret_cast<char*>(&thread_local_), from, sizeof(ThreadLocal));
|
- Heap::SetStackLimits();
|
+ v8_context()->heap_.SetStackLimits();
|
return from + sizeof(ThreadLocal);
|
}
|
|
+class ExecutionData {
|
+public:
|
+ Thread::LocalStorageKey stack_limit_key;
|
+ GCExtension kGCExtension;
|
+ v8::DeclareExtension kGCExtensionDeclaration;
|
|
-static internal::Thread::LocalStorageKey stack_limit_key =
|
- internal::Thread::CreateThreadLocalKey();
|
+ ExecutionData():stack_limit_key(internal::Thread::CreateThreadLocalKey()), kGCExtension(),
|
+ kGCExtensionDeclaration(&kGCExtension)
|
+ {
|
+ }
|
+};
|
|
+void StackGuard::Setup() {
|
+ V8Context* const v8context = v8_context();
|
+ v8context->stack_guard_.thread_local_.Clear();
|
+ v8context->execution_data_=new ExecutionData();
|
+}
|
|
+void StackGuard::TearDown() {
|
+ delete v8_context()->execution_data_;
|
+}
|
+
|
void StackGuard::FreeThreadResources() {
|
Thread::SetThreadLocal(
|
- stack_limit_key,
|
+ v8_context()->execution_data_->stack_limit_key,
|
reinterpret_cast<void*>(thread_local_.real_climit_));
|
}
|
|
-
|
void StackGuard::ThreadLocal::Clear() {
|
real_jslimit_ = kIllegalLimit;
|
jslimit_ = kIllegalLimit;
|
@@ -367,7 +385,7 @@
|
nesting_ = 0;
|
postpone_interrupts_nesting_ = 0;
|
interrupt_flags_ = 0;
|
- Heap::SetStackLimits();
|
+ v8_context()->heap_.SetStackLimits();
|
}
|
|
|
@@ -381,7 +399,7 @@
|
jslimit_ = SimulatorStack::JsLimitFromCLimit(limit);
|
real_climit_ = limit;
|
climit_ = limit;
|
- Heap::SetStackLimits();
|
+ v8_context()->heap_.SetStackLimits();
|
}
|
nesting_ = 0;
|
postpone_interrupts_nesting_ = 0;
|
@@ -396,7 +414,7 @@
|
|
void StackGuard::InitThread(const ExecutionAccess& lock) {
|
thread_local_.Initialize();
|
- void* stored_limit = Thread::GetThreadLocal(stack_limit_key);
|
+ void* stored_limit = Thread::GetThreadLocal(v8_context()->execution_data_->stack_limit_key);
|
// You should hold the ExecutionAccess lock when you call this.
|
if (stored_limit != NULL) {
|
StackGuard::SetStackLimit(reinterpret_cast<intptr_t>(stored_limit));
|
@@ -410,7 +428,8 @@
|
do { \
|
Object** args[argc] = argv; \
|
ASSERT(has_pending_exception != NULL); \
|
- return Call(Top::name##_fun(), Top::builtins(), argc, args, \
|
+ Top & top = v8_context()->top_; \
|
+ return Call(top.name##_fun(), top.builtins(), argc, args, \
|
has_pending_exception); \
|
} while (false)
|
|
@@ -427,7 +446,7 @@
|
double value = obj->Number();
|
result = !((value == 0) || isnan(value));
|
}
|
- return Handle<Object>(Heap::ToBoolean(result));
|
+ return Handle<Object>(v8_context()->heap_.ToBoolean(result));
|
}
|
|
|
@@ -483,7 +502,7 @@
|
}
|
|
Handle<Object> char_at =
|
- GetProperty(Top::builtins(), Factory::char_at_symbol());
|
+ GetProperty(v8_context()->top_.builtins(), Factory::char_at_symbol());
|
if (!char_at->IsJSFunction()) {
|
return Factory::undefined_value();
|
}
|
@@ -507,13 +526,14 @@
|
Handle<FunctionTemplateInfo> data, bool* exc) {
|
// Fast case: see if the function has already been instantiated
|
int serial_number = Smi::cast(data->serial_number())->value();
|
+ Top& top = v8_context()->top_;
|
Object* elm =
|
- Top::global_context()->function_cache()->GetElement(serial_number);
|
+ top.global_context()->function_cache()->GetElement(serial_number);
|
if (elm->IsJSFunction()) return Handle<JSFunction>(JSFunction::cast(elm));
|
// The function has not yet been instantiated in this context; do it.
|
Object** args[1] = { Handle<Object>::cast(data).location() };
|
Handle<Object> result =
|
- Call(Top::instantiate_fun(), Top::builtins(), 1, args, exc);
|
+ Call(top.instantiate_fun(), top.builtins(), 1, args, exc);
|
if (*exc) return Handle<JSFunction>::null();
|
return Handle<JSFunction>::cast(result);
|
}
|
@@ -540,8 +560,9 @@
|
return Handle<JSObject>(JSObject::cast(result));
|
} else {
|
Object** args[1] = { Handle<Object>::cast(data).location() };
|
+ Top& top = v8_context()->top_;
|
Handle<Object> result =
|
- Call(Top::instantiate_fun(), Top::builtins(), 1, args, exc);
|
+ Call(top.instantiate_fun(), top.builtins(), 1, args, exc);
|
if (*exc) return Handle<JSObject>::null();
|
return Handle<JSObject>::cast(result);
|
}
|
@@ -552,7 +573,8 @@
|
Handle<Object> instance_template,
|
bool* exc) {
|
Object** args[2] = { instance.location(), instance_template.location() };
|
- Execution::Call(Top::configure_instance_fun(), Top::builtins(), 2, args, exc);
|
+ Top& top = v8_context()->top_;
|
+ Execution::Call(top.configure_instance_fun(), top.builtins(), 2, args, exc);
|
}
|
|
|
@@ -566,8 +588,9 @@
|
pos.location(),
|
is_global.location() };
|
bool caught_exception = false;
|
- Handle<Object> result = TryCall(Top::get_stack_trace_line_fun(),
|
- Top::builtins(), argc, args,
|
+ Top& top = v8_context()->top_;
|
+ Handle<Object> result = TryCall(top.get_stack_trace_line_fun(),
|
+ top.builtins(), argc, args,
|
&caught_exception);
|
if (caught_exception || !result->IsString()) return Factory::empty_symbol();
|
return Handle<String>::cast(result);
|
@@ -576,15 +599,16 @@
|
|
static Object* RuntimePreempt() {
|
// Clear the preempt request flag.
|
- StackGuard::Continue(PREEMPT);
|
+ V8Context * const v8context = v8_context();
|
+ v8context->stack_guard_.Continue(PREEMPT);
|
|
- ContextSwitcher::PreemptionReceived();
|
+ v8context->thread_manager_.PreemptionReceived();
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- if (Debug::InDebugger()) {
|
+ if (v8context->debug_.InDebugger()) {
|
// If currently in the debugger don't do any actual preemption but record
|
// that preemption occoured while in the debugger.
|
- Debug::PreemptionWhileInDebugger();
|
+ v8context->debug_.PreemptionWhileInDebugger();
|
} else {
|
// Perform preemption.
|
v8::Unlocker unlocker;
|
@@ -596,15 +620,17 @@
|
Thread::YieldCPU();
|
#endif
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
Object* Execution::DebugBreakHelper() {
|
+ V8Context * const v8context = v8_context();
|
+ Heap& heap = v8context->heap_;
|
// Just continue if breaks are disabled.
|
- if (Debug::disable_break()) {
|
- return Heap::undefined_value();
|
+ if (v8context->debug_.disable_break()) {
|
+ return heap.undefined_value();
|
}
|
|
{
|
@@ -614,57 +640,59 @@
|
if (fun && fun->IsJSFunction()) {
|
// Don't stop in builtin functions.
|
if (JSFunction::cast(fun)->IsBuiltin()) {
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
GlobalObject* global = JSFunction::cast(fun)->context()->global();
|
// Don't stop in debugger functions.
|
- if (Debug::IsDebugGlobal(global)) {
|
- return Heap::undefined_value();
|
+ if (v8context->debug_.IsDebugGlobal(global)) {
|
+ return heap.undefined_value();
|
}
|
}
|
}
|
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
// Collect the break state before clearing the flags.
|
bool debug_command_only =
|
- StackGuard::IsDebugCommand() && !StackGuard::IsDebugBreak();
|
+ stack_guard.IsDebugCommand() && !stack_guard.IsDebugBreak();
|
|
// Clear the debug request flags.
|
- StackGuard::Continue(DEBUGBREAK);
|
- StackGuard::Continue(DEBUGCOMMAND);
|
+ stack_guard.Continue(DEBUGBREAK);
|
+ stack_guard.Continue(DEBUGCOMMAND);
|
|
HandleScope scope;
|
// Enter the debugger. Just continue if we fail to enter the debugger.
|
EnterDebugger debugger;
|
if (debugger.FailedToEnter()) {
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
|
// Notify the debug event listeners. Indicate auto continue if the break was
|
// a debug command break.
|
- Debugger::OnDebugBreak(Factory::undefined_value(), debug_command_only);
|
+ v8context->debug_.debugger()->OnDebugBreak(Factory::undefined_value(), debug_command_only);
|
|
// Return to continue execution.
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
#endif
|
|
Object* Execution::HandleStackGuardInterrupt() {
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- if (StackGuard::IsDebugBreak() || StackGuard::IsDebugCommand()) {
|
+ if (stack_guard.IsDebugBreak() || stack_guard.IsDebugCommand()) {
|
DebugBreakHelper();
|
}
|
#endif
|
- if (StackGuard::IsPreempted()) RuntimePreempt();
|
- if (StackGuard::IsTerminateExecution()) {
|
- StackGuard::Continue(TERMINATE);
|
- return Top::TerminateExecution();
|
+ if (stack_guard.IsPreempted()) RuntimePreempt();
|
+ if (stack_guard.IsTerminateExecution()) {
|
+ stack_guard.Continue(TERMINATE);
|
+ return v8_context()->top_.TerminateExecution();
|
}
|
- if (StackGuard::IsInterrupted()) {
|
+ if (stack_guard.IsInterrupted()) {
|
// interrupt
|
- StackGuard::Continue(INTERRUPT);
|
- return Top::StackOverflow();
|
+ stack_guard.Continue(INTERRUPT);
|
+ return v8_context()->top_.StackOverflow();
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
// --- G C E x t e n s i o n ---
|
@@ -680,12 +708,7 @@
|
|
v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
|
// All allocation spaces other than NEW_SPACE have the same effect.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
return v8::Undefined();
|
}
|
-
|
-
|
-static GCExtension kGCExtension;
|
-v8::DeclareExtension kGCExtensionDeclaration(&kGCExtension);
|
-
|
} } // namespace v8::internal
|
Index: src/assembler.cc
|
===================================================================
|
--- src/assembler.cc (revision 3229)
|
+++ src/assembler.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -207,7 +207,7 @@
|
#ifdef DEBUG
|
byte* begin_pos = pos_;
|
#endif
|
- Counters::reloc_info_count.Increment();
|
+ v8_context()->counters_.reloc_info_count.Increment();
|
ASSERT(rinfo->pc() - last_pc_ >= 0);
|
ASSERT(RelocInfo::NUMBER_OF_MODES < kMaxRelocModes);
|
// Use unsigned delta-encoding for pc.
|
@@ -492,7 +492,7 @@
|
ASSERT(addr != NULL);
|
// Check that we can find the right code object.
|
Code* code = Code::GetCodeFromTargetAddress(addr);
|
- Object* found = Heap::FindCodeObject(addr);
|
+ Object* found = v8_context()->heap_.FindCodeObject(addr);
|
ASSERT(found->IsCode());
|
ASSERT(code->address() == HeapObject::cast(found)->address());
|
break;
|
@@ -519,7 +519,7 @@
|
// Implementation of ExternalReference
|
|
ExternalReference::ExternalReference(Builtins::CFunctionId id)
|
- : address_(Redirect(Builtins::c_function_address(id))) {}
|
+ : address_(Redirect(v8_context()->builtins_.c_function_address(id))) {}
|
|
|
ExternalReference::ExternalReference(ApiFunction* fun)
|
@@ -527,7 +527,7 @@
|
|
|
ExternalReference::ExternalReference(Builtins::Name name)
|
- : address_(Builtins::builtin_address(name)) {}
|
+ : address_(v8_context()->builtins_.builtin_address(name)) {}
|
|
|
ExternalReference::ExternalReference(Runtime::FunctionId id)
|
@@ -551,7 +551,7 @@
|
|
|
ExternalReference::ExternalReference(Top::AddressId id)
|
- : address_(Top::get_address_from_id(id)) {}
|
+ : address_(v8_context()->top_.get_address_from_id(id)) {}
|
|
|
ExternalReference::ExternalReference(const SCTableReference& table_ref)
|
@@ -564,12 +564,12 @@
|
|
|
ExternalReference ExternalReference::builtin_passed_function() {
|
- return ExternalReference(&Builtins::builtin_passed_function);
|
+ return ExternalReference(&v8_context()->builtins_.builtin_passed_function);
|
}
|
|
|
ExternalReference ExternalReference::random_positive_smi_function() {
|
- return ExternalReference(Redirect(FUNCTION_ADDR(V8::RandomPositiveSmi)));
|
+ return ExternalReference(Redirect(FUNCTION_ADDR(v8_context()->v8_.RandomPositiveSmi)));
|
}
|
|
|
@@ -579,62 +579,62 @@
|
|
|
ExternalReference ExternalReference::roots_address() {
|
- return ExternalReference(Heap::roots_address());
|
+ return ExternalReference(v8_context()->heap_.roots_address());
|
}
|
|
|
ExternalReference ExternalReference::address_of_stack_limit() {
|
- return ExternalReference(StackGuard::address_of_jslimit());
|
+ return ExternalReference(v8_context()->stack_guard_.address_of_jslimit());
|
}
|
|
|
ExternalReference ExternalReference::address_of_real_stack_limit() {
|
- return ExternalReference(StackGuard::address_of_real_jslimit());
|
+ return ExternalReference(v8_context()->stack_guard_.address_of_real_jslimit());
|
}
|
|
|
ExternalReference ExternalReference::address_of_regexp_stack_limit() {
|
- return ExternalReference(RegExpStack::limit_address());
|
+ return ExternalReference(v8_context()->reg_exp_stack_.limit_address());
|
}
|
|
|
ExternalReference ExternalReference::new_space_start() {
|
- return ExternalReference(Heap::NewSpaceStart());
|
+ return ExternalReference(v8_context()->heap_.NewSpaceStart());
|
}
|
|
|
ExternalReference ExternalReference::new_space_allocation_top_address() {
|
- return ExternalReference(Heap::NewSpaceAllocationTopAddress());
|
+ return ExternalReference(v8_context()->heap_.NewSpaceAllocationTopAddress());
|
}
|
|
|
ExternalReference ExternalReference::heap_always_allocate_scope_depth() {
|
- return ExternalReference(Heap::always_allocate_scope_depth_address());
|
+ return ExternalReference(v8_context()->heap_.always_allocate_scope_depth_address());
|
}
|
|
|
ExternalReference ExternalReference::new_space_allocation_limit_address() {
|
- return ExternalReference(Heap::NewSpaceAllocationLimitAddress());
|
+ return ExternalReference(v8_context()->heap_.NewSpaceAllocationLimitAddress());
|
}
|
|
|
ExternalReference ExternalReference::handle_scope_extensions_address() {
|
- return ExternalReference(HandleScope::current_extensions_address());
|
+ return ExternalReference(v8_context()->handle_scope_implementer_.current_extensions_address());
|
}
|
|
|
ExternalReference ExternalReference::handle_scope_next_address() {
|
- return ExternalReference(HandleScope::current_next_address());
|
+ return ExternalReference(v8_context()->handle_scope_implementer_.current_next_address());
|
}
|
|
|
ExternalReference ExternalReference::handle_scope_limit_address() {
|
- return ExternalReference(HandleScope::current_limit_address());
|
+ return ExternalReference(v8_context()->handle_scope_implementer_.current_limit_address());
|
}
|
|
|
ExternalReference ExternalReference::scheduled_exception_address() {
|
- return ExternalReference(Top::scheduled_exception_address());
|
+ return ExternalReference(v8_context()->top_.scheduled_exception_address());
|
}
|
|
|
@@ -731,10 +731,6 @@
|
false));
|
}
|
|
-
|
-ExternalReferenceRedirector* ExternalReference::redirector_ = NULL;
|
-
|
-
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
ExternalReference ExternalReference::debug_break() {
|
return ExternalReference(Redirect(FUNCTION_ADDR(Debug::Break)));
|
Index: src/runtime.cc
|
===================================================================
|
--- src/runtime.cc (revision 3209)
|
+++ src/runtime.cc Sat Nov 14 01:42:57 MSK 2009
|
@@ -51,7 +51,7 @@
|
|
|
#define RUNTIME_ASSERT(value) \
|
- if (!(value)) return Top::ThrowIllegalOperation();
|
+ if (!(value)) return v8_context()->top_.ThrowIllegalOperation();
|
|
// Cast the given object to a value of the specified type and store
|
// it in a variable with the given name. If the object is not of the
|
@@ -92,15 +92,12 @@
|
RUNTIME_ASSERT(obj->IsNumber()); \
|
type name = NumberTo##Type(obj);
|
|
-// Non-reentrant string buffer for efficient general use in this file.
|
-static StaticResource<StringInputBuffer> runtime_string_input_buffer;
|
-
|
-
|
static Object* DeepCopyBoilerplate(JSObject* boilerplate) {
|
StackLimitCheck check;
|
- if (check.HasOverflowed()) return Top::StackOverflow();
|
+ V8Context * const v8context = v8_context();
|
+ if (check.HasOverflowed()) return v8context->top_.StackOverflow();
|
|
- Object* result = Heap::CopyJSObject(boilerplate);
|
+ Object* result = v8context->heap_.CopyJSObject(boilerplate);
|
if (result->IsFailure()) return result;
|
JSObject* copy = JSObject::cast(result);
|
|
@@ -129,7 +126,7 @@
|
}
|
}
|
} else {
|
- result = Heap::AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
|
+ result = v8context->heap_.AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
|
if (result->IsFailure()) return result;
|
FixedArray* names = FixedArray::cast(result);
|
copy->GetLocalPropertyNames(names, 0);
|
@@ -205,7 +202,7 @@
|
|
static Object* Runtime_CloneShallowLiteralBoilerplate(Arguments args) {
|
CONVERT_CHECKED(JSObject, boilerplate, args[0]);
|
- return Heap::CopyJSObject(boilerplate);
|
+ return v8_context()->heap_.CopyJSObject(boilerplate);
|
}
|
|
|
@@ -402,10 +399,11 @@
|
ASSERT(args.length() == 2);
|
CONVERT_CHECKED(String, key, args[0]);
|
Object* value = args[1];
|
+ V8Context * const v8context = v8_context();
|
// Create a catch context extension object.
|
JSFunction* constructor =
|
- Top::context()->global_context()->context_extension_function();
|
- Object* object = Heap::AllocateJSObject(constructor);
|
+ v8context->top_.context()->global_context()->context_extension_function();
|
+ Object* object = v8context->heap_.AllocateJSObject(constructor);
|
if (object->IsFailure()) return object;
|
// Assign the exception value to the catch variable and make sure
|
// that the catch variable is DontDelete.
|
@@ -419,7 +417,7 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 1);
|
Object* obj = args[0];
|
- if (!obj->IsJSObject()) return Heap::null_value();
|
+ if (!obj->IsJSObject()) return v8_context()->heap_.null_value();
|
return JSObject::cast(obj)->class_name();
|
}
|
|
@@ -430,10 +428,11 @@
|
// See ECMA-262, section 15.3.5.3, page 88 (steps 5 - 8).
|
Object* O = args[0];
|
Object* V = args[1];
|
+ V8Context * const v8context = v8_context();
|
while (true) {
|
Object* prototype = V->GetPrototype();
|
- if (prototype->IsNull()) return Heap::false_value();
|
- if (O == prototype) return Heap::true_value();
|
+ if (prototype->IsNull()) return v8context->heap_.false_value();
|
+ if (O == prototype) return v8context->heap_.true_value();
|
V = prototype;
|
}
|
}
|
@@ -471,7 +470,7 @@
|
new_map->set_prototype(proto);
|
jsobject->set_map(new_map);
|
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -479,7 +478,7 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 0);
|
JavaScriptFrameIterator it;
|
- return Heap::ToBoolean(it.frame()->IsConstructor());
|
+ return v8_context()->heap_.ToBoolean(it.frame()->IsConstructor());
|
}
|
|
|
@@ -507,7 +506,7 @@
|
ASSERT(args.length() == 1);
|
Object* arg = args[0];
|
bool result = arg->IsObjectTemplateInfo() || arg->IsFunctionTemplateInfo();
|
- return Heap::ToBoolean(result);
|
+ return v8_context()->heap_.ToBoolean(result);
|
}
|
|
|
@@ -543,7 +542,8 @@
|
Map::cast(new_map)->set_is_access_check_needed(false);
|
object->set_map(Map::cast(new_map));
|
}
|
- return needs_access_checks ? Heap::true_value() : Heap::false_value();
|
+ V8Context * const v8context = v8_context();
|
+ return needs_access_checks ? v8context->heap_.true_value() : v8context->heap_.false_value();
|
}
|
|
|
@@ -559,7 +559,7 @@
|
Map::cast(new_map)->set_is_access_check_needed(true);
|
object->set_map(Map::cast(new_map));
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -569,13 +569,14 @@
|
Handle<Object> args[2] = { type_handle, name };
|
Handle<Object> error =
|
Factory::NewTypeError("redeclaration", HandleVector(args, 2));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
|
static Object* Runtime_DeclareGlobals(Arguments args) {
|
HandleScope scope;
|
- Handle<GlobalObject> global = Handle<GlobalObject>(Top::context()->global());
|
+ V8Context * const v8context = v8_context();
|
+ Handle<GlobalObject> global = Handle<GlobalObject>(v8context->top_.context()->global());
|
|
Handle<Context> context = args.at<Context>(0);
|
CONVERT_ARG_CHECKED(FixedArray, pairs, 1);
|
@@ -680,7 +681,7 @@
|
}
|
}
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -699,6 +700,7 @@
|
context = Handle<Context>(context->fcontext());
|
|
int index;
|
+ V8Context * const v8context = v8_context();
|
PropertyAttributes attributes;
|
ContextLookupFlags flags = DONT_FOLLOW_CHAINS;
|
Handle<Object> holder =
|
@@ -744,7 +746,7 @@
|
} else {
|
// The function context's extension context does not exists - allocate
|
// it.
|
- context_ext = Factory::NewJSObject(Top::context_extension_function());
|
+ context_ext = Factory::NewJSObject(v8context->top_.context_extension_function());
|
// And store it in the extension slot.
|
context->set_extension(*context_ext);
|
}
|
@@ -754,13 +756,13 @@
|
// or undefined, and use the correct mode (e.g. READ_ONLY attribute for
|
// constant declarations).
|
ASSERT(!context_ext->HasLocalProperty(*name));
|
- Handle<Object> value(Heap::undefined_value());
|
+ Handle<Object> value(v8context->heap_.undefined_value());
|
if (*initial_value != NULL) value = initial_value;
|
SetProperty(context_ext, name, value, mode);
|
ASSERT(context_ext->GetLocalPropertyAttribute(*name) == mode);
|
}
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -773,7 +775,8 @@
|
bool assign = args.length() == 2;
|
|
CONVERT_ARG_CHECKED(String, name, 0);
|
- GlobalObject* global = Top::context()->global();
|
+ V8Context * const v8context = v8_context();
|
+ GlobalObject* global = v8context->top_.context()->global();
|
|
// According to ECMA-262, section 12.2, page 62, the property must
|
// not be deletable.
|
@@ -796,7 +799,7 @@
|
args[1],
|
attributes);
|
}
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
// Determine if this is a redeclaration of something read-only.
|
@@ -821,17 +824,17 @@
|
return ThrowRedeclarationError("const", name);
|
}
|
// Restore global object from context (in case of GC).
|
- global = Top::context()->global();
|
+ global = v8context->top_.context()->global();
|
}
|
|
if (found && !assign) {
|
// The global property is there and we're not assigning any value
|
// to it. Just return.
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
// Assign the value (or undefined) to the property.
|
- Object* value = (assign) ? args[1] : Heap::undefined_value();
|
+ Object* value = (assign) ? args[1] : v8context->heap_.undefined_value();
|
return global->SetProperty(&lookup, *name, value, attributes);
|
}
|
|
@@ -842,10 +845,11 @@
|
// is the second.
|
RUNTIME_ASSERT(args.length() == 2);
|
CONVERT_ARG_CHECKED(String, name, 0);
|
+ V8Context * const v8context = v8_context();
|
Handle<Object> value = args.at<Object>(1);
|
|
// Get the current global object from top.
|
- GlobalObject* global = Top::context()->global();
|
+ GlobalObject* global = v8context->top_.context()->global();
|
|
// According to ECMA-262, section 12.2, page 62, the property must
|
// not be deletable. Since it's a const, it must be READ_ONLY too.
|
@@ -884,7 +888,7 @@
|
// Restore global object from context (in case of GC) and continue
|
// with setting the value because the property is either absent or
|
// read-only. We also have to do redo the lookup.
|
- global = Top::context()->global();
|
+ global = v8context->top_.context()->global();
|
|
// BUG 1213579: Handle the case where we have to set a read-only
|
// property through an interceptor and only do it if it's
|
@@ -969,7 +973,7 @@
|
// The property could not be found, we introduce it in the global
|
// context.
|
if (attributes == ABSENT) {
|
- Handle<JSObject> global = Handle<JSObject>(Top::context()->global());
|
+ Handle<JSObject> global = Handle<JSObject>(v8_context()->top_.context()->global());
|
SetProperty(global, name, value, NONE);
|
return *value;
|
}
|
@@ -1012,7 +1016,7 @@
|
// are converted to empty handles in handle operations. We
|
// need to convert back to exceptions here.
|
if (set.is_null()) {
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(v8_context()->top_.has_pending_exception());
|
return Failure::Exception();
|
}
|
}
|
@@ -1089,7 +1093,7 @@
|
RegExpImpl::CreateRegExpLiteral(constructor, pattern, flags,
|
&has_pending_exception);
|
if (has_pending_exception) {
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(v8_context()->top_.has_pending_exception());
|
return Failure::Exception();
|
}
|
literals->set(index, *regexp);
|
@@ -1113,7 +1117,7 @@
|
CONVERT_CHECKED(JSFunction, f, args[0]);
|
CONVERT_CHECKED(String, name, args[1]);
|
f->shared()->set_name(name);
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -1123,7 +1127,7 @@
|
|
CONVERT_CHECKED(JSFunction, fun, args[0]);
|
Handle<Object> script = Handle<Object>(fun->shared()->script());
|
- if (!script->IsScript()) return Heap::undefined_value();
|
+ if (!script->IsScript()) return v8_context()->heap_.undefined_value();
|
|
return *GetScriptWrapper(Handle<Script>::cast(script));
|
}
|
@@ -1170,7 +1174,7 @@
|
CONVERT_CHECKED(JSFunction, fun, args[0]);
|
CONVERT_CHECKED(String, name, args[1]);
|
fun->SetInstanceClassName(name);
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -1201,10 +1205,11 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_CHECKED(JSFunction, f, args[0]);
|
+ V8Context * const v8context = v8_context();
|
// The function_data field of the shared function info is used exclusively by
|
// the API.
|
- return !f->shared()->function_data()->IsUndefined() ? Heap::true_value()
|
- : Heap::false_value();
|
+ return !f->shared()->function_data()->IsUndefined() ? v8context->heap_.true_value()
|
+ : v8context->heap_.false_value();
|
}
|
|
static Object* Runtime_FunctionIsBuiltin(Arguments args) {
|
@@ -1212,7 +1217,8 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_CHECKED(JSFunction, f, args[0]);
|
- return f->IsBuiltin() ? Heap::true_value() : Heap::false_value();
|
+ V8Context * const v8context = v8_context();
|
+ return f->IsBuiltin() ? v8context->heap_.true_value() : v8context->heap_.false_value();
|
}
|
|
|
@@ -1242,7 +1248,7 @@
|
// SetCode is only used for built-in constructors like String,
|
// Array, and Object, and some web code
|
// doesn't like seeing source code for constructors.
|
- target->shared()->set_script(Heap::undefined_value());
|
+ target->shared()->set_script(v8_context()->heap_.undefined_value());
|
// Clear the optimization hints related to the compiled code as these are no
|
// longer valid when the code is overwritten.
|
target->shared()->ClearThisPropertyAssignmentsInfo();
|
@@ -1270,13 +1276,14 @@
|
|
static Object* CharCodeAt(String* subject, Object* index) {
|
uint32_t i = 0;
|
- if (!Array::IndexFromObject(index, &i)) return Heap::nan_value();
|
+ V8Context * const v8context = v8_context();
|
+ if (!Array::IndexFromObject(index, &i)) return v8context->heap_.nan_value();
|
// Flatten the string. If someone wants to get a char at an index
|
// in a cons string, it is likely that more indices will be
|
// accessed.
|
subject->TryFlattenIfNotFlat();
|
if (i >= static_cast<uint32_t>(subject->length())) {
|
- return Heap::nan_value();
|
+ return v8context->heap_.nan_value();
|
}
|
return Smi::FromInt(subject->Get(i));
|
}
|
@@ -1296,12 +1303,14 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 1);
|
uint32_t code;
|
+ V8Context * const v8context = v8_context();
|
+
|
if (Array::IndexFromObject(args[0], &code)) {
|
if (code <= 0xffff) {
|
- return Heap::LookupSingleCharacterStringFromCode(code);
|
+ return v8context->heap_.LookupSingleCharacterStringFromCode(code);
|
}
|
}
|
- return Heap::empty_string();
|
+ return v8context->heap_.empty_string();
|
}
|
|
// Forward declarations.
|
@@ -1408,7 +1417,7 @@
|
|
void IncrementCharacterCount(int by) {
|
if (character_count_ > Smi::kMaxValue - by) {
|
- V8::FatalProcessOutOfMemory("String.replace result too large.");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("String.replace result too large.");
|
}
|
character_count_ += by;
|
}
|
@@ -1416,12 +1425,12 @@
|
private:
|
|
Handle<String> NewRawAsciiString(int size) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateRawAsciiString(size), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateRawAsciiString(size), String);
|
}
|
|
|
Handle<String> NewRawTwoByteString(int size) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateRawTwoByteString(size), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateRawTwoByteString(size), String);
|
}
|
|
|
@@ -1886,10 +1895,29 @@
|
DISALLOW_COPY_AND_ASSIGN(BMGoodSuffixBuffers);
|
};
|
|
+class RuntimeData {
|
+public:
|
+ // Non-reentrant string buffer for efficient general use in this file.
|
+ StaticResource<StringInputBuffer> runtime_string_input_buffer;
|
-// buffers reused by BoyerMoore
|
+ // buffers reused by BoyerMoore
|
-static int bad_char_occurrence[kBMAlphabetSize];
|
-static BMGoodSuffixBuffers bmgs_buffers;
|
+ int bad_char_occurrence[kBMAlphabetSize];
|
+ BMGoodSuffixBuffers bmgs_buffers;
|
+ StringInputBuffer buf1;
|
+ StringInputBuffer buf2;
|
|
+ unibrow::Mapping<unibrow::ToUppercase, 128> to_upper_mapping;
|
+ unibrow::Mapping<unibrow::ToLowercase, 128> to_lower_mapping;
|
+
|
+ // Arrays for the individual characters of the two Smis. Smis are
|
+ // 31 bit integers and 10 decimal digits are therefore enough.
|
+ int x_elms[10];
|
+ int y_elms[10];
|
+
|
+ StringInputBuffer bufx;
|
+ StringInputBuffer bufy;
|
+
|
+};
|
+
|
// Compute the bad-char table for Boyer-Moore in the static buffer.
|
template <typename pchar>
|
static void BoyerMoorePopulateBadCharTable(Vector<const pchar> pattern,
|
@@ -1899,6 +1927,7 @@
|
// Notice: Doesn't include the last character.
|
int table_size = (sizeof(pchar) == 1) ? String::kMaxAsciiCharCode + 1
|
: kBMAlphabetSize;
|
+ int* const bad_char_occurrence = v8_context()->runtime_data_->bad_char_occurrence;
|
if (start == 0) { // All patterns less than kBMMaxShift in length.
|
memset(bad_char_occurrence, -1, table_size * sizeof(*bad_char_occurrence));
|
} else {
|
@@ -1919,6 +1948,7 @@
|
int m = pattern.length();
|
int len = m - start;
|
// Compute Good Suffix tables.
|
+ BMGoodSuffixBuffers& bmgs_buffers = v8_context()->runtime_data_->bmgs_buffers;
|
bmgs_buffers.init(m);
|
|
bmgs_buffers.shift(m-1) = 1;
|
@@ -1965,6 +1995,7 @@
|
|
template <typename schar, typename pchar>
|
static inline int CharOccurrence(int char_code) {
|
+ int* const bad_char_occurrence = v8_context()->runtime_data_->bad_char_occurrence;
|
if (sizeof(schar) == 1) {
|
return bad_char_occurrence[char_code];
|
}
|
@@ -2044,6 +2075,7 @@
|
|
// Build the Good Suffix table and continue searching.
|
BoyerMoorePopulateGoodSuffixTable(pattern, start);
|
+ BMGoodSuffixBuffers& bmgs_buffers = v8_context()->runtime_data_->bmgs_buffers;
|
pchar last_char = pattern[m - 1];
|
// Continue search from i.
|
while (idx <= n - m) {
|
@@ -2335,8 +2367,9 @@
|
str1->TryFlattenIfNotFlat();
|
str2->TryFlattenIfNotFlat();
|
|
- static StringInputBuffer buf1;
|
- static StringInputBuffer buf2;
|
+ V8Context * const v8context = v8_context();
|
+ StringInputBuffer& buf1 = v8context->runtime_data_->buf1;
|
+ StringInputBuffer& buf2 = v8context->runtime_data_->buf2;
|
|
buf1.Reset(str1);
|
buf2.Reset(str2);
|
@@ -2383,7 +2416,7 @@
|
return Failure::Exception();
|
}
|
if (match->IsNull()) {
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
int length = subject->length();
|
|
@@ -2423,6 +2456,7 @@
|
static Object* Runtime_NumberToRadixString(Arguments args) {
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 2);
|
+ V8Context * const v8context = v8_context();
|
|
// Fast case where the result is a one character string.
|
if (args[0]->IsSmi() && args[1]->IsSmi()) {
|
@@ -2432,26 +2466,26 @@
|
RUNTIME_ASSERT(radix <= 36);
|
// Character array used for conversion.
|
static const char kCharTable[] = "0123456789abcdefghijklmnopqrstuvwxyz";
|
- return Heap::LookupSingleCharacterStringFromCode(kCharTable[value]);
|
+ return v8context->heap_.LookupSingleCharacterStringFromCode(kCharTable[value]);
|
}
|
}
|
|
// Slow case.
|
CONVERT_DOUBLE_CHECKED(value, args[0]);
|
if (isnan(value)) {
|
- return Heap::AllocateStringFromAscii(CStrVector("NaN"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("NaN"));
|
}
|
if (isinf(value)) {
|
if (value < 0) {
|
- return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("-Infinity"));
|
}
|
- return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("Infinity"));
|
}
|
CONVERT_DOUBLE_CHECKED(radix_number, args[1]);
|
int radix = FastD2I(radix_number);
|
RUNTIME_ASSERT(2 <= radix && radix <= 36);
|
char* str = DoubleToRadixCString(value, radix);
|
- Object* result = Heap::AllocateStringFromAscii(CStrVector(str));
|
+ Object* result = v8context->heap_.AllocateStringFromAscii(CStrVector(str));
|
DeleteArray(str);
|
return result;
|
}
|
@@ -2462,20 +2496,21 @@
|
ASSERT(args.length() == 2);
|
|
CONVERT_DOUBLE_CHECKED(value, args[0]);
|
+ V8Context * const v8context = v8_context();
|
if (isnan(value)) {
|
- return Heap::AllocateStringFromAscii(CStrVector("NaN"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("NaN"));
|
}
|
if (isinf(value)) {
|
if (value < 0) {
|
- return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("-Infinity"));
|
}
|
- return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("Infinity"));
|
}
|
CONVERT_DOUBLE_CHECKED(f_number, args[1]);
|
int f = FastD2I(f_number);
|
RUNTIME_ASSERT(f >= 0);
|
char* str = DoubleToFixedCString(value, f);
|
- Object* res = Heap::AllocateStringFromAscii(CStrVector(str));
|
+ Object* res = v8context->heap_.AllocateStringFromAscii(CStrVector(str));
|
DeleteArray(str);
|
return res;
|
}
|
@@ -2486,20 +2521,21 @@
|
ASSERT(args.length() == 2);
|
|
CONVERT_DOUBLE_CHECKED(value, args[0]);
|
+ V8Context * const v8context = v8_context();
|
if (isnan(value)) {
|
- return Heap::AllocateStringFromAscii(CStrVector("NaN"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("NaN"));
|
}
|
if (isinf(value)) {
|
if (value < 0) {
|
- return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("-Infinity"));
|
}
|
- return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("Infinity"));
|
}
|
CONVERT_DOUBLE_CHECKED(f_number, args[1]);
|
int f = FastD2I(f_number);
|
RUNTIME_ASSERT(f >= -1 && f <= 20);
|
char* str = DoubleToExponentialCString(value, f);
|
- Object* res = Heap::AllocateStringFromAscii(CStrVector(str));
|
+ Object* res = v8context->heap_.AllocateStringFromAscii(CStrVector(str));
|
DeleteArray(str);
|
return res;
|
}
|
@@ -2510,20 +2546,21 @@
|
ASSERT(args.length() == 2);
|
|
CONVERT_DOUBLE_CHECKED(value, args[0]);
|
+ V8Context * const v8context = v8_context();
|
if (isnan(value)) {
|
- return Heap::AllocateStringFromAscii(CStrVector("NaN"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("NaN"));
|
}
|
if (isinf(value)) {
|
if (value < 0) {
|
- return Heap::AllocateStringFromAscii(CStrVector("-Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("-Infinity"));
|
}
|
- return Heap::AllocateStringFromAscii(CStrVector("Infinity"));
|
+ return v8context->heap_.AllocateStringFromAscii(CStrVector("Infinity"));
|
}
|
CONVERT_DOUBLE_CHECKED(f_number, args[1]);
|
int f = FastD2I(f_number);
|
RUNTIME_ASSERT(f >= 1 && f <= 21);
|
char* str = DoubleToPrecisionCString(value, f);
|
- Object* res = Heap::AllocateStringFromAscii(CStrVector(str));
|
+ Object* res = v8context->heap_.AllocateStringFromAscii(CStrVector(str));
|
DeleteArray(str);
|
return res;
|
}
|
@@ -2573,7 +2610,7 @@
|
Handle<Object> error =
|
Factory::NewTypeError("non_object_property_load",
|
HandleVector(args, 2));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
// Check if the given key is an array index.
|
@@ -2621,6 +2658,7 @@
|
static Object* Runtime_KeyedGetProperty(Arguments args) {
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 2);
|
+ V8Context * const v8context = v8_context();
|
|
// Fast cases for getting named properties of the receiver JSObject
|
// itself.
|
@@ -2642,17 +2680,17 @@
|
if (receiver->HasFastProperties()) {
|
// Attempt to use lookup cache.
|
Map* receiver_map = receiver->map();
|
- int offset = KeyedLookupCache::Lookup(receiver_map, key);
|
+ int offset = v8context->keyed_lookup_cache_.Lookup(receiver_map, key);
|
if (offset != -1) {
|
Object* value = receiver->FastPropertyAt(offset);
|
- return value->IsTheHole() ? Heap::undefined_value() : value;
|
+ return value->IsTheHole() ? v8context->heap_.undefined_value() : value;
|
}
|
// Lookup cache miss. Perform lookup and update the cache if appropriate.
|
LookupResult result;
|
receiver->LocalLookup(key, &result);
|
if (result.IsProperty() && result.IsLoaded() && result.type() == FIELD) {
|
int offset = result.GetFieldIndex();
|
- KeyedLookupCache::Update(receiver_map, key, offset);
|
+ v8context->keyed_lookup_cache_.Update(receiver_map, key, offset);
|
return receiver->FastPropertyAt(offset);
|
}
|
} else {
|
@@ -2687,7 +2725,7 @@
|
Handle<Object> error =
|
Factory::NewTypeError("non_object_property_store",
|
HandleVector(args, 2));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
// If the object isn't a JavaScript object, we ignore the store.
|
@@ -2812,7 +2850,7 @@
|
// underlying string does nothing with the deletion, we can ignore
|
// such deletions.
|
if (js_object->IsStringObjectWithCharacterAt(index)) {
|
- return Heap::true_value();
|
+ return v8_context()->heap_.true_value();
|
}
|
|
return js_object->DeleteElement(index, JSObject::FORCE_DELETION);
|
@@ -2891,7 +2929,8 @@
|
|
static Object* HasLocalPropertyImplementation(Handle<JSObject> object,
|
Handle<String> key) {
|
- if (object->HasLocalProperty(*key)) return Heap::true_value();
|
+ V8Context * const v8context = v8_context();
|
+ if (object->HasLocalProperty(*key)) return v8context->heap_.true_value();
|
// Handle hidden prototypes. If there's a hidden prototype above this thing
|
// then we have to check it for properties, because they are supposed to
|
// look like they are on this object.
|
@@ -2900,7 +2939,7 @@
|
Handle<JSObject>::cast(proto)->map()->is_hidden_prototype()) {
|
return HasLocalPropertyImplementation(Handle<JSObject>::cast(proto), key);
|
}
|
- return Heap::false_value();
|
+ return v8context->heap_.false_value();
|
}
|
|
|
@@ -2908,13 +2947,14 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 2);
|
CONVERT_CHECKED(String, key, args[1]);
|
+ V8Context * const v8context = v8_context();
|
|
Object* obj = args[0];
|
// Only JS objects can have properties.
|
if (obj->IsJSObject()) {
|
JSObject* object = JSObject::cast(obj);
|
// Fast case - no interceptors.
|
- if (object->HasRealNamedProperty(key)) return Heap::true_value();
|
+ if (object->HasRealNamedProperty(key)) return v8context->heap_.true_value();
|
// Slow case. Either it's not there or we have an interceptor. We should
|
// have handles for this kind of deal.
|
HandleScope scope;
|
@@ -2926,39 +2966,41 @@
|
if (key->AsArrayIndex(&index)) {
|
String* string = String::cast(obj);
|
if (index < static_cast<uint32_t>(string->length()))
|
- return Heap::true_value();
|
+ return v8context->heap_.true_value();
|
}
|
}
|
- return Heap::false_value();
|
+ return v8context->heap_.false_value();
|
}
|
|
|
static Object* Runtime_HasProperty(Arguments args) {
|
NoHandleAllocation na;
|
ASSERT(args.length() == 2);
|
+ V8Context * const v8context = v8_context();
|
|
// Only JS objects can have properties.
|
if (args[0]->IsJSObject()) {
|
JSObject* object = JSObject::cast(args[0]);
|
CONVERT_CHECKED(String, key, args[1]);
|
- if (object->HasProperty(key)) return Heap::true_value();
|
+ if (object->HasProperty(key)) return v8context->heap_.true_value();
|
}
|
- return Heap::false_value();
|
+ return v8context->heap_.false_value();
|
}
|
|
|
static Object* Runtime_HasElement(Arguments args) {
|
NoHandleAllocation na;
|
ASSERT(args.length() == 2);
|
+ V8Context * const v8context = v8_context();
|
|
// Only JS objects can have elements.
|
if (args[0]->IsJSObject()) {
|
JSObject* object = JSObject::cast(args[0]);
|
CONVERT_CHECKED(Smi, index_obj, args[1]);
|
uint32_t index = index_obj->value();
|
- if (object->HasElement(index)) return Heap::true_value();
|
+ if (object->HasElement(index)) return v8context->heap_.true_value();
|
}
|
- return Heap::false_value();
|
+ return v8context->heap_.false_value();
|
}
|
|
|
@@ -2968,14 +3010,15 @@
|
|
CONVERT_CHECKED(JSObject, object, args[0]);
|
CONVERT_CHECKED(String, key, args[1]);
|
+ V8Context * const v8context = v8_context();
|
|
uint32_t index;
|
if (key->AsArrayIndex(&index)) {
|
- return Heap::ToBoolean(object->HasElement(index));
|
+ return v8context->heap_.ToBoolean(object->HasElement(index));
|
}
|
|
PropertyAttributes att = object->GetLocalPropertyAttribute(key);
|
- return Heap::ToBoolean(att != ABSENT && (att & DONT_ENUM) == 0);
|
+ return v8context->heap_.ToBoolean(att != ABSENT && (att & DONT_ENUM) == 0);
|
}
|
|
|
@@ -3065,22 +3108,23 @@
|
Execution::ToString(args.at<Object>(0), &exception);
|
if (exception) return Failure::Exception();
|
Handle<String> key = Handle<String>::cast(converted);
|
+ V8Context * const v8context = v8_context();
|
|
// Try to convert the string key into an array index.
|
if (key->AsArrayIndex(&index)) {
|
if (index < n) {
|
return frame->GetParameter(index);
|
} else {
|
- return Top::initial_object_prototype()->GetElement(index);
|
+ return v8context->top_.initial_object_prototype()->GetElement(index);
|
}
|
}
|
|
// Handle special arguments properties.
|
- if (key->Equals(Heap::length_symbol())) return Smi::FromInt(n);
|
- if (key->Equals(Heap::callee_symbol())) return frame->function();
|
+ if (key->Equals(v8context->heap_.length_symbol())) return Smi::FromInt(n);
|
+ if (key->Equals(v8context->heap_.callee_symbol())) return frame->function();
|
|
// Lookup in the initial Object.prototype object.
|
- return Top::initial_object_prototype()->GetProperty(*key);
|
+ return v8context->top_.initial_object_prototype()->GetProperty(*key);
|
}
|
|
|
@@ -3120,33 +3164,34 @@
|
NoHandleAllocation ha;
|
|
Object* obj = args[0];
|
- if (obj->IsNumber()) return Heap::number_symbol();
|
+ V8Context * const v8context = v8_context();
|
+ if (obj->IsNumber()) return v8context->heap_.number_symbol();
|
HeapObject* heap_obj = HeapObject::cast(obj);
|
|
// typeof an undetectable object is 'undefined'
|
- if (heap_obj->map()->is_undetectable()) return Heap::undefined_symbol();
|
+ if (heap_obj->map()->is_undetectable()) return v8context->heap_.undefined_symbol();
|
|
InstanceType instance_type = heap_obj->map()->instance_type();
|
if (instance_type < FIRST_NONSTRING_TYPE) {
|
- return Heap::string_symbol();
|
+ return v8context->heap_.string_symbol();
|
}
|
|
switch (instance_type) {
|
case ODDBALL_TYPE:
|
if (heap_obj->IsTrue() || heap_obj->IsFalse()) {
|
- return Heap::boolean_symbol();
|
+ return v8context->heap_.boolean_symbol();
|
}
|
if (heap_obj->IsNull()) {
|
- return Heap::object_symbol();
|
+ return v8context->heap_.object_symbol();
|
}
|
ASSERT(heap_obj->IsUndefined());
|
- return Heap::undefined_symbol();
|
+ return v8context->heap_.undefined_symbol();
|
case JS_FUNCTION_TYPE: case JS_REGEXP_TYPE:
|
- return Heap::function_symbol();
|
+ return v8context->heap_.function_symbol();
|
default:
|
// For any kind of object not handled above, the spec rule for
|
// host objects gives that it is okay to return "object"
|
- return Heap::object_symbol();
|
+ return v8context->heap_.object_symbol();
|
}
|
}
|
|
@@ -3156,7 +3201,7 @@
|
ASSERT(args.length() == 1);
|
CONVERT_CHECKED(String, subject, args[0]);
|
subject->TryFlattenIfNotFlat();
|
- return Heap::NumberFromDouble(StringToDouble(subject, ALLOW_HEX));
|
+ return v8_context()->heap_.NumberFromDouble(StringToDouble(subject, ALLOW_HEX));
|
}
|
|
|
@@ -3178,9 +3223,9 @@
|
|
Object* object = NULL;
|
if (i == length) { // The string is ASCII.
|
- object = Heap::AllocateRawAsciiString(length);
|
+ object = v8_context()->heap_.AllocateRawAsciiString(length);
|
} else { // The string is not ASCII.
|
- object = Heap::AllocateRawTwoByteString(length);
|
+ object = v8_context()->heap_.AllocateRawTwoByteString(length);
|
}
|
|
if (object->IsFailure()) return object;
|
@@ -3239,10 +3284,11 @@
|
|
source->TryFlattenIfNotFlat();
|
|
+ V8Context * const v8context = v8_context();
|
int escaped_length = 0;
|
int length = source->length();
|
{
|
- Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8context->runtime_data_->runtime_string_input_buffer);
|
buffer->Reset(source);
|
while (buffer->has_more()) {
|
uint16_t character = buffer->GetNext();
|
@@ -3255,7 +3301,7 @@
|
}
|
// We don't allow strings that are longer than a maximal length.
|
if (escaped_length > String::kMaxLength) {
|
- Top::context()->mark_out_of_memory();
|
+ v8context->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
}
|
@@ -3264,12 +3310,12 @@
|
if (escaped_length == length) {
|
return source;
|
}
|
- Object* o = Heap::AllocateRawAsciiString(escaped_length);
|
+ Object* o = v8context->heap_.AllocateRawAsciiString(escaped_length);
|
if (o->IsFailure()) return o;
|
String* destination = String::cast(o);
|
int dest_position = 0;
|
|
- Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8context->runtime_data_->runtime_string_input_buffer);
|
buffer->Rewind();
|
while (buffer->has_more()) {
|
uint16_t chr = buffer->GetNext();
|
@@ -3368,8 +3414,8 @@
|
return source;
|
|
Object* o = ascii ?
|
- Heap::AllocateRawAsciiString(unescaped_length) :
|
- Heap::AllocateRawTwoByteString(unescaped_length);
|
+ v8_context()->heap_.AllocateRawAsciiString(unescaped_length) :
|
+ v8_context()->heap_.AllocateRawTwoByteString(unescaped_length);
|
if (o->IsFailure()) return o;
|
String* destination = String::cast(o);
|
|
@@ -3395,9 +3441,11 @@
|
int len = s->length();
|
int i;
|
|
+ V8Context * const v8context = v8_context();
|
// Skip leading white space.
|
- for (i = 0; i < len && Scanner::kIsWhiteSpace.get(s->Get(i)); i++) ;
|
- if (i == len) return Heap::nan_value();
|
+ unibrow::Predicate<unibrow::WhiteSpace, 128>& wspredicate = v8context->scanner_.kIsWhiteSpace;
|
+ for (i = 0; i < len && wspredicate.get(s->Get(i)); i++) ;
|
+ if (i == len) return v8context->heap_.nan_value();
|
|
// Compute the sign (default to +).
|
int sign = 1;
|
@@ -3433,9 +3481,9 @@
|
double value;
|
int end_index = StringToInt(s, i, radix, &value);
|
if (end_index != i) {
|
- return Heap::NumberFromDouble(sign * value);
|
+ return v8context->heap_.NumberFromDouble(sign * value);
|
}
|
- return Heap::nan_value();
|
+ return v8context->heap_.nan_value();
|
}
|
|
|
@@ -3447,19 +3495,15 @@
|
double value = StringToDouble(str, ALLOW_TRAILING_JUNK, OS::nan_value());
|
|
// Create a number object from the value.
|
- return Heap::NumberFromDouble(value);
|
+ return v8_context()->heap_.NumberFromDouble(value);
|
}
|
|
-
|
-static unibrow::Mapping<unibrow::ToUppercase, 128> to_upper_mapping;
|
-static unibrow::Mapping<unibrow::ToLowercase, 128> to_lower_mapping;
|
-
|
-
|
template <class Converter>
|
static Object* ConvertCaseHelper(String* s,
|
int length,
|
int input_string_length,
|
unibrow::Mapping<Converter, 128>* mapping) {
|
+ V8Context * const v8context = v8_context();
|
// We try this twice, once with the assumption that the result is no longer
|
// than the input and, if that assumption breaks, again with the exact
|
// length. This may not be pretty, but it is nicer than what was here before
|
@@ -3472,15 +3516,15 @@
|
// might break in the future if we implement more context and locale
|
// dependent upper/lower conversions.
|
Object* o = s->IsAsciiRepresentation()
|
- ? Heap::AllocateRawAsciiString(length)
|
- : Heap::AllocateRawTwoByteString(length);
|
+ ? v8context->heap_.AllocateRawAsciiString(length)
|
+ : v8context->heap_.AllocateRawTwoByteString(length);
|
if (o->IsFailure()) return o;
|
String* result = String::cast(o);
|
bool has_changed_character = false;
|
|
// Convert all characters to upper case, assuming that they will fit
|
// in the buffer
|
- Access<StringInputBuffer> buffer(&runtime_string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8context->runtime_data_->runtime_string_input_buffer);
|
buffer->Reset(s);
|
unibrow::uchar chars[Converter::kMaxWidth];
|
// We can assume that the string is not empty
|
@@ -3527,7 +3571,7 @@
|
if (char_length == 0) char_length = 1;
|
current_length += char_length;
|
if (current_length > Smi::kMaxValue) {
|
- Top::context()->mark_out_of_memory();
|
+ v8context->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
}
|
@@ -3577,12 +3621,12 @@
|
|
|
static Object* Runtime_StringToLowerCase(Arguments args) {
|
- return ConvertCase<unibrow::ToLowercase>(args, &to_lower_mapping);
|
+ return ConvertCase<unibrow::ToLowercase>(args, &v8_context()->runtime_data_->to_lower_mapping);
|
}
|
|
|
static Object* Runtime_StringToUpperCase(Arguments args) {
|
- return ConvertCase<unibrow::ToUppercase>(args, &to_upper_mapping);
|
+ return ConvertCase<unibrow::ToUppercase>(args, &v8_context()->runtime_data_->to_upper_mapping);
|
}
|
|
static inline bool IsTrimWhiteSpace(unibrow::uchar c) {
|
@@ -3618,7 +3662,7 @@
|
|
bool Runtime::IsUpperCaseChar(uint16_t ch) {
|
unibrow::uchar chars[unibrow::ToUppercase::kMaxWidth];
|
- int char_length = to_upper_mapping.get(ch, 0, chars);
|
+ int char_length = v8_context()->runtime_data_->to_upper_mapping.get(ch, 0, chars);
|
return char_length == 0;
|
}
|
|
@@ -3630,7 +3674,7 @@
|
Object* number = args[0];
|
RUNTIME_ASSERT(number->IsNumber());
|
|
- return Heap::NumberToString(number);
|
+ return v8_context()->heap_.NumberToString(number);
|
}
|
|
|
@@ -3641,7 +3685,7 @@
|
Object* obj = args[0];
|
if (obj->IsSmi()) return obj;
|
CONVERT_DOUBLE_CHECKED(number, obj);
|
- return Heap::NumberFromDouble(DoubleToInteger(number));
|
+ return v8_context()->heap_.NumberFromDouble(DoubleToInteger(number));
|
}
|
|
|
@@ -3652,7 +3696,7 @@
|
Object* obj = args[0];
|
if (obj->IsSmi() && Smi::cast(obj)->value() >= 0) return obj;
|
CONVERT_NUMBER_CHECKED(int32_t, number, Uint32, obj);
|
- return Heap::NumberFromUint32(number);
|
+ return v8_context()->heap_.NumberFromUint32(number);
|
}
|
|
|
@@ -3663,7 +3707,7 @@
|
Object* obj = args[0];
|
if (obj->IsSmi()) return obj;
|
CONVERT_DOUBLE_CHECKED(number, obj);
|
- return Heap::NumberFromInt32(DoubleToInt32(number));
|
+ return v8_context()->heap_.NumberFromInt32(DoubleToInt32(number));
|
}
|
|
|
@@ -3684,7 +3728,7 @@
|
return Smi::FromInt(int_value);
|
}
|
}
|
- return Heap::nan_value();
|
+ return v8_context()->heap_.nan_value();
|
}
|
|
|
@@ -3694,7 +3738,7 @@
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
CONVERT_DOUBLE_CHECKED(y, args[1]);
|
- return Heap::AllocateHeapNumber(x + y);
|
+ return v8_context()->heap_.AllocateHeapNumber(x + y);
|
}
|
|
|
@@ -3704,7 +3748,7 @@
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
CONVERT_DOUBLE_CHECKED(y, args[1]);
|
- return Heap::AllocateHeapNumber(x - y);
|
+ return v8_context()->heap_.AllocateHeapNumber(x - y);
|
}
|
|
|
@@ -3714,7 +3758,7 @@
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
CONVERT_DOUBLE_CHECKED(y, args[1]);
|
- return Heap::AllocateHeapNumber(x * y);
|
+ return v8_context()->heap_.AllocateHeapNumber(x * y);
|
}
|
|
|
@@ -3723,7 +3767,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::AllocateHeapNumber(-x);
|
+ return v8_context()->heap_.AllocateHeapNumber(-x);
|
}
|
|
|
@@ -3733,7 +3777,7 @@
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
CONVERT_DOUBLE_CHECKED(y, args[1]);
|
- return Heap::NewNumberFromDouble(x / y);
|
+ return v8_context()->heap_.NewNumberFromDouble(x / y);
|
}
|
|
|
@@ -3746,7 +3790,7 @@
|
|
x = modulo(x, y);
|
// NewNumberFromDouble may return a Smi instead of a Number object
|
- return Heap::NewNumberFromDouble(x);
|
+ return v8_context()->heap_.NewNumberFromDouble(x);
|
}
|
|
|
@@ -3755,7 +3799,7 @@
|
ASSERT(args.length() == 2);
|
CONVERT_CHECKED(String, str1, args[0]);
|
CONVERT_CHECKED(String, str2, args[1]);
|
- return Heap::AllocateConsString(str1, str2);
|
+ return v8_context()->heap_.AllocateConsString(str1, str2);
|
}
|
|
|
@@ -3807,15 +3851,17 @@
|
// This assumption is used by the slice encoding in one or two smis.
|
ASSERT(Smi::kMaxValue >= String::kMaxLength);
|
|
+ V8Context * const v8context = v8_context();
|
int special_length = special->length();
|
Object* smi_array_length = array->length();
|
+
|
if (!smi_array_length->IsSmi()) {
|
- Top::context()->mark_out_of_memory();
|
+ v8context->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
int array_length = Smi::cast(smi_array_length)->value();
|
if (!array->HasFastElements()) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8_context()->heap_.illegal_argument_symbol());
|
}
|
FixedArray* fixed_array = FixedArray::cast(array->elements());
|
if (fixed_array->length() < array_length) {
|
@@ -3823,7 +3869,7 @@
|
}
|
|
if (array_length == 0) {
|
- return Heap::empty_string();
|
+ return v8context->heap_.empty_string();
|
} else if (array_length == 1) {
|
Object* first = fixed_array->get(0);
|
if (first->IsString()) return first;
|
@@ -3841,7 +3887,7 @@
|
int pos = len >> 11;
|
len &= 0x7ff;
|
if (pos + len > special_length) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
position += len;
|
} else {
|
@@ -3850,11 +3896,11 @@
|
// Get the position and check that it is also a smi.
|
i++;
|
if (i >= array_length) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
Object* pos = fixed_array->get(i);
|
if (!pos->IsSmi()) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
}
|
} else if (elt->IsString()) {
|
@@ -3865,10 +3911,10 @@
|
ascii = false;
|
}
|
} else {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
if (position > String::kMaxLength) {
|
- Top::context()->mark_out_of_memory();
|
+ v8context->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
}
|
@@ -3877,7 +3923,7 @@
|
Object* object;
|
|
if (ascii) {
|
- object = Heap::AllocateRawAsciiString(length);
|
+ object = v8context->heap_.AllocateRawAsciiString(length);
|
if (object->IsFailure()) return object;
|
SeqAsciiString* answer = SeqAsciiString::cast(object);
|
StringBuilderConcatHelper(special,
|
@@ -3886,7 +3932,7 @@
|
array_length);
|
return answer;
|
} else {
|
- object = Heap::AllocateRawTwoByteString(length);
|
+ object = v8context->heap_.AllocateRawTwoByteString(length);
|
if (object->IsFailure()) return object;
|
SeqTwoByteString* answer = SeqTwoByteString::cast(object);
|
StringBuilderConcatHelper(special,
|
@@ -3904,7 +3950,7 @@
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromInt32(x | y);
|
+ return v8_context()->heap_.NumberFromInt32(x | y);
|
}
|
|
|
@@ -3914,7 +3960,7 @@
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromInt32(x & y);
|
+ return v8_context()->heap_.NumberFromInt32(x & y);
|
}
|
|
|
@@ -3924,7 +3970,7 @@
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromInt32(x ^ y);
|
+ return v8_context()->heap_.NumberFromInt32(x ^ y);
|
}
|
|
|
@@ -3933,7 +3979,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
- return Heap::NumberFromInt32(~x);
|
+ return v8_context()->heap_.NumberFromInt32(~x);
|
}
|
|
|
@@ -3943,7 +3989,7 @@
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromInt32(x << (y & 0x1f));
|
+ return v8_context()->heap_.NumberFromInt32(x << (y & 0x1f));
|
}
|
|
|
@@ -3953,7 +3999,7 @@
|
|
CONVERT_NUMBER_CHECKED(uint32_t, x, Uint32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromUint32(x >> (y & 0x1f));
|
+ return v8_context()->heap_.NumberFromUint32(x >> (y & 0x1f));
|
}
|
|
|
@@ -3963,7 +4009,7 @@
|
|
CONVERT_NUMBER_CHECKED(int32_t, x, Int32, args[0]);
|
CONVERT_NUMBER_CHECKED(int32_t, y, Int32, args[1]);
|
- return Heap::NumberFromInt32(ArithmeticShiftRight(x, y & 0x1f));
|
+ return v8_context()->heap_.NumberFromInt32(ArithmeticShiftRight(x, y & 0x1f));
|
}
|
|
|
@@ -4023,10 +4069,11 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 2);
|
|
+ V8Context * const v8context = v8_context();
|
// Arrays for the individual characters of the two Smis. Smis are
|
// 31 bit integers and 10 decimal digits are therefore enough.
|
- static int x_elms[10];
|
- static int y_elms[10];
|
+ int * const x_elms = v8context->runtime_data_->x_elms;
|
+ int * const y_elms = v8context->runtime_data_->y_elms;
|
|
// Extract the integer values from the Smis.
|
CONVERT_CHECKED(Smi, x, args[0]);
|
@@ -4100,8 +4147,9 @@
|
x->TryFlattenIfNotFlat();
|
y->TryFlattenIfNotFlat();
|
|
- static StringInputBuffer bufx;
|
- static StringInputBuffer bufy;
|
+ V8Context * const v8context = v8_context();
|
+ StringInputBuffer& bufx = v8context->runtime_data_->bufx;
|
+ StringInputBuffer& bufy = v8context->runtime_data_->bufy;
|
bufx.Reset(x);
|
bufy.Reset(y);
|
while (bufx.has_more() && bufy.has_more()) {
|
@@ -4122,7 +4170,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::AllocateHeapNumber(fabs(x));
|
+ return v8_context()->heap_.AllocateHeapNumber(fabs(x));
|
}
|
|
|
@@ -4131,7 +4179,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::ACOS, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::ACOS, x);
|
}
|
|
|
@@ -4140,7 +4188,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::ASIN, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::ASIN, x);
|
}
|
|
|
@@ -4149,7 +4197,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::ATAN, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::ATAN, x);
|
}
|
|
|
@@ -4165,14 +4213,14 @@
|
// is a multiple of Pi / 4. The sign of the result is determined
|
// by the first argument (x) and the sign of the second argument
|
// determines the multiplier: one or three.
|
- static double kPiDividedBy4 = 0.78539816339744830962;
|
+ static const double kPiDividedBy4 = 0.78539816339744830962;
|
int multiplier = (x < 0) ? -1 : 1;
|
if (y < 0) multiplier *= 3;
|
result = multiplier * kPiDividedBy4;
|
} else {
|
result = atan2(x, y);
|
}
|
- return Heap::AllocateHeapNumber(result);
|
+ return v8_context()->heap_.AllocateHeapNumber(result);
|
}
|
|
|
@@ -4181,7 +4229,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::NumberFromDouble(ceiling(x));
|
+ return v8_context()->heap_.NumberFromDouble(ceiling(x));
|
}
|
|
|
@@ -4190,7 +4238,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::COS, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::COS, x);
|
}
|
|
|
@@ -4199,7 +4247,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::EXP, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::EXP, x);
|
}
|
|
|
@@ -4208,7 +4256,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::NumberFromDouble(floor(x));
|
+ return v8_context()->heap_.NumberFromDouble(floor(x));
|
}
|
|
|
@@ -4217,7 +4265,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::LOG, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::LOG, x);
|
}
|
|
|
@@ -4257,12 +4305,13 @@
|
ASSERT(args.length() == 2);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
+ V8Context * const v8context = v8_context();
|
|
// If the second argument is a smi, it is much faster to call the
|
// custom powi() function than the generic pow().
|
if (args[1]->IsSmi()) {
|
int y = Smi::cast(args[1])->value();
|
- return Heap::AllocateHeapNumber(powi(x, y));
|
+ return v8context->heap_.AllocateHeapNumber(powi(x, y));
|
}
|
|
CONVERT_DOUBLE_CHECKED(y, args[1]);
|
@@ -4273,19 +4322,19 @@
|
// square root of a number. To speed up such computations, we
|
// explicitly check for this case and use the sqrt() function
|
// which is faster than pow().
|
- return Heap::AllocateHeapNumber(sqrt(x));
|
+ return v8context->heap_.AllocateHeapNumber(sqrt(x));
|
} else if (y == -0.5) {
|
// Optimized using Math.pow(x, -0.5) == 1 / Math.pow(x, 0.5).
|
- return Heap::AllocateHeapNumber(1.0 / sqrt(x));
|
+ return v8context->heap_.AllocateHeapNumber(1.0 / sqrt(x));
|
}
|
}
|
|
if (y == 0) {
|
return Smi::FromInt(1);
|
} else if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) {
|
- return Heap::nan_value();
|
+ return v8context->heap_.nan_value();
|
} else {
|
- return Heap::AllocateHeapNumber(pow(x, y));
|
+ return v8context->heap_.AllocateHeapNumber(pow(x, y));
|
}
|
}
|
|
@@ -4295,8 +4344,8 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- if (signbit(x) && x >= -0.5) return Heap::minus_zero_value();
|
- return Heap::NumberFromDouble(floor(x + 0.5));
|
+ if (signbit(x) && x >= -0.5) return v8_context()->heap_.minus_zero_value();
|
+ return v8_context()->heap_.NumberFromDouble(floor(x + 0.5));
|
}
|
|
|
@@ -4305,7 +4354,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::SIN, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::SIN, x);
|
}
|
|
|
@@ -4314,7 +4363,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::AllocateHeapNumber(sqrt(x));
|
+ return v8_context()->heap_.AllocateHeapNumber(sqrt(x));
|
}
|
|
|
@@ -4323,7 +4372,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return TranscendentalCache::Get(TranscendentalCache::TAN, x);
|
+ return v8_context()->transcendental_caches_.Get(TranscendentalCache::TAN, x);
|
}
|
|
|
@@ -4342,11 +4391,12 @@
|
it.AdvanceToArgumentsFrame();
|
JavaScriptFrame* frame = it.frame();
|
|
+ V8Context * const v8context = v8_context();
|
const int length = frame->GetProvidedParametersCount();
|
- Object* result = Heap::AllocateArgumentsObject(callee, length);
|
+ Object* result = v8context->heap_.AllocateArgumentsObject(callee, length);
|
if (result->IsFailure()) return result;
|
if (length > 0) {
|
- Object* obj = Heap::AllocateFixedArray(length);
|
+ Object* obj = v8context->heap_.AllocateFixedArray(length);
|
if (obj->IsFailure()) return obj;
|
FixedArray* array = FixedArray::cast(obj);
|
ASSERT(array->length() == length);
|
@@ -4368,16 +4418,17 @@
|
Object** parameters = reinterpret_cast<Object**>(args[1]);
|
const int length = Smi::cast(args[2])->value();
|
|
- Object* result = Heap::AllocateArgumentsObject(callee, length);
|
+ V8Context * const v8context = v8_context();
|
+ Object* result = v8context->heap_.AllocateArgumentsObject(callee, length);
|
if (result->IsFailure()) return result;
|
- ASSERT(Heap::InNewSpace(result));
|
+ ASSERT(v8context->heap_.InNewSpace(result));
|
|
// Allocate the elements if needed.
|
if (length > 0) {
|
// Allocate the fixed array.
|
- Object* obj = Heap::AllocateRawFixedArray(length);
|
+ Object* obj = v8context->heap_.AllocateRawFixedArray(length);
|
if (obj->IsFailure()) return obj;
|
- reinterpret_cast<Array*>(obj)->set_map(Heap::fixed_array_map());
|
+ reinterpret_cast<Array*>(obj)->set_map(v8context->heap_.fixed_array_map());
|
FixedArray* array = FixedArray::cast(obj);
|
array->set_length(length);
|
WriteBarrierMode mode = array->GetWriteBarrierMode();
|
@@ -4404,6 +4455,7 @@
|
|
|
static Code* ComputeConstructStub(Handle<SharedFunctionInfo> shared) {
|
+ V8Context * const v8context = v8_context();
|
// TODO(385): Change this to create a construct stub specialized for
|
// the given map to make allocation of simple objects - and maybe
|
// arrays - much faster.
|
@@ -4412,12 +4464,12 @@
|
ConstructStubCompiler compiler;
|
Object* code = compiler.CompileConstructStub(*shared);
|
if (code->IsFailure()) {
|
- return Builtins::builtin(Builtins::JSConstructStubGeneric);
|
+ return v8context->builtins_.builtin(Builtins::JSConstructStubGeneric);
|
}
|
return Code::cast(code);
|
}
|
|
- return Builtins::builtin(Builtins::JSConstructStubGeneric);
|
+ return v8context->builtins_.builtin(Builtins::JSConstructStubGeneric);
|
}
|
|
|
@@ -4427,19 +4479,21 @@
|
|
Handle<Object> constructor = args.at<Object>(0);
|
|
+ V8Context * const v8context = v8_context();
|
+
|
// If the constructor isn't a proper function we throw a type error.
|
if (!constructor->IsJSFunction()) {
|
Vector< Handle<Object> > arguments = HandleVector(&constructor, 1);
|
Handle<Object> type_error =
|
Factory::NewTypeError("not_constructor", arguments);
|
- return Top::Throw(*type_error);
|
+ return v8context->top_.Throw(*type_error);
|
}
|
|
Handle<JSFunction> function = Handle<JSFunction>::cast(constructor);
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
// Handle stepping into constructors if step into is active.
|
- if (Debug::StepInActive()) {
|
- Debug::HandleStepIn(function, Handle<Object>::null(), 0, true);
|
+ if (v8context->debug_.StepInActive()) {
|
+ v8context->debug_.HandleStepIn(function, Handle<Object>::null(), 0, true);
|
}
|
#endif
|
|
@@ -4456,7 +4510,7 @@
|
// instead of a new JSFunction object. This way, errors are
|
// reported the same way whether or not 'Function' is called
|
// using 'new'.
|
- return Top::context()->global();
|
+ return v8context->top_.context()->global();
|
}
|
}
|
|
@@ -4476,8 +4530,8 @@
|
function->shared()->set_construct_stub(*stub);
|
}
|
|
- Counters::constructed_objects.Increment();
|
- Counters::constructed_objects_runtime.Increment();
|
+ v8context->counters_.constructed_objects.Increment();
|
+ v8context->counters_.constructed_objects_runtime.Increment();
|
|
return *result;
|
}
|
@@ -4555,16 +4609,18 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_CHECKED(JSFunction, function, args[0]);
|
+ V8Context * const v8context = v8_context();
|
int length = ScopeInfo<>::NumberOfContextSlots(function->code());
|
- Object* result = Heap::AllocateFunctionContext(length, function);
|
+ Object* result = v8context->heap_.AllocateFunctionContext(length, function);
|
if (result->IsFailure()) return result;
|
|
- Top::set_context(Context::cast(result));
|
+ v8context->top_.set_context(Context::cast(result));
|
|
return result; // non-failure
|
}
|
|
static Object* PushContextHelper(Object* object, bool is_catch_context) {
|
+ V8Context * const v8context = v8_context();
|
// Convert the object to a proper JavaScript object.
|
Object* js_object = object;
|
if (!js_object->IsJSObject()) {
|
@@ -4575,18 +4631,18 @@
|
Handle<Object> handle(object);
|
Handle<Object> result =
|
Factory::NewTypeError("with_expression", HandleVector(&handle, 1));
|
- return Top::Throw(*result);
|
+ return v8context->top_.Throw(*result);
|
}
|
}
|
|
Object* result =
|
- Heap::AllocateWithContext(Top::context(),
|
+ v8context->heap_.AllocateWithContext(v8context->top_.context(),
|
JSObject::cast(js_object),
|
is_catch_context);
|
if (result->IsFailure()) return result;
|
|
Context* context = Context::cast(result);
|
- Top::set_context(context);
|
+ v8context->top_.set_context(context);
|
|
return result;
|
}
|
@@ -4625,7 +4681,7 @@
|
}
|
|
// No intermediate context found. Use global object by default.
|
- return Top::context()->global();
|
+ return v8_context()->top_.context()->global();
|
}
|
|
|
@@ -4661,13 +4717,13 @@
|
static inline Object* Unhole(Object* x, PropertyAttributes attributes) {
|
ASSERT(!x->IsTheHole() || (attributes & READ_ONLY) != 0);
|
USE(attributes);
|
- return x->IsTheHole() ? Heap::undefined_value() : x;
|
+ return x->IsTheHole() ? v8_context()->heap_.undefined_value() : x;
|
}
|
|
|
static JSObject* ComputeReceiverForNonGlobal(JSObject* holder) {
|
ASSERT(!holder->IsGlobalObject());
|
- Context* top = Top::context();
|
+ Context* top = v8_context()->top_.context();
|
// Get the context extension function.
|
JSFunction* context_extension_function =
|
top->global_context()->context_extension_function();
|
@@ -4688,8 +4744,9 @@
|
HandleScope scope;
|
ASSERT_EQ(2, args.length());
|
|
+ V8Context * const v8context = v8_context();
|
if (!args[0]->IsContext() || !args[1]->IsString()) {
|
- return MakePair(Top::ThrowIllegalOperation(), NULL);
|
+ return MakePair(v8context->top_.ThrowIllegalOperation(), NULL);
|
}
|
Handle<Context> context = args.at<Context>(0);
|
Handle<String> name = args.at<String>(1);
|
@@ -4707,7 +4764,7 @@
|
// If the "property" we were looking for is a local variable or an
|
// argument in a context, the receiver is the global object; see
|
// ECMA-262, 3rd., 10.1.6 and 10.2.3.
|
- JSObject* receiver = Top::context()->global()->global_receiver();
|
+ JSObject* receiver = v8context->top_.context()->global()->global_receiver();
|
Object* value = (holder->IsContext())
|
? Context::cast(*holder)->get(index)
|
: JSObject::cast(*holder)->GetElement(index);
|
@@ -4722,7 +4779,7 @@
|
if (object->IsGlobalObject()) {
|
receiver = GlobalObject::cast(object)->global_receiver();
|
} else if (context->is_exception_holder(*holder)) {
|
- receiver = Top::context()->global()->global_receiver();
|
+ receiver = v8context->top_.context()->global()->global_receiver();
|
} else {
|
receiver = ComputeReceiverForNonGlobal(object);
|
}
|
@@ -4736,10 +4793,10 @@
|
// The property doesn't exist - throw exception.
|
Handle<Object> reference_error =
|
Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
|
- return MakePair(Top::Throw(*reference_error), NULL);
|
+ return MakePair(v8context->top_.Throw(*reference_error), NULL);
|
} else {
|
// The property doesn't exist - return undefined
|
- return MakePair(Heap::undefined_value(), Heap::undefined_value());
|
+ return MakePair(v8context->heap_.undefined_value(), v8context->heap_.undefined_value());
|
}
|
}
|
|
@@ -4795,7 +4852,7 @@
|
// The property was not found. It needs to be stored in the global context.
|
ASSERT(attributes == ABSENT);
|
attributes = NONE;
|
- context_ext = Handle<JSObject>(Top::context()->global());
|
+ context_ext = Handle<JSObject>(v8_context()->top_.context()->global());
|
}
|
|
// Set the property, but ignore if read_only variable on the context
|
@@ -4807,7 +4864,7 @@
|
// Failure::Exception is converted to a null handle in the
|
// handle-based methods such as SetProperty. We therefore need
|
// to convert null handles back to exceptions.
|
- ASSERT(Top::has_pending_exception());
|
+ ASSERT(v8_context()->top_.has_pending_exception());
|
return Failure::Exception();
|
}
|
}
|
@@ -4819,7 +4876,7 @@
|
HandleScope scope;
|
ASSERT(args.length() == 1);
|
|
- return Top::Throw(args[0]);
|
+ return v8_context()->top_.Throw(args[0]);
|
}
|
|
|
@@ -4827,13 +4884,13 @@
|
HandleScope scope;
|
ASSERT(args.length() == 1);
|
|
- return Top::ReThrow(args[0]);
|
+ return v8_context()->top_.ReThrow(args[0]);
|
}
|
|
|
static Object* Runtime_PromoteScheduledException(Arguments args) {
|
ASSERT_EQ(0, args.length());
|
- return Top::PromoteScheduledException();
|
+ return v8_context()->top_.PromoteScheduledException();
|
}
|
|
|
@@ -4844,13 +4901,13 @@
|
Handle<Object> name(args[0]);
|
Handle<Object> reference_error =
|
Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
|
- return Top::Throw(*reference_error);
|
+ return v8_context()->top_.Throw(*reference_error);
|
}
|
|
|
static Object* Runtime_StackOverflow(Arguments args) {
|
NoHandleAllocation na;
|
- return Top::StackOverflow();
|
+ return v8_context()->top_.StackOverflow();
|
}
|
|
|
@@ -4858,7 +4915,7 @@
|
ASSERT(args.length() == 1);
|
|
// First check if this is a real stack overflow.
|
- if (StackGuard::IsStackOverflow()) {
|
+ if (v8_context()->stack_guard_.IsStackOverflow()) {
|
return Runtime_StackOverflow(args);
|
}
|
|
@@ -4956,7 +5013,7 @@
|
ASSERT(args.length() == 0);
|
NoHandleAllocation ha;
|
PrintTransition(NULL);
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -4997,8 +5054,9 @@
|
static Object* Runtime_DebugTrace(Arguments args) {
|
ASSERT(args.length() == 0);
|
NoHandleAllocation ha;
|
- Top::PrintStack();
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->top_.PrintStack();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -5011,7 +5069,7 @@
|
// time is milliseconds. Therefore, we floor the result of getting
|
// the OS time.
|
double millis = floor(OS::TimeCurrentMillis());
|
- return Heap::NumberFromDouble(millis);
|
+ return v8_context()->heap_.NumberFromDouble(millis);
|
}
|
|
|
@@ -5040,7 +5098,7 @@
|
if (result) {
|
return *output;
|
} else {
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
}
|
|
@@ -5051,7 +5109,7 @@
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
const char* zone = OS::LocalTimezone(x);
|
- return Heap::AllocateStringFromUtf8(CStrVector(zone));
|
+ return v8_context()->heap_.AllocateStringFromUtf8(CStrVector(zone));
|
}
|
|
|
@@ -5059,7 +5117,7 @@
|
NoHandleAllocation ha;
|
ASSERT(args.length() == 0);
|
|
- return Heap::NumberFromDouble(OS::LocalTimeOffset());
|
+ return v8_context()->heap_.NumberFromDouble(OS::LocalTimeOffset());
|
}
|
|
|
@@ -5068,7 +5126,7 @@
|
ASSERT(args.length() == 1);
|
|
CONVERT_DOUBLE_CHECKED(x, args[0]);
|
- return Heap::NumberFromDouble(OS::DaylightSavingsOffset(x));
|
+ return v8_context()->heap_.NumberFromDouble(OS::DaylightSavingsOffset(x));
|
}
|
|
|
@@ -5079,9 +5137,9 @@
|
CONVERT_DOUBLE_CHECKED(value, args[0]);
|
Object* result;
|
if (isnan(value) || (fpclassify(value) == FP_INFINITE)) {
|
- result = Heap::false_value();
|
+ result = v8_context()->heap_.false_value();
|
} else {
|
- result = Heap::true_value();
|
+ result = v8_context()->heap_.true_value();
|
}
|
return result;
|
}
|
@@ -5090,7 +5148,7 @@
|
static Object* Runtime_GlobalReceiver(Arguments args) {
|
ASSERT(args.length() == 1);
|
Object* global = args[0];
|
- if (!global->IsJSGlobalObject()) return Heap::null_value();
|
+ if (!global->IsJSGlobalObject()) return v8_context()->heap_.null_value();
|
return JSGlobalObject::cast(global)->global_receiver();
|
}
|
|
@@ -5101,11 +5159,12 @@
|
CONVERT_ARG_CHECKED(String, source, 0);
|
CONVERT_ARG_CHECKED(Oddball, is_json, 1)
|
|
+ V8Context * const v8context = v8_context();
|
// Compile source string in the global context.
|
- Handle<Context> context(Top::context()->global_context());
|
+ Handle<Context> context(v8context->top_.context()->global_context());
|
Compiler::ValidationState validate = (is_json->IsTrue())
|
? Compiler::VALIDATE_JSON : Compiler::DONT_VALIDATE_JSON;
|
- Handle<JSFunction> boilerplate = Compiler::CompileEval(source,
|
+ Handle<JSFunction> boilerplate = v8context->compiler_.CompileEval(source,
|
context,
|
true,
|
validate);
|
@@ -5118,7 +5177,7 @@
|
|
static Handle<JSFunction> GetBuiltinFunction(String* name) {
|
LookupResult result;
|
- Top::global_context()->builtins()->LocalLookup(name, &result);
|
+ v8_context()->top_.global_context()->builtins()->LocalLookup(name, &result);
|
return Handle<JSFunction>(JSFunction::cast(result.GetValue()));
|
}
|
|
@@ -5132,7 +5191,7 @@
|
bool is_global = context->IsGlobalContext();
|
|
// Compile source string in the current context.
|
- Handle<JSFunction> boilerplate = Compiler::CompileEval(
|
+ Handle<JSFunction> boilerplate = v8_context()->compiler_.CompileEval(
|
source,
|
context,
|
is_global,
|
@@ -5173,20 +5232,21 @@
|
}
|
}
|
|
+ V8Context * const v8context = v8_context();
|
// If eval could not be resolved, it has been deleted and we need to
|
// throw a reference error.
|
if (attributes == ABSENT) {
|
Handle<Object> name = Factory::eval_symbol();
|
Handle<Object> reference_error =
|
Factory::NewReferenceError("not_defined", HandleVector(&name, 1));
|
- return Top::Throw(*reference_error);
|
+ return v8context->top_.Throw(*reference_error);
|
}
|
|
if (context->IsGlobalContext()) {
|
// 'eval' is bound in the global context, but it may have been overwritten.
|
// Compare it to the builtin 'GlobalEval' function to make sure.
|
Handle<JSFunction> global_eval =
|
- GetBuiltinFunction(Heap::global_eval_symbol());
|
+ GetBuiltinFunction(v8context->heap_.global_eval_symbol());
|
if (global_eval.is_identical_to(callee)) {
|
// A direct eval call.
|
if (args[1]->IsString()) {
|
@@ -5200,11 +5260,11 @@
|
} else {
|
// An eval call that is not called on a string. Global eval
|
// deals better with this.
|
- receiver = Handle<Object>(Top::global_context()->global());
|
+ receiver = Handle<Object>(v8context->top_.global_context()->global());
|
}
|
} else {
|
// 'eval' is overwritten. Just call the function with the given arguments.
|
- receiver = Handle<Object>(Top::global_context()->global());
|
+ receiver = Handle<Object>(v8context->top_.global_context()->global());
|
}
|
} else {
|
// 'eval' is not bound in the global context. Just call the function
|
@@ -5230,11 +5290,13 @@
|
HandleScope scope;
|
ASSERT(args.length() == 1);
|
CONVERT_ARG_CHECKED(JSFunction, func, 0);
|
+ V8Context * const v8context = v8_context();
|
+
|
ASSERT(func->map()->instance_type() ==
|
- Top::function_instance_map()->instance_type());
|
+ v8context->top_.function_instance_map()->instance_type());
|
ASSERT(func->map()->instance_size() ==
|
- Top::function_instance_map()->instance_size());
|
- func->set_map(*Top::function_instance_map());
|
+ v8context->top_.function_instance_map()->instance_size());
|
+ func->set_map(*v8context->top_.function_instance_map());
|
return *func;
|
}
|
|
@@ -5249,12 +5311,14 @@
|
RUNTIME_ASSERT(array->HasFastElements());
|
int length = Smi::cast(array->length())->value();
|
FixedArray* elements = FixedArray::cast(array->elements());
|
+ V8Context * const v8context = v8_context();
|
+
|
for (int i = 0; i < length; i++) {
|
- if (elements->get(i) == element) return Heap::false_value();
|
+ if (elements->get(i) == element) return v8context->heap_.false_value();
|
}
|
Object* obj = array->SetFastElement(length, element);
|
if (obj->IsFailure()) return obj;
|
- return Heap::true_value();
|
+ return v8context->heap_.true_value();
|
}
|
|
|
@@ -5317,6 +5381,8 @@
|
uint32_t len = Min(static_cast<uint32_t>(array->length()), range);
|
|
if (visitor != NULL) {
|
+ V8Context * const v8context = v8_context();
|
+
|
if (elements_are_ints) {
|
if (elements_are_guaranteed_smis) {
|
for (uint32_t j = 0; j < len; j++) {
|
@@ -5331,14 +5397,14 @@
|
visitor->visit(j, e);
|
} else {
|
Handle<Object> e(
|
- Heap::AllocateHeapNumber(static_cast<ElementType>(val)));
|
+ v8context->heap_.AllocateHeapNumber(static_cast<ElementType>(val)));
|
visitor->visit(j, e);
|
}
|
}
|
}
|
} else {
|
for (uint32_t j = 0; j < len; j++) {
|
- Handle<Object> e(Heap::AllocateHeapNumber(array->get(j)));
|
+ Handle<Object> e(v8context->heap_.AllocateHeapNumber(array->get(j)));
|
visitor->visit(j, e);
|
}
|
}
|
@@ -5648,7 +5714,7 @@
|
CONVERT_CHECKED(JSArray, to, args[1]);
|
to->SetContent(FixedArray::cast(from->elements()));
|
to->set_length(from->length());
|
- from->SetContent(Heap::empty_fixed_array());
|
+ from->SetContent(v8_context()->heap_.empty_fixed_array());
|
from->set_length(0);
|
return to;
|
}
|
@@ -5767,18 +5833,20 @@
|
RUNTIME_ASSERT(args[0]->IsJSFunction() ||
|
args[0]->IsUndefined() ||
|
args[0]->IsNull());
|
+ V8Context * const v8context = v8_context();
|
Handle<Object> callback = args.at<Object>(0);
|
Handle<Object> data = args.at<Object>(1);
|
- Debugger::SetEventListener(callback, data);
|
+ v8context->debug_.debugger()->SetEventListener(callback, data);
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
static Object* Runtime_Break(Arguments args) {
|
ASSERT(args.length() == 0);
|
- StackGuard::DebugBreak();
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->stack_guard_.DebugBreak();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -5805,7 +5873,7 @@
|
case NORMAL:
|
value = result->holder()->GetNormalizedProperty(result);
|
if (value->IsTheHole()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
return value;
|
case FIELD:
|
@@ -5813,7 +5881,7 @@
|
JSObject::cast(
|
result->holder())->FastPropertyAt(result->GetFieldIndex());
|
if (value->IsTheHole()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
return value;
|
case CONSTANT_FUNCTION:
|
@@ -5824,27 +5892,27 @@
|
value = receiver->GetPropertyWithCallback(
|
receiver, structure, name, result->holder());
|
if (value->IsException()) {
|
- value = Top::pending_exception();
|
- Top::clear_pending_exception();
|
+ value = v8_context()->top_.pending_exception();
|
+ v8_context()->top_.clear_pending_exception();
|
if (caught_exception != NULL) {
|
*caught_exception = true;
|
}
|
}
|
return value;
|
} else {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
}
|
case INTERCEPTOR:
|
case MAP_TRANSITION:
|
case CONSTANT_TRANSITION:
|
case NULL_DESCRIPTOR:
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
default:
|
UNREACHABLE();
|
}
|
UNREACHABLE();
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -5867,6 +5935,7 @@
|
|
CONVERT_ARG_CHECKED(JSObject, obj, 0);
|
CONVERT_ARG_CHECKED(String, name, 1);
|
+ V8Context * const v8context = v8_context();
|
|
// Make sure to set the current context to the context before the debugger was
|
// entered (if the debugger is entered). The reason for switching context here
|
@@ -5875,8 +5944,8 @@
|
// could have the assumption that its own global context is the current
|
// context and not some internal debugger context.
|
SaveContext save;
|
- if (Debug::InDebugger()) {
|
- Top::set_context(*Debug::debugger_entry()->GetContext());
|
+ if (v8context->debug_.InDebugger()) {
|
+ v8context->top_.set_context(*v8context->debug_.debugger_entry()->GetContext());
|
}
|
|
// Skip the global proxy as it has no properties and always delegates to the
|
@@ -5932,8 +6001,8 @@
|
details->set(1, property_details);
|
if (hasJavaScriptAccessors) {
|
details->set(2,
|
- caught_exception ? Heap::true_value()
|
- : Heap::false_value());
|
+ caught_exception ? v8context->heap_.true_value()
|
+ : v8context->heap_.false_value());
|
details->set(3, FixedArray::cast(*result_callback_obj)->get(0));
|
details->set(4, FixedArray::cast(*result_callback_obj)->get(1));
|
}
|
@@ -5945,7 +6014,7 @@
|
}
|
}
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -5962,7 +6031,7 @@
|
if (result.IsProperty()) {
|
return DebugLookupResultValue(*obj, *name, &result, NULL);
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -5972,7 +6041,7 @@
|
HandleScope scope;
|
ASSERT(args.length() == 1);
|
if (!args[0]->IsJSObject()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
CONVERT_ARG_CHECKED(JSObject, obj, 0);
|
|
@@ -6023,7 +6092,7 @@
|
HandleScope scope;
|
ASSERT(args.length() == 1);
|
if (!args[0]->IsJSObject()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
CONVERT_ARG_CHECKED(JSObject, obj, 0);
|
|
@@ -6093,7 +6162,7 @@
|
v8::Handle<v8::Array> result = GetKeysForNamedInterceptor(obj, obj);
|
if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -6108,7 +6177,7 @@
|
v8::Handle<v8::Array> result = GetKeysForIndexedInterceptor(obj, obj);
|
if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -6144,12 +6213,13 @@
|
static Object* Runtime_CheckExecutionState(Arguments args) {
|
ASSERT(args.length() >= 1);
|
CONVERT_NUMBER_CHECKED(int, break_id, Int32, args[0]);
|
+ V8Context * const v8context = v8_context();
|
// Check that the break id is valid.
|
- if (Debug::break_id() == 0 || break_id != Debug::break_id()) {
|
- return Top::Throw(Heap::illegal_execution_state_symbol());
|
+ if (v8context->debug_.break_id() == 0 || break_id != v8context->debug_.break_id()) {
|
+ return v8context->top_.Throw(v8context->heap_.illegal_execution_state_symbol());
|
}
|
|
- return Heap::true_value();
|
+ return v8context->heap_.true_value();
|
}
|
|
|
@@ -6163,7 +6233,7 @@
|
|
// Count all frames which are relevant to debugging stack trace.
|
int n = 0;
|
- StackFrame::Id id = Debug::break_frame_id();
|
+ StackFrame::Id id = v8_context()->debug_.break_frame_id();
|
if (id == StackFrame::NO_ID) {
|
// If there is no JavaScript stack frame count is 0.
|
return Smi::FromInt(0);
|
@@ -6206,12 +6276,13 @@
|
Object* check = Runtime_CheckExecutionState(args);
|
if (check->IsFailure()) return check;
|
CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]);
|
+ V8Context * const v8context = v8_context();
|
|
// Find the relevant frame with the requested index.
|
- StackFrame::Id id = Debug::break_frame_id();
|
+ StackFrame::Id id = v8context->debug_.break_frame_id();
|
if (id == StackFrame::NO_ID) {
|
// If there are no JavaScript stack frames return undefined.
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
int count = 0;
|
JavaScriptFrameIterator it(id);
|
@@ -6219,11 +6290,11 @@
|
if (count == index) break;
|
count++;
|
}
|
- if (it.done()) return Heap::undefined_value();
|
+ if (it.done()) return v8context->heap_.undefined_value();
|
|
// Traverse the saved contexts chain to find the active context for the
|
// selected frame.
|
- SaveContext* save = Top::save_context();
|
+ SaveContext* save = v8context->top_.save_context();
|
while (save != NULL && !save->below(it.frame())) {
|
save = save->prev();
|
}
|
@@ -6311,15 +6382,15 @@
|
if (position != RelocInfo::kNoPosition) {
|
details->set(kFrameDetailsSourcePositionIndex, Smi::FromInt(position));
|
} else {
|
- details->set(kFrameDetailsSourcePositionIndex, Heap::undefined_value());
|
+ details->set(kFrameDetailsSourcePositionIndex, v8context->heap_.undefined_value());
|
}
|
|
// Add the constructor information.
|
- details->set(kFrameDetailsConstructCallIndex, Heap::ToBoolean(constructor));
|
+ details->set(kFrameDetailsConstructCallIndex, v8context->heap_.ToBoolean(constructor));
|
|
// Add information on whether this frame is invoked in the debugger context.
|
details->set(kFrameDetailsDebuggerFrameIndex,
|
- Heap::ToBoolean(*save->context() == *Debug::debug_context()));
|
+ v8context->heap_.ToBoolean(*save->context() == *v8context->debug_.debug_context()));
|
|
// Fill the dynamic part.
|
int details_index = kFrameDetailsFirstDynamicIndex;
|
@@ -6330,14 +6401,14 @@
|
if (i < info.number_of_parameters()) {
|
details->set(details_index++, *info.parameter_name(i));
|
} else {
|
- details->set(details_index++, Heap::undefined_value());
|
+ details->set(details_index++, v8context->heap_.undefined_value());
|
}
|
|
// Parameter value.
|
if (i < it.frame()->GetProvidedParametersCount()) {
|
details->set(details_index++, it.frame()->GetParameter(i));
|
} else {
|
- details->set(details_index++, Heap::undefined_value());
|
+ details->set(details_index++, v8context->heap_.undefined_value());
|
}
|
}
|
|
@@ -6373,6 +6444,7 @@
|
ScopeInfo<>& scope_info,
|
Handle<Context> context,
|
Handle<JSObject> scope_object) {
|
+ V8Context * const v8context = v8_context();
|
// Fill all context locals to the context extension.
|
for (int i = Context::MIN_CONTEXT_SLOTS;
|
i < scope_info.number_of_context_slots();
|
@@ -6383,7 +6455,7 @@
|
NULL);
|
|
// Don't include the arguments shadow (.arguments) context variable.
|
- if (*scope_info.context_slot_name(i) != Heap::arguments_shadow_symbol()) {
|
+ if (*scope_info.context_slot_name(i) != v8context->heap_.arguments_shadow_symbol()) {
|
SetProperty(scope_object,
|
scope_info.context_slot_name(i),
|
Handle<Object>(context->get(context_index)), NONE);
|
@@ -6401,7 +6473,7 @@
|
|
// Allocate and initialize a JSObject with all the arguments, stack locals
|
// heap locals and extension properties of the debugged function.
|
- Handle<JSObject> local_scope = Factory::NewJSObject(Top::object_function());
|
+ Handle<JSObject> local_scope = Factory::NewJSObject(v8_context()->top_.object_function());
|
|
// First fill all parameters.
|
for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
|
@@ -6452,12 +6524,12 @@
|
|
// Allocate and initialize a JSObject with all the content of theis function
|
// closure.
|
- Handle<JSObject> closure_scope = Factory::NewJSObject(Top::object_function());
|
+ Handle<JSObject> closure_scope = Factory::NewJSObject(v8_context()->top_.object_function());
|
|
// Check whether the arguments shadow object exists.
|
int arguments_shadow_index =
|
ScopeInfo<>::ContextSlotIndex(*code,
|
- Heap::arguments_shadow_symbol(),
|
+ v8_context()->heap_.arguments_shadow_symbol(),
|
NULL);
|
if (arguments_shadow_index >= 0) {
|
// In this case all the arguments are available in the arguments shadow
|
@@ -6522,7 +6594,7 @@
|
// Checking for the existence of .result seems fragile, but the scope info
|
// saved with the code object does not otherwise have that information.
|
Handle<Code> code(function_->code());
|
- int index = ScopeInfo<>::StackSlotIndex(*code, Heap::result_symbol());
|
+ int index = ScopeInfo<>::StackSlotIndex(*code, v8_context()->heap_.result_symbol());
|
at_local_ = index < 0;
|
} else if (context_->is_function_context()) {
|
at_local_ = true;
|
@@ -6757,7 +6829,7 @@
|
n++;
|
}
|
if (it.Done()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
// Calculate the size of the result.
|
@@ -6784,7 +6856,7 @@
|
it.DebugPrint();
|
}
|
#endif
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -6796,21 +6868,23 @@
|
|
#if V8_HOST_ARCH_64_BIT
|
UNIMPLEMENTED();
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
#else
|
|
static const int kMaxCFramesSize = 200;
|
ScopedVector<OS::StackFrame> frames(kMaxCFramesSize);
|
int frames_count = OS::StackWalk(frames);
|
if (frames_count == OS::kStackWalkError) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
Handle<String> address_str = Factory::LookupAsciiSymbol("address");
|
Handle<String> text_str = Factory::LookupAsciiSymbol("text");
|
Handle<FixedArray> frames_array = Factory::NewFixedArray(frames_count);
|
+ V8Context * const v8context = v8_context();
|
+
|
for (int i = 0; i < frames_count; i++) {
|
- Handle<JSObject> frame_value = Factory::NewJSObject(Top::object_function());
|
+ Handle<JSObject> frame_value = Factory::NewJSObject(v8context->top_.object_function());
|
frame_value->SetProperty(
|
*address_str,
|
*Factory::NewNumberFromInt(reinterpret_cast<int>(frames[i].address)),
|
@@ -6844,7 +6918,7 @@
|
|
// Count all archived V8 threads.
|
int n = 0;
|
- for (ThreadState* thread = ThreadState::FirstInUse();
|
+ for (ThreadState* thread = v8_context()->thread_manager_.FirstInUse();
|
thread != NULL;
|
thread = thread->Next()) {
|
n++;
|
@@ -6877,27 +6951,28 @@
|
|
// Allocate array for result.
|
Handle<FixedArray> details = Factory::NewFixedArray(kThreadDetailsSize);
|
+ V8Context * const v8context = v8_context();
|
|
// Thread index 0 is current thread.
|
if (index == 0) {
|
// Fill the details.
|
- details->set(kThreadDetailsCurrentThreadIndex, Heap::true_value());
|
+ details->set(kThreadDetailsCurrentThreadIndex, v8context->heap_.true_value());
|
details->set(kThreadDetailsThreadIdIndex,
|
- Smi::FromInt(ThreadManager::CurrentId()));
|
+ Smi::FromInt(v8context->thread_manager_.CurrentId()));
|
} else {
|
// Find the thread with the requested index.
|
int n = 1;
|
- ThreadState* thread = ThreadState::FirstInUse();
|
+ ThreadState* thread = v8context->thread_manager_.FirstInUse();
|
while (index != n && thread != NULL) {
|
thread = thread->Next();
|
n++;
|
}
|
if (thread == NULL) {
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
// Fill the details.
|
- details->set(kThreadDetailsCurrentThreadIndex, Heap::false_value());
|
+ details->set(kThreadDetailsCurrentThreadIndex, v8context->heap_.false_value());
|
details->set(kThreadDetailsThreadIdIndex, Smi::FromInt(thread->id()));
|
}
|
|
@@ -6912,9 +6987,10 @@
|
|
CONVERT_ARG_CHECKED(JSFunction, fun, 0);
|
Handle<SharedFunctionInfo> shared(fun->shared());
|
+ V8Context * const v8context = v8_context();
|
// Find the number of break points
|
- Handle<Object> break_locations = Debug::GetSourceBreakLocations(shared);
|
- if (break_locations->IsUndefined()) return Heap::undefined_value();
|
+ Handle<Object> break_locations = v8context->debug_.GetSourceBreakLocations(shared);
|
+ if (break_locations->IsUndefined()) return v8context->heap_.undefined_value();
|
// Return array as JS array
|
return *Factory::NewJSArrayWithElements(
|
Handle<FixedArray>::cast(break_locations));
|
@@ -6934,10 +7010,11 @@
|
RUNTIME_ASSERT(source_position >= 0);
|
Handle<Object> break_point_object_arg = args.at<Object>(2);
|
|
+ V8Context * const v8context = v8_context();
|
// Set break point.
|
- Debug::SetBreakPoint(shared, source_position, break_point_object_arg);
|
+ v8context->debug_.SetBreakPoint(shared, source_position, break_point_object_arg);
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7015,7 +7092,7 @@
|
target = last;
|
} else {
|
// Unable to find function - possibly script without any function.
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
}
|
|
@@ -7063,9 +7140,9 @@
|
} else {
|
position = source_position - shared->start_position();
|
}
|
- Debug::SetBreakPoint(shared, position, break_point_object_arg);
|
+ v8_context()->debug_.SetBreakPoint(shared, position, break_point_object_arg);
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -7076,10 +7153,11 @@
|
ASSERT(args.length() == 1);
|
Handle<Object> break_point_object_arg = args.at<Object>(0);
|
|
+ V8Context * const v8context = v8_context();
|
// Clear break point.
|
- Debug::ClearBreakPoint(break_point_object_arg);
|
+ v8context->debug_.ClearBreakPoint(break_point_object_arg);
|
|
- return Heap::undefined_value();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7096,8 +7174,9 @@
|
ExceptionBreakType type =
|
static_cast<ExceptionBreakType>(NumberToUint32(args[0]));
|
bool enable = args[1]->ToBoolean()->IsTrue();
|
- Debug::ChangeBreakOnException(type, enable);
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->debug_.ChangeBreakOnException(type, enable);
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7109,11 +7188,12 @@
|
static Object* Runtime_PrepareStep(Arguments args) {
|
HandleScope scope;
|
ASSERT(args.length() == 3);
|
+ V8Context * const v8context = v8_context();
|
// Check arguments.
|
Object* check = Runtime_CheckExecutionState(args);
|
if (check->IsFailure()) return check;
|
if (!args[1]->IsNumber() || !args[2]->IsNumber()) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
|
// Get the step action and check validity.
|
@@ -7123,21 +7203,21 @@
|
step_action != StepOut &&
|
step_action != StepInMin &&
|
step_action != StepMin) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
|
// Get the number of steps.
|
int step_count = NumberToInt32(args[2]);
|
if (step_count < 1) {
|
- return Top::Throw(Heap::illegal_argument_symbol());
|
+ return v8context->top_.Throw(v8context->heap_.illegal_argument_symbol());
|
}
|
|
// Clear all current stepping setup.
|
- Debug::ClearStepping();
|
+ v8context->debug_.ClearStepping();
|
|
// Prepare step.
|
- Debug::PrepareStep(static_cast<StepAction>(step_action), step_count);
|
- return Heap::undefined_value();
|
+ v8context->debug_.PrepareStep(static_cast<StepAction>(step_action), step_count);
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7145,8 +7225,9 @@
|
static Object* Runtime_ClearStepping(Arguments args) {
|
HandleScope scope;
|
ASSERT(args.length() == 0);
|
- Debug::ClearStepping();
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->debug_.ClearStepping();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7181,14 +7262,14 @@
|
// does not support eval) then create an 'arguments' object.
|
int index;
|
if (sinfo->number_of_stack_slots() > 0) {
|
- index = ScopeInfo<>::StackSlotIndex(*code, Heap::arguments_symbol());
|
+ index = ScopeInfo<>::StackSlotIndex(*code, v8_context()->heap_.arguments_symbol());
|
if (index != -1) {
|
return Handle<Object>(frame->GetExpression(index));
|
}
|
}
|
|
if (sinfo->number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
|
- index = ScopeInfo<>::ContextSlotIndex(*code, Heap::arguments_symbol(),
|
+ index = ScopeInfo<>::ContextSlotIndex(*code, v8_context()->heap_.arguments_symbol(),
|
NULL);
|
if (index != -1) {
|
return Handle<Object>(function_context->get(index));
|
@@ -7241,15 +7322,16 @@
|
Handle<Code> code(function->code());
|
ScopeInfo<> sinfo(*code);
|
|
+ V8Context * const v8context = v8_context();
|
// Traverse the saved contexts chain to find the active context for the
|
// selected frame.
|
- SaveContext* save = Top::save_context();
|
+ SaveContext* save = v8context->top_.save_context();
|
while (save != NULL && !save->below(frame)) {
|
save = save->prev();
|
}
|
ASSERT(save != NULL);
|
SaveContext savex;
|
- Top::set_context(*(save->context()));
|
+ v8context->top_.set_context(*(save->context()));
|
|
// Create the (empty) function replacing the function on the stack frame for
|
// the purpose of evaluating in the context created below. It is important
|
@@ -7291,7 +7373,7 @@
|
Factory::NewStringFromAscii(Vector<const char>(source_str,
|
source_str_length));
|
Handle<JSFunction> boilerplate =
|
- Compiler::CompileEval(function_source,
|
+ v8context->compiler_.CompileEval(function_source,
|
context,
|
context->IsGlobalContext(),
|
Compiler::DONT_VALIDATE_JSON);
|
@@ -7343,23 +7425,24 @@
|
// Handle the processing of break.
|
DisableBreak disable_break_save(disable_break);
|
|
+ V8Context * const v8context = v8_context();
|
// Enter the top context from before the debugger was invoked.
|
SaveContext save;
|
SaveContext* top = &save;
|
- while (top != NULL && *top->context() == *Debug::debug_context()) {
|
+ while (top != NULL && *top->context() == *v8context->debug_.debug_context()) {
|
top = top->prev();
|
}
|
if (top != NULL) {
|
- Top::set_context(*top->context());
|
+ v8context->top_.set_context(*top->context());
|
}
|
|
// Get the global context now set to the top context from before the
|
// debugger was invoked.
|
- Handle<Context> context = Top::global_context();
|
+ Handle<Context> context = v8context->top_.global_context();
|
|
// Compile the source to be evaluated.
|
Handle<JSFunction> boilerplate =
|
- Handle<JSFunction>(Compiler::CompileEval(source,
|
+ Handle<JSFunction>(v8context->compiler_.CompileEval(source,
|
context,
|
true,
|
Compiler::DONT_VALIDATE_JSON));
|
@@ -7370,7 +7453,7 @@
|
|
// Invoke the result of the compilation to get the evaluation function.
|
bool has_pending_exception;
|
- Handle<Object> receiver = Top::global();
|
+ Handle<Object> receiver = v8context->top_.global();
|
Handle<Object> result =
|
Execution::Call(compiled_function, receiver, 0, NULL,
|
&has_pending_exception);
|
@@ -7384,7 +7467,7 @@
|
ASSERT(args.length() == 0);
|
|
// Fill the script objects.
|
- Handle<FixedArray> instances = Debug::GetLoadedScripts();
|
+ Handle<FixedArray> instances = v8_context()->debug_.GetLoadedScripts();
|
|
// Convert the script objects to proper JS objects.
|
for (int i = 0; i < instances->length(); i++) {
|
@@ -7399,7 +7482,7 @@
|
}
|
|
// Return result as a JS array.
|
- Handle<JSObject> result = Factory::NewJSObject(Top::array_function());
|
+ Handle<JSObject> result = Factory::NewJSObject(v8_context()->top_.array_function());
|
Handle<JSArray>::cast(result)->SetContent(*instances);
|
return *result;
|
}
|
@@ -7413,7 +7496,7 @@
|
NoHandleAllocation ha;
|
AssertNoAllocation no_alloc;
|
|
- // Iterate the heap.
|
+ // Iterate the heap.
|
int count = 0;
|
JSObject* last = NULL;
|
HeapIterator iterator;
|
@@ -7483,7 +7566,7 @@
|
ASSERT(args.length() == 3);
|
|
// First perform a full GC in order to avoid references from dead objects.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
// Check parameters.
|
CONVERT_CHECKED(JSObject, target, args[0]);
|
@@ -7495,7 +7578,7 @@
|
|
// Get the constructor function for context extension and arguments array.
|
JSObject* arguments_boilerplate =
|
- Top::context()->global_context()->arguments_boilerplate();
|
+ v8_context()->top_.context()->global_context()->arguments_boilerplate();
|
JSFunction* arguments_function =
|
JSFunction::cast(arguments_boilerplate->map()->constructor());
|
|
@@ -7505,7 +7588,7 @@
|
NULL, 0, arguments_function);
|
|
// Allocate an array to hold the result.
|
- Object* object = Heap::AllocateFixedArray(count);
|
+ Object* object = v8_context()->heap_.AllocateFixedArray(count);
|
if (object->IsFailure()) return object;
|
FixedArray* instances = FixedArray::cast(object);
|
|
@@ -7515,8 +7598,8 @@
|
|
// Return result as JS array.
|
Object* result =
|
- Heap::AllocateJSObject(
|
- Top::context()->global_context()->array_function());
|
+ v8_context()->heap_.AllocateJSObject(
|
+ v8_context()->top_.context()->global_context()->array_function());
|
if (!result->IsFailure()) JSArray::cast(result)->SetContent(instances);
|
return result;
|
}
|
@@ -7527,7 +7610,7 @@
|
FixedArray* instances, int instances_size) {
|
AssertNoAllocation no_alloc;
|
|
- // Iterate the heap.
|
+ // Iterate the heap.
|
int count = 0;
|
HeapIterator iterator;
|
while (iterator.has_next() &&
|
@@ -7559,7 +7642,7 @@
|
ASSERT(args.length() == 2);
|
|
// First perform a full GC in order to avoid dead objects.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
// Check parameters.
|
CONVERT_CHECKED(JSFunction, constructor, args[0]);
|
@@ -7571,7 +7654,7 @@
|
count = DebugConstructedBy(constructor, max_references, NULL, 0);
|
|
// Allocate an array to hold the result.
|
- Object* object = Heap::AllocateFixedArray(count);
|
+ Object* object = v8_context()->heap_.AllocateFixedArray(count);
|
if (object->IsFailure()) return object;
|
FixedArray* instances = FixedArray::cast(object);
|
|
@@ -7580,8 +7663,8 @@
|
|
// Return result as JS array.
|
Object* result =
|
- Heap::AllocateJSObject(
|
- Top::context()->global_context()->array_function());
|
+ v8_context()->heap_.AllocateJSObject(
|
+ v8_context()->top_.context()->global_context()->array_function());
|
if (!result->IsFailure()) JSArray::cast(result)->SetContent(instances);
|
return result;
|
}
|
@@ -7602,7 +7685,7 @@
|
static Object* Runtime_SystemBreak(Arguments args) {
|
ASSERT(args.length() == 0);
|
CPU::DebugBreak();
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -7617,7 +7700,7 @@
|
}
|
func->code()->PrintLn();
|
#endif // DEBUG
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -7632,7 +7715,7 @@
|
}
|
func->shared()->construct_stub()->PrintLn();
|
#endif // DEBUG
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -7783,7 +7866,7 @@
|
|
const char* version_string = v8::V8::GetVersion();
|
|
- return Heap::AllocateStringFromAscii(CStrVector(version_string), NOT_TENURED);
|
+ return v8_context()->heap_.AllocateStringFromAscii(CStrVector(version_string), NOT_TENURED);
|
}
|
|
|
@@ -7791,7 +7874,7 @@
|
ASSERT(args.length() == 2);
|
OS::PrintError("abort: %s\n", reinterpret_cast<char*>(args[0]) +
|
Smi::cast(args[1])->value());
|
- Top::PrintStack();
|
+ v8_context()->top_.PrintStack();
|
OS::Abort();
|
UNREACHABLE();
|
return NULL;
|
@@ -7800,8 +7883,9 @@
|
|
static Object* Runtime_DeleteHandleScopeExtensions(Arguments args) {
|
ASSERT(args.length() == 0);
|
- HandleScope::DeleteExtensions();
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->handle_scope_implementer_.DeleteExtensions();
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7835,8 +7919,9 @@
|
CONVERT_CHECKED(String, format, args[0]);
|
CONVERT_CHECKED(JSArray, elms, args[1]);
|
Vector<const char> chars = format->ToAsciiVector();
|
- Logger::LogRuntime(chars, elms);
|
- return Heap::undefined_value();
|
+ V8Context * const v8context = v8_context();
|
+ v8context->logger_.LogRuntime(chars, elms);
|
+ return v8context->heap_.undefined_value();
|
}
|
|
|
@@ -7882,14 +7967,20 @@
|
if (failure->IsRetryAfterGC()) {
|
// Try to do a garbage collection; ignore it if it fails. The C
|
// entry stub will throw an out-of-memory exception in that case.
|
- Heap::CollectGarbage(failure->requested(), failure->allocation_space());
|
+ v8_context()->heap_.CollectGarbage(failure->requested(), failure->allocation_space());
|
} else {
|
// Handle last resort GC and make sure to allow future allocations
|
// to grow the heap without causing GCs (if possible).
|
- Counters::gc_last_resort_from_js.Increment();
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->counters_.gc_last_resort_from_js.Increment();
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
}
|
|
+void Runtime::Setup() {
|
+ v8_context()->runtime_data_ = new RuntimeData();
|
+}
|
|
+void Runtime::TearDown() {
|
+ delete v8_context()->runtime_data_;
|
+}
|
} } // namespace v8::internal
|
Index: src/platform-win32.cc
|
===================================================================
|
--- src/platform-win32.cc (revision 3135)
|
+++ src/platform-win32.cc Sat Nov 14 01:42:57 MSK 2009
|
@@ -294,7 +294,7 @@
|
// Timezone information. We need to have static buffers for the
|
// timezone names because we return pointers to these in
|
// LocalTimezone().
|
- static bool tz_initialized_;
|
+ static bool tz_initialized_;///static
|
static TIME_ZONE_INFORMATION tzinfo_;
|
static char std_tz_name_[kTzNameSize];
|
static char dst_tz_name_[kTzNameSize];
|
@@ -473,7 +473,7 @@
|
// any time when more than kMaxClockElapsedTime has passed or
|
// whenever timeGetTime creates a rollover.
|
|
- static bool initialized = false;
|
+ static bool initialized = false;///static
|
static TimeStamp init_time;
|
static DWORD init_ticks;
|
static const int64_t kHundredNanosecondsPerSecond = 10000000;
|
@@ -669,7 +669,7 @@
|
ODS // Output is written to debug facility.
|
};
|
|
-static OutputMode output_mode = UNKNOWN; // Current output mode.
|
+static OutputMode output_mode = UNKNOWN; // Current output mode. ///static
|
|
|
// Determine if the process has a console for output.
|
@@ -812,7 +812,7 @@
|
// of two. The reason for always returning a power of two is that the
|
// rounding up in OS::Allocate expects that.
|
static size_t GetPageSize() {
|
- static size_t page_size = 0;
|
+ static size_t page_size = 0; ///static
|
if (page_size == 0) {
|
SYSTEM_INFO info;
|
GetSystemInfo(&info);
|
@@ -825,7 +825,7 @@
|
// The allocation alignment is the guaranteed alignment for
|
// VirtualAlloc'ed blocks of memory.
|
size_t OS::AllocateAlignment() {
|
- static size_t allocate_alignment = 0;
|
+ static size_t allocate_alignment = 0; ///static
|
if (allocate_alignment == 0) {
|
SYSTEM_INFO info;
|
GetSystemInfo(&info);
|
@@ -1113,7 +1113,7 @@
|
|
// Load the symbols for generating stack traces.
|
static bool LoadSymbols(HANDLE process_handle) {
|
- static bool symbols_loaded = false;
|
+ static bool symbols_loaded = false; ///static
|
|
if (symbols_loaded) return true;
|
|
@@ -1796,6 +1796,7 @@
|
// Context used for sampling the register state of the profiled thread.
|
CONTEXT context;
|
memset(&context, 0, sizeof(context));
|
+ Logger & logger = v8_context()->logger_;
|
// Loop until the sampler is disengaged.
|
while (sampler_->IsActive()) {
|
TickSample sample;
|
@@ -1820,7 +1821,7 @@
|
}
|
|
// We always sample the VM state.
|
- sample.state = Logger::state();
|
+ sample.state = logger.state();
|
// Invoke tick handler with program counter and stack pointer.
|
sampler_->Tick(&sample);
|
|
Index: src/ic.h
|
===================================================================
|
--- src/ic.h (revision 3096)
|
+++ src/ic.h Sat Nov 14 01:43:01 MSK 2009
|
@@ -237,13 +237,13 @@
|
|
// Stub accessors.
|
static Code* megamorphic_stub() {
|
- return Builtins::builtin(Builtins::LoadIC_Megamorphic);
|
+ return v8_context()->builtins_.builtin(Builtins::LoadIC_Megamorphic);
|
}
|
static Code* initialize_stub() {
|
- return Builtins::builtin(Builtins::LoadIC_Initialize);
|
+ return v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize);
|
}
|
static Code* pre_monomorphic_stub() {
|
- return Builtins::builtin(Builtins::LoadIC_PreMonomorphic);
|
+ return v8_context()->builtins_.builtin(Builtins::LoadIC_PreMonomorphic);
|
}
|
|
static void Clear(Address address, Code* target);
|
@@ -290,16 +290,16 @@
|
|
// Stub accessors.
|
static Code* initialize_stub() {
|
- return Builtins::builtin(Builtins::KeyedLoadIC_Initialize);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Initialize);
|
}
|
static Code* megamorphic_stub() {
|
- return Builtins::builtin(Builtins::KeyedLoadIC_Generic);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Generic);
|
}
|
static Code* generic_stub() {
|
- return Builtins::builtin(Builtins::KeyedLoadIC_Generic);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Generic);
|
}
|
static Code* pre_monomorphic_stub() {
|
- return Builtins::builtin(Builtins::KeyedLoadIC_PreMonomorphic);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_PreMonomorphic);
|
}
|
static Code* external_array_stub(JSObject::ElementsKind elements_kind);
|
|
@@ -340,10 +340,10 @@
|
|
// Stub accessors.
|
static Code* megamorphic_stub() {
|
- return Builtins::builtin(Builtins::StoreIC_Megamorphic);
|
+ return v8_context()->builtins_.builtin(Builtins::StoreIC_Megamorphic);
|
}
|
static Code* initialize_stub() {
|
- return Builtins::builtin(Builtins::StoreIC_Initialize);
|
+ return v8_context()->builtins_.builtin(Builtins::StoreIC_Initialize);
|
}
|
|
static void Clear(Address address, Code* target);
|
@@ -391,13 +391,13 @@
|
|
// Stub accessors.
|
static Code* initialize_stub() {
|
- return Builtins::builtin(Builtins::KeyedStoreIC_Initialize);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Initialize);
|
}
|
static Code* megamorphic_stub() {
|
- return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Generic);
|
}
|
static Code* generic_stub() {
|
- return Builtins::builtin(Builtins::KeyedStoreIC_Generic);
|
+ return v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Generic);
|
}
|
static Code* external_array_stub(JSObject::ElementsKind elements_kind);
|
|
Index: src/ic.cc
|
===================================================================
|
--- src/ic.cc (revision 3096)
|
+++ src/ic.cc Sat Nov 14 01:43:10 MSK 2009
|
@@ -63,7 +63,7 @@
|
Code* new_target,
|
const char* extra_info) {
|
if (FLAG_trace_ic) {
|
- State new_state = StateFrom(new_target, Heap::undefined_value());
|
+ State new_state = StateFrom(new_target, v8_context()->heap_.undefined_value());
|
PrintF("[%s (%c->%c)%s", type,
|
TransitionMarkFromState(old_state),
|
TransitionMarkFromState(new_state),
|
@@ -79,7 +79,8 @@
|
// To improve the performance of the (much used) IC code, we unfold
|
// a few levels of the stack frame iteration code. This yields a
|
// ~35% speedup when running DeltaBlue with the '--nouse-ic' flag.
|
- const Address entry = Top::c_entry_fp(Top::GetCurrentThread());
|
+ Top& top = v8_context()->top_;
|
+ const Address entry = top.c_entry_fp(top.GetCurrentThread());
|
Address* pc_address =
|
reinterpret_cast<Address*>(entry + ExitFrameConstants::kCallerPCOffset);
|
Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
|
@@ -116,8 +117,9 @@
|
JSFunction* function = JSFunction::cast(frame->function());
|
Handle<SharedFunctionInfo> shared(function->shared());
|
Code* code = shared->code();
|
- ASSERT(Debug::HasDebugInfo(shared));
|
- Code* original_code = Debug::GetDebugInfo(shared)->original_code();
|
+ Debug& debug = v8_context()->debug_;
|
+ ASSERT(debug.HasDebugInfo(shared));
|
+ Code* original_code = debug.GetDebugInfo(shared)->original_code();
|
ASSERT(original_code->IsCode());
|
// Get the address of the call site in the active code. This is the
|
// place where the call to DebugBreakXXX is and where the IC
|
@@ -180,7 +182,7 @@
|
|
RelocInfo::Mode IC::ComputeMode() {
|
Address addr = address();
|
- Code* code = Code::cast(Heap::FindCodeObject(addr));
|
+ Code* code = Code::cast(v8_context()->heap_.FindCodeObject(addr));
|
for (RelocIterator it(code, RelocInfo::kCodeTargetMask);
|
!it.done(); it.next()) {
|
RelocInfo* info = it.rinfo();
|
@@ -197,7 +199,7 @@
|
HandleScope scope;
|
Handle<Object> args[2] = { name, object };
|
Handle<Object> error = Factory::NewTypeError(type, HandleVector(args, 2));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
|
@@ -205,7 +207,7 @@
|
HandleScope scope;
|
Handle<Object> error =
|
Factory::NewReferenceError(type, HandleVector(&name, 1));
|
- return Top::Throw(*error);
|
+ return v8_context()->top_.Throw(*error);
|
}
|
|
|
@@ -231,7 +233,7 @@
|
InLoopFlag in_loop = target->ic_in_loop();
|
if (state == UNINITIALIZED) return;
|
Code* code =
|
- StubCache::FindCallInitialize(target->arguments_count(), in_loop);
|
+ v8_context()->stub_cache_.FindCallInitialize(target->arguments_count(), in_loop);
|
SetTargetAtAddress(address, code);
|
}
|
|
@@ -266,22 +268,23 @@
|
|
|
Code* KeyedLoadIC::external_array_stub(JSObject::ElementsKind elements_kind) {
|
+ Builtins& builtins = v8_context()->builtins_;
|
switch (elements_kind) {
|
case JSObject::EXTERNAL_BYTE_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalByteArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalByteArray);
|
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalUnsignedByteArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalUnsignedByteArray);
|
case JSObject::EXTERNAL_SHORT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalShortArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalShortArray);
|
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
|
- return Builtins::builtin(
|
+ return builtins.builtin(
|
Builtins::KeyedLoadIC_ExternalUnsignedShortArray);
|
case JSObject::EXTERNAL_INT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalIntArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalIntArray);
|
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalUnsignedIntArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalUnsignedIntArray);
|
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedLoadIC_ExternalFloatArray);
|
+ return builtins.builtin(Builtins::KeyedLoadIC_ExternalFloatArray);
|
default:
|
UNREACHABLE();
|
return NULL;
|
@@ -290,23 +293,24 @@
|
|
|
Code* KeyedStoreIC::external_array_stub(JSObject::ElementsKind elements_kind) {
|
+ Builtins& builtins = v8_context()->builtins_;
|
switch (elements_kind) {
|
case JSObject::EXTERNAL_BYTE_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedStoreIC_ExternalByteArray);
|
+ return builtins.builtin(Builtins::KeyedStoreIC_ExternalByteArray);
|
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
|
- return Builtins::builtin(
|
+ return builtins.builtin(
|
Builtins::KeyedStoreIC_ExternalUnsignedByteArray);
|
case JSObject::EXTERNAL_SHORT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedStoreIC_ExternalShortArray);
|
+ return builtins.builtin(Builtins::KeyedStoreIC_ExternalShortArray);
|
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
|
- return Builtins::builtin(
|
+ return builtins.builtin(
|
Builtins::KeyedStoreIC_ExternalUnsignedShortArray);
|
case JSObject::EXTERNAL_INT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedStoreIC_ExternalIntArray);
|
+ return builtins.builtin(Builtins::KeyedStoreIC_ExternalIntArray);
|
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedStoreIC_ExternalUnsignedIntArray);
|
+ return builtins.builtin(Builtins::KeyedStoreIC_ExternalUnsignedIntArray);
|
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
|
- return Builtins::builtin(Builtins::KeyedStoreIC_ExternalFloatArray);
|
+ return builtins.builtin(Builtins::KeyedStoreIC_ExternalFloatArray);
|
default:
|
UNREACHABLE();
|
return NULL;
|
@@ -434,7 +438,7 @@
|
}
|
}
|
|
- ASSERT(result != Heap::the_hole_value());
|
+ ASSERT(result != v8_context()->heap_.the_hole_value());
|
|
if (result->IsJSFunction()) {
|
// Check if there is an optimized (builtin) version of the function.
|
@@ -442,20 +446,21 @@
|
// Please note we only return the optimized function iff
|
// the JSObject has FastElements.
|
if (object->IsJSObject() && JSObject::cast(*object)->HasFastElements()) {
|
- Object* opt = Top::LookupSpecialFunction(JSObject::cast(*object),
|
+ Object* opt = v8_context()->top_.LookupSpecialFunction(JSObject::cast(*object),
|
lookup.holder(),
|
JSFunction::cast(result));
|
if (opt->IsJSFunction()) return opt;
|
}
|
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
+ Debug& debug = v8_context()->debug_;
|
// Handle stepping into a function if step into is active.
|
- if (Debug::StepInActive()) {
|
+ if (debug.StepInActive()) {
|
// Protect the result in a handle as the debugger can allocate and might
|
// cause GC.
|
HandleScope scope;
|
Handle<JSFunction> function(JSFunction::cast(result));
|
- Debug::HandleStepIn(function, object, fp(), false);
|
+ debug.HandleStepIn(function, object, fp(), false);
|
return *function;
|
}
|
#endif
|
@@ -487,15 +492,15 @@
|
// This is the first time we execute this inline cache.
|
// Set the target to the pre monomorphic stub to delay
|
// setting the monomorphic state.
|
- code = StubCache::ComputeCallPreMonomorphic(argc, in_loop);
|
+ code = v8_context()->stub_cache_.ComputeCallPreMonomorphic(argc, in_loop);
|
} else if (state == MONOMORPHIC) {
|
- code = StubCache::ComputeCallMegamorphic(argc, in_loop);
|
+ code = v8_context()->stub_cache_.ComputeCallMegamorphic(argc, in_loop);
|
} else {
|
// Compute monomorphic stub.
|
switch (lookup->type()) {
|
case FIELD: {
|
int index = lookup->GetFieldIndex();
|
- code = StubCache::ComputeCallField(argc, in_loop, *name, *object,
|
+ code = v8_context()->stub_cache_.ComputeCallField(argc, in_loop, *name, *object,
|
lookup->holder(), index);
|
break;
|
}
|
@@ -504,7 +509,7 @@
|
// call; used for rewriting to monomorphic state and making sure
|
// that the code stub is in the stub cache.
|
JSFunction* function = lookup->GetConstantFunction();
|
- code = StubCache::ComputeCallConstant(argc, in_loop, *name, *object,
|
+ code = v8_context()->stub_cache_.ComputeCallConstant(argc, in_loop, *name, *object,
|
lookup->holder(), function);
|
break;
|
}
|
@@ -518,7 +523,7 @@
|
JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
|
if (!cell->value()->IsJSFunction()) return;
|
JSFunction* function = JSFunction::cast(cell->value());
|
- code = StubCache::ComputeCallGlobal(argc,
|
+ code = v8_context()->stub_cache_.ComputeCallGlobal(argc,
|
in_loop,
|
*name,
|
*receiver,
|
@@ -531,13 +536,13 @@
|
// property must be found in the receiver for the stub to be
|
// applicable.
|
if (lookup->holder() != *receiver) return;
|
- code = StubCache::ComputeCallNormal(argc, in_loop, *name, *receiver);
|
+ code = v8_context()->stub_cache_.ComputeCallNormal(argc, in_loop, *name, *receiver);
|
}
|
break;
|
}
|
case INTERCEPTOR: {
|
ASSERT(HasInterceptorGetter(lookup->holder()));
|
- code = StubCache::ComputeCallInterceptor(argc, *name, *object,
|
+ code = v8_context()->stub_cache_.ComputeCallInterceptor(argc, *name, *object,
|
lookup->holder());
|
break;
|
}
|
@@ -572,12 +577,13 @@
|
}
|
|
if (FLAG_use_ic) {
|
+ Builtins& builtins = v8_context()->builtins_;
|
// Use specialized code for getting the length of strings and
|
// string wrapper objects. The length property of string wrapper
|
// objects is read-only and therefore always returns the length of
|
// the underlying string value. See ECMA-262 15.5.5.1.
|
if ((object->IsString() || object->IsStringWrapper()) &&
|
- name->Equals(Heap::length_symbol())) {
|
+ name->Equals(v8_context()->heap_.length_symbol())) {
|
HandleScope scope;
|
// Get the string if we have a string wrapper object.
|
if (object->IsJSValue()) {
|
@@ -587,31 +593,31 @@
|
if (FLAG_trace_ic) PrintF("[LoadIC : +#length /string]\n");
|
#endif
|
Code* target = NULL;
|
- target = Builtins::builtin(Builtins::LoadIC_StringLength);
|
+ target = builtins.builtin(Builtins::LoadIC_StringLength);
|
set_target(target);
|
- StubCache::Set(*name, HeapObject::cast(*object)->map(), target);
|
+ v8_context()->stub_cache_.Set(*name, HeapObject::cast(*object)->map(), target);
|
return Smi::FromInt(String::cast(*object)->length());
|
}
|
|
// Use specialized code for getting the length of arrays.
|
- if (object->IsJSArray() && name->Equals(Heap::length_symbol())) {
|
+ if (object->IsJSArray() && name->Equals(v8_context()->heap_.length_symbol())) {
|
#ifdef DEBUG
|
if (FLAG_trace_ic) PrintF("[LoadIC : +#length /array]\n");
|
#endif
|
- Code* target = Builtins::builtin(Builtins::LoadIC_ArrayLength);
|
+ Code* target = builtins.builtin(Builtins::LoadIC_ArrayLength);
|
set_target(target);
|
- StubCache::Set(*name, HeapObject::cast(*object)->map(), target);
|
+ v8_context()->stub_cache_.Set(*name, HeapObject::cast(*object)->map(), target);
|
return JSArray::cast(*object)->length();
|
}
|
|
// Use specialized code for getting prototype of functions.
|
- if (object->IsJSFunction() && name->Equals(Heap::prototype_symbol())) {
|
+ if (object->IsJSFunction() && name->Equals(v8_context()->heap_.prototype_symbol())) {
|
#ifdef DEBUG
|
if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
|
#endif
|
- Code* target = Builtins::builtin(Builtins::LoadIC_FunctionPrototype);
|
+ Code* target = builtins.builtin(Builtins::LoadIC_FunctionPrototype);
|
set_target(target);
|
- StubCache::Set(*name, HeapObject::cast(*object)->map(), target);
|
+ v8_context()->stub_cache_.Set(*name, HeapObject::cast(*object)->map(), target);
|
return Accessors::FunctionGetPrototype(*object, 0);
|
}
|
}
|
@@ -705,14 +711,14 @@
|
// Compute monomorphic stub.
|
switch (lookup->type()) {
|
case FIELD: {
|
- code = StubCache::ComputeLoadField(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeLoadField(*name, *receiver,
|
lookup->holder(),
|
lookup->GetFieldIndex());
|
break;
|
}
|
case CONSTANT_FUNCTION: {
|
Object* constant = lookup->GetConstantFunction();
|
- code = StubCache::ComputeLoadConstant(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeLoadConstant(*name, *receiver,
|
lookup->holder(), constant);
|
break;
|
}
|
@@ -721,7 +727,7 @@
|
GlobalObject* global = GlobalObject::cast(lookup->holder());
|
JSGlobalPropertyCell* cell =
|
JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
|
- code = StubCache::ComputeLoadGlobal(*name,
|
+ code = v8_context()->stub_cache_.ComputeLoadGlobal(*name,
|
*receiver,
|
global,
|
cell,
|
@@ -732,7 +738,7 @@
|
// property must be found in the receiver for the stub to be
|
// applicable.
|
if (lookup->holder() != *receiver) return;
|
- code = StubCache::ComputeLoadNormal(*name, *receiver);
|
+ code = v8_context()->stub_cache_.ComputeLoadNormal(*name, *receiver);
|
}
|
break;
|
}
|
@@ -741,13 +747,13 @@
|
AccessorInfo* callback =
|
AccessorInfo::cast(lookup->GetCallbackObject());
|
if (v8::ToCData<Address>(callback->getter()) == 0) return;
|
- code = StubCache::ComputeLoadCallback(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeLoadCallback(*name, *receiver,
|
lookup->holder(), callback);
|
break;
|
}
|
case INTERCEPTOR: {
|
ASSERT(HasInterceptorGetter(lookup->holder()));
|
- code = StubCache::ComputeLoadInterceptor(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeLoadInterceptor(*name, *receiver,
|
lookup->holder());
|
break;
|
}
|
@@ -788,10 +794,10 @@
|
|
if (FLAG_use_ic) {
|
// Use specialized code for getting the length of strings.
|
- if (object->IsString() && name->Equals(Heap::length_symbol())) {
|
+ if (object->IsString() && name->Equals(v8_context()->heap_.length_symbol())) {
|
Handle<String> string = Handle<String>::cast(object);
|
Object* code = NULL;
|
- code = StubCache::ComputeKeyedLoadStringLength(*name, *string);
|
+ code = v8_context()->stub_cache_.ComputeKeyedLoadStringLength(*name, *string);
|
if (code->IsFailure()) return code;
|
set_target(Code::cast(code));
|
#ifdef DEBUG
|
@@ -801,9 +807,9 @@
|
}
|
|
// Use specialized code for getting the length of arrays.
|
- if (object->IsJSArray() && name->Equals(Heap::length_symbol())) {
|
+ if (object->IsJSArray() && name->Equals(v8_context()->heap_.length_symbol())) {
|
Handle<JSArray> array = Handle<JSArray>::cast(object);
|
- Object* code = StubCache::ComputeKeyedLoadArrayLength(*name, *array);
|
+ Object* code = v8_context()->stub_cache_.ComputeKeyedLoadArrayLength(*name, *array);
|
if (code->IsFailure()) return code;
|
set_target(Code::cast(code));
|
#ifdef DEBUG
|
@@ -813,10 +819,10 @@
|
}
|
|
// Use specialized code for getting prototype of functions.
|
- if (object->IsJSFunction() && name->Equals(Heap::prototype_symbol())) {
|
+ if (object->IsJSFunction() && name->Equals(v8_context()->heap_.prototype_symbol())) {
|
Handle<JSFunction> function = Handle<JSFunction>::cast(object);
|
Object* code =
|
- StubCache::ComputeKeyedLoadFunctionPrototype(*name, *function);
|
+ v8_context()->stub_cache_.ComputeKeyedLoadFunctionPrototype(*name, *function);
|
if (code->IsFailure()) return code;
|
set_target(Code::cast(code));
|
#ifdef DEBUG
|
@@ -917,14 +923,14 @@
|
// Compute a monomorphic stub.
|
switch (lookup->type()) {
|
case FIELD: {
|
- code = StubCache::ComputeKeyedLoadField(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedLoadField(*name, *receiver,
|
lookup->holder(),
|
lookup->GetFieldIndex());
|
break;
|
}
|
case CONSTANT_FUNCTION: {
|
Object* constant = lookup->GetConstantFunction();
|
- code = StubCache::ComputeKeyedLoadConstant(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedLoadConstant(*name, *receiver,
|
lookup->holder(), constant);
|
break;
|
}
|
@@ -933,13 +939,13 @@
|
AccessorInfo* callback =
|
AccessorInfo::cast(lookup->GetCallbackObject());
|
if (v8::ToCData<Address>(callback->getter()) == 0) return;
|
- code = StubCache::ComputeKeyedLoadCallback(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedLoadCallback(*name, *receiver,
|
lookup->holder(), callback);
|
break;
|
}
|
case INTERCEPTOR: {
|
ASSERT(HasInterceptorGetter(lookup->holder()));
|
- code = StubCache::ComputeKeyedLoadInterceptor(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedLoadInterceptor(*name, *receiver,
|
lookup->holder());
|
break;
|
}
|
@@ -1062,7 +1068,7 @@
|
Object* code = NULL;
|
switch (type) {
|
case FIELD: {
|
- code = StubCache::ComputeStoreField(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeStoreField(*name, *receiver,
|
lookup->GetFieldIndex());
|
break;
|
}
|
@@ -1072,7 +1078,7 @@
|
ASSERT(type == MAP_TRANSITION);
|
Handle<Map> transition(lookup->GetTransitionMap());
|
int index = transition->PropertyIndexFor(*name);
|
- code = StubCache::ComputeStoreField(*name, *receiver, index, *transition);
|
+ code = v8_context()->stub_cache_.ComputeStoreField(*name, *receiver, index, *transition);
|
break;
|
}
|
case NORMAL: {
|
@@ -1085,19 +1091,19 @@
|
Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
|
JSGlobalPropertyCell* cell =
|
JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup));
|
- code = StubCache::ComputeStoreGlobal(*name, *global, cell);
|
+ code = v8_context()->stub_cache_.ComputeStoreGlobal(*name, *global, cell);
|
break;
|
}
|
case CALLBACKS: {
|
if (!lookup->GetCallbackObject()->IsAccessorInfo()) return;
|
AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
|
if (v8::ToCData<Address>(callback->setter()) == 0) return;
|
- code = StubCache::ComputeStoreCallback(*name, *receiver, callback);
|
+ code = v8_context()->stub_cache_.ComputeStoreCallback(*name, *receiver, callback);
|
break;
|
}
|
case INTERCEPTOR: {
|
ASSERT(!receiver->GetNamedInterceptor()->setter()->IsUndefined());
|
- code = StubCache::ComputeStoreInterceptor(*name, *receiver);
|
+ code = v8_context()->stub_cache_.ComputeStoreInterceptor(*name, *receiver);
|
break;
|
}
|
default:
|
@@ -1211,7 +1217,7 @@
|
|
switch (type) {
|
case FIELD: {
|
- code = StubCache::ComputeKeyedStoreField(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedStoreField(*name, *receiver,
|
lookup->GetFieldIndex());
|
break;
|
}
|
@@ -1221,7 +1227,7 @@
|
ASSERT(type == MAP_TRANSITION);
|
Handle<Map> transition(lookup->GetTransitionMap());
|
int index = transition->PropertyIndexFor(*name);
|
- code = StubCache::ComputeKeyedStoreField(*name, *receiver,
|
+ code = v8_context()->stub_cache_.ComputeKeyedStoreField(*name, *receiver,
|
index, *transition);
|
break;
|
}
|
Index: src/checks.cc
|
===================================================================
|
--- src/checks.cc (revision 1393)
|
+++ src/checks.cc Sun Nov 15 12:30:55 MSK 2009
|
@@ -50,7 +50,7 @@
|
if (fatal_error_handler_nesting_depth < 3) {
|
if (i::FLAG_stack_trace_on_abort) {
|
// Call this one twice on double fault
|
- i::Top::PrintStack();
|
+ v8::v8_context()->top_.PrintStack();
|
}
|
}
|
i::OS::Abort();
|
Index: src/heap-profiler.cc
|
===================================================================
|
--- src/heap-profiler.cc (revision 3230)
|
+++ src/heap-profiler.cc Sat Nov 14 01:43:04 MSK 2009
|
@@ -69,15 +69,16 @@
|
if (obj->IsJSObject()) {
|
JSObject* js_obj = JSObject::cast(obj);
|
String* constructor = JSObject::cast(js_obj)->constructor_name();
|
+ Heap& heap = v8_context()->heap_;
|
// Differentiate Object and Array instances.
|
- if (fine_grain && (constructor == Heap::Object_symbol() ||
|
- constructor == Heap::Array_symbol())) {
|
+ if (fine_grain && (constructor == heap.Object_symbol() ||
|
+ constructor == heap.Array_symbol())) {
|
return JSObjectsCluster(constructor, obj);
|
} else {
|
return JSObjectsCluster(constructor);
|
}
|
} else if (obj->IsString()) {
|
- return JSObjectsCluster(Heap::String_symbol());
|
+ return JSObjectsCluster(v8_context()->heap_.String_symbol());
|
} else if (obj->IsJSGlobalPropertyCell()) {
|
return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
|
} else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
|
@@ -536,7 +537,7 @@
|
: zscope_(DELETE_ON_EXIT) {
|
JSObjectsCluster roots(JSObjectsCluster::ROOTS);
|
ReferencesExtractor extractor(roots, this);
|
- Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
|
+ v8_context()->heap_.IterateRoots(&extractor, VISIT_ONLY_STRONG);
|
}
|
|
|
@@ -614,8 +615,9 @@
|
|
void HeapProfiler::WriteSample() {
|
LOG(HeapSampleBeginEvent("Heap", "allocated"));
|
+ Heap& heap = v8_context()->heap_;
|
LOG(HeapSampleStats(
|
- "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));
|
+ "Heap", "allocated", heap.CommittedMemory(), heap.SizeOfObjects()));
|
|
HistogramInfo info[LAST_TYPE+1];
|
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
|
@@ -654,7 +656,7 @@
|
js_cons_profile.PrintStats();
|
js_retainer_profile.PrintStats();
|
|
- GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
|
+ v8_context()->global_handles_.IterateWeakRoots(PrintProducerStackTrace,
|
StackWeakReferenceCallback);
|
|
LOG(HeapSampleEndEvent("Heap", "allocated"));
|
@@ -681,8 +683,9 @@
|
stack[i++] = it.frame()->pc();
|
}
|
stack[i] = NULL;
|
- Handle<Object> handle = GlobalHandles::Create(obj);
|
- GlobalHandles::MakeWeak(handle.location(),
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ Handle<Object> handle = global_handles.Create(obj);
|
+ global_handles.MakeWeak(handle.location(),
|
static_cast<void*>(stack.start()),
|
StackWeakReferenceCallback);
|
}
|
Index: src/snapshot.h
|
===================================================================
|
--- src/snapshot.h (revision 3142)
|
+++ src/snapshot.h Sat Nov 14 01:43:03 MSK 2009
|
@@ -48,7 +48,7 @@
|
static bool WriteToFile2(const char* snapshot_file);
|
|
private:
|
- static const byte data_[];
|
+ static const byte data_[]; ///static
|
static int size_;
|
|
static bool Deserialize(const byte* content, int len);
|
Index: src/handles.h
|
===================================================================
|
--- src/handles.h (revision 3218)
|
+++ src/handles.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -105,79 +105,19 @@
|
// garbage collector will no longer track the object stored in the
|
// handle and may deallocate it. The behavior of accessing a handle
|
// for which the handle scope has been deleted is undefined.
|
-class HandleScope {
|
+class HandleScope {
|
public:
|
- HandleScope() : previous_(current_) {
|
- current_.extensions = 0;
|
- }
|
+ HandleScope();
|
|
- ~HandleScope() {
|
- Leave(&previous_);
|
- }
|
+ ~HandleScope();
|
|
- // Counts the number of allocated handles.
|
- static int NumberOfHandles();
|
-
|
- // Creates a new handle with the given value.
|
- template <typename T>
|
- static inline T** CreateHandle(T* value) {
|
- internal::Object** cur = current_.next;
|
- if (cur == current_.limit) cur = Extend();
|
- // Update the current next field, set the value in the created
|
- // handle, and return the result.
|
- ASSERT(cur < current_.limit);
|
- current_.next = cur + 1;
|
-
|
- T** result = reinterpret_cast<T**>(cur);
|
- *result = value;
|
- return result;
|
- }
|
-
|
- // Deallocates any extensions used by the current scope.
|
- static void DeleteExtensions();
|
-
|
- static Address current_extensions_address();
|
- static Address current_next_address();
|
- static Address current_limit_address();
|
-
|
private:
|
// Prevent heap allocation or illegal handle scopes.
|
- HandleScope(const HandleScope&);
|
- void operator=(const HandleScope&);
|
+ DISALLOW_COPY_AND_ASSIGN(HandleScope);
|
void* operator new(size_t size);
|
void operator delete(void* size_t);
|
|
- static v8::ImplementationUtilities::HandleScopeData current_;
|
- const v8::ImplementationUtilities::HandleScopeData previous_;
|
-
|
- // Pushes a fresh handle scope to be used when allocating new handles.
|
- static void Enter(
|
- v8::ImplementationUtilities::HandleScopeData* previous) {
|
- *previous = current_;
|
- current_.extensions = 0;
|
- }
|
-
|
- // Re-establishes the previous scope state. Should be called only
|
- // once, and only for the current scope.
|
- static void Leave(
|
- const v8::ImplementationUtilities::HandleScopeData* previous) {
|
- if (current_.extensions > 0) {
|
- DeleteExtensions();
|
- }
|
- current_ = *previous;
|
-#ifdef DEBUG
|
- ZapRange(current_.next, current_.limit);
|
-#endif
|
- }
|
-
|
- // Extend the handle scope making room for more handles.
|
- static internal::Object** Extend();
|
-
|
- // Zaps the handles in the half-open interval [start, end).
|
- static void ZapRange(internal::Object** start, internal::Object** end);
|
-
|
- friend class v8::HandleScope;
|
- friend class v8::ImplementationUtilities;
|
+ v8::ImplementationUtilities::HandleScopeData previous_;
|
};
|
|
|
Index: test/cctest/test-alloc.cc
|
===================================================================
|
--- test/cctest/test-alloc.cc (revision 3018)
|
+++ test/cctest/test-alloc.cc Sun Nov 15 12:38:09 MSK 2009
|
@@ -33,18 +33,22 @@
|
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
+using v8::V8Context;
|
|
|
static Object* AllocateAfterFailures() {
|
static int attempts = 0;
|
if (++attempts < 3) return Failure::RetryAfterGC(0);
|
|
+ V8Context* const v8context = v8_context();
|
+ Heap& heap = v8context->heap_;
|
// New space.
|
- NewSpace* new_space = Heap::new_space();
|
+ NewSpace* new_space = heap.new_space();
|
static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
|
while (new_space->Available() > kNewSpaceFillerSize) {
|
int available_before = new_space->Available();
|
- CHECK(!Heap::AllocateByteArray(0)->IsFailure());
|
+ CHECK(!heap.AllocateByteArray(0)->IsFailure());
|
if (available_before == new_space->Available()) {
|
// It seems that we are avoiding new space allocations when
|
// allocation is forced, so no need to fill up new space
|
@@ -52,44 +56,44 @@
|
break;
|
}
|
}
|
- CHECK(!Heap::AllocateByteArray(100)->IsFailure());
|
- CHECK(!Heap::AllocateFixedArray(100, NOT_TENURED)->IsFailure());
|
+ CHECK(!heap.AllocateByteArray(100)->IsFailure());
|
+ CHECK(!heap.AllocateFixedArray(100, NOT_TENURED)->IsFailure());
|
|
// Make sure we can allocate through optimized allocation functions
|
// for specific kinds.
|
- CHECK(!Heap::AllocateFixedArray(100)->IsFailure());
|
- CHECK(!Heap::AllocateHeapNumber(0.42)->IsFailure());
|
- CHECK(!Heap::AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
|
- Object* object = Heap::AllocateJSObject(*Top::object_function());
|
- CHECK(!Heap::CopyJSObject(JSObject::cast(object))->IsFailure());
|
+ CHECK(!heap.AllocateFixedArray(100)->IsFailure());
|
+ CHECK(!heap.AllocateHeapNumber(0.42)->IsFailure());
|
+ CHECK(!heap.AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure());
|
+ Object* object = heap.AllocateJSObject(*v8context->top_.object_function());
|
+ CHECK(!heap.CopyJSObject(JSObject::cast(object))->IsFailure());
|
|
// Old data space.
|
- OldSpace* old_data_space = Heap::old_data_space();
|
+ OldSpace* old_data_space = heap.old_data_space();
|
static const int kOldDataSpaceFillerSize = SeqAsciiString::SizeFor(0);
|
while (old_data_space->Available() > kOldDataSpaceFillerSize) {
|
- CHECK(!Heap::AllocateRawAsciiString(0, TENURED)->IsFailure());
|
+ CHECK(!heap.AllocateRawAsciiString(0, TENURED)->IsFailure());
|
}
|
- CHECK(!Heap::AllocateRawAsciiString(100, TENURED)->IsFailure());
|
+ CHECK(!heap.AllocateRawAsciiString(100, TENURED)->IsFailure());
|
|
// Large object space.
|
- while (!Heap::OldGenerationAllocationLimitReached()) {
|
- CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
|
+ while (!heap.OldGenerationAllocationLimitReached()) {
|
+ CHECK(!heap.AllocateFixedArray(10000, TENURED)->IsFailure());
|
}
|
- CHECK(!Heap::AllocateFixedArray(10000, TENURED)->IsFailure());
|
+ CHECK(!heap.AllocateFixedArray(10000, TENURED)->IsFailure());
|
|
// Map space.
|
- MapSpace* map_space = Heap::map_space();
|
+ MapSpace* map_space = heap.map_space();
|
static const int kMapSpaceFillerSize = Map::kSize;
|
InstanceType instance_type = JS_OBJECT_TYPE;
|
int instance_size = JSObject::kHeaderSize;
|
while (map_space->Available() > kMapSpaceFillerSize) {
|
- CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
|
+ CHECK(!heap.AllocateMap(instance_type, instance_size)->IsFailure());
|
}
|
- CHECK(!Heap::AllocateMap(instance_type, instance_size)->IsFailure());
|
+ CHECK(!heap.AllocateMap(instance_type, instance_size)->IsFailure());
|
|
// Test that we can allocate in old pointer space and code space.
|
- CHECK(!Heap::AllocateFixedArray(100, TENURED)->IsFailure());
|
- CHECK(!Heap::CopyCode(Builtins::builtin(Builtins::Illegal))->IsFailure());
|
+ CHECK(!heap.AllocateFixedArray(100, TENURED)->IsFailure());
|
+ CHECK(!heap.CopyCode(v8context->builtins_.builtin(Builtins::Illegal))->IsFailure());
|
|
// Return success.
|
return Smi::FromInt(42);
|
@@ -132,7 +136,7 @@
|
// Force the creation of an initial map and set the code to
|
// something empty.
|
Factory::NewJSObject(function);
|
- function->set_code(Builtins::builtin(Builtins::EmptyFunction));
|
+ function->set_code(v8_context()->builtins_.builtin(Builtins::EmptyFunction));
|
// Patch the map to have an accessor for "get".
|
Handle<Map> map(function->initial_map());
|
Handle<DescriptorArray> instance_descriptors(map->instance_descriptors());
|
@@ -182,7 +186,8 @@
|
|
TEST(CodeRange) {
|
const int code_range_size = 16*MB;
|
- CodeRange::Setup(code_range_size);
|
+ i::CodeRange* code_range = v8_context()->heap_.code_range();
|
+ code_range->Setup(code_range_size);
|
int current_allocated = 0;
|
int total_allocated = 0;
|
List<Block> blocks(1000);
|
@@ -194,14 +199,14 @@
|
size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
|
Pseudorandom() % 5000 + 1;
|
size_t allocated = 0;
|
- void* base = CodeRange::AllocateRawMemory(requested, &allocated);
|
+ void* base = code_range->AllocateRawMemory(requested, &allocated);
|
blocks.Add(Block(base, allocated));
|
current_allocated += allocated;
|
total_allocated += allocated;
|
} else {
|
// Free a block.
|
int index = Pseudorandom() % blocks.length();
|
- CodeRange::FreeRawMemory(blocks[index].base, blocks[index].size);
|
+ code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
|
current_allocated -= blocks[index].size;
|
if (index < blocks.length() - 1) {
|
blocks[index] = blocks.RemoveLast();
|
@@ -211,5 +216,5 @@
|
}
|
}
|
|
- CodeRange::TearDown();
|
+ code_range->TearDown();
|
}
|
Index: src/ia32/stub-cache-ia32.cc
|
===================================================================
|
--- src/ia32/stub-cache-ia32.cc (revision 3237)
|
+++ src/ia32/stub-cache-ia32.cc Sat Nov 14 01:43:00 MSK 2009
|
@@ -359,7 +359,7 @@
|
holder->LocalLookupRealNamedProperty(name, lookup);
|
if (lookup->IsNotFound()) {
|
Object* proto = holder->GetPrototype();
|
- if (proto != Heap::null_value()) {
|
+ if (proto != v8_context()->heap_.null_value()) {
|
proto->Lookup(name, lookup);
|
}
|
}
|
@@ -609,9 +609,9 @@
|
ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
|
Code* code = NULL;
|
if (kind == Code::LOAD_IC) {
|
- code = Builtins::builtin(Builtins::LoadIC_Miss);
|
+ code = v8_context()->builtins_.builtin(Builtins::LoadIC_Miss);
|
} else {
|
- code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
|
+ code = v8_context()->builtins_.builtin(Builtins::KeyedLoadIC_Miss);
|
}
|
|
Handle<Code> ic(code);
|
@@ -651,7 +651,7 @@
|
// The properties must be extended before we can store the value.
|
// We jump to a runtime call that extends the properties array.
|
__ mov(ecx, Immediate(Handle<Map>(transition)));
|
- Handle<Code> ic(Builtins::builtin(storage_extend));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(storage_extend));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
return;
|
}
|
@@ -1162,7 +1162,7 @@
|
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
|
|
// Jump to the cached code (tail call).
|
- __ IncrementCounter(&Counters::call_global_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.call_global_inline, 1);
|
ASSERT(function->is_compiled());
|
Handle<Code> code(function->code());
|
ParameterCount expected(function->shared()->formal_parameter_count());
|
@@ -1171,7 +1171,7 @@
|
|
// Handle call cache miss.
|
__ bind(&miss);
|
- __ IncrementCounter(&Counters::call_global_inline_miss, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.call_global_inline_miss, 1);
|
Handle<Code> ic = ComputeCallMiss(arguments().immediate());
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
@@ -1207,7 +1207,7 @@
|
// Handle store cache miss.
|
__ bind(&miss);
|
__ mov(ecx, Immediate(Handle<String>(name))); // restore name
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Miss));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
// Return the generated code.
|
@@ -1262,7 +1262,7 @@
|
// Handle store cache miss.
|
__ bind(&miss);
|
__ mov(ecx, Immediate(Handle<String>(name))); // restore name
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Miss));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
// Return the generated code.
|
@@ -1315,7 +1315,7 @@
|
// Handle store cache miss.
|
__ bind(&miss);
|
__ mov(ecx, Immediate(Handle<String>(name))); // restore name
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Miss));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
// Return the generated code.
|
@@ -1345,13 +1345,13 @@
|
__ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax);
|
|
// Return the value (register eax).
|
- __ IncrementCounter(&Counters::named_store_global_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.named_store_global_inline, 1);
|
__ ret(0);
|
|
// Handle store cache miss.
|
__ bind(&miss);
|
- __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
|
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
|
+ __ IncrementCounter(&v8_context()->counters_.named_store_global_inline_miss, 1);
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::StoreIC_Miss));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
// Return the generated code.
|
@@ -1371,7 +1371,7 @@
|
// -----------------------------------
|
Label miss;
|
|
- __ IncrementCounter(&Counters::keyed_store_field, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_store_field, 1);
|
|
// Get the name from the stack.
|
__ mov(ecx, Operand(esp, 1 * kPointerSize));
|
@@ -1393,8 +1393,8 @@
|
|
// Handle store cache miss.
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_store_field, 1);
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_store_field, 1);
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Miss));
|
__ jmp(ic, RelocInfo::CODE_TARGET);
|
|
// Return the generated code.
|
@@ -1540,11 +1540,11 @@
|
__ Check(not_equal, "DontDelete cells can't contain the hole");
|
}
|
|
- __ IncrementCounter(&Counters::named_load_global_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.named_load_global_inline, 1);
|
__ ret(0);
|
|
__ bind(&miss);
|
- __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.named_load_global_inline_miss, 1);
|
GenerateLoadMiss(masm(), Code::LOAD_IC);
|
|
// Return the generated code.
|
@@ -1565,7 +1565,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_field, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_field, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1574,7 +1574,7 @@
|
GenerateLoadField(receiver, holder, ecx, ebx, edx, index, name, &miss);
|
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_field, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_field, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1595,7 +1595,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_callback, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_callback, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1604,7 +1604,7 @@
|
GenerateLoadCallback(receiver, holder, ecx, eax, ebx, edx,
|
callback, name, &miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_callback, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_callback, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1625,7 +1625,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_constant_function, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1634,7 +1634,7 @@
|
GenerateLoadConstant(receiver, holder, ecx, ebx, edx,
|
value, name, &miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_constant_function, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1654,7 +1654,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_interceptor, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1672,7 +1672,7 @@
|
name,
|
&miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_interceptor, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1692,7 +1692,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_array_length, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_array_length, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1700,7 +1700,7 @@
|
|
GenerateLoadArrayLength(masm(), ecx, edx, &miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_array_length, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_array_length, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1718,7 +1718,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_string_length, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_string_length, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1726,7 +1726,7 @@
|
|
GenerateLoadStringLength(masm(), ecx, edx, &miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_string_length, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_string_length, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1744,7 +1744,7 @@
|
|
__ mov(eax, Operand(esp, kPointerSize));
|
__ mov(ecx, Operand(esp, 2 * kPointerSize));
|
- __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_function_prototype, 1);
|
|
// Check that the name has not changed.
|
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
|
@@ -1752,7 +1752,7 @@
|
|
GenerateLoadFunctionPrototype(masm(), ecx, edx, ebx, &miss);
|
__ bind(&miss);
|
- __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
|
+ __ DecrementCounter(&v8_context()->counters_.keyed_load_function_prototype, 1);
|
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
|
|
// Return the generated code.
|
@@ -1875,14 +1875,14 @@
|
__ pop(ecx);
|
__ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
|
__ push(ecx);
|
- __ IncrementCounter(&Counters::constructed_objects, 1);
|
- __ IncrementCounter(&Counters::constructed_objects_stub, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.constructed_objects, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.constructed_objects_stub, 1);
|
__ ret(0);
|
|
// Jump to the generic stub in case the specialized code cannot handle the
|
// construction.
|
__ bind(&generic_stub_call);
|
- Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
|
+ Code* code = v8_context()->builtins_.builtin(Builtins::JSConstructStubGeneric);
|
Handle<Code> generic_construct_stub(code);
|
__ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
|
|
Index: test/cctest/test-heap.cc
|
===================================================================
|
--- test/cctest/test-heap.cc (revision 3046)
|
+++ test/cctest/test-heap.cc Sun Nov 15 12:42:05 MSK 2009
|
@@ -11,6 +11,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static v8::Persistent<v8::Context> env;
|
|
@@ -24,9 +25,9 @@
|
static void CheckMap(Map* map, int type, int instance_size) {
|
CHECK(map->IsHeapObject());
|
#ifdef DEBUG
|
- CHECK(Heap::Contains(map));
|
+ CHECK(v8_context()->heap_.Contains(map));
|
#endif
|
- CHECK_EQ(Heap::meta_map(), map->map());
|
+ CHECK_EQ(v8_context()->heap_.meta_map(), map->map());
|
CHECK_EQ(type, map->instance_type());
|
CHECK_EQ(instance_size, map->instance_size());
|
}
|
@@ -34,10 +35,10 @@
|
|
TEST(HeapMaps) {
|
InitializeVM();
|
- CheckMap(Heap::meta_map(), MAP_TYPE, Map::kSize);
|
- CheckMap(Heap::heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
|
- CheckMap(Heap::fixed_array_map(), FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
|
- CheckMap(Heap::long_string_map(), LONG_STRING_TYPE,
|
+ CheckMap(v8_context()->heap_.meta_map(), MAP_TYPE, Map::kSize);
|
+ CheckMap(v8_context()->heap_.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
|
+ CheckMap(v8_context()->heap_.fixed_array_map(), FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
|
+ CheckMap(v8_context()->heap_.long_string_map(), LONG_STRING_TYPE,
|
SeqTwoByteString::kAlignedSize);
|
}
|
|
@@ -59,7 +60,7 @@
|
|
|
static void CheckNumber(double value, const char* string) {
|
- Object* obj = Heap::NumberFromDouble(value);
|
+ Object* obj = v8_context()->heap_.NumberFromDouble(value);
|
CHECK(obj->IsNumber());
|
bool exc;
|
Object* print_string = *Execution::ToString(Handle<Object>(obj), &exc);
|
@@ -77,27 +78,27 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
|
HeapObject* obj = HeapObject::cast(code);
|
Address obj_addr = obj->address();
|
|
for (int i = 0; i < obj->Size(); i += kPointerSize) {
|
- Object* found = Heap::FindCodeObject(obj_addr + i);
|
+ Object* found = v8_context()->heap_.FindCodeObject(obj_addr + i);
|
CHECK_EQ(code, found);
|
}
|
|
- Object* copy = Heap::CreateCode(desc,
|
+ Object* copy = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(copy->IsCode());
|
HeapObject* obj_copy = HeapObject::cast(copy);
|
- Object* not_right = Heap::FindCodeObject(obj_copy->address() +
|
+ Object* not_right = v8_context()->heap_.FindCodeObject(obj_copy->address() +
|
obj_copy->Size() / 2);
|
CHECK(not_right != code);
|
}
|
@@ -107,50 +108,50 @@
|
InitializeVM();
|
|
v8::HandleScope sc;
|
- Object* value = Heap::NumberFromDouble(1.000123);
|
+ Object* value = v8_context()->heap_.NumberFromDouble(1.000123);
|
CHECK(value->IsHeapNumber());
|
CHECK(value->IsNumber());
|
CHECK_EQ(1.000123, value->Number());
|
|
- value = Heap::NumberFromDouble(1.0);
|
+ value = v8_context()->heap_.NumberFromDouble(1.0);
|
CHECK(value->IsSmi());
|
CHECK(value->IsNumber());
|
CHECK_EQ(1.0, value->Number());
|
|
- value = Heap::NumberFromInt32(1024);
|
+ value = v8_context()->heap_.NumberFromInt32(1024);
|
CHECK(value->IsSmi());
|
CHECK(value->IsNumber());
|
CHECK_EQ(1024.0, value->Number());
|
|
- value = Heap::NumberFromInt32(Smi::kMinValue);
|
+ value = v8_context()->heap_.NumberFromInt32(Smi::kMinValue);
|
CHECK(value->IsSmi());
|
CHECK(value->IsNumber());
|
CHECK_EQ(Smi::kMinValue, Smi::cast(value)->value());
|
|
- value = Heap::NumberFromInt32(Smi::kMaxValue);
|
+ value = v8_context()->heap_.NumberFromInt32(Smi::kMaxValue);
|
CHECK(value->IsSmi());
|
CHECK(value->IsNumber());
|
CHECK_EQ(Smi::kMaxValue, Smi::cast(value)->value());
|
|
#ifndef V8_TARGET_ARCH_X64
|
// TODO(lrn): We need a NumberFromIntptr function in order to test this.
|
- value = Heap::NumberFromInt32(Smi::kMinValue - 1);
|
+ value = v8_context()->heap_.NumberFromInt32(Smi::kMinValue - 1);
|
CHECK(value->IsHeapNumber());
|
CHECK(value->IsNumber());
|
CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
|
#endif
|
|
- value = Heap::NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
|
+ value = v8_context()->heap_.NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
|
CHECK(value->IsHeapNumber());
|
CHECK(value->IsNumber());
|
CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
|
value->Number());
|
|
// nan oddball checks
|
- CHECK(Heap::nan_value()->IsNumber());
|
- CHECK(isnan(Heap::nan_value()->Number()));
|
+ CHECK(v8_context()->heap_.nan_value()->IsNumber());
|
+ CHECK(isnan(v8_context()->heap_.nan_value()->Number()));
|
|
- Object* str = Heap::AllocateStringFromAscii(CStrVector("fisk hest "));
|
+ Object* str = v8_context()->heap_.AllocateStringFromAscii(CStrVector("fisk hest "));
|
if (!str->IsFailure()) {
|
String* s = String::cast(str);
|
CHECK(s->IsString());
|
@@ -159,14 +160,14 @@
|
CHECK(false);
|
}
|
|
- String* object_symbol = String::cast(Heap::Object_symbol());
|
- CHECK(Top::context()->global()->HasLocalProperty(object_symbol));
|
+ String* object_symbol = String::cast(v8_context()->heap_.Object_symbol());
|
+ CHECK(v8_context()->top_.context()->global()->HasLocalProperty(object_symbol));
|
|
// Check ToString for oddballs
|
- CheckOddball(Heap::true_value(), "true");
|
- CheckOddball(Heap::false_value(), "false");
|
- CheckOddball(Heap::null_value(), "null");
|
- CheckOddball(Heap::undefined_value(), "undefined");
|
+ CheckOddball(v8_context()->heap_.true_value(), "true");
|
+ CheckOddball(v8_context()->heap_.false_value(), "false");
|
+ CheckOddball(v8_context()->heap_.null_value(), "null");
|
+ CheckOddball(v8_context()->heap_.undefined_value(), "undefined");
|
|
// Check ToString for Smis
|
CheckSmi(0, "0");
|
@@ -203,65 +204,65 @@
|
|
v8::HandleScope sc;
|
// check GC when heap is empty
|
- int free_bytes = Heap::MaxObjectSizeInPagedSpace();
|
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
|
+ int free_bytes = v8_context()->heap_.MaxObjectSizeInPagedSpace();
|
+ CHECK(v8_context()->heap_.CollectGarbage(free_bytes, NEW_SPACE));
|
|
// allocate a function and keep it in global object's property
|
- String* func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
+ String* func_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
SharedFunctionInfo* function_share =
|
- SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(func_name));
|
+ SharedFunctionInfo::cast(v8_context()->heap_.AllocateSharedFunctionInfo(func_name));
|
JSFunction* function =
|
- JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
|
+ JSFunction::cast(v8_context()->heap_.AllocateFunction(*v8_context()->top_.function_map(),
|
function_share,
|
- Heap::undefined_value()));
|
+ v8_context()->heap_.undefined_value()));
|
Map* initial_map =
|
- Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
+ Map::cast(v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
function->set_initial_map(initial_map);
|
- Top::context()->global()->SetProperty(func_name, function, NONE);
|
+ v8_context()->top_.context()->global()->SetProperty(func_name, function, NONE);
|
|
// allocate an object, but it is unrooted
|
- String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
- String* prop_namex = String::cast(Heap::LookupAsciiSymbol("theSlotx"));
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
|
+ String* prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
+ String* prop_namex = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlotx"));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
|
obj->SetProperty(prop_namex, Smi::FromInt(24), NONE);
|
|
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(prop_name));
|
CHECK_EQ(Smi::FromInt(24), obj->GetProperty(prop_namex));
|
|
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(free_bytes, NEW_SPACE));
|
|
// function should be alive, func_name might be invalid after GC
|
- func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
- CHECK(Top::context()->global()->HasLocalProperty(func_name));
|
+ func_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
+ CHECK(v8_context()->top_.context()->global()->HasLocalProperty(func_name));
|
// check function is retained
|
- Object* func_value = Top::context()->global()->GetProperty(func_name);
|
+ Object* func_value = v8_context()->top_.context()->global()->GetProperty(func_name);
|
CHECK(func_value->IsJSFunction());
|
// old function pointer may not be valid
|
function = JSFunction::cast(func_value);
|
|
// allocate another object, make it reachable from global
|
- obj = JSObject::cast(Heap::AllocateJSObject(function));
|
- String* obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
|
- Top::context()->global()->SetProperty(obj_name, obj, NONE);
|
+ obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
+ String* obj_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theObject"));
|
+ v8_context()->top_.context()->global()->SetProperty(obj_name, obj, NONE);
|
// set property
|
- prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
+ prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
|
|
// after gc, it should survive
|
- CHECK(Heap::CollectGarbage(free_bytes, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(free_bytes, NEW_SPACE));
|
|
- obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
|
- CHECK(Top::context()->global()->HasLocalProperty(obj_name));
|
- CHECK(Top::context()->global()->GetProperty(obj_name)->IsJSObject());
|
- obj = JSObject::cast(Top::context()->global()->GetProperty(obj_name));
|
- prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
+ obj_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theObject"));
|
+ CHECK(v8_context()->top_.context()->global()->HasLocalProperty(obj_name));
|
+ CHECK(v8_context()->top_.context()->global()->GetProperty(obj_name)->IsJSObject());
|
+ obj = JSObject::cast(v8_context()->top_.context()->global()->GetProperty(obj_name));
|
+ prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(prop_name));
|
}
|
|
|
static void VerifyStringAllocation(const char* string) {
|
- String* s = String::cast(Heap::AllocateStringFromUtf8(CStrVector(string)));
|
+ String* s = String::cast(v8_context()->heap_.AllocateStringFromUtf8(CStrVector(string)));
|
CHECK_EQ(static_cast<int>(strlen(string)), s->length());
|
for (int index = 0; index < s->length(); index++) {
|
CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index)); }
|
@@ -292,16 +293,16 @@
|
TEST(GlobalHandles) {
|
InitializeVM();
|
|
- Object* i = Heap::AllocateStringFromAscii(CStrVector("fisk"));
|
- Object* u = Heap::AllocateHeapNumber(1.12344);
|
+ Object* i = v8_context()->heap_.AllocateStringFromAscii(CStrVector("fisk"));
|
+ Object* u = v8_context()->heap_.AllocateHeapNumber(1.12344);
|
|
- Handle<Object> h1 = GlobalHandles::Create(i);
|
- Handle<Object> h2 = GlobalHandles::Create(u);
|
- Handle<Object> h3 = GlobalHandles::Create(i);
|
- Handle<Object> h4 = GlobalHandles::Create(u);
|
+ Handle<Object> h1 = v8_context()->global_handles_.Create(i);
|
+ Handle<Object> h2 = v8_context()->global_handles_.Create(u);
|
+ Handle<Object> h3 = v8_context()->global_handles_.Create(i);
|
+ Handle<Object> h4 = v8_context()->global_handles_.Create(u);
|
|
// after gc, it should survive
|
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, NEW_SPACE));
|
|
CHECK((*h1)->IsString());
|
CHECK((*h2)->IsHeapNumber());
|
@@ -309,12 +310,12 @@
|
CHECK((*h4)->IsHeapNumber());
|
|
CHECK_EQ(*h3, *h1);
|
- GlobalHandles::Destroy(h1.location());
|
- GlobalHandles::Destroy(h3.location());
|
+ v8_context()->global_handles_.Destroy(h1.location());
|
+ v8_context()->global_handles_.Destroy(h3.location());
|
|
CHECK_EQ(*h4, *h2);
|
- GlobalHandles::Destroy(h2.location());
|
- GlobalHandles::Destroy(h4.location());
|
+ v8_context()->global_handles_.Destroy(h2.location());
|
+ v8_context()->global_handles_.Destroy(h4.location());
|
}
|
|
|
@@ -332,28 +333,28 @@
|
|
WeakPointerCleared = false;
|
|
- Object* i = Heap::AllocateStringFromAscii(CStrVector("fisk"));
|
- Object* u = Heap::AllocateHeapNumber(1.12344);
|
+ Object* i = v8_context()->heap_.AllocateStringFromAscii(CStrVector("fisk"));
|
+ Object* u = v8_context()->heap_.AllocateHeapNumber(1.12344);
|
|
- Handle<Object> h1 = GlobalHandles::Create(i);
|
- Handle<Object> h2 = GlobalHandles::Create(u);
|
+ Handle<Object> h1 = v8_context()->global_handles_.Create(i);
|
+ Handle<Object> h2 = v8_context()->global_handles_.Create(u);
|
|
- GlobalHandles::MakeWeak(h2.location(),
|
+ v8_context()->global_handles_.MakeWeak(h2.location(),
|
reinterpret_cast<void*>(1234),
|
&TestWeakGlobalHandleCallback);
|
|
// Scavenge treats weak pointers as normal roots.
|
- Heap::PerformScavenge();
|
+ v8_context()->heap_.PerformScavenge();
|
|
CHECK((*h1)->IsString());
|
CHECK((*h2)->IsHeapNumber());
|
|
CHECK(!WeakPointerCleared);
|
- CHECK(!GlobalHandles::IsNearDeath(h2.location()));
|
- CHECK(!GlobalHandles::IsNearDeath(h1.location()));
|
+ CHECK(!v8_context()->global_handles_.IsNearDeath(h2.location()));
|
+ CHECK(!v8_context()->global_handles_.IsNearDeath(h1.location()));
|
|
- GlobalHandles::Destroy(h1.location());
|
- GlobalHandles::Destroy(h2.location());
|
+ v8_context()->global_handles_.Destroy(h1.location());
|
+ v8_context()->global_handles_.Destroy(h2.location());
|
}
|
|
|
@@ -362,32 +363,32 @@
|
|
WeakPointerCleared = false;
|
|
- Object* i = Heap::AllocateStringFromAscii(CStrVector("fisk"));
|
- Object* u = Heap::AllocateHeapNumber(1.12344);
|
+ Object* i = v8_context()->heap_.AllocateStringFromAscii(CStrVector("fisk"));
|
+ Object* u = v8_context()->heap_.AllocateHeapNumber(1.12344);
|
|
- Handle<Object> h1 = GlobalHandles::Create(i);
|
- Handle<Object> h2 = GlobalHandles::Create(u);
|
+ Handle<Object> h1 = v8_context()->global_handles_.Create(i);
|
+ Handle<Object> h2 = v8_context()->global_handles_.Create(u);
|
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
- CHECK(Heap::CollectGarbage(0, NEW_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, NEW_SPACE));
|
// Make sure the object is promoted.
|
|
- GlobalHandles::MakeWeak(h2.location(),
|
+ v8_context()->global_handles_.MakeWeak(h2.location(),
|
reinterpret_cast<void*>(1234),
|
&TestWeakGlobalHandleCallback);
|
- CHECK(!GlobalHandles::IsNearDeath(h1.location()));
|
- CHECK(!GlobalHandles::IsNearDeath(h2.location()));
|
+ CHECK(!v8_context()->global_handles_.IsNearDeath(h1.location()));
|
+ CHECK(!v8_context()->global_handles_.IsNearDeath(h2.location()));
|
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
CHECK((*h1)->IsString());
|
|
CHECK(WeakPointerCleared);
|
- CHECK(!GlobalHandles::IsNearDeath(h1.location()));
|
- CHECK(GlobalHandles::IsNearDeath(h2.location()));
|
+ CHECK(!v8_context()->global_handles_.IsNearDeath(h1.location()));
|
+ CHECK(v8_context()->global_handles_.IsNearDeath(h2.location()));
|
|
- GlobalHandles::Destroy(h1.location());
|
- GlobalHandles::Destroy(h2.location());
|
+ v8_context()->global_handles_.Destroy(h1.location());
|
+ v8_context()->global_handles_.Destroy(h2.location());
|
}
|
|
static void TestDeleteWeakGlobalHandleCallback(
|
@@ -402,20 +403,20 @@
|
|
WeakPointerCleared = false;
|
|
- Object* i = Heap::AllocateStringFromAscii(CStrVector("fisk"));
|
- Handle<Object> h = GlobalHandles::Create(i);
|
+ Object* i = v8_context()->heap_.AllocateStringFromAscii(CStrVector("fisk"));
|
+ Handle<Object> h = v8_context()->global_handles_.Create(i);
|
|
- GlobalHandles::MakeWeak(h.location(),
|
+ v8_context()->global_handles_.MakeWeak(h.location(),
|
reinterpret_cast<void*>(1234),
|
&TestDeleteWeakGlobalHandleCallback);
|
|
// Scanvenge does not recognize weak reference.
|
- Heap::PerformScavenge();
|
+ v8_context()->heap_.PerformScavenge();
|
|
CHECK(!WeakPointerCleared);
|
|
// Mark-compact treats weak reference properly.
|
- CHECK(Heap::CollectGarbage(0, OLD_POINTER_SPACE));
|
+ CHECK(v8_context()->heap_.CollectGarbage(0, OLD_POINTER_SPACE));
|
|
CHECK(WeakPointerCleared);
|
}
|
@@ -486,11 +487,11 @@
|
|
static void CheckSymbols(const char** strings) {
|
for (const char* string = *strings; *strings != 0; string = *strings++) {
|
- Object* a = Heap::LookupAsciiSymbol(string);
|
+ Object* a = v8_context()->heap_.LookupAsciiSymbol(string);
|
// LookupAsciiSymbol may return a failure if a GC is needed.
|
if (a->IsFailure()) continue;
|
CHECK(a->IsSymbol());
|
- Object* b = Heap::LookupAsciiSymbol(string);
|
+ Object* b = v8_context()->heap_.LookupAsciiSymbol(string);
|
if (b->IsFailure()) continue;
|
CHECK_EQ(b, a);
|
CHECK(String::cast(b)->IsEqualTo(CStrVector(string)));
|
@@ -510,19 +511,19 @@
|
InitializeVM();
|
|
v8::HandleScope sc;
|
- String* name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
+ String* name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
SharedFunctionInfo* function_share =
|
- SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(name));
|
+ SharedFunctionInfo::cast(v8_context()->heap_.AllocateSharedFunctionInfo(name));
|
JSFunction* function =
|
- JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
|
+ JSFunction::cast(v8_context()->heap_.AllocateFunction(*v8_context()->top_.function_map(),
|
function_share,
|
- Heap::undefined_value()));
|
+ v8_context()->heap_.undefined_value()));
|
Map* initial_map =
|
- Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
+ Map::cast(v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
function->set_initial_map(initial_map);
|
|
- String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
|
+ String* prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
|
CHECK_EQ(Smi::FromInt(23), obj->GetProperty(prop_name));
|
// Check that we can add properties to function objects.
|
@@ -537,11 +538,11 @@
|
v8::HandleScope sc;
|
JSFunction* constructor =
|
JSFunction::cast(
|
- Top::context()->global()->GetProperty(String::cast(
|
- Heap::Object_symbol())));
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(constructor));
|
- String* first = String::cast(Heap::LookupAsciiSymbol("first"));
|
- String* second = String::cast(Heap::LookupAsciiSymbol("second"));
|
+ v8_context()->top_.context()->global()->GetProperty(String::cast(
|
+ v8_context()->heap_.Object_symbol())));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(constructor));
|
+ String* first = String::cast(v8_context()->heap_.LookupAsciiSymbol("first"));
|
+ String* second = String::cast(v8_context()->heap_.LookupAsciiSymbol("second"));
|
|
// check for empty
|
CHECK(!obj->HasLocalProperty(first));
|
@@ -583,16 +584,16 @@
|
// check string and symbol match
|
static const char* string1 = "fisk";
|
String* s1 =
|
- String::cast(Heap::AllocateStringFromAscii(CStrVector(string1)));
|
+ String::cast(v8_context()->heap_.AllocateStringFromAscii(CStrVector(string1)));
|
obj->SetProperty(s1, Smi::FromInt(1), NONE);
|
- CHECK(obj->HasLocalProperty(String::cast(Heap::LookupAsciiSymbol(string1))));
|
+ CHECK(obj->HasLocalProperty(String::cast(v8_context()->heap_.LookupAsciiSymbol(string1))));
|
|
// check symbol and string match
|
static const char* string2 = "fugl";
|
- String* s2 = String::cast(Heap::LookupAsciiSymbol(string2));
|
+ String* s2 = String::cast(v8_context()->heap_.LookupAsciiSymbol(string2));
|
obj->SetProperty(s2, Smi::FromInt(1), NONE);
|
CHECK(obj->HasLocalProperty(
|
- String::cast(Heap::AllocateStringFromAscii(CStrVector(string2)))));
|
+ String::cast(v8_context()->heap_.AllocateStringFromAscii(CStrVector(string2)))));
|
}
|
|
|
@@ -600,18 +601,18 @@
|
InitializeVM();
|
|
v8::HandleScope sc;
|
- String* name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
|
+ String* name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theFunction"));
|
SharedFunctionInfo* function_share =
|
- SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(name));
|
+ SharedFunctionInfo::cast(v8_context()->heap_.AllocateSharedFunctionInfo(name));
|
JSFunction* function =
|
- JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
|
+ JSFunction::cast(v8_context()->heap_.AllocateFunction(*v8_context()->top_.function_map(),
|
function_share,
|
- Heap::undefined_value()));
|
+ v8_context()->heap_.undefined_value()));
|
Map* initial_map =
|
- Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
+ Map::cast(v8_context()->heap_.AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
|
function->set_initial_map(initial_map);
|
- String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
|
+ String* prop_name = String::cast(v8_context()->heap_.LookupAsciiSymbol("theSlot"));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(function));
|
|
// Set a propery
|
obj->SetProperty(prop_name, Smi::FromInt(23), NONE);
|
@@ -626,12 +627,12 @@
|
InitializeVM();
|
|
v8::HandleScope sc;
|
- String* name = String::cast(Heap::LookupAsciiSymbol("Array"));
|
+ String* name = String::cast(v8_context()->heap_.LookupAsciiSymbol("Array"));
|
JSFunction* function =
|
- JSFunction::cast(Top::context()->global()->GetProperty(name));
|
+ JSFunction::cast(v8_context()->top_.context()->global()->GetProperty(name));
|
|
// Allocate the object.
|
- JSArray* array = JSArray::cast(Heap::AllocateJSObject(function));
|
+ JSArray* array = JSArray::cast(v8_context()->heap_.AllocateJSObject(function));
|
array->Initialize(0);
|
|
// Set array length to 0.
|
@@ -646,7 +647,7 @@
|
|
// Set array length with larger than smi value.
|
Object* length =
|
- Heap::NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
|
+ v8_context()->heap_.NumberFromUint32(static_cast<uint32_t>(Smi::kMaxValue) + 1);
|
array->SetElementsLength(length);
|
|
uint32_t int_length = 0;
|
@@ -668,12 +669,12 @@
|
InitializeVM();
|
|
v8::HandleScope sc;
|
- String* name = String::cast(Heap::Object_symbol());
|
+ String* name = String::cast(v8_context()->heap_.Object_symbol());
|
JSFunction* constructor =
|
- JSFunction::cast(Top::context()->global()->GetProperty(name));
|
- JSObject* obj = JSObject::cast(Heap::AllocateJSObject(constructor));
|
- String* first = String::cast(Heap::LookupAsciiSymbol("first"));
|
- String* second = String::cast(Heap::LookupAsciiSymbol("second"));
|
+ JSFunction::cast(v8_context()->top_.context()->global()->GetProperty(name));
|
+ JSObject* obj = JSObject::cast(v8_context()->heap_.AllocateJSObject(constructor));
|
+ String* first = String::cast(v8_context()->heap_.LookupAsciiSymbol("first"));
|
+ String* second = String::cast(v8_context()->heap_.LookupAsciiSymbol("second"));
|
|
obj->SetProperty(first, Smi::FromInt(1), NONE);
|
obj->SetProperty(second, Smi::FromInt(2), NONE);
|
@@ -682,7 +683,7 @@
|
obj->SetElement(1, second);
|
|
// Make the clone.
|
- JSObject* clone = JSObject::cast(Heap::CopyJSObject(obj));
|
+ JSObject* clone = JSObject::cast(v8_context()->heap_.CopyJSObject(obj));
|
CHECK(clone != obj);
|
|
CHECK_EQ(obj->GetElement(0), clone->GetElement(0));
|
@@ -781,7 +782,7 @@
|
Factory::NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
|
|
// Allocate a large string (for large object space).
|
- int large_size = Heap::MaxObjectSizeInPagedSpace() + 1;
|
+ int large_size = v8_context()->heap_.MaxObjectSizeInPagedSpace() + 1;
|
char* str = new char[large_size];
|
for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
|
str[large_size - 1] = '\0';
|
Index: src/SConscript
|
===================================================================
|
--- src/SConscript (revision 3238)
|
+++ src/SConscript Sat Nov 14 01:42:54 MSK 2009
|
@@ -100,6 +100,7 @@
|
v8-counters.cc
|
v8.cc
|
v8threads.cc
|
+ v8-global-context.cc
|
variables.cc
|
version.cc
|
virtual-frame.cc
|
Index: test/cctest/test-disasm-ia32.cc
|
===================================================================
|
--- test/cctest/test-disasm-ia32.cc (revision 3040)
|
+++ test/cctest/test-disasm-ia32.cc Sun Nov 15 12:40:53 MSK 2009
|
@@ -37,6 +37,7 @@
|
#include "cctest.h"
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static v8::Persistent<v8::Context> env;
|
|
@@ -256,7 +257,7 @@
|
__ bind(&L2);
|
__ call(Operand(ebx, ecx, times_4, 10000));
|
__ nop();
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::LoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
__ nop();
|
__ call(FUNCTION_ADDR(DummyStaticFunction), RelocInfo::RUNTIME_ENTRY);
|
@@ -392,10 +393,10 @@
|
|
CodeDesc desc;
|
assm.GetCode(&desc);
|
- Object* code = Heap::CreateCode(desc,
|
+ Object* code = v8_context()->heap_.CreateCode(desc,
|
NULL,
|
Code::ComputeFlags(Code::STUB),
|
- Handle<Object>(Heap::undefined_value()));
|
+ Handle<Object>(v8_context()->heap_.undefined_value()));
|
CHECK(code->IsCode());
|
#ifdef DEBUG
|
Code::cast(code)->Print();
|
Index: src/scopeinfo.cc
|
===================================================================
|
--- src/scopeinfo.cc (revision 3233)
|
+++ src/scopeinfo.cc Sat Nov 14 01:43:11 MSK 2009
|
@@ -431,7 +431,8 @@
|
String* name,
|
Variable::Mode* mode) {
|
ASSERT(name->IsSymbol());
|
- int result = ContextSlotCache::Lookup(code, name, mode);
|
+ ContextSlotCache& context_slot_cache = v8_context()->context_slot_cache_;
|
+ int result = context_slot_cache.Lookup(code, name, mode);
|
if (result != ContextSlotCache::kNotFound) return result;
|
if (code->sinfo_size() > 0) {
|
// Loop below depends on the NULL sentinel after the context slot names.
|
@@ -441,6 +442,7 @@
|
// slots start after length entry
|
Object** p0 = ContextEntriesAddr(code) + 1;
|
Object** p = p0;
|
+
|
// contexts may have no variable slots (in the presence of eval()).
|
while (*p != NULL) {
|
if (*p == name) {
|
@@ -450,13 +452,13 @@
|
Variable::Mode mode_value = static_cast<Variable::Mode>(v);
|
if (mode != NULL) *mode = mode_value;
|
result = ((p - p0) >> 1) + Context::MIN_CONTEXT_SLOTS;
|
- ContextSlotCache::Update(code, name, mode_value, result);
|
+ context_slot_cache.Update(code, name, mode_value, result);
|
return result;
|
}
|
p += 2;
|
}
|
}
|
- ContextSlotCache::Update(code, name, Variable::INTERNAL, -1);
|
+ context_slot_cache.Update(code, name, Variable::INTERNAL, -1);
|
return -1;
|
}
|
|
@@ -560,7 +562,7 @@
|
int slot_index) {
|
String* symbol;
|
ASSERT(slot_index > kNotFound);
|
- if (Heap::LookupSymbolIfExists(name, &symbol)) {
|
+ if (v8_context()->heap_.LookupSymbolIfExists(name, &symbol)) {
|
int index = Hash(code, symbol);
|
Key& key = keys_[index];
|
key.code = code;
|
@@ -579,12 +581,6 @@
|
}
|
|
|
-ContextSlotCache::Key ContextSlotCache::keys_[ContextSlotCache::kLength];
|
-
|
-
|
-uint32_t ContextSlotCache::values_[ContextSlotCache::kLength];
|
-
|
-
|
#ifdef DEBUG
|
|
void ContextSlotCache::ValidateEntry(Code* code,
|
@@ -592,7 +588,7 @@
|
Variable::Mode mode,
|
int slot_index) {
|
String* symbol;
|
- if (Heap::LookupSymbolIfExists(name, &symbol)) {
|
+ if (v8_context()->heap_.LookupSymbolIfExists(name, &symbol)) {
|
int index = Hash(code, name);
|
Key& key = keys_[index];
|
ASSERT(key.code == code);
|
Index: src/mark-compact.cc
|
===================================================================
|
--- src/mark-compact.cc (revision 3230)
|
+++ src/mark-compact.cc Sat Nov 14 01:43:18 MSK 2009
|
@@ -39,29 +39,350 @@
|
// -------------------------------------------------------------------------
|
// MarkCompactCollector
|
|
-bool MarkCompactCollector::force_compaction_ = false;
|
-bool MarkCompactCollector::compacting_collection_ = false;
|
-bool MarkCompactCollector::compact_on_next_gc_ = false;
|
+class MarkCompactCollector::MarkCompactCollectorImpl {
|
+public:
|
+ // Type of functions to compute forwarding addresses of objects in
|
+ // compacted spaces. Given an object and its size, return a (non-failure)
|
+ // Object* that will be the object after forwarding. There is a separate
|
+ // allocation function for each (compactable) space based on the location
|
+ // of the object before compaction.
|
+ typedef Object* (*AllocationFunction)(HeapObject* object, int object_size);
|
|
-int MarkCompactCollector::previous_marked_count_ = 0;
|
-GCTracer* MarkCompactCollector::tracer_ = NULL;
|
+ // Type of functions to encode the forwarding address for an object.
|
+ // Given the object, its size, and the new (non-failure) object it will be
|
+ // forwarded to, encode the forwarding address. For paged spaces, the
|
+ // 'offset' input/output parameter contains the offset of the forwarded
|
+ // object from the forwarding address of the previous live object in the
|
+ // page as input, and is updated to contain the offset to be used for the
|
+ // next live object in the same page. For spaces using a different
|
+ // encoding (ie, contiguous spaces), the offset parameter is ignored.
|
+ typedef void (*EncodingFunction)(HeapObject* old_object,
|
+ int object_size,
|
+ Object* new_object,
|
+ int* offset);
|
|
+ // Type of functions to process non-live objects.
|
+ typedef void (*ProcessNonLiveFunction)(HeapObject* object);
|
|
+ // Finishes GC, performs heap verification if enabled.
|
+ void Finish();
|
+
|
+ // -----------------------------------------------------------------------
|
+ // Phase 1: Marking live objects.
|
+ //
|
+ // Before: The heap has been prepared for garbage collection by
|
+ // MarkCompactCollector::Prepare() and is otherwise in its
|
+ // normal state.
|
+ //
|
+ // After: Live objects are marked and non-live objects are unmarked.
|
+
|
+
|
+ friend class RootMarkingVisitor;
|
+ friend class MarkingVisitor;
|
+
|
+ // Marking operations for objects reachable from roots.
|
+ void MarkLiveObjects();
|
+
|
+ void MarkUnmarkedObject(HeapObject* obj);
|
+
|
+ inline void MarkObject(HeapObject* obj) {
|
+ if (!obj->IsMarked()) MarkUnmarkedObject(obj);
|
+ }
|
+
|
+ inline void SetMark(HeapObject* obj) {
|
+ base_->tracer_->increment_marked_count();
|
#ifdef DEBUG
|
-MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE;
|
+ UpdateLiveObjectCount(obj);
|
+#endif
|
+ obj->SetMark();
|
+ }
|
|
+ // -------------------------------------------------------------------------
|
+ // Phase 1: tracing and marking live objects.
|
+ // before: all objects are in normal state.
|
+ // after: a live object's map pointer is marked as '00'.
|
+
|
+ // Marking all live objects in the heap as part of mark-sweep or mark-compact
|
+ // collection. Before marking, all objects are in their normal state. After
|
+ // marking, live objects' map pointers are marked indicating that the object
|
+ // has been found reachable.
|
+ //
|
+ // The marking algorithm is a (mostly) depth-first (because of possible stack
|
+ // overflow) traversal of the graph of objects reachable from the roots. It
|
+ // uses an explicit stack of pointers rather than recursion. The young
|
+ // generation's inactive ('from') space is used as a marking stack. The
|
+ // objects in the marking stack are the ones that have been reached and marked
|
+ // but their children have not yet been visited.
|
+ //
|
+ // The marking stack can overflow during traversal. In that case, we set an
|
+ // overflow flag. When the overflow flag is set, we continue marking objects
|
+ // reachable from the objects on the marking stack, but no longer push them on
|
+ // the marking stack. Instead, we mark them as both marked and overflowed.
|
+ // When the stack is in the overflowed state, objects marked as overflowed
|
+ // have been reached and marked but their children have not been visited yet.
|
+ // After emptying the marking stack, we clear the overflow flag and traverse
|
+ // the heap looking for objects marked as overflowed, push them on the stack,
|
+ // and continue with marking. This process repeats until all reachable
|
+ // objects have been marked.
|
+
|
+ MarkingStack marking_stack;
|
+
|
+ // Creates back pointers for all map transitions, stores them in
|
+ // the prototype field. The original prototype pointers are restored
|
+ // in ClearNonLiveTransitions(). All JSObject maps
|
+ // connected by map transitions have the same prototype object, which
|
+ // is why we can use this field temporarily for back pointers.
|
+ void CreateBackPointers();
|
+
|
+ // Mark a Map and its DescriptorArray together, skipping transitions.
|
+ void MarkMapContents(Map* map);
|
+ void MarkDescriptorArray(DescriptorArray* descriptors);
|
+
|
+ // Mark the heap roots and all objects reachable from them.
|
+ void MarkRoots(RootMarkingVisitor* visitor);
|
+
|
+ // Mark the symbol table specially. References to symbols from the
|
+ // symbol table are weak.
|
+ void MarkSymbolTable();
|
+
|
+ // Mark objects in object groups that have at least one object in the
|
+ // group marked.
|
+ void MarkObjectGroups();
|
+
|
+ // Mark all objects in an object group with at least one marked
|
+ // object, then all objects reachable from marked objects in object
|
+ // groups, and repeat.
|
+ void ProcessObjectGroups(MarkingVisitor* visitor);
|
+
|
+ // Mark objects reachable (transitively) from objects in the marking stack
|
+ // or overflowed in the heap.
|
+ void ProcessMarkingStack(MarkingVisitor* visitor);
|
+
|
+ // Mark objects reachable (transitively) from objects in the marking
|
+ // stack. This function empties the marking stack, but may leave
|
+ // overflowed objects in the heap, in which case the marking stack's
|
+ // overflow flag will be set.
|
+ void EmptyMarkingStack(MarkingVisitor* visitor);
|
+
|
+ // Refill the marking stack with overflowed objects from the heap. This
|
+ // function either leaves the marking stack full or clears the overflow
|
+ // flag on the marking stack.
|
+ void RefillMarkingStack();
|
+
|
+ // Callback function for telling whether the object *p is an unmarked
|
+ // heap object.
|
+ static bool IsUnmarkedHeapObject(Object** p);
|
+
|
+#ifdef DEBUG
|
+ void UpdateLiveObjectCount(HeapObject* obj);
|
+#endif
|
+
|
+ // We sweep the large object space in the same way whether we are
|
+ // compacting or not, because the large object space is never compacted.
|
+ void SweepLargeObjectSpace();
|
+
|
+ // Test whether a (possibly marked) object is a Map.
|
+ inline bool SafeIsMap(HeapObject* object);
|
+
|
+ // Map transitions from a live map to a dead map must be killed.
|
+ // We replace them with a null descriptor, with the same key.
|
+ void ClearNonLiveTransitions();
|
+
|
+ // -----------------------------------------------------------------------
|
+ // Phase 2: Sweeping to clear mark bits and free non-live objects for
|
+ // a non-compacting collection, or else computing and encoding
|
+ // forwarding addresses for a compacting collection.
|
+ //
|
+ // Before: Live objects are marked and non-live objects are unmarked.
|
+ //
|
+ // After: (Non-compacting collection.) Live objects are unmarked,
|
+ // non-live regions have been added to their space's free
|
+ // list.
|
+ //
|
+ // After: (Compacting collection.) The forwarding address of live
|
+ // objects in the paged spaces is encoded in their map word
|
+ // along with their (non-forwarded) map pointer.
|
+ //
|
+ // The forwarding address of live objects in the new space is
|
+ // written to their map word's offset in the inactive
|
+ // semispace.
|
+ //
|
+ // Bookkeeping data is written to the remembered-set area of
|
+ // each paged-space page that contains live objects after
|
+ // compaction:
|
+ //
|
+ // The 3rd word of the page (first word of the remembered
|
+ // set) contains the relocation top address, the address of
|
+ // the first word after the end of the last live object in
|
+ // the page after compaction.
|
+ //
|
+ // The 4th word contains the zero-based index of the page in
|
+ // its space. This word is only used for map space pages, in
|
+ // order to encode the map addresses in 21 bits to free 11
|
+ // bits per map word for the forwarding address.
|
+ //
|
+ // The 5th word contains the (nonencoded) forwarding address
|
+ // of the first live object in the page.
|
+ //
|
+ // In both the new space and the paged spaces, a linked list
|
+ // of live regions is constructed (linked through
|
+ // pointers in the non-live region immediately following each
|
+ // live region) to speed further passes of the collector.
|
+
|
+ // Encodes forwarding addresses of objects in compactable parts of the
|
+ // heap.
|
+ void EncodeForwardingAddresses();
|
+
|
+ // Encodes the forwarding addresses of objects in new space.
|
+ void EncodeForwardingAddressesInNewSpace();
|
+
|
+ // Function template to encode the forwarding addresses of objects in
|
+ // paged spaces, parameterized by allocation and non-live processing
|
+ // functions.
|
+ template<AllocationFunction Alloc, ProcessNonLiveFunction ProcessNonLive>
|
+ void EncodeForwardingAddressesInPagedSpace(PagedSpace* space);
|
+
|
+ typedef int (MarkCompactCollectorImpl::*HeapObjectCallback)(HeapObject* obj);
|
+
|
+ // Iterates live objects in a space, passes live objects
|
+ // to a callback function which returns the heap size of the object.
|
+ // Returns the number of live objects iterated.
|
+ int IterateLiveObjects(NewSpace* space, HeapObjectCallback size_f);
|
+ int IterateLiveObjects(PagedSpace* space, HeapObjectCallback size_f);
|
+
|
+ // Iterates the live objects between a range of addresses, returning the
|
+ // number of live objects.
|
+ int IterateLiveObjectsInRange(Address start, Address end,
|
+ HeapObjectCallback size_func);
|
+
|
+ // Callback functions for deallocating non-live blocks in the old
|
+ // generation.
|
+ void DeallocateOldPointerBlock(Address start, int size_in_bytes);
|
+ void DeallocateOldDataBlock(Address start, int size_in_bytes);
|
+ void DeallocateCodeBlock(Address start, int size_in_bytes);
|
+ void DeallocateMapBlock(Address start, int size_in_bytes);
|
+ void DeallocateCellBlock(Address start, int size_in_bytes);
|
+
|
+ // If we are not compacting the heap, we simply sweep the spaces except
|
+ // for the large object space, clearing mark bits and adding unmarked
|
+ // regions to each space's free list.
|
+ void SweepSpaces();
|
+
|
+ // -----------------------------------------------------------------------
|
+ // Phase 3: Updating pointers in live objects.
|
+ //
|
+ // Before: Same as after phase 2 (compacting collection).
|
+ //
|
+ // After: All pointers in live objects, including encoded map
|
+ // pointers, are updated to point to their target's new
|
+ // location. The remembered set area of each paged-space
|
+ // page containing live objects still contains bookkeeping
|
+ // information.
|
+
|
+ // Updates pointers in all spaces.
|
+ void UpdatePointers();
|
+
|
+ // Updates pointers in an object in new space.
|
+ // Returns the heap size of the object.
|
+ int UpdatePointersInNewObject(HeapObject* obj);
|
+
|
+ // Updates pointers in an object in old spaces.
|
+ // Returns the heap size of the object.
|
+ int UpdatePointersInOldObject(HeapObject* obj);
|
+
|
+ // Calculates the forwarding address of an object in an old space.
|
+ Address GetForwardingAddressInOldSpace(HeapObject* obj);
|
+
|
+ // -----------------------------------------------------------------------
|
+ // Phase 4: Relocating objects.
|
+ //
|
+ // Before: Pointers to live objects are updated to point to their
|
+ // target's new location. The remembered set area of each
|
+ // paged-space page containing live objects still contains
|
+ // bookkeeping information.
|
+ //
|
+ // After: Objects have been moved to their new addresses. The
|
+ // remembered set area of each paged-space page containing
|
+ // live objects still contains bookkeeping information.
|
+
|
+ // Relocates objects in all spaces.
|
+ void RelocateObjects();
|
+
|
+ // Converts a code object's inline target to addresses, convention from
|
+ // address to target happens in the marking phase.
|
+ int ConvertCodeICTargetToAddress(HeapObject* obj);
|
+
|
+ // Relocate a map object.
|
+ int RelocateMapObject(HeapObject* obj);
|
+
|
+ // Relocates an old object.
|
+ int RelocateOldPointerObject(HeapObject* obj);
|
+ int RelocateOldDataObject(HeapObject* obj);
|
+
|
+ // Relocate a property cell object.
|
+ int RelocateCellObject(HeapObject* obj);
|
+
|
+ // Helper function.
|
+ inline int RelocateOldNonCodeObject(HeapObject* obj,
|
+ PagedSpace* space);
|
+
|
+ // Relocates an object in the code space.
|
+ int RelocateCodeObject(HeapObject* obj);
|
+
|
+ // Copy a new object.
|
+ int RelocateNewObject(HeapObject* obj);
|
+
|
+ // -----------------------------------------------------------------------
|
+ // Phase 5: Rebuilding remembered sets.
|
+ //
|
+ // Before: The heap is in a normal state except that remembered sets
|
+ // in the paged spaces are not correct.
|
+ //
|
+ // After: The heap is in a normal state.
|
+
|
+ // Rebuild remembered set in old and map spaces.
|
+ void RebuildRSets();
|
+
|
+ // Callback function for non-live blocks in the old generation.
|
+ typedef void (MarkCompactCollectorImpl::*DeallocateFunction)(Address start, int size_in_bytes);
|
+
|
+ void SweepSpace(PagedSpace* space, DeallocateFunction dealloc);
|
+ void SweepSpace(NewSpace* space);
|
+
|
+template<class T> void ScanOverflowedObjects(T* it);
|
+ MarkCompactCollectorImpl(MarkCompactCollector* base):base_(base) {}
|
+private:
|
+ MarkCompactCollector* const base_;
|
+};
|
+
|
+MarkCompactCollector::MarkCompactCollector():
|
+ force_compaction_ (false),
|
+ compacting_collection_ (false),
|
+ compact_on_next_gc_(false),
|
+ previous_marked_count_ (0),
|
+ tracer_ (NULL),
|
+ mark_compact_collector_impl(new MarkCompactCollectorImpl(this))
|
+#ifdef DEBUG
|
+ ,state_ (IDLE),
|
+
|
// Counters used for debugging the marking phase of mark-compact or mark-sweep
|
// collection.
|
-int MarkCompactCollector::live_bytes_ = 0;
|
-int MarkCompactCollector::live_young_objects_ = 0;
|
-int MarkCompactCollector::live_old_data_objects_ = 0;
|
-int MarkCompactCollector::live_old_pointer_objects_ = 0;
|
-int MarkCompactCollector::live_code_objects_ = 0;
|
-int MarkCompactCollector::live_map_objects_ = 0;
|
-int MarkCompactCollector::live_cell_objects_ = 0;
|
-int MarkCompactCollector::live_lo_objects_ = 0;
|
+ live_bytes_(0),
|
+ live_young_objects_ (0),
|
+ live_old_data_objects_(0),
|
+ live_old_pointer_objects_(0),
|
+ live_code_objects_(0),
|
+ live_map_objects_(0),
|
+ live_cell_objects_(0),
|
+ live_lo_objects_(0)
|
#endif
|
+{
|
+}
|
|
+MarkCompactCollector::~MarkCompactCollector() {
|
+ delete mark_compact_collector_impl;
|
+}
|
+
|
void MarkCompactCollector::CollectGarbage() {
|
// Make sure that Prepare() has been called. The individual steps below will
|
// update the state as they proceed.
|
@@ -71,26 +392,26 @@
|
// Tell the tracer.
|
if (IsCompacting()) tracer_->set_is_compacting();
|
|
- MarkLiveObjects();
|
+ mark_compact_collector_impl->MarkLiveObjects();
|
|
- if (FLAG_collect_maps) ClearNonLiveTransitions();
|
+ if (FLAG_collect_maps) mark_compact_collector_impl->ClearNonLiveTransitions();
|
|
- SweepLargeObjectSpace();
|
+ mark_compact_collector_impl->SweepLargeObjectSpace();
|
|
if (IsCompacting()) {
|
- EncodeForwardingAddresses();
|
+ mark_compact_collector_impl->EncodeForwardingAddresses();
|
|
- UpdatePointers();
|
+ mark_compact_collector_impl->UpdatePointers();
|
|
- RelocateObjects();
|
+ mark_compact_collector_impl->RelocateObjects();
|
|
- RebuildRSets();
|
+ mark_compact_collector_impl->RebuildRSets();
|
|
} else {
|
- SweepSpaces();
|
+ mark_compact_collector_impl->SweepSpaces();
|
}
|
|
- Finish();
|
+ mark_compact_collector_impl->Finish();
|
|
// Save the count of marked objects remaining after the collection and
|
// null out the GC tracer.
|
@@ -116,7 +437,7 @@
|
compact_on_next_gc_ = false;
|
|
if (FLAG_never_compact) compacting_collection_ = false;
|
- if (FLAG_collect_maps) CreateBackPointers();
|
+ if (FLAG_collect_maps) mark_compact_collector_impl->CreateBackPointers();
|
|
#ifdef DEBUG
|
if (compacting_collection_) {
|
@@ -144,20 +465,20 @@
|
}
|
|
|
-void MarkCompactCollector::Finish() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::Finish() {
|
#ifdef DEBUG
|
- ASSERT(state_ == SWEEP_SPACES || state_ == REBUILD_RSETS);
|
- state_ = IDLE;
|
+ ASSERT(base_->state_ == SWEEP_SPACES || base_->state_ == REBUILD_RSETS);
|
+ base_->state_ = IDLE;
|
#endif
|
// The stub cache is not traversed during GC; clear the cache to
|
// force lazy re-initialization of it. This must be done after the
|
// GC, because it relies on the new address of certain old space
|
// objects (empty string, illegal builtin).
|
- StubCache::Clear();
|
+ v8_context()->stub_cache_.Clear();
|
|
// If we've just compacted old space there's no reason to check the
|
// fragmentation limit. Just return.
|
- if (HasCompacted()) return;
|
+ if (base_->HasCompacted()) return;
|
|
// We compact the old generation on the next GC if it has gotten too
|
// fragmented (ie, we could recover an expected amount of space by
|
@@ -177,45 +498,13 @@
|
static_cast<int>((old_gen_recoverable * 100.0) / old_gen_used);
|
if (old_gen_fragmentation > kFragmentationLimit &&
|
old_gen_recoverable > kFragmentationAllowed) {
|
- compact_on_next_gc_ = true;
|
+ base_->compact_on_next_gc_ = true;
|
}
|
}
|
|
-
|
-// -------------------------------------------------------------------------
|
-// Phase 1: tracing and marking live objects.
|
-// before: all objects are in normal state.
|
-// after: a live object's map pointer is marked as '00'.
|
-
|
-// Marking all live objects in the heap as part of mark-sweep or mark-compact
|
-// collection. Before marking, all objects are in their normal state. After
|
-// marking, live objects' map pointers are marked indicating that the object
|
-// has been found reachable.
|
-//
|
-// The marking algorithm is a (mostly) depth-first (because of possible stack
|
-// overflow) traversal of the graph of objects reachable from the roots. It
|
-// uses an explicit stack of pointers rather than recursion. The young
|
-// generation's inactive ('from') space is used as a marking stack. The
|
-// objects in the marking stack are the ones that have been reached and marked
|
-// but their children have not yet been visited.
|
-//
|
-// The marking stack can overflow during traversal. In that case, we set an
|
-// overflow flag. When the overflow flag is set, we continue marking objects
|
-// reachable from the objects on the marking stack, but no longer push them on
|
-// the marking stack. Instead, we mark them as both marked and overflowed.
|
-// When the stack is in the overflowed state, objects marked as overflowed
|
-// have been reached and marked but their children have not been visited yet.
|
-// After emptying the marking stack, we clear the overflow flag and traverse
|
-// the heap looking for objects marked as overflowed, push them on the stack,
|
-// and continue with marking. This process repeats until all reachable
|
-// objects have been marked.
|
-
|
-static MarkingStack marking_stack;
|
-
|
-
|
static inline HeapObject* ShortCircuitConsString(Object** p) {
|
// Optimization: If the heap object pointed to by p is a non-symbol
|
- // cons string whose right substring is Heap::empty_string, update
|
+ // cons string whose right substring is v8_context()->heap_.empty_string, update
|
// it in place to its left substring. Return the updated value.
|
//
|
// Here we assume that if we change *p, we replace it with a heap object
|
@@ -223,7 +512,7 @@
|
//
|
// The check performed is:
|
// object->IsConsString() && !object->IsSymbol() &&
|
- // (ConsString::cast(object)->second() == Heap::empty_string())
|
+ // (ConsString::cast(object)->second() == v8_context()->heap_.empty_string())
|
// except the maps for the object and its possible substrings might be
|
// marked.
|
HeapObject* object = HeapObject::cast(*p);
|
@@ -233,7 +522,7 @@
|
if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object;
|
|
Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second();
|
- if (second != Heap::raw_unchecked_empty_string()) {
|
+ if (second != v8_context()->heap_.raw_unchecked_empty_string()) {
|
return object;
|
}
|
|
@@ -241,7 +530,7 @@
|
// remembered set. Therefore, we only replace the string with its left
|
// substring when the remembered set does not change.
|
Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first();
|
- if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object;
|
+ if (!v8_context()->heap_.InNewSpace(object) && v8_context()->heap_.InNewSpace(first)) return object;
|
|
*p = first;
|
return HeapObject::cast(first);
|
@@ -250,7 +539,10 @@
|
|
// Helper class for marking pointers in HeapObjects.
|
class MarkingVisitor : public ObjectVisitor {
|
+ MarkCompactCollector::MarkCompactCollectorImpl* const mark_compact_collector_impl;
|
public:
|
+ MarkingVisitor():mark_compact_collector_impl(v8_context()->mark_compact_collector_.mark_compact_collector_impl) {}
|
+
|
void VisitPointer(Object** p) {
|
MarkObjectByPointer(p);
|
}
|
@@ -271,9 +563,9 @@
|
if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) {
|
IC::Clear(rinfo->pc());
|
// Please note targets for cleared inline cached do not have to be
|
- // marked since they are contained in Heap::non_monomorphic_cache().
|
+ // marked since they are contained in v8_context()->heap_.non_monomorphic_cache().
|
} else {
|
- MarkCompactCollector::MarkObject(code);
|
+ mark_compact_collector_impl->MarkObject(code);
|
}
|
}
|
|
@@ -281,7 +573,7 @@
|
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
|
rinfo->IsPatchedReturnSequence());
|
HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
|
- MarkCompactCollector::MarkObject(code);
|
+ mark_compact_collector_impl->MarkObject(code);
|
}
|
|
private:
|
@@ -289,22 +581,22 @@
|
void MarkObjectByPointer(Object** p) {
|
if (!(*p)->IsHeapObject()) return;
|
HeapObject* object = ShortCircuitConsString(p);
|
- MarkCompactCollector::MarkObject(object);
|
+ mark_compact_collector_impl->MarkObject(object);
|
}
|
|
// Tells whether the mark sweep collection will perform compaction.
|
- bool IsCompacting() { return MarkCompactCollector::IsCompacting(); }
|
+ bool IsCompacting() { return mark_compact_collector_impl->base_->IsCompacting(); }
|
|
// Visit an unmarked object.
|
void VisitUnmarkedObject(HeapObject* obj) {
|
#ifdef DEBUG
|
- ASSERT(Heap::Contains(obj));
|
+ ASSERT(v8_context()->heap_.Contains(obj));
|
ASSERT(!obj->IsMarked());
|
#endif
|
Map* map = obj->map();
|
- MarkCompactCollector::SetMark(obj);
|
+ mark_compact_collector_impl->SetMark(obj);
|
// Mark the map pointer and the body.
|
- MarkCompactCollector::MarkObject(map);
|
+ mark_compact_collector_impl->MarkObject(map);
|
obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), this);
|
}
|
|
@@ -351,16 +643,18 @@
|
if (object->IsMarked()) return;
|
|
Map* map = object->map();
|
+ MarkCompactCollector::MarkCompactCollectorImpl* const mark_compact_collector_impl =
|
+ v8_context()->mark_compact_collector_.mark_compact_collector_impl;
|
// Mark the object.
|
- MarkCompactCollector::SetMark(object);
|
+ mark_compact_collector_impl->SetMark(object);
|
// Mark the map pointer and body, and push them on the marking stack.
|
- MarkCompactCollector::MarkObject(map);
|
+ mark_compact_collector_impl->MarkObject(map);
|
object->IterateBody(map->instance_type(), object->SizeFromMap(map),
|
&stack_visitor_);
|
|
// Mark all the objects reachable from the map and body. May leave
|
// overflowed objects in the heap.
|
- MarkCompactCollector::EmptyMarkingStack(&stack_visitor_);
|
+ mark_compact_collector_impl->EmptyMarkingStack(&stack_visitor_);
|
}
|
};
|
|
@@ -406,7 +700,7 @@
|
}
|
}
|
// Set the entry to null_value (as deleted).
|
- *p = Heap::raw_unchecked_null_value();
|
+ *p = v8_context()->heap_.raw_unchecked_null_value();
|
pointers_removed_++;
|
}
|
}
|
@@ -420,9 +714,9 @@
|
};
|
|
|
-void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkUnmarkedObject(HeapObject* object) {
|
ASSERT(!object->IsMarked());
|
- ASSERT(Heap::Contains(object));
|
+ ASSERT(v8_context()->heap_.Contains(object));
|
if (object->IsMap()) {
|
Map* map = Map::cast(object);
|
if (FLAG_cleanup_caches_in_maps_at_gc) {
|
@@ -443,7 +737,7 @@
|
}
|
|
|
-void MarkCompactCollector::MarkMapContents(Map* map) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkMapContents(Map* map) {
|
MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
|
*HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));
|
|
@@ -456,11 +750,11 @@
|
}
|
|
|
-void MarkCompactCollector::MarkDescriptorArray(
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkDescriptorArray(
|
DescriptorArray* descriptors) {
|
if (descriptors->IsMarked()) return;
|
// Empty descriptor array is marked as a root before any maps are marked.
|
- ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array());
|
+ ASSERT(descriptors != v8_context()->heap_.raw_unchecked_empty_descriptor_array());
|
SetMark(descriptors);
|
|
FixedArray* contents = reinterpret_cast<FixedArray*>(
|
@@ -492,8 +786,8 @@
|
}
|
|
|
-void MarkCompactCollector::CreateBackPointers() {
|
- HeapObjectIterator iterator(Heap::map_space());
|
+void MarkCompactCollector::MarkCompactCollectorImpl::CreateBackPointers() {
|
+ HeapObjectIterator iterator(v8_context()->heap_.map_space());
|
while (iterator.has_next()) {
|
Object* next_object = iterator.next();
|
if (next_object->IsMap()) { // Could also be ByteArray on free list.
|
@@ -502,7 +796,7 @@
|
map->instance_type() <= JS_FUNCTION_TYPE) {
|
map->CreateBackPointers();
|
} else {
|
- ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array());
|
+ ASSERT(map->instance_descriptors() == v8_context()->heap_.empty_descriptor_array());
|
}
|
}
|
}
|
@@ -523,7 +817,7 @@
|
// iterator. Stop when the marking stack is filled or the end of the space
|
// is reached, whichever comes first.
|
template<class T>
|
-static void ScanOverflowedObjects(T* it) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::ScanOverflowedObjects(T* it) {
|
// The caller should ensure that the marking stack is initially not full,
|
// so that we don't waste effort pointlessly scanning for objects.
|
ASSERT(!marking_stack.is_full());
|
@@ -533,7 +827,7 @@
|
if (object->IsOverflowed()) {
|
object->ClearOverflow();
|
ASSERT(object->IsMarked());
|
- ASSERT(Heap::Contains(object));
|
+ ASSERT(v8_context()->heap_.Contains(object));
|
marking_stack.Push(object);
|
if (marking_stack.is_full()) return;
|
}
|
@@ -541,7 +835,7 @@
|
}
|
|
|
-bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
|
+bool MarkCompactCollector::MarkCompactCollectorImpl::IsUnmarkedHeapObject(Object** p) {
|
return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked();
|
}
|
|
@@ -569,13 +863,13 @@
|
};
|
|
|
-void MarkCompactCollector::MarkSymbolTable() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkSymbolTable() {
|
// Objects reachable from symbols are marked as live so as to ensure
|
// that if the symbol itself remains alive after GC for any reason,
|
// and if it is a sliced string or a cons string backed by an
|
// external string (even indirectly), then the external string does
|
// not receive a weak reference callback.
|
- SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
|
+ SymbolTable* symbol_table = v8_context()->heap_.raw_unchecked_symbol_table();
|
// Mark the symbol table itself.
|
SetMark(symbol_table);
|
// Explicitly mark the prefix.
|
@@ -590,10 +884,10 @@
|
}
|
|
|
-void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkRoots(RootMarkingVisitor* visitor) {
|
// Mark the heap roots including global variables, stack variables,
|
// etc., and all objects reachable from them.
|
- Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
|
+ v8_context()->heap_.IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
|
|
// Handle the symbol table specially.
|
MarkSymbolTable();
|
@@ -606,8 +900,8 @@
|
}
|
|
|
-void MarkCompactCollector::MarkObjectGroups() {
|
- List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups();
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkObjectGroups() {
|
+ List<ObjectGroup*>* object_groups = v8_context()->global_handles_.ObjectGroups();
|
|
for (int i = 0; i < object_groups->length(); i++) {
|
ObjectGroup* entry = object_groups->at(i);
|
@@ -644,11 +938,11 @@
|
// Before: the marking stack contains zero or more heap object pointers.
|
// After: the marking stack is empty, and all objects reachable from the
|
// marking stack have been marked, or are overflowed in the heap.
|
-void MarkCompactCollector::EmptyMarkingStack(MarkingVisitor* visitor) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::EmptyMarkingStack(MarkingVisitor* visitor) {
|
while (!marking_stack.is_empty()) {
|
HeapObject* object = marking_stack.Pop();
|
ASSERT(object->IsHeapObject());
|
- ASSERT(Heap::Contains(object));
|
+ ASSERT(v8_context()->heap_.Contains(object));
|
ASSERT(object->IsMarked());
|
ASSERT(!object->IsOverflowed());
|
|
@@ -669,35 +963,35 @@
|
// before sweeping completes. If sweeping completes, there are no remaining
|
// overflowed objects in the heap so the overflow flag on the markings stack
|
// is cleared.
|
-void MarkCompactCollector::RefillMarkingStack() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::RefillMarkingStack() {
|
ASSERT(marking_stack.overflowed());
|
-
|
- SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize);
|
+ Heap& heap = v8_context()->heap_;
|
+ SemiSpaceIterator new_it(heap.new_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&new_it);
|
if (marking_stack.is_full()) return;
|
|
- HeapObjectIterator old_pointer_it(Heap::old_pointer_space(),
|
+ HeapObjectIterator old_pointer_it(heap.old_pointer_space(),
|
&OverflowObjectSize);
|
ScanOverflowedObjects(&old_pointer_it);
|
if (marking_stack.is_full()) return;
|
|
- HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize);
|
+ HeapObjectIterator old_data_it(heap.old_data_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&old_data_it);
|
if (marking_stack.is_full()) return;
|
|
- HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize);
|
+ HeapObjectIterator code_it(heap.code_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&code_it);
|
if (marking_stack.is_full()) return;
|
|
- HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize);
|
+ HeapObjectIterator map_it(heap.map_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&map_it);
|
if (marking_stack.is_full()) return;
|
|
- HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize);
|
+ HeapObjectIterator cell_it(heap.cell_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&cell_it);
|
if (marking_stack.is_full()) return;
|
|
- LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize);
|
+ LargeObjectIterator lo_it(heap.lo_space(), &OverflowObjectSize);
|
ScanOverflowedObjects(&lo_it);
|
if (marking_stack.is_full()) return;
|
|
@@ -709,7 +1003,7 @@
|
// stack. Before: the marking stack contains zero or more heap object
|
// pointers. After: the marking stack is empty and there are no overflowed
|
// objects in the heap.
|
-void MarkCompactCollector::ProcessMarkingStack(MarkingVisitor* visitor) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::ProcessMarkingStack(MarkingVisitor* visitor) {
|
EmptyMarkingStack(visitor);
|
while (marking_stack.overflowed()) {
|
RefillMarkingStack();
|
@@ -718,7 +1012,7 @@
|
}
|
|
|
-void MarkCompactCollector::ProcessObjectGroups(MarkingVisitor* visitor) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::ProcessObjectGroups(MarkingVisitor* visitor) {
|
bool work_to_do = true;
|
ASSERT(marking_stack.is_empty());
|
while (work_to_do) {
|
@@ -729,15 +1023,15 @@
|
}
|
|
|
-void MarkCompactCollector::MarkLiveObjects() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::MarkLiveObjects() {
|
#ifdef DEBUG
|
- ASSERT(state_ == PREPARE_GC);
|
- state_ = MARK_LIVE_OBJECTS;
|
+ ASSERT(base_->state_ == PREPARE_GC);
|
+ base_->state_ = MARK_LIVE_OBJECTS;
|
#endif
|
// The to space contains live objects, the from space is used as a marking
|
// stack.
|
- marking_stack.Initialize(Heap::new_space()->FromSpaceLow(),
|
- Heap::new_space()->FromSpaceHigh());
|
+ marking_stack.Initialize(v8_context()->heap_.new_space()->FromSpaceLow(),
|
+ v8_context()->heap_.new_space()->FromSpaceHigh());
|
|
ASSERT(!marking_stack.overflowed());
|
|
@@ -756,9 +1050,9 @@
|
//
|
// First we identify nonlive weak handles and mark them as pending
|
// destruction.
|
- GlobalHandles::IdentifyWeakHandles(&IsUnmarkedHeapObject);
|
+ v8_context()->global_handles_.IdentifyWeakHandles(&IsUnmarkedHeapObject);
|
// Then we mark the objects and process the transitive closure.
|
- GlobalHandles::IterateWeakRoots(&root_visitor);
|
+ v8_context()->global_handles_.IterateWeakRoots(&root_visitor);
|
while (marking_stack.overflowed()) {
|
RefillMarkingStack();
|
EmptyMarkingStack(root_visitor.stack_visitor());
|
@@ -771,13 +1065,13 @@
|
// Prune the symbol table removing all symbols only pointed to by the
|
// symbol table. Cannot use symbol_table() here because the symbol
|
// table is marked.
|
- SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
|
+ SymbolTable* symbol_table = v8_context()->heap_.raw_unchecked_symbol_table();
|
SymbolTableCleaner v;
|
symbol_table->IterateElements(&v);
|
symbol_table->ElementsRemoved(v.PointersRemoved());
|
|
// Remove object groups after marking phase.
|
- GlobalHandles::RemoveObjectGroups();
|
+ v8_context()->global_handles_.RemoveObjectGroups();
|
}
|
|
|
@@ -789,24 +1083,25 @@
|
|
|
#ifdef DEBUG
|
-void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
|
- live_bytes_ += obj->Size();
|
- if (Heap::new_space()->Contains(obj)) {
|
- live_young_objects_++;
|
- } else if (Heap::map_space()->Contains(obj)) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::UpdateLiveObjectCount(HeapObject* obj) {
|
+ base_->live_bytes_ += obj->Size();
|
+ Heap& heap = v8_context()->heap_;
|
+ if (heap.new_space()->Contains(obj)) {
|
+ base_->live_young_objects_++;
|
+ } else if (heap.map_space()->Contains(obj)) {
|
ASSERT(obj->IsMap());
|
- live_map_objects_++;
|
- } else if (Heap::cell_space()->Contains(obj)) {
|
+ base_->live_map_objects_++;
|
+ } else if (heap.cell_space()->Contains(obj)) {
|
ASSERT(obj->IsJSGlobalPropertyCell());
|
- live_cell_objects_++;
|
- } else if (Heap::old_pointer_space()->Contains(obj)) {
|
- live_old_pointer_objects_++;
|
- } else if (Heap::old_data_space()->Contains(obj)) {
|
- live_old_data_objects_++;
|
- } else if (Heap::code_space()->Contains(obj)) {
|
- live_code_objects_++;
|
- } else if (Heap::lo_space()->Contains(obj)) {
|
- live_lo_objects_++;
|
+ base_->live_cell_objects_++;
|
+ } else if (heap.old_pointer_space()->Contains(obj)) {
|
+ base_->live_old_pointer_objects_++;
|
+ } else if (heap.old_data_space()->Contains(obj)) {
|
+ base_->live_old_data_objects_++;
|
+ } else if (heap.code_space()->Contains(obj)) {
|
+ base_->live_code_objects_++;
|
+ } else if (heap.lo_space()->Contains(obj)) {
|
+ base_->live_lo_objects_++;
|
} else {
|
UNREACHABLE();
|
}
|
@@ -814,25 +1109,25 @@
|
#endif // DEBUG
|
|
|
-void MarkCompactCollector::SweepLargeObjectSpace() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::SweepLargeObjectSpace() {
|
#ifdef DEBUG
|
- ASSERT(state_ == MARK_LIVE_OBJECTS);
|
- state_ =
|
- compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
|
+ ASSERT(base_->state_ == MARK_LIVE_OBJECTS);
|
+ base_->state_ =
|
+ base_->compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES;
|
#endif
|
// Deallocate unmarked objects and clear marked bits for marked objects.
|
- Heap::lo_space()->FreeUnmarkedObjects();
|
+ v8_context()->heap_.lo_space()->FreeUnmarkedObjects();
|
}
|
|
// Safe to use during marking phase only.
|
-bool MarkCompactCollector::SafeIsMap(HeapObject* object) {
|
+bool MarkCompactCollector::MarkCompactCollectorImpl::SafeIsMap(HeapObject* object) {
|
MapWord metamap = object->map_word();
|
metamap.ClearMark();
|
return metamap.ToMap()->instance_type() == MAP_TYPE;
|
}
|
|
-void MarkCompactCollector::ClearNonLiveTransitions() {
|
- HeapObjectIterator map_iterator(Heap::map_space(), &CountMarkedCallback);
|
+void MarkCompactCollector::MarkCompactCollectorImpl::ClearNonLiveTransitions() {
|
+ HeapObjectIterator map_iterator(v8_context()->heap_.map_space(), &CountMarkedCallback);
|
// Iterate over the map space, setting map transitions that go from
|
// a marked map to an unmarked map to null transitions. At the same time,
|
// set all the prototype fields of maps back to their original value,
|
@@ -942,16 +1237,17 @@
|
// and all others to the old space.
|
inline Object* MCAllocateFromNewSpace(HeapObject* object, int object_size) {
|
Object* forwarded;
|
- if (object_size > Heap::MaxObjectSizeInPagedSpace()) {
|
+ Heap& heap = v8_context()->heap_;
|
+ if (object_size > heap.MaxObjectSizeInPagedSpace()) {
|
forwarded = Failure::Exception();
|
} else {
|
- OldSpace* target_space = Heap::TargetSpace(object);
|
- ASSERT(target_space == Heap::old_pointer_space() ||
|
- target_space == Heap::old_data_space());
|
+ OldSpace* target_space = heap.TargetSpace(object);
|
+ ASSERT(target_space == heap.old_pointer_space() ||
|
+ target_space == heap.old_data_space());
|
forwarded = target_space->MCAllocateRaw(object_size);
|
}
|
if (forwarded->IsFailure()) {
|
- forwarded = Heap::new_space()->MCAllocateRaw(object_size);
|
+ forwarded = heap.new_space()->MCAllocateRaw(object_size);
|
}
|
return forwarded;
|
}
|
@@ -960,27 +1256,27 @@
|
// Allocation functions for the paged spaces call the space's MCAllocateRaw.
|
inline Object* MCAllocateFromOldPointerSpace(HeapObject* ignore,
|
int object_size) {
|
- return Heap::old_pointer_space()->MCAllocateRaw(object_size);
|
+ return v8_context()->heap_.old_pointer_space()->MCAllocateRaw(object_size);
|
}
|
|
|
inline Object* MCAllocateFromOldDataSpace(HeapObject* ignore, int object_size) {
|
- return Heap::old_data_space()->MCAllocateRaw(object_size);
|
+ return v8_context()->heap_.old_data_space()->MCAllocateRaw(object_size);
|
}
|
|
|
inline Object* MCAllocateFromCodeSpace(HeapObject* ignore, int object_size) {
|
- return Heap::code_space()->MCAllocateRaw(object_size);
|
+ return v8_context()->heap_.code_space()->MCAllocateRaw(object_size);
|
}
|
|
|
inline Object* MCAllocateFromMapSpace(HeapObject* ignore, int object_size) {
|
- return Heap::map_space()->MCAllocateRaw(object_size);
|
+ return v8_context()->heap_.map_space()->MCAllocateRaw(object_size);
|
}
|
|
|
inline Object* MCAllocateFromCellSpace(HeapObject* ignore, int object_size) {
|
- return Heap::cell_space()->MCAllocateRaw(object_size);
|
+ return v8_context()->heap_.cell_space()->MCAllocateRaw(object_size);
|
}
|
|
|
@@ -991,8 +1287,8 @@
|
Object* new_object,
|
int* ignored) {
|
int offset =
|
- Heap::new_space()->ToSpaceOffsetForAddress(old_object->address());
|
- Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) =
|
+ v8_context()->heap_.new_space()->ToSpaceOffsetForAddress(old_object->address());
|
+ Memory::Address_at(v8_context()->heap_.new_space()->FromSpaceLow() + offset) =
|
HeapObject::cast(new_object)->address();
|
}
|
|
@@ -1036,12 +1332,14 @@
|
//
|
// The template parameters are an allocation function, a forwarding address
|
// encoding function, and a function to process non-live objects.
|
-template<MarkCompactCollector::AllocationFunction Alloc,
|
- MarkCompactCollector::EncodingFunction Encode,
|
- MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
|
+template<MarkCompactCollector::MarkCompactCollectorImpl::AllocationFunction Alloc,
|
+ MarkCompactCollector::MarkCompactCollectorImpl::EncodingFunction Encode,
|
+ MarkCompactCollector::MarkCompactCollectorImpl::ProcessNonLiveFunction ProcessNonLive>
|
inline void EncodeForwardingAddressesInRange(Address start,
|
Address end,
|
- int* offset) {
|
+ int* offset,
|
+ MarkCompactCollector* mark_compact_collector
|
+ ) {
|
// The start address of the current free region while sweeping the space.
|
// This address is set when a transition from live to non-live objects is
|
// encountered. A value (an encoding of the 'next free region' pointer)
|
@@ -1059,7 +1357,7 @@
|
HeapObject* object = HeapObject::FromAddress(current);
|
if (object->IsMarked()) {
|
object->ClearMark();
|
- MarkCompactCollector::tracer()->decrement_marked_count();
|
+ mark_compact_collector->tracer()->decrement_marked_count();
|
object_size = object->Size();
|
|
Object* forwarded = Alloc(object, object_size);
|
@@ -1093,20 +1391,22 @@
|
|
|
// Functions to encode the forwarding pointers in each compactable space.
|
-void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::EncodeForwardingAddressesInNewSpace() {
|
int ignored;
|
EncodeForwardingAddressesInRange<MCAllocateFromNewSpace,
|
EncodeForwardingAddressInNewSpace,
|
IgnoreNonLiveObject>(
|
- Heap::new_space()->bottom(),
|
- Heap::new_space()->top(),
|
- &ignored);
|
+ v8_context()->heap_.new_space()->bottom(),
|
+ v8_context()->heap_.new_space()->top(),
|
+ &ignored,
|
+ base_
|
+ );
|
}
|
|
|
-template<MarkCompactCollector::AllocationFunction Alloc,
|
- MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive>
|
-void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace(
|
+template<MarkCompactCollector::MarkCompactCollectorImpl::AllocationFunction Alloc,
|
+ MarkCompactCollector::MarkCompactCollectorImpl::ProcessNonLiveFunction ProcessNonLive>
|
+void MarkCompactCollector::MarkCompactCollectorImpl::EncodeForwardingAddressesInPagedSpace(
|
PagedSpace* space) {
|
PageIterator it(space, PageIterator::PAGES_IN_USE);
|
while (it.has_next()) {
|
@@ -1119,12 +1419,13 @@
|
ProcessNonLive>(
|
p->ObjectAreaStart(),
|
p->AllocationTop(),
|
- &offset);
|
+ &offset,
|
+ base_);
|
}
|
}
|
|
|
-static void SweepSpace(NewSpace* space) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::SweepSpace(NewSpace* space) {
|
HeapObject* object;
|
for (Address current = space->bottom();
|
current < space->top();
|
@@ -1132,17 +1433,17 @@
|
object = HeapObject::FromAddress(current);
|
if (object->IsMarked()) {
|
object->ClearMark();
|
- MarkCompactCollector::tracer()->decrement_marked_count();
|
+ base_->tracer()->decrement_marked_count();
|
} else {
|
// We give non-live objects a map that will correctly give their size,
|
// since their existing map might not be live after the collection.
|
int size = object->Size();
|
if (size >= ByteArray::kHeaderSize) {
|
- object->set_map(Heap::raw_unchecked_byte_array_map());
|
+ object->set_map(v8_context()->heap_.raw_unchecked_byte_array_map());
|
ByteArray::cast(object)->set_length(ByteArray::LengthFor(size));
|
} else {
|
ASSERT(size == kPointerSize);
|
- object->set_map(Heap::raw_unchecked_one_pointer_filler_map());
|
+ object->set_map(v8_context()->heap_.raw_unchecked_one_pointer_filler_map());
|
}
|
ASSERT(object->Size() == size);
|
}
|
@@ -1152,7 +1453,7 @@
|
}
|
|
|
-static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
|
PageIterator it(space, PageIterator::PAGES_IN_USE);
|
while (it.has_next()) {
|
Page* p = it.next();
|
@@ -1167,9 +1468,9 @@
|
object = HeapObject::FromAddress(current);
|
if (object->IsMarked()) {
|
object->ClearMark();
|
- MarkCompactCollector::tracer()->decrement_marked_count();
|
+ base_->tracer()->decrement_marked_count();
|
if (!is_previous_alive) { // Transition from free to live.
|
- dealloc(free_start, current - free_start);
|
+ (this->*dealloc)(free_start, current - free_start);
|
is_previous_alive = true;
|
}
|
} else {
|
@@ -1191,84 +1492,87 @@
|
if (!is_previous_alive) {
|
int free_size = p->AllocationTop() - free_start;
|
if (free_size > 0) {
|
- dealloc(free_start, free_size);
|
+ (this->*dealloc)(free_start, free_size);
|
}
|
}
|
}
|
}
|
|
|
-void MarkCompactCollector::DeallocateOldPointerBlock(Address start,
|
+void MarkCompactCollector::MarkCompactCollectorImpl::DeallocateOldPointerBlock(Address start,
|
int size_in_bytes) {
|
- Heap::ClearRSetRange(start, size_in_bytes);
|
- Heap::old_pointer_space()->Free(start, size_in_bytes);
|
+ v8_context()->heap_.ClearRSetRange(start, size_in_bytes);
|
+ v8_context()->heap_.old_pointer_space()->Free(start, size_in_bytes);
|
}
|
|
|
-void MarkCompactCollector::DeallocateOldDataBlock(Address start,
|
+void MarkCompactCollector::MarkCompactCollectorImpl::DeallocateOldDataBlock(Address start,
|
int size_in_bytes) {
|
- Heap::old_data_space()->Free(start, size_in_bytes);
|
+ v8_context()->heap_.old_data_space()->Free(start, size_in_bytes);
|
}
|
|
|
-void MarkCompactCollector::DeallocateCodeBlock(Address start,
|
+void MarkCompactCollector::MarkCompactCollectorImpl::DeallocateCodeBlock(Address start,
|
int size_in_bytes) {
|
- Heap::code_space()->Free(start, size_in_bytes);
|
+ v8_context()->heap_.code_space()->Free(start, size_in_bytes);
|
}
|
|
|
-void MarkCompactCollector::DeallocateMapBlock(Address start,
|
+void MarkCompactCollector::MarkCompactCollectorImpl::DeallocateMapBlock(Address start,
|
int size_in_bytes) {
|
// Objects in map space are frequently assumed to have size Map::kSize and a
|
// valid map in their first word. Thus, we break the free block up into
|
// chunks and free them separately.
|
ASSERT(size_in_bytes % Map::kSize == 0);
|
- Heap::ClearRSetRange(start, size_in_bytes);
|
+ v8_context()->heap_.ClearRSetRange(start, size_in_bytes);
|
Address end = start + size_in_bytes;
|
for (Address a = start; a < end; a += Map::kSize) {
|
- Heap::map_space()->Free(a);
|
+ v8_context()->heap_.map_space()->Free(a);
|
}
|
}
|
|
|
-void MarkCompactCollector::DeallocateCellBlock(Address start,
|
+void MarkCompactCollector::MarkCompactCollectorImpl::DeallocateCellBlock(Address start,
|
int size_in_bytes) {
|
+ Heap& heap = v8_context()->heap_;
|
// Free-list elements in cell space are assumed to have a fixed size.
|
// We break the free block into chunks and add them to the free list
|
// individually.
|
- int size = Heap::cell_space()->object_size_in_bytes();
|
+ int size = heap.cell_space()->object_size_in_bytes();
|
ASSERT(size_in_bytes % size == 0);
|
- Heap::ClearRSetRange(start, size_in_bytes);
|
+ heap.ClearRSetRange(start, size_in_bytes);
|
Address end = start + size_in_bytes;
|
for (Address a = start; a < end; a += size) {
|
- Heap::cell_space()->Free(a);
|
+ heap.cell_space()->Free(a);
|
}
|
}
|
|
|
-void MarkCompactCollector::EncodeForwardingAddresses() {
|
- ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
|
+void MarkCompactCollector::MarkCompactCollectorImpl::EncodeForwardingAddresses() {
|
+ ASSERT(base_->state_ == ENCODE_FORWARDING_ADDRESSES);
|
+ Heap& heap = v8_context()->heap_;
|
+
|
// Objects in the active semispace of the young generation may be
|
// relocated to the inactive semispace (if not promoted). Set the
|
// relocation info to the beginning of the inactive semispace.
|
- Heap::new_space()->MCResetRelocationInfo();
|
+ heap.new_space()->MCResetRelocationInfo();
|
|
// Compute the forwarding pointers in each space.
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
|
IgnoreNonLiveObject>(
|
- Heap::old_pointer_space());
|
+ heap.old_pointer_space());
|
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
|
IgnoreNonLiveObject>(
|
- Heap::old_data_space());
|
+ heap.old_data_space());
|
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
|
LogNonLiveCodeObject>(
|
- Heap::code_space());
|
+ heap.code_space());
|
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
|
IgnoreNonLiveObject>(
|
- Heap::cell_space());
|
+ heap.cell_space());
|
|
|
// Compute new space next to last after the old and code spaces have been
|
@@ -1280,33 +1584,34 @@
|
// non-live map pointers to get the sizes of non-live objects.
|
EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace,
|
IgnoreNonLiveObject>(
|
- Heap::map_space());
|
+ heap.map_space());
|
|
// Write relocation info to the top page, so we can use it later. This is
|
// done after promoting objects from the new space so we get the correct
|
// allocation top.
|
- Heap::old_pointer_space()->MCWriteRelocationInfoToPage();
|
- Heap::old_data_space()->MCWriteRelocationInfoToPage();
|
- Heap::code_space()->MCWriteRelocationInfoToPage();
|
- Heap::map_space()->MCWriteRelocationInfoToPage();
|
- Heap::cell_space()->MCWriteRelocationInfoToPage();
|
+ heap.old_pointer_space()->MCWriteRelocationInfoToPage();
|
+ heap.old_data_space()->MCWriteRelocationInfoToPage();
|
+ heap.code_space()->MCWriteRelocationInfoToPage();
|
+ heap.map_space()->MCWriteRelocationInfoToPage();
|
+ heap.cell_space()->MCWriteRelocationInfoToPage();
|
}
|
|
|
-void MarkCompactCollector::SweepSpaces() {
|
- ASSERT(state_ == SWEEP_SPACES);
|
- ASSERT(!IsCompacting());
|
+void MarkCompactCollector::MarkCompactCollectorImpl::SweepSpaces() {
|
+ ASSERT(base_->state_ == SWEEP_SPACES);
|
+ ASSERT(!base_->IsCompacting());
|
+ Heap& heap = v8_context()->heap_;
|
// Noncompacting collections simply sweep the spaces to clear the mark
|
// bits and free the nonlive blocks (for old and map spaces). We sweep
|
// the map space last because freeing non-live maps overwrites them and
|
// the other spaces rely on possibly non-live maps to get the sizes for
|
// non-live objects.
|
- SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock);
|
- SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock);
|
- SweepSpace(Heap::code_space(), &DeallocateCodeBlock);
|
- SweepSpace(Heap::cell_space(), &DeallocateCellBlock);
|
- SweepSpace(Heap::new_space());
|
- SweepSpace(Heap::map_space(), &DeallocateMapBlock);
|
+ SweepSpace(heap.old_pointer_space(), &MarkCompactCollectorImpl::DeallocateOldPointerBlock);
|
+ SweepSpace(heap.old_data_space(), &MarkCompactCollectorImpl::DeallocateOldDataBlock);
|
+ SweepSpace(heap.code_space(), &MarkCompactCollectorImpl::DeallocateCodeBlock);
|
+ SweepSpace(heap.cell_space(), &MarkCompactCollectorImpl::DeallocateCellBlock);
|
+ SweepSpace(heap.new_space());
|
+ SweepSpace(heap.map_space(), &MarkCompactCollectorImpl::DeallocateMapBlock);
|
}
|
|
|
@@ -1315,7 +1620,7 @@
|
// The first live region is [first_live_start, first_live_end), and the last
|
// address in the range is top. The callback function is used to get the
|
// size of each live object.
|
-int MarkCompactCollector::IterateLiveObjectsInRange(
|
+int MarkCompactCollector::MarkCompactCollectorImpl::IterateLiveObjectsInRange(
|
Address start,
|
Address end,
|
HeapObjectCallback size_func) {
|
@@ -1329,23 +1634,23 @@
|
current += Memory::int_at(current + kIntSize);
|
} else {
|
live_objects++;
|
- current += size_func(HeapObject::FromAddress(current));
|
+ current += (this->*size_func)(HeapObject::FromAddress(current));
|
}
|
}
|
return live_objects;
|
}
|
|
|
-int MarkCompactCollector::IterateLiveObjects(NewSpace* space,
|
+int MarkCompactCollector::MarkCompactCollectorImpl::IterateLiveObjects(NewSpace* space,
|
HeapObjectCallback size_f) {
|
- ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
|
+ ASSERT(MARK_LIVE_OBJECTS < base_->state_ && base_->state_ <= RELOCATE_OBJECTS);
|
return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f);
|
}
|
|
|
-int MarkCompactCollector::IterateLiveObjects(PagedSpace* space,
|
+int MarkCompactCollector::MarkCompactCollectorImpl::IterateLiveObjects(PagedSpace* space,
|
HeapObjectCallback size_f) {
|
- ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS);
|
+ ASSERT(MARK_LIVE_OBJECTS < base_->state_ && base_->state_ <= RELOCATE_OBJECTS);
|
int total = 0;
|
PageIterator it(space, PageIterator::PAGES_IN_USE);
|
while (it.has_next()) {
|
@@ -1394,30 +1699,31 @@
|
void UpdatePointer(Object** p) {
|
if (!(*p)->IsHeapObject()) return;
|
|
+ Heap& heap = v8_context()->heap_;
|
HeapObject* obj = HeapObject::cast(*p);
|
Address old_addr = obj->address();
|
Address new_addr;
|
- ASSERT(!Heap::InFromSpace(obj));
|
+ ASSERT(!heap.InFromSpace(obj));
|
|
- if (Heap::new_space()->Contains(obj)) {
|
+ if (heap.new_space()->Contains(obj)) {
|
Address forwarding_pointer_addr =
|
- Heap::new_space()->FromSpaceLow() +
|
- Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
|
+ heap.new_space()->FromSpaceLow() +
|
+ heap.new_space()->ToSpaceOffsetForAddress(old_addr);
|
new_addr = Memory::Address_at(forwarding_pointer_addr);
|
|
#ifdef DEBUG
|
- ASSERT(Heap::old_pointer_space()->Contains(new_addr) ||
|
- Heap::old_data_space()->Contains(new_addr) ||
|
- Heap::new_space()->FromSpaceContains(new_addr) ||
|
- Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr)));
|
+ ASSERT(heap.old_pointer_space()->Contains(new_addr) ||
|
+ heap.old_data_space()->Contains(new_addr) ||
|
+ heap.new_space()->FromSpaceContains(new_addr) ||
|
+ heap.lo_space()->Contains(HeapObject::FromAddress(new_addr)));
|
|
- if (Heap::new_space()->FromSpaceContains(new_addr)) {
|
- ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
|
- Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
|
+ if (heap.new_space()->FromSpaceContains(new_addr)) {
|
+ ASSERT(heap.new_space()->FromSpaceOffsetForAddress(new_addr) <=
|
+ heap.new_space()->ToSpaceOffsetForAddress(old_addr));
|
}
|
#endif
|
|
- } else if (Heap::lo_space()->Contains(obj)) {
|
+ } else if (heap.lo_space()->Contains(obj)) {
|
// Don't move objects in the large object space.
|
return;
|
|
@@ -1431,7 +1737,7 @@
|
}
|
ASSERT(original_space != NULL);
|
#endif
|
- new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj);
|
+ new_addr = v8_context()->mark_compact_collector_.mark_compact_collector_impl->GetForwardingAddressInOldSpace(obj);
|
ASSERT(original_space->Contains(new_addr));
|
ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <=
|
original_space->MCSpaceOffsetForAddress(old_addr));
|
@@ -1449,30 +1755,31 @@
|
};
|
|
|
-void MarkCompactCollector::UpdatePointers() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::UpdatePointers() {
|
+ Heap& heap = v8_context()->heap_;
|
#ifdef DEBUG
|
- ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES);
|
- state_ = UPDATE_POINTERS;
|
+ ASSERT(base_->state_ == ENCODE_FORWARDING_ADDRESSES);
|
+ base_->state_ = UPDATE_POINTERS;
|
#endif
|
UpdatingVisitor updating_visitor;
|
- Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
|
- GlobalHandles::IterateWeakRoots(&updating_visitor);
|
+ heap.IterateRoots(&updating_visitor, VISIT_ONLY_STRONG);
|
+ v8_context()->global_handles_.IterateWeakRoots(&updating_visitor);
|
|
- int live_maps = IterateLiveObjects(Heap::map_space(),
|
- &UpdatePointersInOldObject);
|
- int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
|
- &UpdatePointersInOldObject);
|
- int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
|
- &UpdatePointersInOldObject);
|
- int live_codes = IterateLiveObjects(Heap::code_space(),
|
- &UpdatePointersInOldObject);
|
- int live_cells = IterateLiveObjects(Heap::cell_space(),
|
- &UpdatePointersInOldObject);
|
- int live_news = IterateLiveObjects(Heap::new_space(),
|
- &UpdatePointersInNewObject);
|
+ int live_maps = IterateLiveObjects(heap.map_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInOldObject);
|
+ int live_pointer_olds = IterateLiveObjects(heap.old_pointer_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInOldObject);
|
+ int live_data_olds = IterateLiveObjects(heap.old_data_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInOldObject);
|
+ int live_codes = IterateLiveObjects(heap.code_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInOldObject);
|
+ int live_cells = IterateLiveObjects(heap.cell_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInOldObject);
|
+ int live_news = IterateLiveObjects(heap.new_space(),
|
+ &MarkCompactCollectorImpl::UpdatePointersInNewObject);
|
|
// Large objects do not move, the map word can be updated directly.
|
- LargeObjectIterator it(Heap::lo_space());
|
+ LargeObjectIterator it(heap.lo_space());
|
while (it.has_next()) UpdatePointersInNewObject(it.next());
|
|
USE(live_maps);
|
@@ -1481,24 +1788,24 @@
|
USE(live_codes);
|
USE(live_cells);
|
USE(live_news);
|
- ASSERT(live_maps == live_map_objects_);
|
- ASSERT(live_data_olds == live_old_data_objects_);
|
- ASSERT(live_pointer_olds == live_old_pointer_objects_);
|
- ASSERT(live_codes == live_code_objects_);
|
- ASSERT(live_cells == live_cell_objects_);
|
- ASSERT(live_news == live_young_objects_);
|
+ ASSERT(live_maps == base_->live_map_objects_);
|
+ ASSERT(live_data_olds == base_->live_old_data_objects_);
|
+ ASSERT(live_pointer_olds == base_->live_old_pointer_objects_);
|
+ ASSERT(live_codes == base_->live_code_objects_);
|
+ ASSERT(live_cells == base_->live_cell_objects_);
|
+ ASSERT(live_news == base_->live_young_objects_);
|
}
|
|
|
-int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) {
|
+int MarkCompactCollector::MarkCompactCollectorImpl::UpdatePointersInNewObject(HeapObject* obj) {
|
// Keep old map pointers
|
Map* old_map = obj->map();
|
ASSERT(old_map->IsHeapObject());
|
|
Address forwarded = GetForwardingAddressInOldSpace(old_map);
|
-
|
- ASSERT(Heap::map_space()->Contains(old_map));
|
- ASSERT(Heap::map_space()->Contains(forwarded));
|
+ Heap& heap = v8_context()->heap_;
|
+ ASSERT(heap.map_space()->Contains(old_map));
|
+ ASSERT(heap.map_space()->Contains(forwarded));
|
#ifdef DEBUG
|
if (FLAG_gc_verbose) {
|
PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(),
|
@@ -1519,11 +1826,12 @@
|
}
|
|
|
-int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) {
|
+int MarkCompactCollector::MarkCompactCollectorImpl::UpdatePointersInOldObject(HeapObject* obj) {
|
// Decode the map pointer.
|
MapWord encoding = obj->map_word();
|
- Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
|
- ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
+ Heap& heap = v8_context()->heap_;
|
+ Address map_addr = encoding.DecodeMapAddress(heap.map_space());
|
+ ASSERT(heap.map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
|
// At this point, the first word of map_addr is also encoded, cannot
|
// cast it to Map* using Map::cast.
|
@@ -1550,7 +1858,7 @@
|
}
|
|
|
-Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) {
|
+Address MarkCompactCollector::MarkCompactCollectorImpl::GetForwardingAddressInOldSpace(HeapObject* obj) {
|
// Object should either in old or map space.
|
MapWord encoding = obj->map_word();
|
|
@@ -1594,21 +1902,22 @@
|
// -------------------------------------------------------------------------
|
// Phase 4: Relocate objects
|
|
-void MarkCompactCollector::RelocateObjects() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::RelocateObjects() {
|
#ifdef DEBUG
|
- ASSERT(state_ == UPDATE_POINTERS);
|
- state_ = RELOCATE_OBJECTS;
|
+ ASSERT(base_->state_ == UPDATE_POINTERS);
|
+ base_->state_ = RELOCATE_OBJECTS;
|
#endif
|
+ Heap& heap = v8_context()->heap_;
|
// Relocates objects, always relocate map objects first. Relocating
|
// objects in other space relies on map objects to get object size.
|
- int live_maps = IterateLiveObjects(Heap::map_space(), &RelocateMapObject);
|
- int live_pointer_olds = IterateLiveObjects(Heap::old_pointer_space(),
|
- &RelocateOldPointerObject);
|
- int live_data_olds = IterateLiveObjects(Heap::old_data_space(),
|
- &RelocateOldDataObject);
|
- int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject);
|
- int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject);
|
- int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject);
|
+ int live_maps = IterateLiveObjects(heap.map_space(), &MarkCompactCollectorImpl::RelocateMapObject);
|
+ int live_pointer_olds = IterateLiveObjects(heap.old_pointer_space(),
|
+ &MarkCompactCollectorImpl::RelocateOldPointerObject);
|
+ int live_data_olds = IterateLiveObjects(heap.old_data_space(),
|
+ &MarkCompactCollectorImpl::RelocateOldDataObject);
|
+ int live_codes = IterateLiveObjects(heap.code_space(), &MarkCompactCollectorImpl::RelocateCodeObject);
|
+ int live_cells = IterateLiveObjects(heap.cell_space(), &MarkCompactCollectorImpl::RelocateCellObject);
|
+ int live_news = IterateLiveObjects(heap.new_space(), &MarkCompactCollectorImpl::RelocateNewObject);
|
|
USE(live_maps);
|
USE(live_data_olds);
|
@@ -1616,21 +1925,21 @@
|
USE(live_codes);
|
USE(live_cells);
|
USE(live_news);
|
- ASSERT(live_maps == live_map_objects_);
|
- ASSERT(live_data_olds == live_old_data_objects_);
|
- ASSERT(live_pointer_olds == live_old_pointer_objects_);
|
- ASSERT(live_codes == live_code_objects_);
|
- ASSERT(live_cells == live_cell_objects_);
|
- ASSERT(live_news == live_young_objects_);
|
+ ASSERT(live_maps == base_->live_map_objects_);
|
+ ASSERT(live_data_olds == base_->live_old_data_objects_);
|
+ ASSERT(live_pointer_olds == base_->live_old_pointer_objects_);
|
+ ASSERT(live_codes == base_->live_code_objects_);
|
+ ASSERT(live_cells == base_->live_cell_objects_);
|
+ ASSERT(live_news == base_->live_young_objects_);
|
|
// Flip from and to spaces
|
- Heap::new_space()->Flip();
|
+ heap.new_space()->Flip();
|
|
// Set age_mark to bottom in to space
|
- Address mark = Heap::new_space()->bottom();
|
- Heap::new_space()->set_age_mark(mark);
|
+ Address mark = heap.new_space()->bottom();
|
+ heap.new_space()->set_age_mark(mark);
|
|
- Heap::new_space()->MCCommitRelocationInfo();
|
+ heap.new_space()->MCCommitRelocationInfo();
|
#ifdef DEBUG
|
// It is safe to write to the remembered sets as remembered sets on a
|
// page-by-page basis after committing the m-c forwarding pointer.
|
@@ -1641,11 +1950,12 @@
|
}
|
|
|
-int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateMapObject(HeapObject* obj) {
|
// Recover map pointer.
|
MapWord encoding = obj->map_word();
|
- Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
|
- ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
+ Heap& heap = v8_context()->heap_;
|
+ Address map_addr = encoding.DecodeMapAddress(heap.map_space());
|
+ ASSERT(heap.map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
|
// Get forwarding address before resetting map pointer
|
Address new_addr = GetForwardingAddressInOldSpace(obj);
|
@@ -1697,12 +2007,13 @@
|
}
|
|
|
-int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateOldNonCodeObject(HeapObject* obj,
|
PagedSpace* space) {
|
// Recover map pointer.
|
MapWord encoding = obj->map_word();
|
- Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
|
- ASSERT(Heap::map_space()->Contains(map_addr));
|
+ Heap& heap = v8_context()->heap_;
|
+ Address map_addr = encoding.DecodeMapAddress(heap.map_space());
|
+ ASSERT(heap.map_space()->Contains(map_addr));
|
|
// Get forwarding address before resetting map pointer.
|
Address new_addr = GetForwardingAddressInOldSpace(obj);
|
@@ -1722,32 +2033,33 @@
|
}
|
|
|
-int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) {
|
- return RelocateOldNonCodeObject(obj, Heap::old_pointer_space());
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateOldPointerObject(HeapObject* obj) {
|
+ return RelocateOldNonCodeObject(obj, v8_context()->heap_.old_pointer_space());
|
}
|
|
|
-int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) {
|
- return RelocateOldNonCodeObject(obj, Heap::old_data_space());
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateOldDataObject(HeapObject* obj) {
|
+ return RelocateOldNonCodeObject(obj, v8_context()->heap_.old_data_space());
|
}
|
|
|
-int MarkCompactCollector::RelocateCellObject(HeapObject* obj) {
|
- return RelocateOldNonCodeObject(obj, Heap::cell_space());
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateCellObject(HeapObject* obj) {
|
+ return RelocateOldNonCodeObject(obj, v8_context()->heap_.cell_space());
|
}
|
|
|
-int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateCodeObject(HeapObject* obj) {
|
// Recover map pointer.
|
MapWord encoding = obj->map_word();
|
- Address map_addr = encoding.DecodeMapAddress(Heap::map_space());
|
- ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
+ Heap& heap = v8_context()->heap_;
|
+ Address map_addr = encoding.DecodeMapAddress(heap.map_space());
|
+ ASSERT(heap.map_space()->Contains(HeapObject::FromAddress(map_addr)));
|
|
// Get forwarding address before resetting map pointer
|
Address new_addr = GetForwardingAddressInOldSpace(obj);
|
|
// Reset the map pointer.
|
- int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
|
+ int obj_size = RestoreMap(obj, heap.code_space(), new_addr, map_addr);
|
|
Address old_addr = obj->address();
|
|
@@ -1767,23 +2079,24 @@
|
}
|
|
|
-int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
|
+int MarkCompactCollector::MarkCompactCollectorImpl::RelocateNewObject(HeapObject* obj) {
|
int obj_size = obj->Size();
|
|
// Get forwarding address
|
Address old_addr = obj->address();
|
- int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr);
|
+ Heap& heap = v8_context()->heap_;
|
+ int offset = heap.new_space()->ToSpaceOffsetForAddress(old_addr);
|
|
Address new_addr =
|
- Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset);
|
+ Memory::Address_at(heap.new_space()->FromSpaceLow() + offset);
|
|
#ifdef DEBUG
|
- if (Heap::new_space()->FromSpaceContains(new_addr)) {
|
- ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <=
|
- Heap::new_space()->ToSpaceOffsetForAddress(old_addr));
|
+ if (heap.new_space()->FromSpaceContains(new_addr)) {
|
+ ASSERT(heap.new_space()->FromSpaceOffsetForAddress(new_addr) <=
|
+ heap.new_space()->ToSpaceOffsetForAddress(old_addr));
|
} else {
|
- ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() ||
|
- Heap::TargetSpace(obj) == Heap::old_data_space());
|
+ ASSERT(heap.TargetSpace(obj) == heap.old_pointer_space() ||
|
+ heap.TargetSpace(obj) == heap.old_data_space());
|
}
|
#endif
|
|
@@ -1805,12 +2118,12 @@
|
// -------------------------------------------------------------------------
|
// Phase 5: rebuild remembered sets
|
|
-void MarkCompactCollector::RebuildRSets() {
|
+void MarkCompactCollector::MarkCompactCollectorImpl::RebuildRSets() {
|
#ifdef DEBUG
|
- ASSERT(state_ == RELOCATE_OBJECTS);
|
- state_ = REBUILD_RSETS;
|
+ ASSERT(base_->state_ == RELOCATE_OBJECTS);
|
+ base_->state_ = REBUILD_RSETS;
|
#endif
|
- Heap::RebuildRSets();
|
+ v8_context()->heap_.RebuildRSets();
|
}
|
|
} } // namespace v8::internal
|
Index: src/globals.h
|
===================================================================
|
--- src/globals.h (revision 3230)
|
+++ src/globals.h Sat Nov 14 01:42:54 MSK 2009
|
@@ -516,11 +516,11 @@
|
#define TRACK_MEMORY(name) \
|
void* operator new(size_t size) { \
|
void* result = ::operator new(size); \
|
- Logger::NewEvent(name, result, size); \
|
+ v8_context()->logger_.NewEvent(name, result, size); \
|
return result; \
|
} \
|
void operator delete(void* object) { \
|
- Logger::DeleteEvent(name, object); \
|
+ v8_context()->logger_.DeleteEvent(name, object); \
|
::operator delete(object); \
|
}
|
#else
|
Index: src/fast-codegen.cc
|
===================================================================
|
--- src/fast-codegen.cc (revision 3234)
|
+++ src/fast-codegen.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -47,7 +47,7 @@
|
FastCodeGenerator cgen(&masm, script, is_eval);
|
cgen.Generate(fun);
|
if (cgen.HasStackOverflow()) {
|
- ASSERT(!Top::has_pending_exception());
|
+ ASSERT(!v8_context()->top_.has_pending_exception());
|
return Handle<Code>::null();
|
}
|
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
|
@@ -115,7 +115,7 @@
|
}
|
} else {
|
Handle<JSFunction> function =
|
- Compiler::BuildBoilerplate(decl->fun(), script_, this);
|
+ v8_context()->compiler_.BuildBoilerplate(decl->fun(), script_, this);
|
// Check for stack-overflow exception.
|
if (HasStackOverflow()) return;
|
array->set(j++, *function);
|
Index: src/ia32/codegen-ia32.cc
|
===================================================================
|
--- src/ia32/codegen-ia32.cc (revision 3231)
|
+++ src/ia32/codegen-ia32.cc Sat Nov 14 01:43:06 MSK 2009
|
@@ -154,7 +154,7 @@
|
|
// New scope to get automatic timing calculation.
|
{ // NOLINT
|
- HistogramTimerScope codegen_timer(&Counters::code_generation);
|
+ HistogramTimerScope codegen_timer(&v8_context()->counters_.code_generation);
|
CodeGenState state(this);
|
|
// Entry:
|
@@ -267,7 +267,7 @@
|
if (!scope_->HasIllegalRedeclaration()) {
|
Comment cmnt(masm_, "[ function body");
|
#ifdef DEBUG
|
- bool is_builtin = Bootstrapper::IsActive();
|
+ bool is_builtin = v8_context()->bootstrapper_.IsActive();
|
bool should_trace =
|
is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
|
if (should_trace) {
|
@@ -316,7 +316,7 @@
|
|
// Process any deferred code using the register allocator.
|
if (!HasStackOverflow()) {
|
- HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
|
+ HistogramTimerScope deferred_timer(&v8_context()->counters_.deferred_code_generation);
|
JumpTarget::set_compiling_deferred_code(true);
|
ProcessDeferred();
|
JumpTarget::set_compiling_deferred_code(false);
|
@@ -919,7 +919,7 @@
|
|
|
bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
|
- Object* answer_object = Heap::undefined_value();
|
+ Object* answer_object = v8_context()->heap_.undefined_value();
|
switch (op) {
|
case Token::ADD:
|
if (Smi::IsValid(left + right)) {
|
@@ -991,7 +991,7 @@
|
UNREACHABLE();
|
break;
|
}
|
- if (answer_object == Heap::undefined_value()) {
|
+ if (answer_object == v8_context()->heap_.undefined_value()) {
|
return false;
|
}
|
frame_->Push(Handle<Object>(answer_object));
|
@@ -2070,7 +2070,7 @@
|
build_args.Branch(not_equal);
|
__ mov(tmp.reg(),
|
FieldOperand(apply.reg(), JSFunction::kSharedFunctionInfoOffset));
|
- Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
|
+ Handle<Code> apply_code(v8_context()->builtins_.builtin(Builtins::FunctionApply));
|
__ cmp(FieldOperand(tmp.reg(), SharedFunctionInfo::kCodeOffset),
|
Immediate(apply_code));
|
build_args.Branch(not_equal);
|
@@ -3552,7 +3552,7 @@
|
|
// Build the function boilerplate and instantiate it.
|
Handle<JSFunction> boilerplate =
|
- Compiler::BuildBoilerplate(node, script_, this);
|
+ v8_context()->compiler_.BuildBoilerplate(node, script_, this);
|
// Check for stack-overflow exception.
|
if (HasStackOverflow()) return;
|
InstantiateBoilerplate(boilerplate);
|
@@ -5721,7 +5721,7 @@
|
Result answer = frame_->Pop();
|
answer.ToRegister();
|
|
- if (check->Equals(Heap::number_symbol())) {
|
+ if (check->Equals(v8_context()->heap_.number_symbol())) {
|
__ test(answer.reg(), Immediate(kSmiTagMask));
|
destination()->true_target()->Branch(zero);
|
frame_->Spill(answer.reg());
|
@@ -5730,7 +5730,7 @@
|
answer.Unuse();
|
destination()->Split(equal);
|
|
- } else if (check->Equals(Heap::string_symbol())) {
|
+ } else if (check->Equals(v8_context()->heap_.string_symbol())) {
|
__ test(answer.reg(), Immediate(kSmiTagMask));
|
destination()->false_target()->Branch(zero);
|
|
@@ -5749,14 +5749,14 @@
|
answer.Unuse();
|
destination()->Split(less);
|
|
- } else if (check->Equals(Heap::boolean_symbol())) {
|
+ } else if (check->Equals(v8_context()->heap_.boolean_symbol())) {
|
__ cmp(answer.reg(), Factory::true_value());
|
destination()->true_target()->Branch(equal);
|
__ cmp(answer.reg(), Factory::false_value());
|
answer.Unuse();
|
destination()->Split(equal);
|
|
- } else if (check->Equals(Heap::undefined_symbol())) {
|
+ } else if (check->Equals(v8_context()->heap_.undefined_symbol())) {
|
__ cmp(answer.reg(), Factory::undefined_value());
|
destination()->true_target()->Branch(equal);
|
|
@@ -5772,7 +5772,7 @@
|
answer.Unuse();
|
destination()->Split(not_zero);
|
|
- } else if (check->Equals(Heap::function_symbol())) {
|
+ } else if (check->Equals(v8_context()->heap_.function_symbol())) {
|
__ test(answer.reg(), Immediate(kSmiTagMask));
|
destination()->false_target()->Branch(zero);
|
frame_->Spill(answer.reg());
|
@@ -5783,7 +5783,7 @@
|
answer.Unuse();
|
destination()->Split(equal);
|
|
- } else if (check->Equals(Heap::object_symbol())) {
|
+ } else if (check->Equals(v8_context()->heap_.object_symbol())) {
|
__ test(answer.reg(), Immediate(kSmiTagMask));
|
destination()->false_target()->Branch(zero);
|
__ cmp(answer.reg(), Factory::null_value());
|
@@ -5899,9 +5899,10 @@
|
|
|
void DeferredReferenceGetNamedValue::Generate() {
|
+ V8Context* const v8context = v8_context();
|
__ push(receiver_);
|
__ Set(ecx, Immediate(name_));
|
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
|
+ Handle<Code> ic(v8context->builtins_.builtin(Builtins::LoadIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// The call must be followed by a test eax instruction to indicate
|
// that the inobject property case was inlined.
|
@@ -5913,7 +5914,7 @@
|
// Here we use masm_-> instead of the __ macro because this is the
|
// instruction that gets patched and coverage code gets in the way.
|
masm_->test(eax, Immediate(-delta_to_patch_site));
|
- __ IncrementCounter(&Counters::named_load_inline_miss, 1);
|
+ __ IncrementCounter(&v8context->counters_.named_load_inline_miss, 1);
|
|
if (!dst_.is(eax)) __ mov(dst_, eax);
|
__ pop(receiver_);
|
@@ -5947,13 +5948,14 @@
|
__ push(receiver_); // First IC argument.
|
__ push(key_); // Second IC argument.
|
|
+ V8Context* const v8context = v8_context();
|
// Calculate the delta from the IC call instruction to the map check
|
// cmp instruction in the inlined version. This delta is stored in
|
// a test(eax, delta) instruction after the call so that we can find
|
// it in the IC initialization code and patch the cmp instruction.
|
// This means that we cannot allow test instructions after calls to
|
// KeyedLoadIC stubs in other places.
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
|
+ Handle<Code> ic(v8context->builtins_.builtin(Builtins::KeyedLoadIC_Initialize));
|
RelocInfo::Mode mode = is_global_
|
? RelocInfo::CODE_TARGET_CONTEXT
|
: RelocInfo::CODE_TARGET;
|
@@ -5967,7 +5969,7 @@
|
// Here we use masm_-> instead of the __ macro because this is the
|
// instruction that gets patched and coverage code gets in the way.
|
masm_->test(eax, Immediate(-delta_to_patch_site));
|
- __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
|
+ __ IncrementCounter(&v8context->counters_.keyed_load_inline_miss, 1);
|
|
if (!dst_.is(eax)) __ mov(dst_, eax);
|
__ pop(key_);
|
@@ -5997,14 +5999,14 @@
|
|
|
void DeferredReferenceSetKeyedValue::Generate() {
|
- __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_store_inline_miss, 1);
|
// Push receiver and key arguments on the stack.
|
__ push(receiver_);
|
__ push(key_);
|
// Move value argument to eax as expected by the IC stub.
|
if (!value_.is(eax)) __ mov(eax, value_);
|
// Call the IC stub.
|
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
|
+ Handle<Code> ic(v8_context()->builtins_.builtin(Builtins::KeyedStoreIC_Initialize));
|
__ call(ic, RelocInfo::CODE_TARGET);
|
// The delta from the start of the map-compare instruction to the
|
// test instruction. We use masm_-> directly here instead of the
|
@@ -6126,7 +6128,7 @@
|
int offset = kMaxInt;
|
masm->mov(value.reg(), FieldOperand(receiver.reg(), offset));
|
|
- __ IncrementCounter(&Counters::named_load_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.named_load_inline, 1);
|
deferred->BindExit();
|
cgen_->frame()->Push(&receiver);
|
cgen_->frame()->Push(&value);
|
@@ -6221,7 +6223,7 @@
|
index.Unuse();
|
__ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
|
deferred->Branch(equal);
|
- __ IncrementCounter(&Counters::keyed_load_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_inline, 1);
|
|
deferred->BindExit();
|
// Restore the receiver and key to the frame and push the
|
@@ -6390,7 +6392,7 @@
|
times_2,
|
FixedArray::kHeaderSize - kHeapObjectTag),
|
value.reg());
|
- __ IncrementCounter(&Counters::keyed_store_inline, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_store_inline, 1);
|
|
deferred->BindExit();
|
|
@@ -6518,7 +6520,7 @@
|
|
// Update flags to indicate that arguments are in registers.
|
SetArgsInRegisters();
|
- __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.generic_binary_stub_calls_regs, 1);
|
}
|
|
// Call the stub.
|
@@ -6550,7 +6552,7 @@
|
|
// Update flags to indicate that arguments are in registers.
|
SetArgsInRegisters();
|
- __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.generic_binary_stub_calls_regs, 1);
|
}
|
|
// Call the stub.
|
@@ -6581,7 +6583,7 @@
|
}
|
// Update flags to indicate that arguments are in registers.
|
SetArgsInRegisters();
|
- __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.generic_binary_stub_calls_regs, 1);
|
}
|
|
// Call the stub.
|
@@ -6736,7 +6738,7 @@
|
void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
|
Label call_runtime;
|
|
- __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.generic_binary_stub_calls, 1);
|
|
// Generate fast case smi code if requested. This flag is set when the fast
|
// case smi code is not generated by the caller. Generating it here will speed
|
@@ -7633,7 +7635,7 @@
|
__ Set(eax, Immediate(argc_));
|
__ Set(ebx, Immediate(0));
|
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
|
- Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
|
+ Handle<Code> adaptor(v8_context()->builtins_.builtin(Builtins::ArgumentsAdaptorTrampoline));
|
__ jmp(adaptor, RelocInfo::CODE_TARGET);
|
}
|
|
Index: src/log-utils.h
|
===================================================================
|
--- src/log-utils.h (revision 2936)
|
+++ src/log-utils.h Sat Nov 14 01:43:09 MSK 2009
|
@@ -124,7 +124,7 @@
|
static void Init();
|
|
// Write functions assume that mutex_ is acquired by the caller.
|
- static WritePtr Write;
|
+ static WritePtr Write;///static
|
|
// Implementation of writing to a log file.
|
static int WriteToFile(const char* msg, int length) {
|
@@ -141,16 +141,16 @@
|
}
|
|
// Whether logging is stopped (e.g. due to insufficient resources).
|
- static bool is_stopped_;
|
+ static bool is_stopped_; ///static
|
|
// When logging is active, either output_handle_ or output_buffer_ is used
|
// to store a pointer to log destination. If logging was opened via OpenStdout
|
// or OpenFile, then output_handle_ is used. If logging was opened
|
// via OpenMemoryBuffer, then output_buffer_ is used.
|
// mutex_ should be acquired before using output_handle_ or output_buffer_.
|
- static FILE* output_handle_;
|
+ static FILE* output_handle_; ///static
|
|
- static LogDynamicBuffer* output_buffer_;
|
+ static LogDynamicBuffer* output_buffer_; ///static
|
|
// Size of dynamic buffer block (and dynamic buffer initial size).
|
static const int kDynamicBufferBlockSize = 65536;
|
@@ -163,11 +163,11 @@
|
|
// mutex_ is a Mutex used for enforcing exclusive
|
// access to the formatting buffer and the log file or log memory buffer.
|
- static Mutex* mutex_;
|
+ static Mutex* mutex_; ///static
|
|
// Buffer used for formatting log messages. This is a singleton buffer and
|
// mutex_ should be acquired before using it.
|
- static char* message_buffer_;
|
+ static char* message_buffer_; ///static
|
|
friend class LogMessageBuilder;
|
friend class LogRecordCompressor;
|
@@ -278,7 +278,7 @@
|
}
|
|
private:
|
- static WriteFailureHandler write_failure_handler;
|
+ static WriteFailureHandler write_failure_handler; ///static
|
|
ScopedLock sl;
|
int pos_;
|
Index: src/v8-global-context.h
|
===================================================================
|
--- src/v8-global-context.h Sat Nov 14 01:43:12 MSK 2009
|
+++ src/v8-global-context.h Sat Nov 14 01:43:12 MSK 2009
|
@@ -0,0 +1,146 @@
|
+// Copyright 2009 the V8 project authors. All rights reserved.
|
+// Redistribution and use in source and binary forms, with or without
|
+// modification, are permitted provided that the following conditions are
|
+// met:
|
+//
|
+// * Redistributions of source code must retain the above copyright
|
+// notice, this list of conditions and the following disclaimer.
|
+// * Redistributions in binary form must reproduce the above
|
+// copyright notice, this list of conditions and the following
|
+// disclaimer in the documentation and/or other materials provided
|
+// with the distribution.
|
+// * Neither the name of Google Inc. nor the names of its
|
+// contributors may be used to endorse or promote products derived
|
+// from this software without specific prior written permission.
|
+//
|
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
+
|
+#ifndef V8_GLOBAL_CONTEXT_H_
|
+#define V8_GLOBAL_CONTEXT_H_
|
+#include "utils.h"
|
+#include "platform.h"
|
+
|
+namespace disasm {
|
+ class DisassemblerData;
|
+}
|
+
|
+namespace v8 {
|
+namespace internal {
|
+class ThreadManager;
|
+class V8;
|
+class StackGuard;
|
+class Top;
|
+class GlobalHandles;
|
+class Heap;
|
+class ContextSlotCache;
|
+class KeyedLookupCache;
|
+class DescriptorLookupCache;
|
+class TranscendentalCaches;
|
+class CompilationCache;
|
+class Counters;
|
+class StubCache;
|
+class MarkCompactCollector;
|
+class HandleScopeImplementer;
|
+class Logger;
|
+class Bootstrapper;
|
+class Builtins;
|
+class RelocatableData;
|
+class RegExpStack;
|
+class Scanner;
|
+class Compiler;
|
+class StorageData;
|
+class ZoneData;
|
+class ApiData;
|
+class CodeGeneratorData;
|
+class ObjectsData;
|
+class RuntimeData;
|
+class StatsTableData;
|
+class ExecutionData;
|
+class AssemblerData;
|
+class ExternalReferenceTable;
|
+#ifdef ENABLE_DEBUGGER_SUPPORT
|
+class Debug;
|
+class DebuggerAgent;
|
+#endif
|
+}
|
+
|
+class V8Context {
|
+public:
|
+ internal::ThreadManager& thread_manager_;
|
+ internal::V8& v8_;
|
+ internal::Top& top_;
|
+ internal::GlobalHandles& global_handles_;
|
+ internal::Heap& heap_;
|
+ internal::StackGuard& stack_guard_;
|
+ internal::ContextSlotCache& context_slot_cache_;
|
+ internal::DescriptorLookupCache& descriptor_lookup_cache_;
|
+ internal::KeyedLookupCache& keyed_lookup_cache_;
|
+ internal::TranscendentalCaches& transcendental_caches_;
|
+ internal::CompilationCache& compilation_cache_;
|
+ internal::Counters& counters_;
|
+ internal::MarkCompactCollector& mark_compact_collector_;
|
+ internal::StubCache& stub_cache_;
|
+ internal::HandleScopeImplementer& handle_scope_implementer_;
|
+ internal::Logger& logger_;
|
+ internal::Bootstrapper& bootstrapper_;
|
+ internal::Builtins& builtins_;
|
+ internal::RelocatableData& relocatable_data_;
|
+ internal::RegExpStack& reg_exp_stack_;
|
+ internal::Scanner& scanner_;
|
+ internal::Compiler& compiler_;
|
+ internal::StorageData& storage_data_;
|
+ internal::ZoneData& zone_data_;
|
+
|
+ internal::ApiData& api_data;
|
+ internal::ObjectsData* objects_data;
|
+ internal::CodeGeneratorData& code_generator_data_;
|
+ internal::RuntimeData* runtime_data_;
|
+ internal::ExecutionData* execution_data_;
|
+ internal::StatsTableData& stats_table_data_;
|
+ internal::ExternalReferenceTable* external_reference_table_;
|
+
|
+ #ifdef ENABLE_DEBUGGER_SUPPORT
|
+ internal::Debug& debug_;
|
+ internal::DebuggerAgent* debugger_agent_;
|
+ #endif
|
+
|
+ //#ifdef ENABLE_DISASSEMBLER
|
+ disasm::DisassemblerData* disassembler_data_;
|
+ //#endif
|
+
|
+ internal::AssemblerData* assembler_data_;
|
+
|
+ V8Context();
|
+ ~V8Context();
|
+private:
|
+ DISALLOW_COPY_AND_ASSIGN(V8Context);
|
+};
|
+
|
+#ifndef V8_SINGLE_THREADED
|
+void BindContext(V8Context*);
|
+extern internal::Thread::LocalStorageKey default_context;
|
+
|
+inline V8Context* v8_context() {
|
+ return reinterpret_cast<V8Context*>(internal::Thread::GetThreadLocal(default_context));
|
+}
|
+#else
|
+extern V8Context* default_context;
|
+
|
+inline V8Context* v8_context() {
|
+ return default_context;
|
+}
|
+#endif
|
+
|
+}
|
+
|
+#endif // V8_GLOBAL_CONTEXT_H_
|
Index: src/builtins.h
|
===================================================================
|
--- src/builtins.h (revision 3096)
|
+++ src/builtins.h Sat Nov 14 01:43:12 MSK 2009
|
@@ -153,18 +153,18 @@
|
class ObjectVisitor;
|
|
|
-class Builtins : public AllStatic {
|
+class Builtins {
|
public:
|
// Generate all builtin code objects. Should be called once during
|
// VM initialization.
|
- static void Setup(bool create_heap_objects);
|
- static void TearDown();
|
+ void Setup(bool create_heap_objects);
|
+ void TearDown();
|
|
// Garbage collection support.
|
- static void IterateBuiltins(ObjectVisitor* v);
|
+ void IterateBuiltins(ObjectVisitor* v);
|
|
// Disassembler support.
|
- static const char* Lookup(byte* pc);
|
+ const char* Lookup(byte* pc);
|
|
enum Name {
|
#define DEF_ENUM_C(name) name,
|
@@ -191,36 +191,36 @@
|
id_count
|
};
|
|
- static Code* builtin(Name name) {
|
+ Code* builtin(Name name) {
|
// Code::cast cannot be used here since we access builtins
|
// during the marking phase of mark sweep. See IC::Clear.
|
return reinterpret_cast<Code*>(builtins_[name]);
|
}
|
|
- static Address builtin_address(Name name) {
|
+ Address builtin_address(Name name) {
|
return reinterpret_cast<Address>(&builtins_[name]);
|
}
|
|
- static Address c_function_address(CFunctionId id) {
|
+ Address c_function_address(CFunctionId id) {
|
return c_functions_[id];
|
}
|
|
static const char* GetName(JavaScript id) { return javascript_names_[id]; }
|
static int GetArgumentsCount(JavaScript id) { return javascript_argc_[id]; }
|
- static Handle<Code> GetCode(JavaScript id, bool* resolved);
|
- static int NumberOfJavaScriptBuiltins() { return id_count; }
|
+ Handle<Code> GetCode(JavaScript id, bool* resolved);
|
+ int NumberOfJavaScriptBuiltins() { return id_count; }
|
|
- static Object* builtin_passed_function;
|
+ Object* builtin_passed_function;
|
|
private:
|
// The external C++ functions called from the code.
|
- static Address c_functions_[cfunction_count];
|
+ Address c_functions_[cfunction_count];
|
|
// Note: These are always Code objects, but to conform with
|
// IterateBuiltins() above which assumes Object**'s for the callback
|
// function f, we use an Object* array here.
|
- static Object* builtins_[builtin_count];
|
- static const char* names_[builtin_count];
|
+ Object* builtins_[builtin_count];
|
+ const char* names_[builtin_count];
|
static const char* javascript_names_[id_count];
|
static int javascript_argc_[id_count];
|
|
@@ -236,6 +236,11 @@
|
|
static void Generate_ArrayCode(MacroAssembler* masm);
|
static void Generate_ArrayConstructCode(MacroAssembler* masm);
|
+
|
+ bool is_initialized;
|
+ friend class V8Context;
|
+ Builtins();
|
+ DISALLOW_COPY_AND_ASSIGN(Builtins);
|
};
|
|
} } // namespace v8::internal
|
Index: src/ia32/disasm-ia32.cc
|
===================================================================
|
--- src/ia32/disasm-ia32.cc (revision 3141)
|
+++ src/ia32/disasm-ia32.cc Sat Nov 14 01:43:04 MSK 2009
|
@@ -51,7 +51,7 @@
|
};
|
|
|
-static ByteMnemonic two_operands_instr[] = {
|
+static const ByteMnemonic two_operands_instr[] = {
|
{0x03, "add", REG_OPER_OP_ORDER},
|
{0x21, "and", OPER_REG_OP_ORDER},
|
{0x23, "and", REG_OPER_OP_ORDER},
|
@@ -72,7 +72,7 @@
|
};
|
|
|
-static ByteMnemonic zero_operands_instr[] = {
|
+static const ByteMnemonic zero_operands_instr[] = {
|
{0xC3, "ret", UNSET_OP_ORDER},
|
{0xC9, "leave", UNSET_OP_ORDER},
|
{0x90, "nop", UNSET_OP_ORDER},
|
@@ -89,14 +89,14 @@
|
};
|
|
|
-static ByteMnemonic call_jump_instr[] = {
|
+static const ByteMnemonic call_jump_instr[] = {
|
{0xE8, "call", UNSET_OP_ORDER},
|
{0xE9, "jmp", UNSET_OP_ORDER},
|
{-1, "", UNSET_OP_ORDER}
|
};
|
|
|
-static ByteMnemonic short_immediate_instr[] = {
|
+static const ByteMnemonic short_immediate_instr[] = {
|
{0x05, "add", UNSET_OP_ORDER},
|
{0x0D, "or", UNSET_OP_ORDER},
|
{0x15, "adc", UNSET_OP_ORDER},
|
@@ -160,7 +160,7 @@
|
InstructionDesc instructions_[256];
|
void Clear();
|
void Init();
|
- void CopyTable(ByteMnemonic bm[], InstructionType type);
|
+ void CopyTable(const ByteMnemonic bm[], InstructionType type);
|
void SetTableRange(InstructionType type,
|
byte start,
|
byte end,
|
@@ -199,7 +199,7 @@
|
}
|
|
|
-void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
|
+void InstructionTable::CopyTable(const ByteMnemonic bm[], InstructionType type) {
|
for (int i = 0; bm[i].b >= 0; i++) {
|
InstructionDesc* id = &instructions_[bm[i].b];
|
id->mnem = bm[i].mnem;
|
@@ -1215,7 +1215,7 @@
|
|
|
const char* NameConverter::NameOfAddress(byte* addr) const {
|
- static v8::internal::EmbeddedVector<char, 32> tmp_buffer;
|
+ v8::internal::EmbeddedVector<char, 32>& tmp_buffer = v8::v8_context()->disassembler_data_->tmp_buffer_;
|
v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr);
|
return tmp_buffer.start();
|
}
|
Index: src/hashmap.cc
|
===================================================================
|
--- src/hashmap.cc (revision 2521)
|
+++ src/hashmap.cc Sat Nov 14 01:42:57 MSK 2009
|
@@ -32,7 +32,7 @@
|
namespace v8 {
|
namespace internal {
|
|
-Allocator HashMap::DefaultAllocator;
|
+Allocator HashMap::DefaultAllocator; ///static
|
|
|
HashMap::HashMap() {
|
@@ -195,7 +195,7 @@
|
ASSERT(IsPowerOf2(capacity));
|
map_ = reinterpret_cast<Entry*>(allocator_->New(capacity * sizeof(Entry)));
|
if (map_ == NULL) {
|
- V8::FatalProcessOutOfMemory("HashMap::Initialize");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("HashMap::Initialize");
|
return;
|
}
|
capacity_ = capacity;
|
Index: src/counters.h
|
===================================================================
|
--- src/counters.h (revision 2052)
|
+++ src/counters.h Sat Nov 14 01:43:01 MSK 2009
|
@@ -31,6 +31,16 @@
|
namespace v8 {
|
namespace internal {
|
|
+class StatsTableData {
|
+ CounterLookupCallback lookup_function_;
|
+ CreateHistogramCallback create_histogram_function_;
|
+ AddHistogramSampleCallback add_histogram_sample_function_;
|
+ StatsTableData();
|
+
|
+ friend class StatsTable;
|
+ friend class V8Context;
|
+};
|
+
|
// StatsCounters is an interface for plugging into external
|
// counters for monitoring. Counters can be looked up and
|
// manipulated by name.
|
@@ -40,23 +50,23 @@
|
// Register an application-defined function where
|
// counters can be looked up.
|
static void SetCounterFunction(CounterLookupCallback f) {
|
- lookup_function_ = f;
|
+ v8_context()->stats_table_data_.lookup_function_ = f;
|
}
|
|
// Register an application-defined function to create
|
// a histogram for passing to the AddHistogramSample function
|
static void SetCreateHistogramFunction(CreateHistogramCallback f) {
|
- create_histogram_function_ = f;
|
+ v8_context()->stats_table_data_.create_histogram_function_ = f;
|
}
|
|
// Register an application-defined function to add a sample
|
// to a histogram created with CreateHistogram function
|
static void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
|
- add_histogram_sample_function_ = f;
|
+ v8_context()->stats_table_data_.add_histogram_sample_function_ = f;
|
}
|
|
static bool HasCounterFunction() {
|
- return lookup_function_ != NULL;
|
+ return v8_context()->stats_table_data_.lookup_function_ != NULL;
|
}
|
|
// Lookup the location of a counter by name. If the lookup
|
@@ -66,8 +76,9 @@
|
// The return value must not be cached and re-used across
|
// threads, although a single thread is free to cache it.
|
static int *FindLocation(const char* name) {
|
- if (!lookup_function_) return NULL;
|
- return lookup_function_(name);
|
+ CounterLookupCallback lookup_function = v8_context()->stats_table_data_.lookup_function_;
|
+ if (!lookup_function) return NULL;
|
+ return lookup_function(name);
|
}
|
|
// Create a histogram by name. If the create is successful,
|
@@ -79,21 +90,20 @@
|
int min,
|
int max,
|
size_t buckets) {
|
- if (!create_histogram_function_) return NULL;
|
- return create_histogram_function_(name, min, max, buckets);
|
+ CreateHistogramCallback create_histogram_function = v8_context()->stats_table_data_.create_histogram_function_;
|
+ if (!create_histogram_function) return NULL;
|
+ return create_histogram_function(name, min, max, buckets);
|
}
|
|
// Add a sample to a histogram created with the CreateHistogram
|
// function.
|
static void AddHistogramSample(void* histogram, int sample) {
|
- if (!add_histogram_sample_function_) return;
|
- return add_histogram_sample_function_(histogram, sample);
|
+ AddHistogramSampleCallback add_histogram_sample_function = v8_context()->stats_table_data_.add_histogram_sample_function_;
|
+ if (!add_histogram_sample_function) return;
|
+ return add_histogram_sample_function(histogram, sample);
|
}
|
|
private:
|
- static CounterLookupCallback lookup_function_;
|
- static CreateHistogramCallback create_histogram_function_;
|
- static AddHistogramSampleCallback add_histogram_sample_function_;
|
};
|
|
// StatsCounters are dynamically created values which can be tracked in
|
Index: src/jump-target.h
|
===================================================================
|
--- src/jump-target.h (revision 3074)
|
+++ src/jump-target.h Sat Nov 14 01:43:03 MSK 2009
|
@@ -38,6 +38,28 @@
|
class Result;
|
class VirtualFrame;
|
|
+typedef ZoneList<Handle<Object> > ZoneObjectList;
|
+
|
+class CodeGeneratorData {
|
+ CodeGenerator* top_;
|
+ bool compiling_deferred_code_;
|
+ ZoneObjectList* result_constants_list_;
|
+ ZoneObjectList* frame_element_constants_list_;
|
+
|
+ ZoneObjectList* result_constants_list();
|
+
|
+ ZoneObjectList* frame_element_constants_list();
|
+
|
+ friend class CodeGeneratorScope;
|
+ friend class JumpTarget;
|
+ friend class V8Context;
|
+ friend class Result;
|
+ friend class FrameElement;
|
+
|
+ CodeGeneratorData();
|
+ ~CodeGeneratorData();
|
+};
|
+
|
// -------------------------------------------------------------------------
|
// Jump targets
|
//
|
@@ -129,7 +151,7 @@
|
void Call();
|
|
static void set_compiling_deferred_code(bool flag) {
|
- compiling_deferred_code_ = flag;
|
+ v8_context()->code_generator_data_.compiling_deferred_code_ = flag;
|
}
|
|
protected:
|
@@ -156,8 +178,7 @@
|
void DoBranch(Condition cc, Hint hint);
|
void DoBind();
|
|
- private:
|
+ private:
|
- static bool compiling_deferred_code_;
|
|
// Add a virtual frame reaching this labeled block via a forward jump,
|
// and a corresponding merge code label.
|
Index: src/disassembler.cc
|
===================================================================
|
--- src/disassembler.cc (revision 2308)
|
+++ src/disassembler.cc Sat Nov 14 01:43:23 MSK 2009
|
@@ -65,9 +65,10 @@
|
|
|
const char* V8NameConverter::NameOfAddress(byte* pc) const {
|
- static v8::internal::EmbeddedVector<char, 128> buffer;
|
+ V8Context * const v8context = v8_context();
|
+ EmbeddedVector<char, 128>& buffer = v8context->disassembler_data_->buffer_;
|
+ const char* name = v8context->builtins_.Lookup(pc);
|
-
|
+
|
- const char* name = Builtins::Lookup(pc);
|
if (name != NULL) {
|
OS::SNPrintF(buffer, "%s (%p)", name, pc);
|
return buffer.start();
|
@@ -252,8 +253,8 @@
|
} else if (kind == Code::STUB) {
|
// Reverse lookup required as the minor key cannot be retrieved
|
// from the code object.
|
- Object* obj = Heap::code_stubs()->SlowReverseLookup(code);
|
- if (obj != Heap::undefined_value()) {
|
+ Object* obj = v8_context()->heap_.code_stubs()->SlowReverseLookup(code);
|
+ if (obj != v8_context()->heap_.undefined_value()) {
|
ASSERT(obj->IsSmi());
|
// Get the STUB key and extract major and minor key.
|
uint32_t key = Smi::cast(obj)->value();
|
@@ -307,6 +308,13 @@
|
DecodeIt(f, v8NameConverter, begin, end);
|
}
|
|
+void Disassembler::Setup() {
|
+ v8_context()->disassembler_data_ = new disasm::DisassemblerData();
|
+}
|
+
|
+void Disassembler::TearDown() {
|
+ delete v8_context()->disassembler_data_;
|
+}
|
#else // ENABLE_DISASSEMBLER
|
|
void Disassembler::Dump(FILE* f, byte* begin, byte* end) {}
|
Index: test/cctest/test-heap-profiler.cc
|
===================================================================
|
--- test/cctest/test-heap-profiler.cc (revision 2972)
|
+++ test/cctest/test-heap-profiler.cc Sun Nov 15 12:42:05 MSK 2009
|
@@ -15,6 +15,7 @@
|
using i::JSObjectsRetainerTree;
|
using i::JSObjectsClusterTree;
|
using i::RetainerHeapProfile;
|
+using v8::v8_context;
|
|
|
static void CompileAndRunScript(const char *src) {
|
@@ -149,25 +150,25 @@
|
i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
|
|
JSObjectsRetainerTree tree;
|
- JSObjectsCluster function(i::Heap::function_class_symbol());
|
+ JSObjectsCluster function(v8_context()->heap_.function_class_symbol());
|
JSObjectsCluster a(*i::Factory::NewStringFromAscii(i::CStrVector("A")));
|
JSObjectsCluster b(*i::Factory::NewStringFromAscii(i::CStrVector("B")));
|
|
// o1 <- Function
|
JSObjectsCluster o1 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x100, &function);
|
// o2 <- Function
|
JSObjectsCluster o2 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x200, &function);
|
// o3 <- A, B
|
JSObjectsCluster o3 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &a, &b);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x300, &a, &b);
|
// o4 <- B, A
|
JSObjectsCluster o4 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x400, &b, &a);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x400, &b, &a);
|
// o5 <- A, B, Function
|
JSObjectsCluster o5 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x500,
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x500,
|
&a, &b, &function);
|
|
ClustersCoarser coarser;
|
@@ -188,20 +189,20 @@
|
i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
|
|
JSObjectsRetainerTree tree;
|
- JSObjectsCluster function(i::Heap::function_class_symbol());
|
+ JSObjectsCluster function(v8_context()->heap_.function_class_symbol());
|
|
// o1 <- Function
|
JSObjectsCluster o1 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x100, &function);
|
// a1 <- Function
|
JSObjectsCluster a1 =
|
- AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x1000, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Array_symbol(), 0x1000, &function);
|
// o2 <- Function
|
JSObjectsCluster o2 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x200, &function);
|
// a2 <- Function
|
JSObjectsCluster a2 =
|
- AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x2000, &function);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Array_symbol(), 0x2000, &function);
|
|
ClustersCoarser coarser;
|
coarser.Process(&tree);
|
@@ -232,21 +233,21 @@
|
// o21 ~ o22, and o11 ~ o12.
|
|
JSObjectsCluster o =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x100);
|
JSObjectsCluster o11 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x110, &o);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x110, &o);
|
JSObjectsCluster o12 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x120, &o);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x120, &o);
|
JSObjectsCluster o21 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x210, &o11);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x210, &o11);
|
JSObjectsCluster o22 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x220, &o12);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x220, &o12);
|
JSObjectsCluster p =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &o21);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x300, &o21);
|
JSObjectsCluster q =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x310, &o21, &o22);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x310, &o21, &o22);
|
JSObjectsCluster r =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x320, &o22);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x320, &o22);
|
|
ClustersCoarser coarser;
|
coarser.Process(&tree);
|
@@ -284,19 +285,19 @@
|
// we expect that coarser will deduce equivalences: p ~ q ~ r, o1 ~ o2;
|
|
JSObjectsCluster o =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x100);
|
JSObjectsCluster o1 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x110, &o);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x110, &o);
|
JSObjectsCluster o2 =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x120, &o);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x120, &o);
|
JSObjectsCluster p =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &o1);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x300, &o1);
|
AddSelfReferenceToTree(&tree, &p);
|
JSObjectsCluster q =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x310, &o1, &o2);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x310, &o1, &o2);
|
AddSelfReferenceToTree(&tree, &q);
|
JSObjectsCluster r =
|
- AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x320, &o2);
|
+ AddHeapObjectToTree(&tree, v8_context()->heap_.Object_symbol(), 0x320, &o2);
|
AddSelfReferenceToTree(&tree, &r);
|
|
ClustersCoarser coarser;
|
Index: src/debug.h
|
===================================================================
|
--- src/debug.h (revision 3215)
|
+++ src/debug.h Sat Nov 14 01:43:00 MSK 2009
|
@@ -204,6 +204,7 @@
|
DebugInfoListNode* next_;
|
};
|
|
+class Debugger;
|
|
// This class contains the debugger support. The main purpose is to handle
|
// setting break points in the code.
|
@@ -214,108 +215,110 @@
|
// DebugInfo.
|
class Debug {
|
public:
|
- static void Setup(bool create_heap_objects);
|
- static bool Load();
|
- static void Unload();
|
- static bool IsLoaded() { return !debug_context_.is_null(); }
|
- static bool InDebugger() { return thread_local_.debugger_entry_ != NULL; }
|
- static void PreemptionWhileInDebugger();
|
- static void Iterate(ObjectVisitor* v);
|
+ void Setup(bool create_heap_objects);
|
+ bool Load();
|
+ void Unload();
|
+ bool IsLoaded() { return !debug_context_.is_null(); }
|
+ bool InDebugger() { return thread_local_.debugger_entry_ != NULL; }
|
+ void PreemptionWhileInDebugger();
|
+ void Iterate(ObjectVisitor* v);
|
|
static Object* Break(Arguments args);
|
- static void SetBreakPoint(Handle<SharedFunctionInfo> shared,
|
+ void SetBreakPoint(Handle<SharedFunctionInfo> shared,
|
int source_position,
|
Handle<Object> break_point_object);
|
- static void ClearBreakPoint(Handle<Object> break_point_object);
|
- static void ClearAllBreakPoints();
|
- static void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
|
- static void FloodHandlerWithOneShot();
|
- static void ChangeBreakOnException(ExceptionBreakType type, bool enable);
|
- static void PrepareStep(StepAction step_action, int step_count);
|
- static void ClearStepping();
|
- static bool StepNextContinue(BreakLocationIterator* break_location_iterator,
|
+ void ClearBreakPoint(Handle<Object> break_point_object);
|
+ void ClearAllBreakPoints();
|
+ void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
|
+ void FloodHandlerWithOneShot();
|
+ void ChangeBreakOnException(ExceptionBreakType type, bool enable);
|
+ void PrepareStep(StepAction step_action, int step_count);
|
+ void ClearStepping();
|
+ bool StepNextContinue(BreakLocationIterator* break_location_iterator,
|
JavaScriptFrame* frame);
|
- static Handle<DebugInfo> GetDebugInfo(Handle<SharedFunctionInfo> shared);
|
- static bool HasDebugInfo(Handle<SharedFunctionInfo> shared);
|
+ Handle<DebugInfo> GetDebugInfo(Handle<SharedFunctionInfo> shared);
|
+ bool HasDebugInfo(Handle<SharedFunctionInfo> shared);
|
|
// Returns whether the operation succeeded.
|
- static bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared);
|
+ bool EnsureDebugInfo(Handle<SharedFunctionInfo> shared);
|
|
// Returns true if the current stub call is patched to call the debugger.
|
- static bool IsDebugBreak(Address addr);
|
+ bool IsDebugBreak(Address addr);
|
// Returns true if the current return statement has been patched to be
|
// a debugger breakpoint.
|
- static bool IsDebugBreakAtReturn(RelocInfo* rinfo);
|
+ bool IsDebugBreakAtReturn(RelocInfo* rinfo);
|
|
// Check whether a code stub with the specified major key is a possible break
|
// point location.
|
- static bool IsSourceBreakStub(Code* code);
|
- static bool IsBreakStub(Code* code);
|
+ bool IsSourceBreakStub(Code* code);
|
+ bool IsBreakStub(Code* code);
|
|
// Find the builtin to use for invoking the debug break
|
- static Handle<Code> FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode);
|
+ Handle<Code> FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode);
|
|
- static Handle<Object> GetSourceBreakLocations(
|
+ Handle<Object> GetSourceBreakLocations(
|
Handle<SharedFunctionInfo> shared);
|
|
// Getter for the debug_context.
|
- inline static Handle<Context> debug_context() { return debug_context_; }
|
+ inline Handle<Context> debug_context() { return debug_context_; }
|
|
+ inline Debugger* debugger() const { return debugger_; }
|
+
|
// Check whether a global object is the debug global object.
|
- static bool IsDebugGlobal(GlobalObject* global);
|
+ bool IsDebugGlobal(GlobalObject* global);
|
|
// Fast check to see if any break points are active.
|
- inline static bool has_break_points() { return has_break_points_; }
|
+ inline bool has_break_points() { return has_break_points_; }
|
|
- static void NewBreak(StackFrame::Id break_frame_id);
|
- static void SetBreak(StackFrame::Id break_frame_id, int break_id);
|
- static StackFrame::Id break_frame_id() {
|
+ void NewBreak(StackFrame::Id break_frame_id);
|
+ void SetBreak(StackFrame::Id break_frame_id, int break_id);
|
+ StackFrame::Id break_frame_id() {
|
return thread_local_.break_frame_id_;
|
}
|
- static int break_id() { return thread_local_.break_id_; }
|
+ int break_id() { return thread_local_.break_id_; }
|
|
- static bool StepInActive() { return thread_local_.step_into_fp_ != 0; }
|
- static void HandleStepIn(Handle<JSFunction> function,
|
+ bool StepInActive() { return thread_local_.step_into_fp_ != 0; }
|
+ void HandleStepIn(Handle<JSFunction> function,
|
Handle<Object> holder,
|
Address fp,
|
bool is_constructor);
|
- static Address step_in_fp() { return thread_local_.step_into_fp_; }
|
- static Address* step_in_fp_addr() { return &thread_local_.step_into_fp_; }
|
+ Address step_in_fp() { return thread_local_.step_into_fp_; }
|
+ static Address* step_in_fp_addr() { return &v8_context()->debug_.thread_local_.step_into_fp_; }
|
|
- static bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }
|
- static Address step_out_fp() { return thread_local_.step_out_fp_; }
|
+ bool StepOutActive() { return thread_local_.step_out_fp_ != 0; }
|
+ Address step_out_fp() { return thread_local_.step_out_fp_; }
|
|
- static EnterDebugger* debugger_entry() {
|
+ EnterDebugger* debugger_entry() {
|
return thread_local_.debugger_entry_;
|
}
|
- static void set_debugger_entry(EnterDebugger* entry) {
|
+ void set_debugger_entry(EnterDebugger* entry) {
|
thread_local_.debugger_entry_ = entry;
|
}
|
|
// Check whether any of the specified interrupts are pending.
|
- static bool is_interrupt_pending(InterruptFlag what) {
|
+ bool is_interrupt_pending(InterruptFlag what) {
|
return (thread_local_.pending_interrupts_ & what) != 0;
|
}
|
|
// Set specified interrupts as pending.
|
- static void set_interrupts_pending(InterruptFlag what) {
|
+ void set_interrupts_pending(InterruptFlag what) {
|
thread_local_.pending_interrupts_ |= what;
|
}
|
|
// Clear specified interrupts from pending.
|
- static void clear_interrupt_pending(InterruptFlag what) {
|
+ void clear_interrupt_pending(InterruptFlag what) {
|
thread_local_.pending_interrupts_ &= ~static_cast<int>(what);
|
}
|
|
// Getter and setter for the disable break state.
|
- static bool disable_break() { return disable_break_; }
|
- static void set_disable_break(bool disable_break) {
|
+ bool disable_break() { return disable_break_; }
|
+ void set_disable_break(bool disable_break) {
|
disable_break_ = disable_break;
|
}
|
|
// Getters for the current exception break state.
|
- static bool break_on_exception() { return break_on_exception_; }
|
- static bool break_on_uncaught_exception() {
|
+ bool break_on_exception() { return break_on_exception_; }
|
+ bool break_on_uncaught_exception() {
|
return break_on_uncaught_exception_;
|
}
|
|
@@ -326,18 +329,18 @@
|
};
|
|
// Support for setting the address to jump to when returning from break point.
|
- static Address* after_break_target_address() {
|
+ Address* after_break_target_address() {
|
return reinterpret_cast<Address*>(&thread_local_.after_break_target_);
|
}
|
|
// Support for saving/restoring registers when handling debug break calls.
|
- static Object** register_address(int r) {
|
+ Object** register_address(int r) {
|
    return &registers_[r];
|
}
|
|
// Access to the debug break on return code.
|
- static Code* debug_break_return() { return debug_break_return_; }
|
- static Code** debug_break_return_address() {
|
+ Code* debug_break_return() { return debug_break_return_; }
|
+ Code** debug_break_return_address() {
|
return &debug_break_return_;
|
}
|
|
@@ -351,22 +354,22 @@
|
friend void CheckDebuggerUnloaded(bool check_functions); // In test-debug.cc
|
|
// Threading support.
|
- static char* ArchiveDebug(char* to);
|
- static char* RestoreDebug(char* from);
|
- static int ArchiveSpacePerThread();
|
- static void FreeThreadResources() { }
|
+ char* ArchiveDebug(char* to);
|
+ char* RestoreDebug(char* from);
|
+ int ArchiveSpacePerThread();
|
+ void FreeThreadResources() { }
|
|
// Mirror cache handling.
|
- static void ClearMirrorCache();
|
+ void ClearMirrorCache();
|
|
// Script cache handling.
|
- static void CreateScriptCache();
|
- static void DestroyScriptCache();
|
- static void AddScriptToScriptCache(Handle<Script> script);
|
- static Handle<FixedArray> GetLoadedScripts();
|
+ void CreateScriptCache();
|
+ void DestroyScriptCache();
|
+ void AddScriptToScriptCache(Handle<Script> script);
|
+ Handle<FixedArray> GetLoadedScripts();
|
|
// Garbage collection notifications.
|
- static void AfterGarbageCollection();
|
+ void AfterGarbageCollection();
|
|
// Code generation assumptions.
|
static const int kIa32CallInstructionLength = 5;
|
@@ -390,38 +393,38 @@
|
|
// Called from stub-cache.cc.
|
static void GenerateCallICDebugBreak(MacroAssembler* masm);
|
-
|
+ v8::Debug::MessageHandler message_handler_;
|
private:
|
- static bool CompileDebuggerScript(int index);
|
- static void ClearOneShot();
|
- static void ActivateStepIn(StackFrame* frame);
|
- static void ClearStepIn();
|
- static void ActivateStepOut(StackFrame* frame);
|
- static void ClearStepOut();
|
- static void ClearStepNext();
|
+ bool CompileDebuggerScript(int index);
|
+ void ClearOneShot();
|
+ void ActivateStepIn(StackFrame* frame);
|
+ void ClearStepIn();
|
+ void ActivateStepOut(StackFrame* frame);
|
+ void ClearStepOut();
|
+ void ClearStepNext();
|
// Returns whether the compile succeeded.
|
- static bool EnsureCompiled(Handle<SharedFunctionInfo> shared);
|
- static void RemoveDebugInfo(Handle<DebugInfo> debug_info);
|
- static void SetAfterBreakTarget(JavaScriptFrame* frame);
|
- static Handle<Object> CheckBreakPoints(Handle<Object> break_point);
|
- static bool CheckBreakPoint(Handle<Object> break_point_object);
|
+ bool EnsureCompiled(Handle<SharedFunctionInfo> shared);
|
+ void RemoveDebugInfo(Handle<DebugInfo> debug_info);
|
+ void SetAfterBreakTarget(JavaScriptFrame* frame);
|
+ Handle<Object> CheckBreakPoints(Handle<Object> break_point);
|
+ bool CheckBreakPoint(Handle<Object> break_point_object);
|
|
// Global handle to debug context where all the debugger JavaScript code is
|
// loaded.
|
- static Handle<Context> debug_context_;
|
+ Handle<Context> debug_context_;
|
|
// Boolean state indicating whether any break points are set.
|
- static bool has_break_points_;
|
+ bool has_break_points_;
|
|
// Cache of all scripts in the heap.
|
- static ScriptCache* script_cache_;
|
+ ScriptCache* script_cache_;
|
|
// List of active debug info objects.
|
- static DebugInfoListNode* debug_info_list_;
|
+ DebugInfoListNode* debug_info_list_;
|
|
- static bool disable_break_;
|
- static bool break_on_exception_;
|
- static bool break_on_uncaught_exception_;
|
+ bool disable_break_;
|
+ bool break_on_exception_;
|
+ bool break_on_uncaught_exception_;
|
|
// Per-thread data.
|
class ThreadLocal {
|
@@ -465,14 +468,19 @@
|
};
|
|
// Storage location for registers when handling debug break calls
|
- static JSCallerSavedBuffer registers_;
|
- static ThreadLocal thread_local_;
|
- static void ThreadInit();
|
+ JSCallerSavedBuffer registers_;
|
+ ThreadLocal thread_local_;
|
+ void ThreadInit();
|
|
// Code to call for handling debug break on return.
|
- static Code* debug_break_return_;
|
+ Code* debug_break_return_;
|
|
+ Debugger* const debugger_;
|
+
|
DISALLOW_COPY_AND_ASSIGN(Debug);
|
+ Debug();
|
+ ~Debug();
|
+ friend class V8Context;
|
};
|
|
|
@@ -589,73 +597,73 @@
|
|
class Debugger {
|
public:
|
- static void DebugRequest(const uint16_t* json_request, int length);
|
+ void DebugRequest(const uint16_t* json_request, int length);
|
|
- static Handle<Object> MakeJSObject(Vector<const char> constructor_name,
|
+ Handle<Object> MakeJSObject(Vector<const char> constructor_name,
|
int argc, Object*** argv,
|
bool* caught_exception);
|
- static Handle<Object> MakeExecutionState(bool* caught_exception);
|
- static Handle<Object> MakeBreakEvent(Handle<Object> exec_state,
|
+ Handle<Object> MakeExecutionState(bool* caught_exception);
|
+ Handle<Object> MakeBreakEvent(Handle<Object> exec_state,
|
Handle<Object> break_points_hit,
|
bool* caught_exception);
|
- static Handle<Object> MakeExceptionEvent(Handle<Object> exec_state,
|
+ Handle<Object> MakeExceptionEvent(Handle<Object> exec_state,
|
Handle<Object> exception,
|
bool uncaught,
|
bool* caught_exception);
|
- static Handle<Object> MakeNewFunctionEvent(Handle<Object> func,
|
+ Handle<Object> MakeNewFunctionEvent(Handle<Object> func,
|
bool* caught_exception);
|
- static Handle<Object> MakeCompileEvent(Handle<Script> script,
|
+ Handle<Object> MakeCompileEvent(Handle<Script> script,
|
bool before,
|
bool* caught_exception);
|
- static Handle<Object> MakeScriptCollectedEvent(int id,
|
+ Handle<Object> MakeScriptCollectedEvent(int id,
|
bool* caught_exception);
|
- static void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue);
|
- static void OnException(Handle<Object> exception, bool uncaught);
|
- static void OnBeforeCompile(Handle<Script> script);
|
- static void OnAfterCompile(Handle<Script> script,
|
+ void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue);
|
+ void OnException(Handle<Object> exception, bool uncaught);
|
+ void OnBeforeCompile(Handle<Script> script);
|
+ void OnAfterCompile(Handle<Script> script,
|
Handle<JSFunction> fun);
|
- static void OnNewFunction(Handle<JSFunction> fun);
|
- static void OnScriptCollected(int id);
|
- static void ProcessDebugEvent(v8::DebugEvent event,
|
+ void OnNewFunction(Handle<JSFunction> fun);
|
+ void OnScriptCollected(int id);
|
+ void ProcessDebugEvent(v8::DebugEvent event,
|
Handle<JSObject> event_data,
|
bool auto_continue);
|
- static void NotifyMessageHandler(v8::DebugEvent event,
|
+ void NotifyMessageHandler(v8::DebugEvent event,
|
Handle<JSObject> exec_state,
|
Handle<JSObject> event_data,
|
bool auto_continue);
|
- static void SetEventListener(Handle<Object> callback, Handle<Object> data);
|
- static void SetMessageHandler(v8::Debug::MessageHandler2 handler);
|
- static void SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
|
+ void SetEventListener(Handle<Object> callback, Handle<Object> data);
|
+ void SetMessageHandler(v8::Debug::MessageHandler2 handler);
|
+ void SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
|
int period);
|
|
// Invoke the message handler function.
|
- static void InvokeMessageHandler(MessageImpl message);
|
+ void InvokeMessageHandler(MessageImpl message);
|
|
// Add a debugger command to the command queue.
|
- static void ProcessCommand(Vector<const uint16_t> command,
|
+ void ProcessCommand(Vector<const uint16_t> command,
|
v8::Debug::ClientData* client_data = NULL);
|
|
// Check whether there are commands in the command queue.
|
- static bool HasCommands();
|
+ bool HasCommands();
|
|
- static Handle<Object> Call(Handle<JSFunction> fun,
|
+ Handle<Object> Call(Handle<JSFunction> fun,
|
Handle<Object> data,
|
bool* pending_exception);
|
|
// Start the debugger agent listening on the provided port.
|
- static bool StartAgent(const char* name, int port);
|
+ bool StartAgent(const char* name, int port);
|
|
// Stop the debugger agent.
|
- static void StopAgent();
|
+ void StopAgent();
|
|
// Blocks until the agent has started listening for connections
|
- static void WaitForAgent();
|
+ void WaitForAgent();
|
|
// Unload the debugger if possible. Only called when no debugger is currently
|
// active.
|
- static void UnloadDebugger();
|
+ void UnloadDebugger();
|
|
- inline static bool EventActive(v8::DebugEvent event) {
|
+ inline bool EventActive(v8::DebugEvent event) {
|
ScopedLock with(debugger_access_);
|
|
// Check whether the message handler was been cleared.
|
@@ -667,35 +675,39 @@
|
return !compiling_natives_ && Debugger::IsDebuggerActive();
|
}
|
|
- static void set_compiling_natives(bool compiling_natives) {
|
+ void set_compiling_natives(bool compiling_natives) {
|
Debugger::compiling_natives_ = compiling_natives;
|
}
|
- static bool compiling_natives() { return Debugger::compiling_natives_; }
|
- static void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
|
- static bool is_loading_debugger() { return Debugger::is_loading_debugger_; }
|
+ bool compiling_natives() { return Debugger::compiling_natives_; }
|
+ void set_loading_debugger(bool v) { is_loading_debugger_ = v; }
|
+ bool is_loading_debugger() { return Debugger::is_loading_debugger_; }
|
|
private:
|
- static bool IsDebuggerActive();
|
- static void ListenersChanged();
|
+ bool IsDebuggerActive();
|
+ void ListenersChanged();
|
|
- static Mutex* debugger_access_; // Mutex guarding debugger variables.
|
- static Handle<Object> event_listener_; // Global handle to listener.
|
- static Handle<Object> event_listener_data_;
|
- static bool compiling_natives_; // Are we compiling natives?
|
- static bool is_loading_debugger_; // Are we loading the debugger?
|
- static bool never_unload_debugger_; // Can we unload the debugger?
|
- static v8::Debug::MessageHandler2 message_handler_;
|
- static bool debugger_unload_pending_; // Was message handler cleared?
|
- static v8::Debug::HostDispatchHandler host_dispatch_handler_;
|
- static int host_dispatch_micros_;
|
+ Mutex* debugger_access_; // Mutex guarding debugger variables.
|
+ Handle<Object> event_listener_; // Global handle to listener.
|
+ Handle<Object> event_listener_data_;
|
+ bool compiling_natives_; // Are we compiling natives?
|
+ bool is_loading_debugger_; // Are we loading the debugger?
|
+ bool never_unload_debugger_; // Can we unload the debugger?
|
+ v8::Debug::MessageHandler2 message_handler_;
|
+ bool debugger_unload_pending_; // Was message handler cleared?
|
+ v8::Debug::HostDispatchHandler host_dispatch_handler_;
|
+ int host_dispatch_micros_;
|
|
- static DebuggerAgent* agent_;
|
+ DebuggerAgent* agent_;
|
+ Debug* const debug_;
|
|
static const int kQueueInitialSize = 4;
|
- static LockingCommandMessageQueue command_queue_;
|
- static Semaphore* command_received_; // Signaled for each command received.
|
+ LockingCommandMessageQueue command_queue_;
|
+ Semaphore* command_received_; // Signaled for each command received.
|
|
+ Debugger(Debug* debug);
|
+ ~Debugger();
|
friend class EnterDebugger;
|
+ friend class Debug;
|
};
|
|
|
@@ -706,82 +718,89 @@
|
class EnterDebugger BASE_EMBEDDED {
|
public:
|
EnterDebugger()
|
- : prev_(Debug::debugger_entry()),
|
+ : prev_(v8_context()->debug_.debugger_entry()),
|
has_js_frames_(!it_.done()) {
|
- ASSERT(prev_ != NULL || !Debug::is_interrupt_pending(PREEMPT));
|
- ASSERT(prev_ != NULL || !Debug::is_interrupt_pending(DEBUGBREAK));
|
|
+ Debug & debug = v8_context()->debug_;
|
+ ASSERT(prev_ != NULL || !debug.is_interrupt_pending(PREEMPT));
|
+ ASSERT(prev_ != NULL || !debug.is_interrupt_pending(DEBUGBREAK));
|
+
|
+
|
// Link recursive debugger entry.
|
- Debug::set_debugger_entry(this);
|
+ debug.set_debugger_entry(this);
|
|
// Store the previous break id and frame id.
|
- break_id_ = Debug::break_id();
|
- break_frame_id_ = Debug::break_frame_id();
|
+ break_id_ = debug.break_id();
|
+ break_frame_id_ = debug.break_frame_id();
|
|
// Create the new break info. If there is no JavaScript frames there is no
|
// break frame id.
|
if (has_js_frames_) {
|
- Debug::NewBreak(it_.frame()->id());
|
+ debug.NewBreak(it_.frame()->id());
|
} else {
|
- Debug::NewBreak(StackFrame::NO_ID);
|
+ debug.NewBreak(StackFrame::NO_ID);
|
}
|
|
// Make sure that debugger is loaded and enter the debugger context.
|
- load_failed_ = !Debug::Load();
|
+ load_failed_ = !debug.Load();
|
if (!load_failed_) {
|
// NOTE the member variable save which saves the previous context before
|
// this change.
|
- Top::set_context(*Debug::debug_context());
|
+ v8_context()->top_.set_context(*debug.debug_context());
|
}
|
}
|
|
~EnterDebugger() {
|
+ Debug & debug = v8_context()->debug_;
|
// Restore to the previous break state.
|
- Debug::SetBreak(break_frame_id_, break_id_);
|
+ debug.SetBreak(break_frame_id_, break_id_);
|
|
// Check for leaving the debugger.
|
if (prev_ == NULL) {
|
+ StackGuard& stack_guard = v8_context()->stack_guard_;
|
+
|
// Clear mirror cache when leaving the debugger. Skip this if there is a
|
// pending exception as clearing the mirror cache calls back into
|
// JavaScript. This can happen if the v8::Debug::Call is used in which
|
// case the exception should end up in the calling code.
|
- if (!Top::has_pending_exception()) {
|
+ if (!v8_context()->top_.has_pending_exception()) {
|
// Try to avoid any pending debug break breaking in the clear mirror
|
// cache JavaScript code.
|
- if (StackGuard::IsDebugBreak()) {
|
- Debug::set_interrupts_pending(DEBUGBREAK);
|
- StackGuard::Continue(DEBUGBREAK);
|
+ if (stack_guard.IsDebugBreak()) {
|
+ debug.set_interrupts_pending(DEBUGBREAK);
|
+ stack_guard.Continue(DEBUGBREAK);
|
}
|
- Debug::ClearMirrorCache();
|
+ debug.ClearMirrorCache();
|
}
|
|
// Request preemption and debug break when leaving the last debugger entry
|
// if any of these where recorded while debugging.
|
- if (Debug::is_interrupt_pending(PREEMPT)) {
|
+ if (debug.is_interrupt_pending(PREEMPT)) {
|
// This re-scheduling of preemption is to avoid starvation in some
|
// debugging scenarios.
|
- Debug::clear_interrupt_pending(PREEMPT);
|
- StackGuard::Preempt();
|
+ debug.clear_interrupt_pending(PREEMPT);
|
+ stack_guard.Preempt();
|
}
|
- if (Debug::is_interrupt_pending(DEBUGBREAK)) {
|
- Debug::clear_interrupt_pending(DEBUGBREAK);
|
- StackGuard::DebugBreak();
|
+ if (debug.is_interrupt_pending(DEBUGBREAK)) {
|
+ debug.clear_interrupt_pending(DEBUGBREAK);
|
+ stack_guard.DebugBreak();
|
}
|
|
+ Debugger* const debugger = debug.debugger();
|
// If there are commands in the queue when leaving the debugger request
|
// that these commands are processed.
|
- if (Debugger::HasCommands()) {
|
- StackGuard::DebugCommand();
|
+ if (debugger->HasCommands()) {
|
+ stack_guard.DebugCommand();
|
}
|
|
// If leaving the debugger with the debugger no longer active unload it.
|
- if (!Debugger::IsDebuggerActive()) {
|
- Debugger::UnloadDebugger();
|
+ if (!debugger->IsDebuggerActive()) {
|
+ debugger->UnloadDebugger();
|
}
|
}
|
|
// Leaving this debugger entry.
|
- Debug::set_debugger_entry(prev_);
|
+ debug.set_debugger_entry(prev_);
|
}
|
|
// Check whether the debugger could be entered.
|
@@ -810,11 +829,12 @@
|
// Enter the debugger by storing the previous top context and setting the
|
// current top context to the debugger context.
|
explicit DisableBreak(bool disable_break) {
|
- prev_disable_break_ = Debug::disable_break();
|
- Debug::set_disable_break(disable_break);
|
+ Debug & debug = v8_context()->debug_;
|
+ prev_disable_break_ = debug.disable_break();
|
+ debug.set_disable_break(disable_break);
|
}
|
~DisableBreak() {
|
- Debug::set_disable_break(prev_disable_break_);
|
+ v8_context()->debug_.set_disable_break(prev_disable_break_);
|
}
|
|
private:
|
@@ -846,13 +866,14 @@
|
}
|
|
Address address() const {
|
+ Debug & debug = v8_context()->debug_;
|
switch (id_) {
|
case Debug::k_after_break_target_address:
|
- return reinterpret_cast<Address>(Debug::after_break_target_address());
|
+ return reinterpret_cast<Address>(debug.after_break_target_address());
|
case Debug::k_debug_break_return_address:
|
- return reinterpret_cast<Address>(Debug::debug_break_return_address());
|
+ return reinterpret_cast<Address>(debug.debug_break_return_address());
|
case Debug::k_register_address:
|
- return reinterpret_cast<Address>(Debug::register_address(reg_));
|
+ return reinterpret_cast<Address>(debug.register_address(reg_));
|
default:
|
UNREACHABLE();
|
return NULL;
|
Index: test/cctest/test-debug.cc
|
===================================================================
|
--- test/cctest/test-debug.cc (revision 3229)
|
+++ test/cctest/test-debug.cc Sun Nov 15 12:40:21 MSK 2009
|
@@ -53,8 +53,8 @@
|
using ::v8::internal::StepNext; // From StepAction enum
|
using ::v8::internal::StepOut; // From StepAction enum
|
using ::v8::internal::Vector;
|
+using ::v8::v8_context;
|
|
-
|
// Size of temp buffer for formatting small strings.
|
#define SMALL_STRING_BUFFER_SIZE 80
|
|
@@ -140,8 +140,8 @@
|
inline bool IsReady() { return !context_.IsEmpty(); }
|
void ExposeDebug() {
|
// Expose the debug context global object in the global object for testing.
|
- Debug::Load();
|
- Debug::debug_context()->set_security_token(
|
+ v8_context()->debug_.Load();
|
+ v8_context()->debug_.debug_context()->set_security_token(
|
v8::Utils::OpenHandle(*context_)->security_token());
|
|
Handle<JSGlobalProxy> global(Handle<JSGlobalProxy>::cast(
|
@@ -149,7 +149,7 @@
|
Handle<v8::internal::String> debug_string =
|
v8::internal::Factory::LookupAsciiSymbol("debug");
|
SetProperty(global, debug_string,
|
- Handle<Object>(Debug::debug_context()->global_proxy()), DONT_ENUM);
|
+ Handle<Object>(v8_context()->debug_.debug_context()->global_proxy()), DONT_ENUM);
|
}
|
private:
|
v8::Persistent<v8::Context> context_;
|
@@ -182,7 +182,7 @@
|
static bool HasDebugInfo(v8::Handle<v8::Function> fun) {
|
Handle<v8::internal::JSFunction> f = v8::Utils::OpenHandle(*fun);
|
Handle<v8::internal::SharedFunctionInfo> shared(f->shared());
|
- return Debug::HasDebugInfo(shared);
|
+ return v8_context()->debug_.HasDebugInfo(shared);
|
}
|
|
|
@@ -191,7 +191,7 @@
|
static int SetBreakPoint(Handle<v8::internal::JSFunction> fun, int position) {
|
static int break_point = 0;
|
Handle<v8::internal::SharedFunctionInfo> shared(fun->shared());
|
- Debug::SetBreakPoint(
|
+ v8_context()->debug_.SetBreakPoint(
|
shared, position,
|
Handle<Object>(v8::internal::Smi::FromInt(++break_point)));
|
return break_point;
|
@@ -273,7 +273,7 @@
|
|
// Clear a break point.
|
static void ClearBreakPoint(int break_point) {
|
- Debug::ClearBreakPoint(
|
+ v8_context()->debug_.ClearBreakPoint(
|
Handle<Object>(v8::internal::Smi::FromInt(break_point)));
|
}
|
|
@@ -333,8 +333,8 @@
|
|
// Change break on exception.
|
static void ChangeBreakOnException(bool caught, bool uncaught) {
|
- Debug::ChangeBreakOnException(v8::internal::BreakException, caught);
|
- Debug::ChangeBreakOnException(v8::internal::BreakUncaughtException, uncaught);
|
+ v8_context()->debug_.ChangeBreakOnException(v8::internal::BreakException, caught);
|
+ v8_context()->debug_.ChangeBreakOnException(v8::internal::BreakUncaughtException, uncaught);
|
}
|
|
|
@@ -359,7 +359,7 @@
|
|
// Prepare to step to next break location.
|
static void PrepareStep(StepAction step_action) {
|
- Debug::PrepareStep(step_action, 1);
|
+ v8_context()->debug_.PrepareStep(step_action, 1);
|
}
|
|
|
@@ -370,7 +370,7 @@
|
|
// Collect the currently debugged functions.
|
Handle<FixedArray> GetDebuggedFunctions() {
|
- v8::internal::DebugInfoListNode* node = Debug::debug_info_list_;
|
+ v8::internal::DebugInfoListNode* node = v8_context()->debug_.debug_info_list_;
|
|
// Find the number of debugged functions.
|
int count = 0;
|
@@ -395,7 +395,7 @@
|
|
|
static Handle<Code> ComputeCallDebugBreak(int argc) {
|
- CALL_HEAP_FUNCTION(v8::internal::StubCache::ComputeCallDebugBreak(argc),
|
+ CALL_HEAP_FUNCTION(v8_context()->stub_cache_.ComputeCallDebugBreak(argc),
|
Code);
|
}
|
|
@@ -404,12 +404,12 @@
|
void CheckDebuggerUnloaded(bool check_functions) {
|
// Check that the debugger context is cleared and that there is no debug
|
// information stored for the debugger.
|
- CHECK(Debug::debug_context().is_null());
|
- CHECK_EQ(NULL, Debug::debug_info_list_);
|
+ CHECK(v8_context()->debug_.debug_context().is_null());
|
+ CHECK_EQ(NULL, v8_context()->debug_.debug_info_list_);
|
|
// Collect garbage to ensure weak handles are cleared.
|
- Heap::CollectAllGarbage(false);
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
// Iterate the head and check that there are no debugger related objects left.
|
HeapIterator iterator;
|
@@ -427,9 +427,9 @@
|
for (RelocIterator it(fun->shared()->code()); !it.done(); it.next()) {
|
RelocInfo::Mode rmode = it.rinfo()->rmode();
|
if (RelocInfo::IsCodeTarget(rmode)) {
|
- CHECK(!Debug::IsDebugBreak(it.rinfo()->target_address()));
|
+ CHECK(!v8_context()->debug_.IsDebugBreak(it.rinfo()->target_address()));
|
} else if (RelocInfo::IsJSReturn(rmode)) {
|
- CHECK(!Debug::IsDebugBreakAtReturn(it.rinfo()));
|
+ CHECK(!v8_context()->debug_.IsDebugBreakAtReturn(it.rinfo()));
|
}
|
}
|
}
|
@@ -473,27 +473,28 @@
|
|
// Check that the debug break function is as expected.
|
Handle<v8::internal::SharedFunctionInfo> shared(fun->shared());
|
- CHECK(Debug::HasDebugInfo(shared));
|
- TestBreakLocationIterator it1(Debug::GetDebugInfo(shared));
|
+ Debug& debug = v8_context()->debug_;
|
+ CHECK(debug.HasDebugInfo(shared));
|
+ TestBreakLocationIterator it1(debug.GetDebugInfo(shared));
|
it1.FindBreakLocationFromPosition(position);
|
CHECK_EQ(mode, it1.it()->rinfo()->rmode());
|
if (mode != v8::internal::RelocInfo::JS_RETURN) {
|
CHECK_EQ(debug_break,
|
Code::GetCodeFromTargetAddress(it1.it()->rinfo()->target_address()));
|
} else {
|
- CHECK(Debug::IsDebugBreakAtReturn(it1.it()->rinfo()));
|
+ CHECK(debug.IsDebugBreakAtReturn(it1.it()->rinfo()));
|
}
|
|
// Clear the break point and check that the debug break function is no longer
|
// there
|
ClearBreakPoint(bp);
|
- CHECK(!Debug::HasDebugInfo(shared));
|
- CHECK(Debug::EnsureDebugInfo(shared));
|
- TestBreakLocationIterator it2(Debug::GetDebugInfo(shared));
|
+ CHECK(!debug.HasDebugInfo(shared));
|
+ CHECK(debug.EnsureDebugInfo(shared));
|
+ TestBreakLocationIterator it2(debug.GetDebugInfo(shared));
|
it2.FindBreakLocationFromPosition(position);
|
CHECK_EQ(mode, it2.it()->rinfo()->rmode());
|
if (mode == v8::internal::RelocInfo::JS_RETURN) {
|
- CHECK(!Debug::IsDebugBreakAtReturn(it2.it()->rinfo()));
|
+ CHECK(!debug.IsDebugBreakAtReturn(it2.it()->rinfo()));
|
}
|
}
|
|
@@ -576,7 +577,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
// Count the number of breaks.
|
if (event == v8::Break) {
|
@@ -667,7 +668,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
// Count the number of breaks.
|
if (event == v8::Break) {
|
@@ -687,7 +688,7 @@
|
|
// Collect the JavsScript stack height if the function frame_count is
|
// compiled.
|
- if (!frame_count.IsEmpty()) {
|
+ if (!frame_count.IsEmpty()) {
|
static const int kArgc = 1;
|
v8::Handle<v8::Value> argv[kArgc] = { exec_state };
|
// Using exec_state as receiver is just to have a receiver.
|
@@ -725,7 +726,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break) {
|
for (int i = 0; checks[i].expr != NULL; i++) {
|
@@ -751,7 +752,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break) {
|
break_point_hit_count++;
|
@@ -769,7 +770,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break) {
|
break_point_hit_count++;
|
@@ -795,7 +796,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break || event == v8::Exception) {
|
// Check that the current function is the expected.
|
@@ -825,7 +826,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
// Perform a garbage collection when break point is hit and continue. Based
|
// on the number of break points hit either scavenge or mark compact
|
@@ -834,10 +835,10 @@
|
break_point_hit_count++;
|
if (break_point_hit_count % 2 == 0) {
|
// Scavenge.
|
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
|
+ v8_context()->heap_.CollectGarbage(0, v8::internal::NEW_SPACE);
|
} else {
|
// Mark sweep (and perhaps compact).
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
}
|
}
|
@@ -850,7 +851,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break) {
|
// Count the number of breaks.
|
@@ -858,7 +859,7 @@
|
|
// Run the garbage collector to enforce heap verification if option
|
// --verify-heap is set.
|
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
|
+ v8_context()->heap_.CollectGarbage(0, v8::internal::NEW_SPACE);
|
|
// Set the break flag again to come back here as soon as possible.
|
v8::Debug::DebugBreak();
|
@@ -874,7 +875,7 @@
|
v8::Handle<v8::Object> event_data,
|
v8::Handle<v8::Value> data) {
|
// When hitting a debug event listener there must be a break set.
|
- CHECK_NE(v8::internal::Debug::break_id(), 0);
|
+ CHECK_NE(v8_context()->debug_.break_id(), 0);
|
|
if (event == v8::Break && break_point_hit_count < max_break_point_hit_count) {
|
// Count the number of breaks.
|
@@ -911,7 +912,7 @@
|
using ::v8::internal::Builtins;
|
v8::HandleScope scope;
|
DebugLocalContext env;
|
-
|
+ Builtins& builtins = v8_context()->builtins_;
|
CheckDebugBreakFunction(&env,
|
"function f1(){}", "f1",
|
0,
|
@@ -921,12 +922,12 @@
|
"function f2(){x=1;}", "f2",
|
0,
|
v8::internal::RelocInfo::CODE_TARGET,
|
- Builtins::builtin(Builtins::StoreIC_DebugBreak));
|
+ builtins.builtin(Builtins::StoreIC_DebugBreak));
|
CheckDebugBreakFunction(&env,
|
"function f3(){var a=x;}", "f3",
|
0,
|
v8::internal::RelocInfo::CODE_TARGET_CONTEXT,
|
- Builtins::builtin(Builtins::LoadIC_DebugBreak));
|
+ builtins.builtin(Builtins::LoadIC_DebugBreak));
|
|
// TODO(1240753): Make the test architecture independent or split
|
// parts of the debugger into architecture dependent files. This
|
@@ -939,14 +940,14 @@
|
"f4",
|
0,
|
v8::internal::RelocInfo::CODE_TARGET,
|
- Builtins::builtin(Builtins::KeyedStoreIC_DebugBreak));
|
+ builtins.builtin(Builtins::KeyedStoreIC_DebugBreak));
|
CheckDebugBreakFunction(
|
&env,
|
"function f5(){var index='propertyName'; var a={}; return a[index];}",
|
"f5",
|
0,
|
v8::internal::RelocInfo::CODE_TARGET,
|
- Builtins::builtin(Builtins::KeyedLoadIC_DebugBreak));
|
+ builtins.builtin(Builtins::KeyedLoadIC_DebugBreak));
|
#endif
|
|
// Check the debug break code stubs for call ICs with different number of
|
@@ -1215,12 +1216,12 @@
|
CHECK_EQ(1 + i * 3, break_point_hit_count);
|
|
// Scavenge and call function.
|
- Heap::CollectGarbage(0, v8::internal::NEW_SPACE);
|
+ v8_context()->heap_.CollectGarbage(0, v8::internal::NEW_SPACE);
|
f->Call(recv, 0, NULL);
|
CHECK_EQ(2 + i * 3, break_point_hit_count);
|
|
// Mark sweep (and perhaps compact) and call function.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
f->Call(recv, 0, NULL);
|
CHECK_EQ(3 + i * 3, break_point_hit_count);
|
}
|
@@ -2732,7 +2733,7 @@
|
DebugLocalContext env;
|
env.ExposeDebug();
|
|
- v8::internal::Top::TraceException(false);
|
+ v8_context()->top_.TraceException(false);
|
|
// Create functions for testing break on exception.
|
v8::Local<v8::Function> throws =
|
@@ -2876,7 +2877,7 @@
|
v8::HandleScope scope;
|
DebugLocalContext env;
|
|
- v8::internal::Top::TraceException(false);
|
+ v8_context()->top_.TraceException(false);
|
|
// Create a function for checking the function when hitting a break point.
|
frame_count = CompileFunction(&env, frame_count_source, "frame_count");
|
@@ -4595,19 +4596,20 @@
|
// Initialize the socket library.
|
i::Socket::Setup();
|
|
+ Debugger* const debugger = v8_context()->debug_.debugger();
|
// Test starting and stopping the agent without any client connection.
|
- i::Debugger::StartAgent("test", kPort1);
|
- i::Debugger::StopAgent();
|
+ debugger->StartAgent("test", kPort1);
|
+ debugger->StopAgent();
|
|
// Test starting the agent, connecting a client and shutting down the agent
|
// with the client connected.
|
- ok = i::Debugger::StartAgent("test", kPort2);
|
+ ok = debugger->StartAgent("test", kPort2);
|
CHECK(ok);
|
- i::Debugger::WaitForAgent();
|
+ debugger->WaitForAgent();
|
i::Socket* client = i::OS::CreateSocket();
|
ok = client->Connect("localhost", port2_str);
|
CHECK(ok);
|
- i::Debugger::StopAgent();
|
+ debugger->StopAgent();
|
delete client;
|
|
// Test starting and stopping the agent with the required port already
|
@@ -4615,8 +4617,8 @@
|
i::Socket* server = i::OS::CreateSocket();
|
server->Bind(kPort3);
|
|
- i::Debugger::StartAgent("test", kPort3);
|
- i::Debugger::StopAgent();
|
+ debugger->StartAgent("test", kPort3);
|
+ debugger->StopAgent();
|
|
delete server;
|
}
|
@@ -5178,11 +5180,11 @@
|
DebugLocalContext env;
|
|
// Request the loaded scripts to initialize the debugger script cache.
|
- Debug::GetLoadedScripts();
|
+ v8_context()->debug_.GetLoadedScripts();
|
|
// Do garbage collection to ensure that only the script in this test will be
|
// collected afterwards.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
script_collected_count = 0;
|
v8::Debug::SetDebugEventListener(DebugEventScriptCollectedEvent,
|
@@ -5194,7 +5196,7 @@
|
|
// Do garbage collection to collect the script above which is no longer
|
// referenced.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
CHECK_EQ(2, script_collected_count);
|
|
@@ -5225,11 +5227,11 @@
|
DebugLocalContext env;
|
|
// Request the loaded scripts to initialize the debugger script cache.
|
- Debug::GetLoadedScripts();
|
+ v8_context()->debug_.GetLoadedScripts();
|
|
// Do garbage collection to ensure that only the script in this test will be
|
// collected afterwards.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
v8::Debug::SetMessageHandler2(ScriptCollectedMessageHandler);
|
{
|
@@ -5240,7 +5242,7 @@
|
|
// Do garbage collection to collect the script above which is no longer
|
// referenced.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
|
CHECK_EQ(2, script_collected_message_count);
|
|
@@ -5395,18 +5397,18 @@
|
} else if (message.IsEvent() && message.GetEvent() == v8::AfterCompile) {
|
v8::HandleScope scope;
|
|
- bool is_debug_break = i::StackGuard::IsDebugBreak();
|
+ bool is_debug_break = v8_context()->stack_guard_.IsDebugBreak();
|
// Force DebugBreak flag while serializer is working.
|
- i::StackGuard::DebugBreak();
|
+ v8_context()->stack_guard_.DebugBreak();
|
|
// Force serialization to trigger some internal JS execution.
|
v8::Handle<v8::String> json = message.GetJSON();
|
|
// Restore previous state.
|
if (is_debug_break) {
|
- i::StackGuard::DebugBreak();
|
+ v8_context()->stack_guard_.DebugBreak();
|
} else {
|
- i::StackGuard::Continue(i::DEBUGBREAK);
|
+ v8_context()->stack_guard_.Continue(i::DEBUGBREAK);
|
}
|
}
|
}
|
Index: src/factory.cc
|
===================================================================
|
--- src/factory.cc (revision 3117)
|
+++ src/factory.cc Sat Nov 14 01:43:01 MSK 2009
|
@@ -39,13 +39,13 @@
|
|
Handle<FixedArray> Factory::NewFixedArray(int size, PretenureFlag pretenure) {
|
ASSERT(0 <= size);
|
- CALL_HEAP_FUNCTION(Heap::AllocateFixedArray(size, pretenure), FixedArray);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFixedArray(size, pretenure), FixedArray);
|
}
|
|
|
Handle<FixedArray> Factory::NewFixedArrayWithHoles(int size) {
|
ASSERT(0 <= size);
|
- CALL_HEAP_FUNCTION(Heap::AllocateFixedArrayWithHoles(size), FixedArray);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFixedArrayWithHoles(size), FixedArray);
|
}
|
|
|
@@ -72,37 +72,37 @@
|
|
// Symbols are created in the old generation (data space).
|
Handle<String> Factory::LookupSymbol(Vector<const char> string) {
|
- CALL_HEAP_FUNCTION(Heap::LookupSymbol(string), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.LookupSymbol(string), String);
|
}
|
|
|
Handle<String> Factory::NewStringFromAscii(Vector<const char> string,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateStringFromAscii(string, pretenure), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateStringFromAscii(string, pretenure), String);
|
}
|
|
Handle<String> Factory::NewStringFromUtf8(Vector<const char> string,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateStringFromUtf8(string, pretenure), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateStringFromUtf8(string, pretenure), String);
|
}
|
|
|
Handle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateStringFromTwoByte(string, pretenure),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateStringFromTwoByte(string, pretenure),
|
String);
|
}
|
|
|
Handle<String> Factory::NewRawTwoByteString(int length,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateRawTwoByteString(length, pretenure), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateRawTwoByteString(length, pretenure), String);
|
}
|
|
|
Handle<String> Factory::NewConsString(Handle<String> first,
|
Handle<String> second) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateConsString(*first, *second), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateConsString(*first, *second), String);
|
}
|
|
|
@@ -115,31 +115,31 @@
|
|
Handle<String> Factory::NewExternalStringFromAscii(
|
ExternalAsciiString::Resource* resource) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateExternalStringFromAscii(resource), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateExternalStringFromAscii(resource), String);
|
}
|
|
|
Handle<String> Factory::NewExternalStringFromTwoByte(
|
ExternalTwoByteString::Resource* resource) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateExternalStringFromTwoByte(resource), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateExternalStringFromTwoByte(resource), String);
|
}
|
|
|
Handle<Context> Factory::NewGlobalContext() {
|
- CALL_HEAP_FUNCTION(Heap::AllocateGlobalContext(), Context);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateGlobalContext(), Context);
|
}
|
|
|
Handle<Context> Factory::NewFunctionContext(int length,
|
Handle<JSFunction> closure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateFunctionContext(length, *closure), Context);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFunctionContext(length, *closure), Context);
|
}
|
|
|
Handle<Context> Factory::NewWithContext(Handle<Context> previous,
|
Handle<JSObject> extension,
|
bool is_catch_context) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateWithContext(*previous,
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateWithContext(*previous,
|
*extension,
|
is_catch_context),
|
Context);
|
@@ -147,7 +147,7 @@
|
|
|
Handle<Struct> Factory::NewStruct(InstanceType type) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateStruct(type), Struct);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateStruct(type), Struct);
|
}
|
|
|
@@ -162,34 +162,35 @@
|
Handle<Script> Factory::NewScript(Handle<String> source) {
|
// Generate id for this script.
|
int id;
|
- if (Heap::last_script_id()->IsUndefined()) {
|
+ Heap& heap = v8_context()->heap_;
|
+ if (heap.last_script_id()->IsUndefined()) {
|
// Script ids start from one.
|
id = 1;
|
} else {
|
// Increment id, wrap when positive smi is exhausted.
|
- id = Smi::cast(Heap::last_script_id())->value();
|
+ id = Smi::cast(heap.last_script_id())->value();
|
id++;
|
if (!Smi::IsValid(id)) {
|
id = 0;
|
}
|
}
|
- Heap::SetLastScriptId(Smi::FromInt(id));
|
+ heap.SetLastScriptId(Smi::FromInt(id));
|
|
// Create and initialize script object.
|
Handle<Proxy> wrapper = Factory::NewProxy(0, TENURED);
|
Handle<Script> script = Handle<Script>::cast(NewStruct(SCRIPT_TYPE));
|
script->set_source(*source);
|
- script->set_name(Heap::undefined_value());
|
- script->set_id(Heap::last_script_id());
|
+ script->set_name(heap.undefined_value());
|
+ script->set_id(heap.last_script_id());
|
script->set_line_offset(Smi::FromInt(0));
|
script->set_column_offset(Smi::FromInt(0));
|
- script->set_data(Heap::undefined_value());
|
- script->set_context_data(Heap::undefined_value());
|
+ script->set_data(heap.undefined_value());
|
+ script->set_context_data(heap.undefined_value());
|
script->set_type(Smi::FromInt(Script::TYPE_NORMAL));
|
script->set_compilation_type(Smi::FromInt(Script::COMPILATION_TYPE_HOST));
|
script->set_wrapper(*wrapper);
|
- script->set_line_ends(Heap::undefined_value());
|
- script->set_eval_from_function(Heap::undefined_value());
|
+ script->set_line_ends(heap.undefined_value());
|
+ script->set_eval_from_function(heap.undefined_value());
|
script->set_eval_from_instructions_offset(Smi::FromInt(0));
|
|
return script;
|
@@ -197,7 +198,7 @@
|
|
|
Handle<Proxy> Factory::NewProxy(Address addr, PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateProxy(addr, pretenure), Proxy);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateProxy(addr, pretenure), Proxy);
|
}
|
|
|
@@ -208,7 +209,7 @@
|
|
Handle<ByteArray> Factory::NewByteArray(int length, PretenureFlag pretenure) {
|
ASSERT(0 <= length);
|
- CALL_HEAP_FUNCTION(Heap::AllocateByteArray(length, pretenure), ByteArray);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateByteArray(length, pretenure), ByteArray);
|
}
|
|
|
@@ -216,7 +217,7 @@
|
uint8_t* external_pointer,
|
PretenureFlag pretenure) {
|
ASSERT(0 <= length);
|
- CALL_HEAP_FUNCTION(Heap::AllocatePixelArray(length,
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocatePixelArray(length,
|
external_pointer,
|
pretenure), PixelArray);
|
}
|
@@ -227,7 +228,7 @@
|
void* external_pointer,
|
PretenureFlag pretenure) {
|
ASSERT(0 <= length);
|
- CALL_HEAP_FUNCTION(Heap::AllocateExternalArray(length,
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateExternalArray(length,
|
array_type,
|
external_pointer,
|
pretenure), ExternalArray);
|
@@ -235,12 +236,12 @@
|
|
|
Handle<Map> Factory::NewMap(InstanceType type, int instance_size) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateMap(type, instance_size), Map);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateMap(type, instance_size), Map);
|
}
|
|
|
Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateFunctionPrototype(*function), JSObject);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFunctionPrototype(*function), JSObject);
|
}
|
|
|
@@ -288,11 +289,12 @@
|
ASSERT(boilerplate->IsBoilerplate());
|
ASSERT(!boilerplate->has_initial_map());
|
ASSERT(!boilerplate->has_prototype());
|
- ASSERT(boilerplate->properties() == Heap::empty_fixed_array());
|
- ASSERT(boilerplate->elements() == Heap::empty_fixed_array());
|
- CALL_HEAP_FUNCTION(Heap::AllocateFunction(*function_map,
|
+ Heap& heap = v8_context()->heap_;
|
+ ASSERT(boilerplate->properties() == heap.empty_fixed_array());
|
+ ASSERT(boilerplate->elements() == heap.empty_fixed_array());
|
+ CALL_HEAP_FUNCTION(heap.AllocateFunction(*function_map,
|
boilerplate->shared(),
|
- Heap::the_hole_value()),
|
+ heap.the_hole_value()),
|
JSFunction);
|
}
|
|
@@ -301,7 +303,7 @@
|
Handle<JSFunction> boilerplate,
|
Handle<Context> context) {
|
Handle<JSFunction> result =
|
- BaseNewFunctionFromBoilerplate(boilerplate, Top::function_map());
|
+ BaseNewFunctionFromBoilerplate(boilerplate, v8_context()->top_.function_map());
|
result->set_context(*context);
|
int number_of_literals = boilerplate->NumberOfLiterals();
|
Handle<FixedArray> literals =
|
@@ -321,22 +323,22 @@
|
|
Handle<Object> Factory::NewNumber(double value,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::NumberFromDouble(value, pretenure), Object);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.NumberFromDouble(value, pretenure), Object);
|
}
|
|
|
Handle<Object> Factory::NewNumberFromInt(int value) {
|
- CALL_HEAP_FUNCTION(Heap::NumberFromInt32(value), Object);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.NumberFromInt32(value), Object);
|
}
|
|
|
Handle<Object> Factory::NewNumberFromUint(uint32_t value) {
|
- CALL_HEAP_FUNCTION(Heap::NumberFromUint32(value), Object);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.NumberFromUint32(value), Object);
|
}
|
|
|
Handle<JSObject> Factory::NewNeanderObject() {
|
- CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(Heap::neander_map()),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateJSObjectFromMap(v8_context()->heap_.neander_map()),
|
JSObject);
|
}
|
|
@@ -413,7 +415,8 @@
|
const char* type,
|
Handle<JSArray> args) {
|
Handle<String> make_str = Factory::LookupAsciiSymbol(maker);
|
- Handle<Object> fun_obj(Top::builtins()->GetProperty(*make_str));
|
+ Top& top = v8_context()->top_;
|
+ Handle<Object> fun_obj(top.builtins()->GetProperty(*make_str));
|
// If the builtins haven't been properly configured yet this error
|
// constructor may not have been defined. Bail out.
|
if (!fun_obj->IsJSFunction())
|
@@ -427,7 +430,7 @@
|
// running the factory method, use the exception as the result.
|
bool caught_exception;
|
Handle<Object> result = Execution::TryCall(fun,
|
- Top::builtins(),
|
+ top.builtins(),
|
2,
|
argv,
|
&caught_exception);
|
@@ -446,14 +449,14 @@
|
Handle<JSFunction> fun =
|
Handle<JSFunction>(
|
JSFunction::cast(
|
- Top::builtins()->GetProperty(*constr)));
|
+ v8_context()->top_.builtins()->GetProperty(*constr)));
|
Object** argv[1] = { Handle<Object>::cast(message).location() };
|
|
// Invoke the JavaScript factory method. If an exception is thrown while
|
// running the factory method, use the exception as the result.
|
bool caught_exception;
|
Handle<Object> result = Execution::TryCall(fun,
|
- Top::builtins(),
|
+ v8_context()->top_.builtins(),
|
1,
|
argv,
|
&caught_exception);
|
@@ -510,9 +513,9 @@
|
|
Handle<JSFunction> Factory::NewFunctionBoilerplate(Handle<String> name) {
|
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name);
|
- CALL_HEAP_FUNCTION(Heap::AllocateFunction(Heap::boilerplate_function_map(),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFunction(v8_context()->heap_.boilerplate_function_map(),
|
*shared,
|
- Heap::the_hole_value()),
|
+ v8_context()->heap_.the_hole_value()),
|
JSFunction);
|
}
|
|
@@ -548,12 +551,12 @@
|
ZoneScopeInfo* sinfo,
|
Code::Flags flags,
|
Handle<Object> self_ref) {
|
- CALL_HEAP_FUNCTION(Heap::CreateCode(desc, sinfo, flags, self_ref), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.CreateCode(desc, sinfo, flags, self_ref), Code);
|
}
|
|
|
Handle<Code> Factory::CopyCode(Handle<Code> code) {
|
- CALL_HEAP_FUNCTION(Heap::CopyCode(*code), Code);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.CopyCode(*code), Code);
|
}
|
|
|
@@ -579,7 +582,7 @@
|
|
|
Handle<String> Factory::SymbolFromString(Handle<String> value) {
|
- CALL_HEAP_FUNCTION(Heap::LookupSymbol(*value), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.LookupSymbol(*value), String);
|
}
|
|
|
@@ -644,27 +647,27 @@
|
|
Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
|
PretenureFlag pretenure) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateJSObject(*constructor, pretenure), JSObject);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateJSObject(*constructor, pretenure), JSObject);
|
}
|
|
|
Handle<GlobalObject> Factory::NewGlobalObject(
|
Handle<JSFunction> constructor) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateGlobalObject(*constructor),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateGlobalObject(*constructor),
|
GlobalObject);
|
}
|
|
|
|
Handle<JSObject> Factory::NewJSObjectFromMap(Handle<Map> map) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateJSObjectFromMap(*map, NOT_TENURED),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateJSObjectFromMap(*map, NOT_TENURED),
|
JSObject);
|
}
|
|
|
Handle<JSArray> Factory::NewJSArray(int length,
|
PretenureFlag pretenure) {
|
- Handle<JSObject> obj = NewJSObject(Top::array_function(), pretenure);
|
+ Handle<JSObject> obj = NewJSObject(v8_context()->top_.array_function(), pretenure);
|
CALL_HEAP_FUNCTION(Handle<JSArray>::cast(obj)->Initialize(length), JSArray);
|
}
|
|
@@ -672,20 +675,20 @@
|
Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArray> elements,
|
PretenureFlag pretenure) {
|
Handle<JSArray> result =
|
- Handle<JSArray>::cast(NewJSObject(Top::array_function(), pretenure));
|
+ Handle<JSArray>::cast(NewJSObject(v8_context()->top_.array_function(), pretenure));
|
result->SetContent(*elements);
|
return result;
|
}
|
|
|
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(Handle<String> name) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateSharedFunctionInfo(*name),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateSharedFunctionInfo(*name),
|
SharedFunctionInfo);
|
}
|
|
|
Handle<String> Factory::NumberToString(Handle<Object> number) {
|
- CALL_HEAP_FUNCTION(Heap::NumberToString(*number), String);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.NumberToString(*number), String);
|
}
|
|
|
@@ -700,7 +703,7 @@
|
Handle<JSFunction> Factory::NewFunctionHelper(Handle<String> name,
|
Handle<Object> prototype) {
|
Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name);
|
- CALL_HEAP_FUNCTION(Heap::AllocateFunction(*Top::function_map(),
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateFunction(*v8_context()->top_.function_map(),
|
*function_share,
|
*prototype),
|
JSFunction);
|
@@ -710,7 +713,7 @@
|
Handle<JSFunction> Factory::NewFunction(Handle<String> name,
|
Handle<Object> prototype) {
|
Handle<JSFunction> fun = NewFunctionHelper(name, prototype);
|
- fun->set_context(Top::context()->global_context());
|
+ fun->set_context(v8_context()->top_.context()->global_context());
|
return fun;
|
}
|
|
@@ -756,13 +759,13 @@
|
|
Handle<JSObject> Factory::NewArgumentsObject(Handle<Object> callee,
|
int length) {
|
- CALL_HEAP_FUNCTION(Heap::AllocateArgumentsObject(*callee, length), JSObject);
|
+ CALL_HEAP_FUNCTION(v8_context()->heap_.AllocateArgumentsObject(*callee, length), JSObject);
|
}
|
|
|
Handle<JSFunction> Factory::CreateApiFunction(
|
Handle<FunctionTemplateInfo> obj, ApiInstanceType instance_type) {
|
- Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::HandleApiCall));
|
+ Handle<Code> code = Handle<Code>(v8_context()->builtins_.builtin(Builtins::HandleApiCall));
|
|
int internal_field_count = 0;
|
if (!obj->instance_template()->IsUndefined()) {
|
@@ -925,8 +928,8 @@
|
store->set(JSRegExp::kTagIndex, Smi::FromInt(type));
|
store->set(JSRegExp::kSourceIndex, *source);
|
store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags.value()));
|
- store->set(JSRegExp::kIrregexpASCIICodeIndex, Heap::the_hole_value());
|
- store->set(JSRegExp::kIrregexpUC16CodeIndex, Heap::the_hole_value());
|
+ store->set(JSRegExp::kIrregexpASCIICodeIndex, v8_context()->heap_.the_hole_value());
|
+ store->set(JSRegExp::kIrregexpUC16CodeIndex, v8_context()->heap_.the_hole_value());
|
store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::FromInt(0));
|
store->set(JSRegExp::kIrregexpCaptureCountIndex,
|
Smi::FromInt(capture_count));
|
Index: src/ast.cc
|
===================================================================
|
--- src/ast.cc (revision 3163)
|
+++ src/ast.cc Sat Nov 14 01:43:09 MSK 2009
|
@@ -36,7 +36,7 @@
|
namespace internal {
|
|
|
-VariableProxySentinel VariableProxySentinel::this_proxy_(true);
|
+VariableProxySentinel VariableProxySentinel::this_proxy_(true); ///static
|
VariableProxySentinel VariableProxySentinel::identifier_proxy_(false);
|
ValidLeftHandSideSentinel ValidLeftHandSideSentinel::instance_;
|
Property Property::this_property_(VariableProxySentinel::this_proxy(), NULL, 0);
|
@@ -120,7 +120,7 @@
|
key_ = key;
|
value_ = value;
|
Object* k = *key->handle();
|
- if (k->IsSymbol() && Heap::Proto_symbol()->Equals(String::cast(k))) {
|
+ if (k->IsSymbol() && v8_context()->heap_.Proto_symbol()->Equals(String::cast(k))) {
|
kind_ = PROTOTYPE;
|
} else if (value_->AsMaterializedLiteral() != NULL) {
|
kind_ = MATERIALIZED_LITERAL;
|
Index: src/regexp-macro-assembler.cc
|
===================================================================
|
--- src/regexp-macro-assembler.cc (revision 3228)
|
+++ src/regexp-macro-assembler.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -178,8 +178,9 @@
|
int at_start_val = at_start ? 1 : 0;
|
|
// Ensure that the minimum stack has been allocated.
|
- RegExpStack stack;
|
- Address stack_base = RegExpStack::stack_base();
|
+ RegExpStackControl stack;
|
+ V8Context * const v8context = v8_context();
|
+ Address stack_base = v8context->reg_exp_stack_.stack_base();
|
|
int result = CALL_GENERATED_REGEXP_CODE(matcher_func,
|
input,
|
@@ -192,17 +193,14 @@
|
ASSERT(result <= SUCCESS);
|
ASSERT(result >= RETRY);
|
|
- if (result == EXCEPTION && !Top::has_pending_exception()) {
|
+ if (result == EXCEPTION && !v8context->top_.has_pending_exception()) {
|
// We detected a stack overflow (on the backtrack stack) in RegExp code,
|
// but haven't created the exception yet.
|
- Top::StackOverflow();
|
+ v8context->top_.StackOverflow();
|
}
|
return static_cast<Result>(result);
|
}
|
|
-
|
-static unibrow::Mapping<unibrow::Ecma262Canonicalize> canonicalize;
|
-
|
int NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16(
|
Address byte_offset1,
|
Address byte_offset2,
|
@@ -214,6 +212,7 @@
|
uc16* substring1 = reinterpret_cast<uc16*>(byte_offset1);
|
uc16* substring2 = reinterpret_cast<uc16*>(byte_offset2);
|
size_t length = byte_length >> 1;
|
+ unibrow::Mapping<unibrow::Ecma262Canonicalize>& canonicalize = v8_context()->reg_exp_stack_.canonicalize_;
|
|
for (size_t i = 0; i < length; i++) {
|
unibrow::uchar c1 = substring1[i];
|
@@ -236,12 +235,13 @@
|
|
Address NativeRegExpMacroAssembler::GrowStack(Address stack_pointer,
|
Address* stack_base) {
|
- size_t size = RegExpStack::stack_capacity();
|
- Address old_stack_base = RegExpStack::stack_base();
|
+ RegExpStack & reg_exp_stack = v8_context()->reg_exp_stack_;
|
+ size_t size = reg_exp_stack.stack_capacity();
|
+ Address old_stack_base = reg_exp_stack.stack_base();
|
ASSERT(old_stack_base == *stack_base);
|
ASSERT(stack_pointer <= old_stack_base);
|
ASSERT(static_cast<size_t>(old_stack_base - stack_pointer) <= size);
|
- Address new_stack_base = RegExpStack::EnsureCapacity(size * 2);
|
+ Address new_stack_base = reg_exp_stack.EnsureCapacity(size * 2);
|
if (new_stack_base == NULL) {
|
return NULL;
|
}
|
Index: src/heap-profiler.h
|
===================================================================
|
--- src/heap-profiler.h (revision 3124)
|
+++ src/heap-profiler.h Sat Nov 14 01:42:53 MSK 2009
|
@@ -93,13 +93,14 @@
|
|
private:
|
static String* FromSpecialCase(SpecialCase special) {
|
+ Heap & heap = v8_context()->heap_;
|
// We use symbols that are illegal JS identifiers to identify special cases.
|
// Their actual value is irrelevant for us.
|
switch (special) {
|
- case ROOTS: return Heap::result_symbol();
|
- case GLOBAL_PROPERTY: return Heap::code_symbol();
|
- case CODE: return Heap::arguments_shadow_symbol();
|
- case SELF: return Heap::catch_var_symbol();
|
+ case ROOTS: return heap.result_symbol();
|
+ case GLOBAL_PROPERTY: return heap.code_symbol();
|
+ case CODE: return heap.arguments_shadow_symbol();
|
+ case SELF: return heap.catch_var_symbol();
|
default:
|
UNREACHABLE();
|
return NULL;
|
@@ -263,7 +264,7 @@
|
static void Setup();
|
static void RecordJSObjectAllocation(Object* obj);
|
private:
|
- static bool can_log_;
|
+ static bool can_log_; ///static
|
};
|
|
#endif // ENABLE_LOGGING_AND_PROFILING
|
Index: src/disassembler.h
|
===================================================================
|
--- src/disassembler.h (revision 2038)
|
+++ src/disassembler.h Sat Nov 14 01:42:53 MSK 2009
|
@@ -49,6 +49,10 @@
|
// not be decoded. The number of characters written is written into
|
// the out parameter char_count.
|
static int Decode(FILE* f, byte* pc, int* char_count);
|
+
|
+ friend class V8Context;
|
+ static void Setup();
|
+ static void TearDown();
|
};
|
|
} } // namespace v8::internal
|
Index: src/v8threads.h
|
===================================================================
|
--- src/v8threads.h (revision 2977)
|
+++ src/v8threads.h Sat Nov 14 01:43:02 MSK 2009
|
@@ -31,11 +31,8 @@
|
namespace v8 {
|
namespace internal {
|
|
-
|
class ThreadState {
|
public:
|
- // Iterate over in-use states.
|
- static ThreadState* FirstInUse();
|
// Returns NULL after the last one.
|
ThreadState* Next();
|
|
@@ -44,8 +41,6 @@
|
void LinkInto(List list);
|
void Unlink();
|
|
- static ThreadState* GetFree();
|
-
|
// Id of thread.
|
void set_id(int id) { id_ = id; }
|
int id() { return id_; }
|
@@ -59,7 +54,7 @@
|
// Get data area for archiving a thread.
|
char* data() { return data_; }
|
private:
|
- ThreadState();
|
+ ThreadState(ThreadManager* thread_manager);
|
|
void AllocateSpace();
|
|
@@ -69,74 +64,93 @@
|
ThreadState* next_;
|
ThreadState* previous_;
|
|
- // In the following two lists there is always at least one object on the list.
|
- // The first object is a flying anchor that is only there to simplify linking
|
- // and unlinking.
|
- // Head of linked list of free states.
|
- static ThreadState* free_anchor_;
|
- // Head of linked list of states in use.
|
- static ThreadState* in_use_anchor_;
|
+ friend class ThreadManager;
|
+ ThreadManager* const thread_manager_;
|
};
|
|
+// The ContextSwitcher thread is used to schedule regular preemptions to
|
+// multiple running V8 threads. Generally it is necessary to call
|
+// StartPreemption if there is more than one thread running. If not, a single
|
+// JavaScript can take full control of V8 and not allow other threads to run.
|
+class ContextSwitcher: public Thread {
|
+ private:
|
+ explicit ContextSwitcher(int every_n_ms);
|
|
-class ThreadManager : public AllStatic {
|
+ void Run();
|
+
|
+ bool keep_going_;
|
+ int sleep_ms_;
|
+ friend class ThreadManager;
|
+};
|
+
|
+class ThreadManager {
|
public:
|
- static void Lock();
|
- static void Unlock();
|
+ void Lock();
|
+ void Unlock();
|
|
- static void ArchiveThread();
|
- static bool RestoreThread();
|
- static void FreeThreadResources();
|
- static bool IsArchived();
|
+ void ArchiveThread();
|
+ bool RestoreThread();
|
+ void FreeThreadResources();
|
+ bool IsArchived();
|
|
- static void Iterate(ObjectVisitor* v);
|
- static void MarkCompactPrologue(bool is_compacting);
|
- static void MarkCompactEpilogue(bool is_compacting);
|
- static bool IsLockedByCurrentThread() { return mutex_owner_.IsSelf(); }
|
+ void Iterate(ObjectVisitor* v);
|
+ void MarkCompactPrologue(bool is_compacting);
|
+ void MarkCompactEpilogue(bool is_compacting);
|
+ bool IsLockedByCurrentThread() { return mutex_owner_.IsSelf(); }
|
|
- static int CurrentId();
|
- static void AssignId();
|
- static bool HasId();
|
+ int CurrentId();
|
+ void AssignId();
|
+ bool HasId();
|
|
- static void TerminateExecution(int thread_id);
|
+ void TerminateExecution(int thread_id);
|
|
static const int kInvalidId = -1;
|
- private:
|
- static void EagerlyArchiveThread();
|
|
- static int last_id_; // V8 threads are identified through an integer.
|
- static Mutex* mutex_;
|
- static ThreadHandle mutex_owner_;
|
- static ThreadHandle lazily_archived_thread_;
|
- static ThreadState* lazily_archived_thread_state_;
|
-};
|
-
|
-
|
-// The ContextSwitcher thread is used to schedule regular preemptions to
|
-// multiple running V8 threads. Generally it is necessary to call
|
-// StartPreemption if there is more than one thread running. If not, a single
|
-// JavaScript can take full control of V8 and not allow other threads to run.
|
-class ContextSwitcher: public Thread {
|
- public:
|
// Set the preemption interval for the ContextSwitcher thread.
|
- static void StartPreemption(int every_n_ms);
|
+ void StartPreemption(int every_n_ms);
|
|
// Stop sending preemption requests to threads.
|
- static void StopPreemption();
|
+ void StopPreemption();
|
|
// Preempted thread needs to call back to the ContextSwitcher to acknowledge
|
// the handling of a preemption request.
|
- static void PreemptionReceived();
|
+ void PreemptionReceived();
|
|
+ // Iterate over in-use states.
|
+ ThreadState* FirstInUse();
|
+ ThreadState* GetFree();
|
+
|
private:
|
- explicit ContextSwitcher(int every_n_ms);
|
+ void EagerlyArchiveThread();
|
|
- void Run();
|
+ int last_id_; // V8 threads are identified through an integer.
|
+ Mutex* mutex_;
|
+ ThreadHandle mutex_owner_;
|
+ ThreadHandle lazily_archived_thread_;
|
+ ThreadState* lazily_archived_thread_state_;
|
|
- bool keep_going_;
|
- int sleep_ms_;
|
+ ThreadManager();
|
+ ~ThreadManager();
|
+ DISALLOW_COPY_AND_ASSIGN(ThreadManager);
|
|
- static ContextSwitcher* singleton_;
|
+ // In the following two lists there is always at least one object on the list.
|
+ // The first object is a flying anchor that is only there to simplify linking
|
+ // and unlinking.
|
+ // Head of linked list of free states.
|
+ ThreadState* free_anchor_;
|
+ // Head of linked list of states in use.
|
+ ThreadState* in_use_anchor_;
|
+
|
+ // This is the ContextSwitcher singleton. There is at most a single thread
|
+ // running which delivers preemption events to V8 threads.
|
+ ContextSwitcher* singleton_;
|
+
|
+
|
+ Thread::LocalStorageKey thread_state_key;
|
+ Thread::LocalStorageKey thread_id_key;
|
+
|
+ friend class ThreadState;
|
+ friend class V8Context;
|
};
|
|
} } // namespace v8::internal
|
Index: src/heap.h
|
===================================================================
|
--- src/heap.h (revision 3230)
|
+++ src/heap.h Sat Nov 14 01:43:03 MSK 2009
|
@@ -31,8 +31,8 @@
|
#include <math.h>
|
|
#include "zone-inl.h"
|
+#include "v8-global-context.h"
|
|
-
|
namespace v8 {
|
namespace internal {
|
|
@@ -236,97 +236,97 @@
|
// The all static Heap captures the interface to the global object heap.
|
// All JavaScript contexts by this process share the same object heap.
|
|
-class Heap : public AllStatic {
|
+class Heap {
|
public:
|
// Configure heap size before setup. Return false if the heap has been
|
// setup already.
|
- static bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
|
- static bool ConfigureHeapDefault();
|
+ bool ConfigureHeap(int max_semispace_size, int max_old_gen_size);
|
+ bool ConfigureHeapDefault();
|
|
// Initializes the global object heap. If create_heap_objects is true,
|
// also creates the basic non-mutable objects.
|
// Returns whether it succeeded.
|
- static bool Setup(bool create_heap_objects);
|
+ bool Setup(bool create_heap_objects);
|
|
// Destroys all memory allocated by the heap.
|
- static void TearDown();
|
+ void TearDown();
|
|
// Set the stack limit in the roots_ array. Some architectures generate
|
// code that looks here, because it is faster than loading from the static
|
// jslimit_/real_jslimit_ variable in the StackGuard.
|
- static void SetStackLimits();
|
+ void SetStackLimits();
|
|
// Returns whether Setup has been called.
|
- static bool HasBeenSetup();
|
+ bool HasBeenSetup();
|
|
// Returns the maximum amount of memory reserved for the heap. For
|
// the young generation, we reserve 4 times the amount needed for a
|
// semi space. The young generation consists of two semi spaces and
|
// we reserve twice the amount needed for those in order to ensure
|
// that new space can be aligned to its size.
|
- static int MaxReserved() {
|
+ int MaxReserved() {
|
return 4 * reserved_semispace_size_ + max_old_generation_size_;
|
}
|
- static int MaxSemiSpaceSize() { return max_semispace_size_; }
|
- static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
|
- static int InitialSemiSpaceSize() { return initial_semispace_size_; }
|
- static int MaxOldGenerationSize() { return max_old_generation_size_; }
|
+ int MaxSemiSpaceSize() { return max_semispace_size_; }
|
+ int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
|
+ int InitialSemiSpaceSize() { return initial_semispace_size_; }
|
+ int MaxOldGenerationSize() { return max_old_generation_size_; }
|
|
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
|
// more spaces are needed until it reaches the limit.
|
- static int Capacity();
|
+ int Capacity();
|
|
// Returns the amount of memory currently committed for the heap.
|
- static int CommittedMemory();
|
+ int CommittedMemory();
|
|
// Returns the available bytes in space w/o growing.
|
// Heap doesn't guarantee that it can allocate an object that requires
|
// all available bytes. Check MaxHeapObjectSize() instead.
|
- static int Available();
|
+ int Available();
|
|
// Returns the maximum object size in paged space.
|
- static inline int MaxObjectSizeInPagedSpace();
|
+ inline int MaxObjectSizeInPagedSpace();
|
|
// Returns of size of all objects residing in the heap.
|
- static int SizeOfObjects();
|
+ int SizeOfObjects();
|
|
// Return the starting address and a mask for the new space. And-masking an
|
// address with the mask will result in the start address of the new space
|
// for all addresses in either semispace.
|
- static Address NewSpaceStart() { return new_space_.start(); }
|
- static uintptr_t NewSpaceMask() { return new_space_.mask(); }
|
- static Address NewSpaceTop() { return new_space_.top(); }
|
+ Address NewSpaceStart() { return new_space_.start(); }
|
+ uintptr_t NewSpaceMask() { return new_space_.mask(); }
|
+ Address NewSpaceTop() { return new_space_.top(); }
|
|
- static NewSpace* new_space() { return &new_space_; }
|
- static OldSpace* old_pointer_space() { return old_pointer_space_; }
|
- static OldSpace* old_data_space() { return old_data_space_; }
|
- static OldSpace* code_space() { return code_space_; }
|
- static MapSpace* map_space() { return map_space_; }
|
- static CellSpace* cell_space() { return cell_space_; }
|
- static LargeObjectSpace* lo_space() { return lo_space_; }
|
+ NewSpace* new_space() { return &new_space_; }
|
+ OldSpace* old_pointer_space() { return old_pointer_space_; }
|
+ OldSpace* old_data_space() { return old_data_space_; }
|
+ OldSpace* code_space() { return code_space_; }
|
+ MapSpace* map_space() { return map_space_; }
|
+ CellSpace* cell_space() { return cell_space_; }
|
+ LargeObjectSpace* lo_space() { return lo_space_; }
|
|
- static bool always_allocate() { return always_allocate_scope_depth_ != 0; }
|
- static Address always_allocate_scope_depth_address() {
|
+ bool always_allocate() { return always_allocate_scope_depth_ != 0; }
|
+ Address always_allocate_scope_depth_address() {
|
return reinterpret_cast<Address>(&always_allocate_scope_depth_);
|
}
|
- static bool linear_allocation() {
|
+ bool linear_allocation() {
|
return linear_allocation_scope_depth_ != 0;
|
}
|
|
- static Address* NewSpaceAllocationTopAddress() {
|
+ Address* NewSpaceAllocationTopAddress() {
|
return new_space_.allocation_top_address();
|
}
|
- static Address* NewSpaceAllocationLimitAddress() {
|
+ Address* NewSpaceAllocationLimitAddress() {
|
return new_space_.allocation_limit_address();
|
}
|
|
// Uncommit unused semi space.
|
- static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
|
+ bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
|
|
#ifdef ENABLE_HEAP_PROTECTION
|
// Protect/unprotect the heap by marking all spaces read-only/writable.
|
- static void Protect();
|
- static void Unprotect();
|
+ void Protect();
|
+ void Unprotect();
|
#endif
|
|
// Allocates and initializes a new JavaScript object based on a
|
@@ -334,58 +334,58 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateJSObject(JSFunction* constructor,
|
+ Object* AllocateJSObject(JSFunction* constructor,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
// Allocates and initializes a new global object based on a constructor.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateGlobalObject(JSFunction* constructor);
|
+ Object* AllocateGlobalObject(JSFunction* constructor);
|
|
// Returns a deep copy of the JavaScript object.
|
// Properties and elements are copied too.
|
// Returns failure if allocation failed.
|
- static Object* CopyJSObject(JSObject* source);
|
+ Object* CopyJSObject(JSObject* source);
|
|
// Allocates the function prototype.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateFunctionPrototype(JSFunction* function);
|
+ Object* AllocateFunctionPrototype(JSFunction* function);
|
|
// Reinitialize an JSGlobalProxy based on a constructor. The object
|
// must have the same size as objects allocated using the
|
// constructor. The object is reinitialized and behaves as an
|
// object that has been freshly allocated using the constructor.
|
- static Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
|
+ Object* ReinitializeJSGlobalProxy(JSFunction* constructor,
|
JSGlobalProxy* global);
|
|
// Allocates and initializes a new JavaScript object based on a map.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateJSObjectFromMap(Map* map,
|
+ Object* AllocateJSObjectFromMap(Map* map,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
// Allocates a heap object based on the map.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static Object* Allocate(Map* map, AllocationSpace space);
|
+ Object* Allocate(Map* map, AllocationSpace space);
|
|
// Allocates a JS Map in the heap.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static Object* AllocateMap(InstanceType instance_type, int instance_size);
|
+ Object* AllocateMap(InstanceType instance_type, int instance_size);
|
|
// Allocates a partial map for bootstrapping.
|
- static Object* AllocatePartialMap(InstanceType instance_type,
|
+ Object* AllocatePartialMap(InstanceType instance_type,
|
int instance_size);
|
|
// Allocate a map for the specified function
|
- static Object* AllocateInitialMap(JSFunction* fun);
|
+ Object* AllocateInitialMap(JSFunction* fun);
|
|
// Allocates and fully initializes a String. There are two String
|
// encodings: ASCII and two byte. One should choose between the three string
|
@@ -405,13 +405,13 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateStringFromAscii(
|
+ Object* AllocateStringFromAscii(
|
Vector<const char> str,
|
PretenureFlag pretenure = NOT_TENURED);
|
- static Object* AllocateStringFromUtf8(
|
+ Object* AllocateStringFromUtf8(
|
Vector<const char> str,
|
PretenureFlag pretenure = NOT_TENURED);
|
- static Object* AllocateStringFromTwoByte(
|
+ Object* AllocateStringFromTwoByte(
|
Vector<const uc16> str,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
@@ -419,15 +419,15 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static inline Object* AllocateSymbol(Vector<const char> str,
|
+ inline Object* AllocateSymbol(Vector<const char> str,
|
int chars,
|
uint32_t length_field);
|
|
- static Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
|
+ Object* AllocateInternalSymbol(unibrow::CharacterStream* buffer,
|
int chars,
|
uint32_t length_field);
|
|
- static Object* AllocateExternalSymbol(Vector<const char> str,
|
+ Object* AllocateExternalSymbol(Vector<const char> str,
|
int chars);
|
|
|
@@ -438,10 +438,10 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateRawAsciiString(
|
+ Object* AllocateRawAsciiString(
|
int length,
|
PretenureFlag pretenure = NOT_TENURED);
|
- static Object* AllocateRawTwoByteString(
|
+ Object* AllocateRawTwoByteString(
|
int length,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
@@ -449,25 +449,25 @@
|
// A cache is used for ascii codes.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed. Please note this does not perform a garbage collection.
|
- static Object* LookupSingleCharacterStringFromCode(uint16_t code);
|
+ Object* LookupSingleCharacterStringFromCode(uint16_t code);
|
|
// Allocate a byte array of the specified length
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateByteArray(int length, PretenureFlag pretenure);
|
+ Object* AllocateByteArray(int length, PretenureFlag pretenure);
|
|
// Allocate a non-tenured byte array of the specified length
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateByteArray(int length);
|
+ Object* AllocateByteArray(int length);
|
|
// Allocate a pixel array of the specified length
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocatePixelArray(int length,
|
+ Object* AllocatePixelArray(int length,
|
uint8_t* external_pointer,
|
PretenureFlag pretenure);
|
|
@@ -475,7 +475,7 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateExternalArray(int length,
|
+ Object* AllocateExternalArray(int length,
|
ExternalArrayType array_type,
|
void* external_pointer,
|
PretenureFlag pretenure);
|
@@ -484,49 +484,49 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateJSGlobalPropertyCell(Object* value);
|
+ Object* AllocateJSGlobalPropertyCell(Object* value);
|
|
// Allocates a fixed array initialized with undefined values
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateFixedArray(int length, PretenureFlag pretenure);
|
+ Object* AllocateFixedArray(int length, PretenureFlag pretenure);
|
// Allocate uninitialized, non-tenured fixed array with length elements.
|
- static Object* AllocateFixedArray(int length);
|
+ Object* AllocateFixedArray(int length);
|
|
// Make a copy of src and return it. Returns
|
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
|
- static Object* CopyFixedArray(FixedArray* src);
|
+ Object* CopyFixedArray(FixedArray* src);
|
|
// Allocates a fixed array initialized with the hole values.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateFixedArrayWithHoles(int length);
|
+ Object* AllocateFixedArrayWithHoles(int length);
|
|
// AllocateHashTable is identical to AllocateFixedArray except
|
// that the resulting object has hash_table_map as map.
|
- static Object* AllocateHashTable(int length);
|
+ Object* AllocateHashTable(int length);
|
|
// Allocate a global (but otherwise uninitialized) context.
|
- static Object* AllocateGlobalContext();
|
+ Object* AllocateGlobalContext();
|
|
// Allocate a function context.
|
- static Object* AllocateFunctionContext(int length, JSFunction* closure);
|
+ Object* AllocateFunctionContext(int length, JSFunction* closure);
|
|
// Allocate a 'with' context.
|
- static Object* AllocateWithContext(Context* previous,
|
+ Object* AllocateWithContext(Context* previous,
|
JSObject* extension,
|
bool is_catch_context);
|
|
// Allocates a new utility object in the old generation.
|
- static Object* AllocateStruct(InstanceType type);
|
+ Object* AllocateStruct(InstanceType type);
|
|
// Allocates a function initialized with a shared part.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateFunction(Map* function_map,
|
+ Object* AllocateFunction(Map* function_map,
|
SharedFunctionInfo* shared,
|
Object* prototype);
|
|
@@ -538,54 +538,54 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateArgumentsObject(Object* callee, int length);
|
+ Object* AllocateArgumentsObject(Object* callee, int length);
|
|
// Converts a double into either a Smi or a HeapNumber object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* NewNumberFromDouble(double value,
|
+ Object* NewNumberFromDouble(double value,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
// Same as NewNumberFromDouble, but may return a preallocated/immutable
|
// number object (e.g., minus_zero_value_, nan_value_)
|
- static Object* NumberFromDouble(double value,
|
+ Object* NumberFromDouble(double value,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
// Allocated a HeapNumber from value.
|
- static Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
|
- static Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
|
+ Object* AllocateHeapNumber(double value, PretenureFlag pretenure);
|
+ Object* AllocateHeapNumber(double value); // pretenure = NOT_TENURED
|
|
// Converts an int into either a Smi or a HeapNumber object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static inline Object* NumberFromInt32(int32_t value);
|
+ inline Object* NumberFromInt32(int32_t value);
|
|
// Converts an int into either a Smi or a HeapNumber object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static inline Object* NumberFromUint32(uint32_t value);
|
+ inline Object* NumberFromUint32(uint32_t value);
|
|
// Allocates a new proxy object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateProxy(Address proxy,
|
+ Object* AllocateProxy(Address proxy,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
// Allocates a new SharedFunctionInfo object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateSharedFunctionInfo(Object* name);
|
+ Object* AllocateSharedFunctionInfo(Object* name);
|
|
// Allocates a new cons string object.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateConsString(String* first, String* second);
|
+ Object* AllocateConsString(String* first, String* second);
|
|
// Allocates a new sliced string object which is a slice of an underlying
|
// string buffer stretching from the index start (inclusive) to the index
|
@@ -593,7 +593,7 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateSlicedString(String* buffer,
|
+ Object* AllocateSlicedString(String* buffer,
|
int start,
|
int end);
|
|
@@ -603,7 +603,7 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateSubString(String* buffer,
|
+ Object* AllocateSubString(String* buffer,
|
int start,
|
int end);
|
|
@@ -612,9 +612,9 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this does not perform a garbage collection.
|
- static Object* AllocateExternalStringFromAscii(
|
+ Object* AllocateExternalStringFromAscii(
|
ExternalAsciiString::Resource* resource);
|
- static Object* AllocateExternalStringFromTwoByte(
|
+ Object* AllocateExternalStringFromTwoByte(
|
ExternalTwoByteString::Resource* resource);
|
|
// Allocates an uninitialized object. The memory is non-executable if the
|
@@ -622,13 +622,13 @@
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static inline Object* AllocateRaw(int size_in_bytes,
|
+ inline Object* AllocateRaw(int size_in_bytes,
|
AllocationSpace space,
|
AllocationSpace retry_space);
|
|
// Initialize a filler object to keep the ability to iterate over the heap
|
// when shortening objects.
|
- static void CreateFillerObjectAt(Address addr, int size);
|
+ void CreateFillerObjectAt(Address addr, int size);
|
|
// Makes a new native code object
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
@@ -636,76 +636,76 @@
|
// self_reference. This allows generated code to reference its own Code
|
// object by containing this pointer.
|
// Please note this function does not perform a garbage collection.
|
- static Object* CreateCode(const CodeDesc& desc,
|
+ Object* CreateCode(const CodeDesc& desc,
|
ZoneScopeInfo* sinfo,
|
Code::Flags flags,
|
Handle<Object> self_reference);
|
|
- static Object* CopyCode(Code* code);
|
+ Object* CopyCode(Code* code);
|
// Finds the symbol for string in the symbol table.
|
// If not found, a new symbol is added to the table and returned.
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static Object* LookupSymbol(Vector<const char> str);
|
- static Object* LookupAsciiSymbol(const char* str) {
|
+ Object* LookupSymbol(Vector<const char> str);
|
+ Object* LookupAsciiSymbol(const char* str) {
|
return LookupSymbol(CStrVector(str));
|
}
|
- static Object* LookupSymbol(String* str);
|
- static bool LookupSymbolIfExists(String* str, String** symbol);
|
+ Object* LookupSymbol(String* str);
|
+ bool LookupSymbolIfExists(String* str, String** symbol);
|
|
// Compute the matching symbol map for a string if possible.
|
// NULL is returned if string is in new space or not flattened.
|
- static Map* SymbolMapForString(String* str);
|
+ Map* SymbolMapForString(String* str);
|
|
// Converts the given boolean condition to JavaScript boolean value.
|
- static Object* ToBoolean(bool condition) {
|
+ Object* ToBoolean(bool condition) {
|
return condition ? true_value() : false_value();
|
}
|
|
// Code that should be run before and after each GC. Includes some
|
// reporting/verification activities when compiled with DEBUG set.
|
- static void GarbageCollectionPrologue();
|
- static void GarbageCollectionEpilogue();
|
+ void GarbageCollectionPrologue();
|
+ void GarbageCollectionEpilogue();
|
|
// Performs garbage collection operation.
|
// Returns whether required_space bytes are available after the collection.
|
- static bool CollectGarbage(int required_space, AllocationSpace space);
|
+ bool CollectGarbage(int required_space, AllocationSpace space);
|
|
// Performs a full garbage collection. Force compaction if the
|
// parameter is true.
|
- static void CollectAllGarbage(bool force_compaction);
|
+ void CollectAllGarbage(bool force_compaction);
|
|
// Performs a full garbage collection if a context has been disposed
|
// since the last time the check was performed.
|
- static void CollectAllGarbageIfContextDisposed();
|
+ void CollectAllGarbageIfContextDisposed();
|
|
// Notify the heap that a context has been disposed.
|
- static void NotifyContextDisposed();
|
+ void NotifyContextDisposed();
|
|
// Utility to invoke the scavenger. This is needed in test code to
|
// ensure correct callback for weak global handles.
|
- static void PerformScavenge();
|
+ void PerformScavenge();
|
|
#ifdef DEBUG
|
// Utility used with flag gc-greedy.
|
- static bool GarbageCollectionGreedyCheck();
|
+ bool GarbageCollectionGreedyCheck();
|
#endif
|
|
- static void SetGlobalGCPrologueCallback(GCCallback callback) {
|
+ void SetGlobalGCPrologueCallback(GCCallback callback) {
|
global_gc_prologue_callback_ = callback;
|
}
|
- static void SetGlobalGCEpilogueCallback(GCCallback callback) {
|
+ void SetGlobalGCEpilogueCallback(GCCallback callback) {
|
global_gc_epilogue_callback_ = callback;
|
}
|
|
// Heap root getters. We have versions with and without type::cast() here.
|
// You can't use type::cast during GC because the assert fails.
|
#define ROOT_ACCESSOR(type, name, camel_name) \
|
- static inline type* name() { \
|
+ inline type* name() { \
|
return type::cast(roots_[k##camel_name##RootIndex]); \
|
} \
|
- static inline type* raw_unchecked_##name() { \
|
+ inline type* raw_unchecked_##name() { \
|
return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
|
}
|
ROOT_LIST(ROOT_ACCESSOR)
|
@@ -713,13 +713,13 @@
|
|
// Utility type maps
|
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
|
- static inline Map* name##_map() { \
|
+ inline Map* name##_map() { \
|
return Map::cast(roots_[k##Name##MapRootIndex]); \
|
}
|
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
|
#undef STRUCT_MAP_ACCESSOR
|
|
-#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \
|
+#define SYMBOL_ACCESSOR(name, str) inline String* name() { \
|
return String::cast(roots_[k##name##RootIndex]); \
|
}
|
SYMBOL_LIST(SYMBOL_ACCESSOR)
|
@@ -727,109 +727,109 @@
|
|
// The hidden_symbol is special because it is the empty string, but does
|
// not match the empty string.
|
- static String* hidden_symbol() { return hidden_symbol_; }
|
+ String* hidden_symbol() { return hidden_symbol_; }
|
|
// Iterates over all roots in the heap.
|
- static void IterateRoots(ObjectVisitor* v, VisitMode mode);
|
+ void IterateRoots(ObjectVisitor* v, VisitMode mode);
|
// Iterates over all strong roots in the heap.
|
- static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
|
+ void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
|
|
// Iterates remembered set of an old space.
|
- static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
|
+ void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
|
|
// Iterates a range of remembered set addresses starting with rset_start
|
// corresponding to the range of allocated pointers
|
// [object_start, object_end).
|
// Returns the number of bits that were set.
|
- static int IterateRSetRange(Address object_start,
|
+ int IterateRSetRange(Address object_start,
|
Address object_end,
|
Address rset_start,
|
ObjectSlotCallback copy_object_func);
|
|
// Returns whether the object resides in new space.
|
- static inline bool InNewSpace(Object* object);
|
- static inline bool InFromSpace(Object* object);
|
- static inline bool InToSpace(Object* object);
|
+ inline bool InNewSpace(Object* object);
|
+ inline bool InFromSpace(Object* object);
|
+ inline bool InToSpace(Object* object);
|
|
// Checks whether an address/object in the heap (including auxiliary
|
// area and unused area).
|
- static bool Contains(Address addr);
|
- static bool Contains(HeapObject* value);
|
+ bool Contains(Address addr);
|
+ bool Contains(HeapObject* value);
|
|
// Checks whether an address/object in a space.
|
// Currently used by tests, serialization and heap verification only.
|
- static bool InSpace(Address addr, AllocationSpace space);
|
- static bool InSpace(HeapObject* value, AllocationSpace space);
|
+ bool InSpace(Address addr, AllocationSpace space);
|
+ bool InSpace(HeapObject* value, AllocationSpace space);
|
|
// Finds out which space an object should get promoted to based on its type.
|
- static inline OldSpace* TargetSpace(HeapObject* object);
|
- static inline AllocationSpace TargetSpaceId(InstanceType type);
|
+ inline OldSpace* TargetSpace(HeapObject* object);
|
+ inline AllocationSpace TargetSpaceId(InstanceType type);
|
|
// Sets the stub_cache_ (only used when expanding the dictionary).
|
- static void public_set_code_stubs(NumberDictionary* value) {
|
+ void public_set_code_stubs(NumberDictionary* value) {
|
roots_[kCodeStubsRootIndex] = value;
|
}
|
|
// Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
|
- static void public_set_non_monomorphic_cache(NumberDictionary* value) {
|
+ void public_set_non_monomorphic_cache(NumberDictionary* value) {
|
roots_[kNonMonomorphicCacheRootIndex] = value;
|
}
|
|
// Update the next script id.
|
- static inline void SetLastScriptId(Object* last_script_id);
|
+ inline void SetLastScriptId(Object* last_script_id);
|
|
// Generated code can embed this address to get access to the roots.
|
- static Object** roots_address() { return roots_; }
|
+ Object** roots_address() { return roots_; }
|
|
#ifdef DEBUG
|
- static void Print();
|
- static void PrintHandles();
|
+ void Print();
|
+ void PrintHandles();
|
|
// Verify the heap is in its normal state before or after a GC.
|
- static void Verify();
|
+ void Verify();
|
|
// Report heap statistics.
|
- static void ReportHeapStatistics(const char* title);
|
- static void ReportCodeStatistics(const char* title);
|
+ void ReportHeapStatistics(const char* title);
|
+ void ReportCodeStatistics(const char* title);
|
|
// Fill in bogus values in from space
|
- static void ZapFromSpace();
|
+ void ZapFromSpace();
|
#endif
|
|
#if defined(ENABLE_LOGGING_AND_PROFILING)
|
// Print short heap statistics.
|
- static void PrintShortHeapStatistics();
|
+ void PrintShortHeapStatistics();
|
#endif
|
|
// Makes a new symbol object
|
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
|
// failed.
|
// Please note this function does not perform a garbage collection.
|
- static Object* CreateSymbol(const char* str, int length, int hash);
|
- static Object* CreateSymbol(String* str);
|
+ Object* CreateSymbol(const char* str, int length, int hash);
|
+ Object* CreateSymbol(String* str);
|
|
// Write barrier support for address[offset] = o.
|
- static inline void RecordWrite(Address address, int offset);
|
+ inline void RecordWrite(Address address, int offset);
|
|
// Given an address occupied by a live code object, return that object.
|
- static Object* FindCodeObject(Address a);
|
+ Object* FindCodeObject(Address a);
|
|
// Invoke Shrink on shrinkable spaces.
|
- static void Shrink();
|
+ void Shrink();
|
|
enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
|
- static inline HeapState gc_state() { return gc_state_; }
|
+ inline HeapState gc_state() { return gc_state_; }
|
|
#ifdef DEBUG
|
- static bool IsAllocationAllowed() { return allocation_allowed_; }
|
- static inline bool allow_allocation(bool enable);
|
+ bool IsAllocationAllowed() { return allocation_allowed_; }
|
+ inline bool allow_allocation(bool enable);
|
|
- static bool disallow_allocation_failure() {
|
+ bool disallow_allocation_failure() {
|
return disallow_allocation_failure_;
|
}
|
|
- static void TracePathToObject();
|
- static void TracePathToGlobal();
|
+ void TracePathToObject();
|
+ void TracePathToGlobal();
|
#endif
|
|
// Callback function passed to Heap::Iterate etc. Copies an object if
|
@@ -837,58 +837,58 @@
|
// ensure the precondition that the object is (a) a heap object and (b) in
|
// the heap's from space.
|
static void ScavengePointer(HeapObject** p);
|
- static inline void ScavengeObject(HeapObject** p, HeapObject* object);
|
+ inline void ScavengeObject(HeapObject** p, HeapObject* object);
|
|
// Clear a range of remembered set addresses corresponding to the object
|
// area address 'start' with size 'size_in_bytes', eg, when adding blocks
|
// to the free list.
|
- static void ClearRSetRange(Address start, int size_in_bytes);
|
+ void ClearRSetRange(Address start, int size_in_bytes);
|
|
// Rebuild remembered set in old and map spaces.
|
- static void RebuildRSets();
|
+ void RebuildRSets();
|
|
// Commits from space if it is uncommitted.
|
- static void EnsureFromSpaceIsCommitted();
|
+ void EnsureFromSpaceIsCommitted();
|
|
//
|
// Support for the API.
|
//
|
|
- static bool CreateApiObjects();
|
+ bool CreateApiObjects();
|
|
// Attempt to find the number in a small cache. If we finds it, return
|
// the string representation of the number. Otherwise return undefined.
|
- static Object* GetNumberStringCache(Object* number);
|
+ Object* GetNumberStringCache(Object* number);
|
|
// Update the cache with a new number-string pair.
|
- static void SetNumberStringCache(Object* number, String* str);
|
+ void SetNumberStringCache(Object* number, String* str);
|
|
// Entries in the cache. Must be a power of 2.
|
static const int kNumberStringCacheSize = 64;
|
|
// Adjusts the amount of registered external memory.
|
// Returns the adjusted value.
|
- static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
|
+ inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
|
|
// Allocate unitialized fixed array (pretenure == NON_TENURE).
|
- static Object* AllocateRawFixedArray(int length);
|
+ Object* AllocateRawFixedArray(int length);
|
|
// True if we have reached the allocation limit in the old generation that
|
// should force the next GC (caused normally) to be a full one.
|
- static bool OldGenerationPromotionLimitReached() {
|
+ bool OldGenerationPromotionLimitReached() {
|
return (PromotedSpaceSize() + PromotedExternalMemorySize())
|
> old_gen_promotion_limit_;
|
}
|
|
// True if we have reached the allocation limit in the old generation that
|
// should artificially cause a GC right now.
|
- static bool OldGenerationAllocationLimitReached() {
|
+ bool OldGenerationAllocationLimitReached() {
|
return (PromotedSpaceSize() + PromotedExternalMemorySize())
|
> old_gen_allocation_limit_;
|
}
|
|
// Can be called when the embedding application is idle.
|
- static bool IdleNotification();
|
+ bool IdleNotification();
|
|
// Declare all the root indices.
|
enum RootListIndex {
|
@@ -910,26 +910,33 @@
|
kRootListLength
|
};
|
|
- static Object* NumberToString(Object* number);
|
+ Object* NumberToString(Object* number);
|
|
- static Map* MapForExternalArrayType(ExternalArrayType array_type);
|
- static RootListIndex RootIndexForExternalArrayType(
|
+ Map* MapForExternalArrayType(ExternalArrayType array_type);
|
+ RootListIndex RootIndexForExternalArrayType(
|
ExternalArrayType array_type);
|
+  MemoryAllocator* memory_allocator() const { return memory_allocator_; }
|
+  CodeRange* code_range() const { return code_range_; }
|
|
+ class HeapImpl;
|
private:
|
- static int reserved_semispace_size_;
|
- static int max_semispace_size_;
|
- static int initial_semispace_size_;
|
- static int max_old_generation_size_;
|
- static size_t code_range_size_;
|
+ int reserved_semispace_size_;
|
+ int max_semispace_size_;
|
+ int initial_semispace_size_;
|
+ int max_old_generation_size_;
|
+ size_t code_range_size_;
|
|
+ // Flag is set when the heap has been configured. The heap can be repeatedly
|
+  // configured through the API until it is set up.
|
+ bool heap_configured;
|
+
|
// For keeping track of how much data has survived
|
// scavenge since last new space expansion.
|
- static int survived_since_last_expansion_;
|
+ int survived_since_last_expansion_;
|
|
- static int always_allocate_scope_depth_;
|
- static int linear_allocation_scope_depth_;
|
- static bool context_disposed_pending_;
|
+ int always_allocate_scope_depth_;
|
+ int linear_allocation_scope_depth_;
|
+ bool context_disposed_pending_;
|
|
static const int kMaxMapSpaceSize = 8*MB;
|
|
@@ -939,70 +946,72 @@
|
static const int kMaxObjectSizeInNewSpace = 256*KB;
|
#endif
|
|
- static NewSpace new_space_;
|
- static OldSpace* old_pointer_space_;
|
- static OldSpace* old_data_space_;
|
- static OldSpace* code_space_;
|
- static MapSpace* map_space_;
|
- static CellSpace* cell_space_;
|
- static LargeObjectSpace* lo_space_;
|
- static HeapState gc_state_;
|
+ NewSpace new_space_;
|
+ OldSpace* old_pointer_space_;
|
+ OldSpace* old_data_space_;
|
+ OldSpace* code_space_;
|
+ MapSpace* map_space_;
|
+ CellSpace* cell_space_;
|
+ LargeObjectSpace* lo_space_;
|
+ MemoryAllocator* const memory_allocator_;
|
+ CodeRange* const code_range_;
|
+ HeapState gc_state_;
|
|
// Returns the size of object residing in non new spaces.
|
- static int PromotedSpaceSize();
|
+ int PromotedSpaceSize();
|
|
// Returns the amount of external memory registered since last global gc.
|
- static int PromotedExternalMemorySize();
|
+ int PromotedExternalMemorySize();
|
|
- static int mc_count_; // how many mark-compact collections happened
|
- static int gc_count_; // how many gc happened
|
+ int mc_count_; // how many mark-compact collections happened
|
+ int gc_count_; // how many gc happened
|
|
#define ROOT_ACCESSOR(type, name, camel_name) \
|
- static inline void set_##name(type* value) { \
|
+ inline void set_##name(type* value) { \
|
roots_[k##camel_name##RootIndex] = value; \
|
}
|
ROOT_LIST(ROOT_ACCESSOR)
|
#undef ROOT_ACCESSOR
|
|
#ifdef DEBUG
|
- static bool allocation_allowed_;
|
+ bool allocation_allowed_;
|
|
// If the --gc-interval flag is set to a positive value, this
|
// variable holds the value indicating the number of allocations
|
// remain until the next failure and garbage collection.
|
- static int allocation_timeout_;
|
+ int allocation_timeout_;
|
|
// Do we expect to be able to handle allocation failure at this
|
// time?
|
- static bool disallow_allocation_failure_;
|
+ bool disallow_allocation_failure_;
|
#endif // DEBUG
|
|
// Limit that triggers a global GC on the next (normally caused) GC. This
|
// is checked when we have already decided to do a GC to help determine
|
// which collector to invoke.
|
- static int old_gen_promotion_limit_;
|
+ int old_gen_promotion_limit_;
|
|
// Limit that triggers a global GC as soon as is reasonable. This is
|
// checked before expanding a paged space in the old generation and on
|
// every allocation in large object space.
|
- static int old_gen_allocation_limit_;
|
+ int old_gen_allocation_limit_;
|
|
// Limit on the amount of externally allocated memory allowed
|
// between global GCs. If reached a global GC is forced.
|
- static int external_allocation_limit_;
|
+ int external_allocation_limit_;
|
|
// The amount of external memory registered through the API kept alive
|
// by global handles
|
- static int amount_of_external_allocated_memory_;
|
+ int amount_of_external_allocated_memory_;
|
|
// Caches the amount of external memory registered at the last global gc.
|
- static int amount_of_external_allocated_memory_at_last_global_gc_;
|
+ int amount_of_external_allocated_memory_at_last_global_gc_;
|
|
// Indicates that an allocation has failed in the old generation since the
|
// last GC.
|
- static int old_gen_exhausted_;
|
+ int old_gen_exhausted_;
|
|
- static Object* roots_[kRootListLength];
|
+ Object* roots_[kRootListLength];
|
|
struct StringTypeTable {
|
InstanceType type;
|
@@ -1027,24 +1036,24 @@
|
|
// The special hidden symbol which is an empty string, but does not match
|
// any string when looked up in properties.
|
- static String* hidden_symbol_;
|
+ String* hidden_symbol_;
|
|
// GC callback function, called before and after mark-compact GC.
|
// Allocations in the callback function are disallowed.
|
- static GCCallback global_gc_prologue_callback_;
|
- static GCCallback global_gc_epilogue_callback_;
|
+ GCCallback global_gc_prologue_callback_;
|
+ GCCallback global_gc_epilogue_callback_;
|
|
// Checks whether a global GC is necessary
|
- static GarbageCollector SelectGarbageCollector(AllocationSpace space);
|
+ GarbageCollector SelectGarbageCollector(AllocationSpace space);
|
|
// Performs garbage collection
|
- static void PerformGarbageCollection(AllocationSpace space,
|
+ void PerformGarbageCollection(AllocationSpace space,
|
GarbageCollector collector,
|
GCTracer* tracer);
|
|
// Returns either a Smi or a Number object from 'value'. If 'new_object'
|
// is false, it may return a preallocated immutable object.
|
- static Object* SmiOrNumberFromDouble(double value,
|
+ Object* SmiOrNumberFromDouble(double value,
|
bool new_object,
|
PretenureFlag pretenure = NOT_TENURED);
|
|
@@ -1052,50 +1061,50 @@
|
// to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
|
// have to test the allocation space argument and (b) can reduce code size
|
// (since both AllocateRaw and AllocateRawMap are inlined).
|
- static inline Object* AllocateRawMap();
|
+ inline Object* AllocateRawMap();
|
|
// Allocate an uninitialized object in the global property cell space.
|
- static inline Object* AllocateRawCell();
|
+ inline Object* AllocateRawCell();
|
|
// Initializes a JSObject based on its map.
|
- static void InitializeJSObjectFromMap(JSObject* obj,
|
+ void InitializeJSObjectFromMap(JSObject* obj,
|
FixedArray* properties,
|
Map* map);
|
|
- static bool CreateInitialMaps();
|
- static bool CreateInitialObjects();
|
+ bool CreateInitialMaps();
|
+ bool CreateInitialObjects();
|
|
// These four Create*EntryStub functions are here because of a gcc-4.4 bug
|
// that assigns wrong vtable entries.
|
- static void CreateCEntryStub();
|
- static void CreateCEntryDebugBreakStub();
|
- static void CreateJSEntryStub();
|
- static void CreateJSConstructEntryStub();
|
- static void CreateRegExpCEntryStub();
|
+ void CreateCEntryStub();
|
+ void CreateCEntryDebugBreakStub();
|
+ void CreateJSEntryStub();
|
+ void CreateJSConstructEntryStub();
|
+ void CreateRegExpCEntryStub();
|
|
- static void CreateFixedStubs();
|
+ void CreateFixedStubs();
|
|
- static Object* CreateOddball(Map* map,
|
+ Object* CreateOddball(Map* map,
|
const char* to_string,
|
Object* to_number);
|
|
// Allocate empty fixed array.
|
- static Object* AllocateEmptyFixedArray();
|
+ Object* AllocateEmptyFixedArray();
|
|
// Performs a minor collection in new generation.
|
- static void Scavenge();
|
+ void Scavenge();
|
|
// Performs a major collection in the whole heap.
|
- static void MarkCompact(GCTracer* tracer);
|
+ void MarkCompact(GCTracer* tracer);
|
|
// Code to be run before and after mark-compact.
|
- static void MarkCompactPrologue(bool is_compacting);
|
- static void MarkCompactEpilogue(bool is_compacting);
|
+ void MarkCompactPrologue(bool is_compacting);
|
+ void MarkCompactEpilogue(bool is_compacting);
|
|
// Helper function used by CopyObject to copy a source object to an
|
// allocated target object and update the forwarding pointer in the source
|
// object. Returns the target object.
|
- static HeapObject* MigrateObject(HeapObject* source,
|
+ HeapObject* MigrateObject(HeapObject* source,
|
HeapObject* target,
|
int size);
|
|
@@ -1103,30 +1112,30 @@
|
// old. If the object's old address lies below the new space's age
|
// mark or if we've already filled the bottom 1/16th of the to space,
|
// we try to promote this object.
|
- static inline bool ShouldBePromoted(Address old_address, int object_size);
|
+ inline bool ShouldBePromoted(Address old_address, int object_size);
|
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
|
// Record the copy of an object in the NewSpace's statistics.
|
- static void RecordCopiedObject(HeapObject* obj);
|
+ void RecordCopiedObject(HeapObject* obj);
|
|
// Record statistics before and after garbage collection.
|
- static void ReportStatisticsBeforeGC();
|
- static void ReportStatisticsAfterGC();
|
+ void ReportStatisticsBeforeGC();
|
+ void ReportStatisticsAfterGC();
|
#endif
|
|
// Update an old object's remembered set
|
- static int UpdateRSet(HeapObject* obj);
|
+ int UpdateRSet(HeapObject* obj);
|
|
// Rebuild remembered set in an old space.
|
- static void RebuildRSets(PagedSpace* space);
|
+ void RebuildRSets(PagedSpace* space);
|
|
// Rebuild remembered set in the large object space.
|
- static void RebuildRSets(LargeObjectSpace* space);
|
+ void RebuildRSets(LargeObjectSpace* space);
|
|
// Slow part of scavenge object.
|
- static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
|
+ void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
|
|
// Copy memory from src to dst.
|
- static inline void CopyBlock(Object** dst, Object** src, int byte_size);
|
+ inline void CopyBlock(Object** dst, Object** src, int byte_size);
|
|
// Initializes a function with a shared part and prototype.
|
// Returns the function.
|
@@ -1134,7 +1143,7 @@
|
// other parts of the VM could use it. Specifically, a function that creates
|
// instances of type JS_FUNCTION_TYPE benefit from the use of this function.
|
// Please note this does not perform a garbage collection.
|
- static inline Object* InitializeFunction(JSFunction* function,
|
+ inline Object* InitializeFunction(JSFunction* function,
|
SharedFunctionInfo* shared,
|
Object* prototype);
|
|
@@ -1145,23 +1154,32 @@
|
friend class DisallowAllocationFailure;
|
friend class AlwaysAllocateScope;
|
friend class LinearAllocationScope;
|
+ friend class V8Context;
|
+
|
+ HeapImpl* const heap_impl_;
|
+
|
+ Heap();
|
+ ~Heap();
|
+ DISALLOW_COPY_AND_ASSIGN(Heap);
|
};
|
|
|
class AlwaysAllocateScope {
|
public:
|
AlwaysAllocateScope() {
|
+ Heap& heap = v8_context()->heap_;
|
// We shouldn't hit any nested scopes, because that requires
|
// non-handle code to call handle code. The code still works but
|
// performance will degrade, so we want to catch this situation
|
// in debug mode.
|
- ASSERT(Heap::always_allocate_scope_depth_ == 0);
|
- Heap::always_allocate_scope_depth_++;
|
+ ASSERT(heap.always_allocate_scope_depth_ == 0);
|
+ heap.always_allocate_scope_depth_++;
|
}
|
|
~AlwaysAllocateScope() {
|
- Heap::always_allocate_scope_depth_--;
|
- ASSERT(Heap::always_allocate_scope_depth_ == 0);
|
+ Heap& heap = v8_context()->heap_;
|
+ heap.always_allocate_scope_depth_--;
|
+ ASSERT(heap.always_allocate_scope_depth_ == 0);
|
}
|
};
|
|
@@ -1169,12 +1187,12 @@
|
class LinearAllocationScope {
|
public:
|
LinearAllocationScope() {
|
- Heap::linear_allocation_scope_depth_++;
|
+ v8_context()->heap_.linear_allocation_scope_depth_++;
|
}
|
|
~LinearAllocationScope() {
|
- Heap::linear_allocation_scope_depth_--;
|
- ASSERT(Heap::linear_allocation_scope_depth_ >= 0);
|
+ v8_context()->heap_.linear_allocation_scope_depth_--;
|
+ ASSERT(v8_context()->heap_.linear_allocation_scope_depth_ >= 0);
|
}
|
};
|
|
@@ -1188,10 +1206,11 @@
|
class VerifyPointersVisitor: public ObjectVisitor {
|
public:
|
void VisitPointers(Object** start, Object** end) {
|
+ Heap& heap = v8_context()->heap_;
|
for (Object** current = start; current < end; current++) {
|
if ((*current)->IsHeapObject()) {
|
HeapObject* object = HeapObject::cast(*current);
|
- ASSERT(Heap::Contains(object));
|
+ ASSERT(heap.Contains(object));
|
ASSERT(object->map()->IsMap());
|
}
|
}
|
@@ -1205,12 +1224,13 @@
|
class VerifyPointersAndRSetVisitor: public ObjectVisitor {
|
public:
|
void VisitPointers(Object** start, Object** end) {
|
+ Heap& heap = v8_context()->heap_;
|
for (Object** current = start; current < end; current++) {
|
if ((*current)->IsHeapObject()) {
|
HeapObject* object = HeapObject::cast(*current);
|
- ASSERT(Heap::Contains(object));
|
+ ASSERT(heap.Contains(object));
|
ASSERT(object->map()->IsMap());
|
- if (Heap::InNewSpace(object)) {
|
+ if (heap.InNewSpace(object)) {
|
ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
|
}
|
}
|
@@ -1306,22 +1326,27 @@
|
class KeyedLookupCache {
|
public:
|
// Lookup field offset for (map, name). If absent, -1 is returned.
|
- static int Lookup(Map* map, String* name);
|
+ int Lookup(Map* map, String* name);
|
|
// Update an element in the cache.
|
- static void Update(Map* map, String* name, int field_offset);
|
+ void Update(Map* map, String* name, int field_offset);
|
|
// Clear the cache.
|
- static void Clear();
|
+ void Clear();
|
+
|
private:
|
- static inline int Hash(Map* map, String* name);
|
+ inline int Hash(Map* map, String* name);
|
static const int kLength = 64;
|
struct Key {
|
Map* map;
|
String* name;
|
};
|
- static Key keys_[kLength];
|
- static int field_offsets_[kLength];
|
+ Key keys_[kLength];
|
+ int field_offsets_[kLength];
|
+
|
+ KeyedLookupCache() {}
|
+ friend class V8Context;
|
+ DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
|
};
|
|
|
@@ -1334,7 +1359,7 @@
|
public:
|
// Lookup descriptor index for (map, name).
|
// If absent, kAbsent is returned.
|
- static int Lookup(DescriptorArray* array, String* name) {
|
+ int Lookup(DescriptorArray* array, String* name) {
|
if (!StringShape(name).IsSymbol()) return kAbsent;
|
int index = Hash(array, name);
|
Key& key = keys_[index];
|
@@ -1343,7 +1368,7 @@
|
}
|
|
// Update an element in the cache.
|
- static void Update(DescriptorArray* array, String* name, int result) {
|
+ void Update(DescriptorArray* array, String* name, int result) {
|
ASSERT(result != kAbsent);
|
if (StringShape(name).IsSymbol()) {
|
int index = Hash(array, name);
|
@@ -1355,7 +1380,7 @@
|
}
|
|
// Clear the cache.
|
- static void Clear();
|
+ void Clear();
|
|
static const int kAbsent = -2;
|
private:
|
@@ -1374,8 +1399,12 @@
|
String* name;
|
};
|
|
- static Key keys_[kLength];
|
- static int results_[kLength];
|
+ Key keys_[kLength];
|
+ int results_[kLength];
|
+
|
+ DescriptorLookupCache() {}
|
+ friend class V8Context;
|
+ DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
|
};
|
|
|
@@ -1439,11 +1468,11 @@
|
class DisallowAllocationFailure {
|
public:
|
DisallowAllocationFailure() {
|
- old_state_ = Heap::disallow_allocation_failure_;
|
- Heap::disallow_allocation_failure_ = true;
|
+ old_state_ = v8_context()->heap_.disallow_allocation_failure_;
|
+ v8_context()->heap_.disallow_allocation_failure_ = true;
|
}
|
~DisallowAllocationFailure() {
|
- Heap::disallow_allocation_failure_ = old_state_;
|
+ v8_context()->heap_.disallow_allocation_failure_ = old_state_;
|
}
|
private:
|
bool old_state_;
|
@@ -1452,11 +1481,11 @@
|
class AssertNoAllocation {
|
public:
|
AssertNoAllocation() {
|
- old_state_ = Heap::allow_allocation(false);
|
+ old_state_ = v8_context()->heap_.allow_allocation(false);
|
}
|
|
~AssertNoAllocation() {
|
- Heap::allow_allocation(old_state_);
|
+ v8_context()->heap_.allow_allocation(old_state_);
|
}
|
|
private:
|
@@ -1466,11 +1495,11 @@
|
class DisableAssertNoAllocation {
|
public:
|
DisableAssertNoAllocation() {
|
- old_state_ = Heap::allow_allocation(true);
|
+ old_state_ = v8_context()->heap_.allow_allocation(true);
|
}
|
|
~DisableAssertNoAllocation() {
|
- Heap::allow_allocation(old_state_);
|
+ v8_context()->heap_.allow_allocation(old_state_);
|
}
|
|
private:
|
@@ -1526,7 +1555,7 @@
|
|
// Returns size of object in heap (in MB).
|
double SizeOfHeapObjects() {
|
- return (static_cast<double>(Heap::SizeOfObjects())) / MB;
|
+ return (static_cast<double>(v8_context()->heap_.SizeOfObjects())) / MB;
|
}
|
|
double start_time_; // Timestamp set in the constructor.
|
@@ -1558,27 +1587,12 @@
|
int previous_marked_count_;
|
};
|
|
-
|
class TranscendentalCache {
|
public:
|
enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
|
|
explicit TranscendentalCache(Type t);
|
|
- // Returns a heap number with f(input), where f is a math function specified
|
- // by the 'type' argument.
|
- static inline Object* Get(Type type, double input) {
|
- TranscendentalCache* cache = caches_[type];
|
- if (cache == NULL) {
|
- caches_[type] = cache = new TranscendentalCache(type);
|
- }
|
- return cache->Get(input);
|
- }
|
-
|
- // The cache contains raw Object pointers. This method disposes of
|
- // them before a garbage collection.
|
- static void Clear();
|
-
|
private:
|
inline Object* Get(double input) {
|
Converter c;
|
@@ -1591,7 +1605,7 @@
|
return e.output;
|
}
|
double answer = Calculate(input);
|
- Object* heap_number = Heap::AllocateHeapNumber(answer);
|
+ Object* heap_number = v8_context()->heap_.AllocateHeapNumber(answer);
|
if (!heap_number->IsFailure()) {
|
elements_[hash].in[0] = c.integers[0];
|
elements_[hash].in[1] = c.integers[1];
|
@@ -1637,12 +1651,37 @@
|
hash ^= hash >> 8;
|
return (hash & (kCacheSize - 1));
|
}
|
- static TranscendentalCache* caches_[kNumberOfCaches];
|
+
|
Element elements_[kCacheSize];
|
Type type_;
|
+ friend class TranscendentalCaches;
|
};
|
|
+class TranscendentalCaches {
|
+public:
|
+ // Returns a heap number with f(input), where f is a math function specified
|
+ // by the 'type' argument.
|
+ inline Object* Get(TranscendentalCache::Type type, double input) {
|
+ TranscendentalCache* cache = caches_[type];
|
+ if (cache == NULL) {
|
+ caches_[type] = cache = new TranscendentalCache(type);
|
+ }
|
+ return cache->Get(input);
|
+ }
|
|
+ // The cache contains raw Object pointers. This method disposes of
|
+ // them before a garbage collection.
|
+ void Clear();
|
+
|
+private:
|
+ TranscendentalCache* caches_[TranscendentalCache::kNumberOfCaches];
|
+ friend class V8Context;
|
+ TranscendentalCaches() {
|
+    for (int i = 0; i < TranscendentalCache::kNumberOfCaches; ++i) caches_[i] = NULL;
|
+ }
|
+ DISALLOW_COPY_AND_ASSIGN(TranscendentalCaches);
|
+};
|
+
|
} } // namespace v8::internal
|
|
#endif // V8_HEAP_H_
|
Index: src/zone-inl.h
|
===================================================================
|
--- src/zone-inl.h (revision 2939)
|
+++ src/zone-inl.h Sat Nov 14 01:43:04 MSK 2009
|
@@ -36,14 +36,15 @@
|
|
|
inline void* Zone::New(int size) {
|
- ASSERT(AssertNoZoneAllocation::allow_allocation());
|
- ASSERT(ZoneScope::nesting() > 0);
|
+ V8Context* const v8context = v8_context();
|
+ ASSERT(AssertNoZoneAllocation::allow_allocation(v8context));
|
+ ASSERT(ZoneScope::nesting(v8context) > 0);
|
// Round up the requested size to fit the alignment.
|
size = RoundUp(size, kAlignment);
|
|
// Check if the requested size is available without expanding.
|
- Address result = position_;
|
- if ((position_ += size) > limit_) result = NewExpand(size);
|
+ Address result = v8context->zone_data_.position_;
|
+ if ((v8context->zone_data_.position_ += size) > v8context->zone_data_.limit_) result = NewExpand(size);
|
|
// Check that the result has the proper alignment and return it.
|
ASSERT(IsAddressAligned(result, kAlignment, 0));
|
@@ -58,13 +59,15 @@
|
|
|
bool Zone::excess_allocation() {
|
- return segment_bytes_allocated_ > zone_excess_limit_;
|
+ V8Context* const v8context = v8_context();
|
+ return v8context->zone_data_.segment_bytes_allocated_ > v8context->zone_data_.zone_excess_limit_;
|
}
|
|
|
void Zone::adjust_segment_bytes_allocated(int delta) {
|
- segment_bytes_allocated_ += delta;
|
- Counters::zone_segment_bytes.Set(segment_bytes_allocated_);
|
+ V8Context* const v8context = v8_context();
|
+ v8context->zone_data_.segment_bytes_allocated_ += delta;
|
+ v8context->counters_.zone_segment_bytes.Set(v8context->zone_data_.segment_bytes_allocated_);
|
}
|
|
|
Index: test/cctest/test-api.cc
|
===================================================================
|
--- test/cctest/test-api.cc (revision 3209)
|
+++ test/cctest/test-api.cc Sun Nov 15 12:38:36 MSK 2009
|
@@ -57,6 +57,8 @@
|
using ::v8::Function;
|
using ::v8::AccessorInfo;
|
using ::v8::Extension;
|
+using ::v8::v8_context;
|
+using v8::V8Context;
|
|
namespace i = ::v8::internal;
|
|
@@ -324,6 +326,7 @@
|
TestResource::dispose_count = 0;
|
const char* c_source = "1 + 2 * 3";
|
uint16_t* two_byte_source = AsciiToTwoByteString(c_source);
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
LocalContext env;
|
@@ -336,11 +339,11 @@
|
CHECK(source->IsExternal());
|
CHECK_EQ(resource,
|
static_cast<TestResource*>(source->GetExternalStringResource()));
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(0, TestResource::dispose_count);
|
}
|
- v8::internal::CompilationCache::Clear();
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->compilation_cache_.Clear();
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(1, TestResource::dispose_count);
|
}
|
|
@@ -348,6 +351,7 @@
|
THREADED_TEST(ScriptUsingAsciiStringResource) {
|
TestAsciiResource::dispose_count = 0;
|
const char* c_source = "1 + 2 * 3";
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
LocalContext env;
|
@@ -357,11 +361,11 @@
|
Local<Value> value = script->Run();
|
CHECK(value->IsNumber());
|
CHECK_EQ(7, value->Int32Value());
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(0, TestAsciiResource::dispose_count);
|
}
|
- v8::internal::CompilationCache::Clear();
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->compilation_cache_.Clear();
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(1, TestAsciiResource::dispose_count);
|
}
|
|
@@ -369,6 +373,7 @@
|
THREADED_TEST(ScriptMakingExternalString) {
|
TestResource::dispose_count = 0;
|
uint16_t* two_byte_source = AsciiToTwoByteString("1 + 2 * 3");
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
LocalContext env;
|
@@ -379,11 +384,11 @@
|
Local<Value> value = script->Run();
|
CHECK(value->IsNumber());
|
CHECK_EQ(7, value->Int32Value());
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(0, TestResource::dispose_count);
|
}
|
- v8::internal::CompilationCache::Clear();
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->compilation_cache_.Clear();
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(1, TestResource::dispose_count);
|
}
|
|
@@ -391,6 +396,7 @@
|
THREADED_TEST(ScriptMakingExternalAsciiString) {
|
TestAsciiResource::dispose_count = 0;
|
const char* c_source = "1 + 2 * 3";
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
LocalContext env;
|
@@ -402,16 +408,17 @@
|
Local<Value> value = script->Run();
|
CHECK(value->IsNumber());
|
CHECK_EQ(7, value->Int32Value());
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(0, TestAsciiResource::dispose_count);
|
}
|
- v8::internal::CompilationCache::Clear();
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->compilation_cache_.Clear();
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(1, TestAsciiResource::dispose_count);
|
}
|
|
|
THREADED_TEST(UsingExternalString) {
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
uint16_t* two_byte_string = AsciiToTwoByteString("test string");
|
@@ -419,17 +426,18 @@
|
String::NewExternal(new TestResource(two_byte_string));
|
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
|
// Trigger GCs so that the newly allocated string moves to old gen.
|
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
|
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
|
+ v8context->heap_.CollectGarbage(0, i::NEW_SPACE); // in survivor space now
|
+ v8context->heap_.CollectGarbage(0, i::NEW_SPACE); // in old gen now
|
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
|
CHECK(isymbol->IsSymbol());
|
}
|
- i::Heap::CollectAllGarbage(false);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
}
|
|
|
THREADED_TEST(UsingExternalAsciiString) {
|
+ V8Context* const v8context = v8_context();
|
{
|
v8::HandleScope scope;
|
const char* one_byte_string = "test string";
|
@@ -437,13 +445,13 @@
|
new TestAsciiResource(i::StrDup(one_byte_string)));
|
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
|
// Trigger GCs so that the newly allocated string moves to old gen.
|
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in survivor space now
|
- i::Heap::CollectGarbage(0, i::NEW_SPACE); // in old gen now
|
+ v8context->heap_.CollectGarbage(0, i::NEW_SPACE); // in survivor space now
|
+ v8context->heap_.CollectGarbage(0, i::NEW_SPACE); // in old gen now
|
i::Handle<i::String> isymbol = i::Factory::SymbolFromString(istring);
|
CHECK(isymbol->IsSymbol());
|
}
|
- i::Heap::CollectAllGarbage(false);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
}
|
|
|
@@ -479,9 +487,10 @@
|
CHECK(value->IsNumber());
|
CHECK_EQ(68, value->Int32Value());
|
}
|
- v8::internal::CompilationCache::Clear();
|
- i::Heap::CollectAllGarbage(false);
|
- i::Heap::CollectAllGarbage(false);
|
+ V8Context* const v8context = v8_context();
|
+ v8context->compilation_cache_.Clear();
|
+ v8context->heap_.CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
}
|
|
|
@@ -1216,15 +1225,15 @@
|
CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
|
void* unaligned = data + 1;
|
CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
|
-
|
+ V8Context* const v8context = v8_context();
|
// Check reading and writing aligned pointers.
|
obj->SetPointerInInternalField(0, aligned);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
|
|
// Check reading and writing unaligned pointers.
|
obj->SetPointerInInternalField(0, unaligned);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
|
|
delete[] data;
|
@@ -1232,6 +1241,7 @@
|
|
|
THREADED_TEST(InternalFieldsNativePointersAndExternal) {
|
+ V8Context* const v8context = v8_context();
|
v8::HandleScope scope;
|
LocalContext env;
|
|
@@ -1250,19 +1260,19 @@
|
CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
|
|
obj->SetPointerInInternalField(0, aligned);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(aligned, v8::External::Unwrap(obj->GetInternalField(0)));
|
|
obj->SetPointerInInternalField(0, unaligned);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(unaligned, v8::External::Unwrap(obj->GetInternalField(0)));
|
|
obj->SetInternalField(0, v8::External::Wrap(aligned));
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
|
|
obj->SetInternalField(0, v8::External::Wrap(unaligned));
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
|
|
delete[] data;
|
@@ -1270,12 +1280,13 @@
|
|
|
THREADED_TEST(IdentityHash) {
|
+ V8Context* const v8context = v8_context();
|
v8::HandleScope scope;
|
LocalContext env;
|
|
// Ensure that the test starts with an fresh heap to test whether the hash
|
// code is based on the address.
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
Local<v8::Object> obj = v8::Object::New();
|
int hash = obj->GetIdentityHash();
|
int hash1 = obj->GetIdentityHash();
|
@@ -1285,7 +1296,7 @@
|
// objects should not be assigned the same hash code. If the test below fails
|
// the random number generator should be evaluated.
|
CHECK_NE(hash, hash2);
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
int hash3 = v8::Object::New()->GetIdentityHash();
|
// Make sure that the identity hash is not based on the initial address of
|
// the object alone. If the test below fails the random number generator
|
@@ -1305,7 +1316,8 @@
|
v8::Local<v8::String> empty = v8_str("");
|
v8::Local<v8::String> prop_name = v8_str("prop_name");
|
|
- i::Heap::CollectAllGarbage(false);
|
+ V8Context* const v8context = v8_context();
|
+ v8context->heap_.CollectAllGarbage(false);
|
|
// Make sure delete of a non-existent hidden value works
|
CHECK(obj->DeleteHiddenValue(key));
|
@@ -1315,7 +1327,7 @@
|
CHECK(obj->SetHiddenValue(key, v8::Integer::New(2002)));
|
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
|
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
|
// Make sure we do not find the hidden property.
|
CHECK(!obj->Has(empty));
|
@@ -1326,7 +1338,7 @@
|
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
|
CHECK_EQ(2003, obj->Get(empty)->Int32Value());
|
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
|
// Add another property and delete it afterwards to force the object in
|
// slow case.
|
@@ -1337,7 +1349,7 @@
|
CHECK(obj->Delete(prop_name));
|
CHECK_EQ(2002, obj->GetHiddenValue(key)->Int32Value());
|
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
|
CHECK(obj->DeleteHiddenValue(key));
|
CHECK(obj->GetHiddenValue(key).IsEmpty());
|
@@ -1351,9 +1363,10 @@
|
if (!i::FLAG_never_compact) {
|
i::FLAG_always_compact = true;
|
}
|
+ V8Context* const v8context = v8_context();
|
// The whole goal of this interceptor is to cause a GC during local property
|
// lookup.
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
i::FLAG_always_compact = saved_always_compact;
|
return v8::Handle<Value>();
|
}
|
@@ -2005,7 +2018,7 @@
|
if (try_catch.HasCaught()) {
|
CHECK_EQ(expected, count);
|
CHECK(result.IsEmpty());
|
- CHECK(!i::Top::has_scheduled_exception());
|
+ CHECK(!v8_context()->top_.has_scheduled_exception());
|
} else {
|
CHECK_NE(expected, count);
|
}
|
@@ -2930,7 +2943,7 @@
|
obj.Dispose();
|
obj.Clear();
|
in_scavenge = true;
|
- i::Heap::PerformScavenge();
|
+ v8_context()->heap_.PerformScavenge();
|
in_scavenge = false;
|
*(reinterpret_cast<bool*>(data)) = true;
|
}
|
@@ -2967,7 +2980,7 @@
|
object_b.MakeWeak(&released_in_scavenge, &CheckIsNotInvokedInScavenge);
|
|
while (!object_a_disposed) {
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
CHECK(!released_in_scavenge);
|
}
|
@@ -2985,7 +2998,7 @@
|
CHECK_EQ(v8::Integer::New(3), args[2]);
|
CHECK_EQ(v8::Undefined(), args[3]);
|
v8::HandleScope scope;
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
return v8::Undefined();
|
}
|
|
@@ -4916,7 +4929,7 @@
|
Local<String> name,
|
const AccessorInfo& info) {
|
ApiTestFuzzer::Fuzz();
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
return v8::Handle<Value>();
|
}
|
|
@@ -6122,13 +6135,14 @@
|
|
static int GetSurvivingGlobalObjectsCount() {
|
int count = 0;
|
+ V8Context* const v8context = v8_context();
|
// We need to collect all garbage twice to be sure that everything
|
// has been collected. This is because inline caches are cleared in
|
// the first garbage collection but some of the maps have already
|
// been marked at that point. Therefore some of the maps are not
|
// collected until the second garbage collection.
|
- v8::internal::Heap::CollectAllGarbage(false);
|
- v8::internal::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
v8::internal::HeapIterator it;
|
while (it.has_next()) {
|
v8::internal::HeapObject* object = it.next();
|
@@ -6137,7 +6151,7 @@
|
}
|
}
|
#ifdef DEBUG
|
- if (count > 0) v8::internal::Heap::TracePathToGlobal();
|
+ if (count > 0) v8context->heap_.TracePathToGlobal();
|
#endif
|
return count;
|
}
|
@@ -6204,7 +6218,7 @@
|
// weak callback of the first handle would be able to 'reallocate' it.
|
handle1.MakeWeak(NULL, NewPersistentHandleCallback);
|
handle2.Dispose();
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
|
|
@@ -6212,7 +6226,7 @@
|
|
void DisposeAndForceGcCallback(v8::Persistent<v8::Value> handle, void*) {
|
to_be_disposed.Dispose();
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
|
|
@@ -6227,7 +6241,7 @@
|
}
|
handle1.MakeWeak(NULL, DisposeAndForceGcCallback);
|
to_be_disposed = handle2;
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
}
|
|
|
@@ -6757,7 +6771,7 @@
|
{
|
v8::Locker lock;
|
// TODO(lrn): Perhaps create some garbage before collecting.
|
- i::Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
gc_count_++;
|
}
|
i::OS::Sleep(1);
|
@@ -6875,10 +6889,11 @@
|
|
void CollectGarbage() {
|
block_->Wait();
|
+ V8Context* const v8context = v8_context();
|
while (gc_during_apply_ < kRequiredGCs) {
|
{
|
v8::Locker lock;
|
- i::Heap::CollectAllGarbage(false);
|
+ v8context->heap_.CollectAllGarbage(false);
|
gc_count_++;
|
}
|
i::OS::Sleep(1);
|
@@ -7004,29 +7019,30 @@
|
static void MorphAString(i::String* string,
|
AsciiVectorResource* ascii_resource,
|
UC16VectorResource* uc16_resource) {
|
+ V8Context* const v8context = v8_context();
|
CHECK(i::StringShape(string).IsExternal());
|
if (string->IsAsciiRepresentation()) {
|
// Check old map is not symbol or long.
|
- CHECK(string->map() == i::Heap::short_external_ascii_string_map() ||
|
- string->map() == i::Heap::medium_external_ascii_string_map());
|
+ CHECK(string->map() == v8context->heap_.short_external_ascii_string_map() ||
|
+ string->map() == v8context->heap_.medium_external_ascii_string_map());
|
// Morph external string to be TwoByte string.
|
if (string->length() <= i::String::kMaxShortSize) {
|
- string->set_map(i::Heap::short_external_string_map());
|
+ string->set_map(v8context->heap_.short_external_string_map());
|
} else {
|
- string->set_map(i::Heap::medium_external_string_map());
|
+ string->set_map(v8context->heap_.medium_external_string_map());
|
}
|
i::ExternalTwoByteString* morphed =
|
i::ExternalTwoByteString::cast(string);
|
morphed->set_resource(uc16_resource);
|
} else {
|
// Check old map is not symbol or long.
|
- CHECK(string->map() == i::Heap::short_external_string_map() ||
|
- string->map() == i::Heap::medium_external_string_map());
|
+ CHECK(string->map() == v8context->heap_.short_external_string_map() ||
|
+ string->map() == v8context->heap_.medium_external_string_map());
|
// Morph external string to be ASCII string.
|
if (string->length() <= i::String::kMaxShortSize) {
|
- string->set_map(i::Heap::short_external_ascii_string_map());
|
+ string->set_map(v8context->heap_.short_external_ascii_string_map());
|
} else {
|
- string->set_map(i::Heap::medium_external_ascii_string_map());
|
+ string->set_map(v8context->heap_.medium_external_ascii_string_map());
|
}
|
i::ExternalAsciiString* morphed =
|
i::ExternalAsciiString::cast(string);
|
@@ -7139,7 +7155,7 @@
|
// Inject the input as a global variable.
|
i::Handle<i::String> input_name =
|
i::Factory::NewStringFromAscii(i::Vector<const char>("input", 5));
|
- i::Top::global_context()->global()->SetProperty(*input_name, *input_, NONE);
|
+ v8_context()->top_.global_context()->global()->SetProperty(*input_name, *input_, NONE);
|
|
|
MorphThread morph_thread(this);
|
@@ -7595,11 +7611,12 @@
|
uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(kElementCount));
|
i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(kElementCount,
|
pixel_data);
|
- i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
|
+ V8Context* const v8context = v8_context();
|
+ v8context->heap_.CollectAllGarbage(false); // Force GC to trigger verification.
|
for (int i = 0; i < kElementCount; i++) {
|
pixels->set(i, i);
|
}
|
- i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
|
+ v8context->heap_.CollectAllGarbage(false); // Force GC to trigger verification.
|
for (int i = 0; i < kElementCount; i++) {
|
CHECK_EQ(i, pixels->get(i));
|
CHECK_EQ(i, pixel_data[i]);
|
@@ -7801,11 +7818,12 @@
|
i::Handle<ExternalArrayClass> array =
|
i::Handle<ExternalArrayClass>::cast(
|
i::Factory::NewExternalArray(kElementCount, array_type, array_data));
|
- i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
|
+ V8Context* const v8context = v8_context();
|
+ v8context->heap_.CollectAllGarbage(false); // Force GC to trigger verification.
|
for (int i = 0; i < kElementCount; i++) {
|
array->set(i, static_cast<ElementType>(i));
|
}
|
- i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
|
+ v8context->heap_.CollectAllGarbage(false); // Force GC to trigger verification.
|
for (int i = 0; i < kElementCount; i++) {
|
CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array->get(i)));
|
CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
|
@@ -7921,7 +7939,7 @@
|
" }"
|
"}"
|
"sum;");
|
- i::Heap::CollectAllGarbage(false); // Force GC to trigger verification.
|
+ v8context->heap_.CollectAllGarbage(false); // Force GC to trigger verification.
|
CHECK_EQ(28, result->Int32Value());
|
|
// Make sure out-of-range loads do not throw.
|
@@ -8142,7 +8160,7 @@
|
static uint32_t* stack_limit;
|
|
static v8::Handle<Value> GetStackLimitCallback(const v8::Arguments& args) {
|
- stack_limit = reinterpret_cast<uint32_t*>(i::StackGuard::climit());
|
+ stack_limit = reinterpret_cast<uint32_t*>(v8_context()->stack_guard_.climit());
|
return v8::Undefined();
|
}
|
|
Index: src/scanner.cc
|
===================================================================
|
--- src/scanner.cc (revision 3223)
|
+++ src/scanner.cc Sat Nov 14 01:43:05 MSK 2009
|
@@ -36,16 +36,9 @@
|
// ----------------------------------------------------------------------------
|
// Character predicates
|
|
+Scanner::Scanner() {}
|
|
-unibrow::Predicate<IdentifierStart, 128> Scanner::kIsIdentifierStart;
|
-unibrow::Predicate<IdentifierPart, 128> Scanner::kIsIdentifierPart;
|
-unibrow::Predicate<unibrow::LineTerminator, 128> Scanner::kIsLineTerminator;
|
-unibrow::Predicate<unibrow::WhiteSpace, 128> Scanner::kIsWhiteSpace;
|
|
-
|
-StaticResource<Scanner::Utf8Decoder> Scanner::utf8_decoder_;
|
-
|
-
|
// ----------------------------------------------------------------------------
|
// UTF8Buffer
|
|
@@ -1027,7 +1020,13 @@
|
bool Scanner::IsIdentifier(unibrow::CharacterStream* buffer) {
|
// Checks whether the buffer contains an identifier (no escape).
|
if (!buffer->has_more()) return false;
|
- if (!kIsIdentifierStart.get(buffer->GetNext())) return false;
|
+
|
+ unibrow::Predicate<v8::internal::IdentifierStart, 128> & kIsIdentifierStartPredicate =
|
+ v8_context()->scanner_.kIsIdentifierStart;
|
+ if (!kIsIdentifierStartPredicate.get(buffer->GetNext())) return false;
|
+ unibrow::Predicate<v8::internal::IdentifierPart, 128> & kIsIdentifierPart =
|
+ v8_context()->scanner_.kIsIdentifierPart;
|
+
|
while (buffer->has_more()) {
|
if (!kIsIdentifierPart.get(buffer->GetNext())) return false;
|
}
|
Index: src/serialize.h
|
===================================================================
|
--- src/serialize.h (revision 3238)
|
+++ src/serialize.h Sat Nov 14 01:43:01 MSK 2009
|
@@ -193,7 +193,7 @@
|
int roots_; // number of roots visited
|
int objects_; // number of objects serialized
|
|
- static bool serialization_enabled_;
|
+ static bool serialization_enabled_; ///static
|
|
int flags_end_; // The position right after the flags.
|
|
Index: src/objects.cc
|
===================================================================
|
--- src/objects.cc (revision 3238)
|
+++ src/objects.cc Sat Nov 14 01:42:59 MSK 2009
|
@@ -53,7 +53,7 @@
|
|
|
static Object* CreateJSValue(JSFunction* constructor, Object* value) {
|
- Object* result = Heap::AllocateJSObject(constructor);
|
+ Object* result = v8_context()->heap_.AllocateJSObject(constructor);
|
if (result->IsFailure()) return result;
|
JSValue::cast(result)->set_value(value);
|
return result;
|
@@ -74,7 +74,7 @@
|
|
|
Object* Object::ToObject() {
|
- Context* global_context = Top::context()->global_context();
|
+ Context* global_context = v8_context()->top_.context()->global_context();
|
if (IsJSObject()) {
|
return this;
|
} else if (IsNumber()) {
|
@@ -91,30 +91,31 @@
|
|
|
Object* Object::ToBoolean() {
|
- if (IsTrue()) return Heap::true_value();
|
- if (IsFalse()) return Heap::false_value();
|
+ Heap& heap = v8_context()->heap_;
|
+ if (IsTrue()) return heap.true_value();
|
+ if (IsFalse()) return heap.false_value();
|
if (IsSmi()) {
|
- return Heap::ToBoolean(Smi::cast(this)->value() != 0);
|
+ return heap.ToBoolean(Smi::cast(this)->value() != 0);
|
}
|
- if (IsUndefined() || IsNull()) return Heap::false_value();
|
+ if (IsUndefined() || IsNull()) return heap.false_value();
|
// Undetectable object is false
|
if (IsUndetectableObject()) {
|
- return Heap::false_value();
|
+ return heap.false_value();
|
}
|
if (IsString()) {
|
- return Heap::ToBoolean(String::cast(this)->length() != 0);
|
+ return heap.ToBoolean(String::cast(this)->length() != 0);
|
}
|
if (IsHeapNumber()) {
|
return HeapNumber::cast(this)->HeapNumberToBoolean();
|
}
|
- return Heap::true_value();
|
+ return heap.true_value();
|
}
|
|
|
void Object::Lookup(String* name, LookupResult* result) {
|
if (IsJSObject()) return JSObject::cast(this)->Lookup(name, result);
|
Object* holder = NULL;
|
- Context* global_context = Top::context()->global_context();
|
+ Context* global_context = v8_context()->top_.context()->global_context();
|
if (IsString()) {
|
holder = global_context->string_function()->instance_prototype();
|
} else if (IsNumber()) {
|
@@ -172,7 +173,7 @@
|
result = call_fun(v8::Utils::ToLocal(key), info);
|
}
|
RETURN_IF_SCHEDULED_EXCEPTION();
|
- if (result.IsEmpty()) return Heap::undefined_value();
|
+ if (result.IsEmpty()) return v8_context()->heap_.undefined_value();
|
return *v8::Utils::OpenHandle(*result);
|
}
|
|
@@ -184,7 +185,7 @@
|
JSFunction::cast(getter));
|
}
|
// Getter is not a function.
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
UNREACHABLE();
|
@@ -198,9 +199,10 @@
|
Handle<JSFunction> fun(JSFunction::cast(getter));
|
Handle<Object> self(receiver);
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
+ Debug& debug = v8_context()->debug_;
|
// Handle stepping into a getter if step into is active.
|
- if (Debug::StepInActive()) {
|
- Debug::HandleStepIn(fun, Handle<Object>::null(), 0, false);
|
+ if (debug.StepInActive()) {
|
+ debug.HandleStepIn(fun, Handle<Object>::null(), 0, false);
|
}
|
#endif
|
bool has_pending_exception;
|
@@ -269,8 +271,8 @@
|
|
// No accessible property found.
|
*attributes = ABSENT;
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_GET);
|
- return Heap::undefined_value();
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_GET);
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -333,7 +335,7 @@
|
}
|
}
|
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return ABSENT;
|
}
|
|
@@ -420,7 +422,7 @@
|
if (entry == StringDictionary::kNotFound) {
|
Object* store_value = value;
|
if (IsGlobalObject()) {
|
- store_value = Heap::AllocateJSGlobalPropertyCell(value);
|
+ store_value = v8_context()->heap_.AllocateJSGlobalPropertyCell(value);
|
if (store_value->IsFailure()) return store_value;
|
}
|
Object* dict = property_dictionary()->Add(name, store_value, details);
|
@@ -449,12 +451,13 @@
|
ASSERT(!HasFastProperties());
|
StringDictionary* dictionary = property_dictionary();
|
int entry = dictionary->FindEntry(name);
|
+ Heap& heap = v8_context()->heap_;
|
if (entry != StringDictionary::kNotFound) {
|
// If we have a global object set the cell to the hole.
|
if (IsGlobalObject()) {
|
PropertyDetails details = dictionary->DetailsAt(entry);
|
if (details.IsDontDelete()) {
|
- if (mode != FORCE_DELETION) return Heap::false_value();
|
+ if (mode != FORCE_DELETION) return heap.false_value();
|
// When forced to delete global properties, we have to make a
|
// map change to invalidate any ICs that think they can load
|
// from the DontDelete cell without checking if it contains
|
@@ -465,13 +468,13 @@
|
}
|
JSGlobalPropertyCell* cell =
|
JSGlobalPropertyCell::cast(dictionary->ValueAt(entry));
|
- cell->set_value(Heap::the_hole_value());
|
+ cell->set_value(heap.the_hole_value());
|
dictionary->DetailsAtPut(entry, details.AsDeleted());
|
} else {
|
return dictionary->DeleteProperty(entry, mode);
|
}
|
}
|
- return Heap::true_value();
|
+ return heap.true_value();
|
}
|
|
|
@@ -497,14 +500,14 @@
|
// Make sure that the top context does not change when doing
|
// callbacks or interceptor calls.
|
AssertNoContextChange ncc;
|
-
|
+ Heap& heap = v8_context()->heap_;
|
// Traverse the prototype chain from the current object (this) to
|
// the holder and check for access rights. This avoid traversing the
|
// objects more than once in case of interceptors, because the
|
// holder will always be the interceptor holder and the search may
|
// only continue with a current object just after the interceptor
|
// holder in the prototype chain.
|
- Object* last = result->IsValid() ? result->holder() : Heap::null_value();
|
+ Object* last = result->IsValid() ? result->holder() : heap.null_value();
|
for (Object* current = this; true; current = current->GetPrototype()) {
|
if (current->IsAccessCheckNeeded()) {
|
// Check if we're allowed to read from the current object. Note
|
@@ -512,7 +515,7 @@
|
// property from the current object, we still check that we have
|
// access to it.
|
JSObject* checked = JSObject::cast(current);
|
- if (!Top::MayNamedAccess(checked, name, v8::ACCESS_GET)) {
|
+ if (!v8_context()->top_.MayNamedAccess(checked, name, v8::ACCESS_GET)) {
|
return checked->GetPropertyWithFailedAccessCheck(receiver,
|
result,
|
name,
|
@@ -527,7 +530,7 @@
|
|
if (!result->IsProperty()) {
|
*attributes = ABSENT;
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
*attributes = result->GetAttributes();
|
if (!result->IsLoaded()) {
|
@@ -542,11 +545,11 @@
|
case NORMAL:
|
value = holder->GetNormalizedProperty(result);
|
ASSERT(!value->IsTheHole() || result->IsReadOnly());
|
- return value->IsTheHole() ? Heap::undefined_value() : value;
|
+ return value->IsTheHole() ? heap.undefined_value() : value;
|
case FIELD:
|
value = holder->FastPropertyAt(result->GetFieldIndex());
|
ASSERT(!value->IsTheHole() || result->IsReadOnly());
|
- return value->IsTheHole() ? Heap::undefined_value() : value;
|
+ return value->IsTheHole() ? heap.undefined_value() : value;
|
case CONSTANT_FUNCTION:
|
return result->GetConstantFunction();
|
case CALLBACKS:
|
@@ -567,7 +570,7 @@
|
|
Object* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
|
// Non-JS objects do not have integer indexed properties.
|
- if (!IsJSObject()) return Heap::undefined_value();
|
+ if (!IsJSObject()) return v8_context()->heap_.undefined_value();
|
return JSObject::cast(this)->GetElementWithReceiver(JSObject::cast(receiver),
|
index);
|
}
|
@@ -576,14 +579,14 @@
|
Object* Object::GetPrototype() {
|
// The object is either a number, a string, a boolean, or a real JS object.
|
if (IsJSObject()) return JSObject::cast(this)->map()->prototype();
|
- Context* context = Top::context()->global_context();
|
+ Context* context = v8_context()->top_.context()->global_context();
|
|
if (IsNumber()) return context->number_function()->instance_prototype();
|
if (IsString()) return context->string_function()->instance_prototype();
|
if (IsBoolean()) {
|
return context->boolean_function()->instance_prototype();
|
} else {
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
}
|
|
@@ -642,7 +645,7 @@
|
!Smi::IsValid(value) ||
|
value != ((value << kFailureTypeTagSize) >> kFailureTypeTagSize) ||
|
!Smi::IsValid(value << kFailureTypeTagSize)) {
|
- Top::context()->mark_out_of_memory();
|
+ v8_context()->top_.context()->mark_out_of_memory();
|
return Failure::OutOfMemoryException();
|
}
|
return Construct(RETRY_AFTER_GC, value);
|
@@ -679,7 +682,7 @@
|
// allowed. This is to avoid an assertion failure when allocating.
|
// Flattening strings is the only case where we always allow
|
// allocation because no GC is performed if the allocation fails.
|
- if (!Heap::IsAllocationAllowed()) return this;
|
+ if (!v8_context()->heap_.IsAllocationAllowed()) return this;
|
#endif
|
|
switch (StringShape(this).representation_tag()) {
|
@@ -708,12 +711,12 @@
|
// There's little point in putting the flat string in new space if the
|
// cons string is in old space. It can never get GCed until there is
|
// an old space GC.
|
- PretenureFlag tenure = Heap::InNewSpace(this) ? NOT_TENURED : TENURED;
|
+ PretenureFlag tenure = v8_context()->heap_.InNewSpace(this) ? NOT_TENURED : TENURED;
|
int len = length();
|
Object* object;
|
String* result;
|
if (IsAsciiRepresentation()) {
|
- object = Heap::AllocateRawAsciiString(len, tenure);
|
+ object = v8_context()->heap_.AllocateRawAsciiString(len, tenure);
|
if (object->IsFailure()) return object;
|
result = String::cast(object);
|
String* first = cs->first();
|
@@ -726,7 +729,7 @@
|
0,
|
len - first_length);
|
} else {
|
- object = Heap::AllocateRawTwoByteString(len, tenure);
|
+ object = v8_context()->heap_.AllocateRawTwoByteString(len, tenure);
|
if (object->IsFailure()) return object;
|
result = String::cast(object);
|
uc16* dest = SeqTwoByteString::cast(result)->GetChars();
|
@@ -740,7 +743,7 @@
|
len - first_length);
|
}
|
cs->set_first(result);
|
- cs->set_second(Heap::empty_string());
|
+ cs->set_second(v8_context()->heap_.empty_string());
|
return this;
|
}
|
default:
|
@@ -788,7 +791,7 @@
|
|
// Fill the remainder of the string with dead wood.
|
int new_size = this->Size(); // Byte size of the external String object.
|
- Heap::CreateFillerObjectAt(this->address() + new_size, size - new_size);
|
+ v8_context()->heap_.CreateFillerObjectAt(this->address() + new_size, size - new_size);
|
return true;
|
}
|
|
@@ -832,7 +835,7 @@
|
|
// Fill the remainder of the string with dead wood.
|
int new_size = this->Size(); // Byte size of the external String object.
|
- Heap::CreateFillerObjectAt(this->address() + new_size, size - new_size);
|
+ v8_context()->heap_.CreateFillerObjectAt(this->address() + new_size, size - new_size);
|
return true;
|
}
|
|
@@ -934,12 +937,12 @@
|
Object* constructor = map()->constructor();
|
bool printed = false;
|
if (constructor->IsHeapObject() &&
|
- !Heap::Contains(HeapObject::cast(constructor))) {
|
+ !v8_context()->heap_.Contains(HeapObject::cast(constructor))) {
|
accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
|
} else {
|
bool global_object = IsJSGlobalProxy();
|
if (constructor->IsJSFunction()) {
|
- if (!Heap::Contains(JSFunction::cast(constructor)->shared())) {
|
+ if (!v8_context()->heap_.Contains(JSFunction::cast(constructor)->shared())) {
|
accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
|
} else {
|
Object* constructor_name =
|
@@ -974,12 +977,12 @@
|
|
|
void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
|
- // if (!Heap::InNewSpace(this)) PrintF("*", this);
|
- if (!Heap::Contains(this)) {
|
+ // if (!v8_context()->heap_.InNewSpace(this)) PrintF("*", this);
|
+ if (!v8_context()->heap_.Contains(this)) {
|
accumulator->Add("!!!INVALID POINTER!!!");
|
return;
|
}
|
- if (!Heap::Contains(map())) {
|
+ if (!v8_context()->heap_.Contains(map())) {
|
accumulator->Add("!!!INVALID MAP!!!");
|
return;
|
}
|
@@ -1221,8 +1224,8 @@
|
// NaN, +0, and -0 should return the false object
|
switch (fpclassify(value())) {
|
case FP_NAN: // fall through
|
- case FP_ZERO: return Heap::false_value();
|
- default: return Heap::true_value();
|
+ case FP_ZERO: return v8_context()->heap_.false_value();
|
+ default: return v8_context()->heap_.true_value();
|
}
|
}
|
|
@@ -1247,21 +1250,21 @@
|
|
String* JSObject::class_name() {
|
if (IsJSFunction()) {
|
- return Heap::function_class_symbol();
|
+ return v8_context()->heap_.function_class_symbol();
|
}
|
if (map()->constructor()->IsJSFunction()) {
|
JSFunction* constructor = JSFunction::cast(map()->constructor());
|
return String::cast(constructor->shared()->instance_class_name());
|
}
|
// If the constructor is not present, return "Object".
|
- return Heap::Object_symbol();
|
+ return v8_context()->heap_.Object_symbol();
|
}
|
|
|
String* JSObject::constructor_name() {
|
if (IsJSFunction()) {
|
return JSFunction::cast(this)->IsBoilerplate() ?
|
- Heap::function_class_symbol() : Heap::closure_symbol();
|
+ v8_context()->heap_.function_class_symbol() : v8_context()->heap_.closure_symbol();
|
}
|
if (map()->constructor()->IsJSFunction()) {
|
JSFunction* constructor = JSFunction::cast(map()->constructor());
|
@@ -1269,7 +1272,7 @@
|
return name->length() > 0 ? name : constructor->shared()->inferred_name();
|
}
|
// If the constructor is not present, return "Object".
|
- return Heap::Object_symbol();
|
+ return v8_context()->heap_.Object_symbol();
|
}
|
|
|
@@ -1302,7 +1305,7 @@
|
// Normalize the object if the name is an actual string (not the
|
// hidden symbols) and is not a real identifier.
|
StringInputBuffer buffer(name);
|
- if (!Scanner::IsIdentifier(&buffer) && name != Heap::hidden_symbol()) {
|
+ if (!Scanner::IsIdentifier(&buffer) && name != v8_context()->heap_.hidden_symbol()) {
|
Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
|
if (obj->IsFailure()) return obj;
|
return AddSlowProperty(name, value, attributes);
|
@@ -1322,7 +1325,7 @@
|
// global object_function's map and there is not a transition for name.
|
bool allow_map_transition =
|
!old_descriptors->Contains(name) &&
|
- (Top::context()->global_context()->object_function()->map() != map());
|
+ (v8_context()->top_.context()->global_context()->object_function()->map() != map());
|
|
ASSERT(index < map()->inobject_properties() ||
|
(index - map()->inobject_properties()) < properties()->length() ||
|
@@ -1383,7 +1386,7 @@
|
|
// If the old map is the global object map (from new Object()),
|
// then transitions are not added to it, so we are done.
|
- if (old_map == Top::context()->global_context()->object_function()->map()) {
|
+ if (old_map == v8_context()->top_.context()->global_context()->object_function()->map()) {
|
return function;
|
}
|
|
@@ -1432,7 +1435,7 @@
|
dict->SetEntry(entry, name, store_value, details);
|
return value;
|
}
|
- store_value = Heap::AllocateJSGlobalPropertyCell(value);
|
+ store_value = v8_context()->heap_.AllocateJSGlobalPropertyCell(value);
|
if (store_value->IsFailure()) return store_value;
|
JSGlobalPropertyCell::cast(store_value)->set_value(value);
|
}
|
@@ -1511,7 +1514,7 @@
|
return result;
|
}
|
// Do not add transitions to the map of "new Object()".
|
- if (map() == Top::context()->global_context()->object_function()->map()) {
|
+ if (map() == v8_context()->top_.context()->global_context()->object_function()->map()) {
|
return result;
|
}
|
|
@@ -1594,7 +1597,7 @@
|
// Leaving JavaScript.
|
VMState state(EXTERNAL);
|
Handle<Object> value_unhole(value->IsTheHole() ?
|
- Heap::undefined_value() :
|
+ v8_context()->heap_.undefined_value() :
|
value);
|
result = setter(v8::Utils::ToLocal(name_handle),
|
v8::Utils::ToLocal(value_unhole),
|
@@ -1672,7 +1675,7 @@
|
Handle<String> key(name);
|
Handle<Object> holder_handle(holder);
|
Handle<Object> args[2] = { key, holder_handle };
|
- return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
|
+ return v8_context()->top_.Throw(*Factory::NewTypeError("no_setter_in_callback",
|
HandleVector(args, 2)));
|
}
|
}
|
@@ -1688,9 +1691,10 @@
|
Handle<JSFunction> fun(JSFunction::cast(setter));
|
Handle<JSObject> self(this);
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
+ Debug& debug = v8_context()->debug_;
|
// Handle stepping into a setter if step into is active.
|
- if (Debug::StepInActive()) {
|
- Debug::HandleStepIn(fun, Handle<Object>::null(), 0, false);
|
+ if (debug.StepInActive()) {
|
+ debug.HandleStepIn(fun, Handle<Object>::null(), 0, false);
|
}
|
#endif
|
bool has_pending_exception;
|
@@ -1705,7 +1709,7 @@
|
void JSObject::LookupCallbackSetterInPrototypes(String* name,
|
LookupResult* result) {
|
for (Object* pt = GetPrototype();
|
- pt != Heap::null_value();
|
+ pt != v8_context()->heap_.null_value();
|
pt = pt->GetPrototype()) {
|
JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
|
if (result->IsValid()) {
|
@@ -1724,7 +1728,7 @@
|
|
Object* JSObject::LookupCallbackSetterInPrototypes(uint32_t index) {
|
for (Object* pt = GetPrototype();
|
- pt != Heap::null_value();
|
+ pt != v8_context()->heap_.null_value();
|
pt = pt->GetPrototype()) {
|
if (!JSObject::cast(pt)->HasDictionaryElements()) {
|
continue;
|
@@ -1740,16 +1744,18 @@
|
}
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
void JSObject::LookupInDescriptor(String* name, LookupResult* result) {
|
DescriptorArray* descriptors = map()->instance_descriptors();
|
- int number = DescriptorLookupCache::Lookup(descriptors, name);
|
+ V8Context* const v8context = v8_context();
|
+ int number = v8context->descriptor_lookup_cache_.Lookup(descriptors, name);
|
+
|
if (number == DescriptorLookupCache::kAbsent) {
|
number = descriptors->Search(name);
|
- DescriptorLookupCache::Update(descriptors, name, number);
|
+ v8context->descriptor_lookup_cache_.Update(descriptors, name, number);
|
}
|
if (number != DescriptorArray::kNotFound) {
|
result->DescriptorResult(this, descriptors->GetDetails(number), number);
|
@@ -1817,7 +1823,7 @@
|
void JSObject::LookupRealNamedPropertyInPrototypes(String* name,
|
LookupResult* result) {
|
for (Object* pt = GetPrototype();
|
- pt != Heap::null_value();
|
+ pt != v8_context()->heap_.null_value();
|
pt = JSObject::cast(pt)->GetPrototype()) {
|
JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
|
if (result->IsValid()) {
|
@@ -1876,7 +1882,7 @@
|
}
|
}
|
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
return value;
|
}
|
|
@@ -1891,7 +1897,7 @@
|
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded()
|
- && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
+ && !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
return SetPropertyWithFailedAccessCheck(result, name, value);
|
}
|
|
@@ -1983,7 +1989,7 @@
|
// END ADDED TO CLONE
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded()
|
- && !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
+ && !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
return SetPropertyWithFailedAccessCheck(result, name, value);
|
}
|
|
@@ -2054,7 +2060,7 @@
|
if (continue_search) {
|
// Continue searching via the prototype chain.
|
Object* pt = GetPrototype();
|
- if (pt != Heap::null_value()) {
|
+ if (pt != v8_context()->heap_.null_value()) {
|
return JSObject::cast(pt)->
|
GetPropertyAttributeWithReceiver(receiver, name);
|
}
|
@@ -2132,7 +2138,7 @@
|
bool continue_search) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
+ !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
return GetPropertyAttributeWithFailedAccessCheck(receiver,
|
result,
|
name,
|
@@ -2251,7 +2257,7 @@
|
int new_instance_size = map()->instance_size() - instance_size_delta;
|
new_map->set_inobject_properties(0);
|
new_map->set_instance_size(new_instance_size);
|
- Heap::CreateFillerObjectAt(this->address() + new_instance_size,
|
+ v8_context()->heap_.CreateFillerObjectAt(this->address() + new_instance_size,
|
instance_size_delta);
|
}
|
new_map->set_unused_property_fields(0);
|
@@ -2259,11 +2265,11 @@
|
// We have now successfully allocated all the necessary objects.
|
// Changes can now be made with the guarantee that all of them take effect.
|
set_map(new_map);
|
- map()->set_instance_descriptors(Heap::empty_descriptor_array());
|
+ map()->set_instance_descriptors(v8_context()->heap_.empty_descriptor_array());
|
|
set_properties(dictionary);
|
|
- Counters::props_to_dictionary.Increment();
|
+ v8_context()->counters_.props_to_dictionary.Increment();
|
|
#ifdef DEBUG
|
if (FLAG_trace_normalization) {
|
@@ -2310,7 +2316,7 @@
|
// Switch to using the dictionary as the backing storage for elements.
|
set_elements(dictionary);
|
|
- Counters::elements_to_dictionary.Increment();
|
+ v8_context()->counters_.elements_to_dictionary.Increment();
|
|
#ifdef DEBUG
|
if (FLAG_trace_normalization) {
|
@@ -2327,7 +2333,7 @@
|
// Check local property, ignore interceptor.
|
LookupResult result;
|
LocalLookupRealNamedProperty(name, &result);
|
- if (!result.IsValid()) return Heap::true_value();
|
+ if (!result.IsValid()) return v8_context()->heap_.true_value();
|
|
// Normalize object if needed.
|
Object* obj = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
|
@@ -2392,7 +2398,7 @@
|
UNREACHABLE();
|
break;
|
}
|
- return Heap::true_value();
|
+ return v8_context()->heap_.true_value();
|
}
|
|
|
@@ -2402,7 +2408,7 @@
|
AssertNoContextChange ncc;
|
HandleScope scope;
|
Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
|
- if (interceptor->deleter()->IsUndefined()) return Heap::false_value();
|
+ if (interceptor->deleter()->IsUndefined()) return v8_context()->heap_.false_value();
|
v8::IndexedPropertyDeleter deleter =
|
v8::ToCData<v8::IndexedPropertyDeleter>(interceptor->deleter());
|
Handle<JSObject> this_handle(this);
|
@@ -2428,16 +2434,17 @@
|
|
|
Object* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
|
+ Heap& heap = v8_context()->heap_;
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_DELETE)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
|
- return Heap::false_value();
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_DELETE)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
|
+ return heap.false_value();
|
}
|
|
if (IsJSGlobalProxy()) {
|
Object* proto = GetPrototype();
|
- if (proto->IsNull()) return Heap::false_value();
|
+ if (proto->IsNull()) return heap.false_value();
|
ASSERT(proto->IsJSGlobalObject());
|
return JSGlobalObject::cast(proto)->DeleteElement(index, mode);
|
}
|
@@ -2483,24 +2490,25 @@
|
UNREACHABLE();
|
break;
|
}
|
- return Heap::true_value();
|
+ return heap.true_value();
|
}
|
|
|
Object* JSObject::DeleteProperty(String* name, DeleteMode mode) {
|
// ECMA-262, 3rd, 8.6.2.5
|
ASSERT(name->IsString());
|
+ Heap& heap = v8_context()->heap_;
|
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, name, v8::ACCESS_DELETE)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
|
- return Heap::false_value();
|
+ !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_DELETE)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_DELETE);
|
+ return heap.false_value();
|
}
|
|
if (IsJSGlobalProxy()) {
|
Object* proto = GetPrototype();
|
- if (proto->IsNull()) return Heap::false_value();
|
+ if (proto->IsNull()) return heap.false_value();
|
ASSERT(proto->IsJSGlobalObject());
|
return JSGlobalObject::cast(proto)->DeleteProperty(name, mode);
|
}
|
@@ -2511,10 +2519,10 @@
|
} else {
|
LookupResult result;
|
LocalLookup(name, &result);
|
- if (!result.IsValid()) return Heap::true_value();
|
+ if (!result.IsValid()) return heap.true_value();
|
// Ignore attributes if forcing a deletion.
|
if (result.IsDontDelete() && mode != FORCE_DELETION) {
|
- return Heap::false_value();
|
+ return heap.false_value();
|
}
|
// Check for interceptor.
|
if (result.type() == INTERCEPTOR) {
|
@@ -2554,7 +2562,8 @@
|
|
// Check if the object is among the named properties.
|
Object* key = SlowReverseLookup(obj);
|
- if (key != Heap::undefined_value()) {
|
+ Heap& heap = v8_context()->heap_;
|
+ if (key != heap.undefined_value()) {
|
return true;
|
}
|
|
@@ -2585,7 +2594,7 @@
|
}
|
case DICTIONARY_ELEMENTS: {
|
key = element_dictionary()->SlowReverseLookup(obj);
|
- if (key != Heap::undefined_value()) {
|
+ if (key != heap.undefined_value()) {
|
return true;
|
}
|
break;
|
@@ -2600,7 +2609,7 @@
|
if (IsJSFunction() && !JSFunction::cast(this)->IsBoilerplate()) {
|
// Get the constructor function for arguments array.
|
JSObject* arguments_boilerplate =
|
- Top::context()->global_context()->arguments_boilerplate();
|
+ v8_context()->top_.context()->global_context()->arguments_boilerplate();
|
JSFunction* arguments_function =
|
JSFunction::cast(arguments_boilerplate->map()->constructor());
|
|
@@ -2645,14 +2654,14 @@
|
// - neither this object nor any prototype has interceptors
|
bool JSObject::IsSimpleEnum() {
|
JSObject* arguments_boilerplate =
|
- Top::context()->global_context()->arguments_boilerplate();
|
+ v8_context()->top_.context()->global_context()->arguments_boilerplate();
|
JSFunction* arguments_function =
|
JSFunction::cast(arguments_boilerplate->map()->constructor());
|
if (IsAccessCheckNeeded()) return false;
|
if (map()->constructor() == arguments_function) return false;
|
|
for (Object* o = this;
|
- o != Heap::null_value();
|
+ o != v8_context()->heap_.null_value();
|
o = JSObject::cast(o)->GetPrototype()) {
|
JSObject* curr = JSObject::cast(o);
|
if (!curr->HasFastProperties()) return false;
|
@@ -2734,13 +2743,13 @@
|
}
|
|
// Check __proto__ before interceptor.
|
- if (name->Equals(Heap::Proto_symbol()) && !IsJSContextExtensionObject()) {
|
+ if (name->Equals(v8_context()->heap_.Proto_symbol()) && !IsJSContextExtensionObject()) {
|
result->ConstantResult(this);
|
return;
|
}
|
|
// Check for lookup interceptor except when bootstrapping.
|
- if (HasNamedInterceptor() && !Bootstrapper::IsActive()) {
|
+ if (HasNamedInterceptor() && !v8_context()->bootstrapper_.IsActive()) {
|
result->InterceptorResult(this);
|
return;
|
}
|
@@ -2752,7 +2761,7 @@
|
void JSObject::Lookup(String* name, LookupResult* result) {
|
// Ecma-262 3rd 8.6.2.4
|
for (Object* current = this;
|
- current != Heap::null_value();
|
+ current != v8_context()->heap_.null_value();
|
current = JSObject::cast(current)->GetPrototype()) {
|
JSObject::cast(current)->LocalLookup(name, result);
|
if (result->IsValid() && !result->IsTransitionType()) return;
|
@@ -2764,7 +2773,7 @@
|
// Search object and it's prototype chain for callback properties.
|
void JSObject::LookupCallback(String* name, LookupResult* result) {
|
for (Object* current = this;
|
- current != Heap::null_value();
|
+ current != v8_context()->heap_.null_value();
|
current = JSObject::cast(current)->GetPrototype()) {
|
JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
|
if (result->IsValid() && result->type() == CALLBACKS) return;
|
@@ -2778,12 +2787,12 @@
|
// Make sure that the top context does not change when doing callbacks or
|
// interceptor calls.
|
AssertNoContextChange ncc;
|
-
|
+ Heap& heap = v8_context()->heap_;
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
- return Heap::undefined_value();
|
+ !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_SET)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
+ return heap.undefined_value();
|
}
|
|
// Try to flatten before operating on the string.
|
@@ -2801,13 +2810,13 @@
|
Object* obj = callback_result.GetCallbackObject();
|
if (obj->IsAccessorInfo() &&
|
AccessorInfo::cast(obj)->prohibits_overwriting()) {
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
}
|
|
uint32_t index;
|
bool is_element = name->AsArrayIndex(&index);
|
- if (is_element && IsJSArray()) return Heap::undefined_value();
|
+ if (is_element && IsJSArray()) return heap.undefined_value();
|
|
if (is_element) {
|
switch (GetElementsKind()) {
|
@@ -2823,7 +2832,7 @@
|
case EXTERNAL_FLOAT_ELEMENTS:
|
// Ignore getters and setters on pixel and external array
|
// elements.
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
case DICTIONARY_ELEMENTS: {
|
// Lookup the index.
|
NumberDictionary* dictionary = element_dictionary();
|
@@ -2831,7 +2840,7 @@
|
if (entry != NumberDictionary::kNotFound) {
|
Object* result = dictionary->ValueAt(entry);
|
PropertyDetails details = dictionary->DetailsAt(entry);
|
- if (details.IsReadOnly()) return Heap::undefined_value();
|
+ if (details.IsReadOnly()) return heap.undefined_value();
|
if (details.type() == CALLBACKS) {
|
// Only accessors allowed as elements.
|
ASSERT(result->IsFixedArray());
|
@@ -2849,7 +2858,7 @@
|
LookupResult result;
|
LocalLookup(name, &result);
|
if (result.IsValid()) {
|
- if (result.IsReadOnly()) return Heap::undefined_value();
|
+ if (result.IsReadOnly()) return v8_context()->heap_.undefined_value();
|
if (result.type() == CALLBACKS) {
|
Object* obj = result.GetCallbackObject();
|
if (obj->IsFixedArray()) return obj;
|
@@ -2858,7 +2867,7 @@
|
}
|
|
// Allocate the fixed array to hold getter and setter.
|
- Object* structure = Heap::AllocateFixedArray(2, TENURED);
|
+ Object* structure = v8_context()->heap_.AllocateFixedArray(2, TENURED);
|
if (structure->IsFailure()) return structure;
|
PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
|
|
@@ -2902,9 +2911,9 @@
|
PropertyAttributes attributes) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
- return Heap::undefined_value();
|
+ !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
if (IsJSGlobalProxy()) {
|
@@ -2929,17 +2938,18 @@
|
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
- return Heap::undefined_value();
|
+ !v8_context()->top_.MayNamedAccess(this, name, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
// Make the lookup and include prototypes.
|
int accessor_index = is_getter ? kGetterIndex : kSetterIndex;
|
uint32_t index;
|
+ Heap& heap = v8_context()->heap_;
|
if (name->AsArrayIndex(&index)) {
|
for (Object* obj = this;
|
- obj != Heap::null_value();
|
+ obj != heap.null_value();
|
obj = JSObject::cast(obj)->GetPrototype()) {
|
JSObject* js_object = JSObject::cast(obj);
|
if (js_object->HasDictionaryElements()) {
|
@@ -2957,12 +2967,12 @@
|
}
|
} else {
|
for (Object* obj = this;
|
- obj != Heap::null_value();
|
+ obj != heap.null_value();
|
obj = JSObject::cast(obj)->GetPrototype()) {
|
LookupResult result;
|
JSObject::cast(obj)->LocalLookup(name, &result);
|
if (result.IsValid()) {
|
- if (result.IsReadOnly()) return Heap::undefined_value();
|
+ if (result.IsReadOnly()) return heap.undefined_value();
|
if (result.type() == CALLBACKS) {
|
Object* obj = result.GetCallbackObject();
|
if (obj->IsFixedArray()) {
|
@@ -2972,7 +2982,7 @@
|
}
|
}
|
}
|
- return Heap::undefined_value();
|
+ return heap.undefined_value();
|
}
|
|
|
@@ -2990,7 +3000,7 @@
|
}
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
} else {
|
return property_dictionary()->SlowReverseLookup(value);
|
}
|
@@ -2998,7 +3008,7 @@
|
|
|
Object* Map::CopyDropDescriptors() {
|
- Object* result = Heap::AllocateMap(instance_type(), instance_size());
|
+ Object* result = v8_context()->heap_.AllocateMap(instance_type(), instance_size());
|
if (result->IsFailure()) return result;
|
Map::cast(result)->set_prototype(prototype());
|
Map::cast(result)->set_constructor(constructor());
|
@@ -3007,7 +3017,7 @@
|
// pointing to the same transition which is bad because the garbage
|
// collector relies on being able to reverse pointers from transitions
|
// to maps. If properties need to be retained use CopyDropTransitions.
|
- Map::cast(result)->set_instance_descriptors(Heap::empty_descriptor_array());
|
+ Map::cast(result)->set_instance_descriptors(v8_context()->heap_.empty_descriptor_array());
|
// Please note instance_type and instance_size are set when allocated.
|
Map::cast(result)->set_inobject_properties(inobject_properties());
|
Map::cast(result)->set_unused_property_fields(unused_property_fields());
|
@@ -3112,7 +3122,7 @@
|
if (code->flags() == flags) return code;
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -3166,7 +3176,7 @@
|
int size = dict->NumberOfElements();
|
|
// Allocate a temporary fixed array.
|
- Object* object = Heap::AllocateFixedArray(size);
|
+ Object* object = v8_context()->heap_.AllocateFixedArray(size);
|
if (object->IsFailure()) return object;
|
FixedArray* key_array = FixedArray::cast(object);
|
|
@@ -3185,7 +3195,7 @@
|
UNREACHABLE();
|
}
|
UNREACHABLE();
|
- return Heap::null_value(); // Failure case needs to "return" a value.
|
+ return v8_context()->heap_.null_value(); // Failure case needs to "return" a value.
|
}
|
|
|
@@ -3206,7 +3216,7 @@
|
if (extra == 0) return this;
|
|
// Allocate the result
|
- Object* obj = Heap::AllocateFixedArray(len0 + extra);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(len0 + extra);
|
if (obj->IsFailure()) return obj;
|
// Fill in the content
|
FixedArray* result = FixedArray::cast(obj);
|
@@ -3229,8 +3239,8 @@
|
|
|
Object* FixedArray::CopySize(int new_length) {
|
- if (new_length == 0) return Heap::empty_fixed_array();
|
- Object* obj = Heap::AllocateFixedArray(new_length);
|
+ if (new_length == 0) return v8_context()->heap_.empty_fixed_array();
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(new_length);
|
if (obj->IsFailure()) return obj;
|
FixedArray* result = FixedArray::cast(obj);
|
// Copy the content
|
@@ -3265,17 +3275,18 @@
|
|
|
Object* DescriptorArray::Allocate(int number_of_descriptors) {
|
+ V8Context * const v8context = v8_context();
|
if (number_of_descriptors == 0) {
|
- return Heap::empty_descriptor_array();
|
+ return v8context->heap_.empty_descriptor_array();
|
}
|
// Allocate the array of keys.
|
- Object* array = Heap::AllocateFixedArray(ToKeyIndex(number_of_descriptors));
|
+ Object* array = v8_context()->heap_.AllocateFixedArray(ToKeyIndex(number_of_descriptors));
|
if (array->IsFailure()) return array;
|
// Do not use DescriptorArray::cast on incomplete object.
|
FixedArray* result = FixedArray::cast(array);
|
|
// Allocate the content array and set it in the descriptor array.
|
- array = Heap::AllocateFixedArray(number_of_descriptors << 1);
|
+ array = v8context->heap_.AllocateFixedArray(number_of_descriptors << 1);
|
if (array->IsFailure()) return array;
|
result->set(kContentArrayIndex, array);
|
result->set(kEnumerationIndexIndex,
|
@@ -3538,11 +3549,8 @@
|
#endif
|
|
|
-static StaticResource<StringInputBuffer> string_input_buffer;
|
-
|
-
|
bool String::LooksValid() {
|
- if (!Heap::Contains(this)) return false;
|
+ if (!v8_context()->heap_.Contains(this)) return false;
|
return true;
|
}
|
|
@@ -3554,7 +3562,7 @@
|
// the string will be accessed later (for example by WriteUtf8)
|
// so it's still a good idea.
|
TryFlattenIfNotFlat();
|
- Access<StringInputBuffer> buffer(&string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8_context()->objects_data->string_input_buffer);
|
buffer->Reset(0, this);
|
int result = 0;
|
while (buffer->has_more())
|
@@ -3639,7 +3647,7 @@
|
if (length < 0) length = kMaxInt - offset;
|
|
// Compute the size of the UTF-8 string. Start at the specified offset.
|
- Access<StringInputBuffer> buffer(&string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8_context()->objects_data->string_input_buffer);
|
buffer->Reset(offset, this);
|
int character_position = offset;
|
int utf8_bytes = 0;
|
@@ -3725,7 +3733,7 @@
|
return SmartPointer<uc16>();
|
}
|
|
- Access<StringInputBuffer> buffer(&string_input_buffer);
|
+ Access<StringInputBuffer> buffer(&v8_context()->objects_data->string_input_buffer);
|
buffer->Reset(this);
|
|
uc16* result = NewArray<uc16>(length() + 1);
|
@@ -4028,10 +4036,9 @@
|
}
|
|
|
-Relocatable* Relocatable::top_ = NULL;
|
+RelocatableData::RelocatableData():top_(NULL) {}
|
|
-
|
-void Relocatable::PostGarbageCollectionProcessing() {
|
+void RelocatableData::PostGarbageCollectionProcessing() {
|
Relocatable* current = top_;
|
while (current != NULL) {
|
current->PostGarbageCollection();
|
@@ -4041,13 +4048,13 @@
|
|
|
// Reserve space for statics needing saving and restoring.
|
-int Relocatable::ArchiveSpacePerThread() {
|
+int RelocatableData::ArchiveSpacePerThread() {
|
return sizeof(top_);
|
}
|
|
|
// Archive statics that are thread local.
|
-char* Relocatable::ArchiveState(char* to) {
|
+char* RelocatableData::ArchiveState(char* to) {
|
*reinterpret_cast<Relocatable**>(to) = top_;
|
top_ = NULL;
|
return to + ArchiveSpacePerThread();
|
@@ -4055,25 +4062,25 @@
|
|
|
// Restore statics that are thread local.
|
-char* Relocatable::RestoreState(char* from) {
|
+char* RelocatableData::RestoreState(char* from) {
|
top_ = *reinterpret_cast<Relocatable**>(from);
|
return from + ArchiveSpacePerThread();
|
}
|
|
|
-char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
|
+char* RelocatableData::Iterate(ObjectVisitor* v, char* thread_storage) {
|
Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
|
Iterate(v, top);
|
return thread_storage + ArchiveSpacePerThread();
|
}
|
|
|
-void Relocatable::Iterate(ObjectVisitor* v) {
|
+void RelocatableData::Iterate(ObjectVisitor* v) {
|
Iterate(v, top_);
|
}
|
|
|
-void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
|
+void RelocatableData::Iterate(ObjectVisitor* v, Relocatable* top) {
|
Relocatable* current = top;
|
while (current != NULL) {
|
current->IterateInstance(v);
|
@@ -4487,10 +4494,6 @@
|
return true;
|
}
|
|
-
|
-static StringInputBuffer string_compare_buffer_b;
|
-
|
-
|
template <typename IteratorA>
|
static inline bool CompareStringContentsPartial(IteratorA* ia, String* b) {
|
if (b->IsFlat()) {
|
@@ -4502,15 +4505,12 @@
|
return CompareStringContents(ia, &ib);
|
}
|
} else {
|
+ StringInputBuffer& string_compare_buffer_b = v8_context()->objects_data->string_compare_buffer_b;
|
string_compare_buffer_b.Reset(0, b);
|
return CompareStringContents(ia, &string_compare_buffer_b);
|
}
|
}
|
|
-
|
-static StringInputBuffer string_compare_buffer_a;
|
-
|
-
|
bool String::SlowEquals(String* other) {
|
// Fast check: negative check with lengths.
|
int len = length();
|
@@ -4545,6 +4545,7 @@
|
}
|
} else {
|
VectorIterator<char> buf1(vec1);
|
+ StringInputBuffer& string_compare_buffer_b = v8_context()->objects_data->string_compare_buffer_b;
|
string_compare_buffer_b.Reset(0, other);
|
return CompareStringContents(&buf1, &string_compare_buffer_b);
|
}
|
@@ -4561,11 +4562,13 @@
|
}
|
} else {
|
VectorIterator<uc16> buf1(vec1);
|
+ StringInputBuffer& string_compare_buffer_b = v8_context()->objects_data->string_compare_buffer_b;
|
string_compare_buffer_b.Reset(0, other);
|
return CompareStringContents(&buf1, &string_compare_buffer_b);
|
}
|
}
|
} else {
|
+ StringInputBuffer& string_compare_buffer_a = v8_context()->objects_data->string_compare_buffer_a;
|
string_compare_buffer_a.Reset(0, this);
|
return CompareStringContentsPartial(&string_compare_buffer_a, other);
|
}
|
@@ -4576,23 +4579,24 @@
|
if (StringShape(this).IsSymbol()) return false;
|
|
Map* map = this->map();
|
- if (map == Heap::short_string_map()) {
|
- this->set_map(Heap::undetectable_short_string_map());
|
+ Heap& heap = v8_context()->heap_;
|
+ if (map == heap.short_string_map()) {
|
+ this->set_map(heap.undetectable_short_string_map());
|
return true;
|
- } else if (map == Heap::medium_string_map()) {
|
- this->set_map(Heap::undetectable_medium_string_map());
|
+ } else if (map == heap.medium_string_map()) {
|
+ this->set_map(heap.undetectable_medium_string_map());
|
return true;
|
- } else if (map == Heap::long_string_map()) {
|
- this->set_map(Heap::undetectable_long_string_map());
|
+ } else if (map == heap.long_string_map()) {
|
+ this->set_map(heap.undetectable_long_string_map());
|
return true;
|
- } else if (map == Heap::short_ascii_string_map()) {
|
- this->set_map(Heap::undetectable_short_ascii_string_map());
|
+ } else if (map == heap.short_ascii_string_map()) {
|
+ this->set_map(heap.undetectable_short_ascii_string_map());
|
return true;
|
- } else if (map == Heap::medium_ascii_string_map()) {
|
- this->set_map(Heap::undetectable_medium_ascii_string_map());
|
+ } else if (map == heap.medium_ascii_string_map()) {
|
+ this->set_map(heap.undetectable_medium_ascii_string_map());
|
return true;
|
- } else if (map == Heap::long_ascii_string_map()) {
|
- this->set_map(Heap::undetectable_long_ascii_string_map());
|
+ } else if (map == heap.long_ascii_string_map()) {
|
+ this->set_map(heap.undetectable_long_ascii_string_map());
|
return true;
|
}
|
// Rest cannot be marked as undetectable
|
@@ -4602,7 +4606,7 @@
|
|
bool String::IsEqualTo(Vector<const char> str) {
|
int slen = length();
|
- Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
|
+ Access<Scanner::Utf8Decoder> decoder(v8_context()->scanner_.utf8_decoder());
|
decoder->Reset(str.start(), str.length());
|
int i;
|
for (i = 0; i < slen && decoder->has_more(); i++) {
|
@@ -4738,11 +4742,11 @@
|
// underlying string buffer.
|
SlicedString* str = SlicedString::cast(this);
|
String* buf = str->buffer();
|
- return Heap::AllocateSlicedString(buf,
|
+ return v8_context()->heap_.AllocateSlicedString(buf,
|
str->start() + start,
|
str->start() + end);
|
}
|
- Object* result = Heap::AllocateSlicedString(this, start, end);
|
+ Object* result = v8_context()->heap_.AllocateSlicedString(this, start, end);
|
if (result->IsFailure()) {
|
return result;
|
}
|
@@ -4754,7 +4758,7 @@
|
// building 'towers' of sliced strings on cons strings.
|
// This may fail due to an allocation failure (when a GC is needed), but it
|
// will succeed often enough to avoid the problem. We only have to do this
|
- // if Heap::AllocateSlicedString actually returned a SlicedString. It will
|
+ // if v8_context()->heap_.AllocateSlicedString actually returned a SlicedString. It will
|
// return flat strings for small slices for efficiency reasons.
|
String* answer = String::cast(result);
|
if (StringShape(answer).IsSliced() &&
|
@@ -4810,7 +4814,7 @@
|
// low-level accessors to get and modify their data.
|
DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
|
*RawField(this, Map::kInstanceDescriptorsOffset));
|
- if (d == Heap::raw_unchecked_empty_descriptor_array()) return;
|
+ if (d == v8_context()->heap_.raw_unchecked_empty_descriptor_array()) return;
|
Smi* NullDescriptorDetails =
|
PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
|
FixedArray* contents = reinterpret_cast<FixedArray*>(
|
@@ -4829,7 +4833,7 @@
|
if (!target->IsMarked()) {
|
ASSERT(target->IsMap());
|
contents->set(i + 1, NullDescriptorDetails, SKIP_WRITE_BARRIER);
|
- contents->set(i, Heap::null_value(), SKIP_WRITE_BARRIER);
|
+ contents->set(i, v8_context()->heap_.null_value(), SKIP_WRITE_BARRIER);
|
ASSERT(target->prototype() == this ||
|
target->prototype() == real_prototype);
|
// Getter prototype() is read-only, set_prototype() has side effects.
|
@@ -4879,7 +4883,7 @@
|
map()->set_constructor(value);
|
map()->set_non_instance_prototype(true);
|
construct_prototype =
|
- Top::context()->global_context()->initial_object_prototype();
|
+ v8_context()->top_.context()->global_context()->initial_object_prototype();
|
} else {
|
map()->set_non_instance_prototype(false);
|
}
|
@@ -4906,7 +4910,7 @@
|
|
|
Object* Oddball::Initialize(const char* to_string, Object* to_number) {
|
- Object* symbol = Heap::LookupAsciiSymbol(to_string);
|
+ Object* symbol = v8_context()->heap_.LookupAsciiSymbol(to_string);
|
if (symbol->IsFailure()) return symbol;
|
set_to_string(String::cast(symbol));
|
set_to_number(to_number);
|
@@ -4922,9 +4926,9 @@
|
|
Object* SharedFunctionInfo::GetSourceCode() {
|
HandleScope scope;
|
- if (script()->IsUndefined()) return Heap::undefined_value();
|
+ if (script()->IsUndefined()) return v8_context()->heap_.undefined_value();
|
Object* source = Script::cast(script())->source();
|
- if (source->IsUndefined()) return Heap::undefined_value();
|
+ if (source->IsUndefined()) return v8_context()->heap_.undefined_value();
|
return *SubString(Handle<String>(String::cast(source)),
|
start_position(), end_position());
|
}
|
@@ -4968,7 +4972,7 @@
|
set_compiler_hints(BooleanBit::set(compiler_hints(),
|
kHasOnlySimpleThisPropertyAssignments,
|
false));
|
- set_this_property_assignments(Heap::undefined_value());
|
+ set_this_property_assignments(v8_context()->heap_.undefined_value());
|
set_this_property_assignments_count(0);
|
}
|
|
@@ -5085,6 +5089,7 @@
|
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
|
RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
|
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
|
+ Debug& debug = v8_context()->debug_;
|
|
for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
|
RelocInfo::Mode rmode = it.rinfo()->rmode();
|
@@ -5095,7 +5100,7 @@
|
} else if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
|
v->VisitExternalReference(it.rinfo()->target_reference_address());
|
#ifdef ENABLE_DEBUGGER_SUPPORT
|
- } else if (Debug::has_break_points() &&
|
+ } else if (debug.has_break_points() &&
|
RelocInfo::IsJSReturn(rmode) &&
|
it.rinfo()->IsPatchedReturnSequence()) {
|
v->VisitDebugTarget(it.rinfo());
|
@@ -5365,9 +5370,9 @@
|
set_length(Smi::FromInt(0), SKIP_WRITE_BARRIER);
|
FixedArray* new_elements;
|
if (capacity == 0) {
|
- new_elements = Heap::empty_fixed_array();
|
+ new_elements = v8_context()->heap_.empty_fixed_array();
|
} else {
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(capacity);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArrayWithHoles(capacity);
|
if (obj->IsFailure()) return obj;
|
new_elements = FixedArray::cast(obj);
|
}
|
@@ -5399,7 +5404,7 @@
|
|
static Object* ArrayLengthRangeError() {
|
HandleScope scope;
|
- return Top::Throw(*Factory::NewRangeError("invalid_array_length",
|
+ return v8_context()->top_.Throw(*Factory::NewRangeError("invalid_array_length",
|
HandleVector<Object>(NULL, 0)));
|
}
|
|
@@ -5432,7 +5437,7 @@
|
int new_capacity = value > min ? value : min;
|
if (new_capacity <= kMaxFastElementsLength ||
|
!ShouldConvertToSlowElements(new_capacity)) {
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArrayWithHoles(new_capacity);
|
if (obj->IsFailure()) return obj;
|
if (IsJSArray()) JSArray::cast(this)->set_length(smi_length,
|
SKIP_WRITE_BARRIER);
|
@@ -5476,7 +5481,7 @@
|
|
// len is not a number so make the array size one and
|
// set only element to len.
|
- Object* obj = Heap::AllocateFixedArray(1);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(1);
|
if (obj->IsFailure()) return obj;
|
FixedArray::cast(obj)->set(0, len);
|
if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(1),
|
@@ -5537,7 +5542,7 @@
|
if (this->IsStringObjectWithCharacterAt(index)) return true;
|
|
Object* pt = GetPrototype();
|
- if (pt == Heap::null_value()) return false;
|
+ if (pt == v8_context()->heap_.null_value()) return false;
|
return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
|
}
|
|
@@ -5582,8 +5587,8 @@
|
bool JSObject::HasLocalElement(uint32_t index) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return false;
|
}
|
|
@@ -5627,15 +5632,15 @@
|
break;
|
}
|
UNREACHABLE();
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
|
|
bool JSObject::HasElementWithReceiver(JSObject* receiver, uint32_t index) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return false;
|
}
|
|
@@ -5690,7 +5695,7 @@
|
if (this->IsStringObjectWithCharacterAt(index)) return true;
|
|
Object* pt = GetPrototype();
|
- if (pt == Heap::null_value()) return false;
|
+ if (pt == v8_context()->heap_.null_value()) return false;
|
return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
|
}
|
|
@@ -5764,7 +5769,7 @@
|
if (new_capacity <= kMaxFastElementsLength ||
|
!ShouldConvertToSlowElements(new_capacity)) {
|
ASSERT(static_cast<uint32_t>(new_capacity) > index);
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArrayWithHoles(new_capacity);
|
if (obj->IsFailure()) return obj;
|
SetFastElements(FixedArray::cast(obj));
|
if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(index + 1),
|
@@ -5784,8 +5789,8 @@
|
Object* JSObject::SetElement(uint32_t index, Object* value) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_SET)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_SET)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_SET);
|
return value;
|
}
|
|
@@ -5864,7 +5869,7 @@
|
Handle<Object> self(this);
|
Handle<Object> key(Factory::NewNumberFromUint(index));
|
Handle<Object> args[2] = { key, self };
|
- return Top::Throw(*Factory::NewTypeError("no_setter_in_callback",
|
+ return v8_context()->top_.Throw(*Factory::NewTypeError("no_setter_in_callback",
|
HandleVector(args, 2)));
|
}
|
} else {
|
@@ -5905,7 +5910,7 @@
|
} else {
|
new_length = NumberDictionary::cast(elements())->max_number_key() + 1;
|
}
|
- Object* obj = Heap::AllocateFixedArrayWithHoles(new_length);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArrayWithHoles(new_length);
|
if (obj->IsFailure()) return obj;
|
SetFastElements(FixedArray::cast(obj));
|
#ifdef DEBUG
|
@@ -5925,7 +5930,7 @@
|
// All possible cases have been handled above. Add a return to avoid the
|
// complaints from the compiler.
|
UNREACHABLE();
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
|
|
@@ -5936,7 +5941,7 @@
|
// sure that the length stays within 32-bits (unsigned).
|
if (index >= old_len && index != 0xffffffff) {
|
Object* len =
|
- Heap::NumberFromDouble(static_cast<double>(index) + 1);
|
+ v8_context()->heap_.NumberFromDouble(static_cast<double>(index) + 1);
|
if (len->IsFailure()) return len;
|
set_length(len);
|
}
|
@@ -5988,7 +5993,7 @@
|
JSFunction::cast(getter));
|
} else {
|
// Getter is not a function.
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
}
|
return element;
|
@@ -6002,7 +6007,7 @@
|
|
// Continue searching via the prototype chain.
|
Object* pt = GetPrototype();
|
- if (pt == Heap::null_value()) return Heap::undefined_value();
|
+ if (pt == v8_context()->heap_.null_value()) return v8_context()->heap_.undefined_value();
|
return pt->GetElementWithReceiver(receiver, index);
|
}
|
|
@@ -6043,9 +6048,9 @@
|
Object* JSObject::GetElementWithReceiver(JSObject* receiver, uint32_t index) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_GET)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_GET);
|
- return Heap::undefined_value();
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_GET)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_GET);
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
if (HasIndexedInterceptor()) {
|
@@ -6109,7 +6114,7 @@
|
ExternalIntArray* array = ExternalIntArray::cast(elements());
|
if (index < static_cast<uint32_t>(array->length())) {
|
int32_t value = array->get(index);
|
- return Heap::NumberFromInt32(value);
|
+ return v8_context()->heap_.NumberFromInt32(value);
|
}
|
break;
|
}
|
@@ -6118,7 +6123,7 @@
|
ExternalUnsignedIntArray::cast(elements());
|
if (index < static_cast<uint32_t>(array->length())) {
|
uint32_t value = array->get(index);
|
- return Heap::NumberFromUint32(value);
|
+ return v8_context()->heap_.NumberFromUint32(value);
|
}
|
break;
|
}
|
@@ -6126,7 +6131,7 @@
|
ExternalFloatArray* array = ExternalFloatArray::cast(elements());
|
if (index < static_cast<uint32_t>(array->length())) {
|
float value = array->get(index);
|
- return Heap::AllocateHeapNumber(value);
|
+ return v8_context()->heap_.AllocateHeapNumber(value);
|
}
|
break;
|
}
|
@@ -6145,7 +6150,7 @@
|
JSFunction::cast(getter));
|
} else {
|
// Getter is not a function.
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
}
|
return element;
|
@@ -6155,7 +6160,7 @@
|
}
|
|
Object* pt = GetPrototype();
|
- if (pt == Heap::null_value()) return Heap::undefined_value();
|
+ if (pt == v8_context()->heap_.null_value()) return v8_context()->heap_.undefined_value();
|
return pt->GetElementWithReceiver(receiver, index);
|
}
|
|
@@ -6306,7 +6311,7 @@
|
// Continue searching via the prototype chain.
|
Object* pt = GetPrototype();
|
*attributes = ABSENT;
|
- if (pt == Heap::null_value()) return Heap::undefined_value();
|
+ if (pt == v8_context()->heap_.null_value()) return v8_context()->heap_.undefined_value();
|
return pt->GetPropertyWithReceiver(receiver, name, attributes);
|
}
|
|
@@ -6352,8 +6357,8 @@
|
bool JSObject::HasRealNamedProperty(String* key) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, key, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ !v8_context()->top_.MayNamedAccess(this, key, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return false;
|
}
|
|
@@ -6383,8 +6388,8 @@
|
bool JSObject::HasRealElementProperty(uint32_t index) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ !v8_context()->top_.MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return false;
|
}
|
|
@@ -6424,15 +6429,15 @@
|
}
|
// All possibilities have been handled above already.
|
UNREACHABLE();
|
- return Heap::null_value();
|
+ return v8_context()->heap_.null_value();
|
}
|
|
|
bool JSObject::HasRealNamedCallbackProperty(String* key) {
|
// Check access rights if needed.
|
if (IsAccessCheckNeeded() &&
|
- !Top::MayNamedAccess(this, key, v8::ACCESS_HAS)) {
|
- Top::ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
+ !v8_context()->top_.MayNamedAccess(this, key, v8::ACCESS_HAS)) {
|
+ v8_context()->top_.ReportFailedAccessCheck(this, v8::ACCESS_HAS);
|
return false;
|
}
|
|
@@ -6704,7 +6709,7 @@
|
|
|
Object* NumberDictionaryShape::AsObject(uint32_t key) {
|
- return Heap::NumberFromUint32(key);
|
+ return v8_context()->heap_.NumberFromUint32(key);
|
}
|
|
|
@@ -6801,7 +6806,7 @@
|
}
|
|
Object* AsObject() {
|
- Object* obj = Heap::AllocateFixedArray(2);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(2);
|
if (obj->IsFailure()) return obj;
|
FixedArray* pair = FixedArray::cast(obj);
|
pair->set(0, shared_);
|
@@ -6882,7 +6887,7 @@
|
|
Object* AsObject() {
|
if (length_field_ == 0) Hash();
|
- return Heap::AllocateSymbol(string_, chars_, length_field_);
|
+ return v8_context()->heap_.AllocateSymbol(string_, chars_, length_field_);
|
}
|
|
Vector<const char> string_;
|
@@ -6917,7 +6922,7 @@
|
}
|
}
|
// Transform string to symbol if possible.
|
- Map* map = Heap::SymbolMapForString(string_);
|
+ Map* map = v8_context()->heap_.SymbolMapForString(string_);
|
if (map != NULL) {
|
string_->set_map(map);
|
ASSERT(string_->IsSymbol());
|
@@ -6925,7 +6930,7 @@
|
}
|
// Otherwise allocate a new symbol.
|
StringInputBuffer buffer(string_);
|
- return Heap::AllocateInternalSymbol(&buffer,
|
+ return v8_context()->heap_.AllocateInternalSymbol(&buffer,
|
string_->length(),
|
string_->length_field());
|
}
|
@@ -6957,7 +6962,7 @@
|
int at_least_space_for) {
|
int capacity = RoundUpToPowerOf2(at_least_space_for);
|
if (capacity < 4) capacity = 4; // Guarantee min capacity.
|
- Object* obj = Heap::AllocateHashTable(EntryToIndex(capacity));
|
+ Object* obj = v8_context()->heap_.AllocateHashTable(EntryToIndex(capacity));
|
if (!obj->IsFailure()) {
|
HashTable::cast(obj)->SetNumberOfElements(0);
|
HashTable::cast(obj)->SetCapacity(capacity);
|
@@ -7131,7 +7136,7 @@
|
HeapNumber* result_double = NULL;
|
if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
|
// Allocate space for result before we start mutating the object.
|
- Object* new_double = Heap::AllocateHeapNumber(0.0);
|
+ Object* new_double = v8_context()->heap_.AllocateHeapNumber(0.0);
|
if (new_double->IsFailure()) return new_double;
|
result_double = HeapNumber::cast(new_double);
|
}
|
@@ -7175,7 +7180,7 @@
|
uint32_t result = pos;
|
PropertyDetails no_details = PropertyDetails(NONE, NORMAL);
|
while (undefs > 0) {
|
- new_dict->AddNumberEntry(pos, Heap::undefined_value(), no_details);
|
+ new_dict->AddNumberEntry(pos, v8_context()->heap_.undefined_value(), no_details);
|
pos++;
|
undefs--;
|
}
|
@@ -7209,9 +7214,9 @@
|
}
|
// Convert to fast elements.
|
|
- PretenureFlag tenure = Heap::InNewSpace(this) ? NOT_TENURED: TENURED;
|
+ PretenureFlag tenure = v8_context()->heap_.InNewSpace(this) ? NOT_TENURED: TENURED;
|
Object* new_array =
|
- Heap::AllocateFixedArray(dict->NumberOfElements(), tenure);
|
+ v8_context()->heap_.AllocateFixedArray(dict->NumberOfElements(), tenure);
|
if (new_array->IsFailure()) {
|
return new_array;
|
}
|
@@ -7237,7 +7242,7 @@
|
if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
|
// Pessimistically allocate space for return value before
|
// we start mutating the array.
|
- Object* new_double = Heap::AllocateHeapNumber(0.0);
|
+ Object* new_double = v8_context()->heap_.AllocateHeapNumber(0.0);
|
if (new_double->IsFailure()) return new_double;
|
result_double = HeapNumber::cast(new_double);
|
}
|
@@ -7348,7 +7353,7 @@
|
}
|
receiver->set(index, cast_value);
|
}
|
- return Heap::NumberFromInt32(cast_value);
|
+ return v8_context()->heap_.NumberFromInt32(cast_value);
|
}
|
|
|
@@ -7398,7 +7403,7 @@
|
}
|
set(index, cast_value);
|
}
|
- return Heap::NumberFromUint32(cast_value);
|
+ return v8_context()->heap_.NumberFromUint32(cast_value);
|
}
|
|
|
@@ -7418,7 +7423,7 @@
|
}
|
set(index, cast_value);
|
}
|
- return Heap::AllocateHeapNumber(cast_value);
|
+ return v8_context()->heap_.AllocateHeapNumber(cast_value);
|
}
|
|
|
@@ -7434,7 +7439,7 @@
|
ASSERT(!HasFastProperties());
|
int entry = property_dictionary()->FindEntry(name);
|
if (entry == StringDictionary::kNotFound) {
|
- Object* cell = Heap::AllocateJSGlobalPropertyCell(Heap::the_hole_value());
|
+ Object* cell = v8_context()->heap_.AllocateJSGlobalPropertyCell(v8_context()->heap_.the_hole_value());
|
if (cell->IsFailure()) return cell;
|
PropertyDetails details(NONE, NORMAL);
|
details = details.AsDeleted();
|
@@ -7510,7 +7515,7 @@
|
Object* CompilationCacheTable::Lookup(String* src) {
|
StringKey key(src);
|
int entry = FindEntry(&key);
|
- if (entry == kNotFound) return Heap::undefined_value();
|
+ if (entry == kNotFound) return v8_context()->heap_.undefined_value();
|
return get(EntryToIndex(entry) + 1);
|
}
|
|
@@ -7518,7 +7523,7 @@
|
Object* CompilationCacheTable::LookupEval(String* src, Context* context) {
|
StringSharedKey key(src, context->closure()->shared());
|
int entry = FindEntry(&key);
|
- if (entry == kNotFound) return Heap::undefined_value();
|
+ if (entry == kNotFound) return v8_context()->heap_.undefined_value();
|
return get(EntryToIndex(entry) + 1);
|
}
|
|
@@ -7527,7 +7532,7 @@
|
JSRegExp::Flags flags) {
|
RegExpKey key(src, flags);
|
int entry = FindEntry(&key);
|
- if (entry == kNotFound) return Heap::undefined_value();
|
+ if (entry == kNotFound) return v8_context()->heap_.undefined_value();
|
return get(EntryToIndex(entry) + 1);
|
}
|
|
@@ -7624,7 +7629,7 @@
|
Object* MapCache::Lookup(FixedArray* array) {
|
SymbolsKey key(array);
|
int entry = FindEntry(&key);
|
- if (entry == kNotFound) return Heap::undefined_value();
|
+ if (entry == kNotFound) return v8_context()->heap_.undefined_value();
|
return get(EntryToIndex(entry) + 1);
|
}
|
|
@@ -7660,7 +7665,7 @@
|
int length = HashTable<Shape, Key>::NumberOfElements();
|
|
// Allocate and initialize iteration order array.
|
- Object* obj = Heap::AllocateFixedArray(length);
|
+ Object* obj = v8_context()->heap_.AllocateFixedArray(length);
|
if (obj->IsFailure()) return obj;
|
FixedArray* iteration_order = FixedArray::cast(obj);
|
for (int i = 0; i < length; i++) {
|
@@ -7668,7 +7673,7 @@
|
}
|
|
// Allocate array with enumeration order.
|
- obj = Heap::AllocateFixedArray(length);
|
+ obj = v8_context()->heap_.AllocateFixedArray(length);
|
if (obj->IsFailure()) return obj;
|
FixedArray* enumeration_order = FixedArray::cast(obj);
|
|
@@ -7731,7 +7736,7 @@
|
if (from >= to) return;
|
|
int removed_entries = 0;
|
- Object* sentinel = Heap::null_value();
|
+ Object* sentinel = v8_context()->heap_.null_value();
|
int capacity = Capacity();
|
for (int i = 0; i < capacity; i++) {
|
Object* key = KeyAt(i);
|
@@ -7755,11 +7760,11 @@
|
PropertyDetails details = DetailsAt(entry);
|
// Ignore attributes if forcing a deletion.
|
if (details.IsDontDelete() && mode == JSObject::NORMAL_DELETION) {
|
- return Heap::false_value();
|
+ return v8_context()->heap_.false_value();
|
}
|
- SetEntry(entry, Heap::null_value(), Heap::null_value(), Smi::FromInt(0));
|
+ SetEntry(entry, v8_context()->heap_.null_value(), v8_context()->heap_.null_value(), Smi::FromInt(0));
|
HashTable<Shape, Key>::ElementRemoved();
|
- return Heap::true_value();
|
+ return v8_context()->heap_.true_value();
|
}
|
|
|
@@ -7975,7 +7980,7 @@
|
if (e == value) return k;
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -8022,7 +8027,7 @@
|
number_of_fields + unused_property_fields - inobject_props;
|
|
// Allocate the fixed array for the fields.
|
- Object* fields = Heap::AllocateFixedArray(number_of_allocated_fields);
|
+ Object* fields = v8_context()->heap_.AllocateFixedArray(number_of_allocated_fields);
|
if (fields->IsFailure()) return fields;
|
|
// Fill in the instance descriptor and the fields.
|
@@ -8033,7 +8038,7 @@
|
if (IsKey(k)) {
|
Object* value = ValueAt(i);
|
// Ensure the key is a symbol before writing into the instance descriptor.
|
- Object* key = Heap::LookupSymbol(String::cast(k));
|
+ Object* key = v8_context()->heap_.LookupSymbol(String::cast(k));
|
if (key->IsFailure()) return key;
|
PropertyDetails details = DetailsAt(i);
|
PropertyType type = details.type();
|
@@ -8111,7 +8116,7 @@
|
int index = GetBreakPointInfoIndex(code_position);
|
|
// Return the break point info object if any.
|
- if (index == kNoBreakPointInfo) return Heap::undefined_value();
|
+ if (index == kNoBreakPointInfo) return v8_context()->heap_.undefined_value();
|
return BreakPointInfo::cast(break_points()->get(index));
|
}
|
|
@@ -8173,7 +8178,7 @@
|
new_break_point_info->set_source_position(Smi::FromInt(source_position));
|
new_break_point_info->
|
set_statement_position(Smi::FromInt(statement_position));
|
- new_break_point_info->set_break_point_objects(Heap::undefined_value());
|
+ new_break_point_info->set_break_point_objects(v8_context()->heap_.undefined_value());
|
BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
|
debug_info->break_points()->set(index, *new_break_point_info);
|
}
|
@@ -8183,7 +8188,7 @@
|
Object* DebugInfo::GetBreakPointObjects(int code_position) {
|
Object* break_point_info = GetBreakPointInfo(code_position);
|
if (break_point_info->IsUndefined()) {
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
return BreakPointInfo::cast(break_point_info)->break_point_objects();
|
}
|
@@ -8206,7 +8211,7 @@
|
|
Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info,
|
Handle<Object> break_point_object) {
|
- if (debug_info->break_points()->IsUndefined()) return Heap::undefined_value();
|
+ if (debug_info->break_points()->IsUndefined()) return v8_context()->heap_.undefined_value();
|
for (int i = 0; i < debug_info->break_points()->length(); i++) {
|
if (!debug_info->break_points()->get(i)->IsUndefined()) {
|
Handle<BreakPointInfo> break_point_info =
|
@@ -8218,7 +8223,7 @@
|
}
|
}
|
}
|
- return Heap::undefined_value();
|
+ return v8_context()->heap_.undefined_value();
|
}
|
|
|
@@ -8247,7 +8252,7 @@
|
// If there is a single break point clear it if it is the same.
|
if (!break_point_info->break_point_objects()->IsFixedArray()) {
|
if (break_point_info->break_point_objects() == *break_point_object) {
|
- break_point_info->set_break_point_objects(Heap::undefined_value());
|
+ break_point_info->set_break_point_objects(v8_context()->heap_.undefined_value());
|
}
|
return;
|
}
|
Index: include/v8.h
|
===================================================================
|
--- include/v8.h (revision 3228)
|
+++ include/v8.h Sat Nov 14 01:42:57 MSK 2009
|
@@ -126,6 +126,7 @@
|
class FunctionTemplate;
|
class ObjectTemplate;
|
class Data;
|
+class V8Context;
|
|
namespace internal {
|
|
@@ -2761,7 +2762,7 @@
|
/**
|
* Returns whether v8::Locker is being used by this V8 instance.
|
*/
|
- static bool IsActive() { return active_; }
|
+ static bool IsActive();
|
|
private:
|
bool has_lock_;
|
@@ -3203,7 +3204,18 @@
|
* \example process.cc
|
*/
|
|
+ class V8ContextProvider {
|
+ V8Context* v8context;
|
+ public:
|
+ V8ContextProvider();
|
+ ~V8ContextProvider();
|
+ private:
|
+ V8ContextProvider(const V8ContextProvider&);
|
+ void operator=(const V8ContextProvider&);
|
+ void* operator new(size_t size);
|
+ void operator delete(void*, size_t);
|
|
+ };
|
} // namespace v8
|
|
|
Index: src/zone.cc
|
===================================================================
|
--- src/zone.cc (revision 2521)
|
+++ src/zone.cc Sat Nov 14 01:42:55 MSK 2009
|
@@ -32,16 +32,13 @@
|
namespace v8 {
|
namespace internal {
|
|
+ZoneData::ZoneData():position_ (0), limit_(0), zone_excess_limit_ (256 * MB),
|
+ segment_bytes_allocated_(0), allow_allocation_ (true ), nesting_ (0),
|
+ head_(NULL), bytes_allocated_(0)
|
|
-Address Zone::position_ = 0;
|
-Address Zone::limit_ = 0;
|
-int Zone::zone_excess_limit_ = 256 * MB;
|
-int Zone::segment_bytes_allocated_ = 0;
|
+{
|
+}
|
|
-bool AssertNoZoneAllocation::allow_allocation_ = true;
|
-
|
-int ZoneScope::nesting_ = 0;
|
-
|
// Segments represent chunks of memory: They have starting address
|
// (encoded in the this pointer) and a size in bytes. Segments are
|
// chained together forming a LIFO structure with the newest segment
|
@@ -59,18 +56,16 @@
|
Address start() const { return address(sizeof(Segment)); }
|
Address end() const { return address(size_); }
|
|
- static Segment* head() { return head_; }
|
- static void set_head(Segment* head) { head_ = head; }
|
-
|
// Creates a new segment, sets it size, and pushes it to the front
|
// of the segment chain. Returns the new segment.
|
static Segment* New(int size) {
|
Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
|
Zone::adjust_segment_bytes_allocated(size);
|
if (result != NULL) {
|
- result->next_ = head_;
|
+ ZoneData& zone_data = v8_context()->zone_data_;
|
+ result->next_ = zone_data.head_;
|
result->size_ = size;
|
- head_ = result;
|
+ zone_data.head_ = result;
|
}
|
return result;
|
}
|
@@ -81,40 +76,33 @@
|
Malloced::Delete(segment);
|
}
|
|
- static int bytes_allocated() { return bytes_allocated_; }
|
-
|
private:
|
// Computes the address of the nth byte in this segment.
|
Address address(int n) const {
|
return Address(this) + n;
|
}
|
|
- static Segment* head_;
|
- static int bytes_allocated_;
|
Segment* next_;
|
int size_;
|
};
|
|
|
-Segment* Segment::head_ = NULL;
|
-int Segment::bytes_allocated_ = 0;
|
-
|
-
|
void Zone::DeleteAll() {
|
+ ZoneData& zone_data = v8_context()->zone_data_;
|
#ifdef DEBUG
|
// Constant byte value used for zapping dead memory in debug mode.
|
static const unsigned char kZapDeadByte = 0xcd;
|
#endif
|
|
// Find a segment with a suitable size to keep around.
|
- Segment* keep = Segment::head();
|
+ Segment* keep = zone_data.head_;
|
while (keep != NULL && keep->size() > kMaximumKeptSegmentSize) {
|
keep = keep->next();
|
}
|
|
// Traverse the chained list of segments, zapping (in debug mode)
|
// and freeing every segment except the one we wish to keep.
|
- Segment* current = Segment::head();
|
+ Segment* current = zone_data.head_;
|
while (current != NULL) {
|
Segment* next = current->next();
|
if (current == keep) {
|
@@ -137,32 +125,33 @@
|
// force a new segment to be allocated on demand.
|
if (keep != NULL) {
|
Address start = keep->start();
|
- position_ = RoundUp(start, kAlignment);
|
- limit_ = keep->end();
|
+ zone_data.position_ = RoundUp(start, kAlignment);
|
+ zone_data.limit_ = keep->end();
|
#ifdef DEBUG
|
// Zap the contents of the kept segment (but not the header).
|
memset(start, kZapDeadByte, keep->capacity());
|
#endif
|
} else {
|
- position_ = limit_ = 0;
|
+ zone_data.position_ = zone_data.limit_ = 0;
|
}
|
|
// Update the head segment to be the kept segment (if any).
|
- Segment::set_head(keep);
|
+ zone_data.head_ = keep;
|
}
|
|
|
Address Zone::NewExpand(int size) {
|
+ ZoneData& zone_data = v8_context()->zone_data_;
|
// Make sure the requested size is already properly aligned and that
|
// there isn't enough room in the Zone to satisfy the request.
|
ASSERT(size == RoundDown(size, kAlignment));
|
- ASSERT(position_ + size > limit_);
|
+ ASSERT(zone_data.position_ + size > zone_data.limit_);
|
|
// Compute the new segment size. We use a 'high water mark'
|
// strategy, where we increase the segment size every time we expand
|
// except that we employ a maximum segment size when we delete. This
|
// is to avoid excessive malloc() and free() overhead.
|
- Segment* head = Segment::head();
|
+ Segment* head = zone_data.head_;
|
int old_size = (head == NULL) ? 0 : head->size();
|
static const int kSegmentOverhead = sizeof(Segment) + kAlignment;
|
int new_size = kSegmentOverhead + size + (old_size << 1);
|
@@ -177,15 +166,15 @@
|
}
|
Segment* segment = Segment::New(new_size);
|
if (segment == NULL) {
|
- V8::FatalProcessOutOfMemory("Zone");
|
+ v8_context()->v8_.FatalProcessOutOfMemory("Zone");
|
return NULL;
|
}
|
|
// Recompute 'top' and 'limit' based on the new segment.
|
Address result = RoundUp(segment->start(), kAlignment);
|
- position_ = result + size;
|
- limit_ = segment->end();
|
- ASSERT(position_ <= limit_);
|
+ zone_data.position_ = result + size;
|
+ zone_data.limit_ = segment->end();
|
+ ASSERT(zone_data.position_ <= zone_data.limit_);
|
return result;
|
}
|
|
Index: test/cctest/test-func-name-inference.cc
|
===================================================================
|
--- test/cctest/test-func-name-inference.cc (revision 2185)
|
+++ test/cctest/test-func-name-inference.cc Sun Nov 15 12:41:36 MSK 2009
|
@@ -43,8 +43,8 @@
|
using ::v8::internal::SmartPointer;
|
using ::v8::internal::SharedFunctionInfo;
|
using ::v8::internal::String;
|
+using v8::v8_context;
|
|
-
|
static v8::Persistent<v8::Context> env;
|
|
|
@@ -76,7 +76,7 @@
|
// Obtain SharedFunctionInfo for the function.
|
Object* shared_func_info_ptr =
|
Runtime::FindSharedFunctionInfoInScript(i_script, func_pos);
|
- CHECK(shared_func_info_ptr != Heap::undefined_value());
|
+ CHECK(shared_func_info_ptr != v8_context()->heap_.undefined_value());
|
Handle<SharedFunctionInfo> shared_func_info(
|
SharedFunctionInfo::cast(shared_func_info_ptr));
|
|
Index: src/ia32/ic-ia32.cc
|
===================================================================
|
--- src/ia32/ic-ia32.cc (revision 3114)
|
+++ src/ia32/ic-ia32.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -281,7 +281,7 @@
|
// In case the loaded value is the_hole we have to consult GetProperty
|
// to ensure the prototype chain is searched.
|
__ j(equal, &slow);
|
- __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_generic_smi, 1);
|
__ ret(0);
|
|
// Check whether the elements is a pixel array.
|
@@ -300,7 +300,7 @@
|
|
// Slow case: Load name and receiver from stack and jump to runtime.
|
__ bind(&slow);
|
- __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_generic_slow, 1);
|
Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
|
|
__ bind(&check_string);
|
@@ -322,7 +322,7 @@
|
GenerateDictionaryLoad(masm, &slow, ebx, ecx, edx, eax);
|
GenerateCheckNonObjectOrLoaded(masm, &slow, ecx, edx);
|
__ mov(eax, Operand(ecx));
|
- __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_generic_symbol, 1);
|
__ ret(0);
|
// Array index string: If short enough use cache in length/hash field (ebx).
|
// We assert that there are enough bits in an int32_t after the hash shift
|
@@ -383,7 +383,7 @@
|
// eax: index (as a smi)
|
// ecx: JSObject
|
__ mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
|
- Handle<Map> map(Heap::MapForExternalArrayType(array_type));
|
+ Handle<Map> map(v8_context()->heap_.MapForExternalArrayType(array_type));
|
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
|
Immediate(map));
|
__ j(not_equal, &slow, not_taken);
|
@@ -497,7 +497,7 @@
|
|
// Slow case: Load name and receiver from stack and jump to runtime.
|
__ bind(&slow);
|
- __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
|
+ __ IncrementCounter(&v8_context()->counters_.keyed_load_external_array_slow, 1);
|
Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
|
}
|
|
@@ -678,7 +678,7 @@
|
// edx: JSObject
|
// ebx: index (as a smi)
|
__ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
|
- Handle<Map> map(Heap::MapForExternalArrayType(array_type));
|
+ Handle<Map> map(v8_context()->heap_.MapForExternalArrayType(array_type));
|
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
|
Immediate(map));
|
__ j(not_equal, &slow);
|
@@ -850,7 +850,7 @@
|
// Probe the stub cache.
|
Code::Flags flags =
|
Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
|
- StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, eax);
|
+ v8_context()->stub_cache_.GenerateProbe(masm, flags, edx, ecx, ebx, eax);
|
|
// If the stub cache probing failed, the receiver might be a value.
|
// For value objects, we use the map of the prototype objects for
|
@@ -887,7 +887,7 @@
|
|
// Probe the stub cache for the value object.
|
__ bind(&probe);
|
- StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
|
+ v8_context()->stub_cache_.GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
|
|
// Cache miss: Jump to runtime.
|
__ bind(&miss);
|
@@ -1061,7 +1061,7 @@
|
Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
|
NOT_IN_LOOP,
|
MONOMORPHIC);
|
- StubCache::GenerateProbe(masm, flags, eax, ecx, ebx, edx);
|
+ v8_context()->stub_cache_.GenerateProbe(masm, flags, eax, ecx, ebx, edx);
|
|
// Cache miss: Jump to runtime.
|
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
|
@@ -1156,14 +1156,14 @@
|
// Reset the map check of the inlined inobject property load (if
|
// present) to guarantee failure by holding an invalid map (the null
|
// value). The offset can be patched to anything.
|
- PatchInlinedLoad(address, Heap::null_value(), kMaxInt);
|
+ PatchInlinedLoad(address, v8_context()->heap_.null_value(), kMaxInt);
|
}
|
|
|
void KeyedLoadIC::ClearInlinedVersion(Address address) {
|
// Insert null as the map to check for to make sure the map check fails
|
// sending control flow to the IC instead of the inlined version.
|
- PatchInlinedLoad(address, Heap::null_value());
|
+ PatchInlinedLoad(address, v8_context()->heap_.null_value());
|
}
|
|
|
@@ -1171,14 +1171,14 @@
|
// Insert null as the elements map to check for. This will make
|
// sure that the elements fast-case map check fails so that control
|
// flows to the IC instead of the inlined version.
|
- PatchInlinedStore(address, Heap::null_value());
|
+ PatchInlinedStore(address, v8_context()->heap_.null_value());
|
}
|
|
|
void KeyedStoreIC::RestoreInlinedVersion(Address address) {
|
// Restore the fast-case elements map check so that the inlined
|
// version can be used again.
|
- PatchInlinedStore(address, Heap::fixed_array_map());
|
+ PatchInlinedStore(address, v8_context()->heap_.fixed_array_map());
|
}
|
|
|
@@ -1289,7 +1289,7 @@
|
Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
|
NOT_IN_LOOP,
|
MONOMORPHIC);
|
- StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
|
+ v8_context()->stub_cache_.GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
|
|
// Cache miss: Jump to runtime.
|
Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
|
Index: src/frames.cc
|
===================================================================
|
--- src/frames.cc (revision 3209)
|
+++ src/frames.cc Sat Nov 14 01:42:54 MSK 2009
|
@@ -69,7 +69,7 @@
|
#define INITIALIZE_SINGLETON(type, field) field##_(this),
|
StackFrameIterator::StackFrameIterator()
|
: STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
|
- frame_(NULL), handler_(NULL), thread_(Top::GetCurrentThread()),
|
+ frame_(NULL), handler_(NULL), thread_(v8_context()->top_.GetCurrentThread()),
|
fp_(NULL), sp_(NULL), advance_(&StackFrameIterator::AdvanceWithHandler) {
|
Reset();
|
}
|
@@ -82,7 +82,7 @@
|
StackFrameIterator::StackFrameIterator(bool use_top, Address fp, Address sp)
|
: STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
|
frame_(NULL), handler_(NULL),
|
- thread_(use_top ? Top::GetCurrentThread() : NULL),
|
+ thread_(use_top ? v8_context()->top_.GetCurrentThread() : NULL),
|
fp_(use_top ? NULL : fp), sp_(sp),
|
advance_(use_top ? &StackFrameIterator::AdvanceWithHandler :
|
&StackFrameIterator::AdvanceWithoutHandler) {
|
@@ -131,8 +131,9 @@
|
StackFrame::State state;
|
StackFrame::Type type;
|
if (thread_ != NULL) {
|
- type = ExitFrame::GetStateForFramePointer(Top::c_entry_fp(thread_), &state);
|
- handler_ = StackHandler::FromAddress(Top::handler(thread_));
|
+ Top& top = v8_context()->top_;
|
+ type = ExitFrame::GetStateForFramePointer(top.c_entry_fp(thread_), &state);
|
+ handler_ = StackHandler::FromAddress(top.handler(thread_));
|
} else {
|
ASSERT(fp_ != NULL);
|
state.fp = fp_;
|
@@ -197,8 +198,8 @@
|
low_bound_(low_bound), high_bound_(high_bound),
|
is_valid_top_(
|
IsWithinBounds(low_bound, high_bound,
|
- Top::c_entry_fp(Top::GetCurrentThread())) &&
|
- Top::handler(Top::GetCurrentThread()) != NULL),
|
+ v8_context()->top_.c_entry_fp(v8_context()->top_.GetCurrentThread())) &&
|
+ v8_context()->top_.handler(v8_context()->top_.GetCurrentThread()) != NULL),
|
is_valid_fp_(IsWithinBounds(low_bound, high_bound, fp)),
|
is_working_iterator_(is_valid_top_ || is_valid_fp_),
|
iteration_done_(!is_working_iterator_),
|
@@ -299,14 +300,14 @@
|
|
|
void StackHandler::Cook(Code* code) {
|
- ASSERT(MarkCompactCollector::IsCompacting());
|
+ ASSERT(v8_context()->mark_compact_collector_.IsCompacting());
|
ASSERT(code->contains(pc()));
|
set_pc(AddressFrom<Address>(pc() - code->instruction_start()));
|
}
|
|
|
void StackHandler::Uncook(Code* code) {
|
- ASSERT(MarkCompactCollector::IsCompacting());
|
+ ASSERT(v8_context()->mark_compact_collector_.IsCompacting());
|
set_pc(code->instruction_start() + OffsetFrom(pc()));
|
ASSERT(code->contains(pc()));
|
}
|
@@ -324,7 +325,7 @@
|
void StackFrame::CookFramesForThread(ThreadLocalTop* thread) {
|
// Only cooking frames when the collector is compacting and thus moving code
|
// around.
|
- ASSERT(MarkCompactCollector::IsCompacting());
|
+ ASSERT(v8_context()->mark_compact_collector_.IsCompacting());
|
ASSERT(!thread->stack_is_cooked());
|
for (StackFrameIterator it(thread); !it.done(); it.Advance()) {
|
it.frame()->Cook();
|
@@ -336,7 +337,7 @@
|
void StackFrame::UncookFramesForThread(ThreadLocalTop* thread) {
|
// Only uncooking frames when the collector is compacting and thus moving code
|
// around.
|
- ASSERT(MarkCompactCollector::IsCompacting());
|
+ ASSERT(v8_context()->mark_compact_collector_.IsCompacting());
|
ASSERT(thread->stack_is_cooked());
|
for (StackFrameIterator it(thread); !it.done(); it.Advance()) {
|
it.frame()->Uncook();
|
@@ -372,7 +373,7 @@
|
|
|
Code* EntryFrame::code() const {
|
- return Heap::js_entry_code();
|
+ return v8_context()->heap_.js_entry_code();
|
}
|
|
|
@@ -389,7 +390,7 @@
|
|
|
Code* EntryConstructFrame::code() const {
|
- return Heap::js_construct_entry_code();
|
+ return v8_context()->heap_.js_construct_entry_code();
|
}
|
|
|
@@ -402,7 +403,7 @@
|
Code* ExitFrame::code() const {
|
Object* code = code_slot();
|
if (code->IsSmi()) {
|
- return Heap::c_entry_debug_break_code();
|
+ return v8_context()->heap_.c_entry_debug_break_code();
|
} else {
|
return Code::cast(code);
|
}
|
@@ -487,7 +488,7 @@
|
|
|
Code* ArgumentsAdaptorFrame::code() const {
|
- return Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline);
|
+ return v8_context()->builtins_.builtin(Builtins::ArgumentsAdaptorTrampoline);
|
}
|
|
|
@@ -730,7 +731,7 @@
|
|
|
int JSCallerSavedCode(int n) {
|
- static int reg_code[kNumJSCallerSaved];
|
+ static int reg_code[kNumJSCallerSaved]; // NOTE(review): intentionally left static — presumably per-process register codes, context-independent; confirm before moving into V8Context
|
static bool initialized = false;
|
if (!initialized) {
|
initialized = true;
|
Index: src/scanner.h
|
===================================================================
|
--- src/scanner.h (revision 3221)
|
+++ src/scanner.h Sat Nov 14 01:42:55 MSK 2009
|
@@ -316,16 +316,16 @@
|
Handle<String> SubString(int start_pos, int end_pos);
|
bool stack_overflow() { return stack_overflow_; }
|
|
- static StaticResource<Utf8Decoder>* utf8_decoder() { return &utf8_decoder_; }
|
+ StaticResource<Utf8Decoder>* utf8_decoder() { return &utf8_decoder_; }
|
|
// Tells whether the buffer contains an identifier (no escapes).
|
// Used for checking if a property name is an identifier.
|
static bool IsIdentifier(unibrow::CharacterStream* buffer);
|
|
- static unibrow::Predicate<IdentifierStart, 128> kIsIdentifierStart;
|
- static unibrow::Predicate<IdentifierPart, 128> kIsIdentifierPart;
|
- static unibrow::Predicate<unibrow::LineTerminator, 128> kIsLineTerminator;
|
- static unibrow::Predicate<unibrow::WhiteSpace, 128> kIsWhiteSpace;
|
+ unibrow::Predicate<IdentifierStart, 128> kIsIdentifierStart;
|
+ unibrow::Predicate<IdentifierPart, 128> kIsIdentifierPart;
|
+ unibrow::Predicate<unibrow::LineTerminator, 128> kIsLineTerminator;
|
+ unibrow::Predicate<unibrow::WhiteSpace, 128> kIsWhiteSpace;
|
|
static const int kCharacterLookaheadBufferSize = 1;
|
|
@@ -342,7 +342,7 @@
|
UTF8Buffer literals_;
|
|
bool stack_overflow_;
|
- static StaticResource<Utf8Decoder> utf8_decoder_;
|
+ StaticResource<Utf8Decoder> utf8_decoder_;
|
|
// One Unicode character look-ahead; c0_ < 0 at the end of the input.
|
uc32 c0_;
|
@@ -399,6 +399,9 @@
|
// Decodes a unicode escape-sequence which is part of an identifier.
|
// If the escape sequence cannot be decoded the result is kBadRune.
|
uc32 ScanIdentifierUnicodeEscape();
|
+ DISALLOW_COPY_AND_ASSIGN(Scanner);
|
+ Scanner();
|
+ friend class V8Context;
|
};
|
|
} } // namespace v8::internal
|
Index: src/v8-counters.cc
|
===================================================================
|
--- src/v8-counters.cc (revision 2038)
|
+++ src/v8-counters.cc Sat Nov 14 01:43:02 MSK 2009
|
@@ -32,24 +32,27 @@
|
namespace v8 {
|
namespace internal {
|
|
+Counters::Counters() {
|
+ int index = 0;
|
+ #define COUNTER_NAME(name) \
|
+ StatsCounter _##name = { "c:V8.State" #name, NULL, false };\
|
+ state_counters[index++] = _##name;
|
+ STATE_TAG_LIST(COUNTER_NAME)
|
+ #undef COUNTER_NAME
|
+
|
-#define HT(name, caption) \
|
+ #define HT(name, caption) \
|
- HistogramTimer Counters::name = { #caption, NULL, false, 0, 0 }; \
|
+ HistogramTimer _##name = { #caption, NULL, false, 0, 0 }; \
|
+ name = _##name;\
|
-
|
- HISTOGRAM_TIMER_LIST(HT)
|
-#undef SR
|
+
|
+ HISTOGRAM_TIMER_LIST(HT)
|
+ #undef HT
|
|
-#define SC(name, caption) \
|
+ #define SC(name, caption) \
|
- StatsCounter Counters::name = { "c:" #caption, NULL, false };
|
+ StatsCounter _##name = { "c:" #caption, NULL, false };\
|
+ name = _##name;
|
|
- STATS_COUNTER_LIST_1(SC)
|
- STATS_COUNTER_LIST_2(SC)
|
-#undef SC
|
+ STATS_COUNTER_LIST_1(SC)
|
+ STATS_COUNTER_LIST_2(SC)
|
+ #undef SC
|
-
|
-StatsCounter Counters::state_counters[] = {
|
-#define COUNTER_NAME(name) \
|
- { "c:V8.State" #name, NULL, false },
|
- STATE_TAG_LIST(COUNTER_NAME)
|
-#undef COUNTER_NAME
|
-};
|
-
|
+}
|
} } // namespace v8::internal
|
Index: test/cctest/test-strings.cc
|
===================================================================
|
--- test/cctest/test-strings.cc (revision 2884)
|
+++ test/cctest/test-strings.cc Sun Nov 15 13:18:18 MSK 2009
|
@@ -27,6 +27,7 @@
|
|
|
using namespace v8::internal;
|
+using v8::v8_context;
|
|
static v8::Persistent<v8::Context> env;
|
|
@@ -458,8 +459,8 @@
|
|
// Make sure the slice ends up in old space so we can morph it
|
// into a symbol.
|
- while (Heap::InNewSpace(*slice)) {
|
- Heap::PerformScavenge();
|
+ while (v8_context()->heap_.InNewSpace(*slice)) {
|
+ v8_context()->heap_.PerformScavenge();
|
}
|
|
// Force the slice into the symbol table.
|
@@ -482,7 +483,7 @@
|
// symbol entry in the symbol table because it is used by the script
|
// kept alive by the weak wrapper. Make sure we don't destruct the
|
// external string.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
CHECK(!resource_destructed);
|
|
{
|
@@ -501,16 +502,16 @@
|
// Forcing another garbage collection should let us get rid of the
|
// slice from the symbol table. The external string remains in the
|
// heap until the next GC.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
CHECK(!resource_destructed);
|
v8::HandleScope scope;
|
Handle<String> key_string = Factory::NewStringFromAscii(key_vector);
|
String* out;
|
- CHECK(!Heap::LookupSymbolIfExists(*key_string, &out));
|
+ CHECK(!v8_context()->heap_.LookupSymbolIfExists(*key_string, &out));
|
|
// Forcing yet another garbage collection must allow us to finally
|
// get rid of the external string.
|
- Heap::CollectAllGarbage(false);
|
+ v8_context()->heap_.CollectAllGarbage(false);
|
CHECK(resource_destructed);
|
|
delete[] source;
|
Index: src/global-handles.cc
|
===================================================================
|
--- src/global-handles.cc (revision 3230)
|
+++ src/global-handles.cc Sat Nov 14 01:43:03 MSK 2009
|
@@ -66,9 +66,10 @@
|
|
void Destroy() {
|
if (state_ == WEAK || IsNearDeath()) {
|
- GlobalHandles::number_of_weak_handles_--;
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ global_handles.number_of_weak_handles_--;
|
if (object_->IsJSGlobalObject()) {
|
- GlobalHandles::number_of_global_object_weak_handles_--;
|
+ global_handles.number_of_global_object_weak_handles_--;
|
}
|
}
|
state_ = DESTROYED;
|
@@ -103,9 +104,10 @@
|
LOG(HandleEvent("GlobalHandle::MakeWeak", handle().location()));
|
ASSERT(state_ != DESTROYED);
|
if (state_ != WEAK && !IsNearDeath()) {
|
- GlobalHandles::number_of_weak_handles_++;
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ global_handles.number_of_weak_handles_++;
|
if (object_->IsJSGlobalObject()) {
|
- GlobalHandles::number_of_global_object_weak_handles_++;
|
+ global_handles.number_of_global_object_weak_handles_++;
|
}
|
}
|
state_ = WEAK;
|
@@ -117,9 +119,10 @@
|
LOG(HandleEvent("GlobalHandle::ClearWeakness", handle().location()));
|
ASSERT(state_ != DESTROYED);
|
if (state_ == WEAK || IsNearDeath()) {
|
- GlobalHandles::number_of_weak_handles_--;
|
+ GlobalHandles& global_handles = v8_context()->global_handles_;
|
+ global_handles.number_of_weak_handles_--;
|
if (object_->IsJSGlobalObject()) {
|
- GlobalHandles::number_of_global_object_weak_handles_--;
|
+ global_handles.number_of_global_object_weak_handles_--;
|
}
|
}
|
state_ = NORMAL;
|
@@ -164,7 +167,7 @@
|
// Forbid reuse of destroyed nodes as they might be already deallocated.
|
// It's fine though to reuse nodes that were destroyed in weak callback
|
// as those cannot be deallocated until we are back from the callback.
|
- set_first_free(NULL);
|
+ v8_context()->global_handles_.set_first_free(NULL);
|
// Leaving V8.
|
VMState state(EXTERNAL);
|
func(object, par);
|
@@ -256,11 +259,14 @@
|
};
|
|
|
-static GlobalHandles::Pool pool_;
|
+class GlobalHandles::GlobalHandlesImpl {
|
+public:
|
+ Pool pool_;
|
+};
|
|
|
Handle<Object> GlobalHandles::Create(Object* value) {
|
- Counters::global_handles.Increment();
|
+ v8_context()->counters_.global_handles.Increment();
|
Node* result;
|
if (first_free()) {
|
// Take the first node in the free list.
|
@@ -273,7 +279,7 @@
|
set_head(result);
|
} else {
|
// Allocate a new node.
|
- result = pool_.Allocate();
|
+ result = global_handles_impl->pool_.Allocate();
|
result->set_next(head());
|
set_head(result);
|
}
|
@@ -283,7 +289,7 @@
|
|
|
void GlobalHandles::Destroy(Object** location) {
|
- Counters::global_handles.Decrement();
|
+ v8_context()->counters_.global_handles.Decrement();
|
if (location == NULL) return;
|
Node* node = Node::FromLocation(location);
|
node->Destroy();
|
@@ -357,7 +363,7 @@
|
// GC is completely done, because the callbacks may invoke arbitrary
|
// API functions.
|
// At the same time deallocate all DESTROYED nodes.
|
- ASSERT(Heap::gc_state() == Heap::NOT_IN_GC);
|
+ ASSERT(v8_context()->heap_.gc_state() == Heap::NOT_IN_GC);
|
const int initial_post_gc_processing_count = ++post_gc_processing_count;
|
Node** p = &head_;
|
while (*p != NULL) {
|
@@ -414,17 +420,25 @@
|
set_head(NULL);
|
set_first_free(NULL);
|
set_first_deallocated(NULL);
|
- pool_.Release();
|
+ global_handles_impl->pool_.Release();
|
}
|
|
+GlobalHandles::GlobalHandles():
|
+ number_of_weak_handles_(0),
|
+ number_of_global_object_weak_handles_(0),
|
+ head_ (NULL),
|
+ first_free_ (NULL),
|
+ first_deallocated_ (NULL),
|
+ object_groups_(NULL),
|
+ global_handles_impl(new GlobalHandlesImpl())
|
+{
|
+}
|
|
-int GlobalHandles::number_of_weak_handles_ = 0;
|
-int GlobalHandles::number_of_global_object_weak_handles_ = 0;
|
+GlobalHandles::~GlobalHandles() {
|
+ delete global_handles_impl;
|
+ delete object_groups_;
|
+}
|
|
-GlobalHandles::Node* GlobalHandles::head_ = NULL;
|
-GlobalHandles::Node* GlobalHandles::first_free_ = NULL;
|
-GlobalHandles::Node* GlobalHandles::first_deallocated_ = NULL;
|
-
|
#ifdef DEBUG
|
|
void GlobalHandles::PrintStats() {
|
@@ -462,9 +476,8 @@
|
#endif
|
|
List<ObjectGroup*>* GlobalHandles::ObjectGroups() {
|
- // Lazily initialize the list to avoid startup time static constructors.
|
- static List<ObjectGroup*> groups(4);
|
- return &groups;
|
+ if (!object_groups_) object_groups_ = new List<ObjectGroup*> (4);
|
+ return object_groups_;
|
}
|
|
void GlobalHandles::AddGroup(Object*** handles, size_t length) {
|