Local<ObjectTemplate> FunctionTemplate::InstanceTemplate() {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (!Utils::ApiCheck(this != NULL,
+ i::Handle<i::FunctionTemplateInfo> handle = Utils::OpenHandle(this, true);
+ if (!Utils::ApiCheck(!handle.is_null(),
"v8::FunctionTemplate::InstanceTemplate()",
"Reading from empty handle")) {
return Local<ObjectTemplate>();
}
+ i::Isolate* isolate = handle->GetIsolate();
ENTER_V8(isolate);
- i::Handle<i::FunctionTemplateInfo> handle = Utils::OpenHandle(this);
if (handle->instance_template()->IsUndefined()) {
Local<ObjectTemplate> templ =
ObjectTemplate::New(isolate, ToApiHandle<FunctionTemplate>(handle));
Local<Value> Script::Run() {
+ i::Handle<i::HeapObject> obj =
+ i::Handle<i::HeapObject>::cast(Utils::OpenHandle(this, true));
// If execution is terminating, Compile(..)->Run() requires this
// check.
- if (this == NULL) return Local<Value>();
- i::Handle<i::HeapObject> obj =
- i::Handle<i::HeapObject>::cast(Utils::OpenHandle(this));
+ if (obj.is_null()) return Local<Value>();
i::Isolate* isolate = obj->GetIsolate();
ON_BAILOUT(isolate, "v8::Script::Run()", return Local<Value>());
LOG_API(isolate, "Script::Run");
bool Value::Equals(Handle<Value> that) const {
i::Isolate* isolate = i::Isolate::Current();
- if (!Utils::ApiCheck(this != NULL && !that.IsEmpty(),
+ i::Handle<i::Object> obj = Utils::OpenHandle(this, true);
+ if (!Utils::ApiCheck(!obj.is_null() && !that.IsEmpty(),
"v8::Value::Equals()",
"Reading from empty handle")) {
return false;
}
LOG_API(isolate, "Equals");
ENTER_V8(isolate);
- i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> other = Utils::OpenHandle(*that);
// If both obj and other are JSObjects, we'd better compare by identity
// immediately when going into JS builtin. The reason is Invoke
bool Value::StrictEquals(Handle<Value> that) const {
i::Isolate* isolate = i::Isolate::Current();
- if (!Utils::ApiCheck(this != NULL && !that.IsEmpty(),
+ i::Handle<i::Object> obj = Utils::OpenHandle(this, true);
+ if (!Utils::ApiCheck(!obj.is_null() && !that.IsEmpty(),
"v8::Value::StrictEquals()",
"Reading from empty handle")) {
return false;
}
LOG_API(isolate, "StrictEquals");
- i::Handle<i::Object> obj = Utils::OpenHandle(this);
i::Handle<i::Object> other = Utils::OpenHandle(*that);
// Must check HeapNumber first, since NaN !== NaN.
if (obj->IsHeapNumber()) {
bool Value::SameValue(Handle<Value> that) const {
  // Open the receiver in allow-empty mode: a NULL `this` yields an empty
  // handle (checked via is_null() below) instead of calling a member
  // function through a NULL pointer, which is undefined behavior.
  i::Handle<i::Object> obj = Utils::OpenHandle(this, true);
  if (!Utils::ApiCheck(!obj.is_null() && !that.IsEmpty(),
                       "v8::Value::SameValue()",
                       "Reading from empty handle")) {
    return false;
  }
  i::Handle<i::Object> other = Utils::OpenHandle(*that);
  // ES SameValue semantics are implemented by i::Object::SameValue.
  return obj->SameValue(*other);
}
int obj_size = Code::SizeFor(body_size);
Handle<Code> code = NewCodeRaw(obj_size, immovable);
- ASSERT(!isolate()->code_range()->exists() ||
+ ASSERT(isolate()->code_range() == NULL ||
+ !isolate()->code_range()->valid() ||
isolate()->code_range()->contains(code->address()));
// The code object has not been fully initialized yet. We rely on the
result->set_map_no_write_barrier(code_map());
Code* code = Code::cast(result);
- ASSERT(!isolate_->code_range()->exists() ||
- isolate_->code_range()->contains(code->address()));
+ ASSERT(isolate_->code_range() == NULL ||
+ !isolate_->code_range()->valid() ||
+ isolate_->code_range()->contains(code->address()));
code->set_gc_metadata(Smi::FromInt(0));
code->set_ic_age(global_ic_age_);
return code;
new_code->set_constant_pool(new_constant_pool);
// Relocate the copy.
- ASSERT(!isolate_->code_range()->exists() ||
- isolate_->code_range()->contains(code->address()));
+ ASSERT(isolate_->code_range() == NULL ||
+ !isolate_->code_range()->valid() ||
+ isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
return new_code;
}
static_cast<size_t>(reloc_info.length()));
// Relocate the copy.
- ASSERT(!isolate_->code_range()->exists() ||
- isolate_->code_range()->contains(code->address()));
+ ASSERT(isolate_->code_range() == NULL ||
+ !isolate_->code_range()->valid() ||
+ isolate_->code_range()->contains(code->address()));
new_code->Relocate(new_addr - old_addr);
#ifdef VERIFY_HEAP
// Recursively copy the entire linked list of field approximations.
// The NULL-chain case is handled at the recursion site (next_ == NULL)
// rather than by a `this == NULL` check, which would be undefined behavior.
HFieldApproximation* Copy(Zone* zone) {
  HFieldApproximation* copy = new(zone) HFieldApproximation();
  copy->object_ = this->object_;
  copy->last_value_ = this->last_value_;
  copy->next_ = this->next_ == NULL ? NULL : this->next_->Copy(zone);
  return copy;
}
};
new(zone) HLoadEliminationTable(zone, aliasing_);
copy->EnsureFields(fields_.length());
for (int i = 0; i < fields_.length(); i++) {
- copy->fields_[i] = fields_[i]->Copy(zone);
+ copy->fields_[i] = fields_[i] == NULL ? NULL : fields_[i]->Copy(zone);
}
if (FLAG_trace_load_elimination) {
TRACE((" copy-to B%d\n", succ->block_id()));
size_executable_ -= size;
}
// Code which is part of the code-range does not have its own VirtualMemory.
- ASSERT(!isolate_->code_range()->contains(
- static_cast<Address>(reservation->address())));
- ASSERT(executable == NOT_EXECUTABLE || !isolate_->code_range()->exists());
+ ASSERT(isolate_->code_range() == NULL ||
+ !isolate_->code_range()->contains(
+ static_cast<Address>(reservation->address())));
+ ASSERT(executable == NOT_EXECUTABLE ||
+ isolate_->code_range() == NULL ||
+ !isolate_->code_range()->valid());
reservation->Release();
}
ASSERT(size_executable_ >= size);
size_executable_ -= size;
}
- if (isolate_->code_range()->contains(static_cast<Address>(base))) {
+ if (isolate_->code_range() != NULL &&
+ isolate_->code_range()->contains(static_cast<Address>(base))) {
ASSERT(executable == EXECUTABLE);
isolate_->code_range()->FreeRawMemory(base, size);
} else {
- ASSERT(executable == NOT_EXECUTABLE || !isolate_->code_range()->exists());
+ ASSERT(executable == NOT_EXECUTABLE ||
+ isolate_->code_range() == NULL ||
+ !isolate_->code_range()->valid());
bool result = VirtualMemory::ReleaseRegion(base, size);
USE(result);
ASSERT(result);
}
} else {
CodeRange* code_range = heap_->isolate()->code_range();
- ASSERT(code_range->exists() && IsFlagSet(IS_EXECUTABLE));
+ ASSERT(code_range != NULL && code_range->valid() &&
+ IsFlagSet(IS_EXECUTABLE));
if (!code_range->CommitRawMemory(start, length)) return false;
}
if (!reservation_.Uncommit(start, length)) return false;
} else {
CodeRange* code_range = heap_->isolate()->code_range();
- ASSERT(code_range->exists() && IsFlagSet(IS_EXECUTABLE));
+ ASSERT(code_range != NULL && code_range->valid() &&
+ IsFlagSet(IS_EXECUTABLE));
if (!code_range->UncommitRawMemory(start, length)) return false;
}
}
OS::CommitPageSize());
// Allocate executable memory either from code range or from the
// OS.
- if (isolate_->code_range()->exists()) {
+ if (isolate_->code_range() != NULL && isolate_->code_range()->valid()) {
base = isolate_->code_range()->AllocateRawMemory(chunk_size,
commit_size,
&chunk_size);
case PROPERTY_CELL_SPACE:
size = 8 * kPointerSize * KB;
break;
- case CODE_SPACE:
- if (heap()->isolate()->code_range()->exists()) {
+ case CODE_SPACE: {
+ CodeRange* code_range = heap()->isolate()->code_range();
+ if (code_range != NULL && code_range->valid()) {
// When code range exists, code pages are allocated in a special way
// (from the reserved code range). That part of the code is not yet
// upgraded to handle small pages.
kPointerSize);
}
break;
+ }
default:
UNREACHABLE();
}
// manage it.
void TearDown();
- bool exists() { return this != NULL && code_range_ != NULL; }
+ bool valid() { return code_range_ != NULL; }
// Returns the base address of the reserved code range.
// Precondition: valid() — asserted here, so callers no longer get a
// silent NULL return for an unreserved range.
Address start() {
  ASSERT(valid());
  return static_cast<Address>(code_range_->address());
}
// Returns true if |address| lies inside the reserved code range.
// Safe to call when no range has been reserved (returns false).
bool contains(Address address) {
  if (!valid()) return false;
  Address start = static_cast<Address>(code_range_->address());
  return start <= address && address < start + code_range_->size();
}
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  // Encode the entry as a 32-bit offset from the code range base.
  // The explicit exists() ASSERT was dropped: code_range()->start()
  // now asserts valid() itself.
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}
// Decodes a runtime entry written by emit_runtime_entry: the 32-bit
// offset stored at |pc| plus the code range base address (start()
// asserts that the range is valid).
Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}
commit_area_size,
executable,
NULL);
- size_t alignment = code_range->exists() ?
+ size_t alignment = code_range != NULL && code_range->valid() ?
MemoryChunk::kAlignment : OS::CommitPageSize();
size_t reserved_size = ((executable == EXECUTABLE))
? RoundUp(header_size + guard_size + reserve_area_size + guard_size,