1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// The original source code covered by the above license has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
35 #include "src/assembler.h"
39 #include "src/base/cpu.h"
40 #include "src/base/functional.h"
41 #include "src/base/lazy-instance.h"
42 #include "src/base/platform/platform.h"
43 #include "src/builtins.h"
44 #include "src/codegen.h"
45 #include "src/counters.h"
46 #include "src/cpu-profiler.h"
47 #include "src/debug.h"
48 #include "src/deoptimizer.h"
49 #include "src/execution.h"
50 #include "src/ic/ic.h"
51 #include "src/ic/stub-cache.h"
52 #include "src/isolate-inl.h"
53 #include "src/jsregexp.h"
54 #include "src/regexp-macro-assembler.h"
55 #include "src/regexp-stack.h"
56 #include "src/runtime/runtime.h"
57 #include "src/snapshot/serialize.h"
58 #include "src/token.h"
60 #if V8_TARGET_ARCH_IA32
61 #include "src/ia32/assembler-ia32-inl.h" // NOLINT
62 #elif V8_TARGET_ARCH_X64
63 #include "src/x64/assembler-x64-inl.h" // NOLINT
64 #elif V8_TARGET_ARCH_ARM64
65 #include "src/arm64/assembler-arm64-inl.h" // NOLINT
66 #elif V8_TARGET_ARCH_ARM
67 #include "src/arm/assembler-arm-inl.h" // NOLINT
68 #elif V8_TARGET_ARCH_PPC
69 #include "src/ppc/assembler-ppc-inl.h" // NOLINT
70 #elif V8_TARGET_ARCH_MIPS
71 #include "src/mips/assembler-mips-inl.h" // NOLINT
72 #elif V8_TARGET_ARCH_MIPS64
73 #include "src/mips64/assembler-mips64-inl.h" // NOLINT
74 #elif V8_TARGET_ARCH_X87
75 #include "src/x87/assembler-x87-inl.h" // NOLINT
77 #error "Unknown architecture."
80 // Include native regexp-macro-assembler.
81 #ifndef V8_INTERPRETED_REGEXP
82 #if V8_TARGET_ARCH_IA32
83 #include "src/ia32/regexp-macro-assembler-ia32.h" // NOLINT
84 #elif V8_TARGET_ARCH_X64
85 #include "src/x64/regexp-macro-assembler-x64.h" // NOLINT
86 #elif V8_TARGET_ARCH_ARM64
87 #include "src/arm64/regexp-macro-assembler-arm64.h" // NOLINT
88 #elif V8_TARGET_ARCH_ARM
89 #include "src/arm/regexp-macro-assembler-arm.h" // NOLINT
90 #elif V8_TARGET_ARCH_PPC
91 #include "src/ppc/regexp-macro-assembler-ppc.h" // NOLINT
92 #elif V8_TARGET_ARCH_MIPS
93 #include "src/mips/regexp-macro-assembler-mips.h" // NOLINT
94 #elif V8_TARGET_ARCH_MIPS64
95 #include "src/mips64/regexp-macro-assembler-mips64.h" // NOLINT
96 #elif V8_TARGET_ARCH_X87
97 #include "src/x87/regexp-macro-assembler-x87.h" // NOLINT
98 #else // Unknown architecture.
99 #error "Unknown architecture."
100 #endif // Target architecture.
101 #endif // V8_INTERPRETED_REGEXP
106 // -----------------------------------------------------------------------------
107 // Common double constants.
// Table of double constants referenced from generated code via
// ExternalReference; populated by ExternalReference::SetUp().
// NOTE(review): additional members (min_int, one_half, ...) appear to be on
// elided lines — SetUp() below assigns fields not visible here.
struct DoubleConstant BASE_EMBEDDED {
  double minus_one_half;
  double negative_infinity;

static DoubleConstant double_constants;

// Comment string emitted at deoptimization padding sites.
const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";

// Lazily-initialized tables for the fast Math.exp code; guarded by
// math_exp_data_mutex (see InitializeMathExpData / TearDownMathExpData).
static bool math_exp_data_initialized = false;
static base::Mutex* math_exp_data_mutex = NULL;
static double* math_exp_constants_array = NULL;
static double* math_exp_log_table_array = NULL;
127 // -----------------------------------------------------------------------------
128 // Implementation of AssemblerBase
// Sets up the assembler with either a caller-provided code buffer or a
// freshly allocated one (of at least kMinimalBufferSize bytes) that this
// object then owns.
// NOTE(review): the start of the initializer list is on elided lines.
AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
      enabled_cpu_features_(0),
      emit_debug_code_(FLAG_debug_code),
      predictable_code_size_(false),
      // We may use the assembler without an isolate.
      serializer_enabled_(isolate && isolate->serializer_enabled()),
      ool_constant_pool_available_(false) {
  // Random cookie used to mask constants emitted into the instruction stream.
  if (FLAG_mask_constants_with_cookie && isolate != NULL) {
    jit_cookie_ = isolate->random_number_generator()->NextInt();
  // A NULL buffer means we must allocate (and later free) our own.
  own_buffer_ = buffer == NULL;
  if (buffer_size == 0) buffer_size = kMinimalBufferSize;
  DCHECK(buffer_size > 0);
  if (own_buffer_) buffer = NewArray<byte>(buffer_size);
  buffer_ = static_cast<byte*>(buffer);
  buffer_size_ = buffer_size;
// Frees the code buffer only if this assembler allocated it itself.
AssemblerBase::~AssemblerBase() {
  if (own_buffer_) DeleteArray(buffer_);
158 // -----------------------------------------------------------------------------
159 // Implementation of PredictableCodeSizeScope
// Forces predictable code size for the scope's lifetime and records the
// starting pc offset so the destructor can verify the bytes emitted.
// NOTE(review): the second parameter's declaration line is elided here.
PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
    : assembler_(assembler),
      expected_size_(expected_size),
      start_offset_(assembler->pc_offset()),
      old_value_(assembler->predictable_code_size()) {
  assembler_->set_predictable_code_size(true);
// Checks (when an expected size was given) that exactly that many bytes were
// emitted inside the scope, then restores the previous flag value.
PredictableCodeSizeScope::~PredictableCodeSizeScope() {
  // TODO(svenpanne) Remove the 'if' when everything works.
  if (expected_size_ >= 0) {
    CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
  assembler_->set_predictable_code_size(old_value_);
180 // -----------------------------------------------------------------------------
181 // Implementation of CpuFeatureScope
// Temporarily enables CPU feature |f| on the assembler; the previous feature
// mask is saved and restored by the destructor.
// NOTE(review): the conditional guarding the ARMv7 line and the matching
// `#endif` appear to be on elided lines.
CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
    : assembler_(assembler) {
  DCHECK(CpuFeatures::IsSupported(f));
  old_enabled_ = assembler_->enabled_cpu_features();
  uint64_t mask = static_cast<uint64_t>(1) << f;
  // TODO(svenpanne) This special case below doesn't belong here!
#if V8_TARGET_ARCH_ARM
  // ARMv7 is implied by VFP3.
  mask |= static_cast<uint64_t>(1) << ARMv7;
  assembler_->set_enabled_cpu_features(old_enabled_ | mask);
// Restores the feature mask that was active before this scope was entered.
CpuFeatureScope::~CpuFeatureScope() {
  assembler_->set_enabled_cpu_features(old_enabled_);
// Static storage for the process-wide CPU feature probe results
// (initialized elsewhere, outside this chunk).
bool CpuFeatures::initialized_ = false;
unsigned CpuFeatures::supported_ = 0;
unsigned CpuFeatures::cache_line_size_ = 0;
211 // -----------------------------------------------------------------------------
212 // Implementation of Label
// Decodes the label's biased position encoding: the magnitude is stored
// offset by one, with the sign distinguishing the two encodings (presumably
// bound vs. linked — confirm against the Label declaration in assembler.h).
// NOTE(review): the fallthrough for pos_ == 0 is on elided lines.
int Label::pos() const {
  if (pos_ < 0) return -pos_ - 1;
  if (pos_ > 0) return pos_ - 1;
222 // -----------------------------------------------------------------------------
223 // Implementation of RelocInfoWriter and RelocIterator
225 // Relocation information is written backwards in memory, from high addresses
226 // towards low addresses, byte by byte. Therefore, in the encodings listed
// below, the first byte listed is at the highest address, and successive
228 // bytes in the record are at progressively lower addresses.
232 // The most common modes are given single-byte encodings. Also, it is
233 // easy to identify the type of reloc info and skip unwanted modes in
236 // The encoding relies on the fact that there are fewer than 14
237 // different relocation modes using standard non-compact encoding.
239 // The first byte of a relocation record has a tag in its low 2 bits:
240 // Here are the record schemes, depending on the low tag and optional higher
244 // 00: embedded_object: [6-bit pc delta] 00
246 // 01: code_target: [6-bit pc delta] 01
248 // 10: short_data_record: [6-bit pc delta] 10 followed by
249 // [6-bit data delta] [2-bit data type tag]
251 // 11: long_record [2-bit high tag][4 bit middle_tag] 11
252 // followed by variable data depending on type.
254 // 2-bit data type tags, used in short_data_record and data_jump long_record:
255 // code_target_with_id: 00
257 // statement_position: 10
258 // comment: 11 (not used in short_data_record)
259 // deopt_reason: 11 (not used in long_data_record)
261 // Long record format:
263 // 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
264 // (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
265 // and is between 0000 and 1100)
267 // 00 [4 bit middle_tag] 11 followed by
268 // 00 [6 bit pc delta]
270 // 1101: constant or veneer pool. Used only on ARM and ARM64 for now.
271 // The format is: [2-bit sub-type] 1101 11
272 // signed int (size of the pool).
273 // The 2-bit sub-types are:
276 // 1110: long_data_record
277 // The format is: [2-bit data_type_tag] 1110 11
278 // signed intptr_t, lowest byte written first
279 // (except data_type code_target_with_id, which
280 // is followed by a signed int, not intptr_t.)
282 // 1111: long_pc_jump
284 // pc-jump: 00 1111 11,
285 // 00 [6 bits pc delta]
287 // pc-jump (variable length):
292 // (Bits 6..31 of pc delta, with leading zeroes
293 // dropped, and last non-zero chunk tagged with 1.)
// Sizes of the tag fields described in the format comment above.
const int kTagBits = 2;
const int kTagMask = (1 << kTagBits) - 1;
const int kExtraTagBits = 4;
const int kLocatableTypeTagBits = 2;
const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;

// Low-tag values (the 2 lowest bits of the first byte of a record).
const int kEmbeddedObjectTag = 0;
const int kCodeTargetTag = 1;
const int kLocatableTag = 2;
const int kDefaultTag = 3;

// Extra tag reserved for pc-jump records.
const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;

// A "small" pc delta must fit in one byte alongside the 2-bit tag.
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;

// Variable-length pc-jump chunks: 7 payload bits above a 1-bit
// last-chunk tag.
const int kVariableLengthPCJumpTopTag = 1;
const int kChunkBits = 7;
const int kChunkMask = (1 << kChunkBits) - 1;
const int kLastChunkTagBits = 1;
const int kLastChunkTagMask = 1;
const int kLastChunkTag = 1;

// Extra tag for long data records.
const int kDataJumpExtraTag = kPCJumpExtraTag - 1;

// 2-bit data type tags used with kLocatableTag / kDataJumpExtraTag records.
const int kCodeWithIdTag = 0;
const int kNonstatementPositionTag = 1;
const int kStatementPositionTag = 2;
const int kCommentTag = 3;

// Reuse the same value for deopt reason tag in short record format.
// It is possible because we use kCommentTag only for the long record format.
const int kDeoptReasonTag = 3;

// Extra tag for constant/veneer pool records, with their two sub-types.
const int kPoolExtraTag = kPCJumpExtraTag - 2;
const int kConstPoolTag = 0;
const int kVeneerPoolTag = 1;
// Writes the overflow bits of a pc delta as a variable-length pc-jump record
// and returns the low bits the caller still needs to emit.
uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
  // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
  // Otherwise write a variable length PC jump for the bits that do
  // not fit in the kSmallPCDeltaBits bits.
  if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
  WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
  uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
  // Write kChunkBits size chunks of the pc_jump.
  for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
    byte b = pc_jump & kChunkMask;
    *--pos_ = b << kLastChunkTagBits;
  // Tag the last chunk so it can be identified.
  *pos_ = *pos_ | kLastChunkTag;
  // Return the remaining kSmallPCDeltaBits of the pc_delta.
  return pc_delta & kSmallPCDeltaMask;
// Emits a single byte: [6-bit pc delta][2-bit tag], preceded by a pc-jump
// record when the delta does not fit in 6 bits.
void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
  // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  *--pos_ = pc_delta << kTagBits | tag;

// Emits a single byte: [6-bit data delta][2-bit data type tag].
void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
  *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);

// Emits the first byte of a long record:
// [2-bit top tag][4-bit extra tag][2-bit default tag].
// NOTE(review): the final operand of this expression (the low-tag bits) is
// on an elided line.
void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
  *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
                             extra_tag << kTagBits |

// Emits an extra-tagged pc-delta record.
// NOTE(review): the byte write of the delta itself is on an elided line.
void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
  // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
  pc_delta = WriteVariableLengthPCJump(pc_delta);
  WriteExtraTag(extra_tag, 0);
383 void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
384 WriteExtraTag(kDataJumpExtraTag, top_tag);
385 for (int i = 0; i < kIntSize; i++) {
386 *--pos_ = static_cast<byte>(data_delta);
387 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
388 data_delta = data_delta >> kBitsPerByte;
// Emits a pool record (const or veneer sub-type) with a little-endian int
// payload, lowest byte first.
void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
  WriteExtraTag(kPoolExtraTag, pool_type);
  for (int i = 0; i < kIntSize; i++) {
    *--pos_ = static_cast<byte>(data);
    // Signed right shift is arithmetic shift. Tested in test-utils.cc.
    data = data >> kBitsPerByte;

// Emits a long data record with a full intptr_t payload, lowest byte first.
void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
  WriteExtraTag(kDataJumpExtraTag, top_tag);
  for (int i = 0; i < kIntptrSize; i++) {
    *--pos_ = static_cast<byte>(data_delta);
    // Signed right shift is arithmetic shift. Tested in test-utils.cc.
    data_delta = data_delta >> kBitsPerByte;
// Writes a source-position record, picking the compact single-byte data
// encoding when the position delta fits, and the long encoding otherwise.
// NOTE(review): the `} else {` between the two encodings is on an elided line.
void RelocInfoWriter::WritePosition(int pc_delta, int pos_delta,
                                    RelocInfo::Mode rmode) {
  int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
                                                    : kStatementPositionTag;
  // Check if delta is small enough to fit in a tagged byte.
  if (is_intn(pos_delta, kSmallDataBits)) {
    WriteTaggedPC(pc_delta, kLocatableTag);
    WriteTaggedData(pos_delta, pos_type_tag);
  // Otherwise, use costly encoding.
  WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
  WriteExtraTaggedIntData(pos_delta, pos_type_tag);
429 void RelocInfoWriter::FlushPosition() {
430 if (!next_position_candidate_flushed_) {
431 WritePosition(next_position_candidate_pc_delta_,
432 next_position_candidate_pos_delta_, RelocInfo::POSITION);
433 next_position_candidate_pos_delta_ = 0;
434 next_position_candidate_pc_delta_ = 0;
435 next_position_candidate_flushed_ = true;
// Encodes one RelocInfo into the stream (written backwards), dispatching on
// the mode to the cheapest encoding that fits. This is the inverse of
// RelocIterator::next().
// NOTE(review): numerous closing-brace and `} else {` lines are elided in
// this view; the branch structure below follows the format comment above.
void RelocInfoWriter::Write(const RelocInfo* rinfo) {
  RelocInfo::Mode rmode = rinfo->rmode();
  if (rmode != RelocInfo::POSITION) {
  byte* begin_pos = pos_;
  DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
  DCHECK(rinfo->pc() - last_pc_ >= 0);
  // Use unsigned delta-encoding for pc.
  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);

  // The two most common modes are given small tags, and usually fit in a byte.
  if (rmode == RelocInfo::EMBEDDED_OBJECT) {
    WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
  } else if (rmode == RelocInfo::CODE_TARGET) {
    WriteTaggedPC(pc_delta, kCodeTargetTag);
    DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
  } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
    // Use signed delta-encoding for id.
    DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
    int id_delta = static_cast<int>(rinfo->data()) - last_id_;
    // Check if delta is small enough to fit in a tagged byte.
    if (is_intn(id_delta, kSmallDataBits)) {
      WriteTaggedPC(pc_delta, kLocatableTag);
      WriteTaggedData(id_delta, kCodeWithIdTag);
    // Otherwise, use costly encoding.
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
    last_id_ = static_cast<int>(rinfo->data());
  } else if (rmode == RelocInfo::DEOPT_REASON) {
    // Deopt reasons are small enough for the compact data encoding.
    DCHECK(rinfo->data() < (1 << kSmallDataBits));
    WriteTaggedPC(pc_delta, kLocatableTag);
    WriteTaggedData(rinfo->data(), kDeoptReasonTag);
  } else if (RelocInfo::IsPosition(rmode)) {
    // Use signed delta-encoding for position.
    DCHECK_EQ(static_cast<int>(rinfo->data()), rinfo->data());
    int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
    if (rmode == RelocInfo::STATEMENT_POSITION) {
      WritePosition(pc_delta, pos_delta, rmode);
    DCHECK_EQ(rmode, RelocInfo::POSITION);
    // Non-statement positions are buffered: a new candidate is started when
    // the pc moved or the mode changed, otherwise deltas are merged.
    if (pc_delta != 0 || last_mode_ != RelocInfo::POSITION) {
      next_position_candidate_pc_delta_ = pc_delta;
      next_position_candidate_pos_delta_ = pos_delta;
      next_position_candidate_pos_delta_ += pos_delta;
    next_position_candidate_flushed_ = false;
    last_position_ = static_cast<int>(rinfo->data());
  } else if (RelocInfo::IsComment(rmode)) {
    // Comments are normally not generated, so we use the costly encoding.
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedData(rinfo->data(), kCommentTag);
    DCHECK(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
  } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
    WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
    WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
                             RelocInfo::IsConstPool(rmode) ? kConstPoolTag
    DCHECK(rmode > RelocInfo::LAST_COMPACT_ENUM);
    DCHECK(rmode <= RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM);
    STATIC_ASSERT(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM -
                  RelocInfo::LAST_COMPACT_ENUM <=
    int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM - 1;
    // For all other modes we simply use the mode as the extra tag.
    // None of these modes need a data component.
    DCHECK(0 <= saved_mode && saved_mode < kPoolExtraTag);
    WriteExtraTaggedPC(pc_delta, saved_mode);
  last_pc_ = rinfo->pc();
  DCHECK(begin_pos - pos_ <= kMaxSize);
// Steps back one byte and returns its low 2-bit tag.
inline int RelocIterator::AdvanceGetTag() {
  return *--pos_ & kTagMask;

// Extracts the 4-bit extra tag from the current byte.
inline int RelocIterator::GetExtraTag() {
  return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);

// Extracts the 2-bit top tag from the current byte.
inline int RelocIterator::GetTopTag() {
  return *pos_ >> (kTagBits + kExtraTagBits);

// Adds the 6-bit pc delta from the current tagged byte to the pc.
inline void RelocIterator::ReadTaggedPC() {
  rinfo_.pc_ += *pos_ >> kTagBits;

// Reads a whole-byte pc delta, stepping back one byte.
inline void RelocIterator::AdvanceReadPC() {
  rinfo_.pc_ += *--pos_;
// Reads a little-endian int id delta and publishes the running id.
// NOTE(review): the local accumulator declarations (`int x = 0;`) and the
// lines applying the accumulated value are elided throughout this group.
void RelocIterator::AdvanceReadId() {
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  rinfo_.data_ = last_id_;

// Reads a little-endian int pool payload.
void RelocIterator::AdvanceReadPoolData() {
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;

// Reads a little-endian int position delta and publishes the running
// position.
void RelocIterator::AdvanceReadPosition() {
  for (int i = 0; i < kIntSize; i++) {
    x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
  rinfo_.data_ = last_position_;

// Reads a little-endian intptr_t payload (used for long data records).
void RelocIterator::AdvanceReadData() {
  for (int i = 0; i < kIntptrSize; i++) {
    x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
void RelocIterator::AdvanceReadVariableLengthPCJump() {
  // Read the 32-kSmallPCDeltaBits most significant bits of the
  // pc jump in kChunkBits bit chunks and shift them into place.
  // Stop when the last chunk is encountered.
  uint32_t pc_jump = 0;
  for (int i = 0; i < kIntSize; i++) {
    byte pc_jump_part = *--pos_;
    // Each chunk carries kChunkBits payload bits above the last-chunk tag bit.
    pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
    if ((pc_jump_part & kLastChunkTagMask) == 1) break;
  // The least significant kSmallPCDeltaBits bits will be added
  rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
// Extracts the 2-bit data type tag from the current byte.
inline int RelocIterator::GetLocatableTypeTag() {
  return *pos_ & ((1 << kLocatableTypeTagBits) - 1);

// Decodes the signed 6-bit id delta from the current tagged byte and
// publishes the running id.
inline void RelocIterator::ReadTaggedId() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
  last_id_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_id_;

// Decodes the signed 6-bit position delta from the current tagged byte and
// publishes the running position.
inline void RelocIterator::ReadTaggedPosition() {
  int8_t signed_b = *pos_;
  // Signed right shift is arithmetic shift. Tested in test-utils.cc.
  last_position_ += signed_b >> kLocatableTypeTagBits;
  rinfo_.data_ = last_position_;

// Decodes the unsigned 6-bit payload (e.g. a deopt reason) by shifting out
// the 2 tag bits.
inline void RelocIterator::ReadTaggedData() {
  uint8_t unsigned_b = *pos_;
  rinfo_.data_ = unsigned_b >> kTagBits;
631 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
632 DCHECK(tag == kNonstatementPositionTag ||
633 tag == kStatementPositionTag);
634 return (tag == kNonstatementPositionTag) ?
635 RelocInfo::POSITION :
636 RelocInfo::STATEMENT_POSITION;
// Advances to the next reloc entry whose mode is selected by mode_mask_,
// decoding records backwards; this is the inverse of RelocInfoWriter::Write.
// NOTE(review): many closing-brace / `} else {` lines are elided in this
// view; the branch structure mirrors the encoding comment above.
void RelocIterator::next() {
  // Basically, do the opposite of RelocInfoWriter::Write.
  // Reading of data is as far as possible avoided for unwanted modes,
  // but we must always update the pc.
  // We exit this loop by returning when we find a mode we want.
  while (pos_ > end_) {
    int tag = AdvanceGetTag();
    if (tag == kEmbeddedObjectTag) {
      if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
    } else if (tag == kCodeTargetTag) {
      if (SetMode(RelocInfo::CODE_TARGET)) return;
    } else if (tag == kLocatableTag) {
      int locatable_tag = GetLocatableTypeTag();
      if (locatable_tag == kCodeWithIdTag) {
        if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
      } else if (locatable_tag == kDeoptReasonTag) {
        if (SetMode(RelocInfo::DEOPT_REASON)) return;
        DCHECK(locatable_tag == kNonstatementPositionTag ||
               locatable_tag == kStatementPositionTag);
        if (mode_mask_ & RelocInfo::kPositionMask) {
          ReadTaggedPosition();
          if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
      DCHECK(tag == kDefaultTag);
      int extra_tag = GetExtraTag();
      if (extra_tag == kPCJumpExtraTag) {
        if (GetTopTag() == kVariableLengthPCJumpTopTag) {
          AdvanceReadVariableLengthPCJump();
      } else if (extra_tag == kDataJumpExtraTag) {
        int locatable_tag = GetTopTag();
        if (locatable_tag == kCodeWithIdTag) {
          if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
        } else if (locatable_tag != kCommentTag) {
          DCHECK(locatable_tag == kNonstatementPositionTag ||
                 locatable_tag == kStatementPositionTag);
          if (mode_mask_ & RelocInfo::kPositionMask) {
            AdvanceReadPosition();
            if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
          DCHECK(locatable_tag == kCommentTag);
          if (SetMode(RelocInfo::COMMENT)) {
          // Skip the comment payload when comments are not wanted.
          Advance(kIntptrSize);
      } else if (extra_tag == kPoolExtraTag) {
        int pool_type = GetTopTag();
        DCHECK(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
        RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
            RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
        if (SetMode(rmode)) {
          AdvanceReadPoolData();
        // Other non-compact modes store the mode itself as the extra tag.
        int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM + 1;
        if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
  // Deliver the pseudo reloc entry for the code age sequence last, if any.
  if (code_age_sequence_ != NULL) {
    byte* old_code_age_sequence = code_age_sequence_;
    code_age_sequence_ = NULL;
    if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
      rinfo_.pc_ = old_code_age_sequence;
// Iterates the relocation info attached to an installed Code object.
// mode_mask selects which modes next() stops at; a zero mask yields an
// empty iteration (pos_ is moved to end_ immediately).
RelocIterator::RelocIterator(Code* code, int mode_mask) {
  rinfo_.pc_ = code->instruction_start();
  // Relocation info is read backwards.
  pos_ = code->relocation_start() + code->relocation_size();
  end_ = code->relocation_start();
  mode_mask_ = mode_mask;
  byte* sequence = code->FindCodeAgeSequence();
  // We get the isolate from the map, because at serialization time
  // the code pointer has been cloned and isn't really in heap space.
  Isolate* isolate = code->map()->GetIsolate();
  // Remember a non-young code age sequence so next() can report it.
  if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
    code_age_sequence_ = sequence;
    code_age_sequence_ = NULL;
  if (mode_mask_ == 0) pos_ = end_;
// Iterates the relocation info of a not-yet-installed code buffer described
// by |desc|. There is no code age sequence in this case.
RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
  rinfo_.pc_ = desc.buffer;
  // Relocation info is read backwards.
  pos_ = desc.buffer + desc.buffer_size;
  end_ = pos_ - desc.reloc_size;
  mode_mask_ = mode_mask;
  code_age_sequence_ = NULL;
  if (mode_mask_ == 0) pos_ = end_;
780 // -----------------------------------------------------------------------------
781 // Implementation of RelocInfo
// Reports whether the code described by |desc| contains reloc entries that
// would have to be fixed up if the code moved.
// NOTE(review): the final return statement is on elided lines.
bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
  // Ensure there are no code targets or embedded objects present in the
  // deoptimization entries, they would require relocation after code
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::kApplyMask;
  RelocIterator it(desc, mode_mask);
799 #ifdef ENABLE_DISASSEMBLER
// Maps a reloc mode to a human-readable name for disassembler output.
// NOTE(review): the opening `switch` line and a few `return` lines are
// elided in this view.
const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
    case RelocInfo::NONE32:
      return "no reloc 32";
    case RelocInfo::NONE64:
      return "no reloc 64";
    case RelocInfo::EMBEDDED_OBJECT:
      return "embedded object";
    case RelocInfo::CONSTRUCT_CALL:
      return "code target (js construct call)";
    case RelocInfo::DEBUG_BREAK:
      return "debug break";
    case RelocInfo::CODE_TARGET:
      return "code target";
    case RelocInfo::CODE_TARGET_WITH_ID:
      return "code target with id";
    case RelocInfo::CELL:
      return "property cell";
    case RelocInfo::RUNTIME_ENTRY:
      return "runtime entry";
    case RelocInfo::JS_RETURN:
    case RelocInfo::COMMENT:
    case RelocInfo::POSITION:
    case RelocInfo::STATEMENT_POSITION:
      return "statement position";
    case RelocInfo::EXTERNAL_REFERENCE:
      return "external reference";
    case RelocInfo::INTERNAL_REFERENCE:
      return "internal reference";
    case RelocInfo::INTERNAL_REFERENCE_ENCODED:
      return "encoded internal reference";
    case RelocInfo::DEOPT_REASON:
      return "deopt reason";
    case RelocInfo::CONST_POOL:
      return "constant pool";
    case RelocInfo::VENEER_POOL:
      return "veneer pool";
    case RelocInfo::DEBUG_BREAK_SLOT:
      return "debug break slot";
    case RelocInfo::CODE_AGE_SEQUENCE:
      return "code_age_sequence";
    case RelocInfo::NUMBER_OF_MODES:
      return "number_of_modes";
  return "unknown relocation type";
// Pretty-prints this reloc entry (pc, mode name, and a mode-specific
// payload) to |os| for debugging/disassembly.
// NOTE(review): a few continuation lines (e.g. the start of the external
// reference output statement) are elided in this view.
void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
  os << static_cast<const void*>(pc_) << " " << RelocModeName(rmode_);
  if (IsComment(rmode_)) {
    os << " (" << reinterpret_cast<char*>(data_) << ")";
  } else if (rmode_ == DEOPT_REASON) {
    os << " (" << Deoptimizer::GetDeoptReason(
                      static_cast<Deoptimizer::DeoptReason>(data_)) << ")";
  } else if (rmode_ == EMBEDDED_OBJECT) {
    os << " (" << Brief(target_object()) << ")";
  } else if (rmode_ == EXTERNAL_REFERENCE) {
    ExternalReferenceEncoder ref_encoder(isolate);
       << ref_encoder.NameOfAddress(isolate, target_external_reference())
       << ") (" << static_cast<const void*>(target_external_reference())
  } else if (IsCodeTarget(rmode_)) {
    Code* code = Code::GetCodeFromTargetAddress(target_address());
    os << " (" << Code::Kind2String(code->kind()) << ") ("
       << static_cast<const void*>(target_address()) << ")";
    if (rmode_ == CODE_TARGET_WITH_ID) {
      os << " (id=" << static_cast<int>(data_) << ")";
  } else if (IsPosition(rmode_)) {
    os << " (" << data() << ")";
  } else if (IsRuntimeEntry(rmode_) &&
             isolate->deoptimizer_data() != NULL) {
    // Deoptimization bailouts are stored as runtime entries.
    int id = Deoptimizer::GetDeoptimizationId(
        isolate, target_address(), Deoptimizer::EAGER);
    if (id != Deoptimizer::kNotDeoptimizationEntry) {
      os << " (deoptimization bailout " << id << ")";
// Heap-verification checks for this reloc entry, dispatched on rmode_.
// NOTE(review): the opening `switch` line, several `case`/`break` lines, and
// closing braces are elided in this view.
void RelocInfo::Verify(Isolate* isolate) {
    case EMBEDDED_OBJECT:
      Object::VerifyPointer(target_object());
      Object::VerifyPointer(target_cell());
    case CODE_TARGET_WITH_ID:
      // convert inline target address to code object
      Address addr = target_address();
      // Check that we can find the right code object.
      Code* code = Code::GetCodeFromTargetAddress(addr);
      Object* found = isolate->FindCodeObject(addr);
      CHECK(found->IsCode());
      CHECK(code->address() == HeapObject::cast(found)->address());
    case INTERNAL_REFERENCE:
    case INTERNAL_REFERENCE_ENCODED: {
      // An internal reference must point inside the code object that
      // contains it.
      Address target = target_internal_reference();
      Address pc = target_internal_reference_address();
      Code* code = Code::cast(isolate->FindCodeObject(pc));
      CHECK(target >= code->instruction_start());
      CHECK(target <= code->instruction_end());
    case STATEMENT_POSITION:
    case EXTERNAL_REFERENCE:
    case DEBUG_BREAK_SLOT:
    case NUMBER_OF_MODES:
    case CODE_AGE_SEQUENCE:
      DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
947 // -----------------------------------------------------------------------------
948 // Implementation of ExternalReference
// Populates the double-constant table referenced from generated code and
// creates the mutex guarding the lazily-built Math.exp tables.
void ExternalReference::SetUp() {
  double_constants.min_int = kMinInt;
  double_constants.one_half = 0.5;
  double_constants.minus_one_half = -0.5;
  double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
  double_constants.negative_infinity = -V8_INFINITY;
  // Bias used when converting a 32-bit unsigned value via the double path.
  double_constants.uint32_bias =
      static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;

  math_exp_data_mutex = new base::Mutex();
// Builds the constant and mantissa tables used by the generated fast
// Math.exp code. Uses a double-checked pattern: an unsynchronized early
// return, then a re-check under math_exp_data_mutex before building.
void ExternalReference::InitializeMathExpData() {
  if (math_exp_data_initialized) return;
  base::LockGuard<base::Mutex> lock_guard(math_exp_data_mutex);
  if (!math_exp_data_initialized) {
    // If this is changed, generated code must be adapted too.
    const int kTableSizeBits = 11;
    const int kTableSize = 1 << kTableSizeBits;
    const double kTableSizeDouble = static_cast<double>(kTableSize);

    math_exp_constants_array = new double[9];
    // Input values smaller than this always return 0.
    math_exp_constants_array[0] = -708.39641853226408;
    // Input values larger than this always return +Infinity.
    math_exp_constants_array[1] = 709.78271289338397;
    math_exp_constants_array[2] = V8_INFINITY;
    // The rest is black magic. Do not attempt to understand it. It is
    // loosely based on the "expd" function published at:
    // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
    const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
    math_exp_constants_array[3] = constant3;
    math_exp_constants_array[4] =
        static_cast<double>(static_cast<int64_t>(3) << 51);
    math_exp_constants_array[5] = 1 / constant3;
    math_exp_constants_array[6] = 3.0000000027955394;
    math_exp_constants_array[7] = 0.16666666685227835;
    math_exp_constants_array[8] = 1;

    // Table of the mantissas of 2^(i/kTableSize) for each index i.
    math_exp_log_table_array = new double[kTableSize];
    for (int i = 0; i < kTableSize; i++) {
      double value = std::pow(2, i / kTableSizeDouble);
      uint64_t bits = bit_cast<uint64_t, double>(value);
      bits &= (static_cast<uint64_t>(1) << 52) - 1;
      double mantissa = bit_cast<double, uint64_t>(bits);
      math_exp_log_table_array[i] = mantissa;

    math_exp_data_initialized = true;
// Releases the Math.exp tables and their guarding mutex; safe to call even
// if InitializeMathExpData never ran (deleting NULL is a no-op).
// NOTE(review): math_exp_data_initialized is not reset here, so a
// re-initialization after teardown would be skipped — confirm intended.
void ExternalReference::TearDownMathExpData() {
  delete[] math_exp_constants_array;
  math_exp_constants_array = NULL;
  delete[] math_exp_log_table_array;
  math_exp_log_table_array = NULL;
  delete math_exp_data_mutex;
  math_exp_data_mutex = NULL;
// Address of the C function backing a builtin, routed through Redirect().
ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
    : address_(Redirect(isolate, Builtins::c_function_address(id))) {}

// Address of an API function, redirected according to |type|.
// NOTE(review): the first parameter's declaration line (the ApiFunction*)
// is elided in this view.
ExternalReference::ExternalReference(
    Type type = ExternalReference::BUILTIN_CALL,
    Isolate* isolate = NULL)
    : address_(Redirect(isolate, fun->address(), type)) {}

// Entry address of a builtin code object.
ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
    : address_(isolate->builtins()->builtin_address(name)) {}

// Entry point of the runtime function identified by |id|.
// NOTE(review): the Isolate* parameter line is elided in this view.
ExternalReference::ExternalReference(Runtime::FunctionId id,
    : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}

// Entry point of an already-resolved runtime function.
ExternalReference::ExternalReference(const Runtime::Function* f,
    : address_(Redirect(isolate, f->entry)) {}

// Wraps the isolate pointer itself as an external reference.
ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
  return ExternalReference(isolate);

// Address of an IC utility function, routed through Redirect().
ExternalReference::ExternalReference(const IC_Utility& ic_utility,
    : address_(Redirect(isolate, ic_utility.address())) {}

// Address of a stats counter's internal storage.
ExternalReference::ExternalReference(StatsCounter* counter)
    : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}

// Isolate-specific address identified by |id|.
ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
    : address_(isolate->get_address_from_id(id)) {}

// Address of a stub-cache table entry.
ExternalReference::ExternalReference(const SCTableReference& table_ref)
    : address_(table_ref.address()) {}
1063 ExternalReference ExternalReference::
1064 incremental_marking_record_write_function(Isolate* isolate) {
1065 return ExternalReference(Redirect(
1067 FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
1071 ExternalReference ExternalReference::
1072 store_buffer_overflow_function(Isolate* isolate) {
1073 return ExternalReference(Redirect(
1075 FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
1079 ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
1080 return ExternalReference(
1081 Redirect(isolate, FUNCTION_ADDR(CpuFeatures::FlushICache)));
1085 ExternalReference ExternalReference::delete_handle_scope_extensions(
1087 return ExternalReference(Redirect(
1089 FUNCTION_ADDR(HandleScope::DeleteExtensions)));
1093 ExternalReference ExternalReference::get_date_field_function(
1095 return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1099 ExternalReference ExternalReference::get_make_code_young_function(
1101 return ExternalReference(Redirect(
1102 isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1106 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1108 return ExternalReference(Redirect(
1109 isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1113 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1114 return ExternalReference(isolate->date_cache()->stamp_address());
1118 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1119 return ExternalReference(isolate->stress_deopt_count_address());
1123 ExternalReference ExternalReference::new_deoptimizer_function(
1125 return ExternalReference(
1126 Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1130 ExternalReference ExternalReference::compute_output_frames_function(
1132 return ExternalReference(
1133 Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1137 ExternalReference ExternalReference::log_enter_external_function(
1139 return ExternalReference(
1140 Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1144 ExternalReference ExternalReference::log_leave_external_function(
1146 return ExternalReference(
1147 Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1151 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
1152 return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1156 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
1158 return ExternalReference(
1159 isolate->keyed_lookup_cache()->field_offsets_address());
1163 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1164 return ExternalReference(isolate->heap()->roots_array_start());
1168 ExternalReference ExternalReference::allocation_sites_list_address(
1170 return ExternalReference(isolate->heap()->allocation_sites_list_address());
1174 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1175 return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1179 ExternalReference ExternalReference::address_of_real_stack_limit(
1181 return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1185 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1187 return ExternalReference(isolate->regexp_stack()->limit_address());
1191 ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
1192 return ExternalReference(isolate->heap()->NewSpaceStart());
1196 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1197 return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
1201 ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
1202 return ExternalReference(reinterpret_cast<Address>(
1203 isolate->heap()->NewSpaceMask()));
1207 ExternalReference ExternalReference::new_space_allocation_top_address(
1209 return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1213 ExternalReference ExternalReference::new_space_allocation_limit_address(
1215 return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1219 ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
1221 return ExternalReference(
1222 isolate->heap()->OldPointerSpaceAllocationTopAddress());
1226 ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
1228 return ExternalReference(
1229 isolate->heap()->OldPointerSpaceAllocationLimitAddress());
1233 ExternalReference ExternalReference::old_data_space_allocation_top_address(
1235 return ExternalReference(isolate->heap()->OldDataSpaceAllocationTopAddress());
1239 ExternalReference ExternalReference::old_data_space_allocation_limit_address(
1241 return ExternalReference(
1242 isolate->heap()->OldDataSpaceAllocationLimitAddress());
1246 ExternalReference ExternalReference::handle_scope_level_address(
1248 return ExternalReference(HandleScope::current_level_address(isolate));
1252 ExternalReference ExternalReference::handle_scope_next_address(
1254 return ExternalReference(HandleScope::current_next_address(isolate));
1258 ExternalReference ExternalReference::handle_scope_limit_address(
1260 return ExternalReference(HandleScope::current_limit_address(isolate));
1264 ExternalReference ExternalReference::scheduled_exception_address(
1266 return ExternalReference(isolate->scheduled_exception_address());
1270 ExternalReference ExternalReference::address_of_pending_message_obj(
1272 return ExternalReference(isolate->pending_message_obj_address());
1276 ExternalReference ExternalReference::address_of_min_int() {
1277 return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1281 ExternalReference ExternalReference::address_of_one_half() {
1282 return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1286 ExternalReference ExternalReference::address_of_minus_one_half() {
1287 return ExternalReference(
1288 reinterpret_cast<void*>(&double_constants.minus_one_half));
1292 ExternalReference ExternalReference::address_of_negative_infinity() {
1293 return ExternalReference(
1294 reinterpret_cast<void*>(&double_constants.negative_infinity));
1298 ExternalReference ExternalReference::address_of_the_hole_nan() {
1299 return ExternalReference(
1300 reinterpret_cast<void*>(&double_constants.the_hole_nan));
1304 ExternalReference ExternalReference::address_of_uint32_bias() {
1305 return ExternalReference(
1306 reinterpret_cast<void*>(&double_constants.uint32_bias));
1310 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1311 return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
1315 ExternalReference ExternalReference::invoke_function_callback(
1317 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
1318 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
1319 ApiFunction thunk_fun(thunk_address);
1320 return ExternalReference(&thunk_fun, thunk_type, isolate);
1324 ExternalReference ExternalReference::invoke_accessor_getter_callback(
1326 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1327 ExternalReference::Type thunk_type =
1328 ExternalReference::PROFILING_GETTER_CALL;
1329 ApiFunction thunk_fun(thunk_address);
1330 return ExternalReference(&thunk_fun, thunk_type, isolate);
1334 #ifndef V8_INTERPRETED_REGEXP
1336 ExternalReference ExternalReference::re_check_stack_guard_state(
1339 #if V8_TARGET_ARCH_X64
1340 function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1341 #elif V8_TARGET_ARCH_IA32
1342 function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1343 #elif V8_TARGET_ARCH_ARM64
1344 function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
1345 #elif V8_TARGET_ARCH_ARM
1346 function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1347 #elif V8_TARGET_ARCH_PPC
1348 function = FUNCTION_ADDR(RegExpMacroAssemblerPPC::CheckStackGuardState);
1349 #elif V8_TARGET_ARCH_MIPS
1350 function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1351 #elif V8_TARGET_ARCH_MIPS64
1352 function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1353 #elif V8_TARGET_ARCH_X87
1354 function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
1358 return ExternalReference(Redirect(isolate, function));
1362 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1363 return ExternalReference(
1364 Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1367 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1369 return ExternalReference(Redirect(
1371 FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1375 ExternalReference ExternalReference::re_word_character_map() {
1376 return ExternalReference(
1377 NativeRegExpMacroAssembler::word_character_map_address());
1380 ExternalReference ExternalReference::address_of_static_offsets_vector(
1382 return ExternalReference(
1383 reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1386 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1388 return ExternalReference(
1389 isolate->regexp_stack()->memory_address());
1392 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1394 return ExternalReference(isolate->regexp_stack()->memory_size_address());
1397 #endif // V8_INTERPRETED_REGEXP
1400 ExternalReference ExternalReference::math_log_double_function(
1402 typedef double (*d2d)(double x);
1403 return ExternalReference(Redirect(isolate,
1404 FUNCTION_ADDR(static_cast<d2d>(std::log)),
1409 ExternalReference ExternalReference::math_exp_constants(int constant_index) {
1410 DCHECK(math_exp_data_initialized);
1411 return ExternalReference(
1412 reinterpret_cast<void*>(math_exp_constants_array + constant_index));
1416 ExternalReference ExternalReference::math_exp_log_table() {
1417 DCHECK(math_exp_data_initialized);
1418 return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
1422 ExternalReference ExternalReference::page_flags(Page* page) {
1423 return ExternalReference(reinterpret_cast<Address>(page) +
1424 MemoryChunk::kFlagsOffset);
1428 ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
1429 return ExternalReference(entry);
1433 ExternalReference ExternalReference::cpu_features() {
1434 DCHECK(CpuFeatures::initialized_);
1435 return ExternalReference(&CpuFeatures::supported_);
1439 ExternalReference ExternalReference::debug_is_active_address(
1441 return ExternalReference(isolate->debug()->is_active_address());
1445 ExternalReference ExternalReference::debug_after_break_target_address(
1447 return ExternalReference(isolate->debug()->after_break_target_address());
1452 ExternalReference::debug_restarter_frame_function_pointer_address(
1454 return ExternalReference(
1455 isolate->debug()->restarter_frame_function_pointer_address());
1459 double power_helper(double x, double y) {
1460 int y_int = static_cast<int>(y);
1462 return power_double_int(x, y_int); // Returns 1 if exponent is 0.
1465 return (std::isinf(x)) ? V8_INFINITY
1466 : fast_sqrt(x + 0.0); // Convert -0 to +0.
1469 return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0); // Convert -0 to +0.
1471 return power_double_double(x, y);
// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
// S. Warren, Jr., figure 11-6, page 213.
double power_double_int(double x, int y) {
  double m = (y < 0) ? 1 / x : x;
  // Compute |y| in unsigned arithmetic so that y == INT_MIN does not trigger
  // undefined behavior (negating INT_MIN as a signed int overflows).
  unsigned n =
      (y < 0) ? 0u - static_cast<unsigned>(y) : static_cast<unsigned>(y);
  double p = 1;
  // Consume two exponent bits per iteration.
  while (n != 0) {
    if ((n & 1) != 0) p *= m;
    m *= m;
    if ((n & 2) != 0) p *= m;
    m *= m;
    n >>= 2;
  }
  return p;
}
1494 double power_double_double(double x, double y) {
1495 #if (defined(__MINGW64_VERSION_MAJOR) && \
1496 (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)) || \
1498 // MinGW64 and AIX have a custom implementation for pow. This handles certain
1499 // special cases that are different.
1500 if ((x == 0.0 || std::isinf(x)) && y != 0.0 && std::isfinite(y)) {
1502 double result = ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
1503 /* retain sign if odd integer exponent */
1504 return ((std::modf(y, &f) == 0.0) && (static_cast<int64_t>(y) & 1))
1505 ? copysign(result, x)
1510 int y_int = static_cast<int>(y);
1512 return std::ldexp(1.0, y_int);
1517 // The checks for special cases can be dropped in ia32 because it has already
1518 // been done in generated code before bailing out here.
1519 if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
1520 return std::numeric_limits<double>::quiet_NaN();
1522 return std::pow(x, y);
1526 ExternalReference ExternalReference::power_double_double_function(
1528 return ExternalReference(Redirect(isolate,
1529 FUNCTION_ADDR(power_double_double),
1530 BUILTIN_FP_FP_CALL));
1534 ExternalReference ExternalReference::power_double_int_function(
1536 return ExternalReference(Redirect(isolate,
1537 FUNCTION_ADDR(power_double_int),
1538 BUILTIN_FP_INT_CALL));
1542 bool EvalComparison(Token::Value op, double op1, double op2) {
1543 DCHECK(Token::IsCompareOp(op));
1546 case Token::EQ_STRICT: return (op1 == op2);
1547 case Token::NE: return (op1 != op2);
1548 case Token::LT: return (op1 < op2);
1549 case Token::GT: return (op1 > op2);
1550 case Token::LTE: return (op1 <= op2);
1551 case Token::GTE: return (op1 >= op2);
1559 ExternalReference ExternalReference::mod_two_doubles_operation(
1561 return ExternalReference(Redirect(isolate,
1562 FUNCTION_ADDR(modulo),
1563 BUILTIN_FP_FP_CALL));
1567 ExternalReference ExternalReference::debug_break(Isolate* isolate) {
1568 return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
1572 ExternalReference ExternalReference::debug_step_in_fp_address(
1574 return ExternalReference(isolate->debug()->step_in_fp_addr());
1578 bool operator==(ExternalReference lhs, ExternalReference rhs) {
1579 return lhs.address() == rhs.address();
1583 bool operator!=(ExternalReference lhs, ExternalReference rhs) {
1584 return !(lhs == rhs);
1588 size_t hash_value(ExternalReference reference) {
1589 return base::hash<Address>()(reference.address());
1593 std::ostream& operator<<(std::ostream& os, ExternalReference reference) {
1594 os << static_cast<const void*>(reference.address());
1595 const Runtime::Function* fn = Runtime::FunctionForEntry(reference.address());
1596 if (fn) os << "<" << fn->name << ".entry>";
1601 void PositionsRecorder::RecordPosition(int pos) {
1602 DCHECK(pos != RelocInfo::kNoPosition);
1604 state_.current_position = pos;
1605 LOG_CODE_EVENT(assembler_->isolate(),
1606 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
1607 assembler_->pc_offset(),
1612 void PositionsRecorder::RecordStatementPosition(int pos) {
1613 DCHECK(pos != RelocInfo::kNoPosition);
1615 state_.current_statement_position = pos;
1616 LOG_CODE_EVENT(assembler_->isolate(),
1617 CodeLinePosInfoAddStatementPositionEvent(
1619 assembler_->pc_offset(),
1624 bool PositionsRecorder::WriteRecordedPositions() {
1625 bool written = false;
1627 // Write the statement position if it is different from what was written last
1629 if (state_.current_statement_position != state_.written_statement_position) {
1630 EnsureSpace ensure_space(assembler_);
1631 assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1632 state_.current_statement_position);
1633 state_.written_statement_position = state_.current_statement_position;
1637 // Write the position if it is different from what was written last time and
1638 // also different from the written statement position.
1639 if (state_.current_position != state_.written_position &&
1640 state_.current_position != state_.written_statement_position) {
1641 EnsureSpace ensure_space(assembler_);
1642 assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1643 state_.written_position = state_.current_position;
1647 // Return whether something was written.
1652 // Platform specific but identical code for all the platforms.
1655 void Assembler::RecordDeoptReason(const int reason,
1656 const SourcePosition position) {
1657 if (FLAG_trace_deopt || isolate()->cpu_profiler()->is_profiling()) {
1658 EnsureSpace ensure_space(this);
1659 int raw_position = position.IsUnknown() ? 0 : position.raw();
1660 RecordRelocInfo(RelocInfo::POSITION, raw_position);
1661 RecordRelocInfo(RelocInfo::DEOPT_REASON, reason);
1666 void Assembler::RecordComment(const char* msg) {
1667 if (FLAG_code_comments) {
1668 EnsureSpace ensure_space(this);
1669 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
1674 void Assembler::RecordJSReturn() {
1675 positions_recorder()->WriteRecordedPositions();
1676 EnsureSpace ensure_space(this);
1677 RecordRelocInfo(RelocInfo::JS_RETURN);
1681 void Assembler::RecordDebugBreakSlot() {
1682 positions_recorder()->WriteRecordedPositions();
1683 EnsureSpace ensure_space(this);
1684 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
1686 } } // namespace v8::internal