1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the distribution.
15 // - Neither the name of Sun Microsystems or the names of contributors may
16 // be used to endorse or promote products derived from this software without
17 // specific prior written permission.
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20 // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21 // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 // The original source code covered by the above license above has been
32 // modified significantly by Google Inc.
33 // Copyright 2012 the V8 project authors. All rights reserved.
35 #include "src/assembler.h"
39 #include "src/base/cpu.h"
40 #include "src/base/lazy-instance.h"
41 #include "src/base/platform/platform.h"
42 #include "src/builtins.h"
43 #include "src/codegen.h"
44 #include "src/counters.h"
45 #include "src/cpu-profiler.h"
46 #include "src/debug.h"
47 #include "src/deoptimizer.h"
48 #include "src/execution.h"
49 #include "src/ic/ic.h"
50 #include "src/ic/stub-cache.h"
51 #include "src/isolate-inl.h"
52 #include "src/jsregexp.h"
53 #include "src/regexp-macro-assembler.h"
54 #include "src/regexp-stack.h"
55 #include "src/runtime.h"
56 #include "src/serialize.h"
57 #include "src/token.h"
59 #if V8_TARGET_ARCH_IA32
60 #include "src/ia32/assembler-ia32-inl.h" // NOLINT
61 #elif V8_TARGET_ARCH_X64
62 #include "src/x64/assembler-x64-inl.h" // NOLINT
63 #elif V8_TARGET_ARCH_ARM64
64 #include "src/arm64/assembler-arm64-inl.h" // NOLINT
65 #elif V8_TARGET_ARCH_ARM
66 #include "src/arm/assembler-arm-inl.h" // NOLINT
67 #elif V8_TARGET_ARCH_MIPS
68 #include "src/mips/assembler-mips-inl.h" // NOLINT
69 #elif V8_TARGET_ARCH_MIPS64
70 #include "src/mips64/assembler-mips64-inl.h" // NOLINT
71 #elif V8_TARGET_ARCH_X87
72 #include "src/x87/assembler-x87-inl.h" // NOLINT
74 #error "Unknown architecture."
77 // Include native regexp-macro-assembler.
78 #ifndef V8_INTERPRETED_REGEXP
79 #if V8_TARGET_ARCH_IA32
80 #include "src/ia32/regexp-macro-assembler-ia32.h" // NOLINT
81 #elif V8_TARGET_ARCH_X64
82 #include "src/x64/regexp-macro-assembler-x64.h" // NOLINT
83 #elif V8_TARGET_ARCH_ARM64
84 #include "src/arm64/regexp-macro-assembler-arm64.h" // NOLINT
85 #elif V8_TARGET_ARCH_ARM
86 #include "src/arm/regexp-macro-assembler-arm.h" // NOLINT
87 #elif V8_TARGET_ARCH_MIPS
88 #include "src/mips/regexp-macro-assembler-mips.h" // NOLINT
89 #elif V8_TARGET_ARCH_MIPS64
90 #include "src/mips64/regexp-macro-assembler-mips64.h" // NOLINT
91 #elif V8_TARGET_ARCH_X87
92 #include "src/x87/regexp-macro-assembler-x87.h" // NOLINT
93 #else // Unknown architecture.
94 #error "Unknown architecture."
95 #endif // Target architecture.
96 #endif // V8_INTERPRETED_REGEXP
101 // -----------------------------------------------------------------------------
102 // Common double constants.
104 struct DoubleConstant BASE_EMBEDDED {
107 double minus_one_half;
108 double negative_infinity;
109 double canonical_non_hole_nan;
114 static DoubleConstant double_constants;
116 const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
118 static bool math_exp_data_initialized = false;
119 static base::Mutex* math_exp_data_mutex = NULL;
120 static double* math_exp_constants_array = NULL;
121 static double* math_exp_log_table_array = NULL;
123 // -----------------------------------------------------------------------------
124 // Implementation of AssemblerBase
126 AssemblerBase::AssemblerBase(Isolate* isolate, void* buffer, int buffer_size)
129 enabled_cpu_features_(0),
130 emit_debug_code_(FLAG_debug_code),
131 predictable_code_size_(false),
132 // We may use the assembler without an isolate.
133 serializer_enabled_(isolate && isolate->serializer_enabled()) {
134 if (FLAG_mask_constants_with_cookie && isolate != NULL) {
135 jit_cookie_ = isolate->random_number_generator()->NextInt();
137 own_buffer_ = buffer == NULL;
138 if (buffer_size == 0) buffer_size = kMinimalBufferSize;
139 DCHECK(buffer_size > 0);
140 if (own_buffer_) buffer = NewArray<byte>(buffer_size);
141 buffer_ = static_cast<byte*>(buffer);
142 buffer_size_ = buffer_size;
148 AssemblerBase::~AssemblerBase() {
149 if (own_buffer_) DeleteArray(buffer_);
153 // -----------------------------------------------------------------------------
154 // Implementation of PredictableCodeSizeScope
156 PredictableCodeSizeScope::PredictableCodeSizeScope(AssemblerBase* assembler,
158 : assembler_(assembler),
159 expected_size_(expected_size),
160 start_offset_(assembler->pc_offset()),
161 old_value_(assembler->predictable_code_size()) {
162 assembler_->set_predictable_code_size(true);
166 PredictableCodeSizeScope::~PredictableCodeSizeScope() {
167 // TODO(svenpanne) Remove the 'if' when everything works.
168 if (expected_size_ >= 0) {
169 CHECK_EQ(expected_size_, assembler_->pc_offset() - start_offset_);
171 assembler_->set_predictable_code_size(old_value_);
175 // -----------------------------------------------------------------------------
176 // Implementation of CpuFeatureScope
179 CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
180 : assembler_(assembler) {
181 DCHECK(CpuFeatures::IsSupported(f));
182 old_enabled_ = assembler_->enabled_cpu_features();
183 uint64_t mask = static_cast<uint64_t>(1) << f;
184 // TODO(svenpanne) This special case below doesn't belong here!
185 #if V8_TARGET_ARCH_ARM
186 // ARMv7 is implied by VFP3.
188 mask |= static_cast<uint64_t>(1) << ARMv7;
191 assembler_->set_enabled_cpu_features(old_enabled_ | mask);
195 CpuFeatureScope::~CpuFeatureScope() {
196 assembler_->set_enabled_cpu_features(old_enabled_);
201 bool CpuFeatures::initialized_ = false;
202 unsigned CpuFeatures::supported_ = 0;
203 unsigned CpuFeatures::cache_line_size_ = 0;
206 // -----------------------------------------------------------------------------
207 // Implementation of Label
209 int Label::pos() const {
210 if (pos_ < 0) return -pos_ - 1;
211 if (pos_ > 0) return pos_ - 1;
217 // -----------------------------------------------------------------------------
218 // Implementation of RelocInfoWriter and RelocIterator
220 // Relocation information is written backwards in memory, from high addresses
221 // towards low addresses, byte by byte. Therefore, in the encodings listed
222 // below, the first byte listed it at the highest address, and successive
223 // bytes in the record are at progressively lower addresses.
227 // The most common modes are given single-byte encodings. Also, it is
228 // easy to identify the type of reloc info and skip unwanted modes in
231 // The encoding relies on the fact that there are fewer than 14
232 // different relocation modes using standard non-compact encoding.
234 // The first byte of a relocation record has a tag in its low 2 bits:
235 // Here are the record schemes, depending on the low tag and optional higher
239 // 00: embedded_object: [6-bit pc delta] 00
241 // 01: code_target: [6-bit pc delta] 01
243 // 10: short_data_record: [6-bit pc delta] 10 followed by
244 // [6-bit data delta] [2-bit data type tag]
246 // 11: long_record [2-bit high tag][4 bit middle_tag] 11
247 // followed by variable data depending on type.
249 // 2-bit data type tags, used in short_data_record and data_jump long_record:
250 // code_target_with_id: 00
252 // statement_position: 10
253 // comment: 11 (not used in short_data_record)
255 // Long record format:
257 // 0000 - 1100 : Short record for RelocInfo::Mode middle_tag + 2
258 // (The middle_tag encodes rmode - RelocInfo::LAST_COMPACT_ENUM,
259 // and is between 0000 and 1100)
261 // 00 [4 bit middle_tag] 11 followed by
262 // 00 [6 bit pc delta]
264 // 1101: constant or veneer pool. Used only on ARM and ARM64 for now.
265 // The format is: [2-bit sub-type] 1101 11
266 // signed int (size of the pool).
267 // The 2-bit sub-types are:
270 // 1110: long_data_record
271 // The format is: [2-bit data_type_tag] 1110 11
272 // signed intptr_t, lowest byte written first
273 // (except data_type code_target_with_id, which
274 // is followed by a signed int, not intptr_t.)
276 // 1111: long_pc_jump
278 // pc-jump: 00 1111 11,
279 // 00 [6 bits pc delta]
281 // pc-jump (variable length):
286 // (Bits 6..31 of pc delta, with leading zeroes
287 // dropped, and last non-zero chunk tagged with 1.)
291 const int kMaxStandardNonCompactModes = 14;
294 const int kTagBits = 2;
295 const int kTagMask = (1 << kTagBits) - 1;
296 const int kExtraTagBits = 4;
297 const int kLocatableTypeTagBits = 2;
298 const int kSmallDataBits = kBitsPerByte - kLocatableTypeTagBits;
300 const int kEmbeddedObjectTag = 0;
301 const int kCodeTargetTag = 1;
302 const int kLocatableTag = 2;
303 const int kDefaultTag = 3;
305 const int kPCJumpExtraTag = (1 << kExtraTagBits) - 1;
307 const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
308 const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
309 const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
311 const int kVariableLengthPCJumpTopTag = 1;
312 const int kChunkBits = 7;
313 const int kChunkMask = (1 << kChunkBits) - 1;
314 const int kLastChunkTagBits = 1;
315 const int kLastChunkTagMask = 1;
316 const int kLastChunkTag = 1;
319 const int kDataJumpExtraTag = kPCJumpExtraTag - 1;
321 const int kCodeWithIdTag = 0;
322 const int kNonstatementPositionTag = 1;
323 const int kStatementPositionTag = 2;
324 const int kCommentTag = 3;
326 const int kPoolExtraTag = kPCJumpExtraTag - 2;
327 const int kConstPoolTag = 0;
328 const int kVeneerPoolTag = 1;
331 uint32_t RelocInfoWriter::WriteVariableLengthPCJump(uint32_t pc_delta) {
332 // Return if the pc_delta can fit in kSmallPCDeltaBits bits.
333 // Otherwise write a variable length PC jump for the bits that do
334 // not fit in the kSmallPCDeltaBits bits.
335 if (is_uintn(pc_delta, kSmallPCDeltaBits)) return pc_delta;
336 WriteExtraTag(kPCJumpExtraTag, kVariableLengthPCJumpTopTag);
337 uint32_t pc_jump = pc_delta >> kSmallPCDeltaBits;
339 // Write kChunkBits size chunks of the pc_jump.
340 for (; pc_jump > 0; pc_jump = pc_jump >> kChunkBits) {
341 byte b = pc_jump & kChunkMask;
342 *--pos_ = b << kLastChunkTagBits;
344 // Tag the last chunk so it can be identified.
345 *pos_ = *pos_ | kLastChunkTag;
346 // Return the remaining kSmallPCDeltaBits of the pc_delta.
347 return pc_delta & kSmallPCDeltaMask;
351 void RelocInfoWriter::WriteTaggedPC(uint32_t pc_delta, int tag) {
352 // Write a byte of tagged pc-delta, possibly preceded by var. length pc-jump.
353 pc_delta = WriteVariableLengthPCJump(pc_delta);
354 *--pos_ = pc_delta << kTagBits | tag;
358 void RelocInfoWriter::WriteTaggedData(intptr_t data_delta, int tag) {
359 *--pos_ = static_cast<byte>(data_delta << kLocatableTypeTagBits | tag);
363 void RelocInfoWriter::WriteExtraTag(int extra_tag, int top_tag) {
364 *--pos_ = static_cast<int>(top_tag << (kTagBits + kExtraTagBits) |
365 extra_tag << kTagBits |
370 void RelocInfoWriter::WriteExtraTaggedPC(uint32_t pc_delta, int extra_tag) {
371 // Write two-byte tagged pc-delta, possibly preceded by var. length pc-jump.
372 pc_delta = WriteVariableLengthPCJump(pc_delta);
373 WriteExtraTag(extra_tag, 0);
378 void RelocInfoWriter::WriteExtraTaggedIntData(int data_delta, int top_tag) {
379 WriteExtraTag(kDataJumpExtraTag, top_tag);
380 for (int i = 0; i < kIntSize; i++) {
381 *--pos_ = static_cast<byte>(data_delta);
382 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
383 data_delta = data_delta >> kBitsPerByte;
388 void RelocInfoWriter::WriteExtraTaggedPoolData(int data, int pool_type) {
389 WriteExtraTag(kPoolExtraTag, pool_type);
390 for (int i = 0; i < kIntSize; i++) {
391 *--pos_ = static_cast<byte>(data);
392 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
393 data = data >> kBitsPerByte;
398 void RelocInfoWriter::WriteExtraTaggedData(intptr_t data_delta, int top_tag) {
399 WriteExtraTag(kDataJumpExtraTag, top_tag);
400 for (int i = 0; i < kIntptrSize; i++) {
401 *--pos_ = static_cast<byte>(data_delta);
402 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
403 data_delta = data_delta >> kBitsPerByte;
408 void RelocInfoWriter::Write(const RelocInfo* rinfo) {
410 byte* begin_pos = pos_;
412 DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
413 DCHECK(rinfo->pc() - last_pc_ >= 0);
414 DCHECK(RelocInfo::LAST_STANDARD_NONCOMPACT_ENUM - RelocInfo::LAST_COMPACT_ENUM
415 <= kMaxStandardNonCompactModes);
416 // Use unsigned delta-encoding for pc.
417 uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
418 RelocInfo::Mode rmode = rinfo->rmode();
420 // The two most common modes are given small tags, and usually fit in a byte.
421 if (rmode == RelocInfo::EMBEDDED_OBJECT) {
422 WriteTaggedPC(pc_delta, kEmbeddedObjectTag);
423 } else if (rmode == RelocInfo::CODE_TARGET) {
424 WriteTaggedPC(pc_delta, kCodeTargetTag);
425 DCHECK(begin_pos - pos_ <= RelocInfo::kMaxCallSize);
426 } else if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
427 // Use signed delta-encoding for id.
428 DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
429 int id_delta = static_cast<int>(rinfo->data()) - last_id_;
430 // Check if delta is small enough to fit in a tagged byte.
431 if (is_intn(id_delta, kSmallDataBits)) {
432 WriteTaggedPC(pc_delta, kLocatableTag);
433 WriteTaggedData(id_delta, kCodeWithIdTag);
435 // Otherwise, use costly encoding.
436 WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
437 WriteExtraTaggedIntData(id_delta, kCodeWithIdTag);
439 last_id_ = static_cast<int>(rinfo->data());
440 } else if (RelocInfo::IsPosition(rmode)) {
441 // Use signed delta-encoding for position.
442 DCHECK(static_cast<int>(rinfo->data()) == rinfo->data());
443 int pos_delta = static_cast<int>(rinfo->data()) - last_position_;
444 int pos_type_tag = (rmode == RelocInfo::POSITION) ? kNonstatementPositionTag
445 : kStatementPositionTag;
446 // Check if delta is small enough to fit in a tagged byte.
447 if (is_intn(pos_delta, kSmallDataBits)) {
448 WriteTaggedPC(pc_delta, kLocatableTag);
449 WriteTaggedData(pos_delta, pos_type_tag);
451 // Otherwise, use costly encoding.
452 WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
453 WriteExtraTaggedIntData(pos_delta, pos_type_tag);
455 last_position_ = static_cast<int>(rinfo->data());
456 } else if (RelocInfo::IsComment(rmode)) {
457 // Comments are normally not generated, so we use the costly encoding.
458 WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
459 WriteExtraTaggedData(rinfo->data(), kCommentTag);
460 DCHECK(begin_pos - pos_ >= RelocInfo::kMinRelocCommentSize);
461 } else if (RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)) {
462 WriteExtraTaggedPC(pc_delta, kPCJumpExtraTag);
463 WriteExtraTaggedPoolData(static_cast<int>(rinfo->data()),
464 RelocInfo::IsConstPool(rmode) ? kConstPoolTag
467 DCHECK(rmode > RelocInfo::LAST_COMPACT_ENUM);
468 int saved_mode = rmode - RelocInfo::LAST_COMPACT_ENUM;
469 // For all other modes we simply use the mode as the extra tag.
470 // None of these modes need a data component.
471 DCHECK(saved_mode < kPCJumpExtraTag && saved_mode < kDataJumpExtraTag);
472 WriteExtraTaggedPC(pc_delta, saved_mode);
474 last_pc_ = rinfo->pc();
476 DCHECK(begin_pos - pos_ <= kMaxSize);
481 inline int RelocIterator::AdvanceGetTag() {
482 return *--pos_ & kTagMask;
486 inline int RelocIterator::GetExtraTag() {
487 return (*pos_ >> kTagBits) & ((1 << kExtraTagBits) - 1);
491 inline int RelocIterator::GetTopTag() {
492 return *pos_ >> (kTagBits + kExtraTagBits);
496 inline void RelocIterator::ReadTaggedPC() {
497 rinfo_.pc_ += *pos_ >> kTagBits;
501 inline void RelocIterator::AdvanceReadPC() {
502 rinfo_.pc_ += *--pos_;
506 void RelocIterator::AdvanceReadId() {
508 for (int i = 0; i < kIntSize; i++) {
509 x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
512 rinfo_.data_ = last_id_;
516 void RelocIterator::AdvanceReadPoolData() {
518 for (int i = 0; i < kIntSize; i++) {
519 x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
525 void RelocIterator::AdvanceReadPosition() {
527 for (int i = 0; i < kIntSize; i++) {
528 x |= static_cast<int>(*--pos_) << i * kBitsPerByte;
531 rinfo_.data_ = last_position_;
535 void RelocIterator::AdvanceReadData() {
537 for (int i = 0; i < kIntptrSize; i++) {
538 x |= static_cast<intptr_t>(*--pos_) << i * kBitsPerByte;
544 void RelocIterator::AdvanceReadVariableLengthPCJump() {
545 // Read the 32-kSmallPCDeltaBits most significant bits of the
546 // pc jump in kChunkBits bit chunks and shift them into place.
547 // Stop when the last chunk is encountered.
548 uint32_t pc_jump = 0;
549 for (int i = 0; i < kIntSize; i++) {
550 byte pc_jump_part = *--pos_;
551 pc_jump |= (pc_jump_part >> kLastChunkTagBits) << i * kChunkBits;
552 if ((pc_jump_part & kLastChunkTagMask) == 1) break;
554 // The least significant kSmallPCDeltaBits bits will be added
556 rinfo_.pc_ += pc_jump << kSmallPCDeltaBits;
560 inline int RelocIterator::GetLocatableTypeTag() {
561 return *pos_ & ((1 << kLocatableTypeTagBits) - 1);
565 inline void RelocIterator::ReadTaggedId() {
566 int8_t signed_b = *pos_;
567 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
568 last_id_ += signed_b >> kLocatableTypeTagBits;
569 rinfo_.data_ = last_id_;
573 inline void RelocIterator::ReadTaggedPosition() {
574 int8_t signed_b = *pos_;
575 // Signed right shift is arithmetic shift. Tested in test-utils.cc.
576 last_position_ += signed_b >> kLocatableTypeTagBits;
577 rinfo_.data_ = last_position_;
581 static inline RelocInfo::Mode GetPositionModeFromTag(int tag) {
582 DCHECK(tag == kNonstatementPositionTag ||
583 tag == kStatementPositionTag);
584 return (tag == kNonstatementPositionTag) ?
585 RelocInfo::POSITION :
586 RelocInfo::STATEMENT_POSITION;
590 void RelocIterator::next() {
592 // Basically, do the opposite of RelocInfoWriter::Write.
593 // Reading of data is as far as possible avoided for unwanted modes,
594 // but we must always update the pc.
596 // We exit this loop by returning when we find a mode we want.
597 while (pos_ > end_) {
598 int tag = AdvanceGetTag();
599 if (tag == kEmbeddedObjectTag) {
601 if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
602 } else if (tag == kCodeTargetTag) {
604 if (SetMode(RelocInfo::CODE_TARGET)) return;
605 } else if (tag == kLocatableTag) {
608 int locatable_tag = GetLocatableTypeTag();
609 if (locatable_tag == kCodeWithIdTag) {
610 if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
615 // Compact encoding is never used for comments,
616 // so it must be a position.
617 DCHECK(locatable_tag == kNonstatementPositionTag ||
618 locatable_tag == kStatementPositionTag);
619 if (mode_mask_ & RelocInfo::kPositionMask) {
620 ReadTaggedPosition();
621 if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
625 DCHECK(tag == kDefaultTag);
626 int extra_tag = GetExtraTag();
627 if (extra_tag == kPCJumpExtraTag) {
628 if (GetTopTag() == kVariableLengthPCJumpTopTag) {
629 AdvanceReadVariableLengthPCJump();
633 } else if (extra_tag == kDataJumpExtraTag) {
634 int locatable_tag = GetTopTag();
635 if (locatable_tag == kCodeWithIdTag) {
636 if (SetMode(RelocInfo::CODE_TARGET_WITH_ID)) {
641 } else if (locatable_tag != kCommentTag) {
642 DCHECK(locatable_tag == kNonstatementPositionTag ||
643 locatable_tag == kStatementPositionTag);
644 if (mode_mask_ & RelocInfo::kPositionMask) {
645 AdvanceReadPosition();
646 if (SetMode(GetPositionModeFromTag(locatable_tag))) return;
651 DCHECK(locatable_tag == kCommentTag);
652 if (SetMode(RelocInfo::COMMENT)) {
656 Advance(kIntptrSize);
658 } else if (extra_tag == kPoolExtraTag) {
659 int pool_type = GetTopTag();
660 DCHECK(pool_type == kConstPoolTag || pool_type == kVeneerPoolTag);
661 RelocInfo::Mode rmode = (pool_type == kConstPoolTag) ?
662 RelocInfo::CONST_POOL : RelocInfo::VENEER_POOL;
663 if (SetMode(rmode)) {
664 AdvanceReadPoolData();
670 int rmode = extra_tag + RelocInfo::LAST_COMPACT_ENUM;
671 if (SetMode(static_cast<RelocInfo::Mode>(rmode))) return;
675 if (code_age_sequence_ != NULL) {
676 byte* old_code_age_sequence = code_age_sequence_;
677 code_age_sequence_ = NULL;
678 if (SetMode(RelocInfo::CODE_AGE_SEQUENCE)) {
680 rinfo_.pc_ = old_code_age_sequence;
688 RelocIterator::RelocIterator(Code* code, int mode_mask) {
690 rinfo_.pc_ = code->instruction_start();
692 // Relocation info is read backwards.
693 pos_ = code->relocation_start() + code->relocation_size();
694 end_ = code->relocation_start();
696 mode_mask_ = mode_mask;
699 byte* sequence = code->FindCodeAgeSequence();
700 // We get the isolate from the map, because at serialization time
701 // the code pointer has been cloned and isn't really in heap space.
702 Isolate* isolate = code->map()->GetIsolate();
703 if (sequence != NULL && !Code::IsYoungSequence(isolate, sequence)) {
704 code_age_sequence_ = sequence;
706 code_age_sequence_ = NULL;
708 if (mode_mask_ == 0) pos_ = end_;
713 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask) {
714 rinfo_.pc_ = desc.buffer;
716 // Relocation info is read backwards.
717 pos_ = desc.buffer + desc.buffer_size;
718 end_ = pos_ - desc.reloc_size;
720 mode_mask_ = mode_mask;
723 code_age_sequence_ = NULL;
724 if (mode_mask_ == 0) pos_ = end_;
729 // -----------------------------------------------------------------------------
730 // Implementation of RelocInfo
734 bool RelocInfo::RequiresRelocation(const CodeDesc& desc) {
735 // Ensure there are no code targets or embedded objects present in the
736 // deoptimization entries, they would require relocation after code
738 int mode_mask = RelocInfo::kCodeTargetMask |
739 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
740 RelocInfo::ModeMask(RelocInfo::CELL) |
741 RelocInfo::kApplyMask;
742 RelocIterator it(desc, mode_mask);
748 #ifdef ENABLE_DISASSEMBLER
749 const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
751 case RelocInfo::NONE32:
752 return "no reloc 32";
753 case RelocInfo::NONE64:
754 return "no reloc 64";
755 case RelocInfo::EMBEDDED_OBJECT:
756 return "embedded object";
757 case RelocInfo::CONSTRUCT_CALL:
758 return "code target (js construct call)";
759 case RelocInfo::DEBUG_BREAK:
760 return "debug break";
761 case RelocInfo::CODE_TARGET:
762 return "code target";
763 case RelocInfo::CODE_TARGET_WITH_ID:
764 return "code target with id";
765 case RelocInfo::CELL:
766 return "property cell";
767 case RelocInfo::RUNTIME_ENTRY:
768 return "runtime entry";
769 case RelocInfo::JS_RETURN:
771 case RelocInfo::COMMENT:
773 case RelocInfo::POSITION:
775 case RelocInfo::STATEMENT_POSITION:
776 return "statement position";
777 case RelocInfo::EXTERNAL_REFERENCE:
778 return "external reference";
779 case RelocInfo::INTERNAL_REFERENCE:
780 return "internal reference";
781 case RelocInfo::CONST_POOL:
782 return "constant pool";
783 case RelocInfo::VENEER_POOL:
784 return "veneer pool";
785 case RelocInfo::DEBUG_BREAK_SLOT:
786 return "debug break slot";
787 case RelocInfo::CODE_AGE_SEQUENCE:
788 return "code_age_sequence";
789 case RelocInfo::NUMBER_OF_MODES:
791 return "number_of_modes";
793 return "unknown relocation type";
797 void RelocInfo::Print(Isolate* isolate, OStream& os) { // NOLINT
798 os << pc_ << " " << RelocModeName(rmode_);
799 if (IsComment(rmode_)) {
800 os << " (" << reinterpret_cast<char*>(data_) << ")";
801 } else if (rmode_ == EMBEDDED_OBJECT) {
802 os << " (" << Brief(target_object()) << ")";
803 } else if (rmode_ == EXTERNAL_REFERENCE) {
804 ExternalReferenceEncoder ref_encoder(isolate);
805 os << " (" << ref_encoder.NameOfAddress(target_reference()) << ") ("
806 << target_reference() << ")";
807 } else if (IsCodeTarget(rmode_)) {
808 Code* code = Code::GetCodeFromTargetAddress(target_address());
809 os << " (" << Code::Kind2String(code->kind()) << ") (" << target_address()
811 if (rmode_ == CODE_TARGET_WITH_ID) {
812 os << " (id=" << static_cast<int>(data_) << ")";
814 } else if (IsPosition(rmode_)) {
815 os << " (" << data() << ")";
816 } else if (IsRuntimeEntry(rmode_) &&
817 isolate->deoptimizer_data() != NULL) {
818 // Depotimization bailouts are stored as runtime entries.
819 int id = Deoptimizer::GetDeoptimizationId(
820 isolate, target_address(), Deoptimizer::EAGER);
821 if (id != Deoptimizer::kNotDeoptimizationEntry) {
822 os << " (deoptimization bailout " << id << ")";
828 #endif // ENABLE_DISASSEMBLER
832 void RelocInfo::Verify(Isolate* isolate) {
834 case EMBEDDED_OBJECT:
835 Object::VerifyPointer(target_object());
838 Object::VerifyPointer(target_cell());
842 case CODE_TARGET_WITH_ID:
844 // convert inline target address to code object
845 Address addr = target_address();
847 // Check that we can find the right code object.
848 Code* code = Code::GetCodeFromTargetAddress(addr);
849 Object* found = isolate->FindCodeObject(addr);
850 CHECK(found->IsCode());
851 CHECK(code->address() == HeapObject::cast(found)->address());
858 case STATEMENT_POSITION:
859 case EXTERNAL_REFERENCE:
860 case INTERNAL_REFERENCE:
863 case DEBUG_BREAK_SLOT:
867 case NUMBER_OF_MODES:
870 case CODE_AGE_SEQUENCE:
871 DCHECK(Code::IsYoungSequence(isolate, pc_) || code_age_stub()->IsCode());
875 #endif // VERIFY_HEAP
878 // -----------------------------------------------------------------------------
879 // Implementation of ExternalReference
881 void ExternalReference::SetUp() {
882 double_constants.min_int = kMinInt;
883 double_constants.one_half = 0.5;
884 double_constants.minus_one_half = -0.5;
885 double_constants.canonical_non_hole_nan = base::OS::nan_value();
886 double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64);
887 double_constants.negative_infinity = -V8_INFINITY;
888 double_constants.uint32_bias =
889 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
891 math_exp_data_mutex = new base::Mutex();
895 void ExternalReference::InitializeMathExpData() {
897 if (math_exp_data_initialized) return;
899 base::LockGuard<base::Mutex> lock_guard(math_exp_data_mutex);
900 if (!math_exp_data_initialized) {
901 // If this is changed, generated code must be adapted too.
902 const int kTableSizeBits = 11;
903 const int kTableSize = 1 << kTableSizeBits;
904 const double kTableSizeDouble = static_cast<double>(kTableSize);
906 math_exp_constants_array = new double[9];
907 // Input values smaller than this always return 0.
908 math_exp_constants_array[0] = -708.39641853226408;
909 // Input values larger than this always return +Infinity.
910 math_exp_constants_array[1] = 709.78271289338397;
911 math_exp_constants_array[2] = V8_INFINITY;
912 // The rest is black magic. Do not attempt to understand it. It is
913 // loosely based on the "expd" function published at:
914 // http://herumi.blogspot.com/2011/08/fast-double-precision-exponential.html
915 const double constant3 = (1 << kTableSizeBits) / std::log(2.0);
916 math_exp_constants_array[3] = constant3;
917 math_exp_constants_array[4] =
918 static_cast<double>(static_cast<int64_t>(3) << 51);
919 math_exp_constants_array[5] = 1 / constant3;
920 math_exp_constants_array[6] = 3.0000000027955394;
921 math_exp_constants_array[7] = 0.16666666685227835;
922 math_exp_constants_array[8] = 1;
924 math_exp_log_table_array = new double[kTableSize];
925 for (int i = 0; i < kTableSize; i++) {
926 double value = std::pow(2, i / kTableSizeDouble);
927 uint64_t bits = BitCast<uint64_t, double>(value);
928 bits &= (static_cast<uint64_t>(1) << 52) - 1;
929 double mantissa = BitCast<double, uint64_t>(bits);
930 math_exp_log_table_array[i] = mantissa;
933 math_exp_data_initialized = true;
938 void ExternalReference::TearDownMathExpData() {
939 delete[] math_exp_constants_array;
940 delete[] math_exp_log_table_array;
941 delete math_exp_data_mutex;
945 ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
946 : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
949 ExternalReference::ExternalReference(
951 Type type = ExternalReference::BUILTIN_CALL,
952 Isolate* isolate = NULL)
953 : address_(Redirect(isolate, fun->address(), type)) {}
956 ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
957 : address_(isolate->builtins()->builtin_address(name)) {}
960 ExternalReference::ExternalReference(Runtime::FunctionId id,
962 : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
965 ExternalReference::ExternalReference(const Runtime::Function* f,
967 : address_(Redirect(isolate, f->entry)) {}
970 ExternalReference ExternalReference::isolate_address(Isolate* isolate) {
971 return ExternalReference(isolate);
975 ExternalReference::ExternalReference(const IC_Utility& ic_utility,
977 : address_(Redirect(isolate, ic_utility.address())) {}
980 ExternalReference::ExternalReference(StatsCounter* counter)
981 : address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
984 ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
985 : address_(isolate->get_address_from_id(id)) {}
988 ExternalReference::ExternalReference(const SCTableReference& table_ref)
989 : address_(table_ref.address()) {}
992 ExternalReference ExternalReference::
993 incremental_marking_record_write_function(Isolate* isolate) {
994 return ExternalReference(Redirect(
996 FUNCTION_ADDR(IncrementalMarking::RecordWriteFromCode)));
1000 ExternalReference ExternalReference::
1001 store_buffer_overflow_function(Isolate* isolate) {
1002 return ExternalReference(Redirect(
1004 FUNCTION_ADDR(StoreBuffer::StoreBufferOverflow)));
1008 ExternalReference ExternalReference::flush_icache_function(Isolate* isolate) {
1009 return ExternalReference(
1010 Redirect(isolate, FUNCTION_ADDR(CpuFeatures::FlushICache)));
1014 ExternalReference ExternalReference::delete_handle_scope_extensions(
1016 return ExternalReference(Redirect(
1018 FUNCTION_ADDR(HandleScope::DeleteExtensions)));
1022 ExternalReference ExternalReference::get_date_field_function(
1024 return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
1028 ExternalReference ExternalReference::get_make_code_young_function(
1030 return ExternalReference(Redirect(
1031 isolate, FUNCTION_ADDR(Code::MakeCodeAgeSequenceYoung)));
1035 ExternalReference ExternalReference::get_mark_code_as_executed_function(
1037 return ExternalReference(Redirect(
1038 isolate, FUNCTION_ADDR(Code::MarkCodeAsExecuted)));
1042 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
1043 return ExternalReference(isolate->date_cache()->stamp_address());
1047 ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
1048 return ExternalReference(isolate->stress_deopt_count_address());
1052 ExternalReference ExternalReference::new_deoptimizer_function(
1054 return ExternalReference(
1055 Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
1059 ExternalReference ExternalReference::compute_output_frames_function(
1061 return ExternalReference(
1062 Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
1066 ExternalReference ExternalReference::log_enter_external_function(
1068 return ExternalReference(
1069 Redirect(isolate, FUNCTION_ADDR(Logger::EnterExternal)));
1073 ExternalReference ExternalReference::log_leave_external_function(
1075 return ExternalReference(
1076 Redirect(isolate, FUNCTION_ADDR(Logger::LeaveExternal)));
1080 ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
1081 return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
1085 ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
1087 return ExternalReference(
1088 isolate->keyed_lookup_cache()->field_offsets_address());
1092 ExternalReference ExternalReference::roots_array_start(Isolate* isolate) {
1093 return ExternalReference(isolate->heap()->roots_array_start());
1097 ExternalReference ExternalReference::allocation_sites_list_address(
1099 return ExternalReference(isolate->heap()->allocation_sites_list_address());
1103 ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
1104 return ExternalReference(isolate->stack_guard()->address_of_jslimit());
1108 ExternalReference ExternalReference::address_of_real_stack_limit(
1110 return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
1114 ExternalReference ExternalReference::address_of_regexp_stack_limit(
1116 return ExternalReference(isolate->regexp_stack()->limit_address());
1120 ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
1121 return ExternalReference(isolate->heap()->NewSpaceStart());
1125 ExternalReference ExternalReference::store_buffer_top(Isolate* isolate) {
1126 return ExternalReference(isolate->heap()->store_buffer()->TopAddress());
1130 ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
1131 return ExternalReference(reinterpret_cast<Address>(
1132 isolate->heap()->NewSpaceMask()));
1136 ExternalReference ExternalReference::new_space_allocation_top_address(
1138 return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
1142 ExternalReference ExternalReference::new_space_allocation_limit_address(
1144 return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
1148 ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
1150 return ExternalReference(
1151 isolate->heap()->OldPointerSpaceAllocationTopAddress());
1155 ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
1157 return ExternalReference(
1158 isolate->heap()->OldPointerSpaceAllocationLimitAddress());
1162 ExternalReference ExternalReference::old_data_space_allocation_top_address(
1164 return ExternalReference(
1165 isolate->heap()->OldDataSpaceAllocationTopAddress());
1169 ExternalReference ExternalReference::old_data_space_allocation_limit_address(
1171 return ExternalReference(
1172 isolate->heap()->OldDataSpaceAllocationLimitAddress());
1176 ExternalReference ExternalReference::handle_scope_level_address(
1178 return ExternalReference(HandleScope::current_level_address(isolate));
1182 ExternalReference ExternalReference::handle_scope_next_address(
1184 return ExternalReference(HandleScope::current_next_address(isolate));
1188 ExternalReference ExternalReference::handle_scope_limit_address(
1190 return ExternalReference(HandleScope::current_limit_address(isolate));
1194 ExternalReference ExternalReference::scheduled_exception_address(
1196 return ExternalReference(isolate->scheduled_exception_address());
1200 ExternalReference ExternalReference::address_of_pending_message_obj(
1202 return ExternalReference(isolate->pending_message_obj_address());
1206 ExternalReference ExternalReference::address_of_has_pending_message(
1208 return ExternalReference(isolate->has_pending_message_address());
1212 ExternalReference ExternalReference::address_of_pending_message_script(
1214 return ExternalReference(isolate->pending_message_script_address());
1218 ExternalReference ExternalReference::address_of_min_int() {
1219 return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
1223 ExternalReference ExternalReference::address_of_one_half() {
1224 return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
1228 ExternalReference ExternalReference::address_of_minus_one_half() {
1229 return ExternalReference(
1230 reinterpret_cast<void*>(&double_constants.minus_one_half));
1234 ExternalReference ExternalReference::address_of_negative_infinity() {
1235 return ExternalReference(
1236 reinterpret_cast<void*>(&double_constants.negative_infinity));
1240 ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
1241 return ExternalReference(
1242 reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
1246 ExternalReference ExternalReference::address_of_the_hole_nan() {
1247 return ExternalReference(
1248 reinterpret_cast<void*>(&double_constants.the_hole_nan));
1252 ExternalReference ExternalReference::address_of_uint32_bias() {
1253 return ExternalReference(
1254 reinterpret_cast<void*>(&double_constants.uint32_bias));
1258 ExternalReference ExternalReference::is_profiling_address(Isolate* isolate) {
1259 return ExternalReference(isolate->cpu_profiler()->is_profiling_address());
1263 ExternalReference ExternalReference::invoke_function_callback(
1265 Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
1266 ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
1267 ApiFunction thunk_fun(thunk_address);
1268 return ExternalReference(&thunk_fun, thunk_type, isolate);
1272 ExternalReference ExternalReference::invoke_accessor_getter_callback(
1274 Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1275 ExternalReference::Type thunk_type =
1276 ExternalReference::PROFILING_GETTER_CALL;
1277 ApiFunction thunk_fun(thunk_address);
1278 return ExternalReference(&thunk_fun, thunk_type, isolate);
1282 #ifndef V8_INTERPRETED_REGEXP
1284 ExternalReference ExternalReference::re_check_stack_guard_state(
1287 #if V8_TARGET_ARCH_X64
1288 function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
1289 #elif V8_TARGET_ARCH_IA32
1290 function = FUNCTION_ADDR(RegExpMacroAssemblerIA32::CheckStackGuardState);
1291 #elif V8_TARGET_ARCH_ARM64
1292 function = FUNCTION_ADDR(RegExpMacroAssemblerARM64::CheckStackGuardState);
1293 #elif V8_TARGET_ARCH_ARM
1294 function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
1295 #elif V8_TARGET_ARCH_MIPS
1296 function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1297 #elif V8_TARGET_ARCH_MIPS64
1298 function = FUNCTION_ADDR(RegExpMacroAssemblerMIPS::CheckStackGuardState);
1299 #elif V8_TARGET_ARCH_X87
1300 function = FUNCTION_ADDR(RegExpMacroAssemblerX87::CheckStackGuardState);
1304 return ExternalReference(Redirect(isolate, function));
1308 ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
1309 return ExternalReference(
1310 Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
1313 ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
1315 return ExternalReference(Redirect(
1317 FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
1321 ExternalReference ExternalReference::re_word_character_map() {
1322 return ExternalReference(
1323 NativeRegExpMacroAssembler::word_character_map_address());
1326 ExternalReference ExternalReference::address_of_static_offsets_vector(
1328 return ExternalReference(
1329 reinterpret_cast<Address>(isolate->jsregexp_static_offsets_vector()));
1332 ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
1334 return ExternalReference(
1335 isolate->regexp_stack()->memory_address());
1338 ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
1340 return ExternalReference(isolate->regexp_stack()->memory_size_address());
1343 #endif // V8_INTERPRETED_REGEXP
1346 ExternalReference ExternalReference::math_log_double_function(
1348 typedef double (*d2d)(double x);
1349 return ExternalReference(Redirect(isolate,
1350 FUNCTION_ADDR(static_cast<d2d>(std::log)),
1355 ExternalReference ExternalReference::math_exp_constants(int constant_index) {
1356 DCHECK(math_exp_data_initialized);
1357 return ExternalReference(
1358 reinterpret_cast<void*>(math_exp_constants_array + constant_index));
1362 ExternalReference ExternalReference::math_exp_log_table() {
1363 DCHECK(math_exp_data_initialized);
1364 return ExternalReference(reinterpret_cast<void*>(math_exp_log_table_array));
1368 ExternalReference ExternalReference::page_flags(Page* page) {
1369 return ExternalReference(reinterpret_cast<Address>(page) +
1370 MemoryChunk::kFlagsOffset);
1374 ExternalReference ExternalReference::ForDeoptEntry(Address entry) {
1375 return ExternalReference(entry);
1379 ExternalReference ExternalReference::cpu_features() {
1380 DCHECK(CpuFeatures::initialized_);
1381 return ExternalReference(&CpuFeatures::supported_);
1385 ExternalReference ExternalReference::debug_is_active_address(
1387 return ExternalReference(isolate->debug()->is_active_address());
1391 ExternalReference ExternalReference::debug_after_break_target_address(
1393 return ExternalReference(isolate->debug()->after_break_target_address());
1398 ExternalReference::debug_restarter_frame_function_pointer_address(
1400 return ExternalReference(
1401 isolate->debug()->restarter_frame_function_pointer_address());
1405 double power_helper(double x, double y) {
1406 int y_int = static_cast<int>(y);
1408 return power_double_int(x, y_int); // Returns 1 if exponent is 0.
1411 return (std::isinf(x)) ? V8_INFINITY
1412 : fast_sqrt(x + 0.0); // Convert -0 to +0.
1415 return (std::isinf(x)) ? 0 : 1.0 / fast_sqrt(x + 0.0); // Convert -0 to +0.
1417 return power_double_double(x, y);
1421 // Helper function to compute x^y, where y is known to be an
1422 // integer. Uses binary decomposition to limit the number of
1423 // multiplications; see the discussion in "Hacker's Delight" by Henry
1424 // S. Warren, Jr., figure 11-6, page 213.
1425 double power_double_int(double x, int y) {
1426 double m = (y < 0) ? 1 / x : x;
1427 unsigned n = (y < 0) ? -y : y;
1430 if ((n & 1) != 0) p *= m;
1432 if ((n & 2) != 0) p *= m;
1440 double power_double_double(double x, double y) {
1441 #if defined(__MINGW64_VERSION_MAJOR) && \
1442 (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)
1443 // MinGW64 has a custom implementation for pow. This handles certain
1444 // special cases that are different.
1445 if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
1447 if (std::modf(y, &f) != 0.0) {
1448 return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
1453 int y_int = static_cast<int>(y);
1455 return std::ldexp(1.0, y_int);
1460 // The checks for special cases can be dropped in ia32 because it has already
1461 // been done in generated code before bailing out here.
1462 if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
1463 return base::OS::nan_value();
1465 return std::pow(x, y);
1469 ExternalReference ExternalReference::power_double_double_function(
1471 return ExternalReference(Redirect(isolate,
1472 FUNCTION_ADDR(power_double_double),
1473 BUILTIN_FP_FP_CALL));
1477 ExternalReference ExternalReference::power_double_int_function(
1479 return ExternalReference(Redirect(isolate,
1480 FUNCTION_ADDR(power_double_int),
1481 BUILTIN_FP_INT_CALL));
1485 bool EvalComparison(Token::Value op, double op1, double op2) {
1486 DCHECK(Token::IsCompareOp(op));
1489 case Token::EQ_STRICT: return (op1 == op2);
1490 case Token::NE: return (op1 != op2);
1491 case Token::LT: return (op1 < op2);
1492 case Token::GT: return (op1 > op2);
1493 case Token::LTE: return (op1 <= op2);
1494 case Token::GTE: return (op1 >= op2);
1502 ExternalReference ExternalReference::mod_two_doubles_operation(
1504 return ExternalReference(Redirect(isolate,
1505 FUNCTION_ADDR(modulo),
1506 BUILTIN_FP_FP_CALL));
1510 ExternalReference ExternalReference::debug_break(Isolate* isolate) {
1511 return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug_Break)));
1515 ExternalReference ExternalReference::debug_step_in_fp_address(
1517 return ExternalReference(isolate->debug()->step_in_fp_addr());
1521 void PositionsRecorder::RecordPosition(int pos) {
1522 DCHECK(pos != RelocInfo::kNoPosition);
1524 state_.current_position = pos;
1525 LOG_CODE_EVENT(assembler_->isolate(),
1526 CodeLinePosInfoAddPositionEvent(jit_handler_data_,
1527 assembler_->pc_offset(),
1532 void PositionsRecorder::RecordStatementPosition(int pos) {
1533 DCHECK(pos != RelocInfo::kNoPosition);
1535 state_.current_statement_position = pos;
1536 LOG_CODE_EVENT(assembler_->isolate(),
1537 CodeLinePosInfoAddStatementPositionEvent(
1539 assembler_->pc_offset(),
1544 bool PositionsRecorder::WriteRecordedPositions() {
1545 bool written = false;
1547 // Write the statement position if it is different from what was written last
1549 if (state_.current_statement_position != state_.written_statement_position) {
1550 EnsureSpace ensure_space(assembler_);
1551 assembler_->RecordRelocInfo(RelocInfo::STATEMENT_POSITION,
1552 state_.current_statement_position);
1553 state_.written_statement_position = state_.current_statement_position;
1557 // Write the position if it is different from what was written last time and
1558 // also different from the written statement position.
1559 if (state_.current_position != state_.written_position &&
1560 state_.current_position != state_.written_statement_position) {
1561 EnsureSpace ensure_space(assembler_);
1562 assembler_->RecordRelocInfo(RelocInfo::POSITION, state_.current_position);
1563 state_.written_position = state_.current_position;
1567 // Return whether something was written.
1572 MultiplierAndShift::MultiplierAndShift(int32_t d) {
1573 DCHECK(d <= -2 || 2 <= d);
1574 const uint32_t two31 = 0x80000000;
1575 uint32_t ad = Abs(d);
1576 uint32_t t = two31 + (uint32_t(d) >> 31);
1577 uint32_t anc = t - 1 - t % ad; // Absolute value of nc.
1578 int32_t p = 31; // Init. p.
1579 uint32_t q1 = two31 / anc; // Init. q1 = 2**p/|nc|.
1580 uint32_t r1 = two31 - q1 * anc; // Init. r1 = rem(2**p, |nc|).
1581 uint32_t q2 = two31 / ad; // Init. q2 = 2**p/|d|.
1582 uint32_t r2 = two31 - q2 * ad; // Init. r2 = rem(2**p, |d|).
1586 q1 *= 2; // Update q1 = 2**p/|nc|.
1587 r1 *= 2; // Update r1 = rem(2**p, |nc|).
1588 if (r1 >= anc) { // Must be an unsigned comparison here.
1592 q2 *= 2; // Update q2 = 2**p/|d|.
1593 r2 *= 2; // Update r2 = rem(2**p, |d|).
1594 if (r2 >= ad) { // Must be an unsigned comparison here.
1599 } while (q1 < delta || (q1 == delta && r1 == 0));
1600 int32_t mul = static_cast<int32_t>(q2 + 1);
1601 multiplier_ = (d < 0) ? -mul : mul;
1605 } } // namespace v8::internal