Revision f230a1cf — deps/v8/src/full-codegen.h (side-by-side diff view)
139 | 139 |
#error Unsupported target architecture. |
140 | 140 |
#endif |
141 | 141 |
|
142 |
class BackEdgeTableIterator { |
|
143 |
public: |
|
144 |
explicit BackEdgeTableIterator(Code* unoptimized, |
|
145 |
DisallowHeapAllocation* required) { |
|
146 |
ASSERT(unoptimized->kind() == Code::FUNCTION); |
|
147 |
instruction_start_ = unoptimized->instruction_start(); |
|
148 |
cursor_ = instruction_start_ + unoptimized->back_edge_table_offset(); |
|
149 |
ASSERT(cursor_ < instruction_start_ + unoptimized->instruction_size()); |
|
150 |
table_length_ = Memory::uint32_at(cursor_); |
|
151 |
cursor_ += kTableLengthSize; |
|
152 |
end_ = cursor_ + table_length_ * kEntrySize; |
|
153 |
} |
|
154 |
|
|
155 |
bool Done() { return cursor_ >= end_; } |
|
156 |
|
|
157 |
void Next() { |
|
158 |
ASSERT(!Done()); |
|
159 |
cursor_ += kEntrySize; |
|
160 |
} |
|
161 |
|
|
162 |
BailoutId ast_id() { |
|
163 |
ASSERT(!Done()); |
|
164 |
return BailoutId(static_cast<int>( |
|
165 |
Memory::uint32_at(cursor_ + kAstIdOffset))); |
|
166 |
} |
|
167 |
|
|
168 |
uint32_t loop_depth() { |
|
169 |
ASSERT(!Done()); |
|
170 |
return Memory::uint32_at(cursor_ + kLoopDepthOffset); |
|
171 |
} |
|
172 |
|
|
173 |
uint32_t pc_offset() { |
|
174 |
ASSERT(!Done()); |
|
175 |
return Memory::uint32_at(cursor_ + kPcOffsetOffset); |
|
176 |
} |
|
177 |
|
|
178 |
Address pc() { |
|
179 |
ASSERT(!Done()); |
|
180 |
return instruction_start_ + pc_offset(); |
|
181 |
} |
|
182 |
|
|
183 |
uint32_t table_length() { return table_length_; } |
|
184 |
|
|
185 |
private: |
|
186 |
static const int kTableLengthSize = kIntSize; |
|
187 |
static const int kAstIdOffset = 0 * kIntSize; |
|
188 |
static const int kPcOffsetOffset = 1 * kIntSize; |
|
189 |
static const int kLoopDepthOffset = 2 * kIntSize; |
|
190 |
static const int kEntrySize = 3 * kIntSize; |
|
191 |
|
|
192 |
Address cursor_; |
|
193 |
Address end_; |
|
194 |
Address instruction_start_; |
|
195 |
uint32_t table_length_; |
|
196 |
|
|
197 |
DISALLOW_COPY_AND_ASSIGN(BackEdgeTableIterator); |
|
198 |
}; |
|
199 |
|
|
200 |
|
|
201 | 142 |
private: |
202 | 143 |
class Breakable; |
203 | 144 |
class Iteration; |
... | ... | |
635 | 576 |
void SetFunctionPosition(FunctionLiteral* fun); |
636 | 577 |
void SetReturnPosition(FunctionLiteral* fun); |
637 | 578 |
void SetStatementPosition(Statement* stmt); |
638 |
void SetExpressionPosition(Expression* expr, int pos);
|
|
579 |
void SetExpressionPosition(Expression* expr); |
|
639 | 580 |
void SetStatementPosition(int pos); |
640 | 581 |
void SetSourcePosition(int pos); |
641 | 582 |
|
... | ... | |
940 | 881 |
}; |
941 | 882 |
|
942 | 883 |
|
884 |
class BackEdgeTable { |
|
885 |
public: |
|
886 |
BackEdgeTable(Code* code, DisallowHeapAllocation* required) { |
|
887 |
ASSERT(code->kind() == Code::FUNCTION); |
|
888 |
instruction_start_ = code->instruction_start(); |
|
889 |
Address table_address = instruction_start_ + code->back_edge_table_offset(); |
|
890 |
length_ = Memory::uint32_at(table_address); |
|
891 |
start_ = table_address + kTableLengthSize; |
|
892 |
} |
|
893 |
|
|
894 |
uint32_t length() { return length_; } |
|
895 |
|
|
896 |
BailoutId ast_id(uint32_t index) { |
|
897 |
return BailoutId(static_cast<int>( |
|
898 |
Memory::uint32_at(entry_at(index) + kAstIdOffset))); |
|
899 |
} |
|
900 |
|
|
901 |
uint32_t loop_depth(uint32_t index) { |
|
902 |
return Memory::uint32_at(entry_at(index) + kLoopDepthOffset); |
|
903 |
} |
|
904 |
|
|
905 |
uint32_t pc_offset(uint32_t index) { |
|
906 |
return Memory::uint32_at(entry_at(index) + kPcOffsetOffset); |
|
907 |
} |
|
908 |
|
|
909 |
Address pc(uint32_t index) { |
|
910 |
return instruction_start_ + pc_offset(index); |
|
911 |
} |
|
912 |
|
|
913 |
enum BackEdgeState { |
|
914 |
INTERRUPT, |
|
915 |
ON_STACK_REPLACEMENT, |
|
916 |
OSR_AFTER_STACK_CHECK |
|
917 |
}; |
|
918 |
|
|
919 |
// Patch all interrupts with allowed loop depth in the unoptimized code to |
|
920 |
// unconditionally call replacement_code. |
|
921 |
static void Patch(Isolate* isolate, |
|
922 |
Code* unoptimized_code); |
|
923 |
|
|
924 |
// Patch the back edge to the target state, provided the correct callee. |
|
925 |
static void PatchAt(Code* unoptimized_code, |
|
926 |
Address pc, |
|
927 |
BackEdgeState target_state, |
|
928 |
Code* replacement_code); |
|
929 |
|
|
930 |
// Change all patched back edges back to normal interrupts. |
|
931 |
static void Revert(Isolate* isolate, |
|
932 |
Code* unoptimized_code); |
|
933 |
|
|
934 |
// Change a back edge patched for on-stack replacement to perform a |
|
935 |
// stack check first. |
|
936 |
static void AddStackCheck(CompilationInfo* info); |
|
937 |
|
|
938 |
// Remove the stack check, if available, and replace by on-stack replacement. |
|
939 |
static void RemoveStackCheck(CompilationInfo* info); |
|
940 |
|
|
941 |
// Return the current patch state of the back edge. |
|
942 |
static BackEdgeState GetBackEdgeState(Isolate* isolate, |
|
943 |
Code* unoptimized_code, |
|
944 |
Address pc_after); |
|
945 |
|
|
946 |
#ifdef DEBUG |
|
947 |
// Verify that all back edges of a certain loop depth are patched. |
|
948 |
static bool Verify(Isolate* isolate, |
|
949 |
Code* unoptimized_code, |
|
950 |
int loop_nesting_level); |
|
951 |
#endif // DEBUG |
|
952 |
|
|
953 |
private: |
|
954 |
Address entry_at(uint32_t index) { |
|
955 |
ASSERT(index < length_); |
|
956 |
return start_ + index * kEntrySize; |
|
957 |
} |
|
958 |
|
|
959 |
static const int kTableLengthSize = kIntSize; |
|
960 |
static const int kAstIdOffset = 0 * kIntSize; |
|
961 |
static const int kPcOffsetOffset = 1 * kIntSize; |
|
962 |
static const int kLoopDepthOffset = 2 * kIntSize; |
|
963 |
static const int kEntrySize = 3 * kIntSize; |
|
964 |
|
|
965 |
Address start_; |
|
966 |
Address instruction_start_; |
|
967 |
uint32_t length_; |
|
968 |
}; |
|
969 |
|
|
970 |
|
|
943 | 971 |
} } // namespace v8::internal |
944 | 972 |
|
945 | 973 |
#endif // V8_FULL_CODEGEN_H_ |
Also available in: Unified diff