The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / hydrogen-gvn.cc @ f230a1cf
History | View | Annotate | Download (29 KB)
1 |
// Copyright 2013 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "hydrogen.h" |
29 |
#include "hydrogen-gvn.h" |
30 |
#include "v8.h" |
31 |
|
32 |
namespace v8 {
|
33 |
namespace internal {
|
34 |
|
35 |
// A hash map from HValue hashcodes to HValue*, used by GVN to find an
// equivalent, previously-computed value for an instruction.  Buckets live in
// array_; collisions are chained through index-linked nodes in lists_.
// present_flags_ is the union of the gvn_flags() of all stored values and is
// used by Kill() as a fast "nothing to do" check.
class HValueMap: public ZoneObject {
 public:
  explicit HValueMap(Zone* zone)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        present_flags_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every value whose gvn_flags() depend on any of the given
  // "changes" flags.
  void Kill(GVNFlagSet flags);

  void Add(HValue* value, Zone* zone) {
    // Track the flags of everything in the map so Kill() can early-out.
    present_flags_.Add(value->gvn_flags());
    Insert(value, zone);
  }

  // Returns a stored value that Equals(value), or NULL.
  HValue* Lookup(HValue* value) const;

  HValueMap* Copy(Zone* zone) const {
    return new(zone) HValueMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HValue* values. Stored in arrays.
  struct HValueMapListElement {
    HValue* value;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  // Deep-copy constructor used by Copy().
  HValueMap(Zone* zone, const HValueMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HValue* value, Zone* zone);
  // Maps a hash to a bucket index; valid because array_size_ is a power of 2.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HValueMap.
  GVNFlagSet present_flags_;  // All flags that are in any value in the
                              // HValueMap.
  HValueMapListElement* array_;  // Primary store - contains the first value
      // with a given hash.  Colliding elements are stored in linked lists.
  HValueMapListElement* lists_;  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
};
92 |
|
93 |
|
94 |
// For each of the kNumberOfTrackedSideEffects side effects, remembers the
// most recent (dominating) instruction that produced it.  Used by GVN to
// feed HandleSideEffectDominator().
class HSideEffectMap BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Forgets the dominating instruction for every tracked side effect whose
  // "changes" flag is in |flags|.
  void Kill(GVNFlagSet flags);

  // Records |instr| as the dominating instruction for every tracked side
  // effect whose "changes" flag is in |flags|.
  void Store(GVNFlagSet flags, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    ASSERT(0 <= i);
    ASSERT(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};
117 |
|
118 |
|
119 |
// Emits a --trace-gvn diagnostic.  |msg| is a printf-style format string;
// the varargs are forwarded verbatim to OS::VPrint.
void TraceGVN(const char* msg, ...) {
  va_list args;
  va_start(args, msg);
  OS::VPrint(msg, args);
  va_end(args);
}
125 |
|
126 |
|
127 |
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
// --trace-gvn is off.
//
// NOTE(review): these expand to a bare `if` statement rather than the usual
// `do { ... } while (false)` idiom, so an unbraced use directly before an
// `else` would be subject to the dangling-else problem.  They cannot be
// converted to do/while here: at least one call site in this file
// (GvnBasicBlockState::pop()) invokes TRACE_GVN_2 without a trailing
// semicolon and relies on the macro ending in '}'.
#define TRACE_GVN_1(msg, a1)                    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1);                          \
  }

#define TRACE_GVN_2(msg, a1, a2)                \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2);                      \
  }

#define TRACE_GVN_3(msg, a1, a2, a3)            \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3);                  \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4)        \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4);              \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)    \
  if (FLAG_trace_gvn) {                         \
    TraceGVN(msg, a1, a2, a3, a4, a5);          \
  }
153 |
|
154 |
|
155 |
// Deep-copy constructor backing HValueMap::Copy().  Allocates fresh backing
// arrays from |zone| and duplicates both with a raw memory copy, which is
// safe because HValueMapListElement holds only a pointer and an int.
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  OS::MemCopy(
      array_, other->array_, array_size_ * sizeof(HValueMapListElement));
  OS::MemCopy(
      lists_, other->lists_, lists_size_ * sizeof(HValueMapListElement));
}
168 |
|
169 |
|
170 |
// Removes every value whose gvn_flags() depend on any side effect in |flags|
// (given as "changes" flags).  present_flags_ is rebuilt from the surviving
// values as the table is scanned; dropped collision-list nodes are returned
// to the free list.
void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  // Fast path: nothing stored in the map depends on any killed effect.
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HValue* value = array_[i].value;
    if (value != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        HValue* value = lists_[current].value;
        if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
          // Drop it: push the node onto the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: relink onto the survivor list (order is reversed,
          // which is fine for a hash bucket).
          lists_[current].next = kept;
          kept = current;
          present_flags_.Add(value->gvn_flags());
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      value = array_[i].value;
      if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          // Bucket is now completely empty.
          array_[i].value = NULL;
        } else {
          // Promote the first surviving collision node into the bucket slot
          // and recycle its list node.
          array_[i].value = lists_[head].value;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_flags_.Add(value->gvn_flags());  // Keep it.
      }
    }
  }
}
216 |
|
217 |
|
218 |
// Returns a stored value for which Equals(value) holds, or NULL if the map
// contains no equivalent value.
HValue* HValueMap::Lookup(HValue* value) const {
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  HValue* head = array_[pos].value;
  if (head == NULL) return NULL;
  if (head->Equals(value)) return head;
  // Walk the collision chain hanging off this bucket.
  for (int cursor = array_[pos].next; cursor != kNil;
       cursor = lists_[cursor].next) {
    HValue* candidate = lists_[cursor].value;
    if (candidate->Equals(value)) return candidate;
  }
  return NULL;
}
231 |
|
232 |
|
233 |
// Grows the bucket array to |new_size| and rehashes every stored value.
// Collision-list nodes are recycled through the free list as their values
// are re-inserted, so lists_ itself does not need to grow here (beyond the
// one guaranteed free element below).
void HValueMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HValueMapListElement* new_array =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_array, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_array = array_;
  int old_size = array_size_;

  // count_ is reset because each Insert() below re-increments it.
  int old_count = count_;
  count_ = 0;
  // Do not modify present_flags_.  It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].value != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          // Re-insert the chained value, then recycle its node.  The order
          // (Insert first, then free) keeps the node's contents valid while
          // Insert() reads them.
          Insert(lists_[current].value, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored value.
        Insert(old_array[i].value, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);
}
276 |
|
277 |
|
278 |
// Grows the collision-node array to |new_size|.  Existing nodes are copied
// verbatim (their indices stay valid) and all newly added slots are threaded
// onto the free list.
void HValueMap::ResizeLists(int new_size, Zone* zone) {
  ASSERT(new_size > lists_size_);

  HValueMapListElement* new_lists =
      zone->NewArray<HValueMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);

  HValueMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement));
  }
  // Push each new slot onto the free list.
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}
299 |
|
300 |
|
301 |
// Inserts |value| into the table.  The caller (Add/Resize) is responsible
// for keeping present_flags_ in sync.
void HValueMap::Insert(HValue* value, Zone* zone) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    // Bucket is empty: store directly, no collision node needed.
    array_[pos].value = value;
    array_[pos].next = kNil;
  } else {
    // Collision: take a node from the free list (growing lists_ if it is
    // exhausted) and prepend it to the bucket's chain.
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].value = value;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
    array_[pos].next = new_element_pos;
  }
}
324 |
|
325 |
|
326 |
// Starts out empty: no tracked side effect has a dominating instruction.
HSideEffectMap::HSideEffectMap() : count_(0) {
  memset(data_, 0, sizeof(data_));
}
329 |
|
330 |
|
331 |
// Copy constructor: count_ is copied in the initializer list, then the
// data_ array copy is delegated to operator=.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
334 |
|
335 |
|
336 |
// Copies the full state of |other|.
//
// Fix: the previous implementation copied only data_ and left count_
// untouched.  A default-constructed map assigned from a non-empty one
// (e.g. "dominators_ = *dominators" in GvnBasicBlockState::Initialize)
// therefore kept count_ == 0 and IsEmpty() wrongly returned true, which
// AnalyzeGraph() uses to decide whether side effects on dominator-tree
// paths must be collected.  Copy count_ so it stays consistent with data_.
HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
  if (this != &other) {
    OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
    count_ = other.count_;
  }
  return *this;
}
342 |
|
343 |
|
344 |
// Forgets the dominating instruction for every tracked side effect whose
// "changes" flag appears in |flags|.
void HSideEffectMap::Kill(GVNFlagSet flags) {
  for (int index = 0; index < kNumberOfTrackedSideEffects; index++) {
    if (!flags.Contains(HValue::ChangesFlagFromInt(index))) continue;
    if (data_[index] != NULL) {
      count_--;
      data_[index] = NULL;
    }
  }
}
353 |
|
354 |
|
355 |
// Records |instr| as the most recent producer of every tracked side effect
// whose "changes" flag appears in |flags|.
void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
  for (int index = 0; index < kNumberOfTrackedSideEffects; index++) {
    if (!flags.Contains(HValue::ChangesFlagFromInt(index))) continue;
    if (data_[index] == NULL) count_++;  // Slot becomes occupied.
    data_[index] = instr;
  }
}
364 |
|
365 |
|
366 |
// Sets up per-block side-effect tracking: one GVNFlagSet per basic block for
// both block-local and loop-aggregated side effects, plus a bit vector used
// while collecting side effects along dominator-tree paths.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  // This phase must not allocate handles.
  ASSERT(!AllowHandleAllocation::IsAllowed());
  // Pre-fill both lists with one empty flag set per block so they can be
  // indexed by block id.
  block_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                               zone());
  loop_side_effects_.AddBlock(GVNFlagSet(), graph->blocks()->length(),
                              zone());
}
378 |
|
379 |
void HGlobalValueNumberingPhase::Analyze() {
|
380 |
removed_side_effects_ = false;
|
381 |
ComputeBlockSideEffects(); |
382 |
if (FLAG_loop_invariant_code_motion) {
|
383 |
LoopInvariantCodeMotion(); |
384 |
} |
385 |
AnalyzeGraph(); |
386 |
} |
387 |
|
388 |
|
389 |
// Computes, for every reachable non-deoptimizing block, the union of the
// "changes" flags of its instructions, and aggregates those into the
// enclosing loops.  Blocks are visited in reverse id order so that inner
// loops are summarized before their effects are propagated to outer loops.
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    GVNFlagSet side_effects;
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(instr->ChangesFlags());
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
      if (block->HasParentLoopHeader()) {
        int header_id = block->parent_loop_header()->block_id();
        // A nested loop header contributes its whole loop's effects; an
        // ordinary block contributes only its own.
        loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                          ? loop_side_effects_[id]
                                          : side_effects);
      }
    }
  }
}
423 |
|
424 |
|
425 |
// Renders |flags| as a human-readable string for --trace-gvn output.
// In DEBUG builds the individual flag names are printed (as either an
// include-list or an except-list, whichever is shorter); in release builds
// only the raw bit pattern is printed.  The result is heap-allocated and
// owned by the returned SmartArrayPointer.
SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
  char underlying_buffer[kLastFlag * 128];
  Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
#if DEBUG
  int offset = 0;
  const char* separator = "";
  const char* comma = ", ";
  buffer[0] = 0;
  uint32_t set_depends_on = 0;
  uint32_t set_changes = 0;
  // Count set bits; even bit positions are "changes" flags, odd positions
  // are the corresponding "depends on" flags.
  for (int bit = 0; bit < kLastFlag; ++bit) {
    if ((flags.ToIntegral() & (1 << bit)) != 0) {
      if (bit % 2 == 0) {
        set_changes++;
      } else {
        set_depends_on++;
      }
    }
  }
  // If more than half the flags are set, print the complement instead
  // ("all except [...]") to keep the message short.
  bool positive_changes = set_changes < (kLastFlag / 2);
  bool positive_depends_on = set_depends_on < (kLastFlag / 2);
  if (set_changes > 0) {
    if (positive_changes) {
      offset += OS::SNPrintF(buffer + offset, "changes [");
    } else {
      offset += OS::SNPrintF(buffer + offset, "changes all except [");
    }
    for (int bit = 0; bit < kLastFlag; ++bit) {
      if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
        switch (static_cast<GVNFlag>(bit)) {
// Expands to one case per flag that appends the flag's name.
#define DECLARE_FLAG(type)                                  \
          case kChanges##type:                              \
            offset += OS::SNPrintF(buffer + offset, separator);  \
            offset += OS::SNPrintF(buffer + offset, #type); \
            separator = comma;                              \
            break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
          default:
              break;
        }
      }
    }
    offset += OS::SNPrintF(buffer + offset, "]");
  }
  if (set_depends_on > 0) {
    separator = "";
    if (set_changes > 0) {
      offset += OS::SNPrintF(buffer + offset, ", ");
    }
    if (positive_depends_on) {
      offset += OS::SNPrintF(buffer + offset, "depends on [");
    } else {
      offset += OS::SNPrintF(buffer + offset, "depends on all except [");
    }
    for (int bit = 0; bit < kLastFlag; ++bit) {
      if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
        switch (static_cast<GVNFlag>(bit)) {
#define DECLARE_FLAG(type)                                  \
          case kDependsOn##type:                            \
            offset += OS::SNPrintF(buffer + offset, separator);  \
            offset += OS::SNPrintF(buffer + offset, #type); \
            separator = comma;                              \
            break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
          default:
              break;
        }
      }
    }
    offset += OS::SNPrintF(buffer + offset, "]");
  }
#else
  OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
#endif
  // Copy the formatted text into a heap buffer of exactly the needed size.
  size_t string_len = strlen(underlying_buffer) + 1;
  ASSERT(string_len <= sizeof(underlying_buffer));
  char* result = new char[strlen(underlying_buffer) + 1];
  OS::MemCopy(result, underlying_buffer, string_len);
  return SmartArrayPointer<char>(result);
}
509 |
|
510 |
|
511 |
// Attempts to hoist loop-invariant instructions out of every loop in the
// graph.  Loops are visited by decreasing block id (inner loops first, given
// the block-id ordering used by ComputeBlockSideEffects).
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      // Everything the loop may change; values depending on these effects
      // cannot be hoisted.
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
                  block->block_id(),
                  *GetGVNFlagsString(side_effects));

      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // Process every block of the loop body (header through last back edge).
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}
533 |
|
534 |
|
535 |
// Examines each instruction of |block| (a member of the loop rooted at
// |loop_header|) and hoists it to the loop pre-header when it is GVN-able,
// does not depend on anything the loop changes (|loop_kills|), and all of
// its operands are defined before the pre-header.  |first_time_depends| /
// |first_time_changes| accumulate the flags of instructions that were NOT
// hoisted, for use by later hoisting decisions.
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
              block->block_id(),
              *GetGVNFlagsString(depends_flags));
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor now: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
                  instr->id(),
                  instr->Mnemonic(),
                  *GetGVNFlagsString(instr->gvn_flags()),
                  *GetGVNFlagsString(loop_kills));
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        // Without optimistic LICM, only hoist from blocks that dominate all
        // loop exits.
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        // All operands must be defined before the loop's pre-header.
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      GVNFlagSet previous_depends = *first_time_depends;
      GVNFlagSet previous_changes = *first_time_changes;
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
      if (!(previous_depends == *first_time_depends)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_depends));
      }
      if (!(previous_changes == *first_time_changes)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_changes));
      }
    }
    instr = next;
  }
}
598 |
|
599 |
|
600 |
bool HGlobalValueNumberingPhase::AllowCodeMotion() {
|
601 |
return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count; |
602 |
} |
603 |
|
604 |
|
605 |
// If we've disabled code motion or we're in a block that unconditionally
// deoptimizes, don't move any instructions.
bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
                                            HBasicBlock* loop_header) {
  if (!AllowCodeMotion()) return false;
  HBasicBlock* block = instr->block();
  return block->IsReachable() && !block->IsDeoptimizing();
}
612 |
|
613 |
|
614 |
// Collects the union of side effects on all blocks lying strictly between
// |dominator| and |dominated| (by block id) on paths into |dominated|.
// visited_on_paths_ must be cleared by the caller before the outermost call;
// it prevents revisiting blocks during the recursion.
GVNFlagSet
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      // A loop header on the path contributes its whole loop's effects.
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      // Recurse through this predecessor's own predecessors.
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
634 |
|
635 |
|
636 |
// Each instance of this class is like a "stack frame" for the recursive
|
637 |
// traversal of the dominator tree done during GVN (the stack is handled
|
638 |
// as a double linked list).
|
639 |
// We reuse frames when possible so the list length is limited by the depth
|
640 |
// of the dominator tree but this forces us to initialize each frame calling
|
641 |
// an explicit "Initialize" method instead of a using constructor.
|
642 |
// One "stack frame" of the iterative dominator-tree traversal in
// AnalyzeGraph().  Frames form a doubly linked list (previous_/next_) and
// are reused via Initialize() instead of being reallocated, so the list
// length is bounded by the dominator-tree depth.
class GvnBasicBlockState: public ZoneObject {
 public:
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HValueMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HValueMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  // Advances to the next state of the traversal and stores the dominator of
  // the returned state's block in |*dominator|.  Returns NULL when the whole
  // tree has been visited.
  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  // (Re)initializes this frame for |block|.  |copy_map| controls whether the
  // value map is deep-copied or shared with the parent frame.
  void Initialize(HBasicBlock* block,
                  HValueMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  // True once every dominated block of block_ has been handed out.
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HValueMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  // Returns a state for the next dominated block of block_, reusing "this"
  // for the last child (so the map need not be copied), or NULL when done.
  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone, block_->dominated_blocks()->at(dominated_index_));
    } else {
      return NULL;
    }
  }

  // Descends into |block|, reusing the cached next_ frame if one exists.
  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
    } else {
      next_->Initialize(block, map(), dominators(), true, zone);
    }
    return next_;
  }
  // Walks back up the stack to the nearest ancestor frame that still has
  // unvisited dominated blocks (or NULL).
  // NOTE(review): the TRACE_GVN_2 invocation below has no trailing
  // semicolon; it relies on the macro expanding to a complete braced `if`.
  // The trace also always prints previous_'s block rather than the frame
  // being stepped over — presumably only a cosmetic trace quirk; verify
  // before changing.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;
  GvnBasicBlockState* next_;
  HBasicBlock* block_;
  HValueMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;  // Index of the child currently being visited.
  int length_;           // Number of dominated blocks of block_.
};
747 |
|
748 |
|
749 |
// This is a recursive traversal of the dominator tree but it has been turned
|
750 |
// into a loop to avoid stack overflows.
|
751 |
// The logical "stack frames" of the recursion are kept in a list of
|
752 |
// GvnBasicBlockState instances.
|
753 |
// The core GVN pass: walks the dominator tree (iteratively, via
// GvnBasicBlockState frames) carrying a value map and a side-effect
// dominator map, replacing each GVN-able instruction with an equivalent
// dominating one when the map contains it.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HValueMap* entry_map = new(zone()) HValueMap(zone());
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HValueMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Inform the instruction of the dominating producer of each side
        // effect it depends on.
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
          GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(depends_on_flag) &&
              (other != NULL)) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            instr->HandleSideEffectDominator(changes_flag, other);
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      GVNFlagSet flags = instr->ChangesFlags();
      if (!flags.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(flags);
        dominators->Store(flags, instr);
        TRACE_GVN_2("Instruction %d %s\n", instr->id(),
                    *GetGVNFlagsString(flags));
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        HValue* other = map->Lookup(instr);
        if (other != NULL) {
          // An equivalent dominating value exists: replace this one with it.
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HValueMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block.  We don't have to traverse these paths if the
      // value map and the dominators list is already empty.  If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        GVNFlagSet side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
852 |
|
853 |
} } // namespace v8::internal
|