The data contained in this repository can be downloaded to your computer using one of several clients.
Please see the documentation of your version control software client for more information.
Please select the desired protocol below to get the URL.
This URL has Read-Only access.
main_repo / deps / v8 / src / hydrogen-load-elimination.cc @ f230a1cf
History | View | Annotate | Download (17.1 KB)
1 |
// Copyright 2013 the V8 project authors. All rights reserved.
|
---|---|
2 |
// Redistribution and use in source and binary forms, with or without
|
3 |
// modification, are permitted provided that the following conditions are
|
4 |
// met:
|
5 |
//
|
6 |
// * Redistributions of source code must retain the above copyright
|
7 |
// notice, this list of conditions and the following disclaimer.
|
8 |
// * Redistributions in binary form must reproduce the above
|
9 |
// copyright notice, this list of conditions and the following
|
10 |
// disclaimer in the documentation and/or other materials provided
|
11 |
// with the distribution.
|
12 |
// * Neither the name of Google Inc. nor the names of its
|
13 |
// contributors may be used to endorse or promote products derived
|
14 |
// from this software without specific prior written permission.
|
15 |
//
|
16 |
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
17 |
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
18 |
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
19 |
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
20 |
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
21 |
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
22 |
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
23 |
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
24 |
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
25 |
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
26 |
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
27 |
|
28 |
#include "hydrogen-alias-analysis.h" |
29 |
#include "hydrogen-load-elimination.h" |
30 |
#include "hydrogen-instructions.h" |
31 |
#include "hydrogen-flow-engine.h" |
32 |
|
33 |
namespace v8 {
|
34 |
namespace internal {
|
35 |
|
36 |
// When true, run the analysis globally over the whole dominator tree;
// when false, analyze each basic block in isolation (see
// HLoadEliminationPhase::Run below).
#define GLOBAL true
// Debug tracing for this pass, gated on --trace-load-elimination.
#define TRACE(x) if (FLAG_trace_load_elimination) PrintF x

// Maximum number of in-object fields (word-aligned offsets) tracked.
static const int kMaxTrackedFields = 16;
// Maximum number of distinct objects tracked per field before entries
// are recycled (see ReuseLastApproximation).
static const int kMaxTrackedObjects = 5;
41 |
|
42 |
// An element in the field approximation list.
|
43 |
class HFieldApproximation : public ZoneObject { |
44 |
public: // Just a data blob. |
45 |
HValue* object_; |
46 |
HLoadNamedField* last_load_; |
47 |
HValue* last_value_; |
48 |
HFieldApproximation* next_; |
49 |
|
50 |
// Recursively copy the entire linked list of field approximations.
|
51 |
HFieldApproximation* Copy(Zone* zone) { |
52 |
if (this == NULL) return NULL; |
53 |
HFieldApproximation* copy = new(zone) HFieldApproximation();
|
54 |
copy->object_ = this->object_;
|
55 |
copy->last_load_ = this->last_load_;
|
56 |
copy->last_value_ = this->last_value_;
|
57 |
copy->next_ = this->next_->Copy(zone);
|
58 |
return copy;
|
59 |
} |
60 |
}; |
61 |
|
62 |
|
63 |
// The main datastructure used during load/store elimination. Each in-object
// field is tracked separately. For each field, store a list of known field
// values for known objects.
class HLoadEliminationTable : public ZoneObject {
 public:
  HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing)
    : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { }

  // The main processing of instructions. Loads may be replaced with a
  // previously known value, redundant stores are removed, and all other
  // instructions conservatively kill table entries according to their GVN
  // change flags. Returns the state (this table) to use for the next
  // instruction.
  HLoadEliminationTable* Process(HInstruction* instr, Zone* zone) {
    switch (instr->opcode()) {
      case HValue::kLoadNamedField: {
        HLoadNamedField* l = HLoadNamedField::cast(instr);
        TRACE((" process L%d field %d (o%d)\n",
               instr->id(),
               FieldOf(l->access()),
               l->object()->ActualValue()->id()));
        HValue* result = load(l);
        if (result != instr) {
          // The load can be replaced with a previous load or a value.
          TRACE((" replace L%d -> v%d\n", instr->id(), result->id()));
          instr->DeleteAndReplaceWith(result);
        }
        break;
      }
      case HValue::kStoreNamedField: {
        HStoreNamedField* s = HStoreNamedField::cast(instr);
        TRACE((" process S%d field %d (o%d) = v%d\n",
               instr->id(),
               FieldOf(s->access()),
               s->object()->ActualValue()->id(),
               s->value()->id()));
        HValue* result = store(s);
        if (result == NULL) {
          // The store is redundant. Remove it.
          TRACE((" remove S%d\n", instr->id()));
          instr->DeleteAndReplaceWith(NULL);
        }
        break;
      }
      default: {
        // Any other instruction: kill entries based on its change flags.
        if (instr->CheckGVNFlag(kChangesInobjectFields)) {
          // Arbitrary field writes: forget everything.
          TRACE((" kill-all i%d\n", instr->id()));
          Kill();
          break;
        }
        if (instr->CheckGVNFlag(kChangesMaps)) {
          TRACE((" kill-maps i%d\n", instr->id()));
          KillOffset(JSObject::kMapOffset);
        }
        if (instr->CheckGVNFlag(kChangesElementsKind)) {
          // An elements-kind change rewrites both the map and the elements
          // backing store pointer.
          TRACE((" kill-elements-kind i%d\n", instr->id()));
          KillOffset(JSObject::kMapOffset);
          KillOffset(JSObject::kElementsOffset);
        }
        if (instr->CheckGVNFlag(kChangesElementsPointer)) {
          TRACE((" kill-elements i%d\n", instr->id()));
          KillOffset(JSObject::kElementsOffset);
        }
        if (instr->CheckGVNFlag(kChangesOsrEntries)) {
          // Loads must not be moved across an OSR entry: forget everything.
          TRACE((" kill-osr i%d\n", instr->id()));
          Kill();
        }
      }
      // Improvements possible:
      // - learn from HCheckMaps for field 0
      // - remove unobservable stores (write-after-write)
      // - track cells
      // - track globals
      // - track roots
    }
    return this;
  }

  // Support for global analysis with HFlowEngine: Copy state to successor
  // block.
  HLoadEliminationTable* Copy(HBasicBlock* succ, Zone* zone) {
    HLoadEliminationTable* copy =
        new(zone) HLoadEliminationTable(zone, aliasing_);
    copy->EnsureFields(fields_.length());
    for (int i = 0; i < fields_.length(); i++) {
      // NOTE: Copy is invoked on a possibly-NULL list head by design.
      copy->fields_[i] = fields_[i]->Copy(zone);
    }
    if (FLAG_trace_load_elimination) {
      TRACE((" copy-to B%d\n", succ->block_id()));
      copy->Print();
    }
    return copy;
  }

  // Support for global analysis with HFlowEngine: Merge this state with
  // the other incoming state. Entries that disagree between the two states
  // are dropped, so the merged table only keeps facts true on both paths.
  HLoadEliminationTable* Merge(HBasicBlock* succ,
      HLoadEliminationTable* that, Zone* zone) {
    if (that->fields_.length() < fields_.length()) {
      // Drop fields not in the other table.
      fields_.Rewind(that->fields_.length());
    }
    for (int i = 0; i < fields_.length(); i++) {
      // Merge the field approximations for like fields.
      HFieldApproximation* approx = fields_[i];
      HFieldApproximation* prev = NULL;
      while (approx != NULL) {
        // TODO(titzer): Merging is O(N * M); sort?
        HFieldApproximation* other = that->Find(approx->object_, i);
        if (other == NULL || !Equal(approx->last_value_, other->last_value_)) {
          // Kill an entry that doesn't agree with the other value.
          // Unlink {approx} from the list; {prev} stays where it is.
          if (prev != NULL) {
            prev->next_ = approx->next_;
          } else {
            fields_[i] = approx->next_;
          }
          approx = approx->next_;
          continue;
        }
        prev = approx;
        approx = approx->next_;
      }
    }
    return this;
  }

  friend class HLoadEliminationEffects;  // Calls Kill() and others.
  friend class HLoadEliminationPhase;

 private:
  // Process a load instruction, updating internal table state. If a previous
  // load or store for this object and field exists, return the new value with
  // which the load should be replaced. Otherwise, return {instr}.
  HValue* load(HLoadNamedField* instr) {
    int field = FieldOf(instr->access());
    // Untracked access (out-of-object or misaligned): leave the load alone.
    if (field < 0) return instr;

    HValue* object = instr->object()->ActualValue();
    HFieldApproximation* approx = FindOrCreate(object, field);

    if (approx->last_value_ == NULL) {
      // Load is not redundant. Fill out a new entry.
      approx->last_load_ = instr;
      approx->last_value_ = instr;
      return instr;
    } else {
      // Eliminate the load. Reuse previously stored value or load instruction.
      return approx->last_value_;
    }
  }

  // Process a store instruction, updating internal table state. If a previous
  // store to the same object and field makes this store redundant (e.g. because
  // the stored values are the same), return NULL indicating that this store
  // instruction is redundant. Otherwise, return {instr}.
  HValue* store(HStoreNamedField* instr) {
    int field = FieldOf(instr->access());
    // Untracked access: conservatively kill overlapping entries instead.
    if (field < 0) return KillIfMisaligned(instr);

    HValue* object = instr->object()->ActualValue();
    HValue* value = instr->value();

    // Kill non-equivalent may-alias entries.
    KillFieldInternal(object, field, value);
    if (instr->has_transition()) {
      // A transition store alters the map of the object.
      // TODO(titzer): remember the new map (a constant) for the object.
      KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL);
    }
    HFieldApproximation* approx = FindOrCreate(object, field);

    if (Equal(approx->last_value_, value)) {
      // The store is redundant because the field already has this value.
      return NULL;
    } else {
      // The store is not redundant. Update the entry.
      approx->last_load_ = NULL;
      approx->last_value_ = value;
      return instr;
    }
  }

  // Kill everything in this table.
  void Kill() {
    fields_.Rewind(0);
  }

  // Kill all entries matching the given offset.
  void KillOffset(int offset) {
    int field = FieldOf(offset);
    if (field >= 0 && field < fields_.length()) {
      fields_[field] = NULL;
    }
  }

  // Kill all entries aliasing the given store.
  void KillStore(HStoreNamedField* s) {
    int field = FieldOf(s->access());
    if (field >= 0) {
      KillFieldInternal(s->object()->ActualValue(), field, s->value());
    } else {
      KillIfMisaligned(s);
    }
  }

  // Kill multiple entries in the case of a misaligned store. A store that is
  // not word-aligned may overlap up to two tracked word-sized fields; both
  // are conservatively killed. Returns {instr} unchanged.
  HValue* KillIfMisaligned(HStoreNamedField* instr) {
    HObjectAccess access = instr->access();
    if (access.IsInobject()) {
      int offset = access.offset();
      if ((offset % kPointerSize) != 0) {
        // Kill the field containing the first word of the access.
        HValue* object = instr->object()->ActualValue();
        int field = offset / kPointerSize;
        KillFieldInternal(object, field, NULL);

        // Kill the next field in case of overlap.
        int size = kPointerSize;
        if (access.representation().IsByte()) size = 1;
        else if (access.representation().IsInteger32()) size = 4;
        int next_field = (offset + size - 1) / kPointerSize;
        if (next_field != field) KillFieldInternal(object, next_field, NULL);
      }
    }
    return instr;
  }

  // Find an entry for the given object and field pair. Returns NULL when no
  // must-alias entry exists.
  HFieldApproximation* Find(HValue* object, int field) {
    // Search for a field approximation for this object.
    HFieldApproximation* approx = fields_[field];
    while (approx != NULL) {
      if (aliasing_->MustAlias(object, approx->object_)) return approx;
      approx = approx->next_;
    }
    return NULL;
  }

  // Find or create an entry for the given object and field pair. The number
  // of entries per field is capped at kMaxTrackedObjects; beyond that, the
  // oldest entry is recycled.
  HFieldApproximation* FindOrCreate(HValue* object, int field) {
    EnsureFields(field + 1);

    // Search for a field approximation for this object.
    HFieldApproximation* approx = fields_[field];
    int count = 0;
    while (approx != NULL) {
      if (aliasing_->MustAlias(object, approx->object_)) return approx;
      count++;
      approx = approx->next_;
    }

    if (count >= kMaxTrackedObjects) {
      // Pull the last entry off the end and repurpose it for this object.
      approx = ReuseLastApproximation(field);
    } else {
      // Allocate a new entry.
      approx = new(zone_) HFieldApproximation();
    }

    // Insert the entry at the head of the list.
    approx->object_ = object;
    approx->last_load_ = NULL;
    approx->last_value_ = NULL;
    approx->next_ = fields_[field];
    fields_[field] = approx;

    return approx;
  }

  // Kill all entries for a given field that _may_ alias the given object
  // and do _not_ have the given value.
  void KillFieldInternal(HValue* object, int field, HValue* value) {
    if (field >= fields_.length()) return;  // Nothing to do.

    HFieldApproximation* approx = fields_[field];
    HFieldApproximation* prev = NULL;
    while (approx != NULL) {
      if (aliasing_->MayAlias(object, approx->object_)) {
        if (!Equal(approx->last_value_, value)) {
          // Kill an aliasing entry that doesn't agree on the value.
          if (prev != NULL) {
            prev->next_ = approx->next_;
          } else {
            fields_[field] = approx->next_;
          }
          approx = approx->next_;
          continue;
        }
      }
      prev = approx;
      approx = approx->next_;
    }
  }

  // Value equality: identical instruction, or (both non-NULL and) structural
  // equality via HValue::Equals. NULL is never equal to a non-NULL value.
  bool Equal(HValue* a, HValue* b) {
    if (a == b) return true;
    if (a != NULL && b != NULL) return a->Equals(b);
    return false;
  }

  // Remove the last approximation for a field so that it can be reused.
  // We reuse the last entry because it was the first inserted and is thus
  // farthest away from the current instruction.
  HFieldApproximation* ReuseLastApproximation(int field) {
    HFieldApproximation* approx = fields_[field];
    ASSERT(approx != NULL);

    HFieldApproximation* prev = NULL;
    while (approx->next_ != NULL) {
      prev = approx;
      approx = approx->next_;
    }
    if (prev != NULL) prev->next_ = NULL;
    return approx;
  }

  // Compute the field index for the given object access; -1 if not tracked.
  int FieldOf(HObjectAccess access) {
    return access.IsInobject() ? FieldOf(access.offset()) : -1;
  }

  // Compute the field index for the given in-object offset; -1 if not tracked.
  int FieldOf(int offset) {
    if (offset >= kMaxTrackedFields * kPointerSize) return -1;
    // TODO(titzer): track misaligned loads in a separate list?
    if ((offset % kPointerSize) != 0) return -1;  // Ignore misaligned accesses.
    return offset / kPointerSize;
  }

  // Ensure internal storage for the given number of fields.
  void EnsureFields(int num_fields) {
    if (fields_.length() < num_fields) {
      fields_.AddBlock(NULL, num_fields - fields_.length(), zone_);
    }
  }

  // Print this table to stdout.
  void Print() {
    for (int i = 0; i < fields_.length(); i++) {
      PrintF("  field %d: ", i);
      for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) {
        PrintF("[o%d =", a->object_->id());
        if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id());
        if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id());
        PrintF("] ");
      }
      PrintF("\n");
    }
  }

  Zone* zone_;                             // Zone for all allocations.
  ZoneList<HFieldApproximation*> fields_;  // Per-field approximation lists.
  HAliasAnalyzer* aliasing_;               // May/must-alias oracle.
};
412 |
|
413 |
|
414 |
// Support for HFlowEngine: collect store effects within loops.
|
415 |
class HLoadEliminationEffects : public ZoneObject { |
416 |
public:
|
417 |
explicit HLoadEliminationEffects(Zone* zone)
|
418 |
: zone_(zone), |
419 |
maps_stored_(false),
|
420 |
fields_stored_(false),
|
421 |
elements_stored_(false),
|
422 |
stores_(5, zone) { }
|
423 |
|
424 |
inline bool Disabled() { |
425 |
return false; // Effects are _not_ disabled. |
426 |
} |
427 |
|
428 |
// Process a possibly side-effecting instruction.
|
429 |
void Process(HInstruction* instr, Zone* zone) {
|
430 |
switch (instr->opcode()) {
|
431 |
case HValue::kStoreNamedField: {
|
432 |
stores_.Add(HStoreNamedField::cast(instr), zone_); |
433 |
break;
|
434 |
} |
435 |
case HValue::kOsrEntry: {
|
436 |
// Kill everything. Loads must not be hoisted past the OSR entry.
|
437 |
maps_stored_ = true;
|
438 |
fields_stored_ = true;
|
439 |
elements_stored_ = true;
|
440 |
} |
441 |
default: {
|
442 |
fields_stored_ |= instr->CheckGVNFlag(kChangesInobjectFields); |
443 |
maps_stored_ |= instr->CheckGVNFlag(kChangesMaps); |
444 |
maps_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
445 |
elements_stored_ |= instr->CheckGVNFlag(kChangesElementsKind); |
446 |
elements_stored_ |= instr->CheckGVNFlag(kChangesElementsPointer); |
447 |
} |
448 |
} |
449 |
} |
450 |
|
451 |
// Apply these effects to the given load elimination table.
|
452 |
void Apply(HLoadEliminationTable* table) {
|
453 |
if (fields_stored_) {
|
454 |
table->Kill(); |
455 |
return;
|
456 |
} |
457 |
if (maps_stored_) {
|
458 |
table->KillOffset(JSObject::kMapOffset); |
459 |
} |
460 |
if (elements_stored_) {
|
461 |
table->KillOffset(JSObject::kElementsOffset); |
462 |
} |
463 |
|
464 |
// Kill non-agreeing fields for each store contained in these effects.
|
465 |
for (int i = 0; i < stores_.length(); i++) { |
466 |
table->KillStore(stores_[i]); |
467 |
} |
468 |
} |
469 |
|
470 |
// Union these effects with the other effects.
|
471 |
void Union(HLoadEliminationEffects* that, Zone* zone) {
|
472 |
maps_stored_ |= that->maps_stored_; |
473 |
fields_stored_ |= that->fields_stored_; |
474 |
elements_stored_ |= that->elements_stored_; |
475 |
for (int i = 0; i < that->stores_.length(); i++) { |
476 |
stores_.Add(that->stores_[i], zone); |
477 |
} |
478 |
} |
479 |
|
480 |
private:
|
481 |
Zone* zone_; |
482 |
bool maps_stored_ : 1; |
483 |
bool fields_stored_ : 1; |
484 |
bool elements_stored_ : 1; |
485 |
ZoneList<HStoreNamedField*> stores_; |
486 |
}; |
487 |
|
488 |
|
489 |
// The main routine of the analysis phase. Use the HFlowEngine for either a
|
490 |
// local or a global analysis.
|
491 |
void HLoadEliminationPhase::Run() {
|
492 |
HFlowEngine<HLoadEliminationTable, HLoadEliminationEffects> |
493 |
engine(graph(), zone()); |
494 |
HAliasAnalyzer aliasing; |
495 |
HLoadEliminationTable* table = |
496 |
new(zone()) HLoadEliminationTable(zone(), &aliasing);
|
497 |
|
498 |
if (GLOBAL) {
|
499 |
// Perform a global analysis.
|
500 |
engine.AnalyzeDominatedBlocks(graph()->blocks()->at(0), table);
|
501 |
} else {
|
502 |
// Perform only local analysis.
|
503 |
for (int i = 0; i < graph()->blocks()->length(); i++) { |
504 |
table->Kill(); |
505 |
engine.AnalyzeOneBlock(graph()->blocks()->at(i), table); |
506 |
} |
507 |
} |
508 |
} |
509 |
|
510 |
} } // namespace v8::internal
|