comparison gcc/tree-ssa-address.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
52:c156f1bd5cd9 55:77e2b8dfacca
1 /* Memory address lowering and addressing mode selection. 1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004, 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 2 Copyright (C) 2004, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it 6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the 7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any 8 Free Software Foundation; either version 3, or (at your option) any
9 later version. 9 later version.
10 10
11 GCC is distributed in the hope that it will be useful, but WITHOUT 11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details. 14 for more details.
15 15
16 You should have received a copy of the GNU General Public License 16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see 17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */ 18 <http://www.gnu.org/licenses/>. */
19 19
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions 20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
40 #include "insn-config.h" 40 #include "insn-config.h"
41 #include "recog.h" 41 #include "recog.h"
42 #include "expr.h" 42 #include "expr.h"
43 #include "ggc.h" 43 #include "ggc.h"
44 #include "tree-affine.h" 44 #include "tree-affine.h"
45 #include "target.h"
45 46
46 /* TODO -- handling of symbols (according to Richard Hendersons 47 /* TODO -- handling of symbols (according to Richard Hendersons
47 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html): 48 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
48 49
49 There are at least 5 different kinds of symbols that we can run up against: 50 There are at least 5 different kinds of symbols that we can run up against:
50 51
51 (1) binds_local_p, small data area. 52 (1) binds_local_p, small data area.
52 (2) binds_local_p, eg local statics 53 (2) binds_local_p, eg local statics
53 (3) !binds_local_p, eg global variables 54 (3) !binds_local_p, eg global variables
68 precise results. */ 69 precise results. */
69 70
70 /* A "template" for memory address, used to determine whether the address is 71 /* A "template" for memory address, used to determine whether the address is
71 valid for mode. */ 72 valid for mode. */
72 73
73 struct mem_addr_template GTY (()) 74 typedef struct GTY (()) mem_addr_template {
74 {
75 rtx ref; /* The template. */ 75 rtx ref; /* The template. */
76 rtx * GTY ((skip)) step_p; /* The point in template where the step should be 76 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
77 filled in. */ 77 filled in. */
78 rtx * GTY ((skip)) off_p; /* The point in template where the offset should 78 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
79 be filled in. */ 79 be filled in. */
80 }; 80 } mem_addr_template;
81 81
82 /* The templates. Each of the five bits of the index corresponds to one 82 DEF_VEC_O (mem_addr_template);
83 component of TARGET_MEM_REF being present, see TEMPL_IDX. */ 83 DEF_VEC_ALLOC_O (mem_addr_template, gc);
84 84
85 static GTY (()) struct mem_addr_template templates[32]; 85 /* The templates. Each of the low five bits of the index corresponds to one
86 86 component of TARGET_MEM_REF being present, while the high bits identify
87 #define TEMPL_IDX(SYMBOL, BASE, INDEX, STEP, OFFSET) \ 87 the address space. See TEMPL_IDX. */
88 (((SYMBOL != 0) << 4) \ 88
89 static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
90
91 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
92 (((int) (AS) << 5) \
93 | ((SYMBOL != 0) << 4) \
89 | ((BASE != 0) << 3) \ 94 | ((BASE != 0) << 3) \
90 | ((INDEX != 0) << 2) \ 95 | ((INDEX != 0) << 2) \
91 | ((STEP != 0) << 1) \ 96 | ((STEP != 0) << 1) \
92 | (OFFSET != 0)) 97 | (OFFSET != 0))
93 98
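A quick sanity check of the new index layout (a hedged sketch, not part of the patch; the rtx arguments and the address-space value are invented): an access in address space 1 with a base and an offset but no symbol, index or step maps to entry 41 of mem_addr_template_list.

    /* Sketch only: evaluating the 4.5 macro by hand with hypothetical
       base_rtx/off_rtx operands.  */
    unsigned idx = TEMPL_IDX (1, NULL_RTX, base_rtx, NULL_RTX, NULL_RTX, off_rtx);
    /* = (1 << 5) | (0 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1 = 41.
       The 4.4 macro had no AS argument, so every index fit in templates[0..31].  */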
94 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX, 99 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
95 STEP and OFFSET to *ADDR. Stores pointers to where step is placed to 100 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
96 *STEP_P and offset to *OFFSET_P. */ 101 to where step is placed to *STEP_P and offset to *OFFSET_P. */
97 102
98 static void 103 static void
99 gen_addr_rtx (rtx symbol, rtx base, rtx index, rtx step, rtx offset, 104 gen_addr_rtx (enum machine_mode address_mode,
105 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
100 rtx *addr, rtx **step_p, rtx **offset_p) 106 rtx *addr, rtx **step_p, rtx **offset_p)
101 { 107 {
102 rtx act_elem; 108 rtx act_elem;
103 109
104 *addr = NULL_RTX; 110 *addr = NULL_RTX;
110 if (index) 116 if (index)
111 { 117 {
112 act_elem = index; 118 act_elem = index;
113 if (step) 119 if (step)
114 { 120 {
115 act_elem = gen_rtx_MULT (Pmode, act_elem, step); 121 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
116 122
117 if (step_p) 123 if (step_p)
118 *step_p = &XEXP (act_elem, 1); 124 *step_p = &XEXP (act_elem, 1);
119 } 125 }
120 126
122 } 128 }
123 129
124 if (base) 130 if (base)
125 { 131 {
126 if (*addr) 132 if (*addr)
127 *addr = simplify_gen_binary (PLUS, Pmode, base, *addr); 133 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
128 else 134 else
129 *addr = base; 135 *addr = base;
130 } 136 }
131 137
132 if (symbol) 138 if (symbol)
133 { 139 {
134 act_elem = symbol; 140 act_elem = symbol;
135 if (offset) 141 if (offset)
136 { 142 {
137 act_elem = gen_rtx_PLUS (Pmode, act_elem, offset); 143 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
138 144
139 if (offset_p) 145 if (offset_p)
140 *offset_p = &XEXP (act_elem, 1); 146 *offset_p = &XEXP (act_elem, 1);
141 147
142 if (GET_CODE (symbol) == SYMBOL_REF 148 if (GET_CODE (symbol) == SYMBOL_REF
143 || GET_CODE (symbol) == LABEL_REF 149 || GET_CODE (symbol) == LABEL_REF
144 || GET_CODE (symbol) == CONST) 150 || GET_CODE (symbol) == CONST)
145 act_elem = gen_rtx_CONST (Pmode, act_elem); 151 act_elem = gen_rtx_CONST (address_mode, act_elem);
146 } 152 }
147 153
148 if (*addr) 154 if (*addr)
149 *addr = gen_rtx_PLUS (Pmode, *addr, act_elem); 155 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
150 else 156 else
151 *addr = act_elem; 157 *addr = act_elem;
152 } 158 }
153 else if (offset) 159 else if (offset)
154 { 160 {
155 if (*addr) 161 if (*addr)
156 { 162 {
157 *addr = gen_rtx_PLUS (Pmode, *addr, offset); 163 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
158 if (offset_p) 164 if (offset_p)
159 *offset_p = &XEXP (*addr, 1); 165 *offset_p = &XEXP (*addr, 1);
160 } 166 }
161 else 167 else
162 { 168 {
168 174
169 if (!*addr) 175 if (!*addr)
170 *addr = const0_rtx; 176 *addr = const0_rtx;
171 } 177 }
172 178
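For orientation, when all five components are present gen_addr_rtx builds an address of roughly the following shape (a sketch; simplify_gen_binary may fold the base addition further, and the concrete mode depends on the target's address mode):

    /* (plus (plus base (mult index step))
             (const (plus symbol offset)))
       i.e. base + index*step + (symbol + offset), now generated in the
       caller-supplied address_mode instead of the hard-wired Pmode of 4.4.  */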
173 /* Returns address for TARGET_MEM_REF with parameters given by ADDR. 179 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
174 If REALLY_EXPAND is false, just make fake registers instead 180 in address space AS.
181 If REALLY_EXPAND is false, just make fake registers instead
175 of really expanding the operands, and perform the expansion in-place 182 of really expanding the operands, and perform the expansion in-place
176 by using one of the "templates". */ 183 by using one of the "templates". */
177 184
178 rtx 185 rtx
179 addr_for_mem_ref (struct mem_address *addr, bool really_expand) 186 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
180 { 187 bool really_expand)
188 {
189 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
181 rtx address, sym, bse, idx, st, off; 190 rtx address, sym, bse, idx, st, off;
182 static bool templates_initialized = false;
183 struct mem_addr_template *templ; 191 struct mem_addr_template *templ;
184 192
185 if (addr->step && !integer_onep (addr->step)) 193 if (addr->step && !integer_onep (addr->step))
186 st = immed_double_const (TREE_INT_CST_LOW (addr->step), 194 st = immed_double_const (TREE_INT_CST_LOW (addr->step),
187 TREE_INT_CST_HIGH (addr->step), Pmode); 195 TREE_INT_CST_HIGH (addr->step), address_mode);
188 else 196 else
189 st = NULL_RTX; 197 st = NULL_RTX;
190 198
191 if (addr->offset && !integer_zerop (addr->offset)) 199 if (addr->offset && !integer_zerop (addr->offset))
192 off = immed_double_const (TREE_INT_CST_LOW (addr->offset), 200 off = immed_double_const (TREE_INT_CST_LOW (addr->offset),
193 TREE_INT_CST_HIGH (addr->offset), Pmode); 201 TREE_INT_CST_HIGH (addr->offset), address_mode);
194 else 202 else
195 off = NULL_RTX; 203 off = NULL_RTX;
196 204
197 if (!really_expand) 205 if (!really_expand)
198 { 206 {
207 unsigned int templ_index
208 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
209
210 if (templ_index
211 >= VEC_length (mem_addr_template, mem_addr_template_list))
212 VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
213 templ_index + 1);
214
199 /* Reuse the templates for addresses, so that we do not waste memory. */ 215 /* Reuse the templates for addresses, so that we do not waste memory. */
200 if (!templates_initialized) 216 templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
201 { 217 if (!templ->ref)
202 unsigned i; 218 {
203 219 sym = (addr->symbol ?
204 templates_initialized = true; 220 gen_rtx_SYMBOL_REF (address_mode, ggc_strdup ("test_symbol"))
205 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup ("test_symbol")); 221 : NULL_RTX);
206 bse = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1); 222 bse = (addr->base ?
207 idx = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 2); 223 gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1)
208 224 : NULL_RTX);
209 for (i = 0; i < 32; i++) 225 idx = (addr->index ?
210 gen_addr_rtx ((i & 16 ? sym : NULL_RTX), 226 gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2)
211 (i & 8 ? bse : NULL_RTX), 227 : NULL_RTX);
212 (i & 4 ? idx : NULL_RTX), 228
213 (i & 2 ? const0_rtx : NULL_RTX), 229 gen_addr_rtx (address_mode, sym, bse, idx,
214 (i & 1 ? const0_rtx : NULL_RTX), 230 st? const0_rtx : NULL_RTX,
215 &templates[i].ref, 231 off? const0_rtx : NULL_RTX,
216 &templates[i].step_p, 232 &templ->ref,
217 &templates[i].off_p); 233 &templ->step_p,
218 } 234 &templ->off_p);
219 235 }
220 templ = templates + TEMPL_IDX (addr->symbol, addr->base, addr->index, 236
221 st, off);
222 if (st) 237 if (st)
223 *templ->step_p = st; 238 *templ->step_p = st;
224 if (off) 239 if (off)
225 *templ->off_p = off; 240 *templ->off_p = off;
226 241
228 } 243 }
229 244
230 /* Otherwise really expand the expressions. */ 245 /* Otherwise really expand the expressions. */
231 sym = (addr->symbol 246 sym = (addr->symbol
232 ? expand_expr (build_addr (addr->symbol, current_function_decl), 247 ? expand_expr (build_addr (addr->symbol, current_function_decl),
233 NULL_RTX, Pmode, EXPAND_NORMAL) 248 NULL_RTX, address_mode, EXPAND_NORMAL)
234 : NULL_RTX); 249 : NULL_RTX);
235 bse = (addr->base 250 bse = (addr->base
236 ? expand_expr (addr->base, NULL_RTX, Pmode, EXPAND_NORMAL) 251 ? expand_expr (addr->base, NULL_RTX, address_mode, EXPAND_NORMAL)
237 : NULL_RTX); 252 : NULL_RTX);
238 idx = (addr->index 253 idx = (addr->index
239 ? expand_expr (addr->index, NULL_RTX, Pmode, EXPAND_NORMAL) 254 ? expand_expr (addr->index, NULL_RTX, address_mode, EXPAND_NORMAL)
240 : NULL_RTX); 255 : NULL_RTX);
241 256
242 gen_addr_rtx (sym, bse, idx, st, off, &address, NULL, NULL); 257 gen_addr_rtx (address_mode, sym, bse, idx, st, off, &address, NULL, NULL);
243 return address; 258 return address;
244 } 259 }
245 260
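The signature change is visible to callers: every use of addr_for_mem_ref now has to say which address space the reference lives in. A hedged caller-side sketch (parts and mode are illustrative locals; ADDR_SPACE_GENERIC stands in for the reference's actual space):

    /* Dry run: get a template-backed rtx just to test validity.  */
    rtx a = addr_for_mem_ref (&parts, ADDR_SPACE_GENERIC, false);
    if (a && memory_address_addr_space_p (mode, a, ADDR_SPACE_GENERIC))
      /* The parts describe a legal addressing mode; expand for real.  */
      a = addr_for_mem_ref (&parts, ADDR_SPACE_GENERIC, true);

Note that the dry-run result points into a shared, mutated template, so it should be consumed before the next dry-run call with the same component pattern.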
246 /* Returns address of MEM_REF in TYPE. */ 261 /* Returns address of MEM_REF in TYPE. */
247 262
304 319
305 /* Returns true if a memory reference in MODE and with parameters given by 320 /* Returns true if a memory reference in MODE and with parameters given by
306 ADDR is valid on the current target. */ 321 ADDR is valid on the current target. */
307 322
308 static bool 323 static bool
309 valid_mem_ref_p (enum machine_mode mode, struct mem_address *addr) 324 valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
325 struct mem_address *addr)
310 { 326 {
311 rtx address; 327 rtx address;
312 328
313 address = addr_for_mem_ref (addr, false); 329 address = addr_for_mem_ref (addr, as, false);
314 if (!address) 330 if (!address)
315 return false; 331 return false;
316 332
317 return memory_address_p (mode, address); 333 return memory_address_addr_space_p (mode, address, as);
318 } 334 }
319 335
320 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR 336 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
321 is valid on the current target and if so, creates and returns the 337 is valid on the current target and if so, creates and returns the
322 TARGET_MEM_REF. */ 338 TARGET_MEM_REF. */
323 339
324 static tree 340 static tree
325 create_mem_ref_raw (tree type, struct mem_address *addr) 341 create_mem_ref_raw (tree type, struct mem_address *addr)
326 { 342 {
327 if (!valid_mem_ref_p (TYPE_MODE (type), addr)) 343 if (!valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
328 return NULL_TREE; 344 return NULL_TREE;
329 345
330 if (addr->step && integer_onep (addr->step)) 346 if (addr->step && integer_onep (addr->step))
331 addr->step = NULL_TREE; 347 addr->step = NULL_TREE;
332 348
333 if (addr->offset && integer_zerop (addr->offset)) 349 if (addr->offset && integer_zerop (addr->offset))
334 addr->offset = NULL_TREE; 350 addr->offset = NULL_TREE;
335 351
336 return build7 (TARGET_MEM_REF, type, 352 return build6 (TARGET_MEM_REF, type,
337 addr->symbol, addr->base, addr->index, 353 addr->symbol, addr->base, addr->index,
338 addr->step, addr->offset, NULL, NULL); 354 addr->step, addr->offset, NULL);
339 } 355 }
340 356
341 /* Returns true if OBJ is an object whose address is a link time constant. */ 357 /* Returns true if OBJ is an object whose address is a link time constant. */
342 358
343 static bool 359 static bool
371 387
372 if (i == addr->n) 388 if (i == addr->n)
373 return; 389 return;
374 390
375 parts->symbol = TREE_OPERAND (val, 0); 391 parts->symbol = TREE_OPERAND (val, 0);
392 aff_combination_remove_elt (addr, i);
393 }
394
395 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
396
397 static void
398 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
399 aff_tree *addr)
400 {
401 unsigned i;
402 tree val = NULL_TREE;
403 int qual;
404
405 for (i = 0; i < addr->n; i++)
406 {
407 if (!double_int_one_p (addr->elts[i].coef))
408 continue;
409
410 val = addr->elts[i].val;
411 if (operand_equal_p (val, base_hint, 0))
412 break;
413 }
414
415 if (i == addr->n)
416 return;
417
418 /* Cast value to appropriate pointer type. We cannot use a pointer
419 to TYPE directly, as the back-end will assume registers of pointer
420 type are aligned, and just the base itself may not actually be.
421 We use void pointer to the type's address space instead. */
422 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
423 type = build_qualified_type (void_type_node, qual);
424 parts->base = fold_convert (build_pointer_type (type), val);
376 aff_combination_remove_elt (addr, i); 425 aff_combination_remove_elt (addr, i);
377 } 426 }
378 427
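A hedged worked trace of the new hint path (SSA names and constants invented): if base_hint is p_7 and ADDR currently stands for p_7 + 4*i_9 + 16, the loop finds p_7 with coefficient 1, removes it from ADDR, and records it as the base converted to a void pointer qualified with TYPE's address space, so no alignment is promised for it.

    /* Before:  ADDR ~ p_7 + 4*i_9 + 16,   base_hint = p_7
       After :  parts->base = (<AS> void *) p_7;   ADDR ~ 4*i_9 + 16  */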
379 /* If ADDR contains an address of a dereferenced pointer, move it to 428 /* If ADDR contains an address of a dereferenced pointer, move it to
380 PARTS->base. */ 429 PARTS->base. */
435 /* Finds the most expensive multiplication in ADDR that can be 484 /* Finds the most expensive multiplication in ADDR that can be
436 expressed in an addressing mode and move the corresponding 485 expressed in an addressing mode and move the corresponding
437 element(s) to PARTS. */ 486 element(s) to PARTS. */
438 487
439 static void 488 static void
440 most_expensive_mult_to_index (struct mem_address *parts, aff_tree *addr, 489 most_expensive_mult_to_index (tree type, struct mem_address *parts,
441 bool speed) 490 aff_tree *addr, bool speed)
442 { 491 {
492 addr_space_t as = TYPE_ADDR_SPACE (type);
493 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
443 HOST_WIDE_INT coef; 494 HOST_WIDE_INT coef;
444 double_int best_mult, amult, amult_neg; 495 double_int best_mult, amult, amult_neg;
445 unsigned best_mult_cost = 0, acost; 496 unsigned best_mult_cost = 0, acost;
446 tree mult_elt = NULL_TREE, elt; 497 tree mult_elt = NULL_TREE, elt;
447 unsigned i, j; 498 unsigned i, j;
451 for (i = 0; i < addr->n; i++) 502 for (i = 0; i < addr->n; i++)
452 { 503 {
453 if (!double_int_fits_in_shwi_p (addr->elts[i].coef)) 504 if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
454 continue; 505 continue;
455 506
456 /* FIXME: Should use the correct memory mode rather than Pmode. */
457
458 coef = double_int_to_shwi (addr->elts[i].coef); 507 coef = double_int_to_shwi (addr->elts[i].coef);
459 if (coef == 1 508 if (coef == 1
460 || !multiplier_allowed_in_address_p (coef, Pmode)) 509 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
461 continue; 510 continue;
462 511
463 acost = multiply_by_cost (coef, Pmode, speed); 512 acost = multiply_by_cost (coef, address_mode, speed);
464 513
465 if (acost > best_mult_cost) 514 if (acost > best_mult_cost)
466 { 515 {
467 best_mult_cost = acost; 516 best_mult_cost = acost;
468 best_mult = addr->elts[i].coef; 517 best_mult = addr->elts[i].coef;
475 /* Collect elements multiplied by best_mult. */ 524 /* Collect elements multiplied by best_mult. */
476 for (i = j = 0; i < addr->n; i++) 525 for (i = j = 0; i < addr->n; i++)
477 { 526 {
478 amult = addr->elts[i].coef; 527 amult = addr->elts[i].coef;
479 amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr); 528 amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);
480 529
481 if (double_int_equal_p (amult, best_mult)) 530 if (double_int_equal_p (amult, best_mult))
482 op_code = PLUS_EXPR; 531 op_code = PLUS_EXPR;
483 else if (double_int_equal_p (amult_neg, best_mult)) 532 else if (double_int_equal_p (amult_neg, best_mult))
484 op_code = MINUS_EXPR; 533 op_code = MINUS_EXPR;
485 else 534 else
496 mult_elt = elt; 545 mult_elt = elt;
497 else 546 else
498 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt); 547 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
499 } 548 }
500 addr->n = j; 549 addr->n = j;
501 550
502 parts->index = mult_elt; 551 parts->index = mult_elt;
503 parts->step = double_int_to_tree (sizetype, best_mult); 552 parts->step = double_int_to_tree (sizetype, best_mult);
504 } 553 }
505 554
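A hedged worked example of the selection above (coefficients invented): for ADDR ~ p + 4*i + 32*j - 32*k on a target where both *4 and *32 are legal address scales and *32 is the costlier multiply, best_mult becomes 32; the collection pass then combines the matching elements, j with PLUS_EXPR and k with MINUS_EXPR, so that

    /* parts->index = j - k;   parts->step = 32;   ADDR keeps p + 4*i.  */

Note the 4.5 code asks multiplier_allowed_in_address_p about TYPE's memory mode and address space instead of guessing with Pmode, which resolves the old FIXME deleted in this hunk.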
506 /* Splits address ADDR into PARTS. 555 /* Splits address ADDR for a memory access of type TYPE into PARTS.
507 556 If BASE_HINT is non-NULL, it specifies an SSA name to be used
557 preferentially as base of the reference.
558
508 TODO -- be more clever about the distribution of the elements of ADDR 559 TODO -- be more clever about the distribution of the elements of ADDR
509 to PARTS. Some architectures do not support anything but single 560 to PARTS. Some architectures do not support anything but single
510 register in address, possibly with a small integer offset; while 561 register in address, possibly with a small integer offset; while
511 create_mem_ref will simplify the address to an acceptable shape 562 create_mem_ref will simplify the address to an acceptable shape
512 later, it would be more efficient to know that asking for complicated 563 later, it would be more efficient to know that asking for complicated
513 addressing modes is useless. */ 564 addressing modes is useless. */
514 565
515 static void 566 static void
516 addr_to_parts (aff_tree *addr, struct mem_address *parts, bool speed) 567 addr_to_parts (tree type, aff_tree *addr, tree base_hint,
568 struct mem_address *parts, bool speed)
517 { 569 {
518 tree part; 570 tree part;
519 unsigned i; 571 unsigned i;
520 572
521 parts->symbol = NULL_TREE; 573 parts->symbol = NULL_TREE;
531 /* Try to find a symbol. */ 583 /* Try to find a symbol. */
532 move_fixed_address_to_symbol (parts, addr); 584 move_fixed_address_to_symbol (parts, addr);
533 585
534 /* First move the most expensive feasible multiplication 586 /* First move the most expensive feasible multiplication
535 to index. */ 587 to index. */
536 most_expensive_mult_to_index (parts, addr, speed); 588 most_expensive_mult_to_index (type, parts, addr, speed);
537 589
538 /* Try to find a base of the reference. Since at the moment 590 /* Try to find a base of the reference. Since at the moment
539 there is no reliable way how to distinguish between pointer and its 591 there is no reliable way how to distinguish between pointer and its
540 offset, this is just a guess. */ 592 offset, this is just a guess. */
541 if (!parts->symbol) 593 if (!parts->symbol && base_hint)
594 move_hint_to_base (type, parts, base_hint, addr);
595 if (!parts->symbol && !parts->base)
542 move_pointer_to_base (parts, addr); 596 move_pointer_to_base (parts, addr);
543 597
544 /* Then try to process the remaining elements. */ 598 /* Then try to process the remaining elements. */
545 for (i = 0; i < addr->n; i++) 599 for (i = 0; i < addr->n; i++)
546 { 600 {
573 computations are emitted in front of GSI. TYPE is the mode 627 computations are emitted in front of GSI. TYPE is the mode
574 of created memory reference. */ 628 of created memory reference. */
575 629
576 tree 630 tree
577 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr, 631 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
578 bool speed) 632 tree base_hint, bool speed)
579 { 633 {
580 tree mem_ref, tmp; 634 tree mem_ref, tmp;
581 tree atype; 635 tree atype;
582 struct mem_address parts; 636 struct mem_address parts;
583 637
584 addr_to_parts (addr, &parts, speed); 638 addr_to_parts (type, addr, base_hint, &parts, speed);
585 gimplify_mem_ref_parts (gsi, &parts); 639 gimplify_mem_ref_parts (gsi, &parts);
586 mem_ref = create_mem_ref_raw (type, &parts); 640 mem_ref = create_mem_ref_raw (type, &parts);
587 if (mem_ref) 641 if (mem_ref)
588 return mem_ref; 642 return mem_ref;
589 643
596 parts.index = force_gimple_operand_gsi (gsi, 650 parts.index = force_gimple_operand_gsi (gsi,
597 fold_build2 (MULT_EXPR, sizetype, 651 fold_build2 (MULT_EXPR, sizetype,
598 parts.index, parts.step), 652 parts.index, parts.step),
599 true, NULL_TREE, true, GSI_SAME_STMT); 653 true, NULL_TREE, true, GSI_SAME_STMT);
600 parts.step = NULL_TREE; 654 parts.step = NULL_TREE;
601 655
602 mem_ref = create_mem_ref_raw (type, &parts); 656 mem_ref = create_mem_ref_raw (type, &parts);
603 if (mem_ref) 657 if (mem_ref)
604 return mem_ref; 658 return mem_ref;
605 } 659 }
606 660
607 if (parts.symbol) 661 if (parts.symbol)
608 { 662 {
609 tmp = build_addr (parts.symbol, current_function_decl); 663 tmp = build_addr (parts.symbol, current_function_decl);
610 gcc_assert (is_gimple_val (tmp)); 664 gcc_assert (is_gimple_val (tmp));
611 665
612 /* Add the symbol to base, eventually forcing it to register. */ 666 /* Add the symbol to base, eventually forcing it to register. */
613 if (parts.base) 667 if (parts.base)
614 { 668 {
615 gcc_assert (useless_type_conversion_p 669 gcc_assert (useless_type_conversion_p
616 (sizetype, TREE_TYPE (parts.base))); 670 (sizetype, TREE_TYPE (parts.base)));
664 { 718 {
665 /* Try adding offset to base. */ 719 /* Try adding offset to base. */
666 if (parts.base) 720 if (parts.base)
667 { 721 {
668 atype = TREE_TYPE (parts.base); 722 atype = TREE_TYPE (parts.base);
669 parts.base = force_gimple_operand_gsi (gsi, 723 parts.base = force_gimple_operand_gsi (gsi,
670 fold_build2 (POINTER_PLUS_EXPR, atype, 724 fold_build2 (POINTER_PLUS_EXPR, atype,
671 parts.base, 725 parts.base,
672 fold_convert (sizetype, parts.offset)), 726 fold_convert (sizetype, parts.offset)),
673 true, NULL_TREE, true, GSI_SAME_STMT); 727 true, NULL_TREE, true, GSI_SAME_STMT);
674 } 728 }
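Caller-side, the extra parameter means the preferred base SSA name is threaded in from the start. A hedged usage sketch (the locals and the affine setup are illustrative; tree_to_aff_combination is the tree-affine.c helper whose header is already included above):

    aff_tree aff;
    tree mem;

    /* Describe the address as an affine combination, then ask for a
       TARGET_MEM_REF, hinting which SSA name should remain the base.  */
    tree_to_aff_combination (addr_expr, sizetype, &aff);
    mem = create_mem_ref (&gsi, access_type, &aff, base_hint, optimize_speed);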
707 /* Copies the additional information attached to target_mem_ref FROM to TO. */ 761 /* Copies the additional information attached to target_mem_ref FROM to TO. */
708 762
709 void 763 void
710 copy_mem_ref_info (tree to, tree from) 764 copy_mem_ref_info (tree to, tree from)
711 { 765 {
712 /* Copy the annotation, to preserve the aliasing information. */
713 TMR_TAG (to) = TMR_TAG (from);
714
715 /* And the info about the original reference. */ 766 /* And the info about the original reference. */
716 TMR_ORIGINAL (to) = TMR_ORIGINAL (from); 767 TMR_ORIGINAL (to) = TMR_ORIGINAL (from);
717 } 768 }
718 769
719 /* Move constants in target_mem_ref REF to offset. Returns the new target 770 /* Move constants in target_mem_ref REF to offset. Returns the new target
763 changed = true; 814 changed = true;
764 } 815 }
765 816
766 if (!changed) 817 if (!changed)
767 return NULL_TREE; 818 return NULL_TREE;
768 819
769 ret = create_mem_ref_raw (TREE_TYPE (ref), &addr); 820 ret = create_mem_ref_raw (TREE_TYPE (ref), &addr);
770 if (!ret) 821 if (!ret)
771 return NULL_TREE; 822 return NULL_TREE;
772 823
773 copy_mem_ref_info (ret, ref); 824 copy_mem_ref_info (ret, ref);