comparison gcc/tree-ssa-address.c @ 16:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparison: 15:561a7518be6b vs 16:04ced10e8804
@@ -1,8 +1,7 @@
 /* Memory address lowering and addressing mode selection.
-   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010
-   Free Software Foundation, Inc.
+   Copyright (C) 2004-2017 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published by the
@@ -22,31 +21,37 @@
    that directly map to addressing modes of the target.  */
 
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
+#include "rtl.h"
 #include "tree.h"
-#include "tm_p.h"
-#include "basic-block.h"
-#include "output.h"
+#include "gimple.h"
+#include "memmodel.h"
+#include "stringpool.h"
+#include "tree-vrp.h"
+#include "tree-ssanames.h"
+#include "expmed.h"
+#include "insn-config.h"
+#include "emit-rtl.h"
+#include "recog.h"
 #include "tree-pretty-print.h"
-#include "tree-flow.h"
-#include "tree-dump.h"
-#include "tree-pass.h"
-#include "timevar.h"
-#include "flags.h"
-#include "tree-inline.h"
+#include "fold-const.h"
+#include "stor-layout.h"
+#include "gimple-iterator.h"
+#include "gimplify-me.h"
+#include "tree-ssa-loop-ivopts.h"
+#include "expr.h"
+#include "tree-dfa.h"
+#include "dumpfile.h"
 #include "tree-affine.h"
+#include "gimplify.h"
 
 /* FIXME: We compute address costs using RTL.  */
-#include "insn-config.h"
-#include "rtl.h"
-#include "recog.h"
-#include "expr.h"
-#include "ggc.h"
-#include "target.h"
+#include "tree-ssa-address.h"
 
 /* TODO -- handling of symbols (according to Richard Henderson's
    comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
 
    There are at least 5 different kinds of symbols that we can run up against:
@@ -71,26 +76,24 @@
    precise results.  */
 
 /* A "template" for memory address, used to determine whether the address is
    valid for mode.  */
 
-typedef struct GTY (()) mem_addr_template {
+struct GTY (()) mem_addr_template {
   rtx ref;                      /* The template.  */
   rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                    filled in.  */
   rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                    be filled in.  */
-} mem_addr_template;
+};
 
-DEF_VEC_O (mem_addr_template);
-DEF_VEC_ALLOC_O (mem_addr_template, gc);
 
 /* The templates.  Each of the low five bits of the index corresponds to one
    component of TARGET_MEM_REF being present, while the high bits identify
    the address space.  See TEMPL_IDX.  */
 
-static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
+static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
 
 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
   (((int) (AS) << 5) \
    | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
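The INDEX, STEP and OFFSET bits of TEMPL_IDX fall outside the context shown above. As a standalone sketch of the packing scheme the comment describes (address space in the high bits, one presence bit per component in the low five), the following self-contained C program mirrors the macro; the positions assumed for the three elided bits are an inference from the shown SYMBOL and BASE lines, not a quotation of the elided source.

#include <stdio.h>

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2)  /* assumed position */ \
   | ((STEP != 0) << 1)   /* assumed position */ \
   | (OFFSET != 0))       /* assumed position */

int
main (void)
{
  /* Address space 1, symbol and offset present, no base/index/step:
     (1 << 5) | (1 << 4) | 1 == 49.  */
  printf ("%d\n", TEMPL_IDX (1, 1, 0, 0, 0, 1));
  return 0;
}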
@@ -101,37 +104,37 @@
 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
    STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
    to where step is placed to *STEP_P and offset to *OFFSET_P.  */
 
 static void
-gen_addr_rtx (enum machine_mode address_mode,
+gen_addr_rtx (machine_mode address_mode,
               rtx symbol, rtx base, rtx index, rtx step, rtx offset,
               rtx *addr, rtx **step_p, rtx **offset_p)
 {
   rtx act_elem;
 
   *addr = NULL_RTX;
   if (step_p)
     *step_p = NULL;
   if (offset_p)
     *offset_p = NULL;
 
-  if (index)
+  if (index && index != const0_rtx)
     {
       act_elem = index;
       if (step)
         {
           act_elem = gen_rtx_MULT (address_mode, act_elem, step);
 
           if (step_p)
             *step_p = &XEXP (act_elem, 1);
         }
 
       *addr = act_elem;
     }
 
-  if (base)
+  if (base && base != const0_rtx)
     {
       if (*addr)
         *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
       else
         *addr = base;
@@ -186,52 +189,51 @@
 
 rtx
 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                   bool really_expand)
 {
-  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
+  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
   rtx address, sym, bse, idx, st, off;
   struct mem_addr_template *templ;
 
   if (addr->step && !integer_onep (addr->step))
-    st = immed_double_int_const (tree_to_double_int (addr->step), address_mode);
+    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
   else
     st = NULL_RTX;
 
   if (addr->offset && !integer_zerop (addr->offset))
-    off = immed_double_int_const
-            (double_int_sext (tree_to_double_int (addr->offset),
-                              TYPE_PRECISION (TREE_TYPE (addr->offset))),
-             address_mode);
+    {
+      offset_int dc = offset_int::from (wi::to_wide (addr->offset), SIGNED);
+      off = immed_wide_int_const (dc, pointer_mode);
+    }
   else
     off = NULL_RTX;
 
   if (!really_expand)
     {
       unsigned int templ_index
         = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
 
-      if (templ_index
-          >= VEC_length (mem_addr_template, mem_addr_template_list))
-        VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
-                               templ_index + 1);
+      if (templ_index >= vec_safe_length (mem_addr_template_list))
+        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
 
       /* Reuse the templates for addresses, so that we do not waste memory.  */
-      templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
+      templ = &(*mem_addr_template_list)[templ_index];
       if (!templ->ref)
         {
           sym = (addr->symbol ?
-                 gen_rtx_SYMBOL_REF (address_mode, ggc_strdup ("test_symbol"))
+                 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
           bse = (addr->base ?
-                 gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1)
+                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
           idx = (addr->index ?
-                 gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2)
+                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);
 
-          gen_addr_rtx (address_mode, sym, bse, idx,
+          gen_addr_rtx (pointer_mode, sym, bse, idx,
                         st? const0_rtx : NULL_RTX,
                         off? const0_rtx : NULL_RTX,
                         &templ->ref,
                         &templ->step_p,
                         &templ->off_p);
@@ -245,21 +247,34 @@
       return templ->ref;
     }
 
   /* Otherwise really expand the expressions.  */
   sym = (addr->symbol
-         ? expand_expr (addr->symbol, NULL_RTX, address_mode, EXPAND_NORMAL)
+         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
   bse = (addr->base
-         ? expand_expr (addr->base, NULL_RTX, address_mode, EXPAND_NORMAL)
+         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
        : NULL_RTX);
   idx = (addr->index
-         ? expand_expr (addr->index, NULL_RTX, address_mode, EXPAND_NORMAL)
+         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
        : NULL_RTX);
 
-  gen_addr_rtx (address_mode, sym, bse, idx, st, off, &address, NULL, NULL);
+  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
+  if (pointer_mode != address_mode)
+    address = convert_memory_address (address_mode, address);
   return address;
+}
+
+/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
+   the mem_address structure.  */
+
+rtx
+addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
+{
+  struct mem_address addr;
+  get_address_description (exp, &addr);
+  return addr_for_mem_ref (&addr, as, really_expand);
 }
 
 /* Returns address of MEM_REF in TYPE.  */
 
 tree
@@ -274,34 +289,36 @@
 
   act_elem = TMR_INDEX (mem_ref);
   if (act_elem)
     {
       if (step)
-        act_elem = fold_build2 (MULT_EXPR, sizetype, act_elem, step);
+        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
+                                act_elem, step);
       addr_off = act_elem;
     }
 
   act_elem = TMR_INDEX2 (mem_ref);
   if (act_elem)
     {
       if (addr_off)
-        addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, act_elem);
+        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
+                                addr_off, act_elem);
       else
         addr_off = act_elem;
     }
 
   if (offset && !integer_zerop (offset))
     {
-      offset = fold_convert (sizetype, offset);
       if (addr_off)
-        addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, offset);
+        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
+                                fold_convert (TREE_TYPE (addr_off), offset));
      else
         addr_off = offset;
     }
 
   if (addr_off)
-    addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
+    addr = fold_build_pointer_plus (addr_base, addr_off);
   else
     addr = addr_base;
 
   return addr;
 }
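As a plain-C aside (not GCC code), the address that tree_mem_ref_addr reassembles has the shape base + index * step + index2 + offset. The toy program below evaluates that shape with ordinary pointer arithmetic; the variable names are chosen to mirror the TARGET_MEM_REF operands and are otherwise arbitrary.

#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  char buf[64];
  char *base = buf;      /* TMR_BASE   */
  intptr_t index = 3;    /* TMR_INDEX  */
  intptr_t step = 4;     /* TMR_STEP   */
  intptr_t index2 = 0;   /* TMR_INDEX2 */
  intptr_t offset = 8;   /* TMR_OFFSET */

  /* addr = base + (index * step + index2 + offset)  */
  char *addr = base + (index * step + index2 + offset);
  printf ("%td\n", addr - buf);   /* prints 20 */
  return 0;
}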
@@ -308,11 +325,11 @@
 
 /* Returns true if a memory reference in MODE and with parameters given by
    ADDR is valid on the current target.  */
 
-static bool
-valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
+bool
+valid_mem_ref_p (machine_mode mode, addr_space_t as,
                  struct mem_address *addr)
 {
   rtx address;
 
   address = addr_for_mem_ref (addr, as, false);
@@ -355,16 +372,19 @@
       base = addr->base;
       index2 = NULL_TREE;
     }
   else
     {
-      base = build_int_cst (ptr_type_node, 0);
+      base = build_int_cst (build_pointer_type (type), 0);
       index2 = addr->base;
     }
 
-  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
-  if (alias_ptr_type
+  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
+     ??? As IVOPTs does not follow restrictions to where the base
+     pointer may point to create a MEM_REF only if we know that
+     base is valid.  */
+  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);
 
   return build5 (TARGET_MEM_REF, type,
@@ -374,28 +394,27 @@
 /* Returns true if OBJ is an object whose address is a link time constant.  */
 
 static bool
 fixed_address_object_p (tree obj)
 {
-  return (TREE_CODE (obj) == VAR_DECL
-          && (TREE_STATIC (obj)
-              || DECL_EXTERNAL (obj))
+  return (VAR_P (obj)
+          && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
 }
 
 /* If ADDR contains an address of object that is a link time constant,
    move it to PARTS->symbol.  */
 
-static void
+void
 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
 {
   unsigned i;
   tree val = NULL_TREE;
 
   for (i = 0; i < addr->n; i++)
     {
-      if (!double_int_one_p (addr->elts[i].coef))
+      if (addr->elts[i].coef != 1)
        continue;
 
       val = addr->elts[i].val;
       if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
@@ -407,44 +426,46 @@
 
       parts->symbol = val;
       aff_combination_remove_elt (addr, i);
     }
 
-/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */
+/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
+   PARTS->base.  */
 
-static void
+static bool
 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                    aff_tree *addr)
 {
   unsigned i;
   tree val = NULL_TREE;
   int qual;
 
   for (i = 0; i < addr->n; i++)
     {
-      if (!double_int_one_p (addr->elts[i].coef))
+      if (addr->elts[i].coef != 1)
        continue;
 
       val = addr->elts[i].val;
       if (operand_equal_p (val, base_hint, 0))
        break;
     }
 
   if (i == addr->n)
-    return;
+    return false;
 
   /* Cast value to appropriate pointer type.  We cannot use a pointer
      to TYPE directly, as the back-end will assume registers of pointer
      type are aligned, and just the base itself may not actually be.
      We use void pointer to the type's address space instead.  */
   qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
   type = build_qualified_type (void_type_node, qual);
   parts->base = fold_convert (build_pointer_type (type), val);
   aff_combination_remove_elt (addr, i);
+  return true;
 }
 
 /* If ADDR contains an address of a dereferenced pointer, move it to
    PARTS->base.  */
 
 static void
 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
 {
@@ -451,11 +472,11 @@
   unsigned i;
   tree val = NULL_TREE;
 
   for (i = 0; i < addr->n; i++)
     {
-      if (!double_int_one_p (addr->elts[i].coef))
+      if (addr->elts[i].coef != 1)
        continue;
 
       val = addr->elts[i].val;
       if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
@@ -487,11 +508,11 @@
 
   if (i == addr->n)
     return;
 
   parts->index = fold_convert (sizetype, val);
-  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
+  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
   aff_combination_remove_elt (addr, i);
 }
 
 /* Adds ELT to PARTS.  */
 
@@ -513,12 +534,65 @@
     }
 
   /* Add ELT to base.  */
   type = TREE_TYPE (parts->base);
   if (POINTER_TYPE_P (type))
-    parts->base = fold_build2 (POINTER_PLUS_EXPR, type,
-                               parts->base,
-                               fold_convert (sizetype, elt));
+    parts->base = fold_build_pointer_plus (parts->base, elt);
   else
-    parts->base = fold_build2 (PLUS_EXPR, type,
-                               parts->base, elt);
+    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
+}
+
+/* Returns true if multiplying by RATIO is allowed in an address.  Test the
+   validity for a memory reference accessing memory of mode MODE in address
+   space AS.  */
+
+static bool
+multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
+                                 addr_space_t as)
+{
+#define MAX_RATIO 128
+  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
+  static vec<sbitmap> valid_mult_list;
+  sbitmap valid_mult;
+
+  if (data_index >= valid_mult_list.length ())
+    valid_mult_list.safe_grow_cleared (data_index + 1);
+
+  valid_mult = valid_mult_list[data_index];
+  if (!valid_mult)
+    {
+      machine_mode address_mode = targetm.addr_space.address_mode (as);
+      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
+      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
+      rtx addr, scaled;
+      HOST_WIDE_INT i;
+
+      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
+      bitmap_clear (valid_mult);
+      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
+      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
+      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
+        {
+          XEXP (scaled, 1) = gen_int_mode (i, address_mode);
+          if (memory_address_addr_space_p (mode, addr, as)
+              || memory_address_addr_space_p (mode, scaled, as))
+            bitmap_set_bit (valid_mult, i + MAX_RATIO);
+        }
+
+      if (dump_file && (dump_flags & TDF_DETAILS))
+        {
+          fprintf (dump_file, "  allowed multipliers:");
+          for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
+            if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
+              fprintf (dump_file, " %d", (int) i);
+          fprintf (dump_file, "\n");
+          fprintf (dump_file, "\n");
+        }
+
+      valid_mult_list[data_index] = valid_mult;
+    }
+
+  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
+    return false;
+
+  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
 }
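The caching idea in multiplier_allowed_in_address_p above (probe every ratio in [-MAX_RATIO, MAX_RATIO] once, remember the answers in a bitmap indexed by ratio + MAX_RATIO) can be shown without any GCC infrastructure. In the standalone sketch below, target_allows_ratio is a made-up stand-in for the real memory_address_addr_space_p probe, and a plain bool array stands in for the sbitmap.

#include <stdbool.h>
#include <stdio.h>

#define MAX_RATIO 128

static bool
target_allows_ratio (long ratio)
{
  /* Hypothetical target: scales of 1, 2, 4 and 8, as on many ISAs.  */
  return ratio == 1 || ratio == 2 || ratio == 4 || ratio == 8;
}

static bool
multiplier_allowed (long ratio)
{
  static bool valid[2 * MAX_RATIO + 1];
  static bool initialized = false;

  if (!initialized)
    {
      /* Probe each candidate ratio exactly once.  */
      for (long i = -MAX_RATIO; i <= MAX_RATIO; i++)
        valid[i + MAX_RATIO] = target_allows_ratio (i);
      initialized = true;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;
  return valid[ratio + MAX_RATIO];
}

int
main (void)
{
  printf ("%d %d\n", multiplier_allowed (4), multiplier_allowed (3));
  return 0;
}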
@@ -525,55 +599,54 @@
 
 /* Finds the most expensive multiplication in ADDR that can be
    expressed in an addressing mode and move the corresponding
    element(s) to PARTS.  */
 
 static void
 most_expensive_mult_to_index (tree type, struct mem_address *parts,
                               aff_tree *addr, bool speed)
 {
   addr_space_t as = TYPE_ADDR_SPACE (type);
-  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+  machine_mode address_mode = targetm.addr_space.address_mode (as);
   HOST_WIDE_INT coef;
-  double_int best_mult, amult, amult_neg;
   unsigned best_mult_cost = 0, acost;
   tree mult_elt = NULL_TREE, elt;
   unsigned i, j;
   enum tree_code op_code;
 
-  best_mult = double_int_zero;
+  offset_int best_mult = 0;
   for (i = 0; i < addr->n; i++)
     {
-      if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
+      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;
 
-      coef = double_int_to_shwi (addr->elts[i].coef);
+      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;
 
-      acost = multiply_by_cost (coef, address_mode, speed);
+      acost = mult_by_coeff_cost (coef, address_mode, speed);
 
      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
-          best_mult = addr->elts[i].coef;
+          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
     }
 
   if (!best_mult_cost)
     return;
 
   /* Collect elements multiplied by best_mult.  */
   for (i = j = 0; i < addr->n; i++)
     {
-      amult = addr->elts[i].coef;
-      amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);
+      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
+      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
 
-      if (double_int_equal_p (amult, best_mult))
+      if (amult == best_mult)
        op_code = PLUS_EXPR;
-      else if (double_int_equal_p (amult_neg, best_mult))
+      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
@@ -589,73 +662,70 @@
          mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
     }
   addr->n = j;
 
   parts->index = mult_elt;
-  parts->step = double_int_to_tree (sizetype, best_mult);
+  parts->step = wide_int_to_tree (sizetype, best_mult);
 }
 
 /* Splits address ADDR for a memory access of type TYPE into PARTS.
    If BASE_HINT is non-NULL, it specifies an SSA name to be used
    preferentially as base of the reference, and IV_CAND is the selected
-   iv candidate used in ADDR.
+   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
+   part of address is split to PARTS.base.
 
    TODO -- be more clever about the distribution of the elements of ADDR
    to PARTS.  Some architectures do not support anything but single
    register in address, possibly with a small integer offset; while
    create_mem_ref will simplify the address to an acceptable shape
    later, it would be more efficient to know that asking for complicated
    addressing modes is useless.  */
 
 static void
-addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
-               tree base_hint, struct mem_address *parts,
-               bool speed)
+addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
+               struct mem_address *parts, bool *var_in_base, bool speed)
 {
   tree part;
   unsigned i;
 
   parts->symbol = NULL_TREE;
   parts->base = NULL_TREE;
   parts->index = NULL_TREE;
   parts->step = NULL_TREE;
 
-  if (!double_int_zero_p (addr->offset))
-    parts->offset = double_int_to_tree (sizetype, addr->offset);
+  if (addr->offset != 0)
+    parts->offset = wide_int_to_tree (sizetype, addr->offset);
   else
     parts->offset = NULL_TREE;
 
   /* Try to find a symbol.  */
   move_fixed_address_to_symbol (parts, addr);
 
-  /* No need to do address parts reassociation if the number of parts
-     is <= 2 -- in that case, no loop invariant code motion can be
-     exposed.  */
-
-  if (!base_hint && (addr->n > 2))
+  /* Since at the moment there is no reliable way to know how to
+     distinguish between pointer and its offset, we decide if var
+     part is the pointer based on guess.  */
+  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
+  if (*var_in_base)
+    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
+  else
     move_variant_to_index (parts, addr, iv_cand);
 
-  /* First move the most expensive feasible multiplication
-     to index.  */
+  /* First move the most expensive feasible multiplication to index.  */
   if (!parts->index)
     most_expensive_mult_to_index (type, parts, addr, speed);
 
-  /* Try to find a base of the reference.  Since at the moment
-     there is no reliable way how to distinguish between pointer and its
-     offset, this is just a guess.  */
-  if (!parts->symbol && base_hint)
-    move_hint_to_base (type, parts, base_hint, addr);
+  /* Move pointer into base.  */
   if (!parts->symbol && !parts->base)
     move_pointer_to_base (parts, addr);
 
   /* Then try to process the remaining elements.  */
   for (i = 0; i < addr->n; i++)
     {
       part = fold_convert (sizetype, addr->elts[i].val);
-      if (!double_int_one_p (addr->elts[i].coef))
+      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
-                            double_int_to_tree (sizetype, addr->elts[i].coef));
+                            wide_int_to_tree (sizetype, addr->elts[i].coef));
       add_to_parts (parts, part);
     }
   if (addr->rest)
     add_to_parts (parts, fold_convert (sizetype, addr->rest));
 }
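For a concrete feel of the splitting order in addr_to_parts (constant offset first, then symbol, then the hinted or variant base, then the most expensive multiplication as index * step, then everything left over), here is a toy, self-contained C walk-through of how an affine combination like &sym + 4*i + p + 16 would land in the parts. The tree machinery is replaced by plain strings and integers, so this is only a mnemonic for the mapping, not a model of the pass.

#include <stdio.h>

struct parts { const char *symbol, *base, *index; long step, offset; };

int
main (void)
{
  struct parts p = { 0, 0, 0, 0, 0 };

  p.offset = 16;      /* constant part of the combination      */
  p.symbol = "&sym";  /* fixed-address object, moved first     */
  p.index = "i";      /* the term with coefficient != 1        */
  p.step = 4;
  p.base = "p";       /* remaining pointer-typed element       */

  printf ("[%s + %s + %s*%ld + %ld]\n",
          p.symbol, p.base, p.index, p.step, p.offset);
  return 0;
}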
@@ -683,31 +753,71 @@
 
 tree
 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
 {
+  bool var_in_base;
   tree mem_ref, tmp;
-  tree atype;
   struct mem_address parts;
 
-  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
+  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
   gimplify_mem_ref_parts (gsi, &parts);
   mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
   if (mem_ref)
     return mem_ref;
 
   /* The expression is too complicated.  Try making it simpler.  */
 
+  /* Merge symbol into other parts.  */
+  if (parts.symbol)
+    {
+      tmp = parts.symbol;
+      parts.symbol = NULL_TREE;
+      gcc_assert (is_gimple_val (tmp));
+
+      if (parts.base)
+        {
+          gcc_assert (useless_type_conversion_p (sizetype,
+                                                 TREE_TYPE (parts.base)));
+
+          if (parts.index)
+            {
+              /* Add the symbol to base, eventually forcing it to register.  */
+              tmp = fold_build_pointer_plus (tmp, parts.base);
+              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
+                                                is_gimple_mem_ref_addr,
+                                                NULL_TREE, true,
+                                                GSI_SAME_STMT);
+            }
+          else
+            {
+              /* Move base to index, then move the symbol to base.  */
+              parts.index = parts.base;
+            }
+          parts.base = tmp;
+        }
+      else
+        parts.base = tmp;
+
+      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
+      if (mem_ref)
+        return mem_ref;
+    }
+
+  /* Move multiplication to index by transforming address expression:
+       [... + index << step + ...]
+     into:
+       index' = index << step;
+       [... + index' + ...].  */
   if (parts.step && !integer_onep (parts.step))
     {
-      /* Move the multiplication to index.  */
       gcc_assert (parts.index);
       parts.index = force_gimple_operand_gsi (gsi,
                                 fold_build2 (MULT_EXPR, sizetype,
                                              parts.index, parts.step),
                                 true, NULL_TREE, true, GSI_SAME_STMT);
       parts.step = NULL_TREE;
 
       mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
       if (mem_ref)
        return mem_ref;
     }
@@ -714,78 +824,100 @@
 
-  if (parts.symbol)
-    {
-      tmp = parts.symbol;
-      gcc_assert (is_gimple_val (tmp));
-
-      /* Add the symbol to base, eventually forcing it to register.  */
-      if (parts.base)
-        {
-          gcc_assert (useless_type_conversion_p
-                      (sizetype, TREE_TYPE (parts.base)));
-
+  /* Add offset to invariant part by transforming address expression:
+       [base + index + offset]
+     into:
+       base' = base + offset;
+       [base' + index]
+     or:
+       index' = index + offset;
+       [base + index']
+     depending on which one is invariant.  */
+  if (parts.offset && !integer_zerop (parts.offset))
+    {
+      tree old_base = unshare_expr (parts.base);
+      tree old_index = unshare_expr (parts.index);
+      tree old_offset = unshare_expr (parts.offset);
+
+      tmp = parts.offset;
+      parts.offset = NULL_TREE;
+      /* Add offset to invariant part.  */
+      if (!var_in_base)
+        {
+          if (parts.base)
+            {
+              tmp = fold_build_pointer_plus (parts.base, tmp);
+              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
+                                                is_gimple_mem_ref_addr,
+                                                NULL_TREE, true,
+                                                GSI_SAME_STMT);
+            }
+          parts.base = tmp;
+        }
+      else
+        {
          if (parts.index)
            {
-              atype = TREE_TYPE (tmp);
-              parts.base = force_gimple_operand_gsi_1 (gsi,
-                        fold_build2 (POINTER_PLUS_EXPR, atype,
-                                     tmp,
-                                     fold_convert (sizetype, parts.base)),
-                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
+              tmp = fold_build_pointer_plus (parts.index, tmp);
+              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
+                                                is_gimple_mem_ref_addr,
+                                                NULL_TREE, true,
+                                                GSI_SAME_STMT);
            }
-          else
-            {
-              parts.index = parts.base;
-              parts.base = tmp;
-            }
-        }
-      else
-        parts.base = tmp;
-      parts.symbol = NULL_TREE;
+          parts.index = tmp;
+        }
 
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
-    }
 
+      /* Restore parts.base, index and offset so that we can check if
+         [base + offset] addressing mode is supported in next step.
+         This is necessary for targets only support [base + offset],
+         but not [base + index] addressing mode.  */
+      parts.base = old_base;
+      parts.index = old_index;
+      parts.offset = old_offset;
+    }
+
+  /* Transform [base + index + ...] into:
+       base' = base + index;
+       [base' + ...].  */
   if (parts.index)
     {
+      tmp = parts.index;
+      parts.index = NULL_TREE;
       /* Add index to base.  */
       if (parts.base)
        {
-          atype = TREE_TYPE (parts.base);
-          parts.base = force_gimple_operand_gsi_1 (gsi,
-                        fold_build2 (POINTER_PLUS_EXPR, atype,
-                                     parts.base,
-                                     parts.index),
-                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
+          tmp = fold_build_pointer_plus (parts.base, tmp);
+          tmp = force_gimple_operand_gsi_1 (gsi, tmp,
+                                            is_gimple_mem_ref_addr,
+                                            NULL_TREE, true, GSI_SAME_STMT);
        }
-      else
-        parts.base = parts.index;
-      parts.index = NULL_TREE;
+      parts.base = tmp;
 
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
     }
 
+  /* Transform [base + offset] into:
+       base' = base + offset;
+       [base'].  */
   if (parts.offset && !integer_zerop (parts.offset))
     {
-      /* Try adding offset to base.  */
+      tmp = parts.offset;
+      parts.offset = NULL_TREE;
+      /* Add offset to base.  */
      if (parts.base)
        {
-          atype = TREE_TYPE (parts.base);
-          parts.base = force_gimple_operand_gsi_1 (gsi,
-                        fold_build2 (POINTER_PLUS_EXPR, atype,
-                                     parts.base,
-                                     fold_convert (sizetype, parts.offset)),
-                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
+          tmp = fold_build_pointer_plus (parts.base, tmp);
+          tmp = force_gimple_operand_gsi_1 (gsi, tmp,
+                                            is_gimple_mem_ref_addr,
+                                            NULL_TREE, true, GSI_SAME_STMT);
        }
-      else
-        parts.base = parts.offset;
-
-      parts.offset = NULL_TREE;
+      parts.base = tmp;
 
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
     }
@@ -824,14 +956,70 @@
   addr->index = TMR_INDEX (op);
   addr->step = TMR_STEP (op);
   addr->offset = TMR_OFFSET (op);
 }
 
-/* Copies the additional information attached to target_mem_ref FROM to TO.  */
+/* Copies the reference information from OLD_REF to NEW_REF, where
+   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */
 
 void
-copy_mem_ref_info (tree to, tree from)
+copy_ref_info (tree new_ref, tree old_ref)
 {
-  /* And the info about the original reference.  */
-  TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
-  TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
+  tree new_ptr_base = NULL_TREE;
+
+  gcc_assert (TREE_CODE (new_ref) == MEM_REF
+              || TREE_CODE (new_ref) == TARGET_MEM_REF);
+
+  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
+  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
+
+  new_ptr_base = TREE_OPERAND (new_ref, 0);
+
+  /* We can transfer points-to information from an old pointer
+     or decl base to the new one.  */
+  if (new_ptr_base
+      && TREE_CODE (new_ptr_base) == SSA_NAME
+      && !SSA_NAME_PTR_INFO (new_ptr_base))
+    {
+      tree base = get_base_address (old_ref);
+      if (!base)
+        ;
+      else if ((TREE_CODE (base) == MEM_REF
+                || TREE_CODE (base) == TARGET_MEM_REF)
+               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
+               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
+        {
+          struct ptr_info_def *new_pi;
+          unsigned int align, misalign;
+
+          duplicate_ssa_name_ptr_info
+            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
+          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
+          /* We have to be careful about transferring alignment information.  */
+          if (get_ptr_info_alignment (new_pi, &align, &misalign)
+              && TREE_CODE (old_ref) == MEM_REF
+              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
+                   && (TMR_INDEX2 (new_ref)
+                       /* TODO: Below conditions can be relaxed if TMR_INDEX
+                          is an induction variable and its initial value and
+                          step are aligned.  */
+                       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
+                       || (TMR_STEP (new_ref)
+                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
+                               < align)))))
+            {
+              unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
+                                  - mem_ref_offset (new_ref).to_short_addr ());
+              adjust_ptr_info_misalignment (new_pi, inc);
+            }
+          else
+            mark_ptr_info_alignment_unknown (new_pi);
+        }
+      else if (VAR_P (base)
+               || TREE_CODE (base) == PARM_DECL
+               || TREE_CODE (base) == RESULT_DECL)
+        {
+          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
+          pt_solution_set_var (&pi->pt, base);
+        }
+    }
 }
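The alignment transfer at the end of copy_ref_info shifts the known misalignment by the difference of the two constant offsets instead of discarding it. The arithmetic, detached from GCC's ptr_info machinery, is sketched below; adjust_ptr_info_misalignment itself masks with align - 1, so the modulo form here is an equivalent assumption for power-of-two alignments.

#include <stdio.h>

int
main (void)
{
  unsigned int align = 16, misalign = 4;
  long old_offset = 8, new_offset = 0;

  /* inc mirrors "mem_ref_offset (old_ref) - mem_ref_offset (new_ref)".  */
  long inc = old_offset - new_offset;
  misalign = (unsigned int) (((long) misalign + inc) % align + align) % align;

  printf ("align %u misalign %u\n", align, misalign);  /* align 16 misalign 12 */
  return 0;
}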
@@ -838,15 +1026,15 @@
 
 /* Move constants in target_mem_ref REF to offset.  Returns the new target
    mem ref if anything changes, NULL_TREE otherwise.  */
 
 tree
 maybe_fold_tmr (tree ref)
 {
   struct mem_address addr;
   bool changed = false;
-  tree ret, off;
+  tree new_ref, off;
 
   get_address_description (ref, &addr);
 
   if (addr.base
       && TREE_CODE (addr.base) == INTEGER_CST
@@ -875,11 +1063,11 @@
       HOST_WIDE_INT offset;
       addr.symbol = build_fold_addr_expr
                       (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
       addr.offset = int_const_binop (PLUS_EXPR,
-                                     addr.offset, size_int (offset), 0);
+                                     addr.offset, size_int (offset));
       changed = true;
     }
 
   if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
     {
@@ -903,14 +1091,15 @@
 
   /* If we have propagated something into this TARGET_MEM_REF and thus
      ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
-  ret = create_mem_ref_raw (TREE_TYPE (ref),
-                            TREE_TYPE (addr.offset), &addr, false);
-  copy_mem_ref_info (ret, ref);
-  return ret;
+  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
+                                TREE_TYPE (addr.offset), &addr, false);
+  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
+  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
+  return new_ref;
 }
 
 /* Dump PARTS to FILE.  */
 
 extern void dump_mem_address (FILE *, struct mem_address *);