comparison gcc/tree-affine.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparison of 68:561a7518be6b with 111:04ced10e8804
1 /* Operations with affine combinations of trees. 1 /* Operations with affine combinations of trees.
2 Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc. 2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it 6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the 7 under the terms of the GNU General Public License as published by the
18 <http://www.gnu.org/licenses/>. */ 18 <http://www.gnu.org/licenses/>. */
19 19
20 #include "config.h" 20 #include "config.h"
21 #include "system.h" 21 #include "system.h"
22 #include "coretypes.h" 22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
23 #include "tree.h" 25 #include "tree.h"
24 #include "output.h" 26 #include "gimple.h"
27 #include "ssa.h"
25 #include "tree-pretty-print.h" 28 #include "tree-pretty-print.h"
26 #include "tree-dump.h" 29 #include "fold-const.h"
27 #include "pointer-set.h"
28 #include "tree-affine.h" 30 #include "tree-affine.h"
29 #include "gimple.h" 31 #include "gimplify.h"
30 #include "flags.h" 32 #include "dumpfile.h"
33 #include "cfgexpand.h"
31 34
32 /* Extends CST as appropriate for the affine combinations COMB. */ 35 /* Extends CST as appropriate for the affine combinations COMB. */
33 36
34 double_int 37 widest_int
35 double_int_ext_for_comb (double_int cst, aff_tree *comb) 38 wide_int_ext_for_comb (const widest_int &cst, tree type)
36 { 39 {
37 return double_int_sext (cst, TYPE_PRECISION (comb->type)); 40 return wi::sext (cst, TYPE_PRECISION (type));
38 } 41 }
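
Note: both the old double_int_sext and the new wi::sext reduce a constant to the combination type's precision by sign extension. A minimal standalone C sketch of that operation (not GCC's wide-int code), assuming 0 < prec < 64 and two's complement:

/* Sign-extend the low PREC bits of VAL, as wi::sext does for
   TYPE_PRECISION (type).  */
static long long
sext_sketch (unsigned long long val, unsigned prec)
{
  unsigned long long sign = 1ULL << (prec - 1); /* sign bit of the narrow value */
  val &= (sign << 1) - 1;                       /* keep only the low PREC bits */
  return (long long) ((val ^ sign) - sign);     /* propagate the sign bit */
}

/* E.g. sext_sketch (0xff, 8) == -1 and sext_sketch (0x7f, 8) == 127.  */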
39 42
40 /* Initializes affine combination COMB so that its value is zero in TYPE. */ 43 /* Initializes affine combination COMB so that its value is zero in TYPE. */
41 44
42 static void 45 static void
43 aff_combination_zero (aff_tree *comb, tree type) 46 aff_combination_zero (aff_tree *comb, tree type)
44 { 47 {
48 int i;
45 comb->type = type; 49 comb->type = type;
46 comb->offset = double_int_zero; 50 comb->offset = 0;
47 comb->n = 0; 51 comb->n = 0;
52 for (i = 0; i < MAX_AFF_ELTS; i++)
53 comb->elts[i].coef = 0;
48 comb->rest = NULL_TREE; 54 comb->rest = NULL_TREE;
49 } 55 }
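
Note: the shape being zeroed here, and manipulated throughout this file, is
value = offset + sum of elts[i].coef * elts[i].val, plus an optional rest tree
with implicit coefficient 1. A hypothetical plain-C mirror of the aff_tree
layout, with long long standing in for widest_int and void * for tree
(MAX_AFF_ELTS is 8 in tree-affine.h):

#define MAX_AFF_ELTS 8

struct aff_elt_sketch { void *val; long long coef; };

struct aff_tree_sketch
{
  void *type;                          /* type of the combination */
  long long offset;                    /* constant offset */
  unsigned n;                          /* number of tracked elements */
  struct aff_elt_sketch elts[MAX_AFF_ELTS];
  void *rest;                          /* leftover part, implicit coefficient 1 */
};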
50 56
51 /* Sets COMB to CST. */ 57 /* Sets COMB to CST. */
52 58
53 void 59 void
54 aff_combination_const (aff_tree *comb, tree type, double_int cst) 60 aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
55 { 61 {
56 aff_combination_zero (comb, type); 62 aff_combination_zero (comb, type);
57 comb->offset = double_int_ext_for_comb (cst, comb); 63 comb->offset = wide_int_ext_for_comb (cst, comb->type);
58 } 64 }
59 65
60 /* Sets COMB to single element ELT. */ 66 /* Sets COMB to single element ELT. */
61 67
62 void 68 void
64 { 70 {
65 aff_combination_zero (comb, type); 71 aff_combination_zero (comb, type);
66 72
67 comb->n = 1; 73 comb->n = 1;
68 comb->elts[0].val = elt; 74 comb->elts[0].val = elt;
69 comb->elts[0].coef = double_int_one; 75 comb->elts[0].coef = 1;
70 } 76 }
71 77
72 /* Scales COMB by SCALE. */ 78 /* Scales COMB by SCALE. */
73 79
74 void 80 void
75 aff_combination_scale (aff_tree *comb, double_int scale) 81 aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
76 { 82 {
77 unsigned i, j; 83 unsigned i, j;
78 84
79 scale = double_int_ext_for_comb (scale, comb); 85 widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
80 if (double_int_one_p (scale)) 86 if (scale == 1)
81 return; 87 return;
82 88
83 if (double_int_zero_p (scale)) 89 if (scale == 0)
84 { 90 {
85 aff_combination_zero (comb, comb->type); 91 aff_combination_zero (comb, comb->type);
86 return; 92 return;
87 } 93 }
88 94
89 comb->offset 95 comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
90 = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
91 for (i = 0, j = 0; i < comb->n; i++) 96 for (i = 0, j = 0; i < comb->n; i++)
92 { 97 {
93 double_int new_coef; 98 widest_int new_coef
94 99 = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
95 new_coef
96 = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
97 comb);
98 /* A coefficient may become zero due to overflow. Remove the zero 100 /* A coefficient may become zero due to overflow. Remove the zero
99 elements. */ 101 elements. */
100 if (double_int_zero_p (new_coef)) 102 if (new_coef == 0)
101 continue; 103 continue;
102 comb->elts[j].coef = new_coef; 104 comb->elts[j].coef = new_coef;
103 comb->elts[j].val = comb->elts[i].val; 105 comb->elts[j].val = comb->elts[i].val;
104 j++; 106 j++;
105 } 107 }
117 comb->rest = NULL_TREE; 119 comb->rest = NULL_TREE;
118 comb->n++; 120 comb->n++;
119 } 121 }
120 else 122 else
121 comb->rest = fold_build2 (MULT_EXPR, type, comb->rest, 123 comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
122 double_int_to_tree (type, scale)); 124 wide_int_to_tree (type, scale));
123 } 125 }
124 } 126 }
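
Note: a concrete instance of the overflow case handled in the loop above:
coefficients are reduced to the combination type's precision, so a product can
wrap to zero, at which point the element must be dropped. Illustrative check
with an 8-bit type as stand-in:

#include <stdint.h>
#include <assert.h>

static void
check_coef_overflow (void)
{
  uint8_t coef = 64;
  uint8_t scaled = (uint8_t) (coef * 4);  /* 256 truncated to 8 bits */
  assert (scaled == 0);                   /* element would be removed */
}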
125 127
126 /* Adds ELT * SCALE to COMB. */ 128 /* Adds ELT * SCALE to COMB. */
127 129
128 void 130 void
129 aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale) 131 aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
130 { 132 {
131 unsigned i; 133 unsigned i;
132 tree type; 134 tree type;
133 135
134 scale = double_int_ext_for_comb (scale, comb); 136 widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
135 if (double_int_zero_p (scale)) 137 if (scale == 0)
136 return; 138 return;
137 139
138 for (i = 0; i < comb->n; i++) 140 for (i = 0; i < comb->n; i++)
139 if (operand_equal_p (comb->elts[i].val, elt, 0)) 141 if (operand_equal_p (comb->elts[i].val, elt, 0))
140 { 142 {
141 double_int new_coef; 143 widest_int new_coef
142 144 = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
143 new_coef = double_int_add (comb->elts[i].coef, scale); 145 if (new_coef != 0)
144 new_coef = double_int_ext_for_comb (new_coef, comb);
145 if (!double_int_zero_p (new_coef))
146 { 146 {
147 comb->elts[i].coef = new_coef; 147 comb->elts[i].coef = new_coef;
148 return; 148 return;
149 } 149 }
150 150
152 comb->elts[i] = comb->elts[comb->n]; 152 comb->elts[i] = comb->elts[comb->n];
153 153
154 if (comb->rest) 154 if (comb->rest)
155 { 155 {
156 gcc_assert (comb->n == MAX_AFF_ELTS - 1); 156 gcc_assert (comb->n == MAX_AFF_ELTS - 1);
157 comb->elts[comb->n].coef = double_int_one; 157 comb->elts[comb->n].coef = 1;
158 comb->elts[comb->n].val = comb->rest; 158 comb->elts[comb->n].val = comb->rest;
159 comb->rest = NULL_TREE; 159 comb->rest = NULL_TREE;
160 comb->n++; 160 comb->n++;
161 } 161 }
162 return; 162 return;
171 171
172 type = comb->type; 172 type = comb->type;
173 if (POINTER_TYPE_P (type)) 173 if (POINTER_TYPE_P (type))
174 type = sizetype; 174 type = sizetype;
175 175
176 if (double_int_one_p (scale)) 176 if (scale == 1)
177 elt = fold_convert (type, elt); 177 elt = fold_convert (type, elt);
178 else 178 else
179 elt = fold_build2 (MULT_EXPR, type, 179 elt = fold_build2 (MULT_EXPR, type,
180 fold_convert (type, elt), 180 fold_convert (type, elt),
181 double_int_to_tree (type, scale)); 181 wide_int_to_tree (type, scale));
182 182
183 if (comb->rest) 183 if (comb->rest)
184 comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, 184 comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
185 elt); 185 elt);
186 else 186 else
188 } 188 }
189 189
190 /* Adds CST to C. */ 190 /* Adds CST to C. */
191 191
192 static void 192 static void
193 aff_combination_add_cst (aff_tree *c, double_int cst) 193 aff_combination_add_cst (aff_tree *c, const widest_int &cst)
194 { 194 {
195 c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c); 195 c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
196 } 196 }
197 197
198 /* Adds COMB2 to COMB1. */ 198 /* Adds COMB2 to COMB1. */
199 199
200 void 200 void
204 204
205 aff_combination_add_cst (comb1, comb2->offset); 205 aff_combination_add_cst (comb1, comb2->offset);
206 for (i = 0; i < comb2->n; i++) 206 for (i = 0; i < comb2->n; i++)
207 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef); 207 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
208 if (comb2->rest) 208 if (comb2->rest)
209 aff_combination_add_elt (comb1, comb2->rest, double_int_one); 209 aff_combination_add_elt (comb1, comb2->rest, 1);
210 } 210 }
211 211
212 /* Converts affine combination COMB to TYPE. */ 212 /* Converts affine combination COMB to TYPE. */
213 213
214 void 214 void
229 comb->rest = fold_convert (type, comb->rest); 229 comb->rest = fold_convert (type, comb->rest);
230 230
231 if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type)) 231 if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
232 return; 232 return;
233 233
234 comb->offset = double_int_ext_for_comb (comb->offset, comb); 234 comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
235 for (i = j = 0; i < comb->n; i++) 235 for (i = j = 0; i < comb->n; i++)
236 { 236 {
237 double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb); 237 if (comb->elts[i].coef == 0)
238 if (double_int_zero_p (new_coef))
239 continue; 238 continue;
240 comb->elts[j].coef = new_coef; 239 comb->elts[j].coef = comb->elts[i].coef;
241 comb->elts[j].val = fold_convert (type, comb->elts[i].val); 240 comb->elts[j].val = fold_convert (type, comb->elts[i].val);
242 j++; 241 j++;
243 } 242 }
244 243
245 comb->n = j; 244 comb->n = j;
246 if (comb->n < MAX_AFF_ELTS && comb->rest) 245 if (comb->n < MAX_AFF_ELTS && comb->rest)
247 { 246 {
248 comb->elts[comb->n].coef = double_int_one; 247 comb->elts[comb->n].coef = 1;
249 comb->elts[comb->n].val = comb->rest; 248 comb->elts[comb->n].val = comb->rest;
250 comb->rest = NULL_TREE; 249 comb->rest = NULL_TREE;
251 comb->n++; 250 comb->n++;
252 } 251 }
253 } 252 }
259 { 258 {
260 aff_tree tmp; 259 aff_tree tmp;
261 enum tree_code code; 260 enum tree_code code;
262 tree cst, core, toffset; 261 tree cst, core, toffset;
263 HOST_WIDE_INT bitpos, bitsize; 262 HOST_WIDE_INT bitpos, bitsize;
264 enum machine_mode mode; 263 machine_mode mode;
265 int unsignedp, volatilep; 264 int unsignedp, reversep, volatilep;
266 265
267 STRIP_NOPS (expr); 266 STRIP_NOPS (expr);
268 267
269 code = TREE_CODE (expr); 268 code = TREE_CODE (expr);
270 switch (code) 269 switch (code)
271 { 270 {
272 case INTEGER_CST: 271 case INTEGER_CST:
273 aff_combination_const (comb, type, tree_to_double_int (expr)); 272 aff_combination_const (comb, type, wi::to_widest (expr));
274 return; 273 return;
275 274
276 case POINTER_PLUS_EXPR: 275 case POINTER_PLUS_EXPR:
277 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); 276 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
278 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp); 277 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
282 case PLUS_EXPR: 281 case PLUS_EXPR:
283 case MINUS_EXPR: 282 case MINUS_EXPR:
284 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); 283 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
285 tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp); 284 tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
286 if (code == MINUS_EXPR) 285 if (code == MINUS_EXPR)
287 aff_combination_scale (&tmp, double_int_minus_one); 286 aff_combination_scale (&tmp, -1);
288 aff_combination_add (comb, &tmp); 287 aff_combination_add (comb, &tmp);
289 return; 288 return;
290 289
291 case MULT_EXPR: 290 case MULT_EXPR:
292 cst = TREE_OPERAND (expr, 1); 291 cst = TREE_OPERAND (expr, 1);
293 if (TREE_CODE (cst) != INTEGER_CST) 292 if (TREE_CODE (cst) != INTEGER_CST)
294 break; 293 break;
295 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); 294 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
296 aff_combination_scale (comb, tree_to_double_int (cst)); 295 aff_combination_scale (comb, wi::to_widest (cst));
297 return; 296 return;
298 297
299 case NEGATE_EXPR: 298 case NEGATE_EXPR:
300 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); 299 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
301 aff_combination_scale (comb, double_int_minus_one); 300 aff_combination_scale (comb, -1);
302 return; 301 return;
303 302
304 case BIT_NOT_EXPR: 303 case BIT_NOT_EXPR:
305 /* ~x = -x - 1 */ 304 /* ~x = -x - 1 */
306 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); 305 tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
307 aff_combination_scale (comb, double_int_minus_one); 306 aff_combination_scale (comb, -1);
308 aff_combination_add_cst (comb, double_int_minus_one); 307 aff_combination_add_cst (comb, -1);
309 return; 308 return;
310 309
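
Note: the BIT_NOT_EXPR case relies on the two's complement identity
~x == -x - 1, which is why it scales by -1 and then adds -1. A quick
standalone check:

#include <assert.h>

static void
check_bit_not (int x)
{
  assert (~x == -x - 1);   /* holds for two's complement integers */
}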
311 case ADDR_EXPR: 310 case ADDR_EXPR:
312 /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */ 311 /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
313 if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF) 312 if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
317 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp); 316 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
318 aff_combination_add (comb, &tmp); 317 aff_combination_add (comb, &tmp);
319 return; 318 return;
320 } 319 }
321 core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos, 320 core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
322 &toffset, &mode, &unsignedp, &volatilep, 321 &toffset, &mode, &unsignedp, &reversep,
323 false); 322 &volatilep);
324 if (bitpos % BITS_PER_UNIT != 0) 323 if (bitpos % BITS_PER_UNIT != 0)
325 break; 324 break;
326 aff_combination_const (comb, type, 325 aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
327 uhwi_to_double_int (bitpos / BITS_PER_UNIT)); 326 if (TREE_CODE (core) == MEM_REF)
328 core = build_fold_addr_expr (core); 327 {
328 aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
329 core = TREE_OPERAND (core, 0);
330 }
331 else
332 core = build_fold_addr_expr (core);
333
329 if (TREE_CODE (core) == ADDR_EXPR) 334 if (TREE_CODE (core) == ADDR_EXPR)
330 aff_combination_add_elt (comb, core, double_int_one); 335 aff_combination_add_elt (comb, core, 1);
331 else 336 else
332 { 337 {
333 tree_to_aff_combination (core, type, &tmp); 338 tree_to_aff_combination (core, type, &tmp);
334 aff_combination_add (comb, &tmp); 339 aff_combination_add (comb, &tmp);
335 } 340 }
357 (TREE_TYPE (TREE_OPERAND (expr, 1)), 0))); 362 (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
358 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp); 363 tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
359 aff_combination_add (comb, &tmp); 364 aff_combination_add (comb, &tmp);
360 return; 365 return;
361 366
367 CASE_CONVERT:
368 {
369 tree otype = TREE_TYPE (expr);
370 tree inner = TREE_OPERAND (expr, 0);
371 tree itype = TREE_TYPE (inner);
372 enum tree_code icode = TREE_CODE (inner);
373
374 /* In principle this is a valid folding, but it isn't necessarily
375 an optimization, so do it here and not in fold_unary. */
376 if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
377 && TREE_CODE (itype) == INTEGER_TYPE
378 && TREE_CODE (otype) == INTEGER_TYPE
379 && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
380 {
381 tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);
382
383 /* If inner type has undefined overflow behavior, fold conversion
384 for below two cases:
385 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
386 (T1)(X + X) -> (T1)X + (T1)X. */
387 if (TYPE_OVERFLOW_UNDEFINED (itype)
388 && (TREE_CODE (op1) == INTEGER_CST
389 || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
390 {
391 op0 = fold_convert (otype, op0);
392 op1 = fold_convert (otype, op1);
393 expr = fold_build2 (icode, otype, op0, op1);
394 tree_to_aff_combination (expr, type, comb);
395 return;
396 }
397 wide_int minv, maxv;
398 /* If inner type has wrapping overflow behavior, fold conversion
399 for below case:
400 (T1)(X - CST) -> (T1)X - (T1)CST
401 if X - CST doesn't overflow by range information. Also handle
402 (T1)(X + CST) as (T1)(X - (-CST)). */
403 if (TYPE_UNSIGNED (itype)
404 && TYPE_OVERFLOW_WRAPS (itype)
405 && TREE_CODE (op0) == SSA_NAME
406 && TREE_CODE (op1) == INTEGER_CST
407 && icode != MULT_EXPR
408 && get_range_info (op0, &minv, &maxv) == VR_RANGE)
409 {
410 if (icode == PLUS_EXPR)
411 op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
412 if (wi::geu_p (minv, wi::to_wide (op1)))
413 {
414 op0 = fold_convert (otype, op0);
415 op1 = fold_convert (otype, op1);
416 expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
417 tree_to_aff_combination (expr, type, comb);
418 return;
419 }
420 }
421 }
422 }
423 break;
424
362 default: 425 default:
363 break; 426 break;
364 } 427 }
365 428
366 aff_combination_elt (comb, type, expr); 429 aff_combination_elt (comb, type, expr);
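
Note: the CASE_CONVERT arm added above folds (T1)(X - CST) to (T1)X - (T1)CST
for unsigned wrapping types only when range information proves X >= CST, since
otherwise the narrow subtraction wraps before the widening. A standalone check
of that condition, using uint32_t/uint64_t as stand-ins for itype/otype:

#include <stdint.h>
#include <assert.h>

static void
check_range_fold (uint32_t x)
{
  /* Safe only under the range guarantee x >= 3.  */
  if (x >= 3)
    assert ((uint64_t) (x - 3) == (uint64_t) x - 3);
  /* Without it the sides differ: x == 0 gives (uint64_t) 0xfffffffd on
     the left but -3 modulo 2^64 on the right.  */
}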
368 431
369 /* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine 432 /* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine
370 combination COMB. */ 433 combination COMB. */
371 434
372 static tree 435 static tree
373 add_elt_to_tree (tree expr, tree type, tree elt, double_int scale, 436 add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
374 aff_tree *comb)
375 { 437 {
376 enum tree_code code; 438 enum tree_code code;
377 tree type1 = type; 439
378 if (POINTER_TYPE_P (type)) 440 widest_int scale = wide_int_ext_for_comb (scale_in, type);
379 type1 = sizetype; 441
380 442 elt = fold_convert (type, elt);
381 scale = double_int_ext_for_comb (scale, comb); 443 if (scale == 1)
382 elt = fold_convert (type1, elt);
383
384 if (double_int_one_p (scale))
385 { 444 {
386 if (!expr) 445 if (!expr)
387 return fold_convert (type, elt); 446 return elt;
388 447
389 if (POINTER_TYPE_P (type))
390 return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
391 return fold_build2 (PLUS_EXPR, type, expr, elt); 448 return fold_build2 (PLUS_EXPR, type, expr, elt);
392 } 449 }
393 450
394 if (double_int_minus_one_p (scale)) 451 if (scale == -1)
395 { 452 {
396 if (!expr) 453 if (!expr)
397 return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt)); 454 return fold_build1 (NEGATE_EXPR, type, elt);
398 455
399 if (POINTER_TYPE_P (type))
400 {
401 elt = fold_build1 (NEGATE_EXPR, type1, elt);
402 return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
403 }
404 return fold_build2 (MINUS_EXPR, type, expr, elt); 456 return fold_build2 (MINUS_EXPR, type, expr, elt);
405 } 457 }
406 458
407 if (!expr) 459 if (!expr)
408 return fold_convert (type, 460 return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
409 fold_build2 (MULT_EXPR, type1, elt, 461
410 double_int_to_tree (type1, scale))); 462 if (wi::neg_p (scale))
411
412 if (double_int_negative_p (scale))
413 { 463 {
414 code = MINUS_EXPR; 464 code = MINUS_EXPR;
415 scale = double_int_neg (scale); 465 scale = -scale;
416 } 466 }
417 else 467 else
418 code = PLUS_EXPR; 468 code = PLUS_EXPR;
419 469
420 elt = fold_build2 (MULT_EXPR, type1, elt, 470 elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
421 double_int_to_tree (type1, scale));
422 if (POINTER_TYPE_P (type))
423 {
424 if (code == MINUS_EXPR)
425 elt = fold_build1 (NEGATE_EXPR, type1, elt);
426 return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
427 }
428 return fold_build2 (code, type, expr, elt); 471 return fold_build2 (code, type, expr, elt);
429 } 472 }
430 473
431 /* Makes tree from the affine combination COMB. */ 474 /* Makes tree from the affine combination COMB. */
432 475
433 tree 476 tree
434 aff_combination_to_tree (aff_tree *comb) 477 aff_combination_to_tree (aff_tree *comb)
435 { 478 {
436 tree type = comb->type; 479 tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
437 tree expr = NULL_TREE;
438 unsigned i; 480 unsigned i;
439 double_int off, sgn; 481 widest_int off, sgn;
440 tree type1 = type; 482
483 gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);
484
485 i = 0;
441 if (POINTER_TYPE_P (type)) 486 if (POINTER_TYPE_P (type))
442 type1 = sizetype; 487 {
443 488 type = sizetype;
444 gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE); 489 if (comb->n > 0 && comb->elts[0].coef == 1
445 490 && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
446 for (i = 0; i < comb->n; i++) 491 {
447 expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef, 492 base = comb->elts[0].val;
448 comb); 493 ++i;
494 }
495 }
496
497 for (; i < comb->n; i++)
498 expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);
449 499
450 if (comb->rest) 500 if (comb->rest)
451 expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb); 501 expr = add_elt_to_tree (expr, type, comb->rest, 1);
452 502
453 /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is 503 /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
454 unsigned. */ 504 unsigned. */
455 if (double_int_negative_p (comb->offset)) 505 if (wi::neg_p (comb->offset))
456 { 506 {
457 off = double_int_neg (comb->offset); 507 off = -comb->offset;
458 sgn = double_int_minus_one; 508 sgn = -1;
459 } 509 }
460 else 510 else
461 { 511 {
462 off = comb->offset; 512 off = comb->offset;
463 sgn = double_int_one; 513 sgn = 1;
464 } 514 }
465 return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn, 515 expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);
466 comb); 516
517 if (base)
518 return fold_build_pointer_plus (base, expr);
519 else
520 return fold_convert (comb->type, expr);
467 } 521 }
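
Note: the off/sgn split above implements the comment about emitting x - 1
rather than x + 0xff..f: for unsigned types the two are the same value, but the
MINUS_EXPR spelling is the canonical one. Illustrative check:

#include <stdint.h>
#include <assert.h>

static void
check_offset_form (uint32_t x)
{
  assert (x + UINT32_MAX == x - 1);   /* same value, different spelling */
}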
468 522
469 /* Copies the tree elements of COMB to ensure that they are not shared. */ 523 /* Copies the tree elements of COMB to ensure that they are not shared. */
470 524
471 void 525 void
487 comb->n--; 541 comb->n--;
488 if (m <= comb->n) 542 if (m <= comb->n)
489 comb->elts[m] = comb->elts[comb->n]; 543 comb->elts[m] = comb->elts[comb->n];
490 if (comb->rest) 544 if (comb->rest)
491 { 545 {
492 comb->elts[comb->n].coef = double_int_one; 546 comb->elts[comb->n].coef = 1;
493 comb->elts[comb->n].val = comb->rest; 547 comb->elts[comb->n].val = comb->rest;
494 comb->rest = NULL_TREE; 548 comb->rest = NULL_TREE;
495 comb->n++; 549 comb->n++;
496 } 550 }
497 } 551 }
499 /* Adds C * COEF * VAL to R. VAL may be NULL, in that case only 553 /* Adds C * COEF * VAL to R. VAL may be NULL, in that case only
500 C * COEF is added to R. */ 554 C * COEF is added to R. */
501 555
502 556
503 static void 557 static void
504 aff_combination_add_product (aff_tree *c, double_int coef, tree val, 558 aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
505 aff_tree *r) 559 aff_tree *r)
506 { 560 {
507 unsigned i; 561 unsigned i;
508 tree aval, type; 562 tree aval, type;
509 563
515 type = TREE_TYPE (aval); 569 type = TREE_TYPE (aval);
516 aval = fold_build2 (MULT_EXPR, type, aval, 570 aval = fold_build2 (MULT_EXPR, type, aval,
517 fold_convert (type, val)); 571 fold_convert (type, val));
518 } 572 }
519 573
520 aff_combination_add_elt (r, aval, 574 aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
521 double_int_mul (coef, c->elts[i].coef));
522 } 575 }
523 576
524 if (c->rest) 577 if (c->rest)
525 { 578 {
526 aval = c->rest; 579 aval = c->rest;
533 586
534 aff_combination_add_elt (r, aval, coef); 587 aff_combination_add_elt (r, aval, coef);
535 } 588 }
536 589
537 if (val) 590 if (val)
538 aff_combination_add_elt (r, val, 591 aff_combination_add_elt (r, val, coef * c->offset);
539 double_int_mul (coef, c->offset));
540 else 592 else
541 aff_combination_add_cst (r, double_int_mul (coef, c->offset)); 593 aff_combination_add_cst (r, coef * c->offset);
542 } 594 }
543 595
544 /* Multiplies C1 by C2, storing the result to R */ 596 /* Multiplies C1 by C2, storing the result to R */
545 597
546 void 598 void
552 aff_combination_zero (r, c1->type); 604 aff_combination_zero (r, c1->type);
553 605
554 for (i = 0; i < c2->n; i++) 606 for (i = 0; i < c2->n; i++)
555 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r); 607 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
556 if (c2->rest) 608 if (c2->rest)
557 aff_combination_add_product (c1, double_int_one, c2->rest, r); 609 aff_combination_add_product (c1, 1, c2->rest, r);
558 aff_combination_add_product (c1, c2->offset, NULL, r); 610 aff_combination_add_product (c1, c2->offset, NULL, r);
559 } 611 }
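
Note: aff_combination_mult distributes one combination over the other via
aff_combination_add_product. Algebraically this is plain distribution; a
numeric spot check of one instance of the identity:

#include <assert.h>

static void
check_mult_identity (long long x, long long y)
{
  assert ((2 * x + 3) * (4 * y + 5)
          == 8 * x * y + 10 * x + 12 * y + 15);
}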
560 612
561 /* Returns the element of COMB whose value is VAL, or NULL if no such 613 /* Returns the element of COMB whose value is VAL, or NULL if no such
562 element exists. If IDX is not NULL, it is set to the index of VAL in 614 element exists. If IDX is not NULL, it is set to the index of VAL in
593 /* Expands SSA names in COMB recursively. CACHE is used to cache the 645 /* Expands SSA names in COMB recursively. CACHE is used to cache the
594 results. */ 646 results. */
595 647
596 void 648 void
597 aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED, 649 aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
598 struct pointer_map_t **cache ATTRIBUTE_UNUSED) 650 hash_map<tree, name_expansion *> **cache)
599 { 651 {
600 unsigned i; 652 unsigned i;
601 aff_tree to_add, current, curre; 653 aff_tree to_add, current, curre;
602 tree e, rhs; 654 tree e, rhs;
603 gimple def; 655 gimple *def;
604 double_int scale; 656 widest_int scale;
605 void **slot;
606 struct name_expansion *exp; 657 struct name_expansion *exp;
607 658
608 aff_combination_zero (&to_add, comb->type); 659 aff_combination_zero (&to_add, comb->type);
609 for (i = 0; i < comb->n; i++) 660 for (i = 0; i < comb->n; i++)
610 { 661 {
613 664
614 e = comb->elts[i].val; 665 e = comb->elts[i].val;
615 type = TREE_TYPE (e); 666 type = TREE_TYPE (e);
616 name = e; 667 name = e;
617 /* Look through some conversions. */ 668 /* Look through some conversions. */
618 if (TREE_CODE (e) == NOP_EXPR 669 if (CONVERT_EXPR_P (e)
619 && (TYPE_PRECISION (type) 670 && (TYPE_PRECISION (type)
620 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0))))) 671 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
621 name = TREE_OPERAND (e, 0); 672 name = TREE_OPERAND (e, 0);
622 if (TREE_CODE (name) != SSA_NAME) 673 if (TREE_CODE (name) != SSA_NAME)
623 continue; 674 continue;
636 place where the expansion is used. */ 687 place where the expansion is used. */
637 if (TREE_CODE_CLASS (code) == tcc_reference) 688 if (TREE_CODE_CLASS (code) == tcc_reference)
638 continue; 689 continue;
639 690
640 if (!*cache) 691 if (!*cache)
641 *cache = pointer_map_create (); 692 *cache = new hash_map<tree, name_expansion *>;
642 slot = pointer_map_insert (*cache, e); 693 name_expansion **slot = &(*cache)->get_or_insert (e);
643 exp = (struct name_expansion *) *slot; 694 exp = *slot;
644 695
645 if (!exp) 696 if (!exp)
646 { 697 {
647 exp = XNEW (struct name_expansion); 698 exp = XNEW (struct name_expansion);
648 exp->in_progress = 1; 699 exp->in_progress = 1;
649 *slot = exp; 700 *slot = exp;
650 /* In principle this is a generally valid folding, but 701 rhs = gimple_assign_rhs_to_tree (def);
651 it is not unconditionally an optimization, so do it 702 if (e != name)
652 here and not in fold_unary. */ 703 rhs = fold_convert (type, rhs);
653 /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider 704
654 than the type of X and overflow for the type of X is
655 undefined. */
656 if (e != name
657 && INTEGRAL_TYPE_P (type)
658 && INTEGRAL_TYPE_P (TREE_TYPE (name))
659 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
660 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
661 && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
662 && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
663 rhs = fold_build2 (code, type,
664 fold_convert (type, gimple_assign_rhs1 (def)),
665 fold_convert (type, gimple_assign_rhs2 (def)));
666 else
667 {
668 rhs = gimple_assign_rhs_to_tree (def);
669 if (e != name)
670 rhs = fold_convert (type, rhs);
671 }
672 tree_to_aff_combination_expand (rhs, comb->type, &current, cache); 705 tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
673 exp->expansion = current; 706 exp->expansion = current;
674 exp->in_progress = 0; 707 exp->in_progress = 0;
675 } 708 }
676 else 709 else
684 /* Accumulate the new terms to TO_ADD, so that we do not modify 717 /* Accumulate the new terms to TO_ADD, so that we do not modify
685 COMB while traversing it; include the term -coef * E, to remove 718 COMB while traversing it; include the term -coef * E, to remove
686 it from COMB. */ 719 it from COMB. */
687 scale = comb->elts[i].coef; 720 scale = comb->elts[i].coef;
688 aff_combination_zero (&curre, comb->type); 721 aff_combination_zero (&curre, comb->type);
689 aff_combination_add_elt (&curre, e, double_int_neg (scale)); 722 aff_combination_add_elt (&curre, e, -scale);
690 aff_combination_scale (&current, scale); 723 aff_combination_scale (&current, scale);
691 aff_combination_add (&to_add, &current); 724 aff_combination_add (&to_add, &current);
692 aff_combination_add (&to_add, &curre); 725 aff_combination_add (&to_add, &curre);
693 } 726 }
694 aff_combination_add (comb, &to_add); 727 aff_combination_add (comb, &to_add);
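
Note: the TO_ADD accumulation above replaces a term coef * e without mutating
COMB mid-walk: it records both -coef * e (to cancel the term) and
coef * expansion(e). Numerically, with comb = 3*a2 and the definition
a2 = a1 + 5:

#include <assert.h>

static void
check_expand_step (long long a1)
{
  long long a2 = a1 + 5;                        /* defining statement of a2 */
  long long comb = 3 * a2;                      /* term being expanded */
  long long to_add = -3 * a2 + 3 * (a1 + 5);    /* cancel, then re-add expanded */
  assert (comb + to_add == 3 * a1 + 15);
}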
704 a3 = a2 + a2; 737 a3 = a2 + a2;
705 ... */ 738 ... */
706 739
707 void 740 void
708 tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb, 741 tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
709 struct pointer_map_t **cache) 742 hash_map<tree, name_expansion *> **cache)
710 { 743 {
711 tree_to_aff_combination (expr, type, comb); 744 tree_to_aff_combination (expr, type, comb);
712 aff_combination_expand (comb, cache); 745 aff_combination_expand (comb, cache);
713 } 746 }
714 747
715 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for 748 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for
716 pointer_map_traverse. */ 749 hash_map::traverse. */
717 750
718 static bool 751 bool
719 free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value, 752 free_name_expansion (tree const &, name_expansion **value, void *)
720 void *data ATTRIBUTE_UNUSED) 753 {
721 { 754 free (*value);
722 struct name_expansion *const exp = (struct name_expansion *) *value;
723
724 free (exp);
725 return true; 755 return true;
726 } 756 }
727 757
728 /* Frees memory allocated for the CACHE used by 758 /* Frees memory allocated for the CACHE used by
729 tree_to_aff_combination_expand. */ 759 tree_to_aff_combination_expand. */
730 760
731 void 761 void
732 free_affine_expand_cache (struct pointer_map_t **cache) 762 free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
733 { 763 {
734 if (!*cache) 764 if (!*cache)
735 return; 765 return;
736 766
737 pointer_map_traverse (*cache, free_name_expansion, NULL); 767 (*cache)->traverse<void *, free_name_expansion> (NULL);
738 pointer_map_destroy (*cache); 768 delete (*cache);
739 *cache = NULL; 769 *cache = NULL;
740 } 770 }
741 771
742 /* If VAL != CST * DIV for any constant CST, returns false. 772 /* If VAL != CST * DIV for any constant CST, returns false.
743 Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true, 773 Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
744 additionally compares CST and MULT, and if they are different, 774 and if they are different, returns false. Finally, if neither of these
745 returns false. Finally, if neither of these two cases occur, 775 two cases occur, true is returned, and CST is stored to MULT and MULT_SET
746 true is returned, and if CST != 0, CST is stored to MULT and 776 is set to true. */
747 MULT_SET is set to true. */
748 777
749 static bool 778 static bool
750 double_int_constant_multiple_p (double_int val, double_int div, 779 wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
751 bool *mult_set, double_int *mult) 780 bool *mult_set, widest_int *mult)
752 { 781 {
753 double_int rem, cst; 782 widest_int rem, cst;
754 783
755 if (double_int_zero_p (val)) 784 if (val == 0)
756 return true; 785 {
757 786 if (*mult_set && *mult != 0)
758 if (double_int_zero_p (div)) 787 return false;
788 *mult_set = true;
789 *mult = 0;
790 return true;
791 }
792
793 if (div == 0)
759 return false; 794 return false;
760 795
761 cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem); 796 if (!wi::multiple_of_p (val, div, SIGNED, &cst))
762 if (!double_int_zero_p (rem))
763 return false; 797 return false;
764 798
765 if (*mult_set && !double_int_equal_p (*mult, cst)) 799 if (*mult_set && *mult != cst)
766 return false; 800 return false;
767 801
768 *mult_set = true; 802 *mult_set = true;
769 *mult = cst; 803 *mult = cst;
770 return true; 804 return true;
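
Note: a plain long long sketch of the logic above, with no wide-int overflow
handling: VAL must equal CST * DIV for some CST that agrees with any
previously recorded multiplier.

#include <stdbool.h>

static bool
constant_multiple_sketch (long long val, long long div,
                          bool *mult_set, long long *mult)
{
  if (val == 0)
    {
      /* 0 is a multiple of anything, with factor 0.  */
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0 || val % div != 0)
    return false;

  long long cst = val / div;
  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}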
773 /* Returns true if VAL = X * DIV for some constant X. If this is the case, 807 /* Returns true if VAL = X * DIV for some constant X. If this is the case,
774 X is stored to MULT. */ 808 X is stored to MULT. */
775 809
776 bool 810 bool
777 aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div, 811 aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
778 double_int *mult) 812 widest_int *mult)
779 { 813 {
780 bool mult_set = false; 814 bool mult_set = false;
781 unsigned i; 815 unsigned i;
782 816
783 if (val->n == 0 && double_int_zero_p (val->offset)) 817 if (val->n == 0 && val->offset == 0)
784 { 818 {
785 *mult = double_int_zero; 819 *mult = 0;
786 return true; 820 return true;
787 } 821 }
788 if (val->n != div->n) 822 if (val->n != div->n)
789 return false; 823 return false;
790 824
791 if (val->rest || div->rest) 825 if (val->rest || div->rest)
792 return false; 826 return false;
793 827
794 if (!double_int_constant_multiple_p (val->offset, div->offset, 828 if (!wide_int_constant_multiple_p (val->offset, div->offset,
795 &mult_set, mult)) 829 &mult_set, mult))
796 return false; 830 return false;
797 831
798 for (i = 0; i < div->n; i++) 832 for (i = 0; i < div->n; i++)
799 { 833 {
800 struct aff_comb_elt *elt 834 struct aff_comb_elt *elt
801 = aff_combination_find_elt (val, div->elts[i].val, NULL); 835 = aff_combination_find_elt (val, div->elts[i].val, NULL);
802 if (!elt) 836 if (!elt)
803 return false; 837 return false;
804 if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef, 838 if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
805 &mult_set, mult)) 839 &mult_set, mult))
806 return false; 840 return false;
807 } 841 }
808 842
809 gcc_assert (mult_set); 843 gcc_assert (mult_set);
810 return true; 844 return true;
811 } 845 }
812 846
813 /* Prints the affine VAL to the FILE. */ 847 /* Prints the affine VAL to the FILE. */
814 848
815 void 849 static void
816 print_aff (FILE *file, aff_tree *val) 850 print_aff (FILE *file, aff_tree *val)
817 { 851 {
818 unsigned i; 852 unsigned i;
819 bool uns = TYPE_UNSIGNED (val->type); 853 signop sgn = TYPE_SIGN (val->type);
820 if (POINTER_TYPE_P (val->type)) 854 if (POINTER_TYPE_P (val->type))
821 uns = false; 855 sgn = SIGNED;
822 fprintf (file, "{\n type = "); 856 fprintf (file, "{\n type = ");
823 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS); 857 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
824 fprintf (file, "\n offset = "); 858 fprintf (file, "\n offset = ");
825 dump_double_int (file, val->offset, uns); 859 print_dec (val->offset, file, sgn);
826 if (val->n > 0) 860 if (val->n > 0)
827 { 861 {
828 fprintf (file, "\n elements = {\n"); 862 fprintf (file, "\n elements = {\n");
829 for (i = 0; i < val->n; i++) 863 for (i = 0; i < val->n; i++)
830 { 864 {
831 fprintf (file, " [%d] = ", i); 865 fprintf (file, " [%d] = ", i);
832 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS); 866 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
833 867
834 fprintf (file, " * "); 868 fprintf (file, " * ");
835 dump_double_int (file, val->elts[i].coef, uns); 869 print_dec (val->elts[i].coef, file, sgn);
836 if (i != val->n - 1) 870 if (i != val->n - 1)
837 fprintf (file, ", \n"); 871 fprintf (file, ", \n");
838 } 872 }
839 fprintf (file, "\n }"); 873 fprintf (file, "\n }");
840 } 874 }
853 { 887 {
854 print_aff (stderr, val); 888 print_aff (stderr, val);
855 fprintf (stderr, "\n"); 889 fprintf (stderr, "\n");
856 } 890 }
857 891
858 /* Returns address of the reference REF in ADDR. The size of the accessed 892 /* Computes address of the reference REF in ADDR. The size of the accessed
859 location is stored to SIZE. */ 893 location is stored to SIZE. Returns the ultimate containing object to
860 894 which REF refers. */
861 void 895
862 get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size) 896 tree
897 get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
863 { 898 {
864 HOST_WIDE_INT bitsize, bitpos; 899 HOST_WIDE_INT bitsize, bitpos;
865 tree toff; 900 tree toff;
866 enum machine_mode mode; 901 machine_mode mode;
867 int uns, vol; 902 int uns, rev, vol;
868 aff_tree tmp; 903 aff_tree tmp;
869 tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode, 904 tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
870 &uns, &vol, false); 905 &uns, &rev, &vol);
871 tree base_addr = build_fold_addr_expr (base); 906 tree base_addr = build_fold_addr_expr (base);
872 907
873 /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */ 908 /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
874 909
875 tree_to_aff_combination (base_addr, sizetype, addr); 910 tree_to_aff_combination (base_addr, sizetype, addr);
878 { 913 {
879 tree_to_aff_combination (toff, sizetype, &tmp); 914 tree_to_aff_combination (toff, sizetype, &tmp);
880 aff_combination_add (addr, &tmp); 915 aff_combination_add (addr, &tmp);
881 } 916 }
882 917
883 aff_combination_const (&tmp, sizetype, 918 aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
884 shwi_to_double_int (bitpos / BITS_PER_UNIT));
885 aff_combination_add (addr, &tmp); 919 aff_combination_add (addr, &tmp);
886 920
887 *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT); 921 *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
888 } 922
889 923 return base;
924 }
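
Note: the *size computation above is a ceiling division from bits to bytes,
so any partially covered byte counts. Sketch, with 8 standing in for GCC's
BITS_PER_UNIT:

static long long
bits_to_bytes (long long bitsize)
{
  return (bitsize + 8 - 1) / 8;
}

/* bits_to_bytes (1) == 1, bits_to_bytes (8) == 1, bits_to_bytes (9) == 2.  */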
925
926 /* Returns true if a region of size SIZE1 at position 0 and a region of
927 size SIZE2 at position DIFF cannot overlap. */
928
929 bool
930 aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
931 const widest_int &size2)
932 {
933 /* Unless the difference is a constant, we fail. */
934 if (diff->n != 0)
935 return false;
936
937 if (wi::neg_p (diff->offset))
938 {
939 /* The second object is before the first one, we succeed if the last
940 element of the second object is before the start of the first one. */
941 return wi::neg_p (diff->offset + size2 - 1);
942 }
943 else
944 {
945 /* We succeed if the second object starts after the first one ends. */
946 return size1 <= diff->offset;
947 }
948 }
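
Note: in plain integers, the test above says regions A = [0, size1) and
B = [diff, diff + size2) are disjoint exactly when B ends before 0 or starts
at or after size1. A mirror of the control flow:

#include <stdbool.h>

static bool
cannot_overlap_sketch (long long diff, long long size1, long long size2)
{
  if (diff < 0)
    return diff + size2 - 1 < 0;   /* B lies entirely before A */
  return size1 <= diff;            /* B starts at or after A's end */
}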
949