Mercurial > hg > CbC > CbC_gcc
annotate gcc/tree-affine.c @ 120:f93fa5091070
fix conv1.c
author | mir3636 |
---|---|
date | Thu, 08 Mar 2018 14:53:42 +0900 |
parents | 04ced10e8804 |
children | 84e7813d76e9 |
rev | line source |
---|---|
0 | 1 /* Operations with affine combinations of trees. |
111 | 2 Copyright (C) 2005-2017 Free Software Foundation, Inc. |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
3 |
0 | 4 This file is part of GCC. |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
5 |
0 | 6 GCC is free software; you can redistribute it and/or modify it |
7 under the terms of the GNU General Public License as published by the | |
8 Free Software Foundation; either version 3, or (at your option) any | |
9 later version. | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
10 |
0 | 11 GCC is distributed in the hope that it will be useful, but WITHOUT |
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 for more details. | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
15 |
0 | 16 You should have received a copy of the GNU General Public License |
17 along with GCC; see the file COPYING3. If not see | |
18 <http://www.gnu.org/licenses/>. */ | |
19 | |
20 #include "config.h" | |
21 #include "system.h" | |
22 #include "coretypes.h" | |
111 | 23 #include "backend.h" |
24 #include "rtl.h" | |
0 | 25 #include "tree.h" |
111 | 26 #include "gimple.h" |
27 #include "ssa.h" | |
63
b7f97abdc517
update gcc from gcc-4.5.0 to gcc-4.6
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
55
diff
changeset
|
28 #include "tree-pretty-print.h" |
111 | 29 #include "fold-const.h" |
0 | 30 #include "tree-affine.h" |
111 | 31 #include "gimplify.h" |
32 #include "dumpfile.h" | |
33 #include "cfgexpand.h" | |
0 | 34 |
35 /* Extends CST as appropriate for the affine combinations COMB. */ | |
36 | |
widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  /* Sign-extend CST from the precision of TYPE; keeps all arithmetic on
     combination coefficients canonical in that precision.  */
  return wi::sext (cst, TYPE_PRECISION (type));
}
42 | |
43 /* Initializes affine combination COMB so that its value is zero in TYPE. */ | |
44 | |
45 static void | |
46 aff_combination_zero (aff_tree *comb, tree type) | |
47 { | |
111 | 48 int i; |
0 | 49 comb->type = type; |
111 | 50 comb->offset = 0; |
0 | 51 comb->n = 0; |
111 | 52 for (i = 0; i < MAX_AFF_ELTS; i++) |
53 comb->elts[i].coef = 0; | |
0 | 54 comb->rest = NULL_TREE; |
55 } | |
56 | |
57 /* Sets COMB to CST. */ | |
58 | |
59 void | |
111 | 60 aff_combination_const (aff_tree *comb, tree type, const widest_int &cst) |
0 | 61 { |
62 aff_combination_zero (comb, type); | |
111 | 63 comb->offset = wide_int_ext_for_comb (cst, comb->type);; |
0 | 64 } |
65 | |
66 /* Sets COMB to single element ELT. */ | |
67 | |
68 void | |
69 aff_combination_elt (aff_tree *comb, tree type, tree elt) | |
70 { | |
71 aff_combination_zero (comb, type); | |
72 | |
73 comb->n = 1; | |
74 comb->elts[0].val = elt; | |
111 | 75 comb->elts[0].coef = 1; |
0 | 76 } |
77 | |
78 /* Scales COMB by SCALE. */ | |
79 | |
void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  /* Canonicalize the scale in COMB's precision first.  */
  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  /* Scaling by zero collapses the whole combination to zero.  */
  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  /* Scale each element's coefficient, compacting the array in place
     (J trails I).  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      /* Arithmetic on the residual tree is done in sizetype for
	 pointer combinations.  */
      if (POINTER_TYPE_P (type))
	type = sizetype;
      /* If compaction freed a slot, promote the residual into it with
	 coefficient SCALE; otherwise fold the multiplication into the
	 residual tree itself.  */
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
127 | |
128 /* Adds ELT * SCALE to COMB. */ | |
129 | |
void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  /* Adding with coefficient zero is a no-op.  */
  if (scale == 0)
    return;

  /* If ELT is already present, just merge the coefficients.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients cancelled: delete the element by moving the
	   last one into its slot.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* A freed slot lets us promote the residual tree back into the
	   element array with coefficient one.  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT is new; append it if there is room.  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* No room left: fold SCALE * ELT into the residual tree, using
     sizetype for pointer combinations.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
189 | |
190 /* Adds CST to C. */ | |
191 | |
192 static void | |
111 | 193 aff_combination_add_cst (aff_tree *c, const widest_int &cst) |
0 | 194 { |
111 | 195 c->offset = wide_int_ext_for_comb (c->offset + cst, c->type); |
0 | 196 } |
197 | |
198 /* Adds COMB2 to COMB1. */ | |
199 | |
200 void | |
201 aff_combination_add (aff_tree *comb1, aff_tree *comb2) | |
202 { | |
203 unsigned i; | |
204 | |
205 aff_combination_add_cst (comb1, comb2->offset); | |
206 for (i = 0; i < comb2->n; i++) | |
207 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef); | |
208 if (comb2->rest) | |
111 | 209 aff_combination_add_elt (comb1, comb2->rest, 1); |
0 | 210 } |
211 | |
212 /* Converts affine combination COMB to TYPE. */ | |
213 | |
void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done element-wise (sign/zero extension of a sum
     is not the sum of extensions), so rebuild via a tree.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: only the type tag needed updating.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and each coefficient in the new
     precision, dropping elements whose coefficient becomes zero.  */
  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* If dropping elements freed a slot, promote the residual tree into
     the element array with coefficient one.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
253 | |
254 /* Splits EXPR into an affine combination of parts. */ | |
255 | |
void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* Pointer and offset operands are split separately; the offset is
	 always processed in sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* a - b is handled as a + (-1) * b.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by a constant distributes over the
	 combination.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the reference into base object, bit position and
	 variable offset, then rebuild those parts as a combination.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* Give up on bit-field addresses that are not byte-aligned.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  /* Fold the MEM_REF's constant offset into the combination and
	     continue from its address operand.  */
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* Zero-offset MEM_REF: treat the whole reference as an opaque
	     element.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Use a zero-offset copy of the MEM_REF as the element and add
	   the real offset separately below.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      break;
    }

  /* Fallback: EXPR is not decomposable; keep it as a single opaque
     element with coefficient one.  */
  aff_combination_elt (comb, type, expr);
}
431 | |
432 /* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine | |
433 combination COMB. */ | |
434 | |
static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  /* Coefficient one: just add (or start with) ELT.  */
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  /* Coefficient minus one: negate or subtract ELT.  */
  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  /* Prefer EXPR - |SCALE| * ELT over EXPR + (-|SCALE|) * ELT for
     readability of the generated tree.  */
  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
473 | |
474 /* Makes tree from the affine combination COMB. */ | |
475 | |
tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;

  /* A residual tree is only allowed when the element array is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      /* A pointer-typed first element with coefficient one becomes the
	 base of a POINTER_PLUS; the remaining terms form the offset.  */
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
522 | |
523 /* Copies the tree elements of COMB to ensure that they are not shared. */ | |
524 | |
525 void | |
526 unshare_aff_combination (aff_tree *comb) | |
527 { | |
528 unsigned i; | |
529 | |
530 for (i = 0; i < comb->n; i++) | |
531 comb->elts[i].val = unshare_expr (comb->elts[i].val); | |
532 if (comb->rest) | |
533 comb->rest = unshare_expr (comb->rest); | |
534 } | |
535 | |
536 /* Remove M-th element from COMB. */ | |
537 | |
538 void | |
539 aff_combination_remove_elt (aff_tree *comb, unsigned m) | |
540 { | |
541 comb->n--; | |
542 if (m <= comb->n) | |
543 comb->elts[m] = comb->elts[comb->n]; | |
544 if (comb->rest) | |
545 { | |
111 | 546 comb->elts[comb->n].coef = 1; |
0 | 547 comb->elts[comb->n].val = comb->rest; |
548 comb->rest = NULL_TREE; | |
549 comb->n++; | |
550 } | |
551 } | |
552 | |
553 /* Adds C * COEF * VAL to R. VAL may be NULL, in that case only | |
554 C * COEF is added to R. */ | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
555 |
0 | 556 |
static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Distribute COEF * VAL over every element of C.  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  /* The residual tree participates with coefficient one, so its
     contribution is COEF * (rest * VAL).  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  /* The constant offset contributes COEF * offset * VAL, or a plain
     constant when VAL is NULL.  */
  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
595 | |
596 /* Multiplies C1 by C2, storing the result to R */ | |
597 | |
598 void | |
599 aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r) | |
600 { | |
601 unsigned i; | |
602 gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type)); | |
603 | |
604 aff_combination_zero (r, c1->type); | |
605 | |
606 for (i = 0; i < c2->n; i++) | |
607 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r); | |
608 if (c2->rest) | |
111 | 609 aff_combination_add_product (c1, 1, c2->rest, r); |
0 | 610 aff_combination_add_product (c1, c2->offset, NULL, r); |
611 } | |
612 | |
613 /* Returns the element of COMB whose value is VAL, or NULL if no such | |
614 element exists. If IDX is not NULL, it is set to the index of VAL in | |
615 COMB. */ | |
55
77e2b8dfacca
update it from 4.4.3 to 4.5.0
ryoma <e075725@ie.u-ryukyu.ac.jp>
parents:
0
diff
changeset
|
616 |
0 | 617 static struct aff_comb_elt * |
618 aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx) | |
619 { | |
620 unsigned i; | |
621 | |
622 for (i = 0; i < comb->n; i++) | |
623 if (operand_equal_p (comb->elts[i].val, val, 0)) | |
624 { | |
625 if (idx) | |
626 *idx = i; | |
627 | |
628 return &comb->elts[i]; | |
629 } | |
630 | |
631 return NULL; | |
632 } | |
633 | |
634 /* Element of the cache that maps ssa name NAME to its expanded form | |
635 as an affine expression EXPANSION. */ | |
636 | |
struct name_expansion
{
  /* The affine combination the cached SSA name expands to.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; checked
     (via gcc_assert in aff_combination_expand) to catch cycles.  */
  unsigned in_progress : 1;
};
644 | |
645 /* Expands SSA names in COMB recursively. CACHE is used to cache the | |
646 results. */ | |
647 | |
void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      /* Only SSA names with a single-assignment definition can be
	 expanded further.  */
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      /* Lazily create the cache, then look up (or reserve a slot for)
	 the expansion of E.  */
      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  /* Mark the entry in-progress before recursing so a cycle
	     would be detected below.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  rhs = gimple_assign_rhs_to_tree (def);
	  if (e != name)
	    rhs = fold_convert (type, rhs);

	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
729 | |
730 /* Similar to tree_to_aff_combination, but follows SSA name definitions | |
731 and expands them recursively. CACHE is used to cache the expansions | |
732 of the ssa names, to avoid exponential time complexity for cases | |
733 like | |
734 | |
735 a1 = a0 + a0; | |
736 a2 = a1 + a1; | |
737 a3 = a2 + a2; | |
738 ... */ | |
739 | |
740 void | |
741 tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb, | |
111 | 742 hash_map<tree, name_expansion *> **cache) |
0 | 743 { |
744 tree_to_aff_combination (expr, type, comb); | |
745 aff_combination_expand (comb, cache); | |
746 } | |
747 | |
748 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for | |
111 | 749 hash_map::traverse. */ |
0 | 750 |
bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  /* Release one cache entry; returning true keeps hash_map::traverse
     going over the remaining entries.  */
  free (*value);
  return true;
}
757 | |
758 /* Frees memory allocated for the CACHE used by | |
759 tree_to_aff_combination_expand. */ | |
760 | |
761 void | |
111 | 762 free_affine_expand_cache (hash_map<tree, name_expansion *> **cache) |
0 | 763 { |
764 if (!*cache) | |
765 return; | |
766 | |
111 | 767 (*cache)->traverse<void *, free_name_expansion> (NULL); |
768 delete (*cache); | |
0 | 769 *cache = NULL; |
770 } | |
771 | |
772 /* If VAL != CST * DIV for any constant CST, returns false. | |
111 | 773 Otherwise, if *MULT_SET is true, additionally compares CST and MULT, |
774 and if they are different, returns false. Finally, if neither of these | |
775 two cases occur, true is returned, and CST is stored to MULT and MULT_SET | |
776 is set to true. */ | |
0 | 777 |
778 static bool | |
111 | 779 wide_int_constant_multiple_p (const widest_int &val, const widest_int &div, |
780 bool *mult_set, widest_int *mult) | |
0 | 781 { |
111 | 782 widest_int rem, cst; |
0 | 783 |
111 | 784 if (val == 0) |
785 { | |
786 if (*mult_set && *mult != 0) | |
787 return false; | |
788 *mult_set = true; | |
789 *mult = 0; | |
790 return true; | |
791 } | |
0 | 792 |
111 | 793 if (div == 0) |
0 | 794 return false; |
795 | |
111 | 796 if (!wi::multiple_of_p (val, div, SIGNED, &cst)) |
0 | 797 return false; |
798 | |
111 | 799 if (*mult_set && *mult != cst) |
0 | 800 return false; |
801 | |
802 *mult_set = true; | |
803 *mult = cst; | |
804 return true; | |
805 } | |
806 | |
807 /* Returns true if VAL = X * DIV for some constant X. If this is the case, | |
808 X is stored to MULT. */ | |
809 | |
bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* The zero combination is a multiple of anything, with factor zero.  */
  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  /* Residual trees are opaque; we cannot prove a constant ratio.  */
  if (val->rest || div->rest)
    return false;

  /* The offsets and every matching pair of coefficients must share one
     common factor, accumulated into MULT.  */
  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
846 | |
847 /* Prints the affine VAL to the FILE. */ | |
848 | |
static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  /* Pointer offsets are printed as signed quantities.  */
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
882 | |
883 /* Prints the affine VAL to the standard error, used for debugging. */ | |
884 | |
67
f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
parents:
63
diff
changeset
|
885 DEBUG_FUNCTION void |
0 | 886 debug_aff (aff_tree *val) |
887 { | |
888 print_aff (stderr, val); | |
889 fprintf (stderr, "\n"); | |
890 } | |
891 | |
111 | 892 /* Computes address of the reference REF in ADDR. The size of the accessed |
893 location is stored to SIZE. Returns the ultimate containing object to | |
894 which REF refers. */ | |
0 | 895 |
tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  /* Size in bytes, rounding any partial byte of a bit-field up.  */
  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
925 | |
111 | 926 /* Returns true if a region of size SIZE1 at position 0 and a region of |
927 size SIZE2 at position DIFF cannot overlap. */ | |
928 | |
bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1 <= diff->offset;
    }
}
949 |