145
|
1 /* intrinsics.cc -- D language compiler intrinsics.
|
|
2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
|
|
3
|
|
4 GCC is free software; you can redistribute it and/or modify
|
|
5 it under the terms of the GNU General Public License as published by
|
|
6 the Free Software Foundation; either version 3, or (at your option)
|
|
7 any later version.
|
|
8
|
|
9 GCC is distributed in the hope that it will be useful,
|
|
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
12 GNU General Public License for more details.
|
|
13
|
|
14 You should have received a copy of the GNU General Public License
|
|
15 along with GCC; see the file COPYING3. If not see
|
|
16 <http://www.gnu.org/licenses/>. */
|
|
17
|
|
18 #include "config.h"
|
|
19 #include "system.h"
|
|
20 #include "coretypes.h"
|
|
21
|
|
22 #include "dmd/declaration.h"
|
|
23 #include "dmd/identifier.h"
|
|
24 #include "dmd/mangle.h"
|
|
25 #include "dmd/mangle.h"
|
|
26 #include "dmd/module.h"
|
|
27 #include "dmd/template.h"
|
|
28
|
|
29 #include "tm.h"
|
|
30 #include "function.h"
|
|
31 #include "tree.h"
|
|
32 #include "fold-const.h"
|
|
33 #include "stringpool.h"
|
|
34 #include "builtins.h"
|
|
35
|
|
36 #include "d-tree.h"
|
|
37
|
|
38
|
|
/* An internal struct used to hold information on D intrinsics.  One entry
   exists per intrinsic, generated from intrinsics.def, and is matched by
   name, module, and mangled signature in maybe_set_intrinsic.  */

struct intrinsic_decl
{
  /* The DECL_FUNCTION_CODE of this decl.  */
  intrinsic_code code;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};
|
|
58
|
|
/* Table of all D intrinsics recognised by the compiler, expanded from
   intrinsics.def.  Indexed by intrinsic_code; unnamed entries are gaps
   that maybe_set_intrinsic skips.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, ALIAS, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## ALIAS, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
|
|
68
|
|
69 /* Checks if DECL is an intrinsic or run time library function that requires
|
|
70 special processing. Sets DECL_INTRINSIC_CODE so it can be identified
|
|
71 later in maybe_expand_intrinsic. */
|
|
72
|
|
73 void
|
|
74 maybe_set_intrinsic (FuncDeclaration *decl)
|
|
75 {
|
|
76 if (!decl->ident || decl->builtin != BUILTINunknown)
|
|
77 return;
|
|
78
|
|
79 /* The builtin flag is updated only if we can evaluate the intrinsic
|
|
80 at compile-time. Such as the math or bitop intrinsics. */
|
|
81 decl->builtin = BUILTINno;
|
|
82
|
|
83 /* Check if it's a compiler intrinsic. We only require that any
|
|
84 internally recognised intrinsics are declared in a module with
|
|
85 an explicit module declaration. */
|
|
86 Module *m = decl->getModule ();
|
|
87
|
|
88 if (!m || !m->md)
|
|
89 return;
|
|
90
|
|
91 TemplateInstance *ti = decl->isInstantiated ();
|
|
92 TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;
|
|
93
|
|
94 const char *tname = decl->ident->toChars ();
|
|
95 const char *tmodule = m->md->toChars ();
|
|
96 const char *tdeco = (td == NULL) ? decl->type->deco : NULL;
|
|
97
|
|
98 /* Look through all D intrinsics. */
|
|
99 for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
|
|
100 {
|
|
101 if (!intrinsic_decls[i].name)
|
|
102 continue;
|
|
103
|
|
104 if (strcmp (intrinsic_decls[i].name, tname) != 0
|
|
105 || strcmp (intrinsic_decls[i].module, tmodule) != 0)
|
|
106 continue;
|
|
107
|
|
108 /* Instantiated functions would have the wrong type deco, get it from the
|
|
109 template member instead. */
|
|
110 if (tdeco == NULL)
|
|
111 {
|
|
112 if (!td || !td->onemember)
|
|
113 return;
|
|
114
|
|
115 FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
|
|
116 if (fd == NULL)
|
|
117 return;
|
|
118
|
|
119 OutBuffer buf;
|
|
120 mangleToBuffer (fd->type, &buf);
|
|
121 tdeco = buf.extractString ();
|
|
122 }
|
|
123
|
|
124 /* Matching the type deco may be a bit too strict, as it means that all
|
|
125 function attributes that end up in the signature must be kept aligned
|
|
126 between the compiler and library declaration. */
|
|
127 if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
|
|
128 {
|
|
129 intrinsic_code code = intrinsic_decls[i].code;
|
|
130
|
|
131 if (decl->csym == NULL)
|
|
132 get_symbol_decl (decl);
|
|
133
|
|
134 /* If there is no function body, then the implementation is always
|
|
135 provided by the compiler. */
|
|
136 if (!decl->fbody)
|
|
137 set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);
|
|
138
|
|
139 /* Infer whether the intrinsic can be used for CTFE, let the
|
|
140 front-end know that it can be evaluated at compile-time. */
|
|
141 switch (code)
|
|
142 {
|
|
143 case INTRINSIC_VA_ARG:
|
|
144 case INTRINSIC_C_VA_ARG:
|
|
145 case INTRINSIC_VASTART:
|
|
146 case INTRINSIC_ADDS:
|
|
147 case INTRINSIC_SUBS:
|
|
148 case INTRINSIC_MULS:
|
|
149 case INTRINSIC_NEGS:
|
|
150 case INTRINSIC_VLOAD:
|
|
151 case INTRINSIC_VSTORE:
|
|
152 break;
|
|
153
|
|
154 case INTRINSIC_POW:
|
|
155 {
|
|
156 /* Check that this overload of pow() is has an equivalent
|
|
157 built-in function. It could be `int pow(int, int)'. */
|
|
158 tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
|
|
159 if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
|
|
160 decl->builtin = BUILTINyes;
|
|
161 break;
|
|
162 }
|
|
163
|
|
164 default:
|
|
165 decl->builtin = BUILTINyes;
|
|
166 break;
|
|
167 }
|
|
168
|
|
169 /* The intrinsic was marked as CTFE-only. */
|
|
170 if (intrinsic_decls[i].ctfeonly)
|
|
171 DECL_BUILT_IN_CTFE (decl->csym) = 1;
|
|
172
|
|
173 DECL_INTRINSIC_CODE (decl->csym) = code;
|
|
174 break;
|
|
175 }
|
|
176 }
|
|
177 }
|
|
178
|
|
179 /* Construct a function call to the built-in function CODE, N is the number of
|
|
180 arguments, and the `...' parameters are the argument expressions.
|
|
181 The original call expression is held in CALLEXP. */
|
|
182
|
|
183 static tree
|
|
184 call_builtin_fn (tree callexp, built_in_function code, int n, ...)
|
|
185 {
|
|
186 tree *argarray = XALLOCAVEC (tree, n);
|
|
187 va_list ap;
|
|
188
|
|
189 va_start (ap, n);
|
|
190 for (int i = 0; i < n; i++)
|
|
191 argarray[i] = va_arg (ap, tree);
|
|
192 va_end (ap);
|
|
193
|
|
194 tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
|
|
195 builtin_decl_explicit (code),
|
|
196 n, argarray);
|
|
197 return convert (TREE_TYPE (callexp), fold (exp));
|
|
198 }
|
|
199
|
|
200 /* Expand a front-end instrinsic call to bsf(). This takes one argument,
|
|
201 the signature to which can be either:
|
|
202
|
|
203 int bsf (uint arg);
|
|
204 int bsf (ulong arg);
|
|
205
|
|
206 This scans all bits in the given argument starting with the first,
|
|
207 returning the bit number of the first bit set. The original call
|
|
208 expression is held in CALLEXP. */
|
|
209
|
|
210 static tree
|
|
211 expand_intrinsic_bsf (tree callexp)
|
|
212 {
|
|
213 /* The bsr() intrinsic gets turned into __builtin_ctz(arg).
|
|
214 The return value is supposed to be undefined if arg is zero. */
|
|
215 tree arg = CALL_EXPR_ARG (callexp, 0);
|
|
216 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
|
|
217
|
|
218 /* Which variant of __builtin_ctz* should we call? */
|
|
219 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
|
|
220 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
|
|
221 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
|
|
222 : END_BUILTINS;
|
|
223
|
|
224 gcc_assert (code != END_BUILTINS);
|
|
225
|
|
226 return call_builtin_fn (callexp, code, 1, arg);
|
|
227 }
|
|
228
|
|
229 /* Expand a front-end instrinsic call to bsr(). This takes one argument,
|
|
230 the signature to which can be either:
|
|
231
|
|
232 int bsr (uint arg);
|
|
233 int bsr (ulong arg);
|
|
234
|
|
235 This scans all bits in the given argument from the most significant bit
|
|
236 to the least significant, returning the bit number of the first bit set.
|
|
237 The original call expression is held in CALLEXP. */
|
|
238
|
|
239 static tree
|
|
240 expand_intrinsic_bsr (tree callexp)
|
|
241 {
|
|
242 /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
|
|
243 The return value is supposed to be undefined if arg is zero. */
|
|
244 tree arg = CALL_EXPR_ARG (callexp, 0);
|
|
245 tree type = TREE_TYPE (arg);
|
|
246 int argsize = TYPE_PRECISION (type);
|
|
247
|
|
248 /* Which variant of __builtin_clz* should we call? */
|
|
249 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
|
|
250 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
|
|
251 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
|
|
252 : END_BUILTINS;
|
|
253
|
|
254 gcc_assert (code != END_BUILTINS);
|
|
255
|
|
256 tree result = call_builtin_fn (callexp, code, 1, arg);
|
|
257
|
|
258 /* Handle int -> long conversions. */
|
|
259 if (TREE_TYPE (result) != type)
|
|
260 result = fold_convert (type, result);
|
|
261
|
|
262 result = fold_build2 (MINUS_EXPR, type,
|
|
263 build_integer_cst (argsize - 1, type), result);
|
|
264 return fold_convert (TREE_TYPE (callexp), result);
|
|
265 }
|
|
266
|
|
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit.  `btc' complements the bit, `bts'
   sets the bit, and `btr' resets the bit.  The original call expression is
   held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  /* The word type being indexed into; bits are addressed relative to it.  */
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);
     After this, BITNUM holds the mask, not the bit number.  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  if (intrinsic == INTRINSIC_BT)
    return cond;

  /* Map each updating variant to the bitwise operation it performs.  */
  tree_code code = (intrinsic == INTRINSIC_BTC) ? BIT_XOR_EXPR
    : (intrinsic == INTRINSIC_BTR) ? BIT_AND_EXPR
    : (intrinsic == INTRINSIC_BTS) ? BIT_IOR_EXPR
    : ERROR_MARK;
  gcc_assert (code != ERROR_MARK);

  /* ptr[bitnum / size] op= mask;
     btr() clears the bit, so it ANDs with the inverted mask.  */
  if (intrinsic == INTRINSIC_BTR)
    bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);

  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation: test the bit before updating it.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
|
|
327
|
|
328 /* Expand a front-end intrinsic call to bswap(). This takes one argument, the
|
|
329 signature to which can be either:
|
|
330
|
|
331 int bswap (uint arg);
|
|
332 int bswap (ulong arg);
|
|
333
|
|
334 This swaps all bytes in an N byte type end-to-end. The original call
|
|
335 expression is held in CALLEXP. */
|
|
336
|
|
337 static tree
|
|
338 expand_intrinsic_bswap (tree callexp)
|
|
339 {
|
|
340 tree arg = CALL_EXPR_ARG (callexp, 0);
|
|
341 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
|
|
342
|
|
343 /* Which variant of __builtin_bswap* should we call? */
|
|
344 built_in_function code = (argsize == 32) ? BUILT_IN_BSWAP32
|
|
345 : (argsize == 64) ? BUILT_IN_BSWAP64
|
|
346 : END_BUILTINS;
|
|
347
|
|
348 gcc_assert (code != END_BUILTINS);
|
|
349
|
|
350 return call_builtin_fn (callexp, code, 1, arg);
|
|
351 }
|
|
352
|
|
353 /* Expand a front-end intrinsic call to popcnt(). This takes one argument, the
|
|
354 signature to which can be either:
|
|
355
|
|
356 int popcnt (uint arg);
|
|
357 int popcnt (ulong arg);
|
|
358
|
|
359 Calculates the number of set bits in an integer. The original call
|
|
360 expression is held in CALLEXP. */
|
|
361
|
|
362 static tree
|
|
363 expand_intrinsic_popcnt (tree callexp)
|
|
364 {
|
|
365 tree arg = CALL_EXPR_ARG (callexp, 0);
|
|
366 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
|
|
367
|
|
368 /* Which variant of __builtin_popcount* should we call? */
|
|
369 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
|
|
370 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
|
|
371 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
|
|
372 : END_BUILTINS;
|
|
373
|
|
374 gcc_assert (code != END_BUILTINS);
|
|
375
|
|
376 return call_builtin_fn (callexp, code, 1, arg);
|
|
377 }
|
|
378
|
|
379 /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
|
|
380 sqrt(), sqrtf(), sqrtl(). These intrinsics expect to take one argument,
|
|
381 the signature to which can be either:
|
|
382
|
|
383 float sqrt (float arg);
|
|
384 double sqrt (double arg);
|
|
385 real sqrt (real arg);
|
|
386
|
|
387 This computes the square root of the given argument. The original call
|
|
388 expression is held in CALLEXP. */
|
|
389
|
|
390 static tree
|
|
391 expand_intrinsic_sqrt (intrinsic_code intrinsic, tree callexp)
|
|
392 {
|
|
393 tree arg = CALL_EXPR_ARG (callexp, 0);
|
|
394
|
|
395 /* Which variant of __builtin_sqrt* should we call? */
|
|
396 built_in_function code = (intrinsic == INTRINSIC_SQRT) ? BUILT_IN_SQRT
|
|
397 : (intrinsic == INTRINSIC_SQRTF) ? BUILT_IN_SQRTF
|
|
398 : (intrinsic == INTRINSIC_SQRTL) ? BUILT_IN_SQRTL
|
|
399 : END_BUILTINS;
|
|
400
|
|
401 gcc_assert (code != END_BUILTINS);
|
|
402 return call_builtin_fn (callexp, code, 1, arg);
|
|
403 }
|
|
404
|
|
405 /* Expand a front-end intrinsic call to copysign(). This takes two arguments,
|
|
406 the signature to which can be either:
|
|
407
|
|
408 float copysign (T to, float from);
|
|
409 double copysign (T to, double from);
|
|
410 real copysign (T to, real from);
|
|
411
|
|
412 This computes a value composed of TO with the sign bit of FROM. The original
|
|
413 call expression is held in CALLEXP. */
|
|
414
|
|
415 static tree
|
|
416 expand_intrinsic_copysign (tree callexp)
|
|
417 {
|
|
418 tree to = CALL_EXPR_ARG (callexp, 0);
|
|
419 tree from = CALL_EXPR_ARG (callexp, 1);
|
|
420 tree type = TREE_TYPE (to);
|
|
421
|
|
422 /* Convert parameters to the same type. Prefer the first parameter unless it
|
|
423 is an integral type. */
|
|
424 if (INTEGRAL_TYPE_P (type))
|
|
425 {
|
|
426 to = fold_convert (TREE_TYPE (from), to);
|
|
427 type = TREE_TYPE (to);
|
|
428 }
|
|
429 else
|
|
430 from = fold_convert (type, from);
|
|
431
|
|
432 /* Which variant of __builtin_copysign* should we call? */
|
|
433 tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN);
|
|
434 gcc_assert (builtin != NULL_TREE);
|
|
435
|
|
436 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
|
|
437 to, from);
|
|
438 }
|
|
439
|
|
440 /* Expand a front-end intrinsic call to pow(). This takes two arguments, the
|
|
441 signature to which can be either:
|
|
442
|
|
443 float pow (float base, T exponent);
|
|
444 double pow (double base, T exponent);
|
|
445 real pow (real base, T exponent);
|
|
446
|
|
447 This computes the value of BASE raised to the power of EXPONENT.
|
|
448 The original call expression is held in CALLEXP. */
|
|
449
|
|
450 static tree
|
|
451 expand_intrinsic_pow (tree callexp)
|
|
452 {
|
|
453 tree base = CALL_EXPR_ARG (callexp, 0);
|
|
454 tree exponent = CALL_EXPR_ARG (callexp, 1);
|
|
455 tree exptype = TREE_TYPE (exponent);
|
|
456
|
|
457 /* Which variant of __builtin_pow* should we call? */
|
|
458 built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
|
|
459 : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
|
|
460 : END_BUILTINS;
|
|
461 gcc_assert (code != END_BUILTINS);
|
|
462
|
|
463 tree builtin = mathfn_built_in (TREE_TYPE (base), code);
|
|
464 gcc_assert (builtin != NULL_TREE);
|
|
465
|
|
466 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
|
|
467 base, exponent);
|
|
468 }
|
|
469
|
|
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* In the one-argument form the requested type T is the call's own return
     type; in the two-argument form it is the type of the out parameter.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);
      /* The `ref T parmn' argument arrives as an address; peel the
	 ADDR_EXPR to get at the underlying variable.  */
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
|
|
509
|
|
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR && TREE_CODE (parmn) == ADDR_EXPR);

  /* Assuming nobody tries to change the return type.  */
  parmn = TREE_OPERAND (parmn, 0);

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
|
|
537
|
|
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;

  /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
  if (intrinsic == INTRINSIC_NEGS)
    {
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
    }
  else
    {
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
    }

  /* Which variant of *_OVERFLOW should we generate?  */
  internal_fn icode = (intrinsic == INTRINSIC_ADDS) ? IFN_ADD_OVERFLOW
    : (intrinsic == INTRINSIC_SUBS) ? IFN_SUB_OVERFLOW
    : (intrinsic == INTRINSIC_MULS) ? IFN_MUL_OVERFLOW
    : (intrinsic == INTRINSIC_NEGS) ? IFN_SUB_OVERFLOW
    : IFN_LAST;
  gcc_assert (icode != IFN_LAST);

  /* The internal function returns a complex value: the real part is the
     arithmetic result, the imaginary part is the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  /* save_expr ensures RESULT is evaluated once even though it is used for
     both the overflow flag and the returned value below.  */
  result = save_expr (result);

  /* Sticky semantics: overflow |= (new overflow flag).  */
  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
|
|
601
|
|
602 /* Expand a front-end instrinsic call to volatileLoad(). This takes one
|
|
603 argument, the signature to which can be either:
|
|
604
|
|
605 ubyte volatileLoad (ubyte* ptr);
|
|
606 ushort volatileLoad (ushort* ptr);
|
|
607 uint volatileLoad (uint* ptr);
|
|
608 ulong volatileLoad (ulong* ptr);
|
|
609
|
|
610 This reads a value from the memory location indicated by ptr. Calls to
|
|
611 them are be guaranteed to not be removed (such as during DCE) or reordered
|
|
612 in the same thread. The original call expression is held in CALLEXP. */
|
|
613
|
|
614 static tree
|
|
615 expand_volatile_load (tree callexp)
|
|
616 {
|
|
617 tree ptr = CALL_EXPR_ARG (callexp, 0);
|
|
618 tree ptrtype = TREE_TYPE (ptr);
|
|
619 gcc_assert (POINTER_TYPE_P (ptrtype));
|
|
620
|
|
621 /* (T) *(volatile T *) ptr; */
|
|
622 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
|
|
623 tree result = indirect_ref (type, ptr);
|
|
624 TREE_THIS_VOLATILE (result) = 1;
|
|
625
|
|
626 return result;
|
|
627 }
|
|
628
|
|
629 /* Expand a front-end instrinsic call to volatileStore(). This takes two
|
|
630 arguments, the signature to which can be either:
|
|
631
|
|
632 void volatileStore (ubyte* ptr, ubyte value);
|
|
633 void volatileStore (ushort* ptr, ushort value);
|
|
634 void volatileStore (uint* ptr, uint value);
|
|
635 void volatileStore (ulong* ptr, ulong value);
|
|
636
|
|
637 This writes a value to the memory location indicated by ptr. Calls to
|
|
638 them are be guaranteed to not be removed (such as during DCE) or reordered
|
|
639 in the same thread. The original call expression is held in CALLEXP. */
|
|
640
|
|
641 static tree
|
|
642 expand_volatile_store (tree callexp)
|
|
643 {
|
|
644 tree ptr = CALL_EXPR_ARG (callexp, 0);
|
|
645 tree ptrtype = TREE_TYPE (ptr);
|
|
646 gcc_assert (POINTER_TYPE_P (ptrtype));
|
|
647
|
|
648 /* (T) *(volatile T *) ptr; */
|
|
649 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
|
|
650 tree result = indirect_ref (type, ptr);
|
|
651 TREE_THIS_VOLATILE (result) = 1;
|
|
652
|
|
653 /* (*(volatile T *) ptr) = value; */
|
|
654 tree value = CALL_EXPR_ARG (callexp, 1);
|
|
655 return modify_expr (result, value);
|
|
656 }
|
|
657
|
|
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  /* Peel off the address-of so we can inspect the FUNCTION_DECL itself.  */
  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  /* Indirect calls can never be intrinsics; return them unchanged.  */
  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  /* Dispatch on the intrinsic code set earlier by maybe_set_intrinsic.
     The math intrinsics map to the `long double' builtin variants, as the
     D declarations operate on `real'.  */
  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BTC:
    case INTRINSIC_BTR:
    case INTRINSIC_BTS:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_BSWAP:
      return expand_intrinsic_bswap (callexp);

    case INTRINSIC_POPCNT:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_COS:
      return call_builtin_fn (callexp, BUILT_IN_COSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SIN:
      return call_builtin_fn (callexp, BUILT_IN_SINL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RNDTOL:
      /* Not sure if llroundl stands as a good replacement for the
	 expected behavior of rndtol.  */
      return call_builtin_fn (callexp, BUILT_IN_LLROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
      return expand_intrinsic_sqrt (intrinsic, callexp);

    case INTRINSIC_LDEXP:
      return call_builtin_fn (callexp, BUILT_IN_LDEXPL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FABS:
      return call_builtin_fn (callexp, BUILT_IN_FABSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RINT:
      return call_builtin_fn (callexp, BUILT_IN_RINTL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TAN:
      return call_builtin_fn (callexp, BUILT_IN_TANL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISNAN:
      return call_builtin_fn (callexp, BUILT_IN_ISNAN, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISINFINITY:
      return call_builtin_fn (callexp, BUILT_IN_ISINF, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISFINITE:
      return call_builtin_fn (callexp, BUILT_IN_ISFINITE, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP:
      return call_builtin_fn (callexp, BUILT_IN_EXPL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXPM1:
      return call_builtin_fn (callexp, BUILT_IN_EXPM1L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP2:
      return call_builtin_fn (callexp, BUILT_IN_EXP2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG:
      return call_builtin_fn (callexp, BUILT_IN_LOGL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG2:
      return call_builtin_fn (callexp, BUILT_IN_LOG2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG10:
      return call_builtin_fn (callexp, BUILT_IN_LOG10L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ROUND:
      return call_builtin_fn (callexp, BUILT_IN_ROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORL:
      /* floor() has per-precision overloads; pick the matching builtin.  */
      code = (intrinsic == INTRINSIC_FLOOR) ? BUILT_IN_FLOOR
	: (intrinsic == INTRINSIC_FLOORF) ? BUILT_IN_FLOORF
	: BUILT_IN_FLOORL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_CEILF:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILL:
      /* Likewise for ceil().  */
      code = (intrinsic == INTRINSIC_CEIL) ? BUILT_IN_CEIL
	: (intrinsic == INTRINSIC_CEILF) ? BUILT_IN_CEILF
	: BUILT_IN_CEILL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TRUNC:
      return call_builtin_fn (callexp, BUILT_IN_TRUNCL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMIN:
      return call_builtin_fn (callexp, BUILT_IN_FMINL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMAX:
      return call_builtin_fn (callexp, BUILT_IN_FMAXL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_COPYSIGN:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_FMA:
      return call_builtin_fn (callexp, BUILT_IN_FMAL, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_SUBS:
    case INTRINSIC_MULS:
    case INTRINSIC_NEGS:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE:
      return expand_volatile_store (callexp);

    default:
      /* Any other code should have been handled above or never set.  */
      gcc_unreachable ();
    }
}
|