Mercurial > hg > CbC > CbC_gcc
comparison gcc/ada/gcc-interface/utils.c @ 145:1830386684a0
gcc-9.2.0
author | anatofuz |
---|---|
date | Thu, 13 Feb 2020 11:34:05 +0900 |
parents | 84e7813d76e9 |
children |
comparison
equal
deleted
inserted
replaced
131:84e7813d76e9 | 145:1830386684a0 |
---|---|
4 * * | 4 * * |
5 * U T I L S * | 5 * U T I L S * |
6 * * | 6 * * |
7 * C Implementation File * | 7 * C Implementation File * |
8 * * | 8 * * |
9 * Copyright (C) 1992-2018, Free Software Foundation, Inc. * | 9 * Copyright (C) 1992-2019, Free Software Foundation, Inc. * |
10 * * | 10 * * |
11 * GNAT is free software; you can redistribute it and/or modify it under * | 11 * GNAT is free software; you can redistribute it and/or modify it under * |
12 * terms of the GNU General Public License as published by the Free Soft- * | 12 * terms of the GNU General Public License as published by the Free Soft- * |
13 * ware Foundation; either version 3, or (at your option) any later ver- * | 13 * ware Foundation; either version 3, or (at your option) any later ver- * |
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- * | 14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- * |
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *); | 88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *); |
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *); | 89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *); |
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *); | 90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *); |
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *); | 91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *); |
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *); | 92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *); |
93 static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *); | |
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *); | 94 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *); |
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *); | 95 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *); |
96 static tree handle_noicf_attribute (tree *, tree, tree, int, bool *); | |
97 static tree handle_noipa_attribute (tree *, tree, tree, int, bool *); | |
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *); | 98 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *); |
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *); | 99 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *); |
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *); | 100 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *); |
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *); | 101 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *); |
102 static tree handle_flatten_attribute (tree *, tree, tree, int, bool *); | |
103 static tree handle_used_attribute (tree *, tree, tree, int, bool *); | |
104 static tree handle_cold_attribute (tree *, tree, tree, int, bool *); | |
105 static tree handle_hot_attribute (tree *, tree, tree, int, bool *); | |
106 static tree handle_target_attribute (tree *, tree, tree, int, bool *); | |
107 static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *); | |
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *); | 108 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *); |
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *); | 109 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *); |
110 | |
111 static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] = | |
112 { | |
113 { "cold", true, true, true }, | |
114 { "hot" , true, true, true }, | |
115 { NULL , false, false, false } | |
116 }; | |
101 | 117 |
102 /* Fake handler for attributes we don't properly support, typically because | 118 /* Fake handler for attributes we don't properly support, typically because |
103 they'd require dragging a lot of the common-c front-end circuitry. */ | 119 they'd require dragging a lot of the common-c front-end circuitry. */ |
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *); | 120 static tree fake_attribute_handler (tree *, tree, tree, int, bool *); |
105 | 121 |
121 handle_nonnull_attribute, NULL }, | 137 handle_nonnull_attribute, NULL }, |
122 { "sentinel", 0, 1, false, true, true, false, | 138 { "sentinel", 0, 1, false, true, true, false, |
123 handle_sentinel_attribute, NULL }, | 139 handle_sentinel_attribute, NULL }, |
124 { "noreturn", 0, 0, true, false, false, false, | 140 { "noreturn", 0, 0, true, false, false, false, |
125 handle_noreturn_attribute, NULL }, | 141 handle_noreturn_attribute, NULL }, |
142 { "stack_protect",0, 0, true, false, false, false, | |
143 handle_stack_protect_attribute, NULL }, | |
126 { "noinline", 0, 0, true, false, false, false, | 144 { "noinline", 0, 0, true, false, false, false, |
127 handle_noinline_attribute, NULL }, | 145 handle_noinline_attribute, NULL }, |
128 { "noclone", 0, 0, true, false, false, false, | 146 { "noclone", 0, 0, true, false, false, false, |
129 handle_noclone_attribute, NULL }, | 147 handle_noclone_attribute, NULL }, |
148 { "no_icf", 0, 0, true, false, false, false, | |
149 handle_noicf_attribute, NULL }, | |
150 { "noipa", 0, 0, true, false, false, false, | |
151 handle_noipa_attribute, NULL }, | |
130 { "leaf", 0, 0, true, false, false, false, | 152 { "leaf", 0, 0, true, false, false, false, |
131 handle_leaf_attribute, NULL }, | 153 handle_leaf_attribute, NULL }, |
132 { "always_inline",0, 0, true, false, false, false, | 154 { "always_inline",0, 0, true, false, false, false, |
133 handle_always_inline_attribute, NULL }, | 155 handle_always_inline_attribute, NULL }, |
134 { "malloc", 0, 0, true, false, false, false, | 156 { "malloc", 0, 0, true, false, false, false, |
135 handle_malloc_attribute, NULL }, | 157 handle_malloc_attribute, NULL }, |
136 { "type generic", 0, 0, false, true, true, false, | 158 { "type generic", 0, 0, false, true, true, false, |
137 handle_type_generic_attribute, NULL }, | 159 handle_type_generic_attribute, NULL }, |
138 | 160 |
139 { "vector_size", 1, 1, false, true, false, false, | 161 { "flatten", 0, 0, true, false, false, false, |
162 handle_flatten_attribute, NULL }, | |
163 { "used", 0, 0, true, false, false, false, | |
164 handle_used_attribute, NULL }, | |
165 { "cold", 0, 0, true, false, false, false, | |
166 handle_cold_attribute, attr_cold_hot_exclusions }, | |
167 { "hot", 0, 0, true, false, false, false, | |
168 handle_hot_attribute, attr_cold_hot_exclusions }, | |
169 { "target", 1, -1, true, false, false, false, | |
170 handle_target_attribute, NULL }, | |
171 { "target_clones",1, -1, true, false, false, false, | |
172 handle_target_clones_attribute, NULL }, | |
173 | |
174 { "vector_size", 1, 1, false, true, false, false, | |
140 handle_vector_size_attribute, NULL }, | 175 handle_vector_size_attribute, NULL }, |
141 { "vector_type", 0, 0, false, true, false, false, | 176 { "vector_type", 0, 0, false, true, false, false, |
142 handle_vector_type_attribute, NULL }, | 177 handle_vector_type_attribute, NULL }, |
143 { "may_alias", 0, 0, false, true, false, false, NULL, NULL }, | 178 { "may_alias", 0, 0, false, true, false, false, |
179 NULL, NULL }, | |
144 | 180 |
145 /* ??? format and format_arg are heavy and not supported, which actually | 181 /* ??? format and format_arg are heavy and not supported, which actually |
146 prevents support for stdio builtins, which we however declare as part | 182 prevents support for stdio builtins, which we however declare as part |
147 of the common builtins.def contents. */ | 183 of the common builtins.def contents. */ |
148 { "format", 3, 3, false, true, true, false, fake_attribute_handler, | 184 { "format", 3, 3, false, true, true, false, |
149 NULL }, | 185 fake_attribute_handler, NULL }, |
150 { "format_arg", 1, 1, false, true, true, false, fake_attribute_handler, | 186 { "format_arg", 1, 1, false, true, true, false, |
151 NULL }, | 187 fake_attribute_handler, NULL }, |
152 | 188 |
153 { NULL, 0, 0, false, false, false, false, NULL, NULL } | 189 { NULL, 0, 0, false, false, false, false, |
190 NULL, NULL } | |
154 }; | 191 }; |
155 | 192 |
156 /* Associates a GNAT tree node to a GCC tree node. It is used in | 193 /* Associates a GNAT tree node to a GCC tree node. It is used in |
157 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation | 194 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation |
158 of `save_gnu_tree' for more info. */ | 195 of `save_gnu_tree' for more info. */ |
249 static tree compute_related_constant (tree, tree); | 286 static tree compute_related_constant (tree, tree); |
250 static tree split_plus (tree, tree *); | 287 static tree split_plus (tree, tree *); |
251 static tree float_type_for_precision (int, machine_mode); | 288 static tree float_type_for_precision (int, machine_mode); |
252 static tree convert_to_fat_pointer (tree, tree); | 289 static tree convert_to_fat_pointer (tree, tree); |
253 static unsigned int scale_by_factor_of (tree, unsigned int); | 290 static unsigned int scale_by_factor_of (tree, unsigned int); |
254 static bool potential_alignment_gap (tree, tree, tree); | |
255 | 291 |
256 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT | 292 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT |
257 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */ | 293 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */ |
258 struct deferred_decl_context_node | 294 struct deferred_decl_context_node |
259 { | 295 { |
945 create_type_decl (name, record_type, true, false, gnat_node); | 981 create_type_decl (name, record_type, true, false, gnat_node); |
946 | 982 |
947 return record_type; | 983 return record_type; |
948 } | 984 } |
949 | 985 |
986 /* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed | |
987 record. See if we can rewrite it as a type that has non-BLKmode, which we | |
988 can pack tighter in the packed record. If so, return the new type; if not, | |
989 return the original type. */ | |
990 | |
991 static tree | |
992 make_packable_array_type (tree type) | |
993 { | |
994 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type)); | |
995 unsigned HOST_WIDE_INT new_size; | |
996 unsigned int new_align; | |
997 | |
998 /* No point in doing anything if the size is either zero or too large for an | |
999 integral mode, or if the type already has non-BLKmode. */ | |
1000 if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode) | |
1001 return type; | |
1002 | |
1003 /* Punt if the component type is an aggregate type for now. */ | |
1004 if (AGGREGATE_TYPE_P (TREE_TYPE (type))) | |
1005 return type; | |
1006 | |
1007 tree new_type = copy_type (type); | |
1008 | |
1009 new_size = ceil_pow2 (size); | |
1010 new_align = MIN (new_size, BIGGEST_ALIGNMENT); | |
1011 SET_TYPE_ALIGN (new_type, new_align); | |
1012 | |
1013 TYPE_SIZE (new_type) = bitsize_int (new_size); | |
1014 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT); | |
1015 | |
1016 SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ()); | |
1017 | |
1018 return new_type; | |
1019 } | |
1020 | |
950 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used | 1021 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used |
951 as the field type of a packed record if IN_RECORD is true, or as the | 1022 as the type of a field in a packed record if IN_RECORD is true, or as |
952 component type of a packed array if IN_RECORD is false. See if we can | 1023 the component type of a packed array if IN_RECORD is false. See if we |
953 rewrite it either as a type that has non-BLKmode, which we can pack | 1024 can rewrite it either as a type that has non-BLKmode, which we can pack |
954 tighter in the packed record case, or as a smaller type with at most | 1025 tighter in the packed record case, or as a smaller type with at most |
955 MAX_ALIGN alignment if the value is non-zero. If so, return the new | 1026 MAX_ALIGN alignment if the value is non-zero. If so, return the new |
956 type; if not, return the original type. */ | 1027 type; if not, return the original type. */ |
957 | 1028 |
958 tree | 1029 tree |
959 make_packable_type (tree type, bool in_record, unsigned int max_align) | 1030 make_packable_type (tree type, bool in_record, unsigned int max_align) |
960 { | 1031 { |
961 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type)); | 1032 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type)); |
1033 const unsigned int align = TYPE_ALIGN (type); | |
962 unsigned HOST_WIDE_INT new_size; | 1034 unsigned HOST_WIDE_INT new_size; |
963 unsigned int align = TYPE_ALIGN (type); | |
964 unsigned int new_align; | 1035 unsigned int new_align; |
965 | 1036 |
966 /* No point in doing anything if the size is zero. */ | 1037 /* No point in doing anything if the size is zero. */ |
967 if (size == 0) | 1038 if (size == 0) |
968 return type; | 1039 return type; |
971 | 1042 |
972 /* Copy the name and flags from the old type to that of the new. | 1043 /* Copy the name and flags from the old type to that of the new. |
973 Note that we rely on the pointer equality created here for | 1044 Note that we rely on the pointer equality created here for |
974 TYPE_NAME to look through conversions in various places. */ | 1045 TYPE_NAME to look through conversions in various places. */ |
975 TYPE_NAME (new_type) = TYPE_NAME (type); | 1046 TYPE_NAME (new_type) = TYPE_NAME (type); |
1047 TYPE_PACKED (new_type) = 1; | |
976 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type); | 1048 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type); |
977 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type); | 1049 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type); |
978 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type); | 1050 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type); |
979 if (TREE_CODE (type) == RECORD_TYPE) | 1051 if (TREE_CODE (type) == RECORD_TYPE) |
980 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type); | 1052 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type); |
1016 to change the layout by propagating the packedness downwards. */ | 1088 to change the layout by propagating the packedness downwards. */ |
1017 tree new_field_list = NULL_TREE; | 1089 tree new_field_list = NULL_TREE; |
1018 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | 1090 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
1019 { | 1091 { |
1020 tree new_field_type = TREE_TYPE (field); | 1092 tree new_field_type = TREE_TYPE (field); |
1021 tree new_field, new_size; | 1093 tree new_field, new_field_size; |
1022 | 1094 |
1023 if (RECORD_OR_UNION_TYPE_P (new_field_type) | 1095 if (AGGREGATE_TYPE_P (new_field_type) |
1024 && !TYPE_FAT_POINTER_P (new_field_type) | |
1025 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type))) | 1096 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type))) |
1026 new_field_type = make_packable_type (new_field_type, true, max_align); | 1097 { |
1098 if (RECORD_OR_UNION_TYPE_P (new_field_type) | |
1099 && !TYPE_FAT_POINTER_P (new_field_type)) | |
1100 new_field_type | |
1101 = make_packable_type (new_field_type, true, max_align); | |
1102 else if (in_record | |
1103 && max_align > 0 | |
1104 && max_align < BITS_PER_UNIT | |
1105 && TREE_CODE (new_field_type) == ARRAY_TYPE) | |
1106 new_field_type = make_packable_array_type (new_field_type); | |
1107 } | |
1027 | 1108 |
1028 /* However, for the last field in a not already packed record type | 1109 /* However, for the last field in a not already packed record type |
1029 that is of an aggregate type, we need to use the RM size in the | 1110 that is of an aggregate type, we need to use the RM size in the |
1030 packable version of the record type, see finish_record_type. */ | 1111 packable version of the record type, see finish_record_type. */ |
1031 if (!DECL_CHAIN (field) | 1112 if (!DECL_CHAIN (field) |
1032 && !TYPE_PACKED (type) | 1113 && !TYPE_PACKED (type) |
1033 && RECORD_OR_UNION_TYPE_P (new_field_type) | 1114 && RECORD_OR_UNION_TYPE_P (new_field_type) |
1034 && !TYPE_FAT_POINTER_P (new_field_type) | 1115 && !TYPE_FAT_POINTER_P (new_field_type) |
1035 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type) | 1116 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type) |
1036 && TYPE_ADA_SIZE (new_field_type)) | 1117 && TYPE_ADA_SIZE (new_field_type)) |
1037 new_size = TYPE_ADA_SIZE (new_field_type); | 1118 new_field_size = TYPE_ADA_SIZE (new_field_type); |
1038 else | 1119 else |
1039 new_size = DECL_SIZE (field); | 1120 new_field_size = DECL_SIZE (field); |
1040 | 1121 |
1122 /* This is a layout with full representation, alignment and size clauses | |
1123 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case. */ | |
1041 new_field | 1124 new_field |
1042 = create_field_decl (DECL_NAME (field), new_field_type, new_type, | 1125 = create_field_decl (DECL_NAME (field), new_field_type, new_type, |
1043 new_size, bit_position (field), | 1126 new_field_size, bit_position (field), 0, |
1044 TYPE_PACKED (type), | |
1045 !DECL_NONADDRESSABLE_P (field)); | 1127 !DECL_NONADDRESSABLE_P (field)); |
1046 | 1128 |
1047 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field); | 1129 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field); |
1048 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field); | 1130 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field); |
1049 if (TREE_CODE (new_type) == QUAL_UNION_TYPE) | 1131 if (TREE_CODE (new_type) == QUAL_UNION_TYPE) |
1370 ??? This might not always be a win when done for a stand-alone object: | 1452 ??? This might not always be a win when done for a stand-alone object: |
1371 since the nominal and the effective type of the object will now have | 1453 since the nominal and the effective type of the object will now have |
1372 different modes, a VIEW_CONVERT_EXPR will be required for converting | 1454 different modes, a VIEW_CONVERT_EXPR will be required for converting |
1373 between them and it might be hard to overcome afterwards, including | 1455 between them and it might be hard to overcome afterwards, including |
1374 at the RTL level when the stand-alone object is accessed as a whole. */ | 1456 at the RTL level when the stand-alone object is accessed as a whole. */ |
1375 if (align != 0 | 1457 if (align > 0 |
1376 && RECORD_OR_UNION_TYPE_P (type) | 1458 && RECORD_OR_UNION_TYPE_P (type) |
1377 && TYPE_MODE (type) == BLKmode | 1459 && TYPE_MODE (type) == BLKmode |
1378 && !TYPE_BY_REFERENCE_P (type) | 1460 && !TYPE_BY_REFERENCE_P (type) |
1379 && TREE_CODE (orig_size) == INTEGER_CST | 1461 && TREE_CODE (orig_size) == INTEGER_CST |
1380 && !TREE_OVERFLOW (orig_size) | 1462 && !TREE_OVERFLOW (orig_size) |
1381 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0 | 1463 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0 |
1382 && (!size | 1464 && (!size |
1383 || (TREE_CODE (size) == INTEGER_CST | 1465 || (TREE_CODE (size) == INTEGER_CST |
1384 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0))) | 1466 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0))) |
1385 { | 1467 { |
1386 tree packable_type = make_packable_type (type, true); | 1468 tree packable_type = make_packable_type (type, true, align); |
1387 if (TYPE_MODE (packable_type) != BLKmode | 1469 if (TYPE_MODE (packable_type) != BLKmode |
1388 && align >= TYPE_ALIGN (packable_type)) | 1470 && align >= TYPE_ALIGN (packable_type)) |
1389 type = packable_type; | 1471 type = packable_type; |
1390 } | 1472 } |
1391 | 1473 |
1526 /* Generate message only for entities that come from source, since | 1608 /* Generate message only for entities that come from source, since |
1527 if we have an entity created by expansion, the message will be | 1609 if we have an entity created by expansion, the message will be |
1528 generated for some other corresponding source entity. */ | 1610 generated for some other corresponding source entity. */ |
1529 if (Comes_From_Source (gnat_entity)) | 1611 if (Comes_From_Source (gnat_entity)) |
1530 { | 1612 { |
1531 if (Present (gnat_error_node)) | 1613 if (is_component_type) |
1614 post_error_ne_tree ("component of& padded{ by ^ bits}?", | |
1615 gnat_entity, gnat_entity, | |
1616 size_diffop (size, orig_size)); | |
1617 else if (Present (gnat_error_node)) | |
1532 post_error_ne_tree ("{^ }bits of & unused?", | 1618 post_error_ne_tree ("{^ }bits of & unused?", |
1533 gnat_error_node, gnat_entity, | 1619 gnat_error_node, gnat_entity, |
1534 size_diffop (size, orig_size)); | |
1535 else if (is_component_type) | |
1536 post_error_ne_tree ("component of& padded{ by ^ bits}?", | |
1537 gnat_entity, gnat_entity, | |
1538 size_diffop (size, orig_size)); | 1620 size_diffop (size, orig_size)); |
1539 } | 1621 } |
1540 } | 1622 } |
1541 | 1623 |
1542 return record; | 1624 return record; |
1774 | 1856 |
1775 void | 1857 void |
1776 finish_record_type (tree record_type, tree field_list, int rep_level, | 1858 finish_record_type (tree record_type, tree field_list, int rep_level, |
1777 bool debug_info_p) | 1859 bool debug_info_p) |
1778 { | 1860 { |
1779 enum tree_code code = TREE_CODE (record_type); | 1861 const enum tree_code orig_code = TREE_CODE (record_type); |
1862 const bool had_size = TYPE_SIZE (record_type) != NULL_TREE; | |
1863 const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE; | |
1864 const bool had_align = TYPE_ALIGN (record_type) > 0; | |
1865 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE | |
1866 out just like a UNION_TYPE, since the size will be fixed. */ | |
1867 const enum tree_code code | |
1868 = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size | |
1869 ? UNION_TYPE : orig_code); | |
1780 tree name = TYPE_IDENTIFIER (record_type); | 1870 tree name = TYPE_IDENTIFIER (record_type); |
1781 tree ada_size = bitsize_zero_node; | 1871 tree ada_size = bitsize_zero_node; |
1782 tree size = bitsize_zero_node; | 1872 tree size = bitsize_zero_node; |
1783 bool had_size = TYPE_SIZE (record_type) != 0; | |
1784 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0; | |
1785 bool had_align = TYPE_ALIGN (record_type) != 0; | |
1786 tree field; | 1873 tree field; |
1787 | 1874 |
1788 TYPE_FIELDS (record_type) = field_list; | 1875 TYPE_FIELDS (record_type) = field_list; |
1789 | 1876 |
1790 /* Always attach the TYPE_STUB_DECL for a record type. It is required to | 1877 /* Always attach the TYPE_STUB_DECL for a record type. It is required to |
1793 | 1880 |
1794 /* Globally initialize the record first. If this is a rep'ed record, | 1881 /* Globally initialize the record first. If this is a rep'ed record, |
1795 that just means some initializations; otherwise, layout the record. */ | 1882 that just means some initializations; otherwise, layout the record. */ |
1796 if (rep_level > 0) | 1883 if (rep_level > 0) |
1797 { | 1884 { |
1798 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT, | 1885 if (TYPE_ALIGN (record_type) < BITS_PER_UNIT) |
1799 TYPE_ALIGN (record_type))); | 1886 SET_TYPE_ALIGN (record_type, BITS_PER_UNIT); |
1887 | |
1888 if (!had_size) | |
1889 TYPE_SIZE (record_type) = bitsize_zero_node; | |
1800 | 1890 |
1801 if (!had_size_unit) | 1891 if (!had_size_unit) |
1802 TYPE_SIZE_UNIT (record_type) = size_zero_node; | 1892 TYPE_SIZE_UNIT (record_type) = size_zero_node; |
1803 | |
1804 if (!had_size) | |
1805 TYPE_SIZE (record_type) = bitsize_zero_node; | |
1806 | |
1807 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE | |
1808 out just like a UNION_TYPE, since the size will be fixed. */ | |
1809 else if (code == QUAL_UNION_TYPE) | |
1810 code = UNION_TYPE; | |
1811 } | 1893 } |
1812 else | 1894 else |
1813 { | 1895 { |
1814 /* Ensure there isn't a size already set. There can be in an error | 1896 /* Ensure there isn't a size already set. There can be in an error |
1815 case where there is a rep clause but all fields have errors and | 1897 case where there is a rep clause but all fields have errors and |
1816 no longer have a position. */ | 1898 no longer have a position. */ |
1817 TYPE_SIZE (record_type) = 0; | 1899 TYPE_SIZE (record_type) = NULL_TREE; |
1818 | 1900 |
1819 /* Ensure we use the traditional GCC layout for bitfields when we need | 1901 /* Ensure we use the traditional GCC layout for bitfields when we need |
1820 to pack the record type or have a representation clause. The other | 1902 to pack the record type or have a representation clause. The other |
1821 possible layout (Microsoft C compiler), if available, would prevent | 1903 possible layout (Microsoft C compiler), if available, would prevent |
1822 efficient packing in almost all cases. */ | 1904 efficient packing in almost all cases. */ |
1856 && TYPE_ADA_SIZE (type)) | 1938 && TYPE_ADA_SIZE (type)) |
1857 this_ada_size = TYPE_ADA_SIZE (type); | 1939 this_ada_size = TYPE_ADA_SIZE (type); |
1858 else | 1940 else |
1859 this_ada_size = this_size; | 1941 this_ada_size = this_size; |
1860 | 1942 |
1943 const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE); | |
1944 const bool variant_part_at_zero = variant_part && integer_zerop (pos); | |
1945 | |
1861 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */ | 1946 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */ |
1862 if (DECL_BIT_FIELD (field) | 1947 if (DECL_BIT_FIELD (field) |
1863 && operand_equal_p (this_size, TYPE_SIZE (type), 0)) | 1948 && operand_equal_p (this_size, TYPE_SIZE (type), 0)) |
1864 { | 1949 { |
1865 unsigned int align = TYPE_ALIGN (type); | 1950 const unsigned int align = TYPE_ALIGN (type); |
1866 | 1951 |
1867 /* In the general case, type alignment is required. */ | 1952 /* In the general case, type alignment is required. */ |
1868 if (value_factor_p (pos, align)) | 1953 if (value_factor_p (pos, align)) |
1869 { | 1954 { |
1870 /* The enclosing record type must be sufficiently aligned. | 1955 /* The enclosing record type must be sufficiently aligned. |
1894 && DECL_BIT_FIELD (field) | 1979 && DECL_BIT_FIELD (field) |
1895 && value_factor_p (pos, BITS_PER_UNIT)) | 1980 && value_factor_p (pos, BITS_PER_UNIT)) |
1896 DECL_BIT_FIELD (field) = 0; | 1981 DECL_BIT_FIELD (field) = 0; |
1897 } | 1982 } |
1898 | 1983 |
1984 /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply | |
1985 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because | |
1986 the variant part is always the last field in the list. */ | |
1987 if (variant_part_at_zero) | |
1988 DECL_BIT_FIELD_TYPE (field) = NULL_TREE; | |
1989 | |
1899 /* If we still have DECL_BIT_FIELD set at this point, we know that the | 1990 /* If we still have DECL_BIT_FIELD set at this point, we know that the |
1900 field is technically not addressable. Except that it can actually | 1991 field is technically not addressable. Except that it can actually |
1901 be addressed if it is BLKmode and happens to be properly aligned. */ | 1992 be addressed if it is BLKmode and happens to be properly aligned. */ |
1902 if (DECL_BIT_FIELD (field) | 1993 if (DECL_BIT_FIELD (field) |
1903 && !(DECL_MODE (field) == BLKmode | 1994 && !(DECL_MODE (field) == BLKmode |
1926 break; | 2017 break; |
1927 | 2018 |
1928 case RECORD_TYPE: | 2019 case RECORD_TYPE: |
1929 /* Since we know here that all fields are sorted in order of | 2020 /* Since we know here that all fields are sorted in order of |
1930 increasing bit position, the size of the record is one | 2021 increasing bit position, the size of the record is one |
1931 higher than the ending bit of the last field processed | 2022 higher than the ending bit of the last field processed, |
1932 unless we have a rep clause, since in that case we might | 2023 unless we have a variant part at offset 0, since in this |
1933 have a field outside a QUAL_UNION_TYPE that has a higher ending | 2024 case we might have a field outside the variant part that |
1934 position. So use a MAX in that case. Also, if this field is a | 2025 has a higher ending position; so use a MAX in this case. |
1935 QUAL_UNION_TYPE, we need to take into account the previous size in | 2026 Also, if this field is a QUAL_UNION_TYPE, we need to take |
1936 the case of empty variants. */ | 2027 into account the previous size in the case of empty variants. */ |
1937 ada_size | 2028 ada_size |
1938 = merge_sizes (ada_size, pos, this_ada_size, | 2029 = merge_sizes (ada_size, pos, this_ada_size, variant_part, |
1939 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0); | 2030 variant_part_at_zero); |
1940 size | 2031 size |
1941 = merge_sizes (size, pos, this_size, | 2032 = merge_sizes (size, pos, this_size, variant_part, |
1942 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0); | 2033 variant_part_at_zero); |
1943 break; | 2034 break; |
1944 | 2035 |
1945 default: | 2036 default: |
1946 gcc_unreachable (); | 2037 gcc_unreachable (); |
1947 } | 2038 } |
2077 tree new_record_type | 2168 tree new_record_type |
2078 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE | 2169 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE |
2079 ? UNION_TYPE : TREE_CODE (record_type)); | 2170 ? UNION_TYPE : TREE_CODE (record_type)); |
2080 tree orig_name = TYPE_IDENTIFIER (record_type), new_name; | 2171 tree orig_name = TYPE_IDENTIFIER (record_type), new_name; |
2081 tree last_pos = bitsize_zero_node; | 2172 tree last_pos = bitsize_zero_node; |
2082 tree old_field, prev_old_field = NULL_TREE; | |
2083 | 2173 |
2084 new_name | 2174 new_name |
2085 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE | 2175 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE |
2086 ? "XVU" : "XVE"); | 2176 ? "XVU" : "XVE"); |
2087 TYPE_NAME (new_record_type) = new_name; | 2177 TYPE_NAME (new_record_type) = new_name; |
2095 TYPE_SIZE_UNIT (new_record_type) | 2185 TYPE_SIZE_UNIT (new_record_type) |
2096 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT); | 2186 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT); |
2097 | 2187 |
2098 /* Now scan all the fields, replacing each field with a new field | 2188 /* Now scan all the fields, replacing each field with a new field |
2099 corresponding to the new encoding. */ | 2189 corresponding to the new encoding. */ |
2100 for (old_field = TYPE_FIELDS (record_type); old_field; | 2190 for (tree old_field = TYPE_FIELDS (record_type); |
2191 old_field; | |
2101 old_field = DECL_CHAIN (old_field)) | 2192 old_field = DECL_CHAIN (old_field)) |
2102 { | 2193 { |
2103 tree field_type = TREE_TYPE (old_field); | 2194 tree field_type = TREE_TYPE (old_field); |
2104 tree field_name = DECL_NAME (old_field); | 2195 tree field_name = DECL_NAME (old_field); |
2105 tree curpos = fold_bit_position (old_field); | 2196 tree curpos = fold_bit_position (old_field); |
2119 if (TREE_CODE (new_record_type) == UNION_TYPE) | 2210 if (TREE_CODE (new_record_type) == UNION_TYPE) |
2120 pos = bitsize_zero_node; | 2211 pos = bitsize_zero_node; |
2121 else | 2212 else |
2122 pos = compute_related_constant (curpos, last_pos); | 2213 pos = compute_related_constant (curpos, last_pos); |
2123 | 2214 |
2124 if (!pos | 2215 if (pos) |
2125 && TREE_CODE (curpos) == MULT_EXPR | 2216 ; |
2126 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))) | 2217 else if (TREE_CODE (curpos) == MULT_EXPR |
2218 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))) | |
2127 { | 2219 { |
2128 tree offset = TREE_OPERAND (curpos, 0); | 2220 tree offset = TREE_OPERAND (curpos, 0); |
2129 align = tree_to_uhwi (TREE_OPERAND (curpos, 1)); | 2221 align = tree_to_uhwi (TREE_OPERAND (curpos, 1)); |
2130 align = scale_by_factor_of (offset, align); | 2222 align = scale_by_factor_of (offset, align); |
2131 last_pos = round_up (last_pos, align); | 2223 last_pos = round_up (last_pos, align); |
2132 pos = compute_related_constant (curpos, last_pos); | 2224 pos = compute_related_constant (curpos, last_pos); |
2133 } | 2225 } |
2134 else if (!pos | 2226 else if (TREE_CODE (curpos) == PLUS_EXPR |
2135 && TREE_CODE (curpos) == PLUS_EXPR | |
2136 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)) | 2227 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)) |
2137 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR | 2228 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR |
2138 && tree_fits_uhwi_p | 2229 && tree_fits_uhwi_p |
2139 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1))) | 2230 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1))) |
2140 { | 2231 { |
2146 align = scale_by_factor_of (offset, align); | 2237 align = scale_by_factor_of (offset, align); |
2147 align = MIN (align, addend & -addend); | 2238 align = MIN (align, addend & -addend); |
2148 last_pos = round_up (last_pos, align); | 2239 last_pos = round_up (last_pos, align); |
2149 pos = compute_related_constant (curpos, last_pos); | 2240 pos = compute_related_constant (curpos, last_pos); |
2150 } | 2241 } |
2151 else if (potential_alignment_gap (prev_old_field, old_field, pos)) | 2242 else |
2152 { | 2243 { |
2153 align = TYPE_ALIGN (field_type); | 2244 align = DECL_ALIGN (old_field); |
2154 last_pos = round_up (last_pos, align); | 2245 last_pos = round_up (last_pos, align); |
2155 pos = compute_related_constant (curpos, last_pos); | 2246 pos = compute_related_constant (curpos, last_pos); |
2156 } | 2247 } |
2157 | |
2158 /* If we can't compute a position, set it to zero. | |
2159 | |
2160 ??? We really should abort here, but it's too much work | |
2161 to get this correct for all cases. */ | |
2162 if (!pos) | |
2163 pos = bitsize_zero_node; | |
2164 | 2248 |
2165 /* See if this type is variable-sized and make a pointer type | 2249 /* See if this type is variable-sized and make a pointer type |
2166 and indicate the indirection if so. Beware that the debug | 2250 and indicate the indirection if so. Beware that the debug |
2167 back-end may adjust the position computed above according | 2251 back-end may adjust the position computed above according |
2168 to the alignment of the field type, i.e. the pointer type | 2252 to the alignment of the field type, i.e. the pointer type |
2169 in this case, if we don't preventively counter that. */ | 2253 in this case, if we don't preventively counter that. */ |
2170 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST) | 2254 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST) |
2171 { | 2255 { |
2172 field_type = build_pointer_type (field_type); | 2256 field_type = copy_type (build_pointer_type (field_type)); |
2173 if (align != 0 && TYPE_ALIGN (field_type) > align) | 2257 SET_TYPE_ALIGN (field_type, BITS_PER_UNIT); |
2258 var = true; | |
2259 | |
2260 /* ??? Kludge to work around a bug in Workbench's debugger. */ | |
2261 if (align == 0) | |
2174 { | 2262 { |
2175 field_type = copy_type (field_type); | 2263 align = DECL_ALIGN (old_field); |
2176 SET_TYPE_ALIGN (field_type, align); | 2264 last_pos = round_up (last_pos, align); |
2265 pos = compute_related_constant (curpos, last_pos); | |
2177 } | 2266 } |
2178 var = true; | |
2179 } | 2267 } |
2268 | |
2269 /* If we can't compute a position, set it to zero. | |
2270 | |
2271 ??? We really should abort here, but it's too much work | |
2272 to get this correct for all cases. */ | |
2273 if (!pos) | |
2274 pos = bitsize_zero_node; | |
2180 | 2275 |
2181 /* Make a new field name, if necessary. */ | 2276 /* Make a new field name, if necessary. */ |
2182 if (var || align != 0) | 2277 if (var || align != 0) |
2183 { | 2278 { |
2184 char suffix[16]; | 2279 char suffix[16]; |
2193 } | 2288 } |
2194 | 2289 |
2195 new_field | 2290 new_field |
2196 = create_field_decl (field_name, field_type, new_record_type, | 2291 = create_field_decl (field_name, field_type, new_record_type, |
2197 DECL_SIZE (old_field), pos, 0, 0); | 2292 DECL_SIZE (old_field), pos, 0, 0); |
2293 /* The specified position is not the actual position of the field | |
2294 but the gap with the previous field, so the computation of the | |
2295 bit-field status may be incorrect. We adjust it manually to | |
2296 avoid generating useless attributes for the field in DWARF. */ | |
2297 if (DECL_SIZE (old_field) == TYPE_SIZE (field_type) | |
2298 && value_factor_p (pos, BITS_PER_UNIT)) | |
2299 { | |
2300 DECL_BIT_FIELD (new_field) = 0; | |
2301 DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE; | |
2302 } | |
2198 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type); | 2303 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type); |
2199 TYPE_FIELDS (new_record_type) = new_field; | 2304 TYPE_FIELDS (new_record_type) = new_field; |
2200 | 2305 |
2201 /* If old_field is a QUAL_UNION_TYPE, take its size as being | 2306 /* If old_field is a QUAL_UNION_TYPE, take its size as being |
2202 zero. The only time it's not the last field of the record | 2307 zero. The only time it's not the last field of the record |
2206 last_pos = size_binop (PLUS_EXPR, curpos, | 2311 last_pos = size_binop (PLUS_EXPR, curpos, |
2207 (TREE_CODE (TREE_TYPE (old_field)) | 2312 (TREE_CODE (TREE_TYPE (old_field)) |
2208 == QUAL_UNION_TYPE) | 2313 == QUAL_UNION_TYPE) |
2209 ? bitsize_zero_node | 2314 ? bitsize_zero_node |
2210 : DECL_SIZE (old_field)); | 2315 : DECL_SIZE (old_field)); |
2211 prev_old_field = old_field; | |
2212 } | 2316 } |
2213 | 2317 |
2214 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type)); | 2318 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type)); |
2215 | 2319 |
2216 add_parallel_type (record_type, new_record_type); | 2320 add_parallel_type (record_type, new_record_type); |
2218 } | 2322 } |
2219 | 2323 |
2220 /* Utility function of above to merge LAST_SIZE, the previous size of a record | 2324 /* Utility function of above to merge LAST_SIZE, the previous size of a record |
2221 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this | 2325 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this |
2222 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and | 2326 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and |
2223 replace a value of zero with the old size. If HAS_REP is true, we take the | 2327 replace a value of zero with the old size. If MAX is true, we take the |
2224 MAX of the end position of this field with LAST_SIZE. In all other cases, | 2328 MAX of the end position of this field with LAST_SIZE. In all other cases, |
2225 we use FIRST_BIT plus SIZE. Return an expression for the size. */ | 2329 we use FIRST_BIT plus SIZE. Return an expression for the size. */ |
2226 | 2330 |
2227 static tree | 2331 static tree |
2228 merge_sizes (tree last_size, tree first_bit, tree size, bool special, | 2332 merge_sizes (tree last_size, tree first_bit, tree size, bool special, bool max) |
2229 bool has_rep) | |
2230 { | 2333 { |
2231 tree type = TREE_TYPE (last_size); | 2334 tree type = TREE_TYPE (last_size); |
2232 tree new_size; | 2335 tree new_size; |
2233 | 2336 |
2234 if (!special || TREE_CODE (size) != COND_EXPR) | 2337 if (!special || TREE_CODE (size) != COND_EXPR) |
2235 { | 2338 { |
2236 new_size = size_binop (PLUS_EXPR, first_bit, size); | 2339 new_size = size_binop (PLUS_EXPR, first_bit, size); |
2237 if (has_rep) | 2340 if (max) |
2238 new_size = size_binop (MAX_EXPR, last_size, new_size); | 2341 new_size = size_binop (MAX_EXPR, last_size, new_size); |
2239 } | 2342 } |
2240 | 2343 |
2241 else | 2344 else |
2242 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0), | 2345 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0), |
2243 integer_zerop (TREE_OPERAND (size, 1)) | 2346 integer_zerop (TREE_OPERAND (size, 1)) |
2244 ? last_size : merge_sizes (last_size, first_bit, | 2347 ? last_size : merge_sizes (last_size, first_bit, |
2245 TREE_OPERAND (size, 1), | 2348 TREE_OPERAND (size, 1), |
2246 1, has_rep), | 2349 1, max), |
2247 integer_zerop (TREE_OPERAND (size, 2)) | 2350 integer_zerop (TREE_OPERAND (size, 2)) |
2248 ? last_size : merge_sizes (last_size, first_bit, | 2351 ? last_size : merge_sizes (last_size, first_bit, |
2249 TREE_OPERAND (size, 2), | 2352 TREE_OPERAND (size, 2), |
2250 1, has_rep)); | 2353 1, max)); |
2251 | 2354 |
2252 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially | 2355 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially |
2253 when fed through substitute_in_expr) into thinking that a constant | 2356 when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant |
2254 size is not constant. */ | 2357 size is not constant. */ |
2255 while (TREE_CODE (new_size) == NON_LVALUE_EXPR) | 2358 while (TREE_CODE (new_size) == NON_LVALUE_EXPR) |
2256 new_size = TREE_OPERAND (new_size, 0); | 2359 new_size = TREE_OPERAND (new_size, 0); |
2257 | 2360 |
2258 return new_size; | 2361 return new_size; |
2259 } | 2362 } |
2260 | 2363 |
2364 /* Convert the size expression EXPR to TYPE and fold the result. */ | |
2365 | |
2366 static tree | |
2367 fold_convert_size (tree type, tree expr) | |
2368 { | |
2369 /* We assume that size expressions do not wrap around. */ | |
2370 if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR) | |
2371 return size_binop (TREE_CODE (expr), | |
2372 fold_convert_size (type, TREE_OPERAND (expr, 0)), | |
2373 fold_convert_size (type, TREE_OPERAND (expr, 1))); | |
2374 | |
2375 return fold_convert (type, expr); | |
2376 } | |
2377 | |
2261 /* Return the bit position of FIELD, in bits from the start of the record, | 2378 /* Return the bit position of FIELD, in bits from the start of the record, |
2262 and fold it as much as possible. This is a tree of type bitsizetype. */ | 2379 and fold it as much as possible. This is a tree of type bitsizetype. */ |
2263 | 2380 |
2264 static tree | 2381 static tree |
2265 fold_bit_position (const_tree field) | 2382 fold_bit_position (const_tree field) |
2266 { | 2383 { |
2267 tree offset = DECL_FIELD_OFFSET (field); | 2384 tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field)); |
2268 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR) | |
2269 offset = size_binop (TREE_CODE (offset), | |
2270 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)), | |
2271 fold_convert (bitsizetype, TREE_OPERAND (offset, 1))); | |
2272 else | |
2273 offset = fold_convert (bitsizetype, offset); | |
2274 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field), | 2385 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field), |
2275 size_binop (MULT_EXPR, offset, bitsize_unit_node)); | 2386 size_binop (MULT_EXPR, offset, bitsize_unit_node)); |
2276 } | 2387 } |
2277 | 2388 |
2278 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are | 2389 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are |
2415 /* Then set the actual range. */ | 2526 /* Then set the actual range. */ |
2416 SET_TYPE_RM_MIN_VALUE (range_type, min); | 2527 SET_TYPE_RM_MIN_VALUE (range_type, min); |
2417 SET_TYPE_RM_MAX_VALUE (range_type, max); | 2528 SET_TYPE_RM_MAX_VALUE (range_type, max); |
2418 | 2529 |
2419 return range_type; | 2530 return range_type; |
2531 } | |
2532 | |
2533 /* Return an extra subtype of TYPE with range MIN to MAX. */ | |
2534 | |
2535 tree | |
2536 create_extra_subtype (tree type, tree min, tree max) | |
2537 { | |
2538 const bool uns = TYPE_UNSIGNED (type); | |
2539 const unsigned prec = TYPE_PRECISION (type); | |
2540 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec); | |
2541 | |
2542 TREE_TYPE (subtype) = type; | |
2543 TYPE_EXTRA_SUBTYPE_P (subtype) = 1; | |
2544 | |
2545 SET_TYPE_RM_MIN_VALUE (subtype, min); | |
2546 SET_TYPE_RM_MAX_VALUE (subtype, max); | |
2547 | |
2548 return subtype; | |
2420 } | 2549 } |
2421 | 2550 |
2422 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE. | 2551 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE. |
2423 NAME gives the name of the type to be used in the declaration. */ | 2552 NAME gives the name of the type to be used in the declaration. */ |
2424 | 2553 |
2620 && TREE_CODE (var_decl) == VAR_DECL | 2749 && TREE_CODE (var_decl) == VAR_DECL |
2621 && TREE_PUBLIC (var_decl) | 2750 && TREE_PUBLIC (var_decl) |
2622 && !have_global_bss_p ()) | 2751 && !have_global_bss_p ()) |
2623 DECL_COMMON (var_decl) = 1; | 2752 DECL_COMMON (var_decl) = 1; |
2624 | 2753 |
2625 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled, | 2754 /* Do not emit debug info if not requested, or for an external constant whose |
2626 since we will create an associated variable. Likewise for an external | 2755 initializer is not absolute because this would require a global relocation |
2627 constant whose initializer is not absolute, because this would mean a | 2756 in a read-only section which runs afoul of the PE-COFF run-time relocation |
2628 global relocation in a read-only section which runs afoul of the PE-COFF | 2757 mechanism. */ |
2629 run-time relocation mechanism. */ | |
2630 if (!debug_info_p | 2758 if (!debug_info_p |
2631 || (TREE_CODE (var_decl) == CONST_DECL && !optimize) | |
2632 || (extern_flag | 2759 || (extern_flag |
2633 && constant_p | 2760 && constant_p |
2634 && init | 2761 && init |
2635 && initializer_constant_valid_p (init, TREE_TYPE (init)) | 2762 && initializer_constant_valid_p (init, TREE_TYPE (init)) |
2636 != null_pointer_node)) | 2763 != null_pointer_node)) |
2653 } | 2780 } |
2654 | 2781 |
2655 return var_decl; | 2782 return var_decl; |
2656 } | 2783 } |
2657 | 2784 |
2658 /* Return true if TYPE, an aggregate type, contains (or is) an array. */ | 2785 /* Return true if TYPE, an aggregate type, contains (or is) an array. |
2659 | 2786 If SELF_REFERENTIAL is true, then an additional requirement on the |
2660 static bool | 2787 array is that it be self-referential. */ |
2661 aggregate_type_contains_array_p (tree type) | 2788 |
2789 bool | |
2790 aggregate_type_contains_array_p (tree type, bool self_referential) | |
2662 { | 2791 { |
2663 switch (TREE_CODE (type)) | 2792 switch (TREE_CODE (type)) |
2664 { | 2793 { |
2665 case RECORD_TYPE: | 2794 case RECORD_TYPE: |
2666 case UNION_TYPE: | 2795 case UNION_TYPE: |
2667 case QUAL_UNION_TYPE: | 2796 case QUAL_UNION_TYPE: |
2668 { | 2797 { |
2669 tree field; | 2798 tree field; |
2670 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | 2799 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
2671 if (AGGREGATE_TYPE_P (TREE_TYPE (field)) | 2800 if (AGGREGATE_TYPE_P (TREE_TYPE (field)) |
2672 && aggregate_type_contains_array_p (TREE_TYPE (field))) | 2801 && aggregate_type_contains_array_p (TREE_TYPE (field), |
2802 self_referential)) | |
2673 return true; | 2803 return true; |
2674 return false; | 2804 return false; |
2675 } | 2805 } |
2676 | 2806 |
2677 case ARRAY_TYPE: | 2807 case ARRAY_TYPE: |
2678 return true; | 2808 return self_referential ? type_contains_placeholder_p (type) : true; |
2679 | 2809 |
2680 default: | 2810 default: |
2681 gcc_unreachable (); | 2811 gcc_unreachable (); |
2682 } | 2812 } |
2683 } | 2813 } |
2696 { | 2826 { |
2697 tree field_decl = build_decl (input_location, FIELD_DECL, name, type); | 2827 tree field_decl = build_decl (input_location, FIELD_DECL, name, type); |
2698 | 2828 |
2699 DECL_CONTEXT (field_decl) = record_type; | 2829 DECL_CONTEXT (field_decl) = record_type; |
2700 TREE_READONLY (field_decl) = TYPE_READONLY (type); | 2830 TREE_READONLY (field_decl) = TYPE_READONLY (type); |
2831 | |
2832 /* If a size is specified, use it. Otherwise, if the record type is packed | |
2833 compute a size to use, which may differ from the object's natural size. | |
2834 We always set a size in this case to trigger the checks for bitfield | |
2835 creation below, which is typically required when no position has been | |
2836 specified. */ | |
2837 if (size) | |
2838 size = convert (bitsizetype, size); | |
2839 else if (packed == 1) | |
2840 { | |
2841 size = rm_size (type); | |
2842 if (TYPE_MODE (type) == BLKmode) | |
2843 size = round_up (size, BITS_PER_UNIT); | |
2844 } | |
2845 | |
2846 /* If we may, according to ADDRESSABLE, then make a bitfield when the size | |
2847 is specified for two reasons: first, when it differs from the natural | |
2848 size; second, when the alignment is insufficient. | |
2849 | |
2850 We never make a bitfield if the type of the field has a nonconstant size, | |
2851 because no such entity requiring bitfield operations should reach here. | |
2852 | |
2853 We do *preventively* make a bitfield when there might be the need for it | |
2854 but we don't have all the necessary information to decide, as is the case | |
2855 of a field in a packed record. | |
2856 | |
2857 We also don't look at STRICT_ALIGNMENT here, and rely on later processing | |
2858 in layout_decl or finish_record_type to clear the bit_field indication if | |
2859 it is in fact not needed. */ | |
2860 if (addressable >= 0 | |
2861 && size | |
2862 && TREE_CODE (size) == INTEGER_CST | |
2863 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | |
2864 && (packed | |
2865 || !tree_int_cst_equal (size, TYPE_SIZE (type)) | |
2866 || (pos && !value_factor_p (pos, TYPE_ALIGN (type))) | |
2867 || (TYPE_ALIGN (record_type) | |
2868 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type)))) | |
2869 { | |
2870 DECL_BIT_FIELD (field_decl) = 1; | |
2871 DECL_SIZE (field_decl) = size; | |
2872 if (!packed && !pos) | |
2873 { | |
2874 if (TYPE_ALIGN (record_type) | |
2875 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type)) | |
2876 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type)); | |
2877 else | |
2878 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type)); | |
2879 } | |
2880 } | |
2881 | |
2882 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed; | |
2701 | 2883 |
2702 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a | 2884 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a |
2703 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. | 2885 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. |
2704 Likewise for an aggregate without specified position that contains an | 2886 Likewise for an aggregate without specified position that contains an |
2705 array, because in this case slices of variable length of this array | 2887 array, because in this case slices of variable length of this array |
2706 must be handled by GCC and variable-sized objects need to be aligned | 2888 must be handled by GCC and variable-sized objects need to be aligned |
2707 to at least a byte boundary. */ | 2889 to at least a byte boundary. */ |
2708 if (packed && (TYPE_MODE (type) == BLKmode | 2890 if (packed && (TYPE_MODE (type) == BLKmode |
2709 || (!pos | 2891 || (!pos |
2710 && AGGREGATE_TYPE_P (type) | 2892 && AGGREGATE_TYPE_P (type) |
2711 && aggregate_type_contains_array_p (type)))) | 2893 && aggregate_type_contains_array_p (type, false)))) |
2712 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT); | 2894 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT); |
2713 | 2895 |
2714 /* If a size is specified, use it. Otherwise, if the record type is packed | |
2715 compute a size to use, which may differ from the object's natural size. | |
2716 We always set a size in this case to trigger the checks for bitfield | |
2717 creation below, which is typically required when no position has been | |
2718 specified. */ | |
2719 if (size) | |
2720 size = convert (bitsizetype, size); | |
2721 else if (packed == 1) | |
2722 { | |
2723 size = rm_size (type); | |
2724 if (TYPE_MODE (type) == BLKmode) | |
2725 size = round_up (size, BITS_PER_UNIT); | |
2726 } | |
2727 | |
2728 /* If we may, according to ADDRESSABLE, make a bitfield if a size is | |
2729 specified for two reasons: first if the size differs from the natural | |
2730 size. Second, if the alignment is insufficient. There are a number of | |
2731 ways the latter can be true. | |
2732 | |
2733 We never make a bitfield if the type of the field has a nonconstant size, | |
2734 because no such entity requiring bitfield operations should reach here. | |
2735 | |
2736 We do *preventively* make a bitfield when there might be the need for it | |
2737 but we don't have all the necessary information to decide, as is the case | |
2738 of a field with no specified position in a packed record. | |
2739 | |
2740 We also don't look at STRICT_ALIGNMENT here, and rely on later processing | |
2741 in layout_decl or finish_record_type to clear the bit_field indication if | |
2742 it is in fact not needed. */ | |
2743 if (addressable >= 0 | |
2744 && size | |
2745 && TREE_CODE (size) == INTEGER_CST | |
2746 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | |
2747 && (!tree_int_cst_equal (size, TYPE_SIZE (type)) | |
2748 || (pos && !value_factor_p (pos, TYPE_ALIGN (type))) | |
2749 || packed | |
2750 || (TYPE_ALIGN (record_type) != 0 | |
2751 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type)))) | |
2752 { | |
2753 DECL_BIT_FIELD (field_decl) = 1; | |
2754 DECL_SIZE (field_decl) = size; | |
2755 if (!packed && !pos) | |
2756 { | |
2757 if (TYPE_ALIGN (record_type) != 0 | |
2758 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type)) | |
2759 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type)); | |
2760 else | |
2761 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type)); | |
2762 } | |
2763 } | |
2764 | |
2765 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed; | |
2766 | |
2767 /* Bump the alignment if need be, either for bitfield/packing purposes or | 2896 /* Bump the alignment if need be, either for bitfield/packing purposes or |
2768 to satisfy the type requirements if no such consideration applies. When | 2897 to satisfy the type requirements if no such considerations apply. When |
2769 we get the alignment from the type, indicate if this is from an explicit | 2898 we get the alignment from the type, indicate if this is from an explicit |
2770 user request, which prevents stor-layout from lowering it later on. */ | 2899 user request, which prevents stor-layout from lowering it later on. */ |
2771 { | 2900 else |
2772 unsigned int bit_align | 2901 { |
2773 = (DECL_BIT_FIELD (field_decl) ? 1 | 2902 const unsigned int field_align |
2774 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0); | 2903 = DECL_BIT_FIELD (field_decl) |
2775 | 2904 ? 1 |
2776 if (bit_align > DECL_ALIGN (field_decl)) | 2905 : packed |
2777 SET_DECL_ALIGN (field_decl, bit_align); | 2906 ? BITS_PER_UNIT |
2778 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl)) | 2907 : 0; |
2779 { | 2908 |
2780 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type)); | 2909 if (field_align > DECL_ALIGN (field_decl)) |
2781 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type); | 2910 SET_DECL_ALIGN (field_decl, field_align); |
2782 } | 2911 else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl)) |
2783 } | 2912 { |
2913 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type)); | |
2914 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type); | |
2915 } | |
2916 } | |
2784 | 2917 |
2785 if (pos) | 2918 if (pos) |
2786 { | 2919 { |
2787 /* We need to pass in the alignment the DECL is known to have. | 2920 /* We need to pass in the alignment the DECL is known to have. |
2788 This is the lowest-order bit set in POS, but no more than | 2921 This is the lowest-order bit set in POS, but no more than |
2799 && (known_align == 0 || known_align > TYPE_ALIGN (record_type))) | 2932 && (known_align == 0 || known_align > TYPE_ALIGN (record_type))) |
2800 known_align = TYPE_ALIGN (record_type); | 2933 known_align = TYPE_ALIGN (record_type); |
2801 | 2934 |
2802 layout_decl (field_decl, known_align); | 2935 layout_decl (field_decl, known_align); |
2803 SET_DECL_OFFSET_ALIGN (field_decl, | 2936 SET_DECL_OFFSET_ALIGN (field_decl, |
2804 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT | 2937 tree_fits_uhwi_p (pos) |
2805 : BITS_PER_UNIT); | 2938 ? BIGGEST_ALIGNMENT : BITS_PER_UNIT); |
2806 pos_from_bit (&DECL_FIELD_OFFSET (field_decl), | 2939 pos_from_bit (&DECL_FIELD_OFFSET (field_decl), |
2807 &DECL_FIELD_BIT_OFFSET (field_decl), | 2940 &DECL_FIELD_BIT_OFFSET (field_decl), |
2808 DECL_OFFSET_ALIGN (field_decl), pos); | 2941 DECL_OFFSET_ALIGN (field_decl), pos); |
2809 } | 2942 } |
2810 | 2943 |
2817 value we have at this point is not accurate enough, so we don't account | 2950 value we have at this point is not accurate enough, so we don't account |
2818 for this here and let finish_record_type decide. */ | 2951 for this here and let finish_record_type decide. */ |
2819 if (!addressable && !type_for_nonaliased_component_p (type)) | 2952 if (!addressable && !type_for_nonaliased_component_p (type)) |
2820 addressable = 1; | 2953 addressable = 1; |
2821 | 2954 |
2955 /* Note that there is a trade-off in making a field nonaddressable because | |
2956 this will cause type-based alias analysis to use the same alias set for | |
2957 accesses to the field as for accesses to the whole record: while doing | |
2958 so will make it more likely to disambiguate accesses to other objects | |
2959 and accesses to the field, it will make it less likely to disambiguate | |
2960 accesses to the other fields of the record and accesses to the field. | |
2961 If the record is fully static, then the trade-off is irrelevant since | |
2962 the fields of the record can always be disambiguated by their offsets | |
2963 but, if the record is dynamic, then it can become problematic. */ | |
2822 DECL_NONADDRESSABLE_P (field_decl) = !addressable; | 2964 DECL_NONADDRESSABLE_P (field_decl) = !addressable; |
2823 | 2965 |
2824 return field_decl; | 2966 return field_decl; |
2825 } | 2967 } |
2826 | 2968 |
2925 | 3067 |
2926 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be | 3068 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be |
2927 a power of 2. */ | 3069 a power of 2. */ |
2928 | 3070 |
2929 bool | 3071 bool |
2930 value_factor_p (tree value, HOST_WIDE_INT factor) | 3072 value_factor_p (tree value, unsigned HOST_WIDE_INT factor) |
2931 { | 3073 { |
3074 gcc_checking_assert (pow2p_hwi (factor)); | |
3075 | |
2932 if (tree_fits_uhwi_p (value)) | 3076 if (tree_fits_uhwi_p (value)) |
2933 return tree_to_uhwi (value) % factor == 0; | 3077 return (tree_to_uhwi (value) & (factor - 1)) == 0; |
2934 | 3078 |
2935 if (TREE_CODE (value) == MULT_EXPR) | 3079 if (TREE_CODE (value) == MULT_EXPR) |
2936 return (value_factor_p (TREE_OPERAND (value, 0), factor) | 3080 return (value_factor_p (TREE_OPERAND (value, 0), factor) |
2937 || value_factor_p (TREE_OPERAND (value, 1), factor)); | 3081 || value_factor_p (TREE_OPERAND (value, 1), factor)); |
2938 | 3082 |
3126 factor = 1; | 3270 factor = 1; |
3127 | 3271 |
3128 return factor * value; | 3272 return factor * value; |
3129 } | 3273 } |
3130 | 3274 |
3131 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true | |
3132 unless we can prove these 2 fields are laid out in such a way that no gap | |
3133 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET | |
3134 is the distance in bits between the end of PREV_FIELD and the starting | |
3135 position of CURR_FIELD. It is ignored if null. */ | |
3136 | |
3137 static bool | |
3138 potential_alignment_gap (tree prev_field, tree curr_field, tree offset) | |
3139 { | |
3140 /* If this is the first field of the record, there cannot be any gap */ | |
3141 if (!prev_field) | |
3142 return false; | |
3143 | |
3144 /* If the previous field is a union type, then return false: The only | |
3145 time when such a field is not the last field of the record is when | |
3146 there are other components at fixed positions after it (meaning there | |
3147 was a rep clause for every field), in which case we don't want the | |
3148 alignment constraint to override them. */ | |
3149 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE) | |
3150 return false; | |
3151 | |
3152 /* If the distance between the end of prev_field and the beginning of | |
3153 curr_field is constant, then there is a gap if the value of this | |
3154 constant is not null. */ | |
3155 if (offset && tree_fits_uhwi_p (offset)) | |
3156 return !integer_zerop (offset); | |
3157 | |
3158 /* If the size and position of the previous field are constant, | |
3159 then check the sum of this size and position. There will be a gap | |
3160 iff it is not multiple of the current field alignment. */ | |
3161 if (tree_fits_uhwi_p (DECL_SIZE (prev_field)) | |
3162 && tree_fits_uhwi_p (bit_position (prev_field))) | |
3163 return ((tree_to_uhwi (bit_position (prev_field)) | |
3164 + tree_to_uhwi (DECL_SIZE (prev_field))) | |
3165 % DECL_ALIGN (curr_field) != 0); | |
3166 | |
3167 /* If both the position and size of the previous field are multiples | |
3168 of the current field alignment, there cannot be any gap. */ | |
3169 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field)) | |
3170 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field))) | |
3171 return false; | |
3172 | |
3173 /* Fallback, return that there may be a potential gap */ | |
3174 return true; | |
3175 } | |
3176 | |
3177 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of | 3275 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of |
3178 the decl. */ | 3276 the decl. */ |
3179 | 3277 |
3180 tree | 3278 tree |
3181 create_label_decl (tree name, Node_Id gnat_node) | 3279 create_label_decl (tree name, Node_Id gnat_node) |
3223 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type); | 3321 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type); |
3224 DECL_ARGUMENTS (subprog_decl) = param_decl_list; | 3322 DECL_ARGUMENTS (subprog_decl) = param_decl_list; |
3225 | 3323 |
3226 DECL_ARTIFICIAL (subprog_decl) = artificial_p; | 3324 DECL_ARTIFICIAL (subprog_decl) = artificial_p; |
3227 DECL_EXTERNAL (subprog_decl) = extern_flag; | 3325 DECL_EXTERNAL (subprog_decl) = extern_flag; |
3326 DECL_FUNCTION_IS_DEF (subprog_decl) = definition; | |
3327 DECL_IGNORED_P (subprog_decl) = !debug_info_p; | |
3228 TREE_PUBLIC (subprog_decl) = public_flag; | 3328 TREE_PUBLIC (subprog_decl) = public_flag; |
3229 | |
3230 if (!debug_info_p) | |
3231 DECL_IGNORED_P (subprog_decl) = 1; | |
3232 if (definition) | |
3233 DECL_FUNCTION_IS_DEF (subprog_decl) = 1; | |
3234 | 3329 |
3235 switch (inline_status) | 3330 switch (inline_status) |
3236 { | 3331 { |
3237 case is_suppressed: | 3332 case is_suppressed: |
3238 DECL_UNINLINABLE (subprog_decl) = 1; | 3333 DECL_UNINLINABLE (subprog_decl) = 1; |
3239 break; | 3334 break; |
3240 | 3335 |
3241 case is_disabled: | 3336 case is_default: |
3242 break; | 3337 break; |
3243 | 3338 |
3244 case is_required: | 3339 case is_required: |
3245 if (Back_End_Inlining) | 3340 if (Back_End_Inlining) |
3246 { | 3341 { |
3257 TREE_PUBLIC (subprog_decl) = 0; | 3352 TREE_PUBLIC (subprog_decl) = 0; |
3258 } | 3353 } |
3259 | 3354 |
3260 /* ... fall through ... */ | 3355 /* ... fall through ... */ |
3261 | 3356 |
3262 case is_enabled: | 3357 case is_prescribed: |
3358 DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1; | |
3359 | |
3360 /* ... fall through ... */ | |
3361 | |
3362 case is_requested: | |
3263 DECL_DECLARED_INLINE_P (subprog_decl) = 1; | 3363 DECL_DECLARED_INLINE_P (subprog_decl) = 1; |
3264 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p; | 3364 if (!Debug_Generated_Code) |
3365 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p; | |
3265 break; | 3366 break; |
3266 | 3367 |
3267 default: | 3368 default: |
3268 gcc_unreachable (); | 3369 gcc_unreachable (); |
3269 } | 3370 } |
3352 gnat_pushlevel (); | 3453 gnat_pushlevel (); |
3353 | 3454 |
3354 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl; | 3455 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl; |
3355 param_decl = DECL_CHAIN (param_decl)) | 3456 param_decl = DECL_CHAIN (param_decl)) |
3356 DECL_CONTEXT (param_decl) = subprog_decl; | 3457 DECL_CONTEXT (param_decl) = subprog_decl; |
3357 | |
3358 make_decl_rtl (subprog_decl); | |
3359 } | 3458 } |
3360 | 3459 |
3361 /* Finish translating the current subprogram and set its BODY. */ | 3460 /* Finish translating the current subprogram and set its BODY. */ |
3362 | 3461 |
3363 void | 3462 void |
3600 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p; | 3699 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p; |
3601 } | 3700 } |
3602 | 3701 |
3603 /* EXP is an expression for the size of an object. If this size contains | 3702 /* EXP is an expression for the size of an object. If this size contains |
3604 discriminant references, replace them with the maximum (if MAX_P) or | 3703 discriminant references, replace them with the maximum (if MAX_P) or |
3605 minimum (if !MAX_P) possible value of the discriminant. */ | 3704 minimum (if !MAX_P) possible value of the discriminant. |
3705 | |
3706 Note that the expression may have already been gimplified,in which case | |
3707 COND_EXPRs have VOID_TYPE and no operands, and this must be handled. */ | |
3606 | 3708 |
3607 tree | 3709 tree |
3608 max_size (tree exp, bool max_p) | 3710 max_size (tree exp, bool max_p) |
3609 { | 3711 { |
3610 enum tree_code code = TREE_CODE (exp); | 3712 enum tree_code code = TREE_CODE (exp); |
3643 case tcc_reference: | 3745 case tcc_reference: |
3644 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to | 3746 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to |
3645 modify. Otherwise, we treat it like a variable. */ | 3747 modify. Otherwise, we treat it like a variable. */ |
3646 if (CONTAINS_PLACEHOLDER_P (exp)) | 3748 if (CONTAINS_PLACEHOLDER_P (exp)) |
3647 { | 3749 { |
3648 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1)); | 3750 tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1))); |
3649 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type)); | 3751 tree val |
3650 return | 3752 = fold_convert (base_type, |
3651 convert (type, | 3753 max_p |
3652 max_size (convert (get_base_type (val_type), val), true)); | 3754 ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type)); |
3755 | |
3756 /* Walk down the extra subtypes to get more restrictive bounds. */ | |
3757 while (TYPE_IS_EXTRA_SUBTYPE_P (type)) | |
3758 { | |
3759 type = TREE_TYPE (type); | |
3760 if (max_p) | |
3761 val = fold_build2 (MIN_EXPR, base_type, val, | |
3762 fold_convert (base_type, | |
3763 TYPE_MAX_VALUE (type))); | |
3764 else | |
3765 val = fold_build2 (MAX_EXPR, base_type, val, | |
3766 fold_convert (base_type, | |
3767 TYPE_MIN_VALUE (type))); | |
3768 } | |
3769 | |
3770 return fold_convert (type, max_size (val, max_p)); | |
3653 } | 3771 } |
3654 | 3772 |
3655 return exp; | 3773 return exp; |
3656 | 3774 |
3657 case tcc_comparison: | 3775 case tcc_comparison: |
3658 return build_int_cst (type, max_p ? 1 : 0); | 3776 return build_int_cst (type, max_p ? 1 : 0); |
3659 | 3777 |
3660 case tcc_unary: | 3778 case tcc_unary: |
3779 op0 = TREE_OPERAND (exp, 0); | |
3780 | |
3661 if (code == NON_LVALUE_EXPR) | 3781 if (code == NON_LVALUE_EXPR) |
3662 return max_size (TREE_OPERAND (exp, 0), max_p); | 3782 return max_size (op0, max_p); |
3663 | 3783 |
3664 op0 = max_size (TREE_OPERAND (exp, 0), | 3784 if (VOID_TYPE_P (TREE_TYPE (op0))) |
3665 code == NEGATE_EXPR ? !max_p : max_p); | 3785 return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type); |
3786 | |
3787 op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p); | |
3666 | 3788 |
3667 if (op0 == TREE_OPERAND (exp, 0)) | 3789 if (op0 == TREE_OPERAND (exp, 0)) |
3668 return exp; | 3790 return exp; |
3669 | 3791 |
3670 return fold_build1 (code, type, op0); | 3792 return fold_build1 (code, type, op0); |
3671 | 3793 |
3672 case tcc_binary: | 3794 case tcc_binary: |
3673 { | 3795 op0 = TREE_OPERAND (exp, 0); |
3674 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p); | 3796 op1 = TREE_OPERAND (exp, 1); |
3675 tree rhs = max_size (TREE_OPERAND (exp, 1), | 3797 |
3676 code == MINUS_EXPR ? !max_p : max_p); | 3798 /* If we have a multiply-add with a "negative" value in an unsigned |
3677 | 3799 type, do a multiply-subtract with the negated value, in order to |
3678 /* Special-case wanting the maximum value of a MIN_EXPR. | 3800 avoid creating a spurious overflow below. */ |
3679 In that case, if one side overflows, return the other. */ | 3801 if (code == PLUS_EXPR |
3680 if (max_p && code == MIN_EXPR) | 3802 && TREE_CODE (op0) == MULT_EXPR |
3681 { | 3803 && TYPE_UNSIGNED (type) |
3682 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs)) | 3804 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST |
3683 return lhs; | 3805 && !TREE_OVERFLOW (TREE_OPERAND (op0, 1)) |
3684 | 3806 && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1))) |
3685 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs)) | 3807 { |
3686 return rhs; | 3808 tree tmp = op1; |
3687 } | 3809 op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0), |
3688 | 3810 fold_build1 (NEGATE_EXPR, type, |
3689 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS | 3811 TREE_OPERAND (op0, 1))); |
3690 overflowing and the RHS a variable. */ | 3812 op0 = tmp; |
3691 if ((code == MINUS_EXPR || code == PLUS_EXPR) | 3813 code = MINUS_EXPR; |
3692 && TREE_CODE (lhs) == INTEGER_CST | 3814 } |
3693 && TREE_OVERFLOW (lhs) | 3815 |
3694 && TREE_CODE (rhs) != INTEGER_CST) | 3816 op0 = max_size (op0, max_p); |
3695 return lhs; | 3817 op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p); |
3696 | 3818 |
3697 /* If we are going to subtract a "negative" value in an unsigned type, | 3819 if ((code == MINUS_EXPR || code == PLUS_EXPR)) |
3698 do the operation as an addition of the negated value, in order to | 3820 { |
3699 avoid creating a spurious overflow below. */ | 3821 /* If the op0 has overflowed and the op1 is a variable, |
3700 if (code == MINUS_EXPR | 3822 propagate the overflow by returning the op0. */ |
3701 && TYPE_UNSIGNED (type) | 3823 if (TREE_CODE (op0) == INTEGER_CST |
3702 && TREE_CODE (rhs) == INTEGER_CST | 3824 && TREE_OVERFLOW (op0) |
3703 && !TREE_OVERFLOW (rhs) | 3825 && TREE_CODE (op1) != INTEGER_CST) |
3704 && tree_int_cst_sign_bit (rhs) != 0) | 3826 return op0; |
3705 { | 3827 |
3706 rhs = fold_build1 (NEGATE_EXPR, type, rhs); | 3828 /* If we have a "negative" value in an unsigned type, do the |
3707 code = PLUS_EXPR; | 3829 opposite operation on the negated value, in order to avoid |
3708 } | 3830 creating a spurious overflow below. */ |
3709 | 3831 if (TYPE_UNSIGNED (type) |
3710 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1)) | 3832 && TREE_CODE (op1) == INTEGER_CST |
3711 return exp; | 3833 && !TREE_OVERFLOW (op1) |
3712 | 3834 && tree_int_cst_sign_bit (op1)) |
3713 /* We need to detect overflows so we call size_binop here. */ | 3835 { |
3714 return size_binop (code, lhs, rhs); | 3836 op1 = fold_build1 (NEGATE_EXPR, type, op1); |
3715 } | 3837 code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR); |
3838 } | |
3839 } | |
3840 | |
3841 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)) | |
3842 return exp; | |
3843 | |
3844 /* We need to detect overflows so we call size_binop here. */ | |
3845 return size_binop (code, op0, op1); | |
3716 | 3846 |
3717 case tcc_expression: | 3847 case tcc_expression: |
3718 switch (TREE_CODE_LENGTH (code)) | 3848 switch (TREE_CODE_LENGTH (code)) |
3719 { | 3849 { |
3720 case 1: | 3850 case 1: |
3742 return fold_build2 (code, type, op0, op1); | 3872 return fold_build2 (code, type, op0, op1); |
3743 | 3873 |
3744 case 3: | 3874 case 3: |
3745 if (code == COND_EXPR) | 3875 if (code == COND_EXPR) |
3746 { | 3876 { |
3877 op0 = TREE_OPERAND (exp, 0); | |
3747 op1 = TREE_OPERAND (exp, 1); | 3878 op1 = TREE_OPERAND (exp, 1); |
3748 op2 = TREE_OPERAND (exp, 2); | 3879 op2 = TREE_OPERAND (exp, 2); |
3749 | 3880 |
3750 if (!op1 || !op2) | 3881 if (!op1 || !op2) |
3751 return exp; | 3882 return exp; |
3752 | 3883 |
3753 return | 3884 op1 = max_size (op1, max_p); |
3754 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, | 3885 op2 = max_size (op2, max_p); |
3755 max_size (op1, max_p), max_size (op2, max_p)); | 3886 |
3887 /* If we have the MAX of a "negative" value in an unsigned type | |
3888 and zero for a length expression, just return zero. */ | |
3889 if (max_p | |
3890 && TREE_CODE (op0) == LE_EXPR | |
3891 && TYPE_UNSIGNED (type) | |
3892 && TREE_CODE (op1) == INTEGER_CST | |
3893 && !TREE_OVERFLOW (op1) | |
3894 && tree_int_cst_sign_bit (op1) | |
3895 && integer_zerop (op2)) | |
3896 return op2; | |
3897 | |
3898 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2); | |
3756 } | 3899 } |
3757 break; | 3900 break; |
3758 | 3901 |
3759 default: | 3902 default: |
3760 break; | 3903 break; |
3787 if (TREE_CODE (array_type) == ARRAY_TYPE | 3930 if (TREE_CODE (array_type) == ARRAY_TYPE |
3788 || (TREE_CODE (array_type) == INTEGER_TYPE | 3931 || (TREE_CODE (array_type) == INTEGER_TYPE |
3789 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type))) | 3932 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type))) |
3790 bound_list = TYPE_ACTUAL_BOUNDS (array_type); | 3933 bound_list = TYPE_ACTUAL_BOUNDS (array_type); |
3791 | 3934 |
3792 /* First make the list for a CONSTRUCTOR for the template. Go down the | 3935 /* First make the list for a CONSTRUCTOR for the template. Go down |
3793 field list of the template instead of the type chain because this | 3936 the field list of the template instead of the type chain because |
3794 array might be an Ada array of arrays and we can't tell where the | 3937 this array might be an Ada array of array and we can't tell where |
3795 nested arrays stop being the underlying object. */ | 3938 the nested array stop being the underlying object. */ |
3796 | 3939 for (field = TYPE_FIELDS (template_type); |
3797 for (field = TYPE_FIELDS (template_type); field; | 3940 field; |
3798 (bound_list | |
3799 ? (bound_list = TREE_CHAIN (bound_list)) | |
3800 : (array_type = TREE_TYPE (array_type))), | |
3801 field = DECL_CHAIN (DECL_CHAIN (field))) | 3941 field = DECL_CHAIN (DECL_CHAIN (field))) |
3802 { | 3942 { |
3803 tree bounds, min, max; | 3943 tree bounds, min, max; |
3804 | 3944 |
3805 /* If we have a bound list, get the bounds from there. Likewise | 3945 /* If we have a bound list, get the bounds from there. Likewise |
3806 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with | 3946 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with |
3807 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template. | 3947 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the |
3808 This will give us a maximum range. */ | 3948 template, but this will only give us a maximum range. */ |
3809 if (bound_list) | 3949 if (bound_list) |
3810 bounds = TREE_VALUE (bound_list); | 3950 { |
3951 bounds = TREE_VALUE (bound_list); | |
3952 bound_list = TREE_CHAIN (bound_list); | |
3953 } | |
3811 else if (TREE_CODE (array_type) == ARRAY_TYPE) | 3954 else if (TREE_CODE (array_type) == ARRAY_TYPE) |
3812 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type)); | 3955 { |
3956 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type)); | |
3957 array_type = TREE_TYPE (array_type); | |
3958 } | |
3813 else if (expr && TREE_CODE (expr) == PARM_DECL | 3959 else if (expr && TREE_CODE (expr) == PARM_DECL |
3814 && DECL_BY_COMPONENT_PTR_P (expr)) | 3960 && DECL_BY_COMPONENT_PTR_P (expr)) |
3815 bounds = TREE_TYPE (field); | 3961 bounds = TREE_TYPE (field); |
3816 else | 3962 else |
3817 gcc_unreachable (); | 3963 gcc_unreachable (); |
4270 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))))) | 4416 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))))) |
4271 return convert (type, TREE_OPERAND (expr, 0)); | 4417 return convert (type, TREE_OPERAND (expr, 0)); |
4272 | 4418 |
4273 /* If the inner type is of self-referential size and the expression type | 4419 /* If the inner type is of self-referential size and the expression type |
4274 is a record, do this as an unchecked conversion unless both types are | 4420 is a record, do this as an unchecked conversion unless both types are |
4275 essentially the same. But first pad the expression if possible to | 4421 essentially the same. */ |
4276 have the same size on both sides. */ | |
4277 if (ecode == RECORD_TYPE | 4422 if (ecode == RECORD_TYPE |
4278 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))) | 4423 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))) |
4279 && TYPE_MAIN_VARIANT (etype) | 4424 && TYPE_MAIN_VARIANT (etype) |
4280 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type)))) | 4425 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type)))) |
4281 { | 4426 return unchecked_convert (type, expr, false); |
4282 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST) | |
4283 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty, | |
4284 false, false, false, true), | |
4285 expr); | |
4286 return unchecked_convert (type, expr, false); | |
4287 } | |
4288 | 4427 |
4289 /* If we are converting between array types with variable size, do the | 4428 /* If we are converting between array types with variable size, do the |
4290 final conversion as an unchecked conversion, again to avoid the need | 4429 final conversion as an unchecked conversion, again to avoid the need |
4291 for some variable-sized temporaries. If valid, this conversion is | 4430 for some variable-sized temporaries. If valid, this conversion is |
4292 very likely purely technical and without real effects. */ | 4431 very likely purely technical and without real effects. */ |
4346 fold_convert (TREE_TYPE (etype), expr), | 4485 fold_convert (TREE_TYPE (etype), expr), |
4347 convert (TREE_TYPE (etype), | 4486 convert (TREE_TYPE (etype), |
4348 TYPE_MIN_VALUE (etype)))); | 4487 TYPE_MIN_VALUE (etype)))); |
4349 | 4488 |
4350 /* If the input is a justified modular type, we need to extract the actual | 4489 /* If the input is a justified modular type, we need to extract the actual |
4351 object before converting it to any other type with the exceptions of an | 4490 object before converting it to an other type with the exceptions of an |
4352 unconstrained array or of a mere type variant. It is useful to avoid the | 4491 [unconstrained] array or a mere type variant. It is useful to avoid |
4353 extraction and conversion in the type variant case because it could end | 4492 the extraction and conversion in these cases because it could end up |
4354 up replacing a VAR_DECL expr by a constructor and we might be about the | 4493 replacing a VAR_DECL by a constructor and we might be about the take |
4355 take the address of the result. */ | 4494 the address of the result. */ |
4356 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype) | 4495 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype) |
4496 && code != ARRAY_TYPE | |
4357 && code != UNCONSTRAINED_ARRAY_TYPE | 4497 && code != UNCONSTRAINED_ARRAY_TYPE |
4358 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype)) | 4498 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype)) |
4359 return | 4499 return |
4360 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false)); | 4500 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false)); |
4361 | 4501 |
4396 return expr; | 4536 return expr; |
4397 | 4537 |
4398 case STRING_CST: | 4538 case STRING_CST: |
4399 /* If we are converting a STRING_CST to another constrained array type, | 4539 /* If we are converting a STRING_CST to another constrained array type, |
4400 just make a new one in the proper type. */ | 4540 just make a new one in the proper type. */ |
4401 if (code == ecode && AGGREGATE_TYPE_P (etype) | 4541 if (code == ecode |
4402 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST | 4542 && !(TREE_CONSTANT (TYPE_SIZE (etype)) |
4403 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)) | 4543 && !TREE_CONSTANT (TYPE_SIZE (type)))) |
4404 { | 4544 { |
4405 expr = copy_node (expr); | 4545 expr = copy_node (expr); |
4406 TREE_TYPE (expr) = type; | 4546 TREE_TYPE (expr) = type; |
4407 return expr; | 4547 return expr; |
4408 } | 4548 } |
4853 enum tree_code code = TREE_CODE (expr); | 4993 enum tree_code code = TREE_CODE (expr); |
4854 tree type = TREE_TYPE (expr); | 4994 tree type = TREE_TYPE (expr); |
4855 | 4995 |
4856 /* If the type is unsigned, overflow is allowed so we cannot be sure that | 4996 /* If the type is unsigned, overflow is allowed so we cannot be sure that |
4857 EXPR doesn't overflow. Keep it simple if optimization is disabled. */ | 4997 EXPR doesn't overflow. Keep it simple if optimization is disabled. */ |
4858 if (TYPE_UNSIGNED (type) || !optimize) | 4998 if (TYPE_UNSIGNED (type) || !optimize || optimize_debug) |
4859 return convert (sizetype, expr); | 4999 return convert (sizetype, expr); |
4860 | 5000 |
4861 switch (code) | 5001 switch (code) |
4862 { | 5002 { |
4863 case VAR_DECL: | 5003 case VAR_DECL: |
5105 | 5245 |
5106 /* If the expression is already of the right type, we are done. */ | 5246 /* If the expression is already of the right type, we are done. */ |
5107 if (etype == type) | 5247 if (etype == type) |
5108 return expr; | 5248 return expr; |
5109 | 5249 |
5110 /* If both types are integral just do a normal conversion. | 5250 /* If both types are integral or regular pointer, then just do a normal |
5111 Likewise for a conversion to an unconstrained array. */ | 5251 conversion. Likewise for a conversion to an unconstrained array. */ |
5112 if (((INTEGRAL_TYPE_P (type) | 5252 if (((INTEGRAL_TYPE_P (type) |
5113 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type)) | 5253 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type)) |
5114 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type))) | 5254 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type))) |
5115 && (INTEGRAL_TYPE_P (etype) | 5255 && (INTEGRAL_TYPE_P (etype) |
5116 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype)) | 5256 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype)) |
5237 | 5377 |
5238 /* If we are converting from a scalar type to a type with a different size, | 5378 /* If we are converting from a scalar type to a type with a different size, |
5239 we need to pad to have the same size on both sides. | 5379 we need to pad to have the same size on both sides. |
5240 | 5380 |
5241 ??? We cannot do it unconditionally because unchecked conversions are | 5381 ??? We cannot do it unconditionally because unchecked conversions are |
5242 used liberally by the front-end to implement polymorphism, e.g. in: | 5382 used liberally by the front-end to implement interface thunks: |
5243 | 5383 |
5384 type ada__tags__addr_ptr is access system.address; | |
5244 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s); | 5385 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s); |
5245 return p___size__4 (p__object!(S191s.all)); | 5386 return p___size__4 (p__object!(S191s.all)); |
5246 | 5387 |
5247 so we skip all expressions that are references. */ | 5388 so we need to skip dereferences. */ |
5248 else if (!REFERENCE_CLASS_P (expr) | 5389 else if (!INDIRECT_REF_P (expr) |
5249 && !AGGREGATE_TYPE_P (etype) | 5390 && !AGGREGATE_TYPE_P (etype) |
5250 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | 5391 && ecode != UNCONSTRAINED_ARRAY_TYPE |
5392 && TREE_CONSTANT (TYPE_SIZE (type)) | |
5251 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type)))) | 5393 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type)))) |
5252 { | 5394 { |
5253 if (c < 0) | 5395 if (c < 0) |
5254 { | 5396 { |
5255 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty, | 5397 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty, |
5264 expr = unchecked_convert (rec_type, expr, notrunc_p); | 5406 expr = unchecked_convert (rec_type, expr, notrunc_p); |
5265 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false); | 5407 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false); |
5266 } | 5408 } |
5267 } | 5409 } |
5268 | 5410 |
5411 /* Likewise if we are converting from a scalar type to a type with self- | |
5412 referential size. We use the max size to do the padding in this case. */ | |
5413 else if (!INDIRECT_REF_P (expr) | |
5414 && !AGGREGATE_TYPE_P (etype) | |
5415 && ecode != UNCONSTRAINED_ARRAY_TYPE | |
5416 && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))) | |
5417 { | |
5418 tree new_size = max_size (TYPE_SIZE (type), true); | |
5419 c = tree_int_cst_compare (TYPE_SIZE (etype), new_size); | |
5420 if (c < 0) | |
5421 { | |
5422 expr = convert (maybe_pad_type (etype, new_size, 0, Empty, | |
5423 false, false, false, true), | |
5424 expr); | |
5425 expr = unchecked_convert (type, expr, notrunc_p); | |
5426 } | |
5427 else | |
5428 { | |
5429 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty, | |
5430 false, false, false, true); | |
5431 expr = unchecked_convert (rec_type, expr, notrunc_p); | |
5432 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false); | |
5433 } | |
5434 } | |
5435 | |
5269 /* We have a special case when we are converting between two unconstrained | 5436 /* We have a special case when we are converting between two unconstrained |
5270 array types. In that case, take the address, convert the fat pointer | 5437 array types. In that case, take the address, convert the fat pointer |
5271 types, and dereference. */ | 5438 types, and dereference. */ |
5272 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE) | 5439 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE) |
5273 expr = build_unary_op (INDIRECT_REF, NULL_TREE, | 5440 expr = build_unary_op (INDIRECT_REF, NULL_TREE, |
5293 { | 5460 { |
5294 expr = convert (tem, expr); | 5461 expr = convert (tem, expr); |
5295 return unchecked_convert (type, expr, notrunc_p); | 5462 return unchecked_convert (type, expr, notrunc_p); |
5296 } | 5463 } |
5297 | 5464 |
5298 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump | 5465 /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump |
5299 the alignment of the CONSTRUCTOR to speed up the copy operation. */ | 5466 the alignment of the CONSTRUCTOR to speed up the copy operation. But do |
5467 not do it for a conversion between original and packable version to avoid | |
5468 an infinite recursion. */ | |
5300 else if (TREE_CODE (expr) == CONSTRUCTOR | 5469 else if (TREE_CODE (expr) == CONSTRUCTOR |
5301 && code == RECORD_TYPE | 5470 && AGGREGATE_TYPE_P (type) |
5471 && TYPE_NAME (type) != TYPE_NAME (etype) | |
5302 && TYPE_ALIGN (etype) < TYPE_ALIGN (type)) | 5472 && TYPE_ALIGN (etype) < TYPE_ALIGN (type)) |
5303 { | 5473 { |
5304 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type), | 5474 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type), |
5475 Empty, false, false, false, true), | |
5476 expr); | |
5477 return unchecked_convert (type, expr, notrunc_p); | |
5478 } | |
5479 | |
5480 /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the | |
5481 size of the CONSTRUCTOR to make sure there are enough allocated bytes. | |
5482 But do not do it for a conversion between original and packable version | |
5483 to avoid an infinite recursion. */ | |
5484 else if (TREE_CODE (expr) == CONSTRUCTOR | |
5485 && AGGREGATE_TYPE_P (type) | |
5486 && TYPE_NAME (type) != TYPE_NAME (etype) | |
5487 && TREE_CONSTANT (TYPE_SIZE (type)) | |
5488 && (!TREE_CONSTANT (TYPE_SIZE (etype)) | |
5489 || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type)))) | |
5490 { | |
5491 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, | |
5305 Empty, false, false, false, true), | 5492 Empty, false, false, false, true), |
5306 expr); | 5493 expr); |
5307 return unchecked_convert (type, expr, notrunc_p); | 5494 return unchecked_convert (type, expr, notrunc_p); |
5308 } | 5495 } |
5309 | 5496 |
5621 if (TREE_CODE (iter) == FUNCTION_DECL | 5808 if (TREE_CODE (iter) == FUNCTION_DECL |
5622 && DECL_EXTERNAL (iter) | 5809 && DECL_EXTERNAL (iter) |
5623 && DECL_INITIAL (iter) == NULL | 5810 && DECL_INITIAL (iter) == NULL |
5624 && !DECL_IGNORED_P (iter) | 5811 && !DECL_IGNORED_P (iter) |
5625 && DECL_FUNCTION_IS_DEF (iter)) | 5812 && DECL_FUNCTION_IS_DEF (iter)) |
5813 debug_hooks->early_global_decl (iter); | |
5814 | |
5815 /* Output global constants. */ | |
5816 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter) | |
5817 if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter)) | |
5626 debug_hooks->early_global_decl (iter); | 5818 debug_hooks->early_global_decl (iter); |
5627 | 5819 |
5628 /* Then output the global variables. We need to do that after the debug | 5820 /* Then output the global variables. We need to do that after the debug |
5629 information for global types is emitted so that they are finalized. Skip | 5821 information for global types is emitted so that they are finalized. Skip |
5630 external global variables, unless we need to emit debug info for them: | 5822 external global variables, unless we need to emit debug info for them: |
5777 ARG6) NAME, | 5969 ARG6) NAME, |
5778 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ | 5970 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ |
5779 ARG6, ARG7) NAME, | 5971 ARG6, ARG7) NAME, |
5780 #define DEF_POINTER_TYPE(NAME, TYPE) NAME, | 5972 #define DEF_POINTER_TYPE(NAME, TYPE) NAME, |
5781 #include "builtin-types.def" | 5973 #include "builtin-types.def" |
5974 #include "ada-builtin-types.def" | |
5782 #undef DEF_PRIMITIVE_TYPE | 5975 #undef DEF_PRIMITIVE_TYPE |
5783 #undef DEF_FUNCTION_TYPE_0 | 5976 #undef DEF_FUNCTION_TYPE_0 |
5784 #undef DEF_FUNCTION_TYPE_1 | 5977 #undef DEF_FUNCTION_TYPE_1 |
5785 #undef DEF_FUNCTION_TYPE_2 | 5978 #undef DEF_FUNCTION_TYPE_2 |
5786 #undef DEF_FUNCTION_TYPE_3 | 5979 #undef DEF_FUNCTION_TYPE_3 |
5925 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7); | 6118 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7); |
5926 #define DEF_POINTER_TYPE(ENUM, TYPE) \ | 6119 #define DEF_POINTER_TYPE(ENUM, TYPE) \ |
5927 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]); | 6120 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]); |
5928 | 6121 |
5929 #include "builtin-types.def" | 6122 #include "builtin-types.def" |
6123 #include "ada-builtin-types.def" | |
5930 | 6124 |
5931 #undef DEF_PRIMITIVE_TYPE | 6125 #undef DEF_PRIMITIVE_TYPE |
5932 #undef DEF_FUNCTION_TYPE_0 | 6126 #undef DEF_FUNCTION_TYPE_0 |
5933 #undef DEF_FUNCTION_TYPE_1 | 6127 #undef DEF_FUNCTION_TYPE_1 |
5934 #undef DEF_FUNCTION_TYPE_2 | 6128 #undef DEF_FUNCTION_TYPE_2 |
6097 { | 6291 { |
6098 if (!prototype_p (type) | 6292 if (!prototype_p (type) |
6099 && (!TYPE_ATTRIBUTES (type) | 6293 && (!TYPE_ATTRIBUTES (type) |
6100 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type)))) | 6294 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type)))) |
6101 { | 6295 { |
6102 error ("nonnull attribute without arguments on a non-prototype"); | 6296 error ("%qs attribute without arguments on a non-prototype", |
6297 "nonnull"); | |
6103 *no_add_attrs = true; | 6298 *no_add_attrs = true; |
6104 } | 6299 } |
6105 return NULL_TREE; | 6300 return NULL_TREE; |
6106 } | 6301 } |
6107 | 6302 |
6111 { | 6306 { |
6112 unsigned HOST_WIDE_INT arg_num = 0, ck_num; | 6307 unsigned HOST_WIDE_INT arg_num = 0, ck_num; |
6113 | 6308 |
6114 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num)) | 6309 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num)) |
6115 { | 6310 { |
6116 error ("nonnull argument has invalid operand number (argument %lu)", | 6311 error ("%qs argument has invalid operand number (argument %lu)", |
6117 (unsigned long) attr_arg_num); | 6312 "nonnull", (unsigned long) attr_arg_num); |
6118 *no_add_attrs = true; | 6313 *no_add_attrs = true; |
6119 return NULL_TREE; | 6314 return NULL_TREE; |
6120 } | 6315 } |
6121 | 6316 |
6122 if (prototype_p (type)) | 6317 if (prototype_p (type)) |
6133 } | 6328 } |
6134 | 6329 |
6135 if (!argument | 6330 if (!argument |
6136 || TREE_CODE (argument) == VOID_TYPE) | 6331 || TREE_CODE (argument) == VOID_TYPE) |
6137 { | 6332 { |
6138 error ("nonnull argument with out-of-range operand number " | 6333 error ("%qs argument with out-of-range operand number " |
6139 "(argument %lu, operand %lu)", | 6334 "(argument %lu, operand %lu)", "nonnull", |
6140 (unsigned long) attr_arg_num, (unsigned long) arg_num); | 6335 (unsigned long) attr_arg_num, (unsigned long) arg_num); |
6141 *no_add_attrs = true; | 6336 *no_add_attrs = true; |
6142 return NULL_TREE; | 6337 return NULL_TREE; |
6143 } | 6338 } |
6144 | 6339 |
6145 if (TREE_CODE (argument) != POINTER_TYPE) | 6340 if (TREE_CODE (argument) != POINTER_TYPE) |
6146 { | 6341 { |
6147 error ("nonnull argument references non-pointer operand " | 6342 error ("%qs argument references non-pointer operand " |
6148 "(argument %lu, operand %lu)", | 6343 "(argument %lu, operand %lu)", "nonnull", |
6149 (unsigned long) attr_arg_num, (unsigned long) arg_num); | 6344 (unsigned long) attr_arg_num, (unsigned long) arg_num); |
6150 *no_add_attrs = true; | 6345 *no_add_attrs = true; |
6151 return NULL_TREE; | 6346 return NULL_TREE; |
6152 } | 6347 } |
6153 } | 6348 } |
6227 } | 6422 } |
6228 | 6423 |
6229 return NULL_TREE; | 6424 return NULL_TREE; |
6230 } | 6425 } |
6231 | 6426 |
6427 /* Handle a "stack_protect" attribute; arguments as in | |
6428 struct attribute_spec.handler. */ | |
6429 | |
6430 static tree | |
6431 handle_stack_protect_attribute (tree *node, tree name, tree, int, | |
6432 bool *no_add_attrs) | |
6433 { | |
6434 if (TREE_CODE (*node) != FUNCTION_DECL) | |
6435 { | |
6436 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6437 *no_add_attrs = true; | |
6438 } | |
6439 | |
6440 return NULL_TREE; | |
6441 } | |
6442 | |
6232 /* Handle a "noinline" attribute; arguments as in | 6443 /* Handle a "noinline" attribute; arguments as in |
6233 struct attribute_spec.handler. */ | 6444 struct attribute_spec.handler. */ |
6234 | 6445 |
6235 static tree | 6446 static tree |
6236 handle_noinline_attribute (tree *node, tree name, | 6447 handle_noinline_attribute (tree *node, tree name, |
6272 } | 6483 } |
6273 | 6484 |
6274 return NULL_TREE; | 6485 return NULL_TREE; |
6275 } | 6486 } |
6276 | 6487 |
6488 /* Handle a "no_icf" attribute; arguments as in | |
6489 struct attribute_spec.handler. */ | |
6490 | |
6491 static tree | |
6492 handle_noicf_attribute (tree *node, tree name, | |
6493 tree ARG_UNUSED (args), | |
6494 int ARG_UNUSED (flags), bool *no_add_attrs) | |
6495 { | |
6496 if (TREE_CODE (*node) != FUNCTION_DECL) | |
6497 { | |
6498 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6499 *no_add_attrs = true; | |
6500 } | |
6501 | |
6502 return NULL_TREE; | |
6503 } | |
6504 | |
6505 /* Handle a "noipa" attribute; arguments as in | |
6506 struct attribute_spec.handler. */ | |
6507 | |
6508 static tree | |
6509 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs) | |
6510 { | |
6511 if (TREE_CODE (*node) != FUNCTION_DECL) | |
6512 { | |
6513 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6514 *no_add_attrs = true; | |
6515 } | |
6516 | |
6517 return NULL_TREE; | |
6518 } | |
6519 | |
6277 /* Handle a "leaf" attribute; arguments as in | 6520 /* Handle a "leaf" attribute; arguments as in |
6278 struct attribute_spec.handler. */ | 6521 struct attribute_spec.handler. */ |
6279 | 6522 |
6280 static tree | 6523 static tree |
6281 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args), | 6524 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args), |
6359 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE); | 6602 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE); |
6360 | 6603 |
6361 /* Ensure we have a variadic function. */ | 6604 /* Ensure we have a variadic function. */ |
6362 gcc_assert (!prototype_p (*node) || stdarg_p (*node)); | 6605 gcc_assert (!prototype_p (*node) || stdarg_p (*node)); |
6363 | 6606 |
6607 return NULL_TREE; | |
6608 } | |
6609 | |
6610 /* Handle a "flatten" attribute; arguments as in | |
6611 struct attribute_spec.handler. */ | |
6612 | |
6613 static tree | |
6614 handle_flatten_attribute (tree *node, tree name, | |
6615 tree args ATTRIBUTE_UNUSED, | |
6616 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs) | |
6617 { | |
6618 if (TREE_CODE (*node) == FUNCTION_DECL) | |
6619 /* Do nothing else, just set the attribute. We'll get at | |
6620 it later with lookup_attribute. */ | |
6621 ; | |
6622 else | |
6623 { | |
6624 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6625 *no_add_attrs = true; | |
6626 } | |
6627 | |
6628 return NULL_TREE; | |
6629 } | |
6630 | |
6631 /* Handle a "used" attribute; arguments as in | |
6632 struct attribute_spec.handler. */ | |
6633 | |
6634 static tree | |
6635 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args), | |
6636 int ARG_UNUSED (flags), bool *no_add_attrs) | |
6637 { | |
6638 tree node = *pnode; | |
6639 | |
6640 if (TREE_CODE (node) == FUNCTION_DECL | |
6641 || (VAR_P (node) && TREE_STATIC (node)) | |
6642 || (TREE_CODE (node) == TYPE_DECL)) | |
6643 { | |
6644 TREE_USED (node) = 1; | |
6645 DECL_PRESERVE_P (node) = 1; | |
6646 if (VAR_P (node)) | |
6647 DECL_READ_P (node) = 1; | |
6648 } | |
6649 else | |
6650 { | |
6651 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6652 *no_add_attrs = true; | |
6653 } | |
6654 | |
6655 return NULL_TREE; | |
6656 } | |
6657 | |
6658 /* Handle a "cold" and attribute; arguments as in | |
6659 struct attribute_spec.handler. */ | |
6660 | |
6661 static tree | |
6662 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args), | |
6663 int ARG_UNUSED (flags), bool *no_add_attrs) | |
6664 { | |
6665 if (TREE_CODE (*node) == FUNCTION_DECL | |
6666 || TREE_CODE (*node) == LABEL_DECL) | |
6667 { | |
6668 /* Attribute cold processing is done later with lookup_attribute. */ | |
6669 } | |
6670 else | |
6671 { | |
6672 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6673 *no_add_attrs = true; | |
6674 } | |
6675 | |
6676 return NULL_TREE; | |
6677 } | |
6678 | |
6679 /* Handle a "hot" and attribute; arguments as in | |
6680 struct attribute_spec.handler. */ | |
6681 | |
6682 static tree | |
6683 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args), | |
6684 int ARG_UNUSED (flags), bool *no_add_attrs) | |
6685 { | |
6686 if (TREE_CODE (*node) == FUNCTION_DECL | |
6687 || TREE_CODE (*node) == LABEL_DECL) | |
6688 { | |
6689 /* Attribute hot processing is done later with lookup_attribute. */ | |
6690 } | |
6691 else | |
6692 { | |
6693 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6694 *no_add_attrs = true; | |
6695 } | |
6696 | |
6697 return NULL_TREE; | |
6698 } | |
6699 | |
6700 /* Handle a "target" attribute. */ | |
6701 | |
6702 static tree | |
6703 handle_target_attribute (tree *node, tree name, tree args, int flags, | |
6704 bool *no_add_attrs) | |
6705 { | |
6706 /* Ensure we have a function type. */ | |
6707 if (TREE_CODE (*node) != FUNCTION_DECL) | |
6708 { | |
6709 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6710 *no_add_attrs = true; | |
6711 } | |
6712 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node))) | |
6713 { | |
6714 warning (OPT_Wattributes, "%qE attribute ignored due to conflict " | |
6715 "with %qs attribute", name, "target_clones"); | |
6716 *no_add_attrs = true; | |
6717 } | |
6718 else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags)) | |
6719 *no_add_attrs = true; | |
6720 | |
6721 /* Check that there's no empty string in values of the attribute. */ | |
6722 for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t)) | |
6723 { | |
6724 tree value = TREE_VALUE (t); | |
6725 if (TREE_CODE (value) == STRING_CST | |
6726 && TREE_STRING_LENGTH (value) == 1 | |
6727 && TREE_STRING_POINTER (value)[0] == '\0') | |
6728 { | |
6729 warning (OPT_Wattributes, "empty string in attribute %<target%>"); | |
6730 *no_add_attrs = true; | |
6731 } | |
6732 } | |
6733 | |
6734 return NULL_TREE; | |
6735 } | |
6736 | |
6737 /* Handle a "target_clones" attribute. */ | |
6738 | |
6739 static tree | |
6740 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args), | |
6741 int ARG_UNUSED (flags), bool *no_add_attrs) | |
6742 { | |
6743 /* Ensure we have a function type. */ | |
6744 if (TREE_CODE (*node) == FUNCTION_DECL) | |
6745 { | |
6746 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node))) | |
6747 { | |
6748 warning (OPT_Wattributes, "%qE attribute ignored due to conflict " | |
6749 "with %qs attribute", name, "always_inline"); | |
6750 *no_add_attrs = true; | |
6751 } | |
6752 else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node))) | |
6753 { | |
6754 warning (OPT_Wattributes, "%qE attribute ignored due to conflict " | |
6755 "with %qs attribute", name, "target"); | |
6756 *no_add_attrs = true; | |
6757 } | |
6758 else | |
6759 /* Do not inline functions with multiple clone targets. */ | |
6760 DECL_UNINLINABLE (*node) = 1; | |
6761 } | |
6762 else | |
6763 { | |
6764 warning (OPT_Wattributes, "%qE attribute ignored", name); | |
6765 *no_add_attrs = true; | |
6766 } | |
6364 return NULL_TREE; | 6767 return NULL_TREE; |
6365 } | 6768 } |
6366 | 6769 |
6367 /* Handle a "vector_size" attribute; arguments as in | 6770 /* Handle a "vector_size" attribute; arguments as in |
6368 struct attribute_spec.handler. */ | 6771 struct attribute_spec.handler. */ |
6473 } | 6876 } |
6474 | 6877 |
/* NOTE(review): these mirror the C-family dialect flags and are presumably
   tested by the builtins.def machinery included below -- confirm.  All are
   left at 0 since the Ada front end enables no C dialect.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
static int flag_isoc2x = 0;
6479 /* Install what the common builtins.def offers. */ | 6882 |
6883 /* Install what the common builtins.def offers plus our local additions. | |
6884 | |
6885 Note that ada-builtins.def is included first so that locally redefined | |
6886 built-in functions take precedence over the commonly defined ones. */ | |
6480 | 6887 |
6481 static void | 6888 static void |
6482 install_builtin_functions (void) | 6889 install_builtin_functions (void) |
6483 { | 6890 { |
6484 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \ | 6891 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \ |
6487 def_builtin_1 (ENUM, NAME, CLASS, \ | 6894 def_builtin_1 (ENUM, NAME, CLASS, \ |
6488 builtin_types[(int) TYPE], \ | 6895 builtin_types[(int) TYPE], \ |
6489 builtin_types[(int) LIBTYPE], \ | 6896 builtin_types[(int) LIBTYPE], \ |
6490 BOTH_P, FALLBACK_P, NONANSI_P, \ | 6897 BOTH_P, FALLBACK_P, NONANSI_P, \ |
6491 built_in_attributes[(int) ATTRS], IMPLICIT); | 6898 built_in_attributes[(int) ATTRS], IMPLICIT); |
6899 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS) \ | |
6900 DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \ | |
6901 false, false, false, ATTRS, true, true) | |
6902 #include "ada-builtins.def" | |
6492 #include "builtins.def" | 6903 #include "builtins.def" |
6493 } | 6904 } |
6494 | 6905 |
6495 /* ----------------------------------------------------------------------- * | 6906 /* ----------------------------------------------------------------------- * |
6496 * BUILTIN FUNCTIONS * | 6907 * BUILTIN FUNCTIONS * |