comparison gcc/stor-layout.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author:   ryoma <e075725@ie.u-ryukyu.ac.jp>
date:     Fri, 12 Feb 2010 23:39:51 +0900
parents:  a06113de4d67
children: b7f97abdc517
52:c156f1bd5cd9 (old) | 55:77e2b8dfacca (new)
35 #include "ggc.h" | 35 #include "ggc.h" |
36 #include "target.h" | 36 #include "target.h" |
37 #include "langhooks.h" | 37 #include "langhooks.h" |
38 #include "regs.h" | 38 #include "regs.h" |
39 #include "params.h" | 39 #include "params.h" |
40 #include "cgraph.h" | |
41 #include "tree-inline.h" | |
42 #include "tree-dump.h" | |
43 #include "gimple.h" | |
40 | 44 |
41 /* Data type for the expressions representing sizes of data types. | 45 /* Data type for the expressions representing sizes of data types. |
42 It is the first integer type laid out. */ | 46 It is the first integer type laid out. */ |
43 tree sizetype_tab[(int) TYPE_KIND_LAST]; | 47 tree sizetype_tab[(int) TYPE_KIND_LAST]; |
44 | 48 |
46 The value is measured in bits. */ | 50 The value is measured in bits. */ |
47 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT; | 51 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT; |
48 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */ | 52 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */ |
49 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT; | 53 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT; |
50 | 54 |
51 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be | 55 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated |
52 allocated in Pmode, not ptr_mode. Set only by internal_reference_types | 56 in the address spaces' address_mode, not pointer_mode. Set only by |
53 called only by a front end. */ | 57 internal_reference_types called only by a front end. */ |
54 static int reference_types_internal = 0; | 58 static int reference_types_internal = 0; |
55 | 59 |
60 static tree self_referential_size (tree); | |
56 static void finalize_record_size (record_layout_info); | 61 static void finalize_record_size (record_layout_info); |
57 static void finalize_type_size (tree); | 62 static void finalize_type_size (tree); |
58 static void place_union_field (record_layout_info, tree); | 63 static void place_union_field (record_layout_info, tree); |
59 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED) | 64 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED) |
60 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, | 65 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, |
64 | 69 |
65 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */ | 70 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */ |
66 | 71 |
67 static GTY(()) tree pending_sizes; | 72 static GTY(()) tree pending_sizes; |
68 | 73 |
69 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only | 74 /* Show that REFERENCE_TYPES are internal and should use address_mode. |
70 by front end. */ | 75 Called only by front end. */ |
71 | 76 |
72 void | 77 void |
73 internal_reference_types (void) | 78 internal_reference_types (void) |
74 { | 79 { |
75 reference_types_internal = 1; | 80 reference_types_internal = 1; |
115 tree | 120 tree |
116 variable_size (tree size) | 121 variable_size (tree size) |
117 { | 122 { |
118 tree save; | 123 tree save; |
119 | 124 |
125 /* Obviously. */ | |
126 if (TREE_CONSTANT (size)) | |
127 return size; | |
128 | |
129 /* If the size is self-referential, we can't make a SAVE_EXPR (see | |
130 save_expr for the rationale). But we can do something else. */ | |
131 if (CONTAINS_PLACEHOLDER_P (size)) | |
132 return self_referential_size (size); | |
133 | |
120 /* If the language-processor is to take responsibility for variable-sized | 134 /* If the language-processor is to take responsibility for variable-sized |
121 items (e.g., languages which have elaboration procedures like Ada), | 135 items (e.g., languages which have elaboration procedures like Ada), |
122 just return SIZE unchanged. Likewise for self-referential sizes and | 136 just return SIZE unchanged. */ |
123 constant sizes. */ | 137 if (lang_hooks.decls.global_bindings_p () < 0) |
124 if (TREE_CONSTANT (size) | |
125 || lang_hooks.decls.global_bindings_p () < 0 | |
126 || CONTAINS_PLACEHOLDER_P (size)) | |
127 return size; | 138 return size; |
128 | 139 |
129 size = save_expr (size); | 140 size = save_expr (size); |
130 | 141 |
131 /* If an array with a variable number of elements is declared, and | 142 /* If an array with a variable number of elements is declared, and |
154 } | 165 } |
155 | 166 |
156 put_pending_size (save); | 167 put_pending_size (save); |
157 | 168 |
158 return size; | 169 return size; |
170 } | |
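
The reordered function above now handles constant and self-referential sizes up front and only then wraps the expression in a SAVE_EXPR. As a user-level intuition (a C analogy, not GCC internals), the SAVE_EXPR is what makes a variably modified type's size stick after its declaration:

    #include <stdio.h>

    int
    main (void)
    {
      int n = 4;
      int a[n];                    /* size expression evaluated once, here */
      n = 100;                     /* no effect on the already-laid-out type */
      printf ("%zu\n", sizeof a);  /* prints 16, assuming a 4-byte int */
      return 0;
    }
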
171 | |
172 /* An array of functions used for self-referential size computation. */ | |
173 static GTY(()) VEC (tree, gc) *size_functions; | |
174 | |
175 /* Similar to copy_tree_r but do not copy component references involving | |
176 PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr | |
177 and substituted in substitute_in_expr. */ | |
178 | |
179 static tree | |
180 copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data) | |
181 { | |
182 enum tree_code code = TREE_CODE (*tp); | |
183 | |
184 /* Stop at types, decls, constants like copy_tree_r. */ | |
185 if (TREE_CODE_CLASS (code) == tcc_type | |
186 || TREE_CODE_CLASS (code) == tcc_declaration | |
187 || TREE_CODE_CLASS (code) == tcc_constant) | |
188 { | |
189 *walk_subtrees = 0; | |
190 return NULL_TREE; | |
191 } | |
192 | |
193 /* This is the pattern built in ada/make_aligning_type. */ | |
194 else if (code == ADDR_EXPR | |
195 && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR) | |
196 { | |
197 *walk_subtrees = 0; | |
198 return NULL_TREE; | |
199 } | |
200 | |
201 /* Default case: the component reference. */ | |
202 else if (code == COMPONENT_REF) | |
203 { | |
204 tree inner; | |
205 for (inner = TREE_OPERAND (*tp, 0); | |
206 REFERENCE_CLASS_P (inner); | |
207 inner = TREE_OPERAND (inner, 0)) | |
208 ; | |
209 | |
210 if (TREE_CODE (inner) == PLACEHOLDER_EXPR) | |
211 { | |
212 *walk_subtrees = 0; | |
213 return NULL_TREE; | |
214 } | |
215 } | |
216 | |
217 /* We're not supposed to have them in self-referential size trees | |
218 because we wouldn't properly control when they are evaluated. | |
219 However, not creating superfluous SAVE_EXPRs requires accurate | |
220 tracking of readonly-ness all the way down to here, which we | |
221 cannot always guarantee in practice. So punt in this case. */ | |
222 else if (code == SAVE_EXPR) | |
223 return error_mark_node; | |
224 | |
225 return copy_tree_r (tp, walk_subtrees, data); | |
226 } | |
227 | |
228 /* Given a SIZE expression that is self-referential, return an equivalent | |
229 expression to serve as the actual size expression for a type. */ | |
230 | |
231 static tree | |
232 self_referential_size (tree size) | |
233 { | |
234 static unsigned HOST_WIDE_INT fnno = 0; | |
235 VEC (tree, heap) *self_refs = NULL; | |
236 tree param_type_list = NULL, param_decl_list = NULL, arg_list = NULL; | |
237 tree t, ref, return_type, fntype, fnname, fndecl; | |
238 unsigned int i; | |
239 char buf[128]; | |
240 | |
241 /* Do not factor out simple operations. */ | |
242 t = skip_simple_arithmetic (size); | |
243 if (TREE_CODE (t) == CALL_EXPR) | |
244 return size; | |
245 | |
246 /* Collect the list of self-references in the expression. */ | |
247 find_placeholder_in_expr (size, &self_refs); | |
248 gcc_assert (VEC_length (tree, self_refs) > 0); | |
249 | |
250 /* Obtain a private copy of the expression. */ | |
251 t = size; | |
252 if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE) | |
253 return size; | |
254 size = t; | |
255 | |
256 /* Build the parameter and argument lists in parallel; also | |
257 substitute the former for the latter in the expression. */ | |
258 for (i = 0; VEC_iterate (tree, self_refs, i, ref); i++) | |
259 { | |
260 tree subst, param_name, param_type, param_decl; | |
261 | |
262 if (DECL_P (ref)) | |
263 { | |
264 /* We shouldn't have true variables here. */ | |
265 gcc_assert (TREE_READONLY (ref)); | |
266 subst = ref; | |
267 } | |
268 /* This is the pattern built in ada/make_aligning_type. */ | |
269 else if (TREE_CODE (ref) == ADDR_EXPR) | |
270 subst = ref; | |
271 /* Default case: the component reference. */ | |
272 else | |
273 subst = TREE_OPERAND (ref, 1); | |
274 | |
275 sprintf (buf, "p%d", i); | |
276 param_name = get_identifier (buf); | |
277 param_type = TREE_TYPE (ref); | |
278 param_decl | |
279 = build_decl (input_location, PARM_DECL, param_name, param_type); | |
280 if (targetm.calls.promote_prototypes (NULL_TREE) | |
281 && INTEGRAL_TYPE_P (param_type) | |
282 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node)) | |
283 DECL_ARG_TYPE (param_decl) = integer_type_node; | |
284 else | |
285 DECL_ARG_TYPE (param_decl) = param_type; | |
286 DECL_ARTIFICIAL (param_decl) = 1; | |
287 TREE_READONLY (param_decl) = 1; | |
288 | |
289 size = substitute_in_expr (size, subst, param_decl); | |
290 | |
291 param_type_list = tree_cons (NULL_TREE, param_type, param_type_list); | |
292 param_decl_list = chainon (param_decl, param_decl_list); | |
293 arg_list = tree_cons (NULL_TREE, ref, arg_list); | |
294 } | |
295 | |
296 VEC_free (tree, heap, self_refs); | |
297 | |
298 /* Append 'void' to indicate that the number of parameters is fixed. */ | |
299 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list); | |
300 | |
301 /* The 3 lists have been created in reverse order. */ | |
302 param_type_list = nreverse (param_type_list); | |
303 param_decl_list = nreverse (param_decl_list); | |
304 arg_list = nreverse (arg_list); | |
305 | |
306 /* Build the function type. */ | |
307 return_type = TREE_TYPE (size); | |
308 fntype = build_function_type (return_type, param_type_list); | |
309 | |
310 /* Build the function declaration. */ | |
311 sprintf (buf, "SZ"HOST_WIDE_INT_PRINT_UNSIGNED, fnno++); | |
312 fnname = get_file_function_name (buf); | |
313 fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype); | |
314 for (t = param_decl_list; t; t = TREE_CHAIN (t)) | |
315 DECL_CONTEXT (t) = fndecl; | |
316 DECL_ARGUMENTS (fndecl) = param_decl_list; | |
317 DECL_RESULT (fndecl) | |
318 = build_decl (input_location, RESULT_DECL, 0, return_type); | |
319 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl; | |
320 | |
321 /* The function has been created by the compiler and we don't | |
322 want to emit debug info for it. */ | |
323 DECL_ARTIFICIAL (fndecl) = 1; | |
324 DECL_IGNORED_P (fndecl) = 1; | |
325 | |
326 /* It is supposed to be "const" and never throw. */ | |
327 TREE_READONLY (fndecl) = 1; | |
328 TREE_NOTHROW (fndecl) = 1; | |
329 | |
330 /* We want it to be inlined when this is deemed profitable, as | |
331 well as discarded if every call has been integrated. */ | |
332 DECL_DECLARED_INLINE_P (fndecl) = 1; | |
333 | |
334 /* It is made up of a unique return statement. */ | |
335 DECL_INITIAL (fndecl) = make_node (BLOCK); | |
336 BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl; | |
337 t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size); | |
338 DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t); | |
339 TREE_STATIC (fndecl) = 1; | |
340 | |
341 /* Put it onto the list of size functions. */ | |
342 VEC_safe_push (tree, gc, size_functions, fndecl); | |
343 | |
344 /* Replace the original expression with a call to the size function. */ | |
345 return build_function_call_expr (input_location, fndecl, arg_list); | |
346 } | |
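
Self-referential sizes come from front ends like Ada, where a record's size can depend on its own discriminant; C cannot express them directly. A hypothetical sketch of the factoring performed above, with the made-up name SZ0 standing for the generated size function and p0 for the parameter substituted for the PLACEHOLDER_EXPR:

    /* Conceptual result for a size expression "8 + N" where N is read
       from the object being sized (the self-reference).  */
    static unsigned long
    SZ0 (int p0)                  /* p0 replaces the PLACEHOLDER_EXPR */
    {
      return 8 + (unsigned long) p0;
    }

The type's size expression then becomes a call such as SZ0 (obj->n), which the GENERIC inliner folds back at each use site once the function is compiled by finalize_size_functions below.
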
347 | |
348 /* Take, queue and compile all the size functions. It is essential that | |
349 the size functions be gimplified at the very end of the compilation | |
350 in order to guarantee transparent handling of self-referential sizes. | |
351 Otherwise the GENERIC inliner would not be able to inline them back | |
352 at each of their call sites, thus creating artificial non-constant | |
353 size expressions which would trigger nasty problems later on. */ | |
354 | |
355 void | |
356 finalize_size_functions (void) | |
357 { | |
358 unsigned int i; | |
359 tree fndecl; | |
360 | |
361 for (i = 0; VEC_iterate(tree, size_functions, i, fndecl); i++) | |
362 { | |
363 dump_function (TDI_original, fndecl); | |
364 gimplify_function_tree (fndecl); | |
365 dump_function (TDI_generic, fndecl); | |
366 cgraph_finalize_function (fndecl, false); | |
367 } | |
368 | |
369 VEC_free (tree, gc, size_functions); | |
159 } | 370 } |
160 | 371 |
161 #ifndef MAX_FIXED_MODE_SIZE | 372 #ifndef MAX_FIXED_MODE_SIZE |
162 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode) | 373 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode) |
163 #endif | 374 #endif |
302 layout_decl (tree decl, unsigned int known_align) | 513 layout_decl (tree decl, unsigned int known_align) |
303 { | 514 { |
304 tree type = TREE_TYPE (decl); | 515 tree type = TREE_TYPE (decl); |
305 enum tree_code code = TREE_CODE (decl); | 516 enum tree_code code = TREE_CODE (decl); |
306 rtx rtl = NULL_RTX; | 517 rtx rtl = NULL_RTX; |
518 location_t loc = DECL_SOURCE_LOCATION (decl); | |
307 | 519 |
308 if (code == CONST_DECL) | 520 if (code == CONST_DECL) |
309 return; | 521 return; |
310 | 522 |
311 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL | 523 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL |
335 DECL_SIZE (decl) = TYPE_SIZE (type); | 547 DECL_SIZE (decl) = TYPE_SIZE (type); |
336 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); | 548 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); |
337 } | 549 } |
338 else if (DECL_SIZE_UNIT (decl) == 0) | 550 else if (DECL_SIZE_UNIT (decl) == 0) |
339 DECL_SIZE_UNIT (decl) | 551 DECL_SIZE_UNIT (decl) |
340 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl), | 552 = fold_convert_loc (loc, sizetype, |
341 bitsize_unit_node)); | 553 size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl), |
554 bitsize_unit_node)); | |
342 | 555 |
343 if (code != FIELD_DECL) | 556 if (code != FIELD_DECL) |
344 /* For non-fields, update the alignment from the type. */ | 557 /* For non-fields, update the alignment from the type. */ |
345 do_type_align (type, decl); | 558 do_type_align (type, decl); |
346 else | 559 else |
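
The _loc variants introduced here thread the declaration's source location into the folded expressions; the arithmetic is unchanged: DECL_SIZE is in bits, and DECL_SIZE_UNIT is that size converted to bytes, rounding up. A plain-C sketch of the CEIL_DIV_EXPR over bitsize_unit_node:

    /* E.g. a 20-bit DECL_SIZE yields a 3-byte DECL_SIZE_UNIT,
       assuming BITS_PER_UNIT == 8.  */
    static unsigned long
    bits_to_bytes_ceil (unsigned long bits)
    {
      return (bits + 8 - 1) / 8;
    }
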
781 /* We assume the union's size will be a multiple of a byte so we don't | 994 /* We assume the union's size will be a multiple of a byte so we don't |
782 bother with BITPOS. */ | 995 bother with BITPOS. */ |
783 if (TREE_CODE (rli->t) == UNION_TYPE) | 996 if (TREE_CODE (rli->t) == UNION_TYPE) |
784 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field)); | 997 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field)); |
785 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE) | 998 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE) |
786 rli->offset = fold_build3 (COND_EXPR, sizetype, | 999 rli->offset = fold_build3_loc (input_location, COND_EXPR, sizetype, |
787 DECL_QUALIFIER (field), | 1000 DECL_QUALIFIER (field), |
788 DECL_SIZE_UNIT (field), rli->offset); | 1001 DECL_SIZE_UNIT (field), rli->offset); |
789 } | 1002 } |
790 | 1003 |
791 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED) | 1004 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED) |
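
The MAX_EXPR branch in place_union_field implements the familiar rule that a union is as large as its largest member; the QUAL_UNION_TYPE branch is the Ada variant-record case, where DECL_QUALIFIER selects the active member. For example:

    union u
    {
      char c[3];
      int i;
    };
    /* Expected on common ABIs: sizeof (union u) == 4,
       i.e. MAX (3, 4) rounded up to the union's alignment.  */
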
903 && !targetm.ms_bitfield_layout_p (rli->t)) | 1116 && !targetm.ms_bitfield_layout_p (rli->t)) |
904 { | 1117 { |
905 /* No, we need to skip space before this field. | 1118 /* No, we need to skip space before this field. |
906 Bump the cumulative size to multiple of field alignment. */ | 1119 Bump the cumulative size to multiple of field alignment. */ |
907 | 1120 |
908 warning (OPT_Wpadded, "padding struct to align %q+D", field); | 1121 if (DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION) |
1122 warning (OPT_Wpadded, "padding struct to align %q+D", field); | |
909 | 1123 |
910 /* If the alignment is still within offset_align, just align | 1124 /* If the alignment is still within offset_align, just align |
911 the bit position. */ | 1125 the bit position. */ |
912 if (desired_align < rli->offset_align) | 1126 if (desired_align < rli->offset_align) |
913 rli->bitpos = round_up (rli->bitpos, desired_align); | 1127 rli->bitpos = round_up (rli->bitpos, desired_align); |
968 (input_location, | 1182 (input_location, |
969 "Offset of packed bit-field %qD has changed in GCC 4.4", | 1183 "Offset of packed bit-field %qD has changed in GCC 4.4", |
970 field); | 1184 field); |
971 } | 1185 } |
972 else | 1186 else |
973 rli->bitpos = round_up (rli->bitpos, type_align); | 1187 rli->bitpos = round_up_loc (input_location, rli->bitpos, type_align); |
974 } | 1188 } |
975 | 1189 |
976 if (! DECL_PACKED (field)) | 1190 if (! DECL_PACKED (field)) |
977 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); | 1191 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); |
978 } | 1192 } |
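
The added BUILTINS_LOCATION guard keeps -Wpadded quiet for compiler-generated fields; user-written fields still warn. A struct expected to trigger the warning on typical 32- and 64-bit ABIs, since three bytes of padding are inserted before i:

    struct padded
    {
      char c;                    /* offset 0 */
      int i;                     /* offset 4, after 3 bytes of padding */
    };
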
1148 type_align = TYPE_ALIGN (TREE_TYPE (field)); | 1362 type_align = TYPE_ALIGN (TREE_TYPE (field)); |
1149 | 1363 |
1150 if (maximum_field_alignment != 0) | 1364 if (maximum_field_alignment != 0) |
1151 type_align = MIN (type_align, maximum_field_alignment); | 1365 type_align = MIN (type_align, maximum_field_alignment); |
1152 | 1366 |
1153 rli->bitpos = round_up (rli->bitpos, type_align); | 1367 rli->bitpos = round_up_loc (input_location, rli->bitpos, type_align); |
1154 | 1368 |
1155 /* If we really aligned, don't allow subsequent bitfields | 1369 /* If we really aligned, don't allow subsequent bitfields |
1156 to undo that. */ | 1370 to undo that. */ |
1157 rli->prev_field = NULL; | 1371 rli->prev_field = NULL; |
1158 } | 1372 } |
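
maximum_field_alignment is the bit-level form of -fpack-struct=<n> (initial_max_fld_align near the top of the file keeps the byte value), and the MIN above caps how much alignment a bitfield's type may demand. Assuming a typical ABI:

    /* With -fpack-struct=1 (maximum_field_alignment == 8 bits) the int
       below is expected to land at offset 1 instead of offset 4.  */
    struct capped
    {
      char c;
      int i;
    };
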
1262 if (! integer_zerop (rli->bitpos)) | 1476 if (! integer_zerop (rli->bitpos)) |
1263 unpadded_size_unit | 1477 unpadded_size_unit |
1264 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node); | 1478 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node); |
1265 | 1479 |
1266 /* Round the size up to be a multiple of the required alignment. */ | 1480 /* Round the size up to be a multiple of the required alignment. */ |
1267 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t)); | 1481 TYPE_SIZE (rli->t) = round_up_loc (input_location, unpadded_size, |
1482 TYPE_ALIGN (rli->t)); | |
1268 TYPE_SIZE_UNIT (rli->t) | 1483 TYPE_SIZE_UNIT (rli->t) |
1269 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t)); | 1484 = round_up_loc (input_location, unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t)); |
1270 | 1485 |
1271 if (TREE_CONSTANT (unpadded_size) | 1486 if (TREE_CONSTANT (unpadded_size) |
1272 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0) | 1487 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0 |
1488 && input_location != BUILTINS_LOCATION) | |
1273 warning (OPT_Wpadded, "padding struct size to alignment boundary"); | 1489 warning (OPT_Wpadded, "padding struct size to alignment boundary"); |
1274 | 1490 |
1275 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE | 1491 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE |
1276 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary | 1492 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary |
1277 && TREE_CONSTANT (unpadded_size)) | 1493 && TREE_CONSTANT (unpadded_size)) |
1283 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align); | 1499 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align); |
1284 #else | 1500 #else |
1285 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align); | 1501 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align); |
1286 #endif | 1502 #endif |
1287 | 1503 |
1288 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align); | 1504 unpacked_size = round_up_loc (input_location, TYPE_SIZE (rli->t), rli->unpacked_align); |
1289 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t))) | 1505 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t))) |
1290 { | 1506 { |
1291 TYPE_PACKED (rli->t) = 0; | 1507 TYPE_PACKED (rli->t) = 0; |
1292 | 1508 |
1293 if (TYPE_NAME (rli->t)) | 1509 if (TYPE_NAME (rli->t)) |
1294 { | 1510 { |
1295 const char *name; | 1511 tree name; |
1296 | 1512 |
1297 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE) | 1513 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE) |
1298 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t)); | 1514 name = TYPE_NAME (rli->t); |
1299 else | 1515 else |
1300 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t))); | 1516 name = DECL_NAME (TYPE_NAME (rli->t)); |
1301 | 1517 |
1302 if (STRICT_ALIGNMENT) | 1518 if (STRICT_ALIGNMENT) |
1303 warning (OPT_Wpacked, "packed attribute causes inefficient " | 1519 warning (OPT_Wpacked, "packed attribute causes inefficient " |
1304 "alignment for %qs", name); | 1520 "alignment for %qE", name); |
1305 else | 1521 else |
1306 warning (OPT_Wpacked, | 1522 warning (OPT_Wpacked, |
1307 "packed attribute is unnecessary for %qs", name); | 1523 "packed attribute is unnecessary for %qE", name); |
1308 } | 1524 } |
1309 else | 1525 else |
1310 { | 1526 { |
1311 if (STRICT_ALIGNMENT) | 1527 if (STRICT_ALIGNMENT) |
1312 warning (OPT_Wpacked, | 1528 warning (OPT_Wpacked, |
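
This diagnostic block, which now prints the type name via %qE from a tree instead of %qs from a C string, fires when packing turned out to be a no-op. A case where -Wpacked is expected to call the attribute unnecessary, since the natural layout already has no padding:

    struct no_gain
    {
      char a;
      char b;
    } __attribute__ ((packed));
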
1437 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type), | 1653 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type), |
1438 bitsize_unit_node)); | 1654 bitsize_unit_node)); |
1439 | 1655 |
1440 if (TYPE_SIZE (type) != 0) | 1656 if (TYPE_SIZE (type) != 0) |
1441 { | 1657 { |
1442 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type)); | 1658 TYPE_SIZE (type) = round_up_loc (input_location, |
1443 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type), | 1659 TYPE_SIZE (type), TYPE_ALIGN (type)); |
1660 TYPE_SIZE_UNIT (type) = round_up_loc (input_location, TYPE_SIZE_UNIT (type), | |
1444 TYPE_ALIGN_UNIT (type)); | 1661 TYPE_ALIGN_UNIT (type)); |
1445 } | 1662 } |
1446 | 1663 |
1447 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */ | 1664 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */ |
1448 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | 1665 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) |
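
These round_up_loc calls are the reason sizeof is always a multiple of the type's alignment. An over-aligned one-byte struct illustrates the effect, assuming a compiler that honors the GNU aligned attribute:

    struct aligned_char
    {
      char c;
    } __attribute__ ((aligned (8)));
    /* Expected: sizeof (struct aligned_char) == 8; the single payload
       byte is followed by 7 bytes of tail padding.  */
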
1545 | 1762 |
1546 layout_type (type); | 1763 layout_type (type); |
1547 #if 0 /* not yet, should get fixed properly later */ | 1764 #if 0 /* not yet, should get fixed properly later */ |
1548 TYPE_NAME (type) = make_type_decl (get_identifier (name), type); | 1765 TYPE_NAME (type) = make_type_decl (get_identifier (name), type); |
1549 #else | 1766 #else |
1550 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type); | 1767 TYPE_NAME (type) = build_decl (BUILTINS_LOCATION, |
1768 TYPE_DECL, get_identifier (name), type); | |
1551 #endif | 1769 #endif |
1552 TYPE_STUB_DECL (type) = TYPE_NAME (type); | 1770 TYPE_STUB_DECL (type) = TYPE_NAME (type); |
1553 layout_decl (TYPE_NAME (type), 0); | 1771 layout_decl (TYPE_NAME (type), 0); |
1554 } | 1772 } |
1555 | 1773 |
1697 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); | 1915 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); |
1698 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT); | 1916 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT); |
1699 /* A pointer might be MODE_PARTIAL_INT, | 1917 /* A pointer might be MODE_PARTIAL_INT, |
1700 but ptrdiff_t must be integral. */ | 1918 but ptrdiff_t must be integral. */ |
1701 SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); | 1919 SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); |
1920 TYPE_PRECISION (type) = POINTER_SIZE; | |
1702 break; | 1921 break; |
1703 | 1922 |
1704 case FUNCTION_TYPE: | 1923 case FUNCTION_TYPE: |
1705 case METHOD_TYPE: | 1924 case METHOD_TYPE: |
1706 /* It's hard to see what the mode and size of a function ought to | 1925 /* It's hard to see what the mode and size of a function ought to |
1712 break; | 1931 break; |
1713 | 1932 |
1714 case POINTER_TYPE: | 1933 case POINTER_TYPE: |
1715 case REFERENCE_TYPE: | 1934 case REFERENCE_TYPE: |
1716 { | 1935 { |
1717 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE | 1936 enum machine_mode mode = TYPE_MODE (type); |
1718 && reference_types_internal) | 1937 if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal) |
1719 ? Pmode : TYPE_MODE (type)); | 1938 { |
1720 | 1939 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type)); |
1721 int nbits = GET_MODE_BITSIZE (mode); | 1940 mode = targetm.addr_space.address_mode (as); |
1722 | 1941 } |
1723 TYPE_SIZE (type) = bitsize_int (nbits); | 1942 |
1943 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode)); | |
1724 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); | 1944 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); |
1725 TYPE_UNSIGNED (type) = 1; | 1945 TYPE_UNSIGNED (type) = 1; |
1726 TYPE_PRECISION (type) = nbits; | 1946 TYPE_PRECISION (type) = GET_MODE_BITSIZE (mode); |
1727 } | 1947 } |
1728 break; | 1948 break; |
1729 | 1949 |
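
The rewritten case sizes REFERENCE_TYPEs from the pointed-to address space's address_mode rather than a global Pmode, which matters on targets with multiple address spaces. From the user's side, the observable rule is simply that pointer size tracks the mode; e.g., on a typical LP64 target (ISO C does not require these equalities):

    #include <assert.h>

    int
    main (void)
    {
      assert (sizeof (int *) == 8);
      assert (sizeof (char *) == sizeof (int *));
      return 0;
    }
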
1730 case ARRAY_TYPE: | 1950 case ARRAY_TYPE: |
1731 { | 1951 { |
1738 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) | 1958 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) |
1739 && TYPE_SIZE (element)) | 1959 && TYPE_SIZE (element)) |
1740 { | 1960 { |
1741 tree ub = TYPE_MAX_VALUE (index); | 1961 tree ub = TYPE_MAX_VALUE (index); |
1742 tree lb = TYPE_MIN_VALUE (index); | 1962 tree lb = TYPE_MIN_VALUE (index); |
1963 tree element_size = TYPE_SIZE (element); | |
1743 tree length; | 1964 tree length; |
1744 tree element_size; | 1965 |
1966 /* Make sure that an array of zero-sized element is zero-sized | |
1967 regardless of its extent. */ | |
1968 if (integer_zerop (element_size)) | |
1969 length = size_zero_node; | |
1745 | 1970 |
1746 /* The initial subtraction should happen in the original type so | 1971 /* The initial subtraction should happen in the original type so |
1747 that (possible) negative values are handled appropriately. */ | 1972 that (possible) negative values are handled appropriately. */ |
1748 length = size_binop (PLUS_EXPR, size_one_node, | 1973 else |
1749 fold_convert (sizetype, | 1974 length |
1750 fold_build2 (MINUS_EXPR, | 1975 = size_binop (PLUS_EXPR, size_one_node, |
1751 TREE_TYPE (lb), | 1976 fold_convert (sizetype, |
1752 ub, lb))); | 1977 fold_build2_loc (input_location, |
1753 | 1978 MINUS_EXPR, |
1754 /* Special handling for arrays of bits (for Chill). */ | 1979 TREE_TYPE (lb), |
1755 element_size = TYPE_SIZE (element); | 1980 ub, lb))); |
1756 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element) | |
1757 && (integer_zerop (TYPE_MAX_VALUE (element)) | |
1758 || integer_onep (TYPE_MAX_VALUE (element))) | |
1759 && host_integerp (TYPE_MIN_VALUE (element), 1)) | |
1760 { | |
1761 HOST_WIDE_INT maxvalue | |
1762 = tree_low_cst (TYPE_MAX_VALUE (element), 1); | |
1763 HOST_WIDE_INT minvalue | |
1764 = tree_low_cst (TYPE_MIN_VALUE (element), 1); | |
1765 | |
1766 if (maxvalue - minvalue == 1 | |
1767 && (maxvalue == 1 || maxvalue == 0)) | |
1768 element_size = integer_one_node; | |
1769 } | |
1770 | |
1771 /* If neither bound is a constant and sizetype is signed, make | |
1772 sure the size is never negative. We should really do this | |
1773 if *either* bound is non-constant, but this is the best | |
1774 compromise between C and Ada. */ | |
1775 if (!TYPE_UNSIGNED (sizetype) | |
1776 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST | |
1777 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST) | |
1778 length = size_binop (MAX_EXPR, length, size_zero_node); | |
1779 | 1981 |
1780 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, | 1982 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, |
1781 fold_convert (bitsizetype, | 1983 fold_convert (bitsizetype, |
1782 length)); | 1984 length)); |
1783 | 1985 |
1784 /* If we know the size of the element, calculate the total | 1986 /* If we know the size of the element, calculate the total size |
1785 size directly, rather than do some division thing below. | 1987 directly, rather than do some division thing below. This |
1786 This optimization helps Fortran assumed-size arrays | 1988 optimization helps Fortran assumed-size arrays (where the |
1787 (where the size of the array is determined at runtime) | 1989 size of the array is determined at runtime) substantially. */ |
1788 substantially. | 1990 if (TYPE_SIZE_UNIT (element)) |
1789 Note that we can't do this in the case where the size of | |
1790 the elements is one bit since TYPE_SIZE_UNIT cannot be | |
1791 set correctly in that case. */ | |
1792 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size)) | |
1793 TYPE_SIZE_UNIT (type) | 1991 TYPE_SIZE_UNIT (type) |
1794 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length); | 1992 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length); |
1795 } | 1993 } |
1796 | 1994 |
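
The restructured branch adds the zero-size special case and drops both the old Chill bit-array handling and the signed-sizetype clamp. The surviving arithmetic is TYPE_SIZE (array) = TYPE_SIZE (element) * (ub - lb + 1). With GCC's zero-sized-struct extension the new special case is observable:

    struct empty {};             /* GNU C: sizeof (struct empty) == 0 */
    /* Expected with GCC: an array of zero-sized elements is itself
       zero-sized, whatever its extent; this compiles only if so.  */
    typedef char check_zero[sizeof (struct empty [1000]) == 0 ? 1 : -1];
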
1797 /* Now round the alignment and size, | 1995 /* Now round the alignment and size, |
1898 /* Vector types need to re-check the target flags each time we report | 2096 /* Vector types need to re-check the target flags each time we report |
1899 the machine mode. We need to do this because attribute target can | 2097 the machine mode. We need to do this because attribute target can |
1900 change the result of vector_mode_supported_p and have_regs_of_mode | 2098 change the result of vector_mode_supported_p and have_regs_of_mode |
1901 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can | 2099 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can |
1902 change on a per-function basis. */ | 2100 change on a per-function basis. */ |
1903 /* ??? Possibly a better solution is to run through all the types | 2101 /* ??? Possibly a better solution is to run through all the types |
1904 referenced by a function and re-compute the TYPE_MODE once, rather | 2102 referenced by a function and re-compute the TYPE_MODE once, rather |
1905 than make the TYPE_MODE macro call a function. */ | 2103 than make the TYPE_MODE macro call a function. */ |
1906 | 2104 |
1907 enum machine_mode | 2105 enum machine_mode |
1908 vector_type_mode (const_tree t) | 2106 vector_type_mode (const_tree t) |
2044 INTEGER_CSTs created with such a type, remain valid. */ | 2242 INTEGER_CSTs created with such a type, remain valid. */ |
2045 | 2243 |
2046 void | 2244 void |
2047 set_sizetype (tree type) | 2245 set_sizetype (tree type) |
2048 { | 2246 { |
2247 tree t; | |
2049 int oprecision = TYPE_PRECISION (type); | 2248 int oprecision = TYPE_PRECISION (type); |
2050 /* The *bitsizetype types use a precision that avoids overflows when | 2249 /* The *bitsizetype types use a precision that avoids overflows when |
2051 calculating signed sizes / offsets in bits. However, when | 2250 calculating signed sizes / offsets in bits. However, when |
2052 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit | 2251 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit |
2053 precision. */ | 2252 precision. */ |
2054 int precision = MIN (MIN (oprecision + BITS_PER_UNIT_LOG + 1, | 2253 int precision |
2055 MAX_FIXED_MODE_SIZE), | 2254 = MIN (oprecision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE); |
2056 2 * HOST_BITS_PER_WIDE_INT); | 2255 precision |
2057 tree t; | 2256 = GET_MODE_PRECISION (smallest_mode_for_size (precision, MODE_INT)); |
2257 if (precision > HOST_BITS_PER_WIDE_INT * 2) | |
2258 precision = HOST_BITS_PER_WIDE_INT * 2; | |
2058 | 2259 |
2059 gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype)); | 2260 gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype)); |
2060 | 2261 |
2061 t = build_distinct_type_copy (type); | 2262 t = build_distinct_type_copy (type); |
2062 /* We do want to use sizetype's cache, as we will be replacing that | 2263 /* We do want to use sizetype's cache, as we will be replacing that |
2068 TYPE_IS_SIZETYPE (t) = 1; | 2269 TYPE_IS_SIZETYPE (t) = 1; |
2069 | 2270 |
2070 /* Replace our original stub sizetype. */ | 2271 /* Replace our original stub sizetype. */ |
2071 memcpy (sizetype, t, tree_size (sizetype)); | 2272 memcpy (sizetype, t, tree_size (sizetype)); |
2072 TYPE_MAIN_VARIANT (sizetype) = sizetype; | 2273 TYPE_MAIN_VARIANT (sizetype) = sizetype; |
2274 TYPE_CANONICAL (sizetype) = sizetype; | |
2073 | 2275 |
2074 t = make_node (INTEGER_TYPE); | 2276 t = make_node (INTEGER_TYPE); |
2075 TYPE_NAME (t) = get_identifier ("bit_size_type"); | 2277 TYPE_NAME (t) = get_identifier ("bit_size_type"); |
2076 /* We do want to use bitsizetype's cache, as we will be replacing that | 2278 /* We do want to use bitsizetype's cache, as we will be replacing that |
2077 type. */ | 2279 type. */ |
2082 TYPE_IS_SIZETYPE (t) = 1; | 2284 TYPE_IS_SIZETYPE (t) = 1; |
2083 | 2285 |
2084 /* Replace our original stub bitsizetype. */ | 2286 /* Replace our original stub bitsizetype. */ |
2085 memcpy (bitsizetype, t, tree_size (bitsizetype)); | 2287 memcpy (bitsizetype, t, tree_size (bitsizetype)); |
2086 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype; | 2288 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype; |
2289 TYPE_CANONICAL (bitsizetype) = bitsizetype; | |
2087 | 2290 |
2088 if (TYPE_UNSIGNED (type)) | 2291 if (TYPE_UNSIGNED (type)) |
2089 { | 2292 { |
2090 fixup_unsigned_type (bitsizetype); | 2293 fixup_unsigned_type (bitsizetype); |
2091 ssizetype = build_distinct_type_copy (make_signed_type (oprecision)); | 2294 ssizetype = make_signed_type (oprecision); |
2092 TYPE_IS_SIZETYPE (ssizetype) = 1; | 2295 TYPE_IS_SIZETYPE (ssizetype) = 1; |
2093 sbitsizetype = build_distinct_type_copy (make_signed_type (precision)); | 2296 sbitsizetype = make_signed_type (precision); |
2094 TYPE_IS_SIZETYPE (sbitsizetype) = 1; | 2297 TYPE_IS_SIZETYPE (sbitsizetype) = 1; |
2095 } | 2298 } |
2096 else | 2299 else |
2097 { | 2300 { |
2098 fixup_signed_type (bitsizetype); | 2301 fixup_signed_type (bitsizetype); |
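
The new precision computation rounds up to a full integer mode before clamping, instead of clamping the raw bit count. Worked through for a 32-bit sizetype on a 64-bit host, assuming BITS_PER_UNIT_LOG == 3, a 64-bit HOST_WIDE_INT, and the usual QI/HI/SI/DI integer modes:

    oprecision                         = 32
    oprecision + BITS_PER_UNIT_LOG + 1 = 32 + 3 + 1 = 36
    smallest_mode_for_size (36)        = DImode, so precision = 64
    64 <= 2 * HOST_BITS_PER_WIDE_INT   = 128, so no clamping

so bitsizetype ends up with 64 bits of precision.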