comparison gcc/stor-layout.c @ 16:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
15:561a7518be6b 16:04ced10e8804
1 /* C-compiler utilities for types and variables storage layout 1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998, 2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 3
6 This file is part of GCC. 4 This file is part of GCC.
7 5
8 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
21 19
22 20
23 #include "config.h" 21 #include "config.h"
24 #include "system.h" 22 #include "system.h"
25 #include "coretypes.h" 23 #include "coretypes.h"
26 #include "tm.h" 24 #include "target.h"
25 #include "function.h"
26 #include "rtl.h"
27 #include "tree.h" 27 #include "tree.h"
28 #include "rtl.h" 28 #include "memmodel.h"
29 #include "tm_p.h" 29 #include "tm_p.h"
30 #include "flags.h" 30 #include "stringpool.h"
31 #include "function.h" 31 #include "regs.h"
32 #include "expr.h" 32 #include "emit-rtl.h"
33 #include "output.h" 33 #include "cgraph.h"
34 #include "diagnostic-core.h" 34 #include "diagnostic-core.h"
35 #include "ggc.h" 35 #include "fold-const.h"
36 #include "target.h" 36 #include "stor-layout.h"
37 #include "varasm.h"
38 #include "print-tree.h"
37 #include "langhooks.h" 39 #include "langhooks.h"
38 #include "regs.h"
39 #include "params.h"
40 #include "cgraph.h"
41 #include "tree-inline.h" 40 #include "tree-inline.h"
42 #include "tree-dump.h" 41 #include "dumpfile.h"
43 #include "gimple.h" 42 #include "gimplify.h"
43 #include "debug.h"
44 44
45 /* Data type for the expressions representing sizes of data types. 45 /* Data type for the expressions representing sizes of data types.
46 It is the first integer type laid out. */ 46 It is the first integer type laid out. */
47 tree sizetype_tab[(int) TYPE_KIND_LAST]; 47 tree sizetype_tab[(int) stk_type_kind_last];
48 48
49 /* If nonzero, this is an upper limit on alignment of structure fields. 49 /* If nonzero, this is an upper limit on alignment of structure fields.
50 The value is measured in bits. */ 50 The value is measured in bits. */
51 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT; 51 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
52
53 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated
54 in the address spaces' address_mode, not pointer_mode. Set only by
55 internal_reference_types called only by a front end. */
56 static int reference_types_internal = 0;
57 52
58 static tree self_referential_size (tree); 53 static tree self_referential_size (tree);
59 static void finalize_record_size (record_layout_info); 54 static void finalize_record_size (record_layout_info);
60 static void finalize_type_size (tree); 55 static void finalize_type_size (tree);
61 static void place_union_field (record_layout_info, tree); 56 static void place_union_field (record_layout_info, tree);
62 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
63 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, 57 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
64 HOST_WIDE_INT, tree); 58 HOST_WIDE_INT, tree);
65 #endif
66 extern void debug_rli (record_layout_info); 59 extern void debug_rli (record_layout_info);
67 60
68 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
69
70 static GTY(()) VEC(tree,gc) *pending_sizes;
71
72 /* Show that REFERENCE_TYPES are internal and should use address_mode.
73 Called only by front end. */
74
75 void
76 internal_reference_types (void)
77 {
78 reference_types_internal = 1;
79 }
80
81 /* Get a VEC of all the objects put on the pending sizes list. */
82
83 VEC(tree,gc) *
84 get_pending_sizes (void)
85 {
86 VEC(tree,gc) *chain = pending_sizes;
87
88 pending_sizes = 0;
89 return chain;
90 }
91
92 /* Add EXPR to the pending sizes list. */
93
94 void
95 put_pending_size (tree expr)
96 {
97 /* Strip any simple arithmetic from EXPR to see if it has an underlying
98 SAVE_EXPR. */
99 expr = skip_simple_arithmetic (expr);
100
101 if (TREE_CODE (expr) == SAVE_EXPR)
102 VEC_safe_push (tree, gc, pending_sizes, expr);
103 }
104
105 /* Put a chain of objects into the pending sizes list, which must be
106 empty. */
107
108 void
109 put_pending_sizes (VEC(tree,gc) *chain)
110 {
111 gcc_assert (!pending_sizes);
112 pending_sizes = chain;
113 }
114
115 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR 61 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
116 to serve as the actual size-expression for a type or decl. */ 62 to serve as the actual size-expression for a type or decl. */
117 63
118 tree 64 tree
119 variable_size (tree size) 65 variable_size (tree size)
120 { 66 {
121 tree save;
122
123 /* Obviously. */ 67 /* Obviously. */
124 if (TREE_CONSTANT (size)) 68 if (TREE_CONSTANT (size))
125 return size; 69 return size;
126 70
127 /* If the size is self-referential, we can't make a SAVE_EXPR (see 71 /* If the size is self-referential, we can't make a SAVE_EXPR (see
128 save_expr for the rationale). But we can do something else. */ 72 save_expr for the rationale). But we can do something else. */
129 if (CONTAINS_PLACEHOLDER_P (size)) 73 if (CONTAINS_PLACEHOLDER_P (size))
130 return self_referential_size (size); 74 return self_referential_size (size);
131 75
132 /* If the language-processor is to take responsibility for variable-sized 76 /* If we are in the global binding level, we can't make a SAVE_EXPR
133 items (e.g., languages which have elaboration procedures like Ada), 77 since it may end up being shared across functions, so it is up
134 just return SIZE unchanged. */ 78 to the front-end to deal with this case. */
135 if (lang_hooks.decls.global_bindings_p () < 0) 79 if (lang_hooks.decls.global_bindings_p ())
136 return size; 80 return size;
137 81
138 size = save_expr (size); 82 return save_expr (size);
139
140 /* If an array with a variable number of elements is declared, and
141 the elements require destruction, we will emit a cleanup for the
142 array. That cleanup is run both on normal exit from the block
143 and in the exception-handler for the block. Normally, when code
144 is used in both ordinary code and in an exception handler it is
145 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
146 not wish to do that here; the array-size is the same in both
147 places. */
148 save = skip_simple_arithmetic (size);
149
150 if (cfun && cfun->dont_save_pending_sizes_p)
151 /* The front-end doesn't want us to keep a list of the expressions
152 that determine sizes for variable size objects. Trust it. */
153 return size;
154
155 if (lang_hooks.decls.global_bindings_p ())
156 {
157 if (TREE_CONSTANT (size))
158 error ("type size can%'t be explicitly evaluated");
159 else
160 error ("variable-size type declared outside of any function");
161
162 return size_one_node;
163 }
164
165 put_pending_size (save);
166
167 return size;
168 } 83 }
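
Old variable_size kept a pending-sizes list; the new one is just the
TREE_CONSTANT / CONTAINS_PLACEHOLDER_P / global_bindings_p checks around
save_expr: a non-constant size is evaluated once and the saved value is
reused everywhere the type's size is needed. A standalone C model of that
single-evaluation contract (illustration only, not GCC API):

    #include <stdio.h>

    static int n_calls;
    static int bound (void) { return ++n_calls, 10; }  /* the size expression */

    int main (void)
    {
      int saved = bound ();   /* plays the role of the SAVE_EXPR */
      int a[saved];           /* every later use reads the saved value */
      printf ("%d elems, %d call(s)\n", (int) (sizeof a / sizeof a[0]), n_calls);
      return 0;               /* prints "10 elems, 1 call(s)" */
    }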
169 84
170 /* An array of functions used for self-referential size computation. */ 85 /* An array of functions used for self-referential size computation. */
171 static GTY(()) VEC (tree, gc) *size_functions; 86 static GTY(()) vec<tree, va_gc> *size_functions;
172 87
173 /* Look inside EXPR into simple arithmetic operations involving constants. 88 /* Return true if T is a self-referential component reference. */
174 Return the outermost non-arithmetic or non-constant node. */ 89
175 90 static bool
176 static tree 91 self_referential_component_ref_p (tree t)
177 skip_simple_constant_arithmetic (tree expr) 92 {
178 { 93 if (TREE_CODE (t) != COMPONENT_REF)
179 while (true) 94 return false;
180 { 95
181 if (UNARY_CLASS_P (expr)) 96 while (REFERENCE_CLASS_P (t))
182 expr = TREE_OPERAND (expr, 0); 97 t = TREE_OPERAND (t, 0);
183 else if (BINARY_CLASS_P (expr)) 98
184 { 99 return (TREE_CODE (t) == PLACEHOLDER_EXPR);
185 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
186 expr = TREE_OPERAND (expr, 0);
187 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
188 expr = TREE_OPERAND (expr, 1);
189 else
190 break;
191 }
192 else
193 break;
194 }
195
196 return expr;
197 } 100 }
198 101
199 /* Similar to copy_tree_r but do not copy component references involving 102 /* Similar to copy_tree_r but do not copy component references involving
200 PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr 103 PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr
201 and substituted in substitute_in_expr. */ 104 and substituted in substitute_in_expr. */
221 *walk_subtrees = 0; 124 *walk_subtrees = 0;
222 return NULL_TREE; 125 return NULL_TREE;
223 } 126 }
224 127
225 /* Default case: the component reference. */ 128 /* Default case: the component reference. */
226 else if (code == COMPONENT_REF) 129 else if (self_referential_component_ref_p (*tp))
227 { 130 {
228 tree inner; 131 *walk_subtrees = 0;
229 for (inner = TREE_OPERAND (*tp, 0); 132 return NULL_TREE;
230 REFERENCE_CLASS_P (inner);
231 inner = TREE_OPERAND (inner, 0))
232 ;
233
234 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
235 {
236 *walk_subtrees = 0;
237 return NULL_TREE;
238 }
239 } 133 }
240 134
241 /* We're not supposed to have them in self-referential size trees 135 /* We're not supposed to have them in self-referential size trees
242 because we wouldn't properly control when they are evaluated. 136 because we wouldn't properly control when they are evaluated.
243 However, not creating superfluous SAVE_EXPRs requires accurate 137 However, not creating superfluous SAVE_EXPRs requires accurate
244 tracking of readonly-ness all the way down to here, which we 138 tracking of readonly-ness all the way down to here, which we
245 cannot always guarantee in practice. So punt in this case. */ 139 cannot always guarantee in practice. So punt in this case. */
246 else if (code == SAVE_EXPR) 140 else if (code == SAVE_EXPR)
247 return error_mark_node; 141 return error_mark_node;
248 142
143 else if (code == STATEMENT_LIST)
144 gcc_unreachable ();
145
249 return copy_tree_r (tp, walk_subtrees, data); 146 return copy_tree_r (tp, walk_subtrees, data);
250 } 147 }
251 148
252 /* Given a SIZE expression that is self-referential, return an equivalent 149 /* Given a SIZE expression that is self-referential, return an equivalent
253 expression to serve as the actual size expression for a type. */ 150 expression to serve as the actual size expression for a type. */
254 151
255 static tree 152 static tree
256 self_referential_size (tree size) 153 self_referential_size (tree size)
257 { 154 {
258 static unsigned HOST_WIDE_INT fnno = 0; 155 static unsigned HOST_WIDE_INT fnno = 0;
259 VEC (tree, heap) *self_refs = NULL; 156 vec<tree> self_refs = vNULL;
260 tree param_type_list = NULL, param_decl_list = NULL; 157 tree param_type_list = NULL, param_decl_list = NULL;
261 tree t, ref, return_type, fntype, fnname, fndecl; 158 tree t, ref, return_type, fntype, fnname, fndecl;
262 unsigned int i; 159 unsigned int i;
263 char buf[128]; 160 char buf[128];
264 VEC(tree,gc) *args = NULL; 161 vec<tree, va_gc> *args = NULL;
265 162
266 /* Do not factor out simple operations. */ 163 /* Do not factor out simple operations. */
267 t = skip_simple_constant_arithmetic (size); 164 t = skip_simple_constant_arithmetic (size);
268 if (TREE_CODE (t) == CALL_EXPR) 165 if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
269 return size; 166 return size;
270 167
271 /* Collect the list of self-references in the expression. */ 168 /* Collect the list of self-references in the expression. */
272 find_placeholder_in_expr (size, &self_refs); 169 find_placeholder_in_expr (size, &self_refs);
273 gcc_assert (VEC_length (tree, self_refs) > 0); 170 gcc_assert (self_refs.length () > 0);
274 171
275 /* Obtain a private copy of the expression. */ 172 /* Obtain a private copy of the expression. */
276 t = size; 173 t = size;
277 if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE) 174 if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
278 return size; 175 return size;
279 size = t; 176 size = t;
280 177
281 /* Build the parameter and argument lists in parallel; also 178 /* Build the parameter and argument lists in parallel; also
282 substitute the former for the latter in the expression. */ 179 substitute the former for the latter in the expression. */
283 args = VEC_alloc (tree, gc, VEC_length (tree, self_refs)); 180 vec_alloc (args, self_refs.length ());
284 FOR_EACH_VEC_ELT (tree, self_refs, i, ref) 181 FOR_EACH_VEC_ELT (self_refs, i, ref)
285 { 182 {
286 tree subst, param_name, param_type, param_decl; 183 tree subst, param_name, param_type, param_decl;
287 184
288 if (DECL_P (ref)) 185 if (DECL_P (ref))
289 { 186 {
301 sprintf (buf, "p%d", i); 198 sprintf (buf, "p%d", i);
302 param_name = get_identifier (buf); 199 param_name = get_identifier (buf);
303 param_type = TREE_TYPE (ref); 200 param_type = TREE_TYPE (ref);
304 param_decl 201 param_decl
305 = build_decl (input_location, PARM_DECL, param_name, param_type); 202 = build_decl (input_location, PARM_DECL, param_name, param_type);
306 if (targetm.calls.promote_prototypes (NULL_TREE) 203 DECL_ARG_TYPE (param_decl) = param_type;
307 && INTEGRAL_TYPE_P (param_type)
308 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
309 DECL_ARG_TYPE (param_decl) = integer_type_node;
310 else
311 DECL_ARG_TYPE (param_decl) = param_type;
312 DECL_ARTIFICIAL (param_decl) = 1; 204 DECL_ARTIFICIAL (param_decl) = 1;
313 TREE_READONLY (param_decl) = 1; 205 TREE_READONLY (param_decl) = 1;
314 206
315 size = substitute_in_expr (size, subst, param_decl); 207 size = substitute_in_expr (size, subst, param_decl);
316 208
317 param_type_list = tree_cons (NULL_TREE, param_type, param_type_list); 209 param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
318 param_decl_list = chainon (param_decl, param_decl_list); 210 param_decl_list = chainon (param_decl, param_decl_list);
319 VEC_quick_push (tree, args, ref); 211 args->quick_push (ref);
320 } 212 }
321 213
322 VEC_free (tree, heap, self_refs); 214 self_refs.release ();
323 215
324 /* Append 'void' to indicate that the number of parameters is fixed. */ 216 /* Append 'void' to indicate that the number of parameters is fixed. */
325 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list); 217 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
326 218
327 /* The 3 lists have been created in reverse order. */ 219 /* The 3 lists have been created in reverse order. */
331 /* Build the function type. */ 223 /* Build the function type. */
332 return_type = TREE_TYPE (size); 224 return_type = TREE_TYPE (size);
333 fntype = build_function_type (return_type, param_type_list); 225 fntype = build_function_type (return_type, param_type_list);
334 226
335 /* Build the function declaration. */ 227 /* Build the function declaration. */
336 sprintf (buf, "SZ"HOST_WIDE_INT_PRINT_UNSIGNED, fnno++); 228 sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
337 fnname = get_file_function_name (buf); 229 fnname = get_file_function_name (buf);
338 fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype); 230 fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
339 for (t = param_decl_list; t; t = DECL_CHAIN (t)) 231 for (t = param_decl_list; t; t = DECL_CHAIN (t))
340 DECL_CONTEXT (t) = fndecl; 232 DECL_CONTEXT (t) = fndecl;
341 DECL_ARGUMENTS (fndecl) = param_decl_list; 233 DECL_ARGUMENTS (fndecl) = param_decl_list;
362 t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size); 254 t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
363 DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t); 255 DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
364 TREE_STATIC (fndecl) = 1; 256 TREE_STATIC (fndecl) = 1;
365 257
366 /* Put it onto the list of size functions. */ 258 /* Put it onto the list of size functions. */
367 VEC_safe_push (tree, gc, size_functions, fndecl); 259 vec_safe_push (size_functions, fndecl);
368 260
369 /* Replace the original expression with a call to the size function. */ 261 /* Replace the original expression with a call to the size function. */
370 return build_call_expr_loc_vec (input_location, fndecl, args); 262 return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
371 } 263 }
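
What self_referential_size produces, modeled as plain C (hypothetical
shapes, not GCC output): each PLACEHOLDER_EXPR self-reference in the size
becomes a PARM_DECL of a generated function SZ<n>, and the type's size
expression is replaced by a call to it.

    /* For a record whose size is "8 + <own field n> * 4", roughly: */
    static unsigned long SZ0 (unsigned long p0)   /* p0 = the self-reference */
    {
      return 8 + p0 * 4;
    }

    int main (void) { return SZ0 (3) == 20 ? 0 : 1; }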
372 264
373 /* Take, queue and compile all the size functions. It is essential that 265 /* Take, queue and compile all the size functions. It is essential that
374 the size functions be gimplified at the very end of the compilation 266 the size functions be gimplified at the very end of the compilation
375 in order to guarantee transparent handling of self-referential sizes. 267 in order to guarantee transparent handling of self-referential sizes.
381 finalize_size_functions (void) 273 finalize_size_functions (void)
382 { 274 {
383 unsigned int i; 275 unsigned int i;
384 tree fndecl; 276 tree fndecl;
385 277
386 for (i = 0; VEC_iterate(tree, size_functions, i, fndecl); i++) 278 for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
387 { 279 {
280 allocate_struct_function (fndecl, false);
281 set_cfun (NULL);
388 dump_function (TDI_original, fndecl); 282 dump_function (TDI_original, fndecl);
283
284 /* As these functions are used to describe the layout of variable-length
285 structures, debug info generation needs their implementation. */
286 debug_hooks->size_function (fndecl);
389 gimplify_function_tree (fndecl); 287 gimplify_function_tree (fndecl);
390 dump_function (TDI_generic, fndecl); 288 cgraph_node::finalize_function (fndecl, false);
391 cgraph_finalize_function (fndecl, false); 289 }
392 } 290
393 291 vec_free (size_functions);
394 VEC_free (tree, gc, size_functions);
395 } 292 }
396 293
397 /* Return the machine mode to use for a nonscalar of SIZE bits. The 294 /* Return a machine mode of class MCLASS with SIZE bits of precision,
398 mode must be in class MCLASS, and have exactly that many value bits; 295 if one exists. The mode may have padding bits as well the SIZE
399 it may have padding as well. If LIMIT is nonzero, modes of wider 296 value bits. If LIMIT is nonzero, disregard modes wider than
400 than MAX_FIXED_MODE_SIZE will not be used. */ 297 MAX_FIXED_MODE_SIZE. */
401 298
402 enum machine_mode 299 opt_machine_mode
403 mode_for_size (unsigned int size, enum mode_class mclass, int limit) 300 mode_for_size (unsigned int size, enum mode_class mclass, int limit)
404 { 301 {
405 enum machine_mode mode; 302 machine_mode mode;
303 int i;
406 304
407 if (limit && size > MAX_FIXED_MODE_SIZE) 305 if (limit && size > MAX_FIXED_MODE_SIZE)
408 return BLKmode; 306 return opt_machine_mode ();
409 307
410 /* Get the first mode which has this size, in the specified class. */ 308 /* Get the first mode which has this size, in the specified class. */
411 for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode; 309 FOR_EACH_MODE_IN_CLASS (mode, mclass)
412 mode = GET_MODE_WIDER_MODE (mode))
413 if (GET_MODE_PRECISION (mode) == size) 310 if (GET_MODE_PRECISION (mode) == size)
414 return mode; 311 return mode;
415 312
416 return BLKmode; 313 if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
314 for (i = 0; i < NUM_INT_N_ENTS; i ++)
315 if (int_n_data[i].bitsize == size
316 && int_n_enabled_p[i])
317 return int_n_data[i].m;
318
319 return opt_machine_mode ();
417 } 320 }
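
The signature change is the point of this hunk: instead of returning
BLKmode on failure, mode_for_size now returns an opt_machine_mode that the
caller must query. A caller sketch in the new style (SImode is what the
32-bit query yields on most targets, an assumption here):

    machine_mode m;
    if (mode_for_size (32, MODE_INT, 0).exists (&m))
      gcc_assert (GET_MODE_PRECISION (m) == 32);   /* e.g. SImode */
    else
      /* no 32-bit integer mode on this target */;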
418 321
419 /* Similar, except passed a tree node. */ 322 /* Similar, except passed a tree node. */
420 323
421 enum machine_mode 324 opt_machine_mode
422 mode_for_size_tree (const_tree size, enum mode_class mclass, int limit) 325 mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
423 { 326 {
424 unsigned HOST_WIDE_INT uhwi; 327 unsigned HOST_WIDE_INT uhwi;
425 unsigned int ui; 328 unsigned int ui;
426 329
427 if (!host_integerp (size, 1)) 330 if (!tree_fits_uhwi_p (size))
428 return BLKmode; 331 return opt_machine_mode ();
429 uhwi = tree_low_cst (size, 1); 332 uhwi = tree_to_uhwi (size);
430 ui = uhwi; 333 ui = uhwi;
431 if (uhwi != ui) 334 if (uhwi != ui)
432 return BLKmode; 335 return opt_machine_mode ();
433 return mode_for_size (ui, mclass, limit); 336 return mode_for_size (ui, mclass, limit);
434 } 337 }
435 338
436 /* Similar, but never return BLKmode; return the narrowest mode that 339 /* Return the narrowest mode of class MCLASS that contains at least
437 contains at least the requested number of value bits. */ 340 SIZE bits. Abort if no such mode exists. */
438 341
439 enum machine_mode 342 machine_mode
440 smallest_mode_for_size (unsigned int size, enum mode_class mclass) 343 smallest_mode_for_size (unsigned int size, enum mode_class mclass)
441 { 344 {
442 enum machine_mode mode; 345 machine_mode mode = VOIDmode;
346 int i;
443 347
444 /* Get the first mode which has at least this size, in the 348 /* Get the first mode which has at least this size, in the
445 specified class. */ 349 specified class. */
446 for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode; 350 FOR_EACH_MODE_IN_CLASS (mode, mclass)
447 mode = GET_MODE_WIDER_MODE (mode))
448 if (GET_MODE_PRECISION (mode) >= size) 351 if (GET_MODE_PRECISION (mode) >= size)
449 return mode; 352 break;
450 353
451 gcc_unreachable (); 354 if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
452 } 355 for (i = 0; i < NUM_INT_N_ENTS; i ++)
453 356 if (int_n_data[i].bitsize >= size
454 /* Find an integer mode of the exact same size, or BLKmode on failure. */ 357 && int_n_data[i].bitsize < GET_MODE_PRECISION (mode)
455 358 && int_n_enabled_p[i])
456 enum machine_mode 359 mode = int_n_data[i].m;
457 int_mode_for_mode (enum machine_mode mode) 360
361 if (mode == VOIDmode)
362 gcc_unreachable ();
363
364 return mode;
365 }
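
Contrast with mode_for_size: this rounds up rather than matching exactly,
and failure is a gcc_unreachable rather than an empty result. For example
(typical target, stated as an assumption), 17 bits rounds up past QImode
and HImode:

    machine_mode m = smallest_mode_for_size (17, MODE_INT);   /* SImode */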
366
367 /* Return an integer mode of exactly the same size as MODE, if one exists. */
368
369 opt_scalar_int_mode
370 int_mode_for_mode (machine_mode mode)
458 { 371 {
459 switch (GET_MODE_CLASS (mode)) 372 switch (GET_MODE_CLASS (mode))
460 { 373 {
461 case MODE_INT: 374 case MODE_INT:
462 case MODE_PARTIAL_INT: 375 case MODE_PARTIAL_INT:
463 break; 376 return as_a <scalar_int_mode> (mode);
464 377
465 case MODE_COMPLEX_INT: 378 case MODE_COMPLEX_INT:
466 case MODE_COMPLEX_FLOAT: 379 case MODE_COMPLEX_FLOAT:
467 case MODE_FLOAT: 380 case MODE_FLOAT:
468 case MODE_DECIMAL_FLOAT: 381 case MODE_DECIMAL_FLOAT:
474 case MODE_UACCUM: 387 case MODE_UACCUM:
475 case MODE_VECTOR_FRACT: 388 case MODE_VECTOR_FRACT:
476 case MODE_VECTOR_ACCUM: 389 case MODE_VECTOR_ACCUM:
477 case MODE_VECTOR_UFRACT: 390 case MODE_VECTOR_UFRACT:
478 case MODE_VECTOR_UACCUM: 391 case MODE_VECTOR_UACCUM:
479 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0); 392 case MODE_POINTER_BOUNDS:
480 break; 393 return int_mode_for_size (GET_MODE_BITSIZE (mode), 0);
481 394
482 case MODE_RANDOM: 395 case MODE_RANDOM:
483 if (mode == BLKmode) 396 if (mode == BLKmode)
484 break; 397 return opt_scalar_int_mode ();
485 398
486 /* ... fall through ... */ 399 /* fall through */
487 400
488 case MODE_CC: 401 case MODE_CC:
489 default: 402 default:
490 gcc_unreachable (); 403 gcc_unreachable ();
491 } 404 }
492 405 }
493 return mode; 406
494 } 407 /* Find a mode that can be used for efficient bitwise operations on MODE,
495 408 if one exists. */
496 /* Find a mode that is suitable for representing a vector with 409
497 NUNITS elements of mode INNERMODE. Returns BLKmode if there 410 opt_machine_mode
498 is no suitable mode. */ 411 bitwise_mode_for_mode (machine_mode mode)
499 412 {
500 enum machine_mode 413 /* Quick exit if we already have a suitable mode. */
501 mode_for_vector (enum machine_mode innermode, unsigned nunits) 414 unsigned int bitsize = GET_MODE_BITSIZE (mode);
502 { 415 scalar_int_mode int_mode;
503 enum machine_mode mode; 416 if (is_a <scalar_int_mode> (mode, &int_mode)
417 && GET_MODE_BITSIZE (int_mode) <= MAX_FIXED_MODE_SIZE)
418 return int_mode;
419
420 /* Reuse the sanity checks from int_mode_for_mode. */
421 gcc_checking_assert ((int_mode_for_mode (mode), true));
422
423 /* Try to replace complex modes with complex modes. In general we
424 expect both components to be processed independently, so we only
425 care whether there is a register for the inner mode. */
426 if (COMPLEX_MODE_P (mode))
427 {
428 machine_mode trial = mode;
429 if ((GET_MODE_CLASS (trial) == MODE_COMPLEX_INT
430 || mode_for_size (bitsize, MODE_COMPLEX_INT, false).exists (&trial))
431 && have_regs_of_mode[GET_MODE_INNER (trial)])
432 return trial;
433 }
434
435 /* Try to replace vector modes with vector modes. Also try using vector
436 modes if an integer mode would be too big. */
437 if (VECTOR_MODE_P (mode) || bitsize > MAX_FIXED_MODE_SIZE)
438 {
439 machine_mode trial = mode;
440 if ((GET_MODE_CLASS (trial) == MODE_VECTOR_INT
441 || mode_for_size (bitsize, MODE_VECTOR_INT, 0).exists (&trial))
442 && have_regs_of_mode[trial]
443 && targetm.vector_mode_supported_p (trial))
444 return trial;
445 }
446
447 /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE. */
448 return mode_for_size (bitsize, MODE_INT, true);
449 }
450
451 /* Find a type that can be used for efficient bitwise operations on MODE.
452 Return null if no such mode exists. */
453
454 tree
455 bitwise_type_for_mode (machine_mode mode)
456 {
457 if (!bitwise_mode_for_mode (mode).exists (&mode))
458 return NULL_TREE;
459
460 unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
461 tree inner_type = build_nonstandard_integer_type (inner_size, true);
462
463 if (VECTOR_MODE_P (mode))
464 return build_vector_type_for_mode (inner_type, mode);
465
466 if (COMPLEX_MODE_P (mode))
467 return build_complex_type (inner_type);
468
469 gcc_checking_assert (GET_MODE_INNER (mode) == mode);
470 return inner_type;
471 }
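
A sketch of what the bitwise_mode_for_mode / bitwise_type_for_mode pair is
for (target-dependent results, assumptions here): folding bitwise
operations on non-integer values by reinterpreting them in a same-sized
unsigned integer (or integer-vector) type.

    tree t = bitwise_type_for_mode (SFmode);
    /* On a 32-bit-float target: a 32-bit unsigned integer type, so an
       FP sign flip can be done as an integer XOR.  For V4SFmode the
       result is a 4 x 32-bit unsigned vector type when the target
       supports V4SImode. */
    if (t)
      gcc_assert (TYPE_PRECISION (t) == GET_MODE_BITSIZE (SFmode));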
472
473 /* Find a mode that is suitable for representing a vector with NUNITS
474 elements of mode INNERMODE, if one exists. The returned mode can be
475 either an integer mode or a vector mode. */
476
477 opt_machine_mode
478 mode_for_vector (scalar_mode innermode, unsigned nunits)
479 {
480 machine_mode mode;
504 481
505 /* First, look for a supported vector type. */ 482 /* First, look for a supported vector type. */
506 if (SCALAR_FLOAT_MODE_P (innermode)) 483 if (SCALAR_FLOAT_MODE_P (innermode))
507 mode = MIN_MODE_VECTOR_FLOAT; 484 mode = MIN_MODE_VECTOR_FLOAT;
508 else if (SCALAR_FRACT_MODE_P (innermode)) 485 else if (SCALAR_FRACT_MODE_P (innermode))
516 else 493 else
517 mode = MIN_MODE_VECTOR_INT; 494 mode = MIN_MODE_VECTOR_INT;
518 495
519 /* Do not check vector_mode_supported_p here. We'll do that 496 /* Do not check vector_mode_supported_p here. We'll do that
520 later in vector_type_mode. */ 497 later in vector_type_mode. */
521 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode)) 498 FOR_EACH_MODE_FROM (mode, mode)
522 if (GET_MODE_NUNITS (mode) == nunits 499 if (GET_MODE_NUNITS (mode) == nunits
523 && GET_MODE_INNER (mode) == innermode) 500 && GET_MODE_INNER (mode) == innermode)
524 break; 501 return mode;
525 502
526 /* For integers, try mapping it to a same-sized scalar mode. */ 503 /* For integers, try mapping it to a same-sized scalar mode. */
527 if (mode == VOIDmode 504 if (GET_MODE_CLASS (innermode) == MODE_INT)
528 && GET_MODE_CLASS (innermode) == MODE_INT) 505 {
529 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode), 506 unsigned int nbits = nunits * GET_MODE_BITSIZE (innermode);
530 MODE_INT, 0); 507 if (int_mode_for_size (nbits, 0).exists (&mode)
531 508 && have_regs_of_mode[mode])
532 if (mode == VOIDmode 509 return mode;
533 || (GET_MODE_CLASS (mode) == MODE_INT 510 }
534 && !have_regs_of_mode[mode])) 511
535 return BLKmode; 512 return opt_machine_mode ();
536 513 }
537 return mode; 514
515 /* Return the mode for a vector that has NUNITS integer elements of
516 INT_BITS bits each, if such a mode exists. The mode can be either
517 an integer mode or a vector mode. */
518
519 opt_machine_mode
520 mode_for_int_vector (unsigned int int_bits, unsigned int nunits)
521 {
522 scalar_int_mode int_mode;
523 machine_mode vec_mode;
524 if (int_mode_for_size (int_bits, 0).exists (&int_mode)
525 && mode_for_vector (int_mode, nunits).exists (&vec_mode))
526 return vec_mode;
527 return opt_machine_mode ();
538 } 528 }
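
Usage sketch for the two helpers above (results are target-dependent;
V4SImode/TImode are assumptions for a 128-bit-vector target):

    machine_mode vm;
    if (mode_for_vector (SImode, 4).exists (&vm))
      /* V4SImode with vector registers, else the TImode fallback. */
      gcc_assert (GET_MODE_SIZE (vm) == 16);

    if (mode_for_int_vector (32, 4).exists (&vm))
      /* same result, but starting from a bit count instead of a mode */;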
539 529
540 /* Return the alignment of MODE. This will be bounded by 1 and 530 /* Return the alignment of MODE. This will be bounded by 1 and
541 BIGGEST_ALIGNMENT. */ 531 BIGGEST_ALIGNMENT. */
542 532
543 unsigned int 533 unsigned int
544 get_mode_alignment (enum machine_mode mode) 534 get_mode_alignment (machine_mode mode)
545 { 535 {
546 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT)); 536 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
547 } 537 }
548 538
539 /* Return the natural mode of an array, given that it is SIZE bytes in
540 total and has elements of type ELEM_TYPE. */
541
542 static machine_mode
543 mode_for_array (tree elem_type, tree size)
544 {
545 tree elem_size;
546 unsigned HOST_WIDE_INT int_size, int_elem_size;
547 bool limit_p;
548
549 /* One-element arrays get the component type's mode. */
550 elem_size = TYPE_SIZE (elem_type);
551 if (simple_cst_equal (size, elem_size))
552 return TYPE_MODE (elem_type);
553
554 limit_p = true;
555 if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
556 {
557 int_size = tree_to_uhwi (size);
558 int_elem_size = tree_to_uhwi (elem_size);
559 if (int_elem_size > 0
560 && int_size % int_elem_size == 0
561 && targetm.array_mode_supported_p (TYPE_MODE (elem_type),
562 int_size / int_elem_size))
563 limit_p = false;
564 }
565 return mode_for_size_tree (size, MODE_INT, limit_p).else_blk ();
566 }
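
Worked cases for mode_for_array's rules (sizes assume an 8-bit byte and
32-bit int; illustrative, not testsuite output):

    /* float[1], 32 bits  -> SFmode: one element, the component's mode.
       char[4],  32 bits  -> SImode via mode_for_size_tree.
       char[32], 256 bits -> BLKmode, unless the target's
                             array_mode_supported_p hook accepts 32 x QImode,
                             in which case limit_p is cleared and a wider
                             integer mode may be used. */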
549 567
550 /* Subroutine of layout_decl: Force alignment required for the data type. 568 /* Subroutine of layout_decl: Force alignment required for the data type.
551 But if the decl itself wants greater alignment, don't override that. */ 569 But if the decl itself wants greater alignment, don't override that. */
552 570
553 static inline void 571 static inline void
554 do_type_align (tree type, tree decl) 572 do_type_align (tree type, tree decl)
555 { 573 {
556 if (TYPE_ALIGN (type) > DECL_ALIGN (decl)) 574 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
557 { 575 {
558 DECL_ALIGN (decl) = TYPE_ALIGN (type); 576 SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
559 if (TREE_CODE (decl) == FIELD_DECL) 577 if (TREE_CODE (decl) == FIELD_DECL)
560 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type); 578 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
561 } 579 }
580 if (TYPE_WARN_IF_NOT_ALIGN (type) > DECL_WARN_IF_NOT_ALIGN (decl))
581 SET_DECL_WARN_IF_NOT_ALIGN (decl, TYPE_WARN_IF_NOT_ALIGN (type));
562 } 582 }
563 583
564 /* Set the size, mode and alignment of a ..._DECL node. 584 /* Set the size, mode and alignment of a ..._DECL node.
565 TYPE_DECL does need this for C++. 585 TYPE_DECL does need this for C++.
566 Note that LABEL_DECL and CONST_DECL nodes do not need this, 586 Note that LABEL_DECL and CONST_DECL nodes do not need this,
584 604
585 if (code == CONST_DECL) 605 if (code == CONST_DECL)
586 return; 606 return;
587 607
588 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL 608 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
589 || code == TYPE_DECL ||code == FIELD_DECL); 609 || code == TYPE_DECL || code == FIELD_DECL);
590 610
591 rtl = DECL_RTL_IF_SET (decl); 611 rtl = DECL_RTL_IF_SET (decl);
592 612
593 if (type == error_mark_node) 613 if (type == error_mark_node)
594 type = void_type_node; 614 type = void_type_node;
603 size in bytes from the size in bits. If we have already set the mode, 623 size in bytes from the size in bits. If we have already set the mode,
604 don't set it again since we can be called twice for FIELD_DECLs. */ 624 don't set it again since we can be called twice for FIELD_DECLs. */
605 625
606 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type); 626 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
607 if (DECL_MODE (decl) == VOIDmode) 627 if (DECL_MODE (decl) == VOIDmode)
608 DECL_MODE (decl) = TYPE_MODE (type); 628 SET_DECL_MODE (decl, TYPE_MODE (type));
609 629
610 if (DECL_SIZE (decl) == 0) 630 if (DECL_SIZE (decl) == 0)
611 { 631 {
612 DECL_SIZE (decl) = TYPE_SIZE (type); 632 DECL_SIZE (decl) = TYPE_SIZE (type);
613 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); 633 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
639 if (integer_zerop (DECL_SIZE (decl)) 659 if (integer_zerop (DECL_SIZE (decl))
640 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl))) 660 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
641 { 661 {
642 zero_bitfield = true; 662 zero_bitfield = true;
643 packed_p = false; 663 packed_p = false;
644 #ifdef PCC_BITFIELD_TYPE_MATTERS
645 if (PCC_BITFIELD_TYPE_MATTERS) 664 if (PCC_BITFIELD_TYPE_MATTERS)
646 do_type_align (type, decl); 665 do_type_align (type, decl);
647 else 666 else
648 #endif
649 { 667 {
650 #ifdef EMPTY_FIELD_BOUNDARY 668 #ifdef EMPTY_FIELD_BOUNDARY
651 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl)) 669 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
652 { 670 {
653 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY; 671 SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
654 DECL_USER_ALIGN (decl) = 0; 672 DECL_USER_ALIGN (decl) = 0;
655 } 673 }
656 #endif 674 #endif
657 } 675 }
658 } 676 }
659 677
660 /* See if we can use an ordinary integer mode for a bit-field. 678 /* See if we can use an ordinary integer mode for a bit-field.
661 Conditions are: a fixed size that is correct for another mode, 679 Conditions are: a fixed size that is correct for another mode,
662 occupying a complete byte or bytes on proper boundary, 680 occupying a complete byte or bytes on proper boundary. */
663 and not volatile or not -fstrict-volatile-bitfields. */
664 if (TYPE_SIZE (type) != 0 681 if (TYPE_SIZE (type) != 0
665 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST 682 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
666 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT 683 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
667 && !(TREE_THIS_VOLATILE (decl)
668 && flag_strict_volatile_bitfields > 0))
669 { 684 {
670 enum machine_mode xmode 685 machine_mode xmode;
671 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1); 686 if (mode_for_size_tree (DECL_SIZE (decl),
672 unsigned int xalign = GET_MODE_ALIGNMENT (xmode); 687 MODE_INT, 1).exists (&xmode))
673
674 if (xmode != BLKmode
675 && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
676 && (known_align == 0 || known_align >= xalign))
677 { 688 {
678 DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl)); 689 unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
679 DECL_MODE (decl) = xmode; 690 if (!(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
680 DECL_BIT_FIELD (decl) = 0; 691 && (known_align == 0 || known_align >= xalign))
692 {
693 SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
694 SET_DECL_MODE (decl, xmode);
695 DECL_BIT_FIELD (decl) = 0;
696 }
681 } 697 }
682 } 698 }
683 699
684 /* Turn off DECL_BIT_FIELD if we won't need it set. */ 700 /* Turn off DECL_BIT_FIELD if we won't need it set. */
685 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode 701 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
698 /* If the field is packed and not explicitly aligned, give it the 714 /* If the field is packed and not explicitly aligned, give it the
699 minimum alignment. Note that do_type_align may set 715 minimum alignment. Note that do_type_align may set
700 DECL_USER_ALIGN, so we need to check old_user_align instead. */ 716 DECL_USER_ALIGN, so we need to check old_user_align instead. */
701 if (packed_p 717 if (packed_p
702 && !old_user_align) 718 && !old_user_align)
703 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT); 719 SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));
704 720
705 if (! packed_p && ! DECL_USER_ALIGN (decl)) 721 if (! packed_p && ! DECL_USER_ALIGN (decl))
706 { 722 {
707 /* Some targets (i.e. i386, VMS) limit struct field alignment 723 /* Some targets (i.e. i386, VMS) limit struct field alignment
708 to a lower boundary than alignment of variables unless 724 to a lower boundary than alignment of variables unless
709 it was overridden by attribute aligned. */ 725 it was overridden by attribute aligned. */
710 #ifdef BIGGEST_FIELD_ALIGNMENT 726 #ifdef BIGGEST_FIELD_ALIGNMENT
711 DECL_ALIGN (decl) 727 SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
712 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT); 728 (unsigned) BIGGEST_FIELD_ALIGNMENT));
713 #endif 729 #endif
714 #ifdef ADJUST_FIELD_ALIGN 730 #ifdef ADJUST_FIELD_ALIGN
715 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl)); 731 SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, TREE_TYPE (decl),
732 DECL_ALIGN (decl)));
716 #endif 733 #endif
717 } 734 }
718 735
719 if (zero_bitfield) 736 if (zero_bitfield)
720 mfa = initial_max_fld_align * BITS_PER_UNIT; 737 mfa = initial_max_fld_align * BITS_PER_UNIT;
721 else 738 else
722 mfa = maximum_field_alignment; 739 mfa = maximum_field_alignment;
723 /* Should this be controlled by DECL_USER_ALIGN, too? */ 740 /* Should this be controlled by DECL_USER_ALIGN, too? */
724 if (mfa != 0) 741 if (mfa != 0)
725 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa); 742 SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
726 } 743 }
727 744
728 /* Evaluate nonconstant size only once, either now or as soon as safe. */ 745 /* Evaluate nonconstant size only once, either now or as soon as safe. */
729 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 746 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
730 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl)); 747 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
755 /* If the RTL was already set, update its mode and mem attributes. */ 772 /* If the RTL was already set, update its mode and mem attributes. */
756 if (rtl) 773 if (rtl)
757 { 774 {
758 PUT_MODE (rtl, DECL_MODE (decl)); 775 PUT_MODE (rtl, DECL_MODE (decl));
759 SET_DECL_RTL (decl, 0); 776 SET_DECL_RTL (decl, 0);
760 set_mem_attributes (rtl, decl, 1); 777 if (MEM_P (rtl))
778 set_mem_attributes (rtl, decl, 1);
761 SET_DECL_RTL (decl, rtl); 779 SET_DECL_RTL (decl, rtl);
762 } 780 }
763 } 781 }
764 782
765 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of 783 /* Given a VAR_DECL, PARM_DECL, RESULT_DECL, or FIELD_DECL, clears the
766 a previous call to layout_decl and calls it again. */ 784 results of a previous call to layout_decl and calls it again. */
767 785
768 void 786 void
769 relayout_decl (tree decl) 787 relayout_decl (tree decl)
770 { 788 {
771 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0; 789 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
772 DECL_MODE (decl) = VOIDmode; 790 SET_DECL_MODE (decl, VOIDmode);
773 if (!DECL_USER_ALIGN (decl)) 791 if (!DECL_USER_ALIGN (decl))
774 DECL_ALIGN (decl) = 0; 792 SET_DECL_ALIGN (decl, 0);
775 SET_DECL_RTL (decl, 0); 793 if (DECL_RTL_SET_P (decl))
794 SET_DECL_RTL (decl, 0);
776 795
777 layout_decl (decl, 0); 796 layout_decl (decl, 0);
778 } 797 }
779 798
780 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or 799 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
813 #endif 832 #endif
814 833
815 rli->offset = size_zero_node; 834 rli->offset = size_zero_node;
816 rli->bitpos = bitsize_zero_node; 835 rli->bitpos = bitsize_zero_node;
817 rli->prev_field = 0; 836 rli->prev_field = 0;
818 rli->pending_statics = NULL; 837 rli->pending_statics = 0;
819 rli->packed_maybe_necessary = 0; 838 rli->packed_maybe_necessary = 0;
820 rli->remaining_in_alignment = 0; 839 rli->remaining_in_alignment = 0;
821 840
822 return rli; 841 return rli;
823 } 842 }
824 843
825 /* These four routines perform computations that convert between 844 /* Return the combined bit position for the byte offset OFFSET and the
826 the offset/bitpos forms and byte and bit offsets. */ 845 bit position BITPOS.
846
847 These functions operate on byte and bit positions present in FIELD_DECLs
848 and assume that these expressions result in no (intermediate) overflow.
849 This assumption is necessary to fold the expressions as much as possible,
850 so as to avoid creating artificially variable-sized types in languages
851 supporting variable-sized types like Ada. */
827 852
828 tree 853 tree
829 bit_from_pos (tree offset, tree bitpos) 854 bit_from_pos (tree offset, tree bitpos)
830 { 855 {
831 return size_binop (PLUS_EXPR, bitpos, 856 return size_binop (PLUS_EXPR, bitpos,
832 size_binop (MULT_EXPR, 857 size_binop (MULT_EXPR,
833 fold_convert (bitsizetype, offset), 858 fold_convert (bitsizetype, offset),
834 bitsize_unit_node)); 859 bitsize_unit_node));
835 } 860 }
836 861
862 /* Return the combined truncated byte position for the byte offset OFFSET and
863 the bit position BITPOS. */
864
837 tree 865 tree
838 byte_from_pos (tree offset, tree bitpos) 866 byte_from_pos (tree offset, tree bitpos)
839 { 867 {
840 return size_binop (PLUS_EXPR, offset, 868 tree bytepos;
841 fold_convert (sizetype, 869 if (TREE_CODE (bitpos) == MULT_EXPR
842 size_binop (TRUNC_DIV_EXPR, bitpos, 870 && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
843 bitsize_unit_node))); 871 bytepos = TREE_OPERAND (bitpos, 0);
844 } 872 else
873 bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
874 return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
875 }
876
877 /* Split the bit position POS into a byte offset *POFFSET and a bit
878 position *PBITPOS with the byte offset aligned to OFF_ALIGN bits. */
845 879
846 void 880 void
847 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align, 881 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
848 tree pos) 882 tree pos)
849 { 883 {
850 *poffset = size_binop (MULT_EXPR, 884 tree toff_align = bitsize_int (off_align);
851 fold_convert (sizetype, 885 if (TREE_CODE (pos) == MULT_EXPR
852 size_binop (FLOOR_DIV_EXPR, pos, 886 && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
853 bitsize_int (off_align))), 887 {
854 size_int (off_align / BITS_PER_UNIT)); 888 *poffset = size_binop (MULT_EXPR,
855 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align)); 889 fold_convert (sizetype, TREE_OPERAND (pos, 0)),
890 size_int (off_align / BITS_PER_UNIT));
891 *pbitpos = bitsize_zero_node;
892 }
893 else
894 {
895 *poffset = size_binop (MULT_EXPR,
896 fold_convert (sizetype,
897 size_binop (FLOOR_DIV_EXPR, pos,
898 toff_align)),
899 size_int (off_align / BITS_PER_UNIT));
900 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
901 }
856 } 902 }
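
The offset/bitpos arithmetic above, restated as a standalone runnable
model with plain integers (BITS_PER_UNIT == 8; the new MULT_EXPR fast
paths only shortcut the same math):

    #include <stdio.h>

    static void pos_from_bit (unsigned *poff, unsigned *pbit,
                              unsigned off_align, unsigned pos)
    {
      *poff = (pos / off_align) * (off_align / 8);   /* bytes, off_align-aligned */
      *pbit = pos % off_align;                       /* leftover bits */
    }

    int main (void)
    {
      unsigned off, bit;
      pos_from_bit (&off, &bit, 32, 70);   /* split bit 70 on 32-bit units */
      printf ("offset %u bytes + %u bits\n", off, bit);   /* 8 bytes + 6 bits */
      printf ("round trip: %u\n", off * 8 + bit);         /* 70 again */
      return 0;
    }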
857 903
858 /* Given a pointer to bit and byte offsets and an offset alignment, 904 /* Given a pointer to bit and byte offsets and an offset alignment,
859 normalize the offsets so they are within the alignment. */ 905 normalize the offsets so they are within the alignment. */
860 906
863 { 909 {
864 /* If the bit position is now larger than it should be, adjust it 910 /* If the bit position is now larger than it should be, adjust it
865 downwards. */ 911 downwards. */
866 if (compare_tree_int (*pbitpos, off_align) >= 0) 912 if (compare_tree_int (*pbitpos, off_align) >= 0)
867 { 913 {
868 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos, 914 tree offset, bitpos;
869 bitsize_int (off_align)); 915 pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
870 916 *poffset = size_binop (PLUS_EXPR, *poffset, offset);
871 *poffset 917 *pbitpos = bitpos;
872 = size_binop (PLUS_EXPR, *poffset,
873 size_binop (MULT_EXPR,
874 fold_convert (sizetype, extra_aligns),
875 size_int (off_align / BITS_PER_UNIT)));
876
877 *pbitpos
878 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
879 } 918 }
880 } 919 }
881 920
882 /* Print debugging information about the information in RLI. */ 921 /* Print debugging information about the information in RLI. */
883 922
897 fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment); 936 fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
898 937
899 if (rli->packed_maybe_necessary) 938 if (rli->packed_maybe_necessary)
900 fprintf (stderr, "packed may be necessary\n"); 939 fprintf (stderr, "packed may be necessary\n");
901 940
902 if (!VEC_empty (tree, rli->pending_statics)) 941 if (!vec_safe_is_empty (rli->pending_statics))
903 { 942 {
904 fprintf (stderr, "pending statics:\n"); 943 fprintf (stderr, "pending statics:\n");
905 debug_vec_tree (rli->pending_statics); 944 debug_vec_tree (rli->pending_statics);
906 } 945 }
907 } 946 }
971 can do this. The alignment should be to the alignment of 1010 can do this. The alignment should be to the alignment of
972 the type, except that for zero-size bitfields this only 1011 the type, except that for zero-size bitfields this only
973 applies if there was an immediately prior, nonzero-size 1012 applies if there was an immediately prior, nonzero-size
974 bitfield. (That's the way it is, experimentally.) */ 1013 bitfield. (That's the way it is, experimentally.) */
975 if ((!is_bitfield && !DECL_PACKED (field)) 1014 if ((!is_bitfield && !DECL_PACKED (field))
976 || (!integer_zerop (DECL_SIZE (field)) 1015 || ((DECL_SIZE (field) == NULL_TREE
1016 || !integer_zerop (DECL_SIZE (field)))
977 ? !DECL_PACKED (field) 1017 ? !DECL_PACKED (field)
978 : (rli->prev_field 1018 : (rli->prev_field
979 && DECL_BIT_FIELD_TYPE (rli->prev_field) 1019 && DECL_BIT_FIELD_TYPE (rli->prev_field)
980 && ! integer_zerop (DECL_SIZE (rli->prev_field))))) 1020 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
981 { 1021 {
985 type_align = MIN (type_align, maximum_field_alignment); 1025 type_align = MIN (type_align, maximum_field_alignment);
986 rli->record_align = MAX (rli->record_align, type_align); 1026 rli->record_align = MAX (rli->record_align, type_align);
987 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); 1027 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
988 } 1028 }
989 } 1029 }
990 #ifdef PCC_BITFIELD_TYPE_MATTERS
991 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS) 1030 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
992 { 1031 {
993 /* Named bit-fields cause the entire structure to have the 1032 /* Named bit-fields cause the entire structure to have the
994 alignment implied by their type. Some targets also apply the same 1033 alignment implied by their type. Some targets also apply the same
995 rules to unnamed bitfields. */ 1034 rules to unnamed bitfields. */
998 { 1037 {
999 unsigned int type_align = TYPE_ALIGN (type); 1038 unsigned int type_align = TYPE_ALIGN (type);
1000 1039
1001 #ifdef ADJUST_FIELD_ALIGN 1040 #ifdef ADJUST_FIELD_ALIGN
1002 if (! TYPE_USER_ALIGN (type)) 1041 if (! TYPE_USER_ALIGN (type))
1003 type_align = ADJUST_FIELD_ALIGN (field, type_align); 1042 type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1004 #endif 1043 #endif
1005 1044
1006 /* Targets might chose to handle unnamed and hence possibly 1045 /* Targets might chose to handle unnamed and hence possibly
1007 zero-width bitfield. Those are not influenced by #pragmas 1046 zero-width bitfield. Those are not influenced by #pragmas
1008 or packed attributes. */ 1047 or packed attributes. */
1028 if (warn_packed) 1067 if (warn_packed)
1029 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); 1068 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1030 user_align |= TYPE_USER_ALIGN (type); 1069 user_align |= TYPE_USER_ALIGN (type);
1031 } 1070 }
1032 } 1071 }
1033 #endif
1034 else 1072 else
1035 { 1073 {
1036 rli->record_align = MAX (rli->record_align, desired_align); 1074 rli->record_align = MAX (rli->record_align, desired_align);
1037 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); 1075 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
1038 } 1076 }
1039 1077
1040 TYPE_USER_ALIGN (rli->t) |= user_align; 1078 TYPE_USER_ALIGN (rli->t) |= user_align;
1041 1079
1042 return desired_align; 1080 return desired_align;
1081 }
1082
1083 /* Issue a warning if the record alignment, RECORD_ALIGN, is less than
1084 the field alignment of FIELD or FIELD isn't aligned. */
1085
1086 static void
1087 handle_warn_if_not_align (tree field, unsigned int record_align)
1088 {
1089 tree type = TREE_TYPE (field);
1090
1091 if (type == error_mark_node)
1092 return;
1093
1094 unsigned int warn_if_not_align = 0;
1095
1096 int opt_w = 0;
1097
1098 if (warn_if_not_aligned)
1099 {
1100 warn_if_not_align = DECL_WARN_IF_NOT_ALIGN (field);
1101 if (!warn_if_not_align)
1102 warn_if_not_align = TYPE_WARN_IF_NOT_ALIGN (type);
1103 if (warn_if_not_align)
1104 opt_w = OPT_Wif_not_aligned;
1105 }
1106
1107 if (!warn_if_not_align
1108 && warn_packed_not_aligned
1109 && TYPE_USER_ALIGN (type))
1110 {
1111 warn_if_not_align = TYPE_ALIGN (type);
1112 opt_w = OPT_Wpacked_not_aligned;
1113 }
1114
1115 if (!warn_if_not_align)
1116 return;
1117
1118 tree context = DECL_CONTEXT (field);
1119
1120 warn_if_not_align /= BITS_PER_UNIT;
1121 record_align /= BITS_PER_UNIT;
1122 if ((record_align % warn_if_not_align) != 0)
1123 warning (opt_w, "alignment %u of %qT is less than %u",
1124 record_align, context, warn_if_not_align);
1125
1126 unsigned HOST_WIDE_INT off
1127 = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1128 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) / BITS_PER_UNIT);
1129 if ((off % warn_if_not_align) != 0)
1130 warning (opt_w, "%q+D offset %wu in %qT isn't aligned to %u",
1131 field, off, context, warn_if_not_align);
1043 } 1132 }
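
A source-level example of what the new warnings fire on (hedged: a typical
ABI is assumed; this mirrors the documented -Wpacked-not-aligned case):

    struct __attribute__ ((aligned (8))) S8 { char a[8]; };

    struct __attribute__ ((packed)) P
    {
      char c;
      struct S8 s;   /* S8 requests 8-byte alignment, but P packs it at
                        offset 1, so -Wpacked-not-aligned warns here. */
    };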
1044 1133
1045 /* Called from place_field to handle unions. */ 1134 /* Called from place_field to handle unions. */
1046 1135
1047 static void 1136 static void
1050 update_alignment_for_field (rli, field, /*known_align=*/0); 1139 update_alignment_for_field (rli, field, /*known_align=*/0);
1051 1140
1052 DECL_FIELD_OFFSET (field) = size_zero_node; 1141 DECL_FIELD_OFFSET (field) = size_zero_node;
1053 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node; 1142 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
1054 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT); 1143 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
1144 handle_warn_if_not_align (field, rli->record_align);
1055 1145
1056 /* If this is an ERROR_MARK return *after* having set the 1146 /* If this is an ERROR_MARK return *after* having set the
1057 field at the start of the union. This helps when parsing 1147 field at the start of the union. This helps when parsing
1058 invalid fields. */ 1148 invalid fields. */
1059 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK) 1149 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
1060 return; 1150 return;
1151
1152 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1153 && TYPE_TYPELESS_STORAGE (TREE_TYPE (field)))
1154 TYPE_TYPELESS_STORAGE (rli->t) = 1;
1061 1155
1062 /* We assume the union's size will be a multiple of a byte so we don't 1156 /* We assume the union's size will be a multiple of a byte so we don't
1063 bother with BITPOS. */ 1157 bother with BITPOS. */
1064 if (TREE_CODE (rli->t) == UNION_TYPE) 1158 if (TREE_CODE (rli->t) == UNION_TYPE)
1065 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field)); 1159 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1066 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE) 1160 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
1067 rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field), 1161 rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
1068 DECL_SIZE_UNIT (field), rli->offset); 1162 DECL_SIZE_UNIT (field), rli->offset);
1069 } 1163 }
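
The MAX_EXPR rule above, observed from C (standalone; an 8-byte double is
assumed): a union is as large as its largest member.

    #include <stdio.h>

    union u { char tag; double d; };

    int main (void)
    {
      printf ("%zu\n", sizeof (union u));   /* 8 on typical targets */
      return 0;
    }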
1070 1164
1071 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
1072 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated 1165 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
1073 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more 1166 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
1074 units of alignment than the underlying TYPE. */ 1167 units of alignment than the underlying TYPE. */
1075 static int 1168 static int
1076 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset, 1169 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
1080 that we still get the right result as long as ALIGN is a power of two. */ 1173 that we still get the right result as long as ALIGN is a power of two. */
1081 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset; 1174 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
1082 1175
1083 offset = offset % align; 1176 offset = offset % align;
1084 return ((offset + size + align - 1) / align 1177 return ((offset + size + align - 1) / align
1085 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1) 1178 > tree_to_uhwi (TYPE_SIZE (type)) / align);
1086 / align)); 1179 }
1087 }
1088 #endif
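
A standalone model of excess_unit_span's test (BITS_PER_UNIT == 8,
constant sizes; assumption-level illustration):

    #include <stdio.h>

    static int excess_unit_span (unsigned offset_bits, unsigned size,
                                 unsigned align, unsigned type_size)
    {
      unsigned offset = offset_bits % align;
      return (offset + size + align - 1) / align > type_size / align;
    }

    int main (void)
    {
      /* An 8-bit field starting at bit 28 spans two 32-bit units, while
         its 32-bit type covers only one, so PCC layout must move it.  */
      printf ("%d\n", excess_unit_span (28, 8, 32, 32));   /* prints 1 */
      return 0;
    }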
1089 1180
1090 /* RLI contains information about the layout of a RECORD_TYPE. FIELD 1181 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
1091 is a FIELD_DECL to be added after those fields already present in 1182 is a FIELD_DECL to be added after those fields already present in
1092 T. (FIELD is not actually added to the TYPE_FIELDS list here; 1183 T. (FIELD is not actually added to the TYPE_FIELDS list here;
1093 callers that desire that behavior must manually perform that step.) */ 1184 callers that desire that behavior must manually perform that step.) */
1108 1199
1109 /* If FIELD is static, then treat it like a separate variable, not 1200 /* If FIELD is static, then treat it like a separate variable, not
1110 really like a structure field. If it is a FUNCTION_DECL, it's a 1201 really like a structure field. If it is a FUNCTION_DECL, it's a
1111 method. In both cases, all we do is lay out the decl, and we do 1202 method. In both cases, all we do is lay out the decl, and we do
1112 it *after* the record is laid out. */ 1203 it *after* the record is laid out. */
1113 if (TREE_CODE (field) == VAR_DECL) 1204 if (VAR_P (field))
1114 { 1205 {
1115 VEC_safe_push (tree, gc, rli->pending_statics, field); 1206 vec_safe_push (rli->pending_statics, field);
1116 return; 1207 return;
1117 } 1208 }
1118 1209
1119 /* Enumerators and enum types which are local to this class need not 1210 /* Enumerators and enum types which are local to this class need not
1120 be laid out. Likewise for initialized constant fields. */ 1211 be laid out. Likewise for initialized constant fields. */
1134 /* Place this field at the current allocation position, so we 1225 /* Place this field at the current allocation position, so we
1135 maintain monotonicity. */ 1226 maintain monotonicity. */
1136 DECL_FIELD_OFFSET (field) = rli->offset; 1227 DECL_FIELD_OFFSET (field) = rli->offset;
1137 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos; 1228 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1138 SET_DECL_OFFSET_ALIGN (field, rli->offset_align); 1229 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1230 handle_warn_if_not_align (field, rli->record_align);
1139 return; 1231 return;
1140 } 1232 }
1233
1234 if (AGGREGATE_TYPE_P (type)
1235 && TYPE_TYPELESS_STORAGE (type))
1236 TYPE_TYPELESS_STORAGE (rli->t) = 1;
1141 1237
1142 /* Work out the known alignment so far. Note that A & (-A) is the 1238 /* Work out the known alignment so far. Note that A & (-A) is the
1143 value of the least-significant bit in A that is one. */ 1239 value of the least-significant bit in A that is one. */
1144 if (! integer_zerop (rli->bitpos)) 1240 if (! integer_zerop (rli->bitpos))
1145 known_align = (tree_low_cst (rli->bitpos, 1) 1241 known_align = least_bit_hwi (tree_to_uhwi (rli->bitpos));
1146 & - tree_low_cst (rli->bitpos, 1));
1147 else if (integer_zerop (rli->offset)) 1242 else if (integer_zerop (rli->offset))
1148 known_align = 0; 1243 known_align = 0;
1149 else if (host_integerp (rli->offset, 1)) 1244 else if (tree_fits_uhwi_p (rli->offset))
1150 known_align = (BITS_PER_UNIT 1245 known_align = (BITS_PER_UNIT
1151 * (tree_low_cst (rli->offset, 1) 1246 * least_bit_hwi (tree_to_uhwi (rli->offset)));
1152 & - tree_low_cst (rli->offset, 1)));
1153 else 1247 else
1154 known_align = rli->offset_align; 1248 known_align = rli->offset_align;
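
[Editor's note] The A & (-A) identity the comment above relies on is easy to check in isolation; a minimal sketch with a local stand-in for least_bit_hwi (unsigned negation wraps modularly, which C guarantees):

#include <assert.h>

/* Stand-in for least_bit_hwi: the value of the least-significant set
   bit of A, i.e. the largest power of two dividing A.  */
static unsigned long
least_bit_demo (unsigned long a)
{
  return a & -a;
}

int
main (void)
{
  assert (least_bit_demo (24) == 8);    /* 24 = 0b11000 */
  /* A field starting at bit 96 is known to be at most 32-bit
     aligned, since 96's lowest set bit is 32.  */
  assert (least_bit_demo (96) == 32);
  return 0;
}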
1155 1249
1156 desired_align = update_alignment_for_field (rli, field, known_align); 1250 desired_align = update_alignment_for_field (rli, field, known_align);
1157 if (known_align == 0) 1251 if (known_align == 0)
1175 else 1269 else
1176 rli->packed_maybe_necessary = 1; 1270 rli->packed_maybe_necessary = 1;
1177 } 1271 }
1178 1272
1179 /* Does this field automatically have alignment it needs by virtue 1273 /* Does this field automatically have alignment it needs by virtue
1180 of the fields that precede it and the record's own alignment? 1274 of the fields that precede it and the record's own alignment? */
1181 We already align ms_struct fields, so don't re-align them. */ 1275 if (known_align < desired_align)
1182 if (known_align < desired_align
1183 && !targetm.ms_bitfield_layout_p (rli->t))
1184 { 1276 {
1185 /* No, we need to skip space before this field. 1277 /* No, we need to skip space before this field.
1186 Bump the cumulative size to a multiple of field alignment. 1278 Bump the cumulative size to a multiple of field alignment.
1187 1279
1188 if (DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION) 1280 if (!targetm.ms_bitfield_layout_p (rli->t)
1281 && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION)
1189 warning (OPT_Wpadded, "padding struct to align %q+D", field); 1282 warning (OPT_Wpadded, "padding struct to align %q+D", field);
1190 1283
1191 /* If the alignment is still within offset_align, just align 1284 /* If the alignment is still within offset_align, just align
1192 the bit position. */ 1285 the bit position. */
1193 if (desired_align < rli->offset_align) 1286 if (desired_align < rli->offset_align)
1205 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT); 1298 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
1206 } 1299 }
1207 1300
1208 if (! TREE_CONSTANT (rli->offset)) 1301 if (! TREE_CONSTANT (rli->offset))
1209 rli->offset_align = desired_align; 1302 rli->offset_align = desired_align;
1210 1303 if (targetm.ms_bitfield_layout_p (rli->t))
1304 rli->prev_field = NULL;
1211 } 1305 }
1212 1306
1213 /* Handle compatibility with PCC. Note that if the record has any 1307 /* Handle compatibility with PCC. Note that if the record has any
1214 variable-sized fields, we need not worry about compatibility. */ 1308 variable-sized fields, we need not worry about compatibility. */
1215 #ifdef PCC_BITFIELD_TYPE_MATTERS
1216 if (PCC_BITFIELD_TYPE_MATTERS 1309 if (PCC_BITFIELD_TYPE_MATTERS
1217 && ! targetm.ms_bitfield_layout_p (rli->t) 1310 && ! targetm.ms_bitfield_layout_p (rli->t)
1218 && TREE_CODE (field) == FIELD_DECL 1311 && TREE_CODE (field) == FIELD_DECL
1219 && type != error_mark_node 1312 && type != error_mark_node
1220 && DECL_BIT_FIELD (field) 1313 && DECL_BIT_FIELD (field)
1221 && (! DECL_PACKED (field) 1314 && (! DECL_PACKED (field)
1222 /* Enter for these packed fields only to issue a warning. */ 1315 /* Enter for these packed fields only to issue a warning. */
1223 || TYPE_ALIGN (type) <= BITS_PER_UNIT) 1316 || TYPE_ALIGN (type) <= BITS_PER_UNIT)
1224 && maximum_field_alignment == 0 1317 && maximum_field_alignment == 0
1225 && ! integer_zerop (DECL_SIZE (field)) 1318 && ! integer_zerop (DECL_SIZE (field))
1226 && host_integerp (DECL_SIZE (field), 1) 1319 && tree_fits_uhwi_p (DECL_SIZE (field))
1227 && host_integerp (rli->offset, 1) 1320 && tree_fits_uhwi_p (rli->offset)
1228 && host_integerp (TYPE_SIZE (type), 1)) 1321 && tree_fits_uhwi_p (TYPE_SIZE (type)))
1229 { 1322 {
1230 unsigned int type_align = TYPE_ALIGN (type); 1323 unsigned int type_align = TYPE_ALIGN (type);
1231 tree dsize = DECL_SIZE (field); 1324 tree dsize = DECL_SIZE (field);
1232 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1); 1325 HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1233 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0); 1326 HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1234 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0); 1327 HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1235 1328
1236 #ifdef ADJUST_FIELD_ALIGN 1329 #ifdef ADJUST_FIELD_ALIGN
1237 if (! TYPE_USER_ALIGN (type)) 1330 if (! TYPE_USER_ALIGN (type))
1238 type_align = ADJUST_FIELD_ALIGN (field, type_align); 1331 type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1239 #endif 1332 #endif
1240 1333
1241 /* A bit field may not span more units of alignment of its type 1334 /* A bit field may not span more units of alignment of its type
1242 than its type itself. Advance to next boundary if necessary. */ 1335 than its type itself. Advance to next boundary if necessary. */
1243 if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) 1336 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1254 rli->bitpos = round_up (rli->bitpos, type_align); 1347 rli->bitpos = round_up (rli->bitpos, type_align);
1255 } 1348 }
1256 1349
1257 if (! DECL_PACKED (field)) 1350 if (! DECL_PACKED (field))
1258 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); 1351 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1259 } 1352
1260 #endif 1353 SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1354 TYPE_WARN_IF_NOT_ALIGN (type));
1355 }
1261 1356
1262 #ifdef BITFIELD_NBYTES_LIMITED 1357 #ifdef BITFIELD_NBYTES_LIMITED
1263 if (BITFIELD_NBYTES_LIMITED 1358 if (BITFIELD_NBYTES_LIMITED
1264 && ! targetm.ms_bitfield_layout_p (rli->t) 1359 && ! targetm.ms_bitfield_layout_p (rli->t)
1265 && TREE_CODE (field) == FIELD_DECL 1360 && TREE_CODE (field) == FIELD_DECL
1266 && type != error_mark_node 1361 && type != error_mark_node
1267 && DECL_BIT_FIELD_TYPE (field) 1362 && DECL_BIT_FIELD_TYPE (field)
1268 && ! DECL_PACKED (field) 1363 && ! DECL_PACKED (field)
1269 && ! integer_zerop (DECL_SIZE (field)) 1364 && ! integer_zerop (DECL_SIZE (field))
1270 && host_integerp (DECL_SIZE (field), 1) 1365 && tree_fits_uhwi_p (DECL_SIZE (field))
1271 && host_integerp (rli->offset, 1) 1366 && tree_fits_uhwi_p (rli->offset)
1272 && host_integerp (TYPE_SIZE (type), 1)) 1367 && tree_fits_uhwi_p (TYPE_SIZE (type)))
1273 { 1368 {
1274 unsigned int type_align = TYPE_ALIGN (type); 1369 unsigned int type_align = TYPE_ALIGN (type);
1275 tree dsize = DECL_SIZE (field); 1370 tree dsize = DECL_SIZE (field);
1276 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1); 1371 HOST_WIDE_INT field_size = tree_to_uhwi (dsize);
1277 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0); 1372 HOST_WIDE_INT offset = tree_to_uhwi (rli->offset);
1278 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0); 1373 HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos);
1279 1374
1280 #ifdef ADJUST_FIELD_ALIGN 1375 #ifdef ADJUST_FIELD_ALIGN
1281 if (! TYPE_USER_ALIGN (type)) 1376 if (! TYPE_USER_ALIGN (type))
1282 type_align = ADJUST_FIELD_ALIGN (field, type_align); 1377 type_align = ADJUST_FIELD_ALIGN (field, type, type_align);
1283 #endif 1378 #endif
1284 1379
1285 if (maximum_field_alignment != 0) 1380 if (maximum_field_alignment != 0)
1286 type_align = MIN (type_align, maximum_field_alignment); 1381 type_align = MIN (type_align, maximum_field_alignment);
1287 /* ??? This test is opposite the test in the containing if 1382 /* ??? This test is opposite the test in the containing if
1293 Advance to next boundary if necessary. */ 1388 Advance to next boundary if necessary. */
1294 if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) 1389 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1295 rli->bitpos = round_up (rli->bitpos, type_align); 1390 rli->bitpos = round_up (rli->bitpos, type_align);
1296 1391
1297 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); 1392 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1393 SET_TYPE_WARN_IF_NOT_ALIGN (rli->t,
1394 TYPE_WARN_IF_NOT_ALIGN (type));
1298 } 1395 }
1299 #endif 1396 #endif
1300 1397
1301 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details. 1398 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1302 A subtlety: 1399 A subtlety:
1327 the current and previous fields are bitfields by the 1424 the current and previous fields are bitfields by the
1328 time we check it, DECL_SIZE must be present for both.) */ 1425 time we check it, DECL_SIZE must be present for both.) */
1329 if (DECL_BIT_FIELD_TYPE (field) 1426 if (DECL_BIT_FIELD_TYPE (field)
1330 && !integer_zerop (DECL_SIZE (field)) 1427 && !integer_zerop (DECL_SIZE (field))
1331 && !integer_zerop (DECL_SIZE (rli->prev_field)) 1428 && !integer_zerop (DECL_SIZE (rli->prev_field))
1332 && host_integerp (DECL_SIZE (rli->prev_field), 0) 1429 && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
1333 && host_integerp (TYPE_SIZE (type), 0) 1430 && tree_fits_uhwi_p (TYPE_SIZE (type))
1334 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))) 1431 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1335 { 1432 {
1336 /* We're in the middle of a run of equal type size fields; make 1433 /* We're in the middle of a run of equal type size fields; make
1337 sure we realign if we run out of bits. (Not decl size, 1434 sure we realign if we run out of bits. (Not decl size,
1338 type size!) */ 1435 type size!) */
1339 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1); 1436 HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field));
1340 1437
1341 if (rli->remaining_in_alignment < bitsize) 1438 if (rli->remaining_in_alignment < bitsize)
1342 { 1439 {
1343 HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1); 1440 HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type));
1344 1441
1345 /* Out of bits; bump up to next 'word'. */ 1442 /* Out of bits; bump up to next 'word'. */
1346 rli->bitpos 1443 rli->bitpos
1347 = size_binop (PLUS_EXPR, rli->bitpos, 1444 = size_binop (PLUS_EXPR, rli->bitpos,
1348 bitsize_int (rli->remaining_in_alignment)); 1445 bitsize_int (rli->remaining_in_alignment));
1377 as if the prior field was not a bitfield. */ 1474 as if the prior field was not a bitfield. */
1378 prev_saved = NULL; 1475 prev_saved = NULL;
1379 1476
1380 /* Cause a new bitfield to be captured, either this time (if 1477 /* Cause a new bitfield to be captured, either this time (if
1381 currently a bitfield) or next time we see one. */ 1478 currently a bitfield) or next time we see one. */
1382 if (!DECL_BIT_FIELD_TYPE(field) 1479 if (!DECL_BIT_FIELD_TYPE (field)
1383 || integer_zerop (DECL_SIZE (field))) 1480 || integer_zerop (DECL_SIZE (field)))
1384 rli->prev_field = NULL; 1481 rli->prev_field = NULL;
1385 } 1482 }
1386 1483
1387 normalize_rli (rli); 1484 normalize_rli (rli);
1388 } 1485 }
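
[Editor's note] A hedged sketch of the "bump to the next word" step above, with plain integers standing in for the tree-based bookkeeping; only the bump and the remaining-bits accounting are modeled, not the field's own placement, which the real code does separately before re-normalizing rli:

#include <assert.h>

/* Illustrative only: when a bitfield of BITSIZE bits no longer fits
   in the current allocation unit, advance BITPOS past the leftover
   bits and open a fresh unit of TYPESIZE bits.  */
static void
ms_bump_demo (unsigned long *bitpos, unsigned long *remaining,
              unsigned long bitsize, unsigned long typesize)
{
  if (*remaining < bitsize)
    {
      *bitpos += *remaining;     /* skip to the next 'word' */
      *remaining = typesize;     /* a full unit is available again */
    }
  *remaining -= bitsize;         /* the field consumes its bits */
}

int
main (void)
{
  unsigned long bitpos = 0, remaining = 32;
  ms_bump_demo (&bitpos, &remaining, 17, 32);  /* fits in the unit */
  assert (bitpos == 0 && remaining == 15);
  ms_bump_demo (&bitpos, &remaining, 17, 32);  /* out of bits: bump */
  assert (bitpos == 15 && remaining == 15);
  return 0;
}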
1389 1486
1390 /* If we're starting a new run of same size type bitfields 1487 /* If we're starting a new run of same type size bitfields
1391 (or a run of non-bitfields), set up the "first of the run" 1488 (or a run of non-bitfields), set up the "first of the run"
1392 fields. 1489 fields.
1393 1490
1394 That is, if the current field is not a bitfield, or if there 1491 That is, if the current field is not a bitfield, or if there
1395 was a prior bitfield whose type size differs, or if there wasn't 1492 was a prior bitfield whose type size differs, or if there wasn't
1410 /* (When not a bitfield), we could be seeing a flex array (with 1507 /* (When not a bitfield), we could be seeing a flex array (with
1411 no DECL_SIZE). Since we won't be using remaining_in_alignment 1508 no DECL_SIZE). Since we won't be using remaining_in_alignment
1412 until we see a bitfield (and come by here again) we just skip 1509 until we see a bitfield (and come by here again) we just skip
1413 calculating it. */ 1510 calculating it. */
1414 if (DECL_SIZE (field) != NULL 1511 if (DECL_SIZE (field) != NULL
1415 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 1) 1512 && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field)))
1416 && host_integerp (DECL_SIZE (field), 1)) 1513 && tree_fits_uhwi_p (DECL_SIZE (field)))
1417 { 1514 {
1418 unsigned HOST_WIDE_INT bitsize 1515 unsigned HOST_WIDE_INT bitsize
1419 = tree_low_cst (DECL_SIZE (field), 1); 1516 = tree_to_uhwi (DECL_SIZE (field));
1420 unsigned HOST_WIDE_INT typesize 1517 unsigned HOST_WIDE_INT typesize
1421 = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1); 1518 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field)));
1422 1519
1423 if (typesize < bitsize) 1520 if (typesize < bitsize)
1424 rli->remaining_in_alignment = 0; 1521 rli->remaining_in_alignment = 0;
1425 else 1522 else
1426 rli->remaining_in_alignment = typesize - bitsize; 1523 rli->remaining_in_alignment = typesize - bitsize;
1443 /* Offset so far becomes the position of this field after normalizing. */ 1540 /* Offset so far becomes the position of this field after normalizing. */
1444 normalize_rli (rli); 1541 normalize_rli (rli);
1445 DECL_FIELD_OFFSET (field) = rli->offset; 1542 DECL_FIELD_OFFSET (field) = rli->offset;
1446 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos; 1543 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1447 SET_DECL_OFFSET_ALIGN (field, rli->offset_align); 1544 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1545 handle_warn_if_not_align (field, rli->record_align);
1546
1547 /* Evaluate nonconstant offsets only once, either now or as soon as safe. */
1548 if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST)
1549 DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field));
1448 1550
1449 /* If this field ended up more aligned than we thought it would be (we 1551 /* If this field ended up more aligned than we thought it would be (we
1450 approximate this by seeing if its position changed), lay out the field 1552 approximate this by seeing if its position changed), lay out the field
1451 again; perhaps we can use an integral mode for it now. */ 1553 again; perhaps we can use an integral mode for it now. */
1452 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field))) 1554 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1453 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1) 1555 actual_align = least_bit_hwi (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)));
1454 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1455 else if (integer_zerop (DECL_FIELD_OFFSET (field))) 1556 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1456 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align); 1557 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1457 else if (host_integerp (DECL_FIELD_OFFSET (field), 1)) 1558 else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1458 actual_align = (BITS_PER_UNIT 1559 actual_align = (BITS_PER_UNIT
1459 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1) 1560 * least_bit_hwi (tree_to_uhwi (DECL_FIELD_OFFSET (field))));
1460 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1461 else 1561 else
1462 actual_align = DECL_OFFSET_ALIGN (field); 1562 actual_align = DECL_OFFSET_ALIGN (field);
1463 /* ACTUAL_ALIGN is still the actual alignment *within the record*. 1563 /* ACTUAL_ALIGN is still the actual alignment *within the record*.
1464 Store / extract bit field operations will check the alignment of the 1564 Store / extract bit field operations will check the alignment of the
1465 record against the mode of bit fields. */ 1565 record against the mode of bit fields. */
1528 rli->offset_align = BITS_PER_UNIT; 1628 rli->offset_align = BITS_PER_UNIT;
1529 normalize_rli (rli); 1629 normalize_rli (rli);
1530 1630
1531 /* Determine the desired alignment. */ 1631 /* Determine the desired alignment. */
1532 #ifdef ROUND_TYPE_ALIGN 1632 #ifdef ROUND_TYPE_ALIGN
1533 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), 1633 SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1534 rli->record_align); 1634 rli->record_align));
1535 #else 1635 #else
1536 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align); 1636 SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
1537 #endif 1637 #endif
1538 1638
1539 /* Compute the size so far. Be sure to allow for extra bits in the 1639 /* Compute the size so far. Be sure to allow for extra bits in the
1540 size in bytes. We have guaranteed above that it will be no more 1640 size in bytes. We have guaranteed above that it will be no more
1541 than a single byte. */ 1641 than a single byte. */
1603 1703
1604 void 1704 void
1605 compute_record_mode (tree type) 1705 compute_record_mode (tree type)
1606 { 1706 {
1607 tree field; 1707 tree field;
1608 enum machine_mode mode = VOIDmode; 1708 machine_mode mode = VOIDmode;
1609 1709
1610 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that. 1710 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1611 However, if possible, we use a mode that fits in a register 1711 However, if possible, we use a mode that fits in a register
1612 instead, in order to allow for better optimization down the 1712 instead, in order to allow for better optimization down the
1613 line. */ 1713 line. */
1614 SET_TYPE_MODE (type, BLKmode); 1714 SET_TYPE_MODE (type, BLKmode);
1615 1715
1616 if (! host_integerp (TYPE_SIZE (type), 1)) 1716 if (! tree_fits_uhwi_p (TYPE_SIZE (type)))
1617 return; 1717 return;
1618 1718
1619 /* A record which has any BLKmode members must itself be 1719 /* A record which has any BLKmode members must itself be
1620 BLKmode; it can't go in a register. Unless the member is 1720 BLKmode; it can't go in a register. Unless the member is
1621 BLKmode only because it isn't aligned. */ 1721 BLKmode only because it isn't aligned. */
1627 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK 1727 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1628 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode 1728 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1629 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)) 1729 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1630 && !(TYPE_SIZE (TREE_TYPE (field)) != 0 1730 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1631 && integer_zerop (TYPE_SIZE (TREE_TYPE (field))))) 1731 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1632 || ! host_integerp (bit_position (field), 1) 1732 || ! tree_fits_uhwi_p (bit_position (field))
1633 || DECL_SIZE (field) == 0 1733 || DECL_SIZE (field) == 0
1634 || ! host_integerp (DECL_SIZE (field), 1)) 1734 || ! tree_fits_uhwi_p (DECL_SIZE (field)))
1635 return; 1735 return;
1636 1736
1637 /* If this field is the whole struct, remember its mode so 1737 /* If this field is the whole struct, remember its mode so
1638 that, say, we can put a double in a class into a DF 1738 that, say, we can put a double in a class into a DF
1639 register instead of forcing it to live in the stack. */ 1739 register instead of forcing it to live in the stack. */
1640 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field))) 1740 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1641 mode = DECL_MODE (field); 1741 mode = DECL_MODE (field);
1642 1742
1643 #ifdef MEMBER_TYPE_FORCES_BLK 1743 /* With some targets, it is sub-optimal to access an aligned
1644 /* With some targets, eg. c4x, it is sub-optimal 1744 BLKmode structure as a scalar. */
1645 to access an aligned BLKmode structure as a scalar. */ 1745 if (targetm.member_type_forces_blk (field, mode))
1646
1647 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1648 return; 1746 return;
1649 #endif /* MEMBER_TYPE_FORCES_BLK */
1650 } 1747 }
1651 1748
1652 /* If we only have one real field, use its mode if that mode's size 1749 /* If we only have one real field, use its mode if that mode's size
1653 matches the type's size. This only applies to RECORD_TYPE. This 1750 matches the type's size. This only applies to RECORD_TYPE. This
1654 does not apply to unions. */ 1751 does not apply to unions. */
1655 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode 1752 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1656 && host_integerp (TYPE_SIZE (type), 1) 1753 && tree_fits_uhwi_p (TYPE_SIZE (type))
1657 && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type))) 1754 && GET_MODE_BITSIZE (mode) == tree_to_uhwi (TYPE_SIZE (type)))
1658 SET_TYPE_MODE (type, mode); 1755 ;
1659 else 1756 else
1660 SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1)); 1757 mode = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1).else_blk ();
1661 1758
1662 /* If structure's known alignment is less than what the scalar 1759 /* If structure's known alignment is less than what the scalar
1663 mode would need, and it matters, then stick with BLKmode. */ 1760 mode would need, and it matters, then stick with BLKmode. */
1664 if (TYPE_MODE (type) != BLKmode 1761 if (mode != BLKmode
1665 && STRICT_ALIGNMENT 1762 && STRICT_ALIGNMENT
1666 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT 1763 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1667 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type)))) 1764 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (mode)))
1668 { 1765 {
1669 /* If this is the only reason this type is BLKmode, then 1766 /* If this is the only reason this type is BLKmode, then
1670 don't force containing types to be BLKmode. */ 1767 don't force containing types to be BLKmode. */
1671 TYPE_NO_FORCE_BLK (type) = 1; 1768 TYPE_NO_FORCE_BLK (type) = 1;
1672 SET_TYPE_MODE (type, BLKmode); 1769 mode = BLKmode;
1673 } 1770 }
1771
1772 SET_TYPE_MODE (type, mode);
1674 } 1773 }
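
[Editor's note] The single-field special case above has an observable consequence in standard C: a struct whose only field fills it completely is the same size as the field, which is what lets it take the field's mode (say DFmode) and live in a register. A small illustration under typical-ABI assumptions:

#include <assert.h>

struct wrap_double { double d; };  /* one field, no padding expected */

int
main (void)
{
  /* Equal sizes are what let compute_record_mode pick the field's
     DECL_MODE instead of BLKmode for the whole record.  */
  assert (sizeof (struct wrap_double) == sizeof (double));
  return 0;
}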
1675 1774
1676 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid 1775 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1677 out. */ 1776 out. */
1678 1777
1681 { 1780 {
1682 /* Normally, use the alignment corresponding to the mode chosen. 1781 /* Normally, use the alignment corresponding to the mode chosen.
1683 However, where strict alignment is not required, avoid 1782 However, where strict alignment is not required, avoid
1684 over-aligning structures, since most compilers do not do this 1783 over-aligning structures, since most compilers do not do this
1685 alignment. */ 1784 alignment. */
1686 1785 if (TYPE_MODE (type) != BLKmode
1687 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode 1786 && TYPE_MODE (type) != VOIDmode
1688 && (STRICT_ALIGNMENT 1787 && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type)))
1689 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1690 && TREE_CODE (type) != QUAL_UNION_TYPE
1691 && TREE_CODE (type) != ARRAY_TYPE)))
1692 { 1788 {
1693 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type)); 1789 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1694 1790
1695 /* Don't override a larger alignment requirement coming from a user 1791 /* Don't override a larger alignment requirement coming from a user
1696 alignment of one of the fields. */ 1792 alignment of one of the fields. */
1697 if (mode_align >= TYPE_ALIGN (type)) 1793 if (mode_align >= TYPE_ALIGN (type))
1698 { 1794 {
1699 TYPE_ALIGN (type) = mode_align; 1795 SET_TYPE_ALIGN (type, mode_align);
1700 TYPE_USER_ALIGN (type) = 0; 1796 TYPE_USER_ALIGN (type) = 0;
1701 } 1797 }
1702 } 1798 }
1703 1799
1704 /* Do machine-dependent extra alignment. */ 1800 /* Do machine-dependent extra alignment. */
1705 #ifdef ROUND_TYPE_ALIGN 1801 #ifdef ROUND_TYPE_ALIGN
1706 TYPE_ALIGN (type) 1802 SET_TYPE_ALIGN (type,
1707 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT); 1803 ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
1708 #endif 1804 #endif
1709 1805
1710 /* If we failed to find a simple way to calculate the unit size 1806 /* If we failed to find a simple way to calculate the unit size
1711 of the type, find it by division. */ 1807 of the type, find it by division. */
1712 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0) 1808 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1739 tree variant; 1835 tree variant;
1740 /* Record layout info of this variant. */ 1836 /* Record layout info of this variant. */
1741 tree size = TYPE_SIZE (type); 1837 tree size = TYPE_SIZE (type);
1742 tree size_unit = TYPE_SIZE_UNIT (type); 1838 tree size_unit = TYPE_SIZE_UNIT (type);
1743 unsigned int align = TYPE_ALIGN (type); 1839 unsigned int align = TYPE_ALIGN (type);
1840 unsigned int precision = TYPE_PRECISION (type);
1744 unsigned int user_align = TYPE_USER_ALIGN (type); 1841 unsigned int user_align = TYPE_USER_ALIGN (type);
1745 enum machine_mode mode = TYPE_MODE (type); 1842 machine_mode mode = TYPE_MODE (type);
1746 1843
1747 /* Copy it into all variants. */ 1844 /* Copy it into all variants. */
1748 for (variant = TYPE_MAIN_VARIANT (type); 1845 for (variant = TYPE_MAIN_VARIANT (type);
1749 variant != 0; 1846 variant != 0;
1750 variant = TYPE_NEXT_VARIANT (variant)) 1847 variant = TYPE_NEXT_VARIANT (variant))
1751 { 1848 {
1752 TYPE_SIZE (variant) = size; 1849 TYPE_SIZE (variant) = size;
1753 TYPE_SIZE_UNIT (variant) = size_unit; 1850 TYPE_SIZE_UNIT (variant) = size_unit;
1754 TYPE_ALIGN (variant) = align; 1851 unsigned valign = align;
1755 TYPE_USER_ALIGN (variant) = user_align; 1852 if (TYPE_USER_ALIGN (variant))
1853 valign = MAX (valign, TYPE_ALIGN (variant));
1854 else
1855 TYPE_USER_ALIGN (variant) = user_align;
1856 SET_TYPE_ALIGN (variant, valign);
1857 TYPE_PRECISION (variant) = precision;
1756 SET_TYPE_MODE (variant, mode); 1858 SET_TYPE_MODE (variant, mode);
1757 } 1859 }
1758 } 1860 }
1861 }
1862
1863 /* Return a new underlying object for a bitfield started with FIELD. */
1864
1865 static tree
1866 start_bitfield_representative (tree field)
1867 {
1868 tree repr = make_node (FIELD_DECL);
1869 DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field);
1870 /* Force the representative to begin at a BITS_PER_UNIT aligned
1871 boundary - C++ may use tail-padding of a base object to
1872 continue packing bits so the bitfield region does not start
1873 at bit zero (see g++.dg/abi/bitfield5.C for example).
1874 Unallocated bits may happen for other reasons as well,
1875 for example Ada which allows explicit bit-granular structure layout. */
1876 DECL_FIELD_BIT_OFFSET (repr)
1877 = size_binop (BIT_AND_EXPR,
1878 DECL_FIELD_BIT_OFFSET (field),
1879 bitsize_int (~(BITS_PER_UNIT - 1)));
1880 SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field));
1881 DECL_SIZE (repr) = DECL_SIZE (field);
1882 DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field);
1883 DECL_PACKED (repr) = DECL_PACKED (field);
1884 DECL_CONTEXT (repr) = DECL_CONTEXT (field);
1885 /* There are no indirect accesses to this field. If we introduce
1886 some then they have to use the record alias set. This makes
1887 sure to properly conflict with [indirect] accesses to addressable
1888 fields of the bitfield group. */
1889 DECL_NONADDRESSABLE_P (repr) = 1;
1890 return repr;
1891 }
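
[Editor's note] The BIT_AND_EXPR above is a round-down-to-byte on the bit offset; the same mask arithmetic on plain host integers (assuming 8-bit bytes):

#include <assert.h>

/* Round a bit offset down to its containing byte boundary, as the
   BIT_AND_EXPR with ~(BITS_PER_UNIT - 1) does on trees.  */
static unsigned long
round_down_to_byte (unsigned long bit_offset)
{
  return bit_offset & ~(unsigned long) 7;   /* assumes 8-bit bytes */
}

int
main (void)
{
  assert (round_down_to_byte (13) == 8);  /* mid-byte start snaps back */
  assert (round_down_to_byte (16) == 16); /* already byte-aligned */
  return 0;
}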
1892
1893 /* Finish up a bitfield group that was started by creating the underlying
1894 object REPR with the last field in the bitfield group FIELD. */
1895
1896 static void
1897 finish_bitfield_representative (tree repr, tree field)
1898 {
1899 unsigned HOST_WIDE_INT bitsize, maxbitsize;
1900 tree nextf, size;
1901
1902 size = size_diffop (DECL_FIELD_OFFSET (field),
1903 DECL_FIELD_OFFSET (repr));
1904 while (TREE_CODE (size) == COMPOUND_EXPR)
1905 size = TREE_OPERAND (size, 1);
1906 gcc_assert (tree_fits_uhwi_p (size));
1907 bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT
1908 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
1909 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))
1910 + tree_to_uhwi (DECL_SIZE (field)));
1911
1912 /* Round up bitsize to multiples of BITS_PER_UNIT. */
1913 bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
1914
1915 /* Now nothing tells us how to pad out bitsize ... */
1916 nextf = DECL_CHAIN (field);
1917 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
1918 nextf = DECL_CHAIN (nextf);
1919 if (nextf)
1920 {
1921 tree maxsize;
1922 /* If there was an error, the field may be not laid out
1923 correctly. Don't bother to do anything. */
1924 if (TREE_TYPE (nextf) == error_mark_node)
1925 return;
1926 maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
1927 DECL_FIELD_OFFSET (repr));
1928 if (tree_fits_uhwi_p (maxsize))
1929 {
1930 maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
1931 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf))
1932 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1933 /* If the group ends within a bitfield nextf does not need to be
1934 aligned to BITS_PER_UNIT. Thus round up. */
1935 maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
1936 }
1937 else
1938 maxbitsize = bitsize;
1939 }
1940 else
1941 {
1942 /* Note that if the C++ FE sets up tail-padding to be re-used it
1943 creates an as-base variant of the type with TYPE_SIZE adjusted
1944 accordingly. So it is safe to include tail-padding here. */
1945 tree aggsize = lang_hooks.types.unit_size_without_reusable_padding
1946 (DECL_CONTEXT (field));
1947 tree maxsize = size_diffop (aggsize, DECL_FIELD_OFFSET (repr));
1948 /* We cannot generally rely on maxsize to fold to an integer constant,
1949 so use bitsize as fallback for this case. */
1950 if (tree_fits_uhwi_p (maxsize))
1951 maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT
1952 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
1953 else
1954 maxbitsize = bitsize;
1955 }
1956
1957 /* Only if we don't artificially break up the representative in
1958 the middle of a large bitfield with different possibly
1959 overlapping representatives. And all representatives start
1960 at a byte offset. */
1961 gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
1962
1963 /* Find the smallest nice mode to use. */
1964 opt_scalar_int_mode mode_iter;
1965 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1966 if (GET_MODE_BITSIZE (mode_iter.require ()) >= bitsize)
1967 break;
1968
1969 scalar_int_mode mode;
1970 if (!mode_iter.exists (&mode)
1971 || GET_MODE_BITSIZE (mode) > maxbitsize
1972 || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)
1973 {
1974 /* We really want a BLKmode representative only as a last resort,
1975 considering the member b in
1976 struct { int a : 7; int b : 17; int c; } __attribute__((packed));
1977 Otherwise we simply want to split the representative up
1978 allowing for overlaps within the bitfield region as required for
1979 struct { int a : 7; int b : 7;
1980 int c : 10; int d; } __attribute__((packed));
1981 [0, 15] HImode for a and b, [8, 23] HImode for c. */
1982 DECL_SIZE (repr) = bitsize_int (bitsize);
1983 DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
1984 SET_DECL_MODE (repr, BLKmode);
1985 TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
1986 bitsize / BITS_PER_UNIT);
1987 }
1988 else
1989 {
1990 unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
1991 DECL_SIZE (repr) = bitsize_int (modesize);
1992 DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
1993 SET_DECL_MODE (repr, mode);
1994 TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
1995 }
1996
1997 /* Remember whether the bitfield group is at the end of the
1998 structure or not. */
1999 DECL_CHAIN (repr) = nextf;
2000 }
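
[Editor's note] The mode search above can be mimicked with a table of integer widths: take the narrowest one that covers bitsize, and fall back to a byte array (the BLKmode case) when even that overshoots maxbitsize. A hedged sketch:

#include <assert.h>

/* Illustrative stand-in for the FOR_EACH_MODE_IN_CLASS search: return
   the narrowest integer width covering BITSIZE, or 0 (think BLKmode)
   when that width would exceed MAXBITSIZE.  */
static unsigned
pick_repr_width (unsigned bitsize, unsigned maxbitsize)
{
  static const unsigned widths[] = { 8, 16, 32, 64 };
  for (unsigned i = 0; i < sizeof widths / sizeof widths[0]; i++)
    if (widths[i] >= bitsize)
      return widths[i] <= maxbitsize ? widths[i] : 0;
  return 0;
}

int
main (void)
{
  assert (pick_repr_width (9, 32) == 16);  /* HImode-sized repr */
  assert (pick_repr_width (24, 24) == 0);  /* packed a:7/b:17 case */
  return 0;
}

For the packed { int a : 7; int b : 17; int c; } example in the comment above, a and b give a rounded bitsize of 24 with only 24 bits available before c, so the 32-bit candidate is rejected and the byte-array fallback is used.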
2001
2002 /* Compute and set FIELD_DECLs for the underlying objects we should
2003 use for bitfield access for the structure T. */
2004
2005 void
2006 finish_bitfield_layout (tree t)
2007 {
2008 tree field, prev;
2009 tree repr = NULL_TREE;
2010
2011 /* Unions would be special: for the ease of type-punning optimizations
2012 we could use the underlying type as a hint for the representative
2013 if the bitfield would fit and the representative would not exceed
2014 the union in size. */
2015 if (TREE_CODE (t) != RECORD_TYPE)
2016 return;
2017
2018 for (prev = NULL_TREE, field = TYPE_FIELDS (t);
2019 field; field = DECL_CHAIN (field))
2020 {
2021 if (TREE_CODE (field) != FIELD_DECL)
2022 continue;
2023
2024 /* In the C++ memory model, consecutive bit fields in a structure are
2025 considered one memory location and updating a memory location
2026 may not store into adjacent memory locations. */
2027 if (!repr
2028 && DECL_BIT_FIELD_TYPE (field))
2029 {
2030 /* Start new representative. */
2031 repr = start_bitfield_representative (field);
2032 }
2033 else if (repr
2034 && ! DECL_BIT_FIELD_TYPE (field))
2035 {
2036 /* Finish off new representative. */
2037 finish_bitfield_representative (repr, prev);
2038 repr = NULL_TREE;
2039 }
2040 else if (DECL_BIT_FIELD_TYPE (field))
2041 {
2042 gcc_assert (repr != NULL_TREE);
2043
2044 /* Zero-size bitfields finish off a representative and
2045 do not have a representative themselves. This is
2046 required by the C++ memory model. */
2047 if (integer_zerop (DECL_SIZE (field)))
2048 {
2049 finish_bitfield_representative (repr, prev);
2050 repr = NULL_TREE;
2051 }
2052
2053 /* We assume that either DECL_FIELD_OFFSET of the representative
2054 and of each bitfield member are constants, or that they are equal.
2055 This is because we need to be able to compute the bit-offset
2056 of each field relative to the representative in get_bit_range
2057 during RTL expansion.
2058 If these constraints are not met, simply force a new
2059 representative to be generated. That will at most
2060 generate worse code but still maintain correctness with
2061 respect to the C++ memory model. */
2062 else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr))
2063 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
2064 || operand_equal_p (DECL_FIELD_OFFSET (repr),
2065 DECL_FIELD_OFFSET (field), 0)))
2066 {
2067 finish_bitfield_representative (repr, prev);
2068 repr = start_bitfield_representative (field);
2069 }
2070 }
2071 else
2072 continue;
2073
2074 if (repr)
2075 DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;
2076
2077 prev = field;
2078 }
2079
2080 if (repr)
2081 finish_bitfield_representative (repr, prev);
1759 } 2082 }
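
[Editor's note] To make the grouping rules above concrete, here is how the loop carves up a small struct (declaration-only illustration under a typical ABI; the representatives themselves are internal FIELD_DECLs, named R1/R2 here purely for exposition):

struct grouping_demo
{
  int a : 3;   /* starts representative R1 */
  int b : 5;   /* same run, joins R1 */
  int   : 0;   /* zero-size bitfield: finishes R1, has no repr itself */
  int c : 9;   /* starts representative R2 */
  char d;      /* non-bitfield: finishes R2, accessed directly */
};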
1760 2083
1761 /* Do all of the work required to lay out the type indicated by RLI, 2084 /* Do all of the work required to lay out the type indicated by RLI,
1762 once the fields have been laid out. This function will call `free' 2085 once the fields have been laid out. This function will call `free'
1763 for RLI, unless FREE_P is false. Passing a value other than false 2086 for RLI, unless FREE_P is false. Passing a value other than false
1776 compute_record_mode (rli->t); 2099 compute_record_mode (rli->t);
1777 2100
1778 /* Perform any last tweaks to the TYPE_SIZE, etc. */ 2101 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1779 finalize_type_size (rli->t); 2102 finalize_type_size (rli->t);
1780 2103
1781 /* Propagate TYPE_PACKED to variants. With C++ templates, 2104 /* Compute bitfield representatives. */
1782 handle_packed_attribute is too early to do this. */ 2105 finish_bitfield_layout (rli->t);
2106
2107 /* Propagate TYPE_PACKED and TYPE_REVERSE_STORAGE_ORDER to variants.
2108 With C++ templates, it is too early to do this when the attribute
2109 is being parsed. */
1783 for (variant = TYPE_NEXT_VARIANT (rli->t); variant; 2110 for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
1784 variant = TYPE_NEXT_VARIANT (variant)) 2111 variant = TYPE_NEXT_VARIANT (variant))
1785 TYPE_PACKED (variant) = TYPE_PACKED (rli->t); 2112 {
2113 TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
2114 TYPE_REVERSE_STORAGE_ORDER (variant)
2115 = TYPE_REVERSE_STORAGE_ORDER (rli->t);
2116 }
1786 2117
1787 /* Lay out any static members. This is done now because their type 2118 /* Lay out any static members. This is done now because their type
1788 may use the record's type. */ 2119 may use the record's type. */
1789 while (!VEC_empty (tree, rli->pending_statics)) 2120 while (!vec_safe_is_empty (rli->pending_statics))
1790 layout_decl (VEC_pop (tree, rli->pending_statics), 0); 2121 layout_decl (rli->pending_statics->pop (), 0);
1791 2122
1792 /* Clean up. */ 2123 /* Clean up. */
1793 if (free_p) 2124 if (free_p)
1794 { 2125 {
1795 VEC_free (tree, gc, rli->pending_statics); 2126 vec_free (rli->pending_statics);
1796 free (rli); 2127 free (rli);
1797 } 2128 }
1798 } 2129 }
1799 2130
1800 2131
1818 } 2149 }
1819 TYPE_FIELDS (type) = tail; 2150 TYPE_FIELDS (type) = tail;
1820 2151
1821 if (align_type) 2152 if (align_type)
1822 { 2153 {
1823 TYPE_ALIGN (type) = TYPE_ALIGN (align_type); 2154 SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
1824 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type); 2155 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
2156 SET_TYPE_WARN_IF_NOT_ALIGN (type,
2157 TYPE_WARN_IF_NOT_ALIGN (align_type));
1825 } 2158 }
1826 2159
1827 layout_type (type); 2160 layout_type (type);
1828 #if 0 /* not yet, should get fixed properly later */ 2161 #if 0 /* not yet, should get fixed properly later */
1829 TYPE_NAME (type) = make_type_decl (get_identifier (name), type); 2162 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1851 gcc_assert (type); 2184 gcc_assert (type);
1852 2185
1853 if (type == error_mark_node) 2186 if (type == error_mark_node)
1854 return; 2187 return;
1855 2188
2189 /* We don't want finalize_type_size to copy an alignment attribute to
2190 variants that don't have it. */
2191 type = TYPE_MAIN_VARIANT (type);
2192
1856 /* Do nothing if type has been laid out before. */ 2193 /* Do nothing if type has been laid out before. */
1857 if (TYPE_SIZE (type)) 2194 if (TYPE_SIZE (type))
1858 return; 2195 return;
1859 2196
1860 switch (TREE_CODE (type)) 2197 switch (TREE_CODE (type))
1862 case LANG_TYPE: 2199 case LANG_TYPE:
1863 /* This kind of type is the responsibility 2200 /* This kind of type is the responsibility
1864 of the language-specific code. */ 2201 of the language-specific code. */
1865 gcc_unreachable (); 2202 gcc_unreachable ();
1866 2203
1867 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */ 2204 case BOOLEAN_TYPE:
1868 if (TYPE_PRECISION (type) == 0)
1869 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1870
1871 /* ... fall through ... */
1872
1873 case INTEGER_TYPE: 2205 case INTEGER_TYPE:
1874 case ENUMERAL_TYPE: 2206 case ENUMERAL_TYPE:
1875 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST 2207 {
1876 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0) 2208 scalar_int_mode mode
1877 TYPE_UNSIGNED (type) = 1; 2209 = smallest_int_mode_for_size (TYPE_PRECISION (type));
1878 2210 SET_TYPE_MODE (type, mode);
1879 SET_TYPE_MODE (type, 2211 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
1880 smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT)); 2212 /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */
1881 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); 2213 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1882 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); 2214 break;
1883 break; 2215 }
1884 2216
1885 case REAL_TYPE: 2217 case REAL_TYPE:
1886 SET_TYPE_MODE (type, 2218 {
1887 mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0)); 2219 /* Allow the caller to choose the type mode, which is how decimal
1888 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); 2220 floats are distinguished from binary ones. */
1889 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); 2221 if (TYPE_MODE (type) == VOIDmode)
1890 break; 2222 SET_TYPE_MODE
2223 (type, float_mode_for_size (TYPE_PRECISION (type)).require ());
2224 scalar_float_mode mode = as_a <scalar_float_mode> (TYPE_MODE (type));
2225 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2226 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2227 break;
2228 }
1891 2229
1892 case FIXED_POINT_TYPE: 2230 case FIXED_POINT_TYPE:
1893 /* TYPE_MODE (type) has been set already. */ 2231 {
1894 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); 2232 /* TYPE_MODE (type) has been set already. */
1895 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); 2233 scalar_mode mode = SCALAR_TYPE_MODE (type);
1896 break; 2234 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
2235 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
2236 break;
2237 }
1897 2238
1898 case COMPLEX_TYPE: 2239 case COMPLEX_TYPE:
1899 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); 2240 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1900 SET_TYPE_MODE (type, 2241 SET_TYPE_MODE (type,
1901 mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)), 2242 GET_MODE_COMPLEX_MODE (TYPE_MODE (TREE_TYPE (type))));
1902 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE 2243
1903 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
1904 0));
1905 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); 2244 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1906 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); 2245 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1907 break; 2246 break;
1908 2247
1909 case VECTOR_TYPE: 2248 case VECTOR_TYPE:
1914 gcc_assert (!(nunits & (nunits - 1))); 2253 gcc_assert (!(nunits & (nunits - 1)));
1915 2254
1916 /* Find an appropriate mode for the vector type. */ 2255 /* Find an appropriate mode for the vector type. */
1917 if (TYPE_MODE (type) == VOIDmode) 2256 if (TYPE_MODE (type) == VOIDmode)
1918 SET_TYPE_MODE (type, 2257 SET_TYPE_MODE (type,
1919 mode_for_vector (TYPE_MODE (innertype), nunits)); 2258 mode_for_vector (SCALAR_TYPE_MODE (innertype),
2259 nunits).else_blk ());
1920 2260
1921 TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type)); 2261 TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
1922 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); 2262 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1923 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR, 2263 /* Several boolean vector elements may fit in a single unit. */
1924 TYPE_SIZE_UNIT (innertype), 2264 if (VECTOR_BOOLEAN_TYPE_P (type)
1925 size_int (nunits), 0); 2265 && type->type_common.mode != BLKmode)
1926 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype), 2266 TYPE_SIZE_UNIT (type)
1927 bitsize_int (nunits), 0); 2267 = size_int (GET_MODE_SIZE (type->type_common.mode));
1928 2268 else
1929 /* Always naturally align vectors. This prevents ABI changes 2269 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1930 depending on whether or not native vector modes are supported. */ 2270 TYPE_SIZE_UNIT (innertype),
1931 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0); 2271 size_int (nunits));
2272 TYPE_SIZE (type) = int_const_binop (MULT_EXPR,
2273 TYPE_SIZE (innertype),
2274 bitsize_int (nunits));
2275
2276 /* For vector types, we do not default to the mode's alignment.
2277 Instead, query a target hook, defaulting to natural alignment.
2278 This prevents ABI changes depending on whether or not native
2279 vector modes are supported. */
2280 SET_TYPE_ALIGN (type, targetm.vector_alignment (type));
2281
2282 /* However, if the underlying mode requires a bigger alignment than
2283 what the target hook provides, we cannot use the mode. For now,
2284 simply reject that case. */
2285 gcc_assert (TYPE_ALIGN (type)
2286 >= GET_MODE_ALIGNMENT (TYPE_MODE (type)));
1932 break; 2287 break;
1933 } 2288 }
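
[Editor's note] The vector sizing above is elementwise: total bits are the element's precision times the lane count, which must be a power of two. A standalone check of that arithmetic:

#include <assert.h>

int
main (void)
{
  unsigned nunits = 4;                    /* lanes; must be a power of 2 */
  unsigned inner_bits = 32;               /* element precision */
  assert ((nunits & (nunits - 1)) == 0);  /* the gcc_assert above */
  assert (inner_bits * nunits == 128);    /* e.g. a 128-bit V4SI vector */
  return 0;
}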
1934 2289
1935 case VOID_TYPE: 2290 case VOID_TYPE:
1936 /* This is an incomplete type and so doesn't have a size. */ 2291 /* This is an incomplete type and so doesn't have a size. */
1937 TYPE_ALIGN (type) = 1; 2292 SET_TYPE_ALIGN (type, 1);
1938 TYPE_USER_ALIGN (type) = 0; 2293 TYPE_USER_ALIGN (type) = 0;
1939 SET_TYPE_MODE (type, VOIDmode); 2294 SET_TYPE_MODE (type, VOIDmode);
1940 break; 2295 break;
1941 2296
2297 case POINTER_BOUNDS_TYPE:
2298 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
2299 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
2300 break;
2301
1942 case OFFSET_TYPE: 2302 case OFFSET_TYPE:
1943 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); 2303 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1944 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT); 2304 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS);
1945 /* A pointer might be MODE_PARTIAL_INT, 2305 /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be
1946 but ptrdiff_t must be integral. */ 2306 integral, which may be an __intN. */
1947 SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); 2307 SET_TYPE_MODE (type, int_mode_for_size (POINTER_SIZE, 0).require ());
1948 TYPE_PRECISION (type) = POINTER_SIZE; 2308 TYPE_PRECISION (type) = POINTER_SIZE;
1949 break; 2309 break;
1950 2310
1951 case FUNCTION_TYPE: 2311 case FUNCTION_TYPE:
1952 case METHOD_TYPE: 2312 case METHOD_TYPE:
1953 /* It's hard to see what the mode and size of a function ought to 2313 /* It's hard to see what the mode and size of a function ought to
1954 be, but we do know the alignment is FUNCTION_BOUNDARY, so 2314 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1955 make it consistent with that. */ 2315 make it consistent with that. */
1956 SET_TYPE_MODE (type, mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0)); 2316 SET_TYPE_MODE (type,
2317 int_mode_for_size (FUNCTION_BOUNDARY, 0).else_blk ());
1957 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY); 2318 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1958 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT); 2319 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1959 break; 2320 break;
1960 2321
1961 case POINTER_TYPE: 2322 case POINTER_TYPE:
1962 case REFERENCE_TYPE: 2323 case REFERENCE_TYPE:
1963 { 2324 {
1964 enum machine_mode mode = TYPE_MODE (type); 2325 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
1965 if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal)
1966 {
1967 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type));
1968 mode = targetm.addr_space.address_mode (as);
1969 }
1970
1971 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode)); 2326 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
1972 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); 2327 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1973 TYPE_UNSIGNED (type) = 1; 2328 TYPE_UNSIGNED (type) = 1;
1974 TYPE_PRECISION (type) = GET_MODE_BITSIZE (mode); 2329 TYPE_PRECISION (type) = GET_MODE_PRECISION (mode);
1975 } 2330 }
1976 break; 2331 break;
1977 2332
1978 case ARRAY_TYPE: 2333 case ARRAY_TYPE:
1979 { 2334 {
1980 tree index = TYPE_DOMAIN (type); 2335 tree index = TYPE_DOMAIN (type);
1981 tree element = TREE_TYPE (type); 2336 tree element = TREE_TYPE (type);
1982
1983 build_pointer_type (element);
1984 2337
1985 /* We need to know both bounds in order to compute the size. */ 2338 /* We need to know both bounds in order to compute the size. */
1986 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) 2339 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1987 && TYPE_SIZE (element)) 2340 && TYPE_SIZE (element))
1988 { 2341 {
1994 /* Make sure that an array of zero-sized element is zero-sized 2347 /* Make sure that an array of zero-sized element is zero-sized
1995 regardless of its extent. */ 2348 regardless of its extent. */
1996 if (integer_zerop (element_size)) 2349 if (integer_zerop (element_size))
1997 length = size_zero_node; 2350 length = size_zero_node;
1998 2351
1999 /* The initial subtraction should happen in the original type so 2352 /* The computation should happen in the original signedness so
2000 that (possible) negative values are handled appropriately. */ 2353 that (possible) negative values are handled appropriately
2354 when determining overflow. */
2001 else 2355 else
2002 length 2356 {
2003 = size_binop (PLUS_EXPR, size_one_node, 2357 /* ??? When it is obvious that the range is signed
2004 fold_convert (sizetype, 2358 represent it using ssizetype. */
2005 fold_build2 (MINUS_EXPR, 2359 if (TREE_CODE (lb) == INTEGER_CST
2006 TREE_TYPE (lb), 2360 && TREE_CODE (ub) == INTEGER_CST
2007 ub, lb))); 2361 && TYPE_UNSIGNED (TREE_TYPE (lb))
2362 && tree_int_cst_lt (ub, lb))
2363 {
2364 lb = wide_int_to_tree (ssizetype,
2365 offset_int::from (wi::to_wide (lb),
2366 SIGNED));
2367 ub = wide_int_to_tree (ssizetype,
2368 offset_int::from (wi::to_wide (ub),
2369 SIGNED));
2370 }
2371 length
2372 = fold_convert (sizetype,
2373 size_binop (PLUS_EXPR,
2374 build_int_cst (TREE_TYPE (lb), 1),
2375 size_binop (MINUS_EXPR, ub, lb)));
2376 }
2377
2378 /* ??? We have no way to distinguish a null-sized array from an
2379 array spanning the whole sizetype range, so we arbitrarily
2380 decide that [0, -1] is the only valid representation. */
2381 if (integer_zerop (length)
2382 && TREE_OVERFLOW (length)
2383 && integer_zerop (lb))
2384 length = size_zero_node;
2008 2385
2009 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, 2386 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
2010 fold_convert (bitsizetype, 2387 fold_convert (bitsizetype,
2011 length)); 2388 length));
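
[Editor's note] A hedged sketch of the length computation above on host integers, including the canonical [0, -1] empty-array convention (TYPE_SIZE above is then this length times the element's bit size):

#include <assert.h>

/* Illustrative only: number of elements of an array with bounds
   [LB, UB], where [0, -1] is the canonical empty range.  */
static unsigned long
array_length_demo (long lb, long ub)
{
  if (lb == 0 && ub == -1)
    return 0;                          /* the null-sized array */
  return (unsigned long) (ub - lb) + 1;
}

int
main (void)
{
  assert (array_length_demo (1, 10) == 10);
  assert (array_length_demo (0, -1) == 0);   /* empty array */
  return 0;
}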
2012 2389
2020 } 2397 }
2021 2398
2022 /* Now round the alignment and size, 2399 /* Now round the alignment and size,
2023 using machine-dependent criteria if any. */ 2400 using machine-dependent criteria if any. */
2024 2401
2402 unsigned align = TYPE_ALIGN (element);
2403 if (TYPE_USER_ALIGN (type))
2404 align = MAX (align, TYPE_ALIGN (type));
2405 else
2406 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
2407 if (!TYPE_WARN_IF_NOT_ALIGN (type))
2408 SET_TYPE_WARN_IF_NOT_ALIGN (type,
2409 TYPE_WARN_IF_NOT_ALIGN (element));
2025 #ifdef ROUND_TYPE_ALIGN 2410 #ifdef ROUND_TYPE_ALIGN
2026 TYPE_ALIGN (type) 2411 align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT);
2027 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
2028 #else 2412 #else
2029 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT); 2413 align = MAX (align, BITS_PER_UNIT);
2030 #endif 2414 #endif
2031 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element); 2415 SET_TYPE_ALIGN (type, align);
2032 SET_TYPE_MODE (type, BLKmode); 2416 SET_TYPE_MODE (type, BLKmode);
2033 if (TYPE_SIZE (type) != 0 2417 if (TYPE_SIZE (type) != 0
2034 #ifdef MEMBER_TYPE_FORCES_BLK 2418 && ! targetm.member_type_forces_blk (type, VOIDmode)
2035 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
2036 #endif
2037 /* BLKmode elements force BLKmode aggregate; 2419 /* BLKmode elements force BLKmode aggregate;
2038 else extract/store fields may lose. */ 2420 else extract/store fields may lose. */
2039 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode 2421 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
2040 || TYPE_NO_FORCE_BLK (TREE_TYPE (type)))) 2422 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
2041 { 2423 {
2042 /* One-element arrays get the component type's mode. */ 2424 SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type),
2043 if (simple_cst_equal (TYPE_SIZE (type), 2425 TYPE_SIZE (type)));
2044 TYPE_SIZE (TREE_TYPE (type))))
2045 SET_TYPE_MODE (type, TYPE_MODE (TREE_TYPE (type)));
2046 else
2047 SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type),
2048 MODE_INT, 1));
2049
2050 if (TYPE_MODE (type) != BLKmode 2426 if (TYPE_MODE (type) != BLKmode
2051 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT 2427 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
2052 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) 2428 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
2053 { 2429 {
2054 TYPE_NO_FORCE_BLK (type) = 1; 2430 TYPE_NO_FORCE_BLK (type) = 1;
2055 SET_TYPE_MODE (type, BLKmode); 2431 SET_TYPE_MODE (type, BLKmode);
2056 } 2432 }
2057 } 2433 }
2434 if (AGGREGATE_TYPE_P (element))
2435 TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (element);
2058 /* When the element size is constant, check that it is at least as 2436 /* When the element size is constant, check that it is at least as
2059 large as the element alignment. */ 2437 large as the element alignment. */
2060 if (TYPE_SIZE_UNIT (element) 2438 if (TYPE_SIZE_UNIT (element)
2061 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST 2439 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
2062 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than 2440 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
2102 } 2480 }
2103 2481
2104 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For 2482 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
2105 records and unions, finish_record_layout already called this 2483 records and unions, finish_record_layout already called this
2106 function. */ 2484 function. */
2107 if (TREE_CODE (type) != RECORD_TYPE 2485 if (!RECORD_OR_UNION_TYPE_P (type))
2108 && TREE_CODE (type) != UNION_TYPE
2109 && TREE_CODE (type) != QUAL_UNION_TYPE)
2110 finalize_type_size (type); 2486 finalize_type_size (type);
2111 2487
2112 /* We should never see alias sets on incomplete aggregates. And we 2488 /* We should never see alias sets on incomplete aggregates. And we
2113 should not call layout_type on aggregates that are already complete. */ 2489 should not call layout_type on aggregates that are already complete. */
2114 if (AGGREGATE_TYPE_P (type)) 2490 if (AGGREGATE_TYPE_P (type))
2115 gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type)); 2491 gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
2116 } 2492 }
2117 2493
2118 /* Vector types need to re-check the target flags each time we report 2494 /* Return the least alignment required for type TYPE. */
2119 the machine mode. We need to do this because attribute target can 2495
2120 change the result of vector_mode_supported_p and have_regs_of_mode 2496 unsigned int
2121 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can 2497 min_align_of_type (tree type)
2122 change on a per-function basis. */ 2498 {
2123 /* ??? Possibly a better solution is to run through all the types 2499 unsigned int align = TYPE_ALIGN (type);
2124 referenced by a function and re-compute the TYPE_MODE once, rather 2500 if (!TYPE_USER_ALIGN (type))
2125 than make the TYPE_MODE macro call a function. */ 2501 {
2126 2502 align = MIN (align, BIGGEST_ALIGNMENT);
2127 enum machine_mode 2503 #ifdef BIGGEST_FIELD_ALIGNMENT
2128 vector_type_mode (const_tree t) 2504 align = MIN (align, BIGGEST_FIELD_ALIGNMENT);
2129 { 2505 #endif
2130 enum machine_mode mode; 2506 unsigned int field_align = align;
2131 2507 #ifdef ADJUST_FIELD_ALIGN
2132 gcc_assert (TREE_CODE (t) == VECTOR_TYPE); 2508 field_align = ADJUST_FIELD_ALIGN (NULL_TREE, type, field_align);
2133 2509 #endif
2134 mode = t->type.mode; 2510 align = MIN (align, field_align);
2135 if (VECTOR_MODE_P (mode) 2511 }
2136 && (!targetm.vector_mode_supported_p (mode) 2512 return align / BITS_PER_UNIT;
2137 || !have_regs_of_mode[mode]))
2138 {
2139 enum machine_mode innermode = TREE_TYPE (t)->type.mode;
2140
2141 /* For integers, try mapping it to a same-sized scalar mode. */
2142 if (GET_MODE_CLASS (innermode) == MODE_INT)
2143 {
2144 mode = mode_for_size (TYPE_VECTOR_SUBPARTS (t)
2145 * GET_MODE_BITSIZE (innermode), MODE_INT, 0);
2146
2147 if (mode != VOIDmode && have_regs_of_mode[mode])
2148 return mode;
2149 }
2150
2151 return BLKmode;
2152 }
2153
2154 return mode;
2155 } 2513 }
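
[Editor's note] The new helper reports alignment in bytes, i.e. bits divided by BITS_PER_UNIT; in standard C the byte-granular alignment of a complete type is what _Alignof yields. A small illustration (typical-ABI assumption; C11):

#include <assert.h>
#include <stddef.h>

struct pair { char c; double d; };

int
main (void)
{
  /* The struct's alignment is that of its most-aligned member, so d
     is placed at the first suitably aligned offset after c.  */
  assert (_Alignof (struct pair) == offsetof (struct pair, d));
  return 0;
}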
2156 2514
2157 /* Create and return a type for signed integers of PRECISION bits. */ 2515 /* Create and return a type for signed integers of PRECISION bits. */
2158 2516
2159 tree 2517 tree
2192 2550
2193 if (satp) 2551 if (satp)
2194 TYPE_SATURATING (type) = 1; 2552 TYPE_SATURATING (type) = 1;
2195 2553
2196 /* Lay out the type: set its alignment, size, etc. */ 2554 /* Lay out the type: set its alignment, size, etc. */
2197 if (unsignedp) 2555 TYPE_UNSIGNED (type) = unsignedp;
2198 { 2556 enum mode_class mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
2199 TYPE_UNSIGNED (type) = 1; 2557 SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2200 SET_TYPE_MODE (type, mode_for_size (precision, MODE_UFRACT, 0));
2201 }
2202 else
2203 SET_TYPE_MODE (type, mode_for_size (precision, MODE_FRACT, 0));
2204 layout_type (type); 2558 layout_type (type);
2205 2559
2206 return type; 2560 return type;
2207 } 2561 }
2208 2562
2218 2572
2219 if (satp) 2573 if (satp)
2220 TYPE_SATURATING (type) = 1; 2574 TYPE_SATURATING (type) = 1;
2221 2575
2222 /* Lay out the type: set its alignment, size, etc. */ 2576 /* Lay out the type: set its alignment, size, etc. */
2223 if (unsignedp) 2577 TYPE_UNSIGNED (type) = unsignedp;
2224 { 2578 enum mode_class mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
2225 TYPE_UNSIGNED (type) = 1; 2579 SET_TYPE_MODE (type, mode_for_size (precision, mclass, 0).require ());
2226 SET_TYPE_MODE (type, mode_for_size (precision, MODE_UACCUM, 0));
2227 }
2228 else
2229 SET_TYPE_MODE (type, mode_for_size (precision, MODE_ACCUM, 0));
2230 layout_type (type); 2580 layout_type (type);
2231 2581
2232 return type; 2582 return type;
2233 } 2583 }
2234 2584
2235 /* Initialize sizetype and bitsizetype to a reasonable and temporary 2585 /* Initialize sizetypes so layout_type can use them. */
2236 value to enable integer types to be created. */
2237 2586
2238 void 2587 void
2239 initialize_sizetypes (void) 2588 initialize_sizetypes (void)
2240 { 2589 {
2241 tree t = make_node (INTEGER_TYPE); 2590 int precision, bprecision;
2242 int precision = GET_MODE_BITSIZE (SImode); 2591
2243 2592 /* Get sizetypes precision from the SIZE_TYPE target macro. */
2244 SET_TYPE_MODE (t, SImode); 2593 if (strcmp (SIZETYPE, "unsigned int") == 0)
2245 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode); 2594 precision = INT_TYPE_SIZE;
2246 TYPE_IS_SIZETYPE (t) = 1; 2595 else if (strcmp (SIZETYPE, "long unsigned int") == 0)
2247 TYPE_UNSIGNED (t) = 1; 2596 precision = LONG_TYPE_SIZE;
2248 TYPE_SIZE (t) = build_int_cst (t, precision); 2597 else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
2249 TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode)); 2598 precision = LONG_LONG_TYPE_SIZE;
2250 TYPE_PRECISION (t) = precision; 2599 else if (strcmp (SIZETYPE, "short unsigned int") == 0)
2251 2600 precision = SHORT_TYPE_SIZE;
2252 set_min_and_max_values_for_integral_type (t, precision, true); 2601 else
2253 2602 {
2254 sizetype = t; 2603 int i;
2255 bitsizetype = build_distinct_type_copy (t); 2604
2256 } 2605 precision = -1;
2257 2606 for (i = 0; i < NUM_INT_N_ENTS; i++)
2258 /* Make sizetype a version of TYPE, and initialize *sizetype accordingly. 2607 if (int_n_enabled_p[i])
2259 We do this by overwriting the stub sizetype and bitsizetype nodes created 2608 {
2260 by initialize_sizetypes. This makes sure that (a) anything stubby about 2609 char name[50];
2261 them no longer exists and (b) any INTEGER_CSTs created with such a type, 2610 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
2262 remain valid. */ 2611
2263 2612 if (strcmp (name, SIZETYPE) == 0)
2264 void 2613 {
2265 set_sizetype (tree type) 2614 precision = int_n_data[i].bitsize;
2266 { 2615 }
2267 tree t, max; 2616 }
2268 int oprecision = TYPE_PRECISION (type); 2617 if (precision == -1)
2269 /* The *bitsizetype types use a precision that avoids overflows when 2618 gcc_unreachable ();
2270 calculating signed sizes / offsets in bits. However, when 2619 }
2271 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit 2620
2272 precision. */ 2621 bprecision
2273 int precision 2622 = MIN (precision + LOG2_BITS_PER_UNIT + 1, MAX_FIXED_MODE_SIZE);
2274 = MIN (oprecision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE); 2623 bprecision = GET_MODE_PRECISION (smallest_int_mode_for_size (bprecision));
2275 precision 2624 if (bprecision > HOST_BITS_PER_DOUBLE_INT)
2276 = GET_MODE_PRECISION (smallest_mode_for_size (precision, MODE_INT)); 2625 bprecision = HOST_BITS_PER_DOUBLE_INT;
2277 if (precision > HOST_BITS_PER_WIDE_INT * 2) 2626
2278 precision = HOST_BITS_PER_WIDE_INT * 2; 2627 /* Create stubs for sizetype and bitsizetype so we can create constants. */
2279 2628 sizetype = make_node (INTEGER_TYPE);
2280 /* sizetype must be an unsigned type. */ 2629 TYPE_NAME (sizetype) = get_identifier ("sizetype");
2281 gcc_assert (TYPE_UNSIGNED (type)); 2630 TYPE_PRECISION (sizetype) = precision;
2282 2631 TYPE_UNSIGNED (sizetype) = 1;
2283 t = build_distinct_type_copy (type); 2632 bitsizetype = make_node (INTEGER_TYPE);
2284 /* We want to use sizetype's cache, as we will be replacing that type. */ 2633 TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
2285 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype); 2634 TYPE_PRECISION (bitsizetype) = bprecision;
2286 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype); 2635 TYPE_UNSIGNED (bitsizetype) = 1;
2287 TREE_TYPE (TYPE_CACHED_VALUES (t)) = type; 2636
2288 TYPE_UID (t) = TYPE_UID (sizetype); 2637 /* Now layout both types manually. */
2289 TYPE_IS_SIZETYPE (t) = 1; 2638 scalar_int_mode mode = smallest_int_mode_for_size (precision);
2290 2639 SET_TYPE_MODE (sizetype, mode);
2291 /* Replace our original stub sizetype. */ 2640 SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
2292 memcpy (sizetype, t, tree_size (sizetype)); 2641 TYPE_SIZE (sizetype) = bitsize_int (precision);
2293 TYPE_MAIN_VARIANT (sizetype) = sizetype; 2642 TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (mode));
2294 TYPE_CANONICAL (sizetype) = sizetype; 2643 set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
2295 2644
2296 /* sizetype is unsigned but we need to fix TYPE_MAX_VALUE so that it is 2645 mode = smallest_int_mode_for_size (bprecision);
2297 sign-extended in a way consistent with force_fit_type. */ 2646 SET_TYPE_MODE (bitsizetype, mode);
2298 max = TYPE_MAX_VALUE (sizetype); 2647 SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
2299 TYPE_MAX_VALUE (sizetype) 2648 TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
2300 = double_int_to_tree (sizetype, tree_to_double_int (max)); 2649 TYPE_SIZE_UNIT (bitsizetype) = size_int (GET_MODE_SIZE (mode));
2301 2650 set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);
2302 t = make_node (INTEGER_TYPE);
2303 TYPE_NAME (t) = get_identifier ("bit_size_type");
2304 /* We want to use bitsizetype's cache, as we will be replacing that type. */
2305 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
2306 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
2307 TYPE_PRECISION (t) = precision;
2308 TYPE_UID (t) = TYPE_UID (bitsizetype);
2309 TYPE_IS_SIZETYPE (t) = 1;
2310
2311 /* Replace our original stub bitsizetype. */
2312 memcpy (bitsizetype, t, tree_size (bitsizetype));
2313 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype;
2314 TYPE_CANONICAL (bitsizetype) = bitsizetype;
2315
2316 fixup_unsigned_type (bitsizetype);
2317 2651
2318 /* Create the signed variants of *sizetype. */ 2652 /* Create the signed variants of *sizetype. */
2319 ssizetype = make_signed_type (oprecision); 2653 ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
2320 TYPE_IS_SIZETYPE (ssizetype) = 1; 2654 TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
2321 sbitsizetype = make_signed_type (precision); 2655 sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
2322 TYPE_IS_SIZETYPE (sbitsizetype) = 1; 2656 TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
2323 } 2657 }
2324 2658
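The rewritten initialize_sizetypes derives sizetype's precision from the SIZETYPE string and then sizes bitsizetype so any byte count can be expressed in bits without overflow. A worked sketch of that computation, with assumed values for a typical 64-bit target (8-bit units, TImode as the widest fixed mode):

#include <stdio.h>

/* Worked sketch of the bitsizetype precision computation above, under
   assumed values: LOG2_BITS_PER_UNIT == 3, MAX_FIXED_MODE_SIZE == 128
   (TImode), HOST_BITS_PER_DOUBLE_INT == 128.  */

#define LOG2_BITS_PER_UNIT 3
#define MAX_FIXED_MODE_SIZE 128
#define HOST_BITS_PER_DOUBLE_INT 128

/* Stand-in for smallest_int_mode_for_size: round up to the next
   power-of-two mode width, at least 8 bits.  */
static int
smallest_mode_bits (int bits)
{
  int w = 8;
  while (w < bits)
    w *= 2;
  return w;
}

int
main (void)
{
  int precision = 64;  /* e.g. SIZETYPE == "long unsigned int" */
  int bprecision = precision + LOG2_BITS_PER_UNIT + 1;  /* 68 */
  if (bprecision > MAX_FIXED_MODE_SIZE)
    bprecision = MAX_FIXED_MODE_SIZE;
  bprecision = smallest_mode_bits (bprecision);  /* TImode: 128 */
  if (bprecision > HOST_BITS_PER_DOUBLE_INT)
    bprecision = HOST_BITS_PER_DOUBLE_INT;
  printf ("sizetype: %d bits, bitsizetype: %d bits\n",
          precision, bprecision);
  return 0;
}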
2325 /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE 2659 /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
2326 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE 2660 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2327 for TYPE, based on the PRECISION and whether or not the TYPE 2661 for TYPE, based on the PRECISION and whether or not the TYPE
2331 61. */ 2665 61. */
2332 2666
2333 void 2667 void
2334 set_min_and_max_values_for_integral_type (tree type, 2668 set_min_and_max_values_for_integral_type (tree type,
2335 int precision, 2669 int precision,
2336 bool is_unsigned) 2670 signop sgn)
2337 { 2671 {
2338 tree min_value; 2672 /* For bitfields with zero width we end up creating integer types
2339 tree max_value; 2673 with zero precision. Don't assign any minimum/maximum values
2340 2674 to those types; they don't have any valid value. */
2341 if (is_unsigned) 2675 if (precision < 1)
2342 { 2676 return;
2343 min_value = build_int_cst (type, 0); 2677
2344 max_value 2678 TYPE_MIN_VALUE (type)
2345 = build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0 2679 = wide_int_to_tree (type, wi::min_value (precision, sgn));
2346 ? -1 2680 TYPE_MAX_VALUE (type)
2347 : ((HOST_WIDE_INT) 1 << precision) - 1, 2681 = wide_int_to_tree (type, wi::max_value (precision, sgn));
2348 precision - HOST_BITS_PER_WIDE_INT > 0
2349 ? ((unsigned HOST_WIDE_INT) ~0
2350 >> (HOST_BITS_PER_WIDE_INT
2351 - (precision - HOST_BITS_PER_WIDE_INT)))
2352 : 0);
2353 }
2354 else
2355 {
2356 min_value
2357 = build_int_cst_wide (type,
2358 (precision - HOST_BITS_PER_WIDE_INT > 0
2359 ? 0
2360 : (HOST_WIDE_INT) (-1) << (precision - 1)),
2361 (((HOST_WIDE_INT) (-1)
2362 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2363 ? precision - HOST_BITS_PER_WIDE_INT - 1
2364 : 0))));
2365 max_value
2366 = build_int_cst_wide (type,
2367 (precision - HOST_BITS_PER_WIDE_INT > 0
2368 ? -1
2369 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
2370 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2371 ? (((HOST_WIDE_INT) 1
2372 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
2373 : 0));
2374 }
2375
2376 TYPE_MIN_VALUE (type) = min_value;
2377 TYPE_MAX_VALUE (type) = max_value;
2378 } 2682 }
2379 2683
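The wide-int rewrite reduces the whole function to wi::min_value/wi::max_value: a p-bit unsigned type spans [0, 2^p - 1], a signed one [-2^(p-1), 2^(p-1) - 1], and p == 0 (zero-width bitfields) gets no bounds at all. A standalone sketch for precisions small enough to fit host integers:

#include <stdio.h>

/* Sketch of the bounds wi::min_value/wi::max_value produce, limited
   to precisions below 64 so plain host arithmetic suffices.  */
static void
print_bounds (int precision, int is_unsigned)
{
  if (precision < 1)
    return;  /* zero-width bitfield types get no bounds */
  if (is_unsigned)
    printf ("unsigned %2d-bit: [0, %llu]\n", precision,
            (1ULL << precision) - 1);
  else
    printf ("signed   %2d-bit: [%lld, %lld]\n", precision,
            -(long long) (1ULL << (precision - 1)),
            (long long) ((1ULL << (precision - 1)) - 1));
}

int
main (void)
{
  print_bounds (8, 1);   /* [0, 255] */
  print_bounds (8, 0);   /* [-128, 127] */
  print_bounds (0, 1);   /* prints nothing, as in the source */
  return 0;
}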
2380 /* Set the extreme values of TYPE based on its precision in bits, 2684 /* Set the extreme values of TYPE based on its precision in bits,
2381 then lay it out. Used when make_signed_type won't do 2685 then lay it out. Used when make_signed_type won't do
2382 because the tree code is not INTEGER_TYPE. 2686 because the tree code is not INTEGER_TYPE. */
2383 E.g. for Pascal, when the -fsigned-char option is given. */
2384 2687
2385 void 2688 void
2386 fixup_signed_type (tree type) 2689 fixup_signed_type (tree type)
2387 { 2690 {
2388 int precision = TYPE_PRECISION (type); 2691 int precision = TYPE_PRECISION (type);
2389 2692
2390 /* We cannot properly represent constants greater than 2693 set_min_and_max_values_for_integral_type (type, precision, SIGNED);
2391 2 * HOST_BITS_PER_WIDE_INT, but we still need the types
2392 since they are used by i386 vector extensions and friends. */
2393 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2394 precision = HOST_BITS_PER_WIDE_INT * 2;
2395
2396 set_min_and_max_values_for_integral_type (type, precision,
2397 /*is_unsigned=*/false);
2398 2694
2399 /* Lay out the type: set its alignment, size, etc. */ 2695 /* Lay out the type: set its alignment, size, etc. */
2400 layout_type (type); 2696 layout_type (type);
2401 } 2697 }
2402 2698
2407 void 2703 void
2408 fixup_unsigned_type (tree type) 2704 fixup_unsigned_type (tree type)
2409 { 2705 {
2410 int precision = TYPE_PRECISION (type); 2706 int precision = TYPE_PRECISION (type);
2411 2707
2412 /* We cannot properly represent constants greater than
2413 2 * HOST_BITS_PER_WIDE_INT, but we still need the types
2414 since they are used by i386 vector extensions and friends. */
2415 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2416 precision = HOST_BITS_PER_WIDE_INT * 2;
2417
2418 TYPE_UNSIGNED (type) = 1; 2708 TYPE_UNSIGNED (type) = 1;
2419 2709
2420 set_min_and_max_values_for_integral_type (type, precision, 2710 set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);
2421 /*is_unsigned=*/true);
2422 2711
2423 /* Lay out the type: set its alignment, size, etc. */ 2712 /* Lay out the type: set its alignment, size, etc. */
2424 layout_type (type); 2713 layout_type (type);
2425 } 2714 }
2426 2715
2716 /* Construct an iterator for a bitfield that spans BITSIZE bits,
2717 starting at BITPOS.
2718
2719 BITREGION_START is the bit position of the first bit in this
2720 sequence of bit fields. BITREGION_END is the last bit in this
2721 sequence. If these two fields are non-zero, we should restrict the
2722 memory access to that range. Otherwise, we are allowed to touch
2723 any adjacent non bit-fields.
2724
2725 ALIGN is the alignment of the underlying object in bits.
2726 VOLATILEP says whether the bitfield is volatile. */
2727
2728 bit_field_mode_iterator
2729 ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
2730 HOST_WIDE_INT bitregion_start,
2731 HOST_WIDE_INT bitregion_end,
2732 unsigned int align, bool volatilep)
2733 : m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
2734 m_bitpos (bitpos), m_bitregion_start (bitregion_start),
2735 m_bitregion_end (bitregion_end), m_align (align),
2736 m_volatilep (volatilep), m_count (0)
2737 {
2738 if (!m_bitregion_end)
2739 {
2740 /* We can assume that any aligned chunk of ALIGN bits that overlaps
2741 the bitfield is mapped and won't trap, provided that ALIGN isn't
2742 too large. The cap is the biggest required alignment for data,
2743 or at least the word size. Force at least one such chunk. */
2744 unsigned HOST_WIDE_INT units
2745 = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
2746 if (bitsize <= 0)
2747 bitsize = 1;
2748 m_bitregion_end = bitpos + bitsize + units - 1;
2749 m_bitregion_end -= m_bitregion_end % units + 1;
2750 }
2751 }
2752
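When the caller passes no bit region, the constructor synthesizes one ending at the last bit of the aligned chunk covering the field, so at least one full ALIGN-bit chunk may be touched. Worked numbers in a standalone sketch:

#include <stdio.h>

/* Worked sketch of the default-region computation above: extend the
   field to the end of the last UNITS-aligned chunk it occupies,
   forcing at least one chunk.  */
static long
default_region_end (long bitpos, long bitsize, long units)
{
  if (bitsize <= 0)
    bitsize = 1;
  long end = bitpos + bitsize + units - 1;
  end -= end % units + 1;
  return end;
}

int
main (void)
{
  /* Bits 17..21 with 32-bit chunks: region ends at bit 31.  */
  printf ("%ld\n", default_region_end (17, 5, 32));
  /* Bits 30..34 straddle a chunk boundary: region ends at bit 63.  */
  printf ("%ld\n", default_region_end (30, 5, 32));
  return 0;
}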
2753 /* Calls to this function return successively larger modes that can be used
2754 to represent the bitfield. Return true if another bitfield mode is
2755 available, storing it in *OUT_MODE if so. */
2756
2757 bool
2758 bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
2759 {
2760 scalar_int_mode mode;
2761 for (; m_mode.exists (&mode); m_mode = GET_MODE_WIDER_MODE (mode))
2762 {
2763 unsigned int unit = GET_MODE_BITSIZE (mode);
2764
2765 /* Skip modes that don't have full precision. */
2766 if (unit != GET_MODE_PRECISION (mode))
2767 continue;
2768
2769 /* Stop if the mode is too wide to handle efficiently. */
2770 if (unit > MAX_FIXED_MODE_SIZE)
2771 break;
2772
2773 /* Don't deliver more than one multiword mode; the smallest one
2774 should be used. */
2775 if (m_count > 0 && unit > BITS_PER_WORD)
2776 break;
2777
2778 /* Skip modes that are too small. */
2779 unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
2780 unsigned HOST_WIDE_INT subend = substart + m_bitsize;
2781 if (subend > unit)
2782 continue;
2783
2784 /* Stop if the mode goes outside the bitregion. */
2785 HOST_WIDE_INT start = m_bitpos - substart;
2786 if (m_bitregion_start && start < m_bitregion_start)
2787 break;
2788 HOST_WIDE_INT end = start + unit;
2789 if (end > m_bitregion_end + 1)
2790 break;
2791
2792 /* Stop if the mode requires too much alignment. */
2793 if (GET_MODE_ALIGNMENT (mode) > m_align
2794 && targetm.slow_unaligned_access (mode, m_align))
2795 break;
2796
2797 *out_mode = mode;
2798 m_mode = GET_MODE_WIDER_MODE (mode);
2799 m_count++;
2800 return true;
2801 }
2802 return false;
2803 }
2804
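next_mode walks the integer modes from narrowest to widest, skipping any mode whose natural UNIT-bit window at this position cannot contain the field, and stopping once a window would escape the bit region (or the mode is too wide, over-aligned, or a second multiword mode). A standalone sketch of just the window test, with the other checks noted but omitted:

#include <stdio.h>

/* Standalone sketch of next_mode's window test: the field must lie in
   one UNIT-bit window at its natural (aligned) position, and that
   window must stay inside [region_start, region_end].  The real code
   also checks precision, alignment and the multiword limit.  */
static int
mode_fits (int unit, long bitpos, long bitsize,
           long region_start, long region_end)
{
  long substart = bitpos % unit;
  if (substart + bitsize > unit)
    return 0;  /* field straddles two windows of this size */
  long start = bitpos - substart;
  if (start < region_start || start + unit > region_end + 1)
    return 0;  /* window would touch bits outside the region */
  return 1;
}

int
main (void)
{
  long bitpos = 12, bitsize = 10;  /* field occupies bits 12..21 */
  for (int unit = 8; unit <= 64; unit *= 2)
    printf ("%2d-bit window: %s\n", unit,
            mode_fits (unit, bitpos, bitsize, 0, 31) ? "yes" : "no");
  /* Only the 32-bit window both contains bits 12..21 and stays inside
     the 32-bit region, so the 8- and 16-bit modes are skipped and the
     64-bit mode is rejected.  */
  return 0;
}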
2805 /* Return true if smaller modes are generally preferred for this kind
2806 of bitfield. */
2807
2808 bool
2809 bit_field_mode_iterator::prefer_smaller_modes ()
2810 {
2811 return (m_volatilep
2812 ? targetm.narrow_volatile_bitfield ()
2813 : !SLOW_BYTE_ACCESS);
2814 }
2815
2427 /* Find the best machine mode to use when referencing a bit field of length 2816 /* Find the best machine mode to use when referencing a bit field of length
2428 BITSIZE bits starting at BITPOS. 2817 BITSIZE bits starting at BITPOS.
2429 2818
2819 BITREGION_START is the bit position of the first bit in this
2820 sequence of bit fields. BITREGION_END is the last bit in this
2821 sequence. If these two fields are non-zero, we should restrict the
2822 memory access to that range. Otherwise, we are allowed to touch
2823 any adjacent non bit-fields.
2824
2825 The chosen mode must have no more than LARGEST_MODE_BITSIZE bits.
2826 INT_MAX is a suitable value for LARGEST_MODE_BITSIZE if the caller
2827 doesn't want to apply a specific limit.
2828
2829 If no mode meets all these conditions, we return VOIDmode.
2830
2430 The underlying object is known to be aligned to a boundary of ALIGN bits. 2831 The underlying object is known to be aligned to a boundary of ALIGN bits.
2431 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2432 larger than LARGEST_MODE (usually SImode).
2433
2434 If no mode meets all these conditions, we return VOIDmode.
2435 2832
2436 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the 2833 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
2437 smallest mode meeting these conditions. 2834 smallest mode meeting these conditions.
2438 2835
2439 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the 2836 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
2441 all the conditions. 2838 all the conditions.
2442 2839
2443 If VOLATILEP is true the narrow_volatile_bitfields target hook is used to 2840 If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
2444 decide which of the above modes should be used. */ 2841 decide which of the above modes should be used. */
2445 2842
2446 enum machine_mode 2843 bool
2447 get_best_mode (int bitsize, int bitpos, unsigned int align, 2844 get_best_mode (int bitsize, int bitpos,
2448 enum machine_mode largest_mode, int volatilep) 2845 unsigned HOST_WIDE_INT bitregion_start,
2449 { 2846 unsigned HOST_WIDE_INT bitregion_end,
2450 enum machine_mode mode; 2847 unsigned int align,
2451 unsigned int unit = 0; 2848 unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
2452 2849 scalar_int_mode *best_mode)
2453 /* Find the narrowest integer mode that contains the bit field. */ 2850 {
2454 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; 2851 bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
2455 mode = GET_MODE_WIDER_MODE (mode)) 2852 bitregion_end, align, volatilep);
2456 { 2853 scalar_int_mode mode;
2457 unit = GET_MODE_BITSIZE (mode); 2854 bool found = false;
2458 if ((bitpos % unit) + bitsize <= unit) 2855 while (iter.next_mode (&mode)
2856 /* ??? For historical reasons, reject modes that would normally
2857 receive greater alignment, even if unaligned accesses are
2858 acceptable. This has both advantages and disadvantages.
2859 Removing this check means that something like:
2860
2861 struct s { unsigned int x; unsigned int y; };
2862 int f (struct s *s) { return s->x == 0 && s->y == 0; }
2863
2864 can be implemented using a single load and compare on
2865 64-bit machines that have no alignment restrictions.
2866 For example, on powerpc64-linux-gnu, we would generate:
2867
2868 ld 3,0(3)
2869 cntlzd 3,3
2870 srdi 3,3,6
2871 blr
2872
2873 rather than:
2874
2875 lwz 9,0(3)
2876 cmpwi 7,9,0
2877 bne 7,.L3
2878 lwz 3,4(3)
2879 cntlzw 3,3
2880 srwi 3,3,5
2881 extsw 3,3
2882 blr
2883 .p2align 4,,15
2884 .L3:
2885 li 3,0
2886 blr
2887
2888 However, accessing more than one field can make life harder
2889 for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c
2890 has a series of unsigned short copies followed by a series of
2891 unsigned short comparisons. With this check, both the copies
2892 and comparisons remain 16-bit accesses and FRE is able
2893 to eliminate the latter. Without the check, the comparisons
2894 can be done using 2 64-bit operations, which FRE isn't able
2895 to handle in the same way.
2896
2897 Either way, it would probably be worth disabling this check
2898 during expand. One particular example where removing the
2899 check would help is the get_best_mode call in store_bit_field.
2900 If we are given a memory bitregion of 128 bits that is aligned
2901 to a 64-bit boundary, and the bitfield we want to modify is
2902 in the second half of the bitregion, this check causes
2903 store_bitfield to turn the memory into a 64-bit reference
2904 to the _first_ half of the region. We later use
2905 adjust_bitfield_address to get a reference to the correct half,
2906 but doing so looks to adjust_bitfield_address as though we are
2907 moving past the end of the original object, so it drops the
2908 associated MEM_EXPR and MEM_OFFSET. Removing the check
2909 causes store_bit_field to keep a 128-bit memory reference,
2910 so that the final bitfield reference still has a MEM_EXPR
2911 and MEM_OFFSET. */
2912 && GET_MODE_ALIGNMENT (mode) <= align
2913 && GET_MODE_BITSIZE (mode) <= largest_mode_bitsize)
2914 {
2915 *best_mode = mode;
2916 found = true;
2917 if (iter.prefer_smaller_modes ())
2459 break; 2918 break;
2460 } 2919 }
2461 2920
2462 if (mode == VOIDmode 2921 return found;
2463 /* It is tempting to omit the following line
2464 if STRICT_ALIGNMENT is true.
2465 But that is incorrect, since if the bitfield uses part of 3 bytes
2466 and we use a 4-byte mode, we could get a spurious segv
2467 if the extra 4th byte is past the end of memory.
2468 (Though at least one Unix compiler ignores this problem:
2469 the one on the Sequent 386 machine.) */
2470 || MIN (unit, BIGGEST_ALIGNMENT) > align
2471 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2472 return VOIDmode;
2473
2474 if ((SLOW_BYTE_ACCESS && ! volatilep)
2475 || (volatilep && !targetm.narrow_volatile_bitfield ()))
2476 {
2477 enum machine_mode wide_mode = VOIDmode, tmode;
2478
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2480 tmode = GET_MODE_WIDER_MODE (tmode))
2481 {
2482 unit = GET_MODE_BITSIZE (tmode);
2483 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2484 && unit <= BITS_PER_WORD
2485 && unit <= MIN (align, BIGGEST_ALIGNMENT)
2486 && (largest_mode == VOIDmode
2487 || unit <= GET_MODE_BITSIZE (largest_mode)))
2488 wide_mode = tmode;
2489 }
2490
2491 if (wide_mode != VOIDmode)
2492 return wide_mode;
2493 }
2494
2495 return mode;
2496 } 2922 }
2497 2923
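Structurally, the new get_best_mode keeps taking modes from the iterator while the alignment and size checks hold, returning the first hit when smaller modes are preferred and the widest hit otherwise. The selection policy in a standalone sketch (names illustrative):

#include <stdio.h>

/* Sketch of get_best_mode's selection policy over ascending candidate
   widths: a failing alignment/size check ends the walk, as the while
   condition does above; prefer_smaller decides whether the first or
   the widest surviving candidate wins.  */
static int
pick_width (const int *widths, int n, int align, int largest,
            int prefer_smaller, int *out)
{
  int found = 0;
  for (int i = 0; i < n; i++)
    {
      if (widths[i] > align || widths[i] > largest)
        break;  /* the while condition fails, ending the search */
      *out = widths[i];
      found = 1;
      if (prefer_smaller)
        break;
    }
  return found;
}

int
main (void)
{
  int widths[] = { 8, 16, 32, 64 };
  int w;
  /* SLOW_BYTE_ACCESS-style search on a 32-bit-aligned object: the
     widest candidate not exceeding the alignment wins.  */
  if (pick_width (widths, 4, 32, 64, 0, &w))
    printf ("chose a %d-bit access\n", w);  /* 32 */
  return 0;
}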
2498 /* Gets minimal and maximal values for MODE (signed or unsigned depending on 2924 /* Gets minimal and maximal values for MODE (signed or unsigned depending on
2499 SIGN). The returned constants are made to be usable in TARGET_MODE. */ 2925 SIGN). The returned constants are made to be usable in TARGET_MODE. */
2500 2926
2501 void 2927 void
2502 get_mode_bounds (enum machine_mode mode, int sign, 2928 get_mode_bounds (scalar_int_mode mode, int sign,
2503 enum machine_mode target_mode, 2929 scalar_int_mode target_mode,
2504 rtx *mmin, rtx *mmax) 2930 rtx *mmin, rtx *mmax)
2505 { 2931 {
2506 unsigned size = GET_MODE_BITSIZE (mode); 2932 unsigned size = GET_MODE_PRECISION (mode);
2507 unsigned HOST_WIDE_INT min_val, max_val; 2933 unsigned HOST_WIDE_INT min_val, max_val;
2508 2934
2509 gcc_assert (size <= HOST_BITS_PER_WIDE_INT); 2935 gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
2510 2936
2511 if (sign) 2937 /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */
2512 { 2938 if (mode == BImode)
2513 min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1)); 2939 {
2514 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1; 2940 if (STORE_FLAG_VALUE < 0)
2941 {
2942 min_val = STORE_FLAG_VALUE;
2943 max_val = 0;
2944 }
2945 else
2946 {
2947 min_val = 0;
2948 max_val = STORE_FLAG_VALUE;
2949 }
2950 }
2951 else if (sign)
2952 {
2953 min_val = -(HOST_WIDE_INT_1U << (size - 1));
2954 max_val = (HOST_WIDE_INT_1U << (size - 1)) - 1;
2515 } 2955 }
2516 else 2956 else
2517 { 2957 {
2518 min_val = 0; 2958 min_val = 0;
2519 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1; 2959 max_val = (HOST_WIDE_INT_1U << (size - 1) << 1) - 1;
2520 } 2960 }
2521 2961
2522 *mmin = gen_int_mode (min_val, target_mode); 2962 *mmin = gen_int_mode (min_val, target_mode);
2523 *mmax = gen_int_mode (max_val, target_mode); 2963 *mmax = gen_int_mode (max_val, target_mode);
2524 } 2964 }
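For ordinary modes the bounds follow directly from the precision; note that the unsigned maximum is built with two shifts, as in the source, so no single shift ever equals the full operand width. A standalone sketch of the arithmetic (the BImode special case, which yields {0, STORE_FLAG_VALUE}, is omitted):

#include <stdio.h>

/* Sketch of get_mode_bounds' arithmetic for an ordinary SIZE-bit
   integer mode (SIZE <= 63 here so long long suffices).  */
static void
mode_bounds (int size, int sign, long long *mmin, long long *mmax)
{
  if (sign)
    {
      *mmin = -(1LL << (size - 1));
      *mmax = (1LL << (size - 1)) - 1;
    }
  else
    {
      *mmin = 0;
      /* Two shifts, as in the source, so that SIZE bits of ones are
         built without ever shifting by the full operand width.  */
      *mmax = (1LL << (size - 1) << 1) - 1;
    }
}

int
main (void)
{
  long long lo, hi;
  mode_bounds (16, 1, &lo, &hi);
  printf ("signed 16-bit:   [%lld, %lld]\n", lo, hi);  /* [-32768, 32767] */
  mode_bounds (16, 0, &lo, &hi);
  printf ("unsigned 16-bit: [%lld, %lld]\n", lo, hi);  /* [0, 65535] */
  return 0;
}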