comparison gcc/config/sparc/sparc.c @ 47:3bfb6c00c1e0

update it from 4.4.2 to 4.4.3.
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Sun, 07 Feb 2010 17:44:34 +0900
parents a06113de4d67
children 77e2b8dfacca
comparison of 46:b85a337e5837 (old) with 47:3bfb6c00c1e0 (new); unchanged regions between hunks are collapsed
@@ old 410-419 / new 410-422 @@
 static bool sparc_return_in_memory (const_tree, const_tree);
 static bool sparc_strict_argument_naming (CUMULATIVE_ARGS *);
 static void sparc_va_start (tree, rtx);
 static tree sparc_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
 static bool sparc_vector_mode_supported_p (enum machine_mode);
+static bool sparc_tls_referenced_p (rtx);
+static rtx legitimize_tls_address (rtx);
+static rtx legitimize_pic_address (rtx, rtx);
 static bool sparc_pass_by_reference (CUMULATIVE_ARGS *,
				      enum machine_mode, const_tree, bool);
 static int sparc_arg_partial_bytes (CUMULATIVE_ARGS *,
				     enum machine_mode, tree, bool);
 static void sparc_dwarf_handle_frame_unspec (const char *, rtx, int);
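
These three declarations make the TLS and PIC helpers file-local and give legitimize_pic_address its new two-argument signature. A minimal sketch of how they are meant to compose, under a hypothetical wrapper name (the real call sites are the sparc_expand_move and legitimize_address hunks further down):

    /* Illustration only -- not part of the patch; the wrapper name is
       hypothetical.  */
    static rtx
    sparc_legitimize_constant_sketch (rtx x)
    {
      if (sparc_tls_referenced_p (x))    /* TLS symbol, bare or CONST+PLUS.  */
        return legitimize_tls_address (x);
      if (flag_pic && CONSTANT_P (x))    /* PIC reference.  */
        return legitimize_pic_address (x, NULL_RTX);
      return x;                          /* Already legitimate.  */
    }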
@@ old 984-1021 / new 987-1007 @@
     }

   /* Fixup TLS cases.  */
   if (TARGET_HAVE_TLS
       && CONSTANT_P (operands[1])
-      && GET_CODE (operands[1]) != HIGH
       && sparc_tls_referenced_p (operands [1]))
     {
-      rtx sym = operands[1];
-      rtx addend = NULL;
-
-      if (GET_CODE (sym) == CONST && GET_CODE (XEXP (sym, 0)) == PLUS)
-        {
-          addend = XEXP (XEXP (sym, 0), 1);
-          sym = XEXP (XEXP (sym, 0), 0);
-        }
-
-      gcc_assert (SPARC_SYMBOL_REF_TLS_P (sym));
-
-      sym = legitimize_tls_address (sym);
-      if (addend)
-        {
-          sym = gen_rtx_PLUS (mode, sym, addend);
-          sym = force_operand (sym, operands[0]);
-        }
-      operands[1] = sym;
+      operands[1] = legitimize_tls_address (operands[1]);
+      return false;
     }

   /* Fixup PIC cases.  */
   if (flag_pic && CONSTANT_P (operands[1]))
     {
       if (pic_address_needs_scratch (operands[1]))
-        operands[1] = legitimize_pic_address (operands[1], mode, 0);
+        operands[1] = legitimize_pic_address (operands[1], NULL_RTX);

       /* VxWorks does not impose a fixed gap between segments; the run-time
          gap can be different from the object-file gap.  We therefore can't
          assume X - _GLOBAL_OFFSET_TABLE_ is a link-time constant unless we
          are absolutely sure that X is in the same segment as the GOT.
@@ old 1039-1052 / new 1025-1036 @@
         }

       if (symbolic_operand (operands[1], mode))
         {
           operands[1] = legitimize_pic_address (operands[1],
-                                                mode,
-                                                (reload_in_progress ?
-                                                 operands[0] :
-                                                 NULL_RTX));
+                                                reload_in_progress
+                                                ? operands[0] : NULL_RTX);
           return false;
         }
     }

   /* If we are trying to toss an integer constant into FP registers,
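
The deleted GET_CODE (operands[1]) != HIGH guard goes away together with the hand-written CONST/PLUS splitting: the rewritten sparc_tls_referenced_p (new lines 3088-3104 below) only looks at the top level of the expression, or one level under CONST, so a HIGH-wrapped TLS symbol presumably never reaches the TLS path any more. A minimal sketch, with tls_sym standing for a hypothetical SYMBOL_REF that carries a TLS model:

    /* Illustration only -- not part of the patch.  */
    rtx high_part = gen_rtx_HIGH (Pmode, tls_sym);
    /* Old predicate (for_each_rtx walk): true, so the expander needed the
       explicit != HIGH filter.  New predicate: false, no filter needed.  */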
@@ old 2863-2889 / new 2847-2861 @@
    satisfies CONSTANT_P.  */

 bool
 legitimate_constant_p (rtx x)
 {
-  rtx inner;
-
   switch (GET_CODE (x))
     {
+    case CONST:
     case SYMBOL_REF:
-      /* TLS symbols are not constant.  */
-      if (SYMBOL_REF_TLS_MODEL (x))
-        return false;
-      break;
-
-    case CONST:
-      inner = XEXP (x, 0);
-
-      /* Offsets of TLS symbols are never valid.
-         Discourage CSE from creating them.  */
-      if (GET_CODE (inner) == PLUS
-          && SPARC_SYMBOL_REF_TLS_P (XEXP (inner, 0)))
+      if (sparc_tls_referenced_p (x))
         return false;
       break;

     case CONST_DOUBLE:
       if (GET_MODE (x) == VOIDmode)
@@ old 2946-2959 / new 2918-2928 @@
 bool
 legitimate_pic_operand_p (rtx x)
 {
   if (pic_address_needs_scratch (x))
     return false;
-  if (SPARC_SYMBOL_REF_TLS_P (x)
-      || (GET_CODE (x) == CONST
-          && GET_CODE (XEXP (x, 0)) == PLUS
-          && SPARC_SYMBOL_REF_TLS_P (XEXP (XEXP (x, 0), 0))))
+  if (sparc_tls_referenced_p (x))
     return false;
   return true;
 }

 /* Return nonzero if ADDR is a valid memory address.
@@ old 2987-2997 / new 2956-2966 @@
            && rs1 == pic_offset_table_rtx
            && !REG_P (rs2)
            && GET_CODE (rs2) != SUBREG
            && GET_CODE (rs2) != LO_SUM
            && GET_CODE (rs2) != MEM
-           && ! SPARC_SYMBOL_REF_TLS_P (rs2)
+           && !(GET_CODE (rs2) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (rs2))
            && (! symbolic_operand (rs2, VOIDmode) || mode == Pmode)
            && (GET_CODE (rs2) != CONST_INT || SMALL_INT (rs2)))
           || ((REG_P (rs1)
                || GET_CODE (rs1) == SUBREG)
               && RTX_OK_FOR_OFFSET_P (rs2)))
@@ old 3027-3046 / new 2996-3017 @@
               && RTX_OK_FOR_OLO10_P (rs2))
             {
               rs2 = NULL;
               imm1 = XEXP (rs1, 1);
               rs1 = XEXP (rs1, 0);
-              if (! CONSTANT_P (imm1) || SPARC_SYMBOL_REF_TLS_P (rs1))
+              if (!CONSTANT_P (imm1)
+                  || (GET_CODE (rs1) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (rs1)))
                 return 0;
             }
         }
       else if (GET_CODE (addr) == LO_SUM)
         {
           rs1 = XEXP (addr, 0);
           imm1 = XEXP (addr, 1);

-          if (! CONSTANT_P (imm1) || SPARC_SYMBOL_REF_TLS_P (rs1))
+          if (!CONSTANT_P (imm1)
+              || (GET_CODE (rs1) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (rs1)))
             return 0;

           /* We can't allow TFmode in 32-bit mode, because an offset greater
              than the alignment (8) may cause the LO_SUM to overflow.  */
           if (mode == TFmode && TARGET_ARCH32)
@@ old 3112-3144 / new 3083-3114 @@
   temp = gen_reg_rtx (Pmode);
   emit_move_insn (temp, global_offset_table);
   return temp;
 }

-/* Return 1 if *X is a thread-local symbol.  */
-
-static int
-sparc_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
-{
-  return SPARC_SYMBOL_REF_TLS_P (*x);
-}
-
-/* Return 1 if X contains a thread-local symbol.  */
-
-bool
+/* Return true if X contains a thread-local symbol.  */
+
+static bool
 sparc_tls_referenced_p (rtx x)
 {
   if (!TARGET_HAVE_TLS)
     return false;

-  return for_each_rtx (&x, &sparc_tls_symbol_ref_1, 0);
+  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
+    x = XEXP (XEXP (x, 0), 0);
+
+  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
+    return true;
+
+  /* That's all we handle in legitimize_tls_address for now.  */
+  return false;
 }

 /* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
    this (thread-local) address.  */

-rtx
+static rtx
 legitimize_tls_address (rtx addr)
 {
   rtx temp1, temp2, temp3, ret, o0, got, insn;

   gcc_assert (can_create_pseudo_p ());
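
The rewritten predicate inspects the expression directly instead of walking it with for_each_rtx. A minimal sketch of the two shapes it now reports, assuming a SYMBOL_REF tls_sym whose TLS model was already set by the middle end (the variable names are hypothetical):

    /* Illustration only -- not part of the patch.  */
    rtx direct  = tls_sym;                              /* bare TLS symbol            */
    rtx wrapped = gen_rtx_CONST (Pmode,                 /* TLS symbol + const offset  */
                                 gen_rtx_PLUS (Pmode, tls_sym, GEN_INT (4)));
    /* Both satisfy the new sparc_tls_referenced_p; a TLS symbol nested any
       deeper (say under LO_SUM) is no longer reported, unlike the old
       for_each_rtx walk, hence the new "for now" comment above.  */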
@@ old 3258-3282 / new 3228-3265 @@

     default:
       gcc_unreachable ();
     }

+  else if (GET_CODE (addr) == CONST)
+    {
+      rtx base, offset;
+
+      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);
+
+      base = legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
+      offset = XEXP (XEXP (addr, 0), 1);
+
+      base = force_operand (base, NULL_RTX);
+      if (!(GET_CODE (offset) == CONST_INT && SMALL_INT (offset)))
+        offset = force_reg (Pmode, offset);
+      ret = gen_rtx_PLUS (Pmode, base, offset);
+    }
+
   else
     gcc_unreachable ();  /* for now ... */

   return ret;
 }
-

 /* Legitimize PIC addresses.  If the address is already position-independent,
    we return ORIG.  Newly generated position-independent addresses go into a
    reg.  This is REG if nonzero, otherwise we allocate register(s) as
    necessary.  */

-rtx
-legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
-                        rtx reg)
+static rtx
+legitimize_pic_address (rtx orig, rtx reg)
 {
   if (GET_CODE (orig) == SYMBOL_REF
       /* See the comment in sparc_expand_move.  */
       || (TARGET_VXWORKS_RTP && GET_CODE (orig) == LABEL_REF))
     {
@@ old 3339-3351 / new 3322-3334 @@
           gcc_assert (! reload_in_progress && ! reload_completed);
           reg = gen_reg_rtx (Pmode);
         }

       gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
-      base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
-      offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
-                                       base == reg ? 0 : reg);
+      base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
+      offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
+                                       base == reg ? NULL_RTX : reg);

       if (GET_CODE (offset) == CONST_INT)
         {
           if (SMALL_INT (offset))
             return plus_constant (base, INTVAL (offset));
@@ old 3393-3416 / new 3376-3400 @@
                       force_operand (XEXP (x, 1), NULL_RTX));

   if (x != orig_x && legitimate_address_p (mode, x, FALSE))
     return x;

-  if (SPARC_SYMBOL_REF_TLS_P (x))
+  if (sparc_tls_referenced_p (x))
     x = legitimize_tls_address (x);
   else if (flag_pic)
-    x = legitimize_pic_address (x, mode, 0);
+    x = legitimize_pic_address (x, NULL_RTX);
   else if (GET_CODE (x) == PLUS && CONSTANT_ADDRESS_P (XEXP (x, 1)))
     x = gen_rtx_PLUS (Pmode, XEXP (x, 0),
                       copy_to_mode_reg (Pmode, XEXP (x, 1)));
   else if (GET_CODE (x) == PLUS && CONSTANT_ADDRESS_P (XEXP (x, 0)))
     x = gen_rtx_PLUS (Pmode, XEXP (x, 1),
                       copy_to_mode_reg (Pmode, XEXP (x, 0)));
   else if (GET_CODE (x) == SYMBOL_REF
            || GET_CODE (x) == CONST
            || GET_CODE (x) == LABEL_REF)
     x = copy_to_suggested_reg (x, NULL_RTX, Pmode);
+
   return x;
 }

 /* Emit the special PIC helper function.  */

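
Across these call sites the machine_mode parameter of legitimize_pic_address disappears; the remaining second argument is the register to use, and, per the function's own comment, NULL_RTX asks the helper to allocate one. A minimal sketch of the two calling styles (orig stands for any symbolic constant):

    /* Illustration only -- not part of the patch.  */
    rtx pic1 = legitimize_pic_address (orig, NULL_RTX);     /* allocate a fresh pseudo   */
    rtx pic2 = legitimize_pic_address (orig, operands[0]);  /* reuse the destination reg */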
@@ old 8756-8766 / new 8740-8750 @@
       spill_reg2 = gen_rtx_REG (word_mode, PIC_OFFSET_TABLE_REGNUM);
       start_sequence ();
       /* Delay emitting the PIC helper function because it needs to
          change the section and we are emitting assembly code.  */
       load_pic_register (true);  /* clobbers %o7 */
-      scratch = legitimize_pic_address (funexp, Pmode, scratch);
+      scratch = legitimize_pic_address (funexp, scratch);
       seq = get_insns ();
       end_sequence ();
       emit_and_preserve (seq, spill_reg, spill_reg2);
     }
   else if (TARGET_ARCH32)