;; ARM Thumb-1 Machine Description
;; Copyright (C) 2007-2017 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.  */


;;---------------------------------------------------------------------------
;; Insn patterns
;;

;; Beware of splitting Thumb1 patterns that output multiple
;; assembly instructions, in particular instructions such as SBC and
;; ADC which consume flags.  For example, in the pattern thumb_subdi3
;; below, the output SUB implicitly sets the flags (assembled to SUBS)
;; and then the Carry flag is used by SBC to compute the correct
;; result.  If we split thumb_subdi3 pattern into two separate RTL
;; insns (using define_insn_and_split), the scheduler might place
;; other RTL insns between SUB and SBC, possibly modifying the Carry
;; flag used by SBC.  This might happen because most Thumb1 patterns
;; for flag-setting instructions do not have explicit RTL for setting
;; or clobbering the flags.  Instead, they have the attribute "conds"
;; with value "set" or "clob".  However, this attribute is not used to
;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
;; that consume flags (except conditional execution, which is treated
;; differently).  In particular there is no Thumb1 armv6-m pattern for
;; sbc or adc.


|
|
;; 64-bit add: ADDS on the low words sets the carry flag, which the
;; following ADCS on the high words consumes.  Kept as a single insn
;; (not split) so nothing can be scheduled between the two and corrupt
;; the carry.
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI          0 "register_operand" "=l")
        (plus:DI (match_operand:DI 1 "register_operand" "%0")
                 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  ]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
57
|
|
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Alternatives 7-9 (Pa/Pb/Pc constants) emit "#" and are split after
;; reload into two adds whose immediates each fit the Thumb-1 range.
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
        (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
                 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   /* A negative constant in a register-add alternative is really a
      subtract of its absolute value.  */
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
        && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
           && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    /* First add takes as much of the constant as a single instruction
       allows (1020 for SP-relative, +/-255 otherwise); the second add
       supplies the remainder.  */
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
        if (offset > 255)
          offset = 255;
        else if (offset < -255)
          offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
                     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)
|
|
112
|
|
;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
;; Turn "mov rX, #imm; add rX, rX, sp" into a single SP-relative add
;; when the constant is a word-aligned value below 1024.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
        (match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
        (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
|
|
126
|
|
;; 64-bit subtract: SUBS on the low words sets the carry (borrow) flag
;; consumed by the following SBCS.  Kept as one insn -- see the note at
;; the top of the file about not splitting flag-consuming pairs.
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI           0 "register_operand" "=l")
        (minus:DI (match_operand:DI 1 "register_operand"  "0")
                  (match_operand:DI 2 "register_operand"  "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
137
|
|
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI           0 "register_operand" "=l")
        (minus:SI (match_operand:SI 1 "register_operand" "l")
                  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)
|
|
152
|
|
;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
        (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
                 (match_operand:SI 2 "register_operand" "l,l,l")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)
|
|
170
|
|
;; v6 multiply: MULS requires the destination to match one source
;; operand, expressed by the "0" constraints below.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
        (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
                 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)
|
|
183
|
|
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
        (and:SI (match_operand:SI 1 "register_operand" "%0")
                (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type"  "logic_imm")
   (set_attr "conds" "set")])
|
|
197
|
|
;; Expand a zero_extract (unsigned bitfield extract) into a left shift
;; that discards the bits above the field followed by a logical right
;; shift that moves the field down to bit 0.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
                         (match_operand:SI 2 "const_int_operand" "")
                         (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     /* operands[2] becomes the left-shift count, operands[3] the
        right-shift count, both derived from width and position.  */
     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
|
|
214
|
|
;; Expand a sign_extract (signed bitfield extract) into a left shift
;; followed by an arithmetic right shift so the field's top bit is
;; sign-extended.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
                         (match_operand:SI 2 "const_int_operand" "")
                         (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     /* operands[2] becomes the left-shift count, operands[3] the
        right-shift count, both derived from width and position.  */
     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
|
|
230
|
|
;; Bit clear: AND with the complement of operand 1.  BICS requires the
;; destination to match the non-complemented source (constraint "0").
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI                 0 "register_operand" "=l")
        (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
                (match_operand:SI         2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
|
|
241
|
|
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
        (ior:SI (match_operand:SI 1 "register_operand" "%0")
                (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])
|
|
255
|
|
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
        (xor:SI (match_operand:SI 1 "register_operand" "%0")
                (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
|
|
270
|
|
;; Left shift by immediate (constraint N) or by register; the register
;; form requires the destination to match operand 1.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI            0 "register_operand" "=l,l")
        (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
                   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
|
|
280
|
|
;; Arithmetic right shift by immediate (constraint N) or by register;
;; the register form requires the destination to match operand 1.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
        (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
                     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
|
|
290
|
|
;; Logical right shift by immediate (constraint N) or by register;
;; the register form requires the destination to match operand 1.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
        (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
                     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
|
|
300
|
|
;; Rotate right by register; RORS requires the destination to match
;; the rotated operand (constraint "0").
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l")
        (rotatert:SI (match_operand:SI 1 "register_operand" "0")
                     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)
|
|
310
|
|
;; 64-bit negate via 0 - value: RSBS sets the borrow that the final
;; SBCS consumes, so this stays a single insn.  The destination is
;; early-clobbered because %R0 is written before %Q1/%R1 are all read.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI         0 "register_operand" "=&l")
        (neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
|
|
320
|
|
;; 32-bit negate: reverse-subtract from zero.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
        (neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)
|
|
329
|
|
;; abs(x) without a branch: t = x >> 31 (all ones if negative, else 0);
;; result = (x + t) ^ t.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
        (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
|
|
344
|
|
;; -abs(x) without a branch: t = x >> 31; result = (t - x) ^ t.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
        (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
|
|
359
|
|
;; Bitwise complement.
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
        (not:SI (match_operand:SI 1 "register_operand"  "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)
|
|
368
|
|
;; HI->SI zero-extend: UXTH on arch6, a split ("#") on earlier cores,
;; or LDRH from memory.  SP-based addresses are rewritten through the
;; destination register first because LDRH cannot use SP as a base.
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
        {
          rtx ops[2];
          ops[0] = operands[0];
          ops[1] = a;

          output_asm_insn ("mov\t%0, %1", ops);

          XEXP (mem, 0) = operands[0];
        }
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
			   (const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)
|
|
411
|
|
;; QI->SI zero-extend on pre-v6 cores: register source is split ("#"),
;; memory source uses LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)
|
|
423
|
|
;; QI->SI zero-extend on v6+: single UXTB for registers, LDRB for memory.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)
|
|
434
|
|
;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
        (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
        rtx a = XEXP (mem, 0);
        rtx b = XEXP (mem, 1);

        if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
          return \"ldr\\t%0, %1\";

        if (REG_P (b))
          return \"ldrsh\\t%0, %1\";

        ops[1] = a;
        ops[2] = b;
      }
    else
      {
        ops[1] = mem;
        ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    /* LDRSH only accepts reg+reg addressing; materialize the offset in
       the scratch, or in operand 0 itself if the scratch overlaps the
       address.  */
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
			   (const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)
|
|
508
|
|
;; Rewrite a QI sign-extending load whose address is not reg+reg into
;; a "move offset into dest; ldrsb dest, [base, dest]" pair, since
;; LDRSB only supports reg+reg addressing on Thumb-1.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
        (sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      /* The destination participates in the address: load the byte
         first, then extend it in place.  */
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})
|
|
558
|
|
;; Fold "r0 += C; r2 = 0; r3 = sext(mem[r0 + r2])" into
;; "r2 = C; r3 = sext(mem[r0 + r2])", dropping the add when both index
;; registers die (or are overwritten by the load).
(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
        (plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
        (sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})
|
|
579
|
|
;; QI->SI sign-extend: SXTB on arch6, LDRSB for reg+reg addresses,
;; otherwise "#" so the split above rewrites the address.
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
        (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
			   (const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
			   (const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)
|
|
607
|
|
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
        (match_operand:DI 1 "general_operand"      "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      /* Reg-reg move: order the two word moves so the source half is
	 never overwritten before it is read.  */
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
      return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
    case 1:
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      /* Small negative constant: load the absolute value, negate, and
	 sign-extend into the high word.  */
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      gcc_assert (TARGET_HAVE_MOVT);
      /* Fixed: the high word is %R0, not the literal register R0 --
	 the original \"movs\\tR0, #0\" clobbered r0 unconditionally.  */
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)
|
|
657
|
|
;; SI move: alternatives 3 (J: small negative) and 4 (K: shifted byte)
;; emit "#" and are handled by the constant splits below.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, m,*l*h*k")
        (match_operand:SI 1 "general_operand"      "l, I,j,J,K,>,l,mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
  "@
   movs	%0, %1
   movs	%0, %1
   movw	%0, %1
   #
   #
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1"
  [(set_attr "length" "2,2,4,4,4,2,2,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond")])
|
|
680
|
|
; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
; to see if we can load them in fewer instructions or fewer cycles.
; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
; thumb1_movdi_insn has a better way to handle them.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
        (match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
				   operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)
|
|
699
|
|
;; Load a small negative constant (constraint J) as "mov positive;
;; negate".
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
        (match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)
|
|
712
|
|
;; Load a constraint-K constant (a byte value shifted left) as
;; "mov byte; lsl by shift" -- unless MOVW handles it directly.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
        (match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    /* Find the smallest shift that places the value inside a byte.  */
    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
        break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)
|
|
739
|
|
;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
        (match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  "
  {
    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (255);
  }"
)
|
|
755
|
|
;; HI move.  Alternative 1 (LDRH) special-cases SP-based addresses,
;; which LDRH cannot take directly, by copying SP into the destination
;; register first and using it as the base.
(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
        (match_operand:HI 1 "general_operand"       "l,m,l,k*h,*r,I,n"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], HImode)
       || register_operand (operands[1], HImode))"
  "*
  switch (which_alternative)
    {
    case 0: return \"adds	%0, %1, #0\";
    case 2: return \"strh	%1, %0\";
    case 3: return \"mov	%0, %1\";
    case 4: return \"mov	%0, %1\";
    case 5: return \"movs	%0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw	%0, %L1\";
    default: gcc_unreachable ();
    case 1:
      /* The stack pointer can end up being taken as an index register.
          Catch this case here and deal with it.  */
      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
	  && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
        {
	  rtx ops[2];
          ops[0] = operands[0];
          ops[1] = XEXP (XEXP (operands[1], 0), 0);

          output_asm_insn (\"mov	%0, %1\", ops);

          XEXP (XEXP (operands[1], 0), 0) = operands[0];

	}
      return \"ldrh	%0, %1\";
    }"
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
|
|
795
|
|
;; Store an HI value using a DI scratch; currently only the trivial
;; case (address already valid, source is a lo register) is handled.
(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI     0 "memory_operand"   "")
        (match_operand:HI     1 "register_operand" ""))
   (clobber (match_operand:DI 2 "register_operand" ""))]
  "TARGET_THUMB1"
  "
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    {
      emit_insn (gen_movhi (operands[0], operands[1]));
      DONE;
    }
  /* XXX Fixme, need to handle other cases here as well.  */
  gcc_unreachable ();
  "
)
|
|
812
|
|
;; QI move.
(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
        (match_operand:QI 1 "general_operand"       "l,m,l,k*h,*r,I"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], QImode)
       || register_operand (operands[1], QImode))"
  "@
   adds\\t%0, %1, #0
   ldrb\\t%0, %1
   strb\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1
   movs\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
|
|
830
|
|
;; HFmode (half-precision) moves through core registers.  Alternative 1
;; distinguishes a constant-pool reference (LABEL_REF, possibly offset),
;; which needs a word LDR, from an ordinary halfword LDRH.
(define_insn "*thumb1_movhf"
  [(set (match_operand:HF 0 "nonimmediate_operand" "=l,l,m,*r,*h")
	(match_operand:HF 1 "general_operand" "l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   s_register_operand (operands[0], HFmode)
       || s_register_operand (operands[1], HFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
      return \"movs\\t%0, %1\";
    case 1:
      {
	rtx addr;
	gcc_assert (MEM_P (operands[1]));
	addr = XEXP (operands[1], 0);
	if (GET_CODE (addr) == LABEL_REF
	    || (GET_CODE (addr) == CONST
		&& GET_CODE (XEXP (addr, 0)) == PLUS
		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
	  {
	    /* Constant pool entry.  */
	    return \"ldr\\t%0, %1\";
	  }
	return \"ldrh\\t%0, %1\";
      }
    case 2: return \"strh\\t%1, %0\";
    default: return \"mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "2")
   (set_attr "type" "mov_reg,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
|
|
866 ;;; ??? This should have alternatives for constants.
|
|
;; SFmode moves, handled entirely in core registers (single word).
;; '>' alternatives use LDMIA/STMIA for register-indirect access.
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF 1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   adds\\t%0, %1, #0
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)
|
|
886
|
|
887 ;;; ??? This should have alternatives for constants.
|
|
888 ;;; ??? This was originally identical to the movdi_insn pattern.
|
|
889 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
|
|
890 ;;; thumb_reorg with a memory reference.
|
|
;; DFmode moves as two-word operations on core register pairs.
;; For reg-reg copies the two halves are moved in an order chosen so the
;; source half is never overwritten before it is read (overlap check on
;; REGNO).  %H accesses the high word of a register pair / memory operand.
(define_insn "*thumb_movdf_insn"
  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
      return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
    case 1:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 2:
      return \"stmia\\t%0, {%1, %H1}\";
    case 3:
      return thumb_load_double_from_address (operands);
    case 4:
      /* Split the store into two word stores; operands[2] addresses the
	 second word.  */
      operands[2] = gen_rtx_MEM (SImode,
				 plus_constant (Pmode,
						XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 5:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "4,2,2,6,4,4")
   (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "pool_range" "*,*,*,1018,*,*")]
)
|
|
927
|
|
928
|
|
929 ;; Thumb block-move insns
|
|
930
|
|
;; Copy 12 bytes (three words) from *operand3 to *operand2, leaving the
;; post-incremented pointers in operands 0/1.  Three lo-reg scratches
;; carry the data; output is delegated to thumb_output_move_mem_multiple.
(define_insn "movmem12b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 12)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 12)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))
   (clobber (match_scratch:SI 6 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (3, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_12")]
)
|
|
952
|
|
;; Copy 8 bytes (two words) from *operand3 to *operand2; same scheme as
;; movmem12b above but with two words and two scratches.
(define_insn "movmem8b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 8)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 8)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (2, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_8")]
)
|
|
971
|
|
972
|
|
973 ;; A pattern to recognize a special situation and optimize for it.
|
|
974 ;; On the thumb, zero-extension from memory is preferrable to sign-extension
|
|
975 ;; due to the available addressing modes. Hence, convert a signed comparison
|
|
976 ;; with zero into an unsigned comparison with 127 if possible.
|
|
;; Rewrite a signed QImode compare-with-zero branch as an unsigned
;; SImode compare: zero-extend the byte, then GE 0 becomes LEU 127 and
;; LT 0 becomes GTU 127, and defer to cbranchsi4.
(define_expand "cbranchqi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "lt_ge_comparison_operator"
	       [(match_operand:QI 1 "memory_operand" "")
		(match_operand:QI 2 "const0_operand" "")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx xops[4];
  xops[1] = gen_reg_rtx (SImode);
  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
  xops[2] = GEN_INT (127);
  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
			    VOIDmode, xops[1], xops[2]);
  xops[3] = operands[3];
  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
  DONE;
})
|
|
996
|
|
997 ;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
|
|
998 ;; adapted from cbranchsi4_insn. Modifying cbranchsi4_insn instead leads to
|
|
999 ;; code generation difference for ARMv6-M because the minimum length of the
|
|
1000 ;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
|
|
1001 ;; handling of PC in the length condition.
|
|
;; Compare-and-branch against zero.  When the target is in CB(N)Z range
;; (length 2) emit CBZ/CBNZ directly; otherwise fall back to the
;; cmp + conditional branch scheme of cbranchsi4_insn, reusing flags
;; cached in cfun->machine->thumb1_cc_* when still valid.
(define_insn "thumb1_cbz"
  [(set (pc) (if_then_else
	      (match_operator 0 "equality_operator"
	       [(match_operand:SI 1 "s_register_operand" "l")
		(const_int 0)])
	      (label_ref (match_operand 2 "" ""))
	      (pc)))]
  "TARGET_THUMB1 && TARGET_HAVE_CBZ"
{
  if (get_attr_length (insn) == 2)
    {
      if (GET_CODE (operands[0]) == EQ)
	return "cbz\t%1, %l2";
      else
	return "cbnz\t%1, %l2";
    }
  else
    {
      rtx t = cfun->machine->thumb1_cc_insn;
      if (t != NULL_RTX)
	{
	  /* NOTE(review): operands[2] is the branch label in this pattern,
	     not a comparison operand (the compare is against const 0), so
	     the thumb1_cc_op1 check below is conservative -- it was
	     adapted from cbranchsi4_insn where operand 2 is the second
	     compare operand.  Confirm intent before changing.  */
	  if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	      || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	    t = NULL_RTX;
	  if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
	    {
	      if (!noov_comparison_operator (operands[0], VOIDmode))
		t = NULL_RTX;
	    }
	  else if (cfun->machine->thumb1_cc_mode != CCmode)
	    t = NULL_RTX;
	}
      if (t == NULL_RTX)
	{
	  /* No reusable flags: emit the compare and record it.  */
	  output_asm_insn ("cmp\t%1, #0", operands);
	  cfun->machine->thumb1_cc_insn = insn;
	  cfun->machine->thumb1_cc_op0 = operands[1];
	  cfun->machine->thumb1_cc_op1 = operands[2];
	  cfun->machine->thumb1_cc_mode = CCmode;
	}
      else
	/* Ensure we emit the right type of condition code on the jump.  */
	XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					     CC_REGNUM);

      switch (get_attr_length (insn))
	{
	case 4:  return "b%d0\t%l2";
	case 6:  return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
	case 8:  return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
	default: gcc_unreachable ();
	}
    }
}
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   ;; 2 bytes when CBZ/CBNZ reaches (forward 2..128 only), else the
   ;; usual 4/6/8-byte conditional-branch ladder.
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int 2))
		 (le (minus (match_dup 2) (pc)) (const_int 128)))
	    (const_int 2)
	    (if_then_else
		(and (ge (minus (match_dup 2) (pc)) (const_int -250))
		     (le (minus (match_dup 2) (pc)) (const_int 256)))
		(const_int 4)
		(if_then_else
		    (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
			 (le (minus (match_dup 2) (pc)) (const_int 2048)))
		    (const_int 6)
		    (const_int 8)))))
   (set (attr "type")
	(if_then_else
	    (eq_attr "length" "2")
	    (const_string "branch")
	    (const_string "multiple")))]
)
|
|
1081
|
|
1082 ;; Changes to the constraints of this pattern must be propagated to those of
|
|
1083 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
|
|
1084 ;; strict as the constraints here and aim to be as permissive.
|
|
;; SImode compare-and-branch.  A CMP already emitted for the same
;; operands (tracked in cfun->machine->thumb1_cc_*) is reused when its
;; mode is compatible with the required condition; otherwise a fresh CMP
;; is emitted and recorded.  Branch form depends on computed length:
;; short b<cond>, or an inverted branch around a longer B / BL.
(define_insn "cbranchsi4_insn"
  [(set (pc) (if_then_else
	      (match_operator 0 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,l*h")
		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx t = cfun->machine->thumb1_cc_insn;
  if (t != NULL_RTX)
    {
      if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	  || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	t = NULL_RTX;
      if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
	{
	  if (!noov_comparison_operator (operands[0], VOIDmode))
	    t = NULL_RTX;
	}
      else if (cfun->machine->thumb1_cc_mode != CCmode)
	t = NULL_RTX;
    }
  if (t == NULL_RTX)
    {
      output_asm_insn ("cmp\t%1, %2", operands);
      cfun->machine->thumb1_cc_insn = insn;
      cfun->machine->thumb1_cc_op0 = operands[1];
      cfun->machine->thumb1_cc_op1 = operands[2];
      cfun->machine->thumb1_cc_mode = CCmode;
    }
  else
    /* Ensure we emit the right type of condition code on the jump.  */
    XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					 CC_REGNUM);

  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
}
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
		 (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1145
|
|
1146 ;; Changes to the constraints of this pattern must be propagated to those of
|
|
1147 ;; atomic compare_and_swap splitters in sync.md. These must be at least as
|
|
1148 ;; strict as the constraints here and aim to be as permissive.
|
|
;; Compare-and-branch where the second operand is a negatable constant:
;; computed as ADDS scratch, %1, #-imm (which sets the flags), then a
;; conditional branch chosen by length.
(define_insn "cbranchsi4_scratch"
  [(set (pc) (if_then_else
	      (match_operator 4 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,0")
		(match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))
   (clobber (match_scratch:SI 0 "=l,l"))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);

  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d4\\t%l3\";
    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
		 (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1185
|
|
;; Equality branch comparing a register against the negation of another:
;; CMN computes the flags of %1 + %2, i.e. %1 against -%2.
(define_insn "*negated_cbranchsi4"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(match_operand:SI 1 "s_register_operand" "l")
	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"cmn\\t%1, %2\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
		 (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1221
|
|
;; Branch on a single bit of a register: LSLS by (31 - bitpos) moves the
;; tested bit into bit 31 and sets the flags, then branch on the operator.
;; The shifted value lands in a scratch; the source register is untouched.
(define_insn "*tbit_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (const_int 1)
			    (match_operand:SI 2 "const_int_operand" "i"))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
		 (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1266
|
|
;; Branch on the low N bits of a register: LSLS by (32 - N) produces a
;; zero result (Z set) exactly when those N bits are all zero.  The
;; shifted value lands in a scratch; the source register is untouched.
(define_insn "*tlobits_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (match_operand:SI 2 "const_int_operand" "i")
			    (const_int 0))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  op[2] = GEN_INT (32 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
		 (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1311
|
|
;; Branch on (reg AND reg) compared with zero, via a TST instruction.
(define_insn "*tstsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "equality_operator"
	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
		   (match_operand:SI 1 "s_register_operand" "l"))
	   (const_int 0)])
	 (label_ref (match_operand 2 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  {
  output_asm_insn (\"tst\\t%0, %1\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d3\\t%l2\";
    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
		 (le (minus (match_dup 2) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 2) (pc)) (const_int -2040))
		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1349
|
|
;; Decrement-and-branch: operand 0 receives operand 2 minus 1, and the
;; branch tests operand 2 against zero.  The NE/EQ test on the original
;; value is recast as an unsigned GEU/LTU against 1 so it can read the
;; flags left by the SUBS.  Alternatives cover a lo-reg destination, a
;; hi-reg destination (SUBS into scratch then MOV, which does not
;; clobber the flags), and two memory destinations (SUBS then STR).
(define_insn "*cbranchne_decr1"
  [(set (pc)
	(if_then_else (match_operator 3 "equality_operator"
		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
			(const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
	(plus:SI (match_dup 2) (const_int -1)))
   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
  "TARGET_THUMB1"
  "*
   {
     rtx cond[2];
     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
				? GEU : LTU),
			       VOIDmode, operands[2], const1_rtx);
     cond[1] = operands[4];

     if (which_alternative == 0)
       output_asm_insn (\"subs\\t%0, %2, #1\", operands);
     else if (which_alternative == 1)
       {
	 /* We must provide an alternative for a hi reg because reload
	    cannot handle output reloads on a jump instruction, but we
	    can't subtract into that.  Fortunately a mov from lo to hi
	    does not clobber the condition codes.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"mov\\t%0, %1\", operands);
       }
     else
       {
	 /* Similarly, but the target is memory.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"str\\t%1, %0\", operands);
       }

     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
       {
	 case 4:
	   output_asm_insn (\"b%d0\\t%l1\", cond);
	   return \"\";
	 case 6:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
	(if_then_else
	    (ior (and (eq (symbol_ref ("which_alternative"))
			  (const_int 0))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
	    (const_string "no")))
   ;; Alternatives 1-3 carry an extra 2-byte MOV/STR, hence shifted
   ;; length thresholds.
   (set_attr_alternative "length"
      [
       ;; Alternative 0
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       ;; Alternative 1
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 2
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 3
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))])
   (set_attr "type" "multiple")]
)
|
|
1453
|
|
;; Add (or subtract, for a negative constant) and branch on the result
;; compared with zero, reading the flags set by ADDS/SUBS.  Only EQ, NE,
;; GE and LT are valid since the N/Z flags of the addition are tested.
;; Alternatives 2-3 copy the result to a hi reg with a flag-preserving
;; MOV; alternatives 4-5 store it to memory with STR.
(define_insn "*addsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 4 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
	    (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
	   (const_int 0)])
	 (label_ref (match_operand 5 "" ""))
	 (pc)))
   (set
    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
    (plus:SI (match_dup 2) (match_dup 3)))
   (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[4]) == EQ
       || GET_CODE (operands[4]) == NE
       || GET_CODE (operands[4]) == GE
       || GET_CODE (operands[4]) == LT)"
  "*
   {
     rtx cond[3];

     cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
     cond[1] = operands[2];
     cond[2] = operands[3];

     if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
       output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
     else
       output_asm_insn (\"adds\\t%0, %1, %2\", cond);

     if (which_alternative >= 2
	 && which_alternative < 4)
       output_asm_insn (\"mov\\t%0, %1\", operands);
     else if (which_alternative >= 4)
       output_asm_insn (\"str\\t%1, %0\", operands);

     switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
       {
	 case 4:
	   return \"b%d4\\t%l5\";
	 case 6:
	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
	(if_then_else
	    (ior (and (lt (symbol_ref ("which_alternative"))
			  (const_int 2))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
	    (const_string "no")))
   ;; Alternatives >= 2 carry an extra 2-byte MOV/STR, hence the second
   ;; set of thresholds.
   (set (attr "length")
     (if_then_else
       (lt (symbol_ref ("which_alternative"))
	   (const_int 2))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))))
   (set_attr "type" "multiple")]
)
|
|
1535
|
|
;; Branch on (reg + operand) compared with zero where the sum itself is
;; dead: use CMP with the negated constant, CMN with a register, or an
;; ADDS/SUBS into a scratch when neither compare form fits.  As above,
;; only EQ/NE/GE/LT are valid.
(define_insn "*addsi3_cbranch_scratch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
	   (const_int 0)])
	 (label_ref (match_operand 4 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[3]) == EQ
       || GET_CODE (operands[3]) == NE
       || GET_CODE (operands[3]) == GE
       || GET_CODE (operands[3]) == LT)"
  "*
   {
     switch (which_alternative)
       {
       case 0:
	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
	 break;
       case 1:
	 output_asm_insn (\"cmn\t%1, %2\", operands);
	 break;
       case 2:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %1, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %1, %2\", operands);
	 break;
       case 3:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %0, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %0, %2\", operands);
	 break;
       }

     switch (get_attr_length (insn))
       {
	 case 4:
	   return \"b%d3\\t%l4\";
	 case 6:
	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 4) (pc)) (const_int -250))
		 (le (minus (match_dup 4) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
		(and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		     (le (minus (match_dup 4) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
|
|
1604
|
|
;; DImode compare with zero: OR the two halves into a scratch; Z is set
;; iff the full 64-bit value is zero.  %Q/%R select the low/high words.
(define_insn "*thumb_cmpdi_zero"
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
		      (const_int 0)))
   (clobber (match_scratch:SI 1 "=l"))]
  "TARGET_THUMB1"
  "orrs\\t%1, %Q0, %R0"
  [(set_attr "conds" "set")
   (set_attr "length" "2")
   (set_attr "type" "logics_reg")]
)
|
|
1616
|
|
;; Expand (x == 0) into the clobber-carrying insn below, allocating the
;; SImode scratch it needs.
(define_expand "cstoresi_eq0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand" "")
	  (eq:SI (match_operand:SI 1 "s_register_operand" "")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
|
|
1626
|
|
;; Expand (x != 0) into the clobber-carrying insn below, allocating the
;; SImode scratch it needs.
(define_expand "cstoresi_ne0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand" "")
	  (ne:SI (match_operand:SI 1 "s_register_operand" "")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
|
|
1636
|
|
;; Branch-free (x == 0): RSBS sets carry iff x == 0, and the following
;; ADCS folds that carry into the 0/1 result.  The second alternative
;; handles output overlapping input via the scratch.
(define_insn "*cstoresi_eq0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
  "TARGET_THUMB1"
  "@
   rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
   rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
1649
|
|
;; Branch-free (x != 0): SUBS x-1 borrows iff x == 0, and SBCS turns
;; that into the 0/1 result.
(define_insn "*cstoresi_ne0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
  "TARGET_THUMB1"
  "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
  [(set_attr "length" "4")]
)
|
|
1659
|
|
1660 ;; Used as part of the expansion of thumb ltu and gtu sequences
|
|
;; Used as part of the expansion of thumb ltu and gtu sequences:
;; computes -(a <u b) (all-ones or zero) by subtracting the borrow of
;; CMP from itself with SBCS.
(define_insn "cstoresi_nltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
	(neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
  "TARGET_THUMB1"
  "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
1670
|
|
;; (a <u b) as 0/1: split into the -(a <u b) pattern above followed by a
;; negation of that all-ones/zero mask.
(define_insn_and_split "cstoresi_ltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
	(ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1"
  [(set (match_dup 3)
	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
   (set (match_dup 0) (neg:SI (match_dup 3)))]
  "operands[3] = gen_reg_rtx (SImode);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
1685
|
|
1686 ;; Used as part of the expansion of thumb les sequence.
|
|
;; Used as part of the expansion of thumb les sequence: computes
;; op1 + op2 + (op3 >=u op4) by letting CMP set carry and ADCS add it in.
(define_insn "thumb1_addsi3_addgeu"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
			  (match_operand:SI 2 "s_register_operand" "l"))
		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
  "TARGET_THUMB1"
  "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
1698
|
|
1699
|
|
;; Unconditional jump.  A 2-byte "b" is emitted when the displacement
;; fits the range checked in the length attribute below; otherwise a
;; 4-byte "bl" far jump is used and the insn is marked far_jump = yes.
;; NOTE(review): the far-jump BL writes LR although no clobber is
;; expressed in the RTL here -- presumably accounted for elsewhere
;; (via the far_jump attribute); confirm.
(define_insn "*thumb_jump"
  [(set (pc)
	(label_ref (match_operand 0 "" "")))]
  "TARGET_THUMB1"
  "*
  if (get_attr_length (insn) == 2)
    return \"b\\t%l0\";
  return \"bl\\t%l0\\t%@ far jump\";
  "
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "4")
	    (const_string "yes")
	    (const_string "no")))
   ;; 2 bytes when the branch displacement is within [-2044, 2048],
   ;; otherwise 4 bytes for the far jump.
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
	    (const_int 2)
	    (const_int 4)))
   (set_attr "type" "branch")]
)
|
|
1722
|
|
;; Indirect call through a register on ARMv5+ where BLX is available:
;; a single 2-byte instruction.  Not used for sibling calls.
(define_insn "*call_reg_thumb1_v5"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   ;; BLX writes the return address into LR.
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5 && !SIBLING_CALL_P (insn)"
  "blx\\t%0"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
|
|
1733
|
|
;; Call from secure to non-secure state under CMSE (Cortex-M Security
;; Extensions): routed through the library helper
;; __gnu_cmse_nonsecure_call.  Operand 0 (the target address register)
;; is clobbered -- presumably the helper reads the destination from it
;; and scrubs it; confirm against libgcc's cmse_nonsecure_call.S.
(define_insn "*nonsecure_call_reg_thumb1_v5"
  [(call (unspec:SI [(mem:SI (match_operand:SI 0 "register_operand" "l*r"))]
		    UNSPEC_NONSECURE_MEM)
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))
   (clobber (match_dup 0))]
  "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
|
|
1746
|
|
;; Indirect call through a register on pre-ARMv5 targets (no BLX).
;; Without caller interworking the call goes through a per-register
;; helper stub emitted by thumb_call_via_reg; with interworking it
;; calls one of the __interwork_*_call_via_* veneers, choosing the
;; r7 variant when a frame pointer is in use, else the r11 variant.
(define_insn "*call_reg_thumb1"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5 && !SIBLING_CALL_P (insn)"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[0]);
    else if (operands[1] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%0\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%0\";
    else
      return \"bl\\t%__interwork_r11_call_via_%0\";
  }"
  [(set_attr "type" "call")]
)
|
|
1766
|
|
;; Value-returning indirect call through a register on ARMv5+ (BLX).
;; NOTE(review): unlike *call_reg_thumb1_v5 this condition has no
;; !SIBLING_CALL_P check -- confirm this is intentional.
(define_insn "*call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   ;; BLX writes the return address into LR.
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5"
  "blx\\t%1"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
|
|
1778
|
|
;; Value-returning variant of the CMSE non-secure call: routed through
;; the library helper __gnu_cmse_nonsecure_call.  The register holding
;; the target address (operand 1) is clobbered.
(define_insn "*nonsecure_call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (unspec:SI
	       [(mem:SI (match_operand:SI 1 "register_operand" "l*r"))]
	       UNSPEC_NONSECURE_MEM)
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))
   (clobber (match_dup 1))]
  "TARGET_THUMB1 && use_cmse"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
|
|
1793
|
|
;; Value-returning indirect call on pre-ARMv5 targets (no BLX); the
;; helper-stub / interworking-veneer selection mirrors
;; *call_reg_thumb1 above, with the target address in operand 1.
(define_insn "*call_value_reg_thumb1"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[1]);
    else if (operands[2] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%1\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%1\";
    else
      return \"bl\\t%__interwork_r11_call_via_%1\";
  }"
  [(set_attr "type" "call")]
)
|
|
1814
|
|
;; Direct BL call to a symbol.  Restricted to short calls: targets
;; flagged as long calls (arm_is_long_call_p) must go through a
;; register pattern instead.
(define_insn "*call_insn"
  [(call (mem:SI (match_operand:SI 0 "" ""))
	 (match_operand:SI 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[0]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
  "bl\\t%a0"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
|
|
1827
|
|
;; Direct value-returning BL call to a symbol; short calls only, as
;; with *call_insn.
(define_insn "*call_value_insn"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand 1 "" ""))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[1]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
  "bl\\t%a1"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
|
|
1841
|
|
;; Expand a PIC case dispatch.  First branch to the out-of-range label
;; (operands[3]) when the index (operands[0]) is unsigned-greater than
;; the bound (operands[1]); then move the index into r0 -- the
;; dispatch pattern below reads it from (reg:SI 0) -- and emit the
;; dispatch jump to the table at operands[2].
(define_expand "thumb1_casesi_internal_pic"
  [(match_operand:SI 0 "s_register_operand" "")	; case index
   (match_operand:SI 1 "thumb1_cmp_operand" "")	; upper bound
   (match_operand 2 "" "")			; dispatch table label
   (match_operand 3 "" "")]			; out-of-range label
  "TARGET_THUMB1"
  {
    rtx reg0;
    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
				    operands[3]));
    reg0 = gen_rtx_REG (SImode, 0);
    emit_move_insn (reg0, operands[0]);
    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
    DONE;
  }
)
|
|
1859
|
|
;; Dispatch through the case table: the index is taken from r0 (set up
;; by thumb1_casesi_internal_pic above) and the assembly sequence is
;; produced by thumb1_output_casesi.  IP and LR are clobbered by that
;; sequence.
(define_insn "thumb1_casesi_dispatch"
  [(parallel [(set (pc) (unspec [(reg:SI 0)
				 (label_ref (match_operand 0 "" ""))
			;;	 (label_ref (match_operand 1 "" ""))
				]
			UNSPEC_THUMB1_CASESI))
	      (clobber (reg:SI IP_REGNUM))
	      (clobber (reg:SI LR_REGNUM))])]
  "TARGET_THUMB1"
  "* return thumb1_output_casesi(operands);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
|
|
1873
|
|
;; NB Never uses BX.
;; Indirect jump through a register: the register is written straight
;; into PC with MOV rather than via BX.
(define_insn "*thumb1_indirect_jump"
  [(set (pc)
	(match_operand:SI 0 "register_operand" "l*r"))]
  "TARGET_THUMB1"
  "mov\\tpc, %0"
  [(set_attr "conds" "clob")
   (set_attr "length" "2")
   (set_attr "type" "branch")]
)
|
|
1884
|
|
1885
|
|
;; Function-entry interworking sequence; the assembly is produced by
;; thumb1_output_interwork.  Modelled as an unspec_volatile so it is
;; never moved or deleted.
(define_insn "prologue_thumb1_interwork"
  [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
  "TARGET_THUMB1"
  "* return thumb1_output_interwork ();"
  [(set_attr "length" "8")
   (set_attr "type" "multiple")]
)
|
|
1893
|
|
;; Function epilogue; the full instruction sequence is produced by
;; thumb1_unexpanded_epilogue.
(define_insn "*epilogue_insns"
  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  "TARGET_THUMB1"
  "*
  return thumb1_unexpanded_epilogue ();
  "
; Length is absolute worst case, when using CMSE and if this is an entry
; function an extra 4 (MSR) bytes will be added.
  [(set (attr "length")
	(if_then_else
	    (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
	    (const_int 48)
	    (const_int 44)))
   (set_attr "type" "block")
   ;; We don't clobber the conditions, but the potential length of this
   ;; operation is sufficient to make conditionalizing the sequence
   ;; unlikely to be profitable.
   (set_attr "conds" "clob")]
)
|
|
1913
|
|
;; Miscellaneous Thumb patterns
;; Jump through a case table.  For PIC the address of the table label
;; (operands[1]) is added to the jump target first -- presumably the
;; table entries are position-relative offsets; the adjusted value
;; replaces operands[0] before the parallel is emitted.
(define_expand "tablejump"
  [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
	      (use (label_ref (match_operand 1 "" "")))])]
  "TARGET_THUMB1"
  "
  if (flag_pic)
    {
      /* Hopefully, CSE will eliminate this copy.  */
      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
      rtx reg2 = gen_reg_rtx (SImode);

      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
      operands[0] = reg2;
    }
  "
)
|
|
1931
|
|
;; Copy the program counter into a low register.
(define_insn "*thumb1_movpc_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(reg:SI PC_REGNUM))]
  "TARGET_THUMB1"
  "mov\\t%0, pc"
  [(set_attr "length" "2")
   (set_attr "conds" "nocond")
   (set_attr "type" "mov_reg")]
)
|
|
1941
|
|
;; NB never uses BX.
;; Jump to a case-table target already computed into a register; the
;; (use (label_ref ...)) records the dependency on the dispatch-table
;; label.  Jumps with MOV pc rather than BX.
(define_insn "*thumb1_tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
   (use (label_ref (match_operand 1 "" "")))]
  "TARGET_THUMB1"
  "mov\\t%|pc, %0"
  [(set_attr "length" "2")
   (set_attr "type" "no_insn")]
)
|
|
1951
|
|
;; Exception-handler return: install operand 0 as the function's
;; return address.  Kept as "#" until after reload, then split away
;; entirely: thumb_set_return_address emits the required code, using
;; operand 1 as an earlyclobber scratch register.
(define_insn_and_split "thumb_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
		    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    thumb_set_return_address (operands[0], operands[1]);
    DONE;
  }"
  [(set_attr "type" "mov_reg")]
)
|
|
1967
|