;; Machine description for PowerPC synchronization instructions.
;; Copyright (C) 2005-2017 Free Software Foundation, Inc.
;; Contributed by Geoffrey Keating.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

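;; Map each integer mode to its load-and-reserve (larx) and
;; store-conditional (stcx.) instruction.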
(define_mode_attr larx [(QI "lbarx")
                        (HI "lharx")
                        (SI "lwarx")
                        (DI "ldarx")
                        (TI "lqarx")])

(define_mode_attr stcx [(QI "stbcx.")
                        (HI "sthcx.")
                        (SI "stwcx.")
                        (DI "stdcx.")
                        (TI "stqcx.")])

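;; Code iterator and attributes for the binary atomic operations;
;; <fetchop_name> gives the pattern name suffix and <fetchop_pred>
;; the predicate used for the second input operand of each operation.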
(define_code_iterator FETCHOP [plus minus ior xor and])
(define_code_attr fetchop_name
  [(plus "add") (minus "sub") (ior "or") (xor "xor") (and "and")])
(define_code_attr fetchop_pred
  [(plus "add_operand") (minus "int_reg_operand")
   (ior "logical_operand") (xor "logical_operand") (and "and_operand")])

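;; Expand a stand-alone memory fence: relaxed needs no barrier,
;; seq_cst needs a full sync, and the remaining models use the
;; lighter-weight lwsync.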
(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand" "")]		;; model
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[0]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_RELEASE:
    case MEMMODEL_ACQ_REL:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})

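;; Full (sync) and lightweight (lwsync) memory barriers, modeled as a
;; set of a volatile BLKmode scratch MEM so that the barrier orders
;; against all memory accesses and is never deleted or moved.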
(define_expand "hwsync"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*hwsync"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_SYNC))]
  ""
  "sync"
  [(set_attr "type" "sync")])

(define_expand "lwsync"
  [(set (match_dup 0)
        (unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*lwsync"
  [(set (match_operand:BLK 0 "" "")
        (unspec:BLK [(match_dup 0)] UNSPEC_LWSYNC))]
  ""
{
  /* Some AIX assemblers don't accept lwsync, so we use a .long.  */
  if (TARGET_NO_LWSYNC)
    return "sync";
  else if (TARGET_LWSYNC_INSTRUCTION)
    return "lwsync";
  else
    return ".long 0x7c2004ac";
}
  [(set_attr "type" "sync")])

(define_insn "isync"
  [(unspec_volatile:BLK [(const_int 0)] UNSPECV_ISYNC)]
  ""
  "isync"
  [(set_attr "type" "isync")])

;; Types that we should provide atomic instructions for.
(define_mode_iterator AINT [QI
                            HI
                            SI
                            (DI "TARGET_POWERPC64")
                            (TI "TARGET_SYNC_TI")])

;; The control dependency used for load dependency described
;; in B.2.3 of the Power ISA 2.06B.
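;; The compare of the loaded value against itself always succeeds, so
;; the bne- is never taken, but the conditional branch plus the isync
;; orders the load before any subsequent memory accesses.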
(define_insn "loadsync_<mode>"
  [(unspec_volatile:BLK [(match_operand:AINT 0 "register_operand" "r")]
                        UNSPECV_ISYNC)
   (clobber (match_scratch:CC 1 "=y"))]
  ""
  "cmpw %1,%0,%0\;bne- %1,$+4\;isync"
  [(set_attr "type" "isync")
   (set_attr "length" "12")])

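;; Atomic load of a quad word with lq.  The destination uses PTImode so
;; that an even/odd register pair is allocated, and it must not overlap
;; the address registers.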
(define_insn "load_quadpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
        (unspec:PTI
         [(match_operand:TI 1 "quad_memory_operand" "wQ")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])"
  "lq %0,%1"
  [(set_attr "type" "load")
   (set_attr "length" "4")])

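;; Atomic load: a leading hwsync for seq_cst, the load itself (via lq
;; for TImode), and a trailing loadsync for the consume, acquire and
;; seq_cst models.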
(define_expand "atomic_load<mode>"
  [(set (match_operand:AINT 0 "register_operand" "")		;; output
        (match_operand:AINT 1 "memory_operand" ""))		;; memory
   (use (match_operand:SI 2 "const_int_operand" ""))]		;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));

  if (is_mm_seq_cst (model))
    emit_insn (gen_hwsync ());

  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      if (!quad_address_p (XEXP (op1, 0), TImode, false))
        {
          rtx old_addr = XEXP (op1, 0);
          rtx new_addr = force_reg (Pmode, old_addr);
          operands[1] = op1 = replace_equiv_address (op1, new_addr);
        }

      emit_insn (gen_load_quadpti (pti_reg, op1));

      if (WORDS_BIG_ENDIAN)
        emit_move_insn (op0, gen_lowpart (TImode, pti_reg));
      else
        {
          emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti_reg));
          emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti_reg));
        }
    }

  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_CONSUME:
    case MEMMODEL_ACQUIRE:
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_loadsync_<mode> (operands[0]));
      break;
    default:
      gcc_unreachable ();
    }
  DONE;
})

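;; Atomic store of a quad word with stq, again using PTImode to get an
;; even/odd register pair.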
(define_insn "store_quadpti"
  [(set (match_operand:PTI 0 "quad_memory_operand" "=wQ")
        (unspec:PTI
         [(match_operand:PTI 1 "quad_int_reg_operand" "r")] UNSPEC_LSQ))]
  "TARGET_SYNC_TI"
  "stq %1,%0"
  [(set_attr "type" "store")
   (set_attr "length" "4")])

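;; Atomic store: a leading lwsync for release, a leading hwsync for
;; seq_cst, then the store itself (via stq for TImode).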
(define_expand "atomic_store<mode>"
  [(set (match_operand:AINT 0 "memory_operand" "")		;; memory
        (match_operand:AINT 1 "register_operand" ""))		;; input
   (use (match_operand:SI 2 "const_int_operand" ""))]		;; model
  ""
{
  if (<MODE>mode == TImode && !TARGET_SYNC_TI)
    FAIL;

  enum memmodel model = memmodel_base (INTVAL (operands[2]));
  switch (model)
    {
    case MEMMODEL_RELAXED:
      break;
    case MEMMODEL_RELEASE:
      emit_insn (gen_lwsync ());
      break;
    case MEMMODEL_SEQ_CST:
      emit_insn (gen_hwsync ());
      break;
    default:
      gcc_unreachable ();
    }
  if (<MODE>mode != TImode)
    emit_move_insn (operands[0], operands[1]);
  else
    {
      rtx op0 = operands[0];
      rtx op1 = operands[1];
      rtx pti_reg = gen_reg_rtx (PTImode);

      if (!quad_address_p (XEXP (op0, 0), TImode, false))
        {
          rtx old_addr = XEXP (op0, 0);
          rtx new_addr = force_reg (Pmode, old_addr);
          operands[0] = op0 = replace_equiv_address (op0, new_addr);
        }

      if (WORDS_BIG_ENDIAN)
        emit_move_insn (pti_reg, gen_lowpart (PTImode, op1));
      else
        {
          emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op1));
          emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op1));
        }

      emit_insn (gen_store_quadpti (gen_lowpart (PTImode, op0), pti_reg));
    }

  DONE;
})

;; Any supported integer mode that has atomic l<x>arx/st<x>cx. instructions
;; other than the quad memory operations, which have special restrictions.
;; Byte/halfword atomic instructions were added in ISA 2.06B, but were phased
;; in and did not show up until power8.  TImode atomic lqarx/stqcx. require
;; special handling due to even/odd register requirements.
(define_mode_iterator ATOMIC [(QI "TARGET_SYNC_HI_QI")
                              (HI "TARGET_SYNC_HI_QI")
                              SI
                              (DI "TARGET_POWERPC64")])

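;; Load-and-reserve: load the value and establish the reservation that
;; the matching store-conditional checks.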
(define_insn "load_locked<mode>"
  [(set (match_operand:ATOMIC 0 "int_reg_operand" "=r")
        (unspec_volatile:ATOMIC
         [(match_operand:ATOMIC 1 "memory_operand" "Z")] UNSPECV_LL))]
  ""
  "<larx> %0,%y1"
  [(set_attr "type" "load_l")])

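;; lbarx/lharx zero-extend the loaded byte or halfword, so also provide
;; variants that produce the result directly in an SImode register.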
(define_insn "load_locked<QHI:mode>_si"
  [(set (match_operand:SI 0 "int_reg_operand" "=r")
        (unspec_volatile:SI
         [(match_operand:QHI 1 "memory_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_HI_QI"
  "<QHI:larx> %0,%y1"
  [(set_attr "type" "load_l")])

;; Use PTImode to get even/odd register pairs.
;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "load_lockedti"
  [(use (match_operand:TI 0 "quad_int_reg_operand" ""))
   (use (match_operand:TI 1 "memory_operand" ""))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx pti = gen_reg_rtx (PTImode);

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx old_addr = XEXP (op1, 0);
      rtx new_addr = force_reg (Pmode, old_addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
    }

  emit_insn (gen_load_lockedpti (pti, op1));
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (op0, gen_lowpart (TImode, pti));
  else
    {
      emit_move_insn (gen_lowpart (DImode, op0), gen_highpart (DImode, pti));
      emit_move_insn (gen_highpart (DImode, op0), gen_lowpart (DImode, pti));
    }
  DONE;
})

(define_insn "load_lockedpti"
  [(set (match_operand:PTI 0 "quad_int_reg_operand" "=&r")
        (unspec_volatile:PTI
         [(match_operand:TI 1 "indexed_or_indirect_operand" "Z")] UNSPECV_LL))]
  "TARGET_SYNC_TI
   && !reg_mentioned_p (operands[0], operands[1])
   && quad_int_reg_operand (operands[0], PTImode)"
  "lqarx %0,%y1"
  [(set_attr "type" "load_l")])

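;; Store-conditional: the store succeeds only if the reservation from the
;; matching load-and-reserve is still held; the CC result (operand 0)
;; records the outcome for the retry loop.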
(define_insn "store_conditional<mode>"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
        (unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:ATOMIC 1 "memory_operand" "=Z")
        (match_operand:ATOMIC 2 "int_reg_operand" "r"))]
  ""
  "<stcx> %2,%y1"
  [(set_attr "type" "store_c")])

;; Use a temporary register to force getting an even register for the
;; lqarx/stqcx. instructions.  Normal optimizations will eliminate this extra
;; copy on big endian systems.

;; On little endian systems where non-atomic quad word load/store instructions
;; are not used, the address can be register+offset, so make sure the address
;; is indexed or indirect before register allocation.

(define_expand "store_conditionalti"
  [(use (match_operand:CC 0 "cc_reg_operand" ""))
   (use (match_operand:TI 1 "memory_operand" ""))
   (use (match_operand:TI 2 "quad_int_reg_operand" ""))]
  "TARGET_SYNC_TI"
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];
  rtx op2 = operands[2];
  rtx addr = XEXP (op1, 0);
  rtx pti_mem;
  rtx pti_reg;

  if (!indexed_or_indirect_operand (op1, TImode))
    {
      rtx new_addr = force_reg (Pmode, addr);
      operands[1] = op1 = change_address (op1, TImode, new_addr);
      addr = new_addr;
    }

  pti_mem = change_address (op1, PTImode, addr);
  pti_reg = gen_reg_rtx (PTImode);

  if (WORDS_BIG_ENDIAN)
    emit_move_insn (pti_reg, gen_lowpart (PTImode, op2));
  else
    {
      emit_move_insn (gen_lowpart (DImode, pti_reg), gen_highpart (DImode, op2));
      emit_move_insn (gen_highpart (DImode, pti_reg), gen_lowpart (DImode, op2));
    }

  emit_insn (gen_store_conditionalpti (op0, pti_mem, pti_reg));
  DONE;
})

(define_insn "store_conditionalpti"
  [(set (match_operand:CC 0 "cc_reg_operand" "=x")
        (unspec_volatile:CC [(const_int 0)] UNSPECV_SC))
   (set (match_operand:PTI 1 "indexed_or_indirect_operand" "=Z")
        (match_operand:PTI 2 "quad_int_reg_operand" "r"))]
  "TARGET_SYNC_TI && quad_int_reg_operand (operands[2], PTImode)"
  "stqcx. %2,%y1"
  [(set_attr "type" "store_c")])

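;; Compare-and-swap, exchange, and the read-modify-write operations below
;; are expanded by the rs6000_expand_atomic_* helpers in the port's C code,
;; which emit the load-locked/store-conditional retry loop and the barriers
;; required by the memory model operand.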
(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "int_reg_operand" "")		;; bool out
   (match_operand:AINT 1 "int_reg_operand" "")		;; val out
   (match_operand:AINT 2 "memory_operand" "")		;; memory
   (match_operand:AINT 3 "reg_or_short_operand" "")	;; expected
   (match_operand:AINT 4 "int_reg_operand" "")		;; desired
   (match_operand:SI 5 "const_int_operand" "")		;; is_weak
   (match_operand:SI 6 "const_int_operand" "")		;; model succ
   (match_operand:SI 7 "const_int_operand" "")]		;; model fail
  ""
{
  rs6000_expand_atomic_compare_and_swap (operands);
  DONE;
})

(define_expand "atomic_exchange<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; input
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_exchange (operands);
  DONE;
})

(define_expand "atomic_<fetchop_name><mode>"
  [(match_operand:AINT 0 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 0)
     (match_operand:AINT 1 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 2 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[0], operands[1],
                           NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})

(define_expand "atomic_nand<mode>"
  [(match_operand:AINT 0 "memory_operand" "")		;; memory
   (match_operand:AINT 1 "int_reg_operand" "")		;; operand
   (match_operand:SI 2 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[0], operands[1],
                           NULL_RTX, NULL_RTX, operands[2]);
  DONE;
})

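;; The atomic_fetch_<op> patterns return the value of the memory location
;; before the operation (operand 0 is passed as the before-value result to
;; rs6000_expand_atomic_op), while the atomic_<op>_fetch patterns further
;; below return the value after the operation.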
(define_expand "atomic_fetch_<fetchop_name><mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 1)
     (match_operand:AINT 2 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
                           operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_fetch_nand<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
                           operands[0], NULL_RTX, operands[3]);
  DONE;
})

(define_expand "atomic_<fetchop_name>_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (FETCHOP:AINT (match_dup 1)
     (match_operand:AINT 2 "<fetchop_pred>" ""))	;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (<CODE>, operands[1], operands[2],
                           NULL_RTX, operands[0], operands[3]);
  DONE;
})

(define_expand "atomic_nand_fetch<mode>"
  [(match_operand:AINT 0 "int_reg_operand" "")		;; output
   (match_operand:AINT 1 "memory_operand" "")		;; memory
   (match_operand:AINT 2 "int_reg_operand" "")		;; operand
   (match_operand:SI 3 "const_int_operand" "")]		;; model
  ""
{
  rs6000_expand_atomic_op (NOT, operands[1], operands[2],
                           NULL_RTX, operands[0], operands[3]);
  DONE;
})