145
|
1 /* Copyright (C) 2010-2020 Free Software Foundation, Inc.
|
111
|
2
|
|
3 This file is part of GCC.
|
|
4
|
|
5 GCC is free software; you can redistribute it and/or modify
|
|
6 it under the terms of the GNU General Public License as published by
|
|
7 the Free Software Foundation; either version 3, or (at your option)
|
|
8 any later version.
|
|
9
|
|
10 GCC is distributed in the hope that it will be useful,
|
|
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
13 GNU General Public License for more details.
|
|
14
|
|
15 Under Section 7 of GPL version 3, you are granted additional
|
|
16 permissions described in the GCC Runtime Library Exception, version
|
|
17 3.1, as published by the Free Software Foundation.
|
|
18
|
|
19 You should have received a copy of the GNU General Public License and
|
|
20 a copy of the GCC Runtime Library Exception along with this program;
|
|
21 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
|
|
22 <http://www.gnu.org/licenses/>. */
|
|
23
|
|
24 /* This header is distributed to simplify porting x86_64 code that
|
|
25 makes explicit use of Intel intrinsics to powerpc64le.
|
|
26 It is the user's responsibility to determine if the results are
|
|
27 acceptable and make additional changes as necessary.
|
|
28 Note that much code that uses Intel intrinsics can be rewritten in
|
|
29 standard C or GNU C extensions, which are more portable and better
|
|
30 optimized across multiple targets. */
|
|
31
|
|
32 #if !defined _X86INTRIN_H_INCLUDED
|
|
33 # error "Never use <bmiintrin.h> directly; include <x86intrin.h> instead."
|
|
34 #endif
|
|
35
|
|
36 #ifndef _BMIINTRIN_H_INCLUDED
|
|
37 #define _BMIINTRIN_H_INCLUDED
|
|
38
|
|
/* Count trailing zero bits of a 16-bit value, matching the x86 TZCNT
   semantics: a zero input yields the operand width (16).  The original
   unconditionally called __builtin_ctz, whose result is undefined for a
   zero argument; the TZCNT instruction is specified to return the
   operand size in that case.  */
extern __inline unsigned short __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__tzcnt_u16 (unsigned short __X)
{
  /* Guard the zero case: __builtin_ctz (0) is undefined behavior.  */
  return __X == 0 ? 16 : __builtin_ctz (__X);
}
/* BMI1 ANDN (32-bit): bitwise AND of __Y with the complement of __X.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__andn_u32 (unsigned int __X, unsigned int __Y)
{
  unsigned int __not_x = ~__X;
  return __Y & __not_x;
}
/* BMI1 BEXTR (32-bit): extract __L bits of __X starting at bit
   position __P.  Returns the extracted field right-justified.
   The original formula shifted by 32 (undefined behavior in C) when
   __L == 0, and by out-of-range counts when __L + __P > 32; the
   hardware instruction is well-defined for those inputs (a zero-length
   field is 0, bits beyond the operand read as zero), so guard them.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_bextr_u32 (unsigned int __X, unsigned int __P, unsigned int __L)
{
  /* Zero-length field, or start past the top bit, selects no bits.  */
  if (__L == 0 || __P > 31)
    return 0;
  /* Field runs past bit 31: upper bits read as zero, so a plain right
     shift is the result.  (No wraparound: __P <= 31 here.)  */
  if (__L > 32 - __P)
    return __X >> __P;
  /* Common case: both shift counts are in [0, 31].  */
  return ((__X << (32 - (__L + __P))) >> (32 - __L));
}
/* BMI1 BEXTR (32-bit), packed-control form: the low byte of __Y is the
   start position, the second byte the field length.  Delegates to
   _bextr_u32 for the actual extraction.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__bextr_u32 (unsigned int __X, unsigned int __Y)
{
  unsigned int __start = __Y & 0xFF;
  unsigned int __len = (__Y & 0xFF00) >> 8;
  return _bextr_u32 (__X, __start, __len);
}
/* BMI1 BLSI (32-bit): isolate the lowest set bit of __X.
   Yields 0 when __X is 0.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsi_u32 (unsigned int __X)
{
  /* Two's-complement negation flips every bit above the lowest set
     bit, so ANDing keeps exactly that bit.  */
  unsigned int __neg = 0U - __X;
  return __X & __neg;
}
/* _blsi_u32: Intel-style alias for BLSI — isolate the lowest set bit
   of __X (0 when __X is 0).  Computed directly instead of forwarding
   to __blsi_u32; the result is identical.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsi_u32 (unsigned int __X)
{
  return __X & (0U - __X);
}
/* BMI1 BLSMSK (32-bit): mask of all bits up to and including the
   lowest set bit of __X (all ones when __X is 0).  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsmsk_u32 (unsigned int __X)
{
  unsigned int __below = __X - 1;
  return __X ^ __below;
}
/* _blsmsk_u32: Intel-style alias for BLSMSK — mask up to and including
   the lowest set bit of __X.  Computed directly instead of forwarding
   to __blsmsk_u32; the result is identical.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsmsk_u32 (unsigned int __X)
{
  return __X ^ (__X - 1);
}
/* BMI1 BLSR (32-bit): clear the lowest set bit of __X
   (0 stays 0).  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsr_u32 (unsigned int __X)
{
  unsigned int __below = __X - 1;
  return __X & __below;
}
/* _blsr_u32: Intel-style alias for BLSR — clear the lowest set bit of
   __X.  Computed directly instead of forwarding to __blsr_u32; the
   result is identical.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsr_u32 (unsigned int __X)
{
  return __X & (__X - 1);
}
/* Count trailing zero bits of a 32-bit value, matching the x86 TZCNT
   semantics: a zero input yields 32.  The original unconditionally
   called __builtin_ctz, which is undefined for a zero argument.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__tzcnt_u32 (unsigned int __X)
{
  /* Guard the zero case: __builtin_ctz (0) is undefined behavior.  */
  return __X == 0 ? 32 : __builtin_ctz (__X);
}
/* _tzcnt_u32: Intel-style alias for 32-bit TZCNT.  A zero input yields
   32 per the instruction's specification; the original called
   __builtin_ctz unconditionally, which is undefined for zero.  */
extern __inline unsigned int __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_tzcnt_u32 (unsigned int __X)
{
  /* Guard the zero case: __builtin_ctz (0) is undefined behavior.  */
  return __X == 0 ? 32 : __builtin_ctz (__X);
}
114 /* use the 64-bit shift, rotate, and count leading zeros instructions
|
|
115 for long long. */
|
|
116 #ifdef __PPC64__
|
|
/* BMI1 ANDN (64-bit): bitwise AND of __Y with the complement of __X.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__andn_u64 (unsigned long long __X, unsigned long long __Y)
{
  unsigned long long __not_x = ~__X;
  return __Y & __not_x;
}
/* BMI1 BEXTR (64-bit): extract __L bits of __X starting at bit
   position __P.  Returns the extracted field right-justified.
   The original formula shifted by 64 (undefined behavior in C) when
   __L == 0, and by out-of-range counts when __L + __P > 64; the
   hardware instruction is well-defined for those inputs (a zero-length
   field is 0, bits beyond the operand read as zero), so guard them.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_bextr_u64 (unsigned long long __X, unsigned int __P, unsigned int __L)
{
  /* Zero-length field, or start past the top bit, selects no bits.  */
  if (__L == 0 || __P > 63)
    return 0;
  /* Field runs past bit 63: upper bits read as zero, so a plain right
     shift is the result.  (No wraparound: __P <= 63 here.)  */
  if (__L > 64 - __P)
    return __X >> __P;
  /* Common case: both shift counts are in [0, 63].  */
  return ((__X << (64 - (__L + __P))) >> (64 - __L));
}
/* BMI1 BEXTR (64-bit), packed-control form: the low byte of __Y is the
   start position, the second byte the field length.  Delegates to
   _bextr_u64 for the actual extraction.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__bextr_u64 (unsigned long long __X, unsigned long long __Y)
{
  unsigned int __start = __Y & 0xFF;
  unsigned int __len = (__Y >> 8) & 0xFF;
  return _bextr_u64 (__X, __start, __len);
}
/* BMI1 BLSI (64-bit): isolate the lowest set bit of __X.
   Yields 0 when __X is 0.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsi_u64 (unsigned long long __X)
{
  /* Two's-complement negation flips every bit above the lowest set
     bit, so ANDing keeps exactly that bit.  */
  unsigned long long __neg = 0ULL - __X;
  return __X & __neg;
}
/* _blsi_u64: Intel-style alias for 64-bit BLSI — isolate the lowest
   set bit of __X (0 when __X is 0).  Computed directly instead of
   forwarding to __blsi_u64; the result is identical.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsi_u64 (unsigned long long __X)
{
  return __X & (0ULL - __X);
}
/* BMI1 BLSMSK (64-bit): mask of all bits up to and including the
   lowest set bit of __X (all ones when __X is 0).  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsmsk_u64 (unsigned long long __X)
{
  unsigned long long __below = __X - 1;
  return __X ^ __below;
}
/* _blsmsk_u64: Intel-style alias for 64-bit BLSMSK — mask up to and
   including the lowest set bit of __X.  Computed directly instead of
   forwarding to __blsmsk_u64; the result is identical.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsmsk_u64 (unsigned long long __X)
{
  return __X ^ (__X - 1);
}
/* BMI1 BLSR (64-bit): clear the lowest set bit of __X
   (0 stays 0).  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__blsr_u64 (unsigned long long __X)
{
  unsigned long long __below = __X - 1;
  return __X & __below;
}
/* _blsr_u64: Intel-style alias for 64-bit BLSR — clear the lowest set
   bit of __X.  Computed directly instead of forwarding to __blsr_u64;
   the result is identical.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_blsr_u64 (unsigned long long __X)
{
  return __X & (__X - 1);
}
/* Count trailing zero bits of a 64-bit value, matching the x86 TZCNT
   semantics: a zero input yields 64.  The original unconditionally
   called __builtin_ctzll, which is undefined for a zero argument.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
__tzcnt_u64 (unsigned long long __X)
{
  /* Guard the zero case: __builtin_ctzll (0) is undefined behavior.  */
  return __X == 0 ? 64 : __builtin_ctzll (__X);
}
/* _tzcnt_u64: Intel-style alias for 64-bit TZCNT.  A zero input yields
   64 per the instruction's specification; the original called
   __builtin_ctzll unconditionally, which is undefined for zero.  */
extern __inline unsigned long long __attribute__((__gnu_inline__, __always_inline__, __artificial__))
_tzcnt_u64 (unsigned long long __X)
{
  /* Guard the zero case: __builtin_ctzll (0) is undefined behavior.  */
  return __X == 0 ? 64 : __builtin_ctzll (__X);
}
185 #endif /* __PPC64__ */
|
|
186
|
|
187 #endif /* _BMIINTRIN_H_INCLUDED */
|