/*===---- bmiintrin.h - Implementation of BMI intrinsics on PowerPC --------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

#if !defined X86GPRINTRIN_H_
#error "Never use <bmiintrin.h> directly; include <x86gprintrin.h> instead."
#endif

#ifndef BMIINTRIN_H_
#define BMIINTRIN_H_
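/* Count trailing zero bits in a 16-bit value (x86 TZCNT).  As written this
   relies on __builtin_ctz, so the result for a zero input is undefined.  */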
extern __inline unsigned short
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __tzcnt_u16(unsigned short __X) {
  return __builtin_ctz(__X);
}
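/* Bitwise AND of __Y with the complement of __X (x86 ANDN).  */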
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __andn_u32(unsigned int __X, unsigned int __Y) {
  return (~__X & __Y);
}
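/* Extract __L contiguous bits from __X starting at bit position __P (x86
   BEXTR).  For example, _bextr_u32(0x12345678, 4, 8) selects bits 4..11 and
   yields 0x67.  */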
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _bextr_u32(unsigned int __X, unsigned int __P, unsigned int __L) {
  return ((__X << (32 - (__L + __P))) >> (32 - __L));
}
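/* Same extraction as _bextr_u32, with the start position packed into bits 0-7
   of __Y and the length into bits 8-15.  */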
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __bextr_u32(unsigned int __X, unsigned int __Y) {
  unsigned int __P, __L;
  __P = __Y & 0xFF;
  __L = (__Y >> 8) & 0xFF;
  return (_bextr_u32(__X, __P, __L));
}
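/* Isolate the lowest set bit of __X (x86 BLSI), e.g. __blsi_u32(0x18) == 0x08.  */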
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsi_u32(unsigned int __X) {
  return (__X & -__X);
}

extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsi_u32(unsigned int __X) {
  return __blsi_u32(__X);
}
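/* Build a mask up to and including the lowest set bit of __X (x86 BLSMSK),
   e.g. __blsmsk_u32(0x18) == 0x0F.  */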
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsmsk_u32(unsigned int __X) {
  return (__X ^ (__X - 1));
}

extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsmsk_u32(unsigned int __X) {
  return __blsmsk_u32(__X);
}
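/* Clear the lowest set bit of __X (x86 BLSR), e.g. __blsr_u32(0x18) == 0x10.  */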
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsr_u32(unsigned int __X) {
  return (__X & (__X - 1));
}

extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsr_u32(unsigned int __X) {
  return __blsr_u32(__X);
}
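/* Count trailing zero bits in a 32-bit value (x86 TZCNT).  */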
extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __tzcnt_u32(unsigned int __X) {
  return __builtin_ctz(__X);
}

extern __inline unsigned int
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _tzcnt_u32(unsigned int __X) {
  return __builtin_ctz(__X);
}

/* use the 64-bit shift, rotate, and count leading zeros instructions
   for long long.  */
#ifdef __PPC64__
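/* 64-bit ANDN: bitwise AND of __Y with the complement of __X.  */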
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __andn_u64(unsigned long long __X, unsigned long long __Y) {
  return (~__X & __Y);
}
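/* 64-bit BEXTR: extract __L contiguous bits from __X starting at bit __P.  */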
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _bextr_u64(unsigned long long __X, unsigned int __P, unsigned int __L) {
  return ((__X << (64 - (__L + __P))) >> (64 - __L));
}
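/* 64-bit BEXTR with the start position in bits 0-7 of __Y and the length in
   bits 8-15.  */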
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __bextr_u64(unsigned long long __X, unsigned long long __Y) {
  unsigned int __P, __L;
  __P = __Y & 0xFF;
  __L = (__Y & 0xFF00) >> 8;
  return (_bextr_u64(__X, __P, __L));
}
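/* 64-bit BLSI: isolate the lowest set bit of __X.  */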
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsi_u64(unsigned long long __X) {
  return __X & -__X;
}

extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsi_u64(unsigned long long __X) {
  return __blsi_u64(__X);
}
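/* 64-bit BLSMSK: mask up to and including the lowest set bit of __X.  */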
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsmsk_u64(unsigned long long __X) {
  return (__X ^ (__X - 1));
}

extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsmsk_u64(unsigned long long __X) {
  return __blsmsk_u64(__X);
}
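/* 64-bit BLSR: clear the lowest set bit of __X.  */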
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __blsr_u64(unsigned long long __X) {
  return (__X & (__X - 1));
}

extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _blsr_u64(unsigned long long __X) {
  return __blsr_u64(__X);
}
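/* 64-bit TZCNT: count trailing zero bits via __builtin_ctzll.  */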
extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    __tzcnt_u64(unsigned long long __X) {
  return __builtin_ctzll(__X);
}

extern __inline unsigned long long
    __attribute__((__gnu_inline__, __always_inline__, __artificial__))
    _tzcnt_u64(unsigned long long __X) {
  return __builtin_ctzll(__X);
}
#endif /* __PPC64__  */

#endif /* BMIINTRIN_H_ */