Diffstat (limited to 'ANDROID_3.4.5/include/asm-generic/bitops')
17 files changed, 0 insertions, 841 deletions
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/__ffs.h b/ANDROID_3.4.5/include/asm-generic/bitops/__ffs.h
deleted file mode 100644
index 937d7c43..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/__ffs.h
+++ /dev/null
@@ -1,43 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS___FFS_H_
-#define _ASM_GENERIC_BITOPS___FFS_H_
-
-#include <asm/types.h>
-
-/**
- * __ffs - find first set bit in word
- * @word: The word to search
- *
- * Undefined if no bit exists, so code should check against 0 first.
- */
-static __always_inline unsigned long __ffs(unsigned long word)
-{
-        int num = 0;
-
-#if BITS_PER_LONG == 64
-        if ((word & 0xffffffff) == 0) {
-                num += 32;
-                word >>= 32;
-        }
-#endif
-        if ((word & 0xffff) == 0) {
-                num += 16;
-                word >>= 16;
-        }
-        if ((word & 0xff) == 0) {
-                num += 8;
-                word >>= 8;
-        }
-        if ((word & 0xf) == 0) {
-                num += 4;
-                word >>= 4;
-        }
-        if ((word & 0x3) == 0) {
-                num += 2;
-                word >>= 2;
-        }
-        if ((word & 0x1) == 0)
-                num += 1;
-        return num;
-}
-
-#endif /* _ASM_GENERIC_BITOPS___FFS_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/__fls.h b/ANDROID_3.4.5/include/asm-generic/bitops/__fls.h
deleted file mode 100644
index a60a7ccb..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/__fls.h
+++ /dev/null
@@ -1,43 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS___FLS_H_
-#define _ASM_GENERIC_BITOPS___FLS_H_
-
-#include <asm/types.h>
-
-/**
- * __fls - find last (most-significant) set bit in a long word
- * @word: the word to search
- *
- * Undefined if no set bit exists, so code should check against 0 first.
- */
-static __always_inline unsigned long __fls(unsigned long word)
-{
-        int num = BITS_PER_LONG - 1;
-
-#if BITS_PER_LONG == 64
-        if (!(word & (~0ul << 32))) {
-                num -= 32;
-                word <<= 32;
-        }
-#endif
-        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
-                num -= 16;
-                word <<= 16;
-        }
-        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
-                num -= 8;
-                word <<= 8;
-        }
-        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
-                num -= 4;
-                word <<= 4;
-        }
-        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
-                num -= 2;
-                word <<= 2;
-        }
-        if (!(word & (~0ul << (BITS_PER_LONG-1))))
-                num -= 1;
-        return num;
-}
-
-#endif /* _ASM_GENERIC_BITOPS___FLS_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/arch_hweight.h b/ANDROID_3.4.5/include/asm-generic/bitops/arch_hweight.h
deleted file mode 100644
index 6a211f40..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/arch_hweight.h
+++ /dev/null
@@ -1,25 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_
-#define _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_
-
-#include <asm/types.h>
-
-static inline unsigned int __arch_hweight32(unsigned int w)
-{
-        return __sw_hweight32(w);
-}
-
-static inline unsigned int __arch_hweight16(unsigned int w)
-{
-        return __sw_hweight16(w);
-}
-
-static inline unsigned int __arch_hweight8(unsigned int w)
-{
-        return __sw_hweight8(w);
-}
-
-static inline unsigned long __arch_hweight64(__u64 w)
-{
-        return __sw_hweight64(w);
-}
-#endif /* _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/atomic.h b/ANDROID_3.4.5/include/asm-generic/bitops/atomic.h
deleted file mode 100644
index 9ae6c34d..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/atomic.h
+++ /dev/null
@@ -1,189 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
-#define _ASM_GENERIC_BITOPS_ATOMIC_H_
-
-#include <asm/types.h>
-#include <linux/irqflags.h>
-
-#ifdef CONFIG_SMP
-#include <asm/spinlock.h>
-#include <asm/cache.h>          /* we use L1_CACHE_BYTES */
-
-/* Use an array of spinlocks for our atomic_ts.
- * Hash function to index into a different SPINLOCK.
- * Since "a" is usually an address, use one spinlock per cacheline.
- */
-# define ATOMIC_HASH_SIZE 4
-# define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) a)/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))
-
-extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
-
-/* Can't use raw_spin_lock_irq because of #include problems, so
- * this is the substitute */
-#define _atomic_spin_lock_irqsave(l,f) do {     \
-        arch_spinlock_t *s = ATOMIC_HASH(l);    \
-        local_irq_save(f);                      \
-        arch_spin_lock(s);                      \
-} while(0)
-
-#define _atomic_spin_unlock_irqrestore(l,f) do {        \
-        arch_spinlock_t *s = ATOMIC_HASH(l);            \
-        arch_spin_unlock(s);                            \
-        local_irq_restore(f);                           \
-} while(0)
-
-
-#else
-# define _atomic_spin_lock_irqsave(l,f) do { local_irq_save(f); } while (0)
-# define _atomic_spin_unlock_irqrestore(l,f) do { local_irq_restore(f); } while (0)
-#endif
-
-/*
- * NMI events can occur at any time, including when interrupts have been
- * disabled by *_irqsave().  So you can get NMI events occurring while a
- * *_bit function is holding a spin lock.  If the NMI handler also wants
- * to do bit manipulation (and they do) then you can get a deadlock
- * between the original caller of *_bit() and the NMI handler.
- *
- * by Keith Owens
- */
-
-/**
- * set_bit - Atomically set a bit in memory
- * @nr: the bit to set
- * @addr: the address to start counting from
- *
- * This function is atomic and may not be reordered.  See __set_bit()
- * if you do not require the atomic guarantees.
- *
- * Note: there are no guarantees that this function will not be reordered
- * on non x86 architectures, so if you are writing portable code,
- * make sure not to rely on its reordering guarantees.
- *
- * Note that @nr may be almost arbitrarily large; this function is not
- * restricted to acting on a single-word quantity.
- */
-static inline void set_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        *p |= mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-}
-
-/**
- * clear_bit - Clears a bit in memory
- * @nr: Bit to clear
- * @addr: Address to start counting from
- *
- * clear_bit() is atomic and may not be reordered.  However, it does
- * not contain a memory barrier, so if it is used for locking purposes,
- * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
- * in order to ensure changes are visible on other processors.
- */
-static inline void clear_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        *p &= ~mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-}
-
-/**
- * change_bit - Toggle a bit in memory
- * @nr: Bit to change
- * @addr: Address to start counting from
- *
- * change_bit() is atomic and may not be reordered. It may be
- * reordered on architectures other than x86.
- * Note that @nr may be almost arbitrarily large; this function is not
- * restricted to acting on a single-word quantity.
- */
-static inline void change_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        *p ^= mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-}
-
-/**
- * test_and_set_bit - Set a bit and return its old value
- * @nr: Bit to set
- * @addr: Address to count from
- *
- * This operation is atomic and cannot be reordered.
- * It may be reordered on architectures other than x86.
- * It also implies a memory barrier.
- */
-static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old;
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        old = *p;
-        *p = old | mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-
-        return (old & mask) != 0;
-}
-
-/**
- * test_and_clear_bit - Clear a bit and return its old value
- * @nr: Bit to clear
- * @addr: Address to count from
- *
- * This operation is atomic and cannot be reordered.
- * It may be reordered on architectures other than x86.
- * It also implies a memory barrier.
- */
-static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old;
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        old = *p;
-        *p = old & ~mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-
-        return (old & mask) != 0;
-}
-
-/**
- * test_and_change_bit - Change a bit and return its old value
- * @nr: Bit to change
- * @addr: Address to count from
- *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
- */
-static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old;
-        unsigned long flags;
-
-        _atomic_spin_lock_irqsave(p, flags);
-        old = *p;
-        *p = old ^ mask;
-        _atomic_spin_unlock_irqrestore(p, flags);
-
-        return (old & mask) != 0;
-}
-
-#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/const_hweight.h b/ANDROID_3.4.5/include/asm-generic/bitops/const_hweight.h
deleted file mode 100644
index fa2a50b7..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/const_hweight.h
+++ /dev/null
@@ -1,42 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_
-#define _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_
-
-/*
- * Compile time versions of __arch_hweightN()
- */
-#define __const_hweight8(w)             \
-        ( (!!((w) & (1ULL << 0))) +     \
-          (!!((w) & (1ULL << 1))) +     \
-          (!!((w) & (1ULL << 2))) +     \
-          (!!((w) & (1ULL << 3))) +     \
-          (!!((w) & (1ULL << 4))) +     \
-          (!!((w) & (1ULL << 5))) +     \
-          (!!((w) & (1ULL << 6))) +     \
-          (!!((w) & (1ULL << 7))) )
-
-#define __const_hweight16(w) (__const_hweight8(w)  + __const_hweight8((w)  >> 8 ))
-#define __const_hweight32(w) (__const_hweight16(w) + __const_hweight16((w) >> 16))
-#define __const_hweight64(w) (__const_hweight32(w) + __const_hweight32((w) >> 32))
-
-/*
- * Generic interface.
- */
-#define hweight8(w)  (__builtin_constant_p(w) ? __const_hweight8(w)  : __arch_hweight8(w))
-#define hweight16(w) (__builtin_constant_p(w) ? __const_hweight16(w) : __arch_hweight16(w))
-#define hweight32(w) (__builtin_constant_p(w) ? __const_hweight32(w) : __arch_hweight32(w))
-#define hweight64(w) (__builtin_constant_p(w) ? __const_hweight64(w) : __arch_hweight64(w))
-
-/*
- * Interface for known constant arguments
- */
-#define HWEIGHT8(w)  (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight8(w))
-#define HWEIGHT16(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight16(w))
-#define HWEIGHT32(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight32(w))
-#define HWEIGHT64(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight64(w))
-
-/*
- * Type invariant interface to the compile time constant hweight functions.
- */
-#define HWEIGHT(w)   HWEIGHT64((u64)w)
-
-#endif /* _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic-setbit.h b/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic-setbit.h
deleted file mode 100644
index 5a099785..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic-setbit.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_
-#define _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_
-
-/*
- * Atomic bitops based version of ext2 atomic bitops
- */
-
-#define ext2_set_bit_atomic(l, nr, addr)        test_and_set_bit_le(nr, addr)
-#define ext2_clear_bit_atomic(l, nr, addr)      test_and_clear_bit_le(nr, addr)
-
-#endif /* _ASM_GENERIC_BITOPS_EXT2_ATOMIC_SETBIT_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic.h b/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic.h
deleted file mode 100644
index 87f0f109..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/ext2-atomic.h
+++ /dev/null
@@ -1,26 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_
-#define _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_
-
-/*
- * Spinlock based version of ext2 atomic bitops
- */
-
-#define ext2_set_bit_atomic(lock, nr, addr)             \
-        ({                                              \
-                int ret;                                \
-                spin_lock(lock);                        \
-                ret = __test_and_set_bit_le(nr, addr);  \
-                spin_unlock(lock);                      \
-                ret;                                    \
-        })
-
-#define ext2_clear_bit_atomic(lock, nr, addr)           \
-        ({                                              \
-                int ret;                                \
-                spin_lock(lock);                        \
-                ret = __test_and_clear_bit_le(nr, addr); \
-                spin_unlock(lock);                      \
-                ret;                                    \
-        })
-
-#endif /* _ASM_GENERIC_BITOPS_EXT2_ATOMIC_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/ffs.h b/ANDROID_3.4.5/include/asm-generic/bitops/ffs.h
deleted file mode 100644
index fbbb43af..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/ffs.h
+++ /dev/null
@@ -1,41 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_FFS_H_
-#define _ASM_GENERIC_BITOPS_FFS_H_
-
-/**
- * ffs - find first bit set
- * @x: the word to search
- *
- * This is defined the same way as
- * the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
- */
-static inline int ffs(int x)
-{
-        int r = 1;
-
-        if (!x)
-                return 0;
-        if (!(x & 0xffff)) {
-                x >>= 16;
-                r += 16;
-        }
-        if (!(x & 0xff)) {
-                x >>= 8;
-                r += 8;
-        }
-        if (!(x & 0xf)) {
-                x >>= 4;
-                r += 4;
-        }
-        if (!(x & 3)) {
-                x >>= 2;
-                r += 2;
-        }
-        if (!(x & 1)) {
-                x >>= 1;
-                r += 1;
-        }
-        return r;
-}
-
-#endif /* _ASM_GENERIC_BITOPS_FFS_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/ffz.h b/ANDROID_3.4.5/include/asm-generic/bitops/ffz.h
deleted file mode 100644
index 6744bd4c..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/ffz.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_FFZ_H_
-#define _ASM_GENERIC_BITOPS_FFZ_H_
-
-/*
- * ffz - find first zero in word.
- * @word: The word to search
- *
- * Undefined if no zero exists, so code should check against ~0UL first.
- */
-#define ffz(x)  __ffs(~(x))
-
-#endif /* _ASM_GENERIC_BITOPS_FFZ_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/find.h b/ANDROID_3.4.5/include/asm-generic/bitops/find.h
deleted file mode 100644
index 71c77803..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/find.h
+++ /dev/null
@@ -1,54 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_FIND_H_
-#define _ASM_GENERIC_BITOPS_FIND_H_
-
-#ifndef find_next_bit
-/**
- * find_next_bit - find the next set bit in a memory region
- * @addr: The address to base the search on
- * @offset: The bitnumber to start searching at
- * @size: The bitmap size in bits
- */
-extern unsigned long find_next_bit(const unsigned long *addr, unsigned long
-                size, unsigned long offset);
-#endif
-
-#ifndef find_next_zero_bit
-/**
- * find_next_zero_bit - find the next cleared bit in a memory region
- * @addr: The address to base the search on
- * @offset: The bitnumber to start searching at
- * @size: The bitmap size in bits
- */
-extern unsigned long find_next_zero_bit(const unsigned long *addr, unsigned
-                long size, unsigned long offset);
-#endif
-
-#ifdef CONFIG_GENERIC_FIND_FIRST_BIT
-
-/**
- * find_first_bit - find the first set bit in a memory region
- * @addr: The address to start the search at
- * @size: The maximum size to search
- *
- * Returns the bit number of the first set bit.
- */
-extern unsigned long find_first_bit(const unsigned long *addr,
-                                    unsigned long size);
-
-/**
- * find_first_zero_bit - find the first cleared bit in a memory region
- * @addr: The address to start the search at
- * @size: The maximum size to search
- *
- * Returns the bit number of the first cleared bit.
- */
-extern unsigned long find_first_zero_bit(const unsigned long *addr,
-                                         unsigned long size);
-#else /* CONFIG_GENERIC_FIND_FIRST_BIT */
-
-#define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
-#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
-
-#endif /* CONFIG_GENERIC_FIND_FIRST_BIT */
-
-#endif /* _ASM_GENERIC_BITOPS_FIND_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/fls.h b/ANDROID_3.4.5/include/asm-generic/bitops/fls.h
deleted file mode 100644
index 0576d1f4..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/fls.h
+++ /dev/null
@@ -1,41 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_FLS_H_
-#define _ASM_GENERIC_BITOPS_FLS_H_
-
-/**
- * fls - find last (most-significant) bit set
- * @x: the word to search
- *
- * This is defined the same way as ffs.
- * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
- */
-
-static __always_inline int fls(int x)
-{
-        int r = 32;
-
-        if (!x)
-                return 0;
-        if (!(x & 0xffff0000u)) {
-                x <<= 16;
-                r -= 16;
-        }
-        if (!(x & 0xff000000u)) {
-                x <<= 8;
-                r -= 8;
-        }
-        if (!(x & 0xf0000000u)) {
-                x <<= 4;
-                r -= 4;
-        }
-        if (!(x & 0xc0000000u)) {
-                x <<= 2;
-                r -= 2;
-        }
-        if (!(x & 0x80000000u)) {
-                x <<= 1;
-                r -= 1;
-        }
-        return r;
-}
-
-#endif /* _ASM_GENERIC_BITOPS_FLS_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/fls64.h b/ANDROID_3.4.5/include/asm-generic/bitops/fls64.h
deleted file mode 100644
index b097cf84..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/fls64.h
+++ /dev/null
@@ -1,36 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_FLS64_H_
-#define _ASM_GENERIC_BITOPS_FLS64_H_
-
-#include <asm/types.h>
-
-/**
- * fls64 - find last set bit in a 64-bit word
- * @x: the word to search
- *
- * This is defined in a similar way as the libc and compiler builtin
- * ffsll, but returns the position of the most significant set bit.
- *
- * fls64(value) returns 0 if value is 0 or the position of the last
- * set bit if value is nonzero. The last (most significant) bit is
- * at position 64.
- */
-#if BITS_PER_LONG == 32
-static __always_inline int fls64(__u64 x)
-{
-        __u32 h = x >> 32;
-        if (h)
-                return fls(h) + 32;
-        return fls(x);
-}
-#elif BITS_PER_LONG == 64
-static __always_inline int fls64(__u64 x)
-{
-        if (x == 0)
-                return 0;
-        return __fls(x) + 1;
-}
-#else
-#error BITS_PER_LONG not 32 or 64
-#endif
-
-#endif /* _ASM_GENERIC_BITOPS_FLS64_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/hweight.h b/ANDROID_3.4.5/include/asm-generic/bitops/hweight.h
deleted file mode 100644
index a94d6519..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/hweight.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_HWEIGHT_H_
-#define _ASM_GENERIC_BITOPS_HWEIGHT_H_
-
-#include <asm-generic/bitops/arch_hweight.h>
-#include <asm-generic/bitops/const_hweight.h>
-
-#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/le.h b/ANDROID_3.4.5/include/asm-generic/bitops/le.h
deleted file mode 100644
index f95c663a..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/le.h
+++ /dev/null
@@ -1,87 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_LE_H_
-#define _ASM_GENERIC_BITOPS_LE_H_
-
-#include <asm/types.h>
-#include <asm/byteorder.h>
-
-#if defined(__LITTLE_ENDIAN)
-
-#define BITOP_LE_SWIZZLE        0
-
-static inline unsigned long find_next_zero_bit_le(const void *addr,
-                unsigned long size, unsigned long offset)
-{
-        return find_next_zero_bit(addr, size, offset);
-}
-
-static inline unsigned long find_next_bit_le(const void *addr,
-                unsigned long size, unsigned long offset)
-{
-        return find_next_bit(addr, size, offset);
-}
-
-static inline unsigned long find_first_zero_bit_le(const void *addr,
-                unsigned long size)
-{
-        return find_first_zero_bit(addr, size);
-}
-
-#elif defined(__BIG_ENDIAN)
-
-#define BITOP_LE_SWIZZLE        ((BITS_PER_LONG-1) & ~0x7)
-
-#ifndef find_next_zero_bit_le
-extern unsigned long find_next_zero_bit_le(const void *addr,
-                unsigned long size, unsigned long offset);
-#endif
-
-#ifndef find_next_bit_le
-extern unsigned long find_next_bit_le(const void *addr,
-                unsigned long size, unsigned long offset);
-#endif
-
-#ifndef find_first_zero_bit_le
-#define find_first_zero_bit_le(addr, size) \
-        find_next_zero_bit_le((addr), (size), 0)
-#endif
-
-#else
-#error "Please fix <asm/byteorder.h>"
-#endif
-
-static inline int test_bit_le(int nr, const void *addr)
-{
-        return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline void __set_bit_le(int nr, void *addr)
-{
-        __set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline void __clear_bit_le(int nr, void *addr)
-{
-        __clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline int test_and_set_bit_le(int nr, void *addr)
-{
-        return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline int test_and_clear_bit_le(int nr, void *addr)
-{
-        return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline int __test_and_set_bit_le(int nr, void *addr)
-{
-        return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-static inline int __test_and_clear_bit_le(int nr, void *addr)
-{
-        return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
-}
-
-#endif /* _ASM_GENERIC_BITOPS_LE_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/lock.h b/ANDROID_3.4.5/include/asm-generic/bitops/lock.h
deleted file mode 100644
index 308a9e22..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/lock.h
+++ /dev/null
@@ -1,45 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_LOCK_H_
-#define _ASM_GENERIC_BITOPS_LOCK_H_
-
-/**
- * test_and_set_bit_lock - Set a bit and return its old value, for lock
- * @nr: Bit to set
- * @addr: Address to count from
- *
- * This operation is atomic and provides acquire barrier semantics.
- * It can be used to implement bit locks.
- */
-#define test_and_set_bit_lock(nr, addr)         test_and_set_bit(nr, addr)
-
-/**
- * clear_bit_unlock - Clear a bit in memory, for unlock
- * @nr: the bit to clear
- * @addr: the address to start counting from
- *
- * This operation is atomic and provides release barrier semantics.
- */
-#define clear_bit_unlock(nr, addr)      \
-do {                                    \
-        smp_mb__before_clear_bit();     \
-        clear_bit(nr, addr);            \
-} while (0)
-
-/**
- * __clear_bit_unlock - Clear a bit in memory, for unlock
- * @nr: the bit to clear
- * @addr: the address to start counting from
- *
- * This operation is like clear_bit_unlock, however it is not atomic.
- * It does provide release barrier semantics so it can be used to unlock
- * a bit lock, however it would only be used if no other CPU can modify
- * any bits in the memory until the lock is released (a good example is
- * if the bit lock itself protects access to the other bits in the word).
- */
-#define __clear_bit_unlock(nr, addr)    \
-do {                                    \
-        smp_mb();                       \
-        __clear_bit(nr, addr);          \
-} while (0)
-
-#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */
-
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/non-atomic.h b/ANDROID_3.4.5/include/asm-generic/bitops/non-atomic.h
deleted file mode 100644
index 697cc2b7..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/non-atomic.h
+++ /dev/null
@@ -1,108 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_NON_ATOMIC_H_
-#define _ASM_GENERIC_BITOPS_NON_ATOMIC_H_
-
-#include <asm/types.h>
-
-/**
- * __set_bit - Set a bit in memory
- * @nr: the bit to set
- * @addr: the address to start counting from
- *
- * Unlike set_bit(), this function is non-atomic and may be reordered.
- * If it's called on the same region of memory simultaneously, the effect
- * may be that only one operation succeeds.
- */
-static inline void __set_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-
-        *p |= mask;
-}
-
-static inline void __clear_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-
-        *p &= ~mask;
-}
-
-/**
- * __change_bit - Toggle a bit in memory
- * @nr: the bit to change
- * @addr: the address to start counting from
- *
- * Unlike change_bit(), this function is non-atomic and may be reordered.
- * If it's called on the same region of memory simultaneously, the effect
- * may be that only one operation succeeds.
- */
-static inline void __change_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-
-        *p ^= mask;
-}
-
-/**
- * __test_and_set_bit - Set a bit and return its old value
- * @nr: Bit to set
- * @addr: Address to count from
- *
- * This operation is non-atomic and can be reordered.
- * If two instances of this operation race, one can appear to succeed
- * but actually fail.  You must protect multiple accesses with a lock.
- */
-static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old = *p;
-
-        *p = old | mask;
-        return (old & mask) != 0;
-}
-
-/**
- * __test_and_clear_bit - Clear a bit and return its old value
- * @nr: Bit to clear
- * @addr: Address to count from
- *
- * This operation is non-atomic and can be reordered.
- * If two instances of this operation race, one can appear to succeed
- * but actually fail.  You must protect multiple accesses with a lock.
- */
-static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old = *p;
-
-        *p = old & ~mask;
-        return (old & mask) != 0;
-}
-
-/* WARNING: non atomic and it can be reordered! */
-static inline int __test_and_change_bit(int nr,
-                                        volatile unsigned long *addr)
-{
-        unsigned long mask = BIT_MASK(nr);
-        unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
-        unsigned long old = *p;
-
-        *p = old ^ mask;
-        return (old & mask) != 0;
-}
-
-/**
- * test_bit - Determine whether a bit is set
- * @nr: bit number to test
- * @addr: Address to start counting from
- */
-static inline int test_bit(int nr, const volatile unsigned long *addr)
-{
-        return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
-}
-
-#endif /* _ASM_GENERIC_BITOPS_NON_ATOMIC_H_ */
diff --git a/ANDROID_3.4.5/include/asm-generic/bitops/sched.h b/ANDROID_3.4.5/include/asm-generic/bitops/sched.h
deleted file mode 100644
index 604fab70..00000000
--- a/ANDROID_3.4.5/include/asm-generic/bitops/sched.h
+++ /dev/null
@@ -1,31 +0,0 @@
-#ifndef _ASM_GENERIC_BITOPS_SCHED_H_
-#define _ASM_GENERIC_BITOPS_SCHED_H_
-
-#include <linux/compiler.h>     /* unlikely() */
-#include <asm/types.h>
-
-/*
- * Every architecture must define this function. It's the fastest
- * way of searching a 100-bit bitmap.  It's guaranteed that at least
- * one of the 100 bits is set.
- */
-static inline int sched_find_first_bit(const unsigned long *b)
-{
-#if BITS_PER_LONG == 64
-        if (b[0])
-                return __ffs(b[0]);
-        return __ffs(b[1]) + 64;
-#elif BITS_PER_LONG == 32
-        if (b[0])
-                return __ffs(b[0]);
-        if (b[1])
-                return __ffs(b[1]) + 32;
-        if (b[2])
-                return __ffs(b[2]) + 64;
-        return __ffs(b[3]) + 96;
-#else
-#error BITS_PER_LONG not defined
-#endif
-}
-
-#endif /* _ASM_GENERIC_BITOPS_SCHED_H_ */
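Note on the documented semantics: the removed headers specify that ffs() is 1-based with ffs(0) == 0, __ffs() is 0-based and undefined for 0, fls(0) == 0 and fls(0x80000000) == 32, and hweightN() counts set bits. The snippet below is an illustrative userspace sketch of those conventions only; it is not part of this commit or of the kernel headers, and it assumes a GCC/Clang toolchain providing __builtin_ffs, __builtin_ctz, __builtin_clz and __builtin_popcount.

/* bitops_demo.c (hypothetical example, not kernel code):
 * reproduce the bit-search and popcount conventions documented above. */
#include <assert.h>
#include <stdio.h>

static int demo_fls(unsigned int x)
{
        /* fls(): 1-based position of the highest set bit, fls(0) == 0 */
        return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
        assert(__builtin_ffs(0) == 0);            /* ffs(): 1-based, 0 -> 0      */
        assert(__builtin_ffs(0x8) == 4);          /* lowest set bit of 0x8       */
        assert(__builtin_ctz(0x8) == 3);          /* __ffs()-style, 0-based      */
        assert(demo_fls(0) == 0);
        assert(demo_fls(1) == 1);
        assert(demo_fls(0x80000000u) == 32);      /* matches the fls() comment   */
        assert(__builtin_popcount(0xf0f0u) == 8); /* hweight16(0xf0f0) == 8      */
        printf("bitops semantics check passed\n");
        return 0;
}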