/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/export.h>
#include <linux/linkage.h>
#include <linux/objtool.h>

#include <asm/asm.h>

/*
 * unsigned int __sw_hweight32(unsigned int w)
 * %rdi: w (on 32-bit, w arrives in %eax instead; see the #ifdef below)
 */
SYM_FUNC_START(__sw_hweight32)
	ANNOTATE_NOENDBR

#ifdef CONFIG_X86_64
	movl %edi, %eax				# w
#endif
	/* The call site only expects %eax/%rax to be clobbered: preserve %edx/%rdx. */
	__ASM_SIZE(push,) %__ASM_REG(dx)

	/* Step 1: 2-bit partial counts: w -= (w >> 1) & 0x55555555 */
	movl %eax, %edx				# w -> t
	shrl %edx				# t >>= 1
	andl $0x55555555, %edx			# t &= 0x55555555
	subl %edx, %eax				# w -= t

	/* Step 2: 4-bit partial counts: w = (w & 0x33333333) + ((w >> 2) & 0x33333333) */
	movl %eax, %edx				# w -> t
	shrl $2, %eax				# w_tmp >>= 2
	andl $0x33333333, %edx			# t     &= 0x33333333
	andl $0x33333333, %eax			# w_tmp &= 0x33333333
	addl %edx, %eax				# w = w_tmp + t

	/* Step 3: per-byte counts, then sum them: multiplying by 0x01010101
	 * adds all four byte counts into the top byte, which the final
	 * shift extracts. */
	movl %eax, %edx				# w -> t
	shrl $4, %edx				# t >>= 4
	addl %edx, %eax				# w_tmp += t
	andl $0x0f0f0f0f, %eax			# w_tmp &= 0x0f0f0f0f
	imull $0x01010101, %eax, %eax		# w_tmp *= 0x01010101
	shrl $24, %eax				# w = w_tmp >> 24
	__ASM_SIZE(pop,) %__ASM_REG(dx)
	RET
SYM_FUNC_END(__sw_hweight32)
EXPORT_SYMBOL(__sw_hweight32)
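
/*
 * For reference, a rough C equivalent of the SWAR sequence above (an
 * illustrative sketch, not code from this file; the function name
 * sw_hweight32_ref is made up):
 *
 *	static unsigned int sw_hweight32_ref(unsigned int w)
 *	{
 *		w -= (w >> 1) & 0x55555555;				// 2-bit counts
 *		w  = (w & 0x33333333) + ((w >> 2) & 0x33333333);	// 4-bit counts
 *		w  = (w + (w >> 4)) & 0x0f0f0f0f;			// per-byte counts
 *		return (w * 0x01010101) >> 24;				// sum of the bytes
 *	}
 *
 * Worked example, w = 0xdeadbeef: after the third step the per-byte
 * counts are 0x06050607; multiplying by 0x01010101 puts 6+5+6+7 = 24
 * into the top byte, and the shift by 24 returns 24.
 */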

/*
 * No 32-bit variant of __sw_hweight64: on 32-bit kernels,
 * __arch_hweight64() is an inline wrapper on top of __arch_hweight32():
 */
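
/*
 * That wrapper looks roughly like this (illustrative sketch modeled on
 * arch/x86/include/asm/arch_hweight.h, not code from this file):
 *
 *	static inline unsigned long __arch_hweight64(__u64 w)
 *	{
 *		return __arch_hweight32((u32)w) +
 *		       __arch_hweight32((u32)(w >> 32));
 *	}
 */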
#ifdef CONFIG_X86_64
SYM_FUNC_START(__sw_hweight64)
	ANNOTATE_NOENDBR
	/* Preserve everything but %rax; see the call-site note at the end. */
	pushq   %rdi
	pushq   %rdx

	/* Same three SWAR steps as above, widened to 64 bits; the mask
	 * loads are interleaved with the arithmetic. */
	movq    %rdi, %rdx                      # w -> t
	movabsq $0x5555555555555555, %rax
	shrq    %rdx                            # t >>= 1
	andq    %rdx, %rax                      # t &= 0x5555555555555555
	movabsq $0x3333333333333333, %rdx
	subq    %rax, %rdi                      # w -= t

	movq    %rdi, %rax                      # w -> t
	shrq    $2, %rdi                        # w_tmp >>= 2
	andq    %rdx, %rax                      # t     &= 0x3333333333333333
	andq    %rdi, %rdx                      # w_tmp &= 0x3333333333333333
	addq    %rdx, %rax                      # w = w_tmp + t

	movq    %rax, %rdx                      # w -> t
	shrq    $4, %rdx                        # t >>= 4
	addq    %rdx, %rax                      # w_tmp += t
	movabsq $0x0f0f0f0f0f0f0f0f, %rdx
	andq    %rdx, %rax                      # w_tmp &= 0x0f0f0f0f0f0f0f0f
	movabsq $0x0101010101010101, %rdx
	imulq   %rdx, %rax                      # w_tmp *= 0x0101010101010101
	shrq    $56, %rax                       # w = w_tmp >> 56

	popq    %rdx
	popq    %rdi
	RET
SYM_FUNC_END(__sw_hweight64)
EXPORT_SYMBOL(__sw_hweight64)
#endif
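
/*
 * Note on the calling convention: these fallbacks are reached via an
 * ALTERNATIVE() call site (see arch/x86/include/asm/arch_hweight.h)
 * that is patched to a native POPCNT instruction when the CPU has
 * X86_FEATURE_POPCNT. Roughly (an approximation; consult that header
 * for the exact constraints):
 *
 *	asm_inline (ALTERNATIVE("call __sw_hweight32",
 *				"popcntl %[val], %[cnt]", X86_FEATURE_POPCNT)
 *			: [cnt] "=a" (res), ASM_CALL_CONSTRAINT
 *			: [val] REG_IN (w));
 *
 * Only %rax is declared clobbered there, which is why both functions
 * above save and restore every other register they touch.
 */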