/*
 * Accelerated CRC32(C) using AArch64 CRC32 instructions
 */
0007
0008 #include <linux/linkage.h>
0009 #include <asm/alternative.h>
0010 #include <asm/assembler.h>
0011
0012 .arch armv8-a+crc
0013
/*
 * Byte-swap \rd so the data is consumed in the byte order the CRC
 * variant expects: the \bigendian == 0 flavour swaps only on a
 * big-endian kernel build (CPU_BE), the \bigendian != 0 flavour only
 * on a little-endian one (CPU_LE).
 */
	.macro		byteorder, rd, bigendian
	.if		\bigendian
CPU_LE( rev		\rd, \rd	)
	.else
CPU_BE( rev		\rd, \rd	)
	.endif
	.endm
0021
/*
 * Halfword variant of byteorder: rev16 swaps the bytes within each
 * 16-bit halfword of \rd (only the low halfword is meaningful here).
 */
	.macro		byteorder16, rd, bigendian
	.if		\bigendian
CPU_LE( rev16		\rd, \rd	)
	.else
CPU_BE( rev16		\rd, \rd	)
	.endif
	.endm
0029
/*
 * The ARMv8 CRC32 instructions implement the bit-reversed
 * (little-endian) polynomial ordering, so the big-endian variant
 * reverses the bits of its operands around them.  No-op when
 * \bigendian is zero.
 */
	.macro		bitorder, rd, bigendian
	.ifne		\bigendian
	rbit		\rd, \rd
	.endif
	.endm
0035
/*
 * 16-bit variant of bitorder: rbit reverses all 32 bits, which
 * leaves the reversed halfword in bits [31:16]; shift it back down.
 */
	.macro		bitorder16, rd, bigendian
	.ifne		\bigendian
	rbit		\rd, \rd
	lsr		\rd, \rd, #16
	.endif
	.endm
0042
/*
 * 8-bit variant of bitorder: rbit reverses all 32 bits, which leaves
 * the reversed byte in bits [31:24]; shift it back down.
 */
	.macro		bitorder8, rd, bigendian
	.ifne		\bigendian
	rbit		\rd, \rd
	lsr		\rd, \rd, #24
	.endif
	.endm
0049
	/*
	 * Compute a CRC-32 over a memory buffer using the ARMv8 CRC32
	 * instructions.
	 *
	 * In:	w0 - initial CRC value
	 *	x1 - pointer to the data
	 *	x2 - length of the data in bytes
	 * Out:	w0 - updated CRC value
	 * Clobbers: x3-x8 and flags; x1/x2 are advanced/consumed
	 *
	 * \c  - CRC32 instruction suffix: empty for the CRC-32
	 *	 polynomial, 'c' for CRC-32C
	 * \be - nonzero selects the big-endian variant: the CRC32
	 *	 instructions operate on the bit-reversed little-endian
	 *	 representation, so operands and result are bit-reversed
	 *	 (and loads byte-swapped) around them in that case
	 */
	.macro		__crc32, c, be=0
	bitorder	w0, \be
	cmp		x2, #16
	b.lt		8f		// less than 16 bytes

	and		x7, x2, #0x1f	// x7 = len % 32 (head bytes)
	and		x2, x2, #~0x1f	// x2 = len rounded down to 32
	cbz		x7, 32f		// multiple of 32 bytes

	/*
	 * Fold in the len % 32 head bytes first, so the main loop below
	 * only ever sees whole 32-byte blocks.  The head is covered by
	 * two load pairs: x3:x4 holds its first 16 bytes, x5:x6 its
	 * last 16, loaded from x1 + (x7 & 0xf) and therefore possibly
	 * overlapping x3:x4.
	 */
	and		x8, x7, #0xf
	ldp		x3, x4, [x1]
	add		x8, x8, x1
	add		x1, x1, x7	// x1 now points at the 32-byte blocks
	ldp		x5, x6, [x8]
	byteorder	x3, \be
	byteorder	x4, \be
	byteorder	x5, \be
	byteorder	x6, \be
	bitorder	x3, \be
	bitorder	x4, \be
	bitorder	x5, \be
	bitorder	x6, \be

	/*
	 * Branchlessly consume 8/4/2/1 leading head bytes according to
	 * the corresponding bit of x7: each step computes the CRC
	 * unconditionally into w8, then csel keeps the old CRC (and
	 * skips shifting down the remaining data) when the bit is
	 * clear.  Each tst sets the flags the following csels test.
	 */
	tst		x7, #8
	crc32\c\()x	w8, w0, x3
	csel		x3, x3, x4, eq
	csel		w0, w0, w8, eq
	tst		x7, #4
	lsr		x4, x3, #32
	crc32\c\()w	w8, w0, w3
	csel		x3, x3, x4, eq
	csel		w0, w0, w8, eq
	tst		x7, #2
	lsr		w4, w3, #16
	crc32\c\()h	w8, w0, w3
	csel		w3, w3, w4, eq
	csel		w0, w0, w8, eq
	tst		x7, #1
	crc32\c\()b	w8, w0, w3
	csel		w0, w0, w8, eq
	tst		x7, #16
	crc32\c\()x	w8, w0, x5	// last 16 head bytes, if bit 4 set
	crc32\c\()x	w8, w8, x6
	csel		w0, w0, w8, eq
	cbz		x2, 0f		// no whole 32-byte blocks left

	/* Main loop: fold one 32-byte block per iteration. */
32:	ldp		x3, x4, [x1], #32
	sub		x2, x2, #32
	ldp		x5, x6, [x1, #-16]
	byteorder	x3, \be
	byteorder	x4, \be
	byteorder	x5, \be
	byteorder	x6, \be
	bitorder	x3, \be
	bitorder	x4, \be
	bitorder	x5, \be
	bitorder	x6, \be
	crc32\c\()x	w0, w0, x3
	crc32\c\()x	w0, w0, x4
	crc32\c\()x	w0, w0, x5
	crc32\c\()x	w0, w0, x6
	cbnz		x2, 32b
0:	bitorder	w0, \be		// undo the bit reversal of the CRC
	ret

	/*
	 * Short input (< 16 bytes): consume 8/4/2/1 bytes according to
	 * the bits of the byte count in x2.
	 */
8:	tbz		x2, #3, 4f
	ldr		x3, [x1], #8
	byteorder	x3, \be
	bitorder	x3, \be
	crc32\c\()x	w0, w0, x3
4:	tbz		x2, #2, 2f
	ldr		w3, [x1], #4
	byteorder	w3, \be
	bitorder	w3, \be
	crc32\c\()w	w0, w0, w3
2:	tbz		x2, #1, 1f
	ldrh		w3, [x1], #2
	byteorder16	w3, \be
	bitorder16	w3, \be
	crc32\c\()h	w0, w0, w3
1:	tbz		x2, #0, 0f
	ldrb		w3, [x1]
	bitorder8	w3, \be
	crc32\c\()b	w0, w0, w3
0:	bitorder	w0, \be		// undo the bit reversal of the CRC
	ret
	.endm
0137
	.align		5		// 32-byte align the entry point
/*
 * CRC-32 (little-endian polynomial order) over a buffer.
 * In: w0 = CRC, x1 = buffer, x2 = length in bytes; out: w0 = CRC.
 * The alternatives framework patches the fallback branch out at boot
 * when the CPU implements the ARMv8 CRC32 instructions
 * (ARM64_HAS_CRC32); otherwise this tail-calls the generic
 * crc32_le_base() implementation.
 */
SYM_FUNC_START(crc32_le)
alternative_if_not ARM64_HAS_CRC32
	b		crc32_le_base
alternative_else_nop_endif
	__crc32				// CRC-32 polynomial, little-endian
SYM_FUNC_END(crc32_le)
0145
	.align		5		// 32-byte align the entry point
/*
 * CRC-32C (Castagnoli polynomial) over a buffer.
 * In: w0 = CRC, x1 = buffer, x2 = length in bytes; out: w0 = CRC.
 * Falls back to the generic __crc32c_le_base() when the
 * ARM64_HAS_CRC32 alternative is not applied.
 */
SYM_FUNC_START(__crc32c_le)
alternative_if_not ARM64_HAS_CRC32
	b		__crc32c_le_base
alternative_else_nop_endif
	__crc32		c		// 'c' suffix selects crc32c instructions
SYM_FUNC_END(__crc32c_le)
0153
	.align		5		// 32-byte align the entry point
/*
 * CRC-32 in big-endian (non-bit-reversed) polynomial order.
 * In: w0 = CRC, x1 = buffer, x2 = length in bytes; out: w0 = CRC.
 * Falls back to the generic crc32_be_base() when the
 * ARM64_HAS_CRC32 alternative is not applied.
 */
SYM_FUNC_START(crc32_be)
alternative_if_not ARM64_HAS_CRC32
	b		crc32_be_base
alternative_else_nop_endif
	__crc32		be=1		// bit-reverse around the LE crc32 insns
SYM_FUNC_END(crc32_be)