github.com/insolar/x-crypto@v0.0.0-20191031140942-75fab8a325f6/md5/md5block_arm64.s

// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
// ARM64 version of md5block.go
// derived from crypto/md5/md5block_amd64.s

#include "textflag.h"

TEXT	·block(SB),NOSPLIT,$0-32
	MOVD	dig+0(FP), R0		// R0 = digest state
	MOVD	p+8(FP), R1		// R1 = message data
	MOVD	p_len+16(FP), R2	// R2 = number of bytes of data
	AND	$~63, R2		// round down to whole 64-byte blocks
	CBZ	R2, zero		// skip if there is no complete block

	ADD	R1, R2, R21		// R21 = end of the data to process
	LDPW	(0*8)(R0), (R4, R5)	// a, b
	LDPW	(1*8)(R0), (R6, R7)	// c, d

loop:
	// Save the incoming state; it is added back in after the four rounds.
	MOVW	R4, R12
	MOVW	R5, R13
	MOVW	R6, R14
	MOVW	R7, R15

	MOVW	(0*4)(R1), R8		// preload message word 0
	MOVW	R7, R9			// R9 = d

// Round 1 applies F(b,c,d) = d XOR (b AND (c XOR d)). R9 carries d in and
// leaves c behind for the next step (where it becomes d), while R8 preloads
// the message word for the following step.
#define ROUND1(a, b, c, d, index, const, shift) \
	ADDW	$const, a; \
	ADDW	R8, a; \
	MOVW	(index*4)(R1), R8; \
	EORW	c, R9; \
	ANDW	b, R9; \
	EORW	d, R9; \
	ADDW	R9, a; \
	RORW	$(32-shift), a; \
	MOVW	c, R9; \
	ADDW	b, a

	ROUND1(R4,R5,R6,R7, 1,0xd76aa478, 7);
	ROUND1(R7,R4,R5,R6, 2,0xe8c7b756,12);
	ROUND1(R6,R7,R4,R5, 3,0x242070db,17);
	ROUND1(R5,R6,R7,R4, 4,0xc1bdceee,22);
	ROUND1(R4,R5,R6,R7, 5,0xf57c0faf, 7);
	ROUND1(R7,R4,R5,R6, 6,0x4787c62a,12);
	ROUND1(R6,R7,R4,R5, 7,0xa8304613,17);
	ROUND1(R5,R6,R7,R4, 8,0xfd469501,22);
	ROUND1(R4,R5,R6,R7, 9,0x698098d8, 7);
	ROUND1(R7,R4,R5,R6,10,0x8b44f7af,12);
	ROUND1(R6,R7,R4,R5,11,0xffff5bb1,17);
	ROUND1(R5,R6,R7,R4,12,0x895cd7be,22);
	ROUND1(R4,R5,R6,R7,13,0x6b901122, 7);
	ROUND1(R7,R4,R5,R6,14,0xfd987193,12);
	ROUND1(R6,R7,R4,R5,15,0xa679438e,17);
	ROUND1(R5,R6,R7,R4, 0,0x49b40821,22);

	MOVW	(1*4)(R1), R8		// message word for the first round-2 step
	MOVW	R7, R9
	MOVW	R7, R10

// Round 2 applies G(b,c,d) = (d AND b) OR (c AND NOT d), assembled in R10
// from R9 = d; both are reloaded with c for the next step.
#define ROUND2(a, b, c, d, index, const, shift) \
	ADDW	$const, a; \
	ADDW	R8, a; \
	MOVW	(index*4)(R1), R8; \
	ANDW	b, R10; \
	BICW	R9, c, R9; \
	ORRW	R9, R10; \
	MOVW	c, R9; \
	ADDW	R10, a; \
	MOVW	c, R10; \
	RORW	$(32-shift), a; \
	ADDW	b, a

	ROUND2(R4,R5,R6,R7, 6,0xf61e2562, 5);
	ROUND2(R7,R4,R5,R6,11,0xc040b340, 9);
	ROUND2(R6,R7,R4,R5, 0,0x265e5a51,14);
	ROUND2(R5,R6,R7,R4, 5,0xe9b6c7aa,20);
	ROUND2(R4,R5,R6,R7,10,0xd62f105d, 5);
	ROUND2(R7,R4,R5,R6,15, 0x2441453, 9);
	ROUND2(R6,R7,R4,R5, 4,0xd8a1e681,14);
	ROUND2(R5,R6,R7,R4, 9,0xe7d3fbc8,20);
	ROUND2(R4,R5,R6,R7,14,0x21e1cde6, 5);
	ROUND2(R7,R4,R5,R6, 3,0xc33707d6, 9);
	ROUND2(R6,R7,R4,R5, 8,0xf4d50d87,14);
	ROUND2(R5,R6,R7,R4,13,0x455a14ed,20);
	ROUND2(R4,R5,R6,R7, 2,0xa9e3e905, 5);
	ROUND2(R7,R4,R5,R6, 7,0xfcefa3f8, 9);
	ROUND2(R6,R7,R4,R5,12,0x676f02d9,14);
	ROUND2(R5,R6,R7,R4, 0,0x8d2a4c8a,20);

	MOVW	(5*4)(R1), R8		// message word for the first round-3 step
	MOVW	R6, R9			// R9 = c

// Round 3 applies H(b,c,d) = b XOR c XOR d. R9 carries c in and b out
// (the next step's c).
#define ROUND3(a, b, c, d, index, const, shift) \
	ADDW	$const, a; \
	ADDW	R8, a; \
	MOVW	(index*4)(R1), R8; \
	EORW	d, R9; \
	EORW	b, R9; \
	ADDW	R9, a; \
	RORW	$(32-shift), a; \
	MOVW	b, R9; \
	ADDW	b, a

	ROUND3(R4,R5,R6,R7, 8,0xfffa3942, 4);
	ROUND3(R7,R4,R5,R6,11,0x8771f681,11);
	ROUND3(R6,R7,R4,R5,14,0x6d9d6122,16);
	ROUND3(R5,R6,R7,R4, 1,0xfde5380c,23);
	ROUND3(R4,R5,R6,R7, 4,0xa4beea44, 4);
	ROUND3(R7,R4,R5,R6, 7,0x4bdecfa9,11);
	ROUND3(R6,R7,R4,R5,10,0xf6bb4b60,16);
	ROUND3(R5,R6,R7,R4,13,0xbebfbc70,23);
	ROUND3(R4,R5,R6,R7, 0,0x289b7ec6, 4);
	ROUND3(R7,R4,R5,R6, 3,0xeaa127fa,11);
	ROUND3(R6,R7,R4,R5, 6,0xd4ef3085,16);
	ROUND3(R5,R6,R7,R4, 9, 0x4881d05,23);
	ROUND3(R4,R5,R6,R7,12,0xd9d4d039, 4);
	ROUND3(R7,R4,R5,R6,15,0xe6db99e5,11);
	ROUND3(R6,R7,R4,R5, 2,0x1fa27cf8,16);
	ROUND3(R5,R6,R7,R4, 0,0xc4ac5665,23);
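
// Round 4 applies I(b,c,d) = c XOR (b OR NOT d). R9 carries NOT d in and
// NOT c out (the complement of the next step's d).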
	MOVW	(0*4)(R1), R8		// message word for the first round-4 step
	MVNW	R7, R9			// R9 = NOT d

#define ROUND4(a, b, c, d, index, const, shift) \
	ADDW	$const, a; \
	ADDW	R8, a; \
	MOVW	(index*4)(R1), R8; \
	ORRW	b, R9; \
	EORW	c, R9; \
	ADDW	R9, a; \
	RORW	$(32-shift), a; \
	MVNW	c, R9; \
	ADDW	b, a

	ROUND4(R4,R5,R6,R7, 7,0xf4292244, 6);
	ROUND4(R7,R4,R5,R6,14,0x432aff97,10);
	ROUND4(R6,R7,R4,R5, 5,0xab9423a7,15);
	ROUND4(R5,R6,R7,R4,12,0xfc93a039,21);
	ROUND4(R4,R5,R6,R7, 3,0x655b59c3, 6);
	ROUND4(R7,R4,R5,R6,10,0x8f0ccc92,10);
	ROUND4(R6,R7,R4,R5, 1,0xffeff47d,15);
	ROUND4(R5,R6,R7,R4, 8,0x85845dd1,21);
	ROUND4(R4,R5,R6,R7,15,0x6fa87e4f, 6);
	ROUND4(R7,R4,R5,R6, 6,0xfe2ce6e0,10);
	ROUND4(R6,R7,R4,R5,13,0xa3014314,15);
	ROUND4(R5,R6,R7,R4, 4,0x4e0811a1,21);
	ROUND4(R4,R5,R6,R7,11,0xf7537e82, 6);
	ROUND4(R7,R4,R5,R6, 2,0xbd3af235,10);
	ROUND4(R6,R7,R4,R5, 9,0x2ad7d2bb,15);
	ROUND4(R5,R6,R7,R4, 0,0xeb86d391,21);

	// Add the state saved at the start of the block back in.
	ADDW	R12, R4
	ADDW	R13, R5
	ADDW	R14, R6
	ADDW	R15, R7

	ADD	$64, R1			// advance to the next 64-byte block
	CMP	R1, R21
	BNE	loop

	// Write the updated state back to dig.
	STPW	(R4, R5), (0*8)(R0)
	STPW	(R6, R7), (1*8)(R0)
zero:
	RET
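
// In the upstream crypto/md5 package this routine backs the declaration in
// md5block_decl.go, which is assumed to be the same in this fork:
//
//	//go:noescape
//	func block(dig *digest, p []byte)
//
// That signature accounts for the $0-32 argument frame above: an 8-byte
// *digest pointer followed by the 24-byte slice header of p (pointer,
// length, capacity), of which only dig, p and p_len are read.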