github.com/ethereum/go-ethereum@v1.16.1/crypto/secp256k1/libsecp256k1/src/scalar_low_impl.h

/***********************************************************************
 * Copyright (c) 2015 Andrew Poelstra                                  *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_SCALAR_REPR_IMPL_H
#define SECP256K1_SCALAR_REPR_IMPL_H

#include "checkmem.h"
#include "scalar.h"
#include "util.h"

#include <string.h>

SECP256K1_INLINE static int secp256k1_scalar_is_even(const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return !(*a & 1);
}

SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar *r, unsigned int v) {
    *r = v % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
}

SECP256K1_INLINE static uint32_t secp256k1_scalar_get_bits_limb32(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
    SECP256K1_SCALAR_VERIFY(a);

    VERIFY_CHECK(count > 0 && count <= 32);
    if (offset < 32) {
        return (*a >> offset) & (0xFFFFFFFF >> (32 - count));
    } else {
        return 0;
    }
}

SECP256K1_INLINE static uint32_t secp256k1_scalar_get_bits_var(const secp256k1_scalar *a, unsigned int offset, unsigned int count) {
    SECP256K1_SCALAR_VERIFY(a);

    return secp256k1_scalar_get_bits_limb32(a, offset, count);
}

SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scalar *a) { return *a >= EXHAUSTIVE_TEST_ORDER; }

static int secp256k1_scalar_add(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    *r = (*a + *b) % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
    return *r < *b;
}

static void secp256k1_scalar_cadd_bit(secp256k1_scalar *r, unsigned int bit, int flag) {
    SECP256K1_SCALAR_VERIFY(r);

    if (flag && bit < 32)
        *r += ((uint32_t)1 << bit);

    SECP256K1_SCALAR_VERIFY(r);
    VERIFY_CHECK(bit < 32);
    /* Verify that adding (1 << bit) will not overflow any in-range scalar *r by
     * overflowing the underlying uint32_t. */
    VERIFY_CHECK(((uint32_t)1 << bit) - 1 <= UINT32_MAX - EXHAUSTIVE_TEST_ORDER);
}
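/* Editor's sketch, not part of upstream libsecp256k1 (the helper name below is
 * hypothetical): a minimal worked example of the wrap-around semantics of
 * secp256k1_scalar_add above. Adding 1 to the largest in-range scalar,
 * EXHAUSTIVE_TEST_ORDER - 1, reduces to 0, and the return value (*r < *b)
 * reports that the sum wrapped past the group order. */
SECP256K1_INLINE static void secp256k1_scalar_add_wrap_example(void) {
    secp256k1_scalar a, b, r;
    int wrapped;
    secp256k1_scalar_set_int(&a, EXHAUSTIVE_TEST_ORDER - 1); /* largest valid scalar */
    secp256k1_scalar_set_int(&b, 1);
    wrapped = secp256k1_scalar_add(&r, &a, &b);
    /* (ORDER - 1 + 1) % ORDER == 0, and r < b signals the wrap. */
    VERIFY_CHECK(r == 0 && wrapped == 1);
}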
static void secp256k1_scalar_set_b32(secp256k1_scalar *r, const unsigned char *b32, int *overflow) {
    int i;
    int over = 0;
    *r = 0;
    for (i = 0; i < 32; i++) {
        *r = (*r * 0x100) + b32[i];
        if (*r >= EXHAUSTIVE_TEST_ORDER) {
            over = 1;
            *r %= EXHAUSTIVE_TEST_ORDER;
        }
    }
    if (overflow) *overflow = over;

    SECP256K1_SCALAR_VERIFY(r);
}

static void secp256k1_scalar_get_b32(unsigned char *bin, const secp256k1_scalar* a) {
    SECP256K1_SCALAR_VERIFY(a);

    memset(bin, 0, 32);
    bin[28] = *a >> 24; bin[29] = *a >> 16; bin[30] = *a >> 8; bin[31] = *a;
}

SECP256K1_INLINE static int secp256k1_scalar_is_zero(const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a == 0;
}

static void secp256k1_scalar_negate(secp256k1_scalar *r, const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    if (*a == 0) {
        *r = 0;
    } else {
        *r = EXHAUSTIVE_TEST_ORDER - *a;
    }

    SECP256K1_SCALAR_VERIFY(r);
}

SECP256K1_INLINE static int secp256k1_scalar_is_one(const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a == 1;
}

static int secp256k1_scalar_is_high(const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    return *a > EXHAUSTIVE_TEST_ORDER / 2;
}

static int secp256k1_scalar_cond_negate(secp256k1_scalar *r, int flag) {
    SECP256K1_SCALAR_VERIFY(r);

    if (flag) secp256k1_scalar_negate(r, r);

    SECP256K1_SCALAR_VERIFY(r);
    return flag ? -1 : 1;
}

static void secp256k1_scalar_mul(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    *r = (*a * *b) % EXHAUSTIVE_TEST_ORDER;

    SECP256K1_SCALAR_VERIFY(r);
}

static void secp256k1_scalar_split_128(secp256k1_scalar *r1, secp256k1_scalar *r2, const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    *r1 = *a;
    *r2 = 0;

    SECP256K1_SCALAR_VERIFY(r1);
    SECP256K1_SCALAR_VERIFY(r2);
}

SECP256K1_INLINE static int secp256k1_scalar_eq(const secp256k1_scalar *a, const secp256k1_scalar *b) {
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_SCALAR_VERIFY(b);

    return *a == *b;
}

static SECP256K1_INLINE void secp256k1_scalar_cmov(secp256k1_scalar *r, const secp256k1_scalar *a, int flag) {
    uint32_t mask0, mask1;
    volatile int vflag = flag;
    SECP256K1_SCALAR_VERIFY(a);
    SECP256K1_CHECKMEM_CHECK_VERIFY(r, sizeof(*r));

    /* mask0 is all-ones when flag is 0 and all-zeros when flag is 1, so the
     * select below copies *a into *r exactly when flag is set, without branching. */
    mask0 = vflag + ~((uint32_t)0);
    mask1 = ~mask0;
    *r = (*r & mask0) | (*a & mask1);

    SECP256K1_SCALAR_VERIFY(r);
}

static void secp256k1_scalar_inverse(secp256k1_scalar *r, const secp256k1_scalar *x) {
    int i;
    uint32_t res = 0;
    SECP256K1_SCALAR_VERIFY(x);

    /* Brute-force search: the exhaustive-test group is tiny, so simply scan
     * for the i with i * x == 1 (mod EXHAUSTIVE_TEST_ORDER). */
    for (i = 0; i < EXHAUSTIVE_TEST_ORDER; i++) {
        if ((i * *x) % EXHAUSTIVE_TEST_ORDER == 1) {
            res = i;
            break;
        }
    }

    /* If this VERIFY_CHECK triggers we were given a noninvertible scalar (and thus
     * have a composite group order; fix it in exhaustive_tests.c). */
    VERIFY_CHECK(res != 0);
    *r = res;

    SECP256K1_SCALAR_VERIFY(r);
}
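/* Editor's sketch, not part of upstream libsecp256k1 (the helper name below is
 * hypothetical): the defining property of the brute-force inverse above is that
 * for any nonzero x, inverse(x) * x == 1 (mod EXHAUSTIVE_TEST_ORDER). The
 * exhaustive-test order is expected to be an odd prime, so 2 is invertible. */
SECP256K1_INLINE static void secp256k1_scalar_inverse_example(void) {
    secp256k1_scalar x, x_inv, prod;
    secp256k1_scalar_set_int(&x, 2);
    secp256k1_scalar_inverse(&x_inv, &x);
    secp256k1_scalar_mul(&prod, &x_inv, &x);
    /* x_inv * x must reduce to the multiplicative identity. */
    VERIFY_CHECK(secp256k1_scalar_is_one(&prod));
}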
static void secp256k1_scalar_inverse_var(secp256k1_scalar *r, const secp256k1_scalar *x) {
    SECP256K1_SCALAR_VERIFY(x);

    secp256k1_scalar_inverse(r, x);

    SECP256K1_SCALAR_VERIFY(r);
}

static void secp256k1_scalar_half(secp256k1_scalar *r, const secp256k1_scalar *a) {
    SECP256K1_SCALAR_VERIFY(a);

    /* If *a is odd, the mask adds the (odd) group order to make the sum even
     * before halving; the shift then yields a/2 mod EXHAUSTIVE_TEST_ORDER. */
    *r = (*a + ((-(uint32_t)(*a & 1)) & EXHAUSTIVE_TEST_ORDER)) >> 1;

    SECP256K1_SCALAR_VERIFY(r);
}

#endif /* SECP256K1_SCALAR_REPR_IMPL_H */
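/* Editor's note, not part of upstream libsecp256k1: a worked example of
 * secp256k1_scalar_half above, assuming a hypothetical EXHAUSTIVE_TEST_ORDER
 * of 13. For a = 5 (odd), the mask selects the order: (5 + 13) >> 1 == 9, and
 * indeed 9 + 9 == 18 == 5 (mod 13). For a = 4 (even), the mask is zero:
 * (4 + 0) >> 1 == 2. */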