github.com/ethereum/go-ethereum@v1.16.1/crypto/secp256k1/libsecp256k1/src/field_5x52_int128_impl.h

/***********************************************************************
 * Copyright (c) 2013, 2014 Pieter Wuille                              *
 * Distributed under the MIT software license, see the accompanying    *
 * file COPYING or https://www.opensource.org/licenses/mit-license.php.*
 ***********************************************************************/

#ifndef SECP256K1_FIELD_INNER5X52_IMPL_H
#define SECP256K1_FIELD_INNER5X52_IMPL_H

#include <stdint.h>

#include "int128.h"
#include "util.h"

#define VERIFY_BITS(x, n) VERIFY_CHECK(((x) >> (n)) == 0)
#define VERIFY_BITS_128(x, n) VERIFY_CHECK(secp256k1_u128_check_bits((x), (n)))

SECP256K1_INLINE static void secp256k1_fe_mul_inner(uint64_t *r, const uint64_t *a, const uint64_t * SECP256K1_RESTRICT b) {
    secp256k1_uint128 c, d;
    uint64_t t3, t4, tx, u0;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;

    VERIFY_BITS(a[0], 56);
    VERIFY_BITS(a[1], 56);
    VERIFY_BITS(a[2], 56);
    VERIFY_BITS(a[3], 56);
    VERIFY_BITS(a[4], 52);
    VERIFY_BITS(b[0], 56);
    VERIFY_BITS(b[1], 56);
    VERIFY_BITS(b[2], 56);
    VERIFY_BITS(b[3], 56);
    VERIFY_BITS(b[4], 52);
    VERIFY_CHECK(r != b);
    VERIFY_CHECK(a != b);

    /*  [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
     *  for 0 <= x <= 4, px is a shorthand for sum(a[i]*b[x-i], i=0..x).
     *  for 4 <= x <= 8, px is a shorthand for sum(a[i]*b[x-i], i=(x-4)..4)
     *  Note that [x 0 0 0 0 0] = [x*R].
     */
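    /* Why [x 0 0 0 0 0] = [x*R]: the limbs are 52 bits wide, so a value at
     * limb position 5 is scaled by 2^260. For secp256k1's modulus
     * p = 2^256 - 0x1000003D1 we have 2^256 == 0x1000003D1 (mod p), hence
     * 2^260 == 0x1000003D1 << 4 == 0x1000003D10 == R (mod p). Folding
     * overflow back in is therefore a single multiply-accumulate by R.
     */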

    secp256k1_u128_mul(&d, a0, b[3]);
    secp256k1_u128_accum_mul(&d, a1, b[2]);
    secp256k1_u128_accum_mul(&d, a2, b[1]);
    secp256k1_u128_accum_mul(&d, a3, b[0]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 0] = [p3 0 0 0] */
    secp256k1_u128_mul(&c, a4, b[4]);
    VERIFY_BITS_128(&c, 112);
    /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    VERIFY_BITS_128(&d, 115);
    VERIFY_BITS_128(&c, 48);
    /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
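    /* Only the low 64 bits of c were folded above, and the shift was a full
     * 64 bits rather than the 52-bit limb size, so the remainder of c is
     * left 12 bits high (the (c<<12) in the annotation). It is folded in
     * below with R << 12 to compensate.
     */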
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t3, 52);
    VERIFY_BITS_128(&d, 63);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

    secp256k1_u128_accum_mul(&d, a0, b[4]);
    secp256k1_u128_accum_mul(&d, a1, b[3]);
    secp256k1_u128_accum_mul(&d, a2, b[2]);
    secp256k1_u128_accum_mul(&d, a3, b[1]);
    secp256k1_u128_accum_mul(&d, a4, b[0]);
    VERIFY_BITS_128(&d, 115);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    VERIFY_BITS_128(&d, 116);
    /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t4, 52);
    VERIFY_BITS_128(&d, 64);
    /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    tx = (t4 >> 48); t4 &= (M >> 4);
    VERIFY_BITS(tx, 4);
    VERIFY_BITS(t4, 48);
    /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
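    /* t4 is the top limb, which holds only 48 bits for a value below 2^256
     * (5*52 = 260 bits total, so 4 bits of headroom). The 4 bits split off
     * into tx sit at and above 2^256; they are recombined into u0 below and
     * folded back in via R >> 4 = 0x1000003D1, i.e. 2^256 mod p.
     */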

    secp256k1_u128_mul(&c, a0, b[0]);
    VERIFY_BITS_128(&c, 112);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&d, a1, b[4]);
    secp256k1_u128_accum_mul(&d, a2, b[3]);
    secp256k1_u128_accum_mul(&d, a3, b[2]);
    secp256k1_u128_accum_mul(&d, a4, b[1]);
    VERIFY_BITS_128(&d, 114);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(u0, 52);
    VERIFY_BITS_128(&d, 62);
    /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = (u0 << 4) | tx;
    VERIFY_BITS(u0, 56);
    /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    VERIFY_BITS_128(&c, 113);
    /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[0], 52);
    VERIFY_BITS_128(&c, 61);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */
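    /* From here on the two accumulators advance in lockstep: c collects the
     * low limbs (r[0], r[1], ...) while d collects the high products
     * (p5..p8), and each limb extracted from d is folded into c with a
     * multiply by R.
     */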

    secp256k1_u128_accum_mul(&c, a0, b[1]);
    secp256k1_u128_accum_mul(&c, a1, b[0]);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&d, a2, b[4]);
    secp256k1_u128_accum_mul(&d, a3, b[3]);
    secp256k1_u128_accum_mul(&d, a4, b[2]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 62);
    /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[1], 52);
    VERIFY_BITS_128(&c, 63);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

    secp256k1_u128_accum_mul(&c, a0, b[2]);
    secp256k1_u128_accum_mul(&c, a1, b[1]);
    secp256k1_u128_accum_mul(&c, a2, b[0]);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&d, a3, b[4]);
    secp256k1_u128_accum_mul(&d, a4, b[3]);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 50);
    /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[2], 52);
    VERIFY_BITS_128(&c, 63);
    /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    VERIFY_BITS_128(&c, 100);
    /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[3], 52);
    VERIFY_BITS_128(&c, 48);
    /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[4] = secp256k1_u128_to_u64(&c) + t4;
    VERIFY_BITS(r[4], 49);
    /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
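    /* Note that r[4] may use up to 49 bits, one more than the 48 bits of a
     * fully reduced top limb: the result is congruent to a*b mod p but is
     * not necessarily canonical; full reduction is left to the field code's
     * normalize routines.
     */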
}

SECP256K1_INLINE static void secp256k1_fe_sqr_inner(uint64_t *r, const uint64_t *a) {
    secp256k1_uint128 c, d;
    uint64_t a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3], a4 = a[4];
    uint64_t t3, t4, tx, u0;
    const uint64_t M = 0xFFFFFFFFFFFFFULL, R = 0x1000003D10ULL;

    VERIFY_BITS(a[0], 56);
    VERIFY_BITS(a[1], 56);
    VERIFY_BITS(a[2], 56);
    VERIFY_BITS(a[3], 56);
    VERIFY_BITS(a[4], 52);

    /**  [... a b c] is a shorthand for ... + a<<104 + b<<52 + c<<0 mod n.
     *  px is a shorthand for sum(a[i]*a[x-i], i=0..x).
     *  Note that [x 0 0 0 0 0] = [x*R].
     */
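    /* Squaring exploits symmetry: each cross term a[i]*a[j] with i != j
     * occurs twice in px, so one operand is pre-doubled (a0*2, a1*2,
     * a4 *= 2, ...) and each product is computed once, saving multiplies
     * relative to the general multiplication above.
     */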

    secp256k1_u128_mul(&d, a0*2, a3);
    secp256k1_u128_accum_mul(&d, a1*2, a2);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 0] = [p3 0 0 0] */
    secp256k1_u128_mul(&c, a4, a4);
    VERIFY_BITS_128(&c, 112);
    /* [c 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R, secp256k1_u128_to_u64(&c)); secp256k1_u128_rshift(&c, 64);
    VERIFY_BITS_128(&d, 115);
    VERIFY_BITS_128(&c, 48);
    /* [(c<<12) 0 0 0 0 0 d 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */
    t3 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t3, 52);
    VERIFY_BITS_128(&d, 63);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 0 p3 0 0 0] */

    a4 *= 2;
    secp256k1_u128_accum_mul(&d, a0, a4);
    secp256k1_u128_accum_mul(&d, a1*2, a3);
    secp256k1_u128_accum_mul(&d, a2, a2);
    VERIFY_BITS_128(&d, 115);
    /* [(c<<12) 0 0 0 0 d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    secp256k1_u128_accum_mul(&d, R << 12, secp256k1_u128_to_u64(&c));
    VERIFY_BITS_128(&d, 116);
    /* [d t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    t4 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(t4, 52);
    VERIFY_BITS_128(&d, 64);
    /* [d t4 t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */
    tx = (t4 >> 48); t4 &= (M >> 4);
    VERIFY_BITS(tx, 4);
    VERIFY_BITS(t4, 48);
    /* [d t4+(tx<<48) t3 0 0 0] = [p8 0 0 0 p4 p3 0 0 0] */

    secp256k1_u128_mul(&c, a0, a0);
    VERIFY_BITS_128(&c, 112);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 0 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&d, a1, a4);
    secp256k1_u128_accum_mul(&d, a2*2, a3);
    VERIFY_BITS_128(&d, 114);
    /* [d t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = secp256k1_u128_to_u64(&d) & M; secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS(u0, 52);
    VERIFY_BITS_128(&d, 62);
    /* [d u0 t4+(tx<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    /* [d 0 t4+(tx<<48)+(u0<<52) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    u0 = (u0 << 4) | tx;
    VERIFY_BITS(u0, 56);
    /* [d 0 t4+(u0<<48) t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    secp256k1_u128_accum_mul(&c, u0, R >> 4);
    VERIFY_BITS_128(&c, 113);
    /* [d 0 t4 t3 0 0 c] = [p8 0 0 p5 p4 p3 0 0 p0] */
    r[0] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[0], 52);
    VERIFY_BITS_128(&c, 61);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 0 p0] */

    a0 *= 2;
    secp256k1_u128_accum_mul(&c, a0, a1);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 0 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&d, a2, a4);
    secp256k1_u128_accum_mul(&d, a3, a3);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    secp256k1_u128_accum_mul(&c, secp256k1_u128_to_u64(&d) & M, R); secp256k1_u128_rshift(&d, 52);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 62);
    /* [d 0 0 t4 t3 0 c r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */
    r[1] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[1], 52);
    VERIFY_BITS_128(&c, 63);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 0 p1 p0] */

    secp256k1_u128_accum_mul(&c, a0, a2);
    secp256k1_u128_accum_mul(&c, a1, a1);
    VERIFY_BITS_128(&c, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 0 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&d, a3, a4);
    VERIFY_BITS_128(&d, 114);
    /* [d 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    secp256k1_u128_accum_mul(&c, R, secp256k1_u128_to_u64(&d)); secp256k1_u128_rshift(&d, 64);
    VERIFY_BITS_128(&c, 115);
    VERIFY_BITS_128(&d, 50);
    /* [(d<<12) 0 0 0 t4 t3 c r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[2] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[2], 52);
    VERIFY_BITS_128(&c, 63);
    /* [(d<<12) 0 0 0 t4 t3+c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */

    secp256k1_u128_accum_mul(&c, R << 12, secp256k1_u128_to_u64(&d));
    secp256k1_u128_accum_u64(&c, t3);
    VERIFY_BITS_128(&c, 100);
    /* [t4 c r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[3] = secp256k1_u128_to_u64(&c) & M; secp256k1_u128_rshift(&c, 52);
    VERIFY_BITS(r[3], 52);
    VERIFY_BITS_128(&c, 48);
    /* [t4+c r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
    r[4] = secp256k1_u128_to_u64(&c) + t4;
    VERIFY_BITS(r[4], 49);
    /* [r4 r3 r2 r1 r0] = [p8 p7 p6 p5 p4 p3 p2 p1 p0] */
}
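
/* Usage sketch (an illustration, not part of the library; assumes it is
 * compiled inside the libsecp256k1 tree so int128.h and util.h resolve):
 * the element 1 in 5x52 limb form times itself yields 1 again. Note the
 * a != b restriction on secp256k1_fe_mul_inner, hence two arrays.
 *
 *     uint64_t one_a[5] = {1, 0, 0, 0, 0};
 *     uint64_t one_b[5] = {1, 0, 0, 0, 0};
 *     uint64_t out[5];
 *     secp256k1_fe_mul_inner(out, one_a, one_b);   out == {1, 0, 0, 0, 0}
 *     secp256k1_fe_sqr_inner(out, one_a);          likewise
 */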

#endif /* SECP256K1_FIELD_INNER5X52_IMPL_H */