@@ -41,16 +41,22 @@ SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar *r, unsig
4141 r -> d [1 ] = 0 ;
4242 r -> d [2 ] = 0 ;
4343 r -> d [3 ] = 0 ;
44+
45+ secp256k1_scalar_verify (r );
4446}
4547
4648SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits (const secp256k1_scalar * a , unsigned int offset , unsigned int count ) {
49+ secp256k1_scalar_verify (a );
4750 VERIFY_CHECK ((offset + count - 1 ) >> 6 == offset >> 6 );
51+
4852 return (a -> d [offset >> 6 ] >> (offset & 0x3F )) & ((((uint64_t )1 ) << count ) - 1 );
4953}
5054
5155SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits_var (const secp256k1_scalar * a , unsigned int offset , unsigned int count ) {
56+ secp256k1_scalar_verify (a );
5257 VERIFY_CHECK (count < 32 );
5358 VERIFY_CHECK (offset + count <= 256 );
59+
5460 if ((offset + count - 1 ) >> 6 == offset >> 6 ) {
5561 return secp256k1_scalar_get_bits (a , offset , count );
5662 } else {
@@ -74,6 +80,7 @@ SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scal
7480SECP256K1_INLINE static int secp256k1_scalar_reduce (secp256k1_scalar * r , unsigned int overflow ) {
7581 secp256k1_uint128 t ;
7682 VERIFY_CHECK (overflow <= 1 );
83+
7784 secp256k1_u128_from_u64 (& t , r -> d [0 ]);
7885 secp256k1_u128_accum_u64 (& t , overflow * SECP256K1_N_C_0 );
7986 r -> d [0 ] = secp256k1_u128_to_u64 (& t ); secp256k1_u128_rshift (& t , 64 );
@@ -85,12 +92,17 @@ SECP256K1_INLINE static int secp256k1_scalar_reduce(secp256k1_scalar *r, unsigne
8592 r -> d [2 ] = secp256k1_u128_to_u64 (& t ); secp256k1_u128_rshift (& t , 64 );
8693 secp256k1_u128_accum_u64 (& t , r -> d [3 ]);
8794 r -> d [3 ] = secp256k1_u128_to_u64 (& t );
95+
96+ secp256k1_scalar_verify (r );
8897 return overflow ;
8998}
9099
91100static int secp256k1_scalar_add (secp256k1_scalar * r , const secp256k1_scalar * a , const secp256k1_scalar * b ) {
92101 int overflow ;
93102 secp256k1_uint128 t ;
103+ secp256k1_scalar_verify (a );
104+ secp256k1_scalar_verify (b );
105+
94106 secp256k1_u128_from_u64 (& t , a -> d [0 ]);
95107 secp256k1_u128_accum_u64 (& t , b -> d [0 ]);
96108 r -> d [0 ] = secp256k1_u128_to_u64 (& t ); secp256k1_u128_rshift (& t , 64 );
@@ -106,13 +118,17 @@ static int secp256k1_scalar_add(secp256k1_scalar *r, const secp256k1_scalar *a,
106118 overflow = secp256k1_u128_to_u64 (& t ) + secp256k1_scalar_check_overflow (r );
107119 VERIFY_CHECK (overflow == 0 || overflow == 1 );
108120 secp256k1_scalar_reduce (r , overflow );
121+
122+ secp256k1_scalar_verify (r );
109123 return overflow ;
110124}
111125
112126static void secp256k1_scalar_cadd_bit (secp256k1_scalar * r , unsigned int bit , int flag ) {
113127 secp256k1_uint128 t ;
114128 volatile int vflag = flag ;
129+ secp256k1_scalar_verify (r );
115130 VERIFY_CHECK (bit < 256 );
131+
116132 bit += ((uint32_t ) vflag - 1 ) & 0x100 ; /* forcing (bit >> 6) > 3 makes this a noop */
117133 secp256k1_u128_from_u64 (& t , r -> d [0 ]);
118134 secp256k1_u128_accum_u64 (& t , ((uint64_t )((bit >> 6 ) == 0 )) << (bit & 0x3F ));
@@ -126,6 +142,8 @@ static void secp256k1_scalar_cadd_bit(secp256k1_scalar *r, unsigned int bit, int
126142 secp256k1_u128_accum_u64 (& t , r -> d [3 ]);
127143 secp256k1_u128_accum_u64 (& t , ((uint64_t )((bit >> 6 ) == 3 )) << (bit & 0x3F ));
128144 r -> d [3 ] = secp256k1_u128_to_u64 (& t );
145+
146+ secp256k1_scalar_verify (r );
129147#ifdef VERIFY
130148 VERIFY_CHECK (secp256k1_u128_hi_u64 (& t ) == 0 );
131149#endif
@@ -141,22 +159,30 @@ static void secp256k1_scalar_set_b32(secp256k1_scalar *r, const unsigned char *b
141159 if (overflow ) {
142160 * overflow = over ;
143161 }
162+
163+ secp256k1_scalar_verify (r );
144164}
145165
146166static void secp256k1_scalar_get_b32 (unsigned char * bin , const secp256k1_scalar * a ) {
167+ secp256k1_scalar_verify (a );
168+
147169 secp256k1_write_be64 (& bin [0 ], a -> d [3 ]);
148170 secp256k1_write_be64 (& bin [8 ], a -> d [2 ]);
149171 secp256k1_write_be64 (& bin [16 ], a -> d [1 ]);
150172 secp256k1_write_be64 (& bin [24 ], a -> d [0 ]);
151173}
152174
153175SECP256K1_INLINE static int secp256k1_scalar_is_zero (const secp256k1_scalar * a ) {
176+ secp256k1_scalar_verify (a );
177+
154178 return (a -> d [0 ] | a -> d [1 ] | a -> d [2 ] | a -> d [3 ]) == 0 ;
155179}
156180
157181static void secp256k1_scalar_negate (secp256k1_scalar * r , const secp256k1_scalar * a ) {
158182 uint64_t nonzero = 0xFFFFFFFFFFFFFFFFULL * (secp256k1_scalar_is_zero (a ) == 0 );
159183 secp256k1_uint128 t ;
184+ secp256k1_scalar_verify (a );
185+
160186 secp256k1_u128_from_u64 (& t , ~a -> d [0 ]);
161187 secp256k1_u128_accum_u64 (& t , SECP256K1_N_0 + 1 );
162188 r -> d [0 ] = secp256k1_u128_to_u64 (& t ) & nonzero ; secp256k1_u128_rshift (& t , 64 );
@@ -169,15 +195,21 @@ static void secp256k1_scalar_negate(secp256k1_scalar *r, const secp256k1_scalar
169195 secp256k1_u128_accum_u64 (& t , ~a -> d [3 ]);
170196 secp256k1_u128_accum_u64 (& t , SECP256K1_N_3 );
171197 r -> d [3 ] = secp256k1_u128_to_u64 (& t ) & nonzero ;
198+
199+ secp256k1_scalar_verify (r );
172200}
173201
174202SECP256K1_INLINE static int secp256k1_scalar_is_one (const secp256k1_scalar * a ) {
203+ secp256k1_scalar_verify (a );
204+
175205 return ((a -> d [0 ] ^ 1 ) | a -> d [1 ] | a -> d [2 ] | a -> d [3 ]) == 0 ;
176206}
177207
178208static int secp256k1_scalar_is_high (const secp256k1_scalar * a ) {
179209 int yes = 0 ;
180210 int no = 0 ;
211+ secp256k1_scalar_verify (a );
212+
181213 no |= (a -> d [3 ] < SECP256K1_N_H_3 );
182214 yes |= (a -> d [3 ] > SECP256K1_N_H_3 ) & ~no ;
183215 no |= (a -> d [2 ] < SECP256K1_N_H_2 ) & ~yes ; /* No need for a > check. */
@@ -194,6 +226,8 @@ static int secp256k1_scalar_cond_negate(secp256k1_scalar *r, int flag) {
194226 uint64_t mask = - vflag ;
195227 uint64_t nonzero = (secp256k1_scalar_is_zero (r ) != 0 ) - 1 ;
196228 secp256k1_uint128 t ;
229+ secp256k1_scalar_verify (r );
230+
197231 secp256k1_u128_from_u64 (& t , r -> d [0 ] ^ mask );
198232 secp256k1_u128_accum_u64 (& t , (SECP256K1_N_0 + 1 ) & mask );
199233 r -> d [0 ] = secp256k1_u128_to_u64 (& t ) & nonzero ; secp256k1_u128_rshift (& t , 64 );
@@ -206,6 +240,8 @@ static int secp256k1_scalar_cond_negate(secp256k1_scalar *r, int flag) {
206240 secp256k1_u128_accum_u64 (& t , r -> d [3 ] ^ mask );
207241 secp256k1_u128_accum_u64 (& t , SECP256K1_N_3 & mask );
208242 r -> d [3 ] = secp256k1_u128_to_u64 (& t ) & nonzero ;
243+
244+ secp256k1_scalar_verify (r );
209245 return 2 * (mask == 0 ) - 1 ;
210246}
211247
@@ -764,23 +800,34 @@ static void secp256k1_scalar_mul_512(uint64_t l[8], const secp256k1_scalar *a, c
764800
static void secp256k1_scalar_mul(secp256k1_scalar *r, const secp256k1_scalar *a, const secp256k1_scalar *b) {
    /* Scalar multiplication: compute the full 512-bit product of a and b,
     * then reduce it back down to a 4x64 scalar stored in r. */
    uint64_t l[8]; /* 512-bit intermediate product, as 8 64-bit limbs */
    secp256k1_scalar_verify(a);
    secp256k1_scalar_verify(b);

    secp256k1_scalar_mul_512(l, a, b);
    secp256k1_scalar_reduce_512(r, l);

    secp256k1_scalar_verify(r);
}
770811
771812static int secp256k1_scalar_shr_int (secp256k1_scalar * r , int n ) {
772813 int ret ;
814+ secp256k1_scalar_verify (r );
773815 VERIFY_CHECK (n > 0 );
774816 VERIFY_CHECK (n < 16 );
817+
775818 ret = r -> d [0 ] & ((1 << n ) - 1 );
776819 r -> d [0 ] = (r -> d [0 ] >> n ) + (r -> d [1 ] << (64 - n ));
777820 r -> d [1 ] = (r -> d [1 ] >> n ) + (r -> d [2 ] << (64 - n ));
778821 r -> d [2 ] = (r -> d [2 ] >> n ) + (r -> d [3 ] << (64 - n ));
779822 r -> d [3 ] = (r -> d [3 ] >> n );
823+
824+ secp256k1_scalar_verify (r );
780825 return ret ;
781826}
782827
783828static void secp256k1_scalar_split_128 (secp256k1_scalar * r1 , secp256k1_scalar * r2 , const secp256k1_scalar * k ) {
829+ secp256k1_scalar_verify (k );
830+
784831 r1 -> d [0 ] = k -> d [0 ];
785832 r1 -> d [1 ] = k -> d [1 ];
786833 r1 -> d [2 ] = 0 ;
@@ -789,9 +836,15 @@ static void secp256k1_scalar_split_128(secp256k1_scalar *r1, secp256k1_scalar *r
789836 r2 -> d [1 ] = k -> d [3 ];
790837 r2 -> d [2 ] = 0 ;
791838 r2 -> d [3 ] = 0 ;
839+
840+ secp256k1_scalar_verify (r1 );
841+ secp256k1_scalar_verify (r2 );
792842}
793843
794844SECP256K1_INLINE static int secp256k1_scalar_eq (const secp256k1_scalar * a , const secp256k1_scalar * b ) {
845+ secp256k1_scalar_verify (a );
846+ secp256k1_scalar_verify (b );
847+
795848 return ((a -> d [0 ] ^ b -> d [0 ]) | (a -> d [1 ] ^ b -> d [1 ]) | (a -> d [2 ] ^ b -> d [2 ]) | (a -> d [3 ] ^ b -> d [3 ])) == 0 ;
796849}
797850
@@ -800,7 +853,10 @@ SECP256K1_INLINE static void secp256k1_scalar_mul_shift_var(secp256k1_scalar *r,
800853 unsigned int shiftlimbs ;
801854 unsigned int shiftlow ;
802855 unsigned int shifthigh ;
856+ secp256k1_scalar_verify (a );
857+ secp256k1_scalar_verify (b );
803858 VERIFY_CHECK (shift >= 256 );
859+
804860 secp256k1_scalar_mul_512 (l , a , b );
805861 shiftlimbs = shift >> 6 ;
806862 shiftlow = shift & 0x3F ;
@@ -810,18 +866,24 @@ SECP256K1_INLINE static void secp256k1_scalar_mul_shift_var(secp256k1_scalar *r,
810866 r -> d [2 ] = shift < 384 ? (l [2 + shiftlimbs ] >> shiftlow | (shift < 320 && shiftlow ? (l [3 + shiftlimbs ] << shifthigh ) : 0 )) : 0 ;
811867 r -> d [3 ] = shift < 320 ? (l [3 + shiftlimbs ] >> shiftlow ) : 0 ;
812868 secp256k1_scalar_cadd_bit (r , 0 , (l [(shift - 1 ) >> 6 ] >> ((shift - 1 ) & 0x3f )) & 1 );
869+
870+ secp256k1_scalar_verify (r );
813871}
814872
815873static SECP256K1_INLINE void secp256k1_scalar_cmov (secp256k1_scalar * r , const secp256k1_scalar * a , int flag ) {
816874 uint64_t mask0 , mask1 ;
817875 volatile int vflag = flag ;
876+ secp256k1_scalar_verify (a );
818877 SECP256K1_CHECKMEM_CHECK_VERIFY (r -> d , sizeof (r -> d ));
878+
819879 mask0 = vflag + ~((uint64_t )0 );
820880 mask1 = ~mask0 ;
821881 r -> d [0 ] = (r -> d [0 ] & mask0 ) | (a -> d [0 ] & mask1 );
822882 r -> d [1 ] = (r -> d [1 ] & mask0 ) | (a -> d [1 ] & mask1 );
823883 r -> d [2 ] = (r -> d [2 ] & mask0 ) | (a -> d [2 ] & mask1 );
824884 r -> d [3 ] = (r -> d [3 ] & mask0 ) | (a -> d [3 ] & mask1 );
885+
886+ secp256k1_scalar_verify (r );
825887}
826888
827889static void secp256k1_scalar_from_signed62 (secp256k1_scalar * r , const secp256k1_modinv64_signed62 * a ) {
@@ -841,18 +903,13 @@ static void secp256k1_scalar_from_signed62(secp256k1_scalar *r, const secp256k1_
841903 r -> d [2 ] = a2 >> 4 | a3 << 58 ;
842904 r -> d [3 ] = a3 >> 6 | a4 << 56 ;
843905
844- #ifdef VERIFY
845- VERIFY_CHECK (secp256k1_scalar_check_overflow (r ) == 0 );
846- #endif
906+ secp256k1_scalar_verify (r );
847907}
848908
849909static void secp256k1_scalar_to_signed62 (secp256k1_modinv64_signed62 * r , const secp256k1_scalar * a ) {
850910 const uint64_t M62 = UINT64_MAX >> 2 ;
851911 const uint64_t a0 = a -> d [0 ], a1 = a -> d [1 ], a2 = a -> d [2 ], a3 = a -> d [3 ];
852-
853- #ifdef VERIFY
854- VERIFY_CHECK (secp256k1_scalar_check_overflow (a ) == 0 );
855- #endif
912+ secp256k1_scalar_verify (a );
856913
857914 r -> v [0 ] = a0 & M62 ;
858915 r -> v [1 ] = (a0 >> 62 | a1 << 2 ) & M62 ;
@@ -871,10 +928,13 @@ static void secp256k1_scalar_inverse(secp256k1_scalar *r, const secp256k1_scalar
871928#ifdef VERIFY
872929 int zero_in = secp256k1_scalar_is_zero (x );
873930#endif
931+ secp256k1_scalar_verify (x );
932+
874933 secp256k1_scalar_to_signed62 (& s , x );
875934 secp256k1_modinv64 (& s , & secp256k1_const_modinfo_scalar );
876935 secp256k1_scalar_from_signed62 (r , & s );
877936
937+ secp256k1_scalar_verify (r );
878938#ifdef VERIFY
879939 VERIFY_CHECK (secp256k1_scalar_is_zero (r ) == zero_in );
880940#endif
@@ -885,16 +945,21 @@ static void secp256k1_scalar_inverse_var(secp256k1_scalar *r, const secp256k1_sc
885945#ifdef VERIFY
886946 int zero_in = secp256k1_scalar_is_zero (x );
887947#endif
948+ secp256k1_scalar_verify (x );
949+
888950 secp256k1_scalar_to_signed62 (& s , x );
889951 secp256k1_modinv64_var (& s , & secp256k1_const_modinfo_scalar );
890952 secp256k1_scalar_from_signed62 (r , & s );
891953
954+ secp256k1_scalar_verify (r );
892955#ifdef VERIFY
893956 VERIFY_CHECK (secp256k1_scalar_is_zero (r ) == zero_in );
894957#endif
895958}
896959
897960SECP256K1_INLINE static int secp256k1_scalar_is_even (const secp256k1_scalar * a ) {
961+ secp256k1_scalar_verify (a );
962+
898963 return !(a -> d [0 ] & 1 );
899964}
900965
0 commit comments