/* curve25519-ref10.c */

#include <stdint.h>

typedef int32_t crypto_int32;
typedef int64_t crypto_int64;
typedef uint64_t crypto_uint64;

typedef crypto_int32 fe[10];
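
/*
fe means field element: an element of GF(2^255-19) stored as ten int32
limbs t[0..9] in radix 2^25.5, representing
t[0] + 2^26 t[1] + 2^51 t[2] + 2^77 t[3] + ... + 2^230 t[9],
so even-indexed limbs carry about 26 bits and odd-indexed limbs about 25.
*/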

/*
h = 0
*/
void fe_0(fe h)
{
  h[0] = 0;
  h[1] = 0;
  h[2] = 0;
  h[3] = 0;
  h[4] = 0;
  h[5] = 0;
  h[6] = 0;
  h[7] = 0;
  h[8] = 0;
  h[9] = 0;
}

/*
h = 1
*/
void fe_1(fe h)
{
  h[0] = 1;
  h[1] = 0;
  h[2] = 0;
  h[3] = 0;
  h[4] = 0;
  h[5] = 0;
  h[6] = 0;
  h[7] = 0;
  h[8] = 0;
  h[9] = 0;
}

/*
h = f + g
Can overlap h with f or g.
Preconditions:
|f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
|g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
Postconditions:
|h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
*/
void fe_add(fe h,fe f,fe g)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int32 g0 = g[0];
  crypto_int32 g1 = g[1];
  crypto_int32 g2 = g[2];
  crypto_int32 g3 = g[3];
  crypto_int32 g4 = g[4];
  crypto_int32 g5 = g[5];
  crypto_int32 g6 = g[6];
  crypto_int32 g7 = g[7];
  crypto_int32 g8 = g[8];
  crypto_int32 g9 = g[9];
  crypto_int32 h0 = f0 + g0;
  crypto_int32 h1 = f1 + g1;
  crypto_int32 h2 = f2 + g2;
  crypto_int32 h3 = f3 + g3;
  crypto_int32 h4 = f4 + g4;
  crypto_int32 h5 = f5 + g5;
  crypto_int32 h6 = f6 + g6;
  crypto_int32 h7 = f7 + g7;
  crypto_int32 h8 = f8 + g8;
  crypto_int32 h9 = f9 + g9;
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
h = f
*/
void fe_copy(fe h,fe f)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  h[0] = f0;
  h[1] = f1;
  h[2] = f2;
  h[3] = f3;
  h[4] = f4;
  h[5] = f5;
  h[6] = f6;
  h[7] = f7;
  h[8] = f8;
  h[9] = f9;
}

/*
Replace (f,g) with (g,f) if b == 1;
replace (f,g) with (f,g) if b == 0.
Preconditions: b in {0,1}.
*/
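/*
Constant-time technique: b = -b turns b into a mask of all zeros (b == 0)
or all ones (b == 1); each x_i = (f_i ^ g_i) & mask is then XORed into
both limbs, swapping them exactly when the mask is all ones, with no
secret-dependent branches or memory addresses.
*/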
void fe_cswap(fe f,fe g,unsigned int b)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int32 g0 = g[0];
  crypto_int32 g1 = g[1];
  crypto_int32 g2 = g[2];
  crypto_int32 g3 = g[3];
  crypto_int32 g4 = g[4];
  crypto_int32 g5 = g[5];
  crypto_int32 g6 = g[6];
  crypto_int32 g7 = g[7];
  crypto_int32 g8 = g[8];
  crypto_int32 g9 = g[9];
  crypto_int32 x0 = f0 ^ g0;
  crypto_int32 x1 = f1 ^ g1;
  crypto_int32 x2 = f2 ^ g2;
  crypto_int32 x3 = f3 ^ g3;
  crypto_int32 x4 = f4 ^ g4;
  crypto_int32 x5 = f5 ^ g5;
  crypto_int32 x6 = f6 ^ g6;
  crypto_int32 x7 = f7 ^ g7;
  crypto_int32 x8 = f8 ^ g8;
  crypto_int32 x9 = f9 ^ g9;
  b = -b;
  x0 &= b;
  x1 &= b;
  x2 &= b;
  x3 &= b;
  x4 &= b;
  x5 &= b;
  x6 &= b;
  x7 &= b;
  x8 &= b;
  x9 &= b;
  f[0] = f0 ^ x0;
  f[1] = f1 ^ x1;
  f[2] = f2 ^ x2;
  f[3] = f3 ^ x3;
  f[4] = f4 ^ x4;
  f[5] = f5 ^ x5;
  f[6] = f6 ^ x6;
  f[7] = f7 ^ x7;
  f[8] = f8 ^ x8;
  f[9] = f9 ^ x9;
  g[0] = g0 ^ x0;
  g[1] = g1 ^ x1;
  g[2] = g2 ^ x2;
  g[3] = g3 ^ x3;
  g[4] = g4 ^ x4;
  g[5] = g5 ^ x5;
  g[6] = g6 ^ x6;
  g[7] = g7 ^ x7;
  g[8] = g8 ^ x8;
  g[9] = g9 ^ x9;
}
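
/* load_3 and load_4 read 3 or 4 consecutive bytes as a little-endian integer. */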
static crypto_uint64 load_3(const unsigned char *in)
{
  crypto_uint64 result;
  result = (crypto_uint64) in[0];
  result |= ((crypto_uint64) in[1]) << 8;
  result |= ((crypto_uint64) in[2]) << 16;
  return result;
}

static crypto_uint64 load_4(const unsigned char *in)
{
  crypto_uint64 result;
  result = (crypto_uint64) in[0];
  result |= ((crypto_uint64) in[1]) << 8;
  result |= ((crypto_uint64) in[2]) << 16;
  result |= ((crypto_uint64) in[3]) << 24;
  return result;
}
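
/*
fe_frombytes unpacks a 32-byte little-endian value into the ten limbs.
The shift amounts (6,5,3,2,...) place limb i at bit offset ceil(25.5*i),
i.e. 0,26,51,77,102,128,153,179,204,230; the carry chain then brings
each limb back into its 25- or 26-bit range.
*/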
void fe_frombytes(fe h,const unsigned char *s)
{
  crypto_int64 h0 = load_4(s);
  crypto_int64 h1 = load_3(s + 4) << 6;
  crypto_int64 h2 = load_3(s + 7) << 5;
  crypto_int64 h3 = load_3(s + 10) << 3;
  crypto_int64 h4 = load_3(s + 13) << 2;
  crypto_int64 h5 = load_4(s + 16);
  crypto_int64 h6 = load_3(s + 20) << 7;
  crypto_int64 h7 = load_3(s + 23) << 5;
  crypto_int64 h8 = load_3(s + 26) << 4;
  crypto_int64 h9 = load_3(s + 29) << 2;
  crypto_int64 carry0;
  crypto_int64 carry1;
  crypto_int64 carry2;
  crypto_int64 carry3;
  crypto_int64 carry4;
  crypto_int64 carry5;
  crypto_int64 carry6;
  crypto_int64 carry7;
  crypto_int64 carry8;
  crypto_int64 carry9;
  carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25;
  carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25;
  carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25;
  carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25;
  carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25;
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26;
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26;
  carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26;
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
h = f * g
Can overlap h with f or g.
Preconditions:
|f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
|g| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
Postconditions:
|h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
*/
/*
Notes on implementation strategy:
Using schoolbook multiplication.
Karatsuba would save a little in some cost models.
Most multiplications by 2 and 19 are 32-bit precomputations;
cheaper than 64-bit postcomputations.
There is one remaining multiplication by 19 in the carry chain;
one *19 precomputation can be merged into this,
but the resulting data flow is considerably less clean.
There are 12 carries below.
10 of them are 2-way parallelizable and vectorizable.
Can get away with 11 carries, but then data flow is much deeper.
With tighter constraints on inputs can squeeze carries into int32.
*/
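/*
Where the 19s and 2s come from: limb i has weight 2^ceil(25.5 i), and a
product f_i*g_j with i+j >= 10 lands at weight >= 2^255, so it is folded
back into limb i+j-10 multiplied by 19, since 2^255 = 19 mod 2^255-19.
When i and j are both odd, ceil(25.5 i)+ceil(25.5 j) exceeds
ceil(25.5 (i+j)) by 1, costing an extra factor of 2; hence the doubled f
limbs, the 19*g limbs, and the _38 = 2*19 terms.
*/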
void fe_mul(fe h,fe f,fe g)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int32 g0 = g[0];
  crypto_int32 g1 = g[1];
  crypto_int32 g2 = g[2];
  crypto_int32 g3 = g[3];
  crypto_int32 g4 = g[4];
  crypto_int32 g5 = g[5];
  crypto_int32 g6 = g[6];
  crypto_int32 g7 = g[7];
  crypto_int32 g8 = g[8];
  crypto_int32 g9 = g[9];
  crypto_int32 g1_19 = 19 * g1; /* 1.4*2^29 */
  crypto_int32 g2_19 = 19 * g2; /* 1.4*2^30; still ok */
  crypto_int32 g3_19 = 19 * g3;
  crypto_int32 g4_19 = 19 * g4;
  crypto_int32 g5_19 = 19 * g5;
  crypto_int32 g6_19 = 19 * g6;
  crypto_int32 g7_19 = 19 * g7;
  crypto_int32 g8_19 = 19 * g8;
  crypto_int32 g9_19 = 19 * g9;
  crypto_int32 f1_2 = 2 * f1;
  crypto_int32 f3_2 = 2 * f3;
  crypto_int32 f5_2 = 2 * f5;
  crypto_int32 f7_2 = 2 * f7;
  crypto_int32 f9_2 = 2 * f9;
  crypto_int64 f0g0 = f0 * (crypto_int64) g0;
  crypto_int64 f0g1 = f0 * (crypto_int64) g1;
  crypto_int64 f0g2 = f0 * (crypto_int64) g2;
  crypto_int64 f0g3 = f0 * (crypto_int64) g3;
  crypto_int64 f0g4 = f0 * (crypto_int64) g4;
  crypto_int64 f0g5 = f0 * (crypto_int64) g5;
  crypto_int64 f0g6 = f0 * (crypto_int64) g6;
  crypto_int64 f0g7 = f0 * (crypto_int64) g7;
  crypto_int64 f0g8 = f0 * (crypto_int64) g8;
  crypto_int64 f0g9 = f0 * (crypto_int64) g9;
  crypto_int64 f1g0 = f1 * (crypto_int64) g0;
  crypto_int64 f1g1_2 = f1_2 * (crypto_int64) g1;
  crypto_int64 f1g2 = f1 * (crypto_int64) g2;
  crypto_int64 f1g3_2 = f1_2 * (crypto_int64) g3;
  crypto_int64 f1g4 = f1 * (crypto_int64) g4;
  crypto_int64 f1g5_2 = f1_2 * (crypto_int64) g5;
  crypto_int64 f1g6 = f1 * (crypto_int64) g6;
  crypto_int64 f1g7_2 = f1_2 * (crypto_int64) g7;
  crypto_int64 f1g8 = f1 * (crypto_int64) g8;
  crypto_int64 f1g9_38 = f1_2 * (crypto_int64) g9_19;
  crypto_int64 f2g0 = f2 * (crypto_int64) g0;
  crypto_int64 f2g1 = f2 * (crypto_int64) g1;
  crypto_int64 f2g2 = f2 * (crypto_int64) g2;
  crypto_int64 f2g3 = f2 * (crypto_int64) g3;
  crypto_int64 f2g4 = f2 * (crypto_int64) g4;
  crypto_int64 f2g5 = f2 * (crypto_int64) g5;
  crypto_int64 f2g6 = f2 * (crypto_int64) g6;
  crypto_int64 f2g7 = f2 * (crypto_int64) g7;
  crypto_int64 f2g8_19 = f2 * (crypto_int64) g8_19;
  crypto_int64 f2g9_19 = f2 * (crypto_int64) g9_19;
  crypto_int64 f3g0 = f3 * (crypto_int64) g0;
  crypto_int64 f3g1_2 = f3_2 * (crypto_int64) g1;
  crypto_int64 f3g2 = f3 * (crypto_int64) g2;
  crypto_int64 f3g3_2 = f3_2 * (crypto_int64) g3;
  crypto_int64 f3g4 = f3 * (crypto_int64) g4;
  crypto_int64 f3g5_2 = f3_2 * (crypto_int64) g5;
  crypto_int64 f3g6 = f3 * (crypto_int64) g6;
  crypto_int64 f3g7_38 = f3_2 * (crypto_int64) g7_19;
  crypto_int64 f3g8_19 = f3 * (crypto_int64) g8_19;
  crypto_int64 f3g9_38 = f3_2 * (crypto_int64) g9_19;
  crypto_int64 f4g0 = f4 * (crypto_int64) g0;
  crypto_int64 f4g1 = f4 * (crypto_int64) g1;
  crypto_int64 f4g2 = f4 * (crypto_int64) g2;
  crypto_int64 f4g3 = f4 * (crypto_int64) g3;
  crypto_int64 f4g4 = f4 * (crypto_int64) g4;
  crypto_int64 f4g5 = f4 * (crypto_int64) g5;
  crypto_int64 f4g6_19 = f4 * (crypto_int64) g6_19;
  crypto_int64 f4g7_19 = f4 * (crypto_int64) g7_19;
  crypto_int64 f4g8_19 = f4 * (crypto_int64) g8_19;
  crypto_int64 f4g9_19 = f4 * (crypto_int64) g9_19;
  crypto_int64 f5g0 = f5 * (crypto_int64) g0;
  crypto_int64 f5g1_2 = f5_2 * (crypto_int64) g1;
  crypto_int64 f5g2 = f5 * (crypto_int64) g2;
  crypto_int64 f5g3_2 = f5_2 * (crypto_int64) g3;
  crypto_int64 f5g4 = f5 * (crypto_int64) g4;
  crypto_int64 f5g5_38 = f5_2 * (crypto_int64) g5_19;
  crypto_int64 f5g6_19 = f5 * (crypto_int64) g6_19;
  crypto_int64 f5g7_38 = f5_2 * (crypto_int64) g7_19;
  crypto_int64 f5g8_19 = f5 * (crypto_int64) g8_19;
  crypto_int64 f5g9_38 = f5_2 * (crypto_int64) g9_19;
  crypto_int64 f6g0 = f6 * (crypto_int64) g0;
  crypto_int64 f6g1 = f6 * (crypto_int64) g1;
  crypto_int64 f6g2 = f6 * (crypto_int64) g2;
  crypto_int64 f6g3 = f6 * (crypto_int64) g3;
  crypto_int64 f6g4_19 = f6 * (crypto_int64) g4_19;
  crypto_int64 f6g5_19 = f6 * (crypto_int64) g5_19;
  crypto_int64 f6g6_19 = f6 * (crypto_int64) g6_19;
  crypto_int64 f6g7_19 = f6 * (crypto_int64) g7_19;
  crypto_int64 f6g8_19 = f6 * (crypto_int64) g8_19;
  crypto_int64 f6g9_19 = f6 * (crypto_int64) g9_19;
  crypto_int64 f7g0 = f7 * (crypto_int64) g0;
  crypto_int64 f7g1_2 = f7_2 * (crypto_int64) g1;
  crypto_int64 f7g2 = f7 * (crypto_int64) g2;
  crypto_int64 f7g3_38 = f7_2 * (crypto_int64) g3_19;
  crypto_int64 f7g4_19 = f7 * (crypto_int64) g4_19;
  crypto_int64 f7g5_38 = f7_2 * (crypto_int64) g5_19;
  crypto_int64 f7g6_19 = f7 * (crypto_int64) g6_19;
  crypto_int64 f7g7_38 = f7_2 * (crypto_int64) g7_19;
  crypto_int64 f7g8_19 = f7 * (crypto_int64) g8_19;
  crypto_int64 f7g9_38 = f7_2 * (crypto_int64) g9_19;
  crypto_int64 f8g0 = f8 * (crypto_int64) g0;
  crypto_int64 f8g1 = f8 * (crypto_int64) g1;
  crypto_int64 f8g2_19 = f8 * (crypto_int64) g2_19;
  crypto_int64 f8g3_19 = f8 * (crypto_int64) g3_19;
  crypto_int64 f8g4_19 = f8 * (crypto_int64) g4_19;
  crypto_int64 f8g5_19 = f8 * (crypto_int64) g5_19;
  crypto_int64 f8g6_19 = f8 * (crypto_int64) g6_19;
  crypto_int64 f8g7_19 = f8 * (crypto_int64) g7_19;
  crypto_int64 f8g8_19 = f8 * (crypto_int64) g8_19;
  crypto_int64 f8g9_19 = f8 * (crypto_int64) g9_19;
  crypto_int64 f9g0 = f9 * (crypto_int64) g0;
  crypto_int64 f9g1_38 = f9_2 * (crypto_int64) g1_19;
  crypto_int64 f9g2_19 = f9 * (crypto_int64) g2_19;
  crypto_int64 f9g3_38 = f9_2 * (crypto_int64) g3_19;
  crypto_int64 f9g4_19 = f9 * (crypto_int64) g4_19;
  crypto_int64 f9g5_38 = f9_2 * (crypto_int64) g5_19;
  crypto_int64 f9g6_19 = f9 * (crypto_int64) g6_19;
  crypto_int64 f9g7_38 = f9_2 * (crypto_int64) g7_19;
  crypto_int64 f9g8_19 = f9 * (crypto_int64) g8_19;
  crypto_int64 f9g9_38 = f9_2 * (crypto_int64) g9_19;
  crypto_int64 h0 = f0g0+f1g9_38+f2g8_19+f3g7_38+f4g6_19+f5g5_38+f6g4_19+f7g3_38+f8g2_19+f9g1_38;
  crypto_int64 h1 = f0g1+f1g0 +f2g9_19+f3g8_19+f4g7_19+f5g6_19+f6g5_19+f7g4_19+f8g3_19+f9g2_19;
  crypto_int64 h2 = f0g2+f1g1_2 +f2g0 +f3g9_38+f4g8_19+f5g7_38+f6g6_19+f7g5_38+f8g4_19+f9g3_38;
  crypto_int64 h3 = f0g3+f1g2 +f2g1 +f3g0 +f4g9_19+f5g8_19+f6g7_19+f7g6_19+f8g5_19+f9g4_19;
  crypto_int64 h4 = f0g4+f1g3_2 +f2g2 +f3g1_2 +f4g0 +f5g9_38+f6g8_19+f7g7_38+f8g6_19+f9g5_38;
  crypto_int64 h5 = f0g5+f1g4 +f2g3 +f3g2 +f4g1 +f5g0 +f6g9_19+f7g8_19+f8g7_19+f9g6_19;
  crypto_int64 h6 = f0g6+f1g5_2 +f2g4 +f3g3_2 +f4g2 +f5g1_2 +f6g0 +f7g9_38+f8g8_19+f9g7_38;
  crypto_int64 h7 = f0g7+f1g6 +f2g5 +f3g4 +f4g3 +f5g2 +f6g1 +f7g0 +f8g9_19+f9g8_19;
  crypto_int64 h8 = f0g8+f1g7_2 +f2g6 +f3g5_2 +f4g4 +f5g3_2 +f6g2 +f7g1_2 +f8g0 +f9g9_38;
  crypto_int64 h9 = f0g9+f1g8 +f2g7 +f3g6 +f4g5 +f5g4 +f6g3 +f7g2 +f8g1 +f9g0 ;
  crypto_int64 carry0;
  crypto_int64 carry1;
  crypto_int64 carry2;
  crypto_int64 carry3;
  crypto_int64 carry4;
  crypto_int64 carry5;
  crypto_int64 carry6;
  crypto_int64 carry7;
  crypto_int64 carry8;
  crypto_int64 carry9;
  /*
  |h0| <= (1.1*1.1*2^52*(1+19+19+19+19)+1.1*1.1*2^50*(38+38+38+38+38))
  i.e. |h0| <= 1.2*2^59; narrower ranges for h2, h4, h6, h8
  |h1| <= (1.1*1.1*2^51*(1+1+19+19+19+19+19+19+19+19))
  i.e. |h1| <= 1.5*2^58; narrower ranges for h3, h5, h7, h9
  */
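  /*
  Carry idiom: carry = (h + 2^25) >> 26 is a rounded division by 2^26, so
  after h -= carry << 26 the limb satisfies |h| <= 2^25 (the 25-bit limbs
  use (h + 2^24) >> 25 and end with |h| <= 2^24).  The carry out of limb 9
  wraps around to limb 0 multiplied by 19 because 2^255 = 19 mod p.
  */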
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  /* |h0| <= 2^25 */
  /* |h4| <= 2^25 */
  /* |h1| <= 1.51*2^58 */
  /* |h5| <= 1.51*2^58 */
  carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25;
  carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25;
  /* |h1| <= 2^24; from now on fits into int32 */
  /* |h5| <= 2^24; from now on fits into int32 */
  /* |h2| <= 1.21*2^59 */
  /* |h6| <= 1.21*2^59 */
  carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26;
  carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26;
  /* |h2| <= 2^25; from now on fits into int32 unchanged */
  /* |h6| <= 2^25; from now on fits into int32 unchanged */
  /* |h3| <= 1.51*2^58 */
  /* |h7| <= 1.51*2^58 */
  carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25;
  carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25;
  /* |h3| <= 2^24; from now on fits into int32 unchanged */
  /* |h7| <= 2^24; from now on fits into int32 unchanged */
  /* |h4| <= 1.52*2^33 */
  /* |h8| <= 1.52*2^33 */
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26;
  /* |h4| <= 2^25; from now on fits into int32 unchanged */
  /* |h8| <= 2^25; from now on fits into int32 unchanged */
  /* |h5| <= 1.01*2^24 */
  /* |h9| <= 1.51*2^58 */
  carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25;
  /* |h9| <= 2^24; from now on fits into int32 unchanged */
  /* |h0| <= 1.8*2^37 */
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  /* |h0| <= 2^25; from now on fits into int32 unchanged */
  /* |h1| <= 1.01*2^24 */
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
h = f * 121666
Can overlap h with f.
Preconditions:
|f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
Postconditions:
|h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
*/
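/*
121666 = (486662+2)/4 = (A+2)/4 for the Montgomery curve
y^2 = x^3 + 486662 x^2 + x; it is the a24 constant used in the x-only
doubling step of the ladder in crypto_scalarmult_ref10 below.
*/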
void fe_mul121666(fe h,fe f)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int64 h0 = f0 * (crypto_int64) 121666;
  crypto_int64 h1 = f1 * (crypto_int64) 121666;
  crypto_int64 h2 = f2 * (crypto_int64) 121666;
  crypto_int64 h3 = f3 * (crypto_int64) 121666;
  crypto_int64 h4 = f4 * (crypto_int64) 121666;
  crypto_int64 h5 = f5 * (crypto_int64) 121666;
  crypto_int64 h6 = f6 * (crypto_int64) 121666;
  crypto_int64 h7 = f7 * (crypto_int64) 121666;
  crypto_int64 h8 = f8 * (crypto_int64) 121666;
  crypto_int64 h9 = f9 * (crypto_int64) 121666;
  crypto_int64 carry0;
  crypto_int64 carry1;
  crypto_int64 carry2;
  crypto_int64 carry3;
  crypto_int64 carry4;
  crypto_int64 carry5;
  crypto_int64 carry6;
  crypto_int64 carry7;
  crypto_int64 carry8;
  crypto_int64 carry9;
  carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25;
  carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25;
  carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25;
  carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25;
  carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25;
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26;
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26;
  carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26;
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
h = f * f
Can overlap h with f.
Preconditions:
|f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
Postconditions:
|h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
*/
/*
See fe_mul.c for discussion of implementation strategy.
*/
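/*
fe_sq is fe_mul specialized to g = f: the symmetric cross terms f_i*f_j
and f_j*f_i are merged, roughly halving the number of 64-bit products.
*/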
void fe_sq(fe h,fe f)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int32 f0_2 = 2 * f0;
  crypto_int32 f1_2 = 2 * f1;
  crypto_int32 f2_2 = 2 * f2;
  crypto_int32 f3_2 = 2 * f3;
  crypto_int32 f4_2 = 2 * f4;
  crypto_int32 f5_2 = 2 * f5;
  crypto_int32 f6_2 = 2 * f6;
  crypto_int32 f7_2 = 2 * f7;
  crypto_int32 f5_38 = 38 * f5; /* 1.31*2^30 */
  crypto_int32 f6_19 = 19 * f6; /* 1.31*2^30 */
  crypto_int32 f7_38 = 38 * f7; /* 1.31*2^30 */
  crypto_int32 f8_19 = 19 * f8; /* 1.31*2^30 */
  crypto_int32 f9_38 = 38 * f9; /* 1.31*2^30 */
  crypto_int64 f0f0 = f0 * (crypto_int64) f0;
  crypto_int64 f0f1_2 = f0_2 * (crypto_int64) f1;
  crypto_int64 f0f2_2 = f0_2 * (crypto_int64) f2;
  crypto_int64 f0f3_2 = f0_2 * (crypto_int64) f3;
  crypto_int64 f0f4_2 = f0_2 * (crypto_int64) f4;
  crypto_int64 f0f5_2 = f0_2 * (crypto_int64) f5;
  crypto_int64 f0f6_2 = f0_2 * (crypto_int64) f6;
  crypto_int64 f0f7_2 = f0_2 * (crypto_int64) f7;
  crypto_int64 f0f8_2 = f0_2 * (crypto_int64) f8;
  crypto_int64 f0f9_2 = f0_2 * (crypto_int64) f9;
  crypto_int64 f1f1_2 = f1_2 * (crypto_int64) f1;
  crypto_int64 f1f2_2 = f1_2 * (crypto_int64) f2;
  crypto_int64 f1f3_4 = f1_2 * (crypto_int64) f3_2;
  crypto_int64 f1f4_2 = f1_2 * (crypto_int64) f4;
  crypto_int64 f1f5_4 = f1_2 * (crypto_int64) f5_2;
  crypto_int64 f1f6_2 = f1_2 * (crypto_int64) f6;
  crypto_int64 f1f7_4 = f1_2 * (crypto_int64) f7_2;
  crypto_int64 f1f8_2 = f1_2 * (crypto_int64) f8;
  crypto_int64 f1f9_76 = f1_2 * (crypto_int64) f9_38;
  crypto_int64 f2f2 = f2 * (crypto_int64) f2;
  crypto_int64 f2f3_2 = f2_2 * (crypto_int64) f3;
  crypto_int64 f2f4_2 = f2_2 * (crypto_int64) f4;
  crypto_int64 f2f5_2 = f2_2 * (crypto_int64) f5;
  crypto_int64 f2f6_2 = f2_2 * (crypto_int64) f6;
  crypto_int64 f2f7_2 = f2_2 * (crypto_int64) f7;
  crypto_int64 f2f8_38 = f2_2 * (crypto_int64) f8_19;
  crypto_int64 f2f9_38 = f2 * (crypto_int64) f9_38;
  crypto_int64 f3f3_2 = f3_2 * (crypto_int64) f3;
  crypto_int64 f3f4_2 = f3_2 * (crypto_int64) f4;
  crypto_int64 f3f5_4 = f3_2 * (crypto_int64) f5_2;
  crypto_int64 f3f6_2 = f3_2 * (crypto_int64) f6;
  crypto_int64 f3f7_76 = f3_2 * (crypto_int64) f7_38;
  crypto_int64 f3f8_38 = f3_2 * (crypto_int64) f8_19;
  crypto_int64 f3f9_76 = f3_2 * (crypto_int64) f9_38;
  crypto_int64 f4f4 = f4 * (crypto_int64) f4;
  crypto_int64 f4f5_2 = f4_2 * (crypto_int64) f5;
  crypto_int64 f4f6_38 = f4_2 * (crypto_int64) f6_19;
  crypto_int64 f4f7_38 = f4 * (crypto_int64) f7_38;
  crypto_int64 f4f8_38 = f4_2 * (crypto_int64) f8_19;
  crypto_int64 f4f9_38 = f4 * (crypto_int64) f9_38;
  crypto_int64 f5f5_38 = f5 * (crypto_int64) f5_38;
  crypto_int64 f5f6_38 = f5_2 * (crypto_int64) f6_19;
  crypto_int64 f5f7_76 = f5_2 * (crypto_int64) f7_38;
  crypto_int64 f5f8_38 = f5_2 * (crypto_int64) f8_19;
  crypto_int64 f5f9_76 = f5_2 * (crypto_int64) f9_38;
  crypto_int64 f6f6_19 = f6 * (crypto_int64) f6_19;
  crypto_int64 f6f7_38 = f6 * (crypto_int64) f7_38;
  crypto_int64 f6f8_38 = f6_2 * (crypto_int64) f8_19;
  crypto_int64 f6f9_38 = f6 * (crypto_int64) f9_38;
  crypto_int64 f7f7_38 = f7 * (crypto_int64) f7_38;
  crypto_int64 f7f8_38 = f7_2 * (crypto_int64) f8_19;
  crypto_int64 f7f9_76 = f7_2 * (crypto_int64) f9_38;
  crypto_int64 f8f8_19 = f8 * (crypto_int64) f8_19;
  crypto_int64 f8f9_38 = f8 * (crypto_int64) f9_38;
  crypto_int64 f9f9_38 = f9 * (crypto_int64) f9_38;
  crypto_int64 h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38;
  crypto_int64 h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38;
  crypto_int64 h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19;
  crypto_int64 h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38;
  crypto_int64 h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38;
  crypto_int64 h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38;
  crypto_int64 h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19;
  crypto_int64 h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38;
  crypto_int64 h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38;
  crypto_int64 h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2;
  crypto_int64 carry0;
  crypto_int64 carry1;
  crypto_int64 carry2;
  crypto_int64 carry3;
  crypto_int64 carry4;
  crypto_int64 carry5;
  crypto_int64 carry6;
  crypto_int64 carry7;
  crypto_int64 carry8;
  crypto_int64 carry9;
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= carry1 << 25;
  carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= carry5 << 25;
  carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= carry2 << 26;
  carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= carry6 << 26;
  carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= carry3 << 25;
  carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= carry7 << 25;
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= carry8 << 26;
  carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= carry9 << 25;
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= carry0 << 26;
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
h = f - g
Can overlap h with f or g.
Preconditions:
|f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
|g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
Postconditions:
|h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc.
*/
void fe_sub(fe h,fe f,fe g)
{
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  crypto_int32 g0 = g[0];
  crypto_int32 g1 = g[1];
  crypto_int32 g2 = g[2];
  crypto_int32 g3 = g[3];
  crypto_int32 g4 = g[4];
  crypto_int32 g5 = g[5];
  crypto_int32 g6 = g[6];
  crypto_int32 g7 = g[7];
  crypto_int32 g8 = g[8];
  crypto_int32 g9 = g[9];
  crypto_int32 h0 = f0 - g0;
  crypto_int32 h1 = f1 - g1;
  crypto_int32 h2 = f2 - g2;
  crypto_int32 h3 = f3 - g3;
  crypto_int32 h4 = f4 - g4;
  crypto_int32 h5 = f5 - g5;
  crypto_int32 h6 = f6 - g6;
  crypto_int32 h7 = f7 - g7;
  crypto_int32 h8 = f8 - g8;
  crypto_int32 h9 = f9 - g9;
  h[0] = h0;
  h[1] = h1;
  h[2] = h2;
  h[3] = h3;
  h[4] = h4;
  h[5] = h5;
  h[6] = h6;
  h[7] = h7;
  h[8] = h8;
  h[9] = h9;
}

/*
Preconditions:
|h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc.
Write p=2^255-19; q=floor(h/p).
Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))).
Proof:
Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4.
Also have |h-2^230 h9|<2^230 so |19 2^(-255)(h-2^230 h9)|<1/4.
Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9).
Then 0<y<1.
Write r=h-pq.
Have 0<=r<=p-1=2^255-20.
Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1.
Write x=r+19(2^-255)r+y.
Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q.
Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25) h9 + 2^(-1))
so floor(2^(-255)(h + 19 2^(-25) h9 + 2^(-1))) = q.
*/
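/*
In other words: the shifts below compute q = floor(h/p) without branches,
then adding 19*q and discarding the carry out of bit 255 is the same as
subtracting (2^255-19)*q, leaving the unique representative in [0,p-1],
which is then packed into 32 little-endian bytes.
*/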
void fe_tobytes(unsigned char *s,fe h)
{
  crypto_int32 h0 = h[0];
  crypto_int32 h1 = h[1];
  crypto_int32 h2 = h[2];
  crypto_int32 h3 = h[3];
  crypto_int32 h4 = h[4];
  crypto_int32 h5 = h[5];
  crypto_int32 h6 = h[6];
  crypto_int32 h7 = h[7];
  crypto_int32 h8 = h[8];
  crypto_int32 h9 = h[9];
  crypto_int32 q;
  crypto_int32 carry0;
  crypto_int32 carry1;
  crypto_int32 carry2;
  crypto_int32 carry3;
  crypto_int32 carry4;
  crypto_int32 carry5;
  crypto_int32 carry6;
  crypto_int32 carry7;
  crypto_int32 carry8;
  crypto_int32 carry9;
  q = (19 * h9 + (((crypto_int32) 1) << 24)) >> 25;
  q = (h0 + q) >> 26;
  q = (h1 + q) >> 25;
  q = (h2 + q) >> 26;
  q = (h3 + q) >> 25;
  q = (h4 + q) >> 26;
  q = (h5 + q) >> 25;
  q = (h6 + q) >> 26;
  q = (h7 + q) >> 25;
  q = (h8 + q) >> 26;
  q = (h9 + q) >> 25;
  /* Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20. */
  h0 += 19 * q;
  /* Goal: Output h-2^255 q, which is between 0 and 2^255-20. */
  carry0 = h0 >> 26; h1 += carry0; h0 -= carry0 << 26;
  carry1 = h1 >> 25; h2 += carry1; h1 -= carry1 << 25;
  carry2 = h2 >> 26; h3 += carry2; h2 -= carry2 << 26;
  carry3 = h3 >> 25; h4 += carry3; h3 -= carry3 << 25;
  carry4 = h4 >> 26; h5 += carry4; h4 -= carry4 << 26;
  carry5 = h5 >> 25; h6 += carry5; h5 -= carry5 << 25;
  carry6 = h6 >> 26; h7 += carry6; h6 -= carry6 << 26;
  carry7 = h7 >> 25; h8 += carry7; h7 -= carry7 << 25;
  carry8 = h8 >> 26; h9 += carry8; h8 -= carry8 << 26;
  carry9 = h9 >> 25; h9 -= carry9 << 25;
  /* h10 = carry9 */
  /*
  Goal: Output h0+...+2^255 h10-2^255 q, which is between 0 and 2^255-20.
  Have h0+...+2^230 h9 between 0 and 2^255-1;
  evidently 2^255 h10-2^255 q = 0.
  Goal: Output h0+...+2^230 h9.
  */
  s[0] = h0 >> 0;
  s[1] = h0 >> 8;
  s[2] = h0 >> 16;
  s[3] = (h0 >> 24) | (h1 << 2);
  s[4] = h1 >> 6;
  s[5] = h1 >> 14;
  s[6] = (h1 >> 22) | (h2 << 3);
  s[7] = h2 >> 5;
  s[8] = h2 >> 13;
  s[9] = (h2 >> 21) | (h3 << 5);
  s[10] = h3 >> 3;
  s[11] = h3 >> 11;
  s[12] = (h3 >> 19) | (h4 << 6);
  s[13] = h4 >> 2;
  s[14] = h4 >> 10;
  s[15] = h4 >> 18;
  s[16] = h5 >> 0;
  s[17] = h5 >> 8;
  s[18] = h5 >> 16;
  s[19] = (h5 >> 24) | (h6 << 1);
  s[20] = h6 >> 7;
  s[21] = h6 >> 15;
  s[22] = (h6 >> 23) | (h7 << 3);
  s[23] = h7 >> 5;
  s[24] = h7 >> 13;
  s[25] = (h7 >> 21) | (h8 << 4);
  s[26] = h8 >> 4;
  s[27] = h8 >> 12;
  s[28] = (h8 >> 20) | (h9 << 6);
  s[29] = h9 >> 2;
  s[30] = h9 >> 10;
  s[31] = h9 >> 18;
}
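
/*
fe_invert computes out = z^(p-2) = z^(2^255-21), which equals 1/z for
nonzero z by Fermat's little theorem.  The fixed sequence of squarings
and multiplications below is an addition chain for the exponent
2^255-21, so the running time does not depend on the value of z.
*/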
void fe_invert(fe out,fe z)
{
  fe t0;
  fe t1;
  fe t2;
  fe t3;
  int i;
  /* qhasm: fe z1 */
  /* qhasm: fe z2 */
  /* qhasm: fe z8 */
  /* qhasm: fe z9 */
  /* qhasm: fe z11 */
  /* qhasm: fe z22 */
  /* qhasm: fe z_5_0 */
  /* qhasm: fe z_10_5 */
  /* qhasm: fe z_10_0 */
  /* qhasm: fe z_20_10 */
  /* qhasm: fe z_20_0 */
  /* qhasm: fe z_40_20 */
  /* qhasm: fe z_40_0 */
  /* qhasm: fe z_50_10 */
  /* qhasm: fe z_50_0 */
  /* qhasm: fe z_100_50 */
  /* qhasm: fe z_100_0 */
  /* qhasm: fe z_200_100 */
  /* qhasm: fe z_200_0 */
  /* qhasm: fe z_250_50 */
  /* qhasm: fe z_250_0 */
  /* qhasm: fe z_255_5 */
  /* qhasm: fe z_255_21 */
  /* qhasm: enter pow225521 */
  /* qhasm: z2 = z1^2^1 */
  /* asm 1: fe_sq(>z2=fe#1,<z1=fe#11); for (i = 1;i < 1;++i) fe_sq(>z2=fe#1,>z2=fe#1); */
  /* asm 2: fe_sq(>z2=t0,<z1=z); for (i = 1;i < 1;++i) fe_sq(>z2=t0,>z2=t0); */
  fe_sq(t0,z); for (i = 1;i < 1;++i) fe_sq(t0,t0);
  /* qhasm: z8 = z2^2^2 */
  /* asm 1: fe_sq(>z8=fe#2,<z2=fe#1); for (i = 1;i < 2;++i) fe_sq(>z8=fe#2,>z8=fe#2); */
  /* asm 2: fe_sq(>z8=t1,<z2=t0); for (i = 1;i < 2;++i) fe_sq(>z8=t1,>z8=t1); */
  fe_sq(t1,t0); for (i = 1;i < 2;++i) fe_sq(t1,t1);
  /* qhasm: z9 = z1*z8 */
  /* asm 1: fe_mul(>z9=fe#2,<z1=fe#11,<z8=fe#2); */
  /* asm 2: fe_mul(>z9=t1,<z1=z,<z8=t1); */
  fe_mul(t1,z,t1);
  /* qhasm: z11 = z2*z9 */
  /* asm 1: fe_mul(>z11=fe#1,<z2=fe#1,<z9=fe#2); */
  /* asm 2: fe_mul(>z11=t0,<z2=t0,<z9=t1); */
  fe_mul(t0,t0,t1);
  /* qhasm: z22 = z11^2^1 */
  /* asm 1: fe_sq(>z22=fe#3,<z11=fe#1); for (i = 1;i < 1;++i) fe_sq(>z22=fe#3,>z22=fe#3); */
  /* asm 2: fe_sq(>z22=t2,<z11=t0); for (i = 1;i < 1;++i) fe_sq(>z22=t2,>z22=t2); */
  fe_sq(t2,t0); for (i = 1;i < 1;++i) fe_sq(t2,t2);
  /* qhasm: z_5_0 = z9*z22 */
  /* asm 1: fe_mul(>z_5_0=fe#2,<z9=fe#2,<z22=fe#3); */
  /* asm 2: fe_mul(>z_5_0=t1,<z9=t1,<z22=t2); */
  fe_mul(t1,t1,t2);
  /* qhasm: z_10_5 = z_5_0^2^5 */
  /* asm 1: fe_sq(>z_10_5=fe#3,<z_5_0=fe#2); for (i = 1;i < 5;++i) fe_sq(>z_10_5=fe#3,>z_10_5=fe#3); */
  /* asm 2: fe_sq(>z_10_5=t2,<z_5_0=t1); for (i = 1;i < 5;++i) fe_sq(>z_10_5=t2,>z_10_5=t2); */
  fe_sq(t2,t1); for (i = 1;i < 5;++i) fe_sq(t2,t2);
  /* qhasm: z_10_0 = z_10_5*z_5_0 */
  /* asm 1: fe_mul(>z_10_0=fe#2,<z_10_5=fe#3,<z_5_0=fe#2); */
  /* asm 2: fe_mul(>z_10_0=t1,<z_10_5=t2,<z_5_0=t1); */
  fe_mul(t1,t2,t1);
  /* qhasm: z_20_10 = z_10_0^2^10 */
  /* asm 1: fe_sq(>z_20_10=fe#3,<z_10_0=fe#2); for (i = 1;i < 10;++i) fe_sq(>z_20_10=fe#3,>z_20_10=fe#3); */
  /* asm 2: fe_sq(>z_20_10=t2,<z_10_0=t1); for (i = 1;i < 10;++i) fe_sq(>z_20_10=t2,>z_20_10=t2); */
  fe_sq(t2,t1); for (i = 1;i < 10;++i) fe_sq(t2,t2);
  /* qhasm: z_20_0 = z_20_10*z_10_0 */
  /* asm 1: fe_mul(>z_20_0=fe#3,<z_20_10=fe#3,<z_10_0=fe#2); */
  /* asm 2: fe_mul(>z_20_0=t2,<z_20_10=t2,<z_10_0=t1); */
  fe_mul(t2,t2,t1);
  /* qhasm: z_40_20 = z_20_0^2^20 */
  /* asm 1: fe_sq(>z_40_20=fe#4,<z_20_0=fe#3); for (i = 1;i < 20;++i) fe_sq(>z_40_20=fe#4,>z_40_20=fe#4); */
  /* asm 2: fe_sq(>z_40_20=t3,<z_20_0=t2); for (i = 1;i < 20;++i) fe_sq(>z_40_20=t3,>z_40_20=t3); */
  fe_sq(t3,t2); for (i = 1;i < 20;++i) fe_sq(t3,t3);
  /* qhasm: z_40_0 = z_40_20*z_20_0 */
  /* asm 1: fe_mul(>z_40_0=fe#3,<z_40_20=fe#4,<z_20_0=fe#3); */
  /* asm 2: fe_mul(>z_40_0=t2,<z_40_20=t3,<z_20_0=t2); */
  fe_mul(t2,t3,t2);
  /* qhasm: z_50_10 = z_40_0^2^10 */
  /* asm 1: fe_sq(>z_50_10=fe#3,<z_40_0=fe#3); for (i = 1;i < 10;++i) fe_sq(>z_50_10=fe#3,>z_50_10=fe#3); */
  /* asm 2: fe_sq(>z_50_10=t2,<z_40_0=t2); for (i = 1;i < 10;++i) fe_sq(>z_50_10=t2,>z_50_10=t2); */
  fe_sq(t2,t2); for (i = 1;i < 10;++i) fe_sq(t2,t2);
  /* qhasm: z_50_0 = z_50_10*z_10_0 */
  /* asm 1: fe_mul(>z_50_0=fe#2,<z_50_10=fe#3,<z_10_0=fe#2); */
  /* asm 2: fe_mul(>z_50_0=t1,<z_50_10=t2,<z_10_0=t1); */
  fe_mul(t1,t2,t1);
  /* qhasm: z_100_50 = z_50_0^2^50 */
  /* asm 1: fe_sq(>z_100_50=fe#3,<z_50_0=fe#2); for (i = 1;i < 50;++i) fe_sq(>z_100_50=fe#3,>z_100_50=fe#3); */
  /* asm 2: fe_sq(>z_100_50=t2,<z_50_0=t1); for (i = 1;i < 50;++i) fe_sq(>z_100_50=t2,>z_100_50=t2); */
  fe_sq(t2,t1); for (i = 1;i < 50;++i) fe_sq(t2,t2);
  /* qhasm: z_100_0 = z_100_50*z_50_0 */
  /* asm 1: fe_mul(>z_100_0=fe#3,<z_100_50=fe#3,<z_50_0=fe#2); */
  /* asm 2: fe_mul(>z_100_0=t2,<z_100_50=t2,<z_50_0=t1); */
  fe_mul(t2,t2,t1);
  /* qhasm: z_200_100 = z_100_0^2^100 */
  /* asm 1: fe_sq(>z_200_100=fe#4,<z_100_0=fe#3); for (i = 1;i < 100;++i) fe_sq(>z_200_100=fe#4,>z_200_100=fe#4); */
  /* asm 2: fe_sq(>z_200_100=t3,<z_100_0=t2); for (i = 1;i < 100;++i) fe_sq(>z_200_100=t3,>z_200_100=t3); */
  fe_sq(t3,t2); for (i = 1;i < 100;++i) fe_sq(t3,t3);
  /* qhasm: z_200_0 = z_200_100*z_100_0 */
  /* asm 1: fe_mul(>z_200_0=fe#3,<z_200_100=fe#4,<z_100_0=fe#3); */
  /* asm 2: fe_mul(>z_200_0=t2,<z_200_100=t3,<z_100_0=t2); */
  fe_mul(t2,t3,t2);
  /* qhasm: z_250_50 = z_200_0^2^50 */
  /* asm 1: fe_sq(>z_250_50=fe#3,<z_200_0=fe#3); for (i = 1;i < 50;++i) fe_sq(>z_250_50=fe#3,>z_250_50=fe#3); */
  /* asm 2: fe_sq(>z_250_50=t2,<z_200_0=t2); for (i = 1;i < 50;++i) fe_sq(>z_250_50=t2,>z_250_50=t2); */
  fe_sq(t2,t2); for (i = 1;i < 50;++i) fe_sq(t2,t2);
  /* qhasm: z_250_0 = z_250_50*z_50_0 */
  /* asm 1: fe_mul(>z_250_0=fe#2,<z_250_50=fe#3,<z_50_0=fe#2); */
  /* asm 2: fe_mul(>z_250_0=t1,<z_250_50=t2,<z_50_0=t1); */
  fe_mul(t1,t2,t1);
  /* qhasm: z_255_5 = z_250_0^2^5 */
  /* asm 1: fe_sq(>z_255_5=fe#2,<z_250_0=fe#2); for (i = 1;i < 5;++i) fe_sq(>z_255_5=fe#2,>z_255_5=fe#2); */
  /* asm 2: fe_sq(>z_255_5=t1,<z_250_0=t1); for (i = 1;i < 5;++i) fe_sq(>z_255_5=t1,>z_255_5=t1); */
  fe_sq(t1,t1); for (i = 1;i < 5;++i) fe_sq(t1,t1);
  /* qhasm: z_255_21 = z_255_5*z11 */
  /* asm 1: fe_mul(>z_255_21=fe#12,<z_255_5=fe#2,<z11=fe#1); */
  /* asm 2: fe_mul(>z_255_21=out,<z_255_5=t1,<z11=t0); */
  fe_mul(out,t1,t0);
  /* qhasm: return */
  return;
}
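
/*
crypto_scalarmult_ref10(q,n,p): X25519 scalar multiplication.  n is a
32-byte scalar and p the 32-byte little-endian u-coordinate of a point
on Curve25519; q receives the u-coordinate of n*P.  The scalar is
clamped, a 255-step Montgomery ladder runs on projective pairs
(x2:z2) and (x3:z3), and one field inversion converts back to affine.
*/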
int crypto_scalarmult_ref10(unsigned char *q,
                            const unsigned char *n,
                            const unsigned char *p)
{
  unsigned char e[32];
  unsigned int i;
  fe x1;
  fe x2;
  fe z2;
  fe x3;
  fe z3;
  fe tmp0;
  fe tmp1;
  int pos;
  unsigned int swap;
  unsigned int b;
  for (i = 0;i < 32;++i) e[i] = n[i];
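  /*
  Standard X25519 clamping of the scalar: clear the low 3 bits so the
  scalar is a multiple of the cofactor 8, clear bit 255, and set bit 254
  to fix the scalar's length.
  */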
  e[0] &= 248;
  e[31] &= 127;
  e[31] |= 64;
  fe_frombytes(x1,p);
  fe_1(x2);
  fe_0(z2);
  fe_copy(x3,x1);
  fe_1(z3);
  swap = 0;
  for (pos = 254;pos >= 0;--pos) {
    b = e[pos / 8] >> (pos & 7);
    b &= 1;
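    /*
    swap ^= b lets the two fe_cswap calls apply only the net change
    relative to the previous iteration, so the ladder never swaps back
    unconditionally; swap = b records this round's choice.
    */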
    swap ^= b;
    fe_cswap(x2,x3,swap);
    fe_cswap(z2,z3,swap);
    swap = b;
    /* qhasm: fe X2 */
    /* qhasm: fe Z2 */
    /* qhasm: fe X3 */
    /* qhasm: fe Z3 */
    /* qhasm: fe X4 */
    /* qhasm: fe Z4 */
    /* qhasm: fe X5 */
    /* qhasm: fe Z5 */
    /* qhasm: fe A */
    /* qhasm: fe B */
    /* qhasm: fe C */
    /* qhasm: fe D */
    /* qhasm: fe E */
    /* qhasm: fe AA */
    /* qhasm: fe BB */
    /* qhasm: fe DA */
    /* qhasm: fe CB */
    /* qhasm: fe t0 */
    /* qhasm: fe t1 */
    /* qhasm: fe t2 */
    /* qhasm: fe t3 */
    /* qhasm: fe t4 */
    /* qhasm: enter ladder */
    /* qhasm: D = X3-Z3 */
    /* asm 1: fe_sub(>D=fe#5,<X3=fe#3,<Z3=fe#4); */
    /* asm 2: fe_sub(>D=tmp0,<X3=x3,<Z3=z3); */
    fe_sub(tmp0,x3,z3);
    /* qhasm: B = X2-Z2 */
    /* asm 1: fe_sub(>B=fe#6,<X2=fe#1,<Z2=fe#2); */
    /* asm 2: fe_sub(>B=tmp1,<X2=x2,<Z2=z2); */
    fe_sub(tmp1,x2,z2);
    /* qhasm: A = X2+Z2 */
    /* asm 1: fe_add(>A=fe#1,<X2=fe#1,<Z2=fe#2); */
    /* asm 2: fe_add(>A=x2,<X2=x2,<Z2=z2); */
    fe_add(x2,x2,z2);
    /* qhasm: C = X3+Z3 */
    /* asm 1: fe_add(>C=fe#2,<X3=fe#3,<Z3=fe#4); */
    /* asm 2: fe_add(>C=z2,<X3=x3,<Z3=z3); */
    fe_add(z2,x3,z3);
    /* qhasm: DA = D*A */
    /* asm 1: fe_mul(>DA=fe#4,<D=fe#5,<A=fe#1); */
    /* asm 2: fe_mul(>DA=z3,<D=tmp0,<A=x2); */
    fe_mul(z3,tmp0,x2);
    /* qhasm: CB = C*B */
    /* asm 1: fe_mul(>CB=fe#2,<C=fe#2,<B=fe#6); */
    /* asm 2: fe_mul(>CB=z2,<C=z2,<B=tmp1); */
    fe_mul(z2,z2,tmp1);
    /* qhasm: BB = B^2 */
    /* asm 1: fe_sq(>BB=fe#5,<B=fe#6); */
    /* asm 2: fe_sq(>BB=tmp0,<B=tmp1); */
    fe_sq(tmp0,tmp1);
    /* qhasm: AA = A^2 */
    /* asm 1: fe_sq(>AA=fe#6,<A=fe#1); */
    /* asm 2: fe_sq(>AA=tmp1,<A=x2); */
    fe_sq(tmp1,x2);
    /* qhasm: t0 = DA+CB */
    /* asm 1: fe_add(>t0=fe#3,<DA=fe#4,<CB=fe#2); */
    /* asm 2: fe_add(>t0=x3,<DA=z3,<CB=z2); */
    fe_add(x3,z3,z2);
    /* qhasm: assign x3 to t0 */
    /* qhasm: t1 = DA-CB */
    /* asm 1: fe_sub(>t1=fe#2,<DA=fe#4,<CB=fe#2); */
    /* asm 2: fe_sub(>t1=z2,<DA=z3,<CB=z2); */
    fe_sub(z2,z3,z2);
    /* qhasm: X4 = AA*BB */
    /* asm 1: fe_mul(>X4=fe#1,<AA=fe#6,<BB=fe#5); */
    /* asm 2: fe_mul(>X4=x2,<AA=tmp1,<BB=tmp0); */
    fe_mul(x2,tmp1,tmp0);
    /* qhasm: E = AA-BB */
    /* asm 1: fe_sub(>E=fe#6,<AA=fe#6,<BB=fe#5); */
    /* asm 2: fe_sub(>E=tmp1,<AA=tmp1,<BB=tmp0); */
    fe_sub(tmp1,tmp1,tmp0);
    /* qhasm: t2 = t1^2 */
    /* asm 1: fe_sq(>t2=fe#2,<t1=fe#2); */
    /* asm 2: fe_sq(>t2=z2,<t1=z2); */
    fe_sq(z2,z2);
    /* qhasm: t3 = a24*E */
    /* asm 1: fe_mul121666(>t3=fe#4,<E=fe#6); */
    /* asm 2: fe_mul121666(>t3=z3,<E=tmp1); */
    fe_mul121666(z3,tmp1);
    /* qhasm: X5 = t0^2 */
    /* asm 1: fe_sq(>X5=fe#3,<t0=fe#3); */
    /* asm 2: fe_sq(>X5=x3,<t0=x3); */
    fe_sq(x3,x3);
    /* qhasm: t4 = BB+t3 */
    /* asm 1: fe_add(>t4=fe#5,<BB=fe#5,<t3=fe#4); */
    /* asm 2: fe_add(>t4=tmp0,<BB=tmp0,<t3=z3); */
    fe_add(tmp0,tmp0,z3);
    /* qhasm: Z5 = X1*t2 */
    /* asm 1: fe_mul(>Z5=fe#4,x1,<t2=fe#2); */
    /* asm 2: fe_mul(>Z5=z3,x1,<t2=z2); */
    fe_mul(z3,x1,z2);
    /* qhasm: Z4 = E*t4 */
    /* asm 1: fe_mul(>Z4=fe#2,<E=fe#6,<t4=fe#5); */
    /* asm 2: fe_mul(>Z4=z2,<E=tmp1,<t4=tmp0); */
    fe_mul(z2,tmp1,tmp0);
    /* qhasm: return */
  }
  fe_cswap(x2,x3,swap);
  fe_cswap(z2,z3,swap);
  fe_invert(z2,z2);
  fe_mul(x2,x2,z2);
  fe_tobytes(q,x2);
  return 0;
}
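
/* The standard Curve25519 base point has u = 9, encoded little-endian. */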
static const unsigned char basepoint[32] = {9};

int crypto_scalarmult_base_ref10(unsigned char *q,const unsigned char *n)
{
  return crypto_scalarmult_ref10(q,n,basepoint);
}
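
/*
Example usage (an illustrative sketch, not part of the original file; the
buffer names are ours).  A Diffie-Hellman exchange built on these two
functions looks like:

  unsigned char secret_a[32], public_a[32], public_b[32], shared_a[32];
  // fill secret_a with 32 bytes from a cryptographically secure RNG,
  // and obtain public_b from the other party
  crypto_scalarmult_base_ref10(public_a, secret_a);      // A's public key
  crypto_scalarmult_ref10(shared_a, secret_a, public_b); // shared secret

The other side computes the same 32-byte value from (secret_b, public_a);
the result should normally be hashed before use as a symmetric key.
*/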