/* fe_sq2.c */
#include "fe.h"
#include "crypto_int64.h"

/*
h = 2 * f * f
Can overlap h with f.

Preconditions:
   |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc.

Postconditions:
   |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc.
*/

/*
See fe_mul.c for discussion of implementation strategy.
*/
  14. void fe_sq2(fe h,const fe f)
  15. {
  16. crypto_int32 f0 = f[0];
  17. crypto_int32 f1 = f[1];
  18. crypto_int32 f2 = f[2];
  19. crypto_int32 f3 = f[3];
  20. crypto_int32 f4 = f[4];
  21. crypto_int32 f5 = f[5];
  22. crypto_int32 f6 = f[6];
  23. crypto_int32 f7 = f[7];
  24. crypto_int32 f8 = f[8];
  25. crypto_int32 f9 = f[9];
  26. crypto_int32 f0_2 = 2 * f0;
  27. crypto_int32 f1_2 = 2 * f1;
  28. crypto_int32 f2_2 = 2 * f2;
  29. crypto_int32 f3_2 = 2 * f3;
  30. crypto_int32 f4_2 = 2 * f4;
  31. crypto_int32 f5_2 = 2 * f5;
  32. crypto_int32 f6_2 = 2 * f6;
  33. crypto_int32 f7_2 = 2 * f7;
  34. crypto_int32 f5_38 = 38 * f5; /* 1.959375*2^30 */
  35. crypto_int32 f6_19 = 19 * f6; /* 1.959375*2^30 */
  36. crypto_int32 f7_38 = 38 * f7; /* 1.959375*2^30 */
  37. crypto_int32 f8_19 = 19 * f8; /* 1.959375*2^30 */
  38. crypto_int32 f9_38 = 38 * f9; /* 1.959375*2^30 */
  39. crypto_int64 f0f0 = f0 * (crypto_int64) f0;
  40. crypto_int64 f0f1_2 = f0_2 * (crypto_int64) f1;
  41. crypto_int64 f0f2_2 = f0_2 * (crypto_int64) f2;
  42. crypto_int64 f0f3_2 = f0_2 * (crypto_int64) f3;
  43. crypto_int64 f0f4_2 = f0_2 * (crypto_int64) f4;
  44. crypto_int64 f0f5_2 = f0_2 * (crypto_int64) f5;
  45. crypto_int64 f0f6_2 = f0_2 * (crypto_int64) f6;
  46. crypto_int64 f0f7_2 = f0_2 * (crypto_int64) f7;
  47. crypto_int64 f0f8_2 = f0_2 * (crypto_int64) f8;
  48. crypto_int64 f0f9_2 = f0_2 * (crypto_int64) f9;
  49. crypto_int64 f1f1_2 = f1_2 * (crypto_int64) f1;
  50. crypto_int64 f1f2_2 = f1_2 * (crypto_int64) f2;
  51. crypto_int64 f1f3_4 = f1_2 * (crypto_int64) f3_2;
  52. crypto_int64 f1f4_2 = f1_2 * (crypto_int64) f4;
  53. crypto_int64 f1f5_4 = f1_2 * (crypto_int64) f5_2;
  54. crypto_int64 f1f6_2 = f1_2 * (crypto_int64) f6;
  55. crypto_int64 f1f7_4 = f1_2 * (crypto_int64) f7_2;
  56. crypto_int64 f1f8_2 = f1_2 * (crypto_int64) f8;
  57. crypto_int64 f1f9_76 = f1_2 * (crypto_int64) f9_38;
  58. crypto_int64 f2f2 = f2 * (crypto_int64) f2;
  59. crypto_int64 f2f3_2 = f2_2 * (crypto_int64) f3;
  60. crypto_int64 f2f4_2 = f2_2 * (crypto_int64) f4;
  61. crypto_int64 f2f5_2 = f2_2 * (crypto_int64) f5;
  62. crypto_int64 f2f6_2 = f2_2 * (crypto_int64) f6;
  63. crypto_int64 f2f7_2 = f2_2 * (crypto_int64) f7;
  64. crypto_int64 f2f8_38 = f2_2 * (crypto_int64) f8_19;
  65. crypto_int64 f2f9_38 = f2 * (crypto_int64) f9_38;
  66. crypto_int64 f3f3_2 = f3_2 * (crypto_int64) f3;
  67. crypto_int64 f3f4_2 = f3_2 * (crypto_int64) f4;
  68. crypto_int64 f3f5_4 = f3_2 * (crypto_int64) f5_2;
  69. crypto_int64 f3f6_2 = f3_2 * (crypto_int64) f6;
  70. crypto_int64 f3f7_76 = f3_2 * (crypto_int64) f7_38;
  71. crypto_int64 f3f8_38 = f3_2 * (crypto_int64) f8_19;
  72. crypto_int64 f3f9_76 = f3_2 * (crypto_int64) f9_38;
  73. crypto_int64 f4f4 = f4 * (crypto_int64) f4;
  74. crypto_int64 f4f5_2 = f4_2 * (crypto_int64) f5;
  75. crypto_int64 f4f6_38 = f4_2 * (crypto_int64) f6_19;
  76. crypto_int64 f4f7_38 = f4 * (crypto_int64) f7_38;
  77. crypto_int64 f4f8_38 = f4_2 * (crypto_int64) f8_19;
  78. crypto_int64 f4f9_38 = f4 * (crypto_int64) f9_38;
  79. crypto_int64 f5f5_38 = f5 * (crypto_int64) f5_38;
  80. crypto_int64 f5f6_38 = f5_2 * (crypto_int64) f6_19;
  81. crypto_int64 f5f7_76 = f5_2 * (crypto_int64) f7_38;
  82. crypto_int64 f5f8_38 = f5_2 * (crypto_int64) f8_19;
  83. crypto_int64 f5f9_76 = f5_2 * (crypto_int64) f9_38;
  84. crypto_int64 f6f6_19 = f6 * (crypto_int64) f6_19;
  85. crypto_int64 f6f7_38 = f6 * (crypto_int64) f7_38;
  86. crypto_int64 f6f8_38 = f6_2 * (crypto_int64) f8_19;
  87. crypto_int64 f6f9_38 = f6 * (crypto_int64) f9_38;
  88. crypto_int64 f7f7_38 = f7 * (crypto_int64) f7_38;
  89. crypto_int64 f7f8_38 = f7_2 * (crypto_int64) f8_19;
  90. crypto_int64 f7f9_76 = f7_2 * (crypto_int64) f9_38;
  91. crypto_int64 f8f8_19 = f8 * (crypto_int64) f8_19;
  92. crypto_int64 f8f9_38 = f8 * (crypto_int64) f9_38;
  93. crypto_int64 f9f9_38 = f9 * (crypto_int64) f9_38;
  94. crypto_int64 h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38;
  95. crypto_int64 h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38;
  96. crypto_int64 h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19;
  97. crypto_int64 h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38;
  98. crypto_int64 h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38;
  99. crypto_int64 h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38;
  100. crypto_int64 h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19;
  101. crypto_int64 h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38;
  102. crypto_int64 h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38;
  103. crypto_int64 h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2;
  104. crypto_int64 carry0;
  105. crypto_int64 carry1;
  106. crypto_int64 carry2;
  107. crypto_int64 carry3;
  108. crypto_int64 carry4;
  109. crypto_int64 carry5;
  110. crypto_int64 carry6;
  111. crypto_int64 carry7;
  112. crypto_int64 carry8;
  113. crypto_int64 carry9;
  114. h0 += h0;
  115. h1 += h1;
  116. h2 += h2;
  117. h3 += h3;
  118. h4 += h4;
  119. h5 += h5;
  120. h6 += h6;
  121. h7 += h7;
  122. h8 += h8;
  123. h9 += h9;
  124. carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= SHL64(carry0,26);
  125. carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= SHL64(carry4,26);
  126. carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= SHL64(carry1,25);
  127. carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= SHL64(carry5,25);
  128. carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= SHL64(carry2,26);
  129. carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= SHL64(carry6,26);
  130. carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= SHL64(carry3,25);
  131. carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= SHL64(carry7,25);
  132. carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= SHL64(carry4,26);
  133. carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= SHL64(carry8,26);
  134. carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= SHL64(carry9,25);
  135. carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= SHL64(carry0,26);
  136. h[0] = (crypto_int32) h0;
  137. h[1] = (crypto_int32) h1;
  138. h[2] = (crypto_int32) h2;
  139. h[3] = (crypto_int32) h3;
  140. h[4] = (crypto_int32) h4;
  141. h[5] = (crypto_int32) h5;
  142. h[6] = (crypto_int32) h6;
  143. h[7] = (crypto_int32) h7;
  144. h[8] = (crypto_int32) h8;
  145. h[9] = (crypto_int32) h9;
  146. }