summaryrefslogtreecommitdiff
path: root/mpz
diff options
context:
space:
mode:
authorNiels Möller <nisse@lysator.liu.se>2011-05-23 10:57:57 +0200
committerNiels Möller <nisse@lysator.liu.se>2011-05-23 10:57:57 +0200
commite727b8a3d5709346027983f7c280d087c5b40686 (patch)
tree939817da949b247d365c8d548c0557f97391d42b /mpz
parent2124166fce28581fecf7c26b9c7929d529ee5ffb (diff)
downloadgmp-e727b8a3d5709346027983f7c280d087c5b40686.tar.gz
Moved (asize < bsize) check and swap before (bsize == 0) check.
Diffstat (limited to 'mpz')
-rw-r--r--mpz/jacobi.c75
1 file changed, 38 insertions, 37 deletions
diff --git a/mpz/jacobi.c b/mpz/jacobi.c
index c39c635ae..afd9a49b4 100644
--- a/mpz/jacobi.c
+++ b/mpz/jacobi.c
@@ -113,56 +113,52 @@ mpz_jacobi (mpz_srcptr a, mpz_srcptr b)
JACOBI_STRIP_LOW_ZEROS (result_bit1, blow, asrcp, asize, alow);
- if (bsize == 1)
+ /* Ensure asize >= bsize. Take advantage of the generalized
+ reciprocity law (a/b*2^n) = (b*2^n / a) * RECIP(a,b) */
+
+ if (asize < bsize)
{
- result_bit1 ^= JACOBI_TWOS_U_BIT1(btwos, alow);
+ MPN_SRCPTR_SWAP (asrcp, asize, bsrcp, bsize);
+ MP_LIMB_T_SWAP (alow, blow);
- if (blow == 1)
- return JACOBI_BIT1_TO_PN (result_bit1);
+ /* NOTE: The value of alow (old blow) is a bit subtle. For this
+ code path, we get alow as the low, always odd, limb of
+ shifted A. Which is what we need for the reciprocity update
+ below.
- if (asize > 1)
- JACOBI_MOD_OR_MODEXACT_1_ODD (result_bit1, alow, asrcp, asize, blow);
+ However, all other uses of alow assume that it is *not*
+ shifted. Luckily, alow matters only when either
- return mpn_jacobi_base (alow, blow, result_bit1);
- }
+ + btwos > 0, in which case A is always odd
- if (asize == 1)
- {
- /* Logic copied from mpz_ui_kronecker */
- if (alow == 1)
- return JACOBI_BIT1_TO_PN (result_bit1); /* (1/b)=1 */
+ + asize == bsize == 1, in which case this code path is never
+ taken. */
+
+ count_trailing_zeros (btwos, blow);
+ blow >>= btwos;
- if ( (alow & 1) == 0)
+ if (bsize > 1 && btwos > 0)
{
- unsigned atwos;
- ASSERT (btwos == 0);
- count_trailing_zeros (atwos, alow);
- alow >>= atwos;
- result_bit1 ^= JACOBI_TWOS_U_BIT1 (atwos, blow);
+ mp_limb_t b1 = bsrcp[1];
+ blow |= b1 << (GMP_NUMB_BITS - btwos);
+ if (bsize == 2 && (b1 >> btwos) == 0)
+ bsize = 1;
}
- if (alow == 1)
- return JACOBI_BIT1_TO_PN (result_bit1); /* (1/b)=1 */
-
- /* (a/b*2^n) = (b*2^n / a) * RECIP(a,b) */
result_bit1 ^= JACOBI_RECIP_UU_BIT1 (alow, blow);
- JACOBI_MOD_OR_MODEXACT_1_ODD (result_bit1, blow, bsrcp, bsize, alow);
- return mpn_jacobi_base (blow, alow, result_bit1);
}
+
+ if (bsize == 1)
+ {
+ result_bit1 ^= JACOBI_TWOS_U_BIT1(btwos, alow);
- /* Ensure asize >= bsize. Take advantage of the reciprocity law
- (a/b*2^n) = (b*2^n / a) * RECIP(a,b) */
+ if (blow == 1)
+ return JACOBI_BIT1_TO_PN (result_bit1);
- if (asize < bsize)
- {
- MPN_SRCPTR_SWAP (asrcp, asize, bsrcp, bsize);
- MP_LIMB_T_SWAP (alow, blow);
-
- count_trailing_zeros (btwos, blow);
- if (btwos > 0)
- blow = (blow >> btwos) | (bsrcp[1] << (GMP_LIMB_BITS - btwos));
+ if (asize > 1)
+ JACOBI_MOD_OR_MODEXACT_1_ODD (result_bit1, alow, asrcp, asize, blow);
- result_bit1 ^= JACOBI_RECIP_UU_BIT1 (alow, blow);
+ return mpn_jacobi_base (alow, blow, result_bit1);
}
/* Allocation strategy: For A, we allocate a working copy only for A
@@ -177,6 +173,12 @@ mpz_jacobi (mpz_srcptr a, mpz_srcptr b)
else
TMP_ALLOC_LIMBS_2 (ap, bsize, bp, bsize);
+ /* In the case of even B, we conceptually shift out the powers of
+ two first, and then divide A mod B. Hence, when taking those
+ powers of two into account, we must use alow *before* the
+ division. Doing the actual division first is ok, because the
+ point is to remove multiples of B from A, and multiples of 2^k B
+ are good enough. */
if (asize > bsize)
mpn_tdiv_qr (bp, ap, 0, asrcp, asize, bsrcp, bsize);
else
@@ -184,7 +186,6 @@ mpz_jacobi (mpz_srcptr a, mpz_srcptr b)
if (btwos > 0)
{
- /* Must use the original alow *before* the division above. */
result_bit1 ^= JACOBI_TWOS_U_BIT1(btwos, alow);
ASSERT_NOCARRY (mpn_rshift (bp, bsrcp, bsize, btwos));