@@ -808,8 +808,8 @@ Base.deepcopy_internal(x::BigInt, stackdict::IdDict) = get!(() -> MPZ.set(x), st

 ## streamlined hashing for BigInt, by avoiding allocation from shifts ##

-if Limb === UInt
-    # this condition is true most (all?) of the time, and in this case we can define
+if Limb === UInt64 === UInt
+    # On 64 bit systems we can define
     # an optimized version for BigInt of hash_integer (used e.g. for Rational{BigInt}),
     # and of hash

@@ -819,7 +819,7 @@ if Limb === UInt
         GC.@preserve n begin
             s = n.size
             s == 0 && return hash_integer(0, h)
-            p = convert(Ptr{UInt}, n.d)
+            p = convert(Ptr{UInt64}, n.d)
             b = unsafe_load(p)
             h ⊻= hash_uint(ifelse(s < 0, -b, b) ⊻ h)
             for k = 2:abs(s)
@@ -829,14 +829,11 @@ if Limb === UInt
         end
     end

-    _divLimb(n) = UInt === UInt64 ? n >>> 6 : n >>> 5
-    _modLimb(n) = UInt === UInt64 ? n & 63 : n & 31
-
     function hash(x::BigInt, h::UInt)
         GC.@preserve x begin
            sz = x.size
            sz == 0 && return hash(0, h)
-            ptr = Ptr{UInt}(x.d)
+            ptr = Ptr{UInt64}(x.d)
            if sz == 1
                return hash(unsafe_load(ptr), h)
            elseif sz == -1
@@ -845,8 +842,8 @@ if Limb === UInt
             end
             pow = trailing_zeros(x)
             nd = Base.ndigits0z(x, 2)
-            idx = _divLimb(pow) + 1
-            shift = _modLimb(pow) % UInt
+            idx = (pow >>> 6) + 1
+            shift = (pow & 63) % UInt
             upshift = BITS_PER_LIMB - shift
             asz = abs(sz)
             if shift == 0
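For context on the last two hunks: once the enclosing guard is `if Limb === UInt64 === UInt`, every limb is known to be 64 bits wide, so the removed `_divLimb`/`_modLimb` helpers (which branched on the word size) can be replaced by a plain shift and mask. A minimal sketch, assuming only standard Julia and not part of the patch, of the identities the inlined expressions rely on:

```julia
# With 64-bit limbs, a bit position `pow` splits into a limb index and an
# in-limb offset; `>>> 6` and `& 63` are the div-by-64 and mod-64 that the
# removed helpers computed in their UInt64 branch.
for pow in (0, 1, 63, 64, 65, 127, 128, 1000)
    @assert pow >>> 6 == div(pow, 64)   # limb index (0-based)
    @assert pow & 63  == mod(pow, 64)   # bit offset within that limb
end
```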