Skip to content

Commit 20f842c

Browse files
authored
Backport fix for bitmatrix matmul regression 1.11.1 to 1.11.2 (#57387)
Fix #56954.
1 parent d0b2346 commit 20f842c

File tree

2 files changed

+2
-0
lines changed

2 files changed

+2
-0
lines changed

stdlib/LinearAlgebra/src/matmul.jl

+1
Original file line numberDiff line numberDiff line change
@@ -892,6 +892,7 @@ Base.@constprop :aggressive generic_matmatmul!(C::AbstractVecOrMat, tA, tB, A::A
892 892
@inbounds for n in BxN, k in BxK
893 893
# Balpha = B[k,n] * alpha, but we skip the multiplication in case isone(alpha)
894 894
Balpha = _rmul_alpha(B[k,n])
895+
!ismissing(Balpha) && iszero(Balpha) && continue
895 896
@simd for m in AxM
896 897
C[m,n] = muladd(A[m,k], Balpha, C[m,n])
897 898
end

stdlib/LinearAlgebra/test/matmul.jl

+1
Original file line numberDiff line numberDiff line change
@@ -755,6 +755,7 @@ import LinearAlgebra: Adjoint, Transpose
755 755
(*)(x::RootInt, y::Integer) = x.i * y
756 756
adjoint(x::RootInt) = x
757 757
transpose(x::RootInt) = x
758+
Base.zero(::RootInt) = RootInt(0)
758 759

759 760
@test Base.promote_op(*, RootInt, RootInt) === Int
760 761

0 commit comments

Comments (0)