Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Fixed

- Fixed `BlockPMatrix{V}(::UndefInitializer, rows, cols)` constructor dropping the `cols` argument, causing a `MethodError` at runtime. Since PR[#194](https://github.com/gridap/GridapDistributed.jl/pull/194).
- Fixed `local_views(::BlockPMatrix, rows, cols)` indexing 1D block-range vectors with a 2D `CartesianIndex`, causing a `BoundsError` for any multi-field problem with ≥2 fields. Since PR [#194](https://github.com/gridap/GridapDistributed.jl/pull/194).
- Fixed `mul!(y::BlockPVector, A::BlockPMatrix, x::BlockPVector, α, β)` computing `α*β*(A*x)` instead of `α*(A*x) + β*y`; the 3-arg `mul!` was also updated to correctly zero `y` before accumulating. Since PR [#194](https://github.com/gridap/GridapDistributed.jl/pull/194).

## [0.4.11] - 2026-02-20

### Added
Expand Down
2 changes: 1 addition & 1 deletion src/Algebra.jl
Original file line number Diff line number Diff line change
Expand Up @@ -387,7 +387,7 @@ end

function local_views(a::BlockPMatrix,new_rows::BlockPRange,new_cols::BlockPRange)
vals = map(CartesianIndices(blocksize(a))) do I
local_views(blocks(a)[I],blocks(new_rows)[I],blocks(new_cols)[I])
local_views(blocks(a)[I],blocks(new_rows)[I[1]],blocks(new_cols)[I[2]])
end |> to_parray_of_arrays
return map(mortar,vals)
end
Expand Down
14 changes: 9 additions & 5 deletions src/BlockPartitionedArrays.jl
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ function BlockPMatrix{V}(::UndefInitializer,rows::BlockPRange,cols::BlockPRange)
c = block_cols[I[2]]
PSparseMatrix{V}(undef,partition(r),partition(c))
end
return BlockPMatrix(vals,rows)
return BlockPMatrix(vals,rows,cols)
end

# AbstractArray API
Expand Down Expand Up @@ -329,19 +329,23 @@ end

function LinearAlgebra.mul!(y::BlockPVector,A::BlockPMatrix,x::BlockPVector)
o = one(eltype(A))
mul!(y,A,x,o,o)
z = zero(eltype(A))
mul!(y,A,x,o,z)
end

function LinearAlgebra.mul!(y::BlockPVector,A::BlockPMatrix,x::BlockPVector,α::Number,β::Number)
yb, Ab, xb = blocks(y), blocks(A), blocks(x)
z = zero(eltype(y))
o = one(eltype(A))
z = zero(eltype(y))
for i in 1:blocksize(A,1)
fill!(yb[i],z)
if iszero(β)
fill!(yb[i],z)
else
rmul!(yb[i],β)
end
for j in 1:blocksize(A,2)
mul!(yb[i],Ab[i,j],xb[j],α,o)
end
rmul!(yb[i],β)
end
return y
end
Expand Down
178 changes: 73 additions & 105 deletions test/BlockPartitionedArraysTests.jl
Original file line number Diff line number Diff line change
@@ -1,112 +1,80 @@
module BlockPartitionedArraysTests

using Test, LinearAlgebra, BlockArrays, SparseArrays

using Test
using Gridap
using PartitionedArrays
using GridapDistributed
using BlockArrays
using SparseArrays
using LinearAlgebra

using PartitionedArrays
using GridapDistributed: BlockPArray, BlockPVector, BlockPMatrix, BlockPRange

ranks = with_debug() do distribute
distribute(LinearIndices((2,)))
function main(distribute, parts)
ranks = distribute(LinearIndices((prod(parts),)))

n_global = 4*prod(parts)
indices = uniform_partition(ranks, n_global, true)

pr = PRange(indices)
block_range = BlockPRange([pr, pr])

# BlockPRange API
@test blocklength(block_range) == 2
@test blocksize(block_range) == (2,)

# BlockPVector: constructor, fill!, similar, arithmetic
v = BlockPVector{Vector{Float64}}(undef, block_range)
fill!(v, 1.0)
v2 = similar(v, block_range)
fill!(v2, 2.0)

v2 = v .+ 1.0
v2 = v .- 1.0
v2 = v .* 2.0
v2 = v ./ 2.0
copy!(v2, v)

@test norm(v) ≈ sqrt(2*n_global)

# PartitionedArrays vector API
consistent!(v) |> wait
assemble!(v) |> wait
partition(v)
local_values(v)
own_values(v)
ghost_values(v)

# local_views on vector with and without new range
local_views(v)
local_views(v, block_range)

# BlockPMatrix: constructor (exercises BUG3 fix: cols must not be dropped)
m = BlockPMatrix{SparseMatrixCSC{Float64,Int64}}(undef, block_range, block_range)
@test blocksize(m) == (2,2)

m2 = similar(m, (block_range, block_range))
@test blocksize(m2) == (2,2)

# PartitionedArrays matrix API
assemble!(m) |> wait
LinearAlgebra.fillstored!(m, 0.0)
partition(m)
own_ghost_values(m)
ghost_own_values(m)

# local_views on matrix (exercises BUG5 fix: correct I[1]/I[2] indexing)
local_views(m)
local_views(m, block_range, block_range)

maximum(abs, v)
minimum(abs, v)

# BUG4 fix: mul!(y,A,x,α,β) must compute α*(A*x) + β*y, not α*β*(A*x)
# With A=0: y ← α*(0*x) + β*y_old = β*y_old
fill!(v, 1.0)
v4 = similar(v, block_range)
fill!(v4, 3.0)
LinearAlgebra.fillstored!(m, 0.0)
mul!(v4, m, v, 2.0, 2.0) # y ← 2*(0*x) + 2*[3,...] = [6,...]
@test norm(v4) ≈ 6.0 * sqrt(2*n_global)
end

indices = map(ranks) do r
if r == 1
own_gids = [1,2,3,4,5]
ghost_gids = [6,7]
ghost_owners = [2,2]
else
own_gids = [6,7,8,9,10]
ghost_gids = [5]
ghost_owners = [1]
end
own_idx = OwnIndices(10,r,own_gids)
ghost_idx = GhostIndices(10,ghost_gids,ghost_owners)
OwnAndGhostIndices(own_idx,ghost_idx)
end

block_range = BlockPRange([PRange(indices),PRange(indices)])

_v = PVector{OwnAndGhostVectors{Vector{Float64}}}(undef,indices)
v = BlockPArray([_v,_v],(block_range,))
fill!(v,1.0)

_m = map(CartesianIndices((2,2))) do I
i,j = I[1],I[2]
local_mats = map(ranks,indices) do r, ind
n = local_length(ind)
if i==j && r == 1
SparseMatrixCSC(n,n,Int[1,3,5,7,9,10,11,13],Int[1,2,2,3,3,4,4,5,5,6,6,7],fill(1.0,12))
elseif i==j && r == 2
SparseMatrixCSC(n,n,Int[1,2,4,6,8,10,11],Int[1,1,2,2,3,3,4,4,5,6],fill(1.0,10))
else
SparseMatrixCSC(n,n,fill(Int(1),n+1),Int[],Float64.([]))
end
end
PSparseMatrix(local_mats,indices,indices)
end;
m = BlockPArray(_m,(block_range,block_range))

x = similar(_v)
mul!(x,_m[1,1],_v)

# BlockPRange

@test blocklength(block_range) == 2
@test blocksize(block_range) == (2,)

# AbstractArray API

__v = similar(v,block_range)
__m = similar(m,(block_range,block_range))
fill!(__v,1.0)

__v = __v .+ 1.0
__v = __v .- 1.0
__v = __v .* 1.0
__v = __v ./ 1.0

# PartitionedArrays API

consistent!(__v) |> wait
t = assemble!(__v)
assemble!(__m) |> wait
fetch(t);

PartitionedArrays.to_trivial_partition(m)

partition(v)
partition(m)
local_values(v)
own_values(v)
ghost_values(v)
own_ghost_values(m)
ghost_own_values(m)

# LinearAlgebra API
fill!(v,1.0)
x = similar(v)
mul!(x,m,v)
consistent!(x) |> wait

@test dot(v,x) ≈ 36
norm(v)
copy!(x,v)

LinearAlgebra.fillstored!(__m,1.0)

__v = BlockPVector{Vector{Float64}}(undef,block_range)
#__m = BlockPMatrix{SparseMatrixCSC{Float64,Int64}}(undef,block_range,block_range)

maximum(abs,v)
minimum(abs,v)

# GridapDistributed API
v_parts = local_views(v)
m_parts = local_views(m)

v_parts = local_views(v,block_range)
m_parts = local_views(m,block_range,block_range)
end # module
2 changes: 1 addition & 1 deletion test/BlockSparseMatrixAssemblersTests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ function is_same_vector(x::BlockPVector,y::PVector,Ub,U)
res = map(1:num_fields(Ub)) do i
xi = restrict_to_field(Ub,x_fespace,i)
yi = restrict_to_field(U,y_fespace,i)
xi ≈ yi
norm(xi - yi) < 1.0e-10
end
return all(res)
end
Expand Down
1 change: 1 addition & 0 deletions test/TestApp/src/TestApp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ module TestApp
include("../../AdaptivityMultiFieldTests.jl")
include("../../PolytopalCoarseningTests.jl")
include("../../BlockSparseMatrixAssemblersTests.jl")
include("../../BlockPartitionedArraysTests.jl")
include("../../VisualizationTests.jl")
include("../../AutodiffTests.jl")
include("../../ConstantFESpacesTests.jl")
Expand Down
1 change: 1 addition & 0 deletions test/mpi/runtests_np4_body.jl
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ function all_tests(distribute,parts)
end

TestApp.BlockSparseMatrixAssemblersTests.main(distribute,parts)
TestApp.BlockPartitionedArraysTests.main(distribute,parts)
PArrays.toc!(t,"BlockSparseMatrixAssemblers")

if prod(parts) == 4
Expand Down
4 changes: 4 additions & 0 deletions test/sequential/BlockSparseMatrixAssemblersTests.jl
Original file line number Diff line number Diff line change
@@ -1,17 +1,21 @@
module BlockSparseMatrixAssemblersTestsSeq
using PartitionedArrays
include("../BlockSparseMatrixAssemblersTests.jl")
include("../BlockPartitionedArraysTests.jl")

with_debug() do distribute
BlockSparseMatrixAssemblersTests.main(distribute,(2,2))
BlockPartitionedArraysTests.main(distribute,(2,2))
end

with_debug() do distribute
BlockSparseMatrixAssemblersTests.main(distribute,(2,1))
BlockPartitionedArraysTests.main(distribute,(2,1))
end

with_debug() do distribute
BlockSparseMatrixAssemblersTests.main(distribute,(1,2))
BlockPartitionedArraysTests.main(distribute,(1,2))
end

end # module
4 changes: 2 additions & 2 deletions test/sequential/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ end

@time @testset "StokesHdivDGTests.jl" begin include("StokesHdivDGTests.jl") end

@time @testset "BlockSparseMatrixAssemblers" begin
include("BlockSparseMatrixAssemblersTests.jl")
@time @testset "BlockSparseMatrixAssemblers" begin
include("BlockSparseMatrixAssemblersTests.jl")
end

@time @testset "AdaptivityTests" begin
Expand Down
Loading