
Commit 4e30991

More updates for Julia 1.0

1 parent e554319 commit 4e30991

20 files changed: +122 −96 lines changed

src/Elemental.jl

Lines changed: 3 additions & 0 deletions

@@ -1,5 +1,6 @@
 module Elemental
 
+using Distributed
 using DistributedArrays
 using LinearAlgebra
 
@@ -30,9 +31,11 @@ function Finalize()
 end
 
 function __init__()
+    # ccall(:jl_, Cvoid, (Any,), "starting up!")
     Init()
     DefaultGrid[] = Grid()
     atexit() do
+        # ccall(:jl_, Cvoid, (Any,), "closing down!")
        Initialized() && Finalize()
    end
 end
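The commented-out `ccall(:jl_, Cvoid, (Any,), ...)` lines are worth a note: the runtime's `jl_` entry point prints any Julia value directly from C, so it works even inside `__init__` and finalizers, where ordinary printing may be unavailable or unsafe. A minimal sketch of the trick (not part of the commit):

    # Prints x via the C runtime, bypassing Julia's I/O machinery.
    debug_print(x) = ccall(:jl_, Cvoid, (Any,), x)
    debug_print("starting up!")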

src/blas_like/level2.jl

Lines changed: 22 additions & 15 deletions

@@ -1,20 +1,27 @@
-for (elty, relty, ext) in ((:Float32, :Float32, :s),
-                           (:Float64, :Float64, :d),
-                           (:ComplexF32, :Float32, :c),
-                           (:ComplexF64, :Float64, :z))
+for (elty, ext) in ((:Float32, :s),
+                    (:Float64, :d),
+                    (:ComplexF32, :c),
+                    (:ComplexF64, :z))
 
     # Distributed sparse gemv
-    for (trans, elenum) in (("", :NORMAL), ("t", :TRANSPOSE), ("c", :ADJOINT))
-        f = Symbol("A", trans, "_mul_B!")
-        @eval begin
-            function ($f)(α::$elty, A::DistSparseMatrix{$elty}, x::DistMultiVec{$elty}, β::$elty, y::DistMultiVec{$elty})
-                ElError(ccall(($(string("ElMultiplyDist_", ext)), libEl), Cuint,
-                    (Cint, $elty, Ptr{Cvoid}, Ptr{Cvoid}, $elty, Ptr{Cvoid}),
-                    $elenum, α, A.obj, x.obj, β, y.obj))
-                return y
-            end
+    @eval begin
+        function LinearAlgebra.mul!(y::DistMultiVec{$elty}, A::DistSparseMatrix{$elty}, x::DistMultiVec{$elty}, α::$elty, β::$elty)
+            ElError(ccall(($(string("ElMultiplyDist_", ext)), libEl), Cuint,
+                (Cint, $elty, Ptr{Cvoid}, Ptr{Cvoid}, $elty, Ptr{Cvoid}),
+                NORMAL, α, A.obj, x.obj, β, y.obj))
+            return y
+        end
+        function LinearAlgebra.mul!(y::DistMultiVec{$elty}, adjA::Adjoint{<:Any,DistSparseMatrix{$elty}}, x::DistMultiVec{$elty}, α::$elty, β::$elty)
+            ElError(ccall(($(string("ElMultiplyDist_", ext)), libEl), Cuint,
+                (Cint, $elty, Ptr{Cvoid}, Ptr{Cvoid}, $elty, Ptr{Cvoid}),
+                ADJOINT, α, parent(adjA).obj, x.obj, β, y.obj))
+            return y
+        end
+        function LinearAlgebra.mul!(y::DistMultiVec{$elty}, trA::Transpose{<:Any,DistSparseMatrix{$elty}}, x::DistMultiVec{$elty}, α::$elty, β::$elty)
+            ElError(ccall(($(string("ElMultiplyDist_", ext)), libEl), Cuint,
+                (Cint, $elty, Ptr{Cvoid}, Ptr{Cvoid}, $elty, Ptr{Cvoid}),
+                TRANSPOSE, α, parent(trA).obj, x.obj, β, y.obj))
+            return y
         end
     end
 end
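This hunk tracks the Julia 1.0 removal of the `A_mul_B!`/`At_mul_B!`/`Ac_mul_B!` family: products are now expressed through `LinearAlgebra.mul!`, with transposition carried by the `Transpose` and `Adjoint` wrapper types, so dispatch on the wrapper selects the Elemental `NORMAL`, `TRANSPOSE`, or `ADJOINT` code path. A rough usage sketch, assuming `A::DistSparseMatrix{Float64}` and `x, y::DistMultiVec{Float64}` are already constructed and that `A'`/`transpose(A)` fall back to the standard wrapper types for these matrices:

    using LinearAlgebra
    mul!(y, A, x, 1.0, 0.0)             # y := 1.0*A*x + 0.0*y (NORMAL path)
    mul!(y, A', x, 1.0, 0.0)            # A' builds an Adjoint wrapper (ADJOINT path)
    mul!(y, transpose(A), x, 1.0, 0.0)  # Transpose wrapper (TRANSPOSE path)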

src/core/distmatrix.jl

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ for (elty, ext) in ((:ElInt, :i),
             (Cint, Cint, Ptr{Cvoid}, Ref{Ptr{Cvoid}}),
             colDist, rowDist, grid.obj, obj))
         A = DistMatrix{$elty}(obj[], grid)
-        finalizer(A, destroy)
+        finalizer(destroy, A)
         return A
     end
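This change (repeated below for the other wrapper types) is the Julia 1.0 flip of `finalizer`'s argument order: it is now `finalizer(f, obj)` rather than `finalizer(obj, f)`, matching the usual function-first convention. A tiny self-contained illustration of the new order, with a hypothetical `Handle` type that is not from this package:

    mutable struct Handle
        id::Int
    end
    # Finalizers run in a restricted context, so use the low-level jl_ print.
    release(h::Handle) = ccall(:jl_, Cvoid, (Any,), "releasing handle")
    h = Handle(1)
    finalizer(release, h)  # Julia 1.0 order: function first, object second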

src/core/distmultivec.jl

Lines changed: 3 additions & 3 deletions

@@ -16,13 +16,13 @@ for (elty, ext) in ((:ElInt, :i),
         return nothing
     end
 
-    function DistMultiVec(::Type{$elty}, cm::ElComm = CommWorld)
+    function DistMultiVec(::Type{$elty}, cm::ElComm = MPI.CommWorld[])
         obj = Ref{Ptr{Cvoid}}(C_NULL)
         ElError(ccall(($(string("ElDistMultiVecCreate_", ext)), libEl), Cuint,
             (Ref{Ptr{Cvoid}}, ElComm),
             obj, cm))
         A = DistMultiVec{$elty}(obj[])
-        finalizer(A, destroy)
+        finalizer(destroy, A)
         return A
     end

@@ -117,7 +117,7 @@ end
 
 getindex(x::DistMultiVec, i, j) = get(x, i, j)
 
-function similar(::DistMultiVec, ::Type{T}, sz::Dims, cm::ElComm = CommWorld) where {T}
+function similar(::DistMultiVec, ::Type{T}, sz::Dims, cm::ElComm = MPI.CommWorld[]) where {T}
     A = DistMultiVec(T, cm)
     resize!(A, sz...)
     return A

src/core/distsparsematrix.jl

Lines changed: 3 additions & 3 deletions

@@ -16,13 +16,13 @@ for (elty, ext) in ((:ElInt, :i),
         return nothing
     end
 
-    function DistSparseMatrix(::Type{$elty}, comm::ElComm = CommWorld)
+    function DistSparseMatrix(::Type{$elty}, comm::ElComm = MPI.CommWorld[])
        obj = Ref{Ptr{Cvoid}}(C_NULL)
        ElError(ccall(($(string("ElDistSparseMatrixCreate_", ext)), libEl), Cuint,
            (Ref{Ptr{Cvoid}}, ElComm),
            obj, comm))
        A = DistSparseMatrix{$elty}(obj[])
-        finalizer(A, destroy)
+        finalizer(destroy, A)
        return A
    end

@@ -112,7 +112,7 @@ for (elty, ext) in ((:ElInt, :i),
    end
 
    # The other constructors don't have a version with dimensions. Should they, or should this one go?
-    function DistSparseMatrix(::Type{T}, m::Integer, n::Integer, comm::ElComm = CommWorld) where {T}
+    function DistSparseMatrix(::Type{T}, m::Integer, n::Integer, comm::ElComm = MPI.CommWorld[]) where {T}
        A = DistSparseMatrix(T, comm)
        resize!(A, m, n)
        return A

src/core/matrix.jl

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@ for (elty, ext) in ((:ElInt, :i),
         ElError(ccall(($(string("ElMatrixCreate_", ext)), libEl), Cuint,
             (Ref{Ptr{Cvoid}},), obj))
         A = Matrix{$elty}(obj[])
-        finalizer(A, destroy)
+        finalizer(destroy, A)
         return A
     end

src/core/types.jl

Lines changed: 0 additions & 8 deletions

@@ -61,11 +61,3 @@ eltype(A::ElementalMatrix{T}) where {T} = T
 @enum LeftOrRight LEFT RIGHT
 @enum UnitOrNonUnit NON_UNIT UNIT
 @enum Pencil AXBX=1 ABX=2 BAX=3
-
-# Get MPIWorldComm
-function CommWorldValue()
-    r = Ref{ElComm}(0)
-    ccall((:ElMPICommWorld, libEl), Cuint, (Ref{ElComm},), r)
-    return r[]
-end
-const CommWorld = CommWorldValue()
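Removing the eagerly computed `const CommWorld` looks like a precompilation fix: an MPI communicator handle queried at compile time would be stale in a fresh session, so the constructors now read `MPI.CommWorld[]`, presumably a `Ref` populated during module initialization. A generic sketch of that pattern, with hypothetical names:

    module CommDemo
    # Allocate the Ref when the module is (pre)compiled...
    const COMM_WORLD = Ref{UInt32}(0)
    query_comm_world() = UInt32(0x44000000)  # stand-in for the real MPI query
    # ...and fill it in every session, once the runtime is up.
    __init__() = (COMM_WORLD[] = query_comm_world())
    end

    CommDemo.COMM_WORLD[]  # safe to read at run time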

src/julia/darray.jl

Lines changed: 23 additions & 21 deletions

@@ -1,21 +1,23 @@
 # FixMe! Right now the MPI workers are deduced from the DArrays, but if a DArray is distributed on fewer workers that what consistutes the MPI world, then this approach will fail.
 
+using DistributedArrays: procs
+
 mutable struct RemoteElementalMatrix
     refs::Matrix{Any}
 end
 
-function toback(A::DArray{T,2,S} where {T<:BlasFloat,S<:StridedMatrix})
-    rs = Array{Any}(size(procs(A)))
+function toback(A::DArray{T,2,S}) where {T<:BlasFloat,S<:StridedMatrix}
+    rs = Array{Any}(undef, size(procs(A)))
     @sync for p in eachindex(procs(A))
-        ind = A.indexes[p]
+        ind = A.indices[p]
         @async rs[p] = remotecall(procs(A)[p]) do
             lA = localpart(A)
             AlA = Elemental.DistMatrix(T)
             zeros!(AlA, size(A)...)
             for j = 1:size(lA,2), i = 1:size(lA, 1)
                 queueUpdate(AlA,
-                            start(ind[1]) + i - 1,
-                            start(ind[2]) + j - 1, lA[i,j])
+                            first(ind[1]) + i - 1,
+                            first(ind[2]) + j - 1, lA[i,j])
             end
             processQueues(AlA)
             AlA

@@ -25,7 +27,7 @@ function toback(A::DArray{T,2,S} where {T<:BlasFloat,S<:StridedMatrix})
 end
 
 function tofront(r::Base.Matrix)
-    tt = Array{Any}(length(r))
+    tt = Array{Any}(undef, length(r))
     for i = 1:length(r)
         tt[i] = remotecall(r[i].where, r[i]) do t
             typeof(fetch(t))

@@ -50,15 +52,15 @@ function tofront(r::Base.Matrix)
                 Int[r[i].where for i in eachindex(r)])
 
     @sync for p in eachindex(r)
-        ind = A.indexes[p]
+        ind = A.indices[p]
         rr = r[p]
         @async remotecall_wait(r[p].where) do
             rrr = fetch(rr)
             lA = localpart(A)
             for j = 1:size(lA, 2), i = 1:size(lA, 1)
                 queuePull(rrr,
-                          start(ind[1]) + i - 1,
-                          start(ind[2]) + j - 1)
+                          first(ind[1]) + i - 1,
+                          first(ind[2]) + j - 1)
             end
             processPullQueue(rrr, lA)
         end

@@ -75,7 +77,7 @@ function (\)(A::DArray{T,2,S}, B::DArray{T,2,S}) where {T<:BlasFloat,S}
     rA = toback(A)
     rB = toback(B)
     pidsAB = union(A.pids, B.pids)
-    rvals = Vector{Any}(length(pidsAB))
+    rvals = Vector{Any}(undef, length(pidsAB))
     @sync for i = 1:length(pidsAB)
         @async rvals[i] = remotecall_wait(pidsAB[i], rA[i], rB[i]) do t1,t2
             solve!(fetch(t1), fetch(t2))

@@ -86,7 +88,7 @@ end
 
 function LinearAlgebra.eigvals(A::Hermitian{T,DArray{T,2,Array{T,2}}} where {T<:BlasFloat})
     rA = toback(A.data)
-    rvals = Array{Any}(size(procs(A.data)))
+    rvals = Array{Any}(undef, size(procs(A.data)))
     uplo = A.uplo == 'U' ? UPPER : LOWER
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i].where, rA[i]) do t

@@ -98,7 +100,7 @@ end
 
 function LinearAlgebra.svdvals(A::DArray{<:BlasFloat,2})
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i].where, rA[i]) do t
             svdvals(fetch(t))

@@ -109,7 +111,7 @@ end
 
 function LinearAlgebra.inv!(A::DArray{<:BlasFloat,2})
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for j = 1:size(rvals, 2)
         for i = 1:size(rvals, 1)
             @async rvals[i,j] = remotecall_wait(t -> inverse!(fetch(t)), rA[i,j].where, rA[i,j])

@@ -118,11 +120,11 @@ function LinearAlgebra.inv!(A::DArray{<:BlasFloat,2})
     return tofront(rvals)
 end
 
-LinearAlgebra.inv(A::DArray{<:BlasFloat,2}) = inv!(copy(A))
+LinearAlgebra.inv(A::DArray{<:BlasFloat,2}) = LinearAlgebra.inv!(copy(A))
 
 function LinearAlgebra.logdet(A::DArray{<:BlasFloat,2})
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i].where, rA[i]) do t
             d = safeHPDDeterminant(Elemental.LOWER, fetch(t))

@@ -137,7 +139,7 @@ function spectralPortrait(A::DArray{T,2},
                           imagSize::Integer,
                           psCtrl::PseudospecCtrl{T}=PseudospecCtrl(T)) where {T<:BlasReal}
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i].where, rA[i]) do t
             spectralPortrait(fetch(t), ElInt(realSize), ElInt(imagSize), psCtrl)[1]

@@ -151,7 +153,7 @@ function spectralPortrait(A::DArray{Complex{T},2},
                           imagSize::Integer,
                           psCtrl::PseudospecCtrl{T}=PseudospecCtrl(T)) where {T<:BlasReal}
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i,j] = remotecall_wait(rA[i].where, rA[i]) do t
             spectralPortrait(fetch(t), ElInt(realSize), ElInt(imagSize), psCtrl)[1]

@@ -168,7 +170,7 @@ function spectralWindow(A::DArray{T,2},
                         imagSize::Integer,
                         psCtrl::PseudospecCtrl{T}=PseudospecCtrl(T)) where {T<:BlasReal}
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i].where, rA[i]) do t
             spectralWindow(fetch(t), center, realWidth, imagWidth,

@@ -186,7 +188,7 @@ function spectralWindow(A::DArray{Complex{T},2},
                         imagSize::Integer,
                         psCtrl::PseudospecCtrl{T}=PseudospecCtrl(T)) where {T<:BlasReal}
     rA = toback(A)
-    rvals = Array{Any}(size(procs(A)))
+    rvals = Array{Any}(undef, size(procs(A)))
     @sync for i in eachindex(rvals)
         @async rvals[i] = remotecall_wait(rA[i,j].where, rA[i]) do t
             spectralWindow(fetch(t), center, realWidth, imagWidth,

@@ -198,7 +200,7 @@ end
 
 function foxLi(::Type{T}, n::Integer, ω::Real) where {T<:BlasComplex}
     sz = tuple(DistributedArrays.defaultdist((n,n), workers())...)
-    rvals = Array{Any}(sz)
+    rvals = Array{Any}(undef, sz)
     @sync for j = 1:size(rvals, 2), i = 1:size(rvals, 1)
         @async rvals[i,j] = remotecall_wait(workers()[sub2ind(sz, i, j)]) do
             A = Elemental.DistMatrix(T)

@@ -233,7 +235,7 @@ for (elty, ext) in ((:ElInt, :i),
     for id in workers()
         let A = A, DA = DA
             @async remotecall_fetch(id) do
-                rows, cols = DistributedArrays.localindexes(DA)
+                rows, cols = DistributedArrays.localindices(DA)
                 i,j,v = findnz(DistributedArrays.localpart(DA))
                 gi, gj, gv = (i.+(first(rows)-1), j.+(first(cols)-1), v)
                 numLocal = length(gi)
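Most of this file is mechanical 0.6-to-1.0 renaming: uninitialized arrays now require an explicit `undef`, `DArray.indexes` became `indices`, `localindexes` became `localindices`, and `start(r)` from the old iteration protocol is replaced by `first(r)` for ranges. The `toback` signature change is also a genuine fix: moving the `where` clause outside the argument parentheses makes `T` a method type parameter, so it is actually bound inside the body. The renamed spellings side by side:

    rvals = Array{Any}(undef, 2, 2)  # was Array{Any}(2, 2) on Julia 0.6
    r = 3:7
    first(r)                         # was start(r); returns 3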

src/julia/generic.jl

Lines changed: 9 additions & 8 deletions

@@ -16,7 +16,7 @@ end
 
 Base.size(A::ElementalMatrix) = (size(A, 1), size(A, 2))
 
-Base.copy!(A::T, B::T) where {T<:ElementalMatrix} = _copy!(B, A)
+Base.copyto!(A::T, B::T) where {T<:ElementalMatrix} = _copy!(B, A)
 # copy(A::ElementalMatrix) = copy!(similar(A), A)
 Base.length(A::ElementalMatrix) = prod(size(A))
 

@@ -160,12 +160,13 @@ function Base.convert(::Type{DistMatrix{T}}, A::DistMultiVec{T}) where {T}
     return B
 end
 
-function LinearAlgebra.norm(x::ElementalMatrix)
-    if size(x, 2) == 1
-        return nrm2(x)
-    else
-        return twoNorm(x)
-    end
-end
+LinearAlgebra.norm(x::ElementalMatrix) = nrm2(x)
+# function LinearAlgebra.norm(x::ElementalMatrix)
+#     if size(x, 2) == 1
+#         return nrm2(x)
+#     else
+#         return twoNorm(x)
+#     end
+# end
 
 LinearAlgebra.cholesky!(A::Hermitian{<:Any,<:ElementalMatrix}, ::Type{Val{false}}) = LinearAlgebra.Cholesky(cholesky(A.uplo == 'U' ? UPPER : LOWER, A.data), A.uplo)
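Two renames drive this hunk: `Base.copy!` on arrays became `Base.copyto!` in Julia 1.0, and `norm` of a matrix now means the entrywise (Frobenius-style) norm rather than the operator norm, which is why the method can always delegate to `nrm2`. On plain arrays, for comparison:

    A = [1.0 2.0; 3.0 4.0]
    B = similar(A)
    copyto!(B, A)  # Julia 1.0 spelling of the old copy!(B, A)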

src/lapack_like/props.jl

Lines changed: 1 addition & 1 deletion

@@ -80,7 +80,7 @@ for (elty, relty, ext) in ((:Float32, :Float32, :s),
     end
 end
 
-function LinearAlgebra.norm(A::ElementalMatrix, p::Real)
+function LinearAlgebra.opnorm(A::ElementalMatrix, p::Real = 2)
     if p == 1
         return oneNorm(A)
     elseif p == 2
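Julia 1.0 split the old two-argument matrix `norm(A, p)` into two functions: `norm` for entrywise norms and `opnorm` for induced operator norms, which is what Elemental's `oneNorm`, `twoNorm`, and friends compute; renaming this method keeps them reachable under the new name. The distinction on an ordinary array:

    using LinearAlgebra
    A = [3.0 0.0; 0.0 4.0]
    norm(A)       # entrywise 2-norm (Frobenius): 5.0
    opnorm(A)     # induced 2-norm (largest singular value): 4.0
    opnorm(A, 1)  # maximum absolute column sum: 4.0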
