Commit 1d00873

Merge pull request #236 from ReactiveBayes/remove-lv
Remove LV from the package
2 parents d4a72ce + 3280393 commit 1d00873

11 files changed: +25 -31 lines changed

.github/workflows/CI.yml

Lines changed: 3 additions & 0 deletions
@@ -30,6 +30,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: julia-actions/setup-julia@v2
         with:
+          include-all-prereleases: true
           version: ${{ matrix.version }}
           arch: ${{ matrix.arch }}
       - uses: julia-actions/cache@v2
@@ -50,6 +51,8 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: julia-actions/setup-julia@v2
+        with:
+          version: '1'
       - uses: actions/cache@v4
         env:
           cache-name: cache-artifacts

Project.toml

Lines changed: 0 additions & 2 deletions
@@ -17,7 +17,6 @@ IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953"
 IrrationalConstants = "92d709cd-6900-40b7-9082-c6be49f344b6"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
-LoopVectorization = "bdcacae8-1622-11e9-2a5c-532679323890"
 PositiveFactorizations = "85a6dd25-e78a-55b7-8502-1745935b8125"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
@@ -42,7 +41,6 @@ IntervalSets = "0.6, 0.7"
 IrrationalConstants = "0.1, 0.2"
 LinearAlgebra = "1.9"
 LogExpFunctions = "0.3"
-LoopVectorization = "0.12"
 PositiveFactorizations = "0.2"
 Random = "1.9"
 RecursiveArrayTools = "3.26"
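With both the [deps] and [compat] entries gone, LoopVectorization is no longer a direct dependency of the package. A minimal sketch (not part of this commit; assumes the package's project environment is active) for verifying the removal:

using Pkg

# Direct dependencies of the active project, as a Dict{String, Base.UUID}
deps = Pkg.project().dependencies

# Expected to be `false` after this commit
haskey(deps, "LoopVectorization")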

src/distributions/categorical.jl

Lines changed: 3 additions & 4 deletions
@@ -4,7 +4,6 @@ export logpartition
 import Distributions: Categorical, probs
 import LogExpFunctions: logsumexp
 import FillArrays: OneElement
-using LoopVectorization
 
 BayesBase.vague(::Type{<:Categorical}, dims::Int) = Categorical(ones(dims) ./ dims)
 BayesBase.convert_paramfloattype(::Type{T}, distribution::Categorical) where {T <: Real} = Categorical(convert(AbstractVector{T}, probs(distribution)))
@@ -49,7 +48,7 @@ end
 function (::MeanToNatural{Categorical})(tuple_of_θ::Tuple{Any}, _)
     (p,) = tuple_of_θ
     pₖ = p[end]
-    return (LoopVectorization.vmap(pᵢ -> log(pᵢ / pₖ), p),)
+    return (map(pᵢ -> log(pᵢ / pₖ), p),)
 end
 
 function (::NaturalToMean{Categorical})(tuple_of_η::Tuple{V}, _) where {V <: Vector}
@@ -101,8 +100,8 @@ getgradlogpartition(::NaturalParametersSpace, ::Type{Categorical}, conditioner)
         )
     )
     end
-    sumη = vmapreduce(exp, +, η)
-    return vmap(d -> exp(d) / sumη, η)
+    sumη = mapreduce(exp, +, η)
+    return map(d -> exp(d) / sumη, η)
 end
 
 getfisherinformation(::NaturalParametersSpace, ::Type{Categorical}, conditioner) =
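The vmap/vmapreduce calls above are replaced with their Base equivalents. A small standalone sketch (illustrative only; the probability vector is made up) showing that Base.map and mapreduce reproduce the same mean-to-natural round trip for a categorical probability vector:

p = [0.2, 0.3, 0.5]
pₖ = p[end]

# Mean -> natural parameters, as in MeanToNatural{Categorical} above
η = map(pᵢ -> log(pᵢ / pₖ), p)

# The gradient of the log-partition recovers the probabilities, as in getgradlogpartition
sumη = mapreduce(exp, +, η)
map(d -> exp(d) / sumη, η) ≈ p    # true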

src/distributions/dirichlet.jl

Lines changed: 2 additions & 3 deletions
@@ -5,7 +5,6 @@ import SpecialFunctions: digamma, loggamma, trigamma
 import Base.Broadcast: BroadcastFunction
 
 using FillArrays
-using LoopVectorization
 using StaticArrays
 using LinearAlgebra
 using LogExpFunctions
@@ -20,7 +19,7 @@ function BayesBase.prod(::PreserveTypeProd{Distribution}, left::Dirichlet, right
 end
 
 BayesBase.probvec(dist::Dirichlet) = params(dist)[1]
-BayesBase.std(dist::Dirichlet) = vmap(sqrt, var(dist))
+BayesBase.std(dist::Dirichlet) = map(sqrt, var(dist))
 
 BayesBase.mean(::BroadcastFunction{typeof(log)}, dist::Dirichlet) = digamma.(probvec(dist)) .- digamma(sum(probvec(dist)))
 BayesBase.mean(::BroadcastFunction{typeof(clamplog)}, dist::Dirichlet) = digamma.((clamp(p, tiny, typemax(p)) for p in probvec(dist))) .- digamma(sum(probvec(dist)))
@@ -54,7 +53,7 @@ unpack_parameters(::Type{Dirichlet}, packed) = (packed,)
 isbasemeasureconstant(::Type{Dirichlet}) = ConstantBaseMeasure()
 
 getbasemeasure(::Type{Dirichlet}) = (x) -> one(Float64)
-getsufficientstatistics(::Type{Dirichlet}) = (x -> vmap(log, x),)
+getsufficientstatistics(::Type{Dirichlet}) = (x -> map(log, x),)
 
 getlogpartition(::NaturalParametersSpace, ::Type{Dirichlet}) = (η) -> begin
     (η1,) = unpack_parameters(Dirichlet, η)
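For elementwise transforms like these, Base.map (or broadcasting) gives the same result as LoopVectorization.vmap. A minimal sketch (illustrative only, using Distributions.jl directly rather than the package API):

using Distributions

d = Dirichlet([2.0, 3.0, 5.0])

# The replacement used for std above: map over the variance vector
map(sqrt, var(d)) ≈ sqrt.(var(d))    # true; the broadcast form is equivalent

# The replacement used for the sufficient statistics: elementwise log
x = [0.2, 0.3, 0.5]
map(log, x) ≈ log.(x)                # true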

src/distributions/dirichlet_collection.jl

Lines changed: 1 addition & 2 deletions
@@ -7,7 +7,6 @@ using Distributions
 using SpecialFunctions, LogExpFunctions
 
 import FillArrays: Ones, Eye
-import LoopVectorization: vmap, vmapreduce
 using LinearAlgebra, Random
 
 using BlockArrays: BlockDiagonal
@@ -74,7 +73,7 @@ isbasemeasureconstant(::Type{DirichletCollection}) = ConstantBaseMeasure()
 getbasemeasure(::Type{DirichletCollection}, conditioner) = (x) -> one(Float64)
 getlogbasemeasure(::Type{DirichletCollection}, conditioner) = (x) -> zero(Float64)
 
-getsufficientstatistics(::Type{DirichletCollection}, conditioner) = (x -> vmap(log, x),)
+getsufficientstatistics(::Type{DirichletCollection}, conditioner) = (x -> map(log, x),)
 
 BayesBase.mean(dist::DirichletCollection) = dist.α ./ dist.α0
 BayesBase.mean(::BroadcastFunction{typeof(log)}, dist::DirichletCollection) = digamma.(dist.α) .- digamma.(dist.α0)

src/distributions/laplace.jl

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ function BayesBase.prod!(
     (η_right, conditioner_right) = (getnaturalparameters(right), getconditioner(right))
 
     if isapprox(conditioner_left, conditioner_right) && isapprox(conditioner_left, conditioner_container)
-        LoopVectorization.vmap!(+, η_container, η_left, η_right)
+        map!(+, η_container, η_left, η_right)
         return container
     end
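Base.map! writes the elementwise result into a preallocated destination, which is the in-place behaviour vmap! provided here (the same substitution appears in pareto.jl below). A minimal standalone sketch with made-up vectors rather than natural-parameter containers:

η_left  = [1.0, 2.0]
η_right = [0.5, -1.0]
η_container = similar(η_left)

# Elementwise sum of the two parameter vectors, written into η_container without allocating
map!(+, η_container, η_left, η_right)

η_container == [1.5, 1.0]    # true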

src/distributions/normal_family/normal_family.jl

Lines changed: 9 additions & 10 deletions
@@ -25,7 +25,6 @@ import Random: rand!
 import Distributions: logpdf
 import StatsFuns: invsqrt2π
 
-using LoopVectorization
 using StatsFuns: log2π
 using LinearAlgebra
 using SpecialFunctions
@@ -237,7 +236,7 @@ BayesBase.promote_variate_type(::Type{Multivariate}, ::Type{<:NormalWeightedMean
 
 function Base.convert(::Type{Normal{T}}, dist::UnivariateNormalDistributionsFamily) where {T <: Real}
     mean, std = mean_std(dist)
-    return Normal(convert(T, mean), convert(T, std))
+    return Normal(convert(T, T(mean)), convert(T, T(std)))
 end
 
 function Base.convert(::Type{Normal}, dist::UnivariateNormalDistributionsFamily{T}) where {T <: Real}
@@ -249,7 +248,7 @@ function Base.convert(
     dist::MultivariateNormalDistributionsFamily
 ) where {T <: Real, C <: Distributions.PDMats.PDMat{T, Matrix{T}}, M <: AbstractVector{T}}
     mean, cov = mean_cov(dist)
-    return MvNormal(convert(M, mean), Distributions.PDMats.PDMat(convert(AbstractMatrix{T}, cov)))
+    return MvNormal(convert(M, M(mean)), Distributions.PDMats.PDMat(convert(AbstractMatrix{T}, cov)))
 end
 
 function Base.convert(::Type{MvNormal{T}}, dist::MultivariateNormalDistributionsFamily) where {T <: Real}
@@ -268,7 +267,7 @@ end
 
 function Base.convert(::Type{NormalMeanVariance{T}}, dist::UnivariateNormalDistributionsFamily) where {T <: Real}
     mean, var = mean_var(dist)
-    return NormalMeanVariance(convert(T, mean), convert(T, var))
+    return NormalMeanVariance(convert(T, T(mean)), convert(T, T(var)))
 end
 
 function Base.convert(::Type{MvNormalMeanCovariance{T}}, dist::MultivariateNormalDistributionsFamily) where {T <: Real}
@@ -287,7 +286,7 @@ function Base.convert(
     dist::MultivariateNormalDistributionsFamily
 ) where {T <: Real, M <: AbstractArray{T}, P <: AbstractArray{T}}
     mean, cov = mean_cov(dist)
-    return MvNormalMeanCovariance(convert(M, mean), convert(P, cov))
+    return MvNormalMeanCovariance(convert(M, M(mean)), convert(P, P(cov)))
 end
 
 function Base.convert(::Type{NormalMeanVariance}, dist::UnivariateNormalDistributionsFamily{T}) where {T <: Real}
@@ -302,7 +301,7 @@ end
 
 function Base.convert(::Type{NormalMeanPrecision{T}}, dist::UnivariateNormalDistributionsFamily) where {T <: Real}
     mean, precision = mean_precision(dist)
-    return NormalMeanPrecision(convert(T, mean), convert(T, precision))
+    return NormalMeanPrecision(convert(T, T(mean)), convert(T, T(precision)))
 end
 
 function Base.convert(::Type{MvNormalMeanPrecision{T}}, dist::MultivariateNormalDistributionsFamily) where {T <: Real}
@@ -321,7 +320,7 @@ function Base.convert(
     dist::MultivariateNormalDistributionsFamily
 ) where {T <: Real, M <: AbstractArray{T}, P <: AbstractArray{T}}
     mean, precision = mean_precision(dist)
-    return MvNormalMeanPrecision(convert(M, mean), convert(P, precision))
+    return MvNormalMeanPrecision(convert(M, M(mean)), convert(P, P(precision)))
 end
 
 function Base.convert(::Type{NormalMeanPrecision}, dist::UnivariateNormalDistributionsFamily{T}) where {T <: Real}
@@ -339,7 +338,7 @@ function Base.convert(
     dist::UnivariateNormalDistributionsFamily
 ) where {T <: Real}
     weightedmean, precision = weightedmean_precision(dist)
-    return NormalWeightedMeanPrecision(convert(T, weightedmean), convert(T, precision))
+    return NormalWeightedMeanPrecision(convert(T, T(weightedmean)), convert(T, T(precision)))
 end
 
 function Base.convert(
@@ -361,7 +360,7 @@ function Base.convert(
     dist::MultivariateNormalDistributionsFamily
 ) where {T <: Real, M <: AbstractArray{T}, P <: AbstractArray{T}}
     weightedmean, precision = weightedmean_precision(dist)
-    return MvNormalWeightedMeanPrecision(convert(M, weightedmean), convert(P, precision))
+    return MvNormalWeightedMeanPrecision(convert(M, M(weightedmean)), convert(P, P(precision)))
 end
 
 function Base.convert(
@@ -497,7 +496,7 @@ function BayesBase.rand!(
 ) where {T <: Real}
     randn!(rng, container)
     μ, σ = mean_std(dist)
-    @turbo for i in eachindex(container)
+    for i in eachindex(container)
         container[i] = μ + σ * container[i]
     end
     container
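Without the @turbo annotation, the scalar loop in rand! performs the same affine transform of the standard-normal draws. A small standalone sketch (made-up parameters, not the package's rand! method) checking that the plain loop matches a broadcast reference:

using Random

μ, σ = 1.0, 2.0
container = randn(MersenneTwister(42), 5)
expected = μ .+ σ .* container          # broadcast reference result

# Plain loop, as in the diff above, replacing the @turbo-annotated loop
for i in eachindex(container)
    container[i] = μ + σ * container[i]
end

container ≈ expected                    # true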

src/distributions/pareto.jl

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ function BayesBase.prod!(
     (η_right, conditioner_right) = (getnaturalparameters(right), getconditioner(right))
 
     if isapprox(conditioner_left, conditioner_right) && isapprox(conditioner_left, conditioner_container)
-        LoopVectorization.vmap!(+, η_container, η_left, η_right)
+        map!(+, η_container, η_left, η_right)
        return container
     end

src/distributions/wip/multinomial.jl

Lines changed: 3 additions & 4 deletions
@@ -3,7 +3,6 @@ export Multinomial
 import Distributions: Multinomial, probs
 import StableRNGs: StableRNG
 using StaticArrays
-using LoopVectorization
 using LogExpFunctions
 
 vague(::Type{<:Multinomial}, n::Int, dims::Int) = Multinomial(n, ones(dims) ./ dims)
@@ -58,7 +57,7 @@ end
 
 function pack_naturalparameters(dist::Multinomial)
     @inbounds p = params(dist)[2]
-    return vmap(log, p / p[end])
+    return map(log, p / p[end])
 end
 
 unpack_naturalparameters(ef::ExponentialFamilyDistribution{Multinomial}) = (getnaturalparameters(ef),)
@@ -69,7 +68,7 @@ function Base.convert(::Type{ExponentialFamilyDistribution}, dist::Multinomial)
 end
 
 function Base.convert(::Type{Distribution}, exponentialfamily::ExponentialFamilyDistribution{Multinomial})
-    expη = vmap(exp, getnaturalparameters(exponentialfamily))
+    expη = map(exp, getnaturalparameters(exponentialfamily))
     p = expη / sum(expη)
     return Multinomial(getconditioner(exponentialfamily), p)
 end
@@ -110,7 +109,7 @@ function fisherinformation(expfamily::ExponentialFamilyDistribution{Multinomial}
     η = getnaturalparameters(expfamily)
     n = getconditioner(expfamily)
     I = Matrix{Float64}(undef, length(η), length(η))
-    seη = sum(vmap(exp, η))
+    seη = mapreduce(exp, +, η)
     @inbounds for i in 1:length(η)
         I[i, i] = exp(η[i]) * (seη - exp(η[i])) / (seη)^2
         @inbounds for j in 1:i-1
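The last hunk also swaps sum(vmap(exp, η)) for mapreduce(exp, +, η), which folds the sum directly and skips the intermediate vector. A quick standalone check with made-up natural parameters:

η = [0.1, -0.3, 0.0]

# Same value, but mapreduce avoids allocating the mapped array first
mapreduce(exp, +, η) ≈ sum(map(exp, η))    # true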

src/distributions/wishart.jl

Lines changed: 1 addition & 2 deletions
@@ -5,7 +5,6 @@ import Base: ndims, size, convert
 import LinearAlgebra
 import SpecialFunctions: digamma
 import StatsFuns: logmvgamma
-using LoopVectorization
 
 """
     WishartFast{T <: Real, A <: AbstractMatrix{T}} <: ContinuousMatrixDistribution
@@ -38,7 +37,7 @@ BayesBase.params(dist::WishartFast) = (dist.ν, cholinv(dist.invS))
 BayesBase.mean(dist::WishartFast) = mean(convert(Wishart, dist))
 BayesBase.var(dist::WishartFast) = var(convert(Wishart, dist))
 BayesBase.cov(dist::WishartFast) = cov(convert(Wishart, dist))
-BayesBase.std(dist::WishartFast) = vmap(sqrt, var(dist))
+BayesBase.std(dist::WishartFast) = map(sqrt, var(dist))
 BayesBase.mode(dist::WishartFast) = mode(convert(Wishart, dist))
 BayesBase.entropy(dist::WishartFast) = entropy(convert(Wishart, dist))
