Commit
Merge pull request #235 from biaslab/224-gaussian-mixture-relax-constraints-all-around

224 gaussian mixture relax constraints all around
bvdmitri authored Nov 4, 2022
2 parents ae952de + e200708 commit 866780d
Showing 11 changed files with 280 additions and 93 deletions.
10 changes: 6 additions & 4 deletions src/distributions/wishart.jl
@@ -49,14 +49,16 @@ end

 function Distributions.mean(::typeof(logdet), distribution::WishartMessage)
     d = size(distribution, 1)
     ν, invS = (distribution.ν, distribution.invS)
-    return mapreduce(i -> digamma((ν + 1 - i) / 2), +, 1:d) + d * log(2) - logdet(invS)
+    T = promote_type(typeof(ν), eltype(invS))
+    return mapreduce(i -> digamma((ν + 1 - i) / 2), +, 1:d) + d * log(convert(T, 2)) - logdet(invS)
 end

 function Distributions.mean(::typeof(logdet), distribution::Wishart)
     d = size(distribution, 1)
     ν, S = params(distribution)
-    return mapreduce(i -> digamma((ν + 1 - i) / 2), +, 1:d) + d * log(2) + logdet(S)
+    T = promote_type(typeof(ν), eltype(S))
+    return mapreduce(i -> digamma((ν + 1 - i) / 2), +, 1:d) + d * log(convert(T, 2)) + logdet(S)
 end

 function Distributions.mean(::typeof(inv), distribution::WishartDistributionsFamily)
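Why this matters: `log(2)` always evaluates to a `Float64`, so the old code silently promoted `Float32` parameters. A minimal plain-Julia sketch of the difference (independent of ReactiveMP):

```julia
# Why log(convert(T, 2)) keeps the computation in the input precision.
ν = 3.0f0                               # Float32 degrees of freedom
S = Float32[2.0 0.0; 0.0 1.0]           # Float32 scale matrix

T = promote_type(typeof(ν), eltype(S))  # Float32

naive  = 2 * log(2)                     # log(2) is Float64, result promotes
stable = 2 * log(convert(T, 2))         # stays Float32

println(typeof(naive))   # Float64
println(typeof(stable))  # Float32
```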
12 changes: 8 additions & 4 deletions src/nodes/mv_normal_mean_precision.jl
@@ -34,14 +34,16 @@ end
     m_out, v_out = mean_cov(q_out)
     df_Λ, S_Λ = params(q_Λ) # prevent allocation of mean matrix

-    result = zero(promote_type(eltype(m_mean), eltype(m_out), eltype(S_Λ)))
+    T = promote_type(eltype(m_mean), eltype(m_out), eltype(S_Λ))
+    result = zero(T)

     @inbounds for k1 in 1:dim, k2 in 1:dim
         # optimize trace operation (indices can be interchanged because of symmetry)
         result += S_Λ[k1, k2] * (v_out[k1, k2] + v_mean[k1, k2] + (m_out[k2] - m_mean[k2]) * (m_out[k1] - m_mean[k1]))
     end

     result *= df_Λ
-    result += dim * log2π
+    result += dim * convert(T, log2π)
     result -= mean(logdet, q_Λ)
     result /= 2

@@ -56,8 +58,10 @@ end
     m, V = mean_cov(q_out_μ)
     m_Λ = mean(q_Λ)

-    result = zero(promote_type(eltype(m), eltype(m_Λ)))
-    result += dim * log2π
+    T = promote_type(eltype(m), eltype(m_Λ))
+
+    result = zero(T)
+    result += dim * convert(T, log2π)
     result -= mean(logdet, q_Λ)
     @inbounds for k1 in 1:dim, k2 in 1:dim
         # optimize trace operation (indices can be interchanged because of symmetry)
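The same promotion fix as in wishart.jl, applied to the `log2π` constant. A sketch of the accumulator pattern, with `log2π` written out as `log(2π)` to stay dependency-free:

```julia
m = Float32[0.5, -1.0]                  # Float32 mean
Λ = Float32[2.0 0.0; 0.0 1.0]           # Float32 precision

T = promote_type(eltype(m), eltype(Λ))  # Float32
dim = length(m)

result = zero(T)
result += dim * convert(T, log(2π))     # Float32; `dim * log(2π)` would give Float64
println(typeof(result))                 # Float32
```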
41 changes: 6 additions & 35 deletions src/nodes/normal_mixture.jl
@@ -139,48 +139,19 @@ end

 # FreeEnergy related functions

-@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::ManyOf{N, UnivariateGaussianDistributionsFamily}, q_p::ManyOf{N, GammaDistributionsFamily}) where {N} = begin
+@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::ManyOf{N, Any}, q_p::ManyOf{N, Any}) where {N} = begin
     z_bar = probvec(q_switch)
     return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
+        return avg_energy_nm(variate_form(q_out), q_out, q_m, q_p, z_bar, i)
     end
 end

-@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::NTuple{N, MultivariateGaussianDistributionsFamily}, q_p::NTuple{N, Wishart}) where {N} = begin
-    z_bar = probvec(q_switch)
-    return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
-    end
-end
-
-@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::NTuple{N, PointMass{T} where T <: Real}, q_p::NTuple{N, PointMass{T} where T <: Real}) where {N} = begin
-    z_bar = probvec(q_switch)
-    return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
-    end
-end
-
-@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::ManyOf{N, MultivariateGaussianDistributionsFamily}, q_p::ManyOf{N, Wishart}) where {N} = begin
-    z_bar = probvec(q_switch)
-    return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
-    end
-end
-
-@average_energy NormalMixture (q_out::Any, q_switch::Any, q_m::ManyOf{N, PointMass{T} where T <: Real}, q_p::ManyOf{N, PointMass{T} where T <: Real}) where {N} = begin
-    z_bar = probvec(q_switch)
-    return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
-    end
-end
-
-@average_energy NormalMixture (
-    q_out::Any, q_switch::Any, q_m::ManyOf{N, PointMass{T} where T <: AbstractVector}, q_p::ManyOf{N, PointMass{T} where T <: AbstractMatrix}
-) where {N} = begin
-    z_bar = probvec(q_switch)
-    return mapreduce(+, 1:N; init = 0.0) do i
-        return z_bar[i] * score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
-    end
-end
-
+function avg_energy_nm(::Type{Univariate}, q_out, q_m, q_p, z_bar, i)
+    return z_bar[i] * score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
+end
+
+function avg_energy_nm(::Type{Multivariate}, q_out, q_m, q_p, z_bar, i)
+    return z_bar[i] * score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[i], q_p[i])), nothing)
+end

 function score(::Type{T}, ::FactorBoundFreeEnergy, ::Stochastic, node::NormalMixtureNode{N, MeanField}, skip_strategy, scheduler) where {T <: CountingReal, N}
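Both `avg_energy_nm` methods assemble the same quantity: the mean-field average energy of the mixture node as the responsibility-weighted sum of per-component Gaussian average energies (a standard variational message passing identity, stated here for orientation):

$$
U[\mathrm{NormalMixture}] \;=\; \sum_{i=1}^{N} \bar{z}_i \, U\big[\mathcal{N}(\mathrm{out};\, m_i,\, p_i^{-1})\big],
\qquad \bar{z} = \mathrm{probvec}(q_{\mathrm{switch}})
$$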
12 changes: 4 additions & 8 deletions src/rules/normal_mixture/m.jl
@@ -1,15 +1,11 @@
 export rule

-@rule NormalMixture((:m, k), Marginalisation) (q_out::Any, q_switch::Any, q_p::GammaDistributionsFamily) = begin
+@rule NormalMixture((:m, k), Marginalisation) (q_out::Any, q_switch::Any, q_p::Any) = begin
     pv = probvec(q_switch)
     T = eltype(pv)
     z_bar = clamp.(pv, tiny, one(T) - tiny)
-    return NormalMeanVariance(mean(q_out), inv(z_bar[k] * mean(q_p)))
-end
-
-@rule NormalMixture((:m, k), Marginalisation) (q_out::Any, q_switch::Any, q_p::Wishart) = begin
-    pv = probvec(q_switch)
-    T = eltype(pv)
-    z_bar = clamp.(pv, tiny, one(T) - tiny)
-    return MvNormalMeanCovariance(mean(q_out), cholinv(z_bar[k] * mean(q_p)))
+    F = variate_form(q_out)
+
+    return convert(promote_variate_type(F, NormalMeanPrecision), mean(q_out), z_bar[k] * mean(q_p))
 end
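A note on the clamping in the rule above: with a hard switch assignment (z̄ₖ exactly 0), the outgoing message precision z̄ₖ · mean(q_p) would collapse to zero, i.e. an improper, infinitely wide message. A small sketch of the effect; `tiny` here is a stand-in value for ReactiveMP's constant of the same name:

```julia
tiny = 1e-12                        # stand-in; ReactiveMP defines its own tiny
pv = [1.0, 0.0]                     # hard assignment from q_switch
z_bar = clamp.(pv, tiny, 1 - tiny)  # ≈ [1.0, 1.0e-12], strictly inside (0, 1)

mean_q_p = 2.5                      # mean of the precision marginal q_p
println(z_bar[2] * mean_q_p)        # small but nonzero precision for k = 2
```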
15 changes: 8 additions & 7 deletions src/rules/normal_mixture/p.jl
@@ -1,16 +1,17 @@
 export rule

-@rule NormalMixture((:p, k), Marginalisation) (q_out::Any, q_switch::Any, q_m::UnivariateNormalDistributionsFamily) = begin
+@rule NormalMixture((:p, k), Marginalisation) (q_out::Any, q_switch::Any, q_m::Any) = begin
     m_mean_k, v_mean_k = mean_cov(q_m)
     m_out, v_out = mean_cov(q_out)
     z_bar = probvec(q_switch)

-    return GammaShapeRate(one(eltype(z_bar)) + z_bar[k] / 2, z_bar[k] * (v_out + v_mean_k + abs2(m_out - m_mean_k)) / 2)
+    return rule_nm_p_k(variate_form(q_out), m_mean_k, v_mean_k, m_out, v_out, z_bar, k)
 end

-@rule NormalMixture((:p, k), Marginalisation) (q_out::Any, q_switch::Any, q_m::MultivariateNormalDistributionsFamily) = begin
-    m_mean_k, v_mean_k = mean_cov(q_m)
-    m_out, v_out = mean_cov(q_out)
-    z_bar = probvec(q_switch)
-    d = length(m_mean_k)
-    return WishartMessage(one(eltype(z_bar)) + z_bar[k] + d, z_bar[k] * (v_out + v_mean_k + (m_out - m_mean_k) * (m_out - m_mean_k)'))
-end
+function rule_nm_p_k(::Type{Univariate}, m_mean_k, v_mean_k, m_out, v_out, z_bar, k)
+    return GammaShapeRate(one(eltype(z_bar)) + z_bar[k] / 2, z_bar[k] * (v_out + v_mean_k + abs2(m_out - m_mean_k)) / 2)
+end
+
+function rule_nm_p_k(::Type{Multivariate}, m_mean_k, v_mean_k, m_out, v_out, z_bar, k)
+    return WishartMessage(one(eltype(z_bar)) + z_bar[k] + length(m_mean_k), z_bar[k] * (v_out + v_mean_k + (m_out - m_mean_k) * (m_out - m_mean_k)'))
+end
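A quick numeric check of the univariate branch above; plain Julia restating the closed form returned by `rule_nm_p_k(::Type{Univariate}, ...)`:

```julia
# shape = 1 + z̄ₖ / 2, rate = z̄ₖ * (v_out + v_mean + (m_out - m_mean)²) / 2
z_k = 0.5
m_out, v_out = 0.0, 1.0
m_mean, v_mean = 1.0, 2.0

shape = 1 + z_k / 2                                        # 1.25
rate  = z_k * (v_out + v_mean + abs2(m_out - m_mean)) / 2  # 0.5 * 4 / 2 = 1.0
println((shape, rate))
```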
41 changes: 6 additions & 35 deletions src/rules/normal_mixture/switch.jl
@@ -8,46 +8,17 @@ export rule
 #     return Bernoulli(clamp(softmax((U1, U2))[1], tiny, 1.0 - tiny))
 # end

-@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::ManyOf{N, UnivariateNormalDistributionsFamily}, q_p::ManyOf{N, GammaDistributionsFamily}) where {N} = begin
+@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::ManyOf{N, Any}, q_p::ManyOf{N, Any}) where {N} = begin
     U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
+        return rule_nm_switch_k(variate_form(m), q_out, m, p)
     end
     return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
 end

-@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::NTuple{N, MultivariateNormalDistributionsFamily}, q_p::NTuple{N, Wishart}) where {N} = begin
-    U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
-    end
-    return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
-end
-
-@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::NTuple{N, PointMass{T} where T <: Real}, q_p::NTuple{N, PointMass{T} where T <: Real}) where {N} = begin
-    U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
-    end
-    return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
-end
-
-@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::ManyOf{N, MultivariateNormalDistributionsFamily}, q_p::ManyOf{N, Wishart}) where {N} = begin
-    U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
-    end
-    return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
-end
-
-@rule NormalMixture{N}(:switch, Marginalisation) (q_out::Any, q_m::ManyOf{N, PointMass{T} where T <: Real}, q_p::ManyOf{N, PointMass{T} where T <: Real}) where {N} = begin
-    U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
-    end
-    return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
-end
-
-@rule NormalMixture{N}(:switch, Marginalisation) (
-    q_out::Any, q_m::ManyOf{N, PointMass{T} where T <: AbstractVector}, q_p::ManyOf{N, PointMass{T} where T <: AbstractMatrix}
-) where {N} = begin
-    U = map(zip(q_m, q_p)) do (m, p)
-        return -score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
-    end
-    return Categorical(clamp!(softmax!(U), tiny, one(eltype(U)) - tiny))
-end
-
+function rule_nm_switch_k(::Type{Univariate}, q_out, m, p)
+    return -score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
+end
+
+function rule_nm_switch_k(::Type{Multivariate}, q_out, m, p)
+    return -score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, m, p)), nothing)
+end
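The post-processing shared by all switch rules (softmax of the negative average energies, then clamping away exact zeros and ones) can be sketched standalone. Assumptions: `softmax!` is taken from LogExpFunctions, and `tiny` is again a stand-in value:

```julia
using LogExpFunctions: softmax!  # assumed dependency; any softmax works here

tiny = 1e-12
U = [-1.2, -0.3, -4.0]           # -⟨average energy⟩ per mixture component

p = clamp!(softmax!(copy(U)), tiny, 1 - tiny)
println(p)                       # responsibilities, strictly inside (0, 1)
println(sum(p))                  # ≈ 1
```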
91 changes: 91 additions & 0 deletions test/nodes/test_normal_mixture.jl
@@ -0,0 +1,91 @@
module NodesNormalMixtureTest

using Test
using ReactiveMP
using Random
using Distributions

import ReactiveMP: @test_rules
import ReactiveMP: WishartMessage, ManyOf

@testset "NormalMixtureNode" begin
@testset "AverageEnergy" begin
begin
q_out = NormalMeanVariance(0.0, 1.0)
q_switch = Bernoulli(0.2)
q_m = (NormalMeanVariance(1.0, 2.0), NormalMeanVariance(3.0, 4.0))
q_p = (GammaShapeRate(2.0, 3.0), GammaShapeRate(4.0, 5.0))

marginals = (
Marginal(q_out, false, false),
Marginal(q_switch, false, false),
ManyOf(map(q_m_ -> Marginal(q_m_, false, false), q_m)),
ManyOf(map(q_p_ -> Marginal(q_p_, false, false), q_p))
)

ref_val =
0.2 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[1], q_p[1])), nothing)) +
0.8 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[2], q_p[2])), nothing))
@test score(AverageEnergy(), NormalMixture, Val{(:out, :switch, :m, :p)}, marginals, nothing) ≈ ref_val
end

begin
q_out = NormalMeanVariance(1.0, 1.0)
q_switch = Bernoulli(0.4)
q_m = (NormalMeanVariance(3.0, 2.0), NormalMeanVariance(3.0, 4.0))
q_p = (GammaShapeRate(2.0, 3.0), GammaShapeRate(1.0, 5.0))

marginals = (
Marginal(q_out, false, false),
Marginal(q_switch, false, false),
ManyOf(map(q_m_ -> Marginal(q_m_, false, false), q_m)),
ManyOf(map(q_p_ -> Marginal(q_p_, false, false), q_p))
)

ref_val =
0.4 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[1], q_p[1])), nothing)) +
0.6 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[2], q_p[2])), nothing))
@test score(AverageEnergy(), NormalMixture, Val{(:out, :switch, :m, :p)}, marginals, nothing) ≈ ref_val
end

begin
q_out = NormalMeanVariance(0.0, 1.0)
q_switch = Categorical([0.5, 0.5])
q_m = (NormalMeanPrecision(1.0, 2.0), NormalMeanPrecision(3.0, 4.0))
q_p = (GammaShapeRate(3.0, 3.0), GammaShapeRate(4.0, 5.0))

marginals = (
Marginal(q_out, false, false),
Marginal(q_switch, false, false),
ManyOf(map(q_m_ -> Marginal(q_m_, false, false), q_m)),
ManyOf(map(q_p_ -> Marginal(q_p_, false, false), q_p))
)

ref_val =
0.5 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[1], q_p[1])), nothing)) +
0.5 * (score(AverageEnergy(), NormalMeanPrecision, Val{(:out, :μ, :τ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[2], q_p[2])), nothing))
@test score(AverageEnergy(), NormalMixture, Val{(:out, :switch, :m, :p)}, marginals, nothing) ≈ ref_val
end

begin
q_out = MvNormalMeanCovariance([0.0], [1.0])
q_switch = Categorical([0.5, 0.5])
q_m = (MvNormalMeanPrecision([1.0], [2.0]), MvNormalMeanPrecision([3.0], [4.0]))
q_p = (WishartMessage(3.0, fill(3.0, 1, 1)), WishartMessage(4.0, fill(5.0, 1, 1)))

marginals = (
Marginal(q_out, false, false),
Marginal(q_switch, false, false),
ManyOf(map(q_m_ -> Marginal(q_m_, false, false), q_m)),
ManyOf(map(q_p_ -> Marginal(q_p_, false, false), q_p))
)

ref_val =
0.5 * (score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[1], q_p[1])), nothing)) +
0.5 * (score(AverageEnergy(), MvNormalMeanPrecision, Val{(:out, :μ, :Λ)}, map((q) -> Marginal(q, false, false), (q_out, q_m[2], q_p[2])), nothing))
@test score(AverageEnergy(), NormalMixture, Val{(:out, :switch, :m, :p)}, marginals, nothing) ≈ ref_val
end
end
end

end
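The reference values in these tests can be reproduced by hand from the standard average energy of a Normal with random mean and precision, U = ½(ln 2π − ⟨ln τ⟩ + ⟨τ⟩ · (v_out + v_m + (m_out − m_m)²)). A sketch for the first test case; the closed form is the textbook VMP result, not code from the package:

```julia
using SpecialFunctions: digamma

# ⟨τ⟩ = a / b and ⟨ln τ⟩ = ψ(a) - ln(b) for τ ~ GammaShapeRate(a, b)
U(m_out, v_out, m_m, v_m, a, b) =
    (log(2π) - (digamma(a) - log(b)) + (a / b) * (v_out + v_m + abs2(m_out - m_m))) / 2

u1 = U(0.0, 1.0, 1.0, 2.0, 2.0, 3.0)  # component 1: q_m = N(1, 2), q_p = Γ(2, 3)
u2 = U(0.0, 1.0, 3.0, 4.0, 4.0, 5.0)  # component 2: q_m = N(3, 4), q_p = Γ(4, 5)
println(0.2 * u1 + 0.8 * u2)          # should match ref_val of the first case
```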
69 changes: 69 additions & 0 deletions test/rules/normal_mixture/test_m.jl
@@ -0,0 +1,69 @@
module RulesNormalMixtureMTest

using Test
using ReactiveMP
using Random
using Distributions

import ReactiveMP: @test_rules

@testset "rules:NormalMixture:m" begin
@testset "Variational : (m_out::UnivariateNormalDistributionsFamily..., m_p::GammaDistributionsFamily...) k=1" begin
@test_rules [with_float_conversions = true] NormalMixture{2}((:m, k = 1), Marginalisation) [
(input = (q_out = NormalMeanVariance(8.5, 0.5), q_switch = Bernoulli(0.2), q_p = GammaShapeRate(1.0, 2.0)), output = NormalMeanPrecision(8.5, 0.1)),
(
input = (q_out = NormalWeightedMeanPrecision(3 / 10, 6 / 10), q_switch = Categorical([0.5, 0.5]), q_p = GammaShapeRate(1.0, 1.0)),
output = NormalMeanPrecision(0.5, 0.5)
),
(
input = (q_out = NormalWeightedMeanPrecision(5.0, 1 / 4), q_switch = Categorical([0.75, 0.25]), q_p = GammaShapeScale(1.0, 1.0)),
output = NormalMeanPrecision(20.0, 0.75)
),
(input = (q_out = NormalWeightedMeanPrecision(1, 1), q_switch = Categorical([1.0, 0.0]), q_p = GammaShapeRate(1.0, 2.0)), output = NormalMeanPrecision(1.0, 0.5))
]
end

@testset "Variational : (m_out::UnivariateNormalDistributionsFamily..., m_p::GammaDistributionsFamily...) k=2" begin
@test_rules [with_float_conversions = true] NormalMixture{2}((:m, k = 2), Marginalisation) [
(input = (q_out = NormalMeanVariance(8.5, 0.5), q_switch = Bernoulli(0.2), q_p = GammaShapeRate(1.0, 2.0)), output = NormalMeanPrecision(8.5, 0.4)),
(
input = (q_out = NormalWeightedMeanPrecision(3 / 10, 6 / 10), q_switch = Categorical([0.5, 0.5]), q_p = GammaShapeRate(1.0, 1.0)),
output = NormalMeanPrecision(0.5, 0.5)
),
(
input = (q_out = NormalWeightedMeanPrecision(5.0, 1 / 4), q_switch = Categorical([0.75, 0.25]), q_p = GammaShapeScale(1.0, 1.0)),
output = NormalMeanPrecision(20.0, 0.25)
)
]
end

@testset "Variational : (m_out::MultivariateNormalDistributionsFamily..., m_p::Wishart...) k=1" begin
@test_rules [with_float_conversions = true, atol = 1e-4] NormalMixture{2}((:m, k = 1), Marginalisation) [
(
input = (
q_out = MvNormalWeightedMeanPrecision([6.75, 12.0], [4.5 -0.75; -0.75 4.5]), q_switch = Categorical([0.5, 0.5]), q_p = Wishart(3.0, [2.0 -0.25; -0.25 1.0])
),
output = MvNormalMeanPrecision([2.0, 3.0], [3.0 -0.375; -0.375 1.5])
),
(
input = (
q_out = MvNormalMeanPrecision([3.75, 10.3125], [5.25 -0.75; -0.75 3.75]), q_switch = Categorical([0.75, 0.25]), q_p = Wishart(3.0, [2.0 -0.25; -0.25 1.0])
),
output = MvNormalMeanPrecision([3.75, 10.3125], [4.5 -0.5625; -0.5625 2.25])
),
(
input = (q_out = MvNormalMeanPrecision([0.75, 17.25], [3.0 -0.75; -0.75 6.0]), q_switch = Categorical([1.0, 0.0]), q_p = Wishart(3.0, [2.0 -0.25; -0.25 1.0])),
output = MvNormalMeanPrecision([0.75, 17.25], [6.0 -0.75; -0.75 3.0])
)
]
end

@testset "Variational : (m_out::UnivariateNormalDistributionsFamily..., m_p::GammaDistributionsFamily...) k=1" begin
@test_rules [with_float_conversions = true] NormalMixture{2}((:m, k = 1), Marginalisation) [
(input = (q_out = PointMass(8.5), q_switch = Bernoulli(0.2), q_p = GammaShapeRate(1.0, 2.0)), output = NormalMeanPrecision(8.5, 0.1)),
(input = (q_out = NormalWeightedMeanPrecision(3 / 10, 6 / 10), q_switch = Categorical([0.5, 0.5]), q_p = PointMass(1.0)), output = NormalMeanPrecision(0.5, 0.5))
]
end
end

end
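The expected outputs in these cases follow directly from the relaxed m-rule: the message carries mean(q_out) and precision z̄ₖ · mean(q_p). Checking the first k = 1 entry by hand, with the responsibilities written out as the test implies them:

```julia
z_bar = (0.2, 0.8)    # weights the test pairs with Bernoulli(0.2)
mean_q_out = 8.5      # mean of NormalMeanVariance(8.5, 0.5)
mean_q_p = 1.0 / 2.0  # mean of GammaShapeRate(1.0, 2.0) is shape / rate

println((mean_q_out, z_bar[1] * mean_q_p))  # (8.5, 0.1) = NormalMeanPrecision(8.5, 0.1)
```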

2 comments on commit 866780d

@bvdmitri (Member Author):

@JuliaRegistrator register

@JuliaRegistrator:

Registration pull request created: JuliaRegistries/General/71698

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v3.0.2 -m "<description of version>" 866780d430d3320669c24b56bc20690c6ffbea99
git push origin v3.0.2
