diff --git a/docs/src/api/bayesian_regression.md b/docs/src/api/bayesian_regression.md index b0ad4eb..0ab4856 100644 --- a/docs/src/api/bayesian_regression.md +++ b/docs/src/api/bayesian_regression.md @@ -8,101 +8,101 @@ BayesianRegression ### Linear Regression with User Specific Gaussian Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) ``` ### Linear Regression with Ridge Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` ### Linear Regression with Laplace Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` ### Linear Regression with Cauchy Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Cauchy, sim_size::Int64 = 1000) ``` ### Linear Regression with T-distributed Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::TDist, h::Float64 = 
2.0, sim_size::Int64 = 1000) ``` ### Linear Regression with Horse Shoe Prior ```@docs -fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` ## Logistic Regression ### Logistic Regression with Ridge Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Ridge, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Ridge, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` ### Logistic Regression with Laplace Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` ### Logistic Regression with Cauchy Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` ### Logistic Regression with T-Distributed Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_TDist, h::Float64 = 1.0, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::TDist, h::Float64 = 1.0, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` ### Logistic Regression 
with Horse Shoe Prior ```@docs -fit(formula::FormulaTerm,data::DataFrame,modelClass::LogisticRegression,Link::CRRaoLink,prior::Prior_HorseShoe,level::Float64 = 0.95,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::LogisticRegression,Link::CRRaoLink,prior::HorseShoe,level::Float64 = 0.95,sim_size::Int64 = 1000) ``` ## Negative Binomial Regression ### Negative Binomial Regression with Ridge Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` ### Negative Binomial Regression with Laplace Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` ### Negative Binomial Regression with Cauchy Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` ### Negative Binomial Regression with T-Distributed Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_TDist, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::TDist, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` ### Negative Binomial Regression with HorseShoe Prior ```@docs -fit(formula::FormulaTerm,data::DataFrame,modelClass::NegBinomRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) 
+fit(formula::FormulaTerm,data::DataFrame,modelClass::NegBinomRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` ## Poisson Regression ### Poisson Regression with Ridge Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` ### Poisson Regression with Laplace Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` ### Poisson Regression with Cauchy Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` ### Poisson Regression with T-Distributed Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) ``` ### Poisson Regression with Horse Shoe Prior ```@docs -fit(formula::FormulaTerm,data::DataFrame,modelClass::PoissonRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::PoissonRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` \ No newline at end of file diff --git a/docs/src/api/interface.md b/docs/src/api/interface.md index 117d273..bcd54e0 100644 --- a/docs/src/api/interface.md +++ b/docs/src/api/interface.md @@ -30,12 +30,12 @@ It should be noted that not all model classes support every 
type of signature. T - [`Cauchit`](@ref) 5. CRRao also supports Bayesian models, and the priors to be can be specified while calling `fit`. Currently CRRao supports six different kinds of priors, and the type of the `prior` parameter must be one of the following. - - [`Prior_Gauss`](@ref) - - [`Prior_Ridge`](@ref) - - [`Prior_Laplace`](@ref) - - [`Prior_Cauchy`](@ref) - - [`Prior_TDist`](@ref) - - [`Prior_HorseShoe`](@ref) + - [`Gauss`](@ref) + - [`Ridge`](@ref) + - [`Laplace`](@ref) + - [`Cauchy`](@ref) + - [`TDist`](@ref) + - [`HorseShoe`](@ref) ## Model Classes and Data Models @@ -59,12 +59,12 @@ Cauchit ## Prior Distributions ```@docs -Prior_Gauss -Prior_Ridge -Prior_Laplace -Prior_Cauchy -Prior_TDist -Prior_HorseShoe +Gauss +Ridge +Laplace +Cauchy +TDist +HorseShoe ``` ## Setting Random Number Generators diff --git a/docs/src/man/guide.md b/docs/src/man/guide.md index 6070ce5..746525d 100644 --- a/docs/src/man/guide.md +++ b/docs/src/man/guide.md @@ -58,7 +58,7 @@ To understand more about these functions and in general how frequentist models w ## Tutorial: Bayesian Logistic Regression -Next, let's see an example of doing bayesian statistical inference with CRRao. In this example, we will perform bayesian logistic regression on the `turnout` dataset from R's [Zelig](https://zeligproject.org/). Further, we will use the [`Logit`](@ref) link function with a Ridge prior ([`Prior_Ridge`](@ref)). +Next, let's see an example of doing bayesian statistical inference with CRRao. In this example, we will perform bayesian logistic regression on the `turnout` dataset from R's [Zelig](https://zeligproject.org/). Further, we will use the [`Logit`](@ref) link function with a Ridge prior ([`Ridge`](@ref)). With this example, we'll also showcase how to use random number generators to get reproducible results. For this, we will use the [StableRNGs](https://github.com/JuliaRandom/StableRNGs.jl) package (although any random number generator can be used). 
So, first we import the required modules. @@ -90,5 +90,5 @@ turnout = dataset("Zelig", "turnout") And finally, we do the inference using our proposed model. ```@repl bayesian_logistic_regression -model = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_Ridge()) +model = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Ridge()) ``` diff --git a/src/CRRao.jl b/src/CRRao.jl index 59c062d..ad2df87 100644 --- a/src/CRRao.jl +++ b/src/CRRao.jl @@ -102,9 +102,11 @@ Type representing Residual Bootstrap. """ struct Boot_Residual end +abstract type Prior end + """ ```julia -Prior_Gauss +Gauss ``` Type representing the Gaussian Prior. Users have specific prior mean and standard deviation, for ``\\alpha`` and ``\\beta`` @@ -133,11 +135,11 @@ y_i \\sim N(\\mu_i,\\sigma), + ``\\mathbf{E}(y_i)=g(\\mu_i)``, and + ``Var(y_i)=\\sigma^2``. """ -struct Prior_Gauss end +struct Gauss <: Prior end """ ```julia -Prior_Ridge +Ridge ``` Type representing the Ridge Prior. @@ -167,11 +169,11 @@ y_i \\sim D(\\mu_i,\\sigma), + ``\\mathbf{E}(y_i)=g(\\mu_i)``, and + ``Var(y_i)=\\sigma^2``. """ -struct Prior_Ridge end +struct Ridge <: Prior end """ ```julia -Prior_Laplace +Laplace ``` Type representing the Laplace Prior. @@ -200,11 +202,11 @@ y_i \\sim D(\\mu_i,\\sigma), + ``\\mathbf{E}(y_i)=g(\\mu_i)``, and + ``Var(y_i)=\\sigma^2``. """ -struct Prior_Laplace end +struct Laplace <: Prior end """ ```julia -Prior_Cauchy +Cauchy ``` Type representing the Cauchy Prior. @@ -231,11 +233,11 @@ y_i \\sim D(\\mu_i,\\sigma), + ``\\mathbf{E}(y_i)=g(\\mu_i)``, and + ``Var(y_i)=\\sigma^2``. """ -struct Prior_Cauchy end +struct Cauchy <: Prior end """ ```julia -Prior_TDist +TDist ``` Type representing the T-Distributed Prior. @@ -266,12 +268,12 @@ y_i \\sim D(\\mu_i,\\sigma), + ``Var(y_i)=\\sigma^2``. + The ``t(v)`` is ``t`` distribution with ``v`` degrees of freedom. 
""" -struct Prior_TDist end +struct TDist <: Prior end """ ```julia -Prior_HorseShoe +HorseShoe ``` Type representing the HorseShoe Prior. @@ -305,7 +307,7 @@ y_i \\sim D(\\mu_i,\\sigma), i=1,2,\\cdots,n + ``Var(y_i)=\\sigma^2``, and + ``\\beta``=(``\\beta_1,\\beta_2,\\cdots,\\beta_p``) """ -struct Prior_HorseShoe end +struct HorseShoe <: Prior end """ ```julia @@ -393,7 +395,7 @@ end Cauchit() = Cauchit(Cauchit_Link) export LinearRegression, LogisticRegression, PoissonRegression, NegBinomRegression, Boot_Residual -export Prior_Ridge, Prior_Laplace, Prior_Cauchy, Prior_TDist, Prior_HorseShoe, Prior_Gauss +export Ridge, Laplace, Cauchy, TDist, HorseShoe, Gauss export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit export coef, coeftable, r2, adjr2, loglikelihood, aic, bic, sigma, predict, residuals, cooksdistance, BPTest, pvalue export FrequentistRegression, BayesianRegression diff --git a/src/bayesian/linear_regression.jl b/src/bayesian/linear_regression.jl index 010b148..a79e05f 100644 --- a/src/bayesian/linear_regression.jl +++ b/src/bayesian/linear_regression.jl @@ -11,7 +11,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a Ridge prior. 
@@ -36,7 +36,7 @@ julia> df = dataset("datasets", "mtcars") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_Ridge()) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Ridge()) ┌ Info: Found initial step size └ ϵ = 0.00078125 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -76,7 +76,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, - prior::Prior_Ridge, + prior::Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000 ) @@ -102,7 +102,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a Laplace prior. 
@@ -125,7 +125,7 @@ julia> df = dataset("datasets", "mtcars") 32 │ Volvo 142E 21.4 4 121.0 109 4.11 2.78 18.6 1 1 4 2 julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_Laplace()) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Laplace()) ┌ Info: Found initial step size └ ϵ = 0.00078125 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -165,7 +165,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, - prior::Prior_Laplace, + prior::Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000 ) @@ -177,8 +177,8 @@ function fit( b0 = 0.1 v ~ InverseGamma(h, h) σ ~ InverseGamma(a0, b0) - #α ~ Laplace(0, σ * v) - β ~ filldist(Laplace(0, σ * v), p) + #α ~ Distributions.Laplace(0, σ * v) + β ~ filldist(Distributions.Laplace(0, σ * v), p) #likelihood #y ~ MvNormal(α .+ X * β, σ) @@ -190,7 +190,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Cauchy, sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a Cauchy prior. 
@@ -214,7 +214,7 @@ julia> df = dataset("datasets", "mtcars") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_Cauchy(), 1000) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Cauchy(), 1000) ┌ Info: Found initial step size └ ϵ = 0.000390625 Chains MCMC chain (1000×17×1 Array{Float64, 3}): @@ -252,16 +252,16 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, - prior::Prior_Cauchy, + prior::Cauchy, sim_size::Int64 = 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) #priors - σ ~ Truncated(TDist(1), 0, Inf) - #α ~ TDist(1) * σ - β ~ filldist(TDist(1) * σ, p) + σ ~ Truncated(Distributions.TDist(1), 0, Inf) + #α ~ Distributions.TDist(1) * σ + β ~ filldist(Distributions.TDist(1) * σ, p) #likelihood #y ~ MvNormal(α .+ X * β, σ) @@ -273,7 +273,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a t(ν) distributed prior. 
@@ -297,7 +297,7 @@ julia> df = dataset("datasets", "mtcars") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_TDist()) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), TDist()) ┌ Info: Found initial step size └ ϵ = 2.44140625e-5 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -339,7 +339,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, - prior::Prior_TDist, + prior::TDist, h::Float64 = 2.0, sim_size::Int64 = 1000 ) @@ -351,8 +351,8 @@ function fit( b0 = 0.1 ν ~ InverseGamma(h, h) σ ~ InverseGamma(a0, b0) - #α ~ TDist(ν) * σ - β ~ filldist(TDist(ν) * σ, p) + #α ~ Distributions.TDist(ν) * σ + β ~ filldist(Distributions.TDist(ν) * σ, p) #likelihood #y ~ MvNormal(α .+ X * β, σ) @@ -365,7 +365,7 @@ end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a HorseShoe prior. @@ -374,7 +374,7 @@ Fit a Bayesian Linear Regression model on the input data with a HorseShoe prior. 
julia> using CRRao, RDatasets, StableRNGs, StatsPlots, StatsModels julia> df = dataset("datasets", "mtcars"); julia> CRRao.set_rng(StableRNG(123)); -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_HorseShoe()) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), HorseShoe()) ┌ Info: Found initial step size └ ϵ = 0.00078125 Chains MCMC chain (1000×22×1 Array{Float64, 3}): @@ -424,7 +424,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, - prior::Prior_HorseShoe, + prior::HorseShoe, sim_size::Int64 = 1000 ) @model LinearRegression(X, y) = begin @@ -432,7 +432,7 @@ function fit( #priors - halfcauchy = Truncated(TDist(1), 0, Inf) + halfcauchy = Truncated(Distributions.TDist(1), 0, Inf) τ ~ halfcauchy ## Global Shrinkage λ ~ filldist(halfcauchy, p) ## Local Shrinkage @@ -451,7 +451,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss,alpha_prior_mean::Float64 = 0.0, beta_prior_mean::Float64, sim_size::Int64 = 1000, h::Float64 = 0.1) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Gauss,alpha_prior_mean::Float64 = 0.0, beta_prior_mean::Float64, sim_size::Int64 = 1000, h::Float64 = 0.1) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean for α and β. 
User doesnot have @@ -462,7 +462,7 @@ Fit a Bayesian Linear Regression model on the input data with a Gaussian prior w julia> using CRRao, RDatasets, StableRNGs, StatsModels julia> df = dataset("datasets", "mtcars"); julia> CRRao.set_rng(StableRNG(123)); -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_Gauss(),30.0,[0.0,-3.0,1.0],1000) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Gauss(),30.0,[0.0,-3.0,1.0],1000) ┌ Info: Found initial step size └ ϵ = 0.000390625 Chains MCMC chain (1000×17×1 Array{Float64, 3}): @@ -500,7 +500,7 @@ function fit( formula::FormulaTerm , data::DataFrame , modelClass::LinearRegression - , prior::Prior_Gauss + , prior::Gauss , alpha_prior_mean::Float64 , beta_prior_mean::Vector{Float64} , sim_size::Int64 = 1000 @@ -535,7 +535,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean and sd for α and β. 
@@ -545,7 +545,7 @@ Fit a Bayesian Linear Regression model on the input data with a Gaussian prior w julia> using CRRao, RDatasets, StableRNGs, StatsModels julia> df = dataset("datasets", "mtcars"); julia> CRRao.set_rng(StableRNG(123)); -julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Prior_Gauss(),30.0,10.0,[0.0,-3.0,1.0],[0.1,1.0,1.0],1000) +julia> container = fit(@formula(MPG ~ HP + WT + Gear), df, LinearRegression(), Gauss(),30.0,10.0,[0.0,-3.0,1.0],[0.1,1.0,1.0],1000) ┌ Info: Found initial step size └ ϵ = 0.000390625 Chains MCMC chain (1000×17×1 Array{Float64, 3}): @@ -583,7 +583,7 @@ function fit( formula::FormulaTerm , data::DataFrame , modelClass::LinearRegression - , prior::Prior_Gauss + , prior::Gauss , alpha_prior_mean::Float64 , alpha_prior_sd::Float64 , beta_prior_mean::Vector{Float64} diff --git a/src/bayesian/logistic_regression.jl b/src/bayesian/logistic_regression.jl index 4c7d83a..20bf397 100644 --- a/src/bayesian/logistic_regression.jl +++ b/src/bayesian/logistic_regression.jl @@ -11,7 +11,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Ridge, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Ridge, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` Fit a Bayesian Logistic Regression model on the input data with a Ridge prior with the provided `Link` function. 
@@ -36,7 +36,7 @@ julia> turnout = dataset("Zelig", "turnout") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_Ridge()) +julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Ridge()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -72,7 +72,7 @@ Quantiles julia> predict(container_logit,turnout) julia> CRRao.set_rng(StableRNG(123)) -julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Prior_Ridge()) +julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Ridge()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -108,7 +108,7 @@ Quantiles julia> predict(container_probit,turnout) julia> CRRao.set_rng(StableRNG(123)) -julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Prior_Ridge()) +julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Ridge()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -142,7 +142,7 @@ Quantiles β[5] 0.0728 0.0879 0.0948 0.1019 0.1164 julia> CRRao.set_rng(StableRNG(123)) -julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Prior_Ridge()) +julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Ridge()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -184,7 +184,7 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, - prior::Prior_Ridge, + prior::Ridge, 
h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000 @@ -215,7 +215,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` Fit a Bayesian Logistic Regression model on the input data with a Laplace prior with the provided `Link` function. @@ -239,7 +239,7 @@ julia> turnout = dataset("Zelig", "turnout") 1999 │ white 22 10.0 2.4811 0 2000 │ white 59 10.0 0.5523 0 -julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_Laplace()) +julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Laplace()) ┌ Info: Found initial step size └ ϵ = 0.0015625 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -275,7 +275,7 @@ Quantiles β[5] 0.1282 0.1562 0.1693 0.1828 0.2087 -julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Prior_Laplace()) +julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Laplace()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -309,7 +309,7 @@ Quantiles β[5] 0.0772 0.0925 0.1004 0.1091 0.1219 julia> CRRao.set_rng(StableRNG(123)) -julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Prior_Laplace()) +julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Laplace()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -342,7 +342,7 @@ Quantiles β[4] 0.0549 0.0696 0.0772 0.0849 0.0999 β[5] 
0.0738 0.0882 0.0959 0.1031 0.1151 -julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Prior_Laplace()) +julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Laplace()) ┌ Info: Found initial step size └ ϵ = 0.00078125 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -383,7 +383,7 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, - prior::Prior_Laplace, + prior::Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000 @@ -394,7 +394,7 @@ function fit( #priors λ ~ InverseGamma(h, h) #α ~ Normal(0, λ) - β ~ filldist(Laplace(0, λ), p) + β ~ filldist(Distributions.Laplace(0, λ), p) #z = α .+ X * β z = X * β @@ -414,7 +414,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` Fit a Bayesian Logistic Regression model on the input data with a Cauchy prior with the provided `Link` function. 
@@ -438,7 +438,7 @@ julia> turnout = dataset("Zelig", "turnout") 1999 │ white 22 10.0 2.4811 0 2000 │ white 59 10.0 0.5523 0 1993 rows omitted -julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_Cauchy()) +julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Cauchy()) ┌ Info: Found initial step size └ ϵ = 0.0015625 Chains MCMC chain (1000×18×1 Array{Float64, 3}): @@ -473,7 +473,7 @@ Quantiles β[4] 0.1237 0.1595 0.1787 0.1962 0.2341 β[5] 0.1358 0.1608 0.1735 0.1866 0.2164 -julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Prior_Cauchy()) +julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Cauchy()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -505,7 +505,7 @@ Quantiles β[3] -0.0366 0.0566 0.1128 0.1706 0.2800 β[4] 0.0657 0.0852 0.0958 0.1058 0.1245 β[5] 0.0798 0.0949 0.1028 0.1111 0.1251 -julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Prior_Cauchy()) +julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Cauchy()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -538,7 +538,7 @@ Quantiles β[4] 0.0538 0.0689 0.0768 0.0842 0.1003 β[5] 0.0750 0.0883 0.0960 0.1042 0.1206 -julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Prior_Cauchy()) +julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Cauchy()) Chains MCMC chain (1000×18×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -577,7 +577,7 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, - 
prior::Prior_Cauchy, + prior::Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000 @@ -586,9 +586,9 @@ function fit( p = size(X, 2) n = size(X, 1) #priors - λ ~ Truncated(TDist(1), 0, Inf) - #α ~ TDist(1) * λ - β ~ filldist(TDist(1) * λ, p) + λ ~ Truncated(Distributions.TDist(1), 0, Inf) + #α ~ Distributions.TDist(1) * λ + β ~ filldist(Distributions.TDist(1) * λ, p) #z = α .+ X * β z = X * β @@ -608,7 +608,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_TDist, h::Float64 = 1.0, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::TDist, h::Float64 = 1.0, level::Float64 = 0.95, sim_size::Int64 = 1000) ``` Fit a Bayesian Logistic Regression model on the input data with a T-Dist prior with the provided `Link` function. @@ -634,7 +634,7 @@ julia> turnout = dataset("Zelig", "turnout") 1993 rows omitted julia> CRRao.set_rng(StableRNG(7740)); StableRNGs.LehmerRNG(state=0x00000000000000000000000000003c79) -julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_TDist()) +julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.003125 ┌ Warning: The current proposal will be rejected due to numerical error(s). 
@@ -675,7 +675,7 @@ Quantiles β[5] 0.1349 0.1604 0.1723 0.1861 0.2119 julia> CRRao.set_rng(StableRNG(7740)) -julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Prior_TDist()) +julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.00078125 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -713,7 +713,7 @@ Quantiles β[5] 0.0787 0.0937 0.1015 0.1095 0.1241 julia> CRRao.set_rng(StableRNG(7740)) -julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Prior_TDist()) +julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.0015625 Chains MCMC chain (10000×19×1 Array{Float64, 3}): @@ -753,7 +753,7 @@ Quantiles julia> CRRao.set_rng(StableRNG(7740)) StableRNGs.LehmerRNG(state=0x00000000000000000000000000003c79) -julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Prior_TDist()) +julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.8 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -796,7 +796,7 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, - prior::Prior_TDist, + prior::TDist, h::Float64 = 3.0, level::Float64 = 0.95, sim_size::Int64 = 1000 @@ -807,8 +807,8 @@ function fit( #priors λ ~ InverseGamma(h, h) ν ~ InverseGamma(h, h) - #α ~ TDist(ν) * λ - β ~ filldist(TDist(ν) * λ, p) + #α ~ Distributions.TDist(ν) * λ + β ~ filldist(Distributions.TDist(ν) * λ, p) #z = α .+ X * β z = X * β @@ -830,7 +830,7 @@ end """ ```julia 
-fit(formula::FormulaTerm,data::DataFrame,modelClass::LogisticRegression,Link::CRRaoLink,prior::Prior_HorseShoe,level::Float64 = 0.95,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::LogisticRegression,Link::CRRaoLink,prior::HorseShoe,level::Float64 = 0.95,sim_size::Int64 = 1000) ``` Fit a Bayesian Logistic Regression model on the input data with a HorseShoe prior with the provided `Link` function. @@ -841,7 +841,7 @@ Fit a Bayesian Logistic Regression model on the input data with a HorseShoe prio julia> using CRRao, RDatasets, StableRNGs, StatsModels julia> turnout = dataset("Zelig", "turnout"); julia> CRRao.set_rng(StableRNG(7740)) -julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), Prior_HorseShoe()) +julia> container_logit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Logit(), HorseShoe()) Chains MCMC chain (1000×24×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -887,7 +887,7 @@ Quantiles β[5] 0.1345 0.1600 0.1736 0.1888 0.2162 julia> CRRao.set_rng(StableRNG(7750)) -julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), Prior_HorseShoe()) +julia> container_probit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Probit(), HorseShoe()) Chains MCMC chain (1000×24×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -933,7 +933,7 @@ Quantiles β[5] 0.0782 0.0946 0.1021 0.1099 0.1258 julia> CRRao.set_rng(StableRNG(7750)) -julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), Prior_HorseShoe()) +julia> container_cloglog = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cloglog(), HorseShoe()) Chains MCMC chain (1000×24×1 Array{Float64, 3}): Iterations = 501:1:1500 @@ -979,7 +979,7 @@ Quantiles β[5] 0.0765 0.0894 0.0966 0.1039 0.1198 julia> 
CRRao.set_rng(StableRNG(7750)) -julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), Prior_HorseShoe()) +julia> container_cauchit = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), Cauchit(), HorseShoe()) ┌ Info: Found initial step size └ ϵ = 0.8 Chains MCMC chain (1000×24×1 Array{Float64, 3}): @@ -1033,7 +1033,7 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, - prior::Prior_HorseShoe, + prior::HorseShoe, level::Float64 = 0.95, sim_size::Int64 = 1000 ) @@ -1042,10 +1042,10 @@ function fit( n = size(X, 1) #priors #v ~ InverseGamma(h, h) - #α ~ TDist(1) + #α ~ Distributions.TDist(1) #β ~ filldist(Uniform(-v, v), p) - halfcauchy = Truncated(TDist(1), 0, Inf) + halfcauchy = Truncated(Distributions.TDist(1), 0, Inf) τ ~ halfcauchy ## Global Shrinkage λ ~ filldist(halfcauchy, p) ## Local Shrinkage diff --git a/src/bayesian/negativebinomial_regression.jl b/src/bayesian/negativebinomial_regression.jl index 20638ab..1e9cda9 100644 --- a/src/bayesian/negativebinomial_regression.jl +++ b/src/bayesian/negativebinomial_regression.jl @@ -16,7 +16,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` Fit a Bayesian Negative Binomial Regression model on the input data with a Ridge prior. 
@@ -41,7 +41,7 @@ julia> sanction = dataset("Zelig", "sanction") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Prior_Ridge()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Ridge()) ┌ Info: Found initial step size └ ϵ = 0.05 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -102,7 +102,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, - prior::Prior_Ridge, + prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000 ) @@ -131,7 +131,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) ``` Fit a Bayesian Negative Binomial Regression model on the input data with a Laplace prior. 
@@ -156,7 +156,7 @@ julia> sanction = dataset("Zelig", "sanction") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Prior_Laplace()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Laplace()) ┌ Info: Found initial step size └ ϵ = 0.05 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -217,7 +217,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, - prior::Prior_Laplace, + prior::Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000 ) @@ -227,8 +227,8 @@ function fit( #priors λ ~ InverseGamma(h, h) - #α ~ Laplace(0, λ) - β ~ filldist(Laplace(0, λ), p) + #α ~ Distributions.Laplace(0, λ) + β ~ filldist(Distributions.Laplace(0, λ), p) ## link #z = α .+ X * β @@ -246,7 +246,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` Fit a Bayesian Negative Binomial Regression model on the input data with a Cauchy prior. @@ -271,7 +271,7 @@ julia> sanction = dataset("Zelig", "sanction") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Prior_Cauchy()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Cauchy()) ┌ Info: Found initial step size └ ϵ = 0.05 ┌ Warning: The current proposal will be rejected due to numerical error(s). 
@@ -317,7 +317,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, - prior::Prior_Cauchy, + prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000 ) @@ -326,8 +326,8 @@ function fit( n = size(X, 1) #priors λ ~ InverseGamma(h, h) - #α ~ TDist(1) * λ - β ~ filldist(TDist(1) * λ, p) + #α ~ Distributions.TDist(1) * λ + β ~ filldist(Distributions.TDist(1) * λ, p) ## link #z = α .+ X * β @@ -345,7 +345,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::Prior_TDist, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, prior::TDist, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` Fit a Bayesian Negative Binomial Regression model on the input data with a t(ν) distributed prior. @@ -369,7 +369,7 @@ julia> sanction = dataset("Zelig", "sanction") 71 rows omitted julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Prior_TDist()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.05 Chains MCMC chain (1000×20×1 Array{Float64, 3}): @@ -414,7 +414,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, - prior::Prior_TDist, + prior::TDist, h::Float64 = 1.0, sim_size::Int64 = 1000 ) @@ -424,8 +424,8 @@ function fit( #priors λ ~ InverseGamma(h, h) ν ~ InverseGamma(h, h) - #α ~ TDist(ν) * λ - β ~ filldist(TDist(ν) * λ, p) + #α ~ Distributions.TDist(ν) * λ + β ~ filldist(Distributions.TDist(ν) * λ, p) ## link #z = α .+ X * β @@ -444,7 +444,7 @@ end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::NegBinomRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) 
+fit(formula::FormulaTerm,data::DataFrame,modelClass::NegBinomRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` Fit a Bayesian Negative Binomial Regression model on the input data with a HorseShoe prior. @@ -454,7 +454,7 @@ Fit a Bayesian Negative Binomial Regression model on the input data with a Horse julia> using CRRao, RDatasets, StableRNGs, StatsPlots, StatsModels julia> sanction = dataset("Zelig", "sanction"); julia> CRRao.set_rng(StableRNG(123)) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), Prior_HorseShoe()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, NegBinomRegression(), HorseShoe()) ┌ Info: Found initial step size └ ϵ = 0.05 Chains MCMC chain (1000×26×1 Array{Float64, 3}): @@ -510,7 +510,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::NegBinomRegression, - prior::Prior_HorseShoe, + prior::HorseShoe, sim_size::Int64 = 1000 ) @model NegativeBinomialRegression(X, y) = begin @@ -519,7 +519,7 @@ function fit( #priors - halfcauchy = Truncated(TDist(1), 0, Inf) + halfcauchy = Truncated(Distributions.TDist(1), 0, Inf) τ ~ halfcauchy ## Global Shrinkage λ ~ filldist(halfcauchy, p) ## Local Shrinkage diff --git a/src/bayesian/poisson_regression.jl b/src/bayesian/poisson_regression.jl index 0653ea2..ee9b127 100644 --- a/src/bayesian/poisson_regression.jl +++ b/src/bayesian/poisson_regression.jl @@ -11,7 +11,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` Fit a Bayesian Poisson Regression model on the input data with a Ridge prior. 
@@ -36,7 +36,7 @@ julia> sanction = dataset("Zelig", "sanction") julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Prior_Ridge()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Ridge()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -98,7 +98,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, - prior::Prior_Ridge, + prior::Ridge, h::Float64 = 0.1, sim_size::Int64 = 1000 ) @@ -126,7 +126,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000) ``` Fit a Bayesian Poisson Regression model on the input data with a Laplace prior. 
@@ -150,7 +150,7 @@ julia> sanction = dataset("Zelig", "sanction") 71 rows omitted julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Prior_Laplace()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Laplace()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -210,7 +210,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, - prior::Prior_Laplace, + prior::Laplace, h::Float64 = 0.1, sim_size::Int64 = 1000 ) @@ -219,8 +219,8 @@ function fit( n = size(X, 1) #priors λ ~ InverseGamma(h, h) - #α ~ Laplace(0, λ) - β ~ filldist(Laplace(0, λ), p) + #α ~ Distributions.Laplace(0, λ) + β ~ filldist(Distributions.Laplace(0, λ), p) ## link #z = α .+ X * β @@ -238,7 +238,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000) ``` Fit a Bayesian Poisson Regression model on the input data with a Cauchy prior. 
@@ -262,7 +262,7 @@ julia> sanction = dataset("Zelig", "sanction") 71 rows omitted julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Prior_Cauchy()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Cauchy()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×19×1 Array{Float64, 3}): @@ -304,7 +304,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, - prior::Prior_Cauchy, + prior::Cauchy, h::Float64 = 1.0, sim_size::Int64 = 1000 ) @@ -313,8 +313,8 @@ function fit( n = size(X, 1) #priors λ ~ InverseGamma(h, h) - #α ~ TDist(1) * λ - β ~ filldist(TDist(1) * λ, p) + #α ~ Distributions.TDist(1) * λ + β ~ filldist(Distributions.TDist(1) * λ, p) ## link #z = α .+ X * β @@ -332,7 +332,7 @@ end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, prior::TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) ``` Fit a Bayesian Poisson Regression model on the input data with a t(ν) distributed prior. 
@@ -358,7 +358,7 @@ julia> sanction = dataset("Zelig", "sanction") 71 rows omitted julia> CRRao.set_rng(StableRNG(123)) StableRNGs.LehmerRNG(state=0x000000000000000000000000000000f7) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Prior_TDist()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), TDist()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×20×1 Array{Float64, 3}): @@ -402,7 +402,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, - prior::Prior_TDist, + prior::TDist, h::Float64 = 2.0, sim_size::Int64 = 1000 ) @@ -412,8 +412,8 @@ function fit( #priors λ ~ InverseGamma(h, h) ν ~ InverseGamma(h, h) - #α ~ TDist(ν) * λ - β ~ filldist(TDist(ν) * λ, p) + #α ~ Distributions.TDist(ν) * λ + β ~ filldist(Distributions.TDist(ν) * λ, p) ## link #z = α .+ X * β @@ -433,7 +433,7 @@ end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::PoissonRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::PoissonRegression,prior::HorseShoe,sim_size::Int64 = 1000) ``` Fit a Bayesian Poisson Regression model on the input data with a Horse Shoe prior. 
@@ -443,7 +443,7 @@ Fit a Bayesian Poisson Regression model on the input data with a Horse Shoe prio julia> using CRRao, RDatasets, StableRNGs, StatsModels julia> sanction = dataset("Zelig", "sanction"); julia> CRRao.set_rng(StableRNG(123)) -julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), Prior_HorseShoe()) +julia> container = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression(), HorseShoe()) ┌ Info: Found initial step size └ ϵ = 0.025 Chains MCMC chain (1000×25×1 Array{Float64, 3}): @@ -499,7 +499,7 @@ function fit( formula::FormulaTerm, data::DataFrame, modelClass::PoissonRegression, - prior::Prior_HorseShoe, + prior::HorseShoe, sim_size::Int64 = 1000 ) @model PoissonRegression(X, y) = begin @@ -507,7 +507,7 @@ function fit( n = size(X, 1) #priors - halfcauchy = Truncated(TDist(1), 0, Inf) + halfcauchy = Truncated(Distributions.TDist(1), 0, Inf) τ ~ halfcauchy ## Global Shrinkage λ ~ filldist(halfcauchy, p) ## Local Shrinkage diff --git a/test/basic/LinearRegression.jl b/test/basic/LinearRegression.jl index 0df3635..462c97b 100644 --- a/test/basic/LinearRegression.jl +++ b/test/basic/LinearRegression.jl @@ -1,10 +1,10 @@ mtcars = dataset("datasets", "mtcars") priors = [ - Prior_Ridge(), - Prior_Laplace(), - Prior_Cauchy(), - Prior_TDist(), + Ridge(), + Laplace(), + Cauchy(), + TDist(), ] model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression()) diff --git a/test/basic/LogisticRegression.jl b/test/basic/LogisticRegression.jl index 970b3b4..143511d 100644 --- a/test/basic/LogisticRegression.jl +++ b/test/basic/LogisticRegression.jl @@ -3,10 +3,10 @@ turnout = dataset("Zelig", "turnout")[1:100,:] # Take a subset of rows to reduce links = [Logit(), Probit(), Cloglog(), Cauchit()] priors = [ - Prior_Ridge(), - Prior_Laplace(), - Prior_Cauchy(), - Prior_TDist(), + Ridge(), + Laplace(), + Cauchy(), + TDist(), ] for link in links diff --git a/test/basic/NegBinomialRegression.jl 
b/test/basic/NegBinomialRegression.jl index 04f8170..c6937ab 100644 --- a/test/basic/NegBinomialRegression.jl +++ b/test/basic/NegBinomialRegression.jl @@ -1,10 +1,10 @@ sanction = dataset("Zelig", "sanction") priors = [ - Prior_Ridge(), - Prior_Laplace(), - Prior_Cauchy(), - Prior_TDist(), + Ridge(), + Laplace(), + Cauchy(), + TDist(), ] CRRao.set_rng(StableRNG(123)) diff --git a/test/basic/PoissonRegression.jl b/test/basic/PoissonRegression.jl index ff04107..a23f6cd 100644 --- a/test/basic/PoissonRegression.jl +++ b/test/basic/PoissonRegression.jl @@ -1,10 +1,10 @@ sanction = dataset("Zelig", "sanction") priors = [ - Prior_Ridge(), - Prior_Laplace(), - Prior_Cauchy(), - Prior_TDist(), + Ridge(), + Laplace(), + Cauchy(), + TDist(), ] model = fit(@formula(Num ~ Target + Coop + NCost), sanction, PoissonRegression()) diff --git a/test/numerical/bayesian/LinearRegression.jl b/test/numerical/bayesian/LinearRegression.jl index 8758c25..79f3a72 100644 --- a/test/numerical/bayesian/LinearRegression.jl +++ b/test/numerical/bayesian/LinearRegression.jl @@ -1,11 +1,11 @@ mtcars = dataset("datasets", "mtcars") tests = [ - (Prior_Ridge(), 20.080877893580514), - (Prior_Laplace(), 20.070783434589128), - (Prior_Cauchy(), 20.019759144845644), - (Prior_TDist(), 20.08147561106022), - (Prior_HorseShoe(), 20.042984550677183), + (Ridge(), 20.080877893580514), + (Laplace(), 20.070783434589128), + (Cauchy(), 20.019759144845644), + (TDist(), 20.08147561106022), + (HorseShoe(), 20.042984550677183), ] for (prior, test_mean) in tests @@ -18,6 +18,6 @@ end gauss_test = 20.0796026428345 CRRao.set_rng(StableRNG(123)) -model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0], 1000) +model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Gauss(), 30.0, [0.0,-3.0,1.0], 1000) @test mean(predict(model, mtcars)) ≈ gauss_test \ No newline at end of file diff --git a/test/numerical/bayesian/LogisticRegression.jl 
b/test/numerical/bayesian/LogisticRegression.jl index 2708cfa..78c36cc 100644 --- a/test/numerical/bayesian/LogisticRegression.jl +++ b/test/numerical/bayesian/LogisticRegression.jl @@ -2,7 +2,7 @@ turnout = dataset("Zelig", "turnout")[1:100,:] # Take a subset of rows to reduce tests = [ ( - Prior_Ridge(), + Ridge(), ( (Logit(), 0.7690822208626806), (Probit(), 0.7685999218881091), @@ -11,7 +11,7 @@ tests = [ ) ), ( - Prior_Laplace(), + Laplace(), ( (Logit(), 0.7718593681922629), (Probit(), 0.7695587585010469), @@ -20,7 +20,7 @@ tests = [ ) ), ( - Prior_Cauchy(), + Cauchy(), ( (Logit(), 0.7678814727043146), (Probit(), 0.764699194194744), @@ -29,7 +29,7 @@ tests = [ ) ), ( - Prior_TDist(), + TDist(), ( (Logit(), 0.588835403024102), (Probit(), 0.7642595382152266), @@ -38,7 +38,7 @@ tests = [ ) ), ( - Prior_HorseShoe(), + HorseShoe(), ( (Logit(), 0.38683395333332327), (Probit(), 0.38253233489484173), diff --git a/test/numerical/bayesian/NegBinomialRegression.jl b/test/numerical/bayesian/NegBinomialRegression.jl index 0d75a20..f254c17 100644 --- a/test/numerical/bayesian/NegBinomialRegression.jl +++ b/test/numerical/bayesian/NegBinomialRegression.jl @@ -1,11 +1,11 @@ sanction = dataset("Zelig", "sanction") tests = [ - (Prior_Ridge(), 6.999865486088317), - (Prior_Laplace(), 6.886529206600885), - (Prior_Cauchy(), 6.900001819752649), - (Prior_TDist(), 6.876415480722939), - (Prior_HorseShoe(), 6.902138507950901), + (Ridge(), 6.999865486088317), + (Laplace(), 6.886529206600885), + (Cauchy(), 6.900001819752649), + (TDist(), 6.876415480722939), + (HorseShoe(), 6.902138507950901), ] for (prior, test_mean) in tests diff --git a/test/numerical/bayesian/PoissonRegression.jl b/test/numerical/bayesian/PoissonRegression.jl index d80b490..3e5c54e 100644 --- a/test/numerical/bayesian/PoissonRegression.jl +++ b/test/numerical/bayesian/PoissonRegression.jl @@ -1,11 +1,11 @@ sanction = dataset("Zelig", "sanction") tests = [ - (Prior_Ridge(), 7.163048138457556), - (Prior_Laplace(), 
7.164837449702468), - (Prior_Cauchy(), 7.166326185314563), - (Prior_TDist(), 7.167147727917408), - (Prior_HorseShoe(), 7.158818008027834), + (Ridge(), 7.163048138457556), + (Laplace(), 7.164837449702468), + (Cauchy(), 7.166326185314563), + (TDist(), 7.167147727917408), + (HorseShoe(), 7.158818008027834), ] for (prior, test_mean) in tests