From d6c038bf86e2e9f8e3c026d9402ea0b334751dc3 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Wed, 26 Feb 2020 20:36:59 +0100
Subject: [PATCH 01/16] return the dispersion for varest of GLMMs with a dispersion param

---
 src/generalizedlinearmixedmodel.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 71edb1e44..7dc3d05bf 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -585,7 +585,7 @@ function updateη!(m::GeneralizedLinearMixedModel)
     m
 end
 
-varest(m::GeneralizedLinearMixedModel{T}) where {T} = one(T)
+varest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? dispersion(m) : one(T)
 
 # delegate GLMM method to LMM field
 for f in (

From 2760775af9f0f35448f49190f67d5b73b84c0370 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Wed, 26 Feb 2020 20:57:44 +0100
Subject: [PATCH 02/16] export additional links

---
 src/MixedModels.jl | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/MixedModels.jl b/src/MixedModels.jl
index 7fcad200f..98ce568b3 100644
--- a/src/MixedModels.jl
+++ b/src/MixedModels.jl
@@ -42,6 +42,7 @@ export @formula,
     Gamma,
     GeneralizedLinearMixedModel,
     HelmertCoding,
+    IdentityLink,
     InverseGaussian,
     InverseLink,
     LinearMixedModel,
@@ -52,6 +53,7 @@ export @formula,
     Normal,
     OptSummary,
     Poisson,
+    ProbitLink,
     RaggedArray,
     RandomEffectsTerm,
     ReMat,

From c8261be750ffb58818dcf0f795ebe3280b7b3c32 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Thu, 27 Feb 2020 00:09:14 +0100
Subject: [PATCH 03/16] new computation of loglikelihood for GLMM that works with all families

---
 src/generalizedlinearmixedmodel.jl | 24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 7dc3d05bf..b31b9620e 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -402,17 +402,27 @@ end
 
 function StatsBase.loglikelihood(m::GeneralizedLinearMixedModel{T}) where {T}
     accum = zero(T)
-    D = Distribution(m.resp)
-    if D <: Binomial
-        for (μ, y, n) in zip(m.resp.mu, m.resp.y, m.wt)
-            accum += logpdf(D(round(Int, n), μ), round(Int, y * n))
+    # adapted from GLM.jl
+    # note the use of loglik_obs to handle the different parameterizations
+    # of various response distributions which may not just be location+scale
+    r = m.resp
+    wts = r.wts
+    y = r.y
+    mu = r.mu
+    d = r.d
+    if length(wts) == length(y)
+        # in GLM.jl, they use the deviance of the
+        ϕ = deviance(r)/sum(wts)
+        @inbounds for i in eachindex(y, mu, wts)
+            accum += GLM.loglik_obs(d, y[i], mu[i], wts[i], ϕ)
         end
     else
-        for (μ, y) in zip(m.resp.mu, m.resp.y)
-            accum += logpdf(D(μ), y)
+        ϕ = deviance(r)/length(y)
+        @inbounds for i in eachindex(y, mu)
+            accum += GLM.loglik_obs(d, y[i], mu[i], 1, ϕ)
         end
     end
-    accum - (mapreduce(u -> sum(abs2, u), +, m.u) + logdet(m)) / 2
+    accum
 end
 
 StatsBase.nobs(m::GeneralizedLinearMixedModel) = length(m.η)

From cb90435350fa01c76fa018dd1af619141b9a1260 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Thu, 27 Feb 2020 00:09:30 +0100
Subject: [PATCH 04/16] sdest for GLMM

---
 src/generalizedlinearmixedmodel.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index b31b9620e..9e1fe16e3 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -552,7 +552,7 @@ function Base.setproperty!(m::GeneralizedLinearMixedModel, s::Symbol, y)
     end
 end
 
-sdest(m::GeneralizedLinearMixedModel{T}) where {T} = convert(T, NaN)
+sdest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? convert(T, NaN) : √varest(m)
 
 function Base.show(io::IO, m::GeneralizedLinearMixedModel)
     if m.optsum.feval < 0

From 43d09cac6c5614f7484219801fa1add9cfab75c9 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Thu, 27 Feb 2020 00:09:51 +0100
Subject: [PATCH 05/16] initial work on including dispersion in GLMM deviance

---
 src/generalizedlinearmixedmodel.jl | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 9e1fe16e3..d3b5da0a1 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -69,7 +69,11 @@ is the squared length of the conditional modes, `u`, plus the determinant
 of `Λ'Z'WZΛ + I`, plus the sum of the squared deviance residuals.
 """
 function StatsBase.deviance(m::GeneralizedLinearMixedModel{T}, nAGQ = 1) where {T}
-    nAGQ == 1 && return T(sum(m.resp.devresid) + logdet(m) + sum(u -> sum(abs2, u), m.u))
+    # TODO: fix dispersion's contribution for non Gaussians
+    disp = dispersion_parameter(m) ? nobs(m) * (1 + log2π + log(varest(m))) : zero(T)
+    nAGQ == 1 && return T(sum(m.resp.devresid) + logdet(m) +
+                          sum(u -> sum(abs2, u), m.u) + disp)
+    # TODO: incorporate disp into nAGQ > 1
     u = vec(first(m.u))
     u₀ = vec(first(m.u₀))
     copyto!(u₀, u)

From 0858435ffa7446fb22f6f56b6d29cd4706a0e9d2 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Thu, 27 Feb 2020 00:37:11 +0100
Subject: [PATCH 06/16] fix sdest

---
 src/generalizedlinearmixedmodel.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index d3b5da0a1..1a33a590f 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -556,7 +556,7 @@ function Base.setproperty!(m::GeneralizedLinearMixedModel, s::Symbol, y)
     end
 end
 
-sdest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? convert(T, NaN) : √varest(m)
+sdest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? √varest(m) : convert(T, NaN)
 
 function Base.show(io::IO, m::GeneralizedLinearMixedModel)
     if m.optsum.feval < 0

From a79d32b222a60e5a2437ea006a763499636294ab Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Mon, 13 Apr 2020 23:27:44 +0200
Subject: [PATCH 07/16] fix loglik calc

---
 src/generalizedlinearmixedmodel.jl | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 1a33a590f..12bec0389 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -69,7 +69,7 @@ is the squared length of the conditional modes, `u`, plus the determinant
 of `Λ'Z'WZΛ + I`, plus the sum of the squared deviance residuals.
 """
 function StatsBase.deviance(m::GeneralizedLinearMixedModel{T}, nAGQ = 1) where {T}
-    # TODO: fix dispersion's contribution for non Gaussians 
+    # TODO: fix dispersion's contribution for non Gaussians
     disp = dispersion_parameter(m) ? nobs(m) * (1 + log2π + log(varest(m))) : zero(T)
     nAGQ == 1 && return T(sum(m.resp.devresid) + logdet(m) +
                           sum(u -> sum(abs2, u), m.u) + disp)
@@ -415,7 +415,6 @@ function StatsBase.loglikelihood(m::GeneralizedLinearMixedModel{T}) where {T}
     mu = r.mu
     d = r.d
     if length(wts) == length(y)
-        # in GLM.jl, they use the deviance of the
         ϕ = deviance(r)/sum(wts)
         @inbounds for i in eachindex(y, mu, wts)
             accum += GLM.loglik_obs(d, y[i], mu[i], wts[i], ϕ)
@@ -426,7 +425,7 @@ function StatsBase.loglikelihood(m::GeneralizedLinearMixedModel{T}) where {T}
             accum += GLM.loglik_obs(d, y[i], mu[i], 1, ϕ)
         end
     end
-    accum
+    accum - (mapreduce(u -> sum(abs2, u), +, m.u) + logdet(m)) / 2
 end
 
 StatsBase.nobs(m::GeneralizedLinearMixedModel) = length(m.η)
@@ -556,7 +555,7 @@ function Base.setproperty!(m::GeneralizedLinearMixedModel, s::Symbol, y)
     end
 end
 
-sdest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? √varest(m) : convert(T, NaN) 
+sdest(m::GeneralizedLinearMixedModel{T}) where {T} = dispersion_parameter(m) ? √varest(m) : convert(T, NaN)
 
 function Base.show(io::IO, m::GeneralizedLinearMixedModel)
     if m.optsum.feval < 0

From ebe79065a2d531cf0e248a718541cc12fd0da2d3 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Mon, 13 Apr 2020 23:28:28 +0200
Subject: [PATCH 08/16] start preparing tests for GLMMs with dispersion parameter

---
 test/pirls.jl | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/test/pirls.jl b/test/pirls.jl
index 5f0fbec90..87442fa1b 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -77,3 +77,15 @@ end
     #@test isapprox(sum(x -> sum(abs2, x), gm4.u), 196.8695297987013, atol=0.1)
     #@test isapprox(sum(gm4.resp.devresid), 220.92685781326136, atol=0.1)
 end
+
+@testset "dispersion parameter" begin
+    @testset "gaussian with non identity link" begin
+        dyestuff = MixedModels.dataset(:dyestuff)
+        gauss = fit(MixedModel, @formula(yield ~ 1 + (1|batch)), dyestuff, Normal(), SqrtLink(), fast=true)
+        @test only(gauss.θ) ≈ 0.5528913 atol=0.001 # value from lme4
+    end
+    @testset "inverse gaussian" begin
+    end
+    @testset "gamma" begin
+    end
+end

From 7300ef3bd9839d940eae5e64226e2bae789e7486 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Mon, 13 Apr 2020 23:38:07 +0200
Subject: [PATCH 09/16] change GLMM objective to use NLopt's tracking of function calls (like in LMM)

---
 src/generalizedlinearmixedmodel.jl | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 12bec0389..583ee49b9 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -244,19 +244,15 @@ function fit!(
         optsum.final = copy(optsum.initial)
     end
     setpar! = fast ? setθ! : setβθ!
-    feval = 0
     function obj(x, g)
-        isempty(g) || error("gradient not defined for this model")
-        feval += 1
+        isempty(g) || throw(ArgumentError("g should be empty for this objective"))
        val = deviance(pirls!(setpar!(m, x), fast, verbose), nAGQ)
-        feval == 1 && (optsum.finitial = val)
-        if verbose
-            println("f_", feval, ": ", val, " ", x)
-        end
+        verbose && println(round(val, digits = 5), " ", x)
        val
     end
     opt = Opt(optsum)
     NLopt.min_objective!(opt, obj)
+    optsum.finitial = obj(optsum.initial, T[])
     fmin, xmin, ret = NLopt.optimize(opt, copyto!(optsum.final, optsum.initial))
     ## check if very small parameter values bounded below by zero can be set to zero
     xmin_ = copy(xmin)
@@ -274,7 +270,7 @@ function fit!(
     ## ensure that the parameter values saved in m are xmin
     pirls!(setpar!(m, xmin), fast, verbose)
     optsum.nAGQ = nAGQ
-    optsum.feval = feval
+    optsum.feval = opt.numevals
     optsum.final = xmin
     optsum.fmin = fmin
     optsum.returnvalue = ret

From 0e591fefe6521074e4a9855b2dcacf0285f2d3b3 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Tue, 14 Apr 2020 02:00:10 +0200
Subject: [PATCH 10/16] rework tests for optimizer failure

---
 test/pirls.jl | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/test/pirls.jl b/test/pirls.jl
index 87442fa1b..13fe0b6c0 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -6,6 +6,7 @@ using MixedModels: dataset
 const fms = Dict(
     :cbpp => [@formula((incid/hsz) ~ 1 + period + (1|herd))],
     :contra => [@formula(use ~ 1+age+abs2(age)+urban+livch+(1|urbdist))],
+    :dyestuff => [@formula(yield ~ 1 + (1|batch))],
     :grouseticks => [@formula(ticks ~ 1+year+ch+ (1|index) + (1|brood) + (1|location))],
     :verbagg => [@formula(r2 ~ 1+anger+gender+btype+situ+(1|subj)+(1|item))],
 )
@@ -81,9 +82,24 @@ end
 @testset "dispersion parameter" begin
     @testset "gaussian with non identity link" begin
         dyestuff = MixedModels.dataset(:dyestuff)
-        gauss = fit(MixedModel, @formula(yield ~ 1 + (1|batch)), dyestuff, Normal(), SqrtLink(), fast=true)
+
+        gauss = fit(MixedModel, only(fms[:dyestuff]), dyestuff, Normal(), SqrtLink(), fast=true)
         @test only(gauss.θ) ≈ 0.5528913 atol=0.001 # value from lme4
+        # this corresponds to the deviance(gauss) from lme4
+        # NB: "deviance" is complicated in lme4
+        # this is the way deviance(glmm) is computed in lme4
+        # but there are several "deviances" defined:
+        # https://github.com/lme4/lme4/issues/375#issuecomment-214494445
+        @test sum(gauss.resp.devresid) ≈ 58830. atol=0.001 # value from lme4
+
+        # bobyqa fails here
+        neldermead = GeneralizedLinearMixedModel(only(fms[:dyestuff]), dyestuff, Normal(), SqrtLink());
+        neldermead.optsum.optimizer = :LN_NELDERMEAD;
+        fit!(neldermead)
+        @test only(neldermead.θ) ≈ 0.5528913 atol=0.001 # value from lme4
+        @test sum(neldermead.resp.devresid) ≈ 58830. atol=0.001 # value from lme4
     end
+
     @testset "inverse gaussian" begin
     end
     @testset "gamma" begin

From 8bc5254e9585869d2b3923011a0cdf2c8eb83774 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Tue, 14 Apr 2020 02:00:35 +0200
Subject: [PATCH 11/16] remove explicit dispersion parameter from deviance

---
 src/generalizedlinearmixedmodel.jl | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/src/generalizedlinearmixedmodel.jl b/src/generalizedlinearmixedmodel.jl
index 583ee49b9..08fa2645e 100644
--- a/src/generalizedlinearmixedmodel.jl
+++ b/src/generalizedlinearmixedmodel.jl
@@ -69,11 +69,7 @@ is the squared length of the conditional modes, `u`, plus the determinant
 of `Λ'Z'WZΛ + I`, plus the sum of the squared deviance residuals.
 """
 function StatsBase.deviance(m::GeneralizedLinearMixedModel{T}, nAGQ = 1) where {T}
-    # TODO: fix dispersion's contribution for non Gaussians
-    disp = dispersion_parameter(m) ? nobs(m) * (1 + log2π + log(varest(m))) : zero(T)
-    nAGQ == 1 && return T(sum(m.resp.devresid) + logdet(m) +
-                          sum(u -> sum(abs2, u), m.u) + disp)
-    # TODO: incorporate disp into nAGQ > 1
+    nAGQ == 1 && return T(sum(m.resp.devresid) + logdet(m) + sum(u -> sum(abs2, u), m.u))
     u = vec(first(m.u))
     u₀ = vec(first(m.u₀))
     copyto!(u₀, u)

From 72e4ffc08d6073617d9d8f00b7825e21e569910d Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Sat, 16 May 2020 20:53:26 +0200
Subject: [PATCH 12/16] simulate data for InverseGaussian and Gamma

---
 test/pirls.jl | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/test/pirls.jl b/test/pirls.jl
index 13fe0b6c0..9a9bce6ea 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -1,4 +1,7 @@
+using PooledArrays
+using Distributions
 using MixedModels
+using Random
 using Test
 
 using MixedModels: dataset
@@ -100,8 +103,27 @@ end
         @test sum(neldermead.resp.devresid) ≈ 58830. atol=0.001 # value from lme4
     end
 
+    ## simulate some data ##
+    rng = MersenneTwister(42);
+    ng = 25
+    ns = 500
+    # random effect
+    u = repeat(randn(rng, ns) .* 0.1, ng);
+    id = PooledArray(string.(repeat(1:ns, ng)));
+    # fixed effect
+    x = rand(rng, ns*ng);
+
     @testset "inverse gaussian" begin
+        rng = MersenneTwister(42);
+        y = map(d -> rand(rng, d), InverseGaussian.(1. ./ sqrt.(1. .+ u + x)));
+        dat = (u=u, id=id, x=x, y=y)
+        # invgauss = fit(MixedModel, @formula(y ~ 1 + x + (1|id)), dat, InverseGaussian())
     end
+
     @testset "gamma" begin
+        rng = MersenneTwister(42);
+        y = map(d -> rand(rng, d), Gamma.(1. ./ (1. .+ u + x)));
+        dat = (u=u, id=id, x=x, y=y)
+        # gamma = fit(MixedModel, @formula(y ~ 1 + x + (1|id)), dat, Gamma())
     end
 end

From 12538df29506e8f5cfadbd4335576eb626cfe442 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Tue, 19 May 2020 23:39:36 +0200
Subject: [PATCH 13/16] separate construct and fit for simulated data

---
 test/pirls.jl | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/test/pirls.jl b/test/pirls.jl
index 9a9bce6ea..acf59fe59 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -117,13 +117,17 @@ end
         rng = MersenneTwister(42);
         y = map(d -> rand(rng, d), InverseGaussian.(1. ./ sqrt.(1. .+ u + x)));
         dat = (u=u, id=id, x=x, y=y)
-        # invgauss = fit(MixedModel, @formula(y ~ 1 + x + (1|id)), dat, InverseGaussian())
+        invgauss = GeneralizedLinearMixedModel(@formula(y ~ 1 + x + (1|id)), dat, InverseGaussian());
+        #invgauss.optsum.optimizer = :LN_NELDERMEAD;
+        # fit!(invgauss)
     end
 
     @testset "gamma" begin
         rng = MersenneTwister(42);
         y = map(d -> rand(rng, d), Gamma.(1. ./ (1. .+ u + x)));
         dat = (u=u, id=id, x=x, y=y)
-        # gamma = fit(MixedModel, @formula(y ~ 1 + x + (1|id)), dat, Gamma())
+        gamma = GeneralizedLinearMixedModel(@formula(y ~ 1 + x + (1|id)), dat, Gamma());
+        #gamma.optsum.optimizer = :LN_NELDERMEAD;
+        # fit!(gamma)
     end
 end

From 6c6aaa0bdc853183b0eafc807b886fd54507bd07 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Wed, 20 May 2020 23:36:32 +0200
Subject: [PATCH 14/16] new gamma dataset

---
 Artifacts.toml | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/Artifacts.toml b/Artifacts.toml
index a877d92c6..a26cfeb59 100644
--- a/Artifacts.toml
+++ b/Artifacts.toml
@@ -1,7 +1,19 @@
+# when it's time to update the data again, this code snippet will help:
+# using Pkg.Artifacts
+# import Base.Filesystem.cp
+# datasets = "/home/phillip/Desktop/datasets/"
+# create_artifact() do artifact_dir
+#     for f in readdir(datasets)
+#         cp(joinpath(datasets,f), joinpath(artifact_dir,f))
+#     end
+# end
+# the returned sha is the git-tree-sha1
+# osf will show the tarball's sha256 in the revisions page for the current version
+
 [TestData]
-git-tree-sha1 = "9d575764bc1c1a7860c34c5b153251e5f2ee6704"
+git-tree-sha1 = "bf65e73c846c30aa5ec8bcb0443d12ffe1456671"
 lazy = true
 
 [[TestData.download]]
-    sha256 = "0b63ae3e9e457ee4b33482d3bf8cc7f20c8ed7c8b2c863af311ba0944c6d46e4"
-    url = "https://ndownloader.figshare.com/files/21085968"
+    sha256 = "24b1778f89c8b9f809d3f372bda50b4944eeb3ad9a993379c81170768c8907af"
+    url = "https://osf.io/pcjk6/download"

From a6d518ff2279ab370367bf22ffa97aaa78d837e0 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Wed, 20 May 2020 23:39:27 +0200
Subject: [PATCH 15/16] formulae for new gamma dataset

---
 test/pirls.jl | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/test/pirls.jl b/test/pirls.jl
index acf59fe59..66646b7b7 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -12,6 +12,8 @@ const fms = Dict(
     :dyestuff => [@formula(yield ~ 1 + (1|batch))],
     :grouseticks => [@formula(ticks ~ 1+year+ch+ (1|index) + (1|brood) + (1|location))],
     :verbagg => [@formula(r2 ~ 1+anger+gender+btype+situ+(1|subj)+(1|item))],
+    :sdio_relaxivity => [@formula(T2 ~ 1 + C + (1 + C|Trial)), # canonical inverse link
+                         @formula(R2 ~ 1 + C + (1 + C|Trial))], # identity link
 )

From 3fbaaf8755509b39b4a3c543602736b3c371ed86 Mon Sep 17 00:00:00 2001
From: Phillip Alday
Date: Mon, 3 Jan 2022 20:00:55 -0600
Subject: [PATCH 16/16] ditch MersenneTwister

---
 test/pirls.jl | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/test/pirls.jl b/test/pirls.jl
index 3ad0d80fe..dd8d4afe0 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -213,7 +213,7 @@ end
 end
 
     ## simulate some data ##
-    rng = MersenneTwister(42);
+    rng = StableRNG(42);
     ng = 25
     ns = 500
     # random effect
     u = repeat(randn(rng, ns) .* 0.1, ng);
     id = PooledArray(string.(repeat(1:ns, ng)));
     # fixed effect
     x = rand(rng, ns*ng);
 
     @testset "inverse gaussian" begin
-        rng = MersenneTwister(42);
-        y = map(d -> rand(rng, d), InverseGaussian.(1. ./ sqrt.(1. .+ u + x)));
-        dat = (u=u, id=id, x=x, y=y)
+        rng = StableRNG(42);
+        l = GLM.canonicallink(InverseGaussian())
         invgauss = GeneralizedLinearMixedModel(@formula(y ~ 1 + x + (1|id)), dat, InverseGaussian());
         #invgauss.optsum.optimizer = :LN_NELDERMEAD;
         # fit!(invgauss)
     end
 
     @testset "gamma" begin
-        rng = MersenneTwister(42);
+        rng = StableRNG(42);
         y = map(d -> rand(rng, d), Gamma.(1. ./ (1. .+ u + x)));
         dat = (u=u, id=id, x=x, y=y)
         gamma = GeneralizedLinearMixedModel(@formula(y ~ 1 + x + (1|id)), dat, Gamma());