diff --git a/src/power_divergence.jl b/src/power_divergence.jl
index baa2cbf3..eeabdc87 100644
--- a/src/power_divergence.jl
+++ b/src/power_divergence.jl
@@ -319,6 +319,13 @@ function PowerDivergenceTest(x::AbstractMatrix{T}; lambda::U=1.0, theta0::Vector
             stat += x[i] * (log(x[i]) - log(xhat[i]))
         end
         stat *= 2
+    # Yates's continuity correction: Pearson statistic (lambda == 1) on a 2x2 table (df == 1)
+    elseif lambda == 1 && df == 1
+        for i in 1:length(x)
+            num = max(0.0, abs(x[i] - xhat[i]) - 0.5) # clamp at zero so small deviations are not overcorrected
+            den = sqrt(xhat[i])
+            stat += (num / den)^2
+        end
     elseif lambda == -1
         for i in 1:length(x)
             stat += xhat[i] * (log(xhat[i]) - log(x[i]))
diff --git a/test/power_divergence.jl b/test/power_divergence.jl
index 6c058b08..b41d2531 100644
--- a/test/power_divergence.jl
+++ b/test/power_divergence.jl
@@ -143,6 +143,11 @@ testname(m)
 pvalue(m)
 show(IOBuffer(), m)
 
+# Yates's continuity correction on a 2x2 table
+mat = [8 404; 12 212]
+m = PowerDivergenceTest(mat, lambda=1.0)
+@test pvalue(m) ≈ 0.034041225309689796
+
 m = ChisqTest(d)
 m = MultinomialLRTest(d)
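
For reference, a minimal standalone sketch of what the new branch computes: the Yates-corrected Pearson statistic for a 2x2 table, with expected counts taken from the margins as in `power_divergence.jl`. `yates_chisq` is a hypothetical helper written for illustration only; it is not part of HypothesisTests.

```julia
# Standalone sketch (not part of HypothesisTests) of the Yates-corrected
# Pearson chi-square statistic computed by the new branch above.
using Distributions

function yates_chisq(x::AbstractMatrix{<:Integer})
    n = sum(x)
    # expected counts under independence: outer product of the margins / n
    xhat = sum(x, dims=2) * sum(x, dims=1) ./ n
    # Yates: shrink each |O - E| by 0.5 (clamped at zero) before squaring
    stat = sum(max.(0.0, abs.(x .- xhat) .- 0.5) .^ 2 ./ xhat)
    # a 2x2 table has (2-1)*(2-1) = 1 degree of freedom
    return stat, ccdf(Chisq(1), stat)
end

stat, p = yates_chisq([8 404; 12 212])
# p ≈ 0.0340, the value asserted by the new test
```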
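And how the patched API would be exercised, mirroring the new test. One design note worth flagging: unlike R's `chisq.test(x, correct = FALSE)`, there is no way to opt out here; the correction is applied unconditionally whenever `lambda == 1` and `df == 1`.

```julia
using HypothesisTests

mat = [8 404; 12 212]
m = PowerDivergenceTest(mat, lambda=1.0)  # Pearson statistic; Yates-corrected for this 2x2 table
pvalue(m)  # ≈ 0.0340 with the correction (vs. roughly 0.018 uncorrected, by hand calculation)
```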