Skip to content

Commit b377c25

Browse files
authored
Merge pull request #301 from lcontento/flux016
Test fixes for Flux 0.16
2 parents 5f87797 + a069170 commit b377c25

File tree

3 files changed

+11
-11
lines changed

3 files changed

+11
-11
lines changed

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "MLJFlux"
22
uuid = "094fc8d1-fd35-5302-93ea-dabda2abf845"
33
authors = ["Anthony D. Blaom <anthony.blaom@gmail.com>", "Ayush Shridhar <ayush.shridhar1999@gmail.com>"]
4-
version = "0.6.3"
4+
version = "0.6.4"
55

66
[deps]
77
CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597"

src/mlj_embedder_interface.jl

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -32,16 +32,16 @@ end
3232
# 8. Extra metadata
3333
MMI.metadata_pkg(
3434
EntityEmbedder,
35-
package_name = "MLJTransforms",
36-
package_uuid = "23777cdb-d90c-4eb0-a694-7c2b83d5c1d6",
37-
package_url = "https://github.com/JuliaAI/MLJTransforms.jl",
35+
package_name = "MLJFlux",
36+
package_uuid = "094fc8d1-fd35-5302-93ea-dabda2abf845",
37+
package_url = "https://github.com/FluxML/MLJFlux.jl",
3838
is_pure_julia = true,
3939
is_wrapper = true
4040
)
4141

4242
MMI.metadata_model(
4343
EntityEmbedder,
44-
load_path = "MLJTransforms.EntityEmbedder",
44+
load_path = "MLJFlux.EntityEmbedder",
4545
)
4646

4747
MMI.target_in_fit(::Type{<:EntityEmbedder}) = true
@@ -153,4 +153,4 @@ Xnew
153153
See also
154154
[`NeuralNetworkClassifier`, `NeuralNetworkRegressor`](@ref)
155155
"""
156-
EntityEmbedder
156+
EntityEmbedder

test/mlj_model_interface.jl

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -53,21 +53,21 @@ end
5353
model = MLJFlux.NeuralNetworkRegressor(; alpha=0, lambda=0.3, optimiser)
5454
chain = MLJFlux.regularized_optimiser(model, 1)
5555
@test chain isa Optimisers.OptimiserChain{
56-
Tuple{Optimisers.WeightDecay, Optimisers.Momentum}
56+
<:Tuple{Optimisers.WeightDecay, Optimisers.Momentum}
5757
}
5858

5959
# alpha = 1:
6060
model = MLJFlux.NeuralNetworkRegressor(; alpha=1, lambda=0.3, optimiser)
6161
chain = MLJFlux.regularized_optimiser(model, 1)
6262
@test chain isa Optimisers.OptimiserChain{
63-
Tuple{Optimisers.SignDecay, Optimisers.Momentum}
63+
<:Tuple{Optimisers.SignDecay, Optimisers.Momentum}
6464
}
6565

6666
# general case:
6767
model = MLJFlux.NeuralNetworkRegressor(; alpha=0.4, lambda=0.3, optimiser)
6868
chain = MLJFlux.regularized_optimiser(model, 1)
6969
@test chain isa Optimisers.OptimiserChain{
70-
Tuple{Optimisers.SignDecay, Optimisers.WeightDecay, Optimisers.Momentum}
70+
<:Tuple{Optimisers.SignDecay, Optimisers.WeightDecay, Optimisers.Momentum}
7171
}
7272
end
7373

@@ -133,8 +133,8 @@ mutable struct LisasBuilder
133133
n1::Int
134134
end
135135

136-
# UndefVarError accepts two inputs from julia > v"1.9"
137-
_UndefVarError(var, scope) = @static if VERSION < v"1.10"
136+
# UndefVarError accepts two inputs from julia > v"1.10"
137+
_UndefVarError(var, scope) = @static if VERSION < v"1.11"
138138
UndefVarError(var)
139139
else
140140
UndefVarError(var, scope)

0 commit comments

Comments (0)