diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 36384a9..d29561f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -18,7 +18,7 @@ jobs:
       fail-fast: false
       matrix:
         version:
-          - '1.6'
+          - '1.10'
           - '1' # automatically expands to the latest stable 1.x release of Julia.
         os:
           - ubuntu-latest
diff --git a/Project.toml b/Project.toml
index 2f1d9b3..0ae8b54 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "FeatureSelection"
 uuid = "33837fe5-dbff-4c9e-8c2f-c5612fe2b8b6"
 authors = ["Anthony D. Blaom ", "Samuel Okon
 collect
     selector = FeatureSelector()
-    f, = MLJBase.fit(selector, 1, X)
+    f, = MLJBase.fit(selector, 0, X)
     @test f == namesX
     Xt = MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2))
     @test Set(MLJBase.schema(Xt).names) == Set(namesX)
@@ -20,13 +20,13 @@

     # Test on selecting features if `features` keyword is defined
     selector = FeatureSelector(features=[:Zn, :Crim])
-    f, = MLJBase.fit(selector, 1, X)
+    f, = MLJBase.fit(selector, 0, X)
     @test MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2)) ==
         MLJBase.select(X, 1:2, [:Zn, :Crim])

     # test on ignoring a feature, even if it's listed in the `features`
     selector.ignore = true
-    f, = MLJBase.fit(selector, 1, X)
+    f, = MLJBase.fit(selector, 0, X)
     Xnew = MLJBase.transform(selector, f, X)
     @test MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2)) ==
         MLJBase.select(X, 1:2, [:x3, :x4])
@@ -35,7 +35,7 @@
     selector = FeatureSelector(features=[:x1, :mickey_mouse])
     @test_throws(
         ArgumentError,
-        MLJBase.fit(selector, 1, X)
+        MLJBase.fit(selector, 0, X)
     )
     selector.ignore = true
     @test_logs(
@@ -50,13 +50,13 @@
     selector = FeatureSelector(features= x-> x == (:x1))
     @test_throws(
         ArgumentError,
-        MLJBase.fit(selector, 1, X)
+        MLJBase.fit(selector, 0, X)
     )
     selector.ignore = true
     selector.features = x-> x in [:Zn, :Crim, :x3, :x4]
     @test_throws(
         ArgumentError,
-        MLJBase.fit(selector, 1, X)
+        MLJBase.fit(selector, 0, X)
     )

     # Test model Metadata
@@ -67,4 +67,4 @@ end
 # To be added with FeatureSelectorRule X = (n1=["a", "b", "a"], n2=["g", "g", "g"], n3=[7, 8, 9],
 #     n4 =UInt8[3,5,10], o1=[4.5, 3.6, 4.0], )
 # MLJBase.schema(X)
-# Xc = coerce(X, :n1=>Multiclass, :n2=>Multiclass)
\ No newline at end of file
+# Xc = coerce(X, :n1=>Multiclass, :n2=>Multiclass)
diff --git a/test/models/rfe.jl b/test/models/rfe.jl
index 4c5e2d0..fb635c9 100644
--- a/test/models/rfe.jl
+++ b/test/models/rfe.jl
@@ -62,9 +62,9 @@ const DTM = DummyTestModels
     selector_mach3 = machine(selector3, Xt, y)
     selector_mach4 = machine(selector4, Xt, y)

-    fit!(selector_mach)
-    fit!(selector_mach2)
-    fit!(selector_mach3)
+    fit!(selector_mach, verbosity=0)
+    fit!(selector_mach2, verbosity=0)
+    fit!(selector_mach3, verbosity=0)
     @test_logs(
         (:warn, "n_features > number of features in training data, hence no feature will be eliminated."),
         match_mode=:any,
@@ -149,7 +149,7 @@ end
     svm = SVR(kernel="linear")
     rfe = RecursiveFeatureElimination(model=svm, n_features=5)
     mach = machine(rfe, Xs, ys)
-    fit!(mach)
+    fit!(mach, verbosity=0)

     rfecv = RecursiveFeatureElimination(model=svm)
     tuning_rfe_model = TunedModel(
@@ -160,7 +160,7 @@ end
         range=range(rfecv, :n_features, values=1:10)
     )
     self_tuning_rfe_mach = machine(tuning_rfe_model, Xs, ys)
-    fit!(self_tuning_rfe_mach)
+    fit!(self_tuning_rfe_mach, verbosity=0)

     # Compare results
     # Convert MLJ RFE scores to rankings