Skip to content

Commit

Permalink
formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
pat-alt committed Sep 6, 2024
1 parent ffd16ef commit 79a89fd
Show file tree
Hide file tree
Showing 4 changed files with 43 additions and 64 deletions.
24 changes: 12 additions & 12 deletions docs/make.jl
Original file line number Diff line number Diff line change
@@ -1,20 +1,20 @@
using TaijaPlotting
using Documenter

DocMeta.setdocmeta!(TaijaPlotting, :DocTestSetup, :(using TaijaPlotting); recursive = true)
DocMeta.setdocmeta!(TaijaPlotting, :DocTestSetup, :(using TaijaPlotting); recursive=true)

makedocs(;
modules = [TaijaPlotting],
authors = "Patrick Altmeyer",
repo = "https://github.com/JuliaTrustworthyAI/TaijaPlotting.jl/blob/{commit}{path}#{line}",
sitename = "TaijaPlotting.jl",
format = Documenter.HTML(;
prettyurls = get(ENV, "CI", "false") == "true",
canonical = "https://JuliaTrustworthyAI.github.io/TaijaPlotting.jl",
edit_link = "main",
assets = String[],
modules=[TaijaPlotting],
authors="Patrick Altmeyer",
repo="https://github.com/JuliaTrustworthyAI/TaijaPlotting.jl/blob/{commit}{path}#{line}",
sitename="TaijaPlotting.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://JuliaTrustworthyAI.github.io/TaijaPlotting.jl",
edit_link="main",
assets=String[],
),
pages = ["Home" => "index.md"],
pages=["Home" => "index.md"],
)

deploydocs(; repo = "github.com/JuliaTrustworthyAI/TaijaPlotting.jl", devbranch = "main")
deploydocs(; repo="github.com/JuliaTrustworthyAI/TaijaPlotting.jl", devbranch="main")
58 changes: 20 additions & 38 deletions test/ConformalPrediction.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,82 +6,64 @@ using Plots
isplot(plt) = typeof(plt) <: Plots.Plot

@testset "ConformalPrediction.jl" begin

@testset "Classification" begin

# Data:
X, y = make_moons(500; noise = 0.15)
X, y = make_moons(500; noise=0.15)
X = MLJBase.table(convert.(Float32, MLJBase.matrix(X)))
train, test = partition(eachindex(y), 0.8, shuffle = true)
train, test = partition(eachindex(y), 0.8; shuffle=true)

# Model:
model = MLJLinearModels.LogisticClassifier()
conf_model = conformal_model(model; coverage = 0.9)
conf_model = conformal_model(model; coverage=0.9)
mach = machine(conf_model, X, y)
fit!(mach, rows = train)
fit!(mach; rows=train)

@test isplot(bar(mach.model, mach.fitresult, X))
@test isplot(plot(mach.model, mach.fitresult, X, y))
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var = 1))
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var = :x1))
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var=1))
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var=:x1))
@test isplot(contourf(mach.model, mach.fitresult, X, y))
@test isplot(
contourf(mach.model, mach.fitresult, X, y; zoom = -1, plot_set_size = true),
contourf(mach.model, mach.fitresult, X, y; zoom=-1, plot_set_size=true)
)
@test isplot(
contourf(mach.model, mach.fitresult, X, y; zoom=-1, plot_set_loss=true),
contourf(mach.model, mach.fitresult, X, y; zoom=-1, plot_set_loss=true)
)
@test isplot(
contourf(mach.model, mach.fitresult, X, y; zoom=-1, plot_classification_loss=true),
contourf(
mach.model, mach.fitresult, X, y; zoom=-1, plot_classification_loss=true
),
)
@test isplot(contourf(mach.model, mach.fitresult, X, y; target = 1))

@test isplot(contourf(mach.model, mach.fitresult, X, y; target=1))
end

@testset "Regression" begin

# Data:
X, y = make_regression(500)
X = MLJBase.table(convert.(Float32, MLJBase.matrix(X)))
train, test = partition(eachindex(y), 0.8, shuffle = true)
train, test = partition(eachindex(y), 0.8; shuffle=true)

# Model:
model = MLJLinearModels.LinearRegressor()
conf_model = conformal_model(model; coverage = 0.9)
conf_model = conformal_model(model; coverage=0.9)
mach = machine(conf_model, X, y)
fit!(mach, rows = train)
fit!(mach; rows=train)

# Plotting:
@test isplot(plot(mach.model, mach.fitresult, X, y))
@test isplot(
plot(
mach.model,
mach.fitresult,
X,
y;
input_var = 1,
xlims = (-1, 1),
ylims = (-1, 1),
mach.model, mach.fitresult, X, y; input_var=1, xlims=(-1, 1), ylims=(-1, 1)
),
)
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var = :x1))
@test isplot(
plot(mach.model, mach.fitresult, X, y; target = 1, plot_set_size = true),
)
@test isplot(plot(mach.model, mach.fitresult, X, y; input_var=:x1))
@test isplot(plot(mach.model, mach.fitresult, X, y; target=1, plot_set_size=true))
@test isplot(plot(mach.model, mach.fitresult, X, y; target=1, plot_set_loss=true))
@test isplot(
plot(mach.model, mach.fitresult, X, y; target = 1, plot_set_loss = true),
)
@test isplot(
plot(
mach.model,
mach.fitresult,
X,
y;
target = 1,
plot_classification_loss = true,
),
plot(mach.model, mach.fitresult, X, y; target=1, plot_classification_loss=true)
)
@test isplot(bar(mach.model, mach.fitresult, X))
end

end
10 changes: 5 additions & 5 deletions test/CounterfactualExplanations.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ using Plots
using TaijaData

@testset "CounterfactualExplanations.jl" begin

@testset "2D" begin

# Counterfactual data and model:
Expand All @@ -22,22 +21,23 @@ using TaijaData

plot(M, counterfactual_data)
plot(ce)
plot(ce; plot_proba = true, zoom = -0.1f32)
plot(ce; plot_proba=true, zoom=-0.1f32)
TaijaPlotting.animate_path(ce)

@test true

@testset "Multiple counterfactuals" begin
ce = generate_counterfactual(x, target, counterfactual_data, M, generator; num_counterfactuals = 5)
ce = generate_counterfactual(
x, target, counterfactual_data, M, generator; num_counterfactuals=5
)
plot(ce)
@test true
end

end

@testset "Multi-dim" begin
# Counterfactual data and model:
counterfactual_data = CounterfactualData(TaijaData.load_blobs(; k = 5)...)
counterfactual_data = CounterfactualData(TaijaData.load_blobs(; k=5)...)
M = fit_model(counterfactual_data, :Linear)
target = 2
factual = 1
Expand Down
15 changes: 6 additions & 9 deletions test/LaplaceRedux.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ using Plots
using TaijaData

@testset "LaplaceRedux.jl" begin

@testset "Regression" begin
# Data:
x, y = toy_data_regression()
Expand All @@ -22,15 +21,14 @@ using TaijaData
nn = Chain(Dense(D, n_hidden, tanh), Dense(n_hidden, 1))

# Fit:
la = LaplaceRedux.Laplace(nn; likelihood = :regression)
la = LaplaceRedux.Laplace(nn; likelihood=:regression)
LaplaceRedux.fit!(la, data)

plot(la, X, y)
@test true
end

@testset "Classification" begin

@testset "Single class" begin
# Data:
x, y = TaijaData.load_linearly_separable()
Expand All @@ -43,19 +41,19 @@ using TaijaData
nn = Chain(Dense(D, n_hidden, tanh), Dense(n_hidden, 1, σ))

# Fit:
la = LaplaceRedux.Laplace(nn; likelihood = :classification)
la = LaplaceRedux.Laplace(nn; likelihood=:classification)
LaplaceRedux.fit!(la, data)

# Very minimal testing for basic functionality:
plot(la, x, y)
plot(la, x, y; zoom = -0.1f32)
plot(la, x, y; zoom=-0.1f32)
@test true
end

@testset "Multi-class" begin
# Data:
nout = 4
x, y = TaijaData.load_blobs(centers = nout)
x, y = TaijaData.load_blobs(; centers=nout)
x = Float32.(x)
y = onehotbatch(y, 1:nout)
data = zip(eachcol(x), eachcol(y))
Expand All @@ -66,15 +64,14 @@ using TaijaData
nn = Chain(Dense(D, n_hidden, tanh), Dense(n_hidden, nout, σ))

# Fit:
la = LaplaceRedux.Laplace(nn; likelihood = :classification)
la = LaplaceRedux.Laplace(nn; likelihood=:classification)
LaplaceRedux.fit!(la, data)

# Very minimal testing for basic functionality:
y = onecold(y)
plot(la, x, y)
plot(la, x, y; zoom = -0.1f32)
plot(la, x, y; zoom=-0.1f32)
@test true
end
end

end

0 comments on commit 79a89fd

Please sign in to comment.