# Iris logistic classification: decision boundary with MLJ and GLMakie
include("../utils.jl")
import MLJ:fit!,fitted_params
using GLMakie,MLJ,CSV,DataFrames
# 使用 GLMakie 的 contourf 方法绘制决策边界 (draw the decision boundary with GLMakie's `contourf`)
include("../utils.jl")
import MLJ:fit!,fitted_params
using GLMakie,MLJ,CSV,DataFrames
# --- Data preparation --------------------------------------------------------
# Load the iris dataset, keep the two petal features, and set up the
# plotting palette for the three species.
iris = load_iris();
# selectrows(iris, 1:3) |> pretty
iris = DataFrames.DataFrame(iris);
first(iris, 5) |> display
# Shuffle rows (fixed rng for reproducibility) and split off the target column.
y, X = unpack(iris, ==(:target); rng=123);
# Keep only petal_length and petal_width (columns 3:4).
X = select!(X, 3:4)
byCat = iris.target        # raw class labels, used for the per-class scatter below
categ = unique(byCat)      # the three species names
colors1 = [:orange, :lightgreen, :purple];   # one color per species
Row | sepal_length | sepal_width | petal_length | petal_width | target |
---|---|---|---|---|---|
Float64 | Float64 | Float64 | Float64 | Cat… | |
1 | 5.1 | 3.5 | 1.4 | 0.2 | setosa |
2 | 4.9 | 3.0 | 1.4 | 0.2 | setosa |
3 | 4.7 | 3.2 | 1.3 | 0.2 | setosa |
4 | 4.6 | 3.1 | 1.5 | 0.2 | setosa |
5 | 5.0 | 3.6 | 1.4 | 0.2 | setosa |
# 生成决策边界实际上是利用训练好的模型对区间内的每个点做出预测:
# 利用两个属性的最大值和最小值生成 grid 数据,作为 test 数据。
# --- Grid ("test") data for the decision boundary ----------------------------
# Build an n1×n2 grid spanning the petal-length (0–8) and petal-width (-1–4)
# ranges; every grid point will be classified by the trained model.
n1 = n2 = 200
tx = LinRange(0, 8, n1)
ty = LinRange(-1, 4, n2)
# Flatten the Cartesian product into a 2×(n1*n2) matrix, then transpose so
# each row is one observation for MLJ.
X_test = mapreduce(collect, hcat, Iterators.product(tx, ty))
X_test = MLJ.table(X_test')
Tables.MatrixTable{LinearAlgebra.Adjoint{Float64, Matrix{Float64}}} with 40000 rows, 2 columns, and schema:
:x1 Float64
:x2 Float64
# --- Model -------------------------------------------------------------------
# Load the logistic-regression classifier from MLJLinearModels, bind it to the
# training data in a machine, and fit.
LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels
model = machine(LogisticClassifier(), X, y)
fit!(model)
import MLJLinearModels ✔
[ Info: For silent loading, specify `verbosity=0`.
[ Info: Training machine(LogisticClassifier(lambda = 2.220446049250313e-16, …), …).
┌ Info: Solver: MLJLinearModels.LBFGS{Optim.Options{Float64, Nothing}, NamedTuple{(), Tuple{}}}
│ optim_options: Optim.Options{Float64, Nothing}
└ lbfgs_options: NamedTuple{(), Tuple{}} NamedTuple()
trained Machine; caches model-specific representations of data
model: LogisticClassifier(lambda = 2.220446049250313e-16, …)
args:
1: Source @794 ⏎ Table{AbstractVector{Continuous}}
2: Source @579 ⏎ AbstractVector{Multiclass{3}}
# Predict class distributions on the grid, take the modal (most probable)
# class for each point, and reshape the flat vector back into the grid layout.
ŷ = MLJ.predict(model, X_test)
res = reshape(mode.(ŷ), 200, 200)
# Map a species label to an integer class index for contour plotting.
# Returns 1 for "setosa", 2 for "versicolor", 3 for anything else
# (i.e. "virginica"). Works for strings and categorical values alike,
# since `==` against a string covers both.
function trans(i)
    if i == "setosa"
        return 1
    elseif i == "versicolor"
        return 2
    else
        return 3
    end
end
# Convert every predicted label to its integer class index.
# `map` preserves the matrix shape and works for any grid size (the original
# comprehension hard-coded 200×200).
ypred = map(trans, res)
200×200 Matrix{Int64}:
1 1 1 1 1 1 1 1 1 1 1 1 1 … 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 … 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 … 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2 2
⋮ ⋮ ⋮ ⋱ ⋮ ⋮
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 … 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 … 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
2 2 2 2 2 2 2 2 2 2 2 2 2 3 3 3 3 3 3 3 3 3 3 3 3
# Attach a legend titled "Label" in grid cell [1, 2] for the plot handles `axs`.
# NOTE(review): references the global `fig` — assumes the figure already exists
# when this is called; confirm against call sites (this helper is not called
# in the visible code).
function add_legend(axs)
    Legend(fig[1,2], axs,"Label";width=100,height=200)
end
# Scatter the observed iris points (petal length vs. petal width), one series
# per species, on top of the current decision-boundary axis.
# Returns the vector of scatter handles so a legend can be built from them.
# NOTE(review): relies on the globals `categ`, `byCat`, `iris`, and `colors1`;
# `ax` is accepted for call-site symmetry but `scatter!` draws on the current axis.
function desision_boundary(ax)
    axs = []
    for (k, c) in enumerate(categ)
        # row indices of the observations belonging to species `c`
        indc = findall(x -> x == c, byCat)
        h = scatter!(iris[:, 3][indc], iris[:, 4][indc]; color=colors1[k], markersize=14)
        push!(axs, h)
    end
    return axs
end
# --- Figure ------------------------------------------------------------------
# Filled contours of the integer class grid give the decision regions; the
# observed points are scattered on top and a legend maps colors to species.
fig = Figure(resolution=(800, 600))
ax = Axis(fig[1, 1], xlabel="Petal length", ylabel="Petal width",
          title=L"Iris Logistics classfication")
contourf!(ax, tx, ty, ypred, levels=length(categ))
axs = desision_boundary(ax)
Legend(fig[1, 2], [axs...], categ)
fig