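# Tests for `NeuralNetworkRegressor` and `MultitargetNeuralNetworkRegressor`.
#
# Assumed context (a sketch, not part of the original file): this file is
# normally run via the package's test runner, which brings `Test`, `Random`,
# `Statistics`, `Tables`, `Flux`, `StableRNGs`, `MLJBase` and `MLJFlux` into
# scope, along with the shared test utilities defining `Short2`, `basictest`,
# `optimisertest` and `@testset_accelerated` (which runs a testset body once
# per available computational resource, bound here to `accel`).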
Random.seed!(123)
N = 200
X = MLJBase.table(randn(Float32, N, 5));

# TODO: replace Short2 -> Short when
# https://github.com/FluxML/Flux.jl/pull/1618 is resolved:
builder = Short2(σ=identity)
optimiser = Flux.Optimise.ADAM()

losses = []

Random.seed!(123)

# noise-free linear target for the single-target tests:
y = 1 .+ X.x1 - X.x2 .- 2X.x4 + X.x5

train, test = MLJBase.partition(1:N, 0.7)
@testset_accelerated "NeuralNetworkRegressor" accel begin
    Random.seed!(123)
    basictest(MLJFlux.NeuralNetworkRegressor,
              X,
              y,
              builder,
              optimiser,
              0.7,
              accel)

    # test model is a bit better than constant predictor
    # (see the note after this testset):
    stable_rng = StableRNGs.StableRNG(123)
    model = MLJFlux.NeuralNetworkRegressor(builder=builder,
                                           acceleration=accel,
                                           rng=stable_rng)
    @time fitresult, _, rpt =
        fit(model, 0, MLJBase.selectrows(X, train), y[train])
    first_last_training_loss = rpt[1][[1, end]]
    push!(losses, first_last_training_loss[2])
    # @show first_last_training_loss

    yhat = predict(model, fitresult, selectrows(X, test))
    truth = y[test]
    goal = 0.9*model.loss(truth .- mean(truth), 0)
    @test model.loss(yhat, truth) < goal

    optimisertest(MLJFlux.NeuralNetworkRegressor,
                  X,
                  y,
                  builder,
                  optimiser,
                  accel)
end
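# Note on the "constant predictor" check above: with the default `mse` loss,
# the best constant predictor returns `mean(truth)`, and its loss is
# `mean((truth .- mean(truth)).^2)`, which is exactly what
# `model.loss(truth .- mean(truth), 0)` computes. The trained network is
# required to beat 90% of that baseline.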
# check different resources (CPU1, CUDALibs, etc.) give about the same loss:
reference = losses[1]
@test all(x -> abs(x - reference)/reference < 1e-6, losses[2:end])
Random.seed!(123)

# two-column target for the multitarget tests:
ymatrix = hcat(1 .+ X.x1 - X.x2, 1 .- 2X.x4 + X.x5);
y = MLJBase.table(ymatrix);

losses = []
@testset_accelerated "MultitargetNeuralNetworkRegressor" accel begin
    Random.seed!(123)
    basictest(MLJFlux.MultitargetNeuralNetworkRegressor,
              X,
              y,
              builder,
              optimiser,
              1.0,
              accel)

    # test model is a bit better than constant predictor
    # (as in the single-target case above, but with an 80% threshold):
    model = MLJFlux.MultitargetNeuralNetworkRegressor(acceleration=accel,
                                                      builder=builder)
    @time fitresult, _, rpt =
        fit(model, 0, MLJBase.selectrows(X, train), selectrows(y, train))
    first_last_training_loss = rpt[1][[1, end]]
    push!(losses, first_last_training_loss[2])
    # @show first_last_training_loss

    yhat = predict(model, fitresult, selectrows(X, test))
    truth = ymatrix[test, :]
    goal = 0.8*model.loss(truth .- mean(truth), 0)
    @test model.loss(Tables.matrix(yhat), truth) < goal

    optimisertest(MLJFlux.MultitargetNeuralNetworkRegressor,
                  X,
                  y,
                  builder,
                  optimiser,
                  accel)
end
# check different resources (CPU1, CUDALibs, etc.) give about the same loss:
reference = losses[1]
@test all(x -> abs(x - reference)/reference < 1e-6, losses[2:end])
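# The final `true` below is the value returned when this file is `include`-d;
# presumably the test runner asserts on it to confirm the file ran to the end.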
true