Skip to content

Commit

Permalink
Merge pull request #265 from FluxML/dev
Browse files Browse the repository at this point in the history
Update docs
  • Loading branch information
ablaom authored Jun 12, 2024
2 parents 3766e44 + 3427fcb commit aea9436
Show file tree
Hide file tree
Showing 33 changed files with 629 additions and 174 deletions.
15 changes: 7 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ Grab some data and split into features and target:

```julia
iris = RDatasets.dataset("datasets", "iris");
y, X = unpack(iris, ==(:Species), colname -> true, rng=123);
y, X = unpack(iris, ==(:Species), rng=123);
X = Float32.(X); # To optimise for GPUs
```

Expand Down Expand Up @@ -83,17 +83,16 @@ Train the wrapped model:
julia> mach = machine(iterated_model, X, y)
julia> fit!(mach)
[ Info: Training machine(ProbabilisticIteratedModel(model = NeuralNetworkClassifier(builder = MLP(hidden = (5, 4), …), …), …), …).
[ Info: No iteration parameter specified. Using `iteration_parameter=:(epochs)`.
[ Info: final loss: 0.10431026246922499
[ Info: final training loss: 0.046286315
[ Info: Stop triggered by Patience(4) stopping criterion.
[ Info: Total of 349 iterations.
[ Info: No iteration parameter specified. Using `iteration_parameter=:(epochs)`.
[ Info: final loss: 0.1284184007796247
[ Info: final training loss: 0.055630706
[ Info: Stop triggered by NumberSinceBest(5) stopping criterion.
[ Info: Total of 811 iterations.
```
Inspect results:

```julia-repl
julia> plot(train_losses, label="Validation Loss", linewidth=2, size=(800,400))
julia> plot(train_losses, label="Training Loss")
julia> plot!(validation_losses, label="Validation Loss", linewidth=2, size=(800,400))
```

Expand Down
10 changes: 5 additions & 5 deletions docs/src/common_workflows/architecture_search/notebook.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@
"cell_type": "code",
"source": [
"iris = RDatasets.dataset(\"datasets\", \"iris\");\n",
"y, X = unpack(iris, ==(:Species), colname -> true, rng = 123);\n",
"y, X = unpack(iris, ==(:Species), rng = 123);\n",
"X = Float32.(X); # To be compatible with type of network parameters\n",
"first(X, 5)"
],
Expand Down Expand Up @@ -130,7 +130,7 @@
{
"output_type": "execute_result",
"data": {
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (1, 1, 1), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 10, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = ComputationalResources.CPU1{Nothing}(nothing))"
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (1, 1, 1), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 10, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = CPU1{Nothing}(nothing))"
},
"metadata": {},
"execution_count": 4
Expand Down Expand Up @@ -306,7 +306,7 @@
{
"output_type": "execute_result",
"data": {
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (21, 57, 25), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 10, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = ComputationalResources.CPU1{Nothing}(nothing))"
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (45, 49, 21), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 10, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = CPU1{Nothing}(nothing))"
},
"metadata": {},
"execution_count": 8
Expand Down Expand Up @@ -341,9 +341,9 @@
{
"output_type": "execute_result",
"data": {
"text/plain": "\u001b[1m10×2 DataFrame\u001b[0m\n\u001b[1m Row \u001b[0m│\u001b[1m mlp \u001b[0m\u001b[1m measurement \u001b[0m\n\u001b[90m MLP… \u001b[0m\u001b[90m Float64 \u001b[0m\n─────┼────────────────────────────────────────────\n 1 │ MLP(hidden = (21, 57, 25), …) 0.0867019\n 2 │ MLP(hidden = (45, 17, 13), …) 0.0929803\n 3 │ MLP(hidden = (33, 13, 49), …) 0.0973896\n 4 │ MLP(hidden = (21, 41, 61), …) 0.0981502\n 5 │ MLP(hidden = (57, 49, 61), …) 0.100331\n 6 │ MLP(hidden = (25, 25, 29), …) 0.101083\n 7 │ MLP(hidden = (29, 61, 21), …) 0.101466\n 8 │ MLP(hidden = (29, 61, 5), …) 0.107513\n 9 │ MLP(hidden = (21, 61, 17), …) 0.107874\n 10 │ MLP(hidden = (45, 49, 61), …) 0.111292",
"text/plain": "\u001b[1m10×2 DataFrame\u001b[0m\n\u001b[1m Row \u001b[0m│\u001b[1m mlp \u001b[0m\u001b[1m measurement \u001b[0m\n\u001b[90m MLP… \u001b[0m\u001b[90m Float64 \u001b[0m\n─────┼────────────────────────────────────────────\n 1 │ MLP(hidden = (45, 49, 21), …) 0.0860875\n 2 │ MLP(hidden = (25, 45, 33), …) 0.0877367\n 3 │ MLP(hidden = (29, 17, 53), …) 0.0970372\n 4 │ MLP(hidden = (61, 9, 29), …) 0.0970978\n 5 │ MLP(hidden = (49, 49, 9), …) 0.0971594\n 6 │ MLP(hidden = (21, 33, 61), …) 0.0984172\n 7 │ MLP(hidden = (57, 61, 61), …) 0.099232\n 8 │ MLP(hidden = (41, 13, 25), …) 0.101498\n 9 │ MLP(hidden = (53, 29, 21), …) 0.105323\n 10 │ MLP(hidden = (57, 33, 45), …) 0.110168",
"text/html": [
"<div><div style = \"float: left;\"><span>10×2 DataFrame</span></div><div style = \"clear: both;\"></div></div><div class = \"data-frame\" style = \"overflow-x: scroll;\"><table class = \"data-frame\" style = \"margin-bottom: 6px;\"><thead><tr class = \"header\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">Row</th><th style = \"text-align: left;\">mlp</th><th style = \"text-align: left;\">measurement</th></tr><tr class = \"subheader headerLastRow\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\"></th><th title = \"MLJFlux.MLP{3}\" style = \"text-align: left;\">MLP…</th><th title = \"Float64\" style = \"text-align: left;\">Float64</th></tr></thead><tbody><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">1</td><td style = \"text-align: left;\">MLP(hidden = (21, 57, 25), …)</td><td style = \"text-align: right;\">0.0867019</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">2</td><td style = \"text-align: left;\">MLP(hidden = (45, 17, 13), …)</td><td style = \"text-align: right;\">0.0929803</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">3</td><td style = \"text-align: left;\">MLP(hidden = (33, 13, 49), …)</td><td style = \"text-align: right;\">0.0973896</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">4</td><td style = \"text-align: left;\">MLP(hidden = (21, 41, 61), …)</td><td style = \"text-align: right;\">0.0981502</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">5</td><td style = \"text-align: left;\">MLP(hidden = (57, 49, 61), …)</td><td style = \"text-align: right;\">0.100331</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">6</td><td style = \"text-align: left;\">MLP(hidden = (25, 25, 29), …)</td><td style = \"text-align: right;\">0.101083</td></tr><tr><td class = \"rowNumber\" 
style = \"font-weight: bold; text-align: right;\">7</td><td style = \"text-align: left;\">MLP(hidden = (29, 61, 21), …)</td><td style = \"text-align: right;\">0.101466</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">8</td><td style = \"text-align: left;\">MLP(hidden = (29, 61, 5), …)</td><td style = \"text-align: right;\">0.107513</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">9</td><td style = \"text-align: left;\">MLP(hidden = (21, 61, 17), …)</td><td style = \"text-align: right;\">0.107874</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">10</td><td style = \"text-align: left;\">MLP(hidden = (45, 49, 61), …)</td><td style = \"text-align: right;\">0.111292</td></tr></tbody></table></div>"
"<div><div style = \"float: left;\"><span>10×2 DataFrame</span></div><div style = \"clear: both;\"></div></div><div class = \"data-frame\" style = \"overflow-x: scroll;\"><table class = \"data-frame\" style = \"margin-bottom: 6px;\"><thead><tr class = \"header\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">Row</th><th style = \"text-align: left;\">mlp</th><th style = \"text-align: left;\">measurement</th></tr><tr class = \"subheader headerLastRow\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\"></th><th title = \"MLJFlux.MLP{3}\" style = \"text-align: left;\">MLP…</th><th title = \"Float64\" style = \"text-align: left;\">Float64</th></tr></thead><tbody><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">1</td><td style = \"text-align: left;\">MLP(hidden = (45, 49, 21), …)</td><td style = \"text-align: right;\">0.0860875</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">2</td><td style = \"text-align: left;\">MLP(hidden = (25, 45, 33), …)</td><td style = \"text-align: right;\">0.0877367</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">3</td><td style = \"text-align: left;\">MLP(hidden = (29, 17, 53), …)</td><td style = \"text-align: right;\">0.0970372</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">4</td><td style = \"text-align: left;\">MLP(hidden = (61, 9, 29), …)</td><td style = \"text-align: right;\">0.0970978</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">5</td><td style = \"text-align: left;\">MLP(hidden = (49, 49, 9), …)</td><td style = \"text-align: right;\">0.0971594</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">6</td><td style = \"text-align: left;\">MLP(hidden = (21, 33, 61), …)</td><td style = \"text-align: right;\">0.0984172</td></tr><tr><td class = \"rowNumber\" 
style = \"font-weight: bold; text-align: right;\">7</td><td style = \"text-align: left;\">MLP(hidden = (57, 61, 61), …)</td><td style = \"text-align: right;\">0.099232</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">8</td><td style = \"text-align: left;\">MLP(hidden = (41, 13, 25), …)</td><td style = \"text-align: right;\">0.101498</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">9</td><td style = \"text-align: left;\">MLP(hidden = (53, 29, 21), …)</td><td style = \"text-align: right;\">0.105323</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">10</td><td style = \"text-align: left;\">MLP(hidden = (57, 33, 45), …)</td><td style = \"text-align: right;\">0.110168</td></tr></tbody></table></div>"
]
},
"metadata": {},
Expand Down
2 changes: 1 addition & 1 deletion docs/src/common_workflows/architecture_search/notebook.jl
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import Optimisers # native Flux.jl optimisers no longer supported
# ### Loading and Splitting the Data

iris = RDatasets.dataset("datasets", "iris");
y, X = unpack(iris, ==(:Species), colname -> true, rng = 123);
y, X = unpack(iris, ==(:Species), rng = 123);
X = Float32.(X); # To be compatible with type of network parameters
first(X, 5)

Expand Down
2 changes: 1 addition & 1 deletion docs/src/common_workflows/architecture_search/notebook.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import Optimisers # native Flux.jl optimisers no longer supported

````@example architecture_search
iris = RDatasets.dataset("datasets", "iris");
y, X = unpack(iris, ==(:Species), colname -> true, rng = 123);
y, X = unpack(iris, ==(:Species), rng = 123);
X = Float32.(X); # To be compatible with type of network parameters
first(X, 5)
````
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@
"cell_type": "code",
"source": [
"iris = RDatasets.dataset(\"datasets\", \"iris\");\n",
"y, X = unpack(iris, ==(:Species), colname -> true, rng = 123);\n",
"y, X = unpack(iris, ==(:Species), rng = 123);\n",
"X = Float32.(X); # To be compatible with type of network parameters\n",
"first(X, 5)"
],
Expand Down
8 changes: 4 additions & 4 deletions docs/src/common_workflows/comparison/notebook.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@
"cell_type": "code",
"source": [
"iris = RDatasets.dataset(\"datasets\", \"iris\");\n",
"y, X = unpack(iris, ==(:Species), colname -> true, rng=123);"
"y, X = unpack(iris, ==(:Species), rng=123);"
],
"metadata": {},
"execution_count": 3
Expand All @@ -107,7 +107,7 @@
{
"output_type": "execute_result",
"data": {
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (5, 4), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 50, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = ComputationalResources.CPU1{Nothing}(nothing))"
"text/plain": "NeuralNetworkClassifier(\n builder = MLP(\n hidden = (5, 4), \n σ = NNlib.relu), \n finaliser = NNlib.softmax, \n optimiser = Adam(0.01, (0.9, 0.999), 1.0e-8), \n loss = Flux.Losses.crossentropy, \n epochs = 50, \n batch_size = 8, \n lambda = 0.0, \n alpha = 0.0, \n rng = 42, \n optimiser_changes_trigger_retraining = false, \n acceleration = CPU1{Nothing}(nothing))"
},
"metadata": {},
"execution_count": 4
Expand Down Expand Up @@ -271,9 +271,9 @@
{
"output_type": "execute_result",
"data": {
"text/plain": "\u001b[1m4×2 DataFrame\u001b[0m\n\u001b[1m Row \u001b[0m│\u001b[1m mlp \u001b[0m\u001b[1m measurement \u001b[0m\n\u001b[90m Probabil… \u001b[0m\u001b[90m Float64 \u001b[0m\n─────┼────────────────────────────────────────────────\n 1 │ BayesianLDA(method = gevd, …) 0.0610826\n 2 │ NeuralNetworkClassifier(builder … 0.0857014\n 3 │ RandomForestClassifier(max_depth… 0.102881\n 4 │ ProbabilisticTunedModel(model = … 0.221056",
"text/plain": "\u001b[1m4×2 DataFrame\u001b[0m\n\u001b[1m Row \u001b[0m│\u001b[1m mlp \u001b[0m\u001b[1m measurement \u001b[0m\n\u001b[90m Probabil… \u001b[0m\u001b[90m Float64 \u001b[0m\n─────┼────────────────────────────────────────────────\n 1 │ BayesianLDA(method = gevd, …) 0.0610826\n 2 │ NeuralNetworkClassifier(builder … 0.0857014\n 3 │ RandomForestClassifier(max_depth… 0.107885\n 4 │ ProbabilisticTunedModel(model = … 0.221056",
"text/html": [
"<div><div style = \"float: left;\"><span>4×2 DataFrame</span></div><div style = \"clear: both;\"></div></div><div class = \"data-frame\" style = \"overflow-x: scroll;\"><table class = \"data-frame\" style = \"margin-bottom: 6px;\"><thead><tr class = \"header\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">Row</th><th style = \"text-align: left;\">mlp</th><th style = \"text-align: left;\">measurement</th></tr><tr class = \"subheader headerLastRow\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\"></th><th title = \"MLJModelInterface.Probabilistic\" style = \"text-align: left;\">Probabil…</th><th title = \"Float64\" style = \"text-align: left;\">Float64</th></tr></thead><tbody><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">1</td><td style = \"text-align: left;\">BayesianLDA(method = gevd, …)</td><td style = \"text-align: right;\">0.0610826</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">2</td><td style = \"text-align: left;\">NeuralNetworkClassifier(builder = MLP(hidden = (5, 4), …), …)</td><td style = \"text-align: right;\">0.0857014</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">3</td><td style = \"text-align: left;\">RandomForestClassifier(max_depth = -1, …)</td><td style = \"text-align: right;\">0.102881</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">4</td><td style = \"text-align: left;\">ProbabilisticTunedModel(model = XGBoostClassifier(test = 1, …), …)</td><td style = \"text-align: right;\">0.221056</td></tr></tbody></table></div>"
"<div><div style = \"float: left;\"><span>4×2 DataFrame</span></div><div style = \"clear: both;\"></div></div><div class = \"data-frame\" style = \"overflow-x: scroll;\"><table class = \"data-frame\" style = \"margin-bottom: 6px;\"><thead><tr class = \"header\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">Row</th><th style = \"text-align: left;\">mlp</th><th style = \"text-align: left;\">measurement</th></tr><tr class = \"subheader headerLastRow\"><th class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\"></th><th title = \"Probabilistic\" style = \"text-align: left;\">Probabil…</th><th title = \"Float64\" style = \"text-align: left;\">Float64</th></tr></thead><tbody><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">1</td><td style = \"text-align: left;\">BayesianLDA(method = gevd, …)</td><td style = \"text-align: right;\">0.0610826</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">2</td><td style = \"text-align: left;\">NeuralNetworkClassifier(builder = MLP(hidden = (5, 4), …), …)</td><td style = \"text-align: right;\">0.0857014</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">3</td><td style = \"text-align: left;\">RandomForestClassifier(max_depth = -1, …)</td><td style = \"text-align: right;\">0.107885</td></tr><tr><td class = \"rowNumber\" style = \"font-weight: bold; text-align: right;\">4</td><td style = \"text-align: left;\">ProbabilisticTunedModel(model = XGBoostClassifier(test = 1, …), …)</td><td style = \"text-align: right;\">0.221056</td></tr></tbody></table></div>"
]
},
"metadata": {},
Expand Down
2 changes: 1 addition & 1 deletion docs/src/common_workflows/comparison/notebook.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import Optimisers # native Flux.jl optimisers no longer supported
# ### Loading and Splitting the Data

iris = RDatasets.dataset("datasets", "iris");
y, X = unpack(iris, ==(:Species), colname -> true, rng=123);
y, X = unpack(iris, ==(:Species), rng=123);


# ### Instantiating the models Now let's construct our model. This follows a similar setup
Expand Down
2 changes: 1 addition & 1 deletion docs/src/common_workflows/comparison/notebook.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import Optimisers # native Flux.jl optimisers no longer supported

````@example comparison
iris = RDatasets.dataset("datasets", "iris");
y, X = unpack(iris, ==(:Species), colname -> true, rng=123);
y, X = unpack(iris, ==(:Species), rng=123);
nothing #hide
````

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@
"cell_type": "code",
"source": [
"iris = RDatasets.dataset(\"datasets\", \"iris\");\n",
"y, X = unpack(iris, ==(:Species), colname -> true, rng=123);"
"y, X = unpack(iris, ==(:Species), rng=123);"
],
"metadata": {},
"execution_count": null
Expand Down
Loading

0 comments on commit aea9436

Please sign in to comment.