From 94e38c05b84dfc283e863f1d96ca5d48be3d3c68 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Wed, 27 Sep 2017 18:33:23 +0100
Subject: [PATCH 1/2] more informative

---
 src/optimise/train.jl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/optimise/train.jl b/src/optimise/train.jl
index 8ad437db..0a91e978 100644
--- a/src/optimise/train.jl
+++ b/src/optimise/train.jl
@@ -8,8 +8,8 @@ function train!(m, data, opt; cb = () -> ())
   cb = tocb(cb)
   @progress for x in data
     l = m(x...)
-    isinf(l.data[]) && error("Inf")
-    isnan(l.data[]) && error("NaN")
+    isinf(l.data[]) && error("Loss is Inf")
+    isnan(l.data[]) && error("Loss is NaN")
     back!(l)
     opt()
     cb()

From c51f5afb3d2d14247e332ae0cf711a904f374288 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Wed, 27 Sep 2017 18:37:07 +0100
Subject: [PATCH 2/2] clarity

---
 docs/src/training/training.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/src/training/training.md b/docs/src/training/training.md
index 1eaa8a46..d4bed5fe 100644
--- a/docs/src/training/training.md
+++ b/docs/src/training/training.md
@@ -9,7 +9,7 @@ To actually train a model we need three things:
 With these we can call `Flux.train!`:
 
 ```julia
-Flux.train!(model, data, opt)
+Flux.train!(modelLoss, data, opt)
 ```
 
 There are plenty of examples in the [model zoo](https://github.com/FluxML/model-zoo).
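
For reference, a minimal sketch of how the `Flux.train!(modelLoss, data, opt)` call from the updated docs might be wired up. The `Dense` model, the squared-error loss, the toy data, and the `SGD(params(m), 0.01)` optimiser are all invented here for illustration, assuming the Flux API of this period; they are not part of the patch.

```julia
using Flux

# Hypothetical model: a single dense layer mapping 10 features to 2 outputs.
m = Dense(10, 2)

# `train!` calls this with each data point and expects the loss back, which is
# why the docs rename the argument from `model` to `modelLoss`.
modelLoss(x, y) = sum((m(x) .- y) .^ 2)

# Toy dataset: a collection of (input, target) tuples.
data = [(rand(10), rand(2)) for _ in 1:100]

# Zero-argument optimiser closure, matching the `opt()` call inside `train!`.
opt = SGD(params(m), 0.01)

Flux.train!(modelLoss, data, opt)
```

With the first patch applied, a diverging run of this loop would now abort with `Loss is Inf` or `Loss is NaN` rather than the bare `Inf`/`NaN` messages.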