From 4ea75ed5e4b340b7400313ca7c114d328c1851a3 Mon Sep 17 00:00:00 2001
From: autodocs
Date: Tue, 21 Feb 2017 17:19:13 +0000
Subject: [PATCH] build based on b2b2c20

---
 latest/apis/backends.html    | 2 +-
 latest/apis/batching.html    | 4 ++--
 latest/contributing.html     | 2 +-
 latest/examples/logreg.html  | 2 +-
 latest/index.html            | 2 +-
 latest/internals.html        | 2 +-
 latest/models/basics.html    | 2 +-
 latest/models/debugging.html | 2 +-
 latest/models/recurrent.html | 2 +-
 latest/models/templates.html | 2 +-
 latest/search_index.js       | 2 +-
 11 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/latest/apis/backends.html b/latest/apis/backends.html
index 8180d29c..97eee1ce 100644
--- a/latest/apis/backends.html
+++ b/latest/apis/backends.html
@@ -129,7 +129,7 @@ Backends
- +
diff --git a/latest/apis/batching.html b/latest/apis/batching.html
index f8d0a2f9..1ffc311d 100644
--- a/latest/apis/batching.html
+++ b/latest/apis/batching.html
@@ -145,7 +145,7 @@ Batching
- +
@@ -298,7 +298,7 @@ Right now, the
- +
diff --git a/latest/examples/logreg.html b/latest/examples/logreg.html
index e9f8c16c..52ef0a24 100644
--- a/latest/examples/logreg.html
+++ b/latest/examples/logreg.html
@@ -129,7 +129,7 @@ Logistic Regression
- +
diff --git a/latest/index.html b/latest/index.html
index 4bd761c7..3e9f79a3 100644
--- a/latest/index.html
+++ b/latest/index.html
@@ -132,7 +132,7 @@ Home
- +
diff --git a/latest/internals.html b/latest/internals.html
index 32a9ecae..9d671e8c 100644
--- a/latest/internals.html
+++ b/latest/internals.html
@@ -126,7 +126,7 @@ Internals
- +
diff --git a/latest/models/basics.html b/latest/models/basics.html
index c920f6eb..8af18f24 100644
--- a/latest/models/basics.html
+++ b/latest/models/basics.html
@@ -145,7 +145,7 @@ Model Building Basics
- +
diff --git a/latest/models/debugging.html b/latest/models/debugging.html
index f5985939..3ef33299 100644
--- a/latest/models/debugging.html
+++ b/latest/models/debugging.html
@@ -129,7 +129,7 @@ Debugging
- +
diff --git a/latest/models/recurrent.html b/latest/models/recurrent.html
index 1254de90..e57a9ba6 100644
--- a/latest/models/recurrent.html
+++ b/latest/models/recurrent.html
@@ -129,7 +129,7 @@ Recurrence
- +
diff --git a/latest/models/templates.html b/latest/models/templates.html
index 2f8e51ad..184c9a55 100644
--- a/latest/models/templates.html
+++ b/latest/models/templates.html
@@ -145,7 +145,7 @@ Model Templates
- +
diff --git a/latest/search_index.js b/latest/search_index.js
index b1e87ba0..5cdc52b7 100644
--- a/latest/search_index.js
+++ b/latest/search_index.js
@@ -173,7 +173,7 @@ var documenterSearchIndex = {"docs": [
 "page": "Batching",
 "title": "Future Work",
 "category": "section",
- "text": "The design of batching is still a fairly early work in progress, though it's used in a few places in the system. For example, all Flux models expect to be given Batch objects which are unwrapped into raw arrays for the computation. Models will convert their arguments if necessary, so it's convenient to call a model with a single data point like f([1,2,3]).Right now, the Batch or Seq types always stack along the left-most dimension. In future, this will be customisable, and Flux will provide implementations of common functions that are generic across the batch dimension. This brings the following benefits:Code can be written in a batch-agnostic way, i.e. as if working with a single data point, with batching happening independently.\nAutomatic batching can be done with correctness assured, reducing programmer errors when manipulating dimensions.\nOptimisations, like switching batch dimensions, can be expressed by the programmer with compiler support; fewer code changes are required and optimisations are guaranteed not to break the model.\nThis also opens the door for more automatic optimisations, e.g. having the compiler explore the search base of possible batching combinations."
+ "text": "The design of batching is still a fairly early work in progress, though it's used in a few places in the system. For example, all Flux models expect to be given Batch objects which are unwrapped into raw arrays for the computation. Models will convert their arguments if necessary, so it's convenient to call a model with a single data point like f([1,2,3]).Right now, the Batch or Seq types always stack along the left-most dimension. In future, this will be customisable, and Flux will provide implementations of common functions that are generic across the batch dimension. This brings the following benefits:Code can be written in a batch-agnostic way or be generic across batching setups. Code works with a single data point, and batching happens independently.\nAutomatic batching can be done with correctness assured, reducing programmer errors when manipulating dimensions.\nOptimisations, like switching batch dimensions, can be expressed by the programmer with compiler support; fewer code changes are required and optimisations are guaranteed not to break the model.\nThis also opens the door for more automatic optimisations, e.g. having the compiler explore the search base of possible batching combinations."
 },
 {
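
For reference, a minimal illustrative sketch (in Julia) of the behaviour the updated "Future Work" entry describes: calling a Flux model on a single data point versus an explicit Batch that stacks along the left-most dimension. This sketch is not part of the patch itself; the Affine layer and the Batch constructor are assumed from the Flux docs of this era, and exact names and signatures may differ in other versions.

    using Flux                      # Flux.jl, as documented by the pages this patch rebuilds

    m = Affine(3, 2)                # assumed: the Affine layer from the docs' model examples

    y  = m([1, 2, 3])               # a single data point; the model wraps/unwraps it, as the text describes

    ys = m(Batch([[1, 2, 3],        # assumed: Batch stacks the two data points along the
                  [4, 5, 6]]))      # left-most dimension, per the "Future Work" entry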