From 91aadf65846432102e6fdf4a8bede618f6cf07b4 Mon Sep 17 00:00:00 2001
From: Mike J Innes
Date: Fri, 16 Feb 2018 16:39:52 +0000
Subject: [PATCH] joss paper

---
 paper/paper.bib | 44 ++++++++++++++++++++++++++++++++++++++++++++++
 paper/paper.md  | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 96 insertions(+)
 create mode 100644 paper/paper.bib
 create mode 100644 paper/paper.md

diff --git a/paper/paper.bib b/paper/paper.bib
new file mode 100644
index 00000000..82361739
--- /dev/null
+++ b/paper/paper.bib
@@ -0,0 +1,44 @@
+@article{Julia,
+  author = {Bezanson, Jeff and Edelman, Alan and Karpinski, Stefan and Shah, Viral B.},
+  title = {Julia: A Fresh Approach to Numerical Computing},
+  journal = {SIAM Review},
+  volume = {59},
+  number = {1},
+  pages = {65--98},
+  year = {2017},
+  doi = {10.1137/141000671},
+  url = {https://julialang.org/publications/julia-fresh-approach-BEKS.pdf}
+}
+
+@article{besard:2017,
+  author = {Besard, Tim and Foket, Christophe and De Sutter, Bjorn},
+  title = {Effective Extensible Programming: Unleashing {Julia} on {GPUs}},
+  journal = {arXiv},
+  volume = {abs/1712.03112},
+  year = {2017},
+  url = {http://arxiv.org/abs/1712.03112}
+}
+
+@online{CuArrays,
+  author = {Innes, Mike},
+  title = {Generic GPU Kernels},
+  year = {2017},
+  url = {http://mikeinnes.github.io/2017/08/24/cudanative.html},
+  urldate = {2018-02-16}
+}
+
+@online{MLPL,
+  author = {Innes, Mike and others},
+  title = {On Machine Learning and Programming Languages},
+  year = {2017},
+  url = {https://julialang.org/blog/2017/12/ml&pl},
+  urldate = {2018-02-16}
+}
+
+@online{Fusion,
+  author = {Johnson, Steven G.},
+  title = {More Dots: Syntactic Loop Fusion in Julia},
+  year = {2017},
+  url = {https://julialang.org/blog/2017/01/moredots},
+  urldate = {2018-02-16}
+}
diff --git a/paper/paper.md b/paper/paper.md
new file mode 100644
index 00000000..aa487cf6
--- /dev/null
+++ b/paper/paper.md
@@ -0,0 +1,52 @@
+---
+title: 'Flux: Seamless machine learning with Julia'
+tags:
+  - deep learning
+  - machine learning
+  - natural language processing
+  - computer vision
+  - reinforcement learning
+  - robotics
+  - automatic differentiation
+  - compiler
+authors:
+  - name: Mike Innes
+    orcid: 0000-0003-0788-0242
+    affiliation: 1
+affiliations:
+  - name: Julia Computing
+    index: 1
+date: 16 February 2018
+bibliography: paper.bib
+---
+
+# Summary
+
+Flux is a library for machine learning (ML), written using the numerical computing language Julia [@Julia]. The package allows models to be written using Julia's simple mathematical syntax, and applies automatic differentiation (AD) to seamlessly calculate derivatives and train the model. It also makes heavy use of Julia's advanced language features to carry out code analysis and perform optimisations. For example, Julia's GPU compilation support [@besard:2017] can be used to JIT-compile custom GPU kernels for model layers [@CuArrays].
+
+The machine learning community has traditionally been divided between "static" frameworks, which are easy to optimise, and "dynamic" ones, which are easy to use [@MLPL]. Flux blurs the line between the two, combining a highly intuitive programming model with the advanced compiler techniques needed by ML. As a result, it already supports several features not available in any other dynamic framework, such as kernel fusion [@Fusion], memory usage optimisations, importing of models via ONNX, and deployment of models to JavaScript for running in the browser.
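+
+For a flavour of the programming model, the sketch below defines and trains a small model; the layer sizes, data and optimiser settings are placeholders rather than anything prescribed by Flux.
+
+```julia
+using Flux
+
+# A small feed-forward network, written as ordinary Julia code.
+model = Chain(
+  Dense(10, 5, relu),
+  Dense(5, 2),
+  softmax)
+
+# The loss is a plain Julia function; AD supplies its gradients.
+loss(x, y) = Flux.crossentropy(model(x), y)
+
+# Placeholder data and a single gradient-descent training pass.
+data = [(rand(10), [0, 1])]
+Flux.train!(loss, data, SGD(params(model), 0.1))
+```
+
+Here `train!` derives gradients of the loss automatically and updates the model's parameters in place; because layers such as `Dense` are themselves ordinary Julia functions, broadcast expressions like `σ.(W*x .+ b)` inside them can be fused into a single CPU loop or GPU kernel.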
+
+Flux has been used heavily for natural language processing, but can also support state-of-the-art research models in areas like computer vision, reinforcement learning and robotics.
+
+# References