From 0fd701c7fecb16f71304ed0b75eeeb528a81ade2 Mon Sep 17 00:00:00 2001
From: Mike Innes
Date: Sat, 14 Apr 2018 02:01:22 +0100
Subject: [PATCH] update paper

---
 paper/paper.bib | 28 ++++++++++++++++++----------
 paper/paper.md  |  4 ++--
 2 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/paper/paper.bib b/paper/paper.bib
index b7f0bd0a4f..c8e6dbd570 100644
--- a/paper/paper.bib
+++ b/paper/paper.bib
@@ -17,26 +17,34 @@ @article{besard:2017,
   url = {http://arxiv.org/abs/1712.03112},
 }
 
+@online{MLPL,
+  author = {Mike Innes and others},
+  title = {On Machine Learning and Programming Languages},
+  year = 2017,
+  url = {https://julialang.org/blog/2017/12/ml&pl},
+  urldate = {2018-02-16}
+}
+
 @online{CuArrays,
-  author = {Mike Innes},
+  author = {Mike Innes and others},
   title = {Generic GPU Kernels},
   year = 2017,
   url = {http://mikeinnes.github.io/2017/08/24/cudanative.html},
   urldate = {2018-02-16}
 }
 
-@online{MLPL,
+@online{Zoo,
   author = {Mike Innes and others},
-  title = {On Machine Learning and Programming Languages},
-  year = 2017,
-  url = {https://julialang.org/blog/2017/12/ml&pl},
+  title = {Flux Model Zoo},
+  year = 2018,
+  url = {https://github.com/FluxML/model-zoo/},
   urldate = {2018-02-16}
 }
 
-@online{Fusion,
-  author = {Steven G. Johnson},
-  title = {More Dots: Syntactic Loop Fusion in Julia},
-  year = 2017,
-  url = {https://julialang.org/blog/2017/01/moredots},
+@online{Minibatch,
+  author = {James Bradbury},
+  title = {Minibatch.jl},
+  year = 2018,
+  url = {https://github.com/jekbradbury/Minibatch.jl},
   urldate = {2018-02-16}
 }
diff --git a/paper/paper.md b/paper/paper.md
index b3be017f25..1188afb5e0 100644
--- a/paper/paper.md
+++ b/paper/paper.md
@@ -24,8 +24,8 @@ bibliography: paper.bib
 
 Flux is a library for machine learning (ML), written using the numerical computing language Julia [@Julia]. The package allows models to be written using Julia's simple mathematical syntax, and applies automatic differentiation (AD) to seamlessly calculate derivatives and train the model. Meanwhile, it makes heavy use of Julia's language and compiler features to carry out code analysis and apply optimisations. For example, Julia's GPU compilation support [@besard:2017] can be used to JIT-compile custom GPU kernels for model layers [@CuArrays].
 
-The machine learning community has traditionally been divided between "static" and "dynamic" frameworks that are easy to optimise and easy to use, respectively [@MLPL]. Flux blurs the line between these two approaches, combining a highly intuitive programming model with the compiler techniques needed by ML. As a result of this approach, it already supports several features not available in any other dynamic framework, such as kernel fusion [@Fusion], memory usage optimisations, importing of models via ONNX, and deployment of models to JavaScript for running in the browser.
+The machine learning community has traditionally been divided between "static" and "dynamic" frameworks that are easy to optimise and easy to use, respectively [@MLPL]. Flux blurs the line between these two approaches, combining a highly intuitive programming model with the compiler techniques needed by ML. This enables research into advanced compiler transforms such as batching [@Minibatch] without changing any user code.
 
-Flux has been used heavily for natural language processing, but can also support state-of-the-art research models in areas like computer vision, reinforcement learning and robotics.
+Flux has been used heavily for natural language processing, but can also support state-of-the-art research models in areas like computer vision, reinforcement learning and robotics. Many examples of such models can be found in the model zoo [@Zoo].
 
 # References
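The paper text above claims that models are written in Julia's plain mathematical syntax and trained via AD. As a concrete illustration, below is a minimal sketch of that workflow using the Flux API as documented around the time of this commit (`Chain`, `Dense`, `params`, `SGD`, `Flux.train!`); the layer sizes, toy data, and learning rate are illustrative assumptions, not taken from the paper.

```julia
using Flux

# A small multi-layer perceptron built from Flux's layer primitives.
m = Chain(Dense(10, 32, relu), Dense(32, 2), softmax)

# The loss is ordinary Julia code; Flux's AD differentiates through it.
loss(x, y) = Flux.crossentropy(m(x), y)

# Toy data: 100 random 10-feature samples with one-hot labels over 2 classes.
x = rand(10, 100)
y = Flux.onehotbatch(rand(1:2, 100), 1:2)

# One epoch of stochastic gradient descent; in the Flux API of this period
# the optimiser closes over the model's parameters.
opt = SGD(params(m), 0.1)
Flux.train!(loss, [(x, y)], opt)
```

Under the same programming model, the GPU path cited in the first paragraph [@besard:2017; @CuArrays] is reached by moving parameters and data to the GPU (e.g. `mapleaves(cu, m)` in the Flux documentation of this era) rather than by rewriting the model.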