├── .github
│   └── workflows
│       └── CI.yml
├── .gitignore
├── Project.toml
├── README.md
├── config.toml
├── contents
│   ├── about.md
│   ├── appendix.md
│   ├── index.md
│   ├── preface.md
│   ├── references.md
│   ├── rnn.md
│   ├── transfer_learning.md
│   └── why_julia.md
├── files
│   ├── DL-icon.png
│   ├── DLGitHubPreview.png
│   └── bibliography.bib
├── metadata.yml
├── pandoc
│   ├── favicon.ico
│   ├── favicon.png
│   └── favicon_package_v0.16
│       ├── android-chrome-192x192.png
│       ├── android-chrome-384x384.png
│       ├── apple-touch-icon.png
│       ├── browserconfig.xml
│       ├── favicon-16x16.png
│       ├── mstile-150x150.png
│       └── site.webmanifest
└── src
    ├── DeepLearningWithJulia.jl
    ├── ci.jl
    └── transfer_learning.ipynb
/.github/workflows/CI.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |   workflow_dispatch:
9 |
10 | jobs:
11 |   BuildAndDeploy:
12 |     runs-on: ubuntu-20.04
13 |     steps:
14 |       - uses: actions/checkout@v2
15 |         with:
16 |           persist-credentials: false
17 |
18 |       - uses: julia-actions/setup-julia@v1
19 |         with:
20 |           version: "1.6"
21 |
22 |       - uses: actions/cache@v1
23 |         env:
24 |           cache-name: cache-artifacts
25 |         with:
26 |           path: ~/.julia/artifacts
27 |           key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }}
28 |           restore-keys: |
29 |             ${{ runner.os }}-test-${{ env.cache-name }}-
30 |             ${{ runner.os }}-test-
31 |             ${{ runner.os }}-
32 |
33 |       - name: Install dependencies
34 |         run: julia --color=yes --project -e 'using Pkg; Pkg.instantiate();
35 |           using Books; Books.install_dependencies()'
36 |
37 |       - run: >
38 |           DISPLAY=:0 xvfb-run -s '-screen 0 1024x768x24' julia --project -e 'using DeepLearningWithJulia; DeepLearningWithJulia.build()'
39 |
40 |       - name: Deploy to secondary branch
41 |         if: ${{ github.event_name != 'pull_request' }}
42 |         uses: peaceiris/actions-gh-pages@v3
43 |         with:
44 |           cname: deeplearningwithjulia.com
45 |           github_token: ${{ secrets.GITHUB_TOKEN }}
46 |           force_orphan: true
47 |           publish_dir: ./_build/
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | _build/
2 | _gen/
3 | *.log
4 | Manifest.toml
5 | .vscode/
6 | .DS_Store
7 |
--------------------------------------------------------------------------------
/Project.toml:
--------------------------------------------------------------------------------
1 | name = "DeepLearningWithJulia"
2 | uuid = "9bab8e88-9c62-4226-b11f-da5a636a4fd2"
3 | authors = ["Logan Kilpatrick"]
4 | version = "0.0.1"
5 |
6 | [deps]
7 | Books = "939d5c6b-51ae-42e7-97ca-7564d0d4ad91"
8 | DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
9 | Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
10 | Metalhead = "dbeba491-748d-5e0e-a39e-b530a07fa0cc"
11 | Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Deep Learning with Julia
2 |
3 | 
4 |
5 | DL with Julia is a book about how to do various deep learning tasks using the Julia programming language and specifically the Flux.jl package. The intent of the book is to prove that serious deep learning can be done in Julia and that the ecosystem as a whole is ready for the spotlight.
6 |
7 | ## Getting Started
8 |
9 | All of the code and resources for this book are stored here on GitHub and deployed to [https://deeplearningwithjulia.com](https://deeplearningwithjulia.com).
10 |
11 |
12 | ## Content
13 |
14 | At the present moment, my focus is on writing materials in the following areas:
15 |
16 | - [ ] Basic Recurrent Neural Networks with Flux.jl (WIP)
17 | - [ ] CNN Basics with Flux.jl
18 | - [ ] Transfer Learning for Computer Vision with Flux.jl (WIP)
19 | - [ ] Solving basic NLP problems with Flux.jl
20 | - [ ] Preparing and using data with Flux.jl (inspired somewhat by our image augmentation assignment)
21 | - [ ] Saving and loading machine learning models in Flux.jl
22 | - [ ] Automatic Differentiation in Flux ([The Simple Essence of Automatic Differentiation](https://www.microsoft.com/en-us/research/video/the-simple-essence-of-automatic-differentiation/))
23 |
--------------------------------------------------------------------------------
/config.toml:
--------------------------------------------------------------------------------
1 | [projects]
2 |
3 | [projects.default]
4 | contents = [
5 |     "about",
6 |     "preface",
7 |     "why_julia",
8 |     "transfer_learning",
9 |     "appendix",
10 |     "references",
11 | ]
12 |
13 | # Full URL, required for the sitemap and robots.txt.
14 | online_url = "https://deeplearningwithjulia.com"
15 |
16 | # Extra directories to be copied.
17 | extra_directories = []
18 |
19 | # Port used by serve()
20 | port = 8004
21 |
22 | output_filename = "DeepLearningWithJulia"
23 |
--------------------------------------------------------------------------------
/contents/about.md:
--------------------------------------------------------------------------------
1 | # About {#sec:about}
2 |
3 | 
4 |
5 | Deep Learning with Julia is a book about how to do various deep learning tasks using the Julia programming language and specifically the Flux.jl package. The intent of the book is to prove that serious deep learning can be done in Julia and that the ecosystem as a whole is ready for the spotlight.
6 |
7 | ## Getting Started
8 |
9 | All of the code and resources for this book are stored here on GitHub and deployed to [https://deeplearningwithjulia.com](https://deeplearningwithjulia.com).
10 |
11 |
12 | This book is built via [Books.jl](https://books.huijzer.xyz) and is made possible by the Julia programming language [@bezanson2017julia] and [pandoc](https://github.com/jgm/pandoc).
13 |
--------------------------------------------------------------------------------
/contents/appendix.md:
--------------------------------------------------------------------------------
1 | # Appendix {-}
2 |
3 | This is the appendix.
4 |
5 |
--------------------------------------------------------------------------------
/contents/index.md:
--------------------------------------------------------------------------------
1 | # Welcome {-}
2 |
3 | ```{=comment}
4 | This file is not included in the PDF.
5 | ```
6 |
7 | 
8 |
9 | Deep Learning with Julia is a book about how to do various deep learning tasks using the Julia programming language and specifically the Flux.jl package. The intent of the book is to prove that serious deep learning can be done in Julia and that the ecosystem as a whole is ready for the spotlight.
10 |
11 | You can find the code for this website [on GitHub](https://github.com/logankilpatrick/DeepLearningWithJulia).
12 |
--------------------------------------------------------------------------------
/contents/preface.md:
--------------------------------------------------------------------------------
1 | # Preface {#sec:preface}
2 |
3 | The world is undergoing a radical shift. Machine Learning (and specifically Deep Learning) is being adopted at an accelerating rate across every industry and domain. Humans have unlocked a significant new tool, the likes of which have not been seen since the software explosion of the early 2000s. But what does this mean for developers and those interested in working with these tools?
4 |
5 | Like many fields in the technology industry, there is a great deal of gatekeeping in the Machine Learning community. Those in positions of power often make it seem as though using and understanding machine learning is reserved for those with PhDs and years of experience. This could not be farther from the truth. As the use of machine learning has increased, the barriers to entry have been slowly torn down. The result is that students with minimal programming and machine learning experience can now enter the field and advance the current state of the art.
6 |
7 | In this book, we will touch on:
8 | - What the Julia Programming Language is and why we will be using it
9 | - What Deep Learning is
10 | - Various applications of Deep Learning
11 |
12 | ## Why Deep Learning? {#sec:why_deep_learning}
13 |
14 | Why are we focusing on Deep Learning? How is that any different from machine learning? These are both great questions. We will delve more into the details in chapter 2, but the quick answer is that deep learning is a specific machine learning technique, and the reason we want to focus on it is that many of the advancements and applications you have read about under the "AI" or "Machine Learning" title are actually deep learning solutions under the hood. Use cases like self-driving cars, digital voice assistants, and recommendation engines (like those on YouTube and Netflix) are all powered by Deep Learning.
15 |
16 | ## Why does this book exist? {#sec:book_motivation}
17 |
18 | What was the point of writing this book? Currently, almost all deep learning practitioners use Python, and most deep learning books focus on Python. Given the popularity of the language, this is a natural choice, especially given the prevalence of high-quality libraries like Pandas, NumPy, TensorFlow, PyTorch, etc. However, as the Julia programming language continues to grow and gain adoption, more and more users are arriving and expecting a world-class deep learning experience. While we have Flux.jl for deep learning in Julia, there are currently few resources for learning about deep learning in Julia. In the Python ecosystem, by contrast, there are at least four or five foundational deep learning books, which I personally used during my learning journey and found to be excellent. The goal of this book is to show people that doing deep learning in Julia is a viable choice and, more than that, can actually come with significant advantages over other languages and frameworks.
19 |
20 | ## Acknowledgements {#sec:acknowledgements}
21 |
22 | I could write an entire book just going through all of the folks who have helped me get here. In general, this book, my career, and my life are a product of support from a large group of amazing people: from my parents and family, to teachers and professors, and especially the Julia community, without which no one would be reading this text. I will spare you the rest of the sappy narrative, but know that I appreciate each and every person who helped me get here.
--------------------------------------------------------------------------------
/contents/references.md:
--------------------------------------------------------------------------------
1 | # References
2 |
--------------------------------------------------------------------------------
/contents/rnn.md:
--------------------------------------------------------------------------------
1 | # Recurrent Neural Networks {#sec:rnn}
2 |
3 | At a high level, recurrence is built on the idea that the order (or sequence) of input data is important when trying to derive some final output. Spoken or written words are a great example of this. If you randomize the order of the words, a doesn't with up sentence you sense end make that. Okay, you get the idea. But how can we model the idea of sequential data in a neural network?
4 |
5 | The simplest way to do so is to take a neuron, which normally only takes in a set of new input data, and also pass it the internal state of the previous neurons. This idea is fundamentally referred to as neuron memory and is the mechanism which enables sequential data to be processed. To reiterate, rather than a model taking in and processing some discrete input (i.e. a single word), it now takes in that word along with the context of all previous words. This context makes it significantly more feasible to predict the next word that might be stated. And again, the sequential aspect of this is important because in the context of words, sequence is everything.
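
To make this idea concrete, here is a minimal sketch of a recurrent layer in Flux. It is illustrative only: the layer sizes are toy values, and it assumes the Flux v0.12-era constructor `RNN(in, out)`, which returns a stateful layer.

```julia
using Flux

# A recurrent layer mapping 4 input features to 3 hidden features.
# Each call both produces an output and updates the internal hidden
# state, which is the "neuron memory" described above.
rnn = RNN(4, 3)

sequence = [rand(Float32, 4) for _ in 1:5]  # a toy sequence of 5 steps
outputs = [rnn(x) for x in sequence]        # step t sees the state from steps 1 to t-1

Flux.reset!(rnn)  # clear the hidden state before processing a new sequence
```

Note that calling the layer twice on the same input can give different outputs, because the hidden state has changed in between; that history-dependence is exactly what makes the network "recurrent".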
6 |
7 |
--------------------------------------------------------------------------------
/contents/transfer_learning.md:
--------------------------------------------------------------------------------
1 | # Transfer Learning {#sec:transfer_learning}
2 |
3 | Transfer learning is one of the most useful and underrated deep learning tools. It allows us to take a model which was trained on a large data set and "fine-tune" it (more on what that means later) to work well for our specific use case. It gets its name from the idea of learned information being transferred from one use case to another.
4 |
5 | You might be asking yourself, why is this so powerful? Well, during the learning process, a deep learning model has to figure out things like what an edge looks like (in the case of computer vision). This might seem like an abstract idea, but it is a critical step for the model to learn how to distinguish between multiple objects. When we use transfer learning, many of these ideas have already been learned by the existing model we are starting with. This means that we do not need to spend as long training, since the model already knows some information about objects, even though the pre-trained model may never have seen data similar to the data you will be feeding into it.
6 |
7 | Here is a simple example to try and illustrate why transfer learning works so well: imagine you are trying to teach someone what a car is. This person has never seen a car and does not know what it does, but they have seen a bicycle before and in fact use one every day. You can now explain what a car is in terms of how it relates to a bike. The transfer learning process is much like this: use the existing information the model has learned and build off of that for some specific situation.
8 |
9 | ## Pre-trained Models {#sec:pre-trained_models}
10 |
11 | The way in which we do transfer learning in the context of deep learning is with pre-trained models. But what is a pre-trained model? In general, we are referring to a model which has been trained on a specific data set. We will be exploring transfer learning in the context of computer vision, so this means the model saw many images, each with a label saying what it is, and over time the model learned to correctly produce the label given the image. There are a lot of different ideas going on here: computer vision, datasets, transfer learning, and more. *If any of this is not making sense, that is totally expected; there is a lot of jargon as well as many new ideas being introduced. Try to stay focused on the high level and we will come back to many of these topics in more detail.*
12 |
13 | Now that we know the high level idea of transfer learning, let us dive into a real example using Flux. To give some context, in other machine learning frameworks like PyTorch, the pre-trained models are built right into PyTorch itself. In the Flux ecosystem however, the pre-trained models live in a package called [Metalhead.jl](https://github.com/FluxML/Metalhead.jl). Metalhead is built to work with the Flux ecosystem so you do not need to worry about compatibility issues.
14 |
15 | ## Metalhead.jl {#sec:metalhead}
16 |
17 | Let us start out by installing Metalhead in the package manager by doing `add Metalhead`. Then we can type `using Flux, Metalhead` into a Julia terminal session. Metalhead provides a number of different model types like `ResNet`, `VGG`, and more. Over the course of your deep learning experience, you will become more familiar with these model types as they represent some of the most common models for transfer learning. Before we dive into actually using pre-trained models, we will first take a quick look at the model structure which we get from Metalhead and Flux.
18 |
19 | ```julia
20 | julia> model = ResNet50(pretrain=false)
21 | ResNet(
22 |   Chain(
23 |     Chain(
24 |       Conv((7, 7), 3 => 64, pad=3, stride=2),         # 9_472 parameters
25 |       BatchNorm(64, relu),                            # 128 parameters, plus 128
26 |       MaxPool((3, 3), pad=1, stride=2),
27 |       Parallel(
28 |         Metalhead.var"#18#20"(),
29 |         Chain(
30 |           Conv((1, 1), 64 => 64, bias=false),         # 4_096 parameters
31 |           BatchNorm(64, relu),                        # 128 parameters, plus 128
32 |           Conv((3, 3), 64 => 64, pad=1, bias=false),  # 36_864 parameters
33 |           BatchNorm(64, relu),                        # 128 parameters, plus 128
34 |           Conv((1, 1), 64 => 256, bias=false),        # 16_384 parameters
35 |           BatchNorm(256),                             # 512 parameters, plus 512
36 |         ),
37 |         Chain(
38 |           Conv((1, 1), 64 => 256, bias=false),        # 16_384 parameters
39 |           BatchNorm(256),                             # 512 parameters, plus 512
40 |         ),
41 |       ),
42 |       ...
43 |       ...
44 |       ...
45 |     Chain(
46 |       AdaptiveMeanPool((1, 1)),
47 |       Flux.flatten,
48 |       Dense(2048, 1000),                              # 2_049_000 parameters
49 |     ),
50 |   ),
51 | ) # Total: 162 trainable arrays, 25_557_096 parameters,
52 |   # plus 106 non-trainable, 53_120 parameters, summarysize 97.749 MiB.
53 | ```
54 | Now that is a lot to take in (even though most of the model has been omitted for space reasons), but at a high level, it represents the structure of the `ResNet50` model as defined in Flux. Notably, the `50` in `ResNet50` comes from the fact that the model has 50 layers. There are other ResNet-like models with different numbers of layers but with the same overall structure.
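
With the structure in mind, here is a minimal sketch of what the "fine-tuning" setup usually looks like. This is a sketch, not a full training loop, and it assumes the two-part `Chain` printed above (a convolutional backbone followed by the pooling/`Dense` classifier head) as well as pre-trained weights being available via `pretrain=true`:

```julia
using Flux, Metalhead

# This time, load the pre-trained ImageNet weights.
model = ResNet50(pretrain=true)

# Keep the convolutional backbone, but swap the 1000-class head
# for one matching our own problem (here, 2 classes).
backbone = model.layers[1]
new_head = Chain(AdaptiveMeanPool((1, 1)), Flux.flatten, Dense(2048, 2))
finetune_model = Chain(backbone, new_head)

# A common choice is to "freeze" the backbone and train only the new
# head, by passing only the head's parameters to the optimizer.
ps = Flux.params(new_head)
```

The rest, loading data and running the training loop, follows the same pattern as training any other Flux model.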
55 |
56 |
--------------------------------------------------------------------------------
/contents/why_julia.md:
--------------------------------------------------------------------------------
1 |
2 | # Why Julia {#sec:why_julia}
3 |
4 | If you have decided to pick up this book, you have likely heard or been told things about the awesome power of the Julia programming language. This chapter is dedicated to those who have not yet been convinced that Julia is the language of the future. If I do not need to convince you, please skip to the next chapter to dive into the fun. My personal hope is that one day soon, the Julia community will be large and mature enough that authors of Julia books need not include a “Why Julia” chapter. Until we get to that point, it is still worth talking about the benefits. Now, back to Julia!
5 |
6 | The Julia programming language was created in 2012 by a group of folks who believed that the scientific computing ecosystem could be better. They were fed up with MATLAB and Python because the former is not open source and is pay-to-play, while the latter is generally not performant enough to scale up in production environments. Researchers and programmers alike would generally use these tools for prototyping, but when it came time to deploy, they would be forced to rewrite their code in C++ or C in order to meet the performance thresholds required.
7 |
8 | This phenomenon was coined the “Two Language Problem” and Julia was created, in large part, to address it. After many years of hard work by Stefan Karpinski, Alan Edelman, Viral Shah, Jeff Bezanson, and enthusiastic contributors around the world, the language hit its 1.0 release in 2018. The 1.0 release marked a huge milestone for the Julia community in terms of stability and gave users confidence that Julia would be around for the long haul.
9 |
10 | In late 2021, Julia 1.6 was selected as the long-term support (LTS) release. We will be using Julia 1.6 in this book so that the content will be as stable as possible for years to come.
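
You can check which version you are running directly from the REPL (the output shown is for the 1.6.3 build used throughout this book):

```julia
julia> VERSION
v"1.6.3"
```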
11 |
12 | Now that we have some historical context on Julia and why it was created, let us next move through some additional features which make Julia a natural choice for Deep Learning, Machine Learning, and more generally, science.
13 |
14 | ## Multiple Dispatch {#sec:dispatch}
15 |
16 | There is no one better to talk about the idea of Multiple Dispatch and its use in Julia than Stefan Karpinski. In a 2019 JuliaCon talk titled [“The Unreasonable Effectiveness of Multiple Dispatch”](https://youtu.be/kc9HwsxE1OY), Stefan stated that the Julia ecosystem has more code re-use than any ecosystem he has ever seen. Multiple Dispatch is the paradigm that allows this to happen. So what is Multiple Dispatch and why is it so unreasonably effective? For the latter point, I suggest watching Stefan’s talk; there is no sense in restating what he already put so eloquently. So back to the main question: what is multiple dispatch?
17 | The main idea here is that you can write multiple functions with the same name, which dispatch (that is, are selected dynamically) depending on the types of the input arguments. This idea is not necessarily unique to Julia; other languages have multiple dispatch or similar concepts. But the way in which it is used and implemented in Julia is the secret sauce. Let us now look at a quick example:
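
```julia
julia> describe(x::Int) = "an integer: $x"
describe (generic function with 1 method)

julia> describe(x::String) = "a string: $x"
describe (generic function with 2 methods)

julia> describe(42)
"an integer: 42"

julia> describe("hello")
"a string: hello"
```

Here, two methods share the name `describe` (a name chosen purely for illustration), and Julia selects the right one based on the argument's type at the call site. With *multiple* dispatch, this selection considers the types of all the arguments together, not just the first one as in single-dispatch object-oriented languages.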
18 |
19 |
20 | ## Package Management {#sec:packages}
21 |
22 | Most people are unlikely to choose a programming language based on its package manager (or lack thereof). Despite this reality, the Julia package manager is one of those features that really makes me appreciate the language. Julia’s package manager is extremely simple to work with and also enables much more reproducible code. Let us explore and see how this is the case:
23 |
24 | There are two different fundamental ways of working with packages in Julia: via the REPL's Pkg mode and via the Pkg package. I will focus on using the REPL since it is extremely intuitive for new users. You can start by launching Julia in the terminal. Then, type `]`, which should take you into Pkg mode:
25 |
26 | ```julia
27 |                _
28 |    _       _ _(_)_     |  Documentation: https://docs.julialang.org
29 |   (_)     | (_) (_)    |
30 |    _ _   _| |_  __ _   |  Type "?" for help, "]?" for Pkg help.
31 |   | | | | | | |/ _` |  |
32 |   | | |_| | | | (_| |  |  Version 1.6.3 (2021-09-23)
33 |  _/ |\__'_|_|_|\__'_|  |  Official https://julialang.org/ release
34 | |__/                   |
35 |
36 | (@v1.6) pkg>
37 | ```
38 | From here, one of the natural things to do is check what commands you can run in the package manager. To do this, type `?` and press enter/return.
39 |
40 | You will see all the possible commands:
41 |
42 | ```julia
43 | (@v1.6) pkg> ?
44 |   Welcome to the Pkg REPL-mode. To return to the julia> prompt, either press
45 |   backspace when the input line is empty or press Ctrl+C.
46 |
47 |   Synopsis
48 |
49 |   pkg> cmd [opts] [args]
50 |
51 |   Multiple commands can be given on the same line by interleaving a ; between
52 |   the commands. Some commands have an alias, indicated below.
53 |
54 |   Commands
55 |
56 |   activate: set the primary environment the package manager manipulates
57 |
58 |   add: add packages to project
59 |
60 |   build: run the build script for packages
61 |
62 |   develop, dev: clone the full package repo locally for development
63 |   ...
64 |   ...
65 | ```
66 | Some of the most common commands you will use are `add`, `activate`, `status` (or the shorthand `st`), and `remove` (or the shorthand `rm`). In this book, we will be using Flux.jl, so if you want to play around with the package, you can simply install it by typing `add Flux`.
67 |
68 | After the install finishes, you can check your package environment by typing `status`:
69 |
70 | ```julia
71 | (@v1.6) pkg> status
72 |       Status `~/.julia/environments/v1.6/Project.toml`
73 |   [587475ba] Flux v0.12.7
74 | ```
75 | The package manager automatically shows the file which is managing the packages and their versions. In this case, it is `~/.julia/environments/v1.6/Project.toml`. As a general best practice, it is recommended to always use local environments instead of making changes to your main Julia environment. You can do this by activating a new environment.
76 |
77 | ```julia
78 | julia> pwd()
79 | "/Users/logankilpatrick"
80 |
81 | shell> cd Desktop # type `;` to enter the shell mode from the REPL
82 | /Users/logankilpatrick/Desktop
83 |
84 | (@v1.6) pkg> activate .
85 |   Activating new environment at `~/Desktop/Project.toml`
86 | ```
87 | In the code above, you can see I started out in my main user folder. I then entered shell mode by typing `;` and used the change directory command to switch to my Desktop folder. From there, I ran `activate .`, which activates an environment in the folder I am currently in. I can confirm this by typing `status`:
88 |
89 | ```julia
90 | (Desktop) pkg> status
91 |       Status `~/Desktop/Project.toml` (empty project)
92 | ```
93 | and you can see the newly created project is empty.
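
For completeness, everything we just did in Pkg mode can also be done programmatically through the `Pkg` standard library, which is handy in scripts and CI jobs. A small sketch:

```julia
using Pkg

Pkg.activate(".")   # same as `activate .` in Pkg mode
Pkg.add("Flux")     # same as `add Flux`
Pkg.status()        # same as `status`
```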
94 |
95 | Hopefully these examples give you a sense of how easy to use and powerful the Julia package manager is. You can read more about working with packages in the [Pkg.jl docs](https://pkgdocs.julialang.org/v1/getting-started/).
96 |
97 | ## The Julia Community {#sec:community}
98 |
99 | I would be remiss if I did not mention the Julia community itself as one of the core features of the ecosystem. Without such an incredible community, Julia would not be what it is today. But what makes the Julia community so great, you might ask?
100 |
--------------------------------------------------------------------------------
/files/DL-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/files/DL-icon.png
--------------------------------------------------------------------------------
/files/DLGitHubPreview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/files/DLGitHubPreview.png
--------------------------------------------------------------------------------
/files/bibliography.bib:
--------------------------------------------------------------------------------
1 | @article{bezanson2017julia,
2 | title={Julia: A fresh approach to numerical computing},
3 | author={Bezanson, Jeff and Edelman, Alan and Karpinski, Stefan and Shah, Viral B},
4 | journal={SIAM review},
5 | volume={59},
6 | number={1},
7 | pages={65--98},
8 | year={2017},
9 | publisher={SIAM}
10 | }
11 |
--------------------------------------------------------------------------------
/metadata.yml:
--------------------------------------------------------------------------------
1 | ---
2 | title: Deep Learning with Julia
3 | subtitle: using Flux.jl
4 | author:
5 | - Logan Kilpatrick
6 |
7 | html-license: CC BY-NC-SA 4.0
8 | tex-license: Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
9 |
10 | bibliography: files/bibliography.bib
11 |
12 | # Only used in PDF.
13 | titlepage-top: >
14 |   \begin{tabular}{l}
15 |   Logan Kilpatrick\\
16 |   The Julia Language\\
17 |   logan@julialang.org\\
18 |   \end{tabular}
19 |
20 | titlepage-bottom: >
21 |   \url{https://deeplearningwithjulia.com}
22 | ---
23 |
--------------------------------------------------------------------------------
/pandoc/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon.ico
--------------------------------------------------------------------------------
/pandoc/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon_package_v0.16/android-chrome-192x192.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/android-chrome-384x384.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon_package_v0.16/android-chrome-384x384.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon_package_v0.16/apple-touch-icon.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/browserconfig.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <browserconfig>
3 |     <msapplication>
4 |         <tile>
5 |             <square150x150logo src="/mstile-150x150.png"/>
6 |             <TileColor>#da532c</TileColor>
7 |         </tile>
8 |     </msapplication>
9 | </browserconfig>
10 |
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon_package_v0.16/favicon-16x16.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/mstile-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/logankilpatrick/DeepLearningWithJulia/e7527bba8b38da6643278707cfa6a11a83551ef4/pandoc/favicon_package_v0.16/mstile-150x150.png
--------------------------------------------------------------------------------
/pandoc/favicon_package_v0.16/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 |     "name": "",
3 |     "short_name": "",
4 |     "icons": [
5 |         {
6 |             "src": "/android-chrome-192x192.png",
7 |             "sizes": "192x192",
8 |             "type": "image/png"
9 |         },
10 |         {
11 |             "src": "/android-chrome-384x384.png",
12 |             "sizes": "384x384",
13 |             "type": "image/png"
14 |         }
15 |     ],
16 |     "theme_color": "#ffffff",
17 |     "background_color": "#ffffff",
18 |     "display": "standalone"
19 | }
20 |
--------------------------------------------------------------------------------
/src/DeepLearningWithJulia.jl:
--------------------------------------------------------------------------------
1 | module DeepLearningWithJulia
2 |
3 | # using Reexport: @reexport
4 | # @reexport begin
5 |
6 | using Books:
7 |     build_all,
8 |     gen
9 |
10 | include("ci.jl")
11 |
12 | end # module
13 |
14 |
--------------------------------------------------------------------------------
/src/ci.jl:
--------------------------------------------------------------------------------
1 | """
2 | build()
3 | This method is called during CI.
4 | """
5 | function build()
6 | println("Building Deep Learning with Julia")
7 | fail_on_error = true
8 | gen(; fail_on_error)
9 | build_all(; fail_on_error)
10 | end
--------------------------------------------------------------------------------