├── .dockerignore
├── .github
│   └── workflows
│       └── docker.yml
├── .gitignore
├── 0_demo.ipynb
├── 1_intro_to_scala.ipynb
├── 2.1_first_module.ipynb
├── 2.2_comb_logic.ipynb
├── 2.3_control_flow.ipynb
├── 2.4_sequential_logic.ipynb
├── 2.5_exercise.ipynb
├── 2.6_chiseltest.ipynb
├── 3.1_parameters.ipynb
├── 3.2_collections.ipynb
├── 3.2_interlude.ipynb
├── 3.3_higher-order_functions.ipynb
├── 3.4_functional_programming.ipynb
├── 3.5_object_oriented_programming.ipynb
├── 3.6_types.ipynb
├── 4.1_firrtl_ast.ipynb
├── 4.2_firrtl_ast_traversal.ipynb
├── 4.3_firrtl_common_idioms.ipynb
├── 4.4_firrtl_add_ops_per_module.ipynb
├── Dockerfile
├── Install.md
├── LICENSE
├── OneDayAgenda.md
├── README.md
├── binder
│   ├── apt.txt
│   └── postBuild
├── images
│   ├── Sorter4.png
│   ├── arbiter.png
│   ├── chisel_1024.png
│   ├── circuit.png
│   ├── counter2.svg
│   ├── counter3.svg
│   ├── demo_fir_filter.svg
│   ├── fir.jpg
│   ├── fir_filter.png
│   ├── fsm.png
│   ├── integrator.svg
│   ├── lfsr4.svg
│   ├── lfsr4blocks.svg
│   ├── lfsr4combinational.svg
│   ├── myTicker.svg
│   ├── playbutton.png
│   ├── shifter4.svg
│   └── shifter4gated.svg
├── runtest.py
└── source
    ├── custom.js
    └── load-ivy.sc
/.dockerignore:
--------------------------------------------------------------------------------
1 | /Dockerfile
2 | /diagrams/*
3 | /.ipynb_checkpoints/
4 |
--------------------------------------------------------------------------------
/.github/workflows/docker.yml:
--------------------------------------------------------------------------------
1 | name: Publish Docker image
2 | on:
3 |   push:
4 |     branches: [ master ]
5 |   pull_request:
6 |
7 | jobs:
8 |   docker:
9 |     name: Build Docker Image and Publish (only on push)
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |
13 |       - name: Check out the repo
14 |         uses: actions/checkout@v2
15 |
16 |       # Steps required by docker/build-push-action@v2
17 |       - name: Set up QEMU
18 |         uses: docker/setup-qemu-action@v1
19 |       - name: Set up Docker Buildx
20 |         uses: docker/setup-buildx-action@v1
21 |       - name: Login to DockerHub
22 |         uses: docker/login-action@v1
23 |         if: github.event_name == 'push'
24 |         with:
25 |           username: ${{ secrets.DOCKERHUB_USERNAME }}
26 |           password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
27 |
28 |       - name: Build (always) and Publish (only on push)
29 |         uses: docker/build-push-action@v2
30 |         with:
31 |           tags: ucbbar/chisel-bootcamp:latest
32 |           push: ${{ github.event_name == 'push' }}
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.idea
2 | /.ipynb_checkpoints
3 | /test_run_dir
4 | /.DS_Store
5 | /*/.DS_Store
6 | /*/*/.DS_Store
7 | *.anno
8 | *.fir
9 | *.v
10 | *.xml
11 |
12 | # Jupyter-scala scripts
13 | coursier
14 | almond
15 |
--------------------------------------------------------------------------------
/0_demo.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# WARNING. THIS FILE IS NOT PART OF THE BOOTCAMP \n",
15 | "# TEACHING STEPS. IT IS A MID TO END LEVEL EXAMPLE\n",
16 | "# OF THE THINGS YOU WILL LEARN. IF YOU ARE DOING THE\n",
17 | "# BOOTCAMP TO LEARN CHISEL DON'T START HERE\n",
18 | "# START AT [Introduction to Scala](1_intro_to_scala.ipynb)\n",
19 | "\n",
20 | "# Chisel Demo\n",
21 | "**Next: [Introduction to Scala](1_intro_to_scala.ipynb)**\n",
22 | "\n",
23 | "Welcome! Perhaps you're an interested student who heard the name \"Chisel\" tossed about, or maybe you're a seasoned hardware design veteran who has been tasked by your manager to explore Chisel as a new HDL alternative. Either way if you are new to Chisel, you want to figure out as fast as possible what all the fuss is about. Look no futher - let's see what Chisel has to offer!\n",
24 | "\n",
25 | "## Setup\n",
26 | "Before we start, we need to download and imports the dependencies needed for the demo. \n",
27 | "\n",
28 | "**Please run the following two cell blocks by either pressing SHIFT+ENTER on your keyboard or the Run button in the menu**."
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {
35 | "scrolled": false
36 | },
37 | "outputs": [],
38 | "source": [
39 | "val path = System.getProperty(\"user.dir\") + \"/source/load-ivy.sc\"\n",
40 | "interp.load.module(ammonite.ops.Path(java.nio.file.FileSystems.getDefault().getPath(path)))"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": null,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "import chisel3._\n",
50 | "import chisel3.util._\n",
51 | "import chisel3.iotesters.{ChiselFlatSpec, Driver, PeekPokeTester}"
52 | ]
53 | },
54 | {
55 | "cell_type": "markdown",
56 | "metadata": {
57 | "collapsed": true
58 | },
59 | "source": [
60 | "## Hardware Generators: Type-Safe Meta-Programming for RTL\n",
61 | "\n",
62 | "All hardware description languages support writing single instances of an RTL design - Chisel is no different.\n",
63 | "In fact, most Verilog/VHDL digital logic designs can be directly transcribed into Chisel!\n",
64 | "While Chisel provides other awesome features that we will get to, we want to emphasize that users switching to Chisel will retain the exact same degree of control over their design as any other hardware language.\n",
65 | "\n",
66 | "Take the following example of a 3-point moving average implemented in the style of a FIR filter.\n",
67 | "\n",
68 | "\n",
69 | "\n",
70 | "Chisel provides similar base primitives as synthesizable Verilog and *can* be used as such! Run next cell to declare our Chisel module."
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "execution_count": null,
76 | "metadata": {},
77 | "outputs": [],
78 | "source": [
79 | "// 3-point moving average implemented in the style of a FIR filter\n",
80 | "class MovingAverage3(bitWidth: Int) extends Module {\n",
81 | " val io = IO(new Bundle {\n",
82 | " val in = Input(UInt(bitWidth.W))\n",
83 | " val out = Output(UInt(bitWidth.W))\n",
84 | " })\n",
85 | "\n",
86 | " val z1 = RegNext(io.in) // Create a register whose input is connected to the argument io.in\n",
87 | " val z2 = RegNext(z1) // Create a register whose input is connected to the argument z1\n",
88 | "\n",
89 | " io.out := (io.in * 1.U) + (z1 * 1.U) + (z2 * 1.U) // `1.U` is an unsigned literal with value 1\n",
90 | "}"
91 | ]
92 | },
93 | {
94 | "cell_type": "markdown",
95 | "metadata": {},
96 | "source": [
97 | "After defining `class MovingAverage3`, let's instantiate it and take a look at its structure:"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": null,
103 | "metadata": {
104 | "scrolled": true
105 | },
106 | "outputs": [],
107 | "source": [
108 | "// same 3-point moving average filter as before\n",
109 | "visualize(() => new MovingAverage3(8))"
110 | ]
111 | },
112 | {
113 | "cell_type": "markdown",
114 | "metadata": {},
115 | "source": [
116 | "In this visualization of the Chisel instance, the inputs on the left, and the z1 and z2 registers in gold. Both registers and io_in are multiplied by their coefficients and which are then added successively. The `tail` and `bits` elements are used to keep the additions from growing.\n",
117 | "\n",
118 | "You may now ask: \"Oh well and good - you can do stuff in Verilog in Chisel, but then why would I want to use Chisel?\"\n",
119 | "\n",
120 | "We are so glad you asked! The real power of Chisel comes from the ability to create **generators, not instances**. Suppose instead of only a `MovingAverage3` module, we wanted to create a generic `FIRFilter` module that is parameterized by a list of coefficients.\n",
121 | "\n",
122 | "Below we have rewritten `MovingAverage3` to accept into a sequence of coefficients. The number of coefficients will determine the size of the filter."
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": null,
128 | "metadata": {},
129 | "outputs": [],
130 | "source": [
131 | "// Generalized FIR filter parameterized by the convolution coefficients\n",
132 | "class FirFilter(bitWidth: Int, coeffs: Seq[UInt]) extends Module {\n",
133 | " val io = IO(new Bundle {\n",
134 | " val in = Input(UInt(bitWidth.W))\n",
135 | " val out = Output(UInt())\n",
136 | " })\n",
137 | " // Create the serial-in, parallel-out shift register\n",
138 | " val zs = Reg(Vec(coeffs.length, UInt(bitWidth.W)))\n",
139 | " zs(0) := io.in\n",
140 | " for (i <- 1 until coeffs.length) {\n",
141 | " zs(i) := zs(i-1)\n",
142 | " }\n",
143 | "\n",
144 | " // Do the multiplies\n",
145 | " val products = VecInit.tabulate(coeffs.length)(i => zs(i) * coeffs(i))\n",
146 | "\n",
147 | " // Sum up the products\n",
148 | " io.out := products.reduce(_ +& _)\n",
149 | "}"
150 | ]
151 | },
152 | {
153 | "cell_type": "markdown",
154 | "metadata": {},
155 | "source": [
156 | "Now by changing our `coeffs` parameters during instantiation, our `FIRFilter` module can be used to instantiate an endless number of different hardware modules! Below we create three different instances of `FIRFiler`"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": null,
162 | "metadata": {},
163 | "outputs": [],
164 | "source": [
165 | "// same 3-point moving average filter as before\n",
166 | "visualize(() => new FirFilter(8, Seq(1.U, 1.U, 1.U)))"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "metadata": {},
173 | "outputs": [],
174 | "source": [
175 | "// 1-cycle delay as a FIR filter\n",
176 | "visualize(() => new FirFilter(8, Seq(0.U, 1.U)))"
177 | ]
178 | },
179 | {
180 | "cell_type": "code",
181 | "execution_count": null,
182 | "metadata": {},
183 | "outputs": [],
184 | "source": [
185 | "// 5-point FIR filter with a triangle impulse response\n",
186 | "visualize(() => new FirFilter(8, Seq(1.U, 2.U, 3.U, 2.U, 1.U)))"
187 | ]
188 | },
189 | {
190 | "cell_type": "markdown",
191 | "metadata": {},
192 | "source": [
193 | "Without this powerful parameterization, we would need many more module definitions, likely one for each of these FIR filters. Ideally, we want our generators to be (1) composable, (2) powerful, and (3) enable fine-grained control over the generated design.\n",
194 | "\n",
195 | "The benefits of Chisel are in how you use it, not in the language itself.\n",
196 | "If you decide to write instances instead of generators, you will see fewer advantages of Chisel over Verilog.\n",
197 | "But if you take the time to learn how to write generators, then the power of Chisel will become apparent and you will realize you can never go back to writing Verilog.\n",
198 | "Learning to write generators is difficult, but we hope this tutorial will pave the way for you to become a better hardware designer, programmer, and thinker!"
199 | ]
200 | },
201 | {
202 | "cell_type": "markdown",
203 | "metadata": {},
204 | "source": [
205 | "---\n",
206 | "# All done!\n",
207 | "\n",
208 | "[Return to the top.](#top)"
209 | ]
210 | }
211 | ],
212 | "metadata": {
213 | "anaconda-cloud": {},
214 | "kernelspec": {
215 | "display_name": "Scala",
216 | "language": "scala",
217 | "name": "scala"
218 | },
219 | "language_info": {
220 | "codemirror_mode": "text/x-scala",
221 | "file_extension": ".scala",
222 | "mimetype": "text/x-scala",
223 | "name": "scala",
224 | "nbconvert_exporter": "script",
225 | "version": "2.12.8"
226 | }
227 | },
228 | "nbformat": 4,
229 | "nbformat_minor": 2
230 | }
231 |
--------------------------------------------------------------------------------
/2.1_first_module.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {
13 | "collapsed": true
14 | },
15 | "source": [
16 | "# Module 2.1: Your First Chisel Module\n",
17 | "**Prev: [Introduction to Scala](1_intro_to_scala.ipynb)** \n",
18 | "**Next: [Combinational Logic](2.2_comb_logic.ipynb)**\n",
19 | "\n",
20 | "## Motivation\n",
21 | "Now that you are familiar with Scala, let's start carving out some hardware! Chisel stands for **C**onstructing **H**ardware **I**n a **S**cala **E**mbedded **L**anguage. That means it is a DSL in Scala, allowing you to take advantage of both Scala and Chisel programming within the same code. It is important to understand which code is \"Scala\" and which code is \"Chisel\", but we will discuss that more later. For now, think of Chisel and the code in Module 2 as a better way to write Verilog. This module throws an entire Chisel `Module` and tester at you. Just get the gist of it for now. You'll see plenty more examples later."
22 | ]
23 | },
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {},
27 | "source": [
28 | "## Setup\n",
29 | "The following cell downloads the dependencies needed for Chisel. You will see it in all future notebooks. **Run this cell now**."
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": null,
35 | "metadata": {},
36 | "outputs": [],
37 | "source": [
38 | "val path = System.getProperty(\"user.dir\") + \"/source/load-ivy.sc\"\n",
39 | "interp.load.module(ammonite.ops.Path(java.nio.file.FileSystems.getDefault().getPath(path)))"
40 | ]
41 | },
42 | {
43 | "cell_type": "markdown",
44 | "metadata": {},
45 | "source": [
46 | "As mentioned in the last module, these statements are needed to import Chisel. **Run this cell now** before running any future code blocks."
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "metadata": {},
53 | "outputs": [],
54 | "source": [
55 | "import chisel3._\n",
56 | "import chisel3.util._\n",
57 | "import chisel3.tester._\n",
58 | "import chisel3.tester.RawTester.test\n",
59 | "import dotvisualizer._"
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "---\n",
67 | "# Your First Module\n",
68 | "This section will present your first hardware module, a test case, and how to run it. It will contain many things that you will not understand, and that is OK. We want you to take away the broad strokes, so you can continually return to this complete and working example to reinforce what you've learned.\n",
69 | "\n",
70 | "**Example: A Module** \n",
71 | "Like Verilog, we can declare module definitions in Chisel. The following example is a Chisel `Module`, `Passthrough`, that has one 4-bit input, `in`, and one 4-bit output, `out`. The module combinationally connects `in` and `out`, so `in` drives `out`."
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "// Chisel Code: Declare a new module definition\n",
81 | "class Passthrough extends Module {\n",
82 | " val io = IO(new Bundle {\n",
83 | " val in = Input(UInt(4.W))\n",
84 | " val out = Output(UInt(4.W))\n",
85 | " })\n",
86 | " io.out := io.in\n",
87 | "}"
88 | ]
89 | },
90 | {
91 | "cell_type": "markdown",
92 | "metadata": {},
93 | "source": [
94 | "There's a lot here! The following explains how to think of each line in terms of the hardware we are describing.\n",
95 | "\n",
96 | "```scala\n",
97 | "class Passthrough extends Module {\n",
98 | "```\n",
99 | "We declare a new module called `Passthrough`. `Module` is a built-in Chisel class that all hardware modules must extend.\n",
100 | "\n",
101 | "```scala \n",
102 | "val io = IO(...)\n",
103 | "```\n",
104 | "We declare all our input and output ports in a special `io` `val`. It must be called `io` and be an `IO` object or instance, which requires something of the form `IO(_instantiated_bundle_)`.\n",
105 | "\n",
106 | "```scala\n",
107 | "new Bundle {\n",
108 | " val in = Input(...)\n",
109 | " val out = Output(...)\n",
110 | "}\n",
111 | "```\n",
112 | "We declare a new hardware struct type (Bundle) that contains some named signals `in` and `out` with directions Input and Output, respectively.\n",
113 | "\n",
114 | "```scala\n",
115 | "UInt(4.W)\n",
116 | "```\n",
117 | "We declare a signal's hardware type. In this case, it is an unsigned integer of width 4.\n",
118 | "\n",
119 | "```scala\n",
120 | "io.out := io.in\n",
121 | "```\n",
122 | "We connect our input port to our output port, such that `io.in` *drives* `io.out`. Note that the `:=` operator is a ***Chisel*** operator that indicates that the right-hand signal drives the left-hand signal. It is a directioned operator.\n",
123 | "\n",
124 | "The neat thing about hardware construction languages (HCLs) is that we can use the underlying programming language as a scripting language. For example, after declaring our Chisel module, we then use Scala to call the Chisel compiler to translate Chisel `Passthrough` into Verilog `Passthrough`. This process is called ***elaboration***."
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": null,
130 | "metadata": {},
131 | "outputs": [],
132 | "source": [
133 | "// Scala Code: Elaborate our Chisel design by translating it to Verilog\n",
134 | "// Don't worry about understanding this code; it is very complicated Scala\n",
135 | "println(getVerilog(new Passthrough))"
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 | "**Example: A Module Generator** \n",
143 | "If we apply what we learned about Scala to this example, we can see that a Chisel module is implemented as a Scala class. Just like any other Scala class, we could make a Chisel module take some construction parameters. In this case, we make a new class `PassthroughGenerator` which will accept an integer `width` that dictates the widths of its input and output ports:"
144 | ]
145 | },
146 | {
147 | "cell_type": "code",
148 | "execution_count": null,
149 | "metadata": {},
150 | "outputs": [],
151 | "source": [
152 | "// Chisel Code, but pass in a parameter to set widths of ports\n",
153 | "class PassthroughGenerator(width: Int) extends Module { \n",
154 | " val io = IO(new Bundle {\n",
155 | " val in = Input(UInt(width.W))\n",
156 | " val out = Output(UInt(width.W))\n",
157 | " })\n",
158 | " io.out := io.in\n",
159 | "}\n",
160 | "\n",
161 | "// Let's now generate modules with different widths\n",
162 | "println(getVerilog(new PassthroughGenerator(10)))\n",
163 | "println(getVerilog(new PassthroughGenerator(20)))"
164 | ]
165 | },
166 | {
167 | "cell_type": "markdown",
168 | "metadata": {},
169 | "source": [
170 | "Notice that the generated Verilog uses different bitwidths for the input/output depending on the value assigned to the `width` parameter. Let's dig into how this works. Because Chisel Modules are normal Scala classes, we can use the power of Scala's class constructors to parameterize the elaboration of our design.\n",
171 | "\n",
172 | "You may notice that this parameterization is enabled by *Scala*, not *Chisel*; Chisel has no extra APIs for parameterization, but a designer can simply leverage Scala features to parameterize his/her designs.\n",
173 | "\n",
174 | "Because `PassthroughGenerator` no longer describes a single Module, but instead describes a family of modules parameterized by `width`, we refer to this `Passthrough` as a ***generator***."
175 | ]
176 | },
177 | {
178 | "cell_type": "markdown",
179 | "metadata": {},
180 | "source": [
181 | "---\n",
182 | "# Testing Your Hardware\n",
183 | "\n",
184 | "No hardware module or generator should be complete without a tester. Chisel has built-in test features that you will explore throughout this bootcamp. The following example is a Chisel test harness that passes values to an instance of `Passthrough`'s input port `in`, and checks that the same value is seen on the output port `out`.\n",
185 | "\n",
186 | "**Example: A Tester** \n",
187 | "There is some advanced Scala going on here. However, there is no need for you to understand anything except the `poke` and `expect` commands. You can think of the rest of the code as simply boilerplate to write these simple tests."
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": null,
193 | "metadata": {},
194 | "outputs": [],
195 | "source": [
196 | "// Scala Code: `test` runs the unit test. \n",
197 | "// test takes a user Module and has a code block that applies pokes and expects to the \n",
198 | "// circuit under test (c)\n",
199 | "test(new Passthrough()) { c =>\n",
200 | " c.io.in.poke(0.U) // Set our input to value 0\n",
201 | " c.io.out.expect(0.U) // Assert that the output correctly has 0\n",
202 | " c.io.in.poke(1.U) // Set our input to value 1\n",
203 | " c.io.out.expect(1.U) // Assert that the output correctly has 1\n",
204 | " c.io.in.poke(2.U) // Set our input to value 2\n",
205 | " c.io.out.expect(2.U) // Assert that the output correctly has 2\n",
206 | "}\n",
207 | "println(\"SUCCESS!!\") // Scala Code: if we get here, our tests passed!\n"
208 | ]
209 | },
210 | {
211 | "cell_type": "markdown",
212 | "metadata": {},
213 | "source": [
214 | "What's going on? The test accepts a `Passthrough` module, assigns values to the module's inputs, and checks its outputs. To set an input, we call `poke`. To check an output, we call `expect`. If we don't want to compare the output to an expected value (no assertion), we can `peek` the output instead.\n",
215 | "\n",
216 | "If all `expect` statements are true, then our boilerplate code will return pass.\n",
217 | "\n",
218 | ">Note that the `poke` and `expect` use chisel hardware literal notation. Both operations expect literals of the correct type.\n",
219 | "If `poke`ing a `UInt()` you must supply a `UInt` literal (example: `c.io.in.poke(10.U)`, likewise if the input is a `Bool()` the `poke` would expect either `true.B` or `false.B`.\n",
220 | "\n"
221 | ]
222 | },
223 | {
224 | "cell_type": "markdown",
225 | "metadata": {},
226 | "source": [
227 | "**Exercise: Writing Your Own Testers** \n",
228 | "Write and execute two tests, one that tests `PassthroughGenerator` for a width of 10 and a second that tests `PassthroughGenerator` for a width of 20. Check at least two values for each: zero and the maximum value supported by the specified width. Note that the triple question mark has a special meaning in Scala. You may see it frequently in these bootcamp exercises. Running code with the `???` will produce the `NotImplementedError`. Replace `???` with your own code."
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "metadata": {},
235 | "outputs": [],
236 | "source": [
237 | "// Test with width 10\n",
238 | "\n",
239 | "test(???) { c =>\n",
240 | " ???\n",
241 | "}\n",
242 | "\n",
243 | "// Test with width 20\n",
244 | "\n",
245 | "test(???) { c =>\n",
246 | " ???\n",
247 | "}\n",
248 | "\n",
249 | "println(\"SUCCESS!!\") // Scala Code: if we get here, our tests passed!"
250 | ]
251 | },
252 | {
253 | "cell_type": "markdown",
254 | "metadata": {},
255 | "source": [
256 | "
"
387 | ]
388 | },
389 | {
390 | "cell_type": "markdown",
391 | "metadata": {},
392 | "source": [
393 | "**Exercise: Parameterized Adder (Optional)** \n",
394 | "This optional exercise exposes you to one of the most powerful features of Chisel, it's parameterization capabilities. To demonstrate this, we'll construct a parameterized adder that can either saturate the output when overflow occurs, or truncate the results (i.e. wrap around).\n",
395 | "\n",
396 | "First, look at the `Module` below. The parameter we pass into it is called `saturate` and has type *Scala* `Boolean`. This is not a Chisel `Bool`. So, we're not creating a single hardware adder that can either saturate or truncate, but rather we're creating a *generator* that produces either a saturating hardware adder *or* a truncating hardware adder. The decision is made at compile time.\n",
397 | "\n",
398 | "Next, notice the inputs and outputs are all 4-bit `UInt`s. Chisel has built-in width inferencing, and if you look at the [cheatsheet](https://github.com/freechipsproject/chisel-cheatsheet/releases/latest/download/chisel_cheatsheet.pdf), you'll see that the bitwidth of a normal summation is equal to the maximum bitwidth of the two inputs. This means that\n",
399 | "\n",
400 | "```scala\n",
401 | "val sum = io.in_a + io.in_b\n",
402 | "```\n",
403 | "\n",
404 | "will make `sum` a 4-bit wire, and the value will be the truncated result for 4-bit inputs. To check if the summation should saturate, you need to place the result in a 5-bit wire. This can be done with the `+&` summation, as seen on the cheatsheet.\n",
405 | "\n",
406 | "```scala\n",
407 | "val sum = io.in_a +& io.in_b\n",
408 | "```\n",
409 | "\n",
410 | "Finally, note that connecting a 4-bit `UInt` wire to a 5-bit `UInt` wire will truncate the MSB by default. You can use this to easily truncate the 5-bit sum for the non-saturating adder."
411 | ]
412 | },
413 | {
414 | "cell_type": "code",
415 | "execution_count": null,
416 | "metadata": {},
417 | "outputs": [],
418 | "source": [
419 | "class ParameterizedAdder(saturate: Boolean) extends Module {\n",
420 | " val io = IO(new Bundle {\n",
421 | " val in_a = Input(UInt(4.W))\n",
422 | " val in_b = Input(UInt(4.W))\n",
423 | " val out = Output(UInt(4.W))\n",
424 | " })\n",
425 | "\n",
426 | " ???\n",
427 | "}\n",
428 | "\n",
429 | "for (saturate <- Seq(true, false)) {\n",
430 | " test(new ParameterizedAdder(saturate)) { c =>\n",
431 | " // 100 random tests\n",
432 | " val cycles = 100\n",
433 | " import scala.util.Random\n",
434 | " import scala.math.min\n",
435 | " for (i <- 0 until cycles) {\n",
436 | " val in_a = Random.nextInt(16)\n",
437 | " val in_b = Random.nextInt(16)\n",
438 | " c.io.in_a.poke(in_a.U)\n",
439 | " c.io.in_b.poke(in_b.U)\n",
440 | " if (saturate) {\n",
441 | " c.io.out.expect(min(in_a + in_b, 15).U)\n",
442 | " } else {\n",
443 | " c.io.out.expect(((in_a + in_b) % 16).U)\n",
444 | " }\n",
445 | " }\n",
446 | " \n",
447 | " // ensure we test saturation vs. truncation\n",
448 | " c.io.in_a.poke(15.U)\n",
449 | " c.io.in_b.poke(15.U)\n",
450 | " if (saturate) {\n",
451 | " c.io.out.expect(15.U)\n",
452 | " } else {\n",
453 | " c.io.out.expect(14.U)\n",
454 | " }\n",
455 | " }\n",
456 | "}\n",
457 | "println(\"SUCCESS!!\")"
458 | ]
459 | },
460 | {
461 | "cell_type": "markdown",
462 | "metadata": {},
463 | "source": [
464 | "
"
199 | ]
200 | },
201 | {
202 | "cell_type": "markdown",
203 | "metadata": {},
204 | "source": [
205 | "---\n",
206 | "# FIR Filter Generator\n",
207 | "\n",
208 | "For this module, we'll be using a slightly modified example from [Module 3.2: Generators: Collection](3.2_collections.ipynb).\n",
209 | "If you haven't started Module 3.2, don't worry.\n",
210 | "You'll learn about the details of how `MyManyDynamicElementVecFir` works, but the basic idea is that it is a FIR filter generator.\n",
211 | "\n",
212 | "The generator has one parameter: length.\n",
213 | "That parameter dictates how many taps the filter has, and the taps are inputs to the hardware `Module`.\n",
214 | "\n",
215 | "The generator has 3 inputs:\n",
216 | "* in, the input to the filter\n",
217 | "* valid, a boolean that says when the input is valid\n",
218 | "* consts, a vector for all the taps\n",
219 | "\n",
220 | "and 1 output:\n",
221 | "* out, the filtered input\n",
222 | "\n",
223 | ""
224 | ]
225 | },
226 | {
227 | "cell_type": "code",
228 | "execution_count": null,
229 | "metadata": {},
230 | "outputs": [],
231 | "source": [
232 | "class MyManyDynamicElementVecFir(length: Int) extends Module {\n",
233 | " val io = IO(new Bundle {\n",
234 | " val in = Input(UInt(8.W))\n",
235 | " val valid = Input(Bool())\n",
236 | " val out = Output(UInt(8.W))\n",
237 | " val consts = Input(Vec(length, UInt(8.W)))\n",
238 | " })\n",
239 | " \n",
240 | " // Such concision! You'll learn what all this means later.\n",
241 | " val taps = Seq(io.in) ++ Seq.fill(io.consts.length - 1)(RegInit(0.U(8.W)))\n",
242 | " taps.zip(taps.tail).foreach { case (a, b) => when (io.valid) { b := a } }\n",
243 | "\n",
244 | " io.out := taps.zip(io.consts).map { case (a, b) => a * b }.reduce(_ + _)\n",
245 | "}\n",
246 | "\n",
247 | "visualize(() => new MyManyDynamicElementVecFir(4))"
248 | ]
249 | },
250 | {
251 | "cell_type": "markdown",
252 | "metadata": {},
253 | "source": [
254 | "---\n",
255 | "# DspBlock\n",
256 | "\n",
257 | "Integrating DSP components into a larger system can be challenging and error prone.\n",
258 | "The [rocket section of the dsptools repository](https://github.com/ucb-bar/dsptools/tree/master/rocket) consists of useful generators that should help with such tasks.\n",
259 | "\n",
260 | "One of the core abstractions is the notion of a `DspBlock`.\n",
261 | "A `DspBlock` has:\n",
262 | "* AXI-4 Stream input and output\n",
263 | "* Memory-mapped status and control (in this example, AXI4)\n",
264 | "\n",
265 | "\n",
266 | "\n",
267 | "`DspBlock`s use diplomatic interfaces from rocket.\n",
268 | "[This site](https://www.lowrisc.org/docs/diplomacy/) has a good overview of the basic of diplomacy, but don't worry too much about how it's working for this example.\n",
269 | "Diplomacy really shines when you're connecting a lot of different blocks together to form a complex SoC.\n",
270 | "In this example, we're just making a single peripheral.\n",
271 | "The `StandaloneBlock` traits are mixed in to make diplomatic interfaces work as top-level IOs.\n",
272 | "You only need them when the `DspBlock` is being used as a top level interface without any diplomatic connections.\n",
273 | "\n",
274 | "The following code wraps the FIR filter in AXI4 interfaces.\n"
275 | ]
276 | },
277 | {
278 | "cell_type": "code",
279 | "execution_count": null,
280 | "metadata": {},
281 | "outputs": [],
282 | "source": [
283 | "import dspblocks._\n",
284 | "import freechips.rocketchip.amba.axi4._\n",
285 | "import freechips.rocketchip.amba.axi4stream._\n",
286 | "import freechips.rocketchip.config._\n",
287 | "import freechips.rocketchip.diplomacy._\n",
288 | "import freechips.rocketchip.regmapper._\n",
289 | "\n",
290 | "//\n",
291 | "// Base class for all FIRBlocks.\n",
292 | "// This can be extended to make TileLink, AXI4, APB, AHB, etc. flavors of the FIR filter\n",
293 | "//\n",
294 | "abstract class FIRBlock[D, U, EO, EI, B <: Data](val nFilters: Int, val nTaps: Int)(implicit p: Parameters)\n",
295 | "// HasCSR means that the memory interface will be using the RegMapper API to define status and control registers\n",
296 | "extends DspBlock[D, U, EO, EI, B] with HasCSR {\n",
297 | " // diplomatic node for the streaming interface\n",
298 | " // identity node means the output and input are parameterized to be the same\n",
299 | " val streamNode = AXI4StreamIdentityNode()\n",
300 | " \n",
301 | " // define the what hardware will be elaborated\n",
302 | " lazy val module = new LazyModuleImp(this) {\n",
303 | " // get streaming input and output wires from diplomatic node\n",
304 | " val (in, _) = streamNode.in(0)\n",
305 | " val (out, _) = streamNode.out(0)\n",
306 | "\n",
307 | " require(in.params.n >= nFilters,\n",
308 | " s\"\"\"AXI-4 Stream port must be big enough for all \n",
309 | " |the filters (need $nFilters,, only have ${in.params.n})\"\"\".stripMargin)\n",
310 | "\n",
311 | " // make registers to store taps\n",
312 | " val taps = Reg(Vec(nFilters, Vec(nTaps, UInt(8.W))))\n",
313 | "\n",
314 | " // memory map the taps, plus the first address is a read-only field that says how many filter lanes there are\n",
315 | " val mmap = Seq(\n",
316 | " RegField.r(64, nFilters.U, RegFieldDesc(\"nFilters\", \"Number of filter lanes\"))\n",
317 | " ) ++ taps.flatMap(_.map(t => RegField(8, t, RegFieldDesc(\"tap\", \"Tap\"))))\n",
318 | "\n",
319 | " // generate the hardware for the memory interface\n",
320 | " // in this class, regmap is abstract (unimplemented). mixing in something like AXI4HasCSR or TLHasCSR\n",
321 | " // will define regmap for the particular memory interface\n",
322 | " regmap(mmap.zipWithIndex.map({case (r, i) => i * 8 -> Seq(r)}): _*)\n",
323 | "\n",
324 | " // make the FIR lanes and connect inputs and taps\n",
325 | " val outs = for (i <- 0 until nFilters) yield {\n",
326 | " val fir = Module(new MyManyDynamicElementVecFir(nTaps))\n",
327 | " \n",
328 | " fir.io.in := in.bits.data((i+1)*8, i*8)\n",
329 | " fir.io.valid := in.valid && out.ready\n",
330 | " fir.io.consts := taps(i) \n",
331 | " fir.io.out\n",
332 | " }\n",
333 | "\n",
334 | " val output = if (outs.length == 1) {\n",
335 | " outs.head\n",
336 | " } else {\n",
337 | " outs.reduce((x: UInt, y: UInt) => Cat(y, x))\n",
338 | " }\n",
339 | "\n",
340 | " out.bits.data := output\n",
341 | " in.ready := out.ready\n",
342 | " out.valid := in.valid\n",
343 | " }\n",
344 | "}\n",
345 | "\n",
346 | "// make AXI4 flavor of FIRBlock\n",
347 | "abstract class AXI4FIRBlock(nFilters: Int, nTaps: Int)(implicit p: Parameters) extends FIRBlock[AXI4MasterPortParameters, AXI4SlavePortParameters, AXI4EdgeParameters, AXI4EdgeParameters, AXI4Bundle](nFilters, nTaps) with AXI4DspBlock with AXI4HasCSR {\n",
348 | " override val mem = Some(AXI4RegisterNode(\n",
349 | " AddressSet(0x0, 0xffffL), beatBytes = 8\n",
350 | " ))\n",
351 | "}\n",
352 | "\n",
353 | "// running the code below will show what firrtl is generated\n",
354 | "// note that LazyModules aren't really chisel modules- you need to call \".module\" on them when invoking the chisel driver\n",
355 | "// also note that AXI4StandaloneBlock is mixed in- if you forget it, you will get weird diplomacy errors because the memory\n",
356 | "// interface expects a master and the streaming interface expects to be connected. AXI4StandaloneBlock will add top level IOs\n",
357 | "// println(chisel3.Driver.emit(() => LazyModule(new AXI4FIRBlock(1, 8)(Parameters.empty) with AXI4StandaloneBlock).module))"
358 | ]
359 | },
360 | {
361 | "cell_type": "markdown",
362 | "metadata": {},
363 | "source": [
364 | "## Testing\n",
365 | "\n",
366 | "Testing `DspBlock`s is a little different.\n",
367 | "Now we're dealing with memory interfaces and `LazyModule`s.\n",
368 | "dsptools has some features that help test `DspBlock`s.\n",
369 | "\n",
370 | "One important feature is `MemMasterModel`.\n",
371 | "The trait defines functions like `memReadWord` and `memWriteWord`- generic functions for generating memory traffic.\n",
372 | "This allows you to write one generic test that can be specialized to the memory interface you are using- for example, you write one test and then specialize it for the TileLink and AXI4 interfaces.\n",
373 | "\n",
374 | "The code below tests the `FIRBlock` this way."
375 | ]
376 | },
377 | {
378 | "cell_type": "code",
379 | "execution_count": null,
380 | "metadata": {},
381 | "outputs": [],
382 | "source": [
383 | "import dsptools.tester.MemMasterModel\n",
384 | "import freechips.rocketchip.amba.axi4\n",
385 | "\n",
386 | "abstract class FIRBlockTester[D, U, EO, EI, B <: Data](c: FIRBlock[D, U, EO, EI, B]) extends PeekPokeTester(c.module) with MemMasterModel {\n",
387 | " // check that address 0 is the number of filters\n",
388 | " require(memReadWord(0) == c.nFilters)\n",
389 | " // write 1 to all the taps\n",
390 | " for (i <- 0 until c.nFilters * c.nTaps) {\n",
391 | " memWriteWord(8 + i * 8, 1)\n",
392 | " }\n",
393 | "}\n",
394 | "\n",
395 | "// specialize the generic tester for axi4\n",
396 | "class AXI4FIRBlockTester(c: AXI4FIRBlock with AXI4StandaloneBlock) extends FIRBlockTester(c) with AXI4MasterModel {\n",
397 | " def memAXI = c.ioMem.get\n",
398 | "}\n",
399 | "\n",
400 | "// invoking testers on lazymodules is a little strange.\n",
401 | "// note that the firblocktester takes a lazymodule, not a module (it calls .module in \"extends PeekPokeTester()\").\n",
402 | "val lm = LazyModule(new AXI4FIRBlock(1, 8)(Parameters.empty) with AXI4StandaloneBlock)\n",
403 | "chisel3.iotesters.Driver(() => lm.module) { _ => new AXI4FIRBlockTester(lm) }"
404 | ]
405 | },
406 | {
407 | "cell_type": "markdown",
408 | "metadata": {},
409 | "source": [
410 | "**Exercise: TileLink** \n",
411 | "\n",
412 | "Add a version of `FIRBlock` that uses TileLink for its memory interconnect, and extend the `FIRBlockTester` to use TileLink."
413 | ]
414 | },
415 | {
416 | "cell_type": "markdown",
417 | "metadata": {},
418 | "source": [
419 | "---\n",
420 | "# You're done!\n",
421 | "\n",
422 | "[Return to the top.](#top)"
423 | ]
424 | }
425 | ],
426 | "metadata": {
427 | "kernelspec": {
428 | "display_name": "Scala",
429 | "language": "scala",
430 | "name": "scala"
431 | },
432 | "language_info": {
433 | "codemirror_mode": "text/x-scala",
434 | "file_extension": ".scala",
435 | "mimetype": "text/x-scala",
436 | "name": "scala",
437 | "nbconvert_exporter": "script",
438 | "version": "2.12.10"
439 | }
440 | },
441 | "nbformat": 4,
442 | "nbformat_minor": 2
443 | }
444 |
--------------------------------------------------------------------------------
/3.3_higher-order_functions.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Module 3.3: Higher-Order Functions\n",
15 | "**Prev: [Interlude: Chisel Standard Library](3.2_interlude.ipynb)** \n",
16 | "**Next: [Functional Programming](3.4_functional_programming.ipynb)**\n",
17 | "\n",
18 | "## Motivation\n",
19 | "Those pesky `for` loops in the previous module are verbose and defeat the purpose of functional programming! In this module, your generators will get funct-ky.\n",
20 | "\n",
21 | "## Setup"
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": null,
27 | "metadata": {},
28 | "outputs": [],
29 | "source": [
30 | "val path = System.getProperty(\"user.dir\") + \"/source/load-ivy.sc\"\n",
31 | "interp.load.module(ammonite.ops.Path(java.nio.file.FileSystems.getDefault().getPath(path)))"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "import chisel3._\n",
41 | "import chisel3.util._\n",
42 | "import chisel3.tester._\n",
43 | "import chisel3.tester.RawTester.test"
44 | ]
45 | },
46 | {
47 | "cell_type": "markdown",
48 | "metadata": {},
49 | "source": [
50 | "---\n",
51 | "# A Tale of Two FIRs \n",
52 | "From the last module, we had the convolution part of the FIR filter written like this:\n",
53 | "\n",
54 | "```scala\n",
55 | "val muls = Wire(Vec(length, UInt(8.W)))\n",
56 | "for(i <- 0 until length) {\n",
57 | " if(i == 0) muls(i) := io.in * io.consts(i)\n",
58 | " else muls(i) := regs(i - 1) * io.consts(i)\n",
59 | "}\n",
60 | "\n",
61 | "val scan = Wire(Vec(length, UInt(8.W)))\n",
62 | "for(i <- 0 until length) {\n",
63 | " if(i == 0) scan(i) := muls(i)\n",
64 | " else scan(i) := muls(i) + scan(i - 1)\n",
65 | "}\n",
66 | "\n",
67 | "io.out := scan(length - 1)\n",
68 | "```\n",
69 | "\n",
70 | "As a recap, the idea is to multiply each element of `io.in` with the corresponding element of `io.consts`, and store it in `muls`.\n",
71 | "Then, the elements in `muls` are accumulated into `scan`, with `scan(0) = muls(0)`, `scan(1) = scan(0) + muls(1) = muls(0) + muls(1)`, and in general `scan(n) = scan(n-1) + muls(n) = muls(0) + ... + muls(n-1) + muls(n)`.\n",
72 | "The last element in `scan` (equal to the sum of all `muls`) is assigned to `io.out`.\n",
73 | "\n",
74 | "However, it's very verbose for what might be considered quite a simple operation. In fact, all that could be written in one line:\n",
75 | "\n",
76 | "```scala\n",
77 | "io.out := (taps zip io.consts).map { case (a, b) => a * b }.reduce(_ + _)\n",
78 | "```\n",
79 | "\n",
80 | "What is it doing?! Let's break it down:\n",
81 | "- assume `taps` is the list of all samples, with `taps(0) = io.in`, `taps(1) = regs(0)`, etc.\n",
82 | "- `(taps zip io.consts)` takes two lists, `taps` and `io.consts`, and combines them into one list where each element is a tuple of the elements at the inputs at the corresponding position. Concretely, its value would be `[(taps(0), io.consts(0)), (taps(1), io.consts(1)), ..., (taps(n), io.consts(n))]`. Remember that periods are optional, so this is equivalent to `(taps.zip(io.consts))`.\n",
83 | "- `.map { case (a, b) => a * b }` applies the anonymous function (takes a tuple of two elements returns their product) to the elements of the list, and returns the result. In this case, the result is equivalent to `muls` in the verbose example, and has the value `[taps(0) * io.consts(0), taps(1) * io.consts(1), ..., taps(n) * io.consts(n)]`. You'll revisit anonymous functions in the next module. For now, just learn this syntax.\n",
84 | "- Finally, `.reduce(_ + _)` also applies the function (addition of elements) to elements of the list. However, it takes two arguments: the first is the current accumulation, and the second is the list element (in the first iteration, it just adds the first two elements). These are given by the two underscores in the parentheses. The result would then be, assuming left-to-right traversal, `(((muls(0) + muls(1)) + muls(2)) + ...) + muls(n)`, with the result of deeper-nested parentheses evaluated first. This is the output of the convolution.\n",
85 | "\n",
86 | "---\n",
87 | "# Functions as Arguments\n",
88 | "Formally, functions like `map` and `reduce` are called _higher-order functions_ : they are functions that take functions as arguments.\n",
89 | "As it turns out (and hopefully, as you can see from the above example), these are very powerful constructs that encapsulate a general computational pattern, allowing you to concentrate on the application logic instead of flow control, and resulting in very concise code.\n",
90 | "\n",
91 | "## Different ways of specifying functions\n",
92 | "You may have noticed that there were two ways of specifying functions in the examples above:\n",
93 | "- For functions where each argument is referred to exactly once, you *may* be able to use an underscore (`_`) to refer to each argument. In the example above, the `reduce` argument function took two arguments and could be specified as `_ + _`. While convenient, this is subject to an additional set of arcane rules, so if it doesn't work, try:\n",
94 | "- Specifying the inputs argument list explicitly. The reduce could have been explicitly written as `(a, b) => a + b`, with the general form of putting the argument list in parentheses, followed by `=>`, followed by the function body referring to those arguments.\n",
95 | "- When tuple unpacking is needed, using a `case` statement, as in `case (a, b) => a * b`. That takes a single argument, a tuple of two elements, and unpacks it into variables `a` and `b`, which can then be used in the function body.\n",
96 | "\n",
97 | "## Practice in Scala\n",
98 | "In the last module, we've seen major classes in the Scala Collections API, like `List`s.\n",
99 | "These higher-order functions are part of these APIs - and in fact, the above example uses the `map` and `reduce` API on `List`s.\n",
100 | "In this section, we'll familiarize ourselves with these methods through examples and exercises.\n",
101 | "In these examples, we'll operate on Scala numbers (`Int`s) for the sake of simplicity and clarity, but because Chisel operators behave similarly, the concepts should generalize.\n",
102 | "\n",
103 | "**Example: Map** \n",
104 | "`List[A].map` has type signature `map[B](f: (A) ⇒ B): List[B]`. You'll learn more about types in a later module. For now, think of types A and B as `Int`s or `UInt`s, meaning they could be software or hardware types.\n",
105 | "\n",
106 | "In plain English, it takes an argument of type `(f: (A) ⇒ B)`, or a function that takes one argument of type `A` (the same type as the element of the input List) and returns a value of type `B` (which can be anything). `map` then returns a new list of type `B` (the return type of the argument function).\n",
107 | "\n",
108 | "As we've already explained the behavior of List in the FIR example, let's get straight into the examples and exercises:"
109 | ]
110 | },
111 | {
112 | "cell_type": "code",
113 | "execution_count": null,
114 | "metadata": {},
115 | "outputs": [],
116 | "source": [
117 | "println(List(1, 2, 3, 4).map(x => x + 1)) // explicit argument list in function\n",
118 | "println(List(1, 2, 3, 4).map(_ + 1)) // equivalent to the above, but implicit arguments\n",
119 | "println(List(1, 2, 3, 4).map(_.toString + \"a\")) // the output element type can be different from the input element type\n",
120 | "\n",
121 | "println(List((1, 5), (2, 6), (3, 7), (4, 8)).map { case (x, y) => x*y }) // this unpacks a tuple, note use of curly braces\n",
122 | "\n",
123 | "// Related: Scala has a syntax for constructing lists of sequential numbers\n",
124 | "println(0 to 10) // to is inclusive , the end point is part of the result\n",
125 | "println(0 until 10) // until is exclusive at the end, the end point is not part of the result\n",
126 | "\n",
127 | "// Those largely behave like lists, and can be useful for generating indices:\n",
128 | "val myList = List(\"a\", \"b\", \"c\", \"d\")\n",
129 | "println((0 until 4).map(myList(_)))"
130 | ]
131 | },
132 | {
133 | "cell_type": "markdown",
134 | "metadata": {},
135 | "source": [
136 | "**Exercise: Map** "
137 | ]
138 | },
139 | {
140 | "cell_type": "code",
141 | "execution_count": null,
142 | "metadata": {},
143 | "outputs": [],
144 | "source": [
145 | "// Now you try: \n",
146 | "// Fill in the blanks (the ???) such that this doubles the elements of the input list.\n",
147 | "// This should return: List(2, 4, 6, 8)\n",
148 | "println(List(1, 2, 3, 4).map(???))"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "**Example: zipWithIndex** \n",
156 | "`List.zipWithIndex` has type signature `zipWithIndex: List[(A, Int)]`.\n",
157 | "\n",
158 | "It takes no arguments, but returns a list where each element is a tuple of the original elements, and the index (with the first one being zero).\n",
159 | "So `List(\"a\", \"b\", \"c\", \"d\").zipWithIndex` would return `List((\"a\", 0), (\"b\", 1), (\"c\", 2), (\"d\", 3))`\n",
160 | "\n",
161 | "This is useful when the element index is needed in some operation.\n",
162 | "\n",
163 | "Since this is pretty straightforward, we'll just have some examples:"
164 | ]
165 | },
166 | {
167 | "cell_type": "code",
168 | "execution_count": null,
169 | "metadata": {},
170 | "outputs": [],
171 | "source": [
172 | "println(List(1, 2, 3, 4).zipWithIndex) // note indices start at zero\n",
173 | "println(List(\"a\", \"b\", \"c\", \"d\").zipWithIndex)\n",
174 | "println(List((\"a\", \"b\"), (\"c\", \"d\"), (\"e\", \"f\"), (\"g\", \"h\")).zipWithIndex) // tuples nest"
175 | ]
176 | },
177 | {
178 | "cell_type": "markdown",
179 | "metadata": {},
180 | "source": [
181 | "**Example: Reduce** \n",
182 | "`List[A].map` has type signature similar to `reduce(op: (A, A) ⇒ A): A`. (it's actually more lenient, `A` only has to be a supertype of the List type, but we're not going to deal with that syntax here)\n",
183 | "\n",
184 | "As it's also been explained above, here are some examples:"
185 | ]
186 | },
187 | {
188 | "cell_type": "code",
189 | "execution_count": null,
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "println(List(1, 2, 3, 4).reduce((a, b) => a + b)) // returns the sum of all the elements\n",
194 | "println(List(1, 2, 3, 4).reduce(_ * _)) // returns the product of all the elements\n",
195 | "println(List(1, 2, 3, 4).map(_ + 1).reduce(_ + _)) // you can chain reduce onto the result of a map"
196 | ]
197 | },
198 | {
199 | "cell_type": "code",
200 | "execution_count": null,
201 | "metadata": {},
202 | "outputs": [],
203 | "source": [
204 | "// Important note: reduce will fail with an empty list\n",
205 | "println(List[Int]().reduce(_ * _))"
206 | ]
207 | },
208 | {
209 | "cell_type": "markdown",
210 | "metadata": {},
211 | "source": [
212 | "**Exercise: Reduce** "
213 | ]
214 | },
215 | {
216 | "cell_type": "code",
217 | "execution_count": null,
218 | "metadata": {},
219 | "outputs": [],
220 | "source": [
221 | "// Now you try: \n",
222 | "// Fill in the blanks (the ???) such that this returns the product of the double of the elements of the input list.\n",
223 | "// This should return: (1*2)*(2*2)*(3*2)*(4*2) = 384\n",
224 | "println(List(1, 2, 3, 4).map(???).reduce(???))"
225 | ]
226 | },
227 | {
228 | "cell_type": "markdown",
229 | "metadata": {},
230 | "source": [
231 | "**Example: Fold** \n",
232 | "`List[A].fold` is very similar to reduce, except that you can specify the initial accumulation value.\n",
233 | "It has type signature similar to `fold(z: A)(op: (A, A) ⇒ A): A`. (like `reduce`, the type of `A` is also more lenient)\n",
234 | "\n",
235 | "Notably, it takes two argument lists, the first (`z`) is the initial value, and the second is the accumulation function.\n",
236 | "Unlike `reduce`, it will not fail with an empty list, instead returning the initial value directly.\n",
237 | "\n",
238 | "Here's some examples:"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": null,
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "println(List(1, 2, 3, 4).fold(0)(_ + _)) // equivalent to the sum using reduce\n",
248 | "println(List(1, 2, 3, 4).fold(1)(_ + _)) // like above, but accumulation starts at 1\n",
249 | "println(List().fold(1)(_ + _)) // unlike reduce, does not fail on an empty input"
250 | ]
251 | },
252 | {
253 | "cell_type": "markdown",
254 | "metadata": {},
255 | "source": [
256 | "**Exercise: Fold** "
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": null,
262 | "metadata": {},
263 | "outputs": [],
264 | "source": [
265 | "// Now you try: \n",
266 | "// Fill in the blanks (the ???) such that this returns the double the product of the elements of the input list.\n",
267 | "// This should return: 2*(1*2*3*4) = 48\n",
268 | "// Note: unless empty list tolerance is needed, reduce is a much better fit here.\n",
269 | "println(List(1, 2, 3, 4).fold(???)(???))"
270 | ]
271 | },
272 | {
273 | "cell_type": "markdown",
274 | "metadata": {},
275 | "source": [
276 | "**Exercise: Decoupled Arbiter** \n",
277 | "Let's put everything together now into an exercise.\n",
278 | "\n",
279 | "For this example, we're going to build a Decoupled arbiter: a module that has _n_ Decoupled inputs and one Decoupled output. \n",
280 | "The arbiter selects the lowest channel that is valid and forwards it to the output.\n",
281 | "\n",
282 | "Some hints:\n",
283 | "- Architecturally:\n",
284 | " - `io.out.valid` is true if any of the inputs are valid\n",
285 | " - Consider having an internal wire of the selected channel\n",
286 | " - Each input's `ready` is true if the output is ready, AND that channel is selected (this does combinationally couple ready and valid, but we'll ignore it for now...)\n",
287 | "- These constructs may help:\n",
288 | " - `map`, especially for returning a Vec of sub-elements, for example `io.in.map(_.valid)` returns a list of valid signals of the input Bundles\n",
289 | " - `PriorityMux(List[Bool, Bits])`, which takes in a list of valid signals and bits, returning the first element that is valid\n",
290 | " - Dynamic index on a Vec, by indexing with a UInt, for example `io.in(0.U)`"
291 | ]
292 | },
293 | {
294 | "cell_type": "code",
295 | "execution_count": null,
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "class MyRoutingArbiter(numChannels: Int) extends Module {\n",
300 | " val io = IO(new Bundle {\n",
301 | " val in = Vec(numChannels, Flipped(Decoupled(UInt(8.W))))\n",
302 | " val out = Decoupled(UInt(8.W))\n",
303 | " } )\n",
304 | "\n",
305 | " // YOUR CODE BELOW\n",
306 | " ???\n",
307 | "}\n",
308 | "\n",
309 | "test(new MyRoutingArbiter(4)) { c =>\n",
310 | " // verify that the computation is correct\n",
311 | " // Set input defaults\n",
312 | " for(i <- 0 until 4) {\n",
313 | " c.io.in(i).valid.poke(false.B)\n",
314 | " c.io.in(i).bits.poke(i.U)\n",
315 | " c.io.out.ready.poke(true.B)\n",
316 | " }\n",
317 | "\n",
318 | " c.io.out.valid.expect(false.B)\n",
319 | "\n",
320 | " // Check single input valid behavior with backpressure\n",
321 | " for (i <- 0 until 4) {\n",
322 | " c.io.in(i).valid.poke(true.B)\n",
323 | " c.io.out.valid.expect(true.B)\n",
324 | " c.io.out.bits.expect(i.U)\n",
325 | "\n",
326 | " c.io.out.ready.poke(false.B)\n",
327 | " c.io.in(i).ready.expect(false.B)\n",
328 | "\n",
329 | " c.io.out.ready.poke(true.B)\n",
330 | " c.io.in(i).valid.poke(false.B)\n",
331 | " }\n",
332 | "\n",
333 | " // Basic check of multiple input ready behavior with backpressure\n",
334 | " c.io.in(1).valid.poke(true.B)\n",
335 | " c.io.in(2).valid.poke(true.B)\n",
336 | " c.io.out.bits.expect(1.U)\n",
337 | " c.io.in(1).ready.expect(true.B)\n",
338 | " c.io.in(0).ready.expect(false.B)\n",
339 | "\n",
340 | " c.io.out.ready.poke(false.B)\n",
341 | " c.io.in(1).ready.expect(false.B)\n",
342 | "}\n",
343 | "\n",
344 | "println(\"SUCCESS!!\") // Scala Code: if we get here, our tests passed!"
345 | ]
346 | },
347 | {
348 | "cell_type": "markdown",
349 | "metadata": {},
350 | "source": [
351 | "
"
373 | ]
374 | },
375 | {
376 | "cell_type": "markdown",
377 | "metadata": {},
378 | "source": [
379 | "---\n",
380 | "# You're done!\n",
381 | "\n",
382 | "[Return to the top.](#top)"
383 | ]
384 | }
385 | ],
386 | "metadata": {
387 | "anaconda-cloud": {},
388 | "kernelspec": {
389 | "display_name": "Scala",
390 | "language": "scala",
391 | "name": "scala"
392 | },
393 | "language_info": {
394 | "codemirror_mode": "text/x-scala",
395 | "file_extension": ".scala",
396 | "mimetype": "text/x-scala",
397 | "name": "scala",
398 | "nbconvert_exporter": "script",
399 | "version": "2.12.10"
400 | }
401 | },
402 | "nbformat": 4,
403 | "nbformat_minor": 1
404 | }
--------------------------------------------------------------------------------
/4.1_firrtl_ast.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Module 4.1: Introduction to FIRRTL\n",
15 | "\n",
16 | "**Prev: [Generators: Types](3.6_types.ipynb)** \n",
17 | "**Next: [FIRRTL AST Traversal](4.2_firrtl_ast_traversal.ipynb)**\n",
18 | "\n",
19 | "## Motivation\n",
20 | "You've learned some Scala and written some Chisel, and for 90% of users, that should be enough to become a Chisel aficionado.\n",
21 | "\n",
22 | "However, some use cases are better expressed as a programmatic transformation of a Chisel design, rather than as a generator.\n",
23 | "\n",
24 | "For example, suppose we want to count the number of registers in a design. This would be difficult to do as a generator, so instead, we can write a FIRRTL pass to do it for us.\n",
25 | "\n",
26 | "## Setup"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": null,
32 | "metadata": {
33 | "collapsed": true
34 | },
35 | "outputs": [],
36 | "source": [
37 | "val path = System.getProperty(\"user.dir\") + \"/source/load-ivy.sc\"\n",
38 | "interp.load.module(ammonite.ops.Path(java.nio.file.FileSystems.getDefault().getPath(path)))"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": null,
44 | "metadata": {
45 | "collapsed": true
46 | },
47 | "outputs": [],
48 | "source": [
49 | "import chisel3._\n",
50 | "import chisel3.util._\n",
51 | "import chisel3.iotesters.{ChiselFlatSpec, Driver, PeekPokeTester}\n",
52 | "import firrtl._"
53 | ]
54 | },
55 | {
56 | "cell_type": "markdown",
57 | "metadata": {},
58 | "source": [
59 | "## What is FIRRTL?\n",
60 | "As you've probably become aware, when you execute a Chisel design, it elaborates (executes the surrounding Scala code) to construct an instance of your generator, with all Scala parameters resolved.\n",
61 | "\n",
62 | "Instead of directly emitting Verilog, Chisel emits an intermediate representation called FIRRTL, which represents the elaborated (parameter-resolved) RTL instance. It can be serialized (converted to a String for writing to a file), and this serialized syntax is human readable. Internally, however, it is not represented as a long string. Instead, it is a datastructure organized as a tree of nodes, called an abstract-syntax-tree (AST).\n",
63 | "\n",
64 | "Let's take a look! We will take a simple Chisel design, elaborate it, and inspect what FIRRTL it generates!\n",
65 | "\n",
66 | "First, we define a Chisel module, which delays its input signal by two cycles."
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "metadata": {
73 | "collapsed": true
74 | },
75 | "outputs": [],
76 | "source": [
77 | "class DelayBy2(width: Int) extends Module {\n",
78 | " val io = IO(new Bundle {\n",
79 | " val in = Input(UInt(width.W))\n",
80 | " val out = Output(UInt(width.W))\n",
81 | " })\n",
82 | " val r0 = RegNext(io.in)\n",
83 | " val r1 = RegNext(r0)\n",
84 | " io.out := r1\n",
85 | "}"
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "Next, let's elaborate it, serialize it, and print out the FIRRTL it generates."
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": null,
98 | "metadata": {
99 | "collapsed": true
100 | },
101 | "outputs": [],
102 | "source": [
103 | "println(chisel3.Driver.emit(() => new DelayBy2(4)))"
104 | ]
105 | },
106 | {
107 | "cell_type": "markdown",
108 | "metadata": {},
109 | "source": [
110 | "As you can see, the serialized FIRRTL looks very similar to what our Chisel design would look like, with all generator parameters resolved."
111 | ]
112 | },
113 | {
114 | "cell_type": "markdown",
115 | "metadata": {},
116 | "source": [
117 | "## The FIRRTL AST\n",
118 | "\n",
119 | "As mentioned earlier, the FIRRTL representation can be serialized as a String, but internally, it is a data structure called an AST (abstract syntax tree). This data structure is a tree of nodes, where one node can contain child nodes. There are no cycles in this data structure.\n",
120 | "\n",
121 | "Let's take a look at what the internal datastructure looks like:"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {
128 | "collapsed": true
129 | },
130 | "outputs": [],
131 | "source": [
132 | "val firrtlSerialization = chisel3.Driver.emit(() => new DelayBy2(4))\n",
133 | "val firrtlAST = firrtl.Parser.parse(firrtlSerialization.split(\"\\n\").toIterator, Parser.GenInfo(\"file.fir\"))\n",
134 | "\n",
135 | "println(firrtlAST)"
136 | ]
137 | },
138 | {
139 | "cell_type": "markdown",
140 | "metadata": {},
141 | "source": [
142 | "Obviously, the default serialization of a data structure isn't as pretty, but you can see the classes that internally represent the RTL design. Let's pretty-print it to make it easier to read."
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {
149 | "collapsed": true
150 | },
151 | "outputs": [],
152 | "source": [
153 | "println(stringifyAST(firrtlAST))"
154 | ]
155 | },
156 | {
157 | "cell_type": "markdown",
158 | "metadata": {},
159 | "source": [
160 | "This is the internal data structure that holds the FIRRTL AST. It is a tree structure whose root node is **Circuit**, which has 3 children: **@[file.fir@2.0]**, **ArrayBuffer**, and **cmd5WrapperHelperDelayBy2**. The following is the definition of `Circuit`'s actual Scala class that was serialized:\n",
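    | "\n",
    | "(Sketched here from the FirrtlNode declaration listed in the next section; the real class in IR.scala also defines the `def mapXXXX` helper methods mentioned below.)\n",
    | "\n",
    | "```scala\n",
    | "case class Circuit(info: Info, modules: Seq[DefModule], main: String) extends FirrtlNode\n",
    | "```\n",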
161 | "\n"
162 | ]
163 | },
164 | {
165 | "cell_type": "markdown",
166 | "metadata": {},
167 | "source": [
168 | "As you can see, it has three child nodes: `info: Info`, `modules: Seq[DefModule]`, and `main: String`. It extends `FirrtlNode`, as all FIRRTL AST nodes must. Ignore the `def mapXXXX` functions for now.\n",
169 | "\n",
170 | "Many FIRRTL nodes contain an `info: Info` field, into which the parser either inserts file information, such as line and column numbers, or a `NoInfo` token. In this example, **@[file.fir@2.0]** would refer to the FIRRTL file, line 2, column 0.\n",
171 | "\n",
172 | "The following section will outline all of these FIRRTL nodes in detail."
173 | ]
174 | },
175 | {
176 | "cell_type": "markdown",
177 | "metadata": {
178 | "collapsed": true
179 | },
180 | "source": [
181 | "# FIRRTL Node Descriptions\n",
182 | "\n",
183 | "This section describes common FirrtlNodes found in [firrtl/src/main/scala/firrtl/ir/IR.scala](https://github.com/ucb-bar/firrtl/blob/master/src/main/scala/firrtl/ir/IR.scala).\n",
184 | "\n",
185 | "For more detail on components not mentioned here, please refer to [The FIRRTL Specification](https://github.com/ucb-bar/firrtl/blob/master/spec/spec.pdf).\n",
186 | "\n",
187 | "\n",
188 | "## Circuit\n",
189 | "Circuit is the root node of any Firrtl datastructure. There is only ever one Circuit, and that Circuit contains a list of module definitions and the name of the top-level module.\n",
190 | "\n",
191 | "#### FirrtlNode Declaration\n",
192 | "```scala \n",
193 | "Circuit(info: Info, modules: Seq[DefModule], main: String)\n",
194 | "```\n",
195 | "\n",
196 | "#### Concrete Syntax\n",
197 | "```\n",
198 | "circuit Adder:\n",
199 | " ... //List of modules\n",
200 | "```\n",
201 | "#### In-memory Representation\n",
202 | "```scala\n",
203 | "Circuit(NoInfo, Seq(...), \"Adder\")\n",
204 | "```\n",
205 | "\n",
206 | "## Module\n",
207 | "\n",
208 | "Modules are the unit of modularity within Firrtl and are never directly nested (declaring an instance of a module has its own concrete syntax and AST representation). Each Module has a name, a list of ports, and a body containing its implementation.\n",
209 | "\n",
210 | "#### FirrtlNode declaration\n",
211 | "```scala\n",
212 | "Module(info: Info, name: String, ports: Seq[Port], body: Stmt) extends DefModule\n",
213 | "```\n",
214 | "\n",
215 | "#### Concrete Syntax\n",
216 | "```\n",
217 | "module Adder:\n",
218 | " ... // list of ports\n",
219 | " ... // statements\n",
220 | "```\n",
221 | "#### In-memory representation\n",
222 | "```scala\n",
223 | "Module(NoInfo, \"Adder\", Seq(...), ...)\n",
224 | "```\n",
225 | "\n",
226 | "## Port\n",
227 | "A port defines part of a Module's io, and has a name, direction (input or output), and type.\n",
228 | "\n",
229 | "#### FirrtlNode Declaration\n",
230 | "```scala\n",
231 | "class Port(info: Info, name: String, direction: Direction, tpe: Type)\n",
232 | "```\n",
233 | "#### Concrete Syntax\n",
234 | "```\n",
235 | "input x: UInt\n",
236 | "```\n",
237 | "\n",
238 | "#### In-memory representation\n",
239 | "```scala\n",
240 | "Port(NoInfo, \"x\", INPUT, UIntType(UnknownWidth))\n",
241 | "```\n",
242 | "\n",
243 | "## Statement\n",
244 | "A statement is used to describe the components within a module and how they interact. Below are some commonly used statements:\n",
245 | "\n",
246 | "### Block of Statements\n",
247 | "A group of statements. Commonly used as the body field in a Module declaration.\n",
248 | "\n",
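    | "Its declaration (a sketch, shown in the same style as the other declarations on this page) is simply a sequence of statements:\n",
    | "```scala\n",
    | "Block(stmts: Seq[Statement])\n",
    | "```\n",
    | "\n",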
249 | "### Wire Declaration\n",
250 | "A wire declaration, containing a name and type. It can be both a source (connected *from*) and a sink (connected *to*).\n",
251 | "#### FirrtlNode declaration\n",
252 | "```scala\n",
253 | "DefWire(info: Info, name: String, tpe: Type)\n",
254 | "```\n",
255 | "#### Concrete syntax\n",
256 | "```\n",
257 | "wire w: UInt\n",
258 | "```\n",
259 | "#### In-memory Representation\n",
260 | "```scala\n",
261 | "DefWire(NoInfo, \"w\", UIntType(UnknownWidth))\n",
262 | "```\n",
263 | "\n",
264 | "### Register Declaration\n",
265 | "A register declaration, containing a name, type, clock signal, reset signal, and reset value.\n",
266 | "#### FirrtlNode declaration\n",
267 | "```scala\n",
268 | "DefRegister(info: Info, name: String, tpe: Type, clock: Expression, reset: Expression, init: Expression)\n",
269 | "```\n",
270 | "\n",
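    | "For example, a 4-bit register clocked by `clk` and reset to zero while `rst` is high might be written as follows (a sketch of the concrete syntax based on the FIRRTL specification, not output generated by this notebook):\n",
    | "```\n",
    | "reg r: UInt<4>, clk with: (reset => (rst, UInt(0)))\n",
    | "```\n",
    | "\n",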
271 | "### Connection\n",
272 | "Represents a directed connection from a source to a sink. Note that it abides by last-connect semantics, as in Chisel.\n",
273 | "\n",
274 | "#### FirrtlNode declaration\n",
275 | "```scala\n",
276 | "Connect(info: Info, loc: Expression, expr: Expression)\n",
277 | "```\n",
278 | "\n",
279 | "### Other Statements\n",
280 | "Other statement types like `DefMemory`, `DefNode`, `IsInvalid`, `Conditionally`, and others are omitted here; please refer to [firrtl/src/main/scala/firrtl/ir/IR.scala](https://github.com/freechipsproject/firrtl/blob/master/src/main/scala/firrtl/ir/IR.scala) for more detail.\n",
281 | "\n",
282 | "## Expression\n",
283 | "Expressions represent references to declared components or logical and arithmetic operations. Below are some commonly used expressions:\n",
284 | "\n",
285 | "### Reference\n",
286 | "A reference to a declared component, such as a wire, register, or port. It has a name and type field. Note that it does not contain a pointer to the actual declaration, but instead just contains the name as a String.\n",
287 | "\n",
288 | "#### FirrtlNode declaration\n",
289 | "```scala\n",
290 | "Reference(name: String, tpe: Type)\n",
291 | "```\n",
292 | "\n",
293 | "### DoPrim\n",
294 | "An anonymous primitive operation, such as `Add`, `Sub`, `And`, `Or`, or subword selection (`Bits`). The type of operation is indicated by the `op: PrimOp` field. Note that the number of required arguments and constants is determined by the `op`.\n",
295 | "\n",
296 | "#### FirrtlNode declaration\n",
297 | "```scala\n",
298 | "DoPrim(op: PrimOp, args: Seq[Expression], consts: Seq[BigInt], tpe: Type)\n",
299 | "```\n",
300 | "\n",
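    | "For example, the primitive operation `add(x, y)` on two unknown-width unsigned references might be represented in memory as follows (a sketch following the conventions of the earlier examples):\n",
    | "```scala\n",
    | "DoPrim(Add, Seq(Reference(\"x\", UIntType(UnknownWidth)), Reference(\"y\", UIntType(UnknownWidth))), Seq(), UIntType(UnknownWidth))\n",
    | "```\n",
    | "\n",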
301 | "### Other Expressions\n",
302 | "Other expressions including `SubField`, `SubIndex`, `SubAccess`, `Mux`, `ValidIf` etc. are described in more detail in [firrtl/src/main/scala/firrtl/ir/IR.scala](https://github.com/ucb-bar/firrtl/blob/master/src/main/scala/firrtl/ir/IR.scala) and [The FIRRTL Specification](https://github.com/ucb-bar/firrtl/blob/master/spec/spec.pdf).\n",
303 | "\n",
304 | "# Back to our example\n",
305 | "\n",
306 | "Let's take another look at the FIRRTL AST from our example. Hopefully, the structure of the design makes more sense!"
307 | ]
308 | },
309 | {
310 | "cell_type": "code",
311 | "execution_count": null,
312 | "metadata": {
313 | "collapsed": true
314 | },
315 | "outputs": [],
316 | "source": [
317 | "println(stringifyAST(firrtlAST))"
318 | ]
319 | },
320 | {
321 | "cell_type": "markdown",
322 | "metadata": {},
323 | "source": [
324 | "That's it for this section! In the next section, we will look at how a FIRRTL transformation walks this AST and modifies it."
325 | ]
326 | }
327 | ],
328 | "metadata": {
329 | "kernelspec": {
330 | "display_name": "Scala",
331 | "language": "scala",
332 | "name": "scala"
333 | },
334 | "language_info": {
335 | "codemirror_mode": "text/x-scala",
336 | "file_extension": ".scala",
337 | "mimetype": "text/x-scala",
338 | "name": "scala211",
339 | "nbconvert_exporter": "script",
340 | "pygments_lexer": "scala",
341 | "version": "2.11.11"
342 | }
343 | },
344 | "nbformat": 4,
345 | "nbformat_minor": 2
346 | }
347 |
--------------------------------------------------------------------------------
/4.2_firrtl_ast_traversal.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Module 4.2: FIRRTL AST Traversal\n",
15 | "\n",
16 | "**Prev: [Introduction to FIRRTL](4.1_firrtl_ast.ipynb)** \n",
17 | "**Next: [Common Pass Idioms](4.3_firrtl_common_idioms.ipynb)**\n",
18 | "\n",
19 | "### Understanding IR node children\n",
20 | "\n",
21 | "Writing a Firrtl pass usually requires writing functions that walk the Firrtl data structure to either collect information or replace IR nodes with new IR nodes.\n",
22 | "\n",
23 | "The IR data structure is a tree, where each IR node can have some number of child nodes (which in turn can have more child nodes, etc.). IR nodes without children are called leaves.\n",
24 | "\n",
25 | "Different IR nodes can have different child types. The following table shows the possible child types for each IR node type:\n",
26 | "\n",
27 | "```\n",
28 | "+------------+-----------------------------+\n",
29 | "| Node | Children |\n",
30 | "+------------+-----------------------------+\n",
31 | "| Circuit | DefModule |\n",
32 | "| DefModule | Port, Statement |\n",
33 | "| Port | Type, Direction |\n",
34 | "| Statement | Statement, Expression, Type |\n",
35 | "| Expression | Expression, Type |\n",
36 | "| Type | Type, Width |\n",
37 | "| Width | |\n",
38 | "| Direction | |\n",
39 | "+------------+-----------------------------+\n",
40 | "```\n",
41 | "\n",
42 | "### The map function\n",
43 | "\n",
44 | "To write a function that traverses a `Circuit`, we need to first understand the functional programming concept `map`.\n",
45 | "\n",
46 | "#### Understanding Seq.map\n",
47 | "A Scala sequence of strings can be represented as a tree with a root node `Seq` and child nodes `\"a\"`, `\"b\"`, and `\"c\"`:\n",
48 | "```scala\n",
49 | "val s = Seq(\"a\", \"b\", \"c\")\n",
50 | "```\n",
51 | "```\n",
52 | " Seq\n",
53 | " / | \\\n",
54 | "\"a\" \"b\" \"c\"\n",
55 | "```\n",
56 | "\n",
57 | "Suppose we define a function `f` that, given a String argument `x`, concatenates `x` with itself:\n",
58 | "```scala\n",
59 | "def f(x: String): String = x + x\n",
60 | "```\n",
61 | "\n",
62 | "We can call `s.map` to return a new `Seq[String]` whose children are the result of applying `f` to every child of s:\n",
63 | "```scala\n",
64 | "val s = Seq(\"a\", \"b\", \"c\")\n",
65 | "def f(x: String): String = x + x // repeated declaration for clarity\n",
66 | "val t = s.map(f)\n",
67 | "println(t) // Seq(\"aa\", \"bb\", \"cc\")\n",
68 | "```\n",
69 | "```\n",
70 | " Seq\n",
71 | " / | \\\n",
72 | "\"aa\" \"bb\" \"cc\"\n",
73 | "```\n",
74 | "\n",
75 | "#### Understanding Firrtl's map\n",
76 | "\n",
77 | "We use this \"mapping\" idea to create our own, custom `map` methods on IR nodes. Suppose we have a `DoPrim` expression representing 1 + 1; this can be depicted as a tree of expressions with a root node `DoPrim`:\n",
78 | "```\n",
79 | " DoPrim\n",
80 | " / \\\n",
81 | "UIntValue UIntValue\n",
82 | "```\n",
83 | "\n",
84 | "If we have a function `f` that takes an `Expression` argument and returns a new `Expression`, we can \"map\" it onto all children `Expression` of a given IR node, like our `DoPrim`. This would return the following new `DoPrim`, whose children are the result of applying `f` to every `Expression` child of `DoPrim`:\n",
85 | "```\n",
86 | " DoPrim\n",
87 | " / \\\n",
88 | "f(UIntValue) f(UIntValue)\n",
89 | "```\n",
90 | "\n",
91 | "Sometimes IR nodes have children of multiple types. For example, `Conditionally` has both `Expression` and `Statement` children. In this case, the map will only apply its function to the children whose type matches the function's argument type (and return value type):\n",
92 | "```scala\n",
93 | "val c = Conditionally(info, e, s1, s2) // e: Expression, s1, s2: Statement, info: FileInfo\n",
94 | "def fExp(e: Expression): Expression = ...\n",
95 | "def fStmt(s: Statement): Statement = ...\n",
96 | "c.map(fExp) // Conditionally(info, fExp(e), s1, s2)\n",
97 | "c.map(fStmt) // Conditionally(info, e, fStmt(s1), fStmt(s2))\n",
98 | "```\n",
99 | "\n",
100 | "Scala has \"infix notation\", which allows you to drop the `.` and parentheses when calling a method that takes one argument. Often, we write these map calls with infix notation:\n",
101 | "```scala\n",
102 | "c map fExp // equivalent to c.map(fExp)\n",
103 | "c map fStmt // equivalent to c.map(fStmt)\n",
104 | "```\n",
105 | "\n",
106 | "### Pre-order traversal\n",
107 | "\n",
108 | "To traverse a Firrtl tree, we use `map` to write recursive functions which visit every child of every node we care about.\n",
109 | "\n",
110 | "Suppose we want to collect the names of every register declared in the design; we know this requires visiting every `Statement`. However, some `Statement` nodes can have children that are themselves `Statement`s. Thus, we need a function that checks whether its input argument is a `DefRegister` and, if not, recursively applies itself to all `Statement` children of its input argument.\n",
111 | "\n",
112 | "The following function, `f`, does exactly that, but takes two curried argument lists: a mutable hashset of register names, and a `Statement`. Using function currying, we can pass only the first argument to obtain a new function with the desired type signature (`Statement => Statement`):\n",
113 | "\n",
114 | "```scala\n",
115 | "def f(regNames: mutable.HashSet[String])(s: Statement): Statement = s match {\n",
116 | " // If register, add name to regNames\n",
117 | " case r: DefRegister =>\n",
118 | " regNames += r.name\n",
119 | " r // Return argument unchanged (ok because DefRegister has no Statement children)\n",
120 | " // If not, apply f(regNames) to all children Statement\n",
121 | " case _ => s map f(regNames) // Note that f(regNames) is of type Statement=>Statement\n",
122 | "}\n",
123 | "```\n",
124 | "\n",
125 | "This pattern is very common in Firrtl, and is called \"pre-order traversal\" because the recursive function matches on the original IR node before recursively applying itself to its child nodes.\n",
126 | "\n",
127 | "### Post-order traversal\n",
128 | "\n",
129 | "We can write the previous example in a \"post-order traversal\" as follows:\n",
130 | "\n",
131 | "```scala\n",
132 | "def f(regNames: mutable.HashSet[String])(s: Statement): Statement =\n",
133 | "  // Note that we immediately recurse into the children nodes, then match\n",
134 | "  s map f(regNames) match {\n",
135 | " // If register, add name to regNames\n",
136 | " case r: DefRegister =>\n",
137 | " regNames += r.name\n",
138 | " r // Return argument unchanged (ok because DefRegister has no Statement children)\n",
139 | " // If not, return s\n",
140 | " case _ => s // Note that all Statement children of s have had f(regNames) already applied\n",
141 | " }\n",
142 | "```\n",
143 | "\n",
144 | "While the traversal ordering is different between these two examples, it makes no difference for this use case (and many others). However, it is an important tool to keep in your back pocket for when the traversal ordering matters."
145 | ]
146 | },
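    | {
    |  "cell_type": "markdown",
    |  "metadata": {},
    |  "source": [
    |   "The same pattern can be tried out without the firrtl library at all. Below is a minimal, self-contained Scala sketch that mimics the pre-order traversal above on a toy `Stmt` tree; the `Reg` and `Block` classes here are stand-ins for illustration, not the real firrtl IR nodes:\n",
    |   "\n",
    |   "```scala\n",
    |   "import scala.collection.mutable\n",
    |   "\n",
    |   "// Toy tree: Reg plays the role of DefRegister, Block a Statement with Statement children\n",
    |   "sealed trait Stmt\n",
    |   "case class Reg(name: String) extends Stmt\n",
    |   "case class Block(stmts: Seq[Stmt]) extends Stmt\n",
    |   "\n",
    |   "// Pre-order: match on the node first, then recurse into its children\n",
    |   "def collectRegs(regNames: mutable.HashSet[String])(s: Stmt): Stmt = s match {\n",
    |   "  case r: Reg =>\n",
    |   "    regNames += r.name\n",
    |   "    r\n",
    |   "  case Block(stmts) => Block(stmts.map(collectRegs(regNames)))\n",
    |   "}\n",
    |   "\n",
    |   "val names = mutable.HashSet[String]()\n",
    |   "collectRegs(names)(Block(Seq(Reg(\"r0\"), Block(Seq(Reg(\"r1\"))))))\n",
    |   "println(names) // contains \"r0\" and \"r1\"\n",
    |   "```"
    |  ]
    | },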
147 | {
148 | "cell_type": "code",
149 | "execution_count": null,
150 | "metadata": {
151 | "collapsed": true
152 | },
153 | "outputs": [],
154 | "source": []
155 | }
156 | ],
157 | "metadata": {
158 | "kernelspec": {
159 | "display_name": "Scala",
160 | "language": "scala",
161 | "name": "scala"
162 | },
163 | "language_info": {
164 | "codemirror_mode": "text/x-scala",
165 | "file_extension": ".scala",
166 | "mimetype": "text/x-scala",
167 | "name": "scala211",
168 | "nbconvert_exporter": "script",
169 | "pygments_lexer": "scala",
170 | "version": "2.11.11"
171 | }
172 | },
173 | "nbformat": 4,
174 | "nbformat_minor": 2
175 | }
176 |
--------------------------------------------------------------------------------
/4.3_firrtl_common_idioms.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Module 4.3: Common Pass Idioms\n",
15 | "\n",
16 | "**Prev: [FIRRTL AST Traversal](4.2_firrtl_ast_traversal.ipynb)** \n",
17 | "**Next: [A FIRRTL Transform Example](4.4_firrtl_add_ops_per_module.ipynb)**\n",
18 | "\n",
19 | "### Adding statements\n",
20 | "Suppose we want to write a pass that splits nested DoPrim expressions, thus transforming this:\n",
21 | "```\n",
22 | "circuit Top:\n",
23 | " module Top :\n",
24 | " input x: UInt<3>\n",
25 | " input y: UInt<3>\n",
26 | " input z: UInt<3>\n",
27 | " output o: UInt<3>\n",
28 | " o <= add(x, add(y, z))\n",
29 | "```\n",
30 | "into this:\n",
31 | "```\n",
32 | "circuit Top:\n",
33 | " module Top :\n",
34 | " input x: UInt<3>\n",
35 | " input y: UInt<3>\n",
36 | " input z: UInt<3>\n",
37 | " output o: UInt<3>\n",
38 | " node GEN_1 = add(y, z)\n",
39 | " o <= add(x, GEN_1)\n",
40 | "```\n",
41 | "\n",
42 | "We first need to traverse the AST, visiting every Statement and Expression. Then, when we see a DoPrim, we add a new DefNode to the module's body and insert a reference to that DefNode in place of the DoPrim. The code below implements this (and preserves the Info token). Note that `Namespace` is a utility located in [Namespace.scala](https://github.com/ucb-bar/firrtl/blob/master/src/main/scala/firrtl/Namespace.scala).\n",
43 | "\n",
44 | "```scala\n",
45 | "object Splitter extends Pass {\n",
46 | " def name = \"Splitter!\"\n",
47 | " /** Run splitM on every module **/\n",
48 | " def run(c: Circuit): Circuit = c.copy(modules = c.modules map(splitM(_)))\n",
49 | "\n",
50 | " /** Run splitS on the body of every module **/\n",
51 | " def splitM(m: DefModule): DefModule = m map splitS(Namespace(m))\n",
52 | "\n",
53 | " /** Run splitE on all children Expressions.\n",
54 | " * If stmts contain extra statements, return a Block containing them and \n",
55 | " * the new statement; otherwise, return the new statement. */\n",
56 | " def splitS(namespace: Namespace)(s: Statement): Statement = {\n",
57 | " val block = mutable.ArrayBuffer[Statement]()\n",
58 | " s match {\n",
59 | " case s: HasInfo => \n",
60 | " val newStmt = s map splitE(block, namespace, s.info)\n",
61 | " block.length match {\n",
62 | " case 0 => newStmt\n",
63 | " case _ => Block(block.toSeq :+ newStmt)\n",
64 | " }\n",
65 | " case s => s map splitS(namespace)\n",
66 | "    }\n",
   | "  }\n",
67 | "\n",
68 | " /** Run splitE on all children expressions.\n",
69 | " * If e is a DoPrim, add a new DefNode to block and return reference to\n",
70 | " * the DefNode; otherwise return e.*/\n",
71 | " def splitE(block: mutable.ArrayBuffer[Statement], namespace: Namespace, \n",
72 | " info: Info)(e: Expression): Expression = e map splitE(block, namespace, info) match {\n",
73 | " case e: DoPrim =>\n",
74 | " val newName = namespace.newTemp\n",
75 | " block += DefNode(info, newName, e)\n",
76 | "      Reference(newName, e.tpe)\n",
77 | " case _ => e\n",
78 | " }\n",
79 | "}\n",
80 | "```\n",
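    | "Since `Splitter` defines `run(c: Circuit): Circuit`, it could be applied directly to a parsed circuit, for example (a sketch reusing the parsing idiom from Module 4.1; `firrtlSerialization` is assumed to hold serialized FIRRTL):\n",
    | "\n",
    | "```scala\n",
    | "val parsed = firrtl.Parser.parse(firrtlSerialization.split(\"\\n\").toIterator, firrtl.Parser.GenInfo(\"file.fir\"))\n",
    | "println(Splitter.run(parsed).serialize)\n",
    | "```\n",
    | "\n",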
81 | "### Deleting statements\n",
82 | "Suppose we want to write a pass that inlines all DefNodes whose value is a literal, thus transforming this:\n",
83 | "```\n",
84 | "circuit Top:\n",
85 | " module Top :\n",
86 | " input x: UInt<3>\n",
87 | " output o: UInt<4>\n",
88 | " node y = UInt(1)\n",
89 | " o <= add(x, y)\n",
90 | "```\n",
91 | "into this:\n",
92 | "```\n",
93 | "circuit Top:\n",
94 | " module Top :\n",
95 | " input x: UInt<3>\n",
96 | "    output o: UInt<4>\n",
97 | " o <= add(x, UInt(1))\n",
98 | "```\n",
99 | "\n",
100 | "We first need to traverse the AST, visiting every Statement and Expression. When we see a DefNode whose value is a Literal, we store it in a hashmap and return EmptyStmt (thus deleting that DefNode). Then, whenever we see a reference to the deleted DefNode, we insert the corresponding Literal in its place.\n",
101 | "\n",
102 | "```scala\n",
103 | "object Inliner extends Pass {\n",
104 | " def name = \"Inliner!\"\n",
105 | " /** Run inlineM on every module **/\n",
106 | " def run(c: Circuit): Circuit = c.copy(modules = c.modules map(inlineM(_)))\n",
107 | "\n",
108 | " /** Run inlineS on the body of every module **/\n",
109 | " def inlineM(m: DefModule): DefModule = m map inlineS(mutable.HashMap[String, Expression]())\n",
110 | "\n",
111 | " /** Run inlineE on all children Expressions, and then run inlineS on children statements.\n",
112 | " * If statement is a DefNode containing a literal, update values and\n",
113 | " * return EmptyStmt; otherwise return statement. */\n",
114 | " def inlineS(values: mutable.HashMap[String, Expression])(s: Statement): Statement =\n",
115 | " s map inlineE(values) map inlineS(values) match {\n",
116 | " case d: DefNode => d.value match {\n",
117 | " case l: Literal =>\n",
118 | " values(d.name) = l\n",
119 | " EmptyStmt\n",
120 | " case _ => d\n",
121 | " }\n",
122 | " case o => o \n",
123 | " }\n",
124 | "\n",
125 | " /** If e is a reference whose name is contained in values, \n",
126 | " * return values(e.name); otherwise run inlineE on all \n",
127 | " * children expressions.*/\n",
128 | " def inlineE(values: mutable.HashMap[String, Expression])(e: Expression): Expression = e match {\n",
129 | "    case e: Reference if values.contains(e.name) => values(e.name)\n",
130 | " case _ => e map inlineE(values)\n",
131 | " }\n",
132 | "}\n",
133 | "```\n",
134 | "\n",
135 | "### Add a Primop\n",
136 | "Would this be useful? Let [@azidar](https://github.com/azidar) know by submitting an issue to [the firrtl repo](https://github.com/freechipsproject/firrtl)!\n",
137 | "\n",
138 | "### Swap a statement\n",
139 | "Would this be useful? Let [@azidar](https://github.com/azidar) know by submitting an issue to [the firrtl repo](https://github.com/freechipsproject/firrtl)!\n"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "metadata": {
146 | "collapsed": true
147 | },
148 | "outputs": [],
149 | "source": []
150 | }
151 | ],
152 | "metadata": {
153 | "kernelspec": {
154 | "display_name": "Scala",
155 | "language": "scala",
156 | "name": "scala"
157 | },
158 | "language_info": {
159 | "codemirror_mode": "text/x-scala",
160 | "file_extension": ".scala",
161 | "mimetype": "text/x-scala",
162 | "name": "scala211",
163 | "nbconvert_exporter": "script",
164 | "pygments_lexer": "scala",
165 | "version": "2.11.11"
166 | }
167 | },
168 | "nbformat": 4,
169 | "nbformat_minor": 2
170 | }
171 |
--------------------------------------------------------------------------------
/4.4_firrtl_add_ops_per_module.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | ""
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Module 4.4: A FIRRTL Transform Example\n",
15 | "\n",
16 | "**Prev: [Common Pass Idioms](4.3_firrtl_common_idioms.ipynb)** \n",
17 | "\n",
18 | "This AnalyzeCircuit Transform walks a `firrtl.ir.Circuit`, and records the number of add ops it finds, per module.\n",
19 | "\n",
20 | "## Setup\n",
21 | "\n",
22 | "Please run the following:"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": null,
28 | "metadata": {
29 | "collapsed": true
30 | },
31 | "outputs": [],
32 | "source": [
33 | "val path = System.getProperty(\"user.dir\") + \"/source/load-ivy.sc\"\n",
34 | "interp.load.module(ammonite.ops.Path(java.nio.file.FileSystems.getDefault().getPath(path)))"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": null,
40 | "metadata": {
41 | "collapsed": true
42 | },
43 | "outputs": [],
44 | "source": [
45 | "// Compiler Infrastructure\n",
46 | "\n",
47 | "// Firrtl IR classes\n",
48 | "\n",
49 | "// Map functions\n",
50 | "\n",
51 | "// Scala's mutable collections\n",
52 | "import scala.collection.mutable\n",
53 | "\n"
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "## Counting Adders Per Module\n",
61 | "\n",
62 | "As described earlier, a Firrtl circuit is represented as a tree:\n",
63 | " - A Firrtl `Circuit` contains a sequence of `DefModule`s.\n",
64 | " - A `DefModule` contains a sequence of `Port`s, and maybe a `Statement`.\n",
65 | " - A `Statement` can contain other `Statement`s, or `Expression`s.\n",
66 | " - An `Expression` can contain other `Expression`s.\n",
67 | "\n",
68 | "To visit all Firrtl IR nodes in a circuit, we write functions that recursively walk down this tree. To record statistics, we will pass along a `Ledger` class and use it when we come across an add op:"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {
75 | "collapsed": true
76 | },
77 | "outputs": [],
78 | "source": [
79 | "class Ledger {\n",
80 | " import firrtl.Utils\n",
81 | " private var moduleName: Option[String] = None\n",
82 | " private val modules = mutable.Set[String]()\n",
83 | " private val moduleAddMap = mutable.Map[String, Int]()\n",
84 | " def foundAdd(): Unit = moduleName match {\n",
85 | " case None => sys.error(\"Module name not defined in Ledger!\")\n",
86 | " case Some(name) => moduleAddMap(name) = moduleAddMap.getOrElse(name, 0) + 1\n",
87 | " }\n",
88 | " def getModuleName: String = moduleName match {\n",
89 | " case None => Utils.error(\"Module name not defined in Ledger!\")\n",
90 | " case Some(name) => name\n",
91 | " }\n",
92 | " def setModuleName(myName: String): Unit = {\n",
93 | " modules += myName\n",
94 | " moduleName = Some(myName)\n",
95 | " }\n",
96 | " def serialize: String = {\n",
97 | " modules map { myName =>\n",
98 | " s\"$myName => ${moduleAddMap.getOrElse(myName, 0)} add ops!\"\n",
99 | " } mkString \"\\n\"\n",
100 | " }\n",
101 | "}"
102 | ]
103 | },
104 | {
105 | "cell_type": "markdown",
106 | "metadata": {},
107 | "source": [
108 | "Now, let's define a FIRRTL Transform that walks the circuit and updates our `Ledger` whenever it comes across an adder (`DoPrim` with op argument `Add`). Don't worry about `inputForm` or `outputForm` for now.\n",
109 | "\n",
110 | "Take some time to understand how `walkModule`, `walkStatement`, and `walkExpression` enable traversing all `DefModule`, `Statement`, and `Expression` nodes in the FIRRTL AST.\n",
111 | "\n",
112 | "Questions to answer:\n",
113 | " - **Why doesn't walkModule call walkExpression?**\n",
114 | " - **Why does walkExpression do a post-order traversal?**\n",
115 | " - **Can you modify walkExpression to do a pre-order traversal of Expressions?**"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "metadata": {
122 | "collapsed": true
123 | },
124 | "outputs": [],
125 | "source": [
126 | "class AnalyzeCircuit extends firrtl.Transform {\n",
127 | " import firrtl._\n",
128 | " import firrtl.ir._\n",
129 | " import firrtl.Mappers._\n",
130 | " import firrtl.Parser._\n",
131 | " import firrtl.annotations._\n",
132 | " import firrtl.PrimOps._\n",
133 | " \n",
134 | " // Requires the [[Circuit]] form to be \"low\"\n",
135 | " def inputForm = LowForm\n",
136 | " // Indicates the output [[Circuit]] form to be \"low\"\n",
137 | " def outputForm = LowForm\n",
138 | "\n",
139 | " // Called by [[Compiler]] to run your pass. [[CircuitState]] contains\n",
140 | " // the circuit and its form, as well as other related data.\n",
141 | " def execute(state: CircuitState): CircuitState = {\n",
142 | " val ledger = new Ledger()\n",
143 | " val circuit = state.circuit\n",
144 | "\n",
145 | " // Execute the function walkModule(ledger) on every [[DefModule]] in\n",
146 | " // circuit, returning a new [[Circuit]] with new [[Seq]] of [[DefModule]].\n",
147 | " // - \"higher order functions\" - using a function as an object\n",
148 | " // - \"function currying\" - partial argument notation\n",
149 | " // - \"infix notation\" - fancy function calling syntax\n",
150 | " // - \"map\" - classic functional programming concept\n",
151 | " // - discard the returned new [[Circuit]] because circuit is unmodified\n",
152 | " circuit map walkModule(ledger)\n",
153 | "\n",
154 | " // Print our ledger\n",
155 | " println(ledger.serialize)\n",
156 | "\n",
157 | " // Return an unchanged [[CircuitState]]\n",
158 | " state\n",
159 | " }\n",
160 | "\n",
161 | " // Deeply visits every [[Statement]] in m.\n",
162 | " def walkModule(ledger: Ledger)(m: DefModule): DefModule = {\n",
163 | " // Set ledger to current module name\n",
164 | " ledger.setModuleName(m.name)\n",
165 | "\n",
166 | " // Execute the function walkStatement(ledger) on every [[Statement]] in m.\n",
167 | "    // - return the new [[DefModule]] (in this case, it's identical to m)\n",
168 | " // - if m does not contain [[Statement]], map returns m.\n",
169 | " m map walkStatement(ledger)\n",
170 | " }\n",
171 | "\n",
172 | " // Deeply visits every [[Statement]] and [[Expression]] in s.\n",
173 | " def walkStatement(ledger: Ledger)(s: Statement): Statement = {\n",
174 | "\n",
175 | " // Execute the function walkExpression(ledger) on every [[Expression]] in s.\n",
176 | "    // - discard the new [[Statement]] (in this case, it's identical to s)\n",
177 | " // - if s does not contain [[Expression]], map returns s.\n",
178 | " s map walkExpression(ledger)\n",
179 | "\n",
180 | " // Execute the function walkStatement(ledger) on every [[Statement]] in s.\n",
181 | "    // - return the new [[Statement]] (in this case, it's identical to s)\n",
182 | " // - if s does not contain [[Statement]], map returns s.\n",
183 | " s map walkStatement(ledger)\n",
184 | " }\n",
185 | "\n",
186 | " // Deeply visits every [[Expression]] in e.\n",
187 | " // - \"post-order traversal\" - handle e's children [[Expression]] before e\n",
188 | " def walkExpression(ledger: Ledger)(e: Expression): Expression = {\n",
189 | "\n",
190 | " // Execute the function walkExpression(ledger) on every [[Expression]] in e.\n",
191 | "    // - return the new [[Expression]] (in this case, it's identical to e)\n",
192 | "    // - if e does not contain [[Expression]], map returns e.\n",
193 | " val visited = e map walkExpression(ledger)\n",
194 | "\n",
195 | " visited match {\n",
196 | " // If e is an adder, increment our ledger and return e.\n",
197 | " case DoPrim(Add, _, _, _) =>\n",
198 | " ledger.foundAdd\n",
199 | " e\n",
200 | " // If e is not an adder, return e.\n",
201 | " case notadd => notadd\n",
202 | " }\n",
203 | " }\n",
204 | "}"
205 | ]
206 | },
207 | {
208 | "cell_type": "markdown",
209 | "metadata": {},
210 | "source": [
211 | "## Running our Transform\n",
212 | "\n",
213 | "Now that we've defined it, let's run it on a Chisel design! First, let's define a Chisel module."
214 | ]
215 | },
216 | {
217 | "cell_type": "code",
218 | "execution_count": null,
219 | "metadata": {
220 | "collapsed": true
221 | },
222 | "outputs": [],
223 | "source": [
224 | "// Chisel stuff\n",
225 | "import chisel3._\n",
226 | "import chisel3.Input // Technicality: avoid a conflict with _root_.almond.input.Input\n",
227 | "import chisel3.util._"
228 | ]
229 | },
230 | {
231 | "cell_type": "code",
232 | "execution_count": null,
233 | "metadata": {},
234 | "outputs": [],
235 | "source": [
236 | "class AddMe(val nInputs: Int, val width: Int) extends Module {\n",
237 | " val io = IO(new Bundle {\n",
238 | " val in = Input(Vec(nInputs, UInt(width.W)))\n",
239 | " val out = Output(UInt(width.W))\n",
240 | " })\n",
241 | " io.out := io.in.reduce(_ +& _)\n",
242 | "}"
243 | ]
244 | },
245 | {
246 | "cell_type": "markdown",
247 | "metadata": {},
248 | "source": [
249 | "Next, let's elaborate it and emit the serialized FIRRTL."
250 | ]
251 | },
252 | {
253 | "cell_type": "code",
254 | "execution_count": null,
255 | "metadata": {
256 | "collapsed": true
257 | },
258 | "outputs": [],
259 | "source": [
260 | "val firrtlSerialization = chisel3.Driver.emit(() => new AddMe(8, 4))"
261 | ]
262 | },
263 | {
264 | "cell_type": "markdown",
265 | "metadata": {},
266 | "source": [
267 | "Finally, let's compile our FIRRTL into Verilog, including our custom transform in the compilation. Note that it prints out the number of add ops it found!\n",
268 | "\n",
269 | "**Note** (January 2021): The following line may be broken due to a [bug](https://github.com/freechipsproject/chisel-bootcamp/issues/129)."
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": null,
275 | "metadata": {
276 | "collapsed": true
277 | },
278 | "outputs": [],
279 | "source": [
280 | "val verilog = compileFIRRTL(firrtlSerialization, new firrtl.VerilogCompiler(), Seq(new AnalyzeCircuit()))"
281 | ]
282 | },
283 | {
284 | "cell_type": "markdown",
285 | "metadata": {},
286 | "source": [
287 | "The `compileFIRRTL` function is defined only in this tutorial - in a future section, we will describe the process of inserting custom transforms into the FIRRTL compilation.\n",
288 | "\n",
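    | "As a rough idea of what such a helper might do, here is a sketch against the older firrtl `Compiler` API (an assumption for illustration, not the actual definition used by this notebook):\n",
    | "\n",
    | "```scala\n",
    | "def compileFIRRTLSketch(firrtlSerialization: String,\n",
    | "                        compiler: firrtl.Compiler,\n",
    | "                        customTransforms: Seq[firrtl.Transform]): String = {\n",
    | "  val circuit = firrtl.Parser.parse(firrtlSerialization.split(\"\\n\").toIterator,\n",
    | "                                    firrtl.Parser.GenInfo(\"file.fir\"))\n",
    | "  val state = firrtl.CircuitState(circuit, firrtl.ChirrtlForm)\n",
    | "  // compileAndEmit runs the compiler plus any customTransforms and attaches the emitted result\n",
    | "  compiler.compileAndEmit(state, customTransforms).getEmittedCircuit.value\n",
    | "}\n",
    | "```\n",
    | "\n",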
289 | "That's it for this section!"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": null,
295 | "metadata": {
296 | "collapsed": true
297 | },
298 | "outputs": [],
299 | "source": []
300 | }
301 | ],
302 | "metadata": {
303 | "kernelspec": {
304 | "display_name": "Scala",
305 | "language": "scala",
306 | "name": "scala"
307 | },
308 | "language_info": {
309 | "codemirror_mode": "text/x-scala",
310 | "file_extension": ".scala",
311 | "mimetype": "text/x-scala",
312 | "name": "scala211",
313 | "nbconvert_exporter": "script",
314 | "pygments_lexer": "scala",
315 | "version": "2.11.11"
316 | }
317 | },
318 | "nbformat": 4,
319 | "nbformat_minor": 2
320 | }
321 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # First stage : setup the system and environment
2 | FROM ubuntu:20.04 as base
3 |
4 | RUN \
5 | apt-get update && \
6 | DEBIAN_FRONTEND=noninteractive apt-get install -y \
7 | ca-certificates-java \
8 | curl \
9 | graphviz \
10 | openjdk-8-jre-headless \
11 | python3-distutils \
12 | && \
13 | rm -rf /var/lib/apt/lists/*
14 |
15 | RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
16 | RUN python3 get-pip.py
17 | RUN pip3 install notebook
18 |
19 | RUN useradd -ms /bin/bash bootcamp
20 |
21 | ENV SCALA_VERSION=2.12.10
22 | ENV ALMOND_VERSION=0.9.1
23 |
24 | ENV COURSIER_CACHE=/coursier_cache
25 |
26 | ADD . /chisel-bootcamp/
27 | WORKDIR /chisel-bootcamp
28 |
29 | ENV JUPYTER_CONFIG_DIR=/jupyter/config
30 | ENV JUPYTER_DATA_DIR=/jupyter/data
31 |
32 | RUN mkdir -p $JUPYTER_CONFIG_DIR/custom
33 | RUN cp source/custom.js $JUPYTER_CONFIG_DIR/custom/
34 |
35 | # Second stage - download Scala requirements and the Scala kernel
36 | FROM base as intermediate-builder
37 |
38 | RUN mkdir /coursier_cache
39 |
40 | RUN \
41 | curl -L -o coursier https://git.io/coursier-cli && \
42 | chmod +x coursier && \
43 | ./coursier \
44 | bootstrap \
45 | -r jitpack \
46 | sh.almond:scala-kernel_$SCALA_VERSION:$ALMOND_VERSION \
47 | --sources \
48 | --default=true \
49 | -o almond && \
50 | ./almond --install --global && \
51 |   \rm -rf almond coursier /root/.cache/coursier
52 |
53 | # Execute a notebook to ensure Chisel is downloaded into the image for offline work
54 | RUN jupyter nbconvert --to notebook --output=/tmp/0_demo --execute 0_demo.ipynb
55 |
56 | # Last stage
57 | FROM base as final
58 |
59 | # copy the Scala requirements and kernel into the image
60 | COPY --from=intermediate-builder /coursier_cache/ /coursier_cache/
61 | COPY --from=intermediate-builder /usr/local/share/jupyter/kernels/scala/ /usr/local/share/jupyter/kernels/scala/
62 |
63 | RUN chown -R bootcamp:bootcamp /chisel-bootcamp
64 |
65 | USER bootcamp
66 | WORKDIR /chisel-bootcamp
67 |
68 | EXPOSE 8888
69 | CMD jupyter notebook --no-browser --ip 0.0.0.0 --port 8888
70 |
--------------------------------------------------------------------------------
/Install.md:
--------------------------------------------------------------------------------
1 | ## Local Setup Instructions
2 |
3 | If you want to run the bootcamp locally, run the following instructions below for your particular situation.
4 | Note that we include a custom javascript file for Jupyter, so if you already have Jupyter installed, you still need to install the custom.js file.
5 |
6 | Note: Make sure you are using **Java 8** (NOT Java 9) and have the JDK8 installed. Coursier/jupyter-scala does not appear to be compatible with Java 9 yet as of January 2018.
7 |
8 | If you do have multiple version of Java, make sure to select Java 8 (1.8) before running `jupyter notebook`:
9 |
10 | * On Windows: https://gist.github.com/rwunsch/d157d5fe09e9f7cdc858cec58c8462d6
11 | * On Mac OS: https://stackoverflow.com/questions/21964709/how-to-set-or-change-the-default-java-jdk-version-on-os-x
12 |
13 | ### Local Installation using Docker - Linux/Mac/Windows
14 |
15 | Make sure you have Docker [installed](https://docs.docker.com/get-docker/) on your system.
16 |
17 | Run the following command:
18 |
19 | ```
20 | docker run -it --rm -p 8888:8888 ucbbar/chisel-bootcamp
21 | ```
22 |
23 | This will download a Docker image for the bootcamp and run it. The output will end in the following message:
24 |
25 | ```
26 | To access the notebook, open this file in a browser:
27 | file:///home/bootcamp/.local/share/jupyter/runtime/nbserver-6-open.html
28 | Or copy and paste one of these URLs:
29 | http://79b8df8411f2:8888/?token=LONG_RANDOM_TOKEN
30 | or http://127.0.0.1:8888/?token=LONG_RANDOM_TOKEN
31 | ```
32 |
33 | Copy the last link, the one starting with http://127.0.0.1:8888, into your browser and follow the Bootcamp.
34 |
35 | ### Local Installation - Mac/Linux
36 |
37 | This bootcamp uses Jupyter notebooks.
38 | Jupyter notebooks allow you to interactively run code in your browser.
39 | It supports multiple programming languages.
40 | For this bootcamp, we'll install jupyter first and then the Scala-specific jupyter backend (now called almond).
41 |
42 |
43 | #### Jupyter
44 | First install Jupyter.
45 |
46 | Dependencies: openssh-client, openjdk-8-jre, openjdk-8-jdk (-headless OK for both), ca-certificates-java
47 |
48 | First, use pip3 to install jupyter (or pip for python 2): http://jupyter.org/install.html
49 | ```
50 | pip3 install --upgrade pip
51 | pip3 install jupyter --ignore-installed
52 | ```
53 |
54 | If pip3 isn't working out of the box (possibly because your Python3 version is out of date), you can try `python3 -m pip` in lieu of `pip3`.
55 |
56 | (To reinstall jupyter later for whatever reason, you can use `--no-deps` to avoid re-installing all the dependencies.)
57 |
58 | You may want to try out Jupyter lab, the newer interface developed by Project Jupyter.
59 | It is especially useful if you want to be able to run a terminal emulator in your browser.
60 | It can be installed with `pip3`:
61 | ```
62 | pip3 install jupyterlab
63 | ```
64 |
65 | #### Jupyter Backend for Scala
66 |
67 | If you experience errors or issues with this section, try running `rm -rf ~/.local/share/jupyter/kernels/scala/` first.
68 |
69 | Next, download coursier and use it to install almond (see [here](https://almond.sh/docs/quick-start-install) for the source for these instructions):
70 | ```
71 | curl -L -o coursier https://git.io/coursier-cli && chmod +x coursier
72 | SCALA_VERSION=2.12.10 ALMOND_VERSION=0.9.1
73 | ./coursier bootstrap -r jitpack \
74 | -i user -I user:sh.almond:scala-kernel-api_$SCALA_VERSION:$ALMOND_VERSION \
75 | sh.almond:scala-kernel_$SCALA_VERSION:$ALMOND_VERSION \
76 | --sources --default=true \
77 | -o almond
78 | ./almond --install
79 | ```
80 |
81 | You can delete `coursier` and `almond` files if you so desire.
82 |
83 | #### Visualizations
84 |
85 | [Graphviz](https://graphviz.org/download/) is required to show visualizations of Chisel modules, such as in the demo page. However, visualizations are optional as the other Chisel and Scala features will work without it.
86 |
87 | #### Install bootcamp
88 | Now clone the bootcamp repo and install the customization script.
89 | If you already have a custom.js file, append this script to it.
90 |
91 | ```
92 | git clone https://github.com/freechipsproject/chisel-bootcamp.git
93 | cd chisel-bootcamp
94 | mkdir -p ~/.jupyter/custom
95 | cp source/custom.js ~/.jupyter/custom/custom.js
96 | ```
97 |
98 | And to start the bootcamp on your local machine:
99 | ```
100 | jupyter notebook
101 | ```
102 |
103 | If you installed Jupyter Lab, run `jupyter-lab` instead.
104 |
105 |
106 | ### Local Installation - Windows
107 |
108 | These notes describe, in general, the way to install the Generator Bootcamp under Windows 10.
109 | Many different Windows configurations may be encountered and some changes may be required.
110 | Please let us know if there are things that are out of date or should otherwise be covered here.
111 |
112 | >There are several times where you may want to launch a Command (shell) window.
113 | I have discovered that launching the command window in Administrator Mode is helpful.
114 | To do that from the bottom left Launcher, find or search for 'CMD'; when selecting it from
115 | the menu, right-click and choose "Launch in Administrator Mode".
116 | Find more details on this [here](http://www.thewindowsclub.com/how-to-run-command-prompt-as-an-administrator)
117 | and other places.
118 | It is also best to relaunch any command windows between steps in the process (e.g. after installing Java)
119 | so that any newly installed software will be recognized.
120 |
121 | #### Be sure Java is installed (ideally Java 8).
122 | If you type `java` into a command prompt and it says command not found, you need to install
123 | [Java](https://adoptopenjdk.net/installation.html).
124 |
125 | #### Install Jupyter
126 | Jupyter recommends using the Anaconda distribution, here is the
127 | [Windows download](https://www.anaconda.com/download/#windows).
128 |
129 | Near the end of the Jupyter installation is a question about whether to add Jupyter to the PATH.
130 | Windows does not recommend this, but I do. It will make it easier to run using the command prompt.
131 |
132 | If you did not elect to add Jupyter to the PATH, start a prompt using the
133 | "Anaconda Prompt (Anaconda3)" shortcut from the Start Menu.
134 |
135 | #### Install Scala components.
136 |
137 | The simplest way seems to be to download Coursier from [here](https://github.com/coursier/coursier/releases/download/v2.0.0-RC6-24/coursier).
138 |
139 | Go to the download folder where the `coursier` file is, and run:
140 |
141 | ```
142 | java -noverify -jar coursier launch --fork almond:0.10.6 --scala 2.12.8 -- --install
143 | ```
144 |
145 | #### Visualizations
146 |
147 | [Graphviz](https://graphviz.org/download/) is required to show visualizations of Chisel modules, such as in the demo page. However, visualizations are optional as the other Chisel and Scala features will work without it.
148 |
149 | #### Install the chisel-bootcamp repo.
150 | Download the [chisel-bootcamp](https://github.com/freechipsproject/chisel-bootcamp) as a zip file (or use a Windows git client)
151 | and unpack it in a directory you have access to.
152 | Ideally, you should put it in a path that has no spaces.
153 |
154 | Install the customization script by moving `chisel-bootcamp/source/custom.js` to
155 | `%HOMEDRIVE%%HOMEPATH%\.jupyter\custom\custom.js`.
156 | If you already have a custom.js file, append this script to it.
157 |
158 | #### Launch the Jupyter and the bootcamp
159 | In the directory containing the unpacked chisel-bootcamp repo, from a new command window type:
160 | ```bash
161 | jupyter notebook
162 | ```
163 | This should start the bootcamp server and open the top-level bootcamp menu in your default browser. If it does not,
164 | look for something like the following in the command window and copy and paste the link you see into
165 | a browser window.
166 | ```bash
167 | Copy/paste this URL into your browser when you connect for the first time,
168 | to login with a token:
169 | http://localhost:8888/?token=9c503729c379fcb3c7a17087f05462c733c1733eb8b31d07
170 | ```
171 |
172 | ##### Proxy usage
173 | If you require a proxy, try uncommenting and changing the relevant lines at the start of `source/load-ivy.sc`.
174 |
175 | Good Luck!
176 |
177 | ### Cadence AWS Setup
178 |
179 | If you don't know what Cadence AWS is, or don't have access to it, skip this section.
180 |
181 | Navigate to your working directory, which is probably your home directory.
182 |
183 | ```
184 | cd ~
185 | ```
186 |
187 | Then run the following commands.
188 | The default shell is c-shell, but if you switch to bash, source `jupyter_sh` instead of `jupyter_csh`.
189 | ```
190 | source /craft/tools/jupyter/jupyter_csh
191 | ```
192 |
193 | The default browser, Konqueror, won't work with Jupyter.
194 | Launch Firefox in the background and set it as your default browser when it asks.
195 | ```
196 | /craft/cdns_sw_inst/firefox/45.3.0esr/firefox &
197 | ```
198 |
199 | Clone the repo and launch Jupyter.
200 | If it asks for a token, copy and paste the *to login with a token* URL seen in the terminal.
201 | Future launches should not ask for the token again for a while.
202 | ```
203 | git clone /craft/tools/chisel/generator-bootcamp.git
204 | cd generator-bootcamp
205 | jupyter notebook
206 | ```
207 |
208 | ### Cadence Chamber Setup
209 |
210 | If you don't know what the Cadence Chamber is, skip this section.
211 | Navigate to your work directory, likely `/projects/craft_flow/work//`.
212 | Then run the following commands.
213 | Note that `/proj/` is an alias to `/projects/`.
214 | If you are in bash, source `jupyter_sh` instead of `jupyter_csh`.
215 |
216 | ```
217 | source /proj/craft_flow/tools/jupyter/jupyter_csh
218 | git clone /proj/craft_flow/source/chisel/generator-bootcamp
219 | cd generator-bootcamp
220 | jupyter notebook
221 | ```
222 |
223 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | ------------------------------------------------------------------------
180 | Note:
181 | Individual files contain the following tag instead of the full license text.
182 |
183 | // SPDX-License-Identifier: Apache-2.0
184 |
185 |
--------------------------------------------------------------------------------
/OneDayAgenda.md:
--------------------------------------------------------------------------------
1 | # One Day : Chisel Bootcamp
2 | The goal of this bootcamp is to get the attendee up to speed with Scala
3 | and Chisel as quickly as possible. The format should be casual with a lot
4 | of questions and immediate feedback.
5 | There's a lot of content here, so it
6 | is very unlikely that we will get through everything. Instead, we will
7 | try to get some hands-on work in each section to get a taste of the concepts.
8 | It is a very good idea to follow up this bootcamp by fully going through every
9 | section.
10 |
11 | This will all go faster and smoother if you can prepare a development environment beforehand.
12 | Check out [The Chisel Bootcamp](https://github.com/freechipsproject/chisel-bootcamp) on [GitHub](https://github.com).
13 |
14 | ## Presenters
15 | -
16 | ## Schedule
17 |
18 | - 9:00 AM -- Quick Introduction to Chisel & the Bootcamp
19 | - Why Chisel.
20 | - How the bootcamp works
21 |
22 | - 9:15 AM -- Section 1: Introduction to Scala
23 | - Why Scala? Coming up to Speed. 5 min.
24 | - Hands on Section 1
25 |
26 |
27 | - 10:00 AM -- Section 2.1 Combinational Logic
28 | - Pep talk on building circuits with Chisel 5+ minutes
29 | - Hands on Section 2.1
30 |
31 | - 11:00 AM -- Sections 2.2, 2.3, and 2.4
32 | - After this hour, you can build anything ~3 minutes
33 | - Hands on Sections 2.2, 2.3, and 2.4
34 |
35 | - 12:00 PM -- Section 2.5
36 | - Putting it all together ~3 minutes
37 | - Hands on Section 2.5. Up to DSP Example
38 |
39 | - 12:30 PM -- **Lunch**
40 |
41 | - 1:30 PM -- Sections 3.1 and 3.2 and the Interlude
42 | - About generators. 5 min.
43 | - Hands on Sections 3.1 and 3.2 and the Interlude
44 |
45 | - 2:30 PM -- Sections 3.3 and 3.4
46 | - About functional programming. 5 min.
47 | - Hands on Sections 3.3 and 3.4
48 |
49 | - 3:30 PM -- Sections 3.5 and 3.6
50 | - About objects and types. 5 min.
51 | - Hands on Sections 3.5 and 3.6
52 |
53 | - 4:30 PM -- Further topics
54 | - Firrtl
55 | - Chisel template
56 | - sbt
57 | - IntelliJ
58 | - Thank you
59 |
60 | - 5:00 PM
61 | ## Done!
62 |
63 |
64 |
65 |
66 |
67 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | > :warning: Some features of the Jupyter Binder project have bit-rotted. Whilst the teachings are still valid and the majority of the exercises still work, you will encounter errors.
2 |
3 | [](https://mybinder.org/v2/gh/freechipsproject/chisel-bootcamp/master)
4 |
5 | **_For previous users of the bootcamp, we have upgraded from Scala 2.11 to Scala 2.12. If you are encountering errors, please follow the installation instructions to upgrade to 2.12._**
6 |
7 | # Chisel Bootcamp
8 |
9 | Elevate the level of your hardware design from instances to generators!
10 | This bootcamp teaches you Chisel, a Berkeley hardware construction DSL written in Scala.
11 | It teaches you Scala along the way, and it frames the learning of Chisel around the idea of *hardware generators*.
12 |
13 | ## What you'll learn
14 |
15 | - Why hardware designs are better expressed as generators, not instances
16 | - Basics and some advanced features of Scala, a modern programming language
17 | - Basics and some advanced features of Chisel, a hardware description language embedded in Scala
18 | - How to write unit tests for Chisel designs
19 | - Basic introduction to some useful features in Chisel libraries, including [dsptools](https://github.com/ucb-bar/dsptools/) and [rocketchip](https://github.com/freechipsproject/rocket-chip).
20 |
21 | ## Prerequisites
22 |
23 | - Familiarity with Verilog, VHDL, or at least some digital hardware design
24 | - Programming experience with a "high-level" language, be it Python, Java, C++, etc.
25 | - An earnest desire to learn
26 |
27 | ## Getting Started
28 |
29 | Try it out [HERE](https://mybinder.org/v2/gh/freechipsproject/chisel-bootcamp/master)! No local installation required!
30 |
31 | If you want to try it out locally, [look at installation instructions here](Install.md).
32 |
33 | ## Outline
34 |
35 | The bootcamp is divided into modules, which are further subdivided.
36 | This README serves as *Module 0*, an introduction and motivation to learning the material contained within.
37 | *Module 1* gives a quick introduction to Scala.
38 | It teaches you enough to start writing Chisel, but many more Scala concepts are taught along the way.
39 | Chisel is introduced in *Module 2*, starting with a hardware example and breaking it down.
40 | The rest of *Module 2* covers combinational and sequential logic, as well as software and hardware control flow.
41 | *Module 3* teaches you how to write hardware generators in Chisel that take advantage of Scala's high-level programming language features.
42 | By the end, you will be able to read and understand most of the [Chisel code base](https://github.com/freechipsproject/chisel3) and begin using [Rocket Chip](https://github.com/freechipsproject/rocket-chip).
43 | This tutorial *does not* yet cover SBT, build systems, backend flows for FPGA or ASIC processes, or analog circuits.
44 |
45 | ## Motivation
46 | All hardware description languages support writing single instances.
47 | However, writing instances is tedious.
48 | Why make the same mistakes writing a slightly modified version of something somebody else has likely already designed?
49 | Verilog supports limited parameterization, such as bitwidths and generate statements, but this only gets you so far.
50 | If we can't write a Verilog generator, we need to write a new instance, thus doubling our code size.
51 | As a better option, we should write one program that generates both hardware instances, which would reduce our code size and make tedious things easier.
52 | These programs are called generators.
53 |
54 | Ideally, we want our generators to be (1) composable, (2) powerful, and (3) enable fine-grained control over the generated design.
55 | Error checking is necessary to make sure a composition is legal; without it, debugging is difficult.
56 | This requires a generator language to understand the semantics of the design (to know what’s legal and what’s not).
57 | Also, the generator should not be overly verbose!
57 | We want the generator program to concisely express many different designs, without rewriting it with an if statement for each instance.
59 | Finally, it should be a zero-cost abstraction.
60 | Hardware design performance is very sensitive to small changes, and because of that, you need to be able to exactly specify the microarchitecture.
60 | Generators are very different from high-level synthesis (HLS).
62 |
63 | The benefits of Chisel are in how you use it, not in the language itself.
64 | If you decide to write instances instead of generators, you will see fewer advantages of Chisel over Verilog.
65 | But if you take the time to learn how to write generators, then the power of Chisel will become apparent and you will realize you can never go back to writing Verilog.
66 | Learning to write generators is difficult, but we hope this tutorial will pave the way for you to become a better hardware designer, programmer, and thinker!
67 |
68 | ## FAQ
69 |
70 | ### Kernel Crashes Upon Startup
71 |
72 | I get the following error upon launching a Scala notebook and Jupyter says that the kernel has crashed:
73 |
74 | ```
75 | Exception in thread "main" java.lang.RuntimeException: java.lang.NullPointerException
76 | at jupyter.kernel.server.ServerApp$.apply(ServerApp.scala:174)
77 | at jupyter.scala.JupyterScalaApp.delayedEndpoint$jupyter$scala$JupyterScalaApp$1(JupyterScala.scala:93)
78 | at jupyter.scala.JupyterScalaApp$delayedInit$body.apply(JupyterScala.scala:13)
79 | ...
80 |
81 | Caused by: java.lang.NullPointerException
82 | at ammonite.runtime.Classpath$.classpath(Classpath.scala:31)
83 | at ammonite.interp.Interpreter.init(Interpreter.scala:93)
84 | at ammonite.interp.Interpreter.processModule(Interpreter.scala:409)
85 | at ammonite.interp.Interpreter$$anonfun$10.apply(Interpreter.scala:151)
86 | at ammonite.interp.Interpreter$$anonfun$10.apply(Interpreter.scala:148)
87 | ...
88 | ```
89 |
90 | Make sure that you have **Java 8** selected for running Jupyter (see the [installation instructions](Install.md)).
91 |
92 | ## Contributors
93 | - Stevo Bailey ([stevo@berkeley.edu](mailto:stevo@berkeley.edu))
94 | - Adam Izraelevitz ([adamiz@berkeley.edu](mailto:azidar@berkeley.edu))
95 | - Richard Lin ([richard.lin@berkeley.edu](mailto:richard.lin@berkeley.edu))
96 | - Chick Markley ([chick@berkeley.edu](mailto:chick@berkeley.edu))
97 | - Paul Rigge ([rigge@berkeley.edu](mailto:rigge@berkeley.edu))
98 | - Edward Wang ([edwardw@berkeley.edu](mailto:edwardw@berkeley.edu))
99 |
--------------------------------------------------------------------------------
/binder/apt.txt:
--------------------------------------------------------------------------------
1 | openjdk-8-jre-headless
2 | graphviz
3 |
--------------------------------------------------------------------------------
/binder/postBuild:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | SCALA_VERSION=2.12.10 ALMOND_VERSION=0.9.1
4 |
5 | # Install coursier
6 | curl -Lo coursier https://git.io/coursier-cli
7 | chmod +x coursier
8 |
9 | # Install almond
10 | ./coursier bootstrap \
11 | -r jitpack \
12 | -i user -I user:sh.almond:scala-kernel-api_$SCALA_VERSION:$ALMOND_VERSION \
13 | sh.almond:scala-kernel_$SCALA_VERSION:$ALMOND_VERSION \
14 | --sources --default=true \
15 | -o almond
16 | ./almond --install
17 |
18 | # Install required Jupyter/JupyterLab extensions
19 | jupyter labextension install @jupyterlab/plotly-extension
20 |
21 | # Install custom Javascript for solutions
22 | mkdir -p ~/.jupyter/custom
23 | cp source/custom.js ~/.jupyter/custom/custom.js
24 |
--------------------------------------------------------------------------------
/images/Sorter4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/Sorter4.png
--------------------------------------------------------------------------------
/images/arbiter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/arbiter.png
--------------------------------------------------------------------------------
/images/chisel_1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/chisel_1024.png
--------------------------------------------------------------------------------
/images/circuit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/circuit.png
--------------------------------------------------------------------------------
/images/counter2.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
403 |
--------------------------------------------------------------------------------
/images/demo_fir_filter.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/images/fir.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/fir.jpg
--------------------------------------------------------------------------------
/images/fir_filter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/fir_filter.png
--------------------------------------------------------------------------------
/images/fsm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/fsm.png
--------------------------------------------------------------------------------
/images/integrator.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
68 |
--------------------------------------------------------------------------------
/images/playbutton.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freechipsproject/chisel-bootcamp/8b6f137013fc17ba1b7c77fcb82368461bae09e6/images/playbutton.png
--------------------------------------------------------------------------------
/images/shifter4.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
62 |
--------------------------------------------------------------------------------
/runtest.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import itertools
3 | import os
4 | import sys
5 | import subprocess
6 | import tempfile
7 |
8 | from typing import Any, Dict, List
9 |
10 | import nbformat
11 |
12 |
13 | def _notebook_run(path):
14 | """Execute a notebook via nbconvert and collect output.
15 | :returns (parsed nb object, execution errors)
16 | """
17 | dirname, __ = os.path.split(path)
18 | if len(dirname) > 0:
19 | os.chdir(dirname)
20 | with tempfile.NamedTemporaryFile(suffix=".ipynb", mode='w+') as fout:
21 | args = ["jupyter-nbconvert", "--to", "notebook", "--execute",
22 | "--allow-errors",
23 | "--ExecutePreprocessor.timeout=60",
24 | "--output", fout.name, path]
25 | subprocess.check_call(args, stderr=True)
26 |
27 | fout.seek(0)
28 | nb = nbformat.read(fout, nbformat.current_nbformat)
29 |
30 | errors = [output for cell in nb.cells if "outputs" in cell
31 | for output in cell["outputs"] \
32 | if output.output_type == "error"]
33 |
34 | return nb, errors
35 |
36 |
37 | def check_errors(file_name, expected: List[str], actual: List[Any]) -> bool:
38 | """Check for a mismatch between the errors that occurred at runtime
39 | and the expected errors defined below for each notebook.
40 | A failure produces huge output, but the relevant information is between the bars of '=' at the end.
41 | Look at what was expected and either fix the error or add the text of the error that occurred.
42 | :note Errors shown may contain hidden escape sequences (for colors etc.); be careful about this when pasting.
43 |
44 | :param file_name: name of file where errors occurred
45 | :param expected: errors that were expected
46 | :param actual: errors that occurred.
47 | :return: True if all expected errors were matched, False otherwise.
48 | """
49 | actual_tracebacks: List[str] = list(map(lambda x: str(x['traceback'][0][:100]), actual))
50 |
51 | return_value = True
52 | for i, (e, a) in enumerate(itertools.zip_longest(expected, actual_tracebacks, fillvalue="-- No Error --")):
53 | if e not in a:
54 | if return_value:
55 | print("=" * 100, file=sys.stderr)
56 | print(f"Errors detected in {file_name}", file=sys.stderr)
57 | print(f"No match for {i}-th expected error '{e}' got '{a[:100]}'", file=sys.stderr)
58 | return_value = False
59 | if not return_value:
60 | print("=" * 100, file=sys.stderr)
61 | return return_value
62 |
63 |
64 | notebooks: Dict[str, List[str]] = {
65 | # This is the list of notebooks to try and run, along with each page's expected errors.
66 |
67 | "1_intro_to_scala.ipynb": [],
68 | "2.1_first_module.ipynb": ["chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation"],
69 | "2.2_comb_logic.ipynb": ['Compilation Failed'] +
70 | ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'] +
71 | ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'] +
72 | ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'],
73 | "2.3_control_flow.ipynb": ['scala.NotImplementedError'] * 2 +
74 | ['Compilation Failed'] +
75 | ['scala.NotImplementedError'] +
76 | ['Compilation Failed'],
77 | "2.4_sequential_logic.ipynb": ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'] * 2,
78 | "2.5_exercise.ipynb": ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'] * 3 +
79 | ['Compilation Failed'],
80 | "2.6_chiseltest.ipynb": [],
81 | "3.1_parameters.ipynb": ['java.util.NoSuchElementException'],
82 | "3.2_collections.ipynb": [
83 | 'chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'],
84 | "3.2_interlude.ipynb": [],
85 | "3.3_higher-order_functions.ipynb": ['scala.NotImplementedError'] +
86 | ['java.lang.UnsupportedOperationException'] +
87 | ['scala.NotImplementedError'] * 2 +
88 | ['chisel3.internal.ChiselException: Exception thrown when elaborating ChiselGeneratorAnnotation'],
89 | "3.4_functional_programming.ipynb": ['scala.NotImplementedError'] +
90 | ['Compilation Failed'] +
91 | ['scala.NotImplementedError'] +
92 | ['Compilation Failed'],
93 | "3.5_object_oriented_programming.ipynb": ['Compilation Failed'],
94 | "3.6_types.ipynb": ['chisel3.internal.ChiselException: Connection between sink'] +
95 | ['Failed to elaborate Chisel circuit'] +
96 | ['expected ")"'] +
97 | ['scala.MatchError: ChiselExecutionFailure'] +
98 | ['Compilation Failed'] * 5,
99 | "4.1_firrtl_ast.ipynb": [],
100 | "4.2_firrtl_ast_traversal.ipynb": [],
101 | "4.3_firrtl_common_idioms.ipynb": [],
102 | "4.4_firrtl_add_ops_per_module.ipynb": ['FirrtlInternalException'], # bug 129
103 | }
104 |
105 | if __name__ == "__main__":
106 | notebooks_to_run: List[str] = []
107 | if len(sys.argv) > 1:
108 | if sys.argv[1] == "--help":
109 | print("Usage: {} [notebook_name.ipynb] [notebook_name_2.ipynb] [...]".format(sys.argv[0]))
110 | print("By default, check all notebooks if notebooks are not specified.")
111 | sys.exit(0)
112 | else:
113 | notebooks_to_run = sys.argv[1:]
114 | else:
115 | notebooks_to_run = sorted(notebooks) # all notebooks
116 | for n in notebooks_to_run:
117 | expected = notebooks[n]
118 | nb, errors = _notebook_run(n)
119 | assert check_errors(n, expected, errors)
120 |
--------------------------------------------------------------------------------
/source/custom.js:
--------------------------------------------------------------------------------
1 |
2 | // load accordion styling
3 | var styleAcc = document.createElement('style');
4 | styleAcc.innerHTML = `
5 | #container {
6 | margin: 0 auto;
7 | width: 100%;
8 | }
9 | #accordion input {
10 | display: none;
11 | }
12 | #accordion label {
13 | background: #eee;
14 | border-radius: .25em;
15 | cursor: pointer;
16 | display: block;
17 | margin-bottom: .125em;
18 | padding: .25em 1em;
19 | z-index: 20;
20 | }
21 | #accordion label:hover {
22 | background: #ccc;
23 | }
24 |
25 | #accordion input:checked + label {
26 | background: #ccc;
27 | border-bottom-right-radius: 0;
28 | border-bottom-left-radius: 0;
29 | color: white;
30 | margin-bottom: 0;
31 | }
32 | #accordion article {
33 | background: #f7f7f7;
34 | height:0px;
35 | overflow:hidden;
36 | z-index:10;
37 | }
38 | #accordion article p {
39 | padding: 1em;
40 | }
41 | #accordion input:checked article {
42 | }
43 | #accordion input:checked ~ article {
44 | border-bottom-left-radius: .25em;
45 | border-bottom-right-radius: .25em;
46 | height: auto;
47 | margin-bottom: .125em;
48 | }
49 | `;
50 | document.head.appendChild(styleAcc);
51 |
--------------------------------------------------------------------------------
/source/load-ivy.sc:
--------------------------------------------------------------------------------
1 | interp.repositories() ::: List(
2 | coursierapi.MavenRepository.of("https://oss.sonatype.org/content/repositories/snapshots")
3 | )
4 |
5 | @
6 |
7 | interp.configureCompiler(x => x.settings.source.value = scala.tools.nsc.settings.ScalaVersion("2.11.12"))
8 |
9 | // Uncomment and change to use proxy
10 | // System.setProperty("https.proxyHost", "proxy.example.com")
11 | // System.setProperty("https.proxyPort", "3128")
12 |
13 | import $ivy.`edu.berkeley.cs::chisel3:3.4.+`
14 | import $ivy.`edu.berkeley.cs::chisel-iotesters:1.5.+`
15 | import $ivy.`edu.berkeley.cs::chiseltest:0.3.+`
16 | import $ivy.`edu.berkeley.cs::dsptools:1.4.+`
17 | import $ivy.`org.scalanlp::breeze:0.13.2`
18 | import $ivy.`edu.berkeley.cs::rocket-dsptools:1.2.0`
19 | import $ivy.`edu.berkeley.cs::firrtl-diagrammer:1.3.+`
20 |
21 | import $ivy.`org.scalatest::scalatest:3.2.2`
22 |
23 | // Convenience function to invoke Chisel and grab emitted Verilog.
24 | def getVerilog(dut: => chisel3.core.UserModule): String = {
25 | import firrtl._
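// Note: only the success path is matched below; a failed elaboration or FIRRTL compile surfaces as a scala.MatchError.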
26 | return chisel3.Driver.execute(Array[String](), {() => dut}) match {
27 | case s:chisel3.ChiselExecutionSuccess => s.firrtlResultOption match {
28 | case Some(f:FirrtlExecutionSuccess) => f.emitted
29 | }
30 | }
31 | }
32 |
33 | // Convenience function to invoke Chisel and grab emitted FIRRTL.
34 | def getFirrtl(dut: => chisel3.core.UserModule): String = {
35 | return chisel3.Driver.emit({() => dut})
36 | }
37 |
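// Compile a FIRRTL source string with the given compiler (plus any custom transforms) and return the emitted output.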
38 | def compileFIRRTL(
39 | inputFirrtl: String,
40 | compiler: firrtl.Compiler,
41 | customTransforms: Seq[firrtl.Transform] = Seq.empty,
42 | infoMode: firrtl.Parser.InfoMode = firrtl.Parser.IgnoreInfo,
43 | annotations: firrtl.AnnotationSeq = firrtl.AnnotationSeq(Seq.empty)
44 | ): String = {
45 | import firrtl.{Compiler, AnnotationSeq, CircuitState, ChirrtlForm, FIRRTLException}
46 | import firrtl.Parser._
47 | import scala.io.Source
48 | import scala.util.control.ControlThrowable
49 | import firrtl.passes._
50 | val outputBuffer = new java.io.CharArrayWriter
51 | try {
52 | //val parsedInput = firrtl.Parser.parse(Source.fromFile(input).getLines(), infoMode)
53 | val parsedInput = firrtl.Parser.parse(inputFirrtl.split("\n").toIterator, infoMode)
54 | compiler.compile(
55 | CircuitState(parsedInput, ChirrtlForm, annotations),
56 | outputBuffer,
57 | customTransforms)
58 | }
59 |
60 | catch {
61 | // Rethrow the exceptions which are expected or due to the runtime environment (out of memory, stack overflow)
62 | case p: ControlThrowable => throw p
63 | case p: PassException => throw p
64 | case p: FIRRTLException => throw p
65 | // Treat remaining exceptions as internal errors.
66 | case e: Exception => firrtl.Utils.throwInternalError(exception = Some(e))
67 | }
68 |
69 | val outputString = outputBuffer.toString
70 | outputString
71 | }
72 |
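// Pretty-print a FIRRTL AST: re-indent the Circuit's toString at parentheses and commas so the tree structure is readable.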
73 | def stringifyAST(firrtlAST: firrtl.ir.Circuit): String = {
74 | var ntabs = 0
75 | val buf = new StringBuilder
76 | val string = firrtlAST.toString
77 | string.zipWithIndex.foreach { case (c, idx) =>
78 | c match {
79 | case ' ' =>
80 | case '(' =>
81 | ntabs += 1
82 | buf ++= "(\n" + "| " * ntabs
83 | case ')' =>
84 | ntabs -= 1
85 | buf ++= "\n" + "| " * ntabs + ")"
86 | case ','=> buf ++= ",\n" + "| " * ntabs
87 | case c if idx > 0 && string(idx-1)==')' =>
88 | buf ++= "\n" + "| " * ntabs + c
89 | case c => buf += c
90 | }
91 | }
92 | buf.toString
93 | }
94 |
95 | // Returns path to module viz and hierarchy viz
96 | def generateVisualizations(gen: () => chisel3.RawModule): (String, String) = {
97 | import dotvisualizer._
98 | import dotvisualizer.transforms._
99 |
100 | import java.io._
101 | import firrtl._
102 | import firrtl.annotations._
103 |
104 | import almond.interpreter.api.DisplayData
105 | import almond.api.helpers.Display
106 |
107 | import chisel3._
108 | import chisel3.stage._
109 | import firrtl.ir.Module
110 | import sys.process._
111 |
112 | val sourceFirrtl = scala.Console.withOut(new PrintStream(new ByteArrayOutputStream())) {
113 | (new ChiselStage).emitChirrtl(gen())
114 | }
115 | val ast = Parser.parse(sourceFirrtl)
116 |
117 | val uniqueTopName = ast.main + ast.hashCode().toHexString
118 |
119 | val targetDir = s"diagrams/$uniqueTopName/"
120 |
121 | val cmdRegex = "cmd[0-9]+([A-Za-z]+.*)".r
122 | val readableTop = ast.main match {
123 | case cmdRegex(n) => n
124 | case other => other
125 | }
126 | val newTop = readableTop
127 |
128 | // Console hack prevents unnecessary chatter appearing in cell
129 | scala.Console.withOut(new PrintStream(new ByteArrayOutputStream())) {
130 | val sourceFirrtl = (new ChiselStage).emitChirrtl(gen())
131 |
132 | val newModules: Seq[firrtl.ir.DefModule] = ast.modules.map {
133 | case m: Module if m.name == ast.main => m.copy(name = newTop)
134 | case other => other
135 | }
136 | val newAst = ast.copy(main = newTop, modules = newModules)
137 |
138 | val controlAnnotations: Seq[Annotation] = Seq(
139 | firrtl.stage.FirrtlSourceAnnotation(sourceFirrtl),
140 | firrtl.options.TargetDirAnnotation(targetDir),
141 | dotvisualizer.stage.OpenCommandAnnotation("")
142 | )
143 |
144 | (new dotvisualizer.stage.DiagrammerStage).execute(Array.empty, controlAnnotations)
145 | }
146 | val moduleView = s"""$targetDir/$newTop.dot.svg"""
147 | val instanceView = s"""$targetDir/${newTop}_hierarchy.dot.svg"""
148 |
149 | val svgModuleText = FileUtils.getText(moduleView)
150 | val svgInstanceText = FileUtils.getText(instanceView)
151 |
152 | val x = s"""