├── .gitignore
├── .gitmodules
├── .travis.yml
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── build.sbt
├── project
│   ├── build.properties
│   └── plugins.sbt
├── scalastyle-config.xml
├── scalastyle-test-config.xml
└── src
    ├── main
    │   ├── resources
    │   │   └── esp_acc_iface.v
    │   └── scala
    │       └── esp
    │           ├── AcceleratorWrapper.scala
    │           ├── Annotations.scala
    │           ├── Generator.scala
    │           ├── Implementation.scala
    │           ├── Specification.scala
    │           ├── examples
    │           │   ├── AdderAccelerator.scala
    │           │   ├── CounterAccelerator.scala
    │           │   └── FFTAccelerator.scala
    │           ├── simulation
    │           │   └── Dma.scala
    │           └── transforms
    │               └── EmitXML.scala
    └── test
        ├── resources
        │   └── linear-mem.txt
        └── scala
            └── esptests
                ├── AcceleratorSpec.scala
                ├── AcceleratorWrapperSpec.scala
                ├── examples
                │   ├── AdderAcceleratorSpec.scala
                │   ├── CounterAcceleratorSpec.scala
                │   └── FFTAcceleratorSpec.scala
                └── simulation
                    └── DmaSpec.scala
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Project Specific stuff
2 | test_run_dir/*
3 | ### XilinxISE template
4 | # intermediate build files
5 | *.bgn
6 | *.bit
7 | *.bld
8 | *.cmd_log
9 | *.drc
10 | *.ll
11 | *.lso
12 | *.msd
13 | *.msk
14 | *.ncd
15 | *.ngc
16 | *.ngd
17 | *.ngr
18 | *.pad
19 | *.par
20 | *.pcf
21 | *.prj
22 | *.ptwx
23 | *.rbb
24 | *.rbd
25 | *.stx
26 | *.syr
27 | *.twr
28 | *.twx
29 | *.unroutes
30 | *.ut
31 | *.xpi
32 | *.xst
33 | *_bitgen.xwbt
34 | *_envsettings.html
35 | *_map.map
36 | *_map.mrp
37 | *_map.ngm
38 | *_map.xrpt
39 | *_ngdbuild.xrpt
40 | *_pad.csv
41 | *_pad.txt
42 | *_par.xrpt
43 | *_summary.html
44 | *_summary.xml
45 | *_usage.xml
46 | *_xst.xrpt
47 |
48 | # project-wide generated files
49 | *.gise
50 | par_usage_statistics.html
51 | usage_statistics_webtalk.html
52 | webtalk.log
53 | webtalk_pn.xml
54 |
55 | # generated folders
56 | iseconfig/
57 | xlnx_auto_0_xdb/
58 | xst/
59 | _ngo/
60 | _xmsgs/
61 | ### Eclipse template
62 | *.pydevproject
63 | .metadata
64 | .gradle
65 | bin/
66 | tmp/
67 | *.tmp
68 | *.bak
69 | *.swp
70 | *~.nib
71 | local.properties
72 | .settings/
73 | .loadpath
74 |
75 | # Eclipse Core
76 | .project
77 |
78 | # External tool builders
79 | .externalToolBuilders/
80 |
81 | # Locally stored "Eclipse launch configurations"
82 | *.launch
83 |
84 | # CDT-specific
85 | .cproject
86 |
87 | # JDT-specific (Eclipse Java Development Tools)
88 | .classpath
89 |
90 | # Java annotation processor (APT)
91 | .factorypath
92 |
93 | # PDT-specific
94 | .buildpath
95 |
96 | # sbteclipse plugin
97 | .target
98 |
99 | # TeXlipse plugin
100 | .texlipse
101 | ### C template
102 | # Object files
103 | *.o
104 | *.ko
105 | *.obj
106 | *.elf
107 |
108 | # Precompiled Headers
109 | *.gch
110 | *.pch
111 |
112 | # Libraries
113 | *.lib
114 | *.a
115 | *.la
116 | *.lo
117 |
118 | # Shared objects (inc. Windows DLLs)
119 | *.dll
120 | *.so
121 | *.so.*
122 | *.dylib
123 |
124 | # Executables
125 | *.exe
126 | *.out
127 | *.app
128 | *.i*86
129 | *.x86_64
130 | *.hex
131 |
132 | # Debug files
133 | *.dSYM/
134 | ### SBT template
135 | # Simple Build Tool
136 | # http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control
137 |
138 | target/
139 | lib_managed/
140 | src_managed/
141 | project/boot/
142 | .history
143 | .cache
144 | ### Emacs template
145 | # -*- mode: gitignore; -*-
146 | *~
147 | \#*\#
148 | /.emacs.desktop
149 | /.emacs.desktop.lock
150 | *.elc
151 | auto-save-list
152 | tramp
153 | .\#*
154 |
155 | # Org-mode
156 | .org-id-locations
157 | *_archive
158 |
159 | # flymake-mode
160 | *_flymake.*
161 |
162 | # eshell files
163 | /eshell/history
164 | /eshell/lastdir
165 |
166 | # elpa packages
167 | /elpa/
168 |
169 | # reftex files
170 | *.rel
171 |
172 | # AUCTeX auto folder
173 | /auto/
174 |
175 | # cask packages
176 | .cask/
177 | ### Vim template
178 | [._]*.s[a-w][a-z]
179 | [._]s[a-w][a-z]
180 | *.un~
181 | Session.vim
182 | .netrwhist
183 | *~
184 | ### JetBrains template
185 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
186 |
187 | *.iml
188 |
189 | ## Directory-based project format:
190 | .idea/
191 | # if you remove the above rule, at least ignore the following:
192 |
193 | # User-specific stuff:
194 | # .idea/workspace.xml
195 | # .idea/tasks.xml
196 | # .idea/dictionaries
197 |
198 | # Sensitive or high-churn files:
199 | # .idea/dataSources.ids
200 | # .idea/dataSources.xml
201 | # .idea/sqlDataSources.xml
202 | # .idea/dynamic.xml
203 | # .idea/uiDesigner.xml
204 |
205 | # Gradle:
206 | # .idea/gradle.xml
207 | # .idea/libraries
208 |
209 | # Mongo Explorer plugin:
210 | # .idea/mongoSettings.xml
211 |
212 | ## File-based project format:
213 | *.ipr
214 | *.iws
215 |
216 | ## Plugin-specific files:
217 |
218 | # IntelliJ
219 | /out/
220 |
221 | # mpeltonen/sbt-idea plugin
222 | .idea_modules/
223 |
224 | # JIRA plugin
225 | atlassian-ide-plugin.xml
226 |
227 | # Crashlytics plugin (for Android Studio and IntelliJ)
228 | com_crashlytics_export_strings.xml
229 | crashlytics.properties
230 | crashlytics-build.properties
231 | ### C++ template
232 | # Compiled Object files
233 | *.slo
234 | *.lo
235 | *.o
236 | *.obj
237 |
238 | # Precompiled Headers
239 | *.gch
240 | *.pch
241 |
242 | # Compiled Dynamic libraries
243 | *.so
244 | *.dylib
245 | *.dll
246 |
247 | # Fortran module files
248 | *.mod
249 |
250 | # Compiled Static libraries
251 | *.lai
252 | *.la
253 | *.a
254 | *.lib
255 |
256 | # Executables
257 | *.exe
258 | *.out
259 | *.app
260 | ### OSX template
261 | .DS_Store
262 | .AppleDouble
263 | .LSOverride
264 |
265 | # Icon must end with two \r
266 | Icon
267 |
268 | # Thumbnails
269 | ._*
270 |
271 | # Files that might appear in the root of a volume
272 | .DocumentRevisions-V100
273 | .fseventsd
274 | .Spotlight-V100
275 | .TemporaryItems
276 | .Trashes
277 | .VolumeIcon.icns
278 |
279 | # Directories potentially created on remote AFP share
280 | .AppleDB
281 | .AppleDesktop
282 | Network Trash Folder
283 | Temporary Items
284 | .apdisk
285 | ### Xcode template
286 | # Xcode
287 | #
288 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
289 |
290 | ## Build generated
291 | build/
292 | DerivedData
293 |
294 | ## Various settings
295 | *.pbxuser
296 | !default.pbxuser
297 | *.mode1v3
298 | !default.mode1v3
299 | *.mode2v3
300 | !default.mode2v3
301 | *.perspectivev3
302 | !default.perspectivev3
303 | xcuserdata
304 |
305 | ## Other
306 | *.xccheckout
307 | *.moved-aside
308 | *.xcuserstate
309 | ### Scala template
310 | *.class
311 | *.log
312 |
313 | # sbt specific
314 | .cache
315 | .history
316 | .lib/
317 | dist/*
318 | target/
319 | lib_managed/
320 | src_managed/
321 | project/boot/
322 | project/plugins/project/
323 |
324 | # Scala-IDE specific
325 | .scala_dependencies
326 | .worksheet
327 | ### Java template
328 | *.class
329 |
330 | # Mobile Tools for Java (J2ME)
331 | .mtj.tmp/
332 |
333 | # Package Files #
334 | *.jar
335 | *.war
336 | *.ear
337 |
338 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
339 | hs_err_pid*
340 |
341 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "ofdm"]
2 | path = ofdm
3 | url = https://github.com/seldridge/ofdm
4 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: scala
2 | dist: xenial
3 |
4 | cache:
5 |   directories:
6 |     - $HOME/.ivy2
7 |     - $HOME/.sbt
8 |
9 | git:
10 | depth: 10
11 |
12 | env:
13 | global:
14 | SBT_ARGS="-Dsbt.log.noformat=true"
15 |
16 | jobs:
17 | include:
18 | - stage: test
19 | name: "test"
20 | script: sbt $SBT_ARGS test
21 | -
22 | name: "run"
23 | script: sbt $SBT_ARGS run
24 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | All contributors must agree to the Developer Certificate of Origin Version 1.1 (DCO 1.1) by signing off on their commits with:
2 |
3 | ```
4 | Signed-off-by: [NAME] <[EMAIL]>
5 | ```
6 |
7 | This is most easily done by committing with `git commit -s`, which appends the sign-off to your commit message.
8 |
9 | The full text of the DCO 1.1 is as follows:
10 |
11 | ```
12 | Developer Certificate of Origin
13 | Version 1.1
14 |
15 | Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
16 | 660 York Street, Suite 102,
17 | San Francisco, CA 94110 USA
18 |
19 | Everyone is permitted to copy and distribute verbatim copies of this
20 | license document, but changing it is not allowed.
21 |
22 |
23 | Developer's Certificate of Origin 1.1
24 |
25 | By making a contribution to this project, I certify that:
26 |
27 | (a) The contribution was created in whole or in part by me and I
28 | have the right to submit it under the open source license
29 | indicated in the file; or
30 |
31 | (b) The contribution is based upon previous work that, to the best
32 | of my knowledge, is covered under an appropriate open source
33 | license and I have the right under that license to submit that
34 | work with modifications, whether created in whole or in part
35 | by me, under the same open source license (unless I am
36 | permitted to submit under a different license), as indicated
37 | in the file; or
38 |
39 | (c) The contribution was provided directly to me by some other
40 | person who certified (a), (b) or (c) and I have not modified
41 | it.
42 |
43 | (d) I understand and agree that this project and the contribution
44 | are public and that a record of the contribution (including all
45 | personal information I submit with it, including my sign-off) is
46 | maintained indefinitely and may be redistributed consistent with
47 | this project or the open source license(s) involved.
48 | ```
49 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ESP Accelerators in Chisel
2 |
3 | [![Build Status](https://travis-ci.com/IBM/esp-chisel-accelerators.svg)](https://travis-ci.com/IBM/esp-chisel-accelerators)
4 |
5 | This project provides an Embedded Scalable Platform (ESP) Accelerator socket that can be used for writing ESP-compatible accelerators in [chisel3](https://github.com/freechipsproject/chisel3).
6 |
7 | A concrete ESP-compliant accelerator is composed from an [`esp.Implementation`](../master/src/main/scala/esp/Implementation.scala) that aligns to an [`esp.Specification`](../master/src/main/scala/esp/Specification.scala). The resulting accelerator is then wrapped with an [`esp.AcceleratorWrapper`](../master/src/main/scala/esp/AcceleratorWrapper.scala) that maps the interfaces of the accelerator to the expected top-level interface. The `esp.Specification` declares an abstract configuration describing the metadata that the ESP framework requires.
8 |
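As a sketch of how these pieces fit together, a minimal accelerator might look like the following. The `NopAccelerator` name, device ID, and behavior below are made up for illustration and are not part of this repository:

```scala
import chisel3._
import esp.{AcceleratorWrapper, Config, Implementation, Specification}

// Hypothetical specification: metadata only, no run-time configuration registers.
trait NopSpecification extends Specification {
  override lazy val config: Config = Config(
    name               = "NopAccelerator",
    description        = "Asserts done as soon as it is enabled",
    memoryFootprintMiB = 0,
    deviceId           = 0x11)
}

// One concrete implementation of that specification.
class NopAccelerator(dmaWidth: Int) extends Implementation(dmaWidth) with NopSpecification {
  override val implementationName: String = "Default"
  io.done := io.enable
}

// Wrapped for the ESP socket with a 32-bit DMA bus:
//   chisel3.Driver.execute(args, () => new AcceleratorWrapper(32, new NopAccelerator(_)))
```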
9 | When generating Verilog from an `esp.Implementation`, a FIRRTL annotation is emitted containing the accelerator configuration. A custom FIRRTL transform [`EmitXML`](../master/src/main/scala/esp/transforms/EmitXML.scala) will convert this configuration information to XML that the ESP framework needs.
10 |
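[`esp.Generator`](../master/src/main/scala/esp/Generator.scala) schedules that transform when elaborating the examples; a trimmed-down sketch of the same invocation (the `GenerateOne` object and target directory below are illustrative) looks like:

```scala
import chisel3.Driver
import esp.AcceleratorWrapper
import esp.examples.CounterAccelerator

// Elaborate the wrapped accelerator and schedule EmitXML so the <sld> XML
// description is written alongside the generated Verilog.
object GenerateOne extends App {
  Driver.execute(
    Array("--target-dir", "build/CounterAccelerator",
          "--custom-transforms", "esp.transforms.EmitXML"),
    () => new AcceleratorWrapper(32, new CounterAccelerator(_)))
}
```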
11 | We currently provide several example accelerators, including [`esp.examples.CounterAccelerator`](../master/src/main/scala/esp/examples/CounterAccelerator.scala), which reports completion a run-time configurable number of cycles after it is started.
12 |
13 | To build the example accelerators, run:
14 |
15 | ```bash
16 | sbt run
17 | ```
18 |
19 | To run our existing tests use:
20 |
21 | ```bash
22 | sbt test
23 | ```
24 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | def scalacOptionsVersion(scalaVersion: String): Seq[String] = {
2 | Seq() ++ {
3 | // If we're building with Scala > 2.11, enable the compile option
4 | // switch to support our anonymous Bundle definitions:
5 | // https://github.com/scala/bug/issues/10047
6 | CrossVersion.partialVersion(scalaVersion) match {
7 | case Some((2, scalaMajor: Long)) if scalaMajor < 12 => Seq()
8 | case _ => Seq("-Xsource:2.11")
9 | }
10 | }
11 | }
12 |
13 | def javacOptionsVersion(scalaVersion: String): Seq[String] = {
14 | Seq() ++ {
15 | // Scala 2.12 requires Java 8. We continue to generate
16 | // Java 7 compatible code for Scala 2.11
17 | // for compatibility with old clients.
18 | CrossVersion.partialVersion(scalaVersion) match {
19 | case Some((2, scalaMajor: Long)) if scalaMajor < 12 =>
20 | Seq("-source", "1.7", "-target", "1.7")
21 | case _ =>
22 | Seq("-source", "1.8", "-target", "1.8")
23 | }
24 | }
25 | }
26 |
27 | lazy val ofdm = (project in file("ofdm"))
28 |
29 | // Provide a managed dependency on X if -DXVersion="" is supplied on the command line.
30 | val defaultVersions = Map(
31 | "chisel-iotesters" -> "1.3.+",
32 | "chisel-testers2" -> "0.1.+",
33 | "dsptools" -> "1.2.+"
34 | )
35 |
36 | lazy val espChisel = (project in file("."))
37 | .settings(
38 | name := "esp-chisel-accelerators",
39 | version := "1.0.0",
40 | scalaVersion := "2.12.10",
41 | crossScalaVersions := Seq("2.11.12", "2.12.10"),
42 | libraryDependencies ++= Seq("chisel-iotesters", "chisel-testers2", "dsptools")
43 | .map { dep: String => "edu.berkeley.cs" %% dep % sys.props.getOrElse(dep + "Version", defaultVersions(dep)) },
44 | libraryDependencies += "com.thoughtworks.xstream" % "xstream" % "1.4.11.1",
45 | scalacOptions ++= scalacOptionsVersion(scalaVersion.value) ++
46 | Seq("-unchecked", "-deprecation", "-Ywarn-unused-import"),
47 | javacOptions ++= javacOptionsVersion(scalaVersion.value))
48 | .dependsOn(ofdm)
49 |
50 | resolvers ++= Seq(
51 | Resolver.sonatypeRepo("snapshots"),
52 | Resolver.sonatypeRepo("releases")
53 | )
54 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.3.2
2 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | logLevel := Level.Warn
--------------------------------------------------------------------------------
/scalastyle-config.xml:
--------------------------------------------------------------------------------
[scalastyle-config.xml: the XML check definitions were stripped in this export; only fragments survive. The configuration is titled "Scalastyle standard configuration" and includes, among other checks, a "No lines ending with a ;" rule and a method-name regex that permits Chisel operator names (e.g. :=, <>, ===, ##, unary_~).]
--------------------------------------------------------------------------------
/scalastyle-test-config.xml:
--------------------------------------------------------------------------------
[scalastyle-test-config.xml: the XML check definitions were stripped in this export; only fragments survive. The configuration is titled "Scalastyle configuration for Chisel3 unit tests" and includes, among other checks, a "No lines ending with a ;" rule and the same operator-name regex as the main configuration.]
--------------------------------------------------------------------------------
/src/main/resources/esp_acc_iface.v:
--------------------------------------------------------------------------------
1 | // Copyright 2018 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | module sort_basic_dma32
16 | (
17 | clk,
18 | rst,
19 | conf_info_len,
20 | conf_info_batch,
21 | conf_done,
22 | acc_done,
23 | debug,
24 | dma_read_chnl_valid,
25 | dma_read_chnl_data,
26 | dma_read_chnl_ready,
27 | dma_read_ctrl_valid,
28 | dma_read_ctrl_data_index,
29 | dma_read_ctrl_data_length,
30 | dma_read_ctrl_ready,
31 | dma_write_ctrl_valid,
32 | dma_write_ctrl_data_index,
33 | dma_write_ctrl_data_length,
34 | dma_write_ctrl_ready,
35 | dma_write_chnl_valid,
36 | dma_write_chnl_data,
37 | dma_write_chnl_ready
38 | );
39 |
40 | input clk;
41 | input rst;
42 |
43 | // Configuration input (assigned from memory-mapped registers in
44 | // the tile). There can be up to 14 32-bit user-defined registers.
45 | // We've reserved registers 15 and 16 to control a small on-tile
46 | // memory in case more memory-mapped registers are needed.
47 | input [31:0] conf_info_len;
48 | input [31:0] conf_info_batch;
49 |
50 | // Start accelerator (assigned from memory-mapped command register
51 | // in the tile).
52 | input conf_done;
53 |
54 | // DMA Read control
55 | input dma_read_ctrl_ready;
56 | output dma_read_ctrl_valid;
57 | reg dma_read_ctrl_valid;
58 | // Offset within contiguous accelerator virtual address (gets
59 | // translated by TLB in the tile)
60 | output [31:0] dma_read_ctrl_data_index;
61 | reg [31:0] dma_read_ctrl_data_index;
62 | // Number of 32-bit words to be read
63 | // This can be converted to a number of bytes, but it was convenient
64 | // since we design accelerators in SystemC.
65 | output [31:0] dma_read_ctrl_data_length;
66 | reg [31:0] dma_read_ctrl_data_length;
67 |
68 | // DMA Read data channel directly connected to the NoC queues.
69 | output dma_read_chnl_ready;
70 | input dma_read_chnl_valid;
71 | input [31:0] dma_read_chnl_data;
72 |
73 | // DMA Write control (same as Read)
74 | input dma_write_ctrl_ready;
75 | output dma_write_ctrl_valid;
76 | reg dma_write_ctrl_valid;
77 | output [31:0] dma_write_ctrl_data_index;
78 | reg [31:0] dma_write_ctrl_data_index;
79 | output [31:0] dma_write_ctrl_data_length;
80 | reg [31:0] dma_write_ctrl_data_length;
81 |
82 | // DMA Write data channel directly connected to the NoC queues
83 | input dma_write_chnl_ready;
84 | output dma_write_chnl_valid;
85 | output [31:0] dma_write_chnl_data;
86 | reg [31:0] dma_write_chnl_data;
87 |
88 | // Latency-insensitive protocol.
89 | // Note that ready/valid may or may not be marked as "reg"
90 | // depending on which latency-insensitive channel was chosen
91 | // for HLS. Some are blocking, some are maybe-blocking.
92 | // Regardless, the protocol is simple: when both ready and
93 | // valid are set, the producer knows that data have been
94 | // consumed. There is no combinational loop between ready
95 | // and valid, because accelerators are implemented using
96 | // SC_CTHREADS only. Therefore, inputs are implicitly
97 | // registered.
98 |
99 | // One-cycle pulse (triggers the interrupt from the tile).
100 | // Interrupts are routed through the NoC to the tile that
101 | // hosts the interrupt controller.
102 | output acc_done;
103 | reg acc_done;
104 |
105 | // Optional debug port to set an error code.
106 | // Currently we are not using this though.
107 | output [31:0] debug;
108 |
109 |
110 | ///.. HLS-generated code
111 | endmodule
112 |
--------------------------------------------------------------------------------
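The ready/valid handshake described in the comments above is the same latency-insensitive protocol that the Chisel side models with `Decoupled` interfaces (see `DmaIO` in `Implementation.scala`). A minimal sketch of a consumer obeying that protocol, with illustrative names and assuming nothing beyond chisel3:

```scala
import chisel3._
import chisel3.util.Decoupled

// Hypothetical sink for a dma_read_chnl-style interface: a word is consumed
// exactly when ready and valid are both high in the same cycle.
class ReadChannelSink(dmaWidth: Int) extends Module {
  val io = IO(new Bundle {
    val in  = Flipped(Decoupled(UInt(dmaWidth.W)))
    val sum = Output(UInt(dmaWidth.W))
  })
  val acc = RegInit(0.U(dmaWidth.W))
  io.in.ready := true.B     // always willing to accept data
  when (io.in.fire) {       // ready && valid
    acc := acc + io.in.bits
  }
  io.sum := acc
}
```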
/src/main/scala/esp/AcceleratorWrapper.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp
16 |
17 | import chisel3._
18 |
19 | trait AcceleratorWrapperIO { this: RawModule =>
20 | val dmaWidth: Int
21 |
22 | val clk = IO(Input(Clock()))
23 | val rst = IO(Input(Bool()))
24 |
25 | // Start accelerator (assigned from memory-mapped command register in the tile)
26 | val conf_done = IO(Input(Bool()))
27 |
28 | // One-cycle pulse (triggers the interrupt from the tile). Interrupts are routed through the NoC to the tile that
29 | // hosts the interrupt controller.
30 | val acc_done = IO(Output(Bool()))
31 |
32 | // Optional debug port to set an error code. Currently we are not using this though.
33 | val debug = IO(Output(UInt(32.W)))
34 |
35 | // DMA Read data channel directly connected to the NoC queues.
36 | val dma_read_chnl_ready = IO(Output(Bool()))
37 | val dma_read_chnl_valid = IO(Input(Bool()))
38 | val dma_read_chnl_data = IO(Input(UInt(dmaWidth.W)))
39 |
40 | // DMA Read control
41 | val dma_read_ctrl_ready = IO(Input(Bool()))
42 | val dma_read_ctrl_valid = IO(Output(Bool()))
43 | // Offset within contiguous accelerator virtual address (gets translated by TLB in the tile)
44 | val dma_read_ctrl_data_index = IO(Output(UInt(32.W)))
45 | // Number of 32-bit words to be read. This can be converted to a number of bytes, but it was convenient since we
46 | // design accelerators in SystemC.
47 | val dma_read_ctrl_data_length = IO(Output(UInt(32.W)))
48 | // Size of the data tokens encoded according to AXI bus standard (e.g. double-word, word, half-word, byte).
49 | val dma_read_ctrl_data_size = IO(Output(DmaSize.gen))
50 |
51 | // DMA Write control (same as Read)
52 | val dma_write_ctrl_ready = IO(Input(Bool()))
53 | val dma_write_ctrl_valid = IO(Output(Bool()))
54 | val dma_write_ctrl_data_index = IO(Output(UInt(32.W)))
55 | val dma_write_ctrl_data_length = IO(Output(UInt(32.W)))
56 | val dma_write_ctrl_data_size = IO(Output(DmaSize.gen))
57 |
58 | // DMA Write data channel directly connected to the NoC queues
59 | val dma_write_chnl_ready = IO(Input(Bool()))
60 | val dma_write_chnl_valid = IO(Output(Bool()))
61 | val dma_write_chnl_data = IO(Output(UInt(dmaWidth.W)))
62 | }
63 |
64 | /** Wraps a given [[Implementation]] in a predictable top-level interface. This is intended for direct integration
65 | * with the ESP accelerator socket.
66 | * @param dmaWidth the width of the DMA bus
67 | * @param gen a function that builds the accelerator to wrap for a given DMA width
68 | */
69 | final class AcceleratorWrapper(val dmaWidth: Int, gen: Int => Implementation) extends RawModule with AcceleratorWrapperIO {
70 |
71 | override lazy val desiredName = s"${acc.config.name}_${acc.implementationName}_dma$dmaWidth"
72 | val acc = withClockAndReset(clk, ~rst)(Module(gen(dmaWidth)))
73 |
74 | val conf_info = acc.io.config.map(a => IO(Input(a.cloneType)))
75 |
76 | if (conf_info.isDefined) {
77 | acc.io.config.get := conf_info.get
78 | }
79 |
80 | acc.io.enable := conf_done
81 |
82 | acc_done := acc.io.done
83 |
84 | debug := acc.io.debug
85 |
86 | acc.io.dma.readControl.ready := dma_read_ctrl_ready
87 | dma_read_ctrl_valid := acc.io.dma.readControl.valid
88 | dma_read_ctrl_data_index := acc.io.dma.readControl.bits.index
89 | dma_read_ctrl_data_length := acc.io.dma.readControl.bits.length
90 | dma_read_ctrl_data_size := acc.io.dma.readControl.bits.size
91 |
92 | acc.io.dma.writeControl.ready := dma_write_ctrl_ready
93 | dma_write_ctrl_valid := acc.io.dma.writeControl.valid
94 | dma_write_ctrl_data_index := acc.io.dma.writeControl.bits.index
95 | dma_write_ctrl_data_length := acc.io.dma.writeControl.bits.length
96 | dma_write_ctrl_data_size := acc.io.dma.writeControl.bits.size
97 |
98 | dma_read_chnl_ready := acc.io.dma.readChannel.ready
99 | acc.io.dma.readChannel.valid := dma_read_chnl_valid
100 | acc.io.dma.readChannel.bits := dma_read_chnl_data
101 |
102 | acc.io.dma.writeChannel.ready := dma_write_chnl_ready
103 | dma_write_chnl_valid := acc.io.dma.writeChannel.valid
104 | dma_write_chnl_data := acc.io.dma.writeChannel.bits
105 | }
106 |
--------------------------------------------------------------------------------
/src/main/scala/esp/Annotations.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp
16 |
17 | import firrtl.annotations.{ModuleName, SingleTargetAnnotation}
18 |
19 | import com.thoughtworks.xstream.XStream
20 | import com.thoughtworks.xstream.io.{HierarchicalStreamReader, HierarchicalStreamWriter}
21 | import com.thoughtworks.xstream.io.xml.{DomDriver, XmlFriendlyNameCoder}
22 | import com.thoughtworks.xstream.converters.{Converter, MarshallingContext, UnmarshallingContext}
23 |
24 | class ParameterConverter extends Converter {
25 |
26 | override def marshal(source: scala.Any, writer: HierarchicalStreamWriter, context: MarshallingContext): Unit = {
27 | val c = source.asInstanceOf[Parameter]
28 | writer.addAttribute("name", c.name)
29 | if (c.description.isDefined) { writer.addAttribute("desc", c.description.get) }
30 | if (c.value.isDefined) { writer.addAttribute("value", c.value.get.toString) }
31 | }
32 |
33 | override def unmarshal(reader: HierarchicalStreamReader, context: UnmarshallingContext): AnyRef = {
34 | ??? /* This is currently unimplemented */
35 | }
36 |
37 | override def canConvert(c: Class[_]): Boolean = c.isAssignableFrom(classOf[Parameter])
38 |
39 | }
40 |
41 | /** Encodes ESP configuration and can serialize to SLD-compatible XML.
42 | * @param target the module this configuration applies to
43 | * @param config the ESP accelerator configuration
44 | * @param dir either a (left) absolute path or (right) a path relative to a [[TargetDirAnnotation]]
45 | */
46 | case class EspConfigAnnotation(target: ModuleName, config: Config, dir: Either[String, String] = Right(".."))
47 | extends SingleTargetAnnotation[ModuleName] {
48 |
49 | def duplicate(targetx: ModuleName): EspConfigAnnotation = this.copy(target=targetx)
50 |
51 | def toXML: String = {
52 | val xs = new XStream(new DomDriver("UTF-8", new XmlFriendlyNameCoder("_", "_")))
53 |
54 | xs.registerConverter(new ParameterConverter)
55 | // xs.aliasSystemAttribute(null, "class")
56 | xs.alias("sld", this.getClass)
57 | xs.aliasField("accelerator", this.getClass, "config")
58 | xs.useAttributeFor(config.getClass, "name")
59 | xs.useAttributeFor(config.getClass, "description")
60 | xs.aliasField("desc", config.getClass, "description")
61 | xs.useAttributeFor(config.getClass, "memoryFootprintMiB")
62 | xs.aliasField("data_size", config.getClass, "memoryFootprintMiB")
63 | xs.useAttributeFor(config.getClass, "deviceId")
64 | xs.aliasField("device_id", config.getClass, "deviceId")
65 | xs.addImplicitArray(config.getClass, "param")
66 | xs.alias("param", classOf[Parameter])
67 | xs.useAttributeFor(classOf[Parameter], "name")
68 | xs.aliasField("desc", classOf[Parameter], "description")
69 | xs.useAttributeFor(classOf[Parameter], "description")
70 | xs.omitField(classOf[Parameter], "readOnly")
71 | xs.omitField(config.getClass, "paramMap")
72 | xs.omitField(this.getClass, "target")
73 | xs.omitField(this.getClass, "dir")
74 | xs.toXML(this)
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
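In the normal flow, `esp.transforms.EmitXML` picks these annotations up during FIRRTL compilation, but `toXML` can also be exercised directly. A minimal sketch, with placeholder module and configuration values:

```scala
import firrtl.annotations.{CircuitName, ModuleName}
import esp.{Config, EspConfigAnnotation, Parameter}

object XmlSketch extends App {
  // Placeholder configuration: one read-only and one read-write parameter.
  val cfg = Config(
    name = "ExampleAccelerator",
    description = "illustration only",
    memoryFootprintMiB = 1,
    deviceId = 0x20,
    param = Array(
      Parameter(name = "gitHash", value = Some(0xABC)),
      Parameter(name = "size")))

  val anno = EspConfigAnnotation(
    ModuleName("ExampleAccelerator", CircuitName("ExampleAccelerator")), cfg)

  println(anno.toXML)   // prints the <sld><accelerator ...> description
}
```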
/src/main/scala/esp/Generator.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp
16 |
17 | import chisel3.Driver
18 |
19 | import esp.examples.{AdderAccelerator, CounterAccelerator, DefaultFFTAccelerator}
20 |
21 | object Generator {
22 |
23 | def main(args: Array[String]): Unit = {
24 | val examples: Seq[(String, String, () => AcceleratorWrapper)] =
25 | Seq( ("CounterAccelerator", "Default", (a: Int) => new CounterAccelerator(a)),
26 | ("FFTAccelerator", DefaultFFTAccelerator.architecture, (a: Int) => new DefaultFFTAccelerator(a)),
27 | ("AdderAccelerator", "Default", (a: Int) => new AdderAccelerator(a) ))
28 | .flatMap( a => Seq(32).map(b => (a._1, s"${a._2}_dma$b", () => new AcceleratorWrapper(b, a._3))) )
29 |
30 | examples.map { case (name, impl, gen) =>
31 | val argsx = args ++ Array("--target-dir", s"build/$name/${name}_$impl",
32 | "--custom-transforms", "esp.transforms.EmitXML",
33 | "--log-level", "info")
34 | Driver.execute(argsx, gen)
35 | }
36 |
37 | }
38 |
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/scala/esp/Implementation.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp
16 |
17 | import chisel3._
18 | import chisel3.experimental.ChiselAnnotation
19 | import chisel3.util.{Decoupled, Valid, Enum}
20 |
21 | import firrtl.annotations.Annotation
22 |
23 | import scala.collection.immutable
24 |
25 | class ConfigIO private (espConfig: Config) extends Record {
26 | val elements = immutable.ListMap(espConfig.param.collect{ case a if !a.readOnly => a.name -> UInt(a.size.W)}: _*)
27 | override def cloneType: this.type = (new ConfigIO(espConfig)).asInstanceOf[this.type]
28 | def apply(a: String): Data = elements(a)
29 | }
30 |
31 | object ConfigIO {
32 |
33 | def apply(espConfig: Config): Option[ConfigIO] = {
34 | val rwParameters = espConfig.param.collect{ case a if !a.readOnly => a.name -> UInt(a.size.W) }
35 | if (rwParameters.isEmpty) { None }
36 | else { Some(new ConfigIO(espConfig)) }
37 | }
38 |
39 | }
40 |
41 | object DmaSize {
42 | private val enums = Enum(8)
43 | val Seq(bytes, wordHalf, word, wordDouble, wordQuad, word8, word16, word32) = enums
44 | def gen: UInt = chiselTypeOf(enums.head)
45 | }
46 |
47 | class DmaControl extends Bundle {
48 | val index = UInt(32.W)
49 | val length = UInt(32.W)
50 | val size = DmaSize.gen
51 | }
52 |
53 | class DmaIO(val dmaWidth: Int) extends Bundle {
54 | val Seq(readControl, writeControl) = Seq.fill(2)(Decoupled(new DmaControl))
55 | val readChannel = Flipped(Decoupled(UInt(dmaWidth.W)))
56 | val writeChannel = Decoupled(UInt(dmaWidth.W))
57 | }
58 |
59 | class AcceleratorIO(val dmaWidth: Int, val espConfig: Config) extends Bundle {
60 | val enable = Input(Bool())
61 | val config = ConfigIO(espConfig).map(Input(_))
62 | val dma = new DmaIO(dmaWidth)
63 | val done = Output(Bool())
64 | val debug = Output(UInt(32.W))
65 | }
66 |
67 |
68 | /** This contains the underlying hardware that implements an ESP accelerator [[Specification]]. A concrete subclass of
69 | * [[Implementation]] represents one point in the design space for all accelerators meeting the [[Specification]].
70 | * @param dmaWidth the width of the connection to the memory bus
71 | */
72 | abstract class Implementation(val dmaWidth: Int) extends Module with Specification { self: Implementation =>
73 |
74 | lazy val io = IO(new AcceleratorIO(dmaWidth, config))
75 |
76 | /** This defines a name describing this implementation. */
77 | def implementationName: String
78 |
79 | chisel3.experimental.annotate(
80 | new ChiselAnnotation {
81 | def toFirrtl: Annotation = EspConfigAnnotation(self.toNamed, config)
82 | }
83 | )
84 |
85 | io.done := false.B
86 | io.debug := 0.U
87 |
88 | io.dma.readControl.valid := false.B
89 | io.dma.readControl.bits.index := 0.U
90 | io.dma.readControl.bits.length := 0.U
91 | io.dma.readControl.bits.size := DmaSize.word
92 |
93 | io.dma.writeControl.valid := false.B
94 | io.dma.writeControl.bits.index := 0.U
95 | io.dma.writeControl.bits.length := 0.U
96 | io.dma.writeControl.bits.size := DmaSize.word
97 |
98 | io.dma.readChannel.ready := false.B
99 |
100 | io.dma.writeChannel.valid := false.B
101 | io.dma.writeChannel.bits := 0.U
102 | }
103 |
--------------------------------------------------------------------------------
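The tie-offs at the bottom of `Implementation` give every accelerator a safe default; because a subclass's constructor body runs after them, re-assigning a signal in the subclass simply wins under Chisel's last-connect semantics. A hypothetical subclass that issues one DMA read request, with made-up addresses and lengths:

```scala
import chisel3._
import esp.{Config, DmaSize, Implementation, Specification}

// Hypothetical accelerator: issues a single DMA read request and nothing else.
trait OneReadSpecification extends Specification {
  override lazy val config = Config("OneRead", "Issues a single DMA read request", 0, 0x12)
}

class OneReadAccelerator(dmaWidth: Int) extends Implementation(dmaWidth) with OneReadSpecification {
  override val implementationName = "Default"
  val requested = RegInit(false.B)
  when (io.enable && !requested) {
    // These assignments override the tie-offs made in Implementation's constructor.
    io.dma.readControl.valid       := true.B
    io.dma.readControl.bits.index  := 0x40.U   // placeholder offset
    io.dma.readControl.bits.length := 16.U     // sixteen 32-bit words (placeholder)
    io.dma.readControl.bits.size   := DmaSize.word
    requested := io.dma.readControl.fire
  }
}
```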
/src/main/scala/esp/Specification.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | package esp
15 |
16 | /** A parameter used to configure the [[Specification]].
17 | * @param name the name of the parameter
18 | * @param description an optional string describing what this parameter sets/controls
19 | * @param value an optional read-only, default value for this parameter
20 | * @param size the width of this parameter in bits (1--32)
21 | */
22 | case class Parameter(
23 | name: String,
24 | description: Option[String] = None,
25 | value: Option[Int] = None,
26 | size: Int = 32) {
27 |
28 | val readOnly = value.isDefined
29 |
30 | require(size > 0, s"Parameter '$name' must be greater than 0 bits in size!")
31 | require(size <= 32, s"Parameter '$name' must be less than or equal to 32 bits in size!")
32 |
33 | def espString: String = name + (if (value.isDefined) s"_${value.get}" else "")
34 | }
35 |
36 | /** Mandatory configuration information that defines an ESP accelerator [[Specification]].
37 | * @param name the specification name
38 | * @param description a string describing what this specification does
39 | * @param memoryFootprintMiB the accelerator's memory footprint
40 | * @param deviceId a unique device identifier
41 | * @param param an optional array of parameters describing configuration registers
42 | */
43 | case class Config(
44 | name: String,
45 | description: String,
46 | memoryFootprintMiB: Int,
47 | deviceId: Int,
48 | param: Array[Parameter] = Array.empty) {
49 |
50 | require(memoryFootprintMiB >= 0, s"Config '$name' memory footprint must be non-negative!")
51 |
52 | def espString: String = (name +: param).mkString("_")
53 |
54 | val paramMap: Map[String, Parameter] = param
55 | .groupBy(_.name)
56 | .map{ case (k, v) =>
57 | require(v.size == 1, s"AcceleratorConfig '$name' has non-uniquely named parameter '$k'")
58 | k -> v.head
59 | }
60 |
61 | }
62 |
63 | /** This defines ESP configuration information shared across a range of accelerator [[Implementation]]s. */
64 | trait Specification {
65 |
66 | /** An ESP [[Config]] that provides information to the ESP framework necessary to insert an accelerator into an ESP
67 | * SoC.
68 | */
69 | def config: Config
70 |
71 | }
72 |
--------------------------------------------------------------------------------
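One detail worth calling out: a `Parameter` constructed with a `value` is marked `readOnly` and is emitted into the XML as fixed metadata, while a parameter without a value becomes a run-time configuration register exposed through `ConfigIO`. A small sketch with placeholder names and values:

```scala
import esp.{Config, Parameter}

object ParameterSketch extends App {
  // Placeholder configuration: a Parameter with a default value is read-only
  // metadata; one without a value becomes a run-time configuration register.
  val cfg = Config(
    name = "ExampleSpec",
    description = "read-only vs. read-write parameters",
    memoryFootprintMiB = 1,
    deviceId = 0x21,
    param = Array(
      Parameter(name = "version", value = Some(3)),  // readOnly == true
      Parameter(name = "length")))                   // readOnly == false

  assert(cfg.paramMap("version").readOnly)
  assert(!cfg.paramMap("length").readOnly)
}
```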
/src/main/scala/esp/examples/AdderAccelerator.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp.examples
16 |
17 | import chisel3._
18 | import chisel3.experimental.ChiselEnum
19 |
20 | import esp.{Config, Implementation, Parameter, Specification}
21 |
22 | trait AdderSpecification extends Specification {
23 |
24 | override lazy val config = Config(
25 | name = "AdderAccelerator",
26 | description = "Reduces a vector via addition",
27 | memoryFootprintMiB = 1,
28 | deviceId = 0xF,
29 | param = Array(
30 | Parameter( name = "readAddr" ),
31 | Parameter( name = "size" ),
32 | Parameter( name = "writeAddr" )
33 | )
34 | )
35 |
36 | }
37 |
38 | object AdderAccelerator {
39 |
40 | private object S extends ChiselEnum {
41 | val Idle, DMALoad, Compute, DMAStore, Done = Value
42 | }
43 |
44 | /** AdderAccelerator error codes */
45 | object Errors extends ChiselEnum {
46 | val None = Value(0.U)
47 | val InvalidSize, Unimplemented = Value
48 | }
49 |
50 | }
51 |
52 | class AdderAccelerator(dmaWidth: Int) extends Implementation(dmaWidth) with AdderSpecification {
53 | require(dmaWidth == 32)
54 |
55 | import AdderAccelerator._
56 |
57 | override val implementationName = "AdderAccelerator"
58 |
59 | private val readAddr, size, writeAddr = Reg(UInt(32.W))
60 |
61 | private val state = RegInit(S.Idle)
62 |
63 | private val acc, count = Reg(UInt(32.W))
64 |
65 | private val storeReqSent = RegInit(false.B)
66 |
67 | when (io.enable && state === S.Idle) {
68 | Seq((readAddr, "readAddr"), (size, "size"), (writeAddr, "writeAddr")).foreach{
69 | case (lhs, name) => lhs := io.config.get(name).asUInt
70 | }
71 | when (io.config.get("size").asUInt === 0.U) {
72 | state := S.DMAStore
73 | }.otherwise {
74 | state := S.DMALoad
75 | }
76 | acc := 0.U
77 | count := 0.U
78 | storeReqSent := false.B
79 | }
80 |
81 | when (state === S.DMALoad) {
82 | io.dma.readControl.valid := true.B
83 | io.dma.readControl.bits.index := readAddr
84 | io.dma.readControl.bits.length := size
85 | when (io.dma.readControl.fire) {
86 | state := S.Compute
87 | }
88 | }
89 |
90 | when (state === S.Compute) {
91 | io.dma.readChannel.ready := true.B
92 | when (io.dma.readChannel.fire) {
93 | acc := acc + io.dma.readChannel.bits
94 | count := count + (dmaWidth / 32).U
95 | when (count === size - 1.U) {
96 | state := S.DMAStore
97 | }
98 | }
99 | }
100 |
101 | when (state === S.DMAStore) {
102 | io.dma.writeChannel.bits := acc
103 | when (storeReqSent =/= true.B) {
104 | io.dma.writeControl.valid := true.B
105 | io.dma.writeControl.bits.index := writeAddr
106 | io.dma.writeControl.bits.length := 1.U
107 | storeReqSent := io.dma.writeControl.fire
108 | }.otherwise {
109 | io.dma.writeChannel.valid := true.B
110 | when (io.dma.writeChannel.fire) {
111 | state := S.Done
112 | }
113 | }
114 | }
115 |
116 | when (state === S.Done) {
117 | io.done := true.B
118 | state := S.Idle
119 | }
120 |
121 | }
122 |
--------------------------------------------------------------------------------
/src/main/scala/esp/examples/CounterAccelerator.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp.examples
16 |
17 | import chisel3._
18 |
19 | import esp.{Config, AcceleratorWrapperIO, AcceleratorIO, Implementation, Parameter, Specification}
20 |
21 | import sys.process._
22 |
23 | /** Specification of an ESP accelerator that reports done a run-time configurable number of clock ticks after it is
24 | * enabled. The number of ticks is set through the "ticks" configuration parameter.
25 | */
26 | trait CounterSpecification extends Specification {
27 |
28 | /* This defines the abstract member config that provides necessary information for the ESP framework to generate an XML
29 | * accelerator configuration. At the Chisel level, this will be used to emit an [[esp.EspConfigAnnotation]] which will
30 | * be converted to an XML description by a custom FIRRTL transform, [[esp.transforms.EmitXML]]. */
31 | override lazy val config: Config = Config(
32 | name = "CounterAccelerator",
33 | description = s"Fixed-count timer",
34 | memoryFootprintMiB = 0,
35 | deviceId = 0xC,
36 | param = Array(
37 | Parameter(
38 | name = "gitHash",
39 | description = Some("Git short SHA hash of the repo used to generate this accelerator"),
40 | value = Some(Integer.parseInt(("git log -n1 --format=%h" !!).filter(_ >= ' '), 16))
41 | ),
42 | Parameter(
43 | name = "ticks",
44 | description = Some("Ticks to timeout"),
45 | value = None)
46 | )
47 | )
48 |
49 | }
50 |
51 | class CounterAccelerator(dmaWidth: Int) extends Implementation(dmaWidth) with CounterSpecification {
52 |
53 | override val implementationName: String = "Default"
54 |
55 | val ticks, value = Reg(UInt(config.paramMap("ticks").size.W))
56 | val enabled = RegInit(false.B)
57 | val fire = enabled && (value === ticks)
58 |
59 | when (io.enable) {
60 | enabled := true.B
61 | ticks := io.config.get("ticks").asUInt
62 | value := 0.U
63 | }
64 |
65 | when (enabled) {
66 | value := value + 1.U
67 | }
68 |
69 | when (fire) {
70 | enabled := false.B
71 | }
72 |
73 | io.done := fire
74 | }
75 |
--------------------------------------------------------------------------------
/src/main/scala/esp/examples/FFTAccelerator.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp.examples
16 |
17 | import chisel3._
18 | import chisel3.experimental.{ChiselEnum, FixedPoint}
19 | import chisel3.util.{Queue, Valid}
20 |
21 | import dsptools.numbers._
22 |
23 | import esp.{Config, Implementation, Parameter, Specification}
24 |
25 | import java.io.File
26 |
27 | import ofdm.fft.{DirectFFTType, DISOIO, FFT, FFTParams, PacketSerializer, PacketSerDesParams, SDFFFTType}
28 |
29 | import sys.process.Process
30 |
31 | /* 64-bit, 40 binary point */
32 |
33 | trait FFTSpecification extends Specification {
34 |
35 | val params: FFTParams[_]
36 |
37 | private def gitHash(dir: Option[File] = None): Int = {
38 | val cmd = Seq("git", "log", "-n1", "--format=%h")
39 |
40 | val hash = dir match {
41 | case Some(file) => Process(cmd, file).!!
42 | case None => Process(cmd).!!
43 | }
44 |
45 | java.lang.Integer.parseInt(hash.filter(_ >= ' '), 16)
46 | }
47 |
48 | override lazy val config: Config = Config(
49 | name = "FFTAccelerator",
50 | description = params.protoIQ.real match {
51 | case a: FixedPoint => s"${params.numPoints}-point ${a.getWidth}.${a.binaryPoint.get} FFT"
52 | },
53 | memoryFootprintMiB = 1,
54 | deviceId = 0xD,
55 | param = Array(
56 | Parameter(
57 | name = "gitHash",
58 | description = Some("Git short SHA hash of the repo used to generate this accelerator"),
59 | value = Some(gitHash())
60 | ),
61 | Parameter(
62 | name = "ofdmGitHash",
63 | description = Some("Git short SHA hash of the grebe/ofdm repo used to generate this accelerator"),
64 | value = Some(gitHash(Some(new File("ofdm"))))
65 | ),
66 | Parameter(
67 | name = "startAddr",
68 | description = Some("The memory address to start the FFT (output written here)")
69 | ),
70 | Parameter(
71 | name = "count",
72 | description = Some("The number of 1D FFTs to do (only 1 supported)")
73 | ),
74 | Parameter(
75 | name = "stride",
76 | description = Some("The stride between each FFT (must be point size)")
77 | )
78 | )
79 | )
80 |
81 | }
82 |
83 | object FFTAccelerator {
84 |
85 | /** FFTAccelerator states for internal state machines */
86 | object S extends ChiselEnum {
87 | val Idle, DMALoad, DMAStore, Done = Value
88 | }
89 |
90 | /** FFTAccelerator error codes */
91 | object Errors extends ChiselEnum {
92 | val None = Value(0.U)
93 | val InvalidWidth, Unimplemented = Value
94 | }
95 |
96 | }
97 |
98 | /** An ESP accelerator that performs an N-point Fast Fourier Transform (FFT)
99 | * @param dmaWidth the width of the ESP DMA bus
100 | * @param params parameters describing the FFT
101 | */
102 | class FFTAccelerator[A <: Data : Real : BinaryRepresentation](dmaWidth: Int, val params: FFTParams[A])
103 | extends Implementation(dmaWidth) with FFTSpecification {
104 |
105 | require(params.protoIQ.real.getWidth <= 32, "This FFT has bugs for bit widths > 32 bits!")
106 |
107 | import FFTAccelerator._
108 |
109 | private def unimplemented(): Unit = {
110 | state := S.Done
111 | debug := Errors.Unimplemented
112 | }
113 |
114 | override val implementationName: String = {
115 | val tpe = params.fftType match {
116 | case DirectFFTType => "Direct"
117 | case SDFFFTType => "SDF"
118 | }
119 | val fixedPoint = params.protoIQ.real match {
120 | case a: FixedPoint => s"${a.getWidth}p${a.binaryPoint.get}"
121 | }
122 |
123 | s"${params.numPoints}PointFP$fixedPoint$tpe"
124 | }
125 |
126 | /** The underlying FFT hardware */
127 | val fft = Module(
128 | new MultiIOModule {
129 | val underlyingFFT = Module(new FFT(params))
130 | val desser = Module(new PacketSerializer(PacketSerDesParams(params.protoIQ, params.numPoints)))
131 | val in = IO(chiselTypeOf(underlyingFFT.io.in))
132 | underlyingFFT.io.in <> in
133 | desser.io.in <> underlyingFFT.io.out
134 | val out = IO(chiselTypeOf(desser.io.out))
135 | out <> desser.io.out
136 | }
137 | )
138 | dontTouch(fft.in)
139 | dontTouch(fft.out)
140 |
141 |   /** Current state of the accelerator's control state machine */
142 | val state = RegInit(S.Idle)
143 | val addr = Reg(chiselTypeOf(io.config.get("startAddr")).asUInt)
144 | val count = Reg(chiselTypeOf(io.config.get("count")).asUInt)
145 | val stride = Reg(chiselTypeOf(io.config.get("stride")).asUInt)
146 | val debug = RegInit(Errors.None)
147 |
148 | io.debug := debug.asUInt
149 | io.done := state === S.Done
150 |
151 | fft.in.bits.real := DontCare
152 | fft.in.bits.imag := DontCare
153 | fft.in.valid := false.B
154 |
155 | fft.out.ready := false.B
156 |
157 | val dmaRead, dmaWrite = Reg(Valid(UInt(32.W)))
158 |
159 | when (io.enable && state === S.Idle) {
160 | addr := io.config.get("startAddr")
161 | count := io.config.get("count")
162 | stride := io.config.get("stride")
163 |
164 | when (io.config.get("stride").asUInt =/= params.numPoints.U) {
165 | state := S.Done
166 | debug := Errors.InvalidWidth
167 | }.elsewhen(io.config.get("count").asUInt =/= 1.U) {
168 | unimplemented()
169 | }.otherwise {
170 | state := S.DMALoad
171 | Seq(dmaRead, dmaWrite).foreach { a =>
172 | a.valid := false.B
173 | a.bits := 0.U
174 | }
175 | }
176 | }
177 |
178 |   /* @todo Clean up this conversion of interleaved real/imaginary DMA words into complex samples */
179 | val real_d = Reg(params.protoIQ.real)
180 | val readQueue = Module(new Queue(params.protoIQ, entries=2))
181 | io.dma.readChannel.ready := readQueue.io.enq.ready
182 | readQueue.io.enq.valid := io.dma.readChannel.valid && dmaRead.bits(0)
183 | readQueue.io.enq.bits := DspComplex.wire(real_d, io.dma.readChannel.bits.asTypeOf(params.protoIQ.imag))
184 | fft.in <> readQueue.io.deq
185 |
186 | when (state === S.DMALoad) {
187 | io.dma.readControl.valid := ~dmaRead.valid
188 | io.dma.readControl.bits.index := addr
189 | io.dma.readControl.bits.length := stride * count * 2
190 |
191 | when (io.dma.readControl.fire) {
192 | dmaRead.valid := true.B
193 | }
194 |
195 | when (io.dma.readChannel.fire) {
196 | dmaRead.bits := dmaRead.bits + 1.U
197 | when (~dmaRead.bits(0)) {
198 | real_d := io.dma.readChannel.bits.asTypeOf(real_d)
199 | }
200 | when (dmaRead.bits === stride * count * 2 - 1) {
201 | state := S.DMAStore
202 | }
203 | }
204 | }
205 |
206 | io.dma.writeChannel.valid := dmaWrite.valid && fft.out.valid
207 | fft.out.ready := dmaWrite.valid && dmaWrite.bits(0) && io.dma.writeChannel.ready
208 | io.dma.writeChannel.bits := Mux(dmaWrite.bits(0), fft.out.bits.imag.asUInt, fft.out.bits.real.asUInt)
209 |
210 | when (state === S.DMAStore) {
211 | io.dma.writeControl.valid := ~dmaWrite.valid
212 | io.dma.writeControl.bits.index := addr
213 | io.dma.writeControl.bits.length := stride * count * 2
214 |
215 | when (io.dma.writeControl.fire) {
216 | dmaWrite.valid := true.B
217 | }
218 |
219 | when (io.dma.writeChannel.fire) {
220 | dmaWrite.bits := dmaWrite.bits + 1.U
221 | }
222 | when (dmaWrite.bits === stride * count * 2 - 1) {
223 | state := S.Done
224 | }
225 | }
226 |
227 | when (state === S.Done) {
228 | state := S.Idle
229 | debug := Errors.None
230 | }
231 |
232 | }
233 |
234 | /** A 32-point 32.20 fixed point FFT accelerator */
235 | class DefaultFFTAccelerator(dmaWidth: Int) extends FFTAccelerator(dmaWidth, FFTParams.fixed(32, 20, 32, 32))
236 |
237 | private[esp] object DefaultFFTAccelerator {
238 | val architecture = "32PointFP32p20SDF"
239 | }
240 |
--------------------------------------------------------------------------------
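A minimal sketch, not part of the repository, showing how an FFTAccelerator with a non-default geometry might be elaborated. It mirrors the ChiselStage and ChiselGeneratorAnnotation flow used by FFTAcceleratorSpec later in this tree; the object name and the particular FFTParams.fixed arguments (whose order follows the spec's `FFTParams.fixed(width, binaryPoint, width, numPoints)` usage) are illustrative.

import chisel3.stage.{ChiselGeneratorAnnotation, ChiselStage}

import esp.examples.FFTAccelerator
import ofdm.fft.FFTParams

object ElaborateFFT extends App {
  // A 16-point FFT on a 32-bit DMA bus, using 32-bit fixed point with a 16-bit binary point.
  // Widths above 32 bits or non-power-of-2 point counts are rejected during elaboration.
  (new ChiselStage).execute(
    Array.empty,
    Seq(ChiselGeneratorAnnotation(() => new FFTAccelerator(32, FFTParams.fixed(32, 16, 32, 16))))
  )
}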
/src/main/scala/esp/simulation/Dma.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp.simulation
16 |
17 | import chisel3._
18 | import chisel3.util.{log2Up, Queue, RRArbiter, Valid}
19 | import chisel3.util.experimental.loadMemoryFromFile
20 |
21 | import esp.{DmaIO, DmaControl}
22 |
23 | class DmaRequest(val memorySize: Int) extends Bundle {
24 | val index = UInt(log2Up(memorySize).W)
25 | val length = UInt(log2Up(memorySize).W)
26 | val tpe = Bool()
27 | }
28 |
29 | object DmaRequest {
30 | val read: Bool = false.B
31 | val write: Bool = true.B
32 |
33 | def init(memorySize: Int) = {
34 | val a = Wire(new Valid(new DmaRequest(memorySize)))
35 | a.valid := false.B
36 | a.bits.index := DontCare
37 | a.bits.length := DontCare
38 | a.bits.tpe := DontCare
39 | a
40 | }
41 | }
42 |
43 | /** Simulation model of the ESP DMA controller and some backing simulation memory
44 |   * @tparam A the type of data held by the backing memory
45 |   * @param size the number of words of type gen in the memory
46 | * @param gen the type of underlying data, e.g., [[chisel3.UInt UInt]]
47 | * @param initFile an optional file to preload the memory with
48 | */
49 | class Dma[A <: Data](size: Int, gen: A, initFile: Option[String] = None) extends Module {
50 |
51 | private val dmaWidth = gen.getWidth
52 |
53 | val io = IO(Flipped(new DmaIO(dmaWidth)))
54 |
55 | val req = RegInit(DmaRequest.init(size))
56 |
57 | /* Only one outstanding read or write request at a time */
58 | Seq(io.readControl, io.writeControl).map(_.ready := !req.valid)
59 |
60 | val arb = Module(new RRArbiter(new DmaControl, 2))
61 | arb.io.in
62 | .zip(Seq(io.readControl, io.writeControl))
63 | .map{ case (a, b) => a <> b }
64 |
65 | arb.io.out.ready := !req.valid
66 | when (arb.io.out.fire) {
67 | req.valid := true.B
68 | req.bits.index := arb.io.out.bits.index
69 | req.bits.length := arb.io.out.bits.length
70 | req.bits.tpe := arb.io.chosen
71 | }
72 |
73 | /* Defaults */
74 | io.writeChannel.ready := false.B
75 |
76 | /** Queue of read responses */
77 | val readQueue: Queue[A] = Module(new Queue(gen, 8))
78 | readQueue.io.deq <> io.readChannel
79 | assert(!readQueue.io.enq.valid || readQueue.io.enq.ready, "Response Queue dropped input data!")
80 |
81 | /** Queue of write requests */
82 | val writeQueue: Queue[A] = Module(new Queue(gen, 8))
83 | writeQueue.io.enq.valid := io.writeChannel.valid
84 | writeQueue.io.enq.bits := io.writeChannel.bits
85 | io.writeChannel.ready := writeQueue.io.enq.ready && (req.bits.tpe === DmaRequest.write)
86 |
87 |   /** Asserted if it is safe to send a fire-and-forget read to the memory whose response will be caught by the
88 |     * [[readQueue]]. This implies that the [[Dma]] unit is processing a read request and the [[readQueue]] will not
89 |     * be full by the time the data arrives.
90 | */
91 | val doRead: Bool = req.valid &&
92 | (req.bits.tpe === DmaRequest.read) &&
93 | (readQueue.io.count < (readQueue.entries - 2).U) &&
94 | (req.bits.length =/= 0.U)
95 |
96 | /** Asserted if it is safe to send a write to the memory. */
97 | val doWrite: Bool = req.valid &&
98 | (req.bits.tpe === DmaRequest.write) &&
99 | writeQueue.io.deq.valid &&
100 | (req.bits.length =/= 0.U)
101 |
102 | /* Synchronous Read Memory that encapsulates the virtual memory space of the accelerator */
103 | val mem: SyncReadMem[A] = SyncReadMem(size, gen.cloneType)
104 | initFile.map(loadMemoryFromFile(mem, _))
105 |
106 | readQueue.io.enq.bits := mem.read(req.bits.index)
107 |
108 |   /* Issue a read to the memory; the data read is valid (and enqueued into the readQueue) one cycle later */
109 | readQueue.io.enq.valid := RegNext(doRead)
110 | when (doRead) {
111 | req.bits.index := req.bits.index + 1.U
112 | req.bits.length := req.bits.length - 1.U
113 | }
114 |
115 | /* When the request is done, then reset the request register */
116 | when (req.valid && (req.bits.length === 0.U) && !readQueue.io.deq.valid && !writeQueue.io.deq.valid) {
117 | req.valid := false.B
118 | }
119 |
120 | /* Allow a write to go to the memory */
121 | writeQueue.io.deq.ready := doWrite
122 | when (doWrite) {
123 | req.bits.index := req.bits.index + 1.U
124 | req.bits.length := req.bits.length - 1.U
125 | mem.write(req.bits.index, writeQueue.io.deq.bits)
126 | }
127 |
128 | }
129 |
--------------------------------------------------------------------------------
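A minimal sketch, not part of the repository, of instantiating the Dma simulation model defined above with an 8-bit data type and a preloaded memory image, reusing the chisel3.iotesters flow that DmaSpec uses later in this tree. The object name, the relative resource path, and the trivial checks are illustrative.

import chisel3._
import chisel3.iotesters.{Driver, PeekPokeTester}

import esp.simulation.Dma

object DmaSmokeTest extends App {
  // A 1024-word, 8-bit-wide memory preloaded from the linear-mem.txt test resource.
  Driver(() => new Dma(1024, UInt(8.W), Some("src/test/resources/linear-mem.txt")), "treadle") { dut =>
    new PeekPokeTester(dut) {
      // With no read request issued, the read channel should never present data.
      expect(dut.io.readChannel.valid, 0)
      step(1)
      expect(dut.io.readChannel.valid, 0)
    }
  }
}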
/src/main/scala/esp/transforms/EmitXML.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esp.transforms
16 |
17 | import esp.EspConfigAnnotation
18 |
19 | import java.io.{File, PrintWriter}
20 |
21 | import firrtl.{CircuitForm, CircuitState, FIRRTLException, HighForm, TargetDirAnnotation, Transform}
22 |
23 | class EmitXML extends Transform {
24 | def inputForm: CircuitForm = HighForm
25 | def outputForm: CircuitForm = HighForm
26 |
27 | def execute(state: CircuitState): CircuitState = {
28 | lazy val targetDir: String = state.annotations.collectFirst{ case TargetDirAnnotation(d) => d }.getOrElse{
29 | throw new FIRRTLException("EmitXML expected to see a TargetDirAnnotation, but none found?") }
30 | state.annotations.collect{ case a @ EspConfigAnnotation(_, c, d) =>
31 | val dir = d match {
32 | case Left(absolute) => new File(absolute, s"${c.name}.xml")
33 | case Right(relative) => new File(targetDir, new File(relative, s"${c.name}.xml").toString)
34 | }
35 | val w = new PrintWriter(dir)
36 | w.write(a.toXML)
37 | w.close()
38 | }
39 |
40 | state
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
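A standalone sketch, not part of the repository, of the output-path resolution EmitXML performs above: a Left carries an absolute directory, while a Right is interpreted relative to the compiler's target directory. The directory names and the hypothetical object name are illustrative; the printed paths assume a Unix-like filesystem.

import java.io.File

object XmlPathDemo extends App {
  val targetDir = "test_run_dir"
  val name      = "CounterAccelerator"

  def resolve(d: Either[String, String]): File = d match {
    case Left(absolute)  => new File(absolute, s"$name.xml")
    case Right(relative) => new File(targetDir, new File(relative, s"$name.xml").toString)
  }

  println(resolve(Left("/tmp/esp"))) // /tmp/esp/CounterAccelerator.xml
  println(resolve(Right("xml")))     // test_run_dir/xml/CounterAccelerator.xml
}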
/src/test/resources/linear-mem.txt:
--------------------------------------------------------------------------------
1 | 0
2 | 1
3 | 2
4 | 3
5 | 4
6 | 5
7 | 6
8 | 7
9 | 8
10 | 9
11 | a
12 | b
13 | c
14 | d
15 | e
16 | f
17 | 10
18 | 11
19 | 12
20 | 13
21 | 14
22 | 15
23 | 16
24 | 17
25 | 18
26 | 19
27 | 1a
28 | 1b
29 | 1c
30 | 1d
31 | 1e
32 | 1f
33 |
--------------------------------------------------------------------------------
/src/test/scala/esptests/AcceleratorSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests
16 |
17 | import esp.Implementation
18 |
19 | import chisel3._
20 | import chisel3.tester._
21 |
22 | object AcceleratorSpec {
23 |
24 | implicit class AcceleratorHelpers[A <: Implementation](dut: A) {
25 | def doReset() = {
26 | dut.reset.poke(true.B)
27 | dut.io.enable.poke(false.B)
28 | dut.io.dma.readControl.ready.poke(false.B)
29 | dut.io.dma.writeControl.ready.poke(false.B)
30 | dut.io.dma.readChannel.valid.poke(false.B)
31 | dut.io.dma.writeChannel.ready.poke(false.B)
32 | dut.clock.step(1)
33 | dut.reset.poke(false.B)
34 | }
35 | }
36 |
37 | }
38 |
--------------------------------------------------------------------------------
/src/test/scala/esptests/AcceleratorWrapperSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests
16 |
17 | import chisel3._
18 | import firrtl.{ir => fir}
19 | import org.scalatest.{FlatSpec, Matchers}
20 | import scala.io.Source
21 | import scala.util.matching.Regex
22 | import esp.{AcceleratorWrapper, Config, Implementation, Parameter, Specification}
23 |
24 | class AcceleratorWrapperSpec extends FlatSpec with Matchers {
25 |
26 | /** Extract the port definitions from Verilog strings
27 | * @param strings some Verilog strings
28 | * @return the equivalent FIRRTL [[firrtl.ir.Port Port]]s
29 | */
30 | def collectVerilogIO(strings: Seq[String]): Seq[fir.Port] = {
31 |
32 | def n2z(s: String): Int = s match {
33 | case null => 0
34 | case x => x.toInt
35 | }
36 |
37 | /* Match a one-line input/output statement in Verilog */
38 | val regex = new Regex(raw"^\s*(input|output)\s*(\[(\d+):(\d+)\])?\s*(\w+)", "direction", "width", "high", "low", "name")
39 |
40 | strings
41 | .map(regex.findFirstMatchIn)
42 | .flatten
43 | .map(m => fir.Port(info=fir.NoInfo,
44 | name=m.group("name"),
45 | direction=m.group("direction") match { case "input" => fir.Input; case _ => fir.Output },
46 |                        tpe=fir.UIntType(fir.IntWidth(math.abs(n2z(m.group("high")) - n2z(m.group("low"))) + 1))))
47 | }
48 |
49 | trait FooSpecification extends Specification {
50 | override lazy val config: Config = Config(
51 | name = "foo",
52 | description = "a dummy accelerator used for unit tests",
53 | memoryFootprintMiB = 0,
54 | deviceId = 0,
55 | param = Array(
56 | Parameter("len"),
57 | Parameter("batch")
58 | )
59 | )
60 | }
61 |
62 | class BarImplementation(dmaWidth: Int) extends Implementation(dmaWidth: Int) with FooSpecification {
63 |
64 | override val implementationName: String = "bar"
65 |
66 | }
67 |
68 | behavior of "AcceleratorWrapper"
69 |
70 | it should "have the expected top-level IO when lowered to Verilog" in {
71 | val targetDir = "test_run_dir/AcceleratorWrapper"
72 |
73 | info("Verilog generation okay")
74 | Driver.execute(Array("-X", "verilog", "--target-dir", targetDir),
75 | () => new AcceleratorWrapper(32, (a: Int) => new BarImplementation(a)))
76 |
77 | val expectedIO = collectVerilogIO(Source.fromFile("src/main/resources/esp_acc_iface.v").getLines.toSeq)
78 | val generatedIO = collectVerilogIO(Source.fromFile(s"$targetDir/foo_bar_dma32.v").getLines.toSeq).toSet
79 |
80 | for (g <- expectedIO) {
81 | info(s"Contains: ${g.serialize}")
82 | generatedIO should contain (g)
83 | }
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
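A quick standalone sketch, not part of the repository, of the port-matching regex from collectVerilogIO above applied to two made-up Verilog declarations; it shows why the n2z helper must map a missing width group (null) to zero.

import scala.util.matching.Regex

object PortRegexDemo extends App {
  val regex = new Regex(
    raw"^\s*(input|output)\s*(\[(\d+):(\d+)\])?\s*(\w+)",
    "direction", "width", "high", "low", "name")

  for (line <- Seq("  input clk,", "  output [31:0] debug,")) {
    regex.findFirstMatchIn(line).foreach { m =>
      // Prints: "input clk high=null low=null" and "output debug high=31 low=0"
      println(s"${m.group("direction")} ${m.group("name")} high=${m.group("high")} low=${m.group("low")}")
    }
  }
}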
/src/test/scala/esptests/examples/AdderAcceleratorSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests.examples
16 |
17 | import chisel3._
18 | import chisel3.experimental.BundleLiterals._
19 | import chisel3.tester._
20 |
21 | import org.scalatest.{FlatSpec, Matchers}
22 |
23 | import esp.{DmaControl, DmaSize}
24 | import esp.examples.AdderAccelerator
25 |
26 | import esptests.AcceleratorSpec._
27 |
28 | class AdderAcceleratorSpec extends FlatSpec with ChiselScalatestTester with Matchers {
29 |
30 | behavior of "AdderAccelerator"
31 |
32 | private def adderTest(input: Seq[Int], readAddr: Int = 0, writeAddr: Int = 0) = {
33 | val expectedOutput = input.foldLeft(0){ case (acc, x) => acc + x }
34 | it should s"""reduce [${input.mkString(",")}] to ${expectedOutput}""" in {
35 | test(new AdderAccelerator(32)) { dut =>
36 |
37 | dut.doReset()
38 |
39 | dut.io.dma.readControl.initSink().setSinkClock(dut.clock)
40 | dut.io.dma.readChannel.initSource().setSourceClock(dut.clock)
41 | dut.io.dma.writeControl.initSink().setSinkClock(dut.clock)
42 | dut.io.dma.writeChannel.initSink().setSinkClock(dut.clock)
43 |
44 | timescope {
45 | dut.io.config.get("readAddr").poke(readAddr.U)
46 | dut.io.config.get("size").poke(input.length.U)
47 | dut.io.config.get("writeAddr").poke(writeAddr.U)
48 | dut.io.enable.poke(true.B)
49 | dut.clock.step()
50 | }
51 |
52 | input.length match {
53 | case 0 =>
54 | case _ =>
55 | dut.io.dma.readControl
56 | .expectDequeue((new DmaControl).Lit(_.index -> readAddr.U, _.length -> input.length.U, _.size -> DmaSize.word))
57 | dut.io.dma.readChannel.enqueueSeq(input.map(_.U))
58 | }
59 |
60 | dut.io.dma.writeControl
61 | .expectDequeue((new DmaControl).Lit(_.index -> writeAddr.U, _.length -> 1.U, _.size -> DmaSize.word))
62 |
63 | dut.io.dma.writeChannel
64 |           .expectDequeue(expectedOutput.U)
65 |
66 | dut.io.done.expect(true.B)
67 |
68 | }
69 | }
70 |
71 | }
72 |
73 | Seq( Seq.empty[Int],
74 | Seq(0),
75 | Seq(1),
76 | Seq(1, 2, 3),
77 | Seq(100, 200, 300, 400, 500, 600, 700, 800, 900, 1000))
78 | .foreach(adderTest(_))
79 |
80 | }
81 |
--------------------------------------------------------------------------------
/src/test/scala/esptests/examples/CounterAcceleratorSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests.examples
16 |
17 | import chisel3._
18 | import chisel3.tester._
19 |
20 | import org.scalatest._
21 |
22 | import esp.examples.CounterAccelerator
23 |
24 | class CounterAcceleratorSpec extends FlatSpec with ChiselScalatestTester with Matchers {
25 |
26 | behavior of "CounterAccelerator"
27 |
28 | Seq(8, 64, 512).foreach{ cycles =>
29 | it should s"assert done after $cycles cycles" in {
30 | test(new CounterAccelerator(32)) { dut =>
31 | dut.io.enable.poke(false.B)
32 | dut.io.dma.readControl.ready.poke(false.B)
33 | dut.io.dma.writeControl.ready.poke(false.B)
34 | dut.io.dma.readChannel.valid.poke(false.B)
35 | dut.io.dma.writeChannel.ready.poke(false.B)
36 |
37 | dut.clock.step(1)
38 |
39 | dut.io.config.get("ticks").poke(cycles.U)
40 | dut.io.enable.poke(true.B)
41 |
42 | dut.clock.step(1)
43 | dut.io.enable.poke(false.B)
44 |
45 | for (i <- 0 to cycles - 2) {
46 | dut.clock.step(1)
47 | dut.io.done.expect(false.B)
48 | }
49 |
50 | dut.clock.step(1)
51 | dut.io.done.expect(true.B)
52 |
53 | dut.clock.step(1)
54 | dut.io.done.expect(false.B)
55 | }
56 | }
57 | }
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/src/test/scala/esptests/examples/FFTAcceleratorSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2018-2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests.examples
16 |
17 | import breeze.linalg.{DenseVector, randomDouble}
18 | import breeze.signal.fourierTr
19 | import breeze.math.Complex
20 |
21 | import chisel3._
22 | import chisel3.experimental.BundleLiterals._
23 | import chisel3.experimental.FixedPoint
24 | import chisel3.internal.firrtl.KnownBinaryPoint
25 | import chisel3.stage.{ChiselStage, ChiselGeneratorAnnotation}
26 | import chisel3.tester._
27 | import chisel3.tester.experimental.TestOptionBuilder._
28 | import chisel3.tester.internal.WriteVcdAnnotation
29 |
30 | import dsptools.numbers.DspComplex
31 |
32 | import esp.{DmaControl, DmaSize}
33 | import esp.examples.FFTAccelerator
34 |
35 | import firrtl.options.OptionsException
36 |
37 | import ofdm.fft.FFTParams
38 |
39 | import org.scalatest._
40 |
41 | import org.scalactic.Equality
42 | import org.scalactic.TripleEquals._
43 | import org.scalactic.Tolerance._
44 |
45 | class FFTAcceleratorSpec extends FlatSpec with ChiselScalatestTester with Matchers {
46 |
47 | private implicit class FFTAcceleratorHelpers(dut: FFTAccelerator[_]) {
48 | def doReset() = {
49 | dut.reset.poke(true.B)
50 | dut.io.enable.poke(false.B)
51 | dut.io.dma.readControl.ready.poke(false.B)
52 | dut.io.dma.writeControl.ready.poke(false.B)
53 | dut.io.dma.readChannel.valid.poke(false.B)
54 | dut.io.dma.writeChannel.ready.poke(false.B)
55 | dut.clock.step(1)
56 | dut.reset.poke(false.B)
57 | }
58 | }
59 |
60 | private implicit class BigIntHelpers(a: BigInt) {
61 | /** Convert from a BigInt to two's complement BigInt */
62 | def toTwosComplement(width: Int, binaryPoint: BigInt) = a match {
63 | case _ if a < 0 => a + BigInt(2).pow(width)
64 | case _ => a
65 | }
66 |
67 | /** Convert from a BigInt in two's complement to a signed BigInt */
68 | def fromTwosComplement(width: Int, binaryPoint: BigInt) = a match {
69 | case _ if a >= BigInt(2).pow(width - 1) => a - BigInt(2).pow(width)
70 | case _ => a
71 | }
72 | }
73 |
74 | private implicit class FixedPointHelpers(a: FixedPoint) {
75 | def toDouble = a.binaryPoint match {
76 | case KnownBinaryPoint(value) => a.litValue.toDouble / math.pow(2, value)
77 | }
78 | }
79 |
80 | private class ToleranceFixture(t: Double) {
81 | implicit val theTolerance = new Equality[Double] {
82 | def areEqual(a: Double, b: Any): Boolean = b match {
83 |       case bb: Double => a === bb +- t
84 | case _ => false
85 | }
86 | }
87 | }
88 |
89 | behavior of "FFTAccelerator"
90 |
91 | it should "fail to elaborate for non-power-of-2 numbers of points" in {
92 | /* @todo: It would be better to verify that this was more than just an OptionsException */
93 | assertThrows[OptionsException] {
94 | (new ChiselStage)
95 | .execute(Array.empty, Seq(ChiselGeneratorAnnotation(() => new FFTAccelerator(32, FFTParams.fixed(8, 0, 8, 3)))))
96 | }
97 | }
98 |
99 | it should "error for an FFT with stride not equal to its points size" in {
100 | val numPoints = 4
101 |
102 | info("errors for stride < points size")
103 | test(new FFTAccelerator(32, FFTParams.fixed(32, 16, 32, numPoints))){ dut =>
104 | dut.doReset()
105 | dut.io.config.get("stride").poke((numPoints - 1).U)
106 | dut.io.enable.poke(true.B)
107 |
108 | dut.clock.step(1)
109 | dut.io.done.expect(true.B)
110 |       dut.io.debug.expect(1.U) // @todo: Change this to check the enum value
111 | }
112 |
113 | info("errors for stride > points size")
114 | test(new FFTAccelerator(32, FFTParams.fixed(32, 16, 32, numPoints))){ dut =>
115 | dut.doReset()
116 | dut.io.config.get("stride").poke((numPoints + 1).U)
117 | dut.io.enable.poke(true.B)
118 |
119 | dut.clock.step(1)
120 | dut.io.done.expect(true.B)
121 |       dut.io.debug.expect(1.U) // @todo: Change this to check the enum value
122 | }
123 | }
124 |
125 | it should "fail to elaborate for bit widths > 32" in {
126 | assertThrows[OptionsException] {
127 | (new ChiselStage)
128 | .execute(Array.empty, Seq(ChiselGeneratorAnnotation(() => new FFTAccelerator(32, FFTParams.fixed(33, 16, 32, 8)))))
129 | }
130 | }
131 |
132 | def testRandom(numPoints: Int, width: Int, binaryPoint: Int, tolerance: Double): Unit = {
133 | val description = s"do a 1-D $numPoints-point $width.$binaryPoint fixed point FFT within $tolerance of double"
134 | it should description in new ToleranceFixture(tolerance) {
135 | val input = DenseVector.fill(numPoints) { Complex(randomDouble() * 2 - 1, 0) }
136 | val output = fourierTr(input).toScalaVector
137 |
138 | test(new FFTAccelerator(32, FFTParams.fixed(width, binaryPoint, width, numPoints)))
139 | .withAnnotations(Seq(WriteVcdAnnotation)) { dut =>
140 | dut.doReset()
141 |
142 | dut.io.dma.readControl.initSink().setSinkClock(dut.clock)
143 | dut.io.dma.readChannel.initSource().setSourceClock(dut.clock)
144 | dut.io.dma.writeControl.initSink().setSinkClock(dut.clock)
145 | dut.io.dma.writeChannel.initSink().setSinkClock(dut.clock)
146 |
147 | dut.io.config.get("startAddr").poke(0.U)
148 | dut.io.config.get("count").poke(1.U)
149 | dut.io.config.get("stride").poke(numPoints.U)
150 | dut.io.enable.poke(true.B)
151 | dut.clock.step(1)
152 |
153 | dut.io.enable.poke(false.B)
154 |
155 | dut.io.dma.readControl
156 | .expectDequeue((new DmaControl).Lit(_.index -> 0.U, _.length -> (numPoints * 2).U, _.size -> DmaSize.word))
157 |
158 | {
159 | val inputx = input
160 | .toArray
161 | .flatMap(a => Seq(a.real, a.imag))
162 | .map(FixedPoint.toBigInt(_, binaryPoint))
163 | .map(_.toTwosComplement(width, binaryPoint))
164 | .map(_.U)
165 | dut.io.dma.readChannel.enqueueSeq(inputx)
166 | }
167 |
168 | dut.io.dma.writeControl
169 | .expectDequeue((new DmaControl).Lit(_.index -> 0.U, _.length -> (numPoints * 2).U, _.size -> DmaSize.word))
170 |
171 | {
172 | val outputx = output
173 | .toArray
174 | .flatMap(a => Seq(a.real, a.imag))
175 | .map(FixedPoint.fromDouble(_, width.W, binaryPoint.BP))
176 | val fftOut = for (i <- 0 until numPoints * 2) yield {
177 | dut.io.dma.writeChannel.ready.poke(true.B)
178 |             while (!dut.io.dma.writeChannel.valid.peek().litToBoolean) {
179 | dut.clock.step(1)
180 | }
181 | dut.io.dma.writeChannel.valid.expect(true.B)
182 | val tmp = FixedPoint
183 | .fromBigInt(dut.io.dma.writeChannel.bits.peek().litValue.fromTwosComplement(width, binaryPoint), width, binaryPoint)
184 | dut.clock.step(1)
185 | tmp
186 | }
187 | fftOut.zip(outputx).foreach{ case (a: FixedPoint, b: FixedPoint) =>
188 | val Seq(ax, bx) = Seq(a, b).map(_.toDouble)
189 | ax should === (bx)
190 | }
191 | }
192 |
193 | dut.io.done.expect(true.B)
194 | dut.io.debug.expect(0.U)
195 |
196 | }
197 | }
198 | }
199 |
200 | Seq(
201 | (2, 32, 20, 0.001),
202 | (4, 32, 20, 0.001),
203 | (8, 32, 20, 0.001),
204 | (16, 32, 20, 0.001),
205 | (32, 32, 20, 0.001),
206 | (64, 32, 20, 0.001),
207 | (128, 32, 20, 0.001)).foreach((testRandom _).tupled)
208 |
209 | it should "perform a 2-D convolution" in (pending)
210 |
211 | }
212 |
--------------------------------------------------------------------------------
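A small standalone sketch, not part of the repository, of the two's-complement and fixed-point arithmetic that the BigIntHelpers and FixedPointHelpers in FFTAcceleratorSpec above rely on; the 8-bit width and 2-bit binary point are chosen purely for illustration.

object TwosComplementDemo extends App {
  val width = 8

  def toTwosComplement(a: BigInt): BigInt =
    if (a < 0) a + BigInt(2).pow(width) else a

  def fromTwosComplement(a: BigInt): BigInt =
    if (a >= BigInt(2).pow(width - 1)) a - BigInt(2).pow(width) else a

  assert(toTwosComplement(BigInt(-3)) == BigInt(253))   // -3 encoded as an unsigned 8-bit pattern
  assert(fromTwosComplement(BigInt(253)) == BigInt(-3)) // ... and decoded back again

  // With a 2-bit binary point, the fixed-point value 1.75 is stored as the integer 7 (1.75 * 2^2).
  val binaryPoint = 2
  assert((1.75 * math.pow(2, binaryPoint)).toInt == 7)
}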
/src/test/scala/esptests/simulation/DmaSpec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2019 IBM
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package esptests.simulation
16 |
17 | import chisel3._
18 | import chisel3.iotesters.{ChiselFlatSpec, Driver, AdvTester}
19 |
20 | import java.io.File
21 |
22 | import esp.simulation.Dma
23 |
24 | import scala.collection.mutable
25 |
26 | class DmaTester[A <: Data](dut: Dma[A], delay: Option[Int]) extends AdvTester(dut) {
27 |
28 | /** Reset the inputs to some known-safe state */
29 | protected def reset(): Unit =
30 | Seq( dut.io.readControl.valid,
31 | dut.io.writeControl.valid,
32 | dut.io.readChannel.ready,
33 | dut.io.writeChannel.valid )
34 | .map( wire_poke(_, false.B) )
35 |
36 | /** Read some data from the simulation memory
37 | * @param addr base address
38 | * @param length number of words to read
39 | * @param delay optional delay between consecutive reads
40 | * @return a sequence of values read
41 | */
42 | protected def read(addr: Int, length: Int, delay: Option[Int]): Seq[BigInt] = {
43 | /* Before starting, there should be nothing on the read channel */
44 | expect(dut.io.readChannel.valid, false)
45 |
46 | /* Output data written to a mutable buffer */
47 | val data = mutable.ListBuffer[BigInt]()
48 |
49 | /* Assert the read request */
50 | wire_poke(dut.io.readControl.valid, true.B)
51 | wire_poke(dut.io.readControl.bits.index, addr.U)
52 | wire_poke(dut.io.readControl.bits.length, length.U)
53 |
54 | /* Wait until readControl is ready */
55 | eventually(peek(dut.io.readControl.ready) == 1)
56 |
57 | step(1)
58 |
59 | reset()
60 |
61 | /* Wait until all expected things are read */
62 | for (i <- 0 until length) {
63 | eventually(peek(dut.io.readChannel.valid) == 1)
64 |
65 | wire_poke(dut.io.readChannel.ready, true.B)
66 | peek(dut.io.readChannel.bits) +=: data
67 | step(1)
68 | reset()
69 |
70 | delay.map { case d =>
71 | wire_poke(dut.io.readChannel.ready, false.B)
72 | step(d)
73 | }
74 | }
75 |
76 | reset()
77 | step(1)
78 |
79 | data.reverse
80 | }
81 |
82 |   /** Write `data` to the simulation memory starting at `addr`, optionally waiting `delay` cycles
83 |     * between consecutive writes.
84 |     */
85 | protected def write(addr: Int, data: Seq[Int], delay: Option[Int]): Unit = {
86 | /* Before starting, the writeChannel should act like it can't accept data */
87 | expect(dut.io.writeChannel.ready, false)
88 |
89 | /* Assert the write request */
90 | wire_poke(dut.io.writeControl.valid, true)
91 | wire_poke(dut.io.writeControl.bits.index, addr)
92 | wire_poke(dut.io.writeControl.bits.length, data.size)
93 |
94 | /* Wait until writeControl is ready */
95 | eventually(peek(dut.io.writeControl.ready) == 1)
96 |
97 | step(1)
98 | reset()
99 |
100 | /* Wait until all data is written */
101 | data.map { case word =>
102 | eventually(peek(dut.io.writeChannel.ready) == 1)
103 |
104 | wire_poke(dut.io.writeChannel.valid, true)
105 | wire_poke(dut.io.writeChannel.bits, word)
106 | step(1)
107 | reset()
108 |
109 | delay.map { case d =>
110 |         wire_poke(dut.io.writeChannel.valid, false)
111 | step(d)
112 | }
113 | }
114 |
115 | }
116 |
117 | }
118 |
119 | class DmaReadTester[A <: Data](dut: Dma[A], delay: Option[Int]) extends DmaTester(dut, delay) {
120 |
121 | step(1)
122 | reset()
123 |
124 | Seq( (0, 1, Seq(0)),
125 | (0, 2, Seq(0, 1)),
126 | (8, 8, 8.until(16)),
127 | (0, 16, 0.until(16)),
128 | (31, 0, Seq.empty) )
129 | .map{ case (addr, length, expected) =>
130 | val out: Seq[BigInt] = read(addr, length, delay)
131 | assert(out == expected, s"Read sequence '$out', expected '$expected'")
132 | println(s"""Read mem[$addr+:$length]: ${out.mkString(", ")}""")
133 | }
134 |
135 | }
136 |
137 | class DmaWriteTester[A <: Data](dut: Dma[A], delay: Option[Int]) extends DmaTester(dut, delay) {
138 |
139 | step(1)
140 | reset()
141 |
142 | val tests: Seq[(Int, Seq[Int])] = Seq(
143 | (0, Seq(10)),
144 | (0, Seq(20, 21)),
145 | (8, 30.until(38)),
146 | (0, 40.until(40 + 16)),
147 | (31, Seq.empty[Int]) )
148 |
149 | tests
150 | .map{ case (addr, data) =>
151 | println(s"""Write mem[$addr]: ${data.mkString(", ")}""")
152 | write(addr, data, delay)
153 | val out: Seq[BigInt] = read(addr, data.size, None)
154 | println(s"""Read mem[$addr+:${data.size}]: ${out.mkString(", ")}""")
155 | assert(out == data, s"Read '$out' did not match written '$data'")
156 | }
157 |
158 | }
159 |
160 | class DmaSpec extends ChiselFlatSpec {
161 |
162 | val resourceDir: File = new File(System.getProperty("user.dir"), "src/test/resources")
163 |
164 | behavior of classOf[esp.simulation.Dma[UInt]].getName
165 |
166 |   it should "read from memory without delays" in {
167 |
168 | Driver(() => new Dma(1024, UInt(8.W), Some(new File(resourceDir, "linear-mem.txt").toString)), "treadle") {
169 | dut => new DmaReadTester(dut, None)
170 | } should be (true)
171 |
172 | }
173 |
174 |   it should "read from memory with delays between reads" in {
175 |
176 | Driver(() => new Dma(1024, UInt(8.W), Some(new File(resourceDir, "linear-mem.txt").toString)), "treadle") {
177 | dut => new DmaReadTester(dut, Some(16))
178 | } should be (true)
179 |
180 | }
181 |
182 |   it should "write to memory without delays" in {
183 |
184 | Driver(() => new Dma(1024, UInt(8.W), Some(new File(resourceDir, "linear-mem.txt").toString)), "treadle") {
185 | dut => new DmaWriteTester(dut, None)
186 | } should be (true)
187 |
188 | }
189 |
190 |   it should "write to memory with delays between writes" in {
191 |
192 | Driver(() => new Dma(1024, UInt(8.W), Some(new File(resourceDir, "linear-mem.txt").toString)), "treadle") {
193 | dut => new DmaWriteTester(dut, Some(16))
194 | } should be (true)
195 |
196 | }
197 |
198 | }
199 |
--------------------------------------------------------------------------------