├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── build.sbt ├── project ├── build.properties └── plugins.sbt └── src ├── main ├── proto │ ├── coverage.proto │ └── firrtl.proto └── scala │ └── coverage │ ├── CoverAspect.scala │ ├── CoverGroup.scala │ ├── CoverPoint.scala │ ├── CoverTransform.scala │ ├── Coverage.scala │ ├── CoverageTracker.scala │ ├── Database.scala │ ├── HistogramSignal.scala │ ├── Main.scala │ ├── Quanta.scala │ ├── ResetFinder.scala │ ├── SignalTracker.scala │ ├── View.scala │ └── cone │ ├── Cone.scala │ ├── ConeFinder.scala │ └── ConeStatement.scala └── test └── scala └── coverage ├── CoverageSpec.scala ├── DatabaseSpec.scala ├── MiniCoverages.scala ├── TestUtils.scala └── cone └── ConeSpec.scala /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/* 2 | project/project 3 | project/target 4 | target 5 | test_run_dir 6 | 7 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "riscv-mini"] 2 | path = riscv-mini 3 | url = git@github.com:ucb-bar/riscv-mini.git 4 | [submodule "chisel3"] 5 | path = chisel3 6 | url = git@github.com:freechipsproject/chisel3.git 7 | [submodule "firrtl"] 8 | path = firrtl 9 | url = git@github.com:freechipsproject/firrtl.git 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # chisel-coverage 2 | A coverage library for Chisel designs 3 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | def scalacOptionsVersion(scalaVersion: String): Seq[String] = { 2 | Seq() ++ { 3 | // If we're building with Scala > 2.11, enable the compile option 4 | // switch to support our anonymous Bundle definitions: 5 | // https://github.com/scala/bug/issues/10047 6 | CrossVersion.partialVersion(scalaVersion) match { 7 | case Some((2, scalaMajor: Long)) if scalaMajor < 12 => Seq() 8 | case _ => Seq("-Xsource:2.11") 9 | } 10 | } 11 | } 12 | 13 | def javacOptionsVersion(scalaVersion: String): Seq[String] = { 14 | Seq() ++ { 15 | // Scala 2.12 requires Java 8. We continue to generate 16 | // Java 7 compatible code for Scala 2.11 17 | // for compatibility with old clients. 18 | CrossVersion.partialVersion(scalaVersion) match { 19 | case Some((2, scalaMajor: Long)) if scalaMajor < 12 => 20 | Seq("-source", "1.7", "-target", "1.7") 21 | case _ => 22 | Seq("-source", "1.8", "-target", "1.8") 23 | } 24 | } 25 | } 26 | 27 | updateOptions := updateOptions.value.withLatestSnapshots(true) 28 | 29 | scalaVersion := "2.12.4" 30 | 31 | crossScalaVersions := Seq("2.11.12", "2.12.4") 32 | 33 | scalacOptions := Seq("-deprecation", "-feature") ++ scalacOptionsVersion(scalaVersion.value) 34 | 35 | javacOptions ++= javacOptionsVersion(scalaVersion.value) 36 | 37 | lazy val commonSettings = Seq ( 38 | organization := "edu.berkeley.cs", 39 | scalaVersion := "2.12.4", 40 | 41 | resolvers ++= Seq( 42 | Resolver.sonatypeRepo("snapshots"), 43 | Resolver.sonatypeRepo("releases") 44 | ), 45 | 46 | libraryDependencies ++= Seq( 47 | "org.scalatest" %% "scalatest" % "3.0.1" 48 | ), 49 | ) 50 | 51 | 52 | 53 | //val commonSettings = Seq( 54 | // scalaVersion := "2.12.4", 55 | // crossScalaVersions := Seq("2.11.12", "2.12.4"), 56 | // resolvers ++= Seq( 57 | // Resolver.sonatypeRepo("snapshots"), 58 | // Resolver.sonatypeRepo("releases") 59 | // ) 60 | //) 61 | 62 | val srcSettings = commonSettings ++ Seq( 63 | libraryDependencies ++= Seq( 64 | "edu.berkeley.cs" %% "treadle" % "1.1-SNAPSHOT", 65 | "edu.berkeley.cs" %% "chisel-iotesters" % "1.3-SNAPSHOT" 66 | ) 67 | ) 68 | 69 | val miniSettings = commonSettings ++ Seq( 70 | name := "riscv-mini", 71 | version := "2.0-SNAPSHOT", 72 | organization := "edu.berkeley.cs" 73 | ) 74 | 75 | lazy val firrtl = project in file("./firrtl") settings commonSettings 76 | lazy val chisel = project in file("./chisel3") settings commonSettings settings srcSettings dependsOn firrtl 77 | lazy val lib = project in file("./riscv-mini/lib") settings commonSettings settings srcSettings dependsOn chisel dependsOn firrtl 78 | lazy val mini = project in file("./riscv-mini") settings miniSettings settings srcSettings dependsOn lib dependsOn chisel dependsOn firrtl 79 | lazy val coverage = project in file(".") settings commonSettings settings srcSettings dependsOn (mini % "test->test;test->compile;compile->test") dependsOn chisel dependsOn (firrtl % "compile->compile;test->test") 80 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.2.7 2 
| -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.url("scalasbt", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases")) (Resolver.ivyStylePatterns) 2 | 3 | resolvers += Classpaths.sbtPluginReleases 4 | 5 | resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven" 6 | 7 | addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0") 8 | 9 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1") 10 | 11 | addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.3.1") 12 | 13 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.7.0") 14 | 15 | addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.1") 16 | 17 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.6") 18 | 19 | addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.1") 20 | 21 | addSbtPlugin("com.github.gseitz" % "sbt-protobuf" % "0.6.3") 22 | 23 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.4") 24 | 25 | libraryDependencies += "com.github.os72" % "protoc-jar" % "3.5.1.1" 26 | -------------------------------------------------------------------------------- /src/main/proto/coverage.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package coverage; 4 | 5 | option java_package = "coverage"; 6 | option java_outer_classname = "CoverageProtos"; 7 | 8 | import "firrtl.proto"; 9 | 10 | message Coverage { 11 | repeated Coverpoint coverpoint = 1; 12 | 13 | message Coverpoint { 14 | uint64 tag = 1; 15 | 16 | Expression cone = 2; 17 | 18 | message Endpoint { 19 | string name = 1; 20 | string 21 | } 22 | 23 | message Cone { 24 | 25 | } 26 | message Tag { 27 | uint64 28 | } 29 | } 30 | 31 | message SourceInfo { 32 | message None { 33 | enum Reason { 34 | NONE_SOURCE_INFO_REASON_UNKNOWN = 0; 35 | NONE_SOURCE_INFO_REASON_UNLOCATABLE = 1; 36 | NONE_SOURCE_INFO_REASON_SUPPRESSED = 2; 37 | NONE_SOURCE_INFO_REASON_DEPRECATED = 3; 38 | } 39 | 40 | // Required. 41 | Reason reason = 1; 42 | } 43 | 44 | message Position { 45 | // Required. 46 | string filename = 1; 47 | // Required. 48 | uint32 line = 2; 49 | // Required. 50 | uint32 column = 3; 51 | } 52 | 53 | // Required. 54 | oneof source_info { 55 | None none = 1; 56 | Position position = 2; 57 | string text = 3; 58 | } 59 | } 60 | 61 | message BigInt { 62 | // 2's complement binary representation 63 | bytes value = 1; 64 | } 65 | 66 | message Top { 67 | // Required. 68 | string name = 1; 69 | } 70 | 71 | message Circuit { 72 | repeated Module module = 1; 73 | repeated Top top = 2; 74 | } 75 | 76 | message Module { 77 | message ExternalModule { 78 | message Parameter { 79 | string id = 1; 80 | oneof value { 81 | BigInt integer = 2; 82 | double double = 3; 83 | string string = 4; 84 | string raw_string = 5; 85 | } 86 | } 87 | // Required. 88 | string id = 1; 89 | repeated Port port = 2; 90 | string defined_name = 3; 91 | repeated Parameter parameter = 4; 92 | } 93 | 94 | message UserModule { 95 | // Required. 96 | string id = 1; 97 | repeated Port port = 2; 98 | repeated Statement statement = 3; 99 | } 100 | 101 | // Required. 102 | oneof module { 103 | ExternalModule external_module = 1; 104 | UserModule user_module = 2; 105 | } 106 | } 107 | 108 | message Statement { 109 | message Wire { 110 | // Required. 111 | string id = 1; 112 | // Required. 113 | Type type = 2; 114 | } 115 | 116 | message Register { 117 | // Required. 
118 | string id = 1; 119 | // Required. 120 | Type type = 2; 121 | // Required. 122 | Expression clock = 3; 123 | Expression reset = 4; 124 | Expression init = 5; 125 | } 126 | 127 | message Memory { 128 | // Required. 129 | string id = 1; 130 | // Required. 131 | Type type = 2; 132 | // Required. 133 | oneof depth { 134 | uint32 uint_depth = 3; 135 | BigInt bigint_depth = 9; 136 | } 137 | // Required. 138 | uint32 write_latency = 4; 139 | // Required. 140 | uint32 read_latency = 5; 141 | repeated string reader_id = 6; 142 | repeated string writer_id = 7; 143 | repeated string readwriter_id = 8; 144 | } 145 | 146 | message CMemory { 147 | // As alternative to using VectorType as type 148 | message TypeAndDepth { 149 | Type data_type = 1; 150 | BigInt depth = 2; 151 | } 152 | // Required. 153 | string id = 1; 154 | // Required. 155 | oneof type { 156 | Type.VectorType vector_type = 2; 157 | TypeAndDepth type_and_depth = 4; 158 | } 159 | // Required. 160 | bool sync_read = 3; 161 | } 162 | 163 | message Instance { 164 | // Required. 165 | string id = 1; 166 | // Required. 167 | string module_id = 2; 168 | } 169 | 170 | message Node { 171 | // Required. 172 | string id = 1; 173 | // Required. 174 | Expression expression = 2; 175 | } 176 | 177 | message When { 178 | // Required. 179 | Expression predicate = 1; 180 | repeated Statement consequent = 2; 181 | repeated Statement otherwise = 3; 182 | } 183 | 184 | message Stop { 185 | // Required. 186 | int32 return_value = 1; 187 | // Required. 188 | Expression clk = 2; 189 | // Required. 190 | Expression en = 3; 191 | } 192 | 193 | message Printf { 194 | // Required. 195 | string value = 1; 196 | repeated Expression arg = 2; 197 | // Required. 198 | Expression clk = 3; 199 | // Required. 200 | Expression en = 4; 201 | } 202 | 203 | message Skip { 204 | // Empty 205 | } 206 | 207 | message Connect { 208 | // This should be limited to Reference, SubField, SubIndex, or SubAccess. 209 | // Required. 210 | Expression location = 1; 211 | // Required. 212 | Expression expression = 2; 213 | } 214 | 215 | message PartialConnect { 216 | // This should be limited to Reference, SubField, SubIndex, or SubAccess. 217 | // Required. 218 | Expression location = 1; 219 | // Required. 220 | Expression expression = 2; 221 | } 222 | 223 | message IsInvalid { 224 | // Required. 225 | Expression expression = 1; 226 | } 227 | 228 | message MemoryPort { 229 | enum Direction { 230 | MEMORY_PORT_DIRECTION_UNKNOWN = 0; 231 | MEMORY_PORT_DIRECTION_INFER = 1; 232 | MEMORY_PORT_DIRECTION_READ = 2; 233 | MEMORY_PORT_DIRECTION_WRITE = 3; 234 | MEMORY_PORT_DIRECTION_READ_WRITE = 4; 235 | } 236 | 237 | // Required. 238 | Direction direction = 1; 239 | // Required. 240 | string id = 2; 241 | // Required. 242 | string memory_id = 3; 243 | // Required. 244 | Expression memory_index = 4; 245 | // Required. 246 | Expression expression = 5; 247 | } 248 | 249 | message Attach { 250 | repeated Expression expression = 1; 251 | } 252 | 253 | // Required. 
254 | oneof statement { 255 | Wire wire = 1; 256 | Register register = 2; 257 | Memory memory = 3; 258 | CMemory cmemory = 4; 259 | Instance instance = 5; 260 | Node node = 6; 261 | When when = 7; 262 | Stop stop = 8; 263 | Printf printf = 10; 264 | Skip skip = 14; 265 | Connect connect = 15; 266 | PartialConnect partial_connect = 16; 267 | IsInvalid is_invalid = 17; 268 | MemoryPort memory_port = 18; 269 | Attach attach = 20; 270 | } 271 | 272 | SourceInfo source_info = 19; 273 | } 274 | 275 | // Using proto3 means that there is no has* method for primitives. This 276 | // necesitates boxing width values because there are cases where a width of 277 | // zero (the default value of uint32) is a valid width. 278 | message Width { 279 | // Required 280 | uint32 value = 1; 281 | } 282 | 283 | message Type { 284 | message UIntType { 285 | Width width = 1; 286 | } 287 | 288 | message SIntType { 289 | Width width = 1; 290 | } 291 | 292 | message ClockType { 293 | // Empty. 294 | } 295 | 296 | message AsyncResetType { 297 | // Empty. 298 | } 299 | 300 | message ResetType { 301 | // Empty. 302 | } 303 | 304 | message BundleType { 305 | message Field { 306 | // Required. 307 | bool is_flipped = 1; 308 | // Required. 309 | string id = 2; 310 | // Required. 311 | Type type = 3; 312 | } 313 | repeated Field field = 1; 314 | } 315 | 316 | message VectorType { 317 | // Required. 318 | Type type = 1; 319 | // Required. 320 | uint32 size = 2; 321 | } 322 | 323 | message FixedType { 324 | Width width = 1; 325 | Width point = 2; 326 | } 327 | 328 | message AnalogType { 329 | Width width = 3; 330 | } 331 | 332 | // Required. 333 | oneof type { 334 | UIntType uint_type = 2; 335 | SIntType sint_type = 3; 336 | ClockType clock_type = 4; 337 | BundleType bundle_type = 5; 338 | VectorType vector_type = 6; 339 | FixedType fixed_type = 7; 340 | AnalogType analog_type = 8; 341 | AsyncResetType async_reset_type = 9; 342 | ResetType reset_type = 10; 343 | } 344 | } 345 | 346 | message Port { 347 | enum Direction { 348 | PORT_DIRECTION_UNKNOWN = 0; 349 | PORT_DIRECTION_IN = 1; 350 | PORT_DIRECTION_OUT = 2; 351 | } 352 | 353 | // Required. 354 | string id = 1; 355 | // Required. 356 | Direction direction = 2; 357 | // Required. 358 | Type type = 3; 359 | } 360 | 361 | message Expression { 362 | message Reference { 363 | // Required. 364 | string id = 1; 365 | } 366 | 367 | message IntegerLiteral { 368 | // Base 10 value. May begin with a sign (+|-). Only zero can begin with a 369 | // '0'. 370 | // Required 371 | string value = 1; 372 | } 373 | 374 | message UIntLiteral { 375 | // Required. 376 | IntegerLiteral value = 1; 377 | Width width = 2; 378 | } 379 | 380 | message SIntLiteral { 381 | // Required. 382 | IntegerLiteral value = 1; 383 | Width width = 2; 384 | } 385 | 386 | message FixedLiteral { 387 | BigInt value = 1; 388 | Width width = 2; 389 | Width point = 3; 390 | } 391 | 392 | message ValidIf { 393 | // Required. 394 | Expression condition = 1; 395 | // Required. 396 | Expression value = 2; 397 | } 398 | 399 | message Mux { 400 | // Required. 401 | Expression condition = 1; 402 | // Required. 403 | Expression t_value = 2; 404 | // Required. 405 | Expression f_value = 3; 406 | } 407 | 408 | message SubField { 409 | // Required. 410 | Expression expression = 1; 411 | // Required. 412 | string field = 2; 413 | } 414 | 415 | message SubIndex { 416 | // Required. 417 | Expression expression = 1; 418 | // Required. 419 | IntegerLiteral index = 2; 420 | } 421 | 422 | message SubAccess { 423 | // Required. 
424 | Expression expression = 1; 425 | // Required. 426 | Expression index = 2; 427 | } 428 | 429 | message PrimOp { 430 | 431 | enum Op { 432 | OP_UNKNOWN = 0; 433 | OP_ADD = 1; 434 | OP_SUB = 2; 435 | OP_TAIL = 3; 436 | OP_HEAD = 4; 437 | OP_TIMES = 5; 438 | OP_DIVIDE = 6; 439 | OP_REM = 7; 440 | OP_SHIFT_LEFT = 8; 441 | OP_SHIFT_RIGHT = 9; 442 | OP_DYNAMIC_SHIFT_LEFT = 10; 443 | OP_DYNAMIC_SHIFT_RIGHT = 11; 444 | OP_BIT_AND = 12; 445 | OP_BIT_OR = 13; 446 | OP_BIT_XOR = 14; 447 | OP_BIT_NOT = 15; 448 | OP_CONCAT = 16; 449 | OP_LESS = 17; 450 | OP_LESS_EQ = 18; 451 | OP_GREATER = 19; 452 | OP_GREATER_EQ = 20; 453 | OP_EQUAL = 21; 454 | OP_PAD = 22; 455 | OP_NOT_EQUAL = 23; 456 | OP_NEG = 24; 457 | OP_XOR_REDUCE = 26; 458 | OP_CONVERT = 27; 459 | OP_AS_UINT = 28; 460 | OP_AS_SINT = 29; 461 | OP_EXTRACT_BITS = 30; 462 | OP_AS_CLOCK = 31; 463 | OP_AS_FIXED_POINT = 32; 464 | OP_AND_REDUCE = 33; 465 | OP_OR_REDUCE = 34; 466 | OP_SHIFT_BINARY_POINT_LEFT = 35; 467 | OP_SHIFT_BINARY_POINT_RIGHT = 36; 468 | OP_SET_BINARY_POINT = 37; 469 | OP_AS_ASYNC_RESET = 38; 470 | } 471 | 472 | // Required. 473 | Op op = 1; 474 | repeated Expression arg = 2; 475 | repeated IntegerLiteral const = 3; 476 | } 477 | 478 | reserved 5; 479 | 480 | // Required. 481 | oneof expression { 482 | Reference reference = 1; 483 | UIntLiteral uint_literal = 2; 484 | SIntLiteral sint_literal = 3; 485 | FixedLiteral fixed_literal = 11; 486 | ValidIf valid_if = 4; 487 | //ExtractBits extract_bits = 5; 488 | Mux mux = 6; 489 | SubField sub_field = 7; 490 | SubIndex sub_index = 8; 491 | SubAccess sub_access = 9; 492 | PrimOp prim_op = 10; 493 | } 494 | } 495 | } 496 | 497 | 498 | 499 | 500 | 501 | -------------------------------------------------------------------------------- /src/main/proto/firrtl.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package firrtl; 4 | 5 | option java_package = "firrtl"; 6 | option java_outer_classname = "FirrtlProtos"; 7 | 8 | message Firrtl { 9 | repeated Circuit circuit = 1; 10 | 11 | message SourceInfo { 12 | message None { 13 | enum Reason { 14 | NONE_SOURCE_INFO_REASON_UNKNOWN = 0; 15 | NONE_SOURCE_INFO_REASON_UNLOCATABLE = 1; 16 | NONE_SOURCE_INFO_REASON_SUPPRESSED = 2; 17 | NONE_SOURCE_INFO_REASON_DEPRECATED = 3; 18 | } 19 | 20 | // Required. 21 | Reason reason = 1; 22 | } 23 | 24 | message Position { 25 | // Required. 26 | string filename = 1; 27 | // Required. 28 | uint32 line = 2; 29 | // Required. 30 | uint32 column = 3; 31 | } 32 | 33 | // Required. 34 | oneof source_info { 35 | None none = 1; 36 | Position position = 2; 37 | string text = 3; 38 | } 39 | } 40 | 41 | message BigInt { 42 | // 2's complement binary representation 43 | bytes value = 1; 44 | } 45 | 46 | message Top { 47 | // Required. 48 | string name = 1; 49 | } 50 | 51 | message Circuit { 52 | repeated Module module = 1; 53 | repeated Top top = 2; 54 | } 55 | 56 | message Module { 57 | message ExternalModule { 58 | message Parameter { 59 | string id = 1; 60 | oneof value { 61 | BigInt integer = 2; 62 | double double = 3; 63 | string string = 4; 64 | string raw_string = 5; 65 | } 66 | } 67 | // Required. 68 | string id = 1; 69 | repeated Port port = 2; 70 | string defined_name = 3; 71 | repeated Parameter parameter = 4; 72 | } 73 | 74 | message UserModule { 75 | // Required. 76 | string id = 1; 77 | repeated Port port = 2; 78 | repeated Statement statement = 3; 79 | } 80 | 81 | // Required. 
82 | oneof module { 83 | ExternalModule external_module = 1; 84 | UserModule user_module = 2; 85 | } 86 | } 87 | 88 | message Statement { 89 | message Wire { 90 | // Required. 91 | string id = 1; 92 | // Required. 93 | Type type = 2; 94 | } 95 | 96 | message Register { 97 | // Required. 98 | string id = 1; 99 | // Required. 100 | Type type = 2; 101 | // Required. 102 | Expression clock = 3; 103 | Expression reset = 4; 104 | Expression init = 5; 105 | } 106 | 107 | message Memory { 108 | // Required. 109 | string id = 1; 110 | // Required. 111 | Type type = 2; 112 | // Required. 113 | oneof depth { 114 | uint32 uint_depth = 3; 115 | BigInt bigint_depth = 9; 116 | } 117 | // Required. 118 | uint32 write_latency = 4; 119 | // Required. 120 | uint32 read_latency = 5; 121 | repeated string reader_id = 6; 122 | repeated string writer_id = 7; 123 | repeated string readwriter_id = 8; 124 | } 125 | 126 | message CMemory { 127 | // As alternative to using VectorType as type 128 | message TypeAndDepth { 129 | Type data_type = 1; 130 | BigInt depth = 2; 131 | } 132 | // Required. 133 | string id = 1; 134 | // Required. 135 | oneof type { 136 | Type.VectorType vector_type = 2; 137 | TypeAndDepth type_and_depth = 4; 138 | } 139 | // Required. 140 | bool sync_read = 3; 141 | } 142 | 143 | message Instance { 144 | // Required. 145 | string id = 1; 146 | // Required. 147 | string module_id = 2; 148 | } 149 | 150 | message Node { 151 | // Required. 152 | string id = 1; 153 | // Required. 154 | Expression expression = 2; 155 | } 156 | 157 | message When { 158 | // Required. 159 | Expression predicate = 1; 160 | repeated Statement consequent = 2; 161 | repeated Statement otherwise = 3; 162 | } 163 | 164 | message Stop { 165 | // Required. 166 | int32 return_value = 1; 167 | // Required. 168 | Expression clk = 2; 169 | // Required. 170 | Expression en = 3; 171 | } 172 | 173 | message Printf { 174 | // Required. 175 | string value = 1; 176 | repeated Expression arg = 2; 177 | // Required. 178 | Expression clk = 3; 179 | // Required. 180 | Expression en = 4; 181 | } 182 | 183 | message Skip { 184 | // Empty 185 | } 186 | 187 | message Connect { 188 | // This should be limited to Reference, SubField, SubIndex, or SubAccess. 189 | // Required. 190 | Expression location = 1; 191 | // Required. 192 | Expression expression = 2; 193 | } 194 | 195 | message PartialConnect { 196 | // This should be limited to Reference, SubField, SubIndex, or SubAccess. 197 | // Required. 198 | Expression location = 1; 199 | // Required. 200 | Expression expression = 2; 201 | } 202 | 203 | message IsInvalid { 204 | // Required. 205 | Expression expression = 1; 206 | } 207 | 208 | message MemoryPort { 209 | enum Direction { 210 | MEMORY_PORT_DIRECTION_UNKNOWN = 0; 211 | MEMORY_PORT_DIRECTION_INFER = 1; 212 | MEMORY_PORT_DIRECTION_READ = 2; 213 | MEMORY_PORT_DIRECTION_WRITE = 3; 214 | MEMORY_PORT_DIRECTION_READ_WRITE = 4; 215 | } 216 | 217 | // Required. 218 | Direction direction = 1; 219 | // Required. 220 | string id = 2; 221 | // Required. 222 | string memory_id = 3; 223 | // Required. 224 | Expression memory_index = 4; 225 | // Required. 226 | Expression expression = 5; 227 | } 228 | 229 | message Attach { 230 | repeated Expression expression = 1; 231 | } 232 | 233 | // Required. 
234 | oneof statement { 235 | Wire wire = 1; 236 | Register register = 2; 237 | Memory memory = 3; 238 | CMemory cmemory = 4; 239 | Instance instance = 5; 240 | Node node = 6; 241 | When when = 7; 242 | Stop stop = 8; 243 | Printf printf = 10; 244 | Skip skip = 14; 245 | Connect connect = 15; 246 | PartialConnect partial_connect = 16; 247 | IsInvalid is_invalid = 17; 248 | MemoryPort memory_port = 18; 249 | Attach attach = 20; 250 | } 251 | 252 | SourceInfo source_info = 19; 253 | } 254 | 255 | // Using proto3 means that there is no has* method for primitives. This 256 | // necesitates boxing width values because there are cases where a width of 257 | // zero (the default value of uint32) is a valid width. 258 | message Width { 259 | // Required 260 | uint32 value = 1; 261 | } 262 | 263 | message Type { 264 | message UIntType { 265 | Width width = 1; 266 | } 267 | 268 | message SIntType { 269 | Width width = 1; 270 | } 271 | 272 | message ClockType { 273 | // Empty. 274 | } 275 | 276 | message AsyncResetType { 277 | // Empty. 278 | } 279 | 280 | message ResetType { 281 | // Empty. 282 | } 283 | 284 | message BundleType { 285 | message Field { 286 | // Required. 287 | bool is_flipped = 1; 288 | // Required. 289 | string id = 2; 290 | // Required. 291 | Type type = 3; 292 | } 293 | repeated Field field = 1; 294 | } 295 | 296 | message VectorType { 297 | // Required. 298 | Type type = 1; 299 | // Required. 300 | uint32 size = 2; 301 | } 302 | 303 | message FixedType { 304 | Width width = 1; 305 | Width point = 2; 306 | } 307 | 308 | message AnalogType { 309 | Width width = 3; 310 | } 311 | 312 | // Required. 313 | oneof type { 314 | UIntType uint_type = 2; 315 | SIntType sint_type = 3; 316 | ClockType clock_type = 4; 317 | BundleType bundle_type = 5; 318 | VectorType vector_type = 6; 319 | FixedType fixed_type = 7; 320 | AnalogType analog_type = 8; 321 | AsyncResetType async_reset_type = 9; 322 | ResetType reset_type = 10; 323 | } 324 | } 325 | 326 | message Port { 327 | enum Direction { 328 | PORT_DIRECTION_UNKNOWN = 0; 329 | PORT_DIRECTION_IN = 1; 330 | PORT_DIRECTION_OUT = 2; 331 | } 332 | 333 | // Required. 334 | string id = 1; 335 | // Required. 336 | Direction direction = 2; 337 | // Required. 338 | Type type = 3; 339 | } 340 | 341 | message Expression { 342 | message Reference { 343 | // Required. 344 | string id = 1; 345 | } 346 | 347 | message IntegerLiteral { 348 | // Base 10 value. May begin with a sign (+|-). Only zero can begin with a 349 | // '0'. 350 | // Required 351 | string value = 1; 352 | } 353 | 354 | message UIntLiteral { 355 | // Required. 356 | IntegerLiteral value = 1; 357 | Width width = 2; 358 | } 359 | 360 | message SIntLiteral { 361 | // Required. 362 | IntegerLiteral value = 1; 363 | Width width = 2; 364 | } 365 | 366 | message FixedLiteral { 367 | BigInt value = 1; 368 | Width width = 2; 369 | Width point = 3; 370 | } 371 | 372 | message ValidIf { 373 | // Required. 374 | Expression condition = 1; 375 | // Required. 376 | Expression value = 2; 377 | } 378 | 379 | message Mux { 380 | // Required. 381 | Expression condition = 1; 382 | // Required. 383 | Expression t_value = 2; 384 | // Required. 385 | Expression f_value = 3; 386 | } 387 | 388 | message SubField { 389 | // Required. 390 | Expression expression = 1; 391 | // Required. 392 | string field = 2; 393 | } 394 | 395 | message SubIndex { 396 | // Required. 397 | Expression expression = 1; 398 | // Required. 399 | IntegerLiteral index = 2; 400 | } 401 | 402 | message SubAccess { 403 | // Required. 
404 | Expression expression = 1; 405 | // Required. 406 | Expression index = 2; 407 | } 408 | 409 | message PrimOp { 410 | 411 | enum Op { 412 | OP_UNKNOWN = 0; 413 | OP_ADD = 1; 414 | OP_SUB = 2; 415 | OP_TAIL = 3; 416 | OP_HEAD = 4; 417 | OP_TIMES = 5; 418 | OP_DIVIDE = 6; 419 | OP_REM = 7; 420 | OP_SHIFT_LEFT = 8; 421 | OP_SHIFT_RIGHT = 9; 422 | OP_DYNAMIC_SHIFT_LEFT = 10; 423 | OP_DYNAMIC_SHIFT_RIGHT = 11; 424 | OP_BIT_AND = 12; 425 | OP_BIT_OR = 13; 426 | OP_BIT_XOR = 14; 427 | OP_BIT_NOT = 15; 428 | OP_CONCAT = 16; 429 | OP_LESS = 17; 430 | OP_LESS_EQ = 18; 431 | OP_GREATER = 19; 432 | OP_GREATER_EQ = 20; 433 | OP_EQUAL = 21; 434 | OP_PAD = 22; 435 | OP_NOT_EQUAL = 23; 436 | OP_NEG = 24; 437 | OP_XOR_REDUCE = 26; 438 | OP_CONVERT = 27; 439 | OP_AS_UINT = 28; 440 | OP_AS_SINT = 29; 441 | OP_EXTRACT_BITS = 30; 442 | OP_AS_CLOCK = 31; 443 | OP_AS_FIXED_POINT = 32; 444 | OP_AND_REDUCE = 33; 445 | OP_OR_REDUCE = 34; 446 | OP_SHIFT_BINARY_POINT_LEFT = 35; 447 | OP_SHIFT_BINARY_POINT_RIGHT = 36; 448 | OP_SET_BINARY_POINT = 37; 449 | OP_AS_ASYNC_RESET = 38; 450 | } 451 | 452 | // Required. 453 | Op op = 1; 454 | repeated Expression arg = 2; 455 | repeated IntegerLiteral const = 3; 456 | } 457 | 458 | reserved 5; 459 | 460 | // Required. 461 | oneof expression { 462 | Reference reference = 1; 463 | UIntLiteral uint_literal = 2; 464 | SIntLiteral sint_literal = 3; 465 | FixedLiteral fixed_literal = 11; 466 | ValidIf valid_if = 4; 467 | //ExtractBits extract_bits = 5; 468 | Mux mux = 6; 469 | SubField sub_field = 7; 470 | SubIndex sub_index = 8; 471 | SubAccess sub_access = 9; 472 | PrimOp prim_op = 10; 473 | } 474 | } 475 | } 476 | -------------------------------------------------------------------------------- /src/main/scala/coverage/CoverAspect.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3.{Bits, Bool, Data} 4 | import chisel3.aop.injecting.{InjectingAspect, InjectingTransform} 5 | import chisel3.aop.{Aspect, Select} 6 | import chisel3.experimental.{ChiselAnnotation, RawModule, annotate} 7 | import chisel3.util.experimental.BoringUtils 8 | import firrtl.{AnnotationSeq, RenameMap} 9 | import firrtl.annotations.{Annotation, IsMember} 10 | import firrtl.options.Unserializable 11 | import firrtl.passes.wiring.WiringTransform 12 | import firrtl.stage.RunFirrtlTransformAnnotation 13 | 14 | import scala.collection.{MapLike, mutable} 15 | import scala.reflect.runtime.universe.TypeTag 16 | 17 | 18 | trait CoverageOption 19 | 20 | case object SimulatorDone extends CoverageOption 21 | 22 | case class CoverageOptions(options: Map[CoverageOption, Any]) { 23 | def simDone[T <: RawModule](top: T): Bool = options(SimulatorDone).asInstanceOf[T => Bool](top) 24 | } 25 | 26 | case class CoverAspect[T <: RawModule](buildCoverage: T => Seq[CoverPoint], 27 | databases: Seq[String], 28 | coverage: CoverageOptions) 29 | (implicit tTag: TypeTag[T]) extends Aspect[T] { 30 | 31 | override def toAnnotation(top: T): AnnotationSeq = { 32 | val coverpoints = buildCoverage(top) 33 | val pointMap = coverpoints.groupBy { point => point.signal.toTarget.module } 34 | val annoSeqs = Select.collectDeep(top) { 35 | case x: RawModule if pointMap.contains(x.toTarget.module) => 36 | val points = pointMap(x.toTarget.module).toList 37 | val firrtlAnnotations = mutable.ArrayBuffer[CoveragePoint]() 38 | InjectingAspect[T, RawModule]( 39 | (t: T) => Seq(x), 40 | { m: RawModule => 41 | import chisel3._ 42 | val done = Wire(Bool()) 43 | done := DontCare 
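// Bore the top-level "simulation done" signal into this module: BoringUtils.bore
// connects coverage.simDone(top) to the local `done` wire across the module
// hierarchy, so each injected CoverageTracker knows when to report its coverage.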
44 | BoringUtils.bore(coverage.simDone(top), Seq(done)) 45 | points.foreach { cp => 46 | val tracker = Module(new CoverageTracker(cp)) 47 | tracker.in := cp.signal 48 | tracker.printCoverage := done 49 | firrtlAnnotations += tracker.collectCoveragePoint() 50 | } 51 | } 52 | ).toAnnotation(top) ++ firrtlAnnotations 53 | } 54 | val ret = annoSeqs.toList.foldRight(Seq[Annotation](RunFirrtlTransformAnnotation(new WiringTransform))) { case (sofar, next) => next ++ sofar } 55 | (RunFirrtlTransformAnnotation(new CoverTransform()) +: databases.map(DatabaseAnnotation(_, true))) ++ ret 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/main/scala/coverage/CoverGroup.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3.aop.Aspect 4 | import chisel3.aop.injecting.{InjectStatement, InjectingAspect, InjectingTransform} 5 | import chisel3.experimental.RawModule 6 | import chisel3.{Clock, Reset} 7 | import firrtl.annotations.Annotation 8 | import firrtl.options.Unserializable 9 | import firrtl.{AnnotationSeq, RenameMap} 10 | 11 | import scala.reflect.runtime.universe.TypeTag 12 | 13 | //object CoverGroup { 14 | // def apply(label: String, 15 | // module: RawModule, 16 | // clock: Clock, 17 | // reset: Reset, 18 | // points: Seq[CoverPoint], 19 | // options: GroupOptions = GroupOptions()): CoverGroup = { 20 | // CoverGroup(label, module, clock, reset, points, options) 21 | // } 22 | //} 23 | 24 | /* 25 | case class CoverGroup (label: String, 26 | module: RawModule, 27 | clock: Clock, 28 | reset: Reset, 29 | points: Seq[CoverPoint], 30 | options: GroupOptions = GroupOptions(), 31 | ) extends Annotation with Unserializable { 32 | override def update(renames: RenameMap): Seq[CoverGroup] = { 33 | def updateTracker(t: SignalTracker): SignalTracker = { 34 | val renamed = t.update(renames) 35 | renamed.head 36 | } 37 | Seq(this.copy(points = points.map(_.update(renames)))) 38 | } 39 | } 40 | */ 41 | 42 | case class GroupOptions(weight: Int = 1) 43 | 44 | /* 45 | weight=number, 1 46 | 47 | If set at the covergroup syntactic level, it specifies the weight of this covergroup instance for computing the overalla 48 | instance coverage of the simulation. 49 | If set at the coverpoint (or cross) syntactic level, it specifies the weight of a coverpoint (or cross) for computing 50 | the instance coverage of the enclosing covergroup. 51 | 52 | goal=number, 90 53 | 54 | Specifies the target goal for a covergroup instance or for a coverpoint or a cross of an instance. 55 | 56 | name=string, unique name 57 | 58 | Specifies a name for the covergroup instance. 59 | 60 | comment=string 61 | 62 | A comment that appears with a covergroup instance or with a coverpoint or cross of the covergroup instance 63 | 64 | at_least=number, 1 65 | 66 | Minimum number of times a bin needs to hit before it is declared as hit 67 | 68 | detect_overlap=boolean, 0 69 | 70 | When true, a warning is issued if there is an overlap between the range list (or transition list) of two bins of a 71 | coverpoint. 72 | 73 | auto_bin_max=number, 64 74 | 75 | Maximum number of automatically created bins when no bins are explicitly defined for a coverpoint. 76 | 77 | cross_num_print_missing = number, 0 78 | 79 | Number of missing (not covered) cross product bins that must be saved to the coverage database and printed in the 80 | coverage report. 
81 | 82 | per_instance=boolean, 0 83 | 84 | Each instance contributes to the overall coverage information for the covergroup type. When true, coverage information 85 | for this covergroup instance is tracked as well. 86 | */ -------------------------------------------------------------------------------- /src/main/scala/coverage/CoverPoint.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3._ 4 | import chisel3.experimental.ChiselAnnotation 5 | import firrtl.RenameMap 6 | import firrtl.annotations.{Annotation, NoTargetAnnotation, Target} 7 | 8 | //trait CoverBase { val label: String } 9 | 10 | 11 | /* 12 | object CoverPoint { 13 | def apply(label: String, signal: Bits, endPoints: Seq[Bits], clock: Clock, reset: Reset, bins: Seq[Bin], pointOptions: CoverOptions = CoverOptions()): CoverPoint = { 14 | CoverPoint(label, SignalTracker(signal), endPoints.map(SignalTracker(_)), clock, reset, bins, pointOptions) 15 | } 16 | } 17 | */ 18 | case class CoverPoint (label: String, 19 | signal: Bits, 20 | endPoints: Seq[Bits], 21 | bins: Seq[Bin]) { 22 | def default: Option[Bin] = { 23 | bins.collectFirst { case b@Bin(l, Default) => b } 24 | } 25 | def intervals: Seq[(Bin, Int, Int)] = { 26 | val binIntervals = bins.filter { _.category.isInstanceOf[BinRange] }.sortBy { 27 | case Bin(l, BinRange(low, high)) => low 28 | } 29 | val intervals = binIntervals.map{ 30 | case b@Bin(l, BinRange(low, high)) => (b, low.toInt, high.toInt) 31 | } 32 | intervals 33 | } 34 | } 35 | 36 | case class CoverOptions(weights: Seq[Int] = Seq(1), maxCount: Int = 32) 37 | 38 | // TODO: I think you can get away without representing this directly and generating it programmatically 39 | // case class CrossPoint(name: String, points: Seq[CoverPoint], bins: Seq[BaseBin]) extends CoverBase 40 | 41 | abstract class BaseBin { 42 | val labelOption: Option[String] 43 | val category: BinCategory 44 | } 45 | 46 | // Explicit bin, bins based on category 47 | case class Bin(label: String, category: BinCategory) extends BaseBin { 48 | override val labelOption: Option[String] = Some(label) 49 | } 50 | 51 | // Implicit bin, bins based on category 52 | // Not user created 53 | case class ImplicitBin(category: BinCategory) extends BaseBin { 54 | override val labelOption: Option[String] = None 55 | } 56 | 57 | // Ignores when bin matches (usually paired with ImplicitBin 58 | case class IgnoreBin(label: String, category: BinCategory) extends BaseBin { 59 | override val labelOption: Option[String] = Some(label) 60 | } 61 | 62 | 63 | trait BinCategory { 64 | def serialize: String 65 | } 66 | 67 | // Defaults to all non-specified categories 68 | case object Default extends BinCategory { 69 | override def serialize: String = "default" 70 | } 71 | 72 | // Low and High are inclusive 73 | case class BinRange(low: BigInt, high: BigInt) extends BinCategory { 74 | override def serialize: String = s"$low->$high" 75 | } 76 | 77 | // A sequence of values that must be transitioned to, in order 78 | // Wait on this... 79 | //case class BinTransition(sequence: Seq[BinValue]) extends BinCategory 80 | 81 | // Unnecessary! 
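// A minimal usage sketch (not part of the library): building a CoverPoint from the
// Bin/BinRange/Default categories defined above. `addr` and `done` are hypothetical
// chisel3 signals standing in for the covered value and an endpoint.
//
//   CoverPoint("address", addr, Seq(done), Seq(
//     Bin("low",  BinRange(0, 10)),
//     Bin("high", BinRange(11, 255)),
//     Bin("rest", Default)))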
82 | //trait BinValue 83 | 84 | // A value in a sequence that must match immediately 85 | //case class BinConstant(value: BigInt) extends BinValue 86 | 87 | // A value that must be hit eventually, but not necessarily at this time 88 | //case class BinEventually(value: BigInt) extends BinValue 89 | 90 | 91 | -------------------------------------------------------------------------------- /src/main/scala/coverage/CoverTransform.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import mini.{TagCapture, TaggedLineProcessor} 4 | import coverage.cone.{Cone, ConeFinder} 5 | import firrtl.analyses.ConnectionGraph 6 | import firrtl.annotations.TargetToken.Index 7 | import firrtl.annotations.{Annotation, ReferenceTarget, Target} 8 | import firrtl.{AnnotationSeq, CircuitForm, CircuitState, MALE, MidForm, RenameMap, ResolveAndCheck, ResolvedAnnotationPaths, Transform, WRef, WSubIndex} 9 | import firrtl.ir._ 10 | 11 | case class CoveragePoint(start: ReferenceTarget, 12 | ends: Seq[ReferenceTarget], 13 | label: String, 14 | bins: Seq[(ReferenceTarget, Bin)], 15 | clock: ReferenceTarget, 16 | printEnable: ReferenceTarget) extends Annotation { 17 | def toView(cone: Cone): View = { 18 | View(cone, start, bins.map(_._2), 1, 100) 19 | } 20 | 21 | def coverageModule: String = { 22 | val enclosingModules = (clock +: printEnable +: bins.map(_._1)).map(_.pathlessTarget.module).toSet 23 | assert(enclosingModules.size == 1, s"Cannot have bins, clock and printEnable not share the same module: $enclosingModules") 24 | enclosingModules.head 25 | } 26 | 27 | override def getTargets: Seq[Target] = (start +: ends) ++ bins.map(_._1) ++ Seq(clock, printEnable) 28 | 29 | override def update(renames: RenameMap): Seq[CoveragePoint] = { 30 | def rename(r: ReferenceTarget): ReferenceTarget = { 31 | renames.get(r).map(_.toSet.toSeq) match { 32 | case None => r 33 | case Some(Seq(n: ReferenceTarget)) => n 34 | case other => sys.error(s"Cannot propagate $this with 1:Many rename of $r: $other") 35 | } 36 | } 37 | 38 | Seq(CoveragePoint( 39 | rename(start), 40 | ends.map(rename), 41 | label, 42 | bins.map(x => (rename(x._1), x._2)), 43 | rename(clock), 44 | rename(printEnable) 45 | )) 46 | } 47 | } 48 | 49 | class CoverTransform extends Transform { 50 | override def inputForm: CircuitForm = MidForm 51 | 52 | override def outputForm: CircuitForm = MidForm 53 | 54 | //override val annotationClasses: Traversable[Class[_]] = List(classOf[CoveragePoint]) 55 | 56 | def instrument(circuit: Circuit, coverages: Seq[CoveragePoint], database: Database): (Circuit, AnnotationSeq) = { 57 | val coneGraph = ConeFinder(ConnectionGraph(circuit).reverseConnectionGraph) 58 | 59 | val moduleMap = circuit.modules.map(m => m.name -> m).toMap 60 | 61 | val (newModuleMap, newAnnos, newDatabase, views) = coverages.foldLeft((moduleMap, Seq.empty[Annotation], database, Seq.empty[(String, View)])) { 62 | case ((moduleMapSoFar, annos, db, vs), cp@CoveragePoint(start, ends, label, bins, clock, printEnable)) => 63 | val cone = coneGraph.getCone(start, ends) 64 | val results = bins.map { case (reg, Bin(l, category)) => category -> (false, Set(l)) }.toMap 65 | val quanta = Quanta(cone, results, Map(start -> ends)) 66 | val trimmedQuanta = db.trim(quanta) 67 | val newDatabase = db.add(trimmedQuanta) 68 | val uncovered = trimmedQuanta.uncoveredBinCategories 69 | val uncoveredBins = bins.collect { 70 | case (rt, bin@Bin(l, b)) if uncovered.contains(b) => (rt, bin) 71 | } 72 | val removedBins = 
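// Bins whose categories the database already marks as covered; these are excluded
// from the new printf and their tracking registers are candidates for removal
// (the pass currently just relies on DCE).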
bins.collect { 73 | case (rt, bin@Bin(l, b)) if !uncovered.contains(b) => rt 74 | } 75 | val message = Quanta.toMessage(start, cone, uncoveredBins.map(_._2), newDatabase) 76 | val expsAll = uncoveredBins.foldLeft(Seq.empty[Expression]) { 77 | case (exps, (ReferenceTarget(_, _, _, ref, Seq(Index(idx))), _)) => 78 | val exp = WSubIndex(WRef(ref), idx, UnknownType, MALE) 79 | exps :+ exp 80 | } 81 | val updatedModule = moduleMapSoFar(cp.coverageModule) mapStmt update(message, expsAll, removedBins, printEnable, clock) 82 | 83 | (moduleMapSoFar + (updatedModule.name -> updatedModule), TagCapture(newDatabase.fullTag(cone)) +: annos, newDatabase, (cp.label, cp.toView(cone)) +: vs) 84 | } 85 | newDatabase.writeCoverage() 86 | 87 | def processMessages(messages: Seq[String]): Unit = { 88 | /* 89 | val database = Database.readCoverage(newDatabase.name) 90 | val finalDatabase = Quanta.fromMessages(database, messages) 91 | finalDatabase.writeCoverage() 92 | */ 93 | val db = Database.updateCoverage(newDatabase.name, messages, Quanta.fromMessages) 94 | val viewMap = views.groupBy{kv => kv._1} 95 | val groups = viewMap.map { 96 | case (label, views) => GroupView(label, 1, 100, views.map(_._2)) 97 | } 98 | groups.foreach { g => println(g.serialize(db)) } 99 | } 100 | 101 | (circuit.copy(modules = circuit.modules.map(m => newModuleMap(m.name))), 102 | TaggedLineProcessor(processMessages) +: newAnnos) 103 | } 104 | 105 | def update(message: String, 106 | exps: Seq[Expression], 107 | removedBins: Seq[ReferenceTarget], 108 | printCoverage: ReferenceTarget, 109 | clockRef: ReferenceTarget 110 | )(body: Statement): Statement = { 111 | val removedNames = removedBins.map(_.ref).toSet 112 | 113 | def removeUnusedBinRegs(s: Statement): Statement = s match { 114 | case x: DefRegister if removedNames.contains(x.name) => 115 | // Rely on DCE to remove 116 | //DefWire(x.info, x.name, x.tpe) 117 | x 118 | case other => other mapStmt removeUnusedBinRegs 119 | } 120 | 121 | def addPrintf(s: Statement): Statement = { 122 | val print = Print(NoInfo, StringLit(message + "\n"), exps, WRef(clockRef.ref), WRef(printCoverage.ref)) 123 | Block(Seq(s, print)) 124 | } 125 | 126 | addPrintf(removeUnusedBinRegs(body)) 127 | } 128 | 129 | override def execute(state: CircuitState): CircuitState = { 130 | val coverages = state.annotations.collect { 131 | case c: CoveragePoint => c 132 | } 133 | 134 | 135 | val database = { 136 | val dbs = state.annotations.collect { 137 | case DatabaseAnnotation(dbName, true) => Database.populateCoverage(dbName) 138 | case DatabaseAnnotation(dbName, false) => Database.readCoverage(dbName) 139 | }.toSet 140 | assert(dbs.nonEmpty, s"Must have a database to use CoverTransform!") 141 | dbs.reduce(_.add(_)) 142 | } 143 | 144 | val (newCircuit, newAnnotations) = instrument(state.circuit, coverages, database) 145 | 146 | 147 | val newState = state.copy(circuit = newCircuit, annotations = newAnnotations ++ state.annotations.filter { 148 | case c: CoveragePoint => false 149 | case c: DatabaseAnnotation => false 150 | case other => true 151 | }) 152 | new ResolveAndCheck().execute(newState) 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/main/scala/coverage/Coverage.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import firrtl.analyses.{CircuitGraph, ConnectionGraph} 4 | import firrtl.annotations.{ModuleTarget, ReferenceTarget} 5 | import firrtl.ir.{Circuit, Expression} 6 | import 
firrtl.transforms.clockfinder.ClockFinder 7 | 8 | /* 9 | object Coverage { 10 | def computeCone(signal: ReferenceTarget, endPoint: ModuleTarget, circuit: Circuit): Expression = ??? 11 | def computeCone(signal: ReferenceTarget, endPoints: Seq[ReferenceTarget], circuit: Circuit): Expression = { 12 | val cg = CircuitGraph(circuit) 13 | val pathscg.path(signal, endPoints) 14 | val connectionGraph = ConnectionGraph(circuit) 15 | val clockFinder = ClockFinder.ClockFinderPML() 16 | 17 | } 18 | 19 | } 20 | */ 21 | -------------------------------------------------------------------------------- /src/main/scala/coverage/CoverageTracker.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3._ 4 | import chisel3.experimental.MultiIOModule 5 | import firrtl.annotations.ReferenceTarget 6 | 7 | /** Either records and bins the value of [in] every cycle, or cycles through all bins and prints the result 8 | * 9 | * @param point contains signal (and its type) as well as other histogramming information 10 | */ 11 | class CoverageTracker(point: CoverPoint) extends MultiIOModule { 12 | val in = IO(Input(chiselTypeOf(point.signal))) 13 | val printCoverage = IO(Input(Bool())) 14 | 15 | val intervals = point.intervals 16 | val default = point.default 17 | 18 | val lows = VecInit(intervals.map(_._2.U)) 19 | val highs = VecInit(intervals.map(_._3.U)) 20 | 21 | // Calculate in's address into histogram 22 | val defaultIndex = if(default.isDefined) intervals.length else 0 23 | 24 | val activeBinAddress = point.signal match { 25 | case _: UInt => 26 | val inU = in.asUInt() 27 | intervals.zipWithIndex.foldLeft(defaultIndex.U) { 28 | case (addr: UInt, ((_, min: Int, max: Int), index: Int)) => Mux((inU >= min.U) & (inU <= max.U), index.U, addr) 29 | } 30 | case _: SInt => 31 | val inS = in.asSInt() 32 | intervals.zipWithIndex.foldLeft(defaultIndex.U) { 33 | case (addr: UInt, ((_, min: Int, max: Int), index: Int)) => Mux((inS >= min.S) & (inS <= max.S), index.U, addr) 34 | } 35 | } 36 | 37 | // Records which bins have been written to (and which require initialization) 38 | val coverage = RegInit(VecInit(Seq.fill(math.pow(2, activeBinAddress.getWidth).toInt)(false.B))) 39 | 40 | // Then, do stuff 41 | when(reset.asBool() === false.B) { 42 | coverage(activeBinAddress) := true.B 43 | //printf(s"${point.signal.toTarget.serialize} is %d, bin %d\n", in, activeBinAddress) 44 | } 45 | 46 | def collectCoveragePoint(): CoveragePoint = { 47 | 48 | val bins = coverage.map(_.toTarget).zip(intervals.map(_._1) ++ default) 49 | CoveragePoint(point.signal.toAbsoluteTarget, point.endPoints.map(_.toAbsoluteTarget), point.label, bins, clock.toTarget, printCoverage.toTarget) 50 | } 51 | } 52 | 53 | /* 54 | 55 | covergroup address_cov (ref logic [7:0] address, 56 | 22 input int low, int high) @ (posedge ce); 57 | 23 ADDRESS : coverpoint address { 58 | 24 bins low = {0,low}; 59 | 25 bins med = {low,high}; 60 | 26 } 61 | 27 endgroup 62 | 28 //================================================= 63 | 29 // Instance of covergroup 64 | 30 //================================================= 65 | 31 address_cov acov_low = new(addr,0,10); 66 | 32 address_cov acov_med = new(addr,11,20); 67 | 33 address_cov acov_high = new(addr,21,30); 68 | 69 | =========================================================== 70 | Group : coverage_covergroup.miff::address_cov 71 | =========================================================== 72 | SCORE WEIGHT GOAL 73 | 100.00 1 100 74 | 
----------------------------------------------------------- 75 | Summary for Group coverage_covergroup.miff::address_cov 76 | CATEGORY EXPECTED UNCOVERED COVERED PERCENT 77 | Variables 2 0 2 100.00 78 | 79 | Variables for Group coverage_covergroup.miff::address_cov 80 | 81 | VARIABLE EXPECTED UNCOVERED COVERED PERCENT GOAL WEIGHT 82 | ADDRESS 2 0 2 100.00 100 1 83 | ----------------------------------------------------------- 84 | Summary for Variable ADDRESS 85 | 86 | CATEGORY EXPECTED UNCOVERED COVERED PERCENT 87 | User Defined Bins 2 0 2 100.00 88 | 89 | User Defined Bins for ADDRESS 90 | Bins 91 | 92 | NAME COUNT AT LEAST 93 | med 2 1 94 | low 6 1 95 | */ 96 | 97 | /* 98 | covergroup datac @ (negedge cif.cb.ce); 99 | 83 data_in : coverpoint cif.cb.datai { 100 | 84 bins low = {0,50}; 101 | 85 bins med = {51,150}; 102 | 86 bins high = {151,255}; 103 | 87 } 104 | 88 data_out : coverpoint cif.cb.datao { 105 | 89 bins low = {0,50}; 106 | 90 bins med = {51,150}; 107 | 91 bins high = {151,255}; 108 | 92 } 109 | 93 read_write : coverpoint cif.cb.we { 110 | 94 bins read = {0}; 111 | 95 bins write = {1}; 112 | 96 } 113 | 97 endgroup 114 | 115 | VARIABLE EXPECTED UNCOVERED COVERED PERCENT GOAL WEIGHT 116 | address 3 2 1 33.33 100 1 117 | 118 | VARIABLE EXPECTED UNCOVERED COVERED PERCENT GOAL WEIGHT 119 | data_in 3 2 1 33.33 100 1 120 | data_out 3 3 0 0.00 100 1 121 | read_write 2 0 2 100.00 100 1 122 | */ -------------------------------------------------------------------------------- /src/main/scala/coverage/Database.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import java.io._ 4 | 5 | import coverage.cone.Cone 6 | import firrtl.annotations.{NoTargetAnnotation, ReferenceTarget} 7 | import firrtl.ir 8 | 9 | case class DatabaseAnnotation(name: String, createIfNew: Boolean) extends NoTargetAnnotation 10 | 11 | case class Database(quantas: Map[Cone, Quanta], name: String) { 12 | lazy val tagMap: Map[String, Seq[Cone]] = quantas.keysIterator.toSeq.groupBy(cone => cone.tag) 13 | 14 | def add(other: Database): Database = { 15 | other.quantas.foldLeft(this) { case (db, (e, q)) => db.add(q) } 16 | } 17 | def add(quantas: Iterable[Quanta]): Database = { 18 | quantas.foldLeft(this){ (db, q) => db.add(q) } 19 | } 20 | def add(quanta: Quanta): Database = { 21 | val newQuanta = quantas.get(quanta.cone) match { 22 | case Some(existing) => existing.update(quanta) 23 | case None => 24 | quanta 25 | } 26 | val ret = Database(quantas + (newQuanta.cone -> newQuanta), name) 27 | ret 28 | } 29 | 30 | def coneIndex(cone: Cone): Int = { 31 | tagMap(cone.tag).zipWithIndex.collectFirst{ 32 | case (c, idx) if cone == c => idx 33 | }.get 34 | } 35 | 36 | def fullTag(cone: Cone): String = { 37 | val index = coneIndex(cone).toString 38 | cone.tag + "#" + "0" * (32 - index.length) 39 | } 40 | 41 | def tagMapSerialize(): Iterable[String] = { 42 | tagMap.flatMap { 43 | case (tag, cones) => 44 | cones.zipWithIndex.map { 45 | case (cone, idx) => cone.serialize(s"${idx}$tag:") 46 | } 47 | } 48 | } 49 | 50 | def quantasSerialize(): Iterable[String] = { 51 | quantas.map { 52 | case (cone, quanta) => 53 | fullTag(cone) + ": " + quanta.results.toString() + ", " + quanta.mappings.toString() 54 | } 55 | } 56 | 57 | def prettySerialize(): String = { 58 | (tagMapSerialize() ++ quantasSerialize()).mkString("\n") 59 | } 60 | 61 | def hasTagConflict(tag: String): Boolean = { 62 | tagMap.get(tag) match { 63 | case Some(seq) if seq.size > 1 => true 64 | case other => false 
65 | } 66 | } 67 | 68 | /** Returns a similar quanta on the same cone with a subset of bins which have not been covered 69 | * 70 | * @param quanta 71 | * @return 72 | */ 73 | def trim(quanta: Quanta): Quanta = { 74 | val updated = quantas.get(quanta.cone).map(_.update(quanta)).getOrElse(quanta) 75 | 76 | val unseenResults = quanta.results.collect { case x@(category, (seen, labels)) if !updated.results(category)._1 => x } 77 | quanta.copy(results = unseenResults) 78 | } 79 | 80 | def coverage(cone: Cone): Map[BinCategory, (Boolean, Set[String])] = { 81 | quantas(cone).results 82 | } 83 | 84 | def targets(cone: Cone): Map[ReferenceTarget, Seq[ReferenceTarget]] = { 85 | quantas(cone).mappings 86 | } 87 | 88 | 89 | def writeCoverage(name: String = name): Unit = { 90 | writeTo(s"artifacts/coverage", s"$name.db") 91 | } 92 | 93 | def writeTo(dir: String, fileName: String): Unit = { 94 | val newFile = new File(dir) 95 | newFile.mkdirs() 96 | println(newFile.getAbsolutePath) 97 | val path = newFile.getAbsolutePath 98 | 99 | val file = new FileOutputStream(path + "/" + fileName) 100 | val out = new ObjectOutputStream(file) 101 | out.flush() 102 | 103 | // Method for serialization of object 104 | out.writeObject(this) 105 | 106 | out.flush() 107 | out.close() 108 | file.flush() 109 | file.close() 110 | 111 | val filePretty = new FileOutputStream(path + "/" + fileName + ".txt") 112 | val outPretty = new ObjectOutputStream(filePretty) 113 | filePretty.flush() 114 | outPretty.flush() 115 | 116 | // Method for serialization of object 117 | outPretty.writeObject(this.prettySerialize()) 118 | 119 | outPretty.flush() 120 | outPretty.close() 121 | filePretty.flush() 122 | filePretty.close() 123 | 124 | println("Database has been serialized") 125 | } 126 | } 127 | 128 | object Database { 129 | def updateCoverage(name: String, messages: Seq[String], f: (Database, Seq[String]) => Database): Database = { 130 | val fileName = s"artifacts/coverage/$name.db" 131 | val fileIn = new FileInputStream(fileName) 132 | val in = new ObjectInputStream(fileIn) 133 | // Method for deserialization of object 134 | val database = try { 135 | in.readObject().asInstanceOf[Database] 136 | } catch { 137 | case e => 138 | println(fileName) 139 | throw e 140 | } 141 | 142 | println("Database has been deserialized") 143 | 144 | val finalDatabase = f(database, messages) 145 | 146 | //val x = Database(database.quantas.map { 147 | // case (cone, quanta) => quanta.cone -> quanta 148 | //}, database.name) 149 | 150 | 151 | val fileOut = new FileOutputStream(fileName) 152 | val out = new ObjectOutputStream(fileOut) 153 | out.flush() 154 | 155 | // Method for serialization of object 156 | out.writeObject(finalDatabase) 157 | in.close() 158 | fileIn.close() 159 | out.close() 160 | fileOut.close() 161 | 162 | finalDatabase 163 | } 164 | def populateCoverage(name: String): Database = { 165 | val file = new File(s"artifacts/coverage/$name.db") 166 | if(file.exists()) { 167 | readCoverage(name) 168 | } else { 169 | Database(Map.empty, name) 170 | } 171 | } 172 | def readCoverage(name: String): Database = { 173 | deserialize(s"artifacts/coverage/$name.db") 174 | } 175 | def deserialize(fileName: String): Database = { 176 | val file = new FileInputStream(fileName) 177 | val in = new ObjectInputStream(file) 178 | // Method for deserialization of object 179 | val database = 180 | in.readObject().asInstanceOf[Database] 181 | in.close() 182 | file.close() 183 | 184 | val x = Database(database.quantas.map { 185 | case (cone, quanta) => quanta.cone -> 
quanta 186 | }, database.name) 187 | 188 | println("Database has been deserialized") 189 | 190 | x 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /src/main/scala/coverage/HistogramSignal.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3._ 4 | 5 | /** Specifies signal whose values will be histogrammed after execution 6 | * 7 | * Can overwrite functions to customize the histogram behavior 8 | * 9 | * @param signal Signal to histogram 10 | */ 11 | class HistogramSignal(val signal: Bits) { 12 | def maxCount = 100 13 | def minValue: Int = signal match { 14 | case _: UInt => 0 15 | case s: SInt => Math.pow(2, s.getWidth - 1).toInt 16 | } 17 | /* Until max is the smallest illegal value, or the max legal value plus 1 */ 18 | def untilMax: Int = signal match { 19 | case u: UInt => Math.pow(2, u.getWidth).toInt 20 | case s: SInt => Math.pow(2, s.getWidth - 1).toInt 21 | } 22 | def nBins: Int = untilMax - minValue 23 | def ticks: Seq[Int] = { 24 | val binInterval = (untilMax - minValue) / nBins 25 | assert(binInterval * nBins + minValue == untilMax, 26 | s"nBins ${nBins} must divide evenly into the range from ${minValue} until ${untilMax}") 27 | val range = Range(minValue, untilMax + 1, binInterval) 28 | range.toList 29 | } 30 | def intervals: Seq[(Option[String], Int, Int)] = ticks.zip(ticks.tail).map { case (lo, hi) => (None, lo, hi) } 31 | def label: Option[String] = None 32 | } 33 | -------------------------------------------------------------------------------- /src/main/scala/coverage/Main.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | object Main extends App { 4 | 5 | 6 | 7 | } 8 | -------------------------------------------------------------------------------- /src/main/scala/coverage/Quanta.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import coverage.cone.Cone 4 | import firrtl.ir 5 | import firrtl.annotations._ 6 | 7 | 8 | /** 9 | * 10 | * @param cone Logic cone that describes quanta 11 | * @param results maps bin category to coverage (if its been triggered) and name of bins matching category 12 | * @param mappings maps cone tip to cone endpoints 13 | */ 14 | case class Quanta(cone: Cone, results: Map[BinCategory, (Boolean, Set[String])], mappings: Map[ReferenceTarget, Seq[ReferenceTarget]]) { 15 | 16 | def tag: String = cone.tag 17 | 18 | private val defaults = results.getOrElse(Default, (false, Set.empty[String])) 19 | require(defaults._2.size <= 1, s"Cannot create quanta with more than one Default bin: $defaults") 20 | 21 | def update(newQuanta: Quanta): Quanta = { 22 | assert(newQuanta.cone == cone) 23 | val newResults = newQuanta.results.keysIterator.map { key => 24 | results.get(key) match { 25 | case None => key -> newQuanta.results(key) 26 | case Some((false, names: Set[String])) => 27 | val (newRes, newNames) = newQuanta.results(key) 28 | key -> (newRes, names.union(newNames)) 29 | case Some((true, names)) => 30 | val (_, newNames) = newQuanta.results(key) 31 | key -> (true, names.union(newNames)) 32 | } 33 | }.toMap 34 | Quanta(cone, results ++ newResults, mappings ++ newQuanta.mappings) 35 | } 36 | 37 | def uncoveredBinCategories: Seq[BinCategory] = results.collect { 38 | case (k, (false, names)) => k 39 | }.toSeq 40 | 41 | def uncoveredIntervals: (Seq[(Int, Int)], Boolean) = { 42 | val bins = 
uncoveredBinCategories 43 | val default = uncoveredBinCategories.collectFirst { case Default => Default } 44 | val binIntervals = uncoveredBinCategories.filter { _.isInstanceOf[BinRange] }.sortBy { 45 | case BinRange(low, high) => low 46 | } 47 | val intervals = binIntervals.map{ 48 | case BinRange(low, high) => (low.toInt, high.toInt) 49 | } 50 | (intervals, default.isDefined) 51 | } 52 | 53 | } 54 | 55 | object Quanta { 56 | def toMessage(signal: ReferenceTarget, cone: Cone, bins: Seq[Bin], database: Database): String = { 57 | val message = bins.zipWithIndex.foldLeft(s"${database.fullTag(cone)}!!${signal.serialize}") { 58 | case (str, (bin@Bin(label, category), idx)) => 59 | category match { 60 | case BinRange(lo, hi) => str + s"!!$idx. $label $lo->$hi=%b" 61 | case Default => str + s"!!$idx. $label default=%b" 62 | } 63 | } 64 | message 65 | } 66 | def fromMessages(database: Database, messages: Seq[String]): Database = { 67 | val ret = messages.foldLeft(database) { (db, string) => 68 | val tokens = string.split("!!") 69 | val (index, tag) = tokens.head.split("#").toSeq match { 70 | case Seq(t: String, i: String) => (i.toInt, t) 71 | } 72 | val label = tokens(1) 73 | val rangedRegex = "([0-9]+)[.] (.+) ([0-9]+)->([0-9]+)=([01])".r 74 | val defaultRegex = "([0-9]+)[.] (.+) default=([01])".r 75 | val cone = db.tagMap(tag)(index) 76 | val quanta = db.quantas(cone) 77 | val results = tokens.drop(2).map { 78 | case rangedRegex(index, l, lo, hi, covered) => 79 | BinRange(BigInt(lo), BigInt(hi)) -> (covered == "1", Set(l)) 80 | case defaultRegex(index, l, covered) => Bin(label, Default) 81 | Default -> (covered == "1", Set(l)) 82 | }.toMap[BinCategory, (Boolean, Set[String])] 83 | db.add(Quanta(cone, results, Map.empty)) 84 | } 85 | ret 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/main/scala/coverage/ResetFinder.scala: -------------------------------------------------------------------------------- 1 | // See LICENSE for license details. 2 | 3 | package firrtl.transforms.clockfinder 4 | 5 | import firrtl.{FEMALE, MALE, MemKind, PortKind, RegKind} 6 | import firrtl.analyses._ 7 | import firrtl.annotations.TargetToken.{Clock, Reset} 8 | import firrtl.annotations._ 9 | import firrtl.ir._ 10 | 11 | import scala.collection.mutable 12 | 13 | 14 | object ResetFinder { 15 | implicit class ResetFinderPML(circuitGraph: CircuitGraph) { 16 | def getResetSource(target: ReferenceTarget): Set[ReferenceTarget] = { 17 | val finder = new ResetFinder(circuitGraph.reverseConnectionGraph) 18 | finder.getResetSource(target) 19 | } 20 | } 21 | } 22 | 23 | /** Instance-Viewed Graph to find clock sources of signals */ 24 | class ResetFinder(reverseGraph: ConnectionGraph) extends ConnectionGraph(reverseGraph.circuit, reverseGraph.digraph, reverseGraph.irLookup) { 25 | 26 | /** Returns the clock sources that are synchronized with given signal target 27 | * @param t 28 | * @return 29 | */ 30 | def getResetSource(t: ReferenceTarget): Set[ReferenceTarget] = { 31 | require( 32 | irLookup.contains(t), 33 | s"Cannot find\n${t.prettyPrint()}\nin circuit, when computing its clock source!" 
34 | ) 35 | 36 | val tpe = irLookup.tpe(t) 37 | 38 | val finalSources = TagSet() 39 | t.leafSubTargets(tpe).foreach { x => 40 | BFS(x, Set.empty[ReferenceTarget]) 41 | 42 | finalSources.update(resetMap.getTag(x).getOrElse(TagSet())) 43 | } 44 | finalSources.targets.toSet 45 | } 46 | 47 | private val extModuleNames = circuit.modules.collect { case e: ExtModule => e.name }.toSet 48 | 49 | // Maps signal to set of clock sources it is synchronized with 50 | private val resetMap = TagMap[ReferenceTarget, TagSet]()//mutable.LinkedHashMap[(String, ReferenceTarget), mutable.HashSet[ReferenceTarget]]() 51 | 52 | // Utility function to determine if a target is a register 53 | private def isReg(t: ReferenceTarget): Boolean = { 54 | t.tryToComplete match { 55 | case rt: ReferenceTarget if !rt.isClock && !rt.isInit && !rt.isReset && irLookup.kind(t) == RegKind => true 56 | case other => false 57 | } 58 | } 59 | 60 | 61 | /** Returns instance-viewed combinational-edges or reg-to-clock edges 62 | * Ends early if visiting a node that was previously visited in another BFS 63 | * @param node the specified node 64 | * @param prevOpt 65 | * @return a Set[T] of all vertices that source has edges to 66 | */ 67 | override def getEdges(node: ReferenceTarget, 68 | prevOpt: Option[collection.Map[ReferenceTarget, ReferenceTarget]] 69 | ): collection.Set[ReferenceTarget] = { 70 | val prev = prevOpt.get 71 | node match { 72 | // If cached result, record clock and end. Exclude cached top-level signals as input port could be a new result 73 | case rt if resetMap.getTag(rt).nonEmpty => 74 | resetMap.tagPath(rt, prev, resetMap.getTag(rt).get) 75 | Set() 76 | 77 | // Top-level Input Port 78 | // Must check if not isClock because expression that is in the clock port of reg could be a port 79 | case rt@ ReferenceTarget(c, m, Nil, _, _) 80 | if irLookup.kind(rt) == PortKind && irLookup.gender(rt) == MALE && !rt.isClock => 81 | //resetMap.tagPath(rt, prev, TagSet(Set(rt))) 82 | Set() 83 | 84 | // Black-box Output Clock Port 85 | case rt: ReferenceTarget 86 | if extModuleNames.contains(rt.encapsulatingModule) && irLookup.gender(rt) == FEMALE => 87 | resetMap.tagPath(rt, prev, TagSet(Set(rt))) 88 | Set() 89 | 90 | // WInvalid Expression 91 | case rt if ConnectionGraph.isInvalid(rt) => Set() 92 | 93 | // Literal Expression 94 | case rt if ConnectionGraph.isLiteral(rt) => Set() 95 | 96 | case rt if irLookup.declaration(rt).isInstanceOf[DefRegister] => 97 | val reset = super.getEdges(rt).collectFirst { 98 | case rt: ReferenceTarget if rt.tokens.last == Reset => super.getEdges(rt).head 99 | }.get 100 | resetMap.tagPath(rt, prev, TagSet(Set(reset))) 101 | Set() 102 | 103 | case nonClockSource => super.getEdges(nonClockSource) 104 | } 105 | } 106 | } 107 | 108 | -------------------------------------------------------------------------------- /src/main/scala/coverage/SignalTracker.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3.Bits 4 | import firrtl.RenameMap 5 | import firrtl.annotations.{Annotation, IsMember} 6 | import firrtl.options.Unserializable 7 | 8 | 9 | object SignalTracker { 10 | def apply(signal: Bits, selector: Seq[IsMember] => Option[IsMember] = ts => Some(ts.head)): SignalTracker = { 11 | SignalTracker(signal, Nil, selector, expanded = false) 12 | } 13 | } 14 | 15 | case class SignalTracker(signal: Bits, targets: Seq[IsMember], finalSelection: Seq[IsMember] => Option[IsMember], expanded: Boolean) extends Annotation with Unserializable { 
16 | def singleUpdate(renames: RenameMap): SignalTracker = { 17 | val renamed = update(renames) 18 | assert(renamed.size == 1, "Signal Tracker should always be renamed to a single other SignalTracker") 19 | renamed.head 20 | } 21 | Seq(1, 2).distinct 22 | 23 | override def update(renames: RenameMap): Seq[SignalTracker] = { 24 | val expandedTargets = if(!expanded) { 25 | assert(targets.isEmpty, "If SignalTracker isn't expanded, its targets should be empty.") 26 | Seq(signal.toTarget) 27 | } else targets 28 | val newMembers = expandedTargets.flatMap { m: IsMember => 29 | renames.get(m) match { 30 | case Some(seq) => seq 31 | case None => Seq(m) 32 | } 33 | } 34 | if(!expanded) Seq(this.copy(targets = newMembers, expanded=true)) else Seq(this.copy(targets = newMembers)) 35 | } 36 | } 37 | 38 | -------------------------------------------------------------------------------- /src/main/scala/coverage/View.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import coverage.cone.Cone 4 | import firrtl.annotations.ReferenceTarget 5 | 6 | import scala.collection.mutable 7 | 8 | 9 | case class GroupView(label: String, weight: Double, goal: Double, views: Seq[View]) { 10 | def uncovered(database: Database): Int = { 11 | views.count(v => v.percentage(database) < v.goal) 12 | } 13 | def covered(database: Database): Int = { 14 | views.count(v => v.percentage(database) >= v.goal) 15 | } 16 | def percentage(database: Database): Double = { 17 | covered(database).toDouble * 100 / views.length 18 | } 19 | def serialize(database: Database): String = { 20 | val group = 21 | s""" =========================================================== 22 | | Group : $label 23 | | =========================================================== 24 | | SCORE WEIGHT GOAL 25 | | ${score(database)} $weight $goal 26 | | ----------------------------------------------------------- 27 | | Summary for Group $label 28 | | CATEGORY EXPECTED UNCOVERED COVERED PERCENT 29 | | Signals ${views.size} ${uncovered(database)} ${covered(database)} ${percentage(database)} 30 | | =========================================================== 31 | |""".stripMargin 32 | 33 | val signals = 34 | s"""| =========================================================== 35 | | Signals for Group $label 36 | |""".stripMargin ++ 37 | views.sortBy{v => v.start.serialize} 38 | .map(_.serialize(database)) 39 | .mkString("\n") 40 | group + signals + " ===========================================================" 41 | } 42 | def score(database: Database): Double = { 43 | val allWeights = views.map(_.weight).sum 44 | val perViewScore = views.map(v => 1.0.min(v.percentage(database) / v.goal) * v.weight) 45 | perViewScore.sum * 100 / allWeights 46 | } 47 | } 48 | case class View(cone: Cone, start: ReferenceTarget, bins: Seq[Bin], weight: Double, goal: Double) { 49 | def coverage(database: Database): Seq[(Bin, Boolean)] = { 50 | val quanta = database.quantas(cone) 51 | bins.map { bin => 52 | (bin, quanta.results(bin.category)._1) 53 | } 54 | } 55 | 56 | def uncovered(database: Database): Int = { 57 | coverage(database).count(!_._2) 58 | } 59 | 60 | def covered(database: Database): Int = { 61 | coverage(database).count(_._2) 62 | } 63 | def percentage(database: Database): Double = { 64 | covered(database).toDouble * 100 / bins.length 65 | } 66 | 67 | def endPoints(database: Database): Seq[ReferenceTarget] = database.quantas(cone).mappings(start) 68 | 69 | def serialize(database: Database): String = { 70 | val x = Seq( 
71 | Seq(" SIGNAL", "EXPECTED", "UNCOVERED", "COVERED", "PERCENT", "GOAL", "WEIGHT"), 72 | Seq(" " + start.serialize, bins.size, uncovered(database), covered(database), percentage(database), goal, weight).map(_.toString) 73 | ) 74 | val summary = " -----------------------------------------------------------\n" + alignRows(x) 75 | /* 76 | s""" 77 | | SIGNAL EXPECTED UNCOVERED COVERED PERCENT GOAL WEIGHT 78 | | $start $expected $uncovs $per $goal $weight 79 | | Summary for Signal $start 80 | | 81 | | CATEGORY EXPECTED UNCOVERED COVERED PERCENT 82 | | User Defined Bins $expected $uncovs $cov $per 83 | | 84 | | User Defined Bins for $start 85 | | Bins 86 | | 87 | |""" 88 | */ 89 | 90 | summary + "\n" + serializeBins(database) 91 | } 92 | def getWidth(seq: Seq[String]): Int = { 93 | seq.map(_.length()).max 94 | } 95 | def setWidth(seq: Seq[String]): Seq[String] = { 96 | val width = getWidth(seq) 97 | seq.map(s => 98 | s + (" " * (width - s.length())) 99 | ) 100 | } 101 | 102 | def alignRows(rows: Seq[Seq[String]]): String = { 103 | val ncols = rows.map(_.length).max 104 | val cols = mutable.ArrayBuffer[Seq[String]]() 105 | for(c <- 0 until ncols) { 106 | cols += rows.map(row => row(c)) 107 | } 108 | alignCols(cols) 109 | } 110 | 111 | def alignCols(columns: Seq[Seq[String]]): String = { 112 | 113 | val alignedColWidths = columns.map(setWidth) 114 | 115 | val nrows = columns.map(_.length).max 116 | val ncols = columns.length 117 | val stringBuilder = new StringBuilder() 118 | for(r <- 0 until nrows) { 119 | for(c <-0 until ncols) { 120 | stringBuilder ++= (alignedColWidths(c)(r) + " ") 121 | } 122 | stringBuilder ++= "\n" 123 | } 124 | stringBuilder.toString 125 | } 126 | 127 | def serializeBins(database: Database): String = { 128 | val rep = coverage(database) 129 | val (ns, cs, cvs) = rep.foldLeft(Vector(" NAME"), Vector("CATEGORY"), Vector("COVERED")) { 130 | case ((names, categories, covereds), (bin, covered)) => 131 | (names :+ (" " + bin.label), categories :+ bin.category.serialize, covereds :+ covered.toString) 132 | } 133 | alignCols(Seq(ns, cs, cvs)) 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/main/scala/coverage/cone/Cone.scala: -------------------------------------------------------------------------------- 1 | package coverage.cone 2 | 3 | case class Cone(statements: Seq[ConeStatement]) { 4 | lazy val withIndex = statements.zipWithIndex 5 | lazy val terminalIndexes = withIndex.collect { 6 | case (t: Terminal, idx) => idx 7 | } 8 | lazy val tag: String = { 9 | val hash = this.hashCode().toHexString 10 | "0" * (8 - hash.length) + hash 11 | } 12 | def serialize(indent: String = "") = { 13 | val idxWidth = statements.length.toString.size 14 | def serializeIndex(i: Int): String = { 15 | val idxStr = i.toString 16 | val nSpaces = idxWidth - idxStr.length 17 | idxStr + (" " * nSpaces) 18 | } 19 | s"${indent}Cone $tag:\n" + withIndex.map { 20 | case (Terminal(width, tpe), idx) => 21 | s"${indent} ${serializeIndex(idx)}: terminal $tpe<$width>" 22 | case (Literal(width, tpe, value), idx) => 23 | s"${indent} ${serializeIndex(idx)}: literal $tpe<$width>($value)" 24 | case (Invalid(width, tpe), idx) => 25 | s"${indent} ${serializeIndex(idx)}: invalid $tpe<$width>" 26 | case (Assign(op, args, consts), idx) => 27 | s"${indent} ${serializeIndex(idx)}: assign $op((${args.mkString(", ")}), (${consts.mkString(",")}))" 28 | }.mkString("\n") 29 | } 30 | 31 | } 32 | 33 | 34 | 35 | 
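Editor's note — a hedged usage sketch, not part of the repository: the snippet below hand-builds the same two-terminal "add" cone that ConeSpec expects, to make the index-based ConeStatement encoding concrete. The argument indices of Assign point back into the statements sequence, and tag is the cone's zero-padded hex hashCode.

    // Assumed to be run with coverage.cone._ in scope.
    val addCone = Cone(Seq(
      Assign("add", Seq(1, 2), Nil), // statement 0: add the values of statements 1 and 2
      Terminal(8, "UInt"),           // statement 1: 8-bit UInt endpoint
      Terminal(8, "UInt")            // statement 2: 8-bit UInt endpoint
    ))
    println(addCone.tag)         // 8-character hex tag derived from hashCode
    println(addCone.serialize()) // "Cone <tag>:" followed by one indexed line per statement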
-------------------------------------------------------------------------------- /src/main/scala/coverage/cone/ConeFinder.scala: -------------------------------------------------------------------------------- 1 | package coverage.cone 2 | 3 | import firrtl.analyses._ 4 | import firrtl.annotations.TargetToken.{Clock, Init, Reset} 5 | import firrtl.annotations._ 6 | import firrtl.ir._ 7 | import firrtl.{FEMALE, MALE, PortKind, WRef, WSubField} 8 | 9 | import scala.collection.mutable 10 | 11 | 12 | case class ConeFinder(reverseGraph: ConnectionGraph) extends ConnectionGraph(reverseGraph.circuit, reverseGraph.digraph, reverseGraph.irLookup) { 13 | private val extModuleNames = circuit.modules.collect { case e: ExtModule => e.name }.toSet 14 | private val ref2index = mutable.HashMap[ReferenceTarget, Int]() 15 | private val index2refs = mutable.HashMap[Int, Set[ReferenceTarget]]() 16 | private val stmts = mutable.HashMap[Int, ConeStatement]() 17 | private val endpoints = mutable.HashSet[ReferenceTarget]() 18 | 19 | 20 | /** Finds clock sources of specific signals 21 | * 22 | * If target is: 23 | * - ReferenceTarget; calculate clock source, must be ground type 24 | * - InstanceTarget; calculate clock source of all input ports to that instance 25 | * - ModuleTarget; calculate clock source of all output ports 26 | * - CircuitTarget; calculate clock source of all output ports of all modules 27 | * 28 | * @param targets 29 | * @return 30 | */ 31 | /* 32 | def getClockSources(targets: Seq[CompleteTarget]): Map[ReferenceTarget, Set[ReferenceTarget]] = { 33 | val memberTargets: Seq[IsMember] = targets.map { 34 | case ct: CircuitTarget => ct.module(ct.circuit) 35 | case other: IsMember => other 36 | }.distinct 37 | 38 | val moduleOrder = new InstanceGraph(circuit).moduleOrder.zipWithIndex.map { 39 | case (m, i) => m.name -> i 40 | }.toMap 41 | 42 | val topoTargets = memberTargets.sortWith { (t0, t1) => 43 | (t0, t1) match { 44 | case (x: CircuitTarget, _) => false 45 | case (_, x: CircuitTarget) => true 46 | case (x: IsMember, y: IsMember) => moduleOrder(x.module) > moduleOrder(y.module) 47 | } 48 | } 49 | 50 | val ret = memberTargets.foldLeft(Map.empty[ReferenceTarget, Set[ReferenceTarget]]) { (map, t) => 51 | t match { 52 | case it: InstanceTarget => 53 | val lit = it.asReference.pathlessTarget 54 | 55 | val inputTargets = lit.leafSubTargets(irLookup.tpe(lit)).collect { 56 | case r if irLookup.gender(r) == FEMALE => r 57 | } 58 | inputTargets.foldLeft(map){ (m, inTarget) => 59 | val x = it.addReference(inTarget) 60 | m ++ Map(x -> getClockSource(x)) 61 | } 62 | case rt: ReferenceTarget => map ++ Map(rt -> getClockSource(rt)) 63 | case mt: ModuleTarget => 64 | val outputTargets = irLookup.ports(mt).flatMap { irLookup.leafTargets }.collect { 65 | case r if irLookup.gender(r) == FEMALE => r 66 | } 67 | outputTargets.foldLeft(map) { (m, ot) => m ++ Map(ot -> getClockSource(ot)) } 68 | } 69 | } 70 | ret 71 | } 72 | */ 73 | 74 | 75 | /** Returns the clock sources that are synchronized with given signal target 76 | * @param t 77 | * @return 78 | */ 79 | def getCone(start: ReferenceTarget, ends: Seq[ReferenceTarget]): Cone = { 80 | // Check existence of targets 81 | // Check type of targets 82 | 83 | index2refs(0) = Set(start) 84 | ref2index(start) = 0 85 | endpoints ++= ends 86 | BFS(start) 87 | 88 | val cone = Cone(stmts.toSeq.sortBy { case (k, v) => k }.map(_._2)) 89 | 90 | stmts.clear() 91 | index2refs.clear() 92 | ref2index.clear() 93 | endpoints.clear() 94 | 95 | cone 96 | } 97 | 98 | def getConeInfo(rt: 
ReferenceTarget): (BigInt, String) = { 99 | irLookup.tpe(rt) match { 100 | case UIntType(IntWidth(w)) => (w, "UInt") 101 | case SIntType(IntWidth(w)) => (w, "SInt") 102 | case ClockType => (BigInt(1), "Clock") 103 | case AsyncResetType => (BigInt(1), "AsyncReset") 104 | case other => sys.error(s"Illegal type: $other") 105 | } 106 | } 107 | 108 | override def getEdges(node: ReferenceTarget, 109 | prevOpt: Option[collection.Map[ReferenceTarget, ReferenceTarget]] 110 | ): collection.Set[ReferenceTarget] = { 111 | val prev = prevOpt.get 112 | 113 | assert(ref2index.contains(node), s"${node.serialize}") 114 | val index = ref2index(node) 115 | //println(s"$index: ${node.serialize}, $stmts") 116 | 117 | node match { 118 | // If find endpoint, tag path and end 119 | case rt if endpoints.contains(rt) => 120 | val (width, tpe) = getConeInfo(rt) 121 | stmts(index) = Terminal(width, tpe) 122 | Set() 123 | 124 | // Top-level Input Port 125 | // Must check if not isClock because expression that is in the clock port of reg could be a port 126 | case rt@ ReferenceTarget(c, m, Nil, _, _) 127 | if irLookup.kind(rt) == PortKind && irLookup.gender(rt) == MALE && !rt.isClock => 128 | val (width, tpe) = getConeInfo(rt) 129 | stmts(index) = Terminal(width, tpe) 130 | Set() 131 | //sys.error("Shouldn't reach a top-level input port! must be included in the endpoints.") 132 | 133 | // Black-box Output Clock Port 134 | case rt: ReferenceTarget 135 | if extModuleNames.contains(rt.encapsulatingModule) && irLookup.gender(rt) == FEMALE => 136 | sys.error("Shouldn't reach a blackbox output port! must be included in the endpoints.") 137 | val (width, tpe) = getConeInfo(rt) 138 | stmts(index) = Terminal(width, tpe) 139 | Set() 140 | 141 | // WInvalid Expression 142 | case rt if ConnectionGraph.isInvalid(rt) => 143 | val (width, tpe) = getConeInfo(prev(rt)) 144 | stmts(index) = Invalid(width, tpe) 145 | Set() 146 | 147 | // Literal Expression 148 | case rt if ConnectionGraph.isLiteral(rt) => 149 | val int = rt.ref match { 150 | case TokenTagger.literalRegex(value) => value 151 | } 152 | val (width, tpe) = getConeInfo(rt) 153 | stmts(index) = Literal(width, tpe, BigInt(int)) 154 | Set() 155 | 156 | /* 157 | case nonClockSource if hasShortCut(nonClockSource) => 158 | val edges = super.getEdges(nonClockSource) 159 | val localSource = nonClockSource.pathlessTarget 160 | */ 161 | 162 | 163 | case other => 164 | val edges = super.getEdges(other) 165 | def getIndexes() = { 166 | edges.map{ rt => 167 | index2refs(index2refs.keys.size) = Set(rt) 168 | val index = index2refs.keys.size - 1 169 | ref2index(rt) = index 170 | index 171 | }.toSeq 172 | } 173 | irLookup.expr(other) match { 174 | case DoPrim(op, args, consts, tpe) => 175 | stmts(index) = Assign(op.serialize, getIndexes, consts) 176 | case Mux(c, t, f, tpe) => 177 | stmts(index) = Assign("mux", getIndexes, Nil) 178 | case ValidIf(c, v, tpe) => 179 | stmts(index) = Assign("validif", getIndexes, Nil) 180 | case nonOp => 181 | irLookup.declaration(other) match { 182 | case d: DefRegister 183 | if !(other.component.nonEmpty && 184 | Set[TargetToken](Clock, Reset, Init).contains(other.component.last)) => 185 | val indexes = getIndexes() 186 | stmts(index) = Assign("reg", indexes, Nil) 187 | case _ => 188 | edges.foreach { rt => 189 | ref2index(rt) = index 190 | index2refs(index) = index2refs(index) ++ Set(rt) 191 | } 192 | /* 193 | case d: DefMemory => 194 | def onExp(e: Expression): Expression = e match { 195 | case WSubField(WSubField(WRef(mem, _, _, _), port, _, _), sig, _, _) 
=> 196 | port match { 197 | case r if d.readers.contains(port) => 198 | case w if d.writers.contains(port) => 199 | case rw if d.readwriters.contains(port) => 200 | } 201 | case o => o mapExpr onExp 202 | } 203 | nonOp 204 | stmts(index) = Assign("reg", indexes, Nil) 205 | */ 206 | 207 | } 208 | } 209 | edges 210 | } 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /src/main/scala/coverage/cone/ConeStatement.scala: -------------------------------------------------------------------------------- 1 | package coverage.cone 2 | 3 | sealed trait ConeStatement 4 | case class Terminal(width: BigInt, tpe: String) extends ConeStatement 5 | case class Literal(width: BigInt, tpe: String, value: BigInt) extends ConeStatement 6 | case class Invalid(width: BigInt, tpe: String) extends ConeStatement 7 | case class Assign(op: String, args: Seq[Int], consts: Seq[BigInt]) extends ConeStatement 8 | case class Reg(clock: Int, reset: Int, init: Int, in: Int) extends ConeStatement 9 | case class ConeTerminal(cone: String, name: String) extends ConeStatement 10 | case class ConeStart(cone: String, name: String) extends ConeStatement 11 | case class ConeCopy(ends: Seq[String], start: String, cone: String) extends ConeStatement 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/test/scala/coverage/CoverageSpec.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import mini._ 4 | 5 | case class TestBuilder(testSeq: String*) extends TestType { 6 | val tests = testSeq.toList 7 | val maxcycles = 15000L 8 | } 9 | class XORMiniCoverageSpec extends TileTests( 10 | TestBuilder("rv32ui-p-xor"), 11 | annotations = Seq(Coverages.testerALUCoverageAspect, DatabaseAnnotation("XORALUCoverage", true)), 12 | params = Some((new MiniConfig).toInstance alter { (site, here, up) => { 13 | case CacheBlockBytes => up(CacheBlockBytes) 14 | case Trace => false 15 | }}) 16 | ) 17 | 18 | class SHLMiniCoverageSpec extends TileTests( 19 | TestBuilder("rv32ui-p-sll"), 20 | annotations = Seq(Coverages.testerALUCoverageAspect, DatabaseAnnotation("SHLALUCoverage", true)), 21 | params = Some((new MiniConfig).toInstance alter { (site, here, up) => { 22 | case CacheBlockBytes => up(CacheBlockBytes) 23 | case Trace => false 24 | }}) 25 | ) 26 | 27 | class BothMiniCoverageSpec extends TileTests( 28 | TestBuilder("rv32ui-p-xor", "rv32ui-p-sll"), 29 | annotations = Seq(Coverages.testerALUCoverageAspect, DatabaseAnnotation("BothALUCoverage", true)), 30 | params = Some((new MiniConfig).toInstance alter { (site, here, up) => { 31 | case CacheBlockBytes => up(CacheBlockBytes) 32 | case Trace => false 33 | }}) 34 | ) 35 | 36 | class ISAMiniCoverageSpec extends TileTests( 37 | ISATests, 38 | annotations = Seq(Coverages.testerALUCoverageAspect, DatabaseAnnotation("ISAALUCoverage", true)), 39 | params = Some((new MiniConfig).toInstance alter { (site, here, up) => { 40 | case CacheBlockBytes => up(CacheBlockBytes) 41 | case Trace => false 42 | }}) 43 | ) 44 | -------------------------------------------------------------------------------- /src/test/scala/coverage/DatabaseSpec.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import coverage.cone.{Assign, Cone, Terminal} 4 | import firrtl.annotations.ReferenceTarget 5 | import org.scalatest.FlatSpec 6 | import firrtl.ir 7 | import firrtl.ir.UIntType 8 | import TestUtils._ 9 | 10 | 
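// Editor's note (added comment, not in the original source): the fixtures below build two
// single-operation cones ("add" and "sub"), wrap each in a Quanta holding one ranged bin plus
// one Default bin, and collect them into a Database named "Smoke". quanta3 deliberately reuses
// cone2 so the "Updating quantas" test can exercise Database.add's merge behaviour: bin hits
// are OR-ed together and the start-point mappings for "sub" and "sub2" are unioned.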
object DatabaseSpec { 11 | case class IntWidth(width: BigInt) extends ir.Width { 12 | override def serialize: String = width.toString 13 | } 14 | 15 | val ref8 = ir.Reference("", UIntType(IntWidth(8))) 16 | val cone1 = Cone(Seq(Assign("add", Seq(1, 1), Nil), Terminal(8, "UInt"))) 17 | val cone2 = Cone(Seq(Assign("sub", Seq(1, 1), Nil), Terminal(8, "UInt"))) 18 | 19 | val lowFalse = BinRange(0, 10) -> (false, Set("low")) 20 | val lowTrue = BinRange(0, 10) -> (true, Set("low")) 21 | val defaultFalse = Default -> (false, Set.empty[String]) 22 | val defaultTrue = Default -> (true, Set.empty[String]) 23 | 24 | val quanta1 = Quanta( 25 | cone1, 26 | Map(lowFalse, defaultFalse), 27 | Map(("~Top|Top>add".RT, Seq("~Top|Top>x".RT, "~Top|Top>y".RT))) 28 | ) 29 | val quanta2 = Quanta( 30 | cone2, 31 | Map(lowFalse, defaultTrue), 32 | Map(("~Top|Top>sub".RT, Seq("~Top|Top>x".RT, "~Top|Top>y".RT))) 33 | ) 34 | val quanta3 = Quanta( 35 | cone2, 36 | Map(lowTrue, defaultTrue), 37 | Map(("~Top|Top>sub2".RT, Seq("~Top|Top>x".RT, "~Top|Top>z".RT))) 38 | ) 39 | 40 | val db = Database(Map(quanta1.cone -> quanta1, quanta2.cone -> quanta2), "Smoke") 41 | 42 | def writeDB: Database = { 43 | db.writeCoverage("Smoke") 44 | db 45 | } 46 | } 47 | 48 | class DatabaseSpec extends FlatSpec { 49 | import DatabaseSpec._ 50 | 51 | "Serialization and deserialization" should "work" in { 52 | val db = writeDB 53 | val readDB = Database.readCoverage("Smoke") 54 | assert(db == readDB) 55 | } 56 | 57 | "Updating quantas" should "work" in { 58 | val newDB = db.add(quanta3) 59 | val q3coverage = newDB.coverage(quanta3.cone) 60 | val q3mappings = newDB.targets(quanta3.cone) 61 | assert(q3coverage(lowFalse._1)._1) 62 | assert(q3coverage(defaultFalse._1)._1) 63 | assert(q3mappings.contains("~Top|Top>sub".RT)) 64 | assert(q3mappings.contains("~Top|Top>sub2".RT)) 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /src/test/scala/coverage/MiniCoverages.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import chisel3._ 4 | import chisel3.aop.Select 5 | import chisel3.aop.injecting.InjectingAspect 6 | import firrtl.options.{RegisteredLibrary, ShellOption} 7 | import mini.{ALU, Tile, TileTester} 8 | 9 | object Coverages { 10 | def someBins: Seq[Bin] = Seq( 11 | Bin("Zero", BinRange(0, 0)), 12 | Bin("One", BinRange(1, 1)), 13 | Bin("Two", BinRange(2, 2)), 14 | Bin("Three", BinRange(3, 3)), 15 | Bin("Little", BinRange(4, 100)), 16 | Bin("Big", BinRange(101, 1000)), 17 | Bin("Huge", BinRange(1001, 2000)), 18 | Bin("Default", Default)) 19 | def everyBin(signal: Bits): Seq[Bin] = { 20 | val maxValue = math.pow(2.0, signal.getWidth.toDouble) 21 | val ticks = Range(0, maxValue.toInt, 1).toList 22 | ticks.map { 23 | case tick => Bin(tick.toString, BinRange(tick, tick)) 24 | } 25 | } 26 | def ALUCoverage(alu: mini.ALU): Seq[CoverPoint] = { 27 | val endPoints = Seq(alu.io.A, alu.io.B, alu.io.alu_op) 28 | val covers = Seq( 29 | CoverPoint("alu", alu.io.out, endPoints, someBins), 30 | CoverPoint("alu", alu.io.sum, endPoints, someBins), 31 | CoverPoint("alu", alu.io.alu_op, endPoints, everyBin(alu.io.alu_op)) 32 | ) 33 | alu 34 | 35 | covers 36 | } 37 | 38 | def testerALUCoverage(tester: TileTester): Seq[CoverPoint] = { 39 | ALUCoverage(tester.dut.asInstanceOf[Tile].core.dpath.alu) 40 | } 41 | 42 | val testerALUCoverageAspect = CoverAspect(testerALUCoverage, 43 | Nil, 44 | CoverageOptions(Map( 45 | //DesignDone -> { tester: 
TileTester => tester.isDone }, 46 | SimulatorDone -> { tester: TileTester => tester.setDone } 47 | )) 48 | ) 49 | 50 | val testerALULogger = InjectingAspect({tester: TileTester => Seq(tester.dut.asInstanceOf[mini.Tile].core.dpath.alu)},{ 51 | alu: ALU => { 52 | printf("reset == %b, out == %d, sum == %d\n", alu.reset.asUInt(), alu.io.out, alu.io.sum) 53 | } 54 | }) 55 | } 56 | 57 | /* 58 | 59 | case class MiniCoverage() extends RegisteredLibrary { 60 | val name = "Mini-Coverage" 61 | val options = Seq(new ShellOption[String]( 62 | longOption = "floorplan", 63 | toAnnotationSeq = { 64 | case "dci" => Seq(FloorplanAspect("Mini_DCI","test_run_dir/html/myfloorplan",{ t: TileBase => Floorplans.layoutTile(t, 0) })) 65 | case "icd" => Seq(FloorplanAspect("Mini_ICD","test_run_dir/html/myfloorplan",{ t: TileBase => Floorplans.layoutTile(t, 1) })) 66 | }, 67 | helpText = "The name of a mini floorplan must be indicating the relative positions of the icache, core, and dcache.", 68 | helpValueName = Some(""))) 69 | } 70 | */ 71 | -------------------------------------------------------------------------------- /src/test/scala/coverage/TestUtils.scala: -------------------------------------------------------------------------------- 1 | package coverage 2 | 3 | import firrtl.annotations.ReferenceTarget 4 | 5 | object TestUtils { 6 | implicit class StringRefTypeClass(str: String) { 7 | def RT: ReferenceTarget = { 8 | firrtl.annotations.Target.deserialize(str) match { 9 | case r: ReferenceTarget => r 10 | case other => sys.error(s"Cannot convert $str to a ReferenceTarget: $other") 11 | } 12 | } 13 | } 14 | 15 | } 16 | -------------------------------------------------------------------------------- /src/test/scala/coverage/cone/ConeSpec.scala: -------------------------------------------------------------------------------- 1 | package coverage.cone 2 | 3 | import firrtl.analyses.{CircuitGraph, ConnectionGraph} 4 | import firrtl.{ChirrtlForm, CircuitState, LowFirrtlCompiler, LowForm} 5 | import firrtl.ir.Circuit 6 | import firrtl.stage.FirrtlStage 7 | import firrtlTests.FirrtlFlatSpec 8 | import org.scalatest.FlatSpec 9 | import coverage.TestUtils._ 10 | 11 | object ConeSpec { 12 | val circuitString = 13 | s"""circuit Top: 14 | | module Top: 15 | | input in: UInt<8>[4] 16 | | input clock: Clock 17 | | input reset: UInt<1> 18 | | output out: UInt<8>[5] 19 | | out[0] <= add(in[0], in[1]) 20 | | out[1] <= add(in[2], in[3]) 21 | | out[2] is invalid 22 | | reg r: UInt<8>, clock with: (reset => (reset, in[0])) 23 | | r <= in[3] 24 | | out[3] <= r 25 | | node x = add(in[0], UInt(0)) 26 | | out[4] <= x 27 | """.stripMargin 28 | 29 | def lower(circuit: Circuit): Circuit = { 30 | val xForms = firrtl.CompilerUtils.getLoweringTransforms(ChirrtlForm, LowForm) 31 | val res = xForms.foldLeft(CircuitState(circuit, ChirrtlForm)) { (cs, xform) => xform.runTransform(cs) } 32 | res.circuit 33 | } 34 | } 35 | 36 | class ConeSpec extends FirrtlFlatSpec { 37 | import ConeSpec._ 38 | 39 | val circuit = toMiddleFIRRTL(parse(circuitString)) 40 | val coneGraph = ConeFinder(ConnectionGraph(circuit).reverseConnectionGraph) 41 | 42 | "Identical add cones" should "match" in { 43 | val out0Cone = coneGraph.getCone("~Top|Top>out[0]".RT, Seq("~Top|Top>in[0]".RT, "~Top|Top>in[1]".RT)) 44 | val out1Cone = coneGraph.getCone("~Top|Top>out[1]".RT, Seq("~Top|Top>in[2]".RT, "~Top|Top>in[3]".RT)) 45 | out0Cone should be(out1Cone) 46 | out0Cone should be(Cone(Seq( 47 | Assign("add", Seq(1, 2), Nil), 48 | Terminal(8, "UInt"), 49 | Terminal(8, "UInt") 50 
| ))) 51 | } 52 | 53 | "Invalidated signals" should "match" in { 54 | val a = coneGraph.getCone("~Top|Top>out[2]".RT, Nil) 55 | a should be(Cone(Seq(Invalid(BigInt(8), "UInt")))) 56 | } 57 | 58 | "Registered signals" should "be unique" in { 59 | val r = coneGraph.getCone("~Top|Top>out[3]".RT, Seq( 60 | "~Top|Top>clock".RT, 61 | "~Top|Top>reset".RT, 62 | "~Top|Top>in[0]".RT, 63 | "~Top|Top>in[3]".RT 64 | )) 65 | r should be(Cone(Seq( 66 | Assign("reg", List(1, 2, 3, 4), Nil), 67 | Terminal(1, "Clock"), 68 | Terminal(1, "UInt"), 69 | Terminal(8, "UInt"), 70 | Terminal(8, "UInt") 71 | ))) 72 | } 73 | 74 | "Literals" should "work properly" in { 75 | val r = coneGraph.getCone("~Top|Top>out[4]".RT, Seq( 76 | "~Top|Top>in[0]".RT 77 | )) 78 | r should be(Cone(Seq( 79 | Assign("add", List(1, 2), Nil), 80 | Terminal(8, "UInt"), 81 | Literal(1, "UInt", 0) 82 | ))) 83 | } 84 | 85 | "Memories" should "work properly" ignore { 86 | sys.error("Not Implemented") 87 | } 88 | } 89 | --------------------------------------------------------------------------------
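Editor's note — the pieces above compose into a single flow: CoverAspect collects CoverPoint definitions from a design, CoverTransform instruments the circuit and keeps a Database under artifacts/coverage/, and the printed GroupView report summarizes bin coverage after simulation. The sketch below is a hedged recombination of calls that already appear in MiniCoverages.scala and CoverageSpec.scala, not new repository code; the database name "MyALUCoverage" and the helper name myALUCover are hypothetical stand-ins.

    // Hedged sketch assuming the same imports as MiniCoverages.scala (coverage._, mini._).
    def myALUCover(alu: mini.ALU): Seq[CoverPoint] = Seq(
      // Cover alu.io.out with one exact-value bin and a catch-all Default bin,
      // treating the ALU inputs as the cone's endpoints.
      CoverPoint("alu", alu.io.out, Seq(alu.io.A, alu.io.B, alu.io.alu_op),
        Seq(Bin("Zero", BinRange(0, 0)), Bin("Other", Default)))
    )

    val coverageAnnos = Seq(
      CoverAspect(
        { tester: TileTester => myALUCover(tester.dut.asInstanceOf[Tile].core.dpath.alu) },
        Nil,
        CoverageOptions(Map(SimulatorDone -> { tester: TileTester => tester.setDone }))
      ),
      // createIfNew = true: populate artifacts/coverage/MyALUCoverage.db on the first run,
      // then merge results from later runs into it.
      DatabaseAnnotation("MyALUCoverage", createIfNew = true)
    )
    // Passing coverageAnnos as the `annotations` of a TileTests spec (as CoverageSpec.scala
    // does) triggers CoverTransform; after simulation the GroupView report is printed and the
    // updated database is re-serialized under artifacts/coverage/.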