├── .gitignore ├── .gitmodules ├── .mill-version ├── .scalafmt.conf ├── Makefile ├── README.md ├── arithmetics ├── src │ ├── adders │ │ ├── Adder.scala │ │ ├── AdderGraph.scala │ │ ├── BitsAdderModule.scala │ │ ├── BitsPrefixAdder.scala │ │ ├── BrentKung.scala │ │ ├── CarryLookAhead.scala │ │ ├── FlaggedSklansky.scala │ │ ├── KoggeStone.scala │ │ ├── LadnerFischer.scala │ │ ├── PipelinableAdder.scala │ │ ├── PrefixAdder.scala │ │ ├── RippleCarry.scala │ │ ├── Sklansky.scala │ │ ├── drawio.scala │ │ └── package.scala │ ├── dividers │ │ └── LutDivider.scala │ ├── masking │ │ ├── DOM.scala │ │ ├── HPC2.scala │ │ ├── VerifModule.scala │ │ ├── adder │ │ │ ├── BKAdder.scala │ │ │ ├── BooleanMaskedAdderModule.scala │ │ │ ├── KSAdder.scala │ │ │ ├── LFAdder.scala │ │ │ ├── MaskedAdder.scala │ │ │ ├── RCAdder.scala │ │ │ └── SKAdder.scala │ │ └── conversion │ │ │ ├── A2B.scala │ │ │ └── b2a │ │ │ └── B2A.scala │ └── modular │ │ └── InvariantDivider.scala └── test │ └── src │ └── adders │ └── AdderSpec.scala ├── build.mill ├── inline_bind.py ├── mill ├── plot_prolead.py ├── requirements.txt ├── run_prolead.py ├── yosys_synth_bka.tcl ├── yosys_synth_ksa.tcl ├── yosys_synth_rca.tcl └── yosys_synth_ska.tcl /.gitignore: -------------------------------------------------------------------------------- 1 | .bloop/ 2 | .metals/ 3 | .vscode/ 4 | *.class 5 | *.log 6 | .idea/ 7 | .idea/* 8 | out/ 9 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "dependencies/chest"] 2 | path = chest 3 | url = https://github.com/kammoh/chest.git 4 | -------------------------------------------------------------------------------- /.mill-version: -------------------------------------------------------------------------------- 1 | 0.12.8-native -------------------------------------------------------------------------------- /.scalafmt.conf: 
-------------------------------------------------------------------------------- 1 | version = "3.7.17" 2 | runner.dialect = scala213 3 | 4 | maxColumn = 120 5 | continuationIndent.defnSite = 2 6 | assumeStandardLibraryStripMargin = true 7 | docstrings.style = SpaceAsterisk 8 | lineEndings = preserve 9 | includeCurlyBraceInSelectChains = false 10 | danglingParentheses.preset = true 11 | danglingParentheses.defnSite = false 12 | trailingCommas = keep 13 | 14 | align.preset = some 15 | align.tokens = [ 16 | { 17 | code = "=>" 18 | owners = [{ 19 | regex = "case" 20 | }] 21 | }, { 22 | 23 | } 24 | ] 25 | 26 | newlines.beforeCurlyLambdaParams = false 27 | newlines.alwaysBeforeMultilineDef = false 28 | newlines.implicitParamListModifierForce = [] 29 | newlines.topLevelStatementBlankLines = [ 30 | { 31 | blanks { before = 1, after = 0, beforeEndMarker = 0 }, regex = "^case " 32 | } 33 | ] 34 | 35 | verticalMultiline.atDefnSite = true 36 | 37 | optIn.annotationNewlines = true 38 | optIn.configStyleArguments = true 39 | 40 | 41 | binPack.literalArgumentLists = true 42 | binPack.literalsMinArgCount = 5 43 | binPack.literalsInclude = [".*"] 44 | # binPack.literalsExclude = ["String", "Term.Name"] 45 | binPack.literalsIncludeSimpleExpr = true 46 | binPack.literalsSingleLine = false 47 | 48 | # rewrite.imports.groups = [ 49 | # ["java\..*"], 50 | # ["scala\..*"], 51 | # ["chisel3\..*"], 52 | # ["chisel3\.util\..*"], 53 | # ] 54 | 55 | rewrite.imports.sort = none 56 | rewrite.imports.contiguousGroups = no 57 | # rewrite.imports.expand = false 58 | rewrite.rules = [Imports, PreferCurlyFors] 59 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | .PHONY: bloop idea bsp clean 3 | 4 | 5 | MILL ?= ./mill 6 | BLOOP ?= bloop 7 | 8 | bloop: 9 | $(MILL) --import ivy:com.lihaoyi::mill-contrib-bloop: mill.contrib.bloop.Bloop/install 10 | 11 | idea: 12 | 
$(MILL) -j 0 mill.idea.GenIdea/idea 13 | 14 | bsp: 15 | $(MILL) -j 0 mill.bsp.BSP/install 16 | 17 | build: 18 | $(MILL) _.compile 19 | 20 | clean: 21 | # $(BLOOP) clean ntt --propagate 22 | $(MILL) __.clean 23 | $(MILL) __.shutdown 24 | $(RM) -r out/ .bloop/ .idea/ .metals/ 25 | # jps -l |grep bloop.Server | awk '{print $1}' | xargs kill -TERM 26 | # jps -l |grep mill.runner.MillServerMain | awk '{print $1}' | xargs kill -TERM 27 | $(RM) -r out/ .bloop/ .idea/ .metals/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # chisel-arithmetics 2 | Implementation of low-level hardware arithmetic operations in Chisel 3 | 4 | 5 | Adders 6 | ====== 7 | - Carry Look Ahead (CLA) 8 | 9 | - Prefix network adders: 10 | - Brent-Kung 11 | - Kogge-Stone 12 | 13 | **Work in progress...** -------------------------------------------------------------------------------- /arithmetics/src/adders/Adder.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | import chisel3.experimental.SourceInfo 4 | 5 | trait Adder[T] { 6 | 7 | def filler(x: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = x 8 | 9 | def add(x: Seq[T], y: Seq[T], cin: Option[T] = None): Seq[T] 10 | 11 | def xor(a: T, b: T)(implicit sourceInfo: SourceInfo): T 12 | 13 | def xor(a: T, b: Option[T])(implicit sourceInfo: SourceInfo): T = { 14 | b.map(xor(a, _)).getOrElse(a) 15 | } 16 | 17 | // def xor(a: Option[T], b: T): T = { 18 | // a.map(xor(b, _)).getOrElse(b) 19 | // } 20 | 21 | def xor(a: Option[T], b: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = { 22 | a.map(xor(_, b)).orElse(b) 23 | } 24 | 25 | def xor(a: Option[T], b: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = xor(xor(a, b), c) 26 | 27 | def xor(a: T, b: T, c: T)(implicit sourceInfo: SourceInfo): T = xor(xor(a, b), c) 28 | 29 | def 
xor(a: T, b: T, c: Option[T])(implicit sourceInfo: SourceInfo): T = xor(xor(a, b), c)
30 |
// Element-wise XOR of two equal-length sequences (zip truncates to the shorter).
31 | def xor(a: Seq[T], b: Seq[T]): Seq[T] = (a zip b).map { case (ai, bi) => xor(ai, bi) }
32 |
// Element-wise XOR where an absent (None) element acts as the constant 0.
33 | def xorSeq(a: Seq[Option[T]], b: Seq[Option[T]]): Seq[Option[T]] = (a zip b).map { case (ai, bi) => xor(ai, bi) }
34 |
35 | def xor(a: Seq[T], b: Seq[T], c: Seq[T]): Seq[T] = (a zip b zip c).map { case ((ai, bi), ci) => xor(ai, bi, ci) }
36 |
// Primitive 2-input AND; concrete adder implementations supply it.
37 | def and(a: T, b: T)(implicit sourceInfo: SourceInfo): T
38 |
// AND with None treated as 0: result is None unless both operands are present.
39 | def and(a: Option[T], b: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = {
40 | (a, b) match {
41 | case (Some(ai), Some(bi)) => Some(and(ai, bi))
42 | case _ => None
43 | }
44 | }
45 |
46 | def and(a: Seq[T], b: Seq[T]): Seq[T] = (a zip b).map { case (ai, bi) => and(ai, bi) }
47 |
48 | def and3(a: T, b: T, c: T)(implicit sourceInfo: SourceInfo): T = and(a, and(b, c))
49 |
50 | def and3(a: Seq[T], b: Seq[T], c: Seq[T]): Seq[T] = and(a, and(b, c))
51 |
52 | def and(a: Seq[T], b: Seq[T], c: Seq[T]): Seq[T] = (a zip b zip c).map { case ((ai, bi), ci) => and3(ai, bi, ci) }
// The constant 0 of the carrier type (e.g. 0.B in the Bool-based implementation).
53 | def zero: T
54 |
// (a & b & c) ^ d
55 | def and3Xor(a: T, b: T, c: T, d: T): T = xor(and3(a, b, c), d)
56 |
// (a & b & c) ^ d with None treated as 0: if any AND operand is absent the product term vanishes and d passes through.
57 | def and3Xor(a: Option[T], b: Option[T], c: Option[T], d: Option[T]): Option[T] = {
58 | (a, b, c, d) match {
59 | case (Some(ai), Some(bi), Some(ci), Some(di)) => Some(and3Xor(ai, bi, ci, di))
60 | case (_, _, _, Some(di)) => Some(di)
61 | case _ => None
62 | }
63 | }
64 |
65 | def not(a: T)(implicit sourceInfo: SourceInfo): T
66 |
67 | /** [Toffoli](https://en.wikipedia.org/wiki/Toffoli_gate) (CCNOT) gate
68 | *
69 | * Computes the 3rd (non-trivial) output: c ^ (a & b)
70 | *
71 | * @param a
72 | * @param b
73 | * @param c
74 | * @return
75 | * c ^ (a & b)
76 | */
77 | def toffoli(a: T, b: T, c: T)(implicit sourceInfo: SourceInfo): T = xor(c, and(a, b))
78 |
79 | // TODO: final?
80 | // We should really try only overridding the (T,T,T)->T methods instead
// Option-lifted Toffoli: a missing operand acts as 0, so the (a & b) term and/or c drop out accordingly.
81 | def toffoli(a: Option[T], b: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = {
82 | (a, b, c) match {
83 | case (Some(a), Some(b), Some(c)) =>
84 | Some(toffoli(a, b, c))
85 | case (Some(a), Some(b), None) =>
86 | Some(and(a, b))
87 | case (_, _, Some(c)) =>
88 | Some(c)
89 | case _ =>
90 | None
91 | }
92 | }
93 |
// Majority-of-three with None treated as 0: maj(a, b, 0) degenerates to a & b, etc.
94 | def majority(a: Option[T], b: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): Option[T] =
95 | (a, b, c) match {
96 | case (None, _, _) =>
97 | and(b, c)
98 | case (_, None, _) =>
99 | and(a, c)
100 | case (_, _, None) =>
101 | and(a, b)
102 | case (Some(a), Some(b), Some(c)) =>
103 | Some(majority(a, b, c))
104 | }
105 |
106 | // NOTE: a ^ b, b ^ c are NOT independent!!! Cannot be used with e.g., DOM gadget!!!
// maj(a, b, c) computed with a single Toffoli as b ^ ((a^b) & (b^c)).
107 | def majority(a: T, b: T, c: T)(implicit sourceInfo: SourceInfo): T = toffoli(xor(a, b), xor(b, c), b)
108 |
109 | def majority(a: T, b: T, c: Option[T])(implicit sourceInfo: SourceInfo): T =
110 | majority(Some(a), Some(b), c).getOrElse(zero)
111 |
112 | // (p & c) | g <->
// Carry generate: g ^ (p & c); equals g | (p & c) whenever p and g cannot both be 1 (true for p = a^b, g = a&b).
113 | def genG(p: Option[T], g: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): Option[T] =
114 | toffoli(p, c, g)
115 | // and3Xor(p, c, g.map(not), g)
116 |
// Prefix-tree black cell: (P, G) o (Pr, Gr) = (P & Pr, G ^ (P & Gr)).
117 | def blackCell(pg: (Option[T], Option[T]), pgr: (Option[T], Option[T]))(implicit sourceInfo: SourceInfo)
118 | : (Option[T], Option[T]) =
119 | blackCell(pg._1, pg._2, pgr._1, pgr._2)
120 |
121 | def blackCell(p: Option[T], g: Option[T], pr: Option[T], gr: Option[T])(implicit sourceInfo: SourceInfo)
122 | : (Option[T], Option[T]) =
123 | (and(p, pr), genG(p, g, gr))
124 |
// Half adder: sum = a ^ b, carry = a & b.
125 | def halfAdder(a: Option[T], b: Option[T])(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
126 | val c = and(a, b)
127 | val s = xor(a, b)
128 | (s, c)
129 | }
130 |
// Final sum digit of a prefix adder: sum = a ^ b, no carry out.
131 | def pgSum(a: Option[T], b: Option[T]): (Option[T], Option[T]) = {
132 | (xor(a, b), None)
133 | }
134 |
// Full adder: sum = a ^ b ^ cin, carry = maj(a, b, cin).
135 | def fullAdder(a: Option[T], b: Option[T], cin: Option[T])(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
136 | val c = majority(a, b, cin)
137 | val s = xor(a, b, cin)
138 | (s, c)
139 | }
140 |
// Gray cell: a black cell whose propagate output is not needed (only the carry/generate survives).
141 | def grayCell(p: Option[T], g: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) =
142 | (None, genG(p, g, c))
143 |
144 | }
145 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/AdderGraph.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import drawio._
4 | import chisel3.experimental.SourceInfo
5 |
// Edge tags used to label wires of the drawn prefix-network diagram.
6 | trait Edge
7 |
8 | case class P[T](p: Option[T]) extends Edge
9 | case class G[T](g: Option[T]) extends Edge // == carry (generate)
10 | case class PG[T](p: T, g: T) extends Edge
11 |
12 | case object TempNill extends Edge
13 |
14 | object PG {
// Classifies an optional (p, g) pair into P / G / PG / TempNill depending on which halves are present.
15 | def apply[T](pg: (Option[T], Option[T])): Edge = {
16 | pg match {
17 | case (Some(p), Some(g)) => PG(p, g)
18 | case (None, g) => G(g)
19 | case (p, None) => P(p)
20 | case _ => TempNill
21 | }
22 | }
23 | }
24 |
25 | case class AB[T](ab: (Option[T], Option[T])) extends Edge
26 |
// Kinds of cells a prefix-adder construction may instantiate (dispatched on in AdderGraph.mkCell).
27 | trait CellType
28 |
29 | case object HalfAdder extends CellType
30 | case object FullAdder extends CellType
31 | case object PGSum extends CellType
32 | case object GrayCellT extends CellType
33 | case object BlackCellT extends CellType
34 | case object NullIn extends CellType
35 | case object NullOut extends CellType
36 |
// A node of the visualized adder graph, addressed by (level, offset).
37 | trait CellNode {
38 | def level: Int
39 | def offset: Int
40 |
41 | def style: Map[String, String] = Map(
42 | "fillColor" -> "#ffffff",
43 | "strokeColor" -> "#000000",
44 | "rounded" -> "1"
45 | )
46 |
47 | val label: Option[String] = None
48 |
// incomings(0) = predecessor from the right, incomings(1) = predecessor from above.
49 | val incomings = Array.ofDim[(CellNode, Edge)](2)
50 | val outgoings = collection.mutable.ArrayBuffer[CellNode]()
51 |
52 | def connectFromRight(from: CellNode, edge: Edge) = {
53 | incomings(0) = (from, edge)
54 |
from.outgoings += this
55 | }
56 |
57 | def connectFromTop(that: CellNode, edge: Edge) = {
58 | incomings(1) = (that, edge)
59 | that.outgoings += this
60 | }
61 |
62 | def name: String = s"${this.getClass.getSimpleName} [$level,$offset]"
63 | }
64 |
65 | case class BlackCell(level: Int, offset: Int) extends CellNode {
66 | override val style = super.style.updated("fillColor", "#000000")
67 | }
68 | case class GrayCell(level: Int, offset: Int) extends CellNode {
69 | override val style = super.style.updated("fillColor", "#cccccc")
70 | }
71 |
72 | case class HA(level: Int, offset: Int) extends CellNode {
73 | override val style = super.style.updated("fillColor", "#f4f4f4")
74 | override val label: Option[String] = Some("HA")
75 | }
76 | case class FA(level: Int, offset: Int) extends CellNode {
77 |
78 | override val style = super.style.updated("fillColor", "#dce9f1")
79 | override val label: Option[String] = Some("FA")
80 | }
81 | case class RegCell(level: Int, offset: Int) extends CellNode {
82 | override val style = super.style.updated("fillColor", "#FFF2CC") // yellow
83 | }
84 |
85 | case class InCell(level: Int, offset: Int) extends CellNode {
86 | override val style = super.style.updated("rounded", "0")
87 | }
88 | case class OutCell(level: Int, offset: Int) extends CellNode {
89 | override val style = super.style.updated("rounded", "0")
90 | }
91 |
// Sparse matrix of placed cells, grown row-by-row as cells are added.
92 | class Graph {
93 | val cells =
94 | collection.mutable.ArrayBuffer[Map[Int, CellNode]]() // array of rows, each row is a map of offset -> cells
95 |
// Registers `cell` at (cell.level, cell.offset), growing the row list as needed; returns the cell.
96 | def addCell(cell: CellNode) = {
97 | while (cell.level >= cells.size) {
98 | cells += Map()
99 | }
100 | cells(cell.level) += cell.offset -> cell
101 | cell
102 | }
103 |
104 | def cellAt(level: Int, offset: Int): Option[CellNode] = {
105 | if (level < 0 || level >= cells.size) return None
106 | cells(level).get(offset)
107 | }
108 |
// Nearest cell in the same column at any strictly smaller level (scanning upward).
109 | def cellAbove(level: Int, offset: Int): Option[CellNode] = {
110 | if (level < 0 || offset < 0) {
111 | return None
112
| }
113 | (level - 1).to(0, -1).map(cells(_).get(offset)).collectFirst { case Some(value) =>
114 | value
115 | }
116 | }
117 |
// Depth of the graph, not counting a trailing row that holds only OutCells.
118 | def currentDepth: Int = cells.zipWithIndex.reverse.collectFirst {
119 | case (c, i) if c.nonEmpty =>
120 | if (
121 | c.values.exists(p =>
122 | p match {
123 | case _: OutCell => false
124 | case _ => true
125 | }
126 | )
127 | ) i + 1
128 | else i
129 | }.getOrElse(0)
130 |
131 | }
132 |
// Mixin that records every cell created via mkCell/mkBlackCell and can render the prefix network as a drawio diagram.
133 | trait AdderGraph[T] extends Adder[T] {
134 |
// Layout constants for the generated diagram (pixels).
135 | def startX = 10
136 | def startY = 10
137 | def boxWidth = 30
138 | def boxHeight = 20
139 | def horizontalSpacing = 20
140 | def verticalSpacing = boxHeight + 25
141 |
142 | class Grapher(startX: Int, startY: Int, horizontalSpacing: Int, verticalSpacing: Int) {
143 |
144 | def getNodeOption(i: Int, j: Int): Option[Node] = nodes.get((i, j))
145 |
// Creates a drawio node at pixel position (x, y) and indexes it under grid key (i, j); (i, j) must be unused.
146 | def addNodeAt(
147 | i: Int,
148 | j: Int,
149 | x: Int,
150 | y: Int,
151 | width: Int,
152 | height: Int): Node = {
153 | assert(!nodes.contains((i, j)), s"Node at $i, $j already exists")
154 | val node = doc.createNode(
155 | x,
156 | y,
157 | width,
158 | height
159 | )
160 | nodes((i, j)) = node
161 | node
162 | }
163 |
164 | private val doc = new Document()
165 |
166 | def saveDoc(filename: String) = doc.save(filename)
167 |
168 | private val nodes = collection.mutable.HashMap[(Int, Int), Node]()
169 |
170 | def numNodes: Int = nodes.size
171 |
172 | def maxJ: Int = nodes.keys.maxBy(_._2)._2
173 |
// Optional total bit-width; when set, columns are mirrored so bit 0 is drawn rightmost.
174 | private var _n: Option[Int] = None
175 |
176 | def getN = _n
177 |
178 | def nOrMaxJ = _n.getOrElse(maxJ)
179 |
180 | def setN(n: Int) = {
181 | _n = Some(n)
182 | }
183 |
184 | def getNodeAbove(i: Int, j: Int): Option[Node] =
185 | (i to 0 by -1).map(getNodeOption(_, j)).collectFirst { case Some(node) =>
186 | node
187 | }
188 |
// Places a cell box at grid (i+1, j), wiring an edge from the node above in the same column
// and, when inRight >= 0, from the node above column inRight (the "right" operand of the cell).
189 | def addBox(
190 | i: Int,
191 | j: Int,
192 | inRight: Int,
193 | width: Int,
194 | height: Int,
195 | verticalSpacing: Int,
196 | nodeStyle: Map[String, Any],
197 | label: Option[String],
198 | edgeStyle:
Map[String, Any]) = {
199 |
200 | val n = None
201 |
202 | val node = addNodeAt(
203 | i + 1,
204 | j,
205 | startX + n.orElse(getN).map(_ - j).getOrElse(j) * (width + horizontalSpacing),
206 | startY + i * verticalSpacing,
207 | width,
208 | height
209 | ).setStyle(nodeStyle)
210 |
211 | label.foreach(node.setLabel)
212 |
213 | node.setTooltip(s"$i:$j")
214 | getNodeAbove(i, j).foreach { x =>
215 | x.createConnection(
216 | node,
217 | edgeStyle ++ Map(
218 | "exitX" -> 0.25,
219 | "entryX" -> 0.25,
220 | )
221 | )
222 | }
223 |
224 | if (inRight >= 0) {
225 | getNodeAbove(i, inRight).foreach { rightNode =>
226 | val conn = rightNode.createConnection(
227 | node,
228 | edgeStyle ++ Map(
229 | "exitX" -> { if (inRight == j) 0.75 else 0.25 },
230 | "entryX" -> { if (inRight <= j) 0.75 else 0.25 },
231 | )
232 | )
233 |
234 | if (inRight < j) {
235 | // val outs =
236 | // rightNode.outgoingConnections.filter(_.target.y == node.y)
237 | conn.addPoint(
238 | rightNode.x + 0.25 * rightNode.width,
239 | node.y - (verticalSpacing - height) / 2.0
240 | )
241 | }
242 |
243 | }
244 | }
245 | node
246 | }
247 | }
248 |
249 | private lazy val graph = new Graph()
250 |
251 | private lazy val grapher = new Grapher(startX, startY, horizontalSpacing, verticalSpacing)
252 |
253 | def edgeStyle = Map(
254 | "edgeStyle" -> "orthogonalEdgeStyle",
255 | // "curved" -> 0,
256 | "jumpStyle" -> "gap",
257 | "arcSize" -> boxHeight * 2,
258 | "jettySize" -> 5,
259 | "rounded" -> 1,
260 | "orthogonalLoop" -> 1,
261 | "html" -> 1,
262 | "endSize" -> 2,
263 | "exitX" -> 0.5,
264 | "exitY" -> 1,
265 | "exitDx" -> 0,
266 | "exitDy" -> 0,
267 | "entryX" -> 0.5,
268 | "entryY" -> 0,
269 | "entryDx" -> 0,
270 | "entryDy" -> 0
271 | )
272 |
273 | def save(filename: String) = grapher.saveDoc(filename)
274 |
// Cost metrics: counts of each operator cell recorded in the graph.
275 | def numBlackCells = graph.cells
276 | .map(_.values.count { case x =>
277 | x match {
278 | case _: BlackCell => true
279 | case _ => false
280 | }
281 | })
282 | .sum
283 |
284 | def
numGrayCells = graph.cells
285 | .map(_.values.count { case x =>
286 | x match {
287 | case _: GrayCell => true
288 | case _ => false
289 | }
290 | })
291 | .sum
292 |
293 | def setN(n: Int) = grapher.setN(n)
294 |
295 | /// FIXME temporary
296 | private def addBox(
297 | cell: CellNode,
298 | right: Option[(CellNode, Edge)] = None): Node = {
299 | addBox(cell, None, right)
300 | }
301 |
// Records `cell` in the graph, connects it to its top/right predecessors, and draws it.
// In/Out/Reg cells are drawn half-height and nudged toward the row boundary.
302 | private def addBox(
303 | cell: CellNode,
304 | top: Option[(CellNode, Edge)],
305 | right: Option[(CellNode, Edge)]) = {
306 | val height = cell match {
307 | case _: InCell => boxHeight / 2
308 | // case _ if cell.level == 0 => boxHeight / 2
309 | case _: OutCell => boxHeight / 2
310 | case _: RegCell => boxHeight / 2
311 | case _ => boxHeight
312 | }
313 | graph.addCell(cell)
314 | top.orElse {
315 | graph.cellAbove(cell.level, cell.offset).map(_ -> PG((None, None)))
316 | }.foreach { case (topCell, edge) => cell.connectFromTop(topCell, edge) }
317 | right.foreach { case (rightCell, edge) => cell.connectFromRight(rightCell, edge) }
318 |
319 | val node = grapher.addBox(
320 | cell.level,
321 | cell.offset,
322 | right.map(_._1.offset).getOrElse(-1),
323 | boxWidth,
324 | height,
325 | verticalSpacing = verticalSpacing,
326 | nodeStyle = cell.style,
327 | label = cell.label,
328 | edgeStyle = edgeStyle,
329 | )
330 | cell match {
331 | case _: InCell => node.setY(node.y + boxHeight / 2)
332 | // case _ if cell.level == 0 => node.setY(node.y + boxHeight / 2)
333 | case _: OutCell => node.setY(node.y - boxHeight / 2)
334 | case _ => }
335 |
336 | node
337 | }
338 |
// Builds a black (or degenerate gray/pass-through) cell combining column j with right operand
// column jr at level i, while recording the matching diagram node. Missing (None) P/G halves
// collapse the cell: no right operand -> pass g through; no left P -> g already final.
339 | def mkBlackCell(
340 | pg: (Option[T], Option[T]),
341 | pgr: (Option[T], Option[T]),
342 | i: Int,
343 | j: Int,
344 | jr: Int
345 | )(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
346 | (pg, pgr) match {
347 | case ((_, g), (None, None)) =>
348 | (None, g)
349 | case ((None, g), (_, _)) =>
350 | (None, g)
351 | case (_, (None, gr)) =>
352 | addBox(GrayCell(i, j), graph.cellAbove(i, j).map(_
-> PG(pg)), graph.cellAbove(i, jr).map(_ -> G(gr)))
353 | grayCell(pg._1, pg._2, gr)
354 | case _ =>
355 | addBox(BlackCell(i, j), graph.cellAbove(i, j).map(_ -> PG(pg)), graph.cellAbove(i, jr).map(_ -> PG(pgr)))
356 | blackCell(pg, pgr)
357 | }
358 | }
359 |
360 | def mkCell[C <: CellType](
361 | ct: C,
362 | pg: (Option[T], Option[T]),
363 | gr: Option[T],
364 | i: Int,
365 | j: Int,
366 | jr: Int = -1
367 | )(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) =
368 | mkCell(ct, pg, (None, gr), i, j, jr)
369 |
// Central factory: computes the requested cell's (P, G) outputs and records it in the diagram.
370 | def mkCell[C <: CellType](
371 | ct: C,
372 | pg: (Option[T], Option[T]),
373 | pgr: (Option[T], Option[T]),
374 | i: Int,
375 | j: Int,
376 | jr: Int
377 | )(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
378 | ct match {
379 | case NullIn =>
380 | addBox(InCell(i, j))
381 | pg
382 |
383 | case NullOut =>
384 | addBox(OutCell(i, j))
385 | pg
386 |
387 | case PGSum =>
388 | addBox(OutCell(i, j))
389 | pgSum(pg._1, pgr._2)
390 |
391 | case HalfAdder =>
392 | addBox(HA(i, j))
393 | halfAdder(pg._1, pg._2)
394 |
// A full adder without a carry-in degenerates to a half adder.
395 | case FullAdder if pgr._2.isEmpty =>
396 | mkCell(HalfAdder, pg, None, i, j, jr)
397 |
398 | case FullAdder =>
399 | addBox(FA(i, j), graph.cellAbove(i, j).map(_ -> AB(pg)), graph.cellAt(i - 1, jr).map(_ -> G(pg._2)))
400 | fullAdder(pg._1, pg._2, pgr._2)
401 |
402 | case GrayCellT =>
403 | mkBlackCell(pg, (None, pgr._2), i, j, jr)
404 |
405 | case BlackCellT =>
406 | mkBlackCell(pg, pgr, i, j, jr)
407 |
408 | case _ =>
409 | throw new IllegalArgumentException(s"Unsupported cell type: $ct")
410 | }
411 | }
412 |
413 | def mkCell(
414 | pg: (Option[T], Option[T]),
415 | pgr: (Option[T], Option[T]),
416 | i: Int,
417 | j: Int,
418 | jr: Int
419 | )(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
420 |
421 | mkBlackCell(pg, pgr, i, j, jr)
422 | }
423 |
424 | def currentDepth: Int = graph.currentDepth
425 | }
426 |
--------------------------------------------------------------------------------
/arithmetics/src/adders/BitsAdderModule.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import chisel3._
4 | import chisel3.util.simpleClassName
5 | import chisel3.experimental.skipPrefix
6 |
// Common Module shell for Bool-based adders: declares the a/b/cin/sum IO,
// drives sum from the mixed-in add(...), and (optionally) self-checks against chisel's +&.
7 | abstract class BitsAdderModule extends Module with BitsAdder {
8 | def withAsserts: Boolean = true
9 | def width: Int
10 | def withCin: Boolean
11 |
// cin is only materialized when withCin; sum is width+1 bits to hold the carry-out.
12 | lazy val io = IO(new Bundle {
13 | val a = Input(UInt(width.W))
14 | val b = Input(UInt(width.W))
15 | val cin = Option.when(withCin)(Input(Bool()))
16 | val sum = Output(UInt((width + 1).W))
17 | })
18 |
// Module name like "KSAdder32"; appends "Adder" only when the class name doesn't already end in it.
19 | override def desiredName: String = {
20 | val clzName = simpleClassName(this.getClass)
21 | clzName + (if (clzName.toLowerCase.endsWith("adder")) "" else "Adder") + width
22 | }
23 |
24 | io.sum :#= skipPrefix { add(io.a, io.b, io.cin) }
25 |
// Simulation-time self check: the prefix network must agree with a reference widening add.
26 | if (withAsserts) {
27 | assert(io.sum === io.a +& io.b + io.cin.getOrElse(0.B))
28 | }
29 |
30 | }
31 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/BitsPrefixAdder.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import chisel3._
4 | import chisel3.experimental.SourceInfo
5 | import chisel3.experimental.skipPrefix
6 |
// Adder[T] instantiated at T = Bool: the abstract gate primitives become plain chisel Bool ops.
7 | trait BitsAdder extends Adder[Bool] {
8 |
9 | def zero = 0.B
10 |
// UInt front-end: unpacks to per-bit Bools, runs the bit-level add, and repacks (width n + carry).
11 | def add(a: UInt, b: UInt, cin: Option[Bool]): UInt = skipPrefix {
12 | require(a.widthKnown && b.widthKnown, "Inputs must have known widths")
13 | val n = a.getWidth
14 | require(n == b.getWidth, "Inputs must have the same width")
15 |
16 | VecInit(add(a.asBools, b.asBools, cin)).asUInt
17 | }
18 |
19 | override def xor(a: Bool, b: Bool)(implicit sourceInfo: SourceInfo): Bool = a ^ b
20 | override def and(a: Bool, b: Bool)(implicit sourceInfo: SourceInfo): Bool = a & b
21 | override def and3(a: Bool, b: Bool, c: Bool)(implicit sourceInfo: SourceInfo): Bool = a & b & c
22 | override def not(a: 
Bool)(implicit sourceInfo: SourceInfo): Bool = ~a
23 |
24 | }
25 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/BrentKung.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import chisel3._
4 | import chisel3.experimental.skipPrefix
5 |
// Brent-Kung prefix network: a forward "reduction" tree over the odd positions followed by a
// backward "spreading" phase for the even positions, giving O(2 log n) depth with few cells.
6 | trait BrentKung[T <: Data] extends PrefixAdder[T] { self: Module =>
7 |
8 | override def buildPrefixAdder(pg0: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = {
// Each (P, G) pair is threaded together with its logic depth (initially 0); depths are dropped at the end.
9 | buildPrefixAdderRec(pg0.map((_, 0)), 0).unzip._1
10 | }
11 |
// Recursive construction over (PG, depth) pairs. `level` selects the stride 2^level;
// cell column indices are mapped back to bit positions via two2Layer * (j + 1) - 1.
12 | def buildPrefixAdderRec(prev: Seq[((Option[T], Option[T]), Int)], level: Int): Seq[((Option[T], Option[T]), Int)] = {
13 |
14 | if (prev.length == 1) {
15 | return prev
16 | }
17 | val two2Layer = 1 << (level)
// Forward phase: combine every odd element with its even neighbor to the right.
18 | val topLayerOdds = prev.indices.collect {
19 | case j if j % 2 == 1 =>
20 | val ((left, lDepth), (right, rDepth)) = (prev(j), prev(j - 1))
21 | val l = lDepth.max(rDepth) + 1
// For j == 1 the right operand's P is dropped: that cell is a gray cell producing only G.
22 | (mkCell(left, (if (j == 1) None else right._1, right._2), l, two2Layer * (j + 1) - 1, two2Layer * j - 1), l)
23 | }
24 |
// Recurse on the halved sequence of combined odd positions.
25 | val midLayerOdds = skipPrefix { buildPrefixAdderRec(topLayerOdds, level + 1) }
26 |
// Backward phase: even positions (except 0) pick up the resolved prefix of the odd element below.
27 | prev.indices.collect {
28 | case 0 => prev.head
29 | case j if j % 2 == 0 =>
30 | val ((left, lDepth), (right, rDepth)) = (prev(j), midLayerOdds(j / 2 - 1))
31 | val l = lDepth.max(rDepth) + 1
32 | mkCell(
33 | left,
34 | (None, right._2),
35 | l,
36 | two2Layer * (j + 1) - 1, // <- actual `j`
37 | two2Layer * j - 1
38 | ) -> l
39 | case i => midLayerOdds((i - 1) / 2) // Odd
40 | }
41 | }
42 | }
43 |
44 | class BKAdder(val width: Int, val withCin: Boolean) extends BitsAdderModule with BrentKung[Bool] {
// Convenience constructor: no carry-in.
45 | def this(width: Int) = this(width, false)
46 | }
47 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/CarryLookAhead.scala: --------------------------------------------------------------------------------
1 | //
package adders
2 |
// NOTE: this whole file is commented out — an unfinished block carry-lookahead adder kept for reference.
// It references a `carryLookaheadAdderBlock` module that is never defined here.
3 | // import chisel3._
4 |
5 | // class CarryLookAhead[T <: Data](blockSize: Int) extends Adder[T] {
6 |
7 | // def generate(g: Seq[T], p: Seq[T]): T = {
8 | // // p and g should be of equal size
9 | // assert(g.length == p.length)
10 | // g.indices.map(i => p.drop(i + 1).fold(g(i))(and)).reduce(xor(_, _))
11 | // }
12 |
13 | // def blockGen0(blockSize: Int, p: Seq[T], g: Seq[T], c: T): (T, T, Seq[T]) = {
14 |
15 | // val p_ = p.reduce(and(_, _))
16 | // val g_ = generate(g, p)
17 | // val cg = c +: g
18 | // val pp = zero +: p // dummy first element
19 | // val c_ = c +: (1 until blockSize).map(i => generate(cg.slice(0, i + 1), pp.slice(0, i + 1)))
20 | // (p_, g_, c_)
21 | // }
22 |
23 | // def blockGenerator(p: Seq[T], g: Seq[T], cin: T): (T, T, Seq[T]) = { // returns p, g, c[]
24 | // assert(
25 | // g.length == p.length && g.nonEmpty,
26 | // s"g.length:${g.length} ==? p.length:${p.length} &&? g.nonEmpty:${g.nonEmpty}"
27 | // )
28 |
29 | // if (g.length == 1) {
30 | // return (p.head, g.head, Seq(cin))
31 | // }
32 |
33 |
34 | // val bs = if (g.length <= blockSize) g.length else blockSize
35 |
36 |
37 |
38 | // def chunk(i: Int, s: Seq[Bool]) = {
39 | // val chunkSize = g.length / blockSize
40 | // s.slice(i * chunkSize, (i + 1) * chunkSize).padTo(chunkSize, 0.B)
41 | // }
42 |
43 | // var exportedCarries = Seq.empty[Bool]
44 |
45 | // for (i <- 0 until blockSize) {
46 |
47 | // val (p_, g_, c_) = blockGen0(bs, chunk(i, p), chunk(i, g), cin)
48 |
49 | // val (pN, gN, blockExportedCarries) =
50 | // blockGenerator(chunk(i, p), chunk(i, g), carryLookaheadAdderBlock.io.out.c(i))
51 | // carryLookaheadAdderBlock.io.in.p(i) := pN
52 | // carryLookaheadAdderBlock.io.in.g(i) := gN
53 |
54 | // exportedCarries = exportedCarries ++ blockExportedCarries
55 | // }
56 |
57 | // (carryLookaheadAdderBlock.io.out.p, carryLookaheadAdderBlock.io.out.g, VecInit(exportedCarries))
58 | // }
59 |
60 | // override def add(x: Seq[T], y: T, cin: Bool): T = {
61
| // val w = Math.min(x.getWidth, y.getWidth)
62 |
63 | // assert(x.getWidth == y.getWidth) // TODO
64 |
65 | // val p = Wire(Vec(w, Bool()))
66 | // val g = Wire(Vec(w, Bool()))
67 | // val s = Wire(Vec(w + 1, Bool()))
68 |
69 | // val (pOut, gOut, carries) = blockGenerator(p, g, cin)
70 |
71 | // for (i <- 0 until w) {
72 | // p(i) := x(i) ^ y(i) // or: transfer
73 | // g(i) := x(i) & y(i)
74 | // s(i) := p(i) ^ carries(i)
75 | // printf(p"x($i)=${x(i)} y($i)=${y(i)} p($i)=${p(i)}\n")
76 | // }
77 |
78 | // s(w) := gOut | (carries.last & pOut)
79 |
80 | // s.asUInt.asTypeOf(x.pad(math.max(x.getWidth, y.getWidth) + 1))
81 | // }
82 | // }
83 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/FlaggedSklansky.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import chisel3._
4 | import chisel3.util._
5 |
6 | class FlaggedSklanskyAdder(val width: Int, val withCin: Boolean = false)
7 | extends BitsAdderModule
8 | with FlaggedSklansky[Bool] {}
9 |
// Sklansky (divide-and-conquer) prefix structure built directly on (P, G) sequences.
10 | trait FlaggedSklansky[T] extends AdderGraph[T] {
11 |
12 | /** @param a
13 | * @param b
14 | * @return
15 | * (P, G)
16 | */
17 | def init(a: Seq[T], b: Seq[T]): (Seq[Option[T]], Seq[Option[T]]) = (xor(a, b).map(Some(_)), and(a, b).map(Some(_)))
18 |
// Stage i of the Sklansky network: every other 2^i-wide group combines with the
// rightmost element of the group to its right; all other columns pass through.
19 | def nextLayer(p: Seq[Option[T]], g: Seq[Option[T]], i: Int): (Seq[Option[T]], Seq[Option[T]]) = {
20 |
21 | val l = 1 << i
22 | p.zip(g)
23 | .zipWithIndex
24 | .map { case ((pj, gj), j) =>
25 | // checks whether a component is needed, else propagates the signals to the next stage
26 |
27 | if ((j / l) % 2 == 1) { // operators every other 2**i column in stage i
28 | val jj = j - (j % l) - 1
29 | val (pjj, gjj) = (p(jj), g(jj))
30 | // Black cells only
// NOTE(review): blackCell is declared as blackCell(p, g, pr, gr); this call passes (pj, gj, gjj, pjj),
// i.e. gjj/pjj look swapped relative to the declared (pr, gr) order — verify against simulation.
31 | blackCell(pj, gj, gjj, pjj)
32 | } else { // No operator, propagate the P and G signals to the next stage
33 | (pj, gj)
34 | }
35 | }
36 | .unzip
37 | }
38 |
39 | override def add(a: Seq[T], b: 
Seq[T], cin: Option[T]): Seq[T] = {
40 | val n = a.length
41 | require(n == b.length, "Inputs must have the same width")
42 | val (p0, g0) = init(a, b)
// Run all log2(n) Sklansky stages, then fold the external carry-in into every group generate.
43 | val (pk, gk) = (0 until log2Ceil(n))
44 | .foldLeft((p0, g0)) { case ((pi, gi), i) =>
45 | nextLayer(pi, gi, i)
46 | }
47 | val gBar = pk.zip(gk).map { case (pj, gj) => genG(pj, gj, cin) }
48 |
// Sum bits: s(i) = carry_into(i) ^ p0(i); cin prepended as the carry into bit 0.
49 | xorSeq(cin +: gBar, p0).map(_.getOrElse(zero)) /// }.getOrElse(p0.head +: xor(gBar, p0.tail))
50 | }
51 | }
52 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/KoggeStone.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 | import chisel3._
4 |
5 | import scala.annotation.tailrec
6 |
// Kogge-Stone prefix network: minimal logic depth (log2 n levels), one cell per column per level.
7 | trait KoggeStone[T <: Data] extends PrefixAdder[T] with AdderGraph[T] { self: Module =>
8 |
9 | override def buildPrefixAdder(in: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = {
10 | println(s"KoggeStone of width ${in.length}")
11 |
// At level k every column j >= 2^(k-1) combines with column j - 2^(k-1);
// columns whose right operand is column 0..2^(k-1)-1 need only the carry, hence gray cells.
12 | @tailrec
13 | def genLevels(in: Seq[(Option[T], Option[T])], level: Int = 1): Seq[(Option[T], Option[T])] = {
14 | val twoToLevel = 1 << (level - 1)
15 | if (twoToLevel < in.length)
16 | genLevels(
17 | in.take(twoToLevel) ++ in.drop(twoToLevel).zip(in).zipWithIndex.map { case ((left, right), j) =>
18 | if (j < twoToLevel) println(s"level:$level left=${j + twoToLevel} right=${j}")
19 | mkCell(if (j < twoToLevel) GrayCellT else BlackCellT, left, right, level, j + twoToLevel, j)
20 | },
21 | level + 1
22 | )
23 | else
24 | in
25 | }
26 | genLevels(in)
27 | }
28 | }
29 |
30 | class KSAdder(val width: Int, val withCin: Boolean = false) extends BitsAdderModule with KoggeStone[Bool] {
// Convenience constructor: no carry-in.
31 | def this(width: Int) = this(width, false)
32 | }
33 |
-------------------------------------------------------------------------------- /arithmetics/src/adders/LadnerFischer.scala: --------------------------------------------------------------------------------
1 | package adders
2 |
3 |
import chisel3._ 4 | import chisel3.util.log2Ceil 5 | import chisel3.experimental.SourceInfo 6 | 7 | trait LadnerFischer[T <: Data] extends PrefixAdder[T] { self: Module => 8 | 9 | def filler1(pg: (Option[T], Option[T]), depth: Int, column: Int, msg:String = "")(implicit sourceInfo: SourceInfo) 10 | : (Option[T], Option[T]) = { 11 | print(s"filler1 depth=$depth column=$column $msg\n") 12 | // (pg._1, pg._2) 13 | filler(pg) 14 | } 15 | 16 | override def buildPrefixAdder(pg: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = { 17 | val n = pg.length 18 | val d = log2Ceil(n) 19 | val ll = (0 until d) 20 | .foldLeft(pg) { case (prevPgs, i) => 21 | nextLayer(prevPgs, i) 22 | } 23 | filler1(ll.head, d + 1, 0) +: ll.tail 24 | // ll.head +: ll.tail 25 | .grouped(2) 26 | .zipWithIndex 27 | .flatMap { 28 | case (Seq(pg_r, pg_l), i) => 29 | Seq(filler1(pg_r, d + 1, 2 * i + 1, "xx"), mkCell(GrayCellT, pg_l, pg_r, d + 1, 2 * i + 2, 2 * i + 1)) 30 | case (s, _) => s //.map(filler1(_, d + 1, 2 * i + 2)) 31 | } 32 | .toSeq 33 | 34 | } 35 | 36 | def nextLayer(prev: Seq[(Option[T], Option[T])], level: Int): Seq[(Option[T], Option[T])] = { 37 | val skip = 1 << level 38 | prev.grouped(2 * skip).zipWithIndex.foldLeft(Seq.empty[(Option[T], Option[T])]) { case (acc, (grp, grpIndex)) => 39 | acc ++ grp.take(skip).zipWithIndex.map { case (pg, i) => 40 | filler1(pg, level + 1, i + acc.length) 41 | } ++ (skip until grp.size).map { k => 42 | val j = grpIndex * 2 * skip + k 43 | val jj = grpIndex * 2 * skip + skip - 1 44 | if (k % 2 == 1) 45 | mkCell(if (grpIndex > 0) BlackCellT else GrayCellT, grp(k), grp(skip - 1), level + 1, j, jj) 46 | else filler1(grp(k), level + 1, j) 47 | } 48 | } 49 | } 50 | } 51 | 52 | class LFAdder(val width: Int, val withCin: Boolean) extends BitsAdderModule with LadnerFischer[Bool] { 53 | def this(width: Int) = this(width, false) 54 | } 55 | -------------------------------------------------------------------------------- 
/arithmetics/src/adders/PipelinableAdder.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | import chisel3._ 4 | import chisel3.util._ 5 | 6 | trait PipelinableAdder[T <: Data] extends Adder[T] { 7 | case class PGWithDelay(p: Option[T], pDelay: Int, g: Option[T], gDelay: Int) { 8 | def toPG: (Option[T], Option[T]) = (p, g) 9 | def delayed(delay: Int): PGWithDelay = 10 | PGWithDelay(p.map(ShiftRegister(_, delay)), pDelay + delay, g.map(ShiftRegister(_, delay)), gDelay + delay) 11 | } 12 | 13 | def buildLayer(pgs: Seq[PGWithDelay], layer: Int): Seq[PGWithDelay] 14 | } 15 | 16 | trait PipelinedSklanskyAdder[T <: Data] extends PrefixAdder[T] with PipelinableAdder[T] { self: Module => 17 | 18 | override def buildPrefixAdder(pg: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = { 19 | val n = pg.length 20 | (0 until log2Ceil(n)) 21 | .foldLeft(pg.map { case (p, g) => PGWithDelay(p, 0, g, 0) }) { case (prevPgs, i) => 22 | buildLayer(prevPgs, i) 23 | } 24 | .map { case PGWithDelay(p, _, g, _) => (p, g) } 25 | } 26 | 27 | val p_gr_delay = 1 28 | val p_pr_delay = 1 29 | 30 | def getDelays(cellType: CellType, pDelay: Int, gDelay: Int, pRightDelay: Int, gRightDelay: Int): (Int, Int) = { 31 | val pOutDelay = pDelay.max(pRightDelay) + p_pr_delay 32 | val gOutDelay = (pDelay.max(gRightDelay) + p_gr_delay).max(gDelay) 33 | (pOutDelay, gOutDelay) 34 | } 35 | 36 | def mkCellWithDelay( 37 | cellType: CellType, 38 | pgWD: PGWithDelay, 39 | pgRightWD: PGWithDelay, 40 | depth: Int, 41 | offset: Int, 42 | rightOffset: Int): PGWithDelay = { 43 | 44 | val PGWithDelay(p, pDelay, g, gDelay) = pgWD 45 | val PGWithDelay(pRight, pRightDelay, gRight, gRightDelay) = pgRightWD 46 | 47 | val (p_, g_) = mkCell(cellType, (p, g), (pRight, gRight), depth, offset, rightOffset) 48 | val (pDelay_, gDelay_) = getDelays(cellType, pDelay, gDelay, pRightDelay, gRightDelay) 49 | PGWithDelay(p_, pDelay_, g_, gDelay_) 50 | } 51 | 52 | def 
buildLayer(pgs: Seq[PGWithDelay], layer: Int): Seq[PGWithDelay] = { 53 | pgs.zipWithIndex.map { case (x, j) => 54 | val ll = j >> layer 55 | if ((ll & 1) == 1) { 56 | val jj = (ll << layer) - 1 // j - (j % l) - 1 57 | mkCellWithDelay(if (ll == 1) GrayCellT else BlackCellT, x, pgs(jj), layer + 1, j, jj) 58 | } else { 59 | x.delayed(p_gr_delay) 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /arithmetics/src/adders/PrefixAdder.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | import chisel3._ 4 | import chisel3.experimental.skipPrefix 5 | import chisel3.experimental.SourceInfo 6 | 7 | trait PrefixAdderBase[T] extends Adder[T] { 8 | 9 | def buildPrefixAdder(firstLayer: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] 10 | 11 | def lastLayer(pgIn: Seq[(Option[T], Option[T])], pgs: Seq[(Option[T], Option[T])]): Seq[T] 12 | 13 | def initLayer(x: Seq[T], y: Seq[T], cin: Option[T]): Seq[(Option[T], Option[T])] 14 | } 15 | 16 | trait PrefixAdder[T <: Data] extends AdderGraph[T] with PrefixAdderBase[T] { self: Module => 17 | 18 | def filler(pg: (Option[T], Option[T]))(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = 19 | (filler(pg._1), filler(pg._2)) 20 | 21 | def lastLayer(p0g0: Seq[(Option[T], Option[T])], lastPg: Seq[(Option[T], Option[T])]): Seq[T] = { 22 | val gk = lastPg.unzip._2 23 | 24 | val depth = currentDepth 25 | 26 | ((None +: gk.init).zip(p0g0.zipWithIndex).map { case (ci, (pgi, j)) => 27 | mkCell(PGSum, pgi, ci, depth, j)._1 28 | } :+ gk.last).map(_.getOrElse(zero)) 29 | 30 | } 31 | 32 | def initLayer(x: Seq[T], y: Seq[T], cin: Option[T]): Seq[(Option[T], Option[T])] = { 33 | println("initLayer --- ") 34 | (mkCell(FullAdder, (x.headOption, y.headOption), cin, 0, 0) +: (x.tail).zip(y.tail).zipWithIndex.map { 35 | case ((xi, yi), j) => 36 | mkCell(HalfAdder, (Some(xi), Some(yi)), None, 0, j + 1) 37 | 
}).zipWithIndex.map { case ((p, g), i) => 38 | ( 39 | p.map(p => WireDefault(p.suggestName(f"p0_$i")).suggestName(f"p0_$i")), 40 | g.map(g => WireDefault(g.suggestName(f"g0_$i")).suggestName(f"g0_$i")) 41 | ) 42 | } 43 | 44 | } 45 | 46 | override def mkCell[C <: CellType]( 47 | ct: C, 48 | pg: (Option[T], Option[T]), 49 | pgr: (Option[T], Option[T]), 50 | i: Int, 51 | j: Int, 52 | jr: Int 53 | )(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = { 54 | val (pi, gi) = super.mkCell(ct, pg, pgr, i, j, jr) 55 | 56 | pi.foreach(_.suggestName(s"p${i}_${j}")) 57 | gi.foreach(_.suggestName(s"g${i}_${j}")) 58 | 59 | ( 60 | pi.map(_.suggestName(s"p${i}_${j}")).map(WireDefault(_).suggestName(s"p${i}_${j}")), 61 | gi.map(_.suggestName(s"g${i}_${j}")).map(WireDefault(_).suggestName(s"g${i}_${j}")) 62 | ) 63 | } 64 | 65 | override def add(x: Seq[T], y: Seq[T], cin: Option[T] = None): Seq[T] = { 66 | // x and y should be of equal size 67 | assert(x.length == y.length, s"widths must be the same! 
width of x: ${x.length}, width of y: ${y.length} ") 68 | val n = x.length 69 | setN(n + 1) 70 | 71 | val pg0 = { initLayer(x, y, cin) }.zipWithIndex.map { case ((p, g), i) => 72 | ( 73 | p.map(p => WireDefault(p.suggestName(f"p0_$i")).suggestName(f"p0_$i")), 74 | g.map(g => WireDefault(g.suggestName(f"g0_$i")).suggestName(f"g0_$i")) 75 | ) 76 | } 77 | val sum = skipPrefix { lastLayer(pg0, buildPrefixAdder(pg0)) }.zipWithIndex.map { case (si, i) => 78 | WireDefault(si.suggestName(f"sum$i")).suggestName(f"sum$i") 79 | } 80 | 81 | println( 82 | s"depth: ${currentDepth} blackCells: ${numBlackCells} grayCells: ${numGrayCells} total: ${numBlackCells + numGrayCells}" 83 | ) 84 | 85 | save(s"${desiredName}.drawio") 86 | sum 87 | } 88 | 89 | } 90 | -------------------------------------------------------------------------------- /arithmetics/src/adders/RippleCarry.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | import chisel3._ 4 | 5 | trait RippleCarry[T <: Data] extends PrefixAdder[T] { self: Module => 6 | override def initLayer(x: Seq[T], y: Seq[T], cin: Option[T]): Seq[(Option[T], Option[T])] = { 7 | (None, cin) +: x.zip(y).zipWithIndex.map { case ((xi, yi), j) => 8 | mkCell(NullIn, Some(xi) -> Some(yi), None, 0, j) 9 | } 10 | } 11 | 12 | def nextLayer(pg: Seq[(Option[T], Option[T])], i: Int): Seq[(Option[T], Option[T])] = { 13 | pg.zipWithIndex.map { case (pgi, j) => 14 | if (j == i) 15 | mkCell(FullAdder, pgi, pg(j - 1), i, j - 1, j - 2) 16 | else 17 | pgi 18 | } 19 | } 20 | override def buildPrefixAdder(firstLayer: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = { 21 | (1 to firstLayer.length) 22 | .foldLeft(firstLayer) { case (prevPgs, i) => 23 | nextLayer(prevPgs, i) 24 | } 25 | .tail 26 | } 27 | 28 | def lastLayerOption(pg0: Seq[(Option[T], Option[T])], lastPg: Seq[(Option[T], Option[T])]): Seq[Option[T]] = { 29 | val depth = currentDepth 30 | ((lastPg.zipWithIndex).map { case (pg, j) => 
31 | mkCell(NullOut, pg, None, depth, j)._1 32 | } :+ lastPg.last._2) 33 | } 34 | 35 | override def lastLayer(pg0: Seq[(Option[T], Option[T])], lastPg: Seq[(Option[T], Option[T])]): Seq[T] = { 36 | lastLayerOption(pg0, lastPg).map(_.getOrElse(zero)) 37 | } 38 | } 39 | 40 | class RCAdder(val width: Int, val withCin: Boolean) extends BitsAdderModule with RippleCarry[Bool] { 41 | def this(width: Int) = this(width, false) 42 | } 43 | -------------------------------------------------------------------------------- /arithmetics/src/adders/Sklansky.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | import chisel3._ 4 | import chisel3.util._ 5 | 6 | trait SklanskyAdder[T <: Data] extends PrefixAdder[T] { self: Module => 7 | 8 | override def buildPrefixAdder(pg: Seq[(Option[T], Option[T])]): Seq[(Option[T], Option[T])] = { 9 | val n = pg.length 10 | (0 until log2Ceil(n)) 11 | .foldLeft(pg) { case (prevPgs, i) => 12 | buildLayer(prevPgs, i) 13 | } 14 | } 15 | 16 | def buildLayer(pgs: Seq[(Option[T], Option[T])], i: Int): Seq[(Option[T], Option[T])] = { 17 | pgs.zipWithIndex.map { case (pg, j) => 18 | val ll = j >> i 19 | if ((ll & 1) == 1) { 20 | val jj = (ll << i) - 1 // j - (j % l) - 1 21 | mkCell(if (ll == 1) GrayCellT else BlackCellT, pg, pgs(jj), i + 1, j, jj) 22 | } else { 23 | pg 24 | } 25 | } 26 | } 27 | } 28 | 29 | class SKAdder(val width: Int, val withCin: Boolean) extends BitsAdderModule with SklanskyAdder[Bool] { 30 | def this(width: Int) = this(width, false) 31 | 32 | } 33 | -------------------------------------------------------------------------------- /arithmetics/src/adders/drawio.scala: -------------------------------------------------------------------------------- 1 | package adders 2 | 3 | object drawio { 4 | 5 | import org.nasdanika.{drawio => nas} 6 | 7 | import java.nio.file.Files 8 | import scala.jdk.CollectionConverters._ 9 | 10 | class Element[E <: nas.Element](val element: E) { 11 | 
12 | def children = element.getChildren().asScala.map { case e: nas.Element => 13 | new Element(e) {} 14 | } 15 | } 16 | 17 | class LinkTarget[E <: nas.LinkTarget](element: E) extends Element(element) {} 18 | 19 | class ModelElement[E <: nas.ModelElement](element: E) extends Element(element) { 20 | def setLabel(label: String): this.type = { element.setLabel(label); this } 21 | def label: String = element.getLabel() 22 | def getLink() = element.getLink() 23 | def setLink(link: String): this.type = { element.setLink(link); this } 24 | def getTooltip() = element.getTooltip() 25 | def setTooltip(tooltip: String): this.type = { element.setTooltip(tooltip); this } 26 | 27 | def getParent() = element.getParent() 28 | 29 | def getPropertyNames() = element.getPropertyNames().asScala 30 | def getProperty(key: String) = element.getProperty(key) 31 | def setProperty(key: String, value: String): this.type = { element.setProperty(key, value); this } 32 | 33 | def setStyle(key: String, value: String): this.type = { element.getStyle().asScala(key) = value; this } 34 | 35 | def setStyle(style: Map[String, Any]): this.type = { 36 | element.getStyle().asScala ++= style.map { case (k, v) => k -> v.toString }; this 37 | } 38 | def setStyle(style: (String, Any)*): this.type = { 39 | element.getStyle().asScala ++= style.map { case (k, v) => k -> v.toString }; this 40 | } 41 | 42 | def setStyle(otherEl: nas.ModelElement): this.type = { 43 | element.getStyle().asScala.addAll(otherEl.getStyle().asScala) 44 | this 45 | } 46 | 47 | def isVisible: Boolean = element.isVisible() 48 | def setVisible(visible: Boolean = true): this.type = { element.setVisible(visible); this } 49 | 50 | def addTag(tag: String): this.type = { element.getTags.add(tag); this } 51 | 52 | } 53 | 54 | abstract class LayerElement[E <: nas.LayerElement](element: E) extends ModelElement(element) { 55 | lazy val layer = new Layer(element.getLayer()) 56 | } 57 | 58 | case class Connection(con: nas.Connection) extends 
LayerElement(con) { 59 | def source = Node(con.getSource()) 60 | def target = Node(con.getTarget()) 61 | 62 | def addPoint(x: Double, y: Double) = { 63 | 64 | val connEl: org.w3c.dom.Element = con.getElement() 65 | 66 | val mxGeometry = connEl.getFirstChild() 67 | assert(mxGeometry.getNodeName() == "mxGeometry") 68 | 69 | val geomChildren = mxGeometry.getChildNodes() 70 | val doc = connEl.getOwnerDocument() 71 | val array = (0 until geomChildren.getLength()) 72 | .map(geomChildren.item(_)) 73 | .collectFirst { 74 | case child: org.w3c.dom.Element if child.getNodeName() == "Array" && child.getAttribute("as") == "points" => 75 | child 76 | } 77 | .getOrElse { 78 | val array = doc.createElement("Array") 79 | mxGeometry.appendChild(array) 80 | array.setAttribute("as", "points") 81 | array 82 | } 83 | 84 | array.appendChild { 85 | val point = doc.createElement("mxPoint") 86 | point.setAttribute("x", x.toString) 87 | point.setAttribute("y", y.toString) 88 | point 89 | } 90 | 91 | } 92 | } 93 | 94 | case class Page(page: nas.Page) extends LinkTarget(page) { 95 | 96 | def model = page.getModel() 97 | def name = page.getName() 98 | def setName(name: String): this.type = { page.setName(name); this } 99 | def id = page.getId() 100 | def document = page.getDocument() 101 | 102 | def root = model.getRoot() 103 | 104 | def createLayer(): Layer = new Layer(root.createLayer()) 105 | 106 | def layers = root.getLayers().asScala.map(new Layer(_)) 107 | 108 | private var _activeLayer: Option[Layer] = None 109 | 110 | def setActiveLayer(layer: Layer): this.type = { _activeLayer = Some(layer); this } 111 | 112 | def activeLayer = _activeLayer.getOrElse { 113 | val layer = layers.headOption.getOrElse(createLayer()) 114 | _activeLayer = Some(layer) 115 | layer 116 | } 117 | 118 | } 119 | 120 | case class Node(node: nas.Node) extends LayerElement(node) { 121 | 122 | def setBounds(x: Double, y: Double, width: Double, height: Double): this.type = { 123 | geometry.setBounds(x, y, width, 
height); this 124 | } 125 | 126 | def x = geometry.getX() 127 | def y = geometry.getY() 128 | 129 | def setX(x: Double): this.type = { geometry.setX(x); this } 130 | def setY(y: Double): this.type = { geometry.setY(y); this } 131 | def setX(x: Int): this.type = setX(x.toDouble) 132 | def setY(y: Int): this.type = setY(y.toDouble) 133 | 134 | def width = geometry.getWidth() 135 | def height = geometry.getHeight() 136 | def setWidth(width: Int): this.type = { geometry.setWidth(width); this } 137 | def setHeight(height: Int): this.type = { geometry.setHeight(height); this } 138 | 139 | def geometry = node.getGeometry() 140 | 141 | def setGeometry(other: Node): this.type = { 142 | geometry.setBounds(other.x, other.y, other.width, other.height) 143 | this 144 | } 145 | 146 | def setGeometry(other: nas.Node): this.type = { 147 | val g = other.getGeometry() 148 | geometry.setBounds(g.getX(), g.getY(), g.getWidth(), g.getHeight()) 149 | this 150 | } 151 | 152 | def setStyle(other: nas.Node): this.type = { 153 | setStyle(other.getStyle().asScala.toMap) 154 | this 155 | } 156 | 157 | def setGeometry(g: nas.Rectangle): this.type = { 158 | geometry.setBounds(g.getX(), g.getY(), g.getWidth(), g.getHeight()) 159 | this 160 | } 161 | 162 | def createConnection(target: Node): Connection = new Connection( 163 | node.getLayer().createConnection(node, target.element) 164 | ) 165 | 166 | def createConnection(target: Node, style: Map[String, Any]): Connection = { 167 | val connection = createConnection(target) 168 | connection.setStyle(style) 169 | connection 170 | } 171 | 172 | def connect(target: Node, style: Map[String, Any]): this.type = { 173 | createConnection(target).setStyle(style) 174 | this 175 | } 176 | 177 | def outgoingConnections = node.getOutgoingConnections().asScala.map(Connection) 178 | def incomingConnections = node.getIncomingConnections().asScala.map(Connection) 179 | } 180 | 181 | case class Layer(layer: nas.Layer) extends ModelElement(layer) { 182 | def 
createNode(): Node = new Node(layer.createNode()) 183 | 184 | def createConnection(source: Node, target: Node): Connection = 185 | createConnection(source.element, target.element) 186 | 187 | def createConnection(source: nas.Node, target: nas.Node): Connection = new Connection( 188 | layer.createConnection(source, target) 189 | ) 190 | 191 | def elements = layer.getElements().asScala.map { 192 | case n: nas.Node => Node(n) 193 | case c: nas.Connection => Connection(c) 194 | case _ => ??? 195 | } 196 | } 197 | 198 | object Document { 199 | def apply(compressed: Boolean = false): Document = new Document(compressed) 200 | } 201 | 202 | case class Document(document: nas.Document) extends Element(document) { 203 | def this(compressed: Boolean = false) = this(nas.Document.create(compressed, null)) 204 | 205 | def autoLayout(int: Int) = org.nasdanika.drawio.Util.layout(activePage.root, int) 206 | 207 | def pages = document.getPages().asScala.map(Page).toSeq 208 | 209 | def createPage(): Page = new Page(document.createPage()) 210 | 211 | private var _activePage: Option[Page] = None 212 | 213 | def activePage: Page = _activePage.getOrElse { 214 | val page = pages.headOption.getOrElse(createPage()) 215 | _activePage = Some(page) 216 | page 217 | } 218 | 219 | def setActivePage(page: Page): this.type = { _activePage = Some(page); this } 220 | 221 | // activePage.setName("Page 1") 222 | 223 | def model = activePage.model 224 | 225 | def createLayer(): Layer = activePage.createLayer() 226 | 227 | def setActiveLayer(layer: Layer): this.type = { activePage.setActiveLayer(layer); this } 228 | 229 | def createNode(): Node = activePage.activeLayer.createNode() 230 | 231 | def createNode(x: Double, y: Double, width: Double, height: Double, style: (String, Any)*): Node = 232 | activePage.activeLayer.createNode().setBounds(x, y, width, height).setStyle(style: _*) 233 | 234 | def save(path: String, compressed: Boolean = false) = { 235 | val p = new java.io.File(path).toPath() 236 | 
println(s"Writing to $p") 237 | Files.writeString(p, document.save(compressed)) 238 | } 239 | } 240 | } 241 | 242 | object DrawTest extends App { 243 | 244 | import drawio._ 245 | 246 | val doc = new Document() 247 | 248 | // Add nodes 249 | val source = doc.createNode() 250 | source.setLabel("src") 251 | source.setX(200) 252 | source.setX(100) 253 | source.setWidth(70) 254 | source.setHeight(30) 255 | 256 | source.setStyle("fillColor", "#f8cecc") 257 | source.setStyle("strokeColor", "#b85450") 258 | // source.getTags.add("aws") 259 | 260 | val target = doc.createNode() 261 | target.setLabel("My target node") 262 | target.setBounds(300, 150, 100, 30) 263 | // val targetTags = target.getTags 264 | // targetTags.add("aws") 265 | // targetTags.add("azure") 266 | 267 | // Add connection 268 | val connection = source.createConnection(target) 269 | // connection.setLabel("My connection") 270 | connection.setStyle("edgeStyle", "orthogonalEdgeStyle") 271 | connection.setStyle("rounded", "1") 272 | connection.setStyle("orthogonalLoop", "1") 273 | connection.setStyle("jettySize", "auto") 274 | connection.setStyle("html", "1") 275 | 276 | doc.save("new-uncompressed.drawio") 277 | } 278 | -------------------------------------------------------------------------------- /arithmetics/src/adders/package.scala: -------------------------------------------------------------------------------- 1 | import chisel3._ 2 | 3 | 4 | package object adders { 5 | 6 | trait AdderType[T <: Data with Num[T]] { 7 | } 8 | 9 | // case class PimpedOpWithCarry[T <: Bits with Num[T]]( 10 | // lhsValue: T, 11 | // op: (T, T, Bool) => T, 12 | // rhsValue: T, 13 | // cinValue: Option[Bool]) 14 | // extends PimpedOp(lhsValue) { 15 | 16 | // def +(cin: Bool): T = { 17 | // println("adder has carry!") 18 | // PimpedOpWithCarry(lhsValue, op, rhsValue, Some(cin)).conv 19 | // } 20 | 21 | // def ++&(cin: Bool): T = this + cin 22 | 23 | // override def conv: T = op(lhsValue, rhsValue, cinValue match { case 
Some(cin) => cin; case _ => 0.B }) 24 | // } 25 | 26 | // implicit class PimpedOp[T <: Bits with Num[T]](val u: T) { 27 | 28 | // def ++&(other: T, cin: Bool = 0.B)(implicit adderType: AdderType[T]): PimpedOpWithCarry[T] = { 29 | // println(s"pimped adder ${adderType.getClass.getCanonicalName}") 30 | 31 | // PimpedOpWithCarry(this.conv, adderType.add, other, Some(cin)) 32 | // } 33 | 34 | // def conv: T = u 35 | 36 | // } 37 | 38 | // implicit def conv[T <: Bits with Num[T]](t: PimpedOp[T]): T = { 39 | // t.conv 40 | // } 41 | 42 | // def toffoli(a: Bool, b: Bool, c: Bool): Bool = (a & b) ^ c 43 | 44 | // def majority(a: Bool, b: Bool, c: Bool): Bool = toffoli(a ^ b, b ^ c, b) 45 | // ((a ^ b) & (b ^ c) ) ^ b // (a & b) ^ (a & c) ^ (b & c) 46 | 47 | } 48 | -------------------------------------------------------------------------------- /arithmetics/src/dividers/LutDivider.scala: -------------------------------------------------------------------------------- 1 | package dividers 2 | 3 | import chisel3._ 4 | 5 | class LutDivider(val width: Int) extends Module { 6 | val io = IO(new Bundle { 7 | val dividend = Input(UInt(width.W)) 8 | val divisor = Input(UInt(width.W)) 9 | val quotient = Output(UInt(width.W)) 10 | val remainder = Output(UInt(width.W)) 11 | }) 12 | 13 | val lut = Seq.tabulate(width, width)((i, j) => 14 | (i, j) match { 15 | case (0, _) => 0 16 | case (_, 0) => 0 17 | case (i, j) => if (i >= j) 1 << (i - j) else 0 18 | } 19 | ) 20 | 21 | 22 | } 23 | -------------------------------------------------------------------------------- /arithmetics/src/masking/DOM.scala: -------------------------------------------------------------------------------- 1 | package masking 2 | 3 | import chisel3._ 4 | import chisel3.util._ 5 | import chisel3.experimental.skipPrefix 6 | 7 | import chest.markDontTouch 8 | import chest.crossProduct 9 | import chest.masking.SharedBool 10 | import chisel3.reflect.DataMirror 11 | import chisel3.experimental.SourceInfo 12 | import 
chest.masking.Shared 13 | 14 | trait Gadget { 15 | 16 | // if not pipelined, the inputs need to remain stable for the maximum delay 17 | def pipelined: Boolean = true 18 | 19 | /** force balanced delay from all inputs to the output by adding registers 20 | * 21 | * @return 22 | */ 23 | def balanced: Boolean = false 24 | 25 | def andMaxDelay: Int 26 | def andMinDelay: Int 27 | 28 | def reg[T <: Data](t: T, en: Bool = 1.B)(implicit sourceInfo: SourceInfo): T = markDontTouch( 29 | RegEnable(markDontTouch(WireDefault(t)), en) 30 | ) 31 | def reg[T <: Data](t: Option[T], en: Bool)(implicit sourceInfo: SourceInfo): Option[T] = t.map(reg(_, en)) 32 | def reg[T <: Data](t: Option[T])(implicit sourceInfo: SourceInfo): Option[T] = t.map(reg(_)) 33 | 34 | def reg[T <: Data](input: T, en: Bool, clear: Bool)(implicit sourceInfo: SourceInfo): T = if (clear.isLit) { 35 | assert(clear.litValue == 0, "constant clear must be 0") 36 | reg(input, en) 37 | } else 38 | markDontTouch(RegEnable(markDontTouch(Mux(clear, 0.U.asTypeOf(input), input)), en | clear)) 39 | 40 | def optReg[T <: Data](u: T, valid: Bool)(implicit sourceInfo: SourceInfo): T = 41 | if (pipelined) RegEnable(u, valid) else u 42 | def optReg[T <: Data](u: T)(implicit sourceInfo: SourceInfo): T = if (pipelined) RegEnable(u, 1.B) else u 43 | 44 | def andRandBits(t: Int): Int 45 | 46 | def and( 47 | a: SharedBool, 48 | b: SharedBool, 49 | rand: Seq[Bool], 50 | randValid: Bool, 51 | clear: Bool = 0.B, 52 | )(implicit sourceInfo: SourceInfo): SharedBool 53 | 54 | def and( 55 | a: Shared, 56 | b: Shared, 57 | rand: Seq[Bool] 58 | )(implicit sourceInfo: SourceInfo): Shared = and(a, b, rand, 1.B) 59 | 60 | def and( 61 | a: Shared, 62 | b: Shared, 63 | rand: Seq[Bool], 64 | randValid: Bool, 65 | )(implicit sourceInfo: SourceInfo): Shared = { 66 | val randPerBit = andRandBits(a.numShares - 1) 67 | SharedBool.concat(a.asBools.zip(b.asBools).zipWithIndex.map { case ((ai, bi), i) => 68 | and(ai, bi, rand.slice(randPerBit * i, 
randPerBit * (i + 1)), randValid) 69 | }) 70 | } 71 | 72 | def toffoli( 73 | a: SharedBool, 74 | b: SharedBool, 75 | c: SharedBool, 76 | rand: Seq[Bool], 77 | randValid: Bool, 78 | enable: Option[Bool] = None 79 | )(implicit sourceInfo: SourceInfo): SharedBool 80 | 81 | def majorityRandBits(t: Int): Int 82 | 83 | def majority( 84 | a: SharedBool, 85 | b: SharedBool, 86 | c: SharedBool, 87 | rand: Seq[Bool], 88 | randValid: Bool 89 | )(implicit sourceInfo: SourceInfo): SharedBool 90 | 91 | } 92 | 93 | object Gadget { 94 | def apply(name: String, pipelined: Boolean = true): Gadget = name.toUpperCase match { 95 | case "DOM" => DOM(pipelined = pipelined) 96 | case "HPC2" => HPC2(pipelined = pipelined, balanced = false) 97 | case _ => throw new IllegalArgumentException(s"Unknown gadget: $name") 98 | } 99 | 100 | /** upper triangular to linear index 101 | * 102 | * @param i 103 | * row 104 | * @param j 105 | * column 106 | * @return 107 | * flattened index 108 | */ 109 | def upTriToLin(numShares: Int, i: Int, j: Int): Int = { 110 | require(0 <= i && i < numShares, s"i must be non-negative and less than $numShares but was $i") 111 | require(0 <= j && j < numShares, s"j must be non-negative and less than $numShares but was $j") 112 | require(j != i, s"i and j must be different but got i=$i, j=$j") 113 | if (j > i) { 114 | numShares * i - i * (i + 1) / 2 + (j - i - 1) 115 | } else upTriToLin(numShares, j, i) 116 | } 117 | } 118 | 119 | case class DOM(override val pipelined: Boolean = true) extends Gadget { 120 | 121 | override val balanced: Boolean = true 122 | 123 | def andRandBits(numInputs: Int, t: Int): Int = BigInt(t + 1).pow(numInputs - 1).toInt * t / 2 // FIXME!!! 
124 | def andRandBits(t: Int): Int = (t + 1) * t / 2 125 | 126 | def andMaxDelay: Int = 1 127 | def andMinDelay: Int = 1 128 | 129 | def majorityRandBits(t: Int): Int = andRandBits(t) + t 130 | 131 | def majority( 132 | a: SharedBool, 133 | b: SharedBool, 134 | c: SharedBool, 135 | rand: Seq[Bool], 136 | randValid: Bool 137 | )(implicit sourceInfo: SourceInfo): SharedBool = { 138 | val numShares = a.numShares 139 | require(b.numShares == numShares) 140 | require(c.numShares == numShares) 141 | 142 | val i0 = reg((a ^ b).refreshed(rand.slice(0, numShares - 1))) 143 | 144 | toffoli( 145 | (b ^ c), 146 | i0, 147 | (b), 148 | rand.slice(numShares - 1, numShares - 1 + andRandBits(numShares - 1)), 149 | randValid, 150 | ) 151 | } 152 | 153 | /** @param a 154 | * input 155 | * @param b 156 | * input 157 | * @param rand 158 | * fresh random bits 159 | * @return 160 | * masked a & b 161 | */ 162 | def and( 163 | a: SharedBool, 164 | b: SharedBool, 165 | rand: Seq[Bool], 166 | randValid: Bool, 167 | clear: Bool = 0.B, 168 | )(implicit sourceInfo: SourceInfo): SharedBool = { 169 | val numShares = a.numShares 170 | require(b.numShares == numShares) 171 | require( 172 | rand.length == andRandBits(numShares - 1), 173 | s"rand.length=${rand.length} requiredRandBits=${andRandBits(numShares - 1)}" 174 | ) 175 | 176 | val en = randValid 177 | 178 | def reg[T <: Data](t: T): T = super.reg(t, en) 179 | 180 | def optReg[T <: Data](input: T, en: Bool = en): T = if (pipelined) RegEnable(input, en) else input 181 | 182 | def r(i: Int, j: Int): Bool = { 183 | require(0 <= i && i < numShares) 184 | require(0 <= j && j < numShares) 185 | require(j != i) 186 | if (j > i) { 187 | val k = numShares * i - i * (i + 1) / 2 + (j - i - 1) 188 | rand(k) 189 | } else r(j, i) 190 | } 191 | 192 | val a_name = DataMirror.queryNameGuess(a) 193 | val b_name = DataMirror.queryNameGuess(b) 194 | 195 | val prefix = 196 | if (a_name.nonEmpty && a_name != "?" 
&& b_name.nonEmpty && b_name != "?")
        f"${a_name}_AND_${b_name}_"
      else ""

    // DOM share computation: the diagonal (same-domain) term a_i & b_i is only
    // registered when pipelined/balanced; every cross-domain term is remasked
    // with a fresh random bit and ALWAYS registered (required for security).
    SharedBool.from((0 until numShares).map { i =>
      (0 until numShares).map { j =>
        if (j == i)
          optReg(a.getShare(i) & b.getShare(i)).suggestName(prefix + s"dom${i}")
        else
          reg(r(i, j) ^ (a.getShare(i) & b.getShare(j)))
            .suggestName(
              prefix + s"dom${i}x${j}"
            )
      }.reduce(_ ^ _)
    })
  }

  /** ==Toffoli gate==
    *
    * Computes c ^ (a & b)
    *
    * @param a
    *   AND input.
    * @param b
    *   AND input.
    * @param c
    *   XOR input.
    * @param rand
    * @param en
    * @param clear
    * @return
    *   [c ^ (a & b)]
    *
    * @note
    *   Output delay: 1 cycle
    */
  def toffoli(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool,
    rand: Seq[Bool],
    randValid: Bool,
    enable: Option[Bool] = None
  )(implicit sourceInfo: SourceInfo): SharedBool = {
    val numShares = a.numShares
    require(numShares == b.numShares)

    // One fresh random bit per unordered share pair {i, j}, i != j.
    val requiredRandBits = numShares * (numShares - 1) / 2
    require(rand.length == requiredRandBits, s"rand.length=${rand.length} requiredRandBits=${requiredRandBits}")

    val en = randValid

    // Security-critical register: WireDefault + markDontTouch on both sides keeps
    // synthesis from merging/optimizing the remasking node away.
    def reg[T <: Data](t: T)(implicit sourceInfo: SourceInfo): T = markDontTouch(
      RegEnable(markDontTouch(WireDefault(t)), en)
    )

    // Pipeline-balancing register only; not needed for security.
    def optReg[T <: Data](input: T, en: Bool = en)(implicit sourceInfo: SourceInfo): T =
      if (pipelined || balanced) RegEnable(input, en) else input

    def r(i: Int, j: Int): Bool = rand(Gadget.upTriToLin(numShares, i, j))

    val a_name = DataMirror.queryNameGuess(a)
    val b_name = DataMirror.queryNameGuess(b)
    val c_name = DataMirror.queryNameGuess(c)

    // NOTE(review): only a_name/b_name are validated before being interpolated;
    // c_name may still be "" or "?" in the generated prefix — confirm intended.
    val prefix =
      if (a_name.nonEmpty && a_name != "?"
        && b_name.nonEmpty && b_name != "?")
        f"${a_name}_AND_${b_name}_XOR_${c_name}_"
      else ""

    // Same structure as `and`, with c_i folded into the diagonal term.
    SharedBool.from((0 until numShares).map { i =>
      (0 until numShares).map { j =>
        if (j == i)
          optReg(c.getShare(i) ^ (a.getShare(i) & b.getShare(i))).suggestName(prefix + s"dom${i}")
        else
          reg(r(i, j) ^ (a.getShare(i) & b.getShare(j))).suggestName(prefix + s"dom${i}x${j}")
      }.reduce(_ ^ _)
    })
  }

  /// FIXME not verified!
  /** Generalized DOM AND over `n` operands, each given as a Vec of `numShares`
    * UInt shares. NOTE(review): marked unverified by the author — the rand
    * indexing scheme below has not been security-checked; verify before use.
    */
  def and(in: Seq[Vec[UInt]], rand: Seq[UInt], valid: Bool): Vec[UInt] = {
    val n = in.length
    val numShares = in.head.length
    require(in.forall(_.length == numShares), "all inputs must have the same number of shares")

    def optReg[T <: Data](input: T): T = super.optReg(input, valid)

    // Group all n-way share products by the first operand's share index; each
    // group XOR-reduces into one output share.
    val ppMap = skipPrefix {
      crossProduct(in.map(_.zipWithIndex)).groupMapReduce(_.head._2) { case zz =>
        val (inputs, shareIds) = zz.unzip
        val p = inputs.reduce(_ & _).suggestName(s"p_${shareIds.mkString("_")}")
        // Mixed-radix encoding of the share-index tuple, relative to the head share.
        val ri = shareIds.foldLeft(0)((acc, j) => acc * numShares + (j + numShares - shareIds.head) % numShares)
        val pp =
          if (shareIds.distinct.length == 1) optReg(p)
          else {
            val r =
              if (shareIds.head == numShares - 1) rand(ri - 1)
              else rand(ri + ((BigInt(numShares).pow(n - 1).toInt - 1) * (shareIds.head % 2)) - 1)

            RegEnable(p ^ r, valid)
          }
        pp.suggestName(s"pp_${shareIds.mkString("_")}")
        markDontTouch(pp)
      }(_ ^ _)
    }

    assert(ppMap.size == numShares)

    VecInit((0 until numShares).map(ppMap))
  }
}

/** Elaboration wrapper for the generalized n-input DOM AND (`DOM.and` on
  * `Vec[UInt]` shares), width `width`, masking order `t`, with an XOR-sum
  * correctness assertion under the Verification layer.
  */
class DOMModule1(t: Int, width: Int, n: Int = 2) extends Module {

  require(t >= 1, "masking order must be at least 1")
  require(n > 1, "number of inputs must be at least 2")
  require(width > 0, "width must be at least 1")

  def this(t: Int) = this(t, 1, 2)

  val numShares = t + 1

  val g = DOM()
  val randBits = g.andRandBits(n, t)

  println(s"numInputs: $n, order: $t, width: $width, numShares: $numShares, randBits: ${randBits * width}")

  val io = IO(new Bundle {
    val in = Input(Vec(n, Vec(numShares, UInt(width.W))))
    val r = Input(Valid(Vec(randBits, UInt(width.W)))) // TODO
    val out = Output(Valid(Vec(numShares, UInt(width.W))))
  })

  io.out.bits :#= g.and(io.in, io.r.bits, io.r.valid)
  io.out.valid := RegNext(io.r.valid, 0.B)

  override def desiredName: String = s"DOM_n${n}_t${t}_w${width}"

  layer.block(layers.Verification) {
    // import chisel3.ltl._
    // import chisel3.ltl.Sequence._
    // AssertProperty(
    //   io.r.valid ### (io.out.reduce(_ ^ _) === RegNext(io.in).map(_.reduce(_ ^ _)).reduce(_ & _)) // `io.in` delayed?
    // )
    // Functional check: unmasked output equals AND of unmasked (1-cycle-delayed) inputs.
    when(io.out.valid) {
      assert(io.out.bits.reduce(_ ^ _) === RegNext(io.in).map(_.reduce(_ ^ _)).reduce(_ & _))
    }
  }

}

/** Elaboration wrapper for the 2-input, 1-bit-share DOM AND gadget
  * (non-pipelined), exposing a flat `rand` input sized by `andRandBits(order)`.
  */
class DomModule(order: Int) extends Module {
  require(order >= 1, "masking order must be at least 1")

  override def desiredName: String = simpleClassName(this.getClass()) + s"_d${order}"

  val numShares = order + 1

  val w = 1

  def gen = Shared(numShares, w.W)

  val g = DOM(pipelined = false)

  val randPerBit = g.andRandBits(order)

  println(s"numShares: $numShares, randPerBit: $randPerBit")

  val io = IO(new Bundle {
    // val in = Input(Vec(numInputs, gen))
    val a = Input(gen)
    val b = Input(gen)
    // val rand = Flipped(Valid(Vec(HPC2.requiredRandBits(numShares, numInputs), Bool())))
    val rand = Input(UInt((randPerBit * gen.elWidth).W))
    val out = Output(gen)
  })

  io.out :#= g.and(io.a, io.b, io.rand.asBools, randValid = 1.B)

}
--------------------------------------------------------------------------------
/arithmetics/src/masking/HPC2.scala:
--------------------------------------------------------------------------------
package masking

import chisel3._
import chisel3.util._

import chest._
import chest.masking.{SharedBool, Shared}
import chisel3.experimental.SourceInfo
import chisel3.layers.Verification

/** HPC2 (Hardware Private Circuits 2) masked gadget set: AND, Toffoli,
  * majority, and experimental 3-input variants. AND/Toffoli consume
  * t*(t+1)/2 fresh random bits per gate for masking order t.
  */
case class HPC2(override val pipelined: Boolean = false, override val balanced: Boolean = false) extends Gadget {
  def andRandBits(t: Int): Int = (t + 1) * t / 2

  def andMaxDelay: Int = 2
  def andMinDelay: Int = if (balanced) 2 else 1

  def majorityRandBits(t: Int): Int = andRandBits(t)

  /** @param a
    *   low delay input
    * @param b
    *   high delay input
    * @param rand
    * @param en
    * @param clear
    * @return
    *   masked a & b;
    *
    * if balanced == false: output has delay of 1 cycle with respect to input `a` and 2 cycles with respect to `b`
    */
  def and(
    a: SharedBool,
    b: SharedBool,
    rand: Seq[Bool],
    randValid: Bool,
    clear: Bool = 0.B,
  )(implicit sourceInfo: SourceInfo): SharedBool = {
    val numShares = a.numShares
    require(numShares == b.numShares)

    val requiredRandBits = numShares * (numShares - 1) / 2
    require(rand.length == requiredRandBits, s"rand.length=${rand.length} requiredRandBits=${requiredRandBits}")

    val en0 = randValid

    // Always-enabled balancing register (bypassed when doReg is false).
    def oReg[T <: Data](input: T, doReg: Boolean = true): T = if (doReg) RegEnable(input, 1.B) else input

    // Symmetric pair index into `rand`: r(i, j) == r(j, i), lower-triangular packing.
    def r(i: Int, j: Int): Bool = {
      require(0 <= i && i < numShares)
      require(0 <= j && j < numShares)
      require(j != i)
      if (j < i) {
        val k = i * (i + 1) / 2 + j - i
        rand(k)
      } else r(j, i)
    }

    SharedBool.from(Seq.tabulate(numShares) { i =>
      val a_i = a.getShare(i)
      val b_i = b.getShare(i)
      (
        // same-domain terms
        (0 until numShares).map { j =>
          if (i == j)
            if (balanced)
              oReg(a_i & b_i)
            else
              (a_i & oReg(b_i))
          else if (balanced)
            oReg(!a_i & r(i, j))
          else
            !a_i & oReg(r(i, j))
        }.reduce(_ ^ _) +:
          // cross-domain terms
          (0 until numShares).collect {
            case j if j != i =>
              val b_j = b.getShare(j)
              oReg(a_i, balanced) & reg(b_j ^ r(i, j), en0)
          }
      ).map(reg(_)).reduce(_ ^ _)
    })

    // SharedBool.from(Seq.tabulate(numShares) { i =>
    //   val a_i = a.getShare(i)
    //   xorReduceSeq(optReg(balanceReg(a_i) & b.getShare(i)) +: (0 until numShares).filter(_ != i).flatMap { j =>
    //     // val u_ij = reg(!a_i & r_ij)
    //     val s = reg(b.shares(j) ^ r(i, j))
    //     val p1 = reg(s & balanceReg(a_i))
    //     val p0 = balanceReg(reg(!a_i & r(i, j)))

    //     Seq(p0, p1)

    //   })
    // })
  }

  /** ==Toffoli gate==
    *
    * Computes c ^ (a & b)
    *
    * @param a
    *   The first AND input. Its delay to the output is 1 cycle.
    * @param b
    *   The second AND input. Its delay to the output is 2 cycles.
    * @param c
    *   delay 1 XOR input
    * @param rand
    * @param en
    * @return
    *   [c ^ (a & [b])]
    * @note
    *   ==Output delay==
    *   - from `a`, `c`: 1 cycle
    *   - from `b`: 2 cycles
    */
  def toffoli(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool,
    rand: Seq[Bool],
    randValid: Bool,
    enable: Option[Bool] = None
  )(implicit sourceInfo: SourceInfo): SharedBool = {
    val numShares = a.numShares
    require(numShares == b.numShares)

    val requiredRandBits = numShares * (numShares - 1) / 2
    require(rand.length == requiredRandBits, s"rand.length=${rand.length} requiredRandBits=${requiredRandBits}")

    val en0 = enable.map(_ & randValid).getOrElse(randValid)
    // NOTE(review): when `enable` is supplied, en1 is `enable` itself (NOT a
    // delayed en0); only the default path delays by one cycle — confirm intended.
    val en1 = enable.getOrElse(RegNext(en0))

    // register is required for correctness, based on the pipelining or balancedness
    def optReg[T <: Data](input: T, en: Bool = en0): T = if (pipelined || balanced) RegEnable(input, en) else input

    def balanceReg[T <: Data](input: T, en: Bool = en0): T = if (balanced && pipelined) RegEnable(input, en) else input

    // Symmetric pair index into `rand`: upper-triangular row-major packing.
    def r(i: Int, j: Int): Bool = {
      if (j < i)
        r(j, i)
      else {
        require(0 <= i && i < numShares && 0 <= j && j < numShares)
        require(j != i)
        val k = numShares * i - i * (i + 1) / 2 + (j - i - 1)
        rand(k)
      }
    }

    println(s"balanced: $balanced, pipelined: $pipelined")

    SharedBool.from(Seq.tabulate(numShares) { i =>
      val a_i = balanceReg(a.getShare(i))
      val b_i = optReg(b.getShare(i))
      val c_i = balanceReg(c.getShare(i))
      (
        // same-domain terms
        (c_i +: (0 until numShares).map { j =>
          (if (i == j) a_i & b_i else !a_i & reg(r(i, j), en0))
        }).reduce(_ ^ _) +:
          // cross-domain terms
          (0 until numShares).collect {
            case j if j != i =>
              a_i & reg(b.getShare(j) ^ r(i, j), en0)
          }
      ).map(reg(_, en1)).reduce(_ ^ _)
    })
  }

  /** Masked majority(a, b, c), built on `toffoli` via
    * maj(a,b,c) = b ^ ((a ^ b) & (b ^ c)). */
  def majority(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool,
    rand: Seq[Bool],
    randValid: Bool
  )(implicit sourceInfo: SourceInfo): SharedBool = {
    val numShares = a.numShares
    require(b.numShares == numShares)
    require(c.numShares == numShares)

    val br = optReg(b)

    toffoli(
      br ^ c, // 1 cycle delay from `c` (used for carry-in)
      a ^ b, // 2 cycle delay
      br, // 1 cycle delay, therefore needs optReg
      rand,
      randValid,
    )
  }

  def requiredRandBits(numShares: Int, multDegree: Int = 2) = {
    numShares * (numShares - 1) / 2 + (multDegree - 2) * numShares * (numShares - 1) // FIXME!!! WRONG!! TEMPORARY!!!
  }

  /** @param a
    *   low delay input
    * @param b
    *   high delay input
    * @param rand
    * @param en
    * @param clear
    * @return
    *   if balanced == false: output has delay of 1 cycle with respect to input `a` and 2 cycles with respect to `b`
    * @note
    *   NOTE(review): experimental 3-input AND; only implemented for
    *   numShares == 2 and flagged unverified below.
    */
  def and3(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool,
    rand: Seq[Bool],
    randValid: Bool,
    clear: Bool = 0.B,
    pipelined: Boolean = true,
    balanced: Boolean = false): SharedBool = {

    require(a.hasSameNumShares(b, c))

    val numShares = a.numShares

    val en0 = randValid
    val en1 = RegNext(en0)

    def optReg[T <: Data](input: T, en: Bool = en0): T = if (pipelined || balanced) RegEnable(input, en) else input

    // Clearable security register: forced to 0 while `clear` is asserted.
    def reg[T <: Data](input: T, en: Bool): T =
      markDontTouch(RegEnable(Mux(clear, 0.U.asTypeOf(input), markDontTouch(input)), en | clear))

    require(numShares == 2, "TODO: implement for numShares > 2")

    val reqRand = requiredRandBits(numShares, 3)
    require(rand.length == reqRand, s"rand.length=${rand.length} requiredRandBits=${reqRand}")

    // def r(i: Int, j: Int): Bool = {
    //   require(0 <= i && i < numShares)
    //   require(0 <= j && j < numShares)
    //   require(j != i)
    //   if (j > i) {
    //     val k = numShares * i - i * (i + 1) / 2 + (j - i - 1)
    //     rand(k)
    //   } else r(j, i)
    // }

    // TODO
    def r(i: Int): Bool = rand(i)

    val rr = reg(rand.xorReduce, en0)

    SharedBool.from(Seq.tabulate(numShares) { i =>
      val a_i = if (balanced) optReg(a.getShare(i)) else a.getShare(i)
      val b_i = b.getShare(i)
      val bb_i = optReg(b_i) // <-- TODO: !!CRITICAL!! Verify! Make sure reg is not essential for security
      val c_i = c.getShare(i)
      val cc_i = optReg(c_i) // <-- TODO: !!CRITICAL!! Verify! Make sure reg is not essential for security

      val tt: Bool = reg(Seq(b_i & c_i, b_i & r(0), c_i & r(1), r(2)).reduce(_ ^ _), en0)
      val t0 = (a_i & tt) ^ rr

      (
        t0 +: (0 until numShares).collect {
          case j if j != i =>
            val b_j = b.getShare(j)
            val c_j = c.getShare(j)
            Seq(
              a_i & reg(b_j ^ r(1), en0) & cc_i,
              a_i & reg(c_j ^ r(0), en0) & bb_i,
              a_i & reg((b_j & c_j) ^ r(2), en0),
            )
        }.flatten
      ).map(reg(_, en1)).reduce(_ ^ _)
    })
  }

  /** @param a
    *   low delay input 1
    * @param b
    *   high delay input 2
    * @param c
    *   high delay input 3
    * @param d
    *   xored with the output
    * @param rand
    * @param en
    * @param clear
    * @return
    *   if balanced == false: output has delay of 1 cycle with respect to input `a` and 2 cycles with respect to `b`
    * @note
    *   NOTE(review): same structure as `and3` with `d` folded into t0; only
    *   implemented for numShares == 2.
    */
  def and3Xor(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool,
    d: SharedBool,
    rand: Seq[Bool],
    randValid: Bool,
    clear: Bool = 0.B,
    pipelined: Boolean = true,
    balanced: Boolean = false): SharedBool = {

    require(a.hasSameNumShares(b, c))

    val numShares = a.numShares

    val en0 = randValid
    val en1 = RegNext(en0)

    def optReg[T <: Data](input: T, en: Bool = en0): T = if (pipelined || balanced) RegEnable(input, en) else input

    def reg[T <: Data](input: T, en: Bool): T =
      markDontTouch(RegEnable(Mux(clear, 0.U.asTypeOf(input), markDontTouch(input)), en | clear))

    require(numShares == 2, "TODO: implement for numShares > 2")

    val reqRand = requiredRandBits(numShares, 3)
    require(rand.length == reqRand, s"rand.length=${rand.length} requiredRandBits=${reqRand}")

    // def r(i: Int, j: Int): Bool = {
    //   require(0 <= i && i < numShares)
    //   require(0 <= j && j < numShares)
    //   require(j != i)
    //   if (j > i) {
    //     val k = numShares * i - i * (i + 1) / 2 + (j - i - 1)
    //     rand(k)
    //   } else r(j, i)
    // }

    // TODO
    def r(i: Int): Bool = rand(i)

    val rr = reg(rand.xorReduce, en0)

    SharedBool.from(Seq.tabulate(numShares) { i =>
      val a_i = if (balanced) optReg(a.getShare(i)) else a.getShare(i)
      val d_i = if (balanced) optReg(d.getShare(i)) else d.getShare(i)
      val b_i = b.getShare(i)
      val bb_i = optReg(b_i) // <-- TODO: !!CRITICAL!! Verify! Make sure reg is not essential for security
      val c_i = c.getShare(i)
      val cc_i = optReg(c_i) // <-- TODO: !!CRITICAL!! Verify! Make sure reg is not essential for security

      val tt: Bool = reg(Seq(b_i & c_i, b_i & r(0), c_i & r(1), r(2)).reduce(_ ^ _), en0)
      val t0 = (a_i & tt) ^ rr ^ d_i

      (
        t0 +: (0 until numShares).collect {
          case j if j != i =>
            val b_j = b.getShare(j)
            val c_j = c.getShare(j)
            Seq(
              a_i & reg(b_j ^ r(1), en0) & cc_i,
              a_i & reg(c_j ^ r(0), en0) & bb_i,
              a_i & reg((b_j & c_j) ^ r(2), en0),
            )
        }.flatten
      ).map(reg(_, en1)).xorReduce
    })
  }

}

/** Elaboration wrapper for the HPC2 AND gadget with a formal correctness check:
  * after the VerifModule warm-up, the unmasked output must equal the AND of the
  * appropriately delayed unmasked inputs (delays depend on `balanced`).
  */
class Hpc2Module(order: Int, w: Int = 1, balanced: Boolean = true) extends Module {
  // val numInputs: Int = 2
  require(order >= 1, "masking order must be at least 1")
  // require(numInputs > 1, "number of inputs must be at least 2")
  // require(width > 0, "width must be at least 1")

  override def desiredName: String = simpleClassName(this.getClass()) + s"_order${order}"

  val numShares = order + 1

  def gen = Shared(numShares, w.W)

  val g = HPC2(balanced = balanced)

  val randPerBit = g.requiredRandBits(numShares)

  println(s"numShares: $numShares, randPerBit: $randPerBit")

  val io = IO(new Bundle {
    // val in = Input(Vec(numInputs, gen))
    val a = Input(gen)
    val b = Input(gen)
    // val rand = Flipped(Valid(Vec(HPC2.requiredRandBits(numShares, numInputs), Bool())))
    val rand = Input(UInt((randPerBit * gen.elWidth).W))
    val out = Output(gen)
  })

  io.out :#= g.and(io.a, io.b, io.rand.asBools, randValid = 1.B)

  layer.block(Verification) {
    val verifDelay = Module(new VerifModule(2))

    when(verifDelay.valid) {
      assert(
        io.out.unmasked() === ShiftRegister(
          ShiftRegister(io.b.unmasked(), if (balanced) 0 else 1) & io.a.unmasked(),
          if (balanced) 2 else 1
        )
      )
    }
  }
}

/** Elaboration wrapper for the HPC2 Toffoli gadget (order fixed to 1) with a
  * formal check that out == (b & a) ^ c under the gadget's per-input delays.
  */
class Hpc2ToffoliModule extends Module {
  val order: Int = 1
  require(order >= 1, "masking order must be at least 1")

  override def desiredName: String = simpleClassName(this.getClass()) + s"_order${order}"

  val numShares = order + 1

  def gen = SharedBool(numShares)

  val balanced = false
  val pipelined = true

  val g = HPC2(pipelined = pipelined, balanced = balanced)

  val io = IO(new Bundle {
    val a = Input(gen)
    val b = Input(gen)
    val c = Input(gen)
    val rand = Input(Vec(g.requiredRandBits(numShares), Bool()))
    val out = Output(gen)
  })

  io.out :#= g.toffoli(io.a, io.b, io.c, io.rand, 1.B)

  layer.block(Verification.Assert) {
    val a_us_delayed = ShiftRegister(io.a.unmasked(), if (balanced) 2 else 1)
    val c_us_delayed = ShiftRegister(io.c.unmasked(), if (balanced) 2 else 1)
    val bp = Pipe(!reset.asBool, io.b.unmasked(), 2)
    when(bp.valid) {
      assert(io.out.unmasked() === (bp.bits & a_us_delayed) ^ c_us_delayed)
    }
  }
  if (!pipelined) {
    // Non-pipelined gadget requires inputs held stable across cycles.
    layer.block(Verification.Assume) {
      when(!reset.asBool) {
        assume(io.a === RegNext(io.a))
        assume(io.b === RegNext(io.b))
        assume(io.c === RegNext(io.c))
      }
    }
  }
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/VerifModule.scala:
--------------------------------------------------------------------------------
package masking

import chisel3._
import chisel3.util._
import chisel3.util.experimental.InlineInstance
import chisel3.util.experimental.FlattenInstance

class VerifModule(numCycles: Int) extends Module with FlattenInstance with InlineInstance {

  val valid = IO(Output(Bool()))
  val count = IO(Output(UInt(log2Ceil(numCycles + 1).W)))

  val inner = Module(new VerifBBox(numCycles)).io
  inner.clock := clock
  inner.reset := reset
  valid := inner.valid
  count :#= inner.count

  /** Inline-SystemVerilog blackbox: a formal-only warm-up counter. Under
    * `FORMAL` it counts cycles (assuming reset on cycle 0 and no reset after),
    * asserting `valid` once `numCycles` cycles have elapsed; in synthesis and
    * plain simulation `valid` is tied to 0.
    */
  class VerifBBox(numCycles: Int) extends BlackBox with HasBlackBoxInline {
    val io = IO(new Bundle {
      val reset = Input(Reset())
      val clock = Input(Clock())
      val valid = Output(Bool())
      val count = Output(UInt(log2Ceil(numCycles + 1).W))
    })
    // chisel3.Intrinsic("circt_init", VERIF__counter.cloneType, Seq(VERIF__counter))

    override def desiredName: String = simpleClassName(this.getClass()) + numCycles

    setInline(
      desiredName + ".sv",
      s"""
      |module ${desiredName} #(
      |  parameter FORMAL_START_CYCLE = $numCycles
      |) (
      |  input clock,
      |  input reset,
      |  output logic valid,
      |  output logic [$$clog2(FORMAL_START_CYCLE + 1)-1:0] count
      |);
      |
      |`ifndef SYNTHESIS
      |`ifdef FORMAL
      |  reg [$$clog2(FORMAL_START_CYCLE + 1)-1:0] VERIF__counter = 0;
      |  initial begin
      |    assume(reset);
      |  end
      |  always @(posedge clock) begin
      |    if (!valid) begin
      |      VERIF__counter <= VERIF__counter + 1;
      |    end
      |    if (VERIF__counter != 0) begin
      |      assume(!reset);
      |    end
      |  end
      |  assign valid = VERIF__counter == FORMAL_START_CYCLE;
      |  assign count = VERIF__counter;
      |`else
      |  assign valid = 1'b0;
      |  assign count = 0;
      |`endif
      |`else
      |  assign valid = 1'b0;
      |  assign count = 0;
      |`endif
      |
      |endmodule
      """.stripMargin
    )

  }
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/BKAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chest.masking.SharedBool

/** Brent-Kung prefix adder over boolean-masked shares, using the gadget named
  * by `gadget` (resolved via `Gadget(gadget)`). */
class BKAdder(val width: Int, val order: Int, gadget: String)
  extends BooleanMaskedAdderModule
  with adders.BrentKung[SharedBool] {
  override def g: Gadget = Gadget(gadget)
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/BooleanMaskedAdderModule.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chisel3._
import chest.masking._

import adders.PrefixAdder
import chisel3.layers.Verification
import chisel3.experimental.noPrefix
import chisel3.util.Pipe

/** Base Module for boolean-masked adders: declares shared a/b/(cin)/sum IO,
  * wires up `add`, and attaches a Verification-layer correctness check that the
  * unmasked sum matches `a + b + cin` after the adder's pipeline `delay`.
  * Non-pipelined variants additionally assume stable inputs.
  */
abstract class BooleanMaskedAdderModule extends Module with MaskedAdder with PrefixAdder[SharedBool] {

  def withCarryIn: Boolean = false
  def pipelined: Boolean = true

  val io = IO(new Bundle {
    val a = Input(Shared(numShares, width.W))
    val b = Input(Shared(numShares, width.W))
    val cin = Option.when(withCarryIn)(Input(SharedBool(numShares)))
    val sum = Output(Shared(numShares, (width + 1).W))
  })

  io.sum :#= noPrefix { add(io.a, io.b, io.cin) }

  def depth = currentDepth

  // Total latency in cycles: prefix-tree depth times the gadget's per-AND delay.
  def delay = (depth) * g.andMinDelay

  println(s"delay=${delay} pipelined=${pipelined}")

  layer.block(Verification) {
    if (!pipelined) {
      layer.block(Verification.Assume) {
        when(!reset.asBool && !RegNext(reset.asBool)) {
          assume(io.a === RegNext(io.a))
          assume(io.b === RegNext(io.b))
        }
      }
    }
    layer.block(Verification.Assert) {
      val p = Pipe(!reset.asBool, io.a.unmasked() +& io.b.unmasked() + io.cin.map(_.unmasked()).getOrElse(0.B), delay)
      when(!reset.asBool && p.valid) {
        assert(io.sum.unmasked() === p.bits)
      }
    }
  }
  // layer.block(Verification) {
  //   val verifDelay = Module(new VerifModule(delay))

  //   assume(randInValid === 1.B)

  //   // when(verifDelay.count =/= 0.U) {
  //   assume(io.a === RegNext(io.a))
  //   assume(io.b === RegNext(io.b))
  //   // assume(randomBits === RegNext(randomBits))
  //   // assume(!reset.asBool)
  //   // }.otherwise {}

  //   // val sumUnmasked = io.sum.unmasked()
  //   // val expected = io.a.unmasked() +& io.b.unmasked() // RegInit(0.U)

  //   // when(verifDelay.valid) {

  //   //   assert(
  //   //     sumUnmasked === expected,
  //   //     cf"Expected :$expected Received: ${sumUnmasked}"
  //   //   )

  //   // }

  // }
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/KSAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chest.masking.SharedBool

/** Kogge-Stone prefix adder over boolean-masked shares. */
class KSAdder(val width: Int, val order: Int, gadget: String)
  extends BooleanMaskedAdderModule
  with adders.KoggeStone[SharedBool] {
  override def g = Gadget(gadget)
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/LFAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chest.masking.SharedBool
import chisel3.util.ShiftRegister
import chisel3.experimental.SourceInfo

/** Ladner-Fischer prefix adder over boolean-masked shares. */
class LFAdder(val width: Int, val order: Int, gadget: String)
  extends BooleanMaskedAdderModule
  with adders.LadnerFischer[SharedBool] {
  override def g = Gadget(gadget)


}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/MaskedAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chisel3._
import chisel3.util._
import chisel3.experimental.noPrefix

import adders._

import chest.masking.{SharedBool, Shared}
import chisel3.experimental.SourceInfo

/** Mixin that collects all fresh-randomness requests made during module
  * elaboration and materializes a single `rand` input port of the total
  * requested width at module body end.
  */
trait HasRandLedger { self: Module =>

  // All randomness wires requested so far; sized and connected at body end.
  private val _randsLedger = collection.mutable.ArrayBuffer.empty[Bool]

  val randInValid =
Wire(Bool())

  /** request fresh random bits
    *
    * @param w
    *   number of random bits to get
    * @return
    *   random bits as a UInt of width w
    */
  def reqRand(w: Int): UInt = reqRands(w).asUInt

  def reqRands(w: Int): Vec[Bool] = {
    val r = Seq.fill(w)(dontTouch(Wire(Bool())))
    _randsLedger ++= r
    VecInit(r)
  }

  def reqRand(): Bool = reqRand(1).asBool

  def randAlwaysValid: Boolean = true

  def randomBits = VecInit(_randsLedger.toSeq)

  // Runs after the module body: by now every gadget has requested its
  // randomness, so the total port width is known.
  atModuleBodyEnd {
    val randBits = _randsLedger.length
    println(s"randBits=${randBits}")

    if (randAlwaysValid)
      noPrefix {
        val rand = IO(Input(UInt(randBits.W))).suggestName("rand")
        _randsLedger.zip(rand.asBools).foreach { case (wi, ri) => wi :#= ri }
        randInValid := 1.B
      }
    else
      noPrefix {
        val rand = IO(Flipped(Valid(UInt(randBits.W))))
        _randsLedger.zipWithIndex.foreach { case (r, i) => r :#= rand.bits(i) }
        randInValid := rand.valid
      }

  }
}

/** Adder interface specialized for masked operands: generate (g) propagation
  * is expressed through the masked Toffoli gate. */
trait MaskedAdderBase[T] extends Adder[T] {
  def order: Int

  def width: Int

  def numShares: Int

  override def genG(p: Option[T], g: Option[T], c: Option[T])(implicit sourceInfo: SourceInfo): Option[T] =
    toffoli(p, c, g)
}

/** Concrete masked-adder operations over SharedBool: linear ops (xor/not) act
  * share-wise; non-linear ops (and/toffoli/majority) go through the gadget `g`,
  * drawing fresh randomness from the ledger.
  */
trait MaskedAdder extends MaskedAdderBase[SharedBool] with HasRandLedger { self: Module =>

  def order: Int

  def width: Int

  def numShares: Int = order + 1

  def zero = SharedBool.const(0.B, numShares)

  def g: Gadget = DOM(pipelined = true)
  // def g: Gadget = HPC2(pipelined = false, balanced = false)

  def randBitsPerAnd2 = g.andRandBits(order)

  override def xor(a: SharedBool, b: SharedBool)(implicit sourceInfo: SourceInfo): SharedBool = a ^ b

  override def and(a: SharedBool, b: SharedBool)(implicit sourceInfo: SourceInfo): SharedBool =
    g.and(a, b, reqRands(randBitsPerAnd2), randInValid)

  override def toffoli(a: SharedBool, b: SharedBool, c: SharedBool)(implicit sourceInfo: SourceInfo): SharedBool =
    g.toffoli(a, b, c, reqRands(randBitsPerAnd2), randInValid)

  override def not(a: SharedBool)(implicit sourceInfo: SourceInfo): SharedBool = ~a

  def add(a: Shared, b: Shared, cin: Option[SharedBool]): Shared =
    SharedBool.concat(add(a.asBools, b.asBools, cin))

  // e.g. "KSAdder_w32_d1_dom"
  override def desiredName: String = {
    val clzName = simpleClassName(this.getClass)
    clzName + (if (clzName.toLowerCase.endsWith("adder")) ""
               else "Adder") + s"_w${width}_d${order}_${g.getClass.getSimpleName.toLowerCase}"
  }

  override def majority(
    a: SharedBool,
    b: SharedBool,
    c: SharedBool
  )(implicit sourceInfo: SourceInfo): SharedBool = {

    g.majority(a, b, c, reqRands(g.majorityRandBits(order)), randInValid)

  }

  // override def fullAdder(
  //   a: Option[SharedBool],
  //   b: Option[SharedBool],
  //   cin: Option[SharedBool]
  // )(implicit sourceInfo: SourceInfo): (Option[SharedBool], Option[SharedBool]) = {
  //   // val ref = reqRands(numShares - 1)
  //   // val s = gadget.reg(xor(a, b, cin).map(_.refreshed(ref)))
  //   val g = majority(a, b, cin)
  //   val s = if(g.isDefined) filler(xor(a, b, cin)) else xor(a, b, cin)
  //   // val g = majority(a, b, cin)
  //   (s, g)
  // }

}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/RCAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chisel3.util.ShiftRegister
import chisel3.experimental.SourceInfo

import chest.masking.SharedBool

import adders.FullAdder

class RCAdder(val width: Int, val order: Int, gadget: String)
  extends
BooleanMaskedAdderModule
  with adders.RippleCarry[SharedBool] {

  override def pipelined = true
  override def g = Gadget(gadget, pipelined)
  override def delay = (depth - 1) * g.andMinDelay + delayUnbalance

  type T = SharedBool

  // Shift-register helpers used to re-align signals with the gadget latency.
  def sr(x: T, n: Int): T = if (n == 0) x else ShiftRegister(x, n)
  def sr(x: Option[T], n: Int): Option[T] = x.map(sr(_, n))
  def sr(pg: (Option[T], Option[T]), n: Int): (Option[T], Option[T]) = (sr(pg._1, n), sr(pg._2, n))

  /** One ripple step: only position `level` is updated with a full-adder cell;
    * its inputs/outputs are delayed so every sum bit arrives at the common
    * output cycle `d`. All other positions pass through unchanged.
    */
  override def nextLayer(pg: Seq[(Option[T], Option[T])], level: Int): Seq[(Option[T], Option[T])] = {
    val d = (pg.length - 1) * g.andMinDelay + delayUnbalance
    pg.zipWithIndex.map { case (pgi, i) =>
      if (i == level) {
        val l = level - 1
        val pd = d - l + 1 - g.andMaxDelay
        println(s"level=$level l=$l d=$d pd=$pd")
        val (s, c) = mkCell(FullAdder, sr(pgi, l), pg(i - 1), level, i - 1, i - 2)
        (sr(s, pd), c)
      } else
        pgi
    }
  }

  // Extra delay of the gadget's slow input relative to its fast input.
  def delayUnbalance = g.andMaxDelay - g.andMinDelay
  override def halfAdder(a: Option[T], b: Option[T])(implicit sourceInfo: SourceInfo): (Option[T], Option[T]) = {
    val c = and(sr(a, delayUnbalance), b) // unbalanced AND gadget, first input needs to be delayed
    val s = xor(sr(a, g.andMinDelay), sr(b, g.andMinDelay))
    (s, c)
  }

  override def fullAdder(a: Option[T], b: Option[T], cin: Option[T])(implicit sourceInfo: SourceInfo)
    : (Option[T], Option[T]) = {
    val c = majority(a, b, cin)
    val s = xor(sr(a, g.andMinDelay), sr(b, g.andMinDelay), cin)
    (s, c)
  }
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/adder/SKAdder.scala:
--------------------------------------------------------------------------------
package masking
package adder

import chest.masking.SharedBool

/** Sklansky prefix adder over boolean-masked shares. */
class SKAdder(val width: Int, val order: Int, gadget: String)
  extends BooleanMaskedAdderModule
  with adders.SklanskyAdder[SharedBool] {
  override def g = Gadget(gadget)
}

/** Pipelined Sklansky variant. */
class PipelinedSKAdder(val width: Int, val order: Int, gadget: String)
  extends BooleanMaskedAdderModule
  with adders.PipelinedSklanskyAdder[SharedBool] {
  override def g = Gadget(gadget)
}
--------------------------------------------------------------------------------
/arithmetics/src/masking/conversion/A2B.scala:
--------------------------------------------------------------------------------
package masking
package conversion
package a2b

import chisel3._
import chisel3.layers._
import chisel3.util._

import chest._
import chest.masking._
import chisel3.reflect.DataMirror

/** First-order (2-share) arithmetic-to-boolean conversion wrapper with a
  * 1-cycle-delayed correctness assertion (out unmasked == x0 + x1). */
class GoubinA2B(W: Int) extends Module {
  val io = IO(new Bundle {
    val x = Input(Vec(2, UInt(W.W)))
    val rand = Input(UInt(W.W))
    val out = Output(BooleanShared(2, UInt(W.W)))
  })

  io.out :#= A2B(io.x, io.rand)

  layer.block(Verification.Assert) {
    val p = Pipe(!reset.asBool, io.x.reduce(_ + _), 1)
    when(p.valid) {
      assert(io.out.unmasked() === p.bits)
    }
  }
}
/** Generic-order A2B conversion wrapper (conversion itself is still a stub for
  * most share counts — see the FIXMEs in the companion object). */
class A2B(order: Int = 1, W: Int) extends Module {
  val numShares = order + 1
  val io = IO(new Bundle {
    val x = Input(Vec(numShares, UInt(W.W)))
    val rand = Input(UInt(W.W))
    val out = Output(BooleanShared(numShares, UInt(W.W)))
  })

  io.out :#= A2B(io.x, io.rand)

  layer.block(Verification.Assert) {
    val p = Pipe(!reset.asBool, io.x.reduce(_ + _), 1)
    when(p.valid) {
      assert(io.out.unmasked() === p.bits)
    }
  }
}

object A2B {

  /** Secure Carry Save Adder from the paper ...
CHES 2024 50 | */ 51 | def secCSA(x: BooleanShared, y: BooleanShared, cin: BooleanShared, rand: UInt): (BooleanShared, BooleanShared) = { 52 | val numShares = x.numShares 53 | require(numShares == y.numShares && numShares == cin.numShares) 54 | val a = x ^ y 55 | val s = cin ^ a 56 | val g = DOM(pipelined = true) 57 | val c = (x ^ g.and(a, x ^ cin, rand.asBools)) << 1 58 | (s, c) 59 | } 60 | 61 | 62 | 63 | def apply(x: Vec[UInt], rand: UInt): BooleanShared = { 64 | val numShares = x.length 65 | numShares match { 66 | case 2 => 67 | BooleanShared.from(x) // FIXME to be implemented 68 | case 3 => 69 | val y1 = BooleanShared.from(Seq(x(0), 0.U, 0.U)) 70 | val y2 = BooleanShared.from(Seq(0.U, x(1), 0.U)) 71 | val y3 = BooleanShared.from(Seq(0.U, 0.U, x(2))) 72 | val (s, c) = secCSA(y1, y2, y3, rand) 73 | BooleanShared.from(x) // FIXME to be implemented 74 | case _ => 75 | BooleanShared.from(x) // FIXME to be implemented 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /arithmetics/src/masking/conversion/b2a/B2A.scala: -------------------------------------------------------------------------------- 1 | package masking 2 | package conversion 3 | package b2a 4 | 5 | import chisel3._ 6 | import chisel3.layers._ 7 | import chisel3.util._ 8 | 9 | import chest._ 10 | import chest.masking._ 11 | 12 | class B2A(W: Int) extends Module { 13 | val io = IO(new Bundle { 14 | val x = Input(BooleanShared(2, UInt(W.W))) 15 | val rand = Input(UInt(W.W)) 16 | val out = Output(Vec(2, UInt(W.W))) 17 | }) 18 | 19 | io.out :#= B2A.pini(io.x, io.rand) 20 | 21 | layer.block(Verification.Assert) { 22 | val p = Pipe(!reset.asBool, io.x.shares.reduce(_ ^ _), 1) 23 | when(p.valid) { 24 | assert(io.out(0) - io.out(1) === p.bits) 25 | } 26 | } 27 | } 28 | 29 | object B2A { 30 | 31 | /** $\Phi(a, b) = (a \opus b) + b, a, b \in Z$ ( or $Z_{2^k}$ ) 32 | * 33 | * is affine over $F_2$ 34 | * 35 | * $\Phi(a, b) \oplus \Phi(a, 0) = \Phi(a, b) \oplus 
  /** Re-shares a 2-share Boolean sharing into a refreshed 3-share sharing.
    *
    * Output shares: (s0 ^ r0, s1 ^ r0 ^ r1, r1). Their XOR equals s0 ^ s1, so
    * the secret is preserved while both original shares are remasked with
    * fresh randomness. Every output share is registered (enable `en`) before
    * being returned, keeping the shares in lockstep.
    *
    * @param x    2-share Boolean sharing to refresh (1st-order masking only)
    * @param rand two fresh, uniformly random words
    * @param en   enable for the output register stage
    * @return three registered, refreshed output shares
    */
  def refresh3(x: BooleanShared, rand: Seq[UInt], en: Bool): Seq[UInt] = {
    require(x.numShares == 2, "Only supports 1st order")

    require(rand.length == 2)

    // WireDefault(chiselTypeOf(rand(1)), ...) gives the third share the same declared type/width
    // as the random word before it enters the register stage.
    Seq(x.shares(0) ^ rand(0), x.shares(1) ^ rand(0) ^ rand(1), WireDefault(chiselTypeOf(rand(1)), rand(1)))
      .map(reg(_, en))
  }
  /** Goubin-style first-order Boolean-to-arithmetic (B2A) conversion, pipelined.
    *
    * Relies on `phi(a, b) = (a ^ b) + b` being affine over F_2 (see `phi`), so the
    * arithmetic share can be assembled by XOR-ing phi evaluations of refreshed
    * Boolean shares without ever recombining the secret. Total latency: 3
    * register stages; the second output share is delayed to arrive in the same
    * cycle as the first.
    *
    * NOTE(review): the intended output contract is presumably
    * c1 - c2 == x.shares(0) ^ x.shares(1) (the unshared value) -- confirm
    * against the formal checker used for the other conversions.
    *
    * @param x    2-share Boolean sharing of the value to convert
    * @param rand two fresh random words, rand(0) and rand(1)
    * @return the two arithmetic output shares, Seq(c1, c2)
    */
  def goubin(x: BooleanShared, rand: Seq[UInt]): Seq[UInt] = {
    require(x.numShares == 2, "This B2A only supports 1st order masking")

    // cycle 1: refresh both Boolean shares with fresh randomness.
    val a1 = reg(x.shares(0) ^ rand(0)) // x0 ^ r0
    val a2 = reg(x.shares(1) ^ rand(0) ^ rand(1)) // x1 ^ r0 ^ r1
    val a3 = pipeReg(rand(1)) // r1, registered to stay in lockstep with a1/a2
    // Second output share: registered once, then delayed 2 more cycles to align with c1.
    val c2 = pipeReg(reg(x.shares(1) ^ rand(0)), 2)
    // cycle 2: the two phi evaluations (each costs one adder).
    val b1 = reg(phi(a1, a3) ^ a1)
    val b2 = reg(phi(a1, a2))
    // val b3 = reg(a2 ^ a3)
    // cycle 3: combine into the first arithmetic share.
    val c1 = reg(b1 ^ b2)
    // val c2 = reg(b3)

    Seq(c1, c2)
  }
Module { 10 | val io = IO(new Bundle { 11 | val in = Flipped(Decoupled( UInt(wIn.W))) 12 | val out = Decoupled(new Bundle{ 13 | val rem = UInt(log2Ceil(q).W) 14 | val div = UInt((wIn - log2Ceil(q)).W) 15 | }) 16 | }) 17 | 18 | when(io.out.valid && io.out.ready){ 19 | // invariants 20 | // TODO combinational for now, need scoreboarding for pipelined case 21 | assert(io.in.bits === io.out.bits.div * q.U + io.out.bits.rem) 22 | assert(io.out.bits.rem < q.U) 23 | } 24 | 25 | 26 | // TODO combinational for now 27 | io.in.ready := io.out.ready 28 | io.out.valid := io.in.valid 29 | 30 | } 31 | -------------------------------------------------------------------------------- /arithmetics/test/src/adders/AdderSpec.scala: -------------------------------------------------------------------------------- 1 | // package adders 2 | 3 | // import chisel3._ 4 | // import chisel3.experimental.BundleLiterals._ 5 | // import chiseltest._ 6 | // import org.scalatest.flatspec.AnyFlatSpec 7 | // // import org.scalacheck.Gen 8 | // // import org.scalatest.prop.TableDrivenPropertyChecks 9 | 10 | // class AdderSpec extends AnyFlatSpec with ChiselScalatestTester { 11 | // behavior of "Adder" 12 | 13 | // it should "compute Add" in { 14 | 15 | // // def add(a: BigInt, b: BigInt): BigInt = { 16 | // // a + b 17 | // // } 18 | // val rnd = scala.util.Random 19 | 20 | // // val widths = for (n <- Gen.choose(2, 128)) yield n 21 | 22 | // // forAll(widths) { w: Int => 23 | // val w = 64 24 | // val numTests = 1000 25 | // val withCarry = true 26 | // test(new Adder(w, withCarry = withCarry)) { c => 27 | // // val inputInts = 28 | // // for (n <- Gen.chooseNum(0L, Math.pow(2, w).toLong - 1, 1)) 29 | // // yield BigInt(n) 30 | 31 | // val inputs = Seq.fill(numTests)( 32 | // if (withCarry) 33 | // new AdderInput(w, withCarry) 34 | // .Lit(_.x -> BigInt(w, rnd).U, _.y -> BigInt(w, rnd).U, _.cin.get -> BigInt(1, rnd).U) 35 | // else 36 | // new AdderInput(w, withCarry) 37 | // .Lit(_.x -> BigInt(w, 
rnd).U, _.y -> BigInt(w, rnd).U) 38 | // ) 39 | // fork { 40 | // c.io.in.enqueueSeq(inputs) 41 | // }.fork { 42 | // c.io.out.expectDequeueSeq( 43 | // inputs.map(i => (i.x.litValue + i.y.litValue).U + i.cin.getOrElse(0.U)) 44 | // ) 45 | // }.join() 46 | // } 47 | // // } 48 | // } 49 | // } 50 | 51 | // //object TestMain extends App1 { 52 | // // 53 | // // private val manager = new TesterOptionsManager with HasTreadleSuite { 54 | // // treadleOptions = treadleOptions.copy( 55 | // // writeVCD = true, 56 | // // vcdShowUnderscored = true, 57 | // // ) 58 | // // commonOptions = commonOptions.copy( 59 | // // targetDirName = "test_main", 60 | // // topName = "Adder", 61 | // // ) 62 | // // } 63 | // // 64 | // // val adderWidth = 8 65 | // // 66 | // // val testResult = Driver.execute(() => new Adder(adderWidth), manager) { 67 | // // 68 | // // c => 69 | // // new PeekPokeTester(c) { 70 | // // val rng = scala.util.Random 71 | // // for (_ <- 0 to 1000) { 72 | // // val x = rng.nextInt(Math.pow(2, adderWidth).toInt - 1) 73 | // // val y = rng.nextInt(Math.pow(2, adderWidth).toInt - 1) 74 | // // val cin = rng.nextInt(2) 75 | // // poke(c.io.in.bits.x, x) 76 | // // poke(c.io.in.bits.y, y) 77 | // // poke(c.io.in.bits.cin, cin) 78 | // // step(1) 79 | // // expect(c.io.out.bits, x + y + cin, s"failed for x=$x y=$y cin=$cin") 80 | // // } 81 | // // } 82 | // // } 83 | // // assert(testResult) 84 | // // println("SUCCESS!!") 85 | // //} 86 | // // 87 | // // 88 | // //object TestPrefix extends App { 89 | // // val a = new UIntBrentKungAdderType 90 | // // 91 | // // val x = (0 until 8).toList 92 | // // 93 | // // def op(x: Int, y: Int): Int = { 94 | // // println(s"op $x , $y -> ${x + y}") 95 | // // x + y 96 | // // } 97 | // // 98 | // // println(a.prefix(x, op)) 99 | // //} 100 | // // 101 | // //object GenerateVerilogAndDiagram extends App { 102 | // // val adderWidth = 8 103 | // // 104 | // // private val manager = new ExecutionOptionsManager("genVerilog") 
with HasChiselExecutionOptions with HasFirrtlOptions { 105 | // // commonOptions = commonOptions.copy( 106 | // // targetDirName = "generate", 107 | // // topName = "Adder", 108 | // // ) 109 | // // firrtlOptions.copy( 110 | // // customTransforms = firrtlOptions.customTransforms :+ new MacroCompilerTransform 111 | // // ) 112 | // // 113 | // // } 114 | // // 115 | // // val firrtl = chisel3.Driver.emit(() => new Adder(adderWidth)) 116 | // // val config = Config( 117 | // // targetDir = manager.targetDirName, 118 | // // firrtlSource = firrtl, 119 | // // useRanking = true 120 | // // ) 121 | // // 122 | // // chisel3.Driver.execute(manager, () => new Adder(adderWidth)) 123 | // // 124 | // // FirrtlDiagrammer.run(config) 125 | // // 126 | // //} 127 | -------------------------------------------------------------------------------- /build.mill: -------------------------------------------------------------------------------- 1 | // package build 2 | import mill._, scalalib._ 3 | 4 | import $file.chest.{common => chestBuild} 5 | 6 | import $packages._ 7 | 8 | object ivys { 9 | val scalaVersion = "2.13.15" 10 | 11 | // run `mill mill.scalalib.Dependency/showUpdates` to check for updates 12 | val ivyVersions = Map( 13 | "scala" -> scalaVersion, 14 | "org.chipsalliance::chisel" -> "7.0.0-M2+503-f3670a3f-SNAPSHOT", 15 | "org.chipsalliance:::chisel-plugin" -> "$chisel", 16 | "xyz.kamyar::chiseltest" -> "7.0.0-M2-2-4e0175-DIRTY3b1b8f08-SNAPSHOT", 17 | "org.scalatest::scalatest" -> "3.2.19", 18 | "org.scalacheck::scalacheck" -> "1.18.1+", 19 | "org.scalatestplus::scalacheck-1-18" -> "3.2.19.0", 20 | "com.outr::scribe" -> "3.15.0", 21 | "com.lihaoyi::pprint" -> "0.9.0+", 22 | "com.lihaoyi::mainargs" -> "0.7.0+", 23 | "com.lihaoyi::os-lib" -> "0.10.2+", 24 | "org.scala-lang.modules::scala-parallel-collections" -> "1.0.4+", 25 | "org.scala-lang:scala-reflect" -> scalaVersion, 26 | "org.json4s::json4s-jackson" -> "4.1.0-M8+", 27 | "org.json4s::json4s-native" -> "4.1.0-M8+", 
28 | "com.chuusai::shapeless" -> "2.3.12", 29 | "org.rogach::scallop" -> "5.1.0", 30 | "com.lihaoyi::utest" -> "0.8.2", 31 | "org.scalanlp::breeze" -> "2.1.0", 32 | "com.github.jnr:jnr-ffi" -> "2.2.16", 33 | "org.typelevel::spire" -> "0.18.0", 34 | "org.nasdanika.core:drawio" -> "2024.10.0", 35 | ) 36 | } 37 | 38 | trait HasIvyVersions { 39 | def ivyVersions: Map[String, String] = ivys.ivyVersions 40 | } 41 | 42 | trait MacrosModule extends chestBuild.CommonMacrosModule with HasIvyVersions {} 43 | trait ChiselModule extends chestBuild.CommonChiselModule with HasIvyVersions {} 44 | 45 | // trait MacrosModule extends CommonScalaModule { 46 | // override def ivyDeps = super.ivyDeps() ++ Agg( 47 | // dep("scala-reflect") 48 | // ) 49 | 50 | // override def scalacOptions = super.scalacOptions() ++ Seq( 51 | // "-language:experimental.macros" 52 | // ) 53 | // } 54 | 55 | object arithmetics extends ChiselModule { 56 | override def ivyDeps = super.ivyDeps() ++ Agg( 57 | dep("mainargs"), 58 | dep("scallop"), 59 | dep("shapeless"), 60 | dep("spire"), 61 | dep("drawio"), 62 | ) 63 | 64 | override def moduleDeps = Seq(Chest) 65 | 66 | object test extends ScalaTests with TestModule.ScalaTest { 67 | override def ivyDeps = super.ivyDeps() ++ Agg( 68 | dep("chiseltest"), 69 | dep("scalatest"), 70 | dep("scalacheck"), 71 | ) 72 | } 73 | } 74 | 75 | object chestMacros extends MacrosModule { 76 | override def millSourcePath = super.millSourcePath / os.up / "chest" / "macros" 77 | override def ivyDeps = super.ivyDeps() ++ Agg( 78 | ) 79 | } 80 | 81 | object Chest extends ChiselModule { 82 | override def millSourcePath = super.millSourcePath / os.up / "chest" / "chest" 83 | 84 | override def ivyDeps = super.ivyDeps() ++ Agg( 85 | dep("pprint"), 86 | dep("scallop"), 87 | dep("mainargs"), 88 | dep("scribe"), 89 | dep("json4s-jackson"), 90 | dep("json4s-native"), 91 | ) 92 | override def moduleDeps = scala.Seq(chestMacros) 93 | } 94 | 
#!/usr/bin/env python3
"""Inline SystemVerilog `bind` directives directly into their target modules.

Reads a bind file (default: `bindfile.sv` next to the input file), collects
every `bind <module> <instantiation>;` directive, and splices the
instantiations into the body of the matching `module ... endmodule` region of
the input SV file, wrapped in an ``ifndef SYNTHESIS`` guard together with an
`initial assume(reset);` statement.
"""
from pathlib import Path
import re
import argparse
from textwrap import indent


def parse_binds(bind_content: str) -> dict[str, list[str]]:
    """Map each bind-target module name to its list of bound instantiations."""
    instances: dict[str, list[str]] = {}
    for module_name, submodule_inst in re.findall(
        r"bind\s+(\w+)\s+([^;]+;)", bind_content, re.MULTILINE | re.DOTALL
    ):
        print(f"found bind for {module_name}")
        instances.setdefault(module_name, []).append(submodule_inst)
    return instances


def inject_binds(content: str, instances: dict[str, list[str]]) -> str:
    """Splice the bound instantiations into their target modules in `content`.

    Raises AssertionError if a target module is not matched exactly once.
    """
    for module_name, submodule_instances in instances.items():
        inject = "\n\n".join(indent(si, " ") for si in submodule_instances)
        inject = " initial assume(reset);\n\n" + inject
        inject = (
            "`ifndef SYNTHESIS\n"
            # + "`ifdef FORMAL\n"
            + inject
            # + "\n`endif // FORMAL"
            + "\n`endif // not SYNTHESIS"
        )
        # BUGFIX: the original pattern used greedy `.*` with DOTALL, which made
        # the match anchor on the *last* `endmodule` in the file, so the code
        # was injected into the wrong module whenever the target was not the
        # final module. Non-greedy quantifiers patch the first matching module
        # header and its *own* `endmodule`. `re.escape` keeps any regex
        # metacharacter in a module name from being interpreted.
        (content, n) = re.subn(
            r"(.*?\bmodule\s+" + re.escape(module_name) + r"\s*\(.*?)(endmodule)",
            rf"\1\n{inject}\n\2",
            content,
            flags=re.MULTILINE | re.DOTALL,
        )
        assert n == 1, f"expected exactly one definition of module {module_name}, found {n}"
    return content


def main() -> None:
    parser = argparse.ArgumentParser(description="Inline bind file")
    parser.add_argument("file", help="SV file to convert", type=Path)
    parser.add_argument("-b", "--bindfile", type=Path)
    parser.add_argument("-o", "--output", help="Output file")
    parser.add_argument("-i", "--inplace", help="Overwrite the input file", action="store_true")
    args = parser.parse_args()

    bindfile = args.bindfile or (args.file.parent / "bindfile.sv")

    with open(bindfile, "r", encoding="utf8") as f:
        bind_content = f.read()
    instances = parse_binds(bind_content)

    with open(args.file, "r", encoding="utf8") as f:
        content = f.read()
    content = inject_binds(content, instances)

    if args.output:
        assert not args.inplace, "Cannot use -o and -i together"
        with open(args.output, "w", encoding="utf8") as f:
            f.write(content)
    elif args.inplace:
        with open(args.file, "w", encoding="utf8") as f:
            f.write(content)
    else:
        print(content)


if __name__ == "__main__":
    main()
48 | 49 | # If not already set, read .mill-version file 50 | if [ -z "${MILL_VERSION}" ] ; then 51 | if [ -f ".mill-version" ] ; then 52 | MILL_VERSION="$(tr '\r' '\n' < .mill-version | head -n 1 2> /dev/null)" 53 | elif [ -f ".config/mill-version" ] ; then 54 | MILL_VERSION="$(tr '\r' '\n' < .config/mill-version | head -n 1 2> /dev/null)" 55 | fi 56 | fi 57 | 58 | MILL_USER_CACHE_DIR="${XDG_CACHE_HOME:-${HOME}/.cache}/mill" 59 | 60 | if [ -z "${MILL_DOWNLOAD_PATH}" ] ; then 61 | MILL_DOWNLOAD_PATH="${MILL_USER_CACHE_DIR}/download" 62 | fi 63 | 64 | # If not already set, try to fetch newest from Github 65 | if [ -z "${MILL_VERSION}" ] ; then 66 | # TODO: try to load latest version from release page 67 | echo "No mill version specified." 1>&2 68 | echo "You should provide a version via '.mill-version' file or --mill-version option." 1>&2 69 | 70 | mkdir -p "${MILL_DOWNLOAD_PATH}" 71 | LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest" 2>/dev/null || ( 72 | # we might be on OSX or BSD which don't have -d option for touch 73 | # but probably a -A [-][[hh]mm]SS 74 | touch "${MILL_DOWNLOAD_PATH}/.expire_latest"; touch -A -010000 "${MILL_DOWNLOAD_PATH}/.expire_latest" 75 | ) || ( 76 | # in case we still failed, we retry the first touch command with the intention 77 | # to show the (previously suppressed) error message 78 | LANG=C touch -d '1 hour ago' "${MILL_DOWNLOAD_PATH}/.expire_latest" 79 | ) 80 | 81 | # POSIX shell variant of bash's -nt operator, see https://unix.stackexchange.com/a/449744/6993 82 | # if [ "${MILL_DOWNLOAD_PATH}/.latest" -nt "${MILL_DOWNLOAD_PATH}/.expire_latest" ] ; then 83 | if [ -n "$(find -L "${MILL_DOWNLOAD_PATH}/.latest" -prune -newer "${MILL_DOWNLOAD_PATH}/.expire_latest")" ]; then 84 | # we know a current latest version 85 | MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null) 86 | fi 87 | 88 | if [ -z "${MILL_VERSION}" ] ; then 89 | # we don't know a current latest version 90 | echo "Retrieving latest mill 
version ..." 1>&2 91 | LANG=C ${CURL_CMD} -s -i -f -I ${MILL_REPO_URL}/releases/latest 2> /dev/null | grep --ignore-case Location: | sed s'/^.*tag\///' | tr -d '\r\n' > "${MILL_DOWNLOAD_PATH}/.latest" 92 | MILL_VERSION=$(head -n 1 "${MILL_DOWNLOAD_PATH}"/.latest 2> /dev/null) 93 | fi 94 | 95 | if [ -z "${MILL_VERSION}" ] ; then 96 | # Last resort 97 | MILL_VERSION="${DEFAULT_MILL_VERSION}" 98 | echo "Falling back to hardcoded mill version ${MILL_VERSION}" 1>&2 99 | else 100 | echo "Using mill version ${MILL_VERSION}" 1>&2 101 | fi 102 | fi 103 | 104 | MILL_NATIVE_SUFFIX="-native" 105 | FULL_MILL_VERSION=$MILL_VERSION 106 | ARTIFACT_SUFFIX="" 107 | case "$MILL_VERSION" in 108 | *"$MILL_NATIVE_SUFFIX") 109 | MILL_VERSION=${MILL_VERSION%"$MILL_NATIVE_SUFFIX"} 110 | if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" = "Linux" ]; then 111 | if [ "$(uname -m)" = "aarch64" ]; then 112 | ARTIFACT_SUFFIX="-native-linux-aarch64" 113 | else 114 | ARTIFACT_SUFFIX="-native-linux-amd64" 115 | fi 116 | elif [ "$(uname)" = "Darwin" ]; then 117 | if [ "$(uname -m)" = "arm64" ]; then 118 | ARTIFACT_SUFFIX="-native-mac-aarch64" 119 | else 120 | ARTIFACT_SUFFIX="-native-mac-amd64" 121 | fi 122 | else 123 | echo "This native mill launcher supports only Linux and macOS." 1>&2 124 | exit 1 125 | fi 126 | esac 127 | 128 | MILL="${MILL_DOWNLOAD_PATH}/${FULL_MILL_VERSION}" 129 | 130 | try_to_use_system_mill() { 131 | if [ "$(uname)" != "Linux" ]; then 132 | return 0 133 | fi 134 | 135 | MILL_IN_PATH="$(command -v mill || true)" 136 | 137 | if [ -z "${MILL_IN_PATH}" ]; then 138 | return 0 139 | fi 140 | 141 | SYSTEM_MILL_FIRST_TWO_BYTES=$(head --bytes=2 "${MILL_IN_PATH}") 142 | if [ "${SYSTEM_MILL_FIRST_TWO_BYTES}" = "#!" ]; then 143 | # MILL_IN_PATH is (very likely) a shell script and not the mill 144 | # executable, ignore it. 
145 | return 0 146 | fi 147 | 148 | SYSTEM_MILL_PATH=$(readlink -e "${MILL_IN_PATH}") 149 | SYSTEM_MILL_SIZE=$(stat --format=%s "${SYSTEM_MILL_PATH}") 150 | SYSTEM_MILL_MTIME=$(stat --format=%y "${SYSTEM_MILL_PATH}") 151 | 152 | if [ ! -d "${MILL_USER_CACHE_DIR}" ]; then 153 | mkdir -p "${MILL_USER_CACHE_DIR}" 154 | fi 155 | 156 | SYSTEM_MILL_INFO_FILE="${MILL_USER_CACHE_DIR}/system-mill-info" 157 | if [ -f "${SYSTEM_MILL_INFO_FILE}" ]; then 158 | parseSystemMillInfo() { 159 | LINE_NUMBER="${1}" 160 | # Select the line number of the SYSTEM_MILL_INFO_FILE, cut the 161 | # variable definition in that line in two halves and return 162 | # the value, and finally remove the quotes. 163 | sed -n "${LINE_NUMBER}p" "${SYSTEM_MILL_INFO_FILE}" |\ 164 | cut -d= -f2 |\ 165 | sed 's/"\(.*\)"/\1/' 166 | } 167 | 168 | CACHED_SYSTEM_MILL_PATH=$(parseSystemMillInfo 1) 169 | CACHED_SYSTEM_MILL_VERSION=$(parseSystemMillInfo 2) 170 | CACHED_SYSTEM_MILL_SIZE=$(parseSystemMillInfo 3) 171 | CACHED_SYSTEM_MILL_MTIME=$(parseSystemMillInfo 4) 172 | 173 | if [ "${SYSTEM_MILL_PATH}" = "${CACHED_SYSTEM_MILL_PATH}" ] \ 174 | && [ "${SYSTEM_MILL_SIZE}" = "${CACHED_SYSTEM_MILL_SIZE}" ] \ 175 | && [ "${SYSTEM_MILL_MTIME}" = "${CACHED_SYSTEM_MILL_MTIME}" ]; then 176 | if [ "${CACHED_SYSTEM_MILL_VERSION}" = "${MILL_VERSION}" ]; then 177 | MILL="${SYSTEM_MILL_PATH}" 178 | return 0 179 | else 180 | return 0 181 | fi 182 | fi 183 | fi 184 | 185 | SYSTEM_MILL_VERSION=$(${SYSTEM_MILL_PATH} --version | head -n1 | sed -n 's/^Mill.*version \(.*\)/\1/p') 186 | 187 | cat < "${SYSTEM_MILL_INFO_FILE}" 188 | CACHED_SYSTEM_MILL_PATH="${SYSTEM_MILL_PATH}" 189 | CACHED_SYSTEM_MILL_VERSION="${SYSTEM_MILL_VERSION}" 190 | CACHED_SYSTEM_MILL_SIZE="${SYSTEM_MILL_SIZE}" 191 | CACHED_SYSTEM_MILL_MTIME="${SYSTEM_MILL_MTIME}" 192 | EOF 193 | 194 | if [ "${SYSTEM_MILL_VERSION}" = "${MILL_VERSION}" ]; then 195 | MILL="${SYSTEM_MILL_PATH}" 196 | fi 197 | } 198 | try_to_use_system_mill 199 | 200 | # If not already downloaded, 
download it 201 | if [ ! -s "${MILL}" ] ; then 202 | 203 | # support old non-XDG download dir 204 | MILL_OLD_DOWNLOAD_PATH="${HOME}/.mill/download" 205 | OLD_MILL="${MILL_OLD_DOWNLOAD_PATH}/${MILL_VERSION}" 206 | if [ -x "${OLD_MILL}" ] ; then 207 | MILL="${OLD_MILL}" 208 | else 209 | case $MILL_VERSION in 210 | 0.0.* | 0.1.* | 0.2.* | 0.3.* | 0.4.* ) 211 | DOWNLOAD_SUFFIX="" 212 | DOWNLOAD_FROM_MAVEN=0 213 | ;; 214 | 0.5.* | 0.6.* | 0.7.* | 0.8.* | 0.9.* | 0.10.* | 0.11.0-M* ) 215 | DOWNLOAD_SUFFIX="-assembly" 216 | DOWNLOAD_FROM_MAVEN=0 217 | ;; 218 | *) 219 | DOWNLOAD_SUFFIX="-assembly" 220 | DOWNLOAD_FROM_MAVEN=1 221 | ;; 222 | esac 223 | 224 | DOWNLOAD_FILE=$(mktemp mill.XXXXXX) 225 | 226 | if [ "$DOWNLOAD_FROM_MAVEN" = "1" ] ; then 227 | DOWNLOAD_URL="https://repo1.maven.org/maven2/com/lihaoyi/mill-dist${ARTIFACT_SUFFIX}/${MILL_VERSION}/mill-dist${ARTIFACT_SUFFIX}-${MILL_VERSION}.jar" 228 | else 229 | MILL_VERSION_TAG=$(echo "$MILL_VERSION" | sed -E 's/([^-]+)(-M[0-9]+)?(-.*)?/\1\2/') 230 | DOWNLOAD_URL="${GITHUB_RELEASE_CDN}${MILL_REPO_URL}/releases/download/${MILL_VERSION_TAG}/${MILL_VERSION}${DOWNLOAD_SUFFIX}" 231 | unset MILL_VERSION_TAG 232 | fi 233 | 234 | # TODO: handle command not found 235 | echo "Downloading mill ${MILL_VERSION} from ${DOWNLOAD_URL} ..." 
#!/usr/bin/env python3
"""Plot PROLEAD leakage-evaluation results: -log10(p) versus simulation count."""
import argparse
from pathlib import Path

import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns


# NOTE(review): rich/time imports retained for compatibility; the Live-table
# demo loop that used them (10 iterations of time.sleep(1) printing a dummy
# table) was leftover debug code and has been removed.
from rich.console import Console
from rich.table import Table
from rich.live import Live
import time


argparser = argparse.ArgumentParser(description="Plot ProLead")
argparser.add_argument("data", help="Data file", type=Path)
argparser.add_argument("--output", default=None, help="Output file", type=Path)
argparser.add_argument("--dpi", default=600, help="DPI of the output image.", type=int)

args = argparser.parse_args()

console = Console()

data_file = args.data
assert data_file and isinstance(data_file, Path)

sns.set_theme(style="whitegrid", context="paper")
plt.figure()

# BUGFIX: `with np.load(...)` crashes on a plain `.npy` file because an
# ndarray is not a context manager, which made the non-NpzFile fallback below
# unreachable. Load first, then only use the context manager for archives.
data = np.load(data_file)
if isinstance(data, np.lib.npyio.NpzFile):
    with data:
        # Take the first named array stored in the archive.
        data_np = data[data.files[0]]
else:
    data_np = data

print(f"Loaded data {data_np.shape}")

# Column 0: number of simulations; column 1: -log10(p) of the statistical test.
x = data_np[:, 0]
y = data_np[:, 1]

num_sims = np.max(x)
max_p_log = np.max(y)

# Scale the x axis to a thousands multiple for readable tick labels.
if num_sims > 10_000_000_000:
    x_scale = 1_000_000_000
elif num_sims > 10_000_000:
    x_scale = 1_000_000
elif num_sims > 10_000:
    x_scale = 1_000
else:
    x_scale = 1

if x_scale > 1:
    x = x / x_scale

plot = sns.lineplot(
    x=x,
    y=y,
    # kind="line",
    label=r"$-\log_{10}(p)$",
)
# Dashed lines: worst observed p-value, and the detection threshold at
# -log10(p) = 5 (i.e. p = 1e-5).
plot.axhline(y=max_p_log, linestyle="--", label="Minimum p-value", alpha=0.5)
plot.axhline(y=5, color="r", linestyle="--", label="Threshold")
plot.set_xlabel("Number of Simulations" + (fr" ($\times${int(x_scale):,})" if x_scale > 1 else ""))
plot.set_ylabel(r"$-\log_{10}(p)$")
plot.legend(loc="best", fancybox=True, framealpha=0.9)
plt.tight_layout()

fig_file = args.output or data_file.with_suffix(".png")

print(f"Saving plot to {fig_file}")
plt.savefig(fig_file, dpi=args.dpi)
plt.show()
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import argparse 3 | from dataclasses import dataclass 4 | import json 5 | import os 6 | from pathlib import Path 7 | import random 8 | import re 9 | import shutil 10 | import subprocess 11 | import time 12 | from typing import Literal, Optional, OrderedDict, Sequence, Union 13 | 14 | import numpy as np 15 | import matplotlib.pyplot as plt 16 | import rich 17 | import rich.text 18 | import seaborn as sns 19 | from quantiphy import Quantity 20 | from rich.console import Console 21 | from rich.table import Table 22 | from rich.live import Live 23 | 24 | console = Console() 25 | 26 | # Synthesize RTL sources using yosys and then run PROLEAD 27 | 28 | argparser = argparse.ArgumentParser(description="Run PROLEAD") 29 | 30 | argparser.add_argument("source_files", nargs="*", default=[], type=Path, help="Source files") 31 | argparser.add_argument( 32 | "--sources-list", default=None, type=Path, help="File containing list of source files" 33 | ) 34 | argparser.add_argument("--netlist", type=Path, help="Netlist file") 35 | argparser.add_argument("-t", "--top-module", default=None, help="Top module") 36 | argparser.add_argument("--force-synth", help="Force synthesis.", action="store_true") 37 | argparser.add_argument( 38 | "--quiet-synth", 39 | action=argparse.BooleanOptionalAction, 40 | type=bool, 41 | default=True, 42 | help="Supres synthesis output", 43 | ) 44 | argparser.add_argument( 45 | "--prolead-root-dir", help="Path to PROLEAD source directory", type=Path, default=None 46 | ) 47 | argparser.add_argument("--yosys-bin", help="Path to yosys binary", default="yosys") 48 | argparser.add_argument( 49 | "--yosys-verilog-lib", help="Path to Verilog cell library", default=None, type=Path 50 | ) 51 | argparser.add_argument("--yosys-lib", help="Path to .lib cell library", default=None, type=Path) 52 | argparser.add_argument("--prolead-bin", help="Path to PROLEAD 
binary", default=None) 53 | argparser.add_argument("--library-json", help="Path to library JSON file", type=Path, default=None) 54 | argparser.add_argument("--library-name", help="Library name", type=str, default="custom") 55 | argparser.add_argument("--random-seed", default=None, type=int, help="Random seed") 56 | argparser.add_argument("-d", "--order", default=1, type=int, help="SCA order") 57 | argparser.add_argument( 58 | "-N", "--num-simulations", default=Quantity("10 M"), type=Quantity, help="Number of simulations" 59 | ) 60 | argparser.add_argument( 61 | "-c", "--sim-cycles", type=int, default=None, help="Number of simulation cycles" 62 | ) 63 | argparser.add_argument( 64 | "--transitional", 65 | action=argparse.BooleanOptionalAction, 66 | type=bool, 67 | default=True, 68 | help="Enable transitional leakage", 69 | ) 70 | argparser.add_argument( 71 | "--compact", 72 | action=argparse.BooleanOptionalAction, 73 | type=bool, 74 | default=False, 75 | help="""Use compact distributions. Only the Hamming weight of the observations is stored, 76 | resulting in a more concise table. 
Reduces memory usage by storing less detailed data, 77 | which means that leakages can be overseen compared to normal mode.""", 78 | ) 79 | argparser.add_argument( 80 | "--ports-json", 81 | type=Path, 82 | default=None, 83 | help="Path to json file containing port information", 84 | ) 85 | argparser.add_argument( 86 | "--opt", 87 | help="Run optimizations during synthesis", 88 | default="none", 89 | choices=["full", "flatten", "none"], 90 | ) 91 | argparser.add_argument( 92 | "--show-figure", 93 | action="store_true", 94 | help="Show figure", 95 | ) 96 | argparser.add_argument( 97 | "--minimize-probing-sets", 98 | choices=["trivial", "aggressive", "no"], 99 | default="trivial", 100 | help="Minimize probing sets", 101 | ) 102 | argparser.add_argument( 103 | "--simulations-per-step", 104 | type=Quantity, 105 | default=None, 106 | help="Number of simulations per step", 107 | ) 108 | argparser.add_argument( 109 | "--pretty", 110 | action=argparse.BooleanOptionalAction, 111 | type=bool, 112 | default=True, 113 | help="Pretty print parsed output", 114 | ) 115 | argparser.add_argument( 116 | "--probing-sets-per-step", 117 | type=Quantity, 118 | default=None, 119 | help="Specifies the number of probing sets PROLEAD should evaluate. In each step, PROLEAD assesses the specified number of probing sets across all simulations. Once evaluated, the probing sets will be deleted.", 120 | ) 121 | argparser.add_argument( 122 | "--num-cores", 123 | type=str, 124 | default=None, 125 | help="Maximum number of CPU cores to use", 126 | ) 127 | argparser.add_argument( 128 | "--prolead-config", 129 | type=Path, 130 | default=None, 131 | help="Path to PROLEAD config file. 
def synthesize(
    yosys_bin: Union[Path, str],
    yosys_run_dir: Path,
    source_files: list[Path],
    top_module: Optional[str],
    verilog_lib: Path,
    liberty_lib: Path,
    verilog_netlist: Path,
    defines: Optional[dict[str, Optional[str]]] = None,
    parameters: Optional[dict[str, str]] = None,
    opt_flatten: bool = True,
    opt_full: bool = True,
    split_nets: bool = False,
    quiet: bool = True,
):
    """Synthesize the design with yosys and write a gate-level Verilog netlist.

    Builds a yosys script (read sources -> hierarchy/proc -> synth -> abc tech
    mapping against ``liberty_lib`` -> cleanup/flatten -> write netlist), writes
    it to ``yosys_run_dir/yosys_script.ys`` and runs yosys on it.

    Args:
        yosys_bin: yosys executable (path or command name).
        yosys_run_dir: working directory for yosys; created if missing.
        source_files: ``.sv`` / ``.v`` / ``.vhd(l)`` inputs; anything else raises ValueError.
        top_module: top module name; if None, ``hierarchy -auto-top`` is used.
        verilog_lib: Verilog model of the cell library; copied next to the netlist.
        liberty_lib: liberty file used for ``synth``/``abc``/``dfflibmap``.
        verilog_netlist: output netlist path; a ``.json`` sibling is also written.
        defines: Verilog/slang preprocessor defines (value None -> bare ``-D NAME``).
        parameters: top-level parameter overrides (``hierarchy -chparam``).
        opt_flatten: pass ``-flatten`` to ``synth``.
        opt_full: run aggressive ``opt -full`` passes between mapping steps.
        split_nets: run ``splitnets -driver`` so every net has a single driver
            (PROLEAD needs this).
        quiet: suppress yosys stdout; log goes to ``yosys.log``.

    Raises:
        ValueError: on an unsupported source-file extension.
        subprocess.CalledProcessError: if yosys exits non-zero.
        AssertionError: if the expected output files were not produced.
    """
    yosys_run_dir.mkdir(parents=True, exist_ok=True)

    liberty_lib = liberty_lib.resolve()
    verilog_netlist = verilog_netlist.absolute()
    netlist_dir = verilog_netlist.parent

    # copy verilog_lib next to the netlist (simulators/PROLEAD expect it there)
    if verilog_lib:
        verilog_lib = verilog_lib.resolve()
        shutil.copyfile(verilog_lib, netlist_dir / verilog_lib.name, follow_symlinks=True)

    json_netlist = verilog_netlist.with_suffix(".json")

    yosys_script = []

    vhdl_files = []
    ghdl_args = ["--std=08"]

    if defines is None:
        defines = {}

    read_verilog_args = [
        "-noautowire",
        "-defer",
        "-noassert",
        "-noassume",
        "-nolatches",
    ]

    # defines are passed both to read_verilog and to the slang frontend
    define_args = [f"-D{k}" if v is None else f"-D{k}={v}" for k, v in defines.items()]
    read_verilog_args += define_args
    slang_args = ["--extern-modules", "--best-effort-hierarchy"]
    slang_args += define_args

    # use the yosys-slang plugin for SystemVerilog instead of read_verilog -sv
    sv_slang = True
    has_sv_files = any(f.suffix == ".sv" for f in source_files)

    for src in source_files:
        src = Path(src)
        if src.suffix == ".sv":
            if sv_slang:
                yosys_script.append(f"read_slang {' '.join(slang_args + [str(src)])}")
            else:
                yosys_script.append(
                    f"read_verilog {' '.join(read_verilog_args + ['-sv', str(src)])}"
                )
        elif src.suffix == ".v":
            yosys_script.append(f"read_verilog {' '.join(read_verilog_args + [str(src)])}")
        elif src.suffix in (".vhd", ".vhdl"):
            # VHDL files are elaborated together in a single ghdl call below
            vhdl_files.append(src)
        else:
            raise ValueError(f"Unsupported file type: {src}")
    if vhdl_files:
        yosys_script += [
            f"ghdl {' '.join(ghdl_args)} {' '.join(map(str, vhdl_files))} -e",
        ]

    hierarchy_args = ["-check"]
    if top_module:
        hierarchy_args += ["-top", top_module]
    else:
        hierarchy_args.append("-auto-top")

    if parameters is not None:
        for k, v in parameters.items():
            hierarchy_args += ["-chparam", k, str(v)]

    yosys_script += [
        "hierarchy " + " ".join(hierarchy_args),
        "opt_clean -purge",
        "proc",
        "opt_clean -purge",
        "check -assert",
    ]

    # dump the elaborated RTL before technology mapping, for debugging
    yosys_script += [
        f"write_verilog -noattr {netlist_dir / 'yosys_rtl.v'}",
        f"write_json {netlist_dir / 'yosys_rtl.json'}",
    ]
    synth_args = [
        # "-noabc",
        # "-noshare",
        # "-nordff",
    ]

    if opt_flatten:
        synth_args.append("-flatten")
    yosys_script += [
        # "setattr -set keep_hierarchy 1",
        # f"read_verilog -lib {verilog_lib}",
        f"read_liberty -lib {liberty_lib}",
        f"synth {' '.join(synth_args)}",
        "opt_clean -purge",
    ]
    if opt_full:
        yosys_script.append("opt -full -purge")
    else:
        yosys_script.append("opt_clean -purge")

    abc_flags = ["-liberty", liberty_lib]

    # if opt_full:
    #     abc_flags += ["-dff"]
    # else:
    #     abc_flags += ["-keepff", "-fast"]
    # abc_flags += ["-fast"]
    # abc_flags += ["-script", "+strash;&ifraig,-x;scorr;dc2;strash;&get,-n;&dch,-f;&nf,{D};&put"]
    abc_flags += [
        "-script",
        "+strash;map,{D}",
    ]

    # combinational mapping via abc; flip-flops are mapped by dfflibmap
    yosys_script += [
        "write_verilog pre_abc_dump.v",
        # "async2sync -nolower",
        f"dfflibmap -prepare -liberty {liberty_lib}",
        "opt_clean -purge",
        f"abc " + " ".join(str(e) for e in abc_flags),
        "opt_clean -purge",
        f"dfflibmap -liberty {liberty_lib}",
        "opt_clean -purge",
    ]

    if opt_full:
        yosys_script += [
            "opt -full -purge",
            "opt -full -fine -purge",
            "opt -full -fine -sat -purge",
            "opt -full -purge",
        ]

    yosys_script += [
        "setundef -zero",
        "opt -full -purge" if opt_full else "opt_clean -purge",
        "setattr -set keep_hierarchy 0",
        "opt_clean -purge",
        "flatten",
        "opt_clean -purge",
        # "check -assert -noinit -mapped",
    ]

    if opt_full:
        yosys_script += [
            "opt -full -purge",
            "opt -full -fine -purge",
            "opt -full -fine -sat -purge",
            "opt -full -purge",
        ]
    else:
        yosys_script.append("opt_clean -purge")

    if split_nets:
        # one driver per net; "___" separates the base name from the bit index
        yosys_script += ["splitnets -driver -format ___"]
        # yosys_script += ["splitnets -driver"]

    # if top_module:
    #     yosys_script.append(f"select {top_module}")

    if opt_full:
        yosys_script.append("opt -full -purge")
    else:
        yosys_script.append("opt_clean -purge")

    write_verilog_args = [
        "-noexpr",
        "-noattr",
        "-nodec",
        "-nostr",
        "-simple-lhs",
    ]
    write_verilog_args.append(str(verilog_netlist))

    yosys_script += [
        f"write_json {json_netlist}",
        # "check -assert -noinit -initdrv",
        f"stat -liberty {liberty_lib}",
        "check -mapped -noinit -initdrv",
        "check -assert -noinit -initdrv",
        f"write_verilog {' '.join(write_verilog_args)}",
    ]
    yosys_cmd = [yosys_bin, "-Q", "-T"]
    if quiet:
        yosys_cmd.append("-q")
        yosys_cmd += ["-l", "yosys.log"]
    # else:
    #     yosys_cmd.append("-g")
    if vhdl_files:
        yosys_cmd += ["-m", "ghdl"]
    if sv_slang and has_sv_files:
        yosys_cmd += ["-m", "slang"]

    # write yosys_script to file
    yosys_script_file = yosys_run_dir / "yosys_script.ys"
    with open(yosys_script_file, "w") as f:
        f.write("\n".join(yosys_script))
    # yosys_cmd += ["-p", "; ".join(yosys_script)]
    yosys_cmd += ["-s", yosys_script_file.relative_to(yosys_run_dir)]

    print("\n" + "=" * 20 + " YOSYS SYNTHESIS " + "=" * 20)
    yosys_cmd = [str(c) for c in yosys_cmd]
    print(f"** Running {' '.join(yosys_cmd)}\n")
    subprocess.run(
        yosys_cmd,
        cwd=yosys_run_dir,
        check=True,
    )
    assert verilog_netlist.exists(), f"Failed to generate netlist {verilog_netlist}"
    assert json_netlist.exists(), f"Failed to generate json netlist {json_netlist}"
    print(f"** Generated netlist: {verilog_netlist}\n")
    print("" + "=" * 56 + "\n")
# Patterns to split a port name like "data[3]" or "data[7:4]" into base name
# and bit range. (Group names reconstructed; the flattened source lost them.)
NAME_FROM_PORT_SINGLE_REGEX = re.compile(r"^(?P<name>.*)\[(?P<start>\d+)\]$")
NAME_FROM_PORT_RANGE_REGEX = re.compile(r"^(?P<name>.*)\[(?P<end>\d+):(?P<start>\d+)\]$")


@dataclass
class Port:
    """A top-level port of the design under test, as seen by PROLEAD.

    ``name`` may carry an embedded bit range (e.g. ``"a[7:0]"``); accessing
    ``name_bits`` normalizes it, splitting the range into ``width``/``start_bit``.
    """

    name: str
    width: Optional[int] = None  # none for scalar 1-bit ports?
    # role of the port: "clock", "reset", "random", "end", or None for plain data
    type: Optional[str] = None
    direction: Optional[str] = "input"
    # fixed/forced value; "fixed" means "pick a random constant once"
    value: Union[int, str, None] = None
    share_id: Optional[int] = None  # none for non-shared ports
    start_bit: Optional[int] = None

    @property
    def num_bits(self):
        """Bit count of the port; derived from the name's range if width is unset."""
        if self.width is None:
            _, end, start = Port.range_from_name(self.name)
            return end - start + 1 if start is not None else 1
        return max(self.width, 1)

    @property
    def name_bits(self):
        """Canonical "name[msb:lsb]" form (or the bare name for scalar ports).

        Side effect: normalizes ``name``/``width``/``start_bit`` on first access
        when the range was embedded in ``name``.
        """
        if self.start_bit is None:
            name, end, start = Port.range_from_name(self.name)
            self.name = name
            self.start_bit = start
            if not self.width:
                self.width = end - start + 1 if start is not None else 1
        # multi-bit port with no explicit range: assume it starts at bit 0
        if self.start_bit is None and self.num_bits > 1:
            self.start_bit = 0
        if self.start_bit is not None:
            if self.width is None:
                self.width = 1
            return f"{self.name}[{self.start_bit + self.width - 1}:{self.start_bit}]"
        else:
            return self.name

    @property
    def is_input(self):
        return self.direction is None or self.direction == "input"

    @property
    def is_output(self):
        return self.direction == "output"

    @property
    def value_str(self):
        """The port's value rendered as a Verilog sized literal (or None)."""
        if self.value == "fixed":
            # Draw a random constant covering the port's full value range.
            # Fix: was randint(0, self.num_bits - 1), which confused the bit
            # COUNT with the maximum VALUE; generate_config draws its fixed
            # group value as randint(0, 2**bits - 1) for the same purpose.
            self.value = random.randint(0, 2**self.num_bits - 1)
        elif isinstance(self.value, str) and self.value.isnumeric():
            self.value = int(self.value)
        if isinstance(self.value, int):
            # return f"{self.num_bits}'b{self.value:0{self.num_bits}b}"
            return verilog_value(self.value, self.width)
        return self.value

    @classmethod
    def range_from_name(cls, name: str) -> tuple[str, int, Optional[int]]:
        """Split "base[i]" / "base[end:start]" into (base, end, start).

        Returns (name, 0, None) when the name carries no bit range.
        """
        m = NAME_FROM_PORT_SINGLE_REGEX.match(name)
        if m:
            name = m.group("name")
            start = int(m.group("start"))
            return (name, start, start)
        m = NAME_FROM_PORT_RANGE_REGEX.match(name)
        if m:
            start = int(m.group("start"))
            end = int(m.group("end"))
            name = m.group("name")
            return (name, end, start)
        return (name, 0, None)
def get_top_module_and_ports(netlist: dict) -> tuple[Optional[str], list]:
    """Extract the top module name and its port list from a yosys JSON netlist.

    Args:
        netlist: parsed yosys ``write_json`` output (must contain "modules").

    Returns:
        ``(top_name, ports)`` where each port is a dict with keys
        "name", "width" (bit count) and "direction" ("input"/"output").
        ``(None, [])`` when no module carries the "top" attribute.
    """
    modules = netlist["modules"]
    assert modules and isinstance(modules, dict), "Failed to parse json netlist"
    top_name = None
    top_module = None
    ports = []
    for name, module in modules.items():
        # yosys marks the selected top with attribute "top" = 1 (often as a
        # zero-padded bit string, hence the int() conversion)
        if int(module.get("attributes", {}).get("top", "0")) == 1:
            top_name = name
            top_module = module
            break
    if top_module:
        ports_dict: dict = top_module.get("ports", {})
        for name, port in ports_dict.items():
            direction = port.get("direction")
            assert direction in ["input", "output"], f"Invalid direction: {direction}"
            # width is the number of bit indices attached to the port
            width = len(port["bits"])
            ports.append({"name": name, "width": width, "direction": direction})

    return top_name, ports


def parse_json_netlist(json_netlist_file: Path) -> tuple[Optional[str], list]:
    """Load a yosys JSON netlist file and return (top_name, ports)."""
    with open(json_netlist_file, "r") as f:
        netlist = json.load(f)
    return get_top_module_and_ports(netlist)


def format_time(seconds: float) -> str:
    """Format a duration as "H:MM:SS" (hours space-padded to two characters)."""
    seconds = int(seconds)
    return "{:2}:{:02}:{:02}".format(seconds // 3600, seconds % 3600 // 60, seconds % 60)
def run_prolead(
    prolead_bin: Union[str, Path],
    prolead_run_dir: Path,
    netlist_file: Path,
    top_module: str,
    library_name: str,
    library_json: Path,
    sca_config: dict,
    config_file: Path,
    show_figure: bool = False,
    pretty: bool = True,
    result_folder: Union[str, Path] = "results",
):
    """Run the PROLEAD binary on a synthesized netlist and report its progress.

    Streams PROLEAD's stdout, parses its tabular progress lines, renders them
    live (rich table when ``pretty``), records (n_simulations, -log10(p))
    samples to an ``.npz`` file, writes a CSV of leaking signals, and finally
    plots -log10(p) over the number of simulations.

    Args:
        prolead_bin: PROLEAD executable.
        prolead_run_dir: working directory for the PROLEAD process.
        netlist_file: gate-level Verilog netlist (must exist).
        top_module: design top; passed as --modulename when set.
        library_name / library_json: cell library description for PROLEAD.
        sca_config: side-channel analysis settings (read here only to label the
            plot with "transitional_leakage").
        config_file: PROLEAD JSON config (see generate_config).
        show_figure: also open the matplotlib window after saving the plot.
        pretty: render progress in a rich Live table instead of raw lines.
        result_folder: PROLEAD --resultfolder; created if given as a Path.

    Exits the process with code 1 if PROLEAD fails (non-zero return code and
    not terminated by us).
    """

    assert netlist_file.exists(), f"Netlist file {netlist_file} does not exist"

    print(f"** Running PROLEAD in {prolead_run_dir.absolute()}")

    # config_file = config_file.relative_to(prolead_run_dir)
    config_file = config_file.absolute()

    library_json = library_json.absolute()

    if not netlist_file.is_absolute():
        netlist_file = netlist_file.resolve()  # .relative_to(prolead_run_dir)

    if isinstance(result_folder, Path):
        result_folder.mkdir(parents=True, exist_ok=True)
        if not result_folder.is_absolute():
            result_folder = result_folder.relative_to(prolead_run_dir)

    prolead_cmd = [
        prolead_bin.resolve() if isinstance(prolead_bin, Path) else prolead_bin,
        "--libraryfile",
        library_json,
        "--libraryname",
        library_name,
        "--designfile",
        netlist_file,
        "--configfile",
        config_file,
        "--resultfolder",
        result_folder,
    ]
    if top_module:
        prolead_cmd += ["--modulename", top_module]

    print(f"** Running {' '.join(map(str, prolead_cmd))}")
    # bufsize=0 + text=True: read PROLEAD's progress lines as they appear
    proc = subprocess.Popen(
        prolead_cmd,
        bufsize=0,
        cwd=prolead_run_dir,
        stdout=subprocess.PIPE,
        text=True,
    )

    assert proc.stdout is not None, "stdout is None"

    # Matches one row of PROLEAD's progress table:
    # | <time>s | <ram> <unit> | <n_sim>[ / <required>] | [sig(cycle), ...] | <-log10(p)> | <STATUS> |
    # (named groups reconstructed from m.group(...) usage below)
    result_line_regex = re.compile(
        r"^\s*\|\s*(?P<elapsed_time>\d+\.\d+)[s]\s*\|\s*(?P<ram_usage>\d+\.\d+)\s*(?P<ram_usage_unit>[A-Z]+)\s*\|\s*(?P<n_sim>\d+)(\s*\/\s*(?P<required_sims>\d+))?\s*\|\s*(\[(?P<signals>([^\(]+\(\d+\)(,\s)?)+)\])?\s*\|\s*(?P<p_log>(\d+\.\d+|inf))\s*\|\s*(?P<status>[A-Z]+)\s*\|\s*$",
    )

    # Matches the table's header row, used to detect where results start
    first_result_line_regex = re.compile(
        r"\|\s*Elapsed Time\s*\|\s*.*\s*\|\s*[a-zA-Z\s]+\s*\|\s*[a-zA-Z\s]+\s*\|\s*-log10\(p\)\s*\|\s*Status\s*\|",
        re.IGNORECASE,
    )
    first_line_done = False

    data = []  # list of (n_sim, p_log) samples
    data_np = None  # numpy snapshot of `data`, written by save_data()

    write_every = 300  # checkpoint interval in seconds of PROLEAD runtime

    prev_checkpoint = 0

    npy_file = prolead_run_dir / f"{top_module}_data.npz"

    def save_data():
        # persists the latest numpy snapshot (no-op before first snapshot)
        if data_np is not None:
            np.savez_compressed(npy_file, data_np)

    table = Table() if pretty else None

    # set of (signal, n_sim, p_log) tuples for every reported leak
    leaking_signals = set()

    stop_when_required_sims_reached = False

    terminated = False  # True when WE terminated PROLEAD (not a failure)

    def print_header(table: Table):
        # column layout mirroring PROLEAD's own table header
        table.add_column(rich.text.Text("Time", justify="center"), width=8, justify="right")
        table.add_column(rich.text.Text("Memory (GB)", justify="center"), width=6, justify="right")
        table.add_column(
            rich.text.Text("#Simulations", justify="center"),
            justify="right",
            width=20,
            max_width=26,
        )
        table.add_column(rich.text.Text("Highest Leakage", justify="center"), justify="left")
        table.add_column(rich.text.Text("-Log(p)", justify="center"), justify="right")
        table.add_column("Status", justify="center", width=8)

    def print_data_line(
        table: Table,
        leakage: bool,
        elapsed_time: float,
        ram_usage: float,
        ram_usage_unit: str,
        n_sim: int,
        required_sims: Optional[int],
        p_log: float,
        status: str,
        sigs: list[str],
    ):
        # one rendered row; color encodes severity of the reported p-value
        if leakage:
            stat_color = "red" if p_log > 7 else "yellow"
        elif p_log <= 0.0:
            stat_color = "yellow"
        else:
            stat_color = "green"
        table.add_row(
            format_time(elapsed_time),
            f"{ram_usage:.2f}{ram_usage_unit if ram_usage_unit != 'GB' else ''}",
            f"{n_sim:9,d} / {required_sims:6,d}" if required_sims else f"{n_sim:9,d}",
            f"{', '.join(sigs)}",
            f"{p_log:.2f}",
            f"[{stat_color}]{status}[/{stat_color}]",
        )

    # catch KeyboardInterrupt
    try:
        with Live(table, console=console, vertical_overflow="visible", auto_refresh=False) as live:
            for line in map(str.strip, proc.stdout):
                # skip everything until the results-table header appears
                if not first_line_done and first_result_line_regex.fullmatch(line):
                    first_line_done = True
                    if table is not None:
                        print_header(table)
                        live.refresh()
                    else:
                        print(line)
                    continue
                m = result_line_regex.fullmatch(line)
                if m:
                    elapsed_time = float(m.group("elapsed_time"))
                    ram_usage = float(m.group("ram_usage"))
                    ram_usage_unit = m.group("ram_usage_unit")
                    n_sim = int(m.group("n_sim"))
                    required_sims = int(m.group("required_sims") or 0)
                    p_log = float(m.group("p_log"))
                    status = m.group("status")
                    leakage = status != "OKAY"
                    signals = m.group("signals")
                    sigs = [] if signals is None else signals.split(", ")
                    if leakage and sigs:
                        leaking_signals.update((s, n_sim, p_log) for s in sigs)
                    if table is not None:
                        print_data_line(
                            table,
                            leakage,
                            elapsed_time,
                            ram_usage,
                            ram_usage_unit,
                            n_sim,
                            required_sims,
                            p_log,
                            status,
                            sigs,
                        )
                        live.refresh()
                    else:
                        print(line)
                    data.append((n_sim, p_log))
                    # optional early stop once PROLEAD's required sample size is met
                    if stop_when_required_sims_reached and required_sims and n_sim > required_sims:
                        print(f"** Required simulations reached: {n_sim}/{required_sims}")
                        terminated = True
                        proc.terminate()
                        break
                    # periodic checkpoint so long runs survive interruption
                    if elapsed_time - prev_checkpoint >= write_every:
                        prev_checkpoint = elapsed_time
                        print(f"Writing checkpoint to {npy_file}")
                        data_np = np.array(data)
                        save_data()
                    # print(f"{n_sim}/{total_sim} {signals} {p_log} {status}")
                else:
                    # non-table output (banners, warnings) is passed through
                    print(line)
    except KeyboardInterrupt:
        print("*** Caught KeyboardInterrupt, terminating PROLEAD... ***")
        proc.terminate()
    finally:
        if proc.poll() is None:
            proc.wait()
        if data:
            data_np = np.array(data)
            print(f"** Writing data to {npy_file}")
            save_data()
        else:
            print("No data captured")
        if leaking_signals:
            print(f"** Leakage Detected!!!")
            # signals are reported as "name(cycle)"; split them apart
            cycles_signals = []
            for s, n_sim, p_log in leaking_signals:
                m = re.match(r"([^\(]+)\((\d+)\)", s)
                if m:
                    cycles_signals.append((int(m.group(2)), m.group(1), p_log))
                else:
                    print(f"** Unmatched signal / cycle format: {s}")
            cycles_signals = list(set(cycles_signals))
            cycles_signals.sort(key=lambda x: x[0])

            with open(prolead_run_dir / f"{top_module}_leaking_signals.csv", "w") as f:
                f.write("Cycle,Signal,Log(p)\n")
                for c, s, p_log in cycles_signals:
                    f.write(f"{c},{s},{p_log}\n")
            pr = "\n".join(f" {c:4d}: {s} [{p_log:3.2f}]" for c, s, p_log in cycles_signals)
            print(f"** Leaking signals:\n{pr}")

    ## https://github.com/ChairImpSec/PROLEAD/wiki/Results

    if data_np is not None:
        sns.set_theme(style="whitegrid", context="paper")
        plt.figure()

        x = data_np[:, 0]
        y = data_np[:, 1]  # The smallest p-value from the g-test in logarithmic form

        num_sims = np.max(x)
        max_p_log = np.max(y)

        # pick an x-axis scale so tick labels stay readable
        if num_sims >= 1e10:
            x_scale = 1e9
        elif num_sims >= 1e7:
            x_scale = 1e6
        elif num_sims >= 1e4:
            x_scale = 1e3
        else:
            x_scale = 1

        if x_scale > 1:
            x = x / x_scale

        plot = sns.lineplot(
            x=x,
            y=y,
            # kind="line",
            label=r"$-\log_{10}(p)$"
            + " [glitch"
            + ("+transition" if sca_config.get("transitional_leakage") else "")
            + "]",
        )
        plot.axhline(y=max_p_log, linestyle="--", label="Minimum p-value", alpha=0.6)
        plot.axhline(y=5, color="r", linestyle="--", label="Threshold")
        plot.set_xlabel(
            "Number of Simulations" + (rf" ($\times${int(x_scale):,})" if x_scale > 1 else "")
        )
        plot.set_ylabel(r"$-\log_{10}(p)$")
        plot.legend(loc="best", fancybox=True, framealpha=0.9)
        plt.tight_layout()

        fig_file = npy_file.with_suffix(".png")

        print(f"Saving plot to {fig_file}")
        plt.savefig(fig_file, dpi=600)
        if show_figure:
            plt.show()

    if not terminated and proc.returncode:
        print(f"PROLEAD failed with return code {proc.returncode}")
        exit(1)
def div_ceil(a: int, b: int) -> int:
    """Ceiling integer division: smallest integer >= a/b (for positive b)."""
    return (a + b - 1) // b


def verilog_value(value: Union[int, str], width: Optional[int]) -> str:
    """Render a value as a sized Verilog literal, e.g. ``4'b0101``.

    Args:
        value: an int, the placeholder string "$" (PROLEAD "fresh random"
            marker, repeated once per digit), or any other string used verbatim.
        width: bit width; None derives it from the int's bit_length().

    Returns:
        Binary literal for widths up to 64 bits, hexadecimal above that.
    """
    if width is None:
        assert isinstance(value, int), "Expected int value for width=None"
        # bit_length() of 0 is 0, but a Verilog literal needs width >= 1
        # (previously produced the invalid literal "0'b0" for value == 0)
        width = max(value.bit_length(), 1)
    if width <= 64:
        base = "b"
        vbase = "b"
        n_digits = width
    else:
        # wide values are emitted as (uppercase) hex to keep literals short
        base = "X"
        vbase = "h"
        n_digits = div_ceil(width, 4)
    prefix = f"{width}'{vbase}"

    if isinstance(value, int):
        v = f"{value:0{n_digits}{base}}"
    elif value == "$":
        v = "$" * n_digits
    else:
        v = str(value)
    return prefix + v
def generate_config(
    config_file: Path,
    ports: list[Port],
    sca_config: dict,
    sim_config: dict,
    perf_config: dict,
):
    """Assemble and write the PROLEAD JSON configuration.

    Classifies ports by role (shared data / random / clock / reset / end),
    builds the two simulation groups (random and fixed), the input sequence
    (initial values + reset release) and the end condition, then writes the
    combined config as JSON to ``config_file``.

    NOTE: mutates its arguments: ``sca_config`` (clock_cycles default),
    ``sim_config`` (pops "vcd"/"number_of_simulations", adds groups, input
    sequence, end condition) and the ``value`` field of reset/end Ports.
    """

    # print(f" ports: {ports}")
    input_ports: list[Port] = [p for p in ports if p.is_input]
    # print(f" input_ports: {input_ports}")
    output_ports: list[Port] = [p for p in ports if p.is_output]

    # print(f" output_ports: {output_ports}")

    # inputs that carry a share of a masked value (share_id set)
    shared_inputs = [p for p in input_ports if p.share_id is not None]
    print(f" shared_inputs: {', '.join(f'{p.name_bits}::{p.share_id}' for p in shared_inputs)}")
    # shared_outputs = [Output(**p) for p in output_ports if p.get("share_id") is not None]
    rand_inputs = [p for p in input_ports if p.type == "random"]
    print(f" rand_inputs: {', '.join(p.name_bits for p in rand_inputs)}")
    clocks = [p for p in input_ports if p.type == "clock"]
    print(f" clocks: {', '.join(p.name_bits for p in clocks)}")
    resets = [p for p in input_ports if p.type == "reset"]
    print(f" resets: {', '.join(p.name_bits for p in resets)}")
    reset_signal = resets[0] if resets else None

    # outputs signalling completion: need a comparison value, or be 1-bit
    end_signals = [
        p
        for p in output_ports
        if p.share_id is None and p.type == "end" and (p.value is not None or p.num_bits == 1)
    ]
    for p in end_signals:
        if p.value is None:
            p.value = 1
    print(f" end_signals: {', '.join(f'{p.name_bits} -> {p.value}' for p in end_signals)}")

    if reset_signal is not None:
        if reset_signal.value is None:
            reset_signal.value = 1
        # with a reset present, default the analysis window to start after it
        sim_cycles = sim_config["number_of_clock_cycles"]
        if sca_config.get("clock_cycles") is None:
            assert sim_cycles > 1
            # Note: range is exclusive w.r.t. the end value
            sca_config["clock_cycles"] = [f"1-{sim_cycles}"]

    assert len(clocks) <= 1, "Expected at most one clock signal"
    assert len(resets) <= 1, "Expected at most one reset signal"
    clock_signal = clocks[0].name if clocks else None
    # sca_order = 2

    # width of one share: group values cover all share-0 input bits
    total_input_bits = max(1, sum(p.num_bits for p in shared_inputs if p.share_id == 0))

    groups = []
    fixed_group_value = random.randint(0, 2**total_input_bits - 1)

    # random group
    groups.append(verilog_value("$", total_input_bits))
    # fixed group
    groups.append(verilog_value(fixed_group_value, total_input_bits))

    end_cycles: Optional[int] = None

    sim_vcd = sim_config.pop("vcd", False)

    # NOTE(review): pop default is None, so a missing "number_of_simulations"
    # makes div_ceil below raise TypeError — presumably always provided by caller
    number_of_simulations: int = sim_config.pop("number_of_simulations", None)

    number_of_simulations_per_step = sim_config.get("number_of_simulations_per_step", 256)

    number_of_sim_steps = div_ceil(number_of_simulations, number_of_simulations_per_step)

    assert number_of_sim_steps, "number_of_sim_steps must be specified"

    assert (
        number_of_simulations_per_step % 64 == 0
    ), "number_of_simulations_per_step must be a multiple of 64"

    if sim_vcd:
        # waveform dumping is expensive: clamp to a single 64-simulation step
        sim_config["waveform_simulation"] = True
        print("** VCD waveform generation is enabled!")
        print("** Number of simulations was set to 64!")
        number_of_sim_steps = 1
        number_of_simulations_per_step = 64

    # round the total up to a whole number of steps
    number_of_simulations = number_of_simulations_per_step * number_of_sim_steps

    print(f"** Total number of simulations: {number_of_simulations:,}")
    print(f"** Number of simulations per step: {number_of_simulations_per_step:,}")

    input_sequence = []

    # next free bit offset within each group_in<share_id> vector
    start_bits = {}

    end_condition = {}

    if end_cycles:
        end_condition["clock_cycles"] = end_cycles
    if end_signals:
        end_condition["signals"] = [
            {
                "name": p.name_bits,  ## FIXME??? p.name?
                "value": (
                    p.value
                    if isinstance(p.value, str)
                    else verilog_value(p.value if p.value is not None else 1, p.num_bits)
                ),
            }
            for p in end_signals
        ]

    sim_config["number_of_simulations_per_step"] = number_of_simulations_per_step
    sim_config["number_of_simulations"] = number_of_simulations

    sim_config["groups"] = groups
    sim_config["always_random_inputs"] = [p.name_bits for p in rand_inputs]
    sim_config["input_sequence"] = input_sequence
    sim_config["end_condition"] = end_condition

    def assign_fresh_values(inputs: Sequence[Port]) -> list[dict[str, str]]:
        # Map each input to its driving value: an explicit value if set,
        # otherwise the next slice of the group input vector for its share.
        signals = []
        for p in inputs:
            value = p.value_str

            if value is None and p.share_id is not None:
                start_bit = start_bits.get(p.share_id, 0)
                value = "group_in" + (
                    f"{p.share_id}[{p.num_bits + start_bit - 1}:{start_bit}]"
                    if p.num_bits > 1
                    else f"{p.share_id}[{start_bit}]"
                )
                start_bits[p.share_id] = start_bit + p.num_bits
            if value is not None:
                signals.append(
                    {
                        "name": p.name_bits,
                        "value": value,
                    }
                )
        return signals

    initial_signals = assign_fresh_values(
        [p for p in input_ports if p.share_id is not None or p.type is None]
    )
    if reset_signal:
        assert reset_signal.value_str is not None, "Expected reset value at this point"
        initial_signals.append({"name": reset_signal.name, "value": reset_signal.value_str})

    # cycle 0: apply inputs (and assert reset if present)
    input_sequence += [
        {
            "signals": initial_signals,
            "hold_for_cycles": 1,
        },
    ]
    if reset_signal:
        # cycle 1: release reset — compute the de-asserted value
        not_reset = (
            (~reset_signal.value) & ((1 << (reset_signal.num_bits or 1)) - 1)
            if isinstance(reset_signal.value, int)
            else 1 if reset_signal.value in ("1'b0", "1'h0", "0") else 0
        )
        print(f"** Reset signal: {reset_signal.name} = {reset_signal.value} -> {not_reset}")
        input_sequence += [
            {
                "signals": [
                    {
                        "name": reset_signal.name,
                        "value": verilog_value(not_reset, 1),
                    },
                ],
                "hold_for_cycles": 1,
            },
        ]

    hardware = {}

    if clock_signal:
        hardware["clock_signal_name"] = clock_signal

    config = {
        "performance": perf_config,
        "simulation": sim_config,
        "hardware": hardware,
        "side_channel_analysis": sca_config,
    }
    print(f"Writing config to {config_file.absolute()}")

    with open(config_file, "w") as f:
        f.write(json.dumps(config, indent=2))
if __name__ == "__main__":
    args = argparser.parse_args()

    # --- argument sanity: sources XOR pre-synthesized netlist ---
    if not args.source_files and not args.sources_list:
        if not args.netlist:
            print("No source files specified")
            exit(1)
        if not args.top_module:
            print("Top module not specified")
            exit(1)
    else:
        if args.netlist:
            print("Either specify source files or netlist file, not both")
            exit(1)

    if args.sources_list:
        with open(args.sources_list, "r") as f:
            args.source_files = [Path(l.strip()) for l in f]

    # locate the PROLEAD installation (flag, env var, or relative to the binary)
    prolead_root_dir = args.prolead_root_dir or os.environ.get("PROLEAD_ROOT_DIR")

    if prolead_root_dir is None and args.prolead_bin:
        prolead_root_dir = Path(args.prolead_bin).parent.parent

    if prolead_root_dir:
        prolead_root_dir = Path(prolead_root_dir).resolve()
        if args.prolead_bin is None:
            args.prolead_bin = prolead_root_dir / "release" / "PROLEAD"

    if args.netlist is None:
        verilog_lib = args.yosys_verilog_lib
        liberty_lib = args.yosys_lib

        if liberty_lib is None:
            if prolead_root_dir is None:
                print("Neither --yosys-verilog-lib/--yosys-lib nor --prolead-root-dir where specified")
                exit(1)
            assert isinstance(prolead_root_dir, Path)
            LIBRARY_PATH = prolead_root_dir / "yosys" / "lib"
            liberty_lib = LIBRARY_PATH / "custom_cells.lib"

            # FIX: default verilog_lib only when LIBRARY_PATH is actually bound.
            # Previously this lookup sat outside the branch, raising NameError
            # when --yosys-lib was given without --yosys-verilog-lib.
            if verilog_lib is None:
                verilog_lib = LIBRARY_PATH / "custom_cells.v"

        if verilog_lib:
            assert verilog_lib.exists(), f"Verilog library {verilog_lib} does not exist"

        assert liberty_lib, f"Liberty library not specified"
        assert liberty_lib.exists(), f"Liberty library {liberty_lib} does not exist"

    prolead_run_dir: Path = Path("prolead_run") / (args.top_module or "top")

    if not prolead_run_dir.exists():
        prolead_run_dir.mkdir(parents=True)

    # yosys runs in the same folder so its outputs sit next to PROLEAD's
    yosys_run_dir = prolead_run_dir

    if not yosys_run_dir.exists():
        yosys_run_dir.mkdir(parents=True)

    for f in args.source_files:
        assert isinstance(f, Path), f"Expected Path, got {f}"
        assert f.exists(), f"File {f} does not exist"

    assert isinstance(args.source_files, list)

    assert all(isinstance(f, Path) for f in args.source_files)

    args.source_files = [Path(f).absolute() for f in args.source_files]

    # --- decide whether (re-)synthesis is needed ---
    if args.netlist:
        netlist_file = args.netlist
        run_synth = False
    else:
        netlist_file = None

        if args.top_module:
            netlist_file = prolead_run_dir / "netlist.v"

        run_synth = args.force_synth

        if not run_synth:
            # Check if the netlist file exists
            if not netlist_file or not netlist_file.exists():
                run_synth = True
            else:
                # check if modification time of the netlist file is older than the source files
                netlist_mtime = netlist_file.stat().st_mtime
                if any(netlist_mtime < Path(f).stat().st_mtime for f in args.source_files):
                    run_synth = True
        netlist_file = prolead_run_dir / "netlist.v"

    if run_synth:
        synthesize(
            args.yosys_bin,
            yosys_run_dir,
            args.source_files,
            args.top_module,
            verilog_lib=verilog_lib,
            liberty_lib=liberty_lib,
            verilog_netlist=netlist_file,
            opt_flatten=args.opt == "flatten" or args.opt == "full",
            opt_full=args.opt == "full",
            split_nets=True,
            quiet=args.quiet_synth,
        )
    else:
        print(f"** Using existing netlist: {netlist_file}")

    # --- recover top module and port list from the yosys JSON netlist ---
    if not args.netlist:
        json_netlist = netlist_file.with_suffix(".json")

        print(f"** Parsing {json_netlist}")
        top, ports = parse_json_netlist(json_netlist)

        if not args.top_module:
            print(f"** Detected top module: {top}")
            args.top_module = top
            assert args.top_module, "Failed to detect top module"
            # new_name = prolead_run_dir.parent / args.top_module
            # print(f"Renaming {prolead_run_dir} to {new_name}")
            # prolead_run_dir.rename(new_name)
            # prolead_run_dir = new_name
            # netlist_file = prolead_run_dir / "netlist.v"
    else:
        ports = []

    assert netlist_file

    exclude_signals_regex = ""

    # "(?!)" never matches: an always-empty exclude set by default
    probe_placement = {
        "include": {"signals": ".*", "paths": ".*"},
        "exclude": {
            "signals": exclude_signals_regex if exclude_signals_regex else "(?!)",
            "paths": "(?!)",
        },
    }

    sca_config = {
        "order": args.order,
        "transitional_leakage": args.transitional,
        "effect_size": 0.1,
    }

    if probe_placement:
        sca_config["probe_placement"] = probe_placement

    num_simulations = int(args.num_simulations)

    if args.simulations_per_step:
        number_of_simulations_per_step = int(args.simulations_per_step)
    else:
        # heuristic: scale the step size with the total simulation count
        number_of_simulations_per_step = min(16, div_ceil(num_simulations, 1_000_000) * 2) * 1024

    ports_map = {p["name"]: p for p in ports}

    jports_map = OrderedDict()

    # --- merge user-provided port metadata (shares, roles) from --ports-json ---
    if args.ports_json:
        jports = []  # merge?
        with open(args.ports_json, "r") as f:
            j = json.load(f)
            p = j.get("ports", {})
            shares_maps = p.get("regex")
            if isinstance(shares_maps, dict):
                shares_maps = [(k, v) for k, v in shares_maps.items()]
            elif shares_maps is None:
                shares_maps = []
            for k, v in p.get("inputs", p.get("input", {})).items():
                v["direction"] = "input"
                jports.append({"name": k, **v})
            for k, v in p.get("outputs", p.get("output", {})).items():
                v["direction"] = "output"
                jports.append({"name": k, **v})
        for p in jports:
            name = p["name"]
            width = p.get("width")
            direction = p.get("direction")
            name, end, start = Port.range_from_name(name)
            if not width and start is not None:
                width = end - start + 1
            # fill in anything missing from the synthesized netlist's view
            port_from_yosys = ports_map.get(name)
            if port_from_yosys:
                assert isinstance(port_from_yosys, dict), "Expected dict"
                if not width:
                    width = port_from_yosys.get("width")
                if not direction:
                    direction = port_from_yosys.get("direction")
            elif ports:
                print(f"** [WARNING] Port {name} not found in yosys netlist!!!")
            if width:
                p["width"] = int(width)
            if direction:
                p["direction"] = direction
            p["name"] = name
            jports_map[name] = p
        if not ports:
            ports = jports
            ports_map = {p["name"]: p for p in ports}
    else:
        # default classification: infer port roles and share ids from names
        shares_maps = [
            (r"^(io_)?rand.*", {"type": "random"}),
            (r"\w+_(?P<share_id>\d+)$", {}),
            (r"^(clk|clock)", {"type": "clock"}),
            (r"^(rst_n|reset_n)", {"type": "reset", "value": 0}),
            (r"^(rst|reset)", {"type": "reset", "value": 1}),
        ]

    for p_name, p in ports_map.items():
        for regex, d in shares_maps:
            m = re.match(regex, p_name)
            if m:
                p.update(**d)
                p.update(**m.groupdict())
                break
        share_id = p.get("share_id")
        if not p.get("width"):
            # FIX: parse the range from THIS port's name (p_name); previously
            # this read a stale `name` leaked from an earlier loop (NameError
            # when no --ports-json was given).
            name, end, start = Port.range_from_name(p_name)
            p["width"] = end - start + 1 if start is not None else 1
            p["name"] = name

        if share_id is None:
            if (
                jports_map
                and p_name not in jports_map
                and p.get("direction") == "input"
                and p.get("width", 0) > 1
            ):
                print(
                    f"** [WARNING] Input port {p_name} with width {p.get('width')} does not have a share_id!!!"
                )
        else:
            p["share_id"] = int(share_id)

    # print(f"** Ports Map: {ports_map}")
    # print(f"** JSON Ports Map: {jports_map}")

    for k, v in jports_map.items():
        if k not in ports_map:
            ports_map[k] = v
        elif isinstance(ports_map[k], dict):
            ports_map[k].update(v)
        else:
            print(f"** [WARNING] Port {k} already exists in ports_map!!!")

    if args.library_json is None:
        assert isinstance(prolead_root_dir, Path)
        library_json = prolead_root_dir / "library.json"
    else:
        library_json = args.library_json

    library_json = library_json.resolve()

    assert library_json.exists(), f"Library JSON file {library_json} does not exist"

    if args.minimize_probing_sets in (False, 0, "false", "none"):
        args.minimize_probing_sets = "no"

    if not args.num_cores:
        args.num_cores = "half"

    # --- build (or reuse) the PROLEAD config ---
    if args.prolead_config:
        print(
            f"** Using existing config file: {args.prolead_config}. All other prolead configuration arguments are ignored!"
        )
        config_file = Path(args.prolead_config)
    else:
        random_seed = (
            args.random_seed if args.random_seed is not None else random.randint(0, 2**64 - 1)
        )
        print(f"Using random seed: {random_seed}")
        random.seed(random_seed)

        config_file = prolead_run_dir / "config.json"
        ports = [Port(**p) for p in ports_map.values()]

        if not args.sim_cycles:
            print(f"** Number of simulation cycles (--sim-cycles) must be specified!")
            exit(1)

        sim_config = {
            "number_of_simulations": num_simulations,
            "number_of_simulations_per_step": number_of_simulations_per_step,
            # "end_wait_cycles": 0,
            "number_of_clock_cycles": args.sim_cycles,
            "number_of_simulations_per_write": 1024 * number_of_simulations_per_step,
        }

        perf_config = {
            # "max_number_of_threads": "half", ### half of the available cores
            "max_number_of_threads": args.num_cores,
            # "minimize_probing_sets": "aggressive", # "trivial" ,"aggressive", "no"
            "minimize_probing_sets": args.minimize_probing_sets,
            "compact_distributions": args.compact,
        }

        if args.probing_sets_per_step:
            perf_config["number_of_probing_sets_per_step"] = int(args.probing_sets_per_step)

        generate_config(config_file, ports, sca_config, sim_config, perf_config)

    run_prolead(
        args.prolead_bin,
        prolead_run_dir,
        netlist_file,
        args.top_module,
        library_name=args.library_name,
        library_json=library_json,
        sca_config=sca_config,
        config_file=config_file,
        show_figure=args.show_figure,
        result_folder="results",
        pretty=args.pretty,
    )
| yosys -import 3 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.BKAdder/BKAdder12_assert.sv 4 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.BKAdder/BKAdder12.sv 5 | 6 | yosys hierarchy -check -top BKAdder12 7 | 8 | prep 9 | 10 | # opt -full -purge -sat 11 | # opt -full -purge -sat 12 | 13 | #log -stdout "Running synthesis" 14 | ##synth -noabc -flatten 15 | #hierarchy -check 16 | #yosys proc 17 | #flatten 18 | #opt_expr 19 | #opt_clean 20 | #opt -nodffe -nosdff 21 | #fsm 22 | #opt 23 | #opt -full -purge -sat 24 | #wreduce 25 | #peepopt 26 | #opt_clean 27 | #alumacc 28 | #share 29 | #opt 30 | #memory -nomap 31 | #opt_clean 32 | # 33 | opt -fast -full 34 | #memory_map 35 | # 36 | #opt -full 37 | techmap 38 | opt -fast 39 | opt -full -purge -sat 40 | #opt -fast 41 | 42 | 43 | # abc -g AND,NAND,XOR 44 | #opt -full -purge -sat 45 | # opt -full -purge -sat 46 | # replace undefined values with 0 47 | # setundef -zero 48 | 49 | opt_clean -purge 50 | 51 | # opt -full 52 | clean -purge 53 | 54 | check 55 | stat 56 | show -href -color orange t:*AND* -stretch -viewer none -prefix bka12 -------------------------------------------------------------------------------- /yosys_synth_ksa.tcl: -------------------------------------------------------------------------------- 1 | yosys logger -notime -stderr 2 | yosys -import 3 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.KSAdder/KSAdder12_assert.sv 4 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.KSAdder/KSAdder12.sv 5 | 6 | yosys hierarchy -check -top KSAdder12 7 | 8 | prep 9 | 10 | # opt -full -purge -sat 11 | # opt -full -purge -sat 12 | 13 | #log -stdout "Running synthesis" 14 | ##synth -noabc -flatten 15 | #hierarchy -check 16 | #yosys proc 17 | #flatten 18 | #opt_expr 19 | #opt_clean 20 | #opt -nodffe -nosdff 21 | #fsm 22 | #opt 23 | #opt -full -purge -sat 24 | #wreduce 25 | #peepopt 26 | #opt_clean 27 | #alumacc 28 | #share 29 | #opt 30 | #memory -nomap 31 | #opt_clean 32 | # 33 
| opt -fast -full 34 | #memory_map 35 | # 36 | #opt -full 37 | techmap 38 | opt -fast 39 | opt -full -purge -sat 40 | #opt -fast 41 | 42 | 43 | # abc -g AND,NAND,XOR 44 | #opt -full -purge -sat 45 | # opt -full -purge -sat 46 | # replace undefined values with 0 47 | # setundef -zero 48 | 49 | opt_clean -purge 50 | 51 | # opt -full 52 | clean -purge 53 | 54 | check 55 | stat 56 | show -href -color orange t:*AND* -stretch -viewer none -prefix KSAdder12 -------------------------------------------------------------------------------- /yosys_synth_rca.tcl: -------------------------------------------------------------------------------- 1 | yosys logger -notime -stderr 2 | yosys -import 3 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.RippleCarry/RippleCarryAdder12_assert.sv 4 | yosys read_verilog -defer -noautowire -sv gen_rtl/adders.RippleCarry/RippleCarryAdder12.sv 5 | 6 | yosys hierarchy -check -top RippleCarryAdder12 7 | 8 | prep 9 | 10 | # opt -full -purge -sat 11 | # opt -full -purge -sat 12 | 13 | #log -stdout "Running synthesis" 14 | ##synth -noabc -flatten 15 | #hierarchy -check 16 | #yosys proc 17 | #flatten 18 | #opt_expr 19 | #opt_clean 20 | #opt -nodffe -nosdff 21 | #fsm 22 | #opt 23 | #opt -full -purge -sat 24 | #wreduce 25 | #peepopt 26 | #opt_clean 27 | #alumacc 28 | #share 29 | #opt 30 | #memory -nomap 31 | #opt_clean 32 | # 33 | opt -fast -full 34 | memory_map 35 | # 36 | opt -full 37 | techmap 38 | #opt -fast 39 | opt -full -purge -sat 40 | opt -fast 41 | 42 | 43 | #abc -g AND,NAND,XOR 44 | #opt -full -purge -sat 45 | # opt -full -purge -sat 46 | # replace undefined values with 0 47 | # setundef -zero 48 | 49 | opt_clean -purge 50 | 51 | # opt -full 52 | clean -purge 53 | 54 | check 55 | stat 56 | #show -color orange t:*AND* -color orange t:*and* -stretch -------------------------------------------------------------------------------- /yosys_synth_ska.tcl: -------------------------------------------------------------------------------- 1 | 
# Yosys synthesis flow for the 12-bit Sklansky prefix adder (SklanskyAdder12).
# Reads the generated SystemVerilog (design + assertion wrapper), runs a
# flatten/optimize/techmap pipeline, and renders the AND cells of the result.

# Log to stderr without timestamps, then pull Yosys pass names into the
# TCL namespace so subsequent commands can be written without the "yosys" prefix.
yosys logger -notime -stderr
yosys -import

# Load the assertion companion first, then the design itself; -defer delays
# elaboration until hierarchy runs, -noautowire catches undeclared nets.
yosys read_verilog -defer -noautowire -sv gen_rtl/adders.Sklansky/SklanskyAdder12_assert.sv
yosys read_verilog -defer -noautowire -sv gen_rtl/adders.Sklansky/SklanskyAdder12.sv

yosys hierarchy -check -top SklanskyAdder12

# Generic coarse-grain preparation.
prep

# Flatten the hierarchy and iterate optimization passes.
hierarchy -check
flatten
opt_expr
opt_clean
opt -nodffe -nosdff
fsm
opt
opt -full -purge -sat
wreduce
peepopt
opt_clean
alumacc
share
opt
opt_clean

opt -fast -full
memory_map

# Map coarse cells to gate-level primitives, then re-optimize.
techmap
opt -fast
opt -full -purge -sat
opt -fast

opt -full -purge -sat
opt -full -purge -sat

# Replace undefined values with constant 0.
setundef -zero

opt_clean -purge

opt -full
clean -purge

# Sanity check, report statistics, and emit a graph highlighting AND cells.
check
stat
show -href -color orange t:*AND* -stretch -viewer none -prefix ska12
--------------------------------------------------------------------------------