├── lef_def_parser
│ ├── __init__.py
│ ├── __pycache__
│ │ ├── util.cpython-36.pyc
│ │ ├── util.cpython-37.pyc
│ │ ├── util.cpython-38.pyc
│ │ ├── def_util.cpython-36.pyc
│ │ ├── def_util.cpython-37.pyc
│ │ ├── def_util.cpython-38.pyc
│ │ ├── lef_util.cpython-36.pyc
│ │ ├── lef_util.cpython-37.pyc
│ │ ├── lef_util.cpython-38.pyc
│ │ ├── main_util.cpython-37.pyc
│ │ ├── def_parser.cpython-36.pyc
│ │ ├── def_parser.cpython-37.pyc
│ │ ├── def_parser.cpython-38.pyc
│ │ ├── lef_parser.cpython-36.pyc
│ │ ├── lef_parser.cpython-37.pyc
│ │ └── lef_parser.cpython-38.pyc
│ ├── split_def.ini
│ ├── .idea
│ │ └── vcs.xml
│ ├── importDate.py
│ ├── run_from_terminal.py
│ ├── name_remap.py
│ ├── LICENSE
│ ├── lef_parser.py
│ ├── verilog_gen.py
│ ├── extract_cell.py
│ ├── libraries
│ │ └── FreePDK45
│ │   └── small.lef
│ ├── def_parser.py
│ ├── plot_cell.py
│ ├── split_def.py
│ ├── util.py
│ ├── lef_util.py
│ ├── def_util.py
│ ├── plot_layout.py
│ ├── cell_learn.py
│ └── plot_layout_new_model.py
├── .gitattributes
├── __pycache__
│ ├── convertDEF.cpython-36.pyc
│ ├── convertDEF.cpython-37.pyc
│ ├── convertDEF.cpython-38.pyc
│ ├── extractUnitsFromLEF.cpython-36.pyc
│ ├── extractUnitsFromLEF.cpython-37.pyc
│ └── extractUnitsFromLEF.cpython-38.pyc
├── extractUnitsFromLEF.py
├── LICENSE
├── convertDEF.py
├── README.md
└── main.py
/lef_def_parser/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/__pycache__/convertDEF.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/convertDEF.cpython-36.pyc
--------------------------------------------------------------------------------
/__pycache__/convertDEF.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/convertDEF.cpython-37.pyc
--------------------------------------------------------------------------------
/__pycache__/convertDEF.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/convertDEF.cpython-38.pyc
--------------------------------------------------------------------------------
/__pycache__/extractUnitsFromLEF.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/extractUnitsFromLEF.cpython-36.pyc
--------------------------------------------------------------------------------
/__pycache__/extractUnitsFromLEF.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/extractUnitsFromLEF.cpython-37.pyc
--------------------------------------------------------------------------------
/__pycache__/extractUnitsFromLEF.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/__pycache__/extractUnitsFromLEF.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/util.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/util.cpython-36.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/util.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/util.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/util.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/util.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_util.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_util.cpython-36.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_util.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_util.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_util.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_util.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_util.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_util.cpython-36.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_util.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_util.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_util.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_util.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/main_util.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/main_util.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_parser.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_parser.cpython-36.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_parser.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_parser.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/def_parser.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/def_parser.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_parser.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_parser.cpython-36.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_parser.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_parser.cpython-37.pyc
--------------------------------------------------------------------------------
/lef_def_parser/__pycache__/lef_parser.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HanyMoussa/SPEF_EXTRACTOR/HEAD/lef_def_parser/__pycache__/lef_parser.cpython-38.pyc
--------------------------------------------------------------------------------
/lef_def_parser/split_def.ini:
--------------------------------------------------------------------------------
1 | INPUT_FILE_NAME = ./libraries/DEF/c1908.def
2 | BACK_END = False
3 | FRONT_END = True
4 | SPLIT_LAYER = metal3
5 | OUTPUT_FILE_NAME = ./def_write/c1908_feol_metal4.def
6 |
--------------------------------------------------------------------------------
/lef_def_parser/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/lef_def_parser/importDate.py:
--------------------------------------------------------------------------------
1 |
2 | import datetime
3 |
4 | now = datetime.datetime.now()
5 | #print("%A" % now.day," ")
6 | #print("%d" % now.day ," ")
7 |
8 | print (now.strftime("%a %m %d %H:%M:%S %Y"))
9 |
10 | #*DATE "Thu Dec 5 11:51:50 2019"
--------------------------------------------------------------------------------
/lef_def_parser/run_from_terminal.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | if(len(sys.argv) < 3):
4 | sys.exit("Arguments should be passed: python <script>.py <design>.lef <design>.def")
5 | elif(sys.argv[1].find(".lef") == -1 or sys.argv[2].find(".def") == -1):
6 | sys.exit("Arguments should be passed: python <script>.py <design>.lef <design>.def")
7 | else:
8 | lef_file_name = sys.argv[1]
9 | def_file_name = sys.argv[2]
10 |
--------------------------------------------------------------------------------
/lef_def_parser/name_remap.py:
--------------------------------------------------------------------------------
1 | from def_parser import *
2 | from lef_parser import *
3 |
4 |
5 |
6 |
7 | lef_parser = LefParser("def_lef_files/osu035.lef")
8 | lef_parser.parse()
9 |
10 | def_parser = DefParser("def_lef_files/uart.def")
11 | def_parser.parse()
12 |
13 | #List that contains old name, new name
14 |
15 | def remap_names():
16 | name_counter = 0
17 | map_of_names = []
18 | for key in def_parser.nets.net_dict:
19 | new_name = []
20 | new_name.append(def_parser.nets.net_dict[key].name)
21 | def_parser.nets.net_dict[key].name = "*" + str(name_counter)
22 | new_name.append(def_parser.nets.net_dict[key].name)
23 | name_counter += 1
24 | map_of_names.append(new_name)
25 | return(map_of_names)
26 |
27 |
28 | map_of_names = remap_names()
29 |
30 | print(map_of_names)
31 |
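32 | # Illustrative sketch (added for clarity, not from the original script): the
33 | # [old_name, new_name] pairs collected above roughly correspond to the SPEF
34 | # *NAME_MAP section, where each net appears as "*<index> <original_name>".
35 | # A minimal way to print such a mapping from map_of_names:
36 | for old_name, new_name in map_of_names:
37 |     print(new_name, old_name)  # e.g. "*0 <original net name>"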
--------------------------------------------------------------------------------
/extractUnitsFromLEF.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Fri Jul 31 16:08:56 2020
4 |
5 | @author: Ramez Moussa
6 | """
7 | lef_file_name = 'sample-designs/cpu6502/merged_unpadded.lef'
8 |
9 |
10 | def extractLefUnits(lef_file_name):
11 |
12 | unitsDict = {}
13 | f = open(lef_file_name, "r+")
14 | while(1):
15 | line = f.readline()
16 | data = line.split()
17 | if(len(data) > 0):
18 | if(data[0] == "UNITS"):
19 | extractUnits(f, unitsDict)
20 | f.close()
21 | return unitsDict
22 |
23 |
24 | def extractUnits(f, unitsDict):
25 | # the maximum number of units defined in LEF is 8
26 | for i in range(10):
27 | line = f.readline()
28 | data = line.split()
29 | if(len(data) > 0):
30 | if(data[0] != "END"):
31 | unitsDict[data[0]] = data[1]
32 | else:
33 | return unitsDict
34 |
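35 | # Illustrative usage sketch (added for clarity, not part of the original
36 | # script): the returned dictionary maps each keyword of the LEF UNITS section
37 | # to its unit name, e.g. {'TIME': 'NANOSECONDS', 'DATABASE': 'MICRONS'}.
38 | # The lef_file_name path defined above is the original author's example; any
39 | # LEF file that contains a UNITS section should work.
40 | if __name__ == '__main__':
41 |     units = extractLefUnits(lef_file_name)
42 |     print(units)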
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 HanyMoussa
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/lef_def_parser/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Tri Minh Cao
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/convertDEF.py:
--------------------------------------------------------------------------------
1 | def covnertToDef57(fileName):
2 | reachedNets = False
3 | reading = open(fileName, "r+")
4 | writing = open(fileName[:-4] + '_new.def', 'w+')
5 |
6 | for line in reading:
7 | splitted_line = line.split()
8 | if(len(splitted_line) > 0):
9 | if(splitted_line[0] == 'NETS'):
10 | reachedNets = True
11 | if(reachedNets == True):
12 | if(len(splitted_line) > 0):
13 | if(splitted_line[0] == '-'):
14 | line1 = splitted_line[0] + " " + splitted_line[1] + '\n'
15 | writing.write(line1)
16 |
17 | line2 = ""
18 | for i in range(len(splitted_line)):
19 | if(i > 1):
20 | line2 += splitted_line[i] + " "
21 |
22 | writing.write(line2 + '\n')
23 | else:
24 | writing.write(line)
25 |
26 | else:
27 | writing.write(line)
28 |
29 |
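30 | # Illustrative usage sketch (added for clarity, not part of the original
31 | # module): covnertToDef57() rewrites the NETS section so that each
32 | # "- <net_name>" header stands on its own line and the remainder of that line
33 | # is moved to the following line, writing the result to <input>_new.def.
34 | # The DEF path below is a placeholder assumption; substitute a real file.
35 | if __name__ == '__main__':
36 |     covnertToDef57('rle_rec.def')  # would produce rle_rec_new.def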
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SPEF-Extractor
2 | A Python library that reads LEF and DEF files, extracts the RC parasitics, and generates the corresponding SPEF file.
3 | 
4 | ## Dependencies:
5 | In order to parse the LEF and DEF files, we used [trimcao's def and lef parser](https://github.com/trimcao/lef-parser).
6 |
7 | ## Build Instructions:
8 | To install the library run the following commands:
9 | ```
10 |
11 | pip install numpy
12 |
13 | pip install sympy
14 |
15 | pip install matplotlib
16 |
17 | git clone https://github.com/Cloud-V/SPEF_EXTRACTOR/
18 | ```
19 |
20 | ## Using the library
21 | In order to use the project, run `main.py` from the terminal using the following format.
22 | `python3 main.py `
23 | For example:
24 | `python3 main.py osu035.lef rle_rec.def L 1`
25 | where `osu035.lef` is the provided LEF file and `rle_rec.def` is its corresponding DEF file. Afterwards, the RC parasitics are extracted and written to a SPEF file named `rle_rec.spef` (the same base name as the DEF file).
26 |
27 | ## Testing
28 | - Initially, we tested the generated SPEF manually by checking a number of nets and comparing the parasitics in the file with their theoretical values.
29 | - Later on, we tested using OpenSTA, which verified that the produced SPEF file is free of syntax errors. OpenSTA was able to successfully read the SPEF file and produce timing reports based on the parasitics provided.
30 | - Additionally, we used OpenSTA to compare the delays for multiple designs using available SPEF files vs. our generated SPEF files.
31 |
32 | ## Assumptions
33 | During our development, we had to make some assumptions for the sake of simplicity:
34 | 1. Values that do not exist in the LEF file are assumed to be 0.
35 | 2. We represented each wire segment as a single resistance and a single capacitance (in the L model).
36 | 3. We consider the capacitance of a segment to be at the end node of the segment (in the L model); a small sketch of the L and Pi models is given at the end of this README.
37 | 4. Testing was done using OpenSTA, which verified that our SPEF is free of syntax errors.
38 |
39 | ## Limitations
40 | 1. Testing was only done on a handful of designs; further testing would help.
41 | 2. Only the Pi and L models are available to represent the resistance and capacitance of a segment (even for long wire segments).
42 | 3. We do not handle the RC parasitics of the special nets.
43 |
44 | ## Name Remapping
45 | 1. We implemented an algorithm to rename long names.
46 | 2. All nets are renamed to decrease the size of files.
47 | 3. Names were remapped based on the standard remapping scheme of SPEF files.
48 |
49 | ## Acknowledgement:
50 | This was initially created for the Digital Design 2 course CSCE3304 at the American University in Cairo under the supervision of Dr. Mohamed Shalan. Development continued afterwards as part of an undergraduate research internship at the American University in Cairo.
51 |
52 | ## Authors:
53 | * Ramez Moussa - [Github Profile](https://github.com/ramezmoussa)
54 | * Hany Moussa - [Github Profile](https://github.com/hanymoussa)
55 |
56 |
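57 | ## Wire Model Sketch
58 | The following is a minimal illustration (not code taken from this tool) of the two segment models referred to in the Assumptions and Limitations above: for a segment with total resistance `R` and total capacitance `C`, the L model keeps `R` in series and lumps all of `C` at the far node, while the Pi model splits `C` into `C/2` at each end of the segment. The per-unit values in the example are made up for demonstration.
59 | ```python
60 | def l_model(r_total, c_total):
61 |     # series resistance, all capacitance lumped at the far (end) node
62 |     return {"R": r_total, "C_near": 0.0, "C_far": c_total}
63 | 
64 | def pi_model(r_total, c_total):
65 |     # series resistance, half of the capacitance at each end node
66 |     return {"R": r_total, "C_near": c_total / 2, "C_far": c_total / 2}
67 | 
68 | # hypothetical 10 um metal2 segment: 0.5 ohm/um and 0.2 fF/um
69 | length, r_per_um, c_per_um = 10, 0.5, 0.2
70 | print(l_model(length * r_per_um, length * c_per_um))
71 | print(pi_model(length * r_per_um, length * c_per_um))
72 | ```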
--------------------------------------------------------------------------------
/lef_def_parser/lef_parser.py:
--------------------------------------------------------------------------------
1 | """
2 | Lef Parser
3 | Author: Tri Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 | from lef_util import *
8 | from util import *
9 |
10 | SCALE = 2000
11 |
12 | class LefParser:
13 | """
14 | LefParser object will parse the LEF file and store information about the
15 | cell library.
16 | """
17 | def __init__(self, lef_file):
18 | self.lef_path = lef_file
19 | # dictionaries to map the definitions
20 | self.macro_dict = {}
21 | self.layer_dict = {}
22 | self.via_dict = {}
23 | # can make the stack to be an object if needed
24 | self.stack = []
25 | # store the statements info in a list
26 | self.statements = []
27 | self.cell_height = -1
28 |
29 | self.units_dict = {}
30 |
31 |
32 | def get_cell_height(self):
33 | """
34 | Get the general cell height in the library
35 | :return: void
36 | """
37 | for macro in self.macro_dict:
38 | self.cell_height = self.macro_dict[macro].info["SIZE"][1]
39 | break
40 |
41 | def parse(self):
42 | # Now try using my data structure to parse
43 | # open the file and start reading
44 | print ("Start parsing LEF file...")
45 | f = open(self.lef_path, "r")
46 | # the program will run until the end of file f
47 | for line in f:
48 | info = str_to_list(line)
49 | if len(info) != 0:
50 | # if info is a blank line, then move to next line
51 | # check if the program is processing a statement
52 | #print (info)
53 | if len(self.stack) != 0:
54 | curState = self.stack[len(self.stack) - 1]
55 | nextState = curState.parse_next(info)
56 | else:
57 | curState = Statement()
58 | nextState = curState.parse_next(info)
59 | # check the status return from parse_next function
60 | if nextState == 0:
61 | # continue as normal
62 | pass
63 | elif nextState == 1:
64 | # remove the done statement from stack, and add it to the statements
65 | # list
66 | if len(self.stack) != 0:
67 | # add the done statement to a dictionary
68 | done_obj = self.stack.pop()
69 | if isinstance(done_obj, Macro):
70 | self.macro_dict[done_obj.name] = done_obj
71 | elif isinstance(done_obj, Layer):
72 | self.layer_dict[done_obj.name] = done_obj
73 | elif isinstance(done_obj, Via):
74 | self.via_dict[done_obj.name] = done_obj
75 | self.statements.append(done_obj)
76 | elif nextState == -1:
77 | pass
78 | else:
79 | self.stack.append(nextState)
80 | # print (nextState)
81 | f.close()
82 | # get the cell height of the library
83 | self.get_cell_height()
84 | print ("Parsing LEF file done.")
85 |
86 |
87 | def draw_cells():
88 | """
89 | code to draw cells based on LEF information.
90 | :return: void
91 | """
92 | to_draw = []
93 | to_draw.append(input("Enter the first macro: "))
94 | to_draw.append(input("Enter the second macro: "))
95 | #to_draw = ["AND2X1", "AND2X2"]
96 |
97 |
98 | plt.figure(figsize=(12, 9), dpi=80)
99 | plt.axes()
100 |
101 | num_plot = 1
102 | for macro_name in to_draw:
103 | # check user's input
104 | if macro_name not in lef_parser.macro_dict:
105 | print ("Error: This macro does not exist in the parsed library.")
106 | quit()
107 | macro = lef_parser.macro_dict[macro_name]
108 | sub = plt.subplot(1, 2, num_plot)
109 | # need to add title
110 | sub.set_title(macro.name)
111 | draw_macro(macro)
112 | num_plot += 1
113 | # scale the axis of the subplot
114 | plt.axis('scaled')
115 |
116 |
117 | # start drawing
118 | print ("Start drawing...")
119 | plt.show()
120 |
121 | """
122 | # Main Class
123 | if __name__ == '__main__':
124 |
125 | path = "./libraries/Nangate/NangateOpenCellLibrary.lef"
126 | lef_parser = LefParser(path)
127 | lef_parser.parse()
128 |
129 | # test via_dict
130 | via1_2 = lef_parser.via_dict["via1_2"]
131 | print (via1_2.layers)
132 | for each in via1_2.layers:
133 | print (each.name)
134 | for each_shape in each.shapes:
135 | print (each_shape.type)
136 |
137 |
138 | """
139 |
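140 | # Illustrative usage sketch (added for clarity, not part of the original
141 | # module). The LEF path below points at the small example library shipped in
142 | # this directory (when run from lef_def_parser/); any LEF file can be used.
143 | if __name__ == '__main__':
144 |     lef_parser = LefParser("./libraries/FreePDK45/small.lef")
145 |     lef_parser.parse()
146 |     # macros are stored by name in macro_dict
147 |     for name, macro in lef_parser.macro_dict.items():
148 |         print(name, macro.info["SIZE"])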
--------------------------------------------------------------------------------
/lef_def_parser/verilog_gen.py:
--------------------------------------------------------------------------------
1 | from def_parser import *
2 | import pickle
3 | import time
4 |
5 | def recover_netlist(def_info, inputs, outputs, recovered_cells):
6 | """
7 | Method to create a netlist from predicted cells
8 | :param def_info: information from the DEF file
9 | :param inputs: input pins of the design
10 | :param outputs: output pins of the design
11 | :param recovered_cells: recovered cells with input nets and output nets
12 | :return: recovered netlist file name
13 | """
14 | # NOTE: the order of nets is not like that in original netlist
15 | design = def_info.design_name
16 | nets = set(def_info.nets.net_dict.keys())
17 | inputs_set = set(inputs)
18 | outputs_set = set(outputs)
19 | io = inputs_set | outputs_set
20 | wires = nets - io
21 | # print(wires)
22 | # print(len(wires))
23 |
24 | # save the cells_reco for later inspection
25 | # filename = './recovered/' + design + '.pickle'
26 | # try:
27 | # with open(filename, 'wb') as f:
28 | # pickle.dump(cells_reco, f, pickle.HIGHEST_PROTOCOL)
29 | # except Exception as e:
30 | # print('Unable to save data to', filename, ':', e)
31 |
32 | ## mm/dd/yyyy format
33 | date = time.strftime("%m/%d/%Y %H:%M:%S")
34 | s = '#############################\n'
35 | s += '# Generated by TMC\n'
36 | s += '# Design: ' + design + '\n'
37 | s += '# Date: ' + date + '\n'
38 | s += '#############################\n\n'
39 |
40 | # add module definition
41 | s += 'module ' + design + ' ( '
42 | num_ios = len(io)
43 | idx = 0
44 | for each_pin in io:
45 | s += each_pin
46 | idx += 1
47 | if idx < num_ios:
48 | s += ', '
49 | s += ' );\n'
50 |
51 | indent = ' '
52 | # add input
53 | num_in = len(inputs)
54 | idx = 0
55 | s += indent + 'input '
56 | for each_in in inputs:
57 | s += each_in
58 | idx += 1
59 | if idx < num_in:
60 | s += ', '
61 | s += ';\n'
62 | # add output
63 | num_out = len(outputs)
64 | idx = 0
65 | s += indent + 'output '
66 | for each_out in outputs:
67 | s += each_out
68 | idx += 1
69 | if idx < num_out:
70 | s += ', '
71 | s += ';\n'
72 | # add wire
73 | num_wire = len(wires)
74 | idx = 0
75 | s += indent + 'wire '
76 | for each_wire in wires:
77 | s += each_wire
78 | idx += 1
79 | if idx < num_wire:
80 | s += ', '
81 | s += ';\n'
82 | # add cells
83 | s += '\n'
84 | cell_idx = 2
85 | for each_cell in recovered_cells:
86 | cell_idx += 1
87 | s += indent + each_cell[0] + ' U' + str(cell_idx) + ' ( '
88 | in_nets = each_cell[1]
89 | s += '.A(' + in_nets[0] + ')' + ', '
90 | if len(in_nets) == 2:
91 | s += '.B(' + in_nets[1] + ')' + ', '
92 | out_net = each_cell[2]
93 | s += '.Y(' + out_net + ')'
94 | s += ' );\n'
95 |
96 | # write to an output file
97 | # folder = './recovered/'
98 | # filename = design + '_recovered' + '.v'
99 | # print('Writing recovered netlist file...')
100 | # f = open(folder + filename, mode="w+")
101 | # f.write(s)
102 | # f.close()
103 | print('Writing done.')
104 | return filename
105 |
106 |
107 | inputs = ['N1', 'N4', 'N8', 'N11', 'N14', 'N17', 'N21', 'N24', 'N27', 'N30', 'N34', 'N37', 'N40', 'N43', 'N47', 'N50', 'N53', 'N56', 'N60', 'N63', 'N66', 'N69', 'N73', 'N76', 'N79', 'N82', 'N86', 'N89', 'N92', 'N95', 'N99', 'N102', 'N105', 'N108', 'N112', 'N115']
108 | outputs = ['N223', 'N329', 'N370', 'N421', 'N430', 'N431', 'N432']
109 | cells_reco = [['AND2X1', ['N8', 'n277'], 'n305'], ['INVX1', ['n305'], 'n195'], ['AND2X1', ['n170', 'n195'], 'n303'], ['INVX1', ['n304'], 'n170'], ['INVX1', ['n303'], 'n216'], ['OR2X1', ['n264', 'N8'], 'n353'], ['INVX1', ['n302'], 'n264'], ['OR2X1', ['n264', 'n216'], 'n301'], ['AND2X1', ['n254', 'n302'], 'n350'], ['INVX1', ['n301'], 'n217'], ['AND2X1', ['n353', 'n363'], 'n388'], ['AND2X1', ['n277', 'n363'], 'n361'], ['AND2X1', ['n183', 'n362'], 'n360'], ['INVX1', ['N105'], 'n362'], ['AND2X1', ['n181', 'n203'], 'n338'], ['OR2X1', ['n258', 'N99'], 'n363'], ['OR2X1', ['n258', 'n214'], 'n354'], ['AND2X1', ['N99', 'n277'], 'n294'], ['INVX1', ['n354'], 'n180'], ['OR2X1', ['n180', 'n212'], 'n349'], ['INVX1', ['n355'], 'n212'], ['OR2X1', ['n255', 'n213'], 'n355'], ['OR2X1', ['n255', 'N86'], 'n359'], ['AND2X1', ['n182', 'n358'], 'n356'], ['INVX1', ['n356'], 'n213'], ['AND2X1', ['N86', 'n277'], 'n312'], ['INVX1', ['N92'], 'n358'], ['AND2X1', ['n171', 'n196'], 'n309'], ['AND2X1', ['n309', 'n310'], 'n295']]
110 |
111 | design = 'c432'
112 | def_path = './libraries/layout_freepdk45/b14_1.def'
113 | def_parser = DefParser(def_path)
114 | def_parser.parse()
115 |
116 | all_via1 = get_all_vias(def_parser, via_type="M2_M1_via")
117 |
118 | # build the net_via dictionary
119 | nets = def_parser.nets.nets
120 | # initialize the nets_via_dict
121 | nets_vias_dict = {}
122 | for net in nets:
123 | net_name = net.name
124 | nets_vias_dict[net_name] = []
125 | # add vias to nets_dict
126 | for each_via in all_via1:
127 | net = each_via[2]
128 | nets_vias_dict[net].append(each_via)
129 |
130 | filename = './recovered/b14_1_C_debug.pickle'
131 | try:
132 | with open(filename, 'rb') as f:
133 | debug = pickle.load(f)
134 | except Exception as e:
135 | print('Unable to read data from', filename, ':', e)
136 |
137 | cells_reco = debug[0]
138 | vias_reco = debug[1]
139 | recover_netlist(def_parser, inputs, outputs, cells_reco)
140 |
141 |
--------------------------------------------------------------------------------
/lef_def_parser/extract_cell.py:
--------------------------------------------------------------------------------
1 | """
2 | Program to extract cell using DEF and LEF data.
3 |
4 | Author: Tri Minh Cao
5 | Email: tricao@utdallas.edu
6 | Date: October 2016
7 | """
8 | from def_parser import *
9 | from lef_parser import *
10 | import util
11 | import pickle
12 | import os
13 | import math
14 |
15 |
16 |
17 | def extract_comp(comp_name, lef_data, def_data, macro_via1_dict):
18 | """
19 | Extract the features and label of each cell
20 | :param comp_name: name of the component
21 | :param lef_data: data parsed from LEF file.
22 | :param def_data: data parsed from DEF file.
23 | :param macro_via1_dict: dictionary that contains macro and via1 data
24 | :return: void
25 | """
26 | # get info of the component and macro from DEF and LEF
27 | comp_info = def_data.components.comp_dict[comp_name]
28 | macro_name = comp_info.macro
29 | macro_info = lef_data.macro_dict[macro_name]
30 | macro_size = macro_info.info["SIZE"]
31 | scale = float(def_data.scale)
32 | # get the placement of the component from DEF file
33 | bottom_left_pt = comp_info.placed
34 | top_right_pt = [bottom_left_pt[0] + int(macro_size[0] * scale),
35 | bottom_left_pt[1] + int(macro_size[1] * scale)]
36 | corners = [bottom_left_pt, top_right_pt]
37 | # find the vias inside the component's area
38 | vias_in_comp = macro_via1_dict[comp_name]
39 | vias_draw = []
40 | for pin in vias_in_comp:
41 | if pin != "MACRO":
42 | for each_via in vias_in_comp[pin]:
43 | each_via_loc = each_via[0]
44 | via_type = each_via[1]
45 | if inside_area(each_via_loc, corners):
46 | vias_draw.append((each_via_loc, via_type))
47 |
48 | # sort the vias by x-coordinate
49 | vias_draw.sort(key=lambda x: x[0][0])
50 | # crop the cell by the vias location
51 |
52 | # margin = 350
53 | # left_pt = [vias_draw[0][0][0] - margin, bottom_left_pt[1]]
54 | # width = vias_draw[-1][0][0] - left_pt[0] + margin
55 | # height = macro_size[1] * scale
56 | # corners = [left_pt]
57 | # corners.append([left_pt[0] + width, left_pt[1] + height])
58 |
59 | # get the pins from LEF data
60 | pins = []
61 | for pin in macro_info.pin_dict.keys():
62 | pin_name = pin.lower()
63 | if pin_name != 'gnd' and pin_name != 'vdd':
64 | pins.append(pin)
65 |
66 | left_pt = bottom_left_pt
67 | # build the features
68 | features = []
69 | # number of vias
70 | num_vias = len(vias_draw)
71 | features.append(num_vias)
72 | x_bound = left_pt[0]
73 | y_bound = left_pt[1]
74 | # NOTE: some cells have 4 vias
75 | # We assume the maximum number of vias in a cell is 4
76 | for each_via in vias_draw:
77 | x_loc = each_via[0][0] - x_bound
78 | y_loc = each_via[0][1] - y_bound
79 | # features.append(x_loc)
80 | features.append(y_loc)
81 | # determine the type of each via
82 | via_loc = each_via[0]
83 | # print(via_loc)
84 | pin_found = False
85 | for pin in pins:
86 | pin_data = macro_info.pin_dict[pin]
87 | pin_direction = pin_data.info["DIRECTION"].lower()
88 | layers = pin_data.info["PORT"].info["LAYER"]
89 | for layer in layers:
90 | for shape in layer.shapes:
91 | # scale the points
92 | corners = util.scalePts(shape.points, scale)
93 | corners = relocate_area(bottom_left_pt, corners)
94 | # print(corners)
95 | if inside_area(via_loc, corners):
96 | # print(pin)
97 | # print(pin_direction)
98 | pin_found = True
99 | if pin_direction == 'output':
100 | features.append(1)
101 | elif pin_direction == 'input':
102 | features.append(0)
103 | break
104 | if pin_found:
105 | break
106 | if not pin_found:
107 | features.append(-1)
108 | # if there are only two vias, then there are no via3
109 | if num_vias < 4:
110 | # temp = [-1 for i in range((4 - num_vias) * 3)]
111 | # trial: only use num_vias, no x-coordinate and y-coordinate
112 | temp = [-1 for i in range((4 - num_vias) * 2)]
113 | features.extend(temp)
114 |
115 | # add the distance between vias
116 | for i in range(len(vias_draw) - 1):
117 | for j in range(i + 1, len(vias_draw)):
118 | x_dist = vias_draw[j][0][0] - vias_draw[i][0][0]
119 | y_dist = vias_draw[j][0][1] - vias_draw[i][0][1]
120 | features.append(x_dist)
121 | features.append(y_dist)
122 | # add extra features in case of having less vias
123 | if num_vias < 4:
124 | if num_vias == 1:
125 | remain_dists = 2 * int(util.nCr(4, 2))
126 | else:
127 | remain_dists = 2 * (int(util.nCr(4, 2) - util.nCr(num_vias, 2)))
128 | temp = [0 for i in range(remain_dists)]
129 | features.extend(temp)
130 | # print(macro_name)
131 | # print(features)
132 | # print(len(features))
133 | # add more features here
134 | label = macro_name
135 | return features, label
136 |
137 | # Main Class
138 | if __name__ == '__main__':
139 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
140 | lef_parser = LefParser(lef_file)
141 | lef_parser.parse()
142 |
143 | train_files = ['c1355.def', "c1355_INVX8.def", "c2670.def", "c2670_no_AND2.def",
144 | "c2670_OR2.def", "c3540.def", "c3540_no_AND2.def",
145 | "c3540_no_NAND2.def", "c5315.def", "c7552.def"]
146 | # train_files = ['c1355.def']
147 | folder = "./libraries/layout_freepdk45_old/"
148 | for i in range(len(train_files)):
149 | def_path = os.path.join(folder, train_files[i])
150 | print (def_path)
151 | # def_path = './libraries/layout_freepdk45/c1355.def'
152 | def_parser = DefParser(def_path)
153 | def_parser.parse()
154 |
155 | print ("Process file:", def_path)
156 | # test macro and via (note: only via1)
157 | macro_via1_dict = util.macro_and_via1(def_parser, via_type="M2_M1_via")
158 | samples = []
159 | labels = []
160 | num_comps = 0
161 | for each_comp in macro_via1_dict:
162 | comp_info = def_parser.components.comp_dict[each_comp]
163 | print (each_comp)
164 | features, label = extract_comp(each_comp, lef_parser,
165 | def_parser, macro_via1_dict)
166 | samples.append(features)
167 | labels.append(label)
168 | num_comps += 1
169 | # if num_comps > 10:
170 | # break
171 | # print the features
172 | # for i in range(len(samples)):
173 | # print(samples[i])
174 | # print(len(samples[i]))
175 | # print(labels[i])
176 | # print()
177 | dataset = (samples, labels)
178 |
179 | # save the training data
180 | result_folder = './training_data/'
181 | set_filename = os.path.join(result_folder, train_files[i])
182 | set_filename += '.pickle'
183 | try:
184 | with open(set_filename, 'wb') as f:
185 | pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
186 | except Exception as e:
187 | print('Unable to save data to', set_filename, ':', e)
188 | print ("Finished!")
189 |
190 |
191 |
--------------------------------------------------------------------------------
/lef_def_parser/libraries/FreePDK45/small.lef:
--------------------------------------------------------------------------------
1 |
2 | MACRO AND2X1
3 | CLASS CORE ;
4 | ORIGIN 0 0 ;
5 | FOREIGN AND2X1 0 0 ;
6 | SIZE 1.14 BY 2.47 ;
7 | SYMMETRY X Y ;
8 | SITE CoreSite ;
9 | PIN A
10 | DIRECTION INPUT ;
11 | USE SIGNAL ;
12 | PORT
13 | LAYER metal1 ;
14 | RECT 0.1475 1.2275 0.2825 1.3625 ;
15 | END
16 | END A
17 | PIN B
18 | DIRECTION INPUT ;
19 | USE SIGNAL ;
20 | PORT
21 | LAYER metal1 ;
22 | RECT 0.3475 1.2275 0.4825 1.3625 ;
23 | END
24 | END B
25 | PIN Y
26 | DIRECTION OUTPUT ;
27 | USE SIGNAL ;
28 | PORT
29 | LAYER metal1 ;
30 | RECT 0.9175 0.87 0.9825 2.1075 ;
31 | END
32 | END Y
33 | PIN gnd
34 | DIRECTION INOUT ;
35 | USE GROUND ;
36 | SHAPE ABUTMENT ;
37 | PORT
38 | LAYER metal1 ;
39 | RECT 0.7075 -0.065 0.7725 1.11 ;
40 | RECT 0 -0.065 1.14 0.065 ;
41 | END
42 | END gnd
43 | PIN vdd
44 | DIRECTION INOUT ;
45 | USE POWER ;
46 | SHAPE ABUTMENT ;
47 | PORT
48 | LAYER metal1 ;
49 | RECT 0.1425 1.5575 0.2075 2.535 ;
50 | RECT 0.6225 1.5575 0.6875 2.535 ;
51 | RECT 0 2.405 1.14 2.535 ;
52 | END
53 | END vdd
54 | OBS
55 | LAYER metal1 ;
56 | RECT 0.1425 0.59 0.2075 1.145 ;
57 | RECT 0.1425 1.08 0.6425 1.145 ;
58 | RECT 0.5775 1.08 0.6425 1.4925 ;
59 | RECT 0.4375 1.4275 0.7775 1.4925 ;
60 | RECT 0.4375 1.4275 0.5025 2.1075 ;
61 | END
62 | END AND2X1
63 |
64 | MACRO AND2X2
65 | CLASS CORE ;
66 | ORIGIN 0 0 ;
67 | FOREIGN AND2X2 0 0 ;
68 | SIZE 0.95 BY 2.47 ;
69 | SYMMETRY X Y ;
70 | SITE CoreSite ;
71 | PIN A
72 | DIRECTION INPUT ;
73 | USE SIGNAL ;
74 | PORT
75 | LAYER metal1 ;
76 | RECT 0.1475 0.8475 0.2225 0.9825 ;
77 | END
78 | END A
79 | PIN B
80 | DIRECTION INPUT ;
81 | USE SIGNAL ;
82 | PORT
83 | LAYER metal1 ;
84 | RECT 0.4175 0.9175 0.6025 1.0525 ;
85 | END
86 | END B
87 | PIN Y
88 | DIRECTION OUTPUT ;
89 | USE SIGNAL ;
90 | PORT
91 | LAYER metal1 ;
92 | RECT 0.7275 0.1725 0.7925 2.235 ;
93 | RECT 0.7275 1.1075 0.8025 2.235 ;
94 | END
95 | END Y
96 | PIN gnd
97 | DIRECTION INOUT ;
98 | USE GROUND ;
99 | SHAPE ABUTMENT ;
100 | PORT
101 | LAYER metal1 ;
102 | RECT 0.4425 -0.065 0.5075 0.7225 ;
103 | RECT 0 -0.065 0.95 0.065 ;
104 | END
105 | END gnd
106 | PIN vdd
107 | DIRECTION INOUT ;
108 | USE POWER ;
109 | SHAPE ABUTMENT ;
110 | PORT
111 | LAYER metal1 ;
112 | RECT 0.0675 1.265 0.1325 2.535 ;
113 | RECT 0.4425 1.265 0.5075 2.535 ;
114 | RECT 0 2.405 0.95 2.535 ;
115 | END
116 | END vdd
117 | OBS
118 | LAYER metal1 ;
119 | RECT 0.0325 0.2075 0.1675 0.6925 ;
120 | RECT 0.0325 0.6275 0.3525 0.6925 ;
121 | RECT 0.2875 0.7875 0.5975 0.8525 ;
122 | RECT 0.2875 0.6275 0.3525 1.78 ;
123 | RECT 0.2175 1.295 0.3525 1.78 ;
124 | END
125 | END AND2X2
126 |
127 | MACRO AOI21X1
128 | CLASS CORE ;
129 | ORIGIN 0 0 ;
130 | FOREIGN AOI21X1 0 0 ;
131 | SIZE 0.95 BY 2.47 ;
132 | SYMMETRY X Y ;
133 | SITE CoreSite ;
134 | PIN A
135 | DIRECTION INPUT ;
136 | USE SIGNAL ;
137 | PORT
138 | LAYER metal1 ;
139 | RECT 0.1575 0.8475 0.2825 0.9825 ;
140 | END
141 | END A
142 | PIN B
143 | DIRECTION INPUT ;
144 | USE SIGNAL ;
145 | PORT
146 | LAYER metal1 ;
147 | RECT 0.3475 0.7275 0.4475 0.8625 ;
148 | END
149 | END B
150 | PIN C
151 | DIRECTION INPUT ;
152 | USE SIGNAL ;
153 | PORT
154 | LAYER metal1 ;
155 | RECT 0.6675 0.8475 0.7925 0.9825 ;
156 | END
157 | END C
158 | PIN Y
159 | DIRECTION OUTPUT ;
160 | USE SIGNAL ;
161 | PORT
162 | LAYER metal1 ;
163 | RECT 0.5125 0.1475 0.5775 0.9825 ;
164 | RECT 0.5375 0.9175 0.6025 1.15 ;
165 | RECT 0.5375 1.085 0.7675 1.15 ;
166 | RECT 0.7025 1.085 0.7675 2.235 ;
167 | END
168 | END Y
169 | PIN gnd
170 | DIRECTION INOUT ;
171 | USE GROUND ;
172 | SHAPE ABUTMENT ;
173 | PORT
174 | LAYER metal1 ;
175 | RECT 0.1375 -0.065 0.2025 0.7025 ;
176 | RECT 0.7525 -0.065 0.8175 0.6725 ;
177 | RECT 0 -0.065 0.95 0.065 ;
178 | END
179 | END gnd
180 | PIN vdd
181 | DIRECTION INOUT ;
182 | USE POWER ;
183 | SHAPE ABUTMENT ;
184 | PORT
185 | LAYER metal1 ;
186 | RECT 0.3225 1.345 0.3875 2.535 ;
187 | RECT 0 2.405 0.95 2.535 ;
188 | END
189 | END vdd
190 | OBS
191 | LAYER metal1 ;
192 | RECT 0.1375 1.215 0.5775 1.28 ;
193 | RECT 0.1375 1.215 0.2025 2.235 ;
194 | RECT 0.5125 1.215 0.5775 2.235 ;
195 | END
196 | END AOI21X1
197 |
198 | MACRO AOI22X1
199 | CLASS CORE ;
200 | ORIGIN 0 0 ;
201 | FOREIGN AOI22X1 0 0 ;
202 | SIZE 1.14 BY 2.47 ;
203 | SYMMETRY X Y ;
204 | SITE CoreSite ;
205 | PIN A
206 | DIRECTION INPUT ;
207 | USE SIGNAL ;
208 | PORT
209 | LAYER metal1 ;
210 | RECT 0.8575 0.7675 0.9825 0.9825 ;
211 | END
212 | END A
213 | PIN B
214 | DIRECTION INPUT ;
215 | USE SIGNAL ;
216 | PORT
217 | LAYER metal1 ;
218 | RECT 0.6675 0.7675 0.7925 0.9825 ;
219 | END
220 | END B
221 | PIN C
222 | DIRECTION INPUT ;
223 | USE SIGNAL ;
224 | PORT
225 | LAYER metal1 ;
226 | RECT 0.0325 0.7675 0.2225 0.9825 ;
227 | END
228 | END C
229 | PIN D
230 | DIRECTION INPUT ;
231 | USE SIGNAL ;
232 | PORT
233 | LAYER metal1 ;
234 | RECT 0.4775 0.7675 0.6025 0.9825 ;
235 | END
236 | END D
237 | PIN Y
238 | DIRECTION OUTPUT ;
239 | USE SIGNAL ;
240 | PORT
241 | LAYER metal1 ;
242 | RECT 0.3475 0.13 0.4125 2.0675 ;
243 | RECT 0.3475 0.13 0.6 0.195 ;
244 | RECT 0.535 0.13 0.6 0.7025 ;
245 | END
246 | END Y
247 | PIN gnd
248 | DIRECTION INOUT ;
249 | USE GROUND ;
250 | SHAPE ABUTMENT ;
251 | PORT
252 | LAYER metal1 ;
253 | RECT 0.16 -0.065 0.225 0.7025 ;
254 | RECT 0.915 -0.065 0.98 0.7025 ;
255 | RECT 0 -0.065 1.14 0.065 ;
256 | END
257 | END gnd
258 | PIN vdd
259 | DIRECTION INOUT ;
260 | USE POWER ;
261 | SHAPE ABUTMENT ;
262 | PORT
263 | LAYER metal1 ;
264 | RECT 0.725 1.1775 0.79 2.535 ;
265 | RECT 0 2.405 1.14 2.535 ;
266 | END
267 | END vdd
268 | OBS
269 | LAYER metal1 ;
270 | RECT 0.535 1.0475 0.98 1.1125 ;
271 | RECT 0.915 1.0475 0.98 2.0675 ;
272 | RECT 0.16 1.0475 0.225 2.1975 ;
273 | RECT 0.535 1.0475 0.6 2.1975 ;
274 | RECT 0.16 2.1325 0.6 2.1975 ;
275 | END
276 | END AOI22X1
277 |
278 | MACRO BUFX2
279 | CLASS CORE ;
280 | ORIGIN 0 0 ;
281 | FOREIGN BUFX2 0 0 ;
282 | SIZE 0.76 BY 2.47 ;
283 | SYMMETRY X Y ;
284 | SITE CoreSite ;
285 | PIN A
286 | DIRECTION INPUT ;
287 | USE SIGNAL ;
288 | PORT
289 | LAYER metal1 ;
290 | RECT 0.53 0.8975 0.645 1.0325 ;
291 | END
292 | END A
293 | PIN Y
294 | DIRECTION OUTPUT ;
295 | USE SIGNAL ;
296 | PORT
297 | LAYER metal1 ;
298 | RECT 0.0325 0.13 0.0975 2.235 ;
299 | RECT 0.0325 0.13 0.225 0.7025 ;
300 | RECT 0.0325 0.7675 0.225 2.235 ;
301 | END
302 | END Y
303 | PIN gnd
304 | DIRECTION INOUT ;
305 | USE GROUND ;
306 | SHAPE ABUTMENT ;
307 | PORT
308 | LAYER metal1 ;
309 | RECT 0.345 -0.065 0.41 0.7025 ;
310 | RECT 0 -0.065 0.76 0.065 ;
311 | END
312 | END gnd
313 | PIN vdd
314 | DIRECTION INOUT ;
315 | USE POWER ;
316 | SHAPE ABUTMENT ;
317 | PORT
318 | LAYER metal1 ;
319 | RECT 0.345 1.2275 0.41 2.535 ;
320 | RECT 0 2.405 0.76 2.535 ;
321 | END
322 | END vdd
323 | OBS
324 | LAYER metal1 ;
325 | RECT 0.53 0.13 0.705 0.8325 ;
326 | RECT 0.29 0.7675 0.705 0.8325 ;
327 | RECT 0.29 0.7675 0.465 0.9025 ;
328 | RECT 0.29 0.7675 0.355 1.1625 ;
329 | RECT 0.29 1.0975 0.705 1.1625 ;
330 | RECT 0.53 1.0975 0.705 1.815 ;
331 | END
332 | END BUFX2
333 |
--------------------------------------------------------------------------------
/lef_def_parser/def_parser.py:
--------------------------------------------------------------------------------
1 | """
2 | DEF Parser
3 | Author: Tri Minh Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 |
8 | from def_util import *
9 | from util import *
10 |
11 |
12 | class DefParser:
13 | """
14 | DefParser will parse a DEF file and store related information of the design.
15 | """
16 |
17 | def __init__(self, def_file):
18 | self.file_path = def_file
19 | # can make the stack to be an object if needed
20 | self.stack = []
21 | # store the statements info in a list
22 | self.sections = []
23 | self.property = None
24 | self.components = None
25 | self.pins = None
26 | self.nets = None
27 | self.tracks = []
28 | self.gcellgrids = []
29 | self.rows = []
30 | self.diearea = None
31 | self.version = None
32 | self.dividerchar = None
33 | self.busbitchars = None
34 | self.design_name = None
35 | self.units = None
36 | self.scale = None
37 | # add support for custom VIAS
38 | self.vias = []
39 |
40 | def parse(self):
41 | """
42 | Main method to parse the DEF file
43 | :return: void
44 | """
45 | print ("Start parsing DEF file...")
46 | # open the file and start reading
47 | f = open(self.file_path, "r+")
48 | # the program will run until the end of file f
49 | for line in f:
50 | # split the string by the plus '+' sign
51 | parts = split_plus(line)
52 | for each_part in parts:
53 | # split each sub-string by space
54 | info = split_space(each_part)
55 | if len(info) > 0:
56 | #print info
57 | if(info[0] == "VIAS"):
58 | handleVias(self.vias, f)
59 |
60 | if info[0] == "PINS":
61 | new_pins = Pins(int(info[1]))
62 | self.stack.append(new_pins)
63 | # print (new_pins.type)
64 | elif info[0] == "VERSION":
65 | self.version = info[1]
66 | elif info[0] == "DIVIDERCHAR":
67 | self.dividerchar = info[1]
68 | elif info[0] == "BUSBITCHARS":
69 | self.busbitchars = info[1]
70 | elif info[0] == "DESIGN" and len(info) <= 3:
71 | # differentiate with the DESIGN statement inside
72 | # PROPERTYDEFINITIONS section.
73 | self.design_name = info[1]
74 | elif info[0] == "UNITS":
75 | self.units = info[2]
76 | self.scale = info[3]
77 | elif info[0] == "PROPERTYDEFINITIONS":
78 | new_property = Property()
79 | self.stack.append(new_property)
80 | elif info[0] == "DIEAREA":
81 | info_split = split_parentheses(info)
82 | pt1 = (int(info_split[1][0]), int(info_split[1][1]))
83 | pt2 = (int(info_split[2][0]), int(info_split[2][1]))
84 | self.diearea = [pt1, pt2]
85 | elif info[0] == "COMPONENTS":
86 | new_comps = Components(int(info[1]))
87 | self.stack.append(new_comps)
88 | elif info[0] == "NETS":
89 | new_nets = Nets(int(info[1]))
90 | self.stack.append(new_nets)
91 | elif info[0] == "TRACKS":
92 | new_tracks = Tracks(info[1])
93 | new_tracks.pos = int(info[2])
94 | new_tracks.do = int(info[4])
95 | new_tracks.step = int(info[6])
96 | new_tracks.layer = info[8]
97 | self.tracks.append(new_tracks)
98 | elif info[0] == "GCELLGRID":
99 | new_gcellgrid = GCellGrid(info[1])
100 | new_gcellgrid.pos = int(info[2])
101 | new_gcellgrid.do = int(info[4])
102 | new_gcellgrid.step = int(info[6])
103 | self.gcellgrids.append(new_gcellgrid)
104 | elif info[0] == "ROW":
105 | new_row = Row(info[1])
106 | new_row.site = info[2]
107 | new_row.pos = (int(info[3]), int(info[4]))
108 | new_row.orient = info[5]
109 | new_row.do = int(info[7])
110 | new_row.by = int(info[9])
111 | new_row.step = (int(info[11]), int(info[12]))
112 | self.rows.append(new_row)
113 | elif info[0] == "END":
114 | if len(self.stack) > 0:
115 | self.sections.append(self.stack.pop())
116 | # print ("finish")
117 | else:
118 | if len(self.stack) > 0:
119 | latest_obj = self.stack[-1]
120 | latest_obj.parse_next(info)
121 | f.close()
122 | # put the elements in sections list into separate variables
123 | for sec in self.sections:
124 | if sec.type == "PROPERTY_DEF":
125 | self.property = sec
126 | elif sec.type == "COMPONENTS_DEF":
127 | self.components = sec
128 | elif sec.type == "PINS_DEF":
129 | self.pins = sec
130 | elif sec.type == "NETS_DEF":
131 | self.nets = sec
132 | print ("Parsing DEF file done.\n")
133 |
134 | def to_def_format(self):
135 | s = ""
136 | s += "# Generated by tricao@utdallas.edu for testing only.\n\n"
137 | s += "VERSION " + self.version + " ;" + "\n"
138 | s += "DIVIDERCHAR " + self.dividerchar + " ;" + "\n"
139 | s += "BUSBITCHARS " + self.busbitchars + " ;" + "\n"
140 | s += "DESIGN " + self.design_name + " ;" + "\n"
141 | s += "UNITS DISTANCE " + self.units + " " + self.scale + " ;" + "\n"
142 | s += "\n"
143 | props = self.sections[0]
144 | s += props.to_def_format()
145 | s += "\n"
146 | s += "DIEAREA"
147 | s += (" ( " + str(self.diearea[0][0]) + " " + str(self.diearea[0][1]) +
148 | " )")
149 | s += (" ( " + str(self.diearea[1][0]) + " " + str(self.diearea[1][1]) +
150 | " )" + " ;")
151 | s += "\n\n"
152 | for each_row in self.rows:
153 | s += each_row.to_def_format()
154 | s += "\n"
155 | s += "\n"
156 | for each_tracks in self.tracks:
157 | s += each_tracks.to_def_format()
158 | s += "\n"
159 | s += "\n"
160 | for each_gcell in self.gcellgrids:
161 | s += each_gcell.to_def_format()
162 | s += "\n"
163 | s += "\n"
164 | comps = self.sections[1]
165 | s += comps.to_def_format()
166 | s += "\n\n"
167 | pins = self.sections[2]
168 | s += pins.to_def_format()
169 | s += "\n\n"
170 | nets = self.sections[3]
171 | s += nets.to_def_format()
172 | return s
173 |
174 | def write_def(self, new_def, back_end=True, front_end=True):
175 | """
176 | Write a new def file based on the information in the DefParser object.
177 | Note: this method writes all information
178 | :param new_def: path of the new DEF file
179 | :param back_end: write BEOL information or not.
180 | :param front_end: write FEOL info or not.
181 | :return: void
182 | """
183 | f = open(new_def, mode="w+")
184 | print("Writing DEF file...")
185 | f.write(self.to_def_format())
186 | print("Writing done.")
187 | f.close()
188 |
189 |
190 |
191 | def handleVias(vias, f):
192 |
193 | viasData = []
194 | line = next(f)
195 |
196 | while(line.strip().lower() != 'end vias'):
197 | vias.append(line)
198 | line = next(f)
199 |
200 | # Main Class
201 | #if __name__ == '__main__':
202 | # read_path = "./libraries/DEF/c880_tri.def"
203 | """read_path = "./libraries/DEF/c1908.def"
204 | def_parser = DefParser(read_path)
205 | def_parser.parse()
206 |
207 | # for each_pin in def_parser.pins.pins:
208 | # print (each_pin)
209 |
210 | for net in def_parser.nets:
211 | print (net)
212 | """
213 | #print (def_parser.to_def_format())
214 |
215 | # test macro and via (note: only via1)
216 | # macro_dict = macro_and_via1(def_parser)
217 | # for comp in macro_dict:
218 | # print (comp)
219 | # for pin in macro_dict[comp]:
220 | # print (" " + pin + ": " + str(macro_dict[comp][pin]))
221 | # print ()
222 |
223 |
224 |
225 |
226 |
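227 | # Illustrative usage sketch (added for clarity, not part of the original
228 | # module). The DEF path is the same example path used in the commented-out
229 | # block above and is an assumption; substitute any DEF file.
230 | # def_parser = DefParser("./libraries/DEF/c1908.def")
231 | # def_parser.parse()
232 | # print(def_parser.design_name, def_parser.scale)
233 | # for net in def_parser.nets.nets[:5]:
234 | #     print(net.name)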
--------------------------------------------------------------------------------
/lef_def_parser/plot_cell.py:
--------------------------------------------------------------------------------
1 | """
2 | Program to plot cell using DEF and LEF data.
3 |
4 | Author: Tri Minh Cao
5 | Email: tricao@utdallas.edu
6 | Date: September 2016
7 | """
8 | from def_parser import *
9 | from lef_parser import *
10 | import util
11 | import matplotlib.pyplot as plt
12 | import time
13 |
14 | def inside_area(location, corners):
15 | """
16 | Check if the location is inside an area.
17 | :param location: location
18 | :param corners: corner points of the rectangle area.
19 | :return:
20 | """
21 | x1 = corners[0][0]
22 | x2 = corners[1][0]
23 | y1 = corners[0][1]
24 | y2 = corners[1][1]
25 | return (location[0] > x1 and location[0] < x2
26 | and location[1] > y1 and location[1] < y2)
27 |
28 |
29 | def macro_and_via1(def_info, via_type):
30 | """
31 | Method to get macros/cells info and via1 information.
32 | :param def_info: information from a DEF file
33 | :param via_type: the name of the via type, such as "via1" or "M2_M1_via"
34 | :return: a macro dictionary that contains via info
35 | """
36 | result_dict = {}
37 | # add components to the dictionary
38 | for each_comp in def_info.components.comps:
39 | result_dict[each_comp.name] = {}
40 | result_dict[each_comp.name]["MACRO"] = each_comp.macro
41 | # process the nets
42 | for net in def_info.nets.nets:
43 | for route in net.routed:
44 | if route.end_via != None:
45 | # check for the via type of the end_via
46 | if route.end_via[:len(via_type)] == via_type:
47 | via_loc = route.end_via_loc
48 | via_name = route.end_via
49 | via_info = (via_loc, via_name)
50 | # add the via to the component dict
51 | for each_comp in net.comp_pin:
52 | comp_name = each_comp[0]
53 | pin_name = each_comp[1]
54 | if comp_name in result_dict:
55 | if pin_name in result_dict[comp_name]:
56 | result_dict[comp_name][pin_name].append(via_info)
57 | else:
58 | result_dict[comp_name][pin_name] = [via_info]
59 | #print (result_dict)
60 | return result_dict
61 |
62 | def draw_via(location, via_info, color='blue'):
63 | """
64 | Method to draw a via using the location and VIA info from the LEF file.
65 | :param location: via location
66 | :param via_info: VIA data from LEF file.
67 | :return: void
68 | """
69 | for each_layer in via_info.layers:
70 | # print (each_layer.name)
71 | if each_layer.name == 'metal2':
72 | color = 'red'
73 | elif each_layer.name == 'metal1':
74 | color = 'blue'
75 | for shape in each_layer.shapes:
76 | scaled_pts = scalePts(shape.points, SCALE)
77 | for i in range(len(scaled_pts)):
78 | scaled_pts[i][0] += location[0]
79 | scaled_pts[i][1] += location[1]
80 | # print (scaled_pts)
81 | if shape.type == "RECT":
82 | scaled_pts = rect_to_polygon(scaled_pts)
83 | # print (scaled_pts)
84 | draw_shape = plt.Polygon(scaled_pts, closed=True, fill=True,
85 | color=color)
86 | plt.gca().add_patch(draw_shape)
87 |
88 | def plot_component(comp_name, lef_data, def_data, macro_via1_dict):
89 | """
90 | Use pyplot to plot a component from the DEF data
91 | :param comp_name: name of the component
92 | :param lef_data: data parsed from LEF file.
93 | :param def_data: data parsed from DEF file.
94 | :param macro_via1_dict: dictionary that contains macro and via1 data
95 | :return: void
96 | """
97 | # get info of the component and macro from DEF and LEF
98 | comp_info = def_data.components.comp_dict[comp_name]
99 | macro_name = comp_info.macro
100 | macro_info = lef_data.macro_dict[macro_name]
101 | macro_size = macro_info.info["SIZE"]
102 | scale = float(def_data.scale)
103 | # get the placement of the component
104 | bottom_left_pt = comp_info.placed
105 | top_right_pt = [int(macro_size[0] * scale),
106 | int(macro_size[1] * scale)]
107 | corners = [[0, 0], top_right_pt]
108 | # find the vias inside the component's area
109 | vias_in_comp = macro_via1_dict[comp_name]
110 | vias_draw = []
111 | for pin in vias_in_comp:
112 | if pin != "MACRO":
113 | for each_via in vias_in_comp[pin]:
114 | each_via_loc = each_via[0]
115 | via_type = each_via[1]
116 | new_via_loc = [0, 0]
117 | new_via_loc[0] = each_via_loc[0] - bottom_left_pt[0]
118 | new_via_loc[1] = each_via_loc[1] - bottom_left_pt[1]
119 | if inside_area(new_via_loc, corners):
120 | vias_draw.append((new_via_loc, via_type))
121 |
122 | # NOTE: figsize(6, 9) can be changed to adapt to other cell size
123 | plt.figure(figsize=(3, 5), dpi=80, frameon=False)
124 | # draw the cell boundary
125 | # scaled_pts = rect_to_polygon(corners)
126 | # draw_shape = plt.Polygon(scaled_pts, closed=True, fill=None,
127 | # color="blue")
128 | # plt.gca().add_patch(draw_shape)
129 | # plot vias
130 | for via in vias_draw:
131 | via_name = via[1]
132 | via_info = lef_data.via_dict[via_name]
133 | via_loc = via[0]
134 | draw_via(via_loc, via_info)
135 | # scale the axis of the subplot
136 | test_axis = [corners[0][0], corners[1][0], corners[0][1], corners[1][1]]
137 | # print (test_axis)
138 | plt.axis(test_axis)
139 | plt.axis('off')
140 | plt.gca().set_aspect('equal', adjustable='box')
141 | # plt.savefig('foo.png', bbox_inches='tight')
142 | # compose the output file name
143 | out_folder = './images/'
144 | current_time = time.strftime('%H%M%d%m%Y')
145 | out_file = comp_name + '_' + macro_name + '_' + current_time
146 | # plt.savefig(out_folder + out_file, transparent=True)
147 | plt.savefig(out_folder + out_file, transparent=False)
148 | # plt.show()
149 | plt.close('all')
150 |
151 | def plot_component2(comp_name, lef_data, def_data, macro_via1_dict):
152 | """
153 | Use pyplot to plot a component from the DEF data
154 | :param comp_name: name of the component
155 | :param lef_data: data parsed from LEF file.
156 | :param def_data: data parsed from DEF file.
157 | :param macro_via1_dict: dictionary that contains macro and via1 data
158 | :return: void
159 | """
160 | # get info of the component and macro from DEF and LEF
161 | comp_info = def_data.components.comp_dict[comp_name]
162 | macro_name = comp_info.macro
163 | macro_info = lef_data.macro_dict[macro_name]
164 | macro_size = macro_info.info["SIZE"]
165 | scale = float(def_data.scale)
166 | # get the placement of the component
167 | bottom_left_pt = comp_info.placed
168 | top_right_pt = [bottom_left_pt[0] + int(macro_size[0] * scale),
169 | bottom_left_pt[1] + int(macro_size[1] * scale)]
170 | corners = [bottom_left_pt, top_right_pt]
171 | # find the vias inside the component's area
172 | vias_in_comp = macro_via1_dict[comp_name]
173 | vias_draw = []
174 | for pin in vias_in_comp:
175 | if pin != "MACRO":
176 | for each_via in vias_in_comp[pin]:
177 | each_via_loc = each_via[0]
178 | via_type = each_via[1]
179 | # new_via_loc = [0, 0]
180 | # new_via_loc[0] = each_via_loc[0]
181 | # new_via_loc[1] = each_via_loc[1]
182 | if inside_area(each_via_loc, corners):
183 | vias_draw.append((each_via_loc, via_type))
184 |
185 | # sort the vias by x-coordinate
186 | vias_draw.sort(key=lambda x: x[0][0])
187 | # print (vias_draw)
188 | # NOTE: figsize(6, 9) can be changed to adapt to other cell size
189 | plt.figure(figsize=(1, 1.6), dpi=80, frameon=False)
190 | margin = 350
191 | left_pt = [vias_draw[0][0][0] - margin, bottom_left_pt[1]]
192 | width = vias_draw[-1][0][0] - left_pt[0] + margin
193 | height = macro_size[1] * scale
194 | # print (height)
195 | corners = [left_pt]
196 | corners.append((left_pt[0] + width, left_pt[1] + height))
197 | # draw the cell boundary
198 | # scaled_pts = rect_to_polygon(corners)
199 | # draw_shape = plt.Polygon(scaled_pts, closed=True, fill=None,
200 | # color="blue")
201 | # plt.gca().add_patch(draw_shape)
202 | # plot vias
203 | for via in vias_draw:
204 | via_name = via[1]
205 | via_info = lef_data.via_dict[via_name]
206 | via_loc = via[0]
207 | draw_via(via_loc, via_info)
208 |
209 | # scale the axis of the subplot
210 | axis = [corners[0][0], corners[1][0], corners[0][1], corners[1][1]]
211 | # print (test_axis)
212 | plt.axis(axis)
213 | plt.axis('off')
214 | plt.gca().set_aspect('equal', adjustable='box')
215 | # plt.savefig('foo.png', bbox_inches='tight')
216 | # compose the output file name
217 | out_folder = './images/'
218 | current_time = time.strftime('%H%M%S%d%m%Y')
219 | out_file = comp_name + '_' + macro_name + '_' + current_time
220 | # plt.savefig(out_folder + out_file, transparent=True)
221 | plt.savefig(out_folder + out_file, transparent=False)
222 | # plt.show()
223 | plt.close('all')
224 |
225 | # Main script
226 | if __name__ == '__main__':
227 | # read_path = './libraries/DEF/c1908_tri_no_metal1.def'
228 | read_path = './libraries/layout_freepdk45/c3540.def'
229 | def_parser = DefParser(read_path)
230 | def_parser.parse()
231 |
232 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
233 | lef_parser = LefParser(lef_file)
234 | lef_parser.parse()
235 |
236 | print ("Process file:", read_path)
237 | # test macro and via (note: only via1)
238 | macro_via1_dict = macro_and_via1(def_parser, via_type="M2_M1_via")
239 | # for comp in macro_via1_dict:
240 | # print (comp)
241 | # for pin in macro_via1_dict[comp]:
242 | # print (" " + pin + ": " + str(macro_via1_dict[comp][pin]))
243 | # print ()
244 | # plot_component("U521", lef_parser, def_parser, macro_via1_dict)
245 | num_comps = 0
246 | for each_comp in macro_via1_dict:
247 | comp_info = def_parser.components.comp_dict[each_comp]
248 | # if (comp_info.macro == "INVX8"):
249 | print (each_comp)
250 | plot_component2(each_comp, lef_parser, def_parser, macro_via1_dict)
251 | num_comps += 1
252 | # if num_comps > 20:
253 | # break
254 | print ("Finished!")
255 | # plot_component("U4068", lef_parser, def_parser, macro_via1_dict)
256 |
257 |
--------------------------------------------------------------------------------
/lef_def_parser/split_def.py:
--------------------------------------------------------------------------------
1 | """
2 | DEF Splitter for Split Manufacturing
3 | Author: Tri Minh Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 | from def_parser import *
8 | from lef_parser import *
9 |
10 | def proper_layers(back_end, front_end, split_layer):
11 | layers = set()
12 |     if not back_end and not front_end:
13 |         return layers
14 |     elif back_end and not front_end:
15 |         for each in LAYERS:
16 |             if compare_metal(each, split_layer) >= 0:
17 |                 layers.add(each)
18 |         return layers
19 |     elif not back_end and front_end:
20 | for each in LAYERS:
21 | if compare_metal(each, split_layer) < 0:
22 | layers.add(each)
23 | return layers
24 | else:
25 | return LAYERS
26 |
27 | # names of back-end and front-end layers
28 | LAYERS = {"poly", "metal1", "metal2", "metal3", "metal4", "metal5", "metal6",
29 | "metal7", "metal8", "metal9", "metal10"}
30 |
31 |
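# Usage sketch (illustrative, not part of the original module): with a split at
# metal3, keeping only the back end yields every layer at or above the split,
# while keeping only the front end yields the layers below it (set order may vary).
#   >>> proper_layers(back_end=True, front_end=False, split_layer="metal3")
#   {'metal3', 'metal4', 'metal5', 'metal6', 'metal7', 'metal8', 'metal9', 'metal10'}
#   >>> proper_layers(back_end=False, front_end=True, split_layer="metal3")
#   {'poly', 'metal1', 'metal2'}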
32 | # Module-level function to output the NETS data selectively; it may need to
33 | # consult LEF data, which requires a wider scope than a single net.
34 | def output_nets(nets, def_info, lef_info):
35 | """
36 | Output the NETS section information with possible back end and front
37 | end selections.
38 | :param def_info: a DefParser object that contains DEF info.
39 | :param lef_info: a LefParser object
40 | :return: string
41 | """
42 | s = ""
43 | # add each net's data to nets_str
44 | nets_str = ""
45 | num_nets = 0
46 | for net in nets.nets:
47 | net_data = output_net(net, def_info, lef_info)
48 | if net_data != "":
49 | nets_str += net_data
50 | nets_str += "\n"
51 | num_nets += 1
52 | if num_nets > 0:
53 | s += "NETS " + str(num_nets) + " ;\n"
54 | s += nets_str
55 | s += "END NETS"
56 | return s
57 |
58 |
59 | def output_net_routes(net, def_info, lef_info):
60 | """
61 |     Output the routes of a net that lie on the selected metal layers.
62 |     :param net: a Net object
63 |     :param def_info: a DefParser object that contains DEF info.
64 |     :param lef_info: a LefParser object
65 |     :return: the routes in DEF format, or the string "no route" if none exist.
66 | """
67 | s = ""
68 | # output routes
69 | num_route = 0
70 | first_route_done = False
71 | for i in range(len(net.routed)):
72 | if net.routed[i].get_layer() in GOOD_LAYERS:
73 | num_route += 1
74 | if first_route_done:
75 | s += " " + "NEW " + net.routed[i].to_def_format() + "\n"
76 | else:
77 | s += " + ROUTED " + net.routed[i].to_def_format() + "\n"
78 | first_route_done = True
79 | if num_route == 0:
80 | return "no route"
81 | else:
82 | return s
83 |
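# Illustrative sketch (not from the original source): assuming GOOD_LAYERS is
# {"metal1", "metal2"}, a net routed on metal1 and metal4 keeps only the metal1
# segment, and the returned string looks roughly like
#     + ROUTED metal1 ( 1000 2000 ) ( 1400 2000 )
# A net with no segment on a selected layer returns the sentinel "no route".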
84 | def output_net(net, def_info, lef_info):
85 | """
86 | Output a Net object inside the NETS section information with possible back
87 | end and front end selections.
88 | :param def_info: a DefParser object that contains DEF info.
89 | :param lef_info: a LefParser object
90 | :return: string
91 | """
92 | # check number of routes and get the routes
93 | routes = output_net_routes(net, def_info, lef_info)
94 | if routes == "no route":
95 | routes = ""
96 | # start setting up the string
97 | s = ""
98 | s += "- " + net.name + "\n"
99 | s += " "
100 | for each_comp in net.comp_pin:
101 | # study each comp/pin
102 | # if it's a pin, check the Pin object layer (already parsed)
103 | if each_comp[0] == "PIN":
104 | pin_name = each_comp[1]
105 | if def_info.pins.get_pin(pin_name).get_metal_layer() in GOOD_LAYERS:
106 | s += " ( " + " ".join(each_comp) + " )"
107 | else:
108 | # for component, need to check LEF info
109 | comp_id = each_comp[0]
110 | pin_name = each_comp[1]
111 | comp = def_info.components.get_comp(comp_id).get_macro()
112 | #print (comp)
113 | # get info from LEF Parser
114 | comp_info = lef_info.macro_dict[comp]
115 | # get pin layer info
116 | pin_info = comp_info.pin_dict[pin_name]
117 | if pin_info.get_top_metal() in GOOD_LAYERS:
118 | s += " ( " + " ".join(each_comp) + " )"
119 | # output routes
120 | s += "\n"
121 | s += routes
122 | s += " ;"
123 | return s
124 |
125 | def output_comps(comps):
126 | """
127 | Method to write/output a component to the DEF file
128 | :param comp: component to be written
129 | :param def_info: DEF file data
130 | :param lef_info: LEF file data
131 | :return: a string that contains Components section in DEF format.
132 | """
133 | # assume all components are in bottom layers
134 | if "metal1" in GOOD_LAYERS:
135 | return comps.to_def_format()
136 | else:
137 | return ""
138 |
139 | def output_pin(pin, def_info):
140 | """
141 | Method to write/output a pin to the DEF file
142 | :param pin: Pin object
143 | :param def_info: DEF data
144 | :return: a string that contains a Pin in DEF format.
145 | """
146 | #print (pin.get_layer())
147 | if pin.get_metal_layer() in GOOD_LAYERS:
148 | return pin.to_def_format()
149 | else:
150 | s = ""
151 | s += "- " + pin.name + " + NET " + pin.net
152 | s += " + DIRECTION " + pin.direction + " + USE " + pin.use + "\n ;"
153 | return s
154 |
155 | def output_pins(pins, def_info):
156 | """
157 | Method to write/output the PINS section to the DEF file.
158 |     :param pins: a Pins object parsed from the DEF file
159 |     :param def_info: DEF data
160 |     :return: a string that contains the PINS section in DEF format
161 | """
162 | s = ""
163 | num_pins = 0
164 | pins_string = ""
165 | for each_pin in pins.pins:
166 | pin_data = output_pin(each_pin, def_info)
167 | pins_string += pin_data
168 | pins_string += "\n"
169 | # only count the pin that has proper metal layer
170 | if each_pin.get_metal_layer() in GOOD_LAYERS:
171 | num_pins += 1
172 |     # the PINS count includes only pins on the selected metal layers
173 | s = "PINS " + str(num_pins) + " ;\n"
174 | s += pins_string
175 | s += "END PINS"
176 | return s
177 |
178 | def output_tracks(def_info):
179 | """
180 | Method to write/output TRACKS to DEF file.
181 | :param def_info: DEF data
182 | :return: a string that contains TRACKS info in DEF format.
183 | """
184 | s = ""
185 | for track in def_info.tracks:
186 | if track.get_layer() in GOOD_LAYERS:
187 | s += track.to_def_format()
188 | s += "\n"
189 | return s
190 |
191 | def output_new_def(def_info, lef_info):
192 | """
193 | Output DEF data to new DEF file with selected metal layers.
194 | :param def_info: DEF data
195 | :param lef_info: LEF data
196 | :return: a string that contains new DEF data in DEF format.
197 | """
198 | s = ""
199 | s += "# Generated by tricao@utdallas.edu for testing only.\n"
200 | s += "# Included Metal Layers:"
201 | for each in GOOD_LAYERS:
202 | s += " " + each
203 | s += "\n\n"
204 | s += "VERSION " + def_info.version + " ;" + "\n"
205 | s += "DIVIDERCHAR " + def_info.dividerchar + " ;" + "\n"
206 | s += "BUSBITCHARS " + def_info.busbitchars + " ;" + "\n"
207 | s += "DESIGN " + def_info.design_name + " ;" + "\n"
208 | s += "UNITS DISTANCE " + def_info.units + " " + def_info.scale + " ;" + "\n"
209 | s += "\n"
210 | props = def_info.property
211 | s += props.to_def_format()
212 | s += "\n"
213 | s += "DIEAREA"
214 | s += (" ( " + str(def_info.diearea[0][0]) + " " + str(def_info.diearea[0][1]) +
215 | " )")
216 | s += (" ( " + str(def_info.diearea[1][0]) + " " + str(def_info.diearea[1][1]) +
217 | " )" + " ;")
218 | s += "\n\n"
219 | for each_row in def_info.rows:
220 | s += each_row.to_def_format()
221 | s += "\n"
222 | s += "\n"
223 | s += output_tracks(def_info)
224 | s += "\n"
225 | for each_gcell in def_info.gcellgrids:
226 | s += each_gcell.to_def_format()
227 | s += "\n"
228 | s += "\n"
229 |     comps = def_info.components
230 |     s += output_comps(comps)
231 |     s += "\n\n"
232 |     pins = def_info.pins
233 |     s += output_pins(pins, def_info)
234 |     s += "\n\n"
235 |     nets = def_info.nets
236 | s += output_nets(nets, def_info, lef_info)
237 | return s
238 |
239 | def to_bool(text):
240 |     # only the literal "false" (any case) maps to False
241 |     if text.lower() == "false":
242 |         return False
243 |     return bool(text)
244 |
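# Usage sketch (illustrative only): the .ini file stores the literals "True" and
# "False", so only "false" (in any capitalisation) maps to False; every other
# non-empty string is truthy.
#   >>> to_bool("False"), to_bool("True")
#   (False, True)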
245 | # Main script
246 | if __name__ == '__main__':
247 | # default settings
248 | BACK_END = True
249 | FRONT_END = True
250 | SPLIT_LAYER = "metal2"
251 | OUTPUT_FILE = "./def_write/test.def"
252 | INPUT_FILE = "./libraries/DEF/c1908.def"
253 | # load last setup from split_def.ini
254 | print ("Last setup: ")
255 | last_setup = open("split_def.ini", "r")
256 | for line in last_setup:
257 | print (line[:-1])
258 | text = line.split()
259 | if text[0] == "BACK_END":
260 | BACK_END = to_bool(text[2])
261 | elif text[0] == "FRONT_END":
262 | FRONT_END = to_bool(text[2])
263 | elif text[0] == "SPLIT_LAYER":
264 | SPLIT_LAYER = text[2]
265 | elif text[0] == "OUTPUT_FILE_NAME":
266 | OUTPUT_FILE = text[2]
267 | elif text[0] == "INPUT_FILE_NAME":
268 | INPUT_FILE = text[2]
269 |
270 | print ()
271 | last_setup.close()
272 |
273 | use_last_setup = input("Use last setup? (y/n): ")
274 | if use_last_setup == "n":
275 | input_name = input("Enter input DEF file path: ")
276 | INPUT_FILE = input_name
277 | # user will choose whether to keep back_end and/or front_end
278 | write_back_end = input("Want bottom layers? (y/n): ")
279 | if write_back_end == "n":
280 | FRONT_END = False
281 | else:
282 | FRONT_END = True
283 | write_front_end = input("Want top layers? (y/n): ")
284 | if write_front_end == "n":
285 | BACK_END = False
286 | else:
287 | BACK_END = True
288 | SPLIT_LAYER = input("Split layer? (choices from metal1 to metal10): ")
289 | if SPLIT_LAYER not in LAYERS:
290 | SPLIT_LAYER = "metal2"
291 | output_name = input("Enter DEF output file path: ")
292 | OUTPUT_FILE = output_name
293 | # write current settings to a file
294 | setup_file = open("split_def.ini", "w+")
295 | setup_file.write("INPUT_FILE_NAME = " + input_name +"\n")
296 | setup_file.write("BACK_END = " + str(BACK_END) + "\n")
297 | setup_file.write("FRONT_END = " + str(FRONT_END) + "\n")
298 | setup_file.write("SPLIT_LAYER = " + SPLIT_LAYER + "\n")
299 | setup_file.write("OUTPUT_FILE_NAME = " + output_name +"\n")
300 | setup_file.close()
301 | else:
302 | print ("The program will use the last setup listed above.")
303 |
304 | #print (BACK_END)
305 | #print (FRONT_END)
306 | #print (SPLIT_LAYER)
307 |
308 | # need to know what layers are good for the current back-end and
309 | # front-end settings
310 | GOOD_LAYERS = proper_layers(BACK_END, FRONT_END, SPLIT_LAYER)
311 |
312 | print ()
313 | lef_file = "./libraries/Nangate/NangateOpenCellLibrary.lef"
314 | lef_parser = LefParser(lef_file)
315 | lef_parser.parse()
316 | print ()
317 | def_file = INPUT_FILE
318 | def_parser = DefParser(def_file)
319 | def_parser.parse()
320 | print ("Writing data to new DEF file with path: " + OUTPUT_FILE )
321 | out_file = open(OUTPUT_FILE, "w+")
322 | out_file.write(output_new_def(def_parser, lef_parser))
323 | out_file.close()
324 | print ("Writing data done.")
325 |
--------------------------------------------------------------------------------
/lef_def_parser/util.py:
--------------------------------------------------------------------------------
1 | """
2 | Useful functions for DEF/LEF parsers.
3 | Author: Tri Minh Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 |
8 | import matplotlib.pyplot as plt
9 | import numpy as np
10 | import math
11 | SCALE = 2000
12 |
13 |
14 | def nCr(n,r):
15 | f = math.factorial
16 | return f(n) / f(r) / f(n-r)
17 |
18 |
19 | def str_to_list(s):
20 | """
21 |     Function to turn a whitespace-separated string into a list of words
22 | :param s: input string
23 | :return: a list of words
24 | """
25 | result = s.split()
26 | # check if the last word is ';' and remove it
27 | #if len(result) >= 1:
28 | # if result[len(result) - 1] == ";":
29 | # result.pop()
30 | return result
31 |
32 | def scalePts(pts, alpha):
33 | """
34 |     Scale a list of points by the factor alpha.
35 |     :return: the scaled list of points
36 | """
37 | scaled = []
38 | for pt in pts:
39 | scaled_pt = [alpha*pt[0], alpha*pt[1]]
40 | scaled.append(scaled_pt)
41 | return scaled
42 |
43 | def rect_to_polygon(rect_pts):
44 | """
45 |     Convert the rect point list into a polygon point list (for easy plotting)
46 |     :param rect_pts: the two corner points [bottom_left, top_right] of the rect
47 |     :return: a list of the four polygon corner points
48 | """
49 | poly_pt = []
50 | pt1 = list(rect_pts[0])
51 | poly_pt.append(pt1)
52 | pt2 = [rect_pts[0][0], rect_pts[1][1]]
53 | poly_pt.append(pt2)
54 | pt3 = list(rect_pts[1])
55 | poly_pt.append(pt3)
56 | pt4 = [rect_pts[1][0], rect_pts[0][1]]
57 | poly_pt.append(pt4)
58 | return poly_pt
59 |
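# Usage sketch (illustrative only): a rectangle given as its bottom-left and
# top-right corners expands to four polygon corners.
#   >>> rect_to_polygon([(0, 0), (2, 1)])
#   [[0, 0], [0, 1], [2, 1], [2, 0]]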
60 |
61 | def split_parentheses(info):
62 | """
63 |     Group all strings inside parentheses into sub-lists.
64 |     :param info: a list of strings
65 |     :return: the info list with each parenthesized run collapsed into a sub-list
66 | """
67 | # if we see the "(" sign, then we start adding stuff to a temp list
68 | # in case of ")" sign, we append the temp list to the new_info list
69 | # otherwise, just add the string to the new_info list
70 | new_info = []
71 | make_list = False
72 | current_list = []
73 | for idx in range(len(info)):
74 | if info[idx] == "(":
75 | make_list = True
76 | elif info[idx] == ")":
77 | make_list = False
78 | new_info.append(current_list)
79 | current_list = []
80 | else:
81 | if make_list:
82 | current_list.append(info[idx])
83 | else:
84 | new_info.append(info[idx])
85 | return new_info
86 |
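# Usage sketch (illustrative only), using a token list in DEF style:
#   >>> split_parentheses(['-', 'N1', '(', 'U1', 'A', ')', '(', 'PIN', 'N1', ')'])
#   ['-', 'N1', ['U1', 'A'], ['PIN', 'N1']]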
87 |
88 | def split_plus(line):
89 | """
90 | Split a line according to the + (plus) sign.
91 | :param line:
92 | :return:
93 | """
94 | new_line = line.split("+")
95 | return new_line
96 |
97 | def split_space(line):
98 | """
99 | Split a line according to space.
100 | :param line:
101 | :return:
102 | """
103 | new_line = line.split()
104 | return new_line
105 |
106 |
107 | def draw_obs(obs, color):
108 | """
109 |     Helper method to draw an OBS object
110 | :return: void
111 | """
112 | # process each Layer
113 | for layer in obs.info["LAYER"]:
114 | for shape in layer.shapes:
115 | scaled_pts = scalePts(shape.points, SCALE)
116 | if (shape.type == "RECT"):
117 | scaled_pts = rect_to_polygon(scaled_pts)
118 | draw_shape = plt.Polygon(scaled_pts, closed=True, fill=True,
119 | color=color)
120 | plt.gca().add_patch(draw_shape)
121 |
122 |
123 | def draw_port(port, color):
124 | """
125 | Helper method to draw a PORT object
126 | :return: void
127 | """
128 | # process each Layer
129 | for layer in port.info["LAYER"]:
130 | for shape in layer.shapes:
131 | scaled_pts = scalePts(shape.points, SCALE)
132 | if (shape.type == "RECT"):
133 | scaled_pts = rect_to_polygon(scaled_pts)
134 | #print (scaled_pts)
135 | draw_shape = plt.Polygon(scaled_pts, closed=True, fill=True,
136 | color=color)
137 | plt.gca().add_patch(draw_shape)
138 |
139 |
140 | def draw_pin(pin):
141 | """
142 | function to draw a PIN object
143 | :param pin: a pin object
144 | :return: void
145 | """
146 | # chosen color of the PIN in the sketch
147 |
148 | color = "blue"
149 | pin_name = pin.name.lower()
150 | if pin_name == "vdd" or pin_name == "gnd":
151 | color = "blue"
152 | else:
153 | color = "red"
154 | draw_port(pin.info["PORT"], color)
155 |
156 | def draw_macro(macro):
157 | """
158 | function to draw a Macro (cell) object
159 | :param macro: a Macro object
160 | :return: void
161 | """
162 | # draw OBS (if it exists)
163 | if "OBS" in macro.info:
164 | draw_obs(macro.info["OBS"], "blue")
165 | # draw each PIN
166 | for pin in macro.info["PIN"]:
167 | draw_pin(pin)
168 |
169 | def compare_metal(metal_a, metal_b):
170 | """
171 | Compare metal layers
172 | :param metal_a: the first metal layer description
173 | :param metal_b: the second metal layer description
174 |     :return: negative if metal_a is below metal_b, 0 if equal, positive if above
175 | """
176 | if metal_a == "poly":
177 | if metal_b == "poly":
178 | return 0
179 | else:
180 | return -1
181 | else:
182 | if metal_b == "poly":
183 | return 1
184 | else:
185 | metal_a_num = get_metal_num(metal_a)
186 | metal_b_num = get_metal_num(metal_b)
187 | return (metal_a_num - metal_b_num)
188 |
189 |
190 | def get_metal_num(metal):
191 | """
192 |     Get the metal layer number from a string, such as "metal1" or "metal10"
193 | :param metal: string that describes the metal layer
194 | :return: metal number
195 | """
196 | len_metal = len("metal")
197 | parse_num = ""
198 | for idx in range(len_metal, len(metal)):
199 | parse_num += metal[idx]
200 | return int(parse_num)
201 |
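# Usage sketch (illustrative only): the numeric comparison avoids the plain
# string-comparison pitfall where "metal10" would sort before "metal2".
#   >>> compare_metal("metal2", "metal10")
#   -8
#   >>> compare_metal("poly", "metal1")
#   -1
#   >>> get_metal_num("metal10")
#   10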
202 |
203 | def inside_area(location, corners):
204 | """
205 | Check if the location is inside an area.
206 |     :param location: the [x, y] point to test
207 |     :param corners: corner points of the rectangle area.
208 |     :return: True if the location is strictly inside the rectangle, else False
209 | """
210 | x1 = corners[0][0]
211 | x2 = corners[1][0]
212 | y1 = corners[0][1]
213 | y2 = corners[1][1]
214 | return (location[0] > x1 and location[0] < x2
215 | and location[1] > y1 and location[1] < y2)
216 |
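# Usage sketch (illustrative only): the test is strict, so points on the
# boundary count as outside.
#   >>> inside_area([5, 5], [[0, 0], [10, 10]])
#   True
#   >>> inside_area([0, 5], [[0, 0], [10, 10]])
#   False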
217 |
218 | def relocate_area(left_pt, corners):
219 | """
220 | Relocate the corners based on the new bottom left point
221 |     :param left_pt: the new bottom-left point
222 |     :param corners: the corner points to shift
223 |     :return: the shifted corner points
224 | """
225 | x = left_pt[0]
226 | y = left_pt[1]
227 | new_corners = []
228 | for each in corners:
229 | new_pt = [each[0] + x, each[1] + y]
230 | new_corners.append(new_pt)
231 | return new_corners
232 |
233 |
234 | def macro_and_via1(def_info, via_type):
235 | """
236 | Method to get macros/cells info and via1 information.
237 | :param def_info: information from a DEF file
238 | :param via_type: the name of the via type, such as "via1" or "M2_M1_via"
239 | :return: a macro dictionary that contains via info
240 | """
241 | result_dict = {}
242 | # add components to the dictionary
243 | for each_comp in def_info.components.comps:
244 | result_dict[each_comp.name] = {}
245 | result_dict[each_comp.name]["MACRO"] = each_comp.macro
246 | # process the nets
247 | for net in def_info.nets.nets:
248 | for route in net.routed:
249 | if route.end_via != None:
250 | # check for the via type of the end_via
251 | if route.end_via[:len(via_type)] == via_type:
252 | via_loc = route.end_via_loc
253 | via_name = route.end_via
254 | via_info = (via_loc, via_name)
255 | # add the via to the component dict
256 | for each_comp in net.comp_pin:
257 | comp_name = each_comp[0]
258 | pin_name = each_comp[1]
259 | if comp_name in result_dict:
260 | if pin_name in result_dict[comp_name]:
261 | result_dict[comp_name][pin_name].append(via_info)
262 | else:
263 | result_dict[comp_name][pin_name] = [via_info]
264 | #print (result_dict)
265 | return result_dict
266 |
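# Shape of the returned dictionary (hypothetical names and coordinates, for
# illustration only): each component maps to its macro plus, per pin, the via1
# cuts that terminate routes on that pin.
#   {'U521': {'MACRO': 'NAND2X1',
#             'A': [([152000, 23100], 'M2_M1_via')]}}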
267 |
268 | def predict_score(predicts, actuals):
269 | """
270 | Find the number of correct cell predictions.
271 | :param predicts: a list of predictions.
272 | :param actuals: a list of actual cells.
273 | :return: # correct predictions, # cells
274 | """
275 | len_preds = len(predicts)
276 | len_actuals = len(actuals)
277 | shorter_len = min(len_preds, len_actuals)
278 | gap_predict = 0
279 | gap_actual = 0
280 | num_correct = 0
281 | # print (shorter_len)
282 | for i in range(shorter_len):
283 | # print (i)
284 | # print (gap_predict)
285 | # print (gap_actual)
286 | # print ()
287 | if predicts[i + gap_predict] == actuals[i + gap_actual]:
288 | num_correct += 1
289 | else:
290 | if len_preds < len_actuals:
291 | gap_actual += 1
292 | len_preds += 1
293 | elif len_preds > len_actuals:
294 | gap_predict += 1
295 | len_actuals += 1
296 | return num_correct, len(actuals)
297 |
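# Usage sketch (illustrative only): identical prediction and reference lists
# score len(actuals) out of len(actuals); a length mismatch shifts the
# comparison window instead of failing every later cell.
#   >>> predict_score(['INVX1', 'NAND2X1', 'NOR2X1'], ['INVX1', 'NAND2X1', 'NOR2X1'])
#   (3, 3)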
298 |
299 | def get_all_vias(def_info, via_type):
300 | """
301 | method to get all vias of the via_type and put them in a list
302 | :param def_info: DEF data
303 | :param via_type: via type
304 | :return: a list of all vias
305 | """
306 | vias = []
307 | # process the nets
308 | for net in def_info.nets.nets:
309 | for route in net.routed:
310 | if route.end_via != None:
311 | # check for the via type of the end_via
312 | if route.end_via[:len(via_type)] == via_type:
313 | via_loc = route.end_via_loc
314 | via_name = route.end_via
315 |                     default_via_type = -1  # unclassified yet; 0 = input, 1 = output
316 | via_info = [via_loc, via_name, net.name, default_via_type]
317 | # add a via to the vias list
318 | vias.append(via_info)
319 | #print (result_dict)
320 | return vias
321 |
322 | def sort_vias_by_row(layout_area, row_height, vias):
323 | """
324 | Sort the vias by row
325 | :param layout_area: a list [x, y] that stores the area of the layout
326 | :param vias: a list of vias that need to be sorted
327 | :return: a list of rows, each containing a list of vias in that row.
328 | """
329 | num_rows = layout_area[1] // row_height + 1
330 | rows = []
331 | for i in range(num_rows):
332 | rows.append([])
333 | for via in vias:
334 | via_y = via[0][1]
335 | row_dest = via_y // row_height
336 | rows[row_dest].append(via)
337 | # sort vias in each row based on x-coordinate
338 | for each_row in rows:
339 | each_row.sort(key = lambda x: x[0][0])
340 | return rows
341 |
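# Usage sketch (illustrative only, 1000-unit row height assumed): a via at
# y = 2500 lands in row index 2500 // 1000 = 2.
#   >>> rows = sort_vias_by_row([10000, 3000], 1000, [((400, 2500), 'via1')])
#   >>> len(rows), rows[2]
#   (4, [((400, 2500), 'via1')])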
342 |
343 | def randomize(dataset, labels):
344 | permutation = np.random.permutation(labels.shape[0])
345 | shuffled_dataset = dataset[permutation, :]
346 | shuffled_labels = labels[permutation]
347 | return shuffled_dataset, shuffled_labels
348 |
349 |
350 | def group_via(via_list, max_number, max_distance):
351 | """
352 | Method to group the vias together to check if they belong to a cell.
353 | :param via_list: a list of all vias.
354 | :return: a list of groups of vias.
355 | """
356 | groups = []
357 | length = len(via_list)
358 | for i in range(length):
359 | # one_group = [via_list[i]]
360 | curr_via = via_list[i]
361 | curr_list = []
362 | for j in range(2, max_number + 1):
363 | if i + j - 1 < length:
364 | right_via = via_list[i + j - 1]
365 | dist = right_via[0][0] - curr_via[0][0]
366 | if dist < max_distance:
367 | curr_list.append(via_list[i:i+j])
368 | # only add via group list that is not empty
369 | if len(curr_list) > 0:
370 | groups.append(curr_list)
371 | return groups
372 |
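# Usage sketch (illustrative only), with (location, via_name) tuples, at most
# 3 vias per group and a 500-unit window: the first via can be grouped with one
# or two of its right-hand neighbours, while the distant via is never grouped.
#   >>> vias = [((0, 0), 'v'), ((100, 0), 'v'), ((300, 0), 'v'), ((2000, 0), 'v')]
#   >>> group_via(vias, 3, 500)[0]
#   [[((0, 0), 'v'), ((100, 0), 'v')], [((0, 0), 'v'), ((100, 0), 'v'), ((300, 0), 'v')]]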
373 |
374 | def sorted_components(layout_area, row_height, comps):
375 | """
376 | Sort the components by row
377 | :param layout_area: a list [x, y] that stores the area of the layout
378 | :param comps: a list of components that need to be sorted
379 | :return: a list of rows, each containing a list of components in that row.
380 | """
381 | num_rows = layout_area[1] // row_height + 1
382 | rows = []
383 | for i in range(num_rows):
384 | rows.append([])
385 | for comp in comps:
386 | comp_y = comp.placed[1]
387 | row_dest = comp_y // row_height
388 | rows[row_dest].append(comp)
389 |     # sort components in each row based on x-coordinate
390 | for each_row in rows:
391 | each_row.sort(key = lambda x: x.placed[0])
392 | return rows
393 |
--------------------------------------------------------------------------------
/lef_def_parser/lef_util.py:
--------------------------------------------------------------------------------
1 | """
2 | Data Structures for LEF Parser
3 | Author: Tri Minh Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 | from util import *
8 |
9 | class Statement:
10 | """
11 | General class for all types of Statements in the LEF file
12 | """
13 |
14 | def __init__(self):
15 | pass
16 |
17 | def parse_next(self, data):
18 | """
19 | Method to add information from a statement from LEF file to the
20 | Statement object.
21 | :param data: a list of strings that contains pieces of information
22 |         :return: 1 if parsing is done, 0 if the statement is not recognized,
23 |         otherwise the object that will be parsed next.
24 | """
25 | # the program assumes the syntax of LEF file is correct
26 | if data[0] == "MACRO":
27 | name = data[1]
28 | new_state = Macro(name)
29 | return new_state
30 | elif data[0] == "LAYER" and len(data) == 2: # does not have ;
31 | name = data[1]
32 | new_state = Layer(name)
33 | return new_state
34 | elif data[0] == "VIA":
35 | name = data[1]
36 | new_state = Via(name)
37 | return new_state
38 | elif data[0] == "END":
39 | return 1
40 | return 0
41 |
42 | def __str__(self):
43 | """
44 | turn a statement object into string
45 | :return: string representation of Statement objects
46 | """
47 | s = ""
48 | s += self.type + " " + self.name
49 | return s
50 |
51 |
52 | class Macro(Statement):
53 | """
54 | Macro class represents a MACRO (cell) in the LEF file.
55 | """
56 |
57 | def __init__(self, name):
58 | # initiate the Statement superclass
59 | Statement.__init__(self)
60 | self.type = 'MACRO'
61 | self.name = name
62 | # other info is stored in this dictionary
63 | self.info = {}
64 | # pin dictionary
65 | self.pin_dict = {}
66 |
67 | def __str__(self):
68 | """
69 | turn a statement object into string
70 | :return: string representation of Statement objects
71 | """
72 | s = ""
73 | s += self.type + " " + self.name + "\n"
74 | for key in self.info:
75 | if key == "PIN":
76 | s += " " + key + ":\n"
77 | for pin in self.info[key]:
78 | s += " " + str(pin) + "\n"
79 | else:
80 | s += " " + key + ": " + str(self.info[key]) + "\n"
81 | return s
82 |
83 | def parse_next(self, data):
84 | """
85 | Method to add information from a statement from LEF file to a Macro
86 | object.
87 | :param data: a list of strings that contains pieces of information
88 | :return: 0 if in progress, 1 if parsing is done, -1 if error,
89 | otherwise, return the object that will be parsed next.
90 | """
91 | if data[0] == "CLASS":
92 | self.info["CLASS"] = data[1]
93 | elif data[0] == "ORIGIN":
94 | x_cor = float(data[1])
95 | y_cor = float(data[2])
96 | self.info["ORIGIN"] = (x_cor, y_cor)
97 | elif data[0] == "FOREIGN":
98 | self.info["FOREIGN"] = data[1:]
99 | elif data[0] == "SIZE":
100 | width = float(data[1])
101 | height = float(data[3])
102 | self.info["SIZE"] = (width, height)
103 | elif data[0] == "SYMMETRY":
104 | self.info["SYMMETRY"] = data[1:]
105 | elif data[0] == "SITE":
106 | self.info["SITE"] = data[1]
107 | elif data[0] == "PIN":
108 | new_pin = Pin(data[1])
109 | self.pin_dict[data[1]] = new_pin
110 | if "PIN" in self.info:
111 | self.info["PIN"].append(new_pin)
112 | else:
113 | self.info["PIN"] = [new_pin]
114 | return new_pin
115 | elif data[0] == "OBS":
116 | new_obs = Obs()
117 | self.info["OBS"] = new_obs
118 | return new_obs
119 | elif data[0] == "END":
120 | if data[1] == self.name:
121 | return 1
122 | else:
123 | return -1
124 | return 0
125 |
126 | def get_pin(self, pin_name):
127 | return self.pin_dict[pin_name]
128 |
129 |
130 | class Pin(Statement):
131 | """
132 | Class Pin represents a PIN statement in the LEF file.
133 | """
134 |
135 | def __init__(self, name):
136 | Statement.__init__(self)
137 | self.type = "PIN"
138 | self.name = name
139 | self.info = {}
140 |
141 | def __str__(self):
142 | s = ""
143 | for layer in self.info["PORT"].info["LAYER"]:
144 | s += layer.type + " " + layer.name + "\n"
145 | return s
146 |
147 | def parse_next(self, data):
148 | if data[0] == "DIRECTION":
149 | self.info["DIRECTION"] = data[1]
150 | elif data[0] == "USE":
151 | self.info["USE"] = data[1]
152 | elif data[0] == "PORT":
153 | new_port = Port()
154 | self.info["PORT"] = new_port
155 | return new_port
156 | elif data[0] == "SHAPE":
157 | self.info["SHAPE"] = data[1]
158 | elif data[0] == "END":
159 | if data[1] == self.name:
160 | return 1
161 | else:
162 | return -1
163 |         # return 0 when we parse an undefined statement
164 | return 0
165 |
166 | def is_lower_metal(self, split_layer):
167 | return self.info["PORT"].is_lower_metal(split_layer)
168 |
169 | def get_top_metal(self):
170 | return self.info["PORT"].get_top_metal()
171 |
172 |
173 | class Port(Statement):
174 | """
175 |     Class Port represents a PORT statement in the LEF file.
176 | """
177 |
178 | # Note: PORT statement does not have name
179 | def __init__(self):
180 | Statement.__init__(self)
181 | self.type = "PORT"
182 | self.name = ""
183 | self.info = {}
184 |
185 | def parse_next(self, data):
186 | if data[0] == "END":
187 | return 1
188 | elif data[0] == "LAYER":
189 | name = data[1]
190 | new_layerdef = LayerDef(data[1])
191 | if "LAYER" in self.info:
192 | self.info["LAYER"].append(new_layerdef)
193 | else:
194 | self.info["LAYER"] = [new_layerdef]
195 | elif data[0] == "RECT":
196 | # error if the self.info["LAYER"] does not exist
197 | self.info["LAYER"][-1].add_rect(data)
198 | elif data[0] == "POLYGON":
199 | self.info["LAYER"][-1].add_polygon(data)
200 | return 0
201 |
202 | def is_lower_metal(self, split_layer):
203 | lower = True
204 | for layer in self.info["LAYER"]:
205 | if compare_metal(layer.name, split_layer) >= 0:
206 | lower = False
207 | break
208 | return lower
209 |
210 | def get_top_metal(self):
211 | highest = "poly"
212 | for layer in self.info["LAYER"]:
213 | if compare_metal(layer.name, highest) > 0:
214 | highest = layer.name
215 | return highest
216 |
217 |
218 |
219 |
220 | class Obs(Statement):
221 | """
222 | Class Obs represents an OBS statement in the LEF file.
223 | """
224 |
225 | # Note: OBS statement does not have name
226 | def __init__(self):
227 | Statement.__init__(self)
228 | self.type = "OBS"
229 | self.name = ""
230 | self.info = {}
231 |
232 | def __str__(self):
233 | s = ""
234 | for layer in self.info["LAYER"]:
235 | s += layer.type + " " + layer.name + "\n"
236 | return s
237 |
238 | def parse_next(self, data):
239 | if data[0] == "END":
240 | return 1
241 | elif data[0] == "LAYER":
242 | name = data[1]
243 | new_layerdef = LayerDef(data[1])
244 | if "LAYER" in self.info:
245 | self.info["LAYER"].append(new_layerdef)
246 | else:
247 | self.info["LAYER"] = [new_layerdef]
248 | elif data[0] == "RECT":
249 | # error if the self.info["LAYER"] does not exist
250 | self.info["LAYER"][-1].add_rect(data) # [-1] means the latest layer
251 | elif data[0] == "POLYGON":
252 | self.info["LAYER"][-1].add_polygon(data)
253 | return 0
254 |
255 |
256 | class LayerDef:
257 | """
258 | Class LayerDef represents the Layer definition inside a PORT or OBS
259 | statement.
260 | """
261 |
262 | # NOTE: LayerDef has no END statement
263 |     # LayerDef is not a subclass of Statement; it is a plain container that
264 |     # stores the shape information of one layer.
265 | def __init__(self, name):
266 | self.type = "LayerDef"
267 | self.name = name
268 | self.shapes = []
269 |
270 | def add_rect(self, data):
271 | x0 = float(data[1])
272 | y0 = float(data[2])
273 | x1 = float(data[3])
274 | y1 = float(data[4])
275 | points = [(x0, y0), (x1, y1)]
276 | rect = Rect(points)
277 | self.shapes.append(rect)
278 |
279 | def add_polygon(self, data):
280 | points = []
281 | # add each pair of (x, y) points to a list
282 | for idx in range(1, len(data) - 2, 2):
283 | x_cor = float(data[idx])
284 | y_cor = float(data[idx+1])
285 | points.append([x_cor, y_cor])
286 | polygon = Polygon(points)
287 | self.shapes.append(polygon)
288 |
289 |
290 | class Rect:
291 | """
292 | Class Rect represents a Rect definition in a LayerDef
293 | """
294 |
295 | # Question: Do I really need a Rect class?
296 | def __init__(self, points):
297 | self.type = "RECT"
298 | self.points = points
299 |
300 |
301 | class Polygon:
302 | """
303 | Class Polygon represents a Polygon definition in a LayerDef
304 | """
305 | def __init__(self, points):
306 | self.type = "POLYGON"
307 | self.points = points
308 |
309 |
310 | class Layer(Statement):
311 | """
312 | Layer class represents a LAYER section in LEF file.
313 | """
314 | def __init__(self, name):
315 | # initiate the Statement superclass
316 | Statement.__init__(self)
317 | self.type = "LAYER"
318 | self.name = name
319 | self.layer_type = None
320 | self.spacing_table = None
321 | self.spacing = None
322 | self.width = None
323 | self.pitch = None
324 | self.direction = None
325 | self.offset = None
326 | self.resistance = None
327 | self.thickness = None
328 | self.height = None
329 | self.capacitance = None
330 | self.edge_cap = None
331 | self.property = None
332 |         # spacingTable is a flag: 0 means the SPACINGTABLE section has not
333 |         # started yet
334 |         self.spacingTable = 0
335 |
336 | def parse_next(self, data):
337 | """
338 | Method to add information from a statement from LEF file to a Layer
339 | object.
340 | :param data: a list of strings that contains pieces of information
341 | :return: 0 if in progress, 1 if parsing is done, -1 if error,
342 | otherwise, return the object that will be parsed next.
343 | """
344 | if data[0] == "TYPE":
345 |
346 | self.layer_type = data[1]
347 | elif data[0] == "SPACINGTABLE":
348 | self.spacingTable = 1
349 | pass
350 | elif data[0] == "SPACING":
351 | self.spacing = float(data[1])
352 | elif data[0] == "WIDTH":
353 |
354 | # I manually added this spacingTable variable to ignore the width if it comes after SPACINGTABLE section
355 | # this is done because earlier, it used overwrite the old resistence
356 | if(self.spacingTable == 0):
357 | self.width = float(data[1])
358 | elif data[0] == "PITCH":
359 | self.pitch = float(data[1])
360 | elif data[0] == "DIRECTION":
361 |
362 | self.direction = data[1]
363 | elif data[0] == "OFFSET":
364 | self.offset = (float(data[1]))
365 | #self.offset = (float(data[1]), float(data[2]))
366 | elif data[0] == "RESISTANCE":
367 | if self.layer_type == "ROUTING":
368 | self.resistance = (data[1], float(data[2]))
369 | elif self.layer_type == "CUT":
370 | self.resistance = float(data[1])
371 | elif data[0] == "THICKNESS":
372 | self.thickness = float(data[1])
373 | elif data[0] == "HEIGHT":
374 | self.height = float(data[1])
375 | elif data[0] == "CAPACITANCE":
376 | self.capacitance = (data[1], float(data[2]))
377 | elif data[0] == "EDGECAPACITANCE":
378 | self.edge_cap = float(data[1])
379 | elif data[0] == "PROPERTY":
380 | self.property = (data[1], float(data[2]))
381 | elif data[0] == "END":
382 |
383 | if data[1] == self.name:
384 | return 1
385 | else:
386 | return -1
387 | return 0
388 |
389 | class Via(Statement):
390 | """
391 | Via class represents a VIA section in LEF file.
392 | """
393 | def __init__(self, name):
394 | # initiate the Statement superclass
395 | Statement.__init__(self)
396 | self.type = "VIA"
397 | self.name = name
398 | self.layers = []
399 |
400 | def parse_next(self, data):
401 | if data[0] == "END":
402 | return 1
403 | elif data[0] == "LAYER":
404 | name = data[1]
405 | new_layerdef = LayerDef(data[1])
406 | self.layers.append(new_layerdef)
407 | elif data[0] == "RECT":
408 | self.layers[-1].add_rect(data) # [-1] means the latest layer
409 | elif data[0] == "POLYGON":
410 |             self.layers[-1].add_polygon(data)  # [-1] means the latest layer
411 | return 0
412 |
413 |
--------------------------------------------------------------------------------
/lef_def_parser/def_util.py:
--------------------------------------------------------------------------------
1 | """
2 | Data structures for DEF Parser
3 | Author: Tri Minh Cao
4 | Email: tricao@utdallas.edu
5 | Date: August 2016
6 | """
7 | from util import *
8 |
9 | class Pins:
10 | """
11 | Class Pins represents the PINS section in DEF file. It contains
12 | individual Pin objects.
13 | """
14 |
15 | def __init__(self, num_pins):
16 | self.type = "PINS_DEF"
17 | self.num_pins = num_pins
18 | self.pins = []
19 | self.pin_dict = {}
20 |
21 | def parse_next(self, info):
22 | if info[0] == "-":
23 | # create a new pin
24 | # print (info[1])
25 | current_pin = Pin(info[1])
26 | self.pins.append(current_pin)
27 | self.pin_dict[info[1]] = current_pin
28 | # print ("new")
29 | else:
30 | current_pin = self.get_last_pin()
31 | # print ("last")
32 | # parse the next info
33 | if info[0] == "NET":
34 | current_pin.net = info[1]
35 | elif info[0] == "DIRECTION":
36 | current_pin.direction = info[1]
37 | elif info[0] == "USE":
38 | current_pin.use = info[1]
39 | elif info[0] == "LAYER":
40 | new_layer = Layer(info[1])
41 | new_layer.points.append([int(info[3]), int(info[4])])
42 | new_layer.points.append([int(info[7]), int(info[8])])
43 | current_pin.layer = new_layer
44 | elif info[0] == "PLACED":
45 | current_pin.placed = [int(info[2]), int(info[3])]
46 | current_pin.orient = info[5]
47 |
48 | def __len__(self):
49 | return len(self.pins)
50 |
51 | def __iter__(self):
52 | return self.pins.__iter__()
53 |
54 | def __getitem__(self, pin_name):
55 | return self.get_pin(pin_name)
56 |
57 | def get_last_pin(self):
58 | return self.pins[-1]
59 |
60 | def to_def_format(self):
61 | s = ""
62 | s += "PINS" + " " + str(self.num_pins) + " ;\n"
63 | for each_pin in self.pins:
64 |             # to_def_format handles pins whose Layer or Placed is None
65 | s += each_pin.to_def_format() + "\n"
66 | s += "END PINS"
67 | return s
68 |
69 | def get_pin(self, pin_name):
70 | return self.pin_dict[pin_name]
71 |
72 |
73 | class Pin:
74 | """
75 | Class Pin represents an individual pin defined in the DEF file.
76 | """
77 |
78 | def __init__(self, name):
79 | self.type = "PIN_DEF"
80 | self.name = name
81 | self.net = None
82 | self.direction = None
83 | self.use = None
84 | self.layer = None
85 | self.placed = None
86 | self.orient = None
87 |
88 | # add methods to add information to the Pin object
89 | def __str__(self):
90 | s = ""
91 | s += self.type + ": " + self.name + "\n"
92 | s += " " + "Name: " + self.net + "\n"
93 | s += " " + "Direction: " + self.direction + "\n"
94 | s += " " + "Use: " + self.use + "\n"
95 | if self.layer:
96 | s += " " + "Layer: " + str(self.layer) + "\n"
97 | if self.placed:
98 | s += " " + "Placed: " + str(self.placed) + " " + self.orient + "\n"
99 | return s
100 |
101 | def to_def_format(self):
102 | #- N1 + NET N1 + DIRECTION INPUT + USE SIGNAL
103 | # + LAYER metal2 ( -70 0 ) ( 70 140 )
104 | # + PLACED ( 27930 0 ) N ;
105 | s = ""
106 | s += "- " + self.name + " + NET " + self.net
107 | s += " + DIRECTION " + self.direction + " + USE " + self.use + "\n"
108 | if self.layer:
109 | s += " + " + self.layer.to_def_format() + "\n"
110 | if self.placed:
111 | s += " + " + "PLACED " + "( " + str(self.placed[0]) + " "
112 | s += str(self.placed[1]) + " ) " + self.orient + "\n"
113 | s += " ;"
114 | return s
115 |
116 | def get_metal_layer(self):
117 | return self.layer.name
118 |
119 |
120 | class Layer:
121 | """
122 | Class Layer represents a layer defined inside a PIN object
123 | """
124 |
125 | def __init__(self, name):
126 | self.type = "LAYER_DEF"
127 | self.name = name
128 | self.points = []
129 |
130 | def __str__(self):
131 | s = ""
132 | s += self.name
133 | for pt in self.points:
134 | s += " " + str(pt)
135 | return s
136 |
137 | def to_def_format(self):
138 | s = ""
139 | s += "LAYER" + " " + self.name
140 | for pt in self.points:
141 | s += " ( " + str(pt[0]) + " " + str(pt[1]) + " )"
142 | return s
143 |
144 |
145 | class Components:
146 | """
147 | Class Components represents the COMPONENTS section in the DEF file.
148 | """
149 |
150 | def __init__(self, num_comps):
151 | self.type = "COMPONENTS_DEF"
152 | self.num_comps = num_comps
153 | self.comps = []
154 | self.comp_dict = {}
155 |
156 | def parse_next(self, info):
157 | if info[0] == "-":
158 | new_comp = Component(info[1])
159 | new_comp.macro = info[2]
160 | self.comps.append(new_comp)
161 | self.comp_dict[info[1]] = new_comp
162 | else:
163 | current_comp = self.get_last_comp()
164 | # parse the next info
165 | if info[0] == "PLACED":
166 | current_comp.placed = [int(info[2]), int(info[3])]
167 | current_comp.orient = info[5]
168 | elif info[0] == "FIXED":
169 | current_comp.placed = [int(info[2]), int(info[3])]
170 | current_comp.orient = info[5]
171 |
172 | def __len__(self):
173 | return len(self.comps)
174 |
175 | def __getitem__(self, comp_name):
176 | return self.get_comp(comp_name)
177 |
178 | def __iter__(self):
179 | return self.comps.__iter__()
180 |
181 | def get_last_comp(self):
182 | return self.comps[-1]
183 |
184 | def get_comp(self, comp_name):
185 | return self.comp_dict[comp_name]
186 |
187 | def to_def_format(self):
188 | s = ""
189 | s += "COMPONENTS" + " " + str(self.num_comps) + " ;\n"
190 | for each_comp in self.comps:
191 | s += each_comp.to_def_format() + "\n"
192 | s += "END COMPONENTS"
193 | return s
194 |
195 |
196 | class Component:
197 | """
198 | Represents individual component inside the COMPONENTS section in the DEF
199 | file.
200 | """
201 |
202 | def __init__(self, name):
203 | self.type = "COMPONENT_DEF"
204 | self.name = name
205 | self.macro = None
206 | self.placed = None
207 | self.orient = None
208 |
209 | def get_macro(self):
210 | return self.macro
211 |
212 | def __str__(self):
213 | s = ""
214 | s += self.type + ": " + self.name + "\n"
215 | s += " " + "Macro: " + self.macro + "\n"
216 | s += " " + "Placed: " + str(self.placed) + " " + self.orient + "\n"
217 | return s
218 |
219 | def to_def_format(self):
220 | s = ""
221 | s += "- " + self.name + " " + self.macro + " + " + "PLACED"
222 | s += " ( " + str(self.placed[0]) + " " + str(self.placed[1]) + " ) "
223 | s += self.orient + "\n ;"
224 | return s
225 |
226 |
227 | class Nets:
228 | """
229 | Represents the section NETS in the DEF file.
230 | """
231 |
232 | def __init__(self, num_nets):
233 | self.type = "NETS_DEF"
234 | self.num_nets = num_nets
235 | self.nets = []
236 | self.net_dict = {}
237 |
238 | def parse_next(self, info):
239 | # remember to check for "(" before using split_parentheses
240 | # if we see "(", then it means new component or new pin
241 | # another method is to check the type of the object, if it is a list
242 | # then we know it comes from parentheses
243 | info = split_parentheses(info)
244 | if info[0] == "-":
245 | net_name = info[1]
246 | new_net = Net(net_name)
247 | self.nets.append(new_net)
248 | self.net_dict[net_name] = new_net
249 | else:
250 | current_net = self.get_last_net()
251 | # parse next info
252 | if isinstance(info[0], list):
253 | for comp in info:
254 | current_net.comp_pin.append(comp)
255 | elif info[0] == "ROUTED" or info[0] == "NEW":
256 | new_routed = Routed()
257 | new_routed.layer = info[1]
258 | # add points to the new_routed
259 | for idx in range(2, len(info)):
260 | if isinstance(info[idx], list):
261 | # this is a point
262 | parsed_pt = info[idx]
263 | new_pt = []
264 | for j in range(len(parsed_pt)):
265 | # if we see "*", the new coordinate comes from last
266 | # point's coordinate
267 | if parsed_pt[j] == "*":
268 | last_pt = new_routed.get_last_pt()
269 | new_coor = last_pt[j]
270 | new_pt.append(new_coor)
271 | else:
272 | new_pt.append(int(parsed_pt[j]))
273 | # add new_pt to the new_routed
274 | new_routed.points.append(new_pt)
275 | else:
276 | # this should be via end point
277 |                     if info[idx] != ';':
278 | new_routed.end_via = info[idx]
279 | # the location of end_via is the last point in the route
280 | new_routed.end_via_loc = new_routed.points[-1]
281 | # add new_routed to the current_net
282 | current_net.routed.append(new_routed)
283 |
284 | def __iter__(self):
285 | return self.nets.__iter__()
286 |
287 | def __len__(self):
288 | return len(self.nets)
289 |
290 | def get_last_net(self):
291 | return self.nets[-1]
292 |
293 | def to_def_format(self):
294 | s = ""
295 | s += "NETS" + " " + str(self.num_nets) + " ;\n"
296 | for each_net in self.nets:
297 | s += each_net.to_def_format() + "\n"
298 | s += "END NETS"
299 | return s
300 |
301 |
302 | class Net:
303 | """
304 | Represents individual Net inside NETS section.
305 | """
306 |
307 | def __init__(self, name):
308 | self.type = "NET_DEF"
309 | self.name = name
310 | self.comp_pin = []
311 | self.routed = []
312 |
313 | def __str__(self):
314 | s = ""
315 | s += self.type + ": " + self.name + "\n"
316 | s += " " + "Comp/Pin: "
317 | for comp in self.comp_pin:
318 | s += " " + str(comp)
319 | s += "\n"
320 | s += " " + "Routed: " + "\n"
321 | for route in self.routed:
322 | s += " " + " " + str(route) + "\n"
323 | return s
324 |
325 | def to_def_format(self):
326 | s = ""
327 | s += "- " + self.name + "\n"
328 | s += " "
329 | for each_comp in self.comp_pin:
330 | # study each comp/pin
331 |             # if it's a pin, the Pin object layer has already been parsed;
332 |             # here every comp/pin pair is written out unconditionally
333 | s += " ( " + " ".join(each_comp) + " )"
334 | if self.routed:
335 | s += "\n + ROUTED " + self.routed[0].to_def_format() + "\n"
336 | for i in range(1, len(self.routed)):
337 | s += " " + "NEW " + self.routed[i].to_def_format() + "\n"
338 | s += " ;"
339 | return s
340 |
341 | class Routed:
342 | """
343 | Represents a ROUTED definition inside a NET.
344 | """
345 |
346 | def __init__(self):
347 | self.type = "ROUTED_DEF"
348 | self.layer = None
349 | self.points = []
350 | self.end_via = None
351 | self.end_via_loc = None
352 |
353 | def __str__(self):
354 | s = ""
355 | s += self.layer
356 | for pt in self.points:
357 | s += " " + str(pt)
358 | if self.end_via != None:
359 | s += " " + self.end_via
360 | return s
361 |
362 | def get_last_pt(self):
363 | return self.points[-1]
364 |
365 | def get_layer(self):
366 | return self.layer
367 |
368 | def to_def_format(self):
369 | s = ""
370 | s += self.layer
371 | for pt in self.points:
372 | s += " ("
373 | for coor in pt:
374 | s += " " + str(coor)
375 | s += " )"
376 | if self.end_via != None:
377 | s += " " + self.end_via
378 | return s
379 |
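# Illustrative example (hypothetical values): a Routed on metal2 through the
# points (100, 200) and (100, 600), ending at via "M2_M1_via", prints as
#   metal2 ( 100 200 ) ( 100 600 ) M2_M1_via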
380 |
381 | class Tracks:
382 | """
383 | Represents a TRACKS definition inside the DEF file.
384 | """
385 | def __init__(self, name):
386 | self.type = "TRACKS_DEF"
387 | self.name = name
388 | self.pos = None
389 | self.do = None
390 | self.step = None
391 | self.layer = None
392 |
393 | def to_def_format(self):
394 | s = ""
395 | s += "TRACKS" + " " + self.name + " " + str(self.pos) + " "
396 | s += "DO" + " " + str(self.do) + " " + "STEP" + " " + str(self.step)
397 | s += " " + "LAYER" + " " + self.layer + " ;"
398 | return s
399 |
400 | def get_layer(self):
401 | return self.layer
402 |
403 |
404 | class GCellGrid:
405 | """
406 | Represents a GCELLGRID definition in the DEF file.
407 | """
408 | def __init__(self, name):
409 | self.type = "GCELLGRID_DEF"
410 | self.name = name
411 | self.pos = None
412 | self.do = None
413 | self.step = None
414 |
415 | def to_def_format(self):
416 | s = ""
417 | s += "GCELLGRID" + " " + self.name + " " + str(self.pos) + " "
418 | s += "DO" + " " + str(self.do) + " " + "STEP" + " " + str(self.step)
419 | s += " ;"
420 | return s
421 |
422 | class Row:
423 | """
424 | Represents a ROW definition in the DEF file.
425 | """
426 | def __init__(self, name):
427 | self.type = "ROW_DEF"
428 | self.name = name
429 | self.site = None
430 | self.pos = None
431 | self.orient = None
432 | self.do = None
433 | self.by = None
434 | self.step = None
435 |
436 | def to_def_format(self):
437 | s = ""
438 | s += "ROW" + " " + self.name + " " + self.site + " "
439 | s += str(self.pos[0]) + " " + str(self.pos[1]) + " " + self.orient + " "
440 | s += "DO" + " " + str(self.do) + " " + "BY" + " " + str(self.by) + " "
441 | s += "STEP" + " " + str(self.step[0]) + " " + str(self.step[1])
442 | s += " ;"
443 | return s
444 |
445 | class Property:
446 | """
447 | Represents a PROPERTYDEFINITIONS in the DEF file.
448 | """
449 | def __init__(self):
450 | self.type = "PROPERTY_DEF"
451 | self.texts = []
452 |
453 | def parse_next(self, info):
454 | new_line = " ".join(info)
455 | self.texts.append(new_line)
456 |
457 | def to_def_format(self):
458 | s = ""
459 | s += "PROPERTYDEFINITIONS\n"
460 | for each_line in self.texts:
461 | s += " " + each_line + "\n"
462 | s += "END PROPERTYDEFINITIONS\n"
463 | return s
464 |
--------------------------------------------------------------------------------
/lef_def_parser/plot_layout.py:
--------------------------------------------------------------------------------
1 | """
2 | Program to plot vias in the whole layout using DEF and LEF data.
3 |
4 | Author: Tri Minh Cao
5 | Email: tricao@utdallas.edu
6 | Date: September 2016
7 | """
8 |
9 | from def_parser import *
10 | from lef_parser import *
11 | from util import *
12 | import plot_cell
13 | import matplotlib.pyplot as plt
14 | import numpy as np
15 | import time
16 | import img_util
17 | import pickle
18 | import random
19 | import os
20 | import time
21 | import shutil
22 |
23 |
24 | def sort_vias_by_row(layout_area, row_height, vias):
25 | """
26 | Sort the vias by row
27 | :param layout_area: a list [x, y] that stores the area of the layout
28 | :param vias: a list of vias that need to be sorted
29 | :return: a list of rows, each containing a list of vias in that row.
30 | """
31 | num_rows = layout_area[1] // row_height + 1
32 | rows = []
33 | for i in range(num_rows):
34 | rows.append([])
35 | for via in vias:
36 | via_y = via[0][1]
37 | row_dest = via_y // row_height
38 | rows[row_dest].append(via)
39 | # sort vias in each row based on x-coordinate
40 | for each_row in rows:
41 | each_row.sort(key = lambda x: x[0][0])
42 | return rows
43 |
44 |
45 | def plot_window(left_pt, width, height, vias, lef_data, macro=None, comp=None):
46 | """
47 | Method to plot a window from the layout with all vias inside it.
48 | :param left_pt: bottom left point (origin) of the window
49 | :param width: width of the window
50 | :param height: height of the window
51 | :param vias: a list containing all vias on a row
52 | :return: void
53 | """
54 | # get the corners for the window
55 | corners = [left_pt]
56 | corners.append((left_pt[0] + width, left_pt[1] + height))
57 | # compose the output file name
58 | out_folder = './images/'
59 | # current_time = time.strftime('%H%M%d%m%Y')
60 | pos = (str(corners[0][0]) + '_' + str(corners[0][1]) + '_' +
61 | str(corners[1][0]) + '_' + str(corners[1][1]))
62 | # out_file = out_folder + pos
63 | out_file = out_folder
64 | # out_file += str(corners[0][0])
65 | out_file += pos
66 | if macro:
67 | out_file += '_' + macro
68 | if comp:
69 | out_file += '_' + comp
70 | # current_time = time.strftime('%H%M%S%d%m%Y')
71 | # out_file += '_' + current_time
72 |
73 | if os.path.exists(out_file + '.png'):
74 | return out_file + '.png'
75 |
76 | plt.figure(figsize=(3, 5), dpi=80, frameon=False)
77 | # scale the axis of the subplot
78 | # draw the window boundary
79 | # scaled_pts = rect_to_polygon(corners)
80 | # draw_shape = plt.Polygon(scaled_pts, closed=True, fill=None,
81 | # color="blue")
82 | # plt.gca().add_patch(draw_shape)
83 |
84 | # plot the vias inside the windows
85 | # look for the vias
86 | for via in vias:
87 | if (via[0][0] - left_pt[0] > width):
88 | break
89 | via_name = via[1]
90 | via_info = lef_data.via_dict[via_name]
91 | via_loc = via[0]
92 | plot_cell.draw_via(via_loc, via_info)
93 |
94 | # scale the axis of the subplot
95 | axis = [corners[0][0], corners[1][0], corners[0][1], corners[1][1]]
96 | # print (test_axis)
97 | plt.axis(axis)
98 | plt.axis('off')
99 | plt.gca().set_aspect('equal', adjustable='box')
100 | plt.savefig(out_file)
101 | # plt.show()
102 | plt.close('all')
103 | return out_file + '.png'
104 |
105 |
106 | def group_via(via_list, max_number, max_distance):
107 | """
108 | Method to group the vias together to check if they belong to a cell.
109 | :param via_list: a list of all vias.
110 | :return: a list of groups of vias.
111 | """
112 | groups = []
113 | length = len(via_list)
114 | for i in range(length):
115 | # one_group = [via_list[i]]
116 | curr_via = via_list[i]
117 | curr_list = []
118 | for j in range(2, max_number + 1):
119 | if i + j - 1 < length:
120 | right_via = via_list[i + j - 1]
121 | dist = right_via[0][0] - curr_via[0][0]
122 | if dist < max_distance:
123 | curr_list.append(via_list[i:i+j])
124 | # only add via group list that is not empty
125 | if len(curr_list) > 0:
126 | groups.append(curr_list)
127 | return groups
128 |
129 |
130 | def predict_cell(candidates, row, model, lef_data, std_cells):
131 | """
132 | Use the trained model to choose the most probable cell from via groups.
133 | :param candidates: 2-via and 3-via groups that could make a cell
134 | :return: a tuple (chosen via group, predicted cell name)
135 | """
136 | margin = 350
137 | img_width = 200
138 | img_height = 400
139 | img_shape = img_width * img_height
140 | possible_candidates = []
141 | for i in range(len(candidates)):
142 | # dataset = np.ndarray(shape=(len(candidates), img_height, img_width),
143 | # dtype=np.float32)
144 | if candidates[i] != -1:
145 | possible_candidates.append(i)
146 | dataset = np.ndarray(shape=(1, img_height, img_width),
147 | dtype=np.float32)
148 | each_group = candidates[i]
149 | left_pt = [each_group[0][0][0] - margin, CELL_HEIGHT * row]
150 | width = each_group[-1][0][0] - left_pt[0] + margin
151 | # print (width)
152 | img_file = plot_window(left_pt, width, CELL_HEIGHT, each_group, lef_data)
153 | # print (img_file)
154 | image_data = img_util.load_image(img_file)
155 | # print (image_data.shape)
156 | dataset[0, :, :] = image_data
157 | X_test = dataset.reshape(dataset.shape[0], img_shape)
158 | result = model.decision_function(X_test)
159 | result = result[0]
160 | # check for result
161 | if result[i] == max(result):
162 | return candidates[i], i
163 | # if we cannot find a solution, randomly select a choice
164 | choice = random.choice(possible_candidates)
165 | return candidates[choice], choice
166 |
167 |
168 | def sorted_components(layout_area, row_height, comps):
169 | """
170 | Sort the components by row
171 | :param layout_area: a list [x, y] that stores the area of the layout
172 | :param comps: a list of components that need to be sorted
173 | :return: a list of rows, each containing a list of components in that row.
174 | """
175 | num_rows = layout_area[1] // row_height + 1
176 | rows = []
177 | for i in range(num_rows):
178 | rows.append([])
179 | for comp in comps:
180 | comp_y = comp.placed[1]
181 | row_dest = comp_y // row_height
182 | rows[row_dest].append(comp)
184 |     # sort components in each row based on x-coordinate
184 | for each_row in rows:
185 | each_row.sort(key = lambda x: x.placed[0])
186 | return rows
187 |
188 |
189 | def predict_score(predicts, actuals):
190 | """
191 | Find the number of correct cell predictions.
192 | :param predicts: a list of predictions.
193 | :param actuals: a list of actual cells.
194 | :return: # correct predictions, # cells
195 | """
196 | len_preds = len(predicts)
197 | len_actuals = len(actuals)
198 | shorter_len = min(len_preds, len_actuals)
199 | gap_predict = 0
200 | gap_actual = 0
201 | num_correct = 0
202 | # print (shorter_len)
203 | for i in range(shorter_len):
204 | # print (i)
205 | # print (gap_predict)
206 | # print (gap_actual)
207 | # print ()
208 | if predicts[i + gap_predict] == actuals[i + gap_actual]:
209 | num_correct += 1
210 | else:
211 | if len_preds < len_actuals:
212 | gap_actual += 1
213 | len_preds += 1
214 | elif len_preds > len_actuals:
215 | gap_predict += 1
216 | len_actuals += 1
217 | return num_correct, len(actuals)
218 |
219 |
220 | def plot_cell_w_vias():
221 | # process each row, plot all cells
222 | # for i in range(num_rows):
223 | margin = 350
224 | for i in range(1):
225 | via_idx = 0
226 | print (len(components[i]))
227 | print (len(via1_sorted[i]))
228 | for each_comp in components[i]:
229 | comp_name = each_comp.name
230 | macro_name = each_comp.macro
231 | macro_data = lef_parser.macro_dict[macro_name]
232 | num_vias = len(macro_data.pin_dict) - 2 # because of VDD and GND pins
233 | # get the vias
234 | cell_vias = via1_sorted[i][via_idx:via_idx + num_vias]
235 | # update via_idx
236 | via_idx += num_vias
237 | # plot the cell
238 | left_pt = [cell_vias[0][0][0] - margin, CELL_HEIGHT * i]
239 | width = cell_vias[-1][0][0] - left_pt[0] + margin
240 | # print (width)
241 | img_file = plot_window(left_pt, width, CELL_HEIGHT, cell_vias,
242 | lef_parser, macro=macro_name, comp = comp_name)
243 | print (comp_name)
244 | print (macro_name)
245 | print (cell_vias)
246 | print (via_idx)
247 | print('Finished!')
248 |
249 |
250 | def check_via_group(via_group, source_sink):
251 | """
252 | Check the validity of each via set in the via group.
253 | :param via_group: the via_group in question.
254 | :return: via_group with all valid candidate(s)
255 | """
256 | # valid for 2-via cell: 1 source, 1 sink
257 | # valid for 3-via cell: 2 sink, 1 source
258 | valid_group = []
259 | for each_group in via_group:
260 | num_vias = len(each_group)
261 | num_source = 0
262 | num_sink = 0
263 | for each_via in each_group:
264 | # 0 = sink, 1 = source
265 | if source_sink[each_via[2]] == 1:
266 | num_source += 1
267 | elif source_sink[each_via[2]] == 0:
268 | num_sink += 1
269 | if num_source <= 1 and num_sink <=2:
270 | valid_group.append(each_group)
271 | return valid_group
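272 | 
273 | # Hypothetical example: with source_sink = {'a': 0, 'b': 0, 'y': 1}, a group
274 | # whose three vias sit on nets a, b and y counts 2 sinks and 1 source and is
275 | # kept; a group with two source nets, or with three sinks, is filtered out.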
272 |
273 |
274 | def get_candidates(first_via_idx, via_list, std_cells):
275 | """
276 | Generate a list of candidates from the first via.
277 | Each standard cell will be considered for candidates.
278 | If the standard cell cannot be placed there, the value is -1,
279 | otherwise, it will be a list of vias.
280 | :param first_via_idx: first via index in the via_list
281 | :param via_list: the list of all vias (in a row)
282 | :param std_cells: a list that stores information of std cells
283 |     :return: a list with one entry per std cell: a group of vias, or -1 if that cell cannot fit there
284 | """
285 | # candidates = [-1 for i in range(len(std_cells))]
286 | candidates = []
287 | first_via = via_list[first_via_idx]
288 | # print (first_via)
289 | first_via_x = first_via[0][0]
290 | for i in range(len(std_cells)):
291 | cell_width = std_cells[i][2]
292 |         min_vias = std_cells[i][0]
293 | max_vias = std_cells[i][1]
294 | pin_left_dist = std_cells[i][3]
295 | boundary = first_via_x + cell_width - pin_left_dist
296 | # possible vias contain the vias inside the boundary
297 | possible_vias = [first_via]
298 | for j in range(first_via_idx + 1, len(via_list)):
299 | if via_list[j][0][0] <= boundary:
300 | possible_vias.append(via_list[j])
301 | else:
302 | break
303 | # check the candidate against cell info
304 | if len(possible_vias) > max_vias or len(possible_vias) < min_vias:
305 | candidates.append(-1)
306 | else:
307 | candidates.append(possible_vias)
308 | return candidates
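309 | 
310 | # Sketch of the candidate window, using values from std_cell_info defined in
311 | # the main block below: for the NAND2X1 entry (3, 4, 1520, 90), a first via at
312 | # x = 10000 (hypothetical) opens a window up to 10000 + 1520 - 90 = 11430;
313 | # every following via at or below that x joins the group, and the group is
314 | # kept only if it holds 3 or 4 vias, otherwise the entry is -1.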
309 |
310 |
311 | def get_inputs_outputs(def_info):
312 | """
313 | Method to get all inputs and outputs nets from a DEF file.
314 | :param def_info: def info (already parsed).
315 | :return: inputs and outputs
316 | """
317 |     pins = def_info.pins.pins
318 | inputs = []
319 | outputs = []
320 | for each_pin in pins:
321 | pin_name = each_pin.name
322 | direction = each_pin.direction.lower()
323 | if direction == 'input':
324 | inputs.append(pin_name)
325 | elif direction == 'output':
326 | outputs.append(pin_name)
327 | return inputs, outputs
328 |
329 |
330 | def recover_netlist(def_info, inputs, outputs, recovered_cells):
331 | """
332 | Method to create a netlist from predicted cells
333 | :param def_info: information from the DEF file
334 | :param inputs: input pins of the design
335 | :param outputs: output pins of the design
336 | :param recovered_cells: recovered cells with input nets and output nets
337 | :return: recovered netlist file name
338 | """
339 | # NOTE: the order of nets is not like that in original netlist
340 | design = def_info.design_name
341 | nets = set(def_info.nets.net_dict.keys())
342 | inputs_set = set(inputs)
343 | outputs_set = set(outputs)
344 | io = inputs_set | outputs_set
345 | wires = nets - io
346 | # print(wires)
347 | # print(len(wires))
348 |
349 |     ## mm/dd/yyyy HH:MM:SS format
350 | date = time.strftime("%m/%d/%Y %H:%M:%S")
351 |     s = '/////////////////////////////\n'
352 |     s += '// Generated by TMC\n'
353 |     s += '// Design: ' + design + '\n'
354 |     s += '// Date: ' + date + '\n'
355 |     s += '/////////////////////////////\n\n'
356 |
357 | # add module definition
358 | s += 'module ' + design + ' ( '
359 | num_ios = len(io)
360 | idx = 0
361 | for each_pin in io:
362 | s += each_pin
363 | idx += 1
364 | if idx < num_ios:
365 | s += ', '
366 | s += ' );\n'
367 |
368 | indent = ' '
369 | # add input
370 | num_in = len(inputs)
371 | idx = 0
372 | s += indent + 'input '
373 | for each_in in inputs:
374 | s += each_in
375 | idx += 1
376 | if idx < num_in:
377 | s += ', '
378 | s += ';\n'
379 | # add output
380 | num_out = len(outputs)
381 | idx = 0
382 | s += indent + 'output '
383 | for each_out in outputs:
384 | s += each_out
385 | idx += 1
386 | if idx < num_out:
387 | s += ', '
388 | s += ';\n'
389 | # add wire
390 | num_wire = len(wires)
391 | idx = 0
392 | s += indent + 'wire '
393 | for each_wire in wires:
394 | s += each_wire
395 | idx += 1
396 | if idx < num_wire:
397 | s += ', '
398 | s += ';\n'
399 | # add cells
400 | s += '\n'
401 | cell_idx = 2
402 |     for each_cell in recovered_cells:
403 | cell_idx += 1
404 | s += indent + each_cell[0] + ' U' + str(cell_idx) + ' ( '
405 | in_nets = each_cell[1]
406 | s += '.A(' + in_nets[0] + ')' + ', '
407 | if len(in_nets) == 2:
408 | s += '.B(' + in_nets[1] + ')' + ', '
409 | out_net = each_cell[2]
410 | s += '.Y(' + out_net + ')'
411 | s += ' );\n'
412 |     s += 'endmodule\n'
413 | 
413 | # write to an output file
414 | folder = './recovered/'
415 | filename = design + '_recovered' + '.v'
416 | print('Writing recovered netlist file...')
417 |     with open(folder + filename, mode="w+") as f:
418 |         f.write(s)
419 | 
420 | print('Writing done.')
421 | return filename
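422 | 
423 | # Shape of one recovered_cells entry and the instance line it produces, with
424 | # hypothetical net names: ['NAND2X1', ['n1', 'n2'], 'n3'] becomes
425 | #     NAND2X1 U3 ( .A(n1), .B(n2), .Y(n3) );
426 | # assuming the fixed A/B/Y pin naming noted in the prediction loop below.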
422 |
423 |
424 | # Main script
425 | if __name__ == '__main__':
426 | start_time = time.time()
427 | def_path = './libraries/layout_yujie/c2670_gscl45nm_tri_routing_layer6.def'
428 | def_parser = DefParser(def_path)
429 | def_parser.parse()
430 | scale = def_parser.scale
431 |
432 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
433 | lef_parser = LefParser(lef_file)
434 | lef_parser.parse()
435 | macro_dict = lef_parser.macro_dict
436 |
437 | CELL_HEIGHT = int(float(scale) * lef_parser.cell_height)
438 | # print (CELL_HEIGHT)
439 | print ("Process file:", def_path)
440 | all_via1 = get_all_vias(def_parser, via_type="M2_M1_via")
441 |
442 | # build the net_via dictionary
443 | nets = def_parser.nets.nets
444 | # initialize the nets_via_dict
445 | nets_vias_dict = {}
446 | for net in nets:
447 | net_name = net.name
448 | nets_vias_dict[net_name] = []
449 | # add vias to nets_dict
450 | for each_via in all_via1:
451 | net = each_via[2]
452 | nets_vias_dict[net].append(each_via)
453 |
454 | # sort the vias by row
455 | via1_sorted = sort_vias_by_row(def_parser.diearea[1], CELL_HEIGHT, all_via1)
456 |
457 | # add inputs and outputs from the design to via info
458 | inputs, outputs = get_inputs_outputs(def_parser)
459 | for each_in in inputs:
460 | for each_via in nets_vias_dict[each_in]:
461 | each_via[3] = 0
462 | for each_out in outputs:
463 | for each_via in nets_vias_dict[each_out]:
464 | each_via[3] = 1
465 |
466 | MAX_DISTANCE = 2280 # OR2 cell width, can be changed later
467 |
468 | components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
469 | def_parser.components.comps)
470 | num_rows = len(components)
471 |
472 | ###############
473 | # DO PREDICTION
474 | # predict_row()
475 | # We can load the trained model
476 | pickle_filename = "./trained_models/logit_model_100916_2.pickle"
477 | try:
478 | with open(pickle_filename, 'rb') as f:
479 | logit_model = pickle.load(f)
480 | except Exception as e:
481 | print('Unable to read data from', pickle_filename, ':', e)
482 |
483 | labels = {0: 'and2', 1: 'invx1', 2: 'invx8', 3: 'nand2', 4: 'nor2',
484 | 5: 'or2'}
485 | macro_from_labels = {0: 'AND2X1', 1: 'INVX1', 2: 'INVX8', 3: 'NAND2X1',
486 | 4: 'NOR2X1', 5: 'OR2X1'}
487 |
488 | cell_labels = {'AND2X1': 'and2', 'INVX1': 'invx1', 'NAND2X1': 'nand2',
489 | 'NOR2X1': 'nor2', 'OR2X1': 'or2', 'INVX8': 'invx8'}
490 |
491 | ##############
492 | # List of standard cells
493 | std_cell_info = {}
494 | # info includes (min num vias, max num vias, width,
495 | # distance from left boundary to first pin)
496 |     # The true maximum number of vias per cell is not certain; min + 1 is
497 |     # assumed here as the upper bound.
498 | # 0 is and2, 1 is invx1, etc.
499 | std_cell_info[0] = (3, 4, 2280, 295)
500 | std_cell_info[1] = (2, 3, 1140, 315)
501 | std_cell_info[2] = (2, 3, 2660, 695)
502 | std_cell_info[3] = (3, 4, 1520, 90)
503 | std_cell_info[4] = (3, 4, 1520, 315)
504 | std_cell_info[5] = (3, 4, 2280, 695)
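505 | 
506 |     # Example reading of one entry (same units as the DEF coordinates):
507 |     # std_cell_info[1] is INVX1 -> expect 2 or 3 vias, cell width 1140,
508 |     # first pin 315 from the left cell edge.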
505 |
506 | # process
507 | # print the sorted components
508 | components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
509 | def_parser.components.comps)
510 | correct = 0
511 | total_cells = 0
512 | predicts = []
513 | actuals = []
514 | cells_reco = [] # a list of recovered cells
515 | # via_groups is only one row
516 | for i in range(len(via1_sorted)):
517 | # for i in range(0, 1):
518 | print ('Process row', (i + 1))
519 | # each via group in via_groups consist of two candidates
520 | # via_groups = group_via(via1_sorted[i], 3, MAX_DISTANCE)
521 | visited_vias = [] # later, make visited_vias a set to run faster
522 | cells_pred = []
523 | via_idx = 0
524 | while via_idx < len(via1_sorted[i]):
525 | # choosing candidates
526 | candidates = get_candidates(via_idx, via1_sorted[i], std_cell_info)
527 | best_group, prediction = predict_cell(candidates, i, logit_model,
528 | lef_parser, std_cell_info)
529 | # recover the cell information
530 | macro_name = macro_from_labels[prediction]
531 | macro_info = macro_dict[macro_from_labels[prediction]]
532 | num_pins = len(macro_info.info["PIN"]) - 2
533 | # NOTE: we assume inputs are A, B and output is Y
534 | # for each_pin in pins:
535 | # print(each_pin.name)
536 | recover = []
537 | output_net = best_group[-1][2]
538 | input_nets = []
539 | for each_via in best_group:
540 | if each_via[2] != output_net:
541 | input_nets.append(each_via[2])
542 | # NOTE: the following lines only work for 2-pin and 3-pin cell
543 | recover.append(macro_name)
544 | recover.append(input_nets)
545 | recover.append(output_net)
546 | cells_reco.append(recover)
547 |
548 | via_idx += len(best_group)
549 | # print (best_group)
550 | # print (labels[prediction])
551 | cells_pred.append(labels[prediction])
552 | for each_via in best_group:
553 | visited_vias.append(each_via)
554 |
555 | print (cells_pred)
556 | print (len(cells_pred))
557 |
558 | actual_comp = []
559 | actual_macro = []
560 | for each_comp in components[i]:
561 | actual_comp.append(cell_labels[each_comp.macro])
562 | actual_macro.append(each_comp.macro)
563 | print (actual_comp)
564 | print (len(actual_comp))
565 |
566 | num_correct, num_cells = predict_score(cells_pred, actual_comp)
567 | correct += num_correct
568 | total_cells += num_cells
569 | predicts.append(cells_pred)
570 | actuals.append(actual_comp)
571 | print ()
572 |
573 | print ("\nTotal number of cells: ", total_cells)
574 | print ("Number of correct cells predicted: ", correct)
575 | print ("Accuracy rate (%): ", correct / total_cells * 100)
576 | # print the execution time
577 | print("\n--- Execution time:")
578 | print("--- %s seconds ---" % (time.time() - start_time))
579 | print("\n")
580 | # remove images used
581 | shutil.rmtree("./images")
582 | if not os.path.exists("./images"):
583 | os.makedirs("./images")
584 |
585 | # count the time to generate the netlist separately
586 | start_time = time.time()
587 | # write the recovered verilog netlist
588 | recover_netlist(def_parser, inputs, outputs, cells_reco)
589 | print("\n--- Generate netlist time:")
590 | print("--- %s seconds ---" % (time.time() - start_time))
591 |
--------------------------------------------------------------------------------
/lef_def_parser/cell_learn.py:
--------------------------------------------------------------------------------
1 | """
2 | Train a ML model to predict cells based on vias location
3 |
4 | Name: Tri Minh Cao
5 | Email: tricao@utdallas.edu
6 | Date: October 2016
7 | """
8 |
9 | import pickle
10 | import random
11 | import os
12 | from def_parser import *
13 | from lef_parser import *
14 | import util
15 | from sklearn.linear_model import LogisticRegression
16 | import numpy as np
17 | import plot_layout
18 |
19 | FEATURE_LEN = 21
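20 | # How the 21 features appear to be composed, judging from predict_cell()
21 | # below (an inference from this code, not an external spec):
22 | #   1 value   - number of vias in the candidate group
23 | #   8 values  - per via, padded to 4 vias: y-offset from the row bottom and
24 | #               the via's net flag (set from primary inputs/outputs); -1 pad
25 | #   12 values - x and y distance for every via pair, 2 * C(4, 2) = 12,
26 | #               zero-padded when the group has fewer than 4 vias
27 | # Total: 1 + 8 + 12 = 21 = FEATURE_LEN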
20 |
21 |
22 | def save_data_pickle(dataset, filename):
23 | # pickle the merged data
24 | # filename = "./merged_data/freepdk45_10_17_16.pickle"
25 | try:
26 | with open(filename, 'wb') as f:
27 | pickle.dump(dataset, f, pickle.HIGHEST_PROTOCOL)
28 | except Exception as e:
29 | print('Unable to save data to', filename, ':', e)
30 |
31 |
32 | def merge_data(data_folder, num_cells):
33 |     """
34 |     Read the pickled data files in data_folder and merge them, keeping at most num_cells samples per macro.
35 |     :return: a dict mapping each macro name to its list of feature vectors
36 |     """
37 | random.seed(12345)
38 |
39 | all_samples = []
40 | all_labels = []
41 | pickle_files = os.listdir(data_folder)
42 | for file in pickle_files:
43 | pickle_file = os.path.join(data_folder, file)
44 | data = load_data_pickle(pickle_file)
45 | # REMOVE
46 | # pickle_file = os.path.join(data_folder, file)
47 | # try:
48 | # with open(data_folder, 'rb') as f:
49 | # dataset = pickle.load(f)
50 | # except Exception as e:
51 | # print('Unable to read data from', pickle_file, ':', e)
52 | all_samples.extend(data[0])
53 | all_labels.extend(data[1])
54 |
55 | all_dataset = (all_samples, all_labels)
56 | dataset = {}
57 | dataset['AND2X1'] = []
58 | dataset['INVX1'] = []
59 | dataset['INVX8'] = []
60 | dataset['NAND2X1'] = []
61 | dataset['NOR2X1'] = []
62 | dataset['OR2X1'] = []
63 |
64 | choices = [i for i in range(len(all_samples))]
65 | random.shuffle(choices)
66 | for idx in choices:
67 | features = all_samples[idx]
68 | label = all_labels[idx]
69 | if len(dataset[label]) < num_cells:
70 | dataset[label].append(features)
71 | cont = False
72 | for each_macro in dataset:
73 | if len(dataset[each_macro]) < num_cells:
74 | cont = True
75 | if not cont:
76 | break
77 |
78 | for each_macro in dataset:
79 | print (each_macro)
80 | print (len(dataset[each_macro]))
81 |
82 | # should return the merged data set
83 | return dataset
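84 | 
85 | # The merged structure is a dict keyed by macro name, e.g. (hypothetically)
86 | # dataset['NAND2X1'] = [feature_vector_1, feature_vector_2, ...], holding at
87 | # most num_cells feature vectors per macro, drawn in shuffled order.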
84 |
85 |
86 | def train_model(dataset, data_len, num_to_label):
87 | """
88 | Method to train model
89 | :param dataset: dataset
90 | :param data_len: total length of training set
91 |     :return: (trained model, X_train, y_train, X_test, y_test)
92 | """
93 | all_dataset = np.ndarray(shape=(data_len, FEATURE_LEN),
94 | dtype=np.int32)
95 | all_label = np.ndarray(data_len,
96 | dtype=np.int32)
97 | current_size = 0
98 | num_selected = [0, 0, 0, 0, 0, 0]
99 | while current_size < data_len:
100 | choice = random.randrange(6) # we have 6 types of cells
101 | cur_label = num_to_label[choice]
102 | cur_idx = num_selected[choice]
103 | cur_data = dataset[cur_label][cur_idx]
104 | all_dataset[current_size, :] = np.array(dataset[cur_label][cur_idx],
105 | dtype=np.int32)
106 | all_label[current_size] = choice
107 | current_size += 1
108 | num_selected[choice] += 1
109 |
110 | # shuffle the dataset
111 | random.seed(6789)
112 | all_dataset, all_label = util.randomize(all_dataset, all_label)
113 | num_train = int(0.85 * data_len)
114 |
115 | #print(max(all_label))
116 |
117 | test_dataset = all_dataset[num_train:]
118 | test_label = all_label[num_train:]
119 | train_dataset = all_dataset[:num_train]
120 | train_label = all_label[:num_train]
121 |
122 | # train a logistic regression model
123 | regr = LogisticRegression()
124 | X_train = train_dataset
125 | y_train = train_label
126 | X_test = test_dataset
127 | y_test = test_label
128 |
129 | regr.fit(X_train, y_train)
130 | score = regr.score(X_test, y_test)
131 | pred_labels = regr.predict(X_test)
132 | print(pred_labels[:100])
133 | print(score)
134 |
135 | # Save the trained model for later use
136 | # filename = "./trained_models/logit_model_103116.pickle"
137 | # save_data_pickle(regr, filename)
138 | # return the trained model
139 | return regr, X_train, y_train, X_test, y_test
140 |
141 |
142 | def predict_cell(candidates, row, model, lef_data, std_cells):
143 | """
144 | Use the trained model to choose the most probable cell from via groups.
145 | :param candidates: 2-via and 3-via groups that could make a cell
146 | :return: a tuple (chosen via group, predicted cell name)
147 | """
148 | # possibly I can use the current method of testing the width of each cell
149 | # margin = 350
150 | # dataset = np.ndarray(shape=(len(candidates), FEATURE_LEN),
151 | # dtype=np.float32)
152 | scores = [-100 for i in range(len(candidates))]
153 | for i in range(len(candidates)):
154 | if candidates[i] != -1:
155 | features = []
156 | each_group = candidates[i]
157 | # width = std_cells[2]
158 | left_margin = std_cells[i][-1]
159 | # for left_margin in range(50, 800, 50):
160 | left_pt = [each_group[0][0][0] - left_margin, CELL_HEIGHT * row]
161 | # width = each_group[-1][0][0] - left_pt[0] + margin
162 | num_vias = len(each_group)
163 | features.append(num_vias)
164 | x_bound = left_pt[0]
165 | y_bound = left_pt[1]
166 | # NOTE: some cell has 4 vias
167 | # We suppose maximum vias in a cell is 4
168 | for each_via in each_group:
169 | x_loc = each_via[0][0] - x_bound
170 | y_loc = each_via[0][1] - y_bound
171 | # features.append(x_loc)
172 | features.append(y_loc)
173 | # add via type
174 | features.append(each_via[3])
175 |             # pad the per-via (y, type) features with -1 up to a maximum of 4 vias
176 | if num_vias < 4:
177 | temp = [-1 for i in range((4 - num_vias) * 2)]
178 | features.extend(temp)
179 | # add the distance between vias
180 | for i in range(num_vias - 1):
181 | for j in range(i + 1, num_vias):
182 | x_dist = each_group[j][0][0] - each_group[i][0][0]
183 | y_dist = each_group[j][0][1] - each_group[i][0][1]
184 | features.append(x_dist)
185 | features.append(y_dist)
186 | # add extra features in case of having less vias
187 | if num_vias < 4:
188 | if num_vias == 1:
189 | remain_dists = 2 * int(util.nCr(4, 2))
190 | else:
191 | remain_dists = 2 * (int(util.nCr(4, 2) - util.nCr(num_vias, 2)))
192 | temp = [0 for i in range(remain_dists)]
193 | features.extend(temp)
194 | # do predict
195 | dataset = np.array(features, dtype=np.int32)
196 | # print(dataset)
197 | X_test = dataset.reshape(1, FEATURE_LEN)
198 | result = model.decision_function(X_test)
199 | result = result[0]
200 | # print(each_group)
201 | # print(left_margin)
202 | print(labels[i])
203 | print(features)
204 | print(result)
205 | # print()
206 | features = []
207 | if result[i] == max(result):
208 | return candidates[i], i
209 | # scores[i] = result[i]
210 | # return the best score
211 | # print(scores)
212 | # max_score = -100
213 | # best_choice = -1
214 | # for i in range(len(candidates)):
215 | # if scores[i] > max_score:
216 | # best_choice = i
217 | # max_score = scores[i]
218 | # return candidates[best_choice], best_choice
219 |
220 | # possible_candidates = []
221 | # for i in range(len(candidates)):
222 | # if candidates[i] != -1:
223 | # possible_candidates.append(i)
224 | # dataset = np.ndarray(shape=(1, img_height, img_width),
225 | # dtype=np.float32)
226 | # each_group = candidates[i]
227 | # left_pt = [each_group[0][0][0] - margin, CELL_HEIGHT * row]
228 | # width = each_group[-1][0][0] - left_pt[0] + margin
229 | # # print (width)
230 | # img_file = plot_window(left_pt, width, CELL_HEIGHT, each_group, lef_data)
231 | # # print (img_file)
232 | # image_data = img_util.load_image(img_file)
233 | # # print (image_data.shape)
234 | # dataset[0, :, :] = image_data
235 | # X_test = dataset.reshape(dataset.shape[0], img_shape)
236 | # result = model.decision_function(X_test)
237 | # result = result[0]
238 | # # print (result)
239 | # # check for result
240 | # if result[i] == max(result):
241 | # return candidates[i], i
242 | # # if we cannot find a solution, randomly select a choice
243 | # choice = random.choice(possible_candidates)
244 | # return candidates[choice], choice
245 |
246 |
247 | def predict_row():
248 | # FIXME: restructure this method
249 | # We can load the trained model
250 | pickle_filename = "./trained_models/logit_model_101716.pickle"
251 | logit_model = load_data_pickle(pickle_filename)
252 |
253 | labels = {0: 'and2', 1: 'invx1', 2: 'invx8', 3: 'nand2', 4: 'nor2',
254 | 5: 'or2'}
255 | cell_labels = {'AND2X1': 'and2', 'INVX1': 'invx1', 'NAND2X1': 'nand2',
256 | 'NOR2X1': 'nor2', 'OR2X1': 'or2', 'INVX8': 'invx8'}
257 |
258 | # process
259 | components = util.sorted_components(def_parser.diearea[1], CELL_HEIGHT,
260 | def_parser.components.comps)
261 | num_rows = len(components)
262 | # print the sorted components
263 | correct = 0
264 | total_cells = 0
265 | predicts = []
266 | actuals = []
267 | # via_groups is only one row
268 | # for i in range(len(via1_sorted)):
269 | for i in range(0, 1):
270 | via_groups = util.group_via(via1_sorted[i], 3, MAX_DISTANCE)
271 | visited_vias = [] # later, make visited_vias a set to run faster
272 | cells_pred = []
273 | for each_via_group in via_groups:
274 | first_via = each_via_group[0][0]
275 | # print (first_via)
276 | if not first_via in visited_vias:
277 | best_group, prediction = predict_cell(each_via_group, i,
278 | logit_model, lef_parser)
279 | print (best_group)
280 | print (labels[prediction])
281 | cells_pred.append(labels[prediction])
282 | for each_via in best_group:
283 | visited_vias.append(each_via)
284 | # print (best_group)
285 | # print (labels[prediction])
286 |
287 | print (cells_pred)
288 | print (len(cells_pred))
289 |
290 | actual_comp = []
291 | actual_macro = []
292 | for each_comp in components[i]:
293 | actual_comp.append(cell_labels[each_comp.macro])
294 | actual_macro.append(each_comp.macro)
295 | print (actual_comp)
296 | print (len(actual_comp))
297 |
298 | num_correct, num_cells = predict_score(cells_pred, actual_comp)
299 |
300 | correct += num_correct
301 | total_cells += num_cells
302 | predicts.append(cells_pred)
303 | actuals.append(actual_comp)
304 |
305 | print ()
306 |
307 | print (correct)
308 | print (total_cells)
309 | print (correct / total_cells * 100)
310 |
311 |
312 | def load_data_pickle(filename):
313 | try:
314 | with open(filename, 'rb') as f:
315 | dataset = pickle.load(f)
316 | except Exception as e:
317 | print('Unable to read data from', filename, ':', e)
318 | return dataset
319 |
320 |
321 | def old_main_class():
322 | num_cells_required = 900
323 | # merge_data()
324 | # load data from selected pickle
325 | set_filename = "./merged_data/selected_10_17_16.pickle"
326 | dataset = load_data_pickle(set_filename)
327 |
328 | # build the numpy array
329 | label_to_num = {'AND2X1': 0, 'INVX1': 1, 'INVX8': 2, 'NAND2X1': 3,
330 | 'NOR2X1': 4, 'OR2X1': 5}
331 |
332 | num_to_label = {0: 'AND2X1', 1: 'INVX1', 2: 'INVX8', 3: 'NAND2X1',
333 | 4: 'NOR2X1', 5: 'OR2X1'}
334 |
335 | # train_model()
336 |
337 | #######
338 | # DO SOME PREDICTION
339 | def_path = './libraries/layout_freepdk45/c880a.def'
340 | def_parser = DefParser(def_path)
341 | def_parser.parse()
342 | scale = def_parser.scale
343 |
344 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
345 | lef_parser = LefParser(lef_file)
346 | lef_parser.parse()
347 |
348 | print ("Process file:", def_path)
349 | CELL_HEIGHT = int(float(scale) * lef_parser.cell_height)
350 | all_via1 = util.get_all_vias(def_parser, via_type="M2_M1_via")
351 | # print (all_via1)
352 | # sort the vias by row
353 | via1_sorted = util.sort_vias_by_row(def_parser.diearea[1], CELL_HEIGHT, all_via1)
354 |
355 | MAX_DISTANCE = 2280 # OR2 cell width, can be changed later
356 |
357 | # predict_row()
358 |
359 |
360 | ################
361 | # new section
362 | # FIXME: need to build the netlist
363 |
364 |
365 | # test the image-based method
366 |
367 | ##############
368 | # List of standard cells
369 | std_cell_info = {}
370 | # info includes (min num vias, max num vias, width,
371 | # distance from left boundary to first pin)
372 |     # The true maximum number of vias per cell is not certain; min + 1 is
373 |     # assumed here as the upper bound.
374 | # 0 is and2, 1 is invx1, etc.
375 | std_cell_info[0] = (3, 4, 2280, 295)
376 | std_cell_info[1] = (2, 3, 1140, 315)
377 | std_cell_info[2] = (2, 3, 2660, 695)
378 | std_cell_info[3] = (3, 4, 1520, 90)
379 | std_cell_info[4] = (3, 4, 1520, 315)
380 | std_cell_info[5] = (3, 4, 2280, 695)
381 |
382 |
383 | def get_candidates(first_via_idx, via_list, std_cells):
384 | """
385 | Generate a list of candidates from the first via.
386 | Each standard cell will be considered for candidates.
387 | If the standard cell cannot be placed there, the value is -1,
388 | otherwise, it will be a list of vias.
389 | :param first_via_idx: first via index in the via_list
390 | :param via_list: the list of all vias (in a row)
391 | :param std_cells: a list that stores information of std cells
392 |     :return: a list with one entry per std cell: a group of vias, or -1 if that cell cannot fit there
393 | """
394 | # candidates = [-1 for i in range(len(std_cells))]
395 | candidates = []
396 | first_via = via_list[first_via_idx]
397 | # print (first_via)
398 | first_via_x = first_via[0][0]
399 | for i in range(len(std_cells)):
400 | cell_width = std_cells[i][2]
401 |         min_vias = std_cells[i][0]
402 | max_vias = std_cells[i][1]
403 | pin_left_dist = std_cells[i][3]
404 | boundary = first_via_x + cell_width - pin_left_dist
405 | # possible vias contain the vias inside the boundary
406 | possible_vias = [first_via]
407 | for j in range(first_via_idx + 1, len(via_list)):
408 | if via_list[j][0][0] <= boundary:
409 | possible_vias.append(via_list[j])
410 | else:
411 | break
412 | # check the candidate against cell info
413 | if len(possible_vias) > max_vias or len(possible_vias) < min_vias:
414 | candidates.append(-1)
415 | # continue
416 | else:
417 | if possible_vias not in candidates:
418 | candidates.append(possible_vias)
419 | print(candidates)
420 | print(len(candidates))
421 | return candidates
422 |
423 |
424 | def get_inputs_outputs(def_info):
425 | """
426 | Method to get all inputs and outputs nets from a DEF file.
427 | :param def_info: def info (already parsed).
428 | :return: inputs and outputs
429 | """
430 |     pins = def_info.pins.pins
431 | inputs = []
432 | outputs = []
433 | for each_pin in pins:
434 | pin_name = each_pin.name
435 | direction = each_pin.direction.lower()
436 | if direction == 'input':
437 | inputs.append(pin_name)
438 | elif direction == 'output':
439 | outputs.append(pin_name)
440 | return inputs, outputs
441 |
442 |
443 | # Main script
444 | if __name__ == '__main__':
445 | random.seed(12345)
446 | # CONSTANTS
447 | label_to_num = {'AND2X1': 0, 'INVX1': 1, 'INVX8': 2, 'NAND2X1': 3,
448 | 'NOR2X1': 4, 'OR2X1': 5}
449 |
450 | num_to_label = {0: 'AND2X1', 1: 'INVX1', 2: 'INVX8', 3: 'NAND2X1',
451 | 4: 'NOR2X1', 5: 'OR2X1'}
452 |
453 | # merge the data
454 | pickle_folder = './training_data/'
455 | dataset = merge_data(pickle_folder, 1100)
456 |
457 | # study the data
458 | # and2_data = dataset['AND2X1']
459 | # print(and2_data[:50])
460 |
461 | # pickle the merged data
462 | set_filename = "./merged_data/selected_11_03_16_less_feats.pickle"
463 | # save_data_pickle(dataset, set_filename)
464 |
465 | # train the model
466 | regr_model, X_train, y_train, X_test, y_test = train_model(dataset, 5500, num_to_label)
467 | save_data_pickle(regr_model, './trained_models/logit_110316_no_x.pickle')
468 |
469 | # study the test set
470 | for i in range(1, 100):
471 | print(num_to_label[y_test[i:i+1][0]])
472 | print(X_test[i:i+1])
473 | print(regr_model.decision_function(X_test[i:i+1]))
474 | print()
475 |
476 | # make up some cases here and see the result
477 | makeup = []
478 | # makeup.append([3, 190, 1710, 0, 950, 1710, 0, 1140, 1330, 1, -1, -1, -1])
479 | # no input/output data
480 |
481 | # makeup.append([3, 190+400, 1710, -1, 950+400, 1710, -1, 1140+400, 1330, -1, -1, -1, -1])
482 | # labels = []
483 | # labels.append(3)
484 | # X_makeup = np.array(makeup, dtype=np.int32)
485 | # for i in range(len(makeup)):
486 | # print(num_to_label[labels[i]])
487 | # print(X_makeup[i:i+1])
488 | # print(regr_model.decision_function(X_makeup[i:i+1]))
489 | # print(num_to_label[regr_model.predict(X_makeup[i:i+1])[0]])
490 | # print()
491 |
492 | # load the model
493 | # model_file = './trained_models/logit_110316_no_x.pickle'
494 | # regr_model = load_data_pickle(model_file)
495 |
496 |
497 | #######
498 | # PREDICTION
499 | # get information from DEF and LEF files
500 | def_path = './libraries/layout_freepdk45/c432.def'
501 | def_parser = DefParser(def_path)
502 | def_parser.parse()
503 | scale = def_parser.scale
504 |
505 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
506 | lef_parser = LefParser(lef_file)
507 | lef_parser.parse()
508 |
509 | print ("Process file:", def_path)
510 | CELL_HEIGHT = int(float(scale) * lef_parser.cell_height)
511 | all_via1 = util.get_all_vias(def_parser, via_type="M2_M1_via")
512 | # print (all_via1[:50])
513 |
514 | # build the net_via dictionary
515 | nets = def_parser.nets.nets
516 | # initialize the nets_via_dict
517 | nets_vias_dict = {}
518 | for net in nets:
519 | net_name = net.name
520 | nets_vias_dict[net_name] = []
521 | # add vias to nets_dict
522 | for each_via in all_via1:
523 | net = each_via[2]
524 | nets_vias_dict[net].append(each_via)
525 |
526 | # sort the vias by row
527 | via1_sorted = util.sort_vias_by_row(def_parser.diearea[1], CELL_HEIGHT, all_via1)
528 |
529 | # add inputs and outputs from the design to via info
530 | inputs, outputs = get_inputs_outputs(def_parser)
531 | # print(inputs)
532 | # print(outputs)
533 | for each_in in inputs:
534 | for each_via in nets_vias_dict[each_in]:
535 | each_via[3] = 0
536 | for each_out in outputs:
537 | for each_via in nets_vias_dict[each_out]:
538 | each_via[3] = 1
539 |
540 | # get candidates
541 | labels = {0: 'and2', 1: 'invx1', 2: 'invx8', 3: 'nand2', 4: 'nor2',
542 | 5: 'or2'}
543 | cell_labels = {'AND2X1': 'and2', 'INVX1': 'invx1', 'NAND2X1': 'nand2',
544 | 'NOR2X1': 'nor2', 'OR2X1': 'or2', 'INVX8': 'invx8'}
545 |
546 | ##############
547 | # List of standard cells
548 | std_cell_info = {}
549 | # info includes (min num vias, max num vias, width,
550 | # distance from left boundary to first pin)
551 |     # The true maximum number of vias per cell is not certain; min + 1 is
552 |     # assumed here as the upper bound.
553 | # 0 is and2, 1 is invx1, etc.
554 | std_cell_info[0] = (3, 4, 2280, 295)
555 | std_cell_info[1] = (2, 3, 1140, 315)
556 | std_cell_info[2] = (2, 3, 2660, 695)
557 | std_cell_info[3] = (3, 4, 1520, 90)
558 | std_cell_info[4] = (3, 4, 1520, 315)
559 | std_cell_info[5] = (3, 4, 2280, 695)
560 | # find the sorted components
561 | components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
562 | def_parser.components.comps)
563 | correct = 0
564 | total_cells = 0
565 | predicts = []
566 | actuals = []
567 | # via_groups is only one row
568 | # for i in range(len(via1_sorted)):
569 | for i in range(0, 1):
570 | print ('Process row', (i + 1))
571 | visited_vias = [] # later, make visited_vias a set to run faster
572 | cells_pred = []
573 | via_idx = 3
574 | while via_idx < len(via1_sorted[i]):
575 | # while via_idx < 3:
576 | # choosing candidates
577 | candidates = get_candidates(via_idx, via1_sorted[i], std_cell_info)
578 | best_group, prediction = predict_cell(candidates, i, regr_model,
579 | lef_parser, std_cell_info)
580 | via_idx += len(best_group)
581 | print(best_group)
582 | print(labels[prediction])
583 | # cells_pred.append(labels[prediction])
584 | # for each_via in best_group:
585 | # visited_vias.append(each_via)
586 |
587 | """
588 | print (cells_pred)
589 | print (len(cells_pred))
590 |
591 | actual_comp = []
592 | actual_macro = []
593 | for each_comp in components[i]:
594 | actual_comp.append(cell_labels[each_comp.macro])
595 | actual_macro.append(each_comp.macro)
596 | print (actual_comp)
597 | print (len(actual_comp))
598 |
599 | # check predictions vs actual cells
600 | # for i in range(len(actual_comp)):
601 | # if cells_pred[i] == actual_comp[i]:
602 | # correct += 1
603 | num_correct, num_cells = predict_score(cells_pred, actual_comp)
604 |
605 | correct += num_correct
606 | total_cells += num_cells
607 | predicts.append(cells_pred)
608 | actuals.append(actual_comp)
609 |
610 | print ()
611 |
612 | print (correct)
613 | print (total_cells)
614 | print (correct / total_cells * 100)
615 | """
616 |
617 |
--------------------------------------------------------------------------------
/lef_def_parser/plot_layout_new_model.py:
--------------------------------------------------------------------------------
1 | """
2 | Program to plot vias in the whole layout using DEF and LEF data.
3 |
4 | Author: Tri Minh Cao
5 | Email: tricao@utdallas.edu
6 | Date: September 2016
7 | """
8 |
9 | from def_parser import *
10 | from lef_parser import *
11 | from util import *
12 | import plot_cell
13 | import matplotlib.pyplot as plt
14 | import numpy as np
15 | import time
16 | import img_util
17 | import pickle
18 | import random
19 | import os
20 | import time
21 | import shutil
22 |
23 |
24 | def sort_vias_by_row(layout_area, row_height, vias):
25 | """
26 | Sort the vias by row
27 | :param layout_area: a list [x, y] that stores the area of the layout
28 | :param vias: a list of vias that need to be sorted
29 | :return: a list of rows, each containing a list of vias in that row.
30 | """
31 | num_rows = layout_area[1] // row_height + 1
32 | rows = []
33 | for i in range(num_rows):
34 | rows.append([])
35 | for via in vias:
36 | via_y = via[0][1]
37 | row_dest = via_y // row_height
38 | rows[row_dest].append(via)
39 | # sort vias in each row based on x-coordinate
40 | for each_row in rows:
41 | each_row.sort(key = lambda x: x[0][0])
42 | return rows
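43 | 
44 | # Example of the bucketing (hypothetical numbers): with row_height = 3800, a
45 | # via at y = 7700 lands in rows[7700 // 3800] = rows[2]; each row is then
46 | # sorted left to right by the via's x-coordinate.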
43 |
44 |
45 | def plot_window(left_pt, width, height, vias, lef_data, macro=None, comp=None):
46 | """
47 | Method to plot a window from the layout with all vias inside it.
48 | :param left_pt: bottom left point (origin) of the window
49 | :param width: width of the window
50 | :param height: height of the window
51 | :param vias: a list containing all vias on a row
52 |     :return: path of the saved .png image file (reused if it already exists)
53 | """
54 | # get the corners for the window
55 | corners = [left_pt]
56 | corners.append((left_pt[0] + width, left_pt[1] + height))
57 | # compose the output file name
58 | out_folder = './images/'
59 | # current_time = time.strftime('%H%M%d%m%Y')
60 | pos = (str(corners[0][0]) + '_' + str(corners[0][1]) + '_' +
61 | str(corners[1][0]) + '_' + str(corners[1][1]))
62 | # out_file = out_folder + pos
63 | out_file = out_folder
64 | # out_file += str(corners[0][0])
65 | out_file += pos
66 | if macro:
67 | out_file += '_' + macro
68 | if comp:
69 | out_file += '_' + comp
70 | # current_time = time.strftime('%H%M%S%d%m%Y')
71 | # out_file += '_' + current_time
72 |
73 | if os.path.exists(out_file + '.png'):
74 | return out_file + '.png'
75 |
76 | plt.figure(figsize=(1, 1.6), dpi=80, frameon=False)
77 | # scale the axis of the subplot
78 | # draw the window boundary
79 | # scaled_pts = rect_to_polygon(corners)
80 | # draw_shape = plt.Polygon(scaled_pts, closed=True, fill=None,
81 | # color="blue")
82 | # plt.gca().add_patch(draw_shape)
83 |
84 | # plot the vias inside the windows
85 | # look for the vias
86 | for via in vias:
87 | if (via[0][0] - left_pt[0] > width):
88 | break
89 | via_name = via[1]
90 | via_info = lef_data.via_dict[via_name]
91 | via_loc = via[0]
92 | plot_cell.draw_via(via_loc, via_info)
93 |
94 | # scale the axis of the subplot
95 | axis = [corners[0][0], corners[1][0], corners[0][1], corners[1][1]]
96 | # print (test_axis)
97 | plt.axis(axis)
98 | plt.axis('off')
99 | plt.gca().set_aspect('equal', adjustable='box')
100 | plt.savefig(out_file)
101 | # plt.show()
102 | plt.close('all')
103 | return out_file + '.png'
104 |
105 |
106 | def group_via(via_list, max_number, max_distance):
107 | """
108 | Method to group the vias together to check if they belong to a cell.
109 | :param via_list: a list of all vias.
110 | :return: a list of groups of vias.
111 | """
112 | groups = []
113 | length = len(via_list)
114 | for i in range(length):
115 | # one_group = [via_list[i]]
116 | curr_via = via_list[i]
117 | curr_list = []
118 | for j in range(2, max_number + 1):
119 | if i + j - 1 < length:
120 | right_via = via_list[i + j - 1]
121 | dist = right_via[0][0] - curr_via[0][0]
122 | if dist < max_distance:
123 | curr_list.append(via_list[i:i+j])
124 | # only add via group list that is not empty
125 | if len(curr_list) > 0:
126 | groups.append(curr_list)
127 | return groups
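128 | 
129 | # Example (hypothetical x-coordinates, max_number = 3, max_distance = 2280):
130 | # for a via at x = 1000 followed by vias at x = 1900 and x = 3500, the 2-via
131 | # slice [1000, 1900] is kept (span 900 < 2280) while the 3-via slice is not,
132 | # because 3500 - 1000 = 2500 exceeds the limit.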
128 |
129 |
130 | def predict_cell(candidates, row, model, lef_data, std_cells):
131 | """
132 | Use the trained model to choose the most probable cell from via groups.
133 | :param candidates: 2-via and 3-via groups that could make a cell
134 | :return: a tuple (chosen via group, predicted cell name)
135 | """
136 | margin = 350
137 | img_width = 126
138 | img_height = 66
139 | img_shape = img_width * img_height
140 | possible_candidates = []
141 | for i in range(len(candidates)):
142 | # dataset = np.ndarray(shape=(len(candidates), img_height, img_width),
143 | # dtype=np.float32)
144 | if candidates[i] != -1:
145 | possible_candidates.append(i)
146 | # dataset = np.ndarray(shape=(1, img_height, img_width),
147 | # dtype=np.float32)
148 | dataset = np.ndarray(shape=(1, img_width, img_height),
149 | dtype=np.float32)
150 | each_group = candidates[i]
151 | left_pt = [each_group[0][0][0] - margin, CELL_HEIGHT * row]
152 | width = each_group[-1][0][0] - left_pt[0] + margin
153 | # print (width)
154 | img_file = plot_window(left_pt, width, CELL_HEIGHT, each_group, lef_data)
155 | # print (img_file)
156 | image_data = img_util.load_image(img_file)
157 | # print (image_data.shape)
158 | dataset[0, :, :] = image_data
159 | X_test = dataset.reshape(dataset.shape[0], img_shape)
160 | result = model.decision_function(X_test)
161 | result = result[0]
162 | # check for result
163 | if result[i] == max(result):
164 | return candidates[i], i
165 | # if we cannot find a solution, randomly select a choice
166 | choice = random.choice(possible_candidates)
167 | return candidates[choice], choice
168 |
169 |
170 | def sorted_components(layout_area, row_height, comps):
171 | """
172 | Sort the components by row
173 | :param layout_area: a list [x, y] that stores the area of the layout
174 | :param comps: a list of components that need to be sorted
175 | :return: a list of rows, each containing a list of components in that row.
176 | """
177 | num_rows = layout_area[1] // row_height + 1
178 | rows = []
179 | for i in range(num_rows):
180 | rows.append([])
181 | for comp in comps:
182 | comp_y = comp.placed[1]
183 | row_dest = comp_y // row_height
184 | rows[row_dest].append(comp)
185 | # sort vias in each row based on x-coordinate
186 | for each_row in rows:
187 | each_row.sort(key = lambda x: x.placed[0])
188 | return rows
189 |
190 |
191 | def predict_score(predicts, actuals):
192 | """
193 | Find the number of correct cell predictions.
194 | :param predicts: a list of predictions.
195 | :param actuals: a list of actual cells.
196 | :return: # correct predictions, # cells
197 | """
198 | len_preds = len(predicts)
199 | len_actuals = len(actuals)
200 | shorter_len = min(len_preds, len_actuals)
201 | gap_predict = 0
202 | gap_actual = 0
203 | num_correct = 0
204 | # print (shorter_len)
205 | for i in range(shorter_len):
206 | # print (i)
207 | # print (gap_predict)
208 | # print (gap_actual)
209 | # print ()
210 | if predicts[i + gap_predict] == actuals[i + gap_actual]:
211 | num_correct += 1
212 | else:
213 | if len_preds < len_actuals:
214 | gap_actual += 1
215 | len_preds += 1
216 | elif len_preds > len_actuals:
217 | gap_predict += 1
218 | len_actuals += 1
219 | return num_correct, len(actuals)
220 |
221 |
222 | def plot_cell_w_vias():
223 | # process each row, plot all cells
224 | # for i in range(num_rows):
225 | margin = 350
226 | for i in range(1):
227 | via_idx = 0
228 | print (len(components[i]))
229 | print (len(via1_sorted[i]))
230 | for each_comp in components[i]:
231 | comp_name = each_comp.name
232 | macro_name = each_comp.macro
233 | macro_data = lef_parser.macro_dict[macro_name]
234 | num_vias = len(macro_data.pin_dict) - 2 # because of VDD and GND pins
235 | # get the vias
236 | cell_vias = via1_sorted[i][via_idx:via_idx + num_vias]
237 | # update via_idx
238 | via_idx += num_vias
239 | # plot the cell
240 | left_pt = [cell_vias[0][0][0] - margin, CELL_HEIGHT * i]
241 | width = cell_vias[-1][0][0] - left_pt[0] + margin
242 | # print (width)
243 | img_file = plot_window(left_pt, width, CELL_HEIGHT, cell_vias,
244 | lef_parser, macro=macro_name, comp = comp_name)
245 | print (comp_name)
246 | print (macro_name)
247 | print (cell_vias)
248 | print (via_idx)
249 | print('Finished!')
250 |
251 |
252 | def check_via_group(via_group, source_sink):
253 | """
254 | Check the validity of each via set in the via group.
255 | :param via_group: the via_group in question.
256 | :return: via_group with all valid candidate(s)
257 | """
258 | # valid for 2-via cell: 1 source, 1 sink
259 | # valid for 3-via cell: 2 sink, 1 source
260 | valid_group = []
261 | for each_group in via_group:
262 | num_vias = len(each_group)
263 | num_source = 0
264 | num_sink = 0
265 | for each_via in each_group:
266 | # 0 = sink, 1 = source
267 | if source_sink[each_via[2]] == 1:
268 | num_source += 1
269 | elif source_sink[each_via[2]] == 0:
270 | num_sink += 1
271 | if num_source <= 1 and num_sink <=2:
272 | valid_group.append(each_group)
273 | return valid_group
274 |
275 |
276 | def get_candidates(first_via_idx, via_list, std_cells):
277 | """
278 | Generate a list of candidates from the first via.
279 | Each standard cell will be considered for candidates.
280 | If the standard cell cannot be placed there, the value is -1,
281 | otherwise, it will be a list of vias.
282 | :param first_via_idx: first via index in the via_list
283 | :param via_list: the list of all vias (in a row)
284 | :param std_cells: a list that stores information of std cells
285 |     :return: a list with one entry per std cell (a via group or -1), or -1 if no std cell fits
286 | """
287 | # candidates = [-1 for i in range(len(std_cells))]
288 | candidates = []
289 | first_via = via_list[first_via_idx]
290 | # print (first_via)
291 | first_via_x = first_via[0][0]
292 | for i in range(len(std_cells)):
293 | cell_width = std_cells[i][2]
294 |         min_vias = std_cells[i][0]
295 | max_vias = std_cells[i][1]
296 | pin_left_dist = std_cells[i][3]
297 | boundary = first_via_x + cell_width - pin_left_dist
298 | # boundary = first_via_x + cell_width
299 | # possible vias contain the vias inside the boundary
300 | possible_vias = [first_via]
301 | for j in range(first_via_idx + 1, len(via_list)):
302 | if via_list[j][0][0] <= boundary:
303 | possible_vias.append(via_list[j])
304 | else:
305 | break
306 | # check the candidate against cell info
307 | if len(possible_vias) > max_vias or len(possible_vias) < min_vias:
308 | candidates.append(-1)
309 | else:
310 | candidates.append(possible_vias)
311 | for each_cand in candidates:
312 | if each_cand != -1:
313 | return candidates
314 | return -1
315 |
316 |
317 | def get_inputs_outputs(def_info):
318 | """
319 | Method to get all inputs and outputs nets from a DEF file.
320 | :param def_info: def info (already parsed).
321 | :return: inputs and outputs
322 | """
323 |     pins = def_info.pins.pins
324 | inputs = []
325 | outputs = []
326 | for each_pin in pins:
327 | pin_name = each_pin.name
328 | direction = each_pin.direction.lower()
329 | if direction == 'input':
330 | inputs.append(pin_name)
331 | elif direction == 'output':
332 | outputs.append(pin_name)
333 | return inputs, outputs
334 |
335 |
336 | def recover_netlist(def_info, inputs, outputs, recovered_cells):
337 | """
338 | Method to create a netlist from predicted cells
339 | :param def_info: information from the DEF file
340 | :param inputs: input pins of the design
341 | :param outputs: output pins of the design
342 | :param recovered_cells: recovered cells with input nets and output nets
343 | :return: recovered netlist file name
344 | """
345 | # NOTE: the order of nets is not like that in original netlist
346 | design = def_info.design_name
347 | nets = set(def_info.nets.net_dict.keys())
348 | inputs_set = set(inputs)
349 | outputs_set = set(outputs)
350 | io = inputs_set | outputs_set
351 | wires = nets - io
352 | # print(wires)
353 | # print(len(wires))
354 |
355 | # save the cells_reco for later inspection
356 | # filename = './recovered/' + design + '.pickle'
357 | # try:
358 | # with open(filename, 'wb') as f:
359 | # pickle.dump(cells_reco, f, pickle.HIGHEST_PROTOCOL)
360 | # except Exception as e:
361 | # print('Unable to save data to', filename, ':', e)
362 |
363 |     ## mm/dd/yyyy HH:MM:SS format
364 | date = time.strftime("%m/%d/%Y %H:%M:%S")
365 | s = '/////////////////////////////\n'
366 | s += '// Generated by TMC\n'
367 | s += '// Design: ' + design + '\n'
368 | s += '// Date: ' + date + '\n'
369 | s += '/////////////////////////////\n\n'
370 |
371 | # add module definition
372 | s += 'module ' + design + ' ( '
373 | num_ios = len(io)
374 | idx = 0
375 | for each_pin in io:
376 | s += each_pin
377 | idx += 1
378 | if idx < num_ios:
379 | s += ', '
380 | s += ' );\n'
381 |
382 | indent = ' '
383 | # add input
384 | num_in = len(inputs)
385 | idx = 0
386 | s += indent + 'input '
387 | for each_in in inputs:
388 | s += each_in
389 | idx += 1
390 | if idx < num_in:
391 | s += ', '
392 | s += ';\n'
393 | # add output
394 | num_out = len(outputs)
395 | idx = 0
396 | s += indent + 'output '
397 | for each_out in outputs:
398 | s += each_out
399 | idx += 1
400 | if idx < num_out:
401 | s += ', '
402 | s += ';\n'
403 | # add wire
404 | num_wire = len(wires)
405 | idx = 0
406 | s += indent + 'wire '
407 | for each_wire in wires:
408 | s += each_wire
409 | idx += 1
410 | if idx < num_wire:
411 | s += ', '
412 | s += ';\n'
413 | # add cells
414 | s += '\n'
415 | cell_idx = 2
416 |     for each_cell in recovered_cells:
417 | cell_idx += 1
418 | s += indent + each_cell[0] + ' U' + str(cell_idx) + ' ( '
419 | in_nets = each_cell[1]
420 | s += '.A(' + in_nets[0] + ')' + ', '
421 | if len(in_nets) == 2:
422 | s += '.B(' + in_nets[1] + ')' + ', '
423 | out_net = each_cell[2]
424 | s += '.Y(' + out_net + ')'
425 | s += ' );\n'
426 |     s += 'endmodule'
427 |
428 | # write to an output file
429 | folder = './recovered/'
430 | filename = design + '_recovered' + '.v'
431 | print('Writing recovered netlist file...')
432 |     with open(folder + filename, mode="w+") as f:
433 |         f.write(s)
434 | 
435 | print('Writing done.')
436 | return filename
437 |
438 |
439 | def closest_via_pair_y(via_group):
440 | """
441 | Method to find the closest pair of via based on y-coordinate.
442 | :param via_group: a list of vias
443 | :return: the pair of vias that are closest based on y-coordinate.
444 | """
445 |     closest_pair = None  # will hold the closest (via_i, via_j) pair
446 | min_dist = float('inf')
447 | for i in range(len(via_group)):
448 | for j in range(i + 1, len(via_group)):
449 | y_i = via_group[i][0][1]
450 | y_j = via_group[j][0][1]
451 | if abs(y_i - y_j) < min_dist:
452 | min_dist = abs(y_i - y_j)
453 | closest_pair = (via_group[i], via_group[j])
454 | return closest_pair
455 |
456 |
457 |
458 |
459 | # Main script
460 | if __name__ == '__main__':
461 |
462 | path = './libraries/layout_yujie/'
463 | files = os.listdir(path)
464 | files = ['b18_C_gscl45nm_tri_routing_layer9.def']
465 | for f in files:
466 | start_time = time.time()
467 | # def_path = './libraries/layout_yujie/c432.def'
468 | def_path = path + f
469 | def_parser = DefParser(def_path)
470 | def_parser.parse()
471 | scale = def_parser.scale
472 |
473 | lef_file = "./libraries/FreePDK45/gscl45nm.lef"
474 | lef_parser = LefParser(lef_file)
475 | lef_parser.parse()
476 | macro_dict = lef_parser.macro_dict
477 |
478 | CELL_HEIGHT = int(float(scale) * lef_parser.cell_height)
479 | # print (CELL_HEIGHT)
480 | print ("Process file:", def_path)
481 | all_via1 = get_all_vias(def_parser, via_type="M2_M1_via")
482 |
483 |         # track each net's role on the fly: 0 = already driven (primary input or an assigned cell output), 1 = free internal net, 2 = primary output
484 | netlist_fly = dict()
485 | # build the net_via dictionary
486 | nets = def_parser.nets.nets
487 | # initialize the nets_via_dict
488 | nets_vias_dict = {}
489 | for net in nets:
490 | net_name = net.name
491 | nets_vias_dict[net_name] = []
492 | # initialize all the nets = 1
493 | netlist_fly[net_name] = 1
494 | # add vias to nets_dict
495 | for each_via in all_via1:
496 | net = each_via[2]
497 | nets_vias_dict[net].append(each_via)
498 |
499 | # sort the vias by row
500 | via1_sorted = sort_vias_by_row(def_parser.diearea[1], CELL_HEIGHT, all_via1)
501 |
502 | # add inputs and outputs from the design to via info
503 | inputs, outputs = get_inputs_outputs(def_parser)
504 | for each_in in inputs:
505 | # print(each_in)
506 | netlist_fly[each_in] = 0
507 | for each_via in nets_vias_dict[each_in]:
508 | each_via[3] = 0
509 | for each_out in outputs:
510 | netlist_fly[each_out] = 2
511 | for each_via in nets_vias_dict[each_out]:
512 | each_via[3] = 1
513 |
514 | MAX_DISTANCE = 2280 # OR2 cell width, can be changed later
515 |
516 | components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
517 | def_parser.components.comps)
518 | num_rows = len(components)
519 |
520 | ###############
521 | # DO PREDICTION
522 | # predict_row()
523 | # We can load the trained model
524 | pickle_filename = "./trained_models/logit_model_111816.pickle"
525 | try:
526 | with open(pickle_filename, 'rb') as f:
527 | logit_model = pickle.load(f)
528 | except Exception as e:
529 | print('Unable to read data from', pickle_filename, ':', e)
530 |
531 | labels = {0: 'and2', 1: 'invx1', 2: 'invx8', 3: 'nand2', 4: 'nor2',
532 | 5: 'or2'}
533 | macro_from_labels = {0: 'AND2X1', 1: 'INVX1', 2: 'INVX8', 3: 'NAND2X1',
534 | 4: 'NOR2X1', 5: 'OR2X1'}
535 |
536 | cell_labels = {'AND2X1': 'and2', 'INVX1': 'invx1', 'NAND2X1': 'nand2',
537 | 'NOR2X1': 'nor2', 'OR2X1': 'or2', 'INVX8': 'invx8'}
538 |
539 | ##############
540 | # List of standard cells
541 | std_cell_info = {}
542 | # info includes (min num vias, max num vias, width,
543 | # distance from left boundary to first pin)
544 | # also need distance from right most pin to right boundary
545 |         # The true maximum number of vias per cell is not certain; min + 1 is
546 |         # assumed here as the upper bound.
547 | # 0 is and2, 1 is invx1, etc.
548 | std_cell_info[0] = (3, 4, 2280, 295 + 200)
549 | std_cell_info[1] = (2, 3, 1140, 315 + 245)
550 | std_cell_info[2] = (2, 3, 2660, 695 + 500)
551 | std_cell_info[3] = (3, 4, 1520, 90 + 200)
552 | std_cell_info[4] = (3, 4, 1520, 315 + 200)
553 | std_cell_info[5] = (3, 4, 2280, 695 + 150)
554 |
555 | # process
556 | # print the sorted components
557 | components = sorted_components(def_parser.diearea[1], CELL_HEIGHT,
558 | def_parser.components.comps)
559 | correct = 0
560 | total_cells = 0
561 | predicts = []
562 | actuals = []
563 | cells_reco = [] # a list of recovered cells
564 | # vias_reco = [] # a list of vias in the predicted cell, for debug purpose
565 | # via_groups is only one row
566 | # for i in range(len(via1_sorted)):
567 | for i in range(0, 1):
568 | print ('Process row', (i + 1))
569 | # each via group in via_groups consist of two candidates
570 | # via_groups = group_via(via1_sorted[i], 3, MAX_DISTANCE)
571 | visited_vias = [] # later, make visited_vias a set to run faster
572 | cells_pred = []
573 | via_idx = 0
574 | while via_idx < len(via1_sorted[i]):
575 | # choosing candidates
576 | candidates = get_candidates(via_idx, via1_sorted[i], std_cell_info)
577 | print(via_idx)
578 | print(via1_sorted[i][via_idx])
579 | print(candidates)
580 | if candidates == -1:
581 |                 print('No valid candidate group found for this via; skipping it.')
582 | via_idx += 1
583 | else:
584 | # corner case: no possible candidates
585 | best_group, prediction = predict_cell(candidates, i, logit_model,
586 | lef_parser, std_cell_info)
587 | # recover the cell information
588 | macro_name = macro_from_labels[prediction]
589 | macro_info = macro_dict[macro_from_labels[prediction]]
590 | num_pins = len(macro_info.info["PIN"]) - 2
591 | # NOTE: we assume inputs are A, B and output is Y
592 | # for each_pin in pins:
593 | # print(each_pin.name)
594 | recover = []
595 | input_nets = []
596 | if macro_name == 'INVX1':
597 | output_net = False
598 | for each_via in best_group:
599 | net_name = each_via[2]
600 | if netlist_fly[net_name] == 2:
601 | output_net = net_name
602 | break
603 | for each_via in best_group:
604 | net_name = each_via[2]
605 | if net_name != output_net:
606 | input_nets.append(net_name)
607 | break
608 | if not output_net:
609 | output_net = best_group[-1][2]
610 | input_nets.append(best_group[0][2])
611 | elif macro_name == 'INVX8':
612 | output_net = False
613 | for each_via in best_group:
614 | net_name = each_via[2]
615 | if netlist_fly[net_name] == 2:
616 | output_net = net_name
617 | break
618 | for each_via in best_group:
619 | net_name = each_via[2]
620 | if net_name != output_net:
621 | input_nets.append(net_name)
622 | break
623 | # second approach
624 | if output_net == False:
625 | input_nets = []
626 | middle_y = CELL_HEIGHT * i + CELL_HEIGHT/2
627 | min_dist = float('inf')
628 | min_via = None
629 | for each_via in best_group:
630 | y_dist = abs(each_via[0][1] - middle_y)
631 | if y_dist < min_dist:
632 |                                 min_dist, min_via = y_dist, each_via
633 | input_nets.append(min_via[2])
634 | for each_via in best_group:
635 | net_name = each_via[2]
636 | if net_name != input_nets[0]:
637 | output_net = net_name
638 | break
639 | else:
640 | input_vias = closest_via_pair_y(best_group)
641 | for each_via in input_vias:
642 | input_nets.append(each_via[2])
643 | for each_via in best_group:
644 | if each_via not in input_vias:
645 | output_net = each_via[2]
646 | netlist_fly[each_via[2]] = 0
647 | break
648 |
649 | # for j in range(len(best_group) - 1, -1, -1):
650 | # net_name = best_group[j][2]
651 | # if netlist_fly[net_name] == 1:
652 | # output_net = net_name
653 | # netlist_fly[net_name] = 0
654 | # break
655 | # for j in range(len(best_group)):
656 | # net_name = best_group[j][2]
657 | # if net_name != output_net and len(input_nets) + 1 < num_pins:
658 | # input_nets.append(net_name)
659 |
660 | # corner case: same nets for some or all vias in the group
661 | # in this case, actually we can ignore the result
662 | # num_inputs = len(input_nets)
663 | # for i in range(num_inputs, num_pins - 1):
664 | # input_nets.append(best_group[i][2])
665 |
666 | # NOTE: the following lines only work for 2-pin and 3-pin cell
667 | if len(input_nets) + 1 == num_pins:
668 | recover.append(macro_name)
669 | recover.append(input_nets)
670 | recover.append(output_net)
671 | cells_reco.append(recover)
672 | # vias_reco.append((best_group, macro_from_labels[prediction]))
673 |
674 | via_idx += len(best_group)
675 | # print (best_group)
676 | # print (labels[prediction])
677 | cells_pred.append(labels[prediction])
678 | for each_via in best_group:
679 | visited_vias.append(each_via)
680 |
681 | print (cells_pred)
682 | print (len(cells_pred))
683 |
684 | actual_comp = []
685 | actual_macro = []
686 | for each_comp in components[i]:
687 | actual_comp.append(cell_labels[each_comp.macro])
688 | actual_macro.append(each_comp.macro)
689 | print (actual_comp)
690 | print (len(actual_comp))
691 |
692 | num_correct, num_cells = predict_score(cells_pred, actual_comp)
693 | correct += num_correct
694 | total_cells += num_cells
695 | predicts.append(cells_pred)
696 | actuals.append(actual_comp)
697 | print ()
698 |
699 | print ("\nTotal number of cells: ", total_cells)
700 | print ("Number of correct cells predicted: ", correct)
701 | print ("Accuracy rate (%): ", correct / total_cells * 100)
702 | # print the execution time
703 | print("\n--- Execution time:")
704 | print("--- %s seconds ---" % (time.time() - start_time))
705 | print("\n")
706 | # remove images used
707 | shutil.rmtree("./images")
708 | if not os.path.exists("./images"):
709 | os.makedirs("./images")
710 |
711 | # count the time to generate the netlist separately
712 | start_time = time.time()
713 | # write the recovered verilog netlist
714 | recover_netlist(def_parser, inputs, outputs, cells_reco)
715 | print("\n--- Generate netlist time:")
716 | print("--- %s seconds ---" % (time.time() - start_time))
717 |
718 | # debug = (cells_reco, vias_reco)
719 | # filename = './recovered/debug/c5315_debug' + '.pickle'
720 | # try:
721 | # with open(filename, 'wb') as f:
722 | # pickle.dump(debug, f, pickle.HIGHEST_PROTOCOL)
723 | # except Exception as e:
724 | # print('Unable to save data to', filename, ':', e)
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | '''
2 | Authors:
3 | Ramez Moussa
4 | Hany Moussa
5 | '''
6 |
7 | import sys
8 | sys.path.insert(0, './lef_def_parser')
9 | import matplotlib as plt
10 | from def_parser import *
11 | from lef_parser import *
12 | import codecs
13 | from collections import defaultdict
14 | import datetime
15 | from convertDEF import *
16 | from extractUnitsFromLEF import *
17 | import os
18 | #in order to print Date in the SPEF file
19 | now = datetime.datetime.now()
20 |
21 |
22 |
23 | # this extracts the vias and via rules defined in the DEF file, given the lines in which the vias are defined
24 | def extractViasFromDef(vias_data):
25 | vias = {}
26 | for line in vias_data:
27 | l = line.strip().split()
28 | if(len(l) > 0):
29 |
30 | if(l[0] == '-'):
31 | current_via_name = l[1]
32 | vias[current_via_name] = []
33 |
34 | elif(l[0] != ';'):
35 | vias[current_via_name].append(l)
36 |
37 |
38 |
39 | for via, lines in vias.items():
40 |
41 | current_via = {}
42 | viaRule = (lines[0][1].lower() == 'viarule')
43 | if(viaRule):
44 | for line in lines:
45 | current_via[line[1]] = line[2:]
46 |
47 |
48 | else:
49 | layers = []
50 | for line in lines:
51 | layers.append(line[2])
52 | current_via['LAYERS'] = layers
53 | vias_dict_def[via] = current_via
54 |
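# Illustrative sketch (via and layer names are hypothetical): for a DEF VIAS entry whose
# continuation lines are split one keyword per line, e.g.
#   - via1_0
#     + VIARULE Via1Rule
#     + CUTSIZE 150 150
#     + LAYERS met1 via1 met2
#     ;
# the VIARULE branch above stores the tokens after each keyword, so
# vias_dict_def['via1_0']['LAYERS'] == ['met1', 'via1', 'met2'];
# a fixed-geometry via (defined by '+ RECT <layer> ...' lines) instead gets only its
# layer names collected under the 'LAYERS' key.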
55 |
56 |
57 | #name-mapping method that shortens all net names in order to minimize the SPEF file size
58 | def remap_names():
59 | name_counter = 0
60 | map_of_names = []
61 | for key in def_parser.nets.net_dict:
62 | new_name = []
63 | new_name.append(def_parser.nets.net_dict[key].name)
64 | def_parser.nets.net_dict[key].name = "*" + str(name_counter)
65 | new_name.append(def_parser.nets.net_dict[key].name)
66 | name_counter += 1
67 | map_of_names.append(new_name)
68 | return(map_of_names)
69 |
70 |
71 | def printNameMap(map_of_names): #printing the entries of the name map into the SPEF file
72 | f.write('*NAME_MAP\n')
73 | for entry in map_of_names:
74 | f.write(entry[1] + " " + entry[0] + "\n")
75 | f.write("\n")
76 |
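# Illustrative output (net names are hypothetical): with map_of_names = [['clk', '*0'], ['net12', '*1']]
# the following section is written to the SPEF file:
#   *NAME_MAP
#   *0 clk
#   *1 net12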
77 | # A method that takes an instance and a pin and appends to listOfPinRects all
78 | # rectangles of that pin, transformed according to the instance placement and orientation
79 | def getPinLocation(instanceName, pinName, metalLayer, listOfPinRects):
80 | #myInstance = def_parser.components.get_comp(instanceName)
81 | origin = def_parser.components.comp_dict[instanceName].placed
82 | orientation = def_parser.components.comp_dict[instanceName].orient
83 | cellType = def_parser.components.comp_dict[instanceName].macro
84 | cellWidth= lef_parser.macro_dict[cellType].info['SIZE'][0] * l2d
85 | cellHeight = lef_parser.macro_dict[cellType].info['SIZE'][1] * l2d
86 |
87 | pinObject = lef_parser.macro_dict[cellType].pin_dict[pinName]
88 | port_info = pinObject.info['PORT'].info['LAYER'][0]
89 |
90 |
91 |
92 | if(orientation == 'N'):
93 | for shape in port_info.shapes:
94 | llx = shape.points[0][0]*l2d + origin[0]
95 | lly = shape.points[0][1]*l2d + origin[1]
96 | urx = shape.points[1][0]*l2d + origin[0]
97 | ury = shape.points[1][1]*l2d + origin[1]
98 | ll = (llx, lly)
99 | ur = (urx, ury)
100 | listOfPinRects.append((ll, ur, metalLayer) )
101 |
102 | if(orientation == 'S'):
103 | # consider origin to be top right corner
104 | rotatedOrigin = (origin[0]+cellWidth, origin[1] + cellHeight)
105 | for shape in port_info.shapes:
106 | llx = rotatedOrigin[0] - shape.points[1][0]*l2d
107 | lly = rotatedOrigin[1] - shape.points[1][1]*l2d
108 | urx = rotatedOrigin[0] - shape.points[0][0]*l2d
109 | ury = rotatedOrigin[1] - shape.points[0][1]*l2d
110 | ll = (llx, lly)
111 | ur = (urx, ury)
112 | listOfPinRects.append((ll, ur, metalLayer))
113 |
114 | if(orientation == 'W'):
115 | # consider origin to be bottom right corner
116 | rotatedOrigin = (origin[0]+cellHeight, origin[1])
117 | for shape in port_info.shapes:
118 | lrx = rotatedOrigin[0] - shape.points[0][1]*l2d
119 | lry = rotatedOrigin[1] + shape.points[0][0]*l2d
120 | ulx = rotatedOrigin[0] - shape.points[1][1]*l2d
121 | uly = rotatedOrigin[1] + shape.points[1][0]*l2d
122 |
123 | ll = (ulx, lry)
124 | ur = (lrx, uly)
125 | listOfPinRects.append((ll, ur, metalLayer))
126 |
127 | if(orientation == 'E'):
128 | # consider origin to be top left corner
129 | rotatedOrigin = (origin[0], origin[1]+cellWidth)
130 | for shape in port_info.shapes:
131 | ulx = rotatedOrigin[0] + shape.points[0][1]*l2d
132 | uly = rotatedOrigin[1] - shape.points[0][0]*l2d
133 | lrx = rotatedOrigin[0] + shape.points[1][1]*l2d
134 | lry = rotatedOrigin[1] - shape.points[1][0]*l2d
135 |
136 | ll = (ulx, lry)
137 | ur = (lrx, uly)
138 | listOfPinRects.append((ll, ur, metalLayer))
139 |
140 | if(orientation == 'FN'):
141 | # consider origin to be bottom right corner
142 | rotatedOrigin = (origin[0]+cellWidth, origin[1])
143 | for shape in port_info.shapes:
144 | lrx = rotatedOrigin[0] - shape.points[0][0]*l2d
145 | lry = rotatedOrigin[1] + shape.points[0][1]*l2d
146 | ulx = rotatedOrigin[0] - shape.points[1][0]*l2d
147 | uly = rotatedOrigin[1] + shape.points[1][1]*l2d
148 |
149 | ll = (ulx, lry)
150 | ur = (lrx, uly)
151 | listOfPinRects.append((ll, ur, metalLayer))
152 |
153 | if(orientation == 'FS'):
154 | # consider origin to be upper left corner
155 | rotatedOrigin = (origin[0], origin[1]+cellHeight)
156 | for shape in port_info.shapes:
157 | lrx = rotatedOrigin[0] + shape.points[1][0]*l2d
158 | lry = rotatedOrigin[1] - shape.points[1][1]*l2d
159 | ulx = rotatedOrigin[0] + shape.points[0][0]*l2d
160 | uly = rotatedOrigin[1] - shape.points[0][1]*l2d
161 |
162 | ll = (ulx, lry)
163 | ur = (lrx, uly)
164 | listOfPinRects.append((ll, ur, metalLayer))
165 |
166 | if(orientation == 'FW'):
167 | # consider origin to be bottom left corner
168 | rotatedOrigin = (origin[0], origin[1])
169 | for shape in port_info.shapes:
170 | llx = rotatedOrigin[0] + shape.points[0][1]*l2d
171 | lly = rotatedOrigin[1] + shape.points[0][0]*l2d
172 | urx = rotatedOrigin[0] + shape.points[1][1]*l2d
173 | ury = rotatedOrigin[1] + shape.points[1][0]*l2d
174 |
175 | ll = (llx, lly)
176 | ur = (urx, ury)
177 | listOfPinRects.append((ll, ur, metalLayer))
178 |
179 | if(orientation == 'FE'):
180 | # consider origin to be top right corner
181 | rotatedOrigin = (origin[0] + cellHeight, origin[1] + cellWidth)
182 | for shape in port_info.shapes:
183 | llx = rotatedOrigin[0] - shape.points[1][1]*l2d
184 | lly = rotatedOrigin[1] - shape.points[1][0]*l2d
185 | urx = rotatedOrigin[0] - shape.points[0][1]*l2d
186 | ury = rotatedOrigin[1] - shape.points[0][0]*l2d
187 |
188 | ll = (llx, lly)
189 | ur = (urx, ury)
190 | listOfPinRects.append((ll, ur, metalLayer))
191 |
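# Worked example for the transform above (numbers are hypothetical): with l2d = 1000, a
# 1.5 x 2.0 micron cell placed at (10000, 20000) with orientation 'S' uses
# (10000 + 1500, 20000 + 2000) = (11500, 22000) as its rotated origin, so a LEF pin
# rectangle (0.1, 0.2)-(0.3, 0.4) maps to the DEF-unit rectangle (11200, 21600)-(11400, 21800).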
192 | def getViaType(via): #method to extract the via type by its name from the LEF file
193 |
194 |     # these layer names ('met' and 'li1') have to be handled design by design.
195 | if via in lef_parser.via_dict:
196 | firstLayer = lef_parser.via_dict[via].layers[0].name
197 | secondLayer = lef_parser.via_dict[via].layers[1].name
198 | thirdLayer = lef_parser.via_dict[via].layers[2].name
199 |
200 |
201 |
202 | elif via in vias_dict_def:
203 | firstLayer = vias_dict_def[via]['LAYERS'][0]
204 | secondLayer = vias_dict_def[via]['LAYERS'][1]
205 | thirdLayer = vias_dict_def[via]['LAYERS'][2]
206 |
207 |
208 | if(lef_parser.layer_dict[firstLayer].layer_type == 'CUT'):
209 | cutLayer = firstLayer
210 |
211 |
212 | if(lef_parser.layer_dict[secondLayer].layer_type == 'CUT'):
213 | cutLayer = secondLayer
214 |
215 |
216 | if(lef_parser.layer_dict[thirdLayer].layer_type == 'CUT'):
217 | cutLayer = thirdLayer
218 |
219 |
220 |
221 |
222 | return cutLayer
223 |
224 |
225 | #method to get the resistance of a certain segment (wire or via) using its length (distance between 2 points) and info from the LEF file
226 | def get_resistance_modified(point1, point2, layer_name, via_type): #point is a list of (x, y)
227 | if(point1 == point2): #we have a via
228 | if(lef_parser.layer_dict[via_type].resistance != None):
229 | return lef_parser.layer_dict[via_type].resistance
230 | else:
231 |             return 0 # return 0 if we cannot find the target via in the LEF file.
232 | else: #we have a wire
233 | rPerSquare = lef_parser.layer_dict[layer_name].resistance[1]
234 |
235 | width = lef_parser.layer_dict[layer_name].width #width in microns
236 | wire_len = (abs(point1[0] - point2[0]) + abs(point1[1] - point2[1]))/1000 #length in microns
237 | resistance = wire_len * rPerSquare / width #R in ohms
238 |
239 | return resistance
240 |
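# Worked example (layer values are hypothetical): for a wire from (0, 0) to (2800, 0) in DEF units
# on a layer with sheet resistance 0.125 ohm/sq and width 0.14 um, the length is 2800 / 1000 = 2.8 um,
# so resistance = 2.8 * 0.125 / 0.14 = 2.5 ohm.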
241 |
242 |
243 | #method to get the capacitance of a certain segment (wire or via) using its length (distance between 2 points) and info from the LEF file
244 | def get_capacitance_modified(point1, point2, layer_name, via_type): #point is a list of (x, y)
245 | if(point1 == point2): #we have a via
246 | if(lef_parser.layer_dict[via_type].edge_cap == None):
247 | return 0
248 | else:
249 | return lef_parser.layer_dict[via_type].edge_cap
250 | else: #we have a wire
251 |
252 |
253 | if(lef_parser.layer_dict[layer_name].capacitance != None):
254 | cPerSquare = capacitanceFactor * lef_parser.layer_dict[layer_name].capacitance[1] # unit in lef is pF
255 | else:
256 |             cPerSquare = 0
257 | width = lef_parser.layer_dict[layer_name].width #width in microns
258 | length = (abs(point1[0] - point2[0]) + abs(point1[1] - point2[1]))/1000 #length in microns
259 | if(lef_parser.layer_dict[layer_name].edge_cap != None):
260 | edgeCapacitance = capacitanceFactor * lef_parser.layer_dict[layer_name].edge_cap
261 | else:
262 | edgeCapacitance = 0
263 |
264 | # the edge capacitance factor value is 1 by default
265 |         capacitance = length * cPerSquare * width + edgeCapFactor[0] * 2 * edgeCapacitance * (length + width) #capacitance in pF
266 |
267 | return capacitance
268 |
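# Worked example (layer values are hypothetical): for a 10 um long, 0.2 um wide wire with
# cPerSquare = 0.0002 pF/um^2, edge capacitance 0.00005 pF/um and edgeCapFactor = 1:
# capacitance = 10 * 0.0002 * 0.2 + 1 * 2 * 0.00005 * (10 + 0.2) = 0.0004 + 0.00102 = 0.00142 pF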
269 |
270 | #method to look for intersections between segment nodes in order to decide on creating a new node or adding to the existing capacitance
271 | def checkPinsTable(point, layer, pinsTable):
272 | flag= "new"
273 |
274 | for pin in pinsTable:
275 | locations = pin[0]
276 | for location in locations:
277 | if(location[2] == layer or (location[2] == 'met1' and layer == 'li1') or (location[2] == 'li1' and layer == 'met1')):
278 |                 if(isinstance(location[0], (str, int, float))): # location stored as bare coordinates rather than corner tuples
279 | if(point[0]==location[0] and point[1]==location[1]):
280 | flag= pin
281 | return flag
282 | else: flag= "new"
283 | else:
284 | if ((location[0][0] - 5 <= float(point[0]) <= location[1][0] + 5) and (location[0][1] - 5<= float(point[1]) <= location[1][1] + 5)):
285 | flag= pin
286 | return flag
287 | else: flag= "new"
288 |
289 | return flag
290 |
291 | #method to print all nets in the net dictionary
292 | def printSPEFNets(netsDict):
293 | for key, value in netsDict.items():
294 | printNet(netsDict, key)
295 |
296 |
297 |
298 |
299 |
300 | #method to print a particular net into SPEF format
301 | def printNet(netsDict, wireName):
302 |
303 | if(netsDict[wireName]['maxC'] > maxCap[0]):
304 | maxCapNet[0] = wireName
305 | maxCap[0] = netsDict[wireName]['maxC']
306 |
307 | if(netsDict[wireName]['maxC'] < minCap[0]):
308 | minCapNet[0] = wireName
309 | minCap[0] = netsDict[wireName]['maxC']
310 |
311 | var=('*D_NET'+" "+ wireName+" "+ str(netsDict[wireName]['maxC']))
312 | f.write(var+'\n')
313 | var=('*CONN')
314 | f.write(var+'\n')
315 | for eachConnection in netsDict[wireName]['conn']:
316 | var=(eachConnection[0]+" "+ eachConnection[1]+" "+ eachConnection[2])
317 | f.write(var+'\n')
318 |
319 |
320 | var=('*CAP')
321 | f.write(var+'\n')
322 |
323 |
324 | for key,value in bigCapacitanceTable[wireName].items():
325 | var=(str(capCounter[0]) +" "+ str(key) +" "+ str(value))
326 | f.write(var+'\n')
327 | capCounter[0] += 1
328 |
329 | var=('*RES')
330 | f.write(var+'\n')
331 | for eachSegment in netsDict[wireName]['segments']:
332 | var=(str(resCounter[0])+" "+ str(eachSegment[0])+" "+ str(eachSegment[1])+" "+ str(eachSegment[2]))
333 | f.write(var+'\n')
334 | resCounter[0] += 1
335 | var=('*END\n')
336 | f.write(var+'\n')
337 |
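# Illustrative *D_NET block produced by printNet (net names, node names and values are hypothetical;
# the cap/res counters are global and keep increasing across nets):
#   *D_NET *42 0.00142
#   *CONN
#   *I U100:A I
#   *I U101:X O
#   *CAP
#   12 *42:1 0.00071
#   13 *42:2 0.00071
#   *RES
#   9 *42:1 *42:2 2.5
#   *END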
338 |
339 |
340 |
341 |
342 |
343 | # main starts here:
344 |
345 |
346 | # create all the data structures that we will be using
347 | listOfLocations = []
348 | pinsTable = []
349 | segmentsList = []
350 | bigPinsTable={}
351 | bigSegmentsTable = {}
352 | bigCapacitanceTable = {}
353 | netsDict = {}
354 | vias_dict_def = {}
355 |
356 | edgeCapFactor = [1]
357 | wireModel = 'PI'
358 |
359 |
360 | # this section parses the command-line arguments, allowing the script to run directly from a terminal
361 | if(len(sys.argv) < 5):
362 | if(len(sys.argv) < 4):
363 | if(len(sys.argv) < 3):
364 |             sys.exit("Arguments should be passed: python <main_file>.py <lef_file>.lef <def_file>.def")
365 | else:
366 | lef_file_name = sys.argv[1]
367 | def_file_name = sys.argv[2]
368 | else:
369 | lef_file_name = sys.argv[1]
370 | def_file_name = sys.argv[2]
371 | wireModel = sys.argv[3]
372 |
373 | else:
374 | lef_file_name = sys.argv[1]
375 | def_file_name = sys.argv[2]
376 | wireModel = sys.argv[3]
377 | edgeCapFactor[0] = float(sys.argv[4])
378 |
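# Example invocation (file names are illustrative):
#   python3 main.py design.lef design.def PI 1.0
# The wire model defaults to 'PI' and the edge capacitance factor to 1 when the optional
# third and fourth arguments are omitted.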
379 |
380 |
381 | # convert the DEF file to a format the parser can read (a *_new.def copy is written)
382 | covnertToDef57(def_file_name)
383 |
384 | # We had to modify the lef parser to ignore the second parameter for the offset
385 | # since our files provide only 1 value
386 | lef_parser = LefParser(lef_file_name)
387 | lef_parser.parse()
388 |
389 | # read the updated def
390 | def_parser = DefParser(def_file_name[:-4] + '_new.def')
391 | def_parser.parse()
392 |
393 | extractViasFromDef(def_parser.vias)
394 |
395 | lefUnits = extractLefUnits(lef_file_name)
396 |
397 | # l2d is the conversion factor between the LEF scale (microns) and the DEF database units
398 | l2d = 1000 # an initial value
399 | if(def_parser.scale != None):
400 | l2d = float(def_parser.scale)
401 |
402 |
403 |
404 | # Get a conversion factor so that the unit of capacitance is PICOFARADS
405 | capacitanceFactor = 1
406 | if(lefUnits["CAPACITANCE"] == "NANOFARADS"):
407 | capacitanceFactor = 1e3
408 |
409 | elif(lefUnits["CAPACITANCE"] == "PICOFARADS"):
410 | capacitanceFactor = 1
411 |
412 | elif(lefUnits["CAPACITANCE"] == "FEMTOFARADS"):
413 | capacitanceFactor = 1e-3
414 |
415 |
416 | print("Parameters Used:")
417 | print("Edge Capacitance Factor:", edgeCapFactor[0])
418 | print("Wire model:", wireModel, '\n')
419 |
420 | #creation of the name map
421 | map_of_names = remap_names()
422 |
423 | for net in def_parser.nets:
424 |     #traversing all nets in the def file to extract segment information
425 |
426 | # a list of the connections in the net
427 | conList = []
428 | # a list of all pins referenced in the net, including the internal nodes between each 2 segments
429 | pinsTable=[]
430 | segmentsList = []
431 |
432 | # generate the conn data structure for conn section
433 |     if(net.name == "*518"): # debug hook for inspecting a specific net
434 | #print("test")
435 | testingMode = 1
436 | for con in net.comp_pin:
437 |         #check whether the pin is an external I/O pin (*P) or an internal cell pin (*I)
438 | current_pin = []
439 | locationsOfCurrentPin = []
440 |
441 |         #skip the terminating ';' entry
442 | if(con[0] != ';'):
443 | if(con[0] == "PIN"):
444 | current_pin.append("*P")
445 | current_pin.append(con[1])
446 | x = def_parser.pins.get_pin(con[1])
447 | if(x.direction == "INPUT"):
448 | current_pin.append("I")
449 | else:
450 | current_pin.append("O")
451 |
452 | # these are used for the pinsTable
453 | pinLocation = def_parser.pins.pin_dict[con[1]].placed
454 | metalLayer = def_parser.pins.pin_dict[con[1]].layer.name
455 | locationsOfCurrentPin.append(((pinLocation[0], pinLocation[1]), (pinLocation[0], pinLocation[1]), metalLayer))
456 |
457 | else: #it is an internal pin, check for input or output
458 | current_pin.append("*I")
459 | current_pin.append(con[0]+":"+con[1])
460 | cell_type = def_parser.components.comp_dict[con[0]].macro
461 |
462 |             # some pins do not have a DIRECTION attribute in the LEF
463 |             # check first whether this pin defines one
464 |
465 | pinInfo = lef_parser.macro_dict[cell_type].pin_dict[con[1]]
466 |
467 | # check if it has a direction
468 | if 'DIRECTION' in pinInfo.info:
469 | direction = lef_parser.macro_dict[cell_type].pin_dict[con[1]].info["DIRECTION"]
470 | else:
471 |                 # fall back to a heuristic: treat the pin as an input if the cell name contains 'in'
472 |                 if("in" in cell_type):
473 | direction = "INPUT"
474 | else:
475 | direction = "OUTPUT"
476 |
477 | if(direction == "INPUT"):
478 | current_pin.append("I")
479 | else:
480 | current_pin.append("O")
481 |
482 | #this is used for the pins table
483 | metalLayerInfo = lef_parser.macro_dict[cell_type].pin_dict[con[1]].info
484 | metalLayer = metalLayerInfo['PORT'].info['LAYER'][0].name
485 | getPinLocation(con[0], con[1], metalLayer,locationsOfCurrentPin)
486 |
487 |
488 |         # we append (list of pin locations, cellName, pinName, metalLayer)
489 | pinsTable.append((locationsOfCurrentPin, con[0], con[1],metalLayer))
490 | conList.append(current_pin)
491 |
492 |
493 |
494 | counter = 1
495 |
496 |     # maps node name -> accumulated capacitance; the value grows when more than 1 segment ends at the same node
497 | currentNodeList = {}
498 | for segment in net.routed:
499 | if(segment.end_via == 'RECT'):
500 | continue
501 | #traversing all segments in a certain net to get all their information
502 | for it in range (len(segment.points)):
503 |             ##traversing all points in a certain segment, classifying them as starting and ending points and
504 |             #checking for their existence in the pinsTable, using the checkPinsTable method
505 | last = 0
506 | if(it < (len(segment.points) - 1)):
507 | spoint = segment.points[it]
508 | epoint = segment.points[it+1]
509 | else: #last point in the line (either via or no via)
510 | spoint = segment.points[it]
511 | epoint = segment.points[it]
512 | last = 1
513 | #if we are at the last point and there is no via, then ignore the point
514 | #as it has already been considered with the previous point
515 | if((segment.end_via == ';' or segment.end_via == None)):
516 | continue
517 |
518 | sflag=checkPinsTable(spoint, segment.layer, pinsTable)
519 |
520 | if( sflag != "new"):
521 | snode = sflag
522 | else:
523 | snode = []
524 | snode.append([((spoint[0], spoint[1]), (spoint[0], spoint[1]), segment.layer)])
525 | snode.append(str(net.name) )
526 | snode.append(str(counter))
527 | snode.append(str(segment.layer))
528 | counter += 1
529 | pinsTable.append(snode)
530 |
531 |
532 | if ((last) and (segment.end_via != ';' and segment.end_via != None)):
533 |                 #special handling for vias: get the via type through the via name
534 | myVia = segment.end_via
535 | if(myVia[-1] == ';'):
536 | myVia = myVia[0:-1]
537 |
538 |
539 | if myVia in lef_parser.via_dict:
540 | firstLayer = lef_parser.via_dict[myVia].layers[0].name
541 | secondLayer = lef_parser.via_dict[myVia].layers[1].name
542 | thirdLayer = lef_parser.via_dict[myVia].layers[2].name
543 |
544 |
545 |
546 | elif myVia in vias_dict_def:
547 |
548 | firstLayer = vias_dict_def[myVia]['LAYERS'][0]
549 | secondLayer= vias_dict_def[myVia]['LAYERS'][1]
550 | thirdLayer = vias_dict_def[myVia]['LAYERS'][2]
551 |
552 |
553 | if lef_parser.layer_dict[firstLayer].layer_type == 'CUT':
554 | cutLayer = firstLayer
555 | first = secondLayer
556 | second = thirdLayer
557 |
558 | if(lef_parser.layer_dict[secondLayer].layer_type == 'CUT'):
559 | cutLayer = secondLayer
560 | first = firstLayer
561 | second = thirdLayer
562 |
563 | if(lef_parser.layer_dict[thirdLayer].layer_type == 'CUT'):
564 | cutLayer = thirdLayer
565 | first = firstLayer
566 | second = secondLayer
567 |
568 |
569 | if(first == segment.layer):
570 | choose = 2 # choose second layer in case of creating end node
571 | eflag=checkPinsTable(epoint, second, pinsTable)
572 | else:
573 | choose = 1 # choose first layer in case of creating end node
574 | eflag=checkPinsTable(epoint, first, pinsTable)
575 |
576 |
577 |
578 |
579 | else:
580 | eflag=checkPinsTable(epoint, segment.layer, pinsTable)
581 |
582 | if( eflag != "new"):
583 | enode = eflag
584 | else:
585 | enode = []
586 | if(last):
587 | # if it is a VIA and starting point was on second layer
588 | if(choose == 1):
589 | enode.append([((epoint[0], epoint[1]), (epoint[0], epoint[1]), first)])
590 | enode.append(str(net.name) )
591 | enode.append(str(counter))
592 | enode.append(first)
593 | else:
594 | enode.append([((epoint[0], epoint[1]), (epoint[0], epoint[1]), second)])
595 | enode.append(str(net.name) )
596 | enode.append(str(counter))
597 | enode.append(second)
598 | else:
599 | enode.append([((epoint[0], epoint[1]), (epoint[0], epoint[1]), segment.layer)])
600 | enode.append(str(net.name) )
601 | enode.append(str(counter))
602 | enode.append(str(segment.layer))
603 | counter += 1
604 | pinsTable.append(enode)
605 |
606 | seg=[]
607 |
608 |             #TODO: pass segment.end_via to the function to be used if the 2 points are equal
609 |
610 |             if(segment.end_via != None and segment.end_via != ';'):
611 | via_type = getViaType(segment.end_via)
612 | resistance = get_resistance_modified(spoint, epoint, segment.layer, via_type)
613 | capacitance = get_capacitance_modified(spoint, epoint, segment.layer, via_type)
614 | else:
615 | resistance = get_resistance_modified(spoint, epoint, segment.layer, 'via') # dummy via
616 | capacitance = get_capacitance_modified(spoint, epoint, segment.layer, 'via') #dummy via
617 |
618 | # the name of the first node of the segment
619 | currentSNodeName = str(snode[1]) + ':' + str(snode[2])
620 | # the name of the second node of the segment
621 | currentENodeName = str(enode[1]) + ':' + str(enode[2])
622 |
623 |
624 | # put the capacitance for the current node.
625 | existsS = 0
626 | existsE = 0
627 |
628 | if(wireModel == 'PI'):
629 | for key in currentNodeList:
630 | if(currentSNodeName == key):
631 | existsS = 1
632 | if(currentENodeName == key):
633 | existsE = 1
634 |
635 |
636 |
637 |                 # these 2 if-else statements add half the capacitance at each of the endpoints of the segment
638 | # to use a pi model
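                # e.g. a segment with capacitance 0.004 pF contributes 0.002 pF to each of its two
                # end nodes under the PI model; under the L model (else branch below) the full
                # 0.004 pF is added to the starting node only.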
639 | if(existsS == 1): #adding the capacitance to the previous capacitances in an existing node
640 | currentNodeList[currentSNodeName] += 0.5 *capacitance
641 | else: #assigning the new node capacitance
642 | currentNodeList[currentSNodeName] = 0.5 * capacitance
643 |
644 | if(existsE == 1): #adding the capacitance to the previous capacitances in an existing node
645 | currentNodeList[currentENodeName] += 0.5*capacitance
646 | else: #assigning the new node capacitance
647 | currentNodeList[currentENodeName] = 0.5 * capacitance
648 |
649 |
650 | if(snode[1] != 'PIN'):
651 | seg.append(snode[1] + ':' + snode[2])
652 | else:
653 | seg.append(snode[2])
654 | if(enode[1] != 'PIN'):
655 | seg.append(enode[1] + ':' + enode[2])
656 | else:
657 | seg.append(enode[2])
658 |
659 | # use the L wire model. Essentially, we will add the capacitance of the segment
660 | # at the starting node
661 | else:
662 |
663 | for key in currentNodeList:
664 | if(currentSNodeName == key):
665 | existsS = 1
666 |
667 |                 # this if-else statement adds the full capacitance of the segment at its starting node
668 |                 # to use an L model
669 | if(existsS == 1): #adding the capacitance to the previous capacitances in an existing node
670 | currentNodeList[currentSNodeName] += capacitance
671 | else: #assigning the new node capacitance
672 | currentNodeList[currentSNodeName] = capacitance
673 |
674 |
675 | if(snode[1] != 'PIN'):
676 | seg.append(snode[1] + ':' + snode[2])
677 | else:
678 | seg.append(snode[2])
679 | if(enode[1] != 'PIN'):
680 | seg.append(enode[1] + ':' + enode[2])
681 | else:
682 | seg.append(enode[2])
683 |
684 |
685 |
686 | seg.append(resistance)
687 | seg.append(capacitance)
688 | segmentsList.append(seg)
689 |
690 |
691 |     ##appending the pins, segment resistances and node capacitances into the big table dictionaries that will
692 |     #be used for printing the final SPEF
693 | bigPinsTable[net.name] = pinsTable
694 | bigSegmentsTable[net.name] = segmentsList
695 | bigCapacitanceTable[net.name] = currentNodeList
696 |
697 |
698 | sumC=0
699 | lists= {}
700 | for k in currentNodeList:
701 | sumC+=currentNodeList[k]
702 | lists["conn"]=conList
703 | lists['maxC']=sumC
704 | lists['segments']=segmentsList
705 | netsDict[net.name]= lists
706 |
707 |
708 | #method for creating the header of the SPEF file
709 | def printSPEFHeader():
710 | f.write('*SPEF "IEEE 1481-1998"'+'\n')
711 | f.write('*DESIGN "'+ def_parser.design_name + '"'+'\n')
712 | f.write('*DATE "' + now.strftime("%a %b %d %H:%M:%S %Y") + '"\n')
713 | f.write('*VENDOR "AUC CSCE Department"\n')
714 | f.write('*PROGRAM "SPEF Extractor"\n')
715 | f.write('*VERSION "1.0"\n')
716 | f.write('*DESIGN_FLOW "PIN_CAP NONE"'+'\n')
717 | f.write('*DIVIDER ' + def_parser.dividerchar[1] +'\n')
718 | f.write('*DELIMITER :' + '\n')
719 | f.write('*BUS_DELIMITER ' + def_parser.busbitchars[1:3] +'\n')
720 | f.write('*T_UNIT 1.00000 NS' +'\n')
721 | f.write('*C_UNIT 1.00000 PF'+'\n')
722 | f.write('*R_UNIT 1.00000 OHM'+'\n')
723 | f.write('*L_UNIT 1.00000 HENRY'+'\n')
724 | f.write('\n'+'\n')
725 |
726 |
727 | print("RC Extraction is done")
728 |
729 |
730 | #writing into SPEF file
731 | capCounter = {}
732 | capCounter[0] = 0
733 | resCounter = {}
734 | resCounter[0] = 0
735 |
736 | # these are used to track the nets with the maximum and minimum total capacitance
737 | maxCap = [0]
738 | maxCapNet = ["*0"]
739 | minCap = [1]
740 | minCapNet = ["*0"]
741 |
742 | f = open(str(def_file_name[:-4]) + ".spef","w+", newline='\n')
743 | print("Start writing SPEF file")
744 | printSPEFHeader()
745 | printNameMap(map_of_names)
746 | printSPEFNets(netsDict)
747 | f.close()
748 |
749 |
750 | content = open(str(def_file_name[:-4]) + ".spef", "r+").read()
751 | newContent = content.replace('<', '[') # replace angle-bracket bus bits with square brackets for SPEF
752 | newContent = newContent.replace('>', ']')
753 |
754 | f = open(str(def_file_name[:-4]) + ".spef","w+", newline='\n')
755 | f.write(newContent)
756 | os.remove(def_file_name[:-4] + '_new.def')
757 | print("Writing SPEF is done")
758 |
--------------------------------------------------------------------------------