#!/usr/bin/env python3
# Copyright 2020 The Skywater PDK Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import glob
import os
import pprint
import re
import subprocess
import sys
import traceback
from collections import defaultdict
from shutil import copyfile, move
from pathlib import Path
from common import lib_extract_from_path, version_extract_from_path, lib_extract_from_name, get_cell_directory, extract_version_and_lib_from_path, copy_file_to_output
from common import convert_libname, convert_cell_fullname, convert_pinname
from verilog2full import Copyright_header
superdebug = False
debug = True
debug_print = lambda x: print(x) if debug else 0
Copyright_header = """/*\n
* Copyright 2020 The SkyWater PDK Authors\n
*\n
* Licensed under the Apache License, Version 2.0 (the "License");\n
* you may not use this file except in compliance with the License.\n
* You may obtain a copy of the License at\n
*\n
* https://www.apache.org/licenses/LICENSE-2.0\n
*\n
* Unless required by applicable law or agreed to in writing, software\n
* distributed under the License is distributed on an "AS IS" BASIS,\n
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n
* See the License for the specific language governing permissions and\n
* limitations under the License.\n
*/\n
\n
"""
def prepend_copyright(filename):
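    """Prepend the Apache copyright header to *filename*, removing any existing copy first."""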
with open(filename, 'r+') as f:
content = f.read()
content = content.replace(Copyright_header, '')
f.seek(0, 0)
f.write(Copyright_header + content)
def _magic_tcl_header(ofile, gdsfile):
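    """Write the common preamble of a generated Magic Tcl script: disable DRC and read the GDS read-only at its original scale."""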
print('#!/bin/env wish', file=ofile)
print('drc off', file=ofile)
print('gds readonly true', file=ofile)
print('gds rescale false', file=ofile)
print('tech unlock *', file=ofile)
print('cif warning default', file=ofile)
print('gds read ' + gdsfile, file=ofile)
def magic_get_cells(input_gds, input_techfile):
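    """Read the GDS in Magic and return run_magic's per-cell output, which lists every cell found."""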
destdir, gdsfile = os.path.split(input_gds)
tcl_path = destdir + '/get_cells.tcl'
# Generate a tcl script for Magic
with open(tcl_path, 'w') as ofile:
_magic_tcl_header(ofile, gdsfile)
print('quit -noprompt', file=ofile)
return run_magic(destdir, tcl_path, input_techfile)
def magic_rename_cells(input_gds, input_techfile, cell_list):
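    """Rename cells inside the GDS using Magic; cell_list is a sequence of (src_cell, dst_cell) pairs."""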
destdir, gdsfile = os.path.split(input_gds)
tcl_path = destdir + '/rename_cells.tcl'
# Generate a tcl script for Magic
with open(tcl_path, 'w') as ofile:
_magic_tcl_header(ofile, gdsfile)
# Rename the cells
for src_cell, dst_cell in cell_list:
escaped_src_cell = '{'+src_cell+'}'
escaped_dst_cell = '{'+dst_cell+'}'
print(f'load {escaped_src_cell}', file=ofile)
if src_cell != dst_cell:
print(f'cellname rename {escaped_src_cell} {escaped_dst_cell}', file=ofile)
print(f'save {escaped_dst_cell}', file=ofile)
print('quit -noprompt', file=ofile)
return run_magic(destdir, tcl_path, input_techfile)
def magic_split_gds(input_gds, input_techfile, cell_list):
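    """Use Magic to write each cell in cell_list out as its own .gds and .lef file."""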
destdir, gdsfile = os.path.split(input_gds)
tcl_path = destdir + '/split_gds.tcl'
# Generate a tcl script for Magic
with open(tcl_path, 'w') as ofile:
_magic_tcl_header(ofile, gdsfile)
# Write out the cells
for cell in cell_list:
escaped_cell = '{'+cell+'}'
print(f'load {escaped_cell}', file=ofile)
print(f'gds write {escaped_cell}', file=ofile)
print(f'lef write {escaped_cell}', file=ofile)
print(f'save {escaped_cell}', file=ofile)
print('quit -noprompt', file=ofile)
return run_magic(destdir, tcl_path, input_techfile)
FATAL_ERROR = re.compile('Error parsing')
READING_REGEX = re.compile('Reading "([^"]*)".')
def run_magic(destdir, tcl_path, input_techfile):
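    """Run Magic in batch mode on tcl_path and return its output grouped by cell.

    Output lines are grouped using the 'Reading "<cell>"' messages; fatal
    parse errors or a non-zero exit status raise SystemError.
    """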
mproc = subprocess.run(['magic', '-dnull', '-noconsole',
'-T', input_techfile, os.path.abspath(tcl_path)],
stdin = subprocess.DEVNULL,
stdout = subprocess.PIPE,
stderr = subprocess.STDOUT,
cwd = destdir,
universal_newlines = True)
assert mproc.stdout
max_cellname_width = 0
output_by_cells = [('', [])]
fatal_errors = []
for line in mproc.stdout.splitlines():
if line.startswith('CIF file read warning: Input off lambda grid by 1/2; snapped to grid'):
continue
m = FATAL_ERROR.match(line)
if m:
fatal_errors.append(line)
m = READING_REGEX.match(line)
if m:
cell_name = m.group(1)
max_cellname_width = max(max_cellname_width, len(cell_name))
output_by_cells.append((cell_name, []))
output_by_cells[-1][-1].append(line)
for cell, lines in output_by_cells:
prefix = "magic " + cell.ljust(max_cellname_width) + ':'
for l in lines:
is_error = 'rror' in l
if superdebug or (debug and is_error):
print(prefix, l)
assert not mproc.stderr, mproc.stderr
if fatal_errors:
msg = 'ERROR: Magic had fatal errors in output:\n' + "\n".join(fatal_errors)
raise SystemError(msg)
if mproc.returncode != 0:
msg = 'ERROR: Magic exited with status ' + str(mproc.returncode)
print(msg)
raise SystemError(msg)
return output_by_cells
class Rewriter:
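    """Track and perform the renaming of GDS library, structure, instance, pin and
    generic string records from the old s8 naming scheme to the sky130 one."""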
other_replacements = [
(re.compile('S8PIR[_-]10R', re.I),
lambda m: 'SKY130'),
(re.compile('S8', re.I),
lambda m: 'SKY130'),
]
def __init__(self, original_path):
self._cache_libname = None
self._cache_cellname = {}
self._cache_string = {}
self.structures_reset()
self.old_lib, self.new_lib, self.version = extract_version_and_lib_from_path(original_path)
self.rewrite_string = self.rewrite_generic_string
def structures(self):
return dict(self._structures)
def structures_usecount(self):
structs = list(self._structures_instances.items())
max_count = max(c for n, c in structs)
        structs.sort(key=lambda x: (max_count-x[1], x[0]))
return structs
def structures_reset(self):
self._structures = {}
self._structures_instances = {}
self._structures_pins = {}
self._structure_last = None
def replacements(self):
r = []
r.append((self._cache_libname, self.new_lib))
r.extend(self._cache_cellname.items())
r.extend(self._cache_string.items())
return dict(r)
def rewrite_library_name(self, s):
if self._cache_libname is not None:
assert s == self._cache_libname, (s, self._cache_libname)
else:
self._cache_libname = s
return self.new_lib
def set_library_name(self, s):
return
def _convert_cell(self, old_name, is_real_structure=True):
if '$$' in old_name:
old_cellname, number = old_name.split('$$', 1)
self.rewrite_structure_name(old_cellname, False)
new_name = convert_cell_fullname(old_name, self.new_lib)
assert new_name.startswith(self.new_lib), (new_name, self.new_lib)
assert old_name not in self._cache_cellname, (old_name, self._cache_cellname)
self._cache_cellname[old_name] = new_name
def pins(self, s):
return self._structures_pins[s]
def rewrite_structure_name(self, s, is_real_structure=True):
assert (not is_real_structure) or (s in self._structures), s
if s not in self._cache_cellname:
self._convert_cell(s, is_real_structure)
ns = self._cache_cellname[s]
if is_real_structure:
assert s in self._structures, repr((s, ns))+'\n'+pprint.pformat(self._structures)
            assert self._structures[s] is None, repr((s, self._structures[s]))+'\n'+pprint.pformat(self._structures)
self._structures[s] = ns
return ns
def set_structure_name(self, sn):
assert sn not in self._structures, sn+'\n'+pprint.pformat(self._structures)
if debug:
if self._structure_last != None:
print("Clearing current structure (was", self._structure_last+")")
print()
self._structures[sn] = None
self._structures_instances[sn] = 0
self._structures_pins[sn] = []
self._structure_last = sn
if debug:
print()
print("Setting current structure to", self._structure_last)
def set_instance_struct(self, s):
assert s in self._structures, (s, self._structures)
self._structures_instances[s] += 1
if debug:
if self._structure_last != None:
print("Clearing current structure (was", self._structure_last+")")
print()
self._structure_last = None
def rewrite_instance_struct(self, s):
assert s in self._cache_cellname, (s, self._cache_cellname)
return self._cache_cellname[s]
def rewrite_pin_name(self, s):
assert self._structure_last is not None, s
pn = convert_pinname(s.upper())
print(" Rewriting pin on", self._structure_last, "from", repr(s), "to", repr(pn))
self._structures_pins[self._structure_last].append(pn)
return pn
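    # Cell references that point at a different source library than the one
    # being converted; these are mapped to the intended cell name by hand in
    # rewrite_generic_string().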
overrides = {
('sky130_fd_sc_hvl','scs8ls', 'inv_2'): 'scs8hvl_inv_2',
('sky130_fd_sc_hvl','scs8ls', 'inv_4'): 'scs8hvl_inv_4',
('sky130_fd_sc_ls', 'scs8ms', 'tapvgndnovpb_1'): 'scs8ls_tapvgndnovpb_1',
('sky130_fd_sc_ls', 'scs8lp', 'diode_2'): 'scs8ls_diode_2',
('sky130_fd_sc_ls', 'scs8lp', 'tap_2'): 'scs8ls_tap_2',
('sky130_fd_sc_ls', 'scs8ms', 'tapvgnd2_1'): 'scs8ls_tapvgnd2_1',
('sky130_fd_sc_ls', 'scs8ms', 'tapvgnd_1'): 'scs8ls_tapvgnd_1',
('sky130_fd_sc_ls', 'scs8ms', 'tapvpwrvgnd_1'): 'scs8ls_tapvpwrvgnd_1',
('sky130_fd_sc_ms', 'scs8ls', 'clkdlyinv3sd1_1'): 'scs8ms_clkdlyinv3sd1_1',
('sky130_fd_sc_ms', 'scs8ls', 'clkdlyinv3sd2_1'): 'scs8ms_clkdlyinv3sd2_1',
('sky130_fd_sc_ms', 'scs8ls', 'clkdlyinv3sd3_1'): 'scs8ms_clkdlyinv3sd3_1',
('sky130_fd_sc_ms', 'scs8ls', 'clkdlyinv5sd1_1'): 'scs8ms_clkdlyinv5sd1_1',
('sky130_fd_sc_ms', 'scs8lp', 'dlygate4s15_1'): 'scs8ms_dlygate4s15_1',
('sky130_fd_sc_ms', 'scs8ls', 'tap_1'): 'scs8ms_tap_1',
('sky130_fd_sc_ms', 'scs8lp', 'tap_2'): 'scs8ms_tap_2',
}
def rewrite_generic_string(self, old_str):
if old_str in self._cache_string:
return self._cache_string[old_str]
# Is this a simple cell name string?
if old_str in self._cache_cellname:
cell_fullname = self._cache_cellname[old_str]
lib_name, cell_name = cell_fullname.split('__', 1)
print("Rewriting string (cell name) from", repr(old_str), "to", repr(cell_name))
return cell_name
# Check this isn't for a different library....
ext_libname, ext_cellname = lib_extract_from_name(old_str)
okey = (self.new_lib, ext_libname, ext_cellname)
if okey in self.overrides:
override = self.overrides[okey]
print('Overriding {} with {}'.format(old_str, override))
return self.rewrite_generic_string(override)
elif (self.new_lib, ext_libname) == ('sky130_fd_sc_hdll', 'scs8hd'):
override = 'scs8hdll_'+ext_cellname
print('Overriding {} with {}'.format(old_str, override))
return self.rewrite_generic_string(override)
else:
assert ext_libname is None or ext_libname == self.old_lib, (
old_str, ext_libname, ext_cellname, self.old_lib)
# Does this string contain a cell name?
new_str = old_str
for old_cellname, new_cellname in self._cache_cellname.items():
if old_cellname in new_str:
new_str = new_str.replace(old_cellname, new_cellname)
        # Does this contain other things that need to be replaced?
for regex, rep in self.other_replacements:
new_str = regex.sub(rep, new_str)
self._cache_string[old_str] = new_str
print("Rewriting string from", repr(old_str), "to", repr(new_str))
return new_str
def set_string_type(self, i):
if i == 5:
if superdebug:
print("Next string is a pin name on", self._structure_last)
self.rewrite_string = self.rewrite_pin_name
else:
if superdebug:
print("Next string is a generic string")
self.rewrite_string = self.rewrite_generic_string
def replace_gds_strings(gds_filename, call_rewrite_on_rtypes, rewriter):
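    """Rewrite string records of the given record types in a GDS file, in place.

    The raw GDSII stream is walked record by record: each record starts with a
    2-byte big-endian length (which includes the 4-byte header), a 1-byte
    record type and a 1-byte data type. String payloads (data type 6) are
    passed through the rewriter and the record is re-sized if the replacement
    changes its length.
    """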
source = gds_filename
dest = gds_filename
sourcedir, gdsinfile = os.path.split(source)
destdir, gdsoutfile = os.path.split(dest)
with open(source, 'rb') as ifile:
gdsdata = bytearray(ifile.read())
# If we are rewriting the structure names, we need to rewrite the instances
# which point to the structure.
if 'structure_name' in call_rewrite_on_rtypes:
call_rewrite_on_rtypes.append('instance_struct')
rtype_mapping = {
2: 'library_name', # libname
6: 'structure_name', # strname - Structure Definition Name
18: 'instance_struct',# sname - Instance's structure
        22: 'string_type', # Indicates what the next string is in reference to.
25: 'string', # string
}
rtype_rmapping = {v:k for k,v in rtype_mapping.items()}
call_set_on_rtype = ['structure_name', 'instance_struct', 'string_type']
for r in call_rewrite_on_rtypes:
assert r in rtype_rmapping, r
datalen = len(gdsdata)
if superdebug:
print('Original data length = ' + str(datalen))
dataptr = 0
while dataptr < datalen:
        # Read stream records one at a time, rewriting string payloads as needed.
bheader = gdsdata[dataptr:dataptr + 2]
reclen = int.from_bytes(bheader, 'big')
newlen = reclen
# The GDS files seem to occasionally end up with trailing zero bytes.
if newlen == 0:
if debug:
                print("{:10d} (of {:10d} - {:10d} left)".format(dataptr, datalen, datalen-dataptr), 'Found zero-length record at this position in', source)
if superdebug:
print('Remaining data', repr(gdsdata[dataptr:]))
for i in range(dataptr, datalen):
if gdsdata[i] != 0:
raise SystemError('Found non-zero pad byte at {} ({}): {}'.format(i, hex(i), repr(gdsdata[i])))
break
rtype = gdsdata[dataptr + 2]
rtype_name = rtype_mapping.get(rtype, '??? - {}'.format(rtype))
datatype = gdsdata[dataptr + 3]
# FIXME: Hack to use different method for pin names...
if datatype == 2 and rtype_name in call_set_on_rtype:
assert datatype == 2, (rtype, datatype)
value = int.from_bytes(gdsdata[dataptr+4:dataptr+6], 'big')
if superdebug:
print(
"{:10d} (of {:10d} - {:10d} left)".format(dataptr, datalen, datalen-dataptr),
'Record type = {:15s} '.format(rtype_name),
value,
)
getattr(rewriter, 'set_'+rtype_name)(value)
# Datatype 6 is STRING
if datatype == 6:
bstring = gdsdata[dataptr + 4: dataptr + reclen]
if bstring[-1] == 0:
# Was original string padded with null byte? If so,
# remove the null byte.
decoded = bstring[:-1].decode('ascii')
else:
decoded = bstring.decode('ascii')
if rtype_name in call_set_on_rtype:
getattr(rewriter, 'set_'+rtype_name)(decoded)
if rtype_name in call_rewrite_on_rtypes:
skipped = False
repstring = getattr(rewriter, 'rewrite_'+rtype_name)(decoded)
else:
skipped = True
repstring = decoded
assert repstring is not None
changed = (decoded != repstring)
if superdebug:
print(
"{:10d} (of {:10d} - {:10d} left)".format(dataptr, datalen, datalen-dataptr),
'Record type = {:15s} '.format(rtype_name),
'Skipped = {:5s}'.format(str(skipped)),
end=" ",
)
if changed:
print(repr(decoded), '->', repr(repstring))
else:
print(repr(decoded), '==', repr(repstring))
brepstring = repstring.encode('ascii')
newlen = len(brepstring) + 4
# Record sizes must be even
if newlen % 2 != 0:
brepstring += b'\x00'
newlen += 1
if changed:
#before = gdsdata[0:dataptr]
#after = gdsdata[dataptr + reclen:]
bnewlen = newlen.to_bytes(2, byteorder='big')
brtype = rtype.to_bytes(1, byteorder='big')
bdatatype = datatype.to_bytes(1, byteorder='big')
# Assemble the new record
newrecord = bnewlen + brtype + bdatatype + brepstring
# Reassemble the GDS data around the new record
#gdsdata = before + newrecord[0:newlen] + after
gdsdata[dataptr:dataptr+reclen] = newrecord[0:newlen]
# Adjust the data end location
datalen += (newlen - reclen)
# Advance the pointer past the data
dataptr += newlen
with open(dest, 'wb') as ofile:
ofile.write(gdsdata)
def strings_get(pathname):
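    """Count the printable strings in the file using `strings | sort | uniq -c`."""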
string_counts = {}
lines = subprocess.check_output(
"strings {} | sort | uniq -c".format(pathname), shell=True).decode('utf-8').splitlines()
for l in lines:
c, s = l.strip().split(' ', 1)
string_counts[s] = int(c)
return string_counts
def strings_print(h, strings):
max_len = max(len(s) for s in strings.keys())
max_count = max(strings.values())
print()
print(h)
print("---")
for s, c in sorted(strings.items(), key=lambda x: (max_count-x[-1], x[0])):
print(" ", s.rjust(max_len)+':', c)
print("---")
def filemain(input_path, temp_dir, techfile, final_dir):
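    """Convert a single GDS file: rewrite names in a temporary copy, record the
    rewrites and pin lists, split the result into per-cell GDS/LEF files with
    Magic, and copy everything into final_dir."""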
assert not os.path.exists(temp_dir), temp_dir+" exists!"
os.makedirs(temp_dir)
global debug_print
tmp_gds = temp_dir + '/input.gds'
copyfile(input_path, tmp_gds)
rewriter = Rewriter(input_path)
# First rewrite the cell names
if debug:
print()
print("Rewriting library and structure names")
print("-------------------------------------")
replace_gds_strings(tmp_gds, ['library_name', 'structure_name'], rewriter)
if debug:
print("-------------------------------------")
cell_rewrites = list(rewriter.structures().items())
if not cell_rewrites:
print("WARNING: No cells found!")
return
pprint.pprint(cell_rewrites)
# Write out the cell list
with open(temp_dir + '/cells.list', 'w') as f:
        max_len = max(len(from_str) for from_str, _ in cell_rewrites)
for from_str, to_str in sorted(cell_rewrites):
f.write(" {:s} -> {:s}".format(from_str.rjust(max_len), to_str))
f.write('\n')
# Second rewrite any remaining strings.
# -----
# The strings could have references to cell names and there is no guarantee
# that the cell instance definition will appear before a string which
# happens to contain the name.
# Hence, we want to know all the cell rewrites before rewriting strings.
if debug:
print()
print("Rewriting strings (inc pin names)")
print("-------------------------------------")
rewriter.structures_reset()
replace_gds_strings(tmp_gds, ['string'], rewriter)
if debug:
print("-------------------------------------")
if debug:
strings_print("Strings in GDS file:", strings_get(tmp_gds))
structs = list(rewriter.structures_usecount())
with open(temp_dir + '/structures.list', 'w') as f:
f.write("Structures ({} found)\n".format(len(structs)))
f.write("----------------------\n")
for name, count in structs:
f.write("{:10d} {:s}\n".format(count, name))
for pin in rewriter.pins(name):
f.write("{:10s} - {}\n".format('', pin))
f.write('\n')
for name, count in structs:
with open(os.path.join(temp_dir, name+'.gds.pins'), 'w') as f:
for pin in rewriter.pins(name):
f.write(pin)
f.write('\n')
with open(os.path.join(temp_dir, name+'.lef.pins'), 'w') as f:
for pin in rewriter.pins(name):
f.write(pin)
f.write('\n')
# Write out the rewrite list
with open(temp_dir + '/rewrite.list', 'w') as f:
r = rewriter.replacements()
max_len = max(len(x) for x in r)
for from_str, to_str in sorted(r.items()):
f.write(from_str.rjust(max_len))
f.write(" -> ")
f.write(to_str)
f.write('\n')
filtered_cells = []
for name in rewriter.structures():
if 'libcell' in name:
continue
if 'vcells' in name:
continue
filtered_cells.append(name)
filtered_cells.sort()
# Split apart the GDS file
output_by_cells = magic_split_gds(tmp_gds, os.path.abspath(techfile), filtered_cells)
for new_cellname in filtered_cells:
lef_file = os.path.join(temp_dir, new_cellname+'.lef')
lef_pin_file = os.path.join(temp_dir, new_cellname+'.lef.pins')
gds_file = os.path.join(temp_dir, new_cellname+'.gds')
gds_pin_file = os.path.join(temp_dir, new_cellname+'.gds.pins')
assert os.path.exists(lef_file), ("Missing lef:", lef_file)
assert os.path.exists(gds_file), ("Missing gds:", gds_file)
# LEF file -> output
prepend_copyright(lef_file)
final_path = copy_file_to_output(
lef_file, final_dir, rewriter.new_lib, rewriter.version, new_cellname, okay_exists=True)
# LEF PINS file -> output
if final_path:
copy_file_to_output(
lef_pin_file, final_dir, rewriter.new_lib, rewriter.version, new_cellname)
# GDS file -> output
subprocess.check_call(CHANGE_GDS_DATE+" 1 0 "+gds_file, shell=True)
copy_file_to_output(
gds_file, final_dir, rewriter.new_lib, rewriter.version, new_cellname)
# GDS PINS file -> output
copy_file_to_output(
gds_pin_file, final_dir, rewriter.new_lib, rewriter.version, new_cellname)
return
__dir__ = os.path.dirname(os.path.realpath(__file__))
CHANGE_GDS_DATE = os.path.join(__dir__, 'change_gds_date.py')
def main(args, infile):
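    """Process infile; if it is a directory, recurse over every *.gds file inside it."""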
if not os.path.isfile(infile):
all_input_files = sorted(infile.rglob('*.gds'))
for f in all_input_files:
            # rglob already yields paths rooted at infile, so pass them through directly.
            main(args, str(f))
else:
infile = str(infile)
if 'scs8' in infile and 'oa' not in infile:
print("Skipping", infile)
return 0
if 'vcells_drc' in infile:
print("Skipping", infile)
return 0
path = os.path.abspath(infile)
ver = version_extract_from_path(path)
if ver is None:
ver = 'XXXX'
else:
ver = "v{}.{}.{}".format(*ver)
filename = os.path.basename(path)
tempdir = os.path.join(args.temp, 'gds_split', filename, ver)
print()
print()
print("Processing", filename, "in", tempdir)
print('-'*75)
try:
filemain(path, tempdir, args.techfile, str(args.output))
except Exception as e:
sys.stdout.flush()
sys.stderr.flush()
traceback.print_exc(file=sys.stdout)
# Write error to stdout
sys.stdout.write('\n')
sys.stdout.write('\n')
sys.stdout.write('Error while processing: ')
sys.stdout.write(infile)
sys.stdout.write('\n')
sys.stdout.flush()
# Write error to stderr too
sys.stderr.write('\n')
sys.stderr.write('\n')
sys.stderr.write('Error while processing: ')
sys.stderr.write(infile)
sys.stderr.write('\n')
sys.stderr.flush()
raise
print('-'*75)
if __name__ == "__main__":
import doctest
fails, _ = doctest.testmod()
if fails != 0:
sys.exit("Some test failed")
parser = argparse.ArgumentParser()
parser.add_argument(
"input",
help="The path to the source directory/file",
type=Path)
parser.add_argument(
"output",
help="The path to the output directory",
type=Path)
parser.add_argument(
"techfile",
help="Full path to the techfile",
type=str)
parser.add_argument(
"temp",
help="The path to the temp directory",
type=Path)
args = parser.parse_args()
main(args, args.input)