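"""Convert timing information extracted from Verilog specify blocks into
Liberty timing files for the SkyWater PDK cell libraries."""
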
import argparse
import json
from pathlib import Path
from timings_from_verilog.specify_parser import extract_timings
from termcolor import colored
import common
from pprint import pprint as pp
from collections import defaultdict
from liberty_parser.quicklogic_timings_importer import json_to_liberty
import os
import re
import sys
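
# Cache of VerilogSpecifyExtractor instances, keyed by source file path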
parsedfiles = {}
copyright = [
    '// Copyright 2019 The Skywater PDK Authors',
    '//',
    '// Licensed under the Apache License, Version 2.0 (the "License");',
    '// you may not use this file except in compliance with the License.',
    '// You may obtain a copy of the License at',
    '//',
    '// https://www.apache.org/licenses/LICENSE-2.0',
    '//',
    '// Unless required by applicable law or agreed to in writing, software',
    '// distributed under the License is distributed on an "AS IS" BASIS,',
    '// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
    '// See the License for the specific language governing permissions and',
    '// limitations under the License.',
    ''
]
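
# Maps every processed source file to the list of output files generated from it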
sourcetodests = defaultdict(list)
def convert_specify_to_libertyjson(libraryname, parsedentry, modulename, containingcellnames, oldlibname, newlibname):
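    """Convert parsed specify-block timings for a single module into a
    Liberty-style JSON structure.

    Only entries whose key matches `modulename` (and not one of the longer
    cell names that merely contain it) are converted. Path delays become
    `timing` groups; constraint checks become setup/hold/recovery/removal
    timings, `minimum_period` groups or `min_pulse_width` attributes.
    Returns the JSON dictionary, or None if nothing was converted.
    """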
    librarycontent = {}
    for key, entry in parsedentry.items():
        if modulename in key and not any([name in key for name in containingcellnames[modulename]]):
            newname = key
            if oldlibname:
                newname = key.replace(oldlibname, newlibname)
            cell = "cell {}".format(newname)
            librarycontent[cell] = {}
            allpaths = entry['pathdelays']
            for paths in entry['ifstatements'].values():
                allpaths.extend(paths)
            for pathdelay in allpaths:
                pinname = 'pin {}'.format(pathdelay['output_port'])
                if pinname not in librarycontent[cell]:
                    librarycontent[cell][pinname] = {}
                if 'timing ' not in librarycontent[cell][pinname]:
                    librarycontent[cell][pinname]['timing '] = []
                timing = {}
                timing['related_pin'] = pathdelay['input_port']
                if pathdelay['cond']:
                    timing['when'] = pathdelay['cond']
                if pathdelay['source']:
                    if 'related_pin' in timing and pathdelay['source'] not in timing['related_pin'].split(' '):
                        timing['related_pin'] += ' {}'.format(pathdelay['source'])
                if pathdelay['inverted']:
                    if 'when' in timing:
                        timing['when'] = '!{}&'.format(pathdelay['input_port']) + timing['when']
                    else:
                        timing['when'] = '!{}'.format(pathdelay['input_port'])
                if pathdelay['edge'] == 'posedge':
                    timing['timing_type'] = 'rising_edge'
                elif pathdelay['edge'] == 'negedge':
                    timing['timing_type'] = 'falling_edge'
                if pathdelay['delaylist']['rise']:
                    if float(pathdelay['delaylist']['rise'][0]) == float(pathdelay['delaylist']['rise'][2]):
                        timing['intrinsic_rise'] = pathdelay['delaylist']['rise'][0]
                    else:
                        print(f'{pathdelay["delaylist"]["rise"][0]} {pathdelay["delaylist"]["rise"][2]}')
                        timing['intrinsic_rise_min'] = pathdelay['delaylist']['rise'][0]
                        timing['intrinsic_rise'] = pathdelay['delaylist']['rise'][1] if float(pathdelay['delaylist']['rise'][1]) > float(pathdelay['delaylist']['rise'][0]) else pathdelay['delaylist']['rise'][0]
                        timing['intrinsic_rise_max'] = pathdelay['delaylist']['rise'][2]
                if pathdelay['delaylist']['fall']:
                    if float(pathdelay['delaylist']['fall'][0]) == float(pathdelay['delaylist']['fall'][2]):
                        timing['intrinsic_fall'] = pathdelay['delaylist']['fall'][0]
                    else:
                        print(f'{pathdelay["delaylist"]["fall"][0]} {pathdelay["delaylist"]["fall"][2]}')
                        timing['intrinsic_fall_min'] = pathdelay['delaylist']['fall'][0]
                        timing['intrinsic_fall'] = pathdelay['delaylist']['fall'][1] if float(pathdelay['delaylist']['fall'][1]) > float(pathdelay['delaylist']['fall'][0]) else pathdelay['delaylist']['fall'][0]
                        timing['intrinsic_fall_max'] = pathdelay['delaylist']['fall'][2]
                if pathdelay['edge'] not in ['posedge', 'negedge']:
                    timing['timing_type'] = 'rising_edge'
                    librarycontent[cell][pinname]['timing '].append(timing.copy())
                    timing['timing_type'] = 'falling_edge'
                    librarycontent[cell][pinname]['timing '].append(timing)
                else:
                    librarycontent[cell][pinname]['timing '].append(timing)
            for constraintcheck in entry['constraintchecks']:
                if constraintcheck['type'] in ['setup', 'hold', 'skew', 'recovery']:
                    pinname = 'pin {}'.format(constraintcheck['data_event']['signals'][0])
                    if pinname not in librarycontent[cell]:
                        librarycontent[cell][pinname] = {}
                    if 'timing ' not in librarycontent[cell][pinname]:
                        librarycontent[cell][pinname]['timing '] = []
                    timing = {}
                    if constraintcheck['data_event']['edge'] == 'posedge':
                        if float(constraintcheck['limit'][0]) == float(constraintcheck['limit'][2]):
                            timing['intrinsic_rise'] = constraintcheck['limit'][0]
                        else:
                            timing['intrinsic_rise_min'] = constraintcheck['limit'][0]
                            timing['intrinsic_rise'] = constraintcheck['limit'][1] if float(constraintcheck['limit'][1]) > float(constraintcheck['limit'][0]) else constraintcheck['limit'][0]
                            timing['intrinsic_rise_max'] = constraintcheck['limit'][2]
                    if constraintcheck['data_event']['edge'] == 'negedge':
                        if float(constraintcheck['limit'][0]) == float(constraintcheck['limit'][2]):
                            timing['intrinsic_fall'] = constraintcheck['limit'][0]
                        else:
                            timing['intrinsic_fall_min'] = constraintcheck['limit'][0]
                            timing['intrinsic_fall'] = constraintcheck['limit'][1] if float(constraintcheck['limit'][1]) > float(constraintcheck['limit'][0]) else constraintcheck['limit'][0]
                            timing['intrinsic_fall_max'] = constraintcheck['limit'][2]
                    if len(constraintcheck['data_event']['signals']) > 1:
                        cond = '&'.join(constraintcheck['data_event']['signals'][1:])
                        timing['when'] = cond
                    timing['timing_type'] = '{}_{}'.format(constraintcheck['type'], 'rising' if constraintcheck['reference_event']['edge'] == 'posedge' else 'falling')
                    timing['related_pin'] = constraintcheck['reference_event']['signals'][0]
                    librarycontent[cell][pinname]['timing '].append(timing)
                elif constraintcheck['type'] in ['setuphold', 'recrem']:
                    pinname = 'pin {}'.format(constraintcheck['data_event']['signals'][0])
                    if pinname not in librarycontent[cell]:
                        librarycontent[cell][pinname] = {}
                    if 'timing ' not in librarycontent[cell][pinname]:
                        librarycontent[cell][pinname]['timing '] = []
                    timing = {}
                    timing['related_pin'] = constraintcheck['reference_event']['signals'][0]
                    limit = 'setup_limit' if constraintcheck['type'] == 'setuphold' else 'recovery_limit'
                    if constraintcheck['data_event']['edge'] == 'posedge':
                        if float(constraintcheck[limit][0]) == float(constraintcheck[limit][2]):
                            timing['intrinsic_rise'] = constraintcheck[limit][0]
                        else:
                            timing['intrinsic_rise_min'] = constraintcheck[limit][0]
                            timing['intrinsic_rise'] = constraintcheck[limit][1] if float(constraintcheck[limit][1]) > float(constraintcheck[limit][0]) else constraintcheck[limit][0]
                            timing['intrinsic_rise_max'] = constraintcheck[limit][2]
                    if constraintcheck['data_event']['edge'] == 'negedge':
                        if float(constraintcheck[limit][0]) == float(constraintcheck[limit][2]):
                            timing['intrinsic_fall'] = constraintcheck[limit][0]
                        else:
                            timing['intrinsic_fall_min'] = constraintcheck[limit][0]
                            timing['intrinsic_fall'] = constraintcheck[limit][1] if float(constraintcheck[limit][1]) > float(constraintcheck[limit][0]) else constraintcheck[limit][0]
                            timing['intrinsic_fall_max'] = constraintcheck[limit][2]
                    if len(constraintcheck['data_event']['signals']) > 1:
                        cond = '&'.join(constraintcheck['data_event']['signals'][1:])
                        timing['when'] = cond
                    timing['timing_type'] = '{}_{}'.format(
                        'setup' if constraintcheck['type'] == 'setuphold' else 'recovery',
                        'rising' if constraintcheck['reference_event']['edge'] == 'posedge' else 'falling')
                    librarycontent[cell][pinname]['timing '].append(timing)
                    timing = {}
                    timing['related_pin'] = constraintcheck['reference_event']['signals'][0]
                    limit = 'hold_limit' if constraintcheck['type'] == 'setuphold' else 'removal_limit'
                    if constraintcheck['data_event']['edge'] == 'posedge':
                        if float(constraintcheck[limit][0]) == float(constraintcheck[limit][2]):
                            timing['intrinsic_rise'] = constraintcheck[limit][0]
                        else:
                            timing['intrinsic_rise_min'] = constraintcheck[limit][0]
                            timing['intrinsic_rise'] = constraintcheck[limit][1] if float(constraintcheck[limit][1]) > float(constraintcheck[limit][0]) else constraintcheck[limit][0]
                            timing['intrinsic_rise_max'] = constraintcheck[limit][2]
                    if constraintcheck['data_event']['edge'] == 'negedge':
                        if float(constraintcheck[limit][0]) == float(constraintcheck[limit][2]):
                            timing['intrinsic_fall'] = constraintcheck[limit][0]
                        else:
                            timing['intrinsic_fall_min'] = constraintcheck[limit][0]
                            timing['intrinsic_fall'] = constraintcheck[limit][1] if float(constraintcheck[limit][1]) > float(constraintcheck[limit][0]) else constraintcheck[limit][0]
                            timing['intrinsic_fall_max'] = constraintcheck[limit][2]
                    if len(constraintcheck['data_event']['signals']) > 1:
                        cond = '&'.join(constraintcheck['data_event']['signals'][1:])
                        timing['when'] = cond
                    timing['timing_type'] = '{}_{}'.format(
                        'hold' if constraintcheck['type'] == 'setuphold' else 'removal',
                        'rising' if constraintcheck['reference_event']['edge'] == 'posedge' else 'falling')
                    librarycontent[cell][pinname]['timing '].append(timing)
                elif constraintcheck['type'] == 'period':
                    pinname = 'pin {}'.format(constraintcheck['reference_event']['signals'][0])
                    if pinname not in librarycontent[cell]:
                        librarycontent[cell][pinname] = {}
                    if 'minimum_period ' not in librarycontent[cell][pinname]:
                        librarycontent[cell][pinname]['minimum_period '] = []
                    period = {}
                    if len(constraintcheck['reference_event']['signals']) > 1:
                        period['when'] = '&'.join(constraintcheck['reference_event']['signals'][1:])
                    constraint_attr = 'constraint'
                    limit = 'limit'
                    period['{}_min'.format(constraint_attr)] = constraintcheck[limit][0]
                    period['{}'.format(constraint_attr)] = constraintcheck[limit][1]
                    period['{}_max'.format(constraint_attr)] = constraintcheck[limit][2]
                    librarycontent[cell][pinname]['minimum_period '].append(period)
                else:
                    pinname = 'pin {}'.format(constraintcheck['reference_event']['signals'][0])
                    if pinname not in librarycontent[cell]:
                        librarycontent[cell][pinname] = {}
                    # if not 'min_pulse_width ' in librarycontent[cell]:
                    #     librarycontent[cell][pinname]['min_pulse_width '] = []
                    # width = {}
                    # if len(constraintcheck['reference_event']['signals']) > 1:
                    #     width['when'] = '&'.join(constraintcheck['reference_event']['signals'][1:])
                    # constraint_attr = 'constraint_high'
                    # if constraintcheck['reference_event']['edge'] == 'negedge':
                    #     constraint_attr = 'constraint_low'
                    limit = 'width_limit'
                    # width['{}_min'.format(constraint_attr)] = constraintcheck[limit][0]
                    # width['{}'.format(constraint_attr)] = constraintcheck[limit][1]
                    # width['{}_max'.format(constraint_attr)] = constraintcheck[limit][2]
                    librarycontent[cell][pinname]['min_pulse_width_low'] = constraintcheck[limit][0]
                    librarycontent[cell][pinname]['min_pulse_width_high'] = constraintcheck[limit][2]
                    # librarycontent[cell][pinname]['min_pulse_width '].append(width)
            if len(librarycontent[cell]) == 0:
                del librarycontent[cell]
    if len(librarycontent) > 0:
        library = {'library {}'.format(libraryname): librarycontent}
        return library
    else:
        return None
def remap_path(old_path, ext, base="output/skywater-pdk/libraries", modname=None):
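    """Map an input Verilog file path to the destination path of the
    generated Liberty file.

    The destination follows the layout
    <base>/<library>/<version>/cells/<module>/<lib>-<mod>-<name><ext>,
    and legacy library names (e.g. s8, s8iom0s8) are rewritten to their
    sky130 equivalents.
    """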
    lib, mod = common.mod_extract_from_path(old_path)
    if common.version_extract_from_path(old_path):
        ver = "V" + ".".join([str(v) for v in common.version_extract_from_path(old_path)])
    else:
        ver = ''
    if modname is not None:
        mod = modname
    if lib is None or lib in ['???', '']:
        if 'openfpga' in str(old_path):
            lib = 'openfpga'
        elif mod is None:
            mod = lib
    rest, f_name = os.path.split(old_path)
    if f_name == "verilog.v":
        # _, d_name = os.path.split(rest)
        if rest.split('/')[-2] != 'skywater-src-nda':
            d_name = f"{rest.split('/')[-2]}_{rest.split('/')[-1]}"
            # if 'tmax' in rest.split('/')[-1]:
            #     d_name += '_tmax'
        else:
            d_name = rest.split('/')[-1]
            # d_name = f"{rest.split('/')[-2]}_{rest.split('/')[-1]}"
        finfilename = f'{lib}-{mod}-{d_name}{ext}'
        finfilename = finfilename.replace('Models_', '')
        finfilename = finfilename.replace('SPECTRE_', '')
        old_path = Path(base) / lib / ver / 'cells' / mod / finfilename
    else:
        print(f'{old_path} {lib} {ver} {mod} {f_name}')
        old_path = Path(base) / lib / ver / 'cells' / re.sub(r'_[0-9]$', '', mod) / f'{lib}-{mod}-{Path(f_name).stem}{ext}'
    old_path = str(old_path).replace("/s8iom0s8/", "/sky130_fd_io/")
    old_path = str(old_path).replace("/s8/", "/sky130_fd_pr/")
    old_path = old_path.replace("/VirtuosoOA/libs", "")
    new_lib = common.convert_libname(lib)
    if lib is not None and new_lib is not None:
        new_path = old_path.replace(lib, new_lib)
    else:
        new_path = old_path
    return new_path
def check_all_zero(data):
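    """Recursively check whether every numeric value in `data` is zero.

    Strings that cannot be parsed as numbers are ignored (treated as zero);
    lists and dictionaries are checked element by element.
    """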
    if type(data) in [str, int, float]:
        try:
            val = float(data)
            if val != 0:
                return False
            else:
                return True
        except Exception:
            return True
    elif type(data) == list:
        for val in data:
            if not check_all_zero(val):
                return False
        return True
    elif type(data) == dict:
        for val in data.values():
            if not check_all_zero(val):
                return False
        return True
    else:
        raise Exception(f'Unsupported type: {type(data)}')
    return False
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input",
        help="JSON file mapping cell names to the Verilog files to convert",
        type=Path)
    parser.add_argument(
        "outputdir",
        help="The directory that will contain the output",
        type=Path)
    parser.add_argument(
        "--sourcetodests",
        help="Output JSON file with the mapping from source files to destination files",
        type=Path)
    parser.add_argument(
        "--path-prefix-to-remove",
        help="The substring that needs to be removed before generating subdirectories for Liberty files",
        type=Path)
    parser.add_argument(
        "--print",
        help="Print additional debug information",
        action="store_true")
    # parser.add_argument(
    #     "--num-jobs",
    #     help="Number of jobs to process files",
    #     type=int,
    #     default=1)
    args = parser.parse_args()
    with open(args.input, 'r') as infile:
        celltolibs = json.load(infile)
    containingcellnames = defaultdict(list)
    cellnames = list(celltolibs.keys())
    for i, cell in enumerate(cellnames):
        for cell2 in cellnames[i + 1:]:
            if cell in cell2:
                containingcellnames[cell].append(cell2)
            elif cell2 in cell:
                containingcellnames[cell2].append(cell)
    pp(containingcellnames, indent=4)
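
    # Collect (source file, cell, destination file, old library name, new library name)
    # tuples for every Verilog file that should be converted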
    toprocess = []
    for cell, files in celltolibs.items():
        celldir = args.outputdir / cell
        # celldir.mkdir(parents=True, exist_ok=True)
        for f in files:
            if 'sram' in f:
                print(colored(f, 'cyan'))
                continue
            try:
                oldlibname = common.lib_extract_from_path(str(f))
            except Exception as ex:
                print(f)
                print(ex)
                oldlibname = None
            if not oldlibname or oldlibname == '???':
                oldlibname = None
                newlibname = None
            else:
                newlibname = common.convert_libname(oldlibname)
            newf = Path(remap_path(f, '.specify.lib', base=args.outputdir, modname=cell))
            toprocess.append((f, cell, newf, oldlibname, newlibname))
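
    # Abort early if two different source files would be written to the same destination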
    newfiles = {}
    repeated = False
    for data in toprocess:
        if data[2] in newfiles:
            repeated = True
            print(f'This file will be overwritten: {(data[1], data[0])} = {newfiles[data[2]]}')
            print(f'{data[2]}')
        else:
            newfiles[data[2]] = (data[1], data[0])
    assert not repeated
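
    # Parse each Verilog file, convert its specify blocks to Liberty and write the results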
    allfilescount = len(toprocess)
    numfailed = 0
    errortypes = set()
    for num, data in enumerate(toprocess):
        try:
            # filename, cell, findir, newfilename, oldlibname, newlibname = data
            filename, cell, newfilename, oldlibname, newlibname = data
            print('{} : {}'.format(filename, cell))
            if filename not in parsedfiles:
                with open(filename, 'r') as f:
                    veriloglines = f.readlines()
                extractor = extract_timings.VerilogSpecifyExtractor(veriloglines)
                extractor.parse()
                parsedfiles[filename] = extractor
                if args.print:
                    print('-------------------')
                    print(''.join(veriloglines))
                    print('-------------------')
                    for module, parsedentry in extractor.parsedspecifyblocks.items():
                        print('-------------------')
                        print('Module: {}'.format(module))
                        print('-------------------')
                        print('Specparams')
                        for param, value in parsedentry["specparams"].items():
                            pp('{} = {}'.format(param, value))
                        print('-------------------')
                        print('Constraint checks')
                        for c in parsedentry["constraintchecks"]:
                            pp(c)
                        print('-------------------')
                        print('Path delays')
                        for p in parsedentry["pathdelays"]:
                            pp(p)
                        print('-------------------')
                        print('Conditioned path delays')
                        for v in parsedentry["ifstatements"].values():
                            for e in v:
                                pp(e)
            else:
                print('Already parsed')
                extractor = parsedfiles[filename]
            if len(extractor.parsedspecifyblocks) > 0:
                jsonliberty = convert_specify_to_libertyjson(newlibname, extractor.parsedspecifyblocks, cell, containingcellnames, oldlibname, newlibname)
                if jsonliberty and not check_all_zero(jsonliberty):
                    liblines = json_to_liberty.JSONToLibertyWriter.convert_json_to_liberty(jsonliberty)
                    if liblines:
                        if check_all_zero(jsonliberty):
                            print('------------- ALL ZEROS ---------')
                            print(f'---------- {filename} ----------')
                            with open(filename, 'r') as f:
                                veriloglines = f.readlines()
                            print(''.join(veriloglines))
                            print(f'--------------------')
                            print('\n'.join(liblines))
                            print('----------------------------------')
                        # filestem = str(Path(newfilename).stem)
                        # if not filestem.endswith(cell):
                        #     # newfilename = newfilename.replace(filestem, filestem + '_' + cell)
                        #     newfilename = Path(str(newfilename).replace(filestem, filestem + '_' + cell))
                        # libtarget = (findir / Path(newfilename).name).with_suffix('.lib')
                        if not os.path.isdir(newfilename.parent):
                            newfilename.parent.mkdir(parents=True, exist_ok=True)
                        with open(newfilename, 'w') as outputlib:
                            outputlib.write('\n'.join(copyright + liblines))
                        sourcetodests[str(filename)].append(str(newfilename))
                        with open(newfilename.with_suffix('.json'), 'w') as outputlib:
                            json.dump(jsonliberty, outputlib, indent=2)
                        sourcetodests[str(filename)].append(str(newfilename.with_suffix('.json')))
            else:
                print('No specify block')
            print(colored('[{:05d}/{:05d},failed={:05d}] {} : {} | DONE'.format(num + 1, allfilescount, numfailed, data[1], data[0]), 'green'))
        except Exception as ex:
            print(colored('[{:05d}/{:05d},failed={:05d}] {} : {} | ERROR : {}'.format(num + 1, allfilescount, numfailed, data[1], data[0], type(ex).__name__), 'red'))
            errortypes.add(type(ex).__name__)
            numfailed += 1
    print('{} out of {} failed'.format(numfailed, allfilescount))
    print('Error types:')
    print(errortypes)
    if numfailed > 0:
        sys.exit(1)
    if args.sourcetodests:
        with open(args.sourcetodests, 'w') as srctodst:
            json.dump(sourcetodests, srctodst, indent=2)