#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 The SkyWater PDK Authors.
#
# Use of this source code is governed by the Apache 2.0
# license that can be found in the LICENSE file or at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
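
"""Generate ``metadata.json`` files for SkyWater PDK cell directories.

For every cell directory passed on the command line, this script checksums the
important cell views, works out which corners and drive strengths are present
from the file names, extracts the port list from the Verilog model, and writes
the collected information to ``metadata.json`` inside that directory.
"""
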
import hashlib
import json
import os
import pprint
import re
import sys
import traceback

from skywater_pdk import base, corners, drives
import hdlparse.verilog_parser as vlog

copyright_header = """\
// Copyright 2020 The Skywater PDK Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: Apache-2.0
"""

vlog_ex = vlog.VerilogExtractor()

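# File extensions considered "important"; any file with one of these
# extensions gets a SHA-1 checksum recorded in metadata.json, and a cell
# with none of them is reported as an error.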
IMPORTANT = [
    'cell.json',
    'full.v',
    'specify.v',
    'gds',
    'cdl',
    'sp',
]
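
# Files matching any of these patterns are skipped entirely.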
IGNORE = [
    re.compile('README.rst$'),
    re.compile('.Cat$'),
    re.compile('metadata.json$'),
    re.compile('wrap.json'),
    re.compile('wrap.lib'),
    re.compile('define_functional'),
]
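
# Cells matching these patterns (fill, tap, decap, diode, spare cells, ...)
# may fail processing without aborting the whole run; see main().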
ALLOW_ERRORS = [
    re.compile('/pg_u_'),
    re.compile('/udb_pg_u_'),
    re.compile('/udb_u_'),
    re.compile('/u_'),
    re.compile('_pg$'),
    re.compile('fill'),
    re.compile('tap'),
    re.compile('decap'),
    re.compile('diode'),
    re.compile('probe'),
    re.compile('libcell'),
    re.compile('lpflow_'),
    re.compile('macro_sparecell'),
    re.compile('macro_sync'),
]


def should_ignore(f, x=IGNORE):
    """Return True if *f* matches any of the regular expressions in *x*.

    >>> should_ignore('README.rst')
    True
    >>> should_ignore('metadata.json')
    True
    >>> should_ignore('asdfasdfasdf/README.rst')
    True
    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/README.rst')
    True
    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/XXXX')
    False
    """
    for i in x:
        if i.search(f):
            return True
    return False


def get_description(cellpath):
    """Return the Description section of the cell's README.rst, or ''."""
    readme_fn = os.path.join(cellpath, 'README.rst')
    if not os.path.exists(readme_fn):
        return ''
    readme = open(readme_fn).read()
    desc = """\
Description
***********
"""
    logic = """\
Logic
*****
"""
    assert desc in readme, readme
    assert logic in readme, readme
    _, readme = readme.split(desc, 1)
    readme, _ = readme.split(logic, 1)
    return readme.strip()


def process(cellpath):
    """Scan a single cell directory and write its ``metadata.json``.

    Raises a ``ValueError`` describing every problem found if the directory
    is incomplete or contains files that cannot be parsed.
    """
    assert os.path.exists(cellpath), cellpath
    assert os.path.isdir(cellpath), cellpath

    files = [
        (f, os.path.abspath(os.path.join(cellpath, f)))
        for f in os.listdir(cellpath)]
    files.sort()

    dcell, fname = base.parse_pathname(cellpath)
    assert isinstance(dcell, base.Cell), (cellpath, dcell, fname)
    assert fname is None, (cellpath, dcell, fname)

    extensions = set()
    dcorners = set()
    ddrives = set()
    checksums = {}
    errors = []
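    # Walk every file in the cell directory, checksumming the important views
    # and collecting the corner and drive-strength variants encoded in the
    # file names.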
    for fname, fpath in files:
        if should_ignore(fpath):
            continue
        try:
            fcell, fextra, fext = base.parse_filename(fpath)
        except Exception as e:
            traceback.print_exc()
            errors.append(e)
            # A file whose name cannot be parsed gives us nothing further to
            # check, so move on to the next one.
            continue
        assert isinstance(fcell, base.Cell), (fpath, fcell, fextra, fext)
        if fext in IMPORTANT:
            checksums[fname] = hashlib.sha1(open(fpath, 'rb').read()).hexdigest()
        extensions.add(fext)
        assert fcell.library == dcell.library, (fcell, dcell)
        if not fextra:
            continue
        if fextra.startswith('extracted') or 'spectre' in fextra:
            continue
        try:
            fcorner = corners.parse_filename(fextra)
            dcorners.add(fcorner)
        except Exception as e:
            traceback.print_exc()
            errors.append('Invalid corner: {} -- {} (from {})'.format(e, fextra, fpath))
        assert fcell.name.startswith(dcell.name), (fcell, dcell)
        if dcell.name != fcell.name:
            try:
                fdrive = fcell.name[len(dcell.name):]
                ddrives.add(drives.parse_drive(fdrive))
            except Exception as e:
                traceback.print_exc()
                errors.append('Invalid drive: {} -- {} (from {})'.format(e, fdrive, fpath))

    basepath = cellpath.split("libraries", 1)[0]
    cellrelpath = os.path.relpath(cellpath, basepath)

    metadata = dcell.to_dict()
    metadata['fullname'] = dcell.fullname
    metadata['description'] = get_description(cellpath)
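
    # Recover the port list from the Verilog model; full.v is tried first,
    # then simple.v.  Ports guarded by `ifdef SC_USE_PG_PIN are split out as
    # power pins.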
    if 'full.v' in extensions:
        o = None
        for ext in ['full.v', 'simple.v']:
            fname = os.path.join(cellpath, "{}.{}".format(dcell.fullname, ext))
            if not os.path.exists(fname):
                errors.append("Missing {} file".format(fname))
                continue
            o = vlog_ex.extract_objects(fname)
            if not o or len(o) != 1:
                errors.append("Invalid {} file ({})".format(fname, o))
                continue
            o = o[0]
            if not o:
                errors.append("Invalid {} file ({})".format(fname, o))
            break
        if o:
            assert dcell.fullname in o.name, (dcell.fullname, o)
            assert not o.generics, (dcell.fullname, o)
            non_pwr = []
            pwr = []
            current_list = non_pwr
            p = list(o.ports)
            while len(p) > 0:
                a = p.pop(0)
                if a.name == 'ifdef':
                    assert len(p) > 2, p
                    pg_pin = p.pop(0)
                    assert 'SC_USE_PG_PIN' == pg_pin.name, pg_pin
                    current_list = pwr
                    continue
                elif a.name == 'endif':
                    assert len(p) == 0, p
                    break
                else:
                    current_list.append((a.name, a.mode))
            metadata['ports'] = {
                'signal': non_pwr,
                'power': pwr,
            }

    extensions.add('metadata.json')
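
    # Record checksums, corners, extensions and drive strengths, then write
    # the metadata file.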
    if not checksums:
        errors.append('No important files for {}: {}'.format(cellpath, files))
    else:
        metadata['files'] = checksums

    if dcorners:
        metadata['corners'] = [d.to_dict() for d in sorted(dcorners)]
    else:
        errors.append('Missing corners for: {}\n'.format(cellpath))

    assert extensions
    metadata['extensions'] = list(sorted(extensions))

    if ddrives:
        metadata['drives'] = [d.to_dict() for d in sorted(ddrives)]

    # Save the metadata file.
    mdata_file = os.path.join(cellpath, 'metadata.json')
    with open(mdata_file, 'w') as f:
        json.dump(metadata, f, sort_keys=True, indent=" ")
    print("Wrote:", mdata_file)

    if errors:
        raise ValueError("\n".join(str(e) for e in errors))


def main(args):
    """Process every cell directory given in ``args``.

    Cells matching ``ALLOW_ERRORS`` are allowed to fail; any other failure
    is re-raised.
    """
    for a in args:
        p = os.path.abspath(a)
        if should_ignore(p):
            continue
        try:
            process(p)
        except Exception as e:
            if not should_ignore(p, ALLOW_ERRORS):
                raise
            print("Failed to process ignorable:", p)
            traceback.print_exc()


if __name__ == "__main__":
    import doctest
    doctest.testmod()
    sys.exit(main(sys.argv[1:]))