| #!/usr/bin/env python3 |
| # -*- coding: utf-8 -*- |
| # |
| # Copyright 2020 The SkyWater PDK Authors. |
| # |
| # Use of this source code is governed by the Apache 2.0 |
| # license that can be found in the LICENSE file or at |
| # https://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # SPDX-License-Identifier: Apache-2.0 |
| |
| import hashlib |
| import json |
| import os |
| import pprint |
| import re |
| import sys |
| import traceback |
| |
| from skywater_pdk import base, corners, drives |
| |
| import hdlparse.verilog_parser as vlog |
| |
# License header prepended to any generated Verilog output files.
# NOTE: this is a runtime string constant, not a comment block.
copyright_header = """\
// Copyright 2020 The Skywater PDK Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: Apache-2.0
"""

# Shared hdlparse extractor used to pull module/port information out of
# the cell's Verilog model files.
vlog_ex = vlog.VerilogExtractor()

# File extensions whose contents are SHA-1 checksummed into the
# generated metadata.json (see process()).
IMPORTANT = [
    'cell.json',
    'full.v',
    'specify.v',
    'gds',
    'cdl',
]
| |
# Filename patterns skipped entirely by the metadata generator
# (generated outputs and documentation, not cell source data).
IGNORE = [re.compile(p) for p in (
    'README.rst$',
    'metadata.json$',
    'wrap.json',
    'wrap.lib',
)]

# Cell-name patterns whose processing failures are tolerated
# (filler, tap, power-gating and low-power-flow cells).
ALLOW_ERRORS = [re.compile(p) for p in (
    '/pg_u_',
    'fill',
    'tap',
    'lpflow_',
)]

def should_ignore(f, x=IGNORE):
    """Return True when path *f* matches any regex in *x*.

    >>> should_ignore('README.rst')
    True
    >>> should_ignore('metadata.json')
    True
    >>> should_ignore('asdfasdfasdf/README.rst')
    True
    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/README.rst')
    True
    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/XXXX')
    False

    """
    return any(pattern.search(f) for pattern in x)
| |
| |
def get_description(cellpath):
    """Return the Description section text from the cell's README.rst.

    Extracts the text between the ``Description`` and ``Logic`` section
    headers of ``<cellpath>/README.rst``.

    Returns an empty string when no README.rst exists.  Asserts if the
    README is missing either section header.
    """
    readme_fn = os.path.join(cellpath, 'README.rst')
    if not os.path.exists(readme_fn):
        return ''
    # BUGFIX: use a context manager so the file handle is closed
    # (the previous bare open().read() leaked the handle).
    with open(readme_fn) as f:
        readme = f.read()

    desc = """\
Description
***********
"""
    logic = """\
Logic
*****
"""
    assert desc in readme, readme
    assert logic in readme, readme

    # Keep only the text after the Description header and before the
    # Logic header.
    _, readme = readme.split(desc, 1)
    readme, _ = readme.split(logic, 1)
    return readme.strip()
| |
| |
def process(cellpath):
    """Generate metadata.json for a single cell directory.

    Scans every file in *cellpath*, recording:
      * the set of file extensions present,
      * the corners and drive strengths implied by the filenames,
      * SHA-1 checksums of the IMPORTANT files,
      * the cell's signal/power port lists (parsed from the Verilog
        models when present),
      * the Description text from README.rst.

    Parse failures are accumulated and raised together as a single
    ValueError after the metadata file has been written.
    """
    assert os.path.exists(cellpath), cellpath
    assert os.path.isdir(cellpath), cellpath

    files = [
        (f, os.path.abspath(os.path.join(cellpath, f)))
        for f in os.listdir(cellpath)]
    files.sort()

    # The directory name itself identifies the cell.
    dcell, fname = base.parse_pathname(cellpath)
    assert isinstance(dcell, base.Cell), (cellpath, dcell, fname)
    assert fname is None, (cellpath, dcell, fname)

    extensions = set()
    dcorners = set()
    ddrives = set()
    checksums = {}
    errors = []
    for fname, fpath in files:
        print("Processing:", fname)
        if should_ignore(fpath):
            continue

        try:
            fcell, fextra, fext = base.parse_filename(fpath)
        except Exception as e:
            traceback.print_exc()
            errors.append(e)
            # BUGFIX: fcell/fextra/fext are undefined after a parse
            # failure; previously execution fell through to a NameError
            # instead of recording the error and moving on.
            continue
        # BUGFIX: the assert message previously referenced the
        # undefined name `ext` instead of `fext`.
        assert isinstance(fcell, base.Cell), (fpath, fcell, fextra, fext)

        if fext in IMPORTANT:
            # BUGFIX: close the file handle instead of leaking it.
            with open(fpath, 'rb') as f:
                checksums[fname] = hashlib.sha1(f.read()).hexdigest()

        extensions.add(fext)

        assert fcell.library == dcell.library, (fcell, dcell)
        if not fextra:
            continue

        try:
            fcorner = corners.parse_filename(fextra)
        except Exception as e:
            traceback.print_exc()
            errors.append(e)
            # BUGFIX: without this continue, a stale fcorner left over
            # from a previous iteration (or a NameError on the first)
            # would be added to dcorners below.
            continue
        dcorners.add(fcorner)

        assert fcell.name.startswith(dcell.name), (fcell, dcell)
        if dcell.name != fcell.name:
            try:
                # Anything after the base cell name is the drive
                # strength suffix (e.g. "_4").
                fdrive = fcell.name[len(dcell.name):]

                ddrives.add(drives.parse_drive(fdrive))
            except Exception as e:
                traceback.print_exc()
                errors.append(e)

    basepath = cellpath.split("libraries", 1)[0]
    cellrelpath = os.path.relpath(cellpath, basepath)
    print(cellrelpath)

    metadata = dcell.to_dict()
    metadata['fullname'] = dcell.fullname
    metadata['description'] = get_description(cellpath)

    # Sanity-check the blackbox model: one module, matching name, no
    # parameters.
    if 'blackbox.v' in extensions:
        bbv_fname = os.path.join(cellpath, "{}.blackbox.v".format(dcell.fullname))
        assert os.path.exists(bbv_fname), bbv_fname
        o = vlog_ex.extract_objects(bbv_fname)
        assert len(o) == 1, o
        o = o[0]
        assert dcell.fullname in o.name, (dcell.fullname, o)
        assert not o.generics, (dcell.fullname, o)

    # Extract the port list from the full (or simple) Verilog model.
    if 'full.v' in extensions:
        full_fname = os.path.join(cellpath, "{}.full.v".format(dcell.fullname))
        assert os.path.exists(full_fname), full_fname
        o = vlog_ex.extract_objects(full_fname)
        if not o:
            # Fall back to the simple model when the full model yields
            # no parseable modules.
            simple_fname = os.path.join(cellpath, "{}.simple.v".format(dcell.fullname))
            assert os.path.exists(simple_fname), simple_fname
            o = vlog_ex.extract_objects(simple_fname)
        assert len(o) == 1, o
        o = o[0]
        assert dcell.fullname in o.name, (dcell.fullname, o)
        assert not o.generics, (dcell.fullname, o)
        non_pwr = []  # regular signal ports
        pwr = []      # power/ground ports (guarded by `ifdef SC_USE_PG_PIN)

        # hdlparse reports the `ifdef / `endif tokens and the macro name
        # as pseudo-ports; everything after `ifdef SC_USE_PG_PIN is a
        # power pin.
        current_list = non_pwr
        p = list(o.ports)
        while len(p) > 0:
            a = p.pop(0)
            if a.name == 'ifdef':
                assert len(p) > 2, p
                pg_pin = p.pop(0)
                assert 'SC_USE_PG_PIN' == pg_pin.name, pg_pin
                current_list = pwr
                continue
            elif a.name == 'endif':
                # The `endif is expected to be the last pseudo-port.
                assert len(p) == 0, p
                break
            else:
                current_list.append((a.name, a.mode))
        metadata['ports'] = {
            'signal': non_pwr,
            'power': pwr,
        }

    # The file we are about to write also counts as an extension.
    extensions.add('metadata.json')

    assert checksums
    metadata['files'] = checksums
    if dcorners:
        metadata['corners'] = [d.to_dict() for d in sorted(dcorners)]
    else:
        errors.append('Missing corners for: {}\n'.format(cellpath))

    assert extensions
    metadata['extensions'] = list(sorted(extensions))

    if ddrives:
        metadata['drives'] = [d.to_dict() for d in sorted(ddrives)]

    # Save the metadata file.
    with open(os.path.join(cellpath, 'metadata.json'), 'w') as f:
        json.dump(metadata, f, sort_keys=True, indent="    ")

    # Create verilog files for each drive strength
    print()
    print()
    print(dcell.name)
    print("-"*75)
    pprint.pprint(metadata)

    if errors:
        raise ValueError("\n".join(str(e) for e in errors))
| |
| |
def main(args):
    """Run process() on every cell directory path given in *args*."""
    for arg in args:
        print()
        print()
        path = os.path.abspath(arg)
        if should_ignore(path):
            continue
        try:
            process(path)
        except Exception:
            # Failures are only tolerated for the known-problematic
            # cell families listed in ALLOW_ERRORS.
            if not should_ignore(path, ALLOW_ERRORS):
                raise
            print("Failed to process ignorable:", path)
            traceback.print_exc()
| |
| |
if __name__ == "__main__":
    # Run the module's doctests (e.g. should_ignore) as a self-check
    # before processing the cell paths given on the command line.
    import doctest
    doctest.testmod()
    sys.exit(main(sys.argv[1:]))