| #!/usr/bin/env python3 |
| # Copyright 2020 The Skywater PDK Authors |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); |
| # you may not use this file except in compliance with the License. |
| # You may obtain a copy of the License at |
| # |
| # https://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| |
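"""Generate RST documentation pages for the SkyWater PDK cell libraries.

For every cell in the provided input-to-output file mapping this script
writes a README.rst page (description, schematic, Verilog includes, ...) and
creates per-library and per-version index pages.  Cell descriptions come
from the bundled TSV exports and, failing that, are extracted from the
original source files.

Usage: a positional ``output`` directory, one or more ``--mapping`` JSON
files, and an optional ``documentation`` directory (see ``--help``).
"""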
| |
| import json |
| import sys |
| import csv |
| import os |
| import re |
| import shutil |
| import common |
| from pathlib import Path |
| from collections import defaultdict |
| import argparse |
| |
| descriptions = defaultdict(list) |
| |
common_prefix = "skywater-pdk/libraries"
| |
| file_name = "README.rst" |
| |
| name_map = { |
| "sky130_fd_io": "SkyWater Foundry provided IO cells", |
| "sky130_fd_pr_base": "SkyWater Foundry Primitives", |
| "sky130_fd_pr_rf": "SkyWater Foundry Legacy RF Primitives", |
| "sky130_fd_pr_rf2": "SkyWater Foundry New RF Primitives", |
| "sky130_fd_sc_hd": "SkyWater Foundry High Density Standard Cells", |
| "sky130_fd_sc_hdll": "SkyWater Foundry High Density Low Leakage Standard Cells", |
| "sky130_fd_sc_hs": "SkyWater Foundry High Speed Standard Cells", |
| "sky130_fd_sc_hvl": "SkyWater Foundry High Voltage Standard Cells", |
| "sky130_fd_sc_ls": "SkyWater Foundry Low Voltage Low Speed Standard Cells", |
| "sky130_fd_sc_ms": "SkyWater Foundry Low Voltage High Speed Standard Cells", |
| "sky130_fd_sc_lp": "SkyWater Foundry Low Power Standard Cells", |
| } |
| |
| rst_header = "`` in :lib:``" |
| rst_underline_char = '~' |
| rst_template = """`{cell_name}` in :lib:`{lib_name}` |
| {underline} |
| |
| Description |
| *********** |
| |
| {description} |
| |
| Logic |
| ***** |
| |
| {logic} |
| |
| Module |
| ****** |
| |
| {module} |
| |
| Schematic |
| ********* |
| |
| {schematic} |
| |
| Blackbox Verilog |
| **************** |
| |
| {blackbox} |
| |
| Simplified Verilog |
| ****************** |
| |
| {simple} |
| |
| Drive Strength |
| ************** |
| |
| {drive_strength} |
| |
| Timing Models |
| ************* |
| |
| {timing_models} |
| |
| Layout |
| ****** |
| |
| {layout} |
| |
| Also found in |
| ************* |
| |
| {also_in} |
| |
| """ |
| |
| index_underline_char = '=' |
| library_header = ":lib:`` - " |
| library_template = """:lib:`{lib_name}` - {lib_description} |
| {underline} |
| |
| Contents |
| -------- |
| |
| .. toctree:: |
| |
| cells/README |
| release_notes |
| """ |
| |
| index_header = "Index of :lib:`` (version ) cells" |
| index_template = """Index of :lib:`{lib_name}` (version {version}) cells |
| {underline} |
| |
| .. toctree:: |
| :glob: |
| :maxdepth: 1 |
| :caption: :lib:`{lib_name}` cell index |
   :name: {lib_name}-v{version}-cells
| |
| **/README |
| """ |
| |
referenced_descriptions = "Sky130_Cell_short_description - All_cells.tsv"
referenced_desc_width = 3
manual_descriptions = "Sky130_Cell_short_description - Cells_w-o_description.tsv"
manual_desc_width = 9  # the 9th column (index 8) holds the description

description_match = "(?i)(Description ?:|Function :)"
| |
| def gen_image(file): |
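    """Return an RST image directive for *file*, or "TODO" when missing."""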
| if file is None: |
| return "TODO" |
| return f""" |
| .. image:: {file} |
| """ |
| |
| def gen_diagram(file): |
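    """Return an RST verilog-diagram (netlistsvg) directive for *file*, or "TODO"."""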
| if file is None: |
| return "TODO" |
| return f""" |
| .. verilog-diagram:: {file} |
| :type: netlistsvg |
| """ |
| |
| def gen_verilog(file): |
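    """Return an RST literalinclude directive for the Verilog *file*, or "TODO"."""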
| if file is None: |
| return "TODO" |
| return f""" |
| .. literalinclude:: {file} |
| :language: verilog |
| :linenos: |
| """ |
| |
| def calculate_underline(bare_header, item1, item2): |
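    """Length of the rendered header: the bare header skeleton plus the two substituted values."""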
| return len(item1) + len(item2) + len(bare_header) |
| |
| def extract_from_sources(cell_name, original_source): |
| """ |
| This is currently not used |
| """ |
| desc = extract_from_source(original_source) |
| if desc is None: |
| files = list(args.output.rglob('*' + cell_name + '*.v')) |
| print(f"No desc, looking for {cell_name}, found {len(files)}") |
| for file in files: |
| print(f"Looking at {file}") |
| desc = extract_from_source(file) |
| if desc is not None: |
| print("Found one!") |
| break |
| return desc |
| |
| |
| def extract_from_source(original_source): |
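    """Pull a cell description out of *original_source*.

    Looks for a "Description:" / "Function:" marker and collects the comment
    lines that follow it until a blank line, then normalises whitespace and
    sentence capitalisation.  Returns None if the file cannot be read or no
    marker is found.
    """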
| def cleanup_line(line): |
| line = line.lstrip("/") |
| line = line.strip() |
| line = re.sub(' +', ' ', line) |
| return line |
| |
| |
| have_match = False |
| description = "" |
| try: |
| with open(original_source) as file: |
| for line in file: |
| if not have_match: |
| m = re.search(description_match, line) |
| if m is not None: |
| have_match = True |
| line = line[m.end():] |
| line = cleanup_line(line) |
| description += line |
| else: |
| line = cleanup_line(line) |
| if len(line) == 0: |
| break |
| description += ' ' + line |
    except (OSError, UnicodeDecodeError):
        # source file missing or unreadable
        return None
| |
| if not have_match: |
| return None |
| |
    # split into sentences; re-capitalize if the source text was all one case
    need_caps = description.islower() or description.isupper()
    sentences = (i.strip() for i in description.split(". "))
    if need_caps:
        description = ".\n".join(i.capitalize() for i in sentences)
    else:
        description = ".\n".join(sentences)
    return description
| |
| def fixup_description(description): |
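    """Replace legacy library prefixes (e.g. s8iom0s8) with their converted sky130 names."""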
| lib_patterns = [ |
| "s8iom0s8", |
| ] |
| for pattern in lib_patterns: |
| description = description.replace(pattern, common.convert_libname(pattern) + "_") |
| return description |
| |
| def try_extract_description(cell_name, original_source): |
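    """Return a description for *cell_name*, preferring the cached TSV data
    and falling back to parsing *original_source*; a TODO placeholder is
    returned when neither yields anything."""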
    # strip variant suffixes so variants share the base cell's description
    for suffix in ('_m', '_lp', '_lp2'):
        if cell_name.endswith(suffix):
            cell_name = cell_name[:-len(suffix)]
| description = None |
| if cell_name in descriptions: |
| description = descriptions[cell_name] |
| if description is None: |
| description = extract_from_source(original_source) |
| #description = extract_from_sources(cell_name, original_source) |
| descriptions[cell_name] = description |
| if description is not None: |
| description = fixup_description(description) |
| return description |
| original_source = str(original_source) |
| print(f"No description found for {cell_name} ({original_source[original_source.find('skywater-src-nda'):]})") |
| return "TODO: no description found" |
| |
| def prepare_description_cache(): |
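    """Populate the description cache from the TSV exports.

    Manually written descriptions are read from the "Cells_w-o_description"
    sheet; cells listed in the "All_cells" sheet are recorded with a None
    placeholder so their description is later extracted from the sources.
    """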
| with open(manual_descriptions, newline='') as csvfile: |
| reader = csv.reader(csvfile, delimiter='\t') |
| for line in reader: |
| if len(line) < manual_desc_width: |
| continue |
| if len(line[manual_desc_width - 1].strip()) == 0: |
| continue |
| |
| descriptions[line[0]] = line[manual_desc_width - 1] |
| with open(referenced_descriptions, newline='') as csvfile: |
| reader = csv.reader(csvfile, delimiter='\t') |
| for line in reader: |
| if len(line) != referenced_desc_width: |
| continue |
| if line[0] in descriptions: |
| #todo: combine both descriptions? |
| continue |
| descriptions[line[0]] = None |
| |
| |
| def create_readme(k, v): |
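    """Write the README.rst for a single cell.

    *k* is the destination path inside the output tree and *v* the list of
    original source files mapped to it (exactly one is expected).  The cell
    and library names are recovered from the destination path.
    """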
    if len(v) > 1:
        raise Exception(f"multiple files mapping to {k}: {v}")
| |
| v = Path(v[0]) |
| destination = Path(k) |
| print(f"Destination {destination}") |
| dest_parts = destination.parts |
| found = False |
| |
    for i in reversed(range(len(dest_parts))):
        if dest_parts[i] in ("cells", "tech"):
            cell_name = dest_parts[i + 1]
            # the version directory sits between the library and the cells/tech dir
            lib_name = dest_parts[i - 2]
            found = True
            break
| |
| if not found: |
| raise Exception(destination) |
| |
| readme = destination.parent / file_name |
| |
| print(f"Creating {readme}") |
| description = try_extract_description(cell_name, v) |
| |
| modules = list(destination.parent.glob('*.module.png')) |
| module = None |
| if len(modules) > 0: |
| module = modules[0].name |
| |
| schs = list(destination.parent.glob('*.sch.png')) |
| sch = None |
| if len(schs) > 0: |
| sch = schs[0].name |
| |
| verilog = list(destination.parent.glob('*.v')) |
    blackbox = next((x for x in verilog if "blackbox.v" in x.name), None)
    simple = next((x for x in verilog if "simple.v" in x.name), None)
| |
| try: |
| with open(readme, "w") as r: |
| r.write(rst_template.format( |
| lib_name = lib_name, |
| cell_name = cell_name, |
| underline = rst_underline_char * calculate_underline(rst_header, cell_name, lib_name), |
| description = description, |
| schematic = gen_image(sch), |
| blackbox = gen_verilog(blackbox.name if blackbox else None), |
| module = gen_image(module), |
| simple = gen_verilog(simple.name if simple else None), |
| drive_strength = "TODO", |
| timing_models = "TODO", |
| layout = "TODO", |
| also_in = "TODO", |
| logic = "TODO", |
| )) |
| except FileNotFoundError: |
| print(f"ERROR: Failed to create README at {readme}", file=sys.stderr) |
| |
| |
| def create_index(): |
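    """Write the per-library README.rst and the per-version cell index page
    for every library found under the output tree."""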
| libraries = args.output / common_prefix |
| for library in libraries.iterdir(): |
| if not library.is_dir(): |
| continue |
| lib_name = library.name |
| lib_description = name_map[lib_name] |
| for version_path in library.iterdir(): |
| if not version_path.is_dir(): |
| continue |
| version = common.version_extract_from_path(str(version_path)) |
| if version is None: |
| continue |
            # anticipating the new version scheme: 0.(major*10 + minor).patch
            version = f"0.{version[0] * 10 + version[1]}.{version[2]}"
| |
| readme = version_path / "cells" / file_name |
| |
| print(f"Creating index RST file {readme}") |
| try: |
| with open(readme, "w") as r: |
| r.write(index_template.format( |
| lib_name = lib_name, |
| version = version, |
| underline = index_underline_char * calculate_underline(index_header, version, lib_name), |
| )) |
| except FileNotFoundError: |
| print(f"ERROR: Failed to create cell index at {readme}", file=sys.stderr) |
| |
| readme = version_path / file_name |
| |
| print(f"Creating index RST file {readme}") |
| try: |
| with open(readme, "w") as r: |
| r.write(library_template.format( |
| lib_name = lib_name, |
| lib_description = lib_description, |
| underline = index_underline_char * calculate_underline(library_header, lib_name, lib_description), |
| )) |
| except FileNotFoundError: |
| print(f"ERROR: Failed to create version index {readme}", file=sys.stderr) |
| |
| |
| def copy_docs(): |
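    """Copy the documentation tree into the output and mirror each library's
    release_notes.rst next to the corresponding library (skipping "fmlt")."""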
| docs_path = args.documentation / "docs" |
| base = args.output / "skywater-pdk" |
    os.system(f'cp -fr "{docs_path}" "{base}"')
| notes_docs = base / "docs" / "release_notes" |
| for notes in notes_docs.rglob("release_notes.rst"): |
| if "fmlt" in str(notes): |
| continue |
| subpath = notes.relative_to(notes_docs) |
| notes_path = base / subpath |
| notes_path.parent.mkdir(parents = True, exist_ok = True) |
| shutil.copy2(str(notes), str(notes_path)) |
| |
| if __name__ == "__main__": |
| parser = argparse.ArgumentParser() |
| parser.add_argument( |
| "output", |
| help="The path to the output directory", |
| type=Path) |
| |
| parser.add_argument( |
| "--mapping", |
| action="append", |
| help="output file mapping input to ouput", |
| type=Path) |
| |
| parser.add_argument( |
| "documentation", |
| help="documentation directory", |
| nargs="?", |
| type=Path) |
| args = parser.parse_args() |
| args.output = args.output.resolve() |
    if args.documentation is not None:
        args.documentation = args.documentation.resolve()
| prepare_description_cache() |
    good_mapping = {}
    fixup_mapping = {}
    for mapping in args.mapping or []:
        with open(mapping) as mapping_file:
            map_files = json.load(mapping_file)
| for k, v in map_files.items(): |
| if str(args.output.resolve()) not in v[0]: |
| good_mapping[k] = v |
| else: |
| fixup_mapping[k] = v |
| |
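    # Mapping entries whose recorded source lives inside the output tree are
    # "fixups": point them at the original source recorded for that output
    # file, then drop the intermediate output-tree keys.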
| for k, v in fixup_mapping.items(): |
| good_mapping[k] = good_mapping[v[0]] |
| for k, v in fixup_mapping.items(): |
| if v[0] in good_mapping: |
| del good_mapping[v[0]] |
| for k, v in good_mapping.items(): |
| create_readme(k, v) |
| create_index() |
| |
| # As decided, we are not automerging docs with scratch |
| #copy_docs() |
| |