More changes to support the JSON metadata file.

collect_metadata.py now records SHA-1 checksums of the important cell files,
extracts the signal/power port split from the Verilog models via hdlparse, and
writes a metadata.json into each cell directory. A new generate_drive_verilog.py
script reads that metadata.json and emits a Verilog file per drive strength.
The skywater_pdk dataclasses gain to_json() helpers and pass-through
dataclasses_json field encoders so they serialize cleanly.
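The pass-through encoder pattern added to skywater_pdk/utils.py boils down to
the following minimal, standalone sketch (Node and Lib are illustrative
stand-ins, not the real skywater_pdk classes):

    import dataclasses
    import dataclasses_json
    from dataclasses import dataclass
    from dataclasses_json import dataclass_json

    def passthru_field():
        # Field whose JSON encoder defers to the value's own to_json().
        return dataclasses.field(
            metadata=dataclasses_json.config(encoder=lambda x: x.to_json()))

    class Node(str):
        def to_json(self):
            return str(self)

    @dataclass_json
    @dataclass(frozen=True)
    class Lib:
        node: Node = passthru_field()

    assert Lib(Node("s8")).to_dict() == {"node": "s8"}

Corner uses the sequence variant so tuples of such values encode element by
element.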
diff --git a/scripts/python-skywater-pdk/collect_metadata.py b/scripts/python-skywater-pdk/collect_metadata.py
index 99a77da..3a9978a 100755
--- a/scripts/python-skywater-pdk/collect_metadata.py
+++ b/scripts/python-skywater-pdk/collect_metadata.py
@@ -9,13 +9,100 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+import hashlib
+import json
 import os
 import pprint
+import re
 import sys
 import traceback
 
 from skywater_pdk import base, corners, drives
 
+import hdlparse.verilog_parser as vlog
+
+copyright_header = """\
+// Copyright 2020 The Skywater PDK Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// SPDX-License-Identifier: Apache-2.0
+"""
+
+vlog_ex = vlog.VerilogExtractor()
+
+IMPORTANT = [
+    'cell.json',
+    'full.v',
+    'specify.v',
+    'gds',
+    'cdl',
+]
+
+IGNORE = [
+    re.compile('README.rst$'),
+    re.compile('metadata.json$'),
+    re.compile('wrap.json'),
+    re.compile('wrap.lib'),
+]
+ALLOW_ERRORS = [
+    re.compile('/pg_u_'),
+    re.compile('fill'),
+    re.compile('tap'),
+    re.compile('lpflow_'),
+]
+
+def should_ignore(f, x=IGNORE):
+    """
+    >>> should_ignore('README.rst')
+    True
+    >>> should_ignore('metadata.json')
+    True
+    >>> should_ignore('asdfasdfasdf/README.rst')
+    True
+    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/README.rst')
+    True
+    >>> should_ignore('/home/tim/gob/foss-eda-tools/skywater-pdk-scratch/skywater-pdk/libraries/sky130_fd_sc_hd/v0.0.1/cells/XXXX')
+    False
+
+    """
+    for i in x:
+        if i.search(f):
+            return True
+    return False
+
+
+def get_description(cellpath):
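+    """Return the README.rst text between the 'Description' and 'Logic' headings ('' if no README)."""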
+    readme_fn = os.path.join(cellpath, 'README.rst')
+    if not os.path.exists(readme_fn):
+        return ''
+    readme = open(readme_fn).read()
+
+    desc = """\
+Description
+***********
+"""
+    logic = """\
+Logic
+*****
+"""
+    assert desc in readme, readme
+    assert logic in readme, readme
+
+    _, readme = readme.split(desc, 1)
+    readme, _ = readme.split(logic, 1)
+    return readme.strip()
+
 
 def process(cellpath):
     assert os.path.exists(cellpath), cellpath
@@ -29,14 +116,17 @@
     dcell, fname = base.parse_pathname(cellpath)
     assert isinstance(dcell, base.Cell), (cellpath, dcell, fname)
     assert fname is None, (cellpath, dcell, fname)
+
     extensions = set()
     dcorners = set()
     ddrives = set()
+    checksums = {}
     errors = []
     for fname, fpath in files:
         print("Processing:", fname)
-        if fname in ('README.rst',):
+        if should_ignore(fpath):
             continue
+
         try:
             fcell, fextra, fext = base.parse_filename(fpath)
         except Exception as e:
@@ -44,6 +134,9 @@
             errors.append(e)
         assert isinstance(fcell, base.Cell), (fpath, fcell, fextra, ext)
 
+        if fext in IMPORTANT:
+            checksums[fname] = hashlib.sha1(open(fpath, 'rb').read()).hexdigest()
+
         extensions.add(fext)
 
         assert fcell.library == dcell.library, (fcell, dcell)
@@ -55,40 +148,118 @@
         except Exception as e:
             traceback.print_exc()
             errors.append(e)
-
-        try:
-            assert fcell.name.startswith(dcell.name), (fcell, dcell)
-            fdrive = fcell.name[len(dcell.name):]
-
-            ddrives.add(drives.parse_drive(fdrive))
-        except Exception as e:
-            traceback.print_exc()
-            errors.append(e)
-
         dcorners.add(fcorner)
 
-    dcorners = list(sorted(dcorners))
-    ddrives = list(sorted(ddrives))
+        assert fcell.name.startswith(dcell.name), (fcell, dcell)
+        if dcell.name != fcell.name:
+            try:
+                fdrive = fcell.name[len(dcell.name):]
 
+                ddrives.add(drives.parse_drive(fdrive))
+            except Exception as e:
+                traceback.print_exc()
+                errors.append(e)
+
+    basepath = cellpath.split("libraries", 1)[0]
+    cellrelpath = os.path.relpath(cellpath, basepath)
+    print(cellrelpath)
+
+    metadata = dcell.to_dict()
+    metadata['fullname'] = dcell.fullname
+    metadata['description'] = get_description(cellpath)
+
+    if 'blackbox.v' in extensions:
+        bbv_fname = os.path.join(cellpath, "{}.blackbox.v".format(dcell.fullname))
+        assert os.path.exists(bbv_fname), bbv_fname
+        o = vlog_ex.extract_objects(bbv_fname)
+        assert len(o) == 1, o
+        o = o[0]
+        assert dcell.fullname in o.name, (dcell.fullname, o)
+        assert not o.generics, (dcell.fullname, o)
+        #metadata['all_ports'] = [(p.name, p.mode, p.data_type) for p in o.ports]
+
+    if 'full.v' in extensions:
+        full_fname = os.path.join(cellpath, "{}.full.v".format(dcell.fullname))
+        assert os.path.exists(full_fname), full_fname
+        o = vlog_ex.extract_objects(full_fname)
+        if not o:
+            simple_fname = os.path.join(cellpath, "{}.simple.v".format(dcell.fullname))
+            assert os.path.exists(simple_fname), simple_fname
+            o = vlog_ex.extract_objects(simple_fname)
+        assert len(o) == 1, o
+        o = o[0]
+        assert dcell.fullname in o.name, (dcell.fullname, o)
+        assert not o.generics, (dcell.fullname, o)
+        non_pwr = []
+        pwr = []
+
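+        # hdlparse reports the `ifdef SC_USE_PG_PIN ... `endif guard as extra
+        # port entries; ports seen before the guard are signal pins, ports
+        # seen inside it are power pins.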
+        current_list = non_pwr
+        p = list(o.ports)
+        while len(p) > 0:
+            a = p.pop(0)
+            if a.name == 'ifdef':
+                assert len(p) > 2, p
+                pg_pin = p.pop(0)
+                assert 'SC_USE_PG_PIN' == pg_pin.name, pg_pin
+                current_list = pwr
+                continue
+            elif a.name == 'endif':
+                assert len(p) == 0, p
+                break
+            else:
+                current_list.append((a.name, a.mode))
+        metadata['ports'] = {
+            'signal': non_pwr,
+            'power': pwr,
+        }
+
+    extensions.add('metadata.json')
+
+    assert checksums
+    metadata['files'] = checksums
+    if dcorners:
+        metadata['corners'] = [d.to_dict() for d in sorted(dcorners)]
+    else:
+        errors.append('Missing corners for: {}\n'.format(cellpath))
+
+    assert extensions
+    metadata['extensions'] = list(sorted(extensions))
+
+    if ddrives:
+        metadata['drives'] = [d.to_dict() for d in sorted(ddrives)]
+
+    # Save the metadata file.
+    with open(os.path.join(cellpath, 'metadata.json'), 'w') as f:
+        json.dump(metadata, f, sort_keys=True, indent="   ")
+
+    # Print a summary of the collected metadata.
     print()
-    print(cellpath)
-    print('-'*75)
-    print('Cell:', dcell)
-    print('Cell drives:', ddrives)
-    print('Cell corners:')
-    pprint.pprint(dcorners)
-    print('File types:', extensions)
+    print()
+    print(dcell.name)
+    print("-"*75)
+    pprint.pprint(metadata)
+
     if errors:
-        raise ValueError("\n".join(errors))
+        raise ValueError("\n".join(str(e) for e in errors))
 
 
 def main(args):
     for a in args:
         print()
         print()
-        process(os.path.abspath(a))
-
+        p = os.path.abspath(a)
+        if should_ignore(p):
+            continue
+        try:
+            process(p)
+        except Exception as e:
+            if not should_ignore(p, ALLOW_ERRORS):
+                raise
+            print("Failed to process ignorable:", p)
+            traceback.print_exc()
 
 
 if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
     sys.exit(main(sys.argv[1:]))
diff --git a/scripts/python-skywater-pdk/generate_drive_verilog.py b/scripts/python-skywater-pdk/generate_drive_verilog.py
new file mode 100755
index 0000000..f4de732
--- /dev/null
+++ b/scripts/python-skywater-pdk/generate_drive_verilog.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 The SkyWater PDK Authors.
+#
+# Use of this source code is governed by the Apache 2.0
+# license that can be found in the LICENSE file or at
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+
+
+import os
+import sys
+import json
+
+
+copyright_header = """\
+// Copyright 2020 The Skywater PDK Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// SPDX-License-Identifier: Apache-2.0
+"""
+
+include_header = """\
+`ifndef VERILOG_MODE_SIMPLE
+`ifndef VERILOG_MODE_BLACKBOX
+`ifndef VERILOG_MODE_FULL
+// No VERILOG_MODE_XXX defined, fallback to VERILOG_MODE_SIMPLE
+`define VERILOG_MODE_SIMPLE 1
+`endif
+`endif
+`endif
+
+`ifdef VERILOG_MODE_SIMPLE
+`include "{0}.simple.v"
+`endif
+
+`ifdef VERILOG_MODE_BLACKBOX
+`include "{0}.blackbox.v"
+`endif
+
+`ifdef VERILOG_MODE_FULL
+`include "{0}.full.v"
+`endif
+"""
+
+module_header = """\
+//# {library_name}: {cell_name} ({drive_value} {drive_name})
+//# {description}
+module {module_base_name}_{drive_value} (
+    {module_signal_defports}
+`ifdef SC_USE_PG_PIN
+    {module_power_defports}
+`endif
+);
+
+    {module_base_name} base (
+        {module_signal_ports}
+`ifdef SC_USE_PG_PIN
+        {module_power_ports}
+`endif
+    );
+
+endmodule
+"""
+
+def write_verilog(mdata, outfile, drive=None):
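+    """Write the license header and mode-select `include block to outfile;
+    if a drive is given, also emit a sized wrapper that instantiates the base cell."""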
+    with open(outfile, 'w') as f:
+        f.write(copyright_header)
+        f.write('\n')
+        f.write(include_header.format(mdata['fullname']))
+        f.write('\n')
+        if not drive:
+            return
+        drive_name, drive_value = drive
+
+        module_signal_defports = []
+        module_signal_ports = []
+        for pname, ptype in mdata['ports']['signal']:
+            module_signal_defports.append("{} {}, ".format(ptype, pname))
+            module_signal_ports.append(pname)
+
+        module_signal_defports = "".join(module_signal_defports)
+        assert module_signal_defports.endswith(", "), module_signal_defports
+        module_signal_defports = module_signal_defports[:-2]
+        module_signal_ports = ", ".join(module_signal_ports)
+
+        module_power_defports = []
+        module_power_ports = []
+        for pname, ptype in mdata['ports']['power']:
+            module_power_defports.append(", {} {}".format('input', pname))
+            module_power_ports.append(", {}".format(pname))
+        module_power_defports = "".join(module_power_defports)
+        module_power_ports = "".join(module_power_ports)
+
+        library_name = "{} {}".format(
+            mdata['library']['name'].upper(), mdata['library']['type'])
+
+        f.write(module_header.format(
+            module_base_name = mdata['fullname'],
+            cell_name = mdata['name'],
+            library_name = library_name,
+            drive_name = drive_name,
+            drive_value = drive_value,
+            description = mdata.get('description', ''),
+            module_signal_defports = module_signal_defports,
+            module_signal_ports = module_signal_ports,
+            module_power_defports = module_power_defports,
+            module_power_ports = module_power_ports,
+        ))
+
+def echo_file(fname):
+    with open(fname) as f:
+        sys.stdout.write('\n')
+        sys.stdout.write('File: ')
+        sys.stdout.write(fname)
+        sys.stdout.write('\n')
+        sys.stdout.write('------\n')
+        sys.stdout.write(f.read())
+        sys.stdout.write('------\n')
+        sys.stdout.flush()
+
+
+
+def process(cellpath):
+    md_json = os.path.join(cellpath, 'metadata.json')
+    if not os.path.exists(md_json):
+        return
+    mdata = json.load(open(md_json))
+    drives = mdata.get('drives', [])
+    for d in drives:
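+        # Each entry is a single-key dict mapping the drive kind to its value
+        # (e.g. {'units': 1}).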
+        assert len(d) == 1, d
+        drive_name = list(d.keys())[0]
+        drive_value = list(d.values())[0]
+        dvfile = os.path.join(cellpath, "{}_{}.v".format(mdata['fullname'], drive_value))
+        write_verilog(mdata, dvfile, list(d.items())[0])
+        echo_file(dvfile)
+
+    if not drives:
+        outfile = os.path.join(cellpath, "{}.v".format(mdata['fullname']))
+        write_verilog(mdata, outfile)
+        echo_file(outfile)
+
+
+def main(args):
+    for a in args:
+        process(a)
+
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/scripts/python-skywater-pdk/skywater_pdk/base.py b/scripts/python-skywater-pdk/skywater_pdk/base.py
index 0b2e205..07b4708 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/base.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/base.py
@@ -17,6 +17,7 @@
 from typing import Optional, Union, Tuple
 
 from .utils import comparable_to_none
+from .utils import dataclass_json_passthru_config as dj_pass_cfg
 
 
 LibraryOrCell = Union['Library', 'Cell']
@@ -317,6 +318,9 @@
     def __repr__(self):
         return "LibraryNode."+self.name
 
+    def to_json(self):
+        return self.name
+
 
 class LibrarySource(str):
     """Where a library was created."""
@@ -339,6 +343,11 @@
     def __repr__(self):
         return 'LibrarySource({})'.format(str.__repr__(self))
 
+    def to_json(self):
+        if self in self.Known:
+            return self.__doc__
+        return str.__repr__(self)
+
 
 Foundary = LibrarySource("fd")
 Foundary.__doc__ = "The SkyWater Foundary"
@@ -372,6 +381,9 @@
     def __str__(self):
         return self.value
 
+    def to_json(self):
+        return self.value
+
 
 @comparable_to_none
 @dataclass_json
@@ -404,9 +416,9 @@
 
     """
 
-    node: LibraryNode
-    source: LibrarySource
-    type: LibraryType
+    node: LibraryNode = dj_pass_cfg()
+    source: LibrarySource = dj_pass_cfg()
+    type: LibraryType = dj_pass_cfg()
     name: str = ''
     version: Optional[LibraryVersion] = None
 
diff --git a/scripts/python-skywater-pdk/skywater_pdk/corners.py b/scripts/python-skywater-pdk/skywater_pdk/corners.py
index c46dba1..d8108ae 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/corners.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/corners.py
@@ -20,6 +20,7 @@
 from . import base
 from .utils import OrderedFlag
 from .utils import comparable_to_none
+from .utils import dataclass_json_passthru_sequence_config as dj_pass_cfg
 
 
 CornerTypeMappings = {}
@@ -68,6 +69,9 @@
     def __str__(self):
         return self.value
 
+    def to_json(self):
+        return self.name
+
 
 class CornerFlag(OrderedFlag):
     nointpr = 'No internal power'
@@ -88,6 +92,9 @@
     def __str__(self):
         return self.value
 
+    def to_json(self):
+        return self.name
+
 
 @comparable_to_none
 class OptionalTuple(tuple):
@@ -98,10 +105,10 @@
 @dataclass_json
 @dataclass(frozen=True, order=True)
 class Corner:
-    corner: Tuple[CornerType, CornerType]
-    volts: Tuple[float, ...]
-    temps: Tuple[int, ...]
-    flags: Optional[Tuple[CornerFlag, ...]] = None
+    corner: Tuple[CornerType, CornerType] = dj_pass_cfg()
+    volts: Tuple[float, ...] = dj_pass_cfg()
+    temps: Tuple[int, ...] = dj_pass_cfg()
+    flags: Optional[Tuple[CornerFlag, ...]] = dj_pass_cfg(default=None)
 
     def __post_init__(self):
         if self.flags:
diff --git a/scripts/python-skywater-pdk/skywater_pdk/drives.py b/scripts/python-skywater-pdk/skywater_pdk/drives.py
index 1b6b5ac..1cba416 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/drives.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/drives.py
@@ -136,9 +136,11 @@
     """
     units: int
 
+    VALID_UNIT_VALUES = (0, 1, 2, 4)
+
     def describe(self):
         suffix = ""
-        if self.units not in (1, 2, 4):
+        if self.units not in self.VALID_UNIT_VALUES:
             suffix = " (invalid?)"
 
         return "{} units{}".format(self.units, suffix)
@@ -152,7 +154,7 @@
         if not s.startswith("_"):
             raise InvalidSuffixError(s)
         i = int(s[1:])
-        if i <= 0:
+        if i < 0:
             raise InvalidSuffixError(s)
         return cls(i)
 
diff --git a/scripts/python-skywater-pdk/skywater_pdk/utils.py b/scripts/python-skywater-pdk/skywater_pdk/utils.py
index d3752b2..cc8bfc0 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/utils.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/utils.py
@@ -10,14 +10,49 @@
 # SPDX-License-Identifier: Apache-2.0
 
 import dataclasses
+import dataclasses_json
 import random
 import sys
 
 from dataclasses import dataclass
+from dataclasses_json import dataclass_json
 from enum import Flag
 from typing import Optional, Tuple, Any
 
 
+def dataclass_json_passthru_config(*args, **kw):
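+    """dataclasses.field() whose dataclasses_json encoder defers to the value's own to_json()."""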
+    return dataclasses.field(
+        *args,
+        metadata=dataclasses_json.config(
+            encoder=lambda x: x.to_json(),
+            #decoder=lambda x: x.from_json(),
+        ),
+        **kw,
+    )
+
+def dataclass_json_passthru_sequence_config(*args, **kw):
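+    """Like dataclass_json_passthru_config, but encodes each item of a sequence via its to_json() when available."""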
+    def to_json_sequence(s):
+        if s is None:
+            return None
+        o = []
+        for i in s:
+            if hasattr(i, 'to_json'):
+                o.append(i.to_json())
+            else:
+                o.append(i)
+        return o
+
+    return dataclasses.field(
+        *args,
+        metadata=dataclasses_json.config(
+            encoder=to_json_sequence,
+            #decoder=lambda x: x.from_json(),
+        ),
+        **kw,
+    )
+
+
+
 def comparable_to_none(cls):
     """