Commit all pending changes so they are ready to push.
diff --git a/scripts/python-skywater-pdk/generate_metadata_json.py b/scripts/python-skywater-pdk/generate_metadata_json.py
index 3a9978a..e057762 100755
--- a/scripts/python-skywater-pdk/generate_metadata_json.py
+++ b/scripts/python-skywater-pdk/generate_metadata_json.py
@@ -47,19 +47,32 @@
     'specify.v',
     'gds',
     'cdl',
+    'sp',
 ]
 
 IGNORE = [
     re.compile('README.rst$'),
+    re.compile('.Cat$'),
     re.compile('metadata.json$'),
     re.compile('wrap.json'),
     re.compile('wrap.lib'),
+    re.compile('define_functional'),
 ]
 ALLOW_ERRORS = [
     re.compile('/pg_u_'),
+    re.compile('/udb_pg_u_'),
+    re.compile('/udb_u_'),
+    re.compile('/u_'),
+    re.compile('_pg$'),
     re.compile('fill'),
     re.compile('tap'),
+    re.compile('decap'),
+    re.compile('diode'),
+    re.compile('probe'),
+    re.compile('libcell'),
     re.compile('lpflow_'),
+    re.compile('macro_sparecell'),
+    re.compile('macro_sync'),
 ]
 
 def should_ignore(f, x=IGNORE):
@@ -123,7 +136,6 @@
     checksums = {}
     errors = []
     for fname, fpath in files:
-        print("Processing:", fname)
         if should_ignore(fpath):
             continue
 
@@ -143,12 +155,15 @@
         if not fextra:
             continue
 
+        if fextra.startswith('extracted') or 'spectre' in fextra:
+            continue
+
         try:
             fcorner = corners.parse_filename(fextra)
+            dcorners.add(fcorner)
         except Exception as e:
             traceback.print_exc()
-            errors.append(e)
-        dcorners.add(fcorner)
+            errors.append('Invalid corner: {} -- {} (from {})'.format(e, fextra, fpath))
 
         assert fcell.name.startswith(dcell.name), (fcell, dcell)
         if dcell.name != fcell.name:
@@ -158,65 +173,62 @@
                 ddrives.add(drives.parse_drive(fdrive))
             except Exception as e:
                 traceback.print_exc()
-                errors.append(e)
+                errors.append('Invalid drive: {} -- {} (from {})'.format(e, fdrive, fpath))
 
     basepath = cellpath.split("libraries", 1)[0]
     cellrelpath = os.path.relpath(cellpath, basepath)
-    print(cellrelpath)
 
     metadata = dcell.to_dict()
     metadata['fullname'] = dcell.fullname
     metadata['description'] = get_description(cellpath)
 
-    if 'blackbox.v' in extensions:
-        bbv_fname = os.path.join(cellpath, "{}.blackbox.v".format(dcell.fullname))
-        assert os.path.exists(bbv_fname), bbv_fname
-        o = vlog_ex.extract_objects(bbv_fname)
-        assert len(o) == 1, o
-        o = o[0]
-        assert dcell.fullname in o.name, (dcell.fullname, o)
-        assert not o.generics, (dcell.fullname, o)
-        #metadata['all_ports'] = [(p.name, p.mode, p.data_type) for p in o.ports]
-
     if 'full.v' in extensions:
-        full_fname = os.path.join(cellpath, "{}.full.v".format(dcell.fullname))
-        assert os.path.exists(full_fname), full_fname
-        o = vlog_ex.extract_objects(full_fname)
-        if not o:
-            simple_fname = os.path.join(cellpath, "{}.simple.v".format(dcell.fullname))
-            assert os.path.exists(simple_fname), simple_fname
-            o = vlog_ex.extract_objects(simple_fname)
-        assert len(o) == 1, o
-        o = o[0]
-        assert dcell.fullname in o.name, (dcell.fullname, o)
-        assert not o.generics, (dcell.fullname, o)
-        non_pwr = []
-        pwr = []
-
-        current_list = non_pwr
-        p = list(o.ports)
-        while len(p) > 0:
-            a = p.pop(0)
-            if a.name == 'ifdef':
-                assert len(p) > 2, p
-                pg_pin = p.pop(0)
-                assert 'SC_USE_PG_PIN' == pg_pin.name, pg_pin
-                current_list = pwr
+        o = None
+        for ext in ['full.v', 'simple.v']:
+            fname = os.path.join(cellpath, "{}.{}".format(dcell.fullname, ext))
+            if not os.path.exists(fname):
+                errors.append("Missing {} file".format(fname))
                 continue
-            elif a.name == 'endif':
-                assert len(p) == 0, p
-                break
-            else:
-                current_list.append((a.name, a.mode))
-        metadata['ports'] = {
-            'signal': non_pwr,
-            'power': pwr,
-        }
+            o = vlog_ex.extract_objects(fname)
+            if not o or len(o) != 1:
+                errors.append("Invalid {} file ({})".format(fname, o))
+                continue
+            o = o[0]
+            if not o:
+                errors.append("Invalid {} file ({})".format(fname, o))
+            break
+        if o:
+            assert dcell.fullname in o.name, (dcell.fullname, o)
+            assert not o.generics, (dcell.fullname, o)
+            non_pwr = []
+            pwr = []
+
+            current_list = non_pwr
+            p = list(o.ports)
+            while len(p) > 0:
+                a = p.pop(0)
+                if a.name == 'ifdef':
+                    assert len(p) > 2, p
+                    pg_pin = p.pop(0)
+                    assert 'SC_USE_PG_PIN' == pg_pin.name, pg_pin
+                    current_list = pwr
+                    continue
+                elif a.name == 'endif':
+                    assert len(p) == 0, p
+                    break
+                else:
+                    current_list.append((a.name, a.mode))
+            metadata['ports'] = {
+                'signal': non_pwr,
+                'power': pwr,
+            }
 
     extensions.add('metadata.json')
 
-    assert checksums
-    metadata['files'] = checksums
+    if not checksums:
+        errors.append('No important files for {}: {}'.format(cellpath, files))
+    else:
+        metadata['files'] = checksums
     if dcorners:
         metadata['corners'] = [d.to_dict() for d in sorted(dcorners)]
     else:
@@ -229,15 +241,10 @@
         metadata['drives'] = [d.to_dict() for d in sorted(ddrives)]
 
     # Save the metadata file.
-    with open(os.path.join(cellpath, 'metadata.json'), 'w') as f:
+    mdata_file = os.path.join(cellpath, 'metadata.json')
+    with open(mdata_file, 'w') as f:
         json.dump(metadata, f, sort_keys=True, indent="   ")
-
-    # Create verilog files for each drive strength
-    print()
-    print()
-    print(dcell.name)
-    print("-"*75)
-    pprint.pprint(metadata)
+    print("Wrote:", mdata_file)
 
     if errors:
         raise ValueError("\n".join(str(e) for e in errors))
@@ -245,8 +252,6 @@
 
 def main(args):
     for a in args:
-        print()
-        print()
         p = os.path.abspath(a)
         if should_ignore(p):
             continue
diff --git a/scripts/python-skywater-pdk/skywater_pdk/corners.py b/scripts/python-skywater-pdk/skywater_pdk/corners.py
index d8108ae..184218b 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/corners.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/corners.py
@@ -76,8 +76,11 @@
 class CornerFlag(OrderedFlag):
     nointpr = 'No internal power'
     lv = 'Low voltage'
+    hv = 'High voltage'
     ccsnoise = 'Composite Current Source Noise'
     pwr = 'Power'
+    xx = 'xx'
+    w = 'w'
 
     @classmethod
     def parse(cls, s):
@@ -165,6 +168,10 @@
         extra = cell.name
         cell = None
 
+    # FIXME: Hack?
+    extra = extra.replace("lpflow_","")
+    extra = extra.replace("udb_","")
+
     kw = {}
     kw['flags'] = []
     kw['volts'] = []
diff --git a/scripts/python-skywater-pdk/skywater_pdk/drives.py b/scripts/python-skywater-pdk/skywater_pdk/drives.py
index 1cba416..b26c2d4 100644
--- a/scripts/python-skywater-pdk/skywater_pdk/drives.py
+++ b/scripts/python-skywater-pdk/skywater_pdk/drives.py
@@ -105,10 +105,10 @@
     >>> s1 = DriveStrengthNumeric.from_suffix("_1")
     >>> s2 = DriveStrengthNumeric.from_suffix("_2")
     >>> s3 = DriveStrengthNumeric.from_suffix("_3")
-    >>> DriveStrengthNumeric.from_suffix("_0")
+    >>> DriveStrengthNumeric.from_suffix("_-1")
     Traceback (most recent call last):
         ...
-    InvalidSuffixError: Invalid suffix: _0
+    InvalidSuffixError: Invalid suffix: _-1
     >>> s1
     DriveStrengthNumeric(units=1)
     >>> s2
@@ -247,7 +247,16 @@
     'minimum'
     >>> m.suffix
     '_m'
+
+    >>> m1 = DriveStrengthMinimum()
+    >>> m2 = DriveStrengthMinimum()
+    >>> assert m1 is m2
     """
+    _object = None
+    def __new__(cls):
+        if cls._object is None:
+            cls._object = object.__new__(cls)
+        return cls._object
 
     def __repr__(self):
         return "DriveStrengthMinimum()"
@@ -265,6 +274,11 @@
             raise InvalidSuffixError(s)
         return cls()
 
+    def __hash__(self):
+        return id(self)
+
+DriveStrengthMinimum._object = DriveStrengthMinimum()
+
 
 if __name__ == "__main__":
     import doctest