#!/usr/bin/env python3
# Copyright 2020 The Skywater PDK Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import csv
import functools
import json
import os
import pprint
import re
import subprocess
import sys
import textwrap
from pathlib import Path
from collections import defaultdict
import hdlparse.verilog_parser as vlog
from primitive_map import routmap as primitive_routmap
import common
from common import (
    lib_extract_from_name,
    extract_version_and_lib_from_path,
    copy_file_to_output,

    convert_libname,
    convert_cell_fullname,
    convert_pinname,
)
debug = True
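# Matches the three top level constructs found in the source Verilog:
#  * `celldefine ... `endcelldefine blocks (contents captured as 'cell_content'),
#  * primitive ... endprimitive blocks (user defined primitives / UDPs),
#  * module ... endmodule blocks, optionally preceded by an (* attribute *).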
RE_BITS = re.compile(
r'(?P<cell>(\s*)`celldefine(?P<cell_content>.*?)`endcelldefine)|'
r'(?P<prim>(\s*)primitive(?P<prim_name>[^(]+).*?endprimitive)|'
r'(?P<mod>(\(\*[^*]*\*\))?(\s*)module(?P<mod_name>[^(]+).*?endmodule)', re.DOTALL|re.I)
class IfDef:
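    """An `ifdef / `ifndef ... `else ... `endif block parsed from the Verilog source.

    The true and false paths hold the (possibly nested) statements guarded by
    the flag; `invert` records whether the directive was an `ifndef.
    """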
def __init__(self):
self.invert = False
self.flag = ''
self.ifdef = ''
self._true_path = []
self.ifelse = ''
self._false_path = []
self.ifend = ''
def clone(self):
c = IfDef()
c.invert = self.invert
c.flag = self.flag
c.ifdef = self.ifdef
c._true_path = list(self._true_path)
c.ifelse = self.ifelse
c._false_path = list(self._false_path)
c.ifend = self.ifend
return c
def set_ifdef(self, s):
s = s.strip()
if s.startswith('`ifdef '):
s = s[7:].strip()
elif s.startswith('`ifndef '):
self.invert = True
s = s[8:].strip()
else:
raise ValueError('Invalid:'+s)
flag = s
extra = ''
if '//' in s:
flag, extra = s.split('//', 1)
self.flag = flag.strip()
self.ifdef = extra.strip()
@property
def true_path(self):
if self.invert:
return self._false_path
else:
return self._true_path
@property
def false_path(self):
if self.invert:
return self._true_path
else:
return self._false_path
@property
def name(self):
names = ['IfDef', 'IfNotDef']
if self.invert:
names = list(reversed(names))
if not self._true_path and self._false_path:
return names[-1]
return names[0]
@property
def elname(self):
if self.true_path:
return 'Else'
else:
return ''
def __repr__(self):
o = ['{}({})'.format(self.name,self.flag)]
if self.true_path:
o.append(str(self.true_path))
o.append(self.elname)
if self.false_path:
o.append(str(self.false_path))
return ''.join(o)
def __str__(self):
o = ['{}({})'.format(self.name, self.flag)]
if self.ifdef:
o[-1] += ' //'+self.ifdef
if self.true_path:
for i in self.true_path:
for l in str(i).splitlines():
o.append(' '+l)
if self.true_path and self.false_path:
o.append('Else')
if self.ifelse:
o[-1] += ' //'+self.ifelse
if self.false_path:
for i in self.false_path:
for l in str(i).splitlines():
o.append(' '+l)
o.append('End{}({})'.format(self.name, self.flag))
if self.ifend:
o[-1] += ' //'+self.ifend
return '\n'.join(o)
class Specify(list):
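    """A specify ... endspecify block, stored as the list of its source lines."""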
def __repr__(self):
return 'Specify'+list.__repr__(self)
def __str__(self):
o = []
assert len(self) > 2, repr(self)
o.append(self[0])
for v in specify_align(self[1:-1]):
for l in v.splitlines():
o.append(' '+l)
o.append(self[-1])
return '\n'.join(o)
# if ((!A1&!A2&!C1&!D1)) (B1 -=> Y) = (0:0:0,0:0:0);
# (.*)\\((\w+)\s(-=>)\s*(\w))\s*(=)\s*(\\([^)]*\\))\s*;
def specify_align(lines):
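    """Align the columns of the timing statements inside a specify block.

    Each line is split on whitespace (with an empty leading column added for
    lines without an 'if' condition) and every column is padded to the width
    of its widest entry.
    """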
split_lines = []
for l in lines:
bits = l.split()
if not bits[0].startswith('if'):
bits.insert(0, '')
split_lines.append(bits)
slen = max(len(l) for l in split_lines)
maxlen = []
for i in range(0, slen):
lengths = []
for l in split_lines:
if len(l) > i:
lengths.append(len(l[i]))
maxlen.append(max(lengths)+1)
output = []
for l in split_lines:
output.append(''.join(s.ljust(maxlen[i]) for i, s in enumerate(l)))
return output
def get_flags(l):
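    """Recursively collect the set of `ifdef flag names used in a parsed block."""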
flags = []
for o in l:
if isinstance(o, IfDef):
flags.extend(get_flags(o.true_path))
flags.extend(get_flags(o.false_path))
flags.append(o.flag)
assert not o.ifelse, str(o)
assert not o.ifend, str(o)
return set(flags)
def process_ifdef(content):
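    """Parse Verilog text into a nested structure of plain lines, IfDef and Specify objects."""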
output = []
stack = [output]
for l in content.splitlines():
l = l.strip()
if not l:
continue
if l.startswith('`if'):
new_def = IfDef()
new_def.set_ifdef(l)
stack.append(new_def)
stack.append(new_def.true_path)
elif l.startswith('`else'):
stack.pop(-1)
stack[-1].ifelse = l[5:].strip()
stack.append(stack[-1].false_path)
elif l.startswith('`endif'):
stack.pop(-1)
ifdef = stack.pop(-1)
ifdef.ifend = l[6:].strip()
stack[-1].append(ifdef)
elif l.startswith('specify'):
s = Specify()
s.append(l)
stack.append(s)
elif l.startswith('endspecify'):
stack[-1].append(l)
s = stack.pop(-1)
assert isinstance(s, Specify), s
stack[-1].append(s)
else:
stack[-1].append(l)
assert len(stack) == 1, stack
return output
def strip_stupid_headers(header):
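    """Drop comment ('//') and compiler directive ('`') lines, keeping any real content."""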
o = []
for s in header.splitlines():
s = s.strip()
if s.startswith('//'):
continue
if s.startswith('`'):
continue
o.append(s)
return '\n'.join(o).strip()
def output_ifdef(l, flags={}, include_specify=True):
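    """Resolve the IfDef blocks whose flag appears in `flags`, keeping only the selected branch.

    IfDefs with unknown flags are kept, with their branches recursively
    resolved. Specify blocks are dropped when include_specify is False.
    """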
output = []
for o in l:
        if isinstance(o, Specify) and not include_specify:
            # Specify blocks are written out separately, so drop them here.
            continue
if isinstance(o, IfDef):
if o.flag in flags:
if flags[o.flag]:
output.extend(output_ifdef(o.true_path, flags, include_specify))
else:
output.extend(output_ifdef(o.false_path, flags, include_specify))
else:
n = o.clone()
n._true_path = output_ifdef(n._true_path, flags)
n._false_path = output_ifdef(n._false_path, flags)
output.append(n)
else:
output.append(o)
return output
def get_specify(l):
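    """Recursively collect every Specify block from a parsed block."""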
output = []
for o in l:
if isinstance(o, Specify):
output.append(o)
elif isinstance(o, IfDef):
output.extend(get_specify(o.true_path))
output.extend(get_specify(o.false_path))
return output
def decode_name(name):
d = {
'l': 'Low Power Variant',
'P': 'Positive Edge Clock / Gate',
'N': 'Negative Edge Clock / Gate',
'S': 'Has Set',
'R': 'Has Reset',
'E': 'Has Data Enable',
        'RS': 'Set and Reset (with Reset dominant)',
        'SR': 'Set and Reset (with Set dominant)',
        'Sa': 'Async Set',
}
pp = {
# Supplies
'P': ('Power', 'VPWR' ),
'G': ('Ground', 'VGND' ),
'K': ('Keep Alive Power', 'KAPWR' ),
'L': ('Low Voltage Power', 'LVPWR' ),
# Signals
'S': ('SLEEP on High', 'SLEEP' ),
's': ('SLEEP on Low', 'SLEEP_B'),
'N': ('Notifier', '' ),
'rN': ('Registered Notifier', '' ),
}
def rewrite_primitive_name(name):
assert name in primitive_routmap
return primitive_routmap[name]
def rewrite_module_name(name):
assert name.startswith('scs8'), name
old_lib, modname = lib_extract_from_name(name)
MISSING_HEADERS = {
#scs8ls_pg_U_DFB
'U_DFB' : ['S', 'R', 'CK', 'D', 'Qt', 'Qt+1', 'Comments'],
#U_DFB_SETDOM_pg -- scs8hs
#scs8lpa_U_DFB_SETDOM
#scs8ms_pg_U_DFB_SETDOM
#scs8ls_pg_U_DFB_SETDOM
#scs8hd_pg_U_DFB_SETDOM
#scs8hdll_pg_U_DFB_SETDOM
'U_DFB_SETDOM' : ['S', 'R', 'CK', 'D', 'Qt', 'Qt+1', 'Comments'],
'U_DFB_SETDOM_pg' : ['S', 'R', 'CK', 'D', 'VPWR', 'VGND', 'Qt', 'Qt+1', 'Comments'],
#U_DFB_SETDOM_notify_pg -- scs8hs
#scs8lpa_U_DFB_SETDOM_NO_pg
#scs8ms_pg_U_DFB_SETDOM_NO_pg
#scs8ls_pg_U_DFB_SETDOM_NO_pg
#scs8hd_pg_U_DFB_SETDOM_NO_pg
#scs8hdll_pg_U_DFB_SETDOM_NO_pg
#scs8ls_pg_U_DFB_SETDOM_NO_pg
'U_DFB_SETDOM_NO_pg': ['S', 'R', 'CK', 'D', 'NOTIFIER', 'VPWR', 'VGND', 'Qt', 'Qt+1', 'Comments'],
}
RE_COMMENTS = re.compile('//.*$', flags=re.M)
def process_primitive(final_dir, temp_dir, new_lib, version, metadata, active_header, otype, oname, odata):
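    """Convert one Verilog user defined primitive (UDP) into output artifacts.

    Extracts and cleans up the truth table, rewrites the primitive and pin
    names, then writes a definition.json and a .table.tsv file into the
    output tree.
    """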
flags = get_flags(odata)
ostr = '\n'.join(str(l) for l in odata)
for f in flags:
assert f in ['functional'], (flags, otype, ostr)
assert 'table' in ostr, ostr
assert 'endtable' in ostr, ostr
tstart = ostr.find('table')
tend = ostr.rfind('endtable')+len('endtable')
nontable_data = ostr[:tstart]+ostr[tend:]
table_data = ostr[tstart:tend].splitlines()
assert table_data[0] == 'table', table_data
table_data.pop(0)
assert table_data[-1] == 'endtable', table_data
table_data.pop(-1)
if table_data[0].startswith('//'):
table_data[0] = table_data[0][2:].strip() + '// Header'
table_data_out = []
for t in table_data:
if not t.strip():
continue
if '//' in t:
t, cmt = t.split('//', 1)
t = t.strip()
cmt = cmt.strip()
else:
cmt = ''
t = t.replace(',', ' ')
d = t.split()
if not d:
continue
if d[-1] == ';':
d.pop(-1)
if d[-1].endswith(';'):
d[-1] = d[-1][:-1]
if cmt == 'JCWR':
cmt = ''
d.append(cmt)
table_data_out.append(d) #[i for i in d if i != ':'])
# Add missing headers for a number of tables
if table_data_out[0][-1] != 'Header':
if oname.startswith('scs8'):
primname = oname.replace('scs8lpa', 'scs8lp_pg')
libname, primname = primname.split('_pg_', 1)
else:
primname = oname.replace('notify', 'NO')
assert primname in MISSING_HEADERS, (primname, oname)
table_data_out.insert(0, MISSING_HEADERS[primname])
print(table_data_out)
header = []
for d in table_data_out[0]:
if d.startswith('Qt'):
if header[-1] != ':':
header.append(':')
header.append(d)
table_data_out[0] = header
assert len(table_data_out[0]) == len(table_data_out[1]), pprint.pformat(table_data_out)
# Fix the headers in some comments.
if 'VPWR' in oname or 'VGND' in oname or 'iso' in oname:
if table_data_out[0][0] == 'in':
table_data_out[0][0] = 'UDP_IN'
if table_data_out[0][0] == 'X_int':
table_data_out[0][0] = 'UDP_IN'
if table_data_out[0][-2] == 'X':
table_data_out[0][-2] = 'UDP_OUT'
if table_data_out[0][-2] == 'OUT':
table_data_out[0][-2] = 'UDP_OUT'
table_data_out[0][-1] = 'Comments'
comments = []
comments_metadata = {}
for cm in RE_COMMENTS.finditer(nontable_data):
cmt = cm.group(0).strip()
assert cmt.startswith('//'), (cmt, cm)
cmt = cmt[2:].strip()
if not cmt:
continue
if ':' in cmt:
            a, b = cmt.split(':', 1)
a = a.strip()
if a not in comments_metadata:
comments_metadata[a] = []
comments_metadata[a].append(b.strip())
fakemod = nontable_data.replace('primitive', 'module')
tfile = os.path.join(temp_dir, f'{oname}.mod.v')
assert not os.path.exists(tfile), tfile
with open(tfile, 'w') as f:
f.write(fakemod)
vlog_info = vlog_ex.extract_objects(tfile)
assert len(vlog_info) == 1, vlog_info
mod_info = vlog_info[0]
decode_name(mod_info.name)
assert mod_info.name, mod_info
prim_name = rewrite_primitive_name(mod_info.name)
prim_fullname = new_lib + '__' + prim_name
print()
print(mod_info.name, prim_name, '-'*45)
print('-'*45)
print(ostr)
    # Extract the primitive description (including rewriting the port names inside the description)
prim_ports_rewrites = {p.name.upper(): convert_pinname(p.name, prim_name) for p in mod_info.ports}
prim_description = []
if 'FUNCTION' in comments_metadata:
f = '\n'.join(comments_metadata['FUNCTION'])
f = f.replace(' /', ' / ')
f = f.replace('( ', '(')
f = f.replace(' )', ')')
f = re.sub('\\s+', ' ', f)
f = f[0].upper() + f[1:].lower()
f = f.replace('-latch', 'latch')
f = f.replace('flip-flop', 'flipflop')
f = f.replace('latch', '-latch')
f = f.replace('flipflop', 'flip-flop')
port_regex = '\\b('+'|'.join('({})'.format(n) for n in prim_ports_rewrites)+')\\b'
f = re.sub(port_regex, lambda m: prim_ports_rewrites[m.group(0).upper()], f, flags=re.I)
f = re.sub('udp', 'UDP', f, flags=re.I)
prim_description = [f]
if 'UDP_OUT' in comments_metadata:
prim_description = ['UDP_OUT :'+l for l in comments_metadata['UDP_OUT']]
prim_description = '\n'.join(prim_description)
if not prim_description:
prim_description = common.DESCRIPTIONS[prim_name]
# Rewrite the pin names
ports = []
for p in mod_info.ports:
pclass, pt = pin_class(p.name)
if pclass != 'power':
pt = p.data_type
pname = convert_pinname(p.name, prim_name)
ports.append((pclass, pname, p.mode, pt))
# Create the definition file
definition = {
'type': 'primitive',
'name': prim_name,
'verilog_name': prim_fullname,
'file_prefix': prim_fullname.replace('$','_').lower(),
'library': new_lib,
#'parameters': parameters,
#'ports': def_ports,
'description': prim_description,
}
definition['ports'] = ports
definition['parameters'] = [(p.name, p.data_type) for p in mod_info.generics]
print('-')
pprint.pprint(definition)
assert 'description' in definition
assert definition['description']
djsonfile = os.path.join(temp_dir, prim_name+'.definition.json')
assert not os.path.exists(djsonfile), djsonfile
with open(djsonfile, 'w') as f:
json.dump(definition, f, indent=' ', sort_keys=True)
copy_file_to_output(djsonfile, final_dir, new_lib, version, prim_fullname, filename='definition.json')
# Create the primitive table file
for i, p in enumerate(table_data_out[0][:-5]):
if p == ':':
continue
table_data_out[0][i] = convert_pinname(p, prim_name)
print('-')
for r in table_data_out:
for d in r:
print('%10s' % d, end=' ')
print()
tsvfile = os.path.join(temp_dir, prim_name.lower()+'.table.tsv')
assert not os.path.exists(tsvfile), tsvfile
with open(tsvfile, 'w', newline='') as f:
w = csv.writer(f, delimiter='\t')
for r in table_data_out:
w.writerow(r)
copy_file_to_output(tsvfile, final_dir, new_lib, version, prim_fullname)
def write_versions_and_parse(temp_dir, oname, odata):
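    """Write the functional/behavioral (pp and non-pp) versions of a module to the temp dir.

    Each version is produced by resolving the `ifdef flags and is then parsed
    with hdlparse. Returns (parsed module info, file contents), both keyed by
    the version file name.
    """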
output_versions = {
'functional.pp.v': (False, {'functional': True, 'SC_USE_PG_PIN': True}),
'functional.v': (False, {'functional': True, 'SC_USE_PG_PIN': False}),
'behavioral.pp.v': (False, {'functional': False, 'SC_USE_PG_PIN': True}),
'behavioral.v': (False, {'functional': False, 'SC_USE_PG_PIN': False}),
}
parsed = {}
data = {}
for v, (include_specify, vflags) in output_versions.items():
fpath = os.path.join(temp_dir, oname+'.'+v)
vdata = '\n'.join(str(i) for i in output_ifdef(odata, vflags, include_specify))
data[v] = vdata
assert not os.path.exists(fpath), fpath
with open(fpath, 'w') as f:
f.write(vdata)
vlog_info = vlog_ex.extract_objects(fpath)
assert len(vlog_info) == 1, vlog_info
parsed[v] = {
'module': vlog_info[0].name,
'ports': [(p.name, p.mode.strip(), p.data_type.strip()) for p in vlog_info[0].ports],
'parameters': [(p.name, p.data_type.strip()) for p in vlog_info[0].generics],
}
return parsed, data
RE_LINE = re.compile('[^;]*(;|$)', re.DOTALL)
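# RE_STATEMENT matches a single gate/primitive/cell instantiation, e.g. a line like
#   buf #0.001 buf0 (X, A);
# capturing the type, drive strength, parameters, delay, instance name and arguments.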
RE_STATEMENT = re.compile(
    r'^(?P<type>[^\s]*)\s*'
    r'(?P<strength>\([^)]*\))?\s*'
    r'(?P<params>#\([^)]*\))?\s*'
    r'(?P<delay>#[0-9.]*)?\s*'
    r'(?P<name>[^#=\s]*)\s*'
    r'\((?P<args>.*)\)\s*;$', re.DOTALL)
RE_DEFINE = re.compile('^((wire)|(input)|(output)|(inout)|(parameter)|(reg)).*;', re.DOTALL)
RE_MULTI_CHECK = re.compile(r'\)\s*,\s*\(', re.DOTALL)
RE_MULTI_EXTRACT = re.compile(r'\((?P<args>[^)]*)\)', re.DOTALL)
RE_SPLIT = re.compile(r'([\s()]|&+)')
def vsplit(s):
"""
>>> a = "( AWAKE && ( SETB_delayed === 1'b1 ) )"
>>> print(vsplit(a))
['(', 'AWAKE', '&&', '(', 'SETB_delayed', '===', "1'b1", ')', ')']
>>> a = "(AWAKE && ( SETB_delayed === 1'b1))"
>>> print(vsplit(a))
['(', 'AWAKE', '&&', '(', 'SETB_delayed', '===', "1'b1", ')', ')']
>>> a = "(AWAKE&(GATE_delayed === 1'b0))"
>>> print(vsplit(a))
['(', 'AWAKE', '&', '(', 'GATE_delayed', '===', "1'b0", ')', ')']
"""
return [a for a in RE_SPLIT.split(s) if a.strip()]
BUILT_INS = [
'and',
'nand',
'or',
'nor',
'xor',
'xnor',
'buf',
'not',
'bufif0',
'bufif1',
'notif0',
'notif1',
'pullup',
'pulldown',
'nmos',
'pmos',
'rnmos',
'rpmos',
'cmos',
'rcmos',
'tran',
'rtran',
'tranif0',
'tranif1',
'rtranif0',
'rtranif1',
]
def wrap(s, i=''):
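    """Word wrap `s` into ' * ' prefixed comment lines with a hanging indent of `i`."""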
s = common.whitespace_convert(s)
p = ' * '+i
m = ' * '+(' '*len(i))
s = "\n".join(textwrap.wrap(
s,
initial_indent=p,
subsequent_indent=m,
break_on_hyphens=False,
expand_tabs=True,
))
return common.whitespace_revert(s)
def file_guard(fname):
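    """Turn a file name into a header guard macro name, e.g. 'foo.v' becomes 'FOO_V'."""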
fname = re.sub('[^A-Za-z_0-9]', '_', fname)
return fname.upper()
def write_verilog_header(fname, vdesc, define_data):
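    """Build the header lines (include guard, description comment block,
    timescale and default_nettype) for a generated Verilog file."""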
guard = file_guard(os.path.basename(fname))
output = []
output.append('')
output.append(f'`ifndef {guard}')
output.append(f'`define {guard}')
output.append('')
output.append(f"/**")
assert 'description' in define_data, define_data
desc = define_data['description']
eq = define_data.get('equation', '')
if '\n' in desc:
if eq:
eq = ' '+eq
output.append(wrap(eq, i=define_data['name']+":"))
for l in desc.splitlines():
output.append(wrap(l.rstrip(), i=' '))
else:
output.append(wrap(desc, i=define_data['name']+": "))
if eq:
output.append(" *")
output.append(wrap(eq, i=((len(define_data['name'])+2)*" ")))
output.append(" *")
output.append(wrap(vdesc))
output.append(" */")
output.append('')
output.append('`timescale 1ns / 1ps')
output.append('`default_nettype none')
return output
class NamesMap:
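    """Tracks the old -> new name mapping for the ports, supplies and local
    signals of a module being rewritten, along with the UDPs and sub cells
    it instantiates (so the right `include statements can be emitted)."""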
@functools.total_ordering
class MappedName:
def __init__(self, old_name):
#assert old_name, repr(old_name)
self._old_name = old_name
self._new_name = None
self.type = None
self.dir = None
def set_new_name(self, s, override=False):
assert s, (self, s)
assert (self._new_name is None) or (self._new_name == s) or override, (
self.__class__, self._old_name, self._new_name, s)
self._new_name = s
def __str__(self):
return "{}:{}".format(self.old_name, self.new_name)
@property
def old_name(self):
return self._old_name
@property
def new_name(self):
if self._new_name is not None:
return self._new_name
else:
return self._old_name
@property
def has_new_name(self):
return (self._new_name is not None)
def _is_valid_operand(self, other):
return isinstance(other, NamesMap.MappedName)
def __lt__(self, o):
if not self._is_valid_operand(o):
return NotImplemented
return self.new_name < o.new_name
def __eq__(self, o):
if not self._is_valid_operand(o):
return NotImplemented
return self.new_name == o.new_name
class MappedPort(MappedName):
def __init__(self, old_name, new_modname):
NamesMap.MappedName.__init__(self, old_name)
self.set_new_name(convert_pinname(old_name, new_modname))
class MappedSupply(MappedName):
def __init__(self, old_name, new_modname):
NamesMap.MappedName.__init__(self, old_name)
self.set_new_name(convert_pinname(old_name, new_modname))
def __init__(self, new_modname):
self.new_modname = new_modname
self.instances = defaultdict(lambda: 0)
self.imports = {'udp': set(), 'cells': set()}
self.mapping = {}
def __iter__(self):
return iter(sorted(self.mapping.values()))
def all_new_names(self):
return [x.new_name for x in self.mapping.values()]
@classmethod
def is_localname(cls, v):
return not isinstance(v, (NamesMap.MappedPort, NamesMap.MappedSupply))
def localnames(self):
o = []
for v in self.mapping.values():
if self.is_localname(v):
o.append(v)
return o
def ports(self):
o = []
for v in self.mapping.values():
if isinstance(v, NamesMap.MappedPort):
o.append(v)
return o
def ports_names(self):
o = []
for v in self.mapping.values():
if isinstance(v, NamesMap.MappedPort):
o.append(v.new_name)
return o
def supplies(self):
o = []
for v in self.mapping.values():
if isinstance(v, NamesMap.MappedSupply):
o.append(v)
return o
def get_localname(self, name):
if name not in self.mapping:
self.mapping[name] = self.MappedName(name)
if name.endswith('_delayed'):
old_portname = name[:-8]
new_portname = convert_pinname(old_portname, self.new_modname)
self.mapping[name].set_new_name(new_portname+'_delayed')
return self.mapping[name]
def get_portname(self, name):
if name not in self.mapping:
self.mapping[name] = self.MappedPort(name, self.new_modname)
return self.mapping[name]
def get_supply(self, name):
if name not in self.mapping:
self.mapping[name] = self.MappedSupply(name, self.new_modname)
return self.mapping[name]
def get_name(self, name):
if name == 'clk':
name = 'CLK'
if name not in self.mapping:
return self.get_localname(name)
return self.mapping[name]
def get_instance_name(self, new_lib, itype):
if itype not in BUILT_INS:
if '_U_' in itype:
prim_name = rewrite_primitive_name(itype)
modname = new_lib + '__' + prim_name
self.imports['udp'].add(modname)
iname = prim_name.replace('udp_', '').split('$', 1)[0]
else:
modname = convert_cell_fullname(itype, new_lib)
self.imports['cells'].add(modname)
iname = common.strip_strength(modname.split('__')[-1])
itype = modname
else:
iname = itype
itype = itype
iname = iname.lower()
i = self.instances[iname]
self.instances[iname] += 1
newname = '{}{}'.format(iname, i)
return itype, newname
RE_ARG_OKAY = re.compile(r'^(([0-9A-Za-z_$\[\]!]+)|(.[0-9A-Za-z_$]+\([0-9A-Za-z_$]+\))|()|(1\'b[01]))$')
RE_ARG_MATCH = re.compile('^[A-Za-z0-9_$]*$')
RE_SIGSPLIT = re.compile(r'^(?P<not>!?)(?P<name>[A-Za-z0-9_]+)(\[(?P<subsig>[0-9]+)\])?$')
def convert_instance(new_lib, names_map, i):
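    """Rewrite one parsed instantiation: map its type into the new library
    naming, pick a numbered instance name and convert every port connection
    through the names map."""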
if 'name' in i and i['name'] and re.match('I[0-9]*', i['name'], flags=re.I):
del i['name']
newtype, newname = names_map.get_instance_name(new_lib, i['type'])
i['type'] = newtype
i['name'] = newname
args = i['args']
for j, a in enumerate(args):
if not a.startswith('.'):
a_src = a
a_dst = None
else:
assert a[0] == '.', a
assert '(' in a, a
assert a[-1] == ')', a
a_dst, a_src = a.split('(', 1)
assert a_src[-1] == ')', a_src
a_src = a_src[:-1]
assert a_dst[0] == '.', a_dst
a_dst = a_dst[1:]
assert RE_ARG_MATCH.match(a_dst), a_dst
a_dst = convert_pinname(a_dst, i['type'][1])
if a_src in ("1'b0", "1'b1"):
src_invert = False
src_name = a_src
src_subsig = None
else:
m = RE_SIGSPLIT.match(a_src)
if m:
# !D
# !D[0]
src_invert = m.group('not')
src_name = m.group('name')
src_subsig = m.group('subsig')
else:
src_invert = False
src_name = a_src
src_subsig = None
assert RE_ARG_MATCH.match(src_name), (src_name, a_src)
src_name = names_map.get_name(src_name)
if 'udp_' in src_name.new_name.lower():
new_name = src_name.new_name.lower()
new_name = new_name.replace('udp_out_', 'out_')
new_name = new_name.replace('udp_in_', 'out_')
new_name = i['name']+'_'+new_name
assert new_name not in names_map.all_new_names()
src_name.set_new_name(new_name)
if not src_name.has_new_name and ('csi_' in src_name.new_name.lower()):
if j == 0:
new_name = i['name']+'_out'
assert new_name not in names_map.all_new_names()
src_name.set_new_name(new_name)
args[j] = [(src_invert, src_name, src_subsig), a_dst]
return i
def convert_assignment(names_map, dst, calc):
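    """Rewrite an assign statement, mapping every identifier in the expression
    through the names map. Returns (destination, list of expression tokens)."""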
dst_new = names_map.get_localname(dst)
calc_old = vsplit(calc)
calc_new = []
for c in calc_old:
        if c in ('(', ')', '&', '&&', '|', '||', '===', '!==', '=', '!=', '==', "1'b1", "1'b0"):
calc_new.append(c)
continue
assert re.match('^[A-Za-z0-9_$]*$', c), c
c_new = names_map.get_name(c)
calc_new.append(c_new)
return dst_new, calc_new
def rewrite_verilog_module(definition, output_file, parsed, contents, port_rewrites, new_lib):
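    """Rewrite a cell's Verilog module body into the output style.

    Strips comments, splits the body into single statements, renames ports,
    supplies and local signals, and emits a formatted module with aligned
    declarations and instantiations. Returns the output as a list of lines.
    """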
contents = contents.replace('(* blackbox *)', '')
statements_bits = [m.group(0).strip() for m in RE_LINE.finditer(contents) if m.group(0).strip()]
statements_compound = []
# Strip all the comments
for s in statements_bits:
sl = []
for l in s.splitlines():
if '//' in l:
l = l.split('//', 1)[0]
l = l.strip()
if l:
sl.append(l)
if not sl:
continue
s = ' '.join(sl)
statements_compound.append(s)
# Remove the module / endmodule bits
assert statements_compound[0].startswith('module '), statements_compound[0]
assert statements_compound[-1].startswith('endmodule'), statements_compound[-1]
statements_compound = statements_compound[1:-1]
statements_singular = []
# Convert xxx (), (); --> xxx (); xxx();
for s in statements_compound:
m = RE_MULTI_CHECK.search(s)
if not m:
statements_singular.append(s)
continue
prefix = None
args = []
for a in RE_MULTI_EXTRACT.finditer(s):
if not prefix:
prefix = s[:a.start(0)]
args.append(f"{prefix}({a.group('args')});")
statements_singular.extend(args)
# Check all the statements end in ';'
for s in statements_singular:
assert not s.startswith(' '), s
assert s.endswith(';'), s
print('----')
print(contents)
print('^^^^')
names_map = NamesMap(definition['verilog_name'])
for old_name, new_name in port_rewrites.items():
for n, d, t in parsed['ports']:
if n == old_name:
break
else:
continue
assert False, (old_name, new_name, parsed['ports'])
m = names_map.get_portname(old_name)
m.type = t
m.dir = d
assert m.new_name == new_name, (m, m.old_name, m.new_name, old_name, new_name)
converted_statements = []
unprocessed_statements = []
for s in statements_singular:
ms = RE_STATEMENT.match(s)
md = RE_DEFINE.match(s)
if ms:
t = ms.group('type').strip()
params = ms.group('params')
if params and params.strip():
params = params.strip()
assert False, params
else:
params = None
delay = ms.group('delay')
if delay and delay.strip():
delay = delay.strip()
assert delay == "#0.001", delay
delay = '`UNIT_DELAY'
else:
delay = None
name = ms.group('name')
if name and name.strip():
name = name.strip()
else:
name = None
strength = ms.group('strength')
if strength and strength.strip():
strength = strength.strip()
assert False, strength
else:
strength = None
args = ms.group('args').strip()
args = [a.strip() for a in args.split(',')]
for a in args:
assert RE_ARG_OKAY.match(a), (a, s, ms.groups())
i = {
'type': t,
'name': name,
'delay': delay,
'params': params,
'strength': strength,
'args': args,
}
print(' Instance:', i)
converted_statements.append(convert_instance(new_lib, names_map, i))
elif md:
print('Definitions:', repr(s))
assert s.endswith(';'), s
dtype, dnames = s[:-1].split(' ', 1)
dnames = [d.strip() for d in dnames.split(',')]
assert dtype, (dtype, s)
for n in dnames:
if 'csi_notifier' in n:
continue
m = names_map.get_name(n)
m.type = dtype
elif s.startswith('initial'):
print(' Initial:', repr(s))
unprocessed_statements.append(s)
elif s.startswith('supply'):
a, b = s.split(' ', 1)
a = a.strip()
b = b.strip()
assert a.startswith('supply')
assert b.endswith(';')
b = b[:-1].strip()
m = names_map.get_supply(b)
m.type = a
elif s.startswith('assign'):
print(' Assign:', repr(s))
a, b = s.split('=', 1)
a = a.strip()
assign, a = a.split(' ', 1)
assert assign.strip() == 'assign', (assign, a, s)
a = a.strip()
assert a, (assign, a, s)
b = b.strip()
assert b.endswith(';')
b = b[:-1].strip()
assert b, (a, b, s)
converted_statements.append(convert_assignment(names_map, a, b))
elif s.startswith('always'):
print(' Always:', repr(s))
unprocessed_statements.append(s)
else:
print(repr(s))
assert False, s
assert not unprocessed_statements, unprocessed_statements
port_names = [l.lower() for l in names_map.ports_names()]
for l in names_map.ports_names():
if "_" in l:
port_names.append(l.split("_")[0].lower())
for n in names_map:
if names_map.is_localname(n):
newname = n.new_name.lower()
if '_' in newname:
bits = newname.rsplit('_', 1)
if bits[0] in port_names:
newname = bits[0].upper()+'_'+bits[-1]
if bits[-1] in port_names:
newname = bits[0]+'_'+bits[-1].upper()
bits = newname.split('_', 1)
if bits[0] in port_names:
newname = bits[0].upper()+'_'+bits[-1]
if bits[-1] in port_names:
newname = bits[0] + "_" + bits[-1].upper()
elif newname in port_names:
newname = newname.upper()
if '$' in newname:
newname = newname.replace('$', '_')
if newname == 'DE$D':
newname = 'mux_DE_or_D'
if newname and newname != n.new_name:
assert newname not in names_map.all_new_names(), (n, n.old_name, n.new_name, newname, names_map.all_new_names())
n.set_new_name(newname, override=True)
if n.type is None:
n.type = 'wire'
if n.new_name:
print('WARNING:', definition['verilog_name'], n, repr(n), 'implicitly defined!?')
output = write_verilog_header(
output_file,
"Verilog simulation functional model.",
definition)
if names_map.imports['udp']:
output.append('')
output.append('// Import user defined primitives.')
for p in names_map.imports['udp']:
cdir = common.directory_for_cell(p)
output.append(f'`include "../../{cdir[0]}/{cdir[1]}/{new_lib}__{cdir[1]}.v"')
if names_map.imports['cells']:
output.append('')
output.append('// Import sub cells.')
for i in names_map.imports['cells']:
ilib, icell = i.split('__', 1)
cdir = common.directory_for_cell(i)
assert cdir[0] == 'cells', (i, cdir)
assert ilib == new_lib, (ilib, new_lib, i)
output.append(f'`include "../{cdir[1]}/{ilib}__{cdir[1]}.v"')
    ports = [(convert_pinname(n, definition['verilog_name']), d, t) for n, d, t in parsed['ports']]
    for i, (n, d, t) in enumerate(ports):
        if t.startswith('supply'):
            # The port entries are tuples, so rebuild the entry rather than assigning in place.
            ports[i] = (n, d, '')
output.append('')
output.append("`celldefine")
output.append(f"module {definition['verilog_name']} (")
if ports:
maxlen = max(len(n) for n, _, _ in ports)
for name, _, _ in ports:
name = name.ljust(maxlen)
output.append(f' {name},')
assert output[-1][-1] == ',', output[-1]
output[-1] = output[-1][:-1].rstrip() + '\n'
output[-1] += f");"
if ports:
maxlen = {}
maxlen['n'] = max(len(n) for n, _, _ in ports)
maxlen['d'] = max(len(d) for _, d, _ in ports)
maxlen['t'] = max(len(t) for _, _, t in ports)
output.append('')
output.append(' // Module ports')
for n, d, t in ports:
n = n.ljust(maxlen['n'])
d = d.ljust(maxlen['d'])
if maxlen['t'] > 0:
t = t.ljust(maxlen['t']+1)
output.append(f' {d} {t}{n};')
if names_map.supplies():
maxlen = max(len(a.new_name) for a in names_map.supplies())
output.append('')
output.append(' // Module supplies')
for s in names_map.supplies():
n = s.new_name.ljust(maxlen)
output.append(f' {s.type} {n};')
localnames = names_map.localnames()
if localnames:
output.append('')
output.append(' // Local signals')
maxlen = {
'n': max(len(n.new_name) for n in localnames),
't': max(len(n.type) for n in localnames),
}
for n in localnames:
nt = n.type.ljust(maxlen['t'])
nn = n.new_name.ljust(maxlen['n'])
if nn.strip():
output.append(f' {nt} {nn};')
instances = [d for d in converted_statements if isinstance(d, dict)]
if instances:
headers = {
't': '// ',
'n': 'Name',
'a0': 'Output',
}
maxlen = {
't': len(headers['t']),
'n': len(headers['n']),
'd': 0,
'a0': len(headers['a0']),
'an': 0,
}
for obj in instances:
t = obj['type']
assert t, obj
if not obj['delay']:
obj['delay'] = ''
d = obj['delay']
assert d is not None, obj
n = obj['name']
assert n, obj
maxlen['t'] = max(maxlen['t'], len(t))
maxlen['d'] = max(maxlen['d'], len(d))
maxlen['n'] = max(maxlen['n'], len(n))
aouts = []
for i, obj in enumerate(converted_statements):
aout = []
if isinstance(obj, dict):
assert len(aouts) == i, (i, aouts)
for (a_from_invert, a_from_name, a_from_subsig), a_to in obj['args']:
a_from = a_from_name.new_name
if a_from_invert:
a_from = '!'+a_from
if a_from_subsig:
a_from = a_from+"["+a_from_subsig+"]"
if a_to:
aout.append(f'.{a_to}({a_from})')
else:
aout.append(a_from)
if aout:
a0 = aout[0]
assert a0, (aout, obj)
an = aout[1:]
aout = [aout[0], ', '.join(an)]
if an:
aout[1] = ', '+aout[1]
if not aout:
aout = ['', '']
aouts.append(aout)
maxlen['a0'] = max(maxlen['a0'], max((len(a[0]) for a in aouts), default=0))
maxlen['an'] = max(maxlen['an'], max((len(a[1]) for a in aouts), default=0))
if maxlen['d'] > 0:
headers['d'] = 'Delay'
maxlen['d'] = max(len(headers['d']), maxlen['d'])
if maxlen['an'] > 0:
headers['an'] = ' Other arguments'
maxlen['an'] = max(len(headers['an']), maxlen['an'])
t = headers['t'].ljust(maxlen['t'])
d = ''
if maxlen['d']:
d = headers['d'].ljust(maxlen['d']+1)
n = headers['n'].ljust(maxlen['n']);
a0 = headers['a0'].ljust(maxlen['a0'])
an = ''
if maxlen['an']:
an = headers['an']
output.append('')
output.append(f" {t} {d}{n} {a0}{an}")
if converted_statements:
for i, obj in enumerate(converted_statements):
if isinstance(obj, dict):
t = obj['type'].ljust(maxlen['t'])
d = ''
if maxlen['d']:
d = obj['delay'].ljust(maxlen['d']+1)
n = obj['name'].ljust(maxlen['n']);
aout = aouts[i]
a0 = aout[0].ljust(maxlen['a0'])
an = ''
if aout[1]:
an = aout[1].ljust(maxlen['an'])
output.append(f" {t} {d}{n} ({a0}{an});")
else:
o = []
for c in obj[1]:
if isinstance(c, str):
o.append(c)
else:
o.append(c.new_name)
output.append(f" assign {obj[0].new_name} = "+" ".join(o)+";")
output.append('')
else:
output.append(' // No contents.')
output.append("endmodule")
output.append("`endcelldefine")
output.append('')
guard = file_guard(os.path.basename(output_file))
output.append('`default_nettype wire')
output.append(f'`endif // {guard}')
print(output_file, "----")
print("\n".join(output))
print('='*75)
return output
def bad_cell(name):
#if 'isowell' in name:
# return True
if 'scs8hd_macro' in name:
return True
if 'macro_sparecell' in name:
return True
return False
def extract_power_pins_via_flag(parsed, data):
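    """Work out a cell's power ports by diffing the port lists of the
    SC_USE_PG_PIN and non-SC_USE_PG_PIN versions of the module."""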
assert parsed['behavioral.v'] == parsed['functional.v']
assert parsed['behavioral.pp.v'] == parsed['functional.pp.v']
modname = parsed['behavioral.v']['module']
base_ports = list(parsed['behavioral.v']['ports'])
pwr_ports = list(parsed['behavioral.pp.v']['ports'])
for p in list(base_ports):
assert p in pwr_ports
if bad_cell(modname):
pclass, pt = pin_class(p[0])
if pclass == 'signal':
pwr_ports.remove(p)
else:
base_ports.remove(p)
continue
pwr_ports.remove(p)
opwr_ports = []
# Extract the supply style
for name, d, t in pwr_ports:
m = re.search('((supply[^\\s]+)|(wire))\\s+{}'.format(name), data['behavioral.v'], flags=re.I)
if bad_cell(modname):
pclass, pt = pin_class(name)
assert pclass == 'power', (name, pclass, pt)
elif not m and name in ('vpwrin', 'lowlvpwr'):
pt = 'wire'
else:
assert m, (name, d, t)
pt = m.group(1)
opwr_ports.append((name, d, pt))
assert opwr_ports, (base_ports, pwr_ports)
extracted_ports = []
for pname, pdir, ptype in base_ports:
extracted_ports.append(check_port('signal', pname, pdir, ptype))
for pname, pdir, ptype in opwr_ports:
extracted_ports.append(check_port('power', pname, pdir, ptype))
return modname, parsed['behavioral.v']['parameters'], extracted_ports
def check_port(pclass, pname, pdir, ptype):
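    """Sanity check a (class, name, direction, type) port tuple, normalising signal wire types."""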
o = (pclass, pname, pdir, ptype)
assert pclass in ('power', 'signal'), o
assert pdir in ('input', 'output', 'inout'), o
if pclass == 'power':
assert ptype in ('', 'supply0', 'supply1', 'wire'), o
elif pclass == 'signal':
if ptype.startswith('wire'):
ptype = ptype[4:].strip()
o = (pclass, pname, pdir, ptype)
if ptype != '':
ptype_m = re.match(r'\[(([0-9]+)|width):0\]', ptype)
assert ptype_m, o
return o
def pin_class(n):
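    """Classify a pin by its name: names starting with 'v' are power pins
    (supply1 if the name contains a 'p', otherwise supply0), everything else
    is a signal. e.g. pin_class('VPWR') == ('power', 'supply1') and
    pin_class('A') == ('signal', None)."""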
t = None
n = n.lower()
if n.startswith('v'):
pclass = 'power'
if 'p' in n:
t = 'supply1'
else:
t = 'supply0'
else:
pclass = 'signal'
return pclass, t
RE_MODULE = re.compile(r'module\s+(?P<name>[^\s]+)\s*\(\s*(?P<ports>[^)]+)\)\s*;', re.DOTALL)
def reorder(order, ports):
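    """Reorder `ports` in place so they follow the pin name order given by `order`."""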
output_ports = []
for o in order:
for i, pn in enumerate(ports):
if pn[0] == o:
output_ports.append(ports.pop(i))
break
else:
raise ValueError(f"Didn't find {o} in {ports}")
assert not ports, ports
ports.extend(output_ports)
def extract_power_pins_via_name(parsed, data):
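    """Work out a cell's power ports purely from the pin names, for modules
    which do not use the SC_USE_PG_PIN flag."""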
assert data['behavioral.v'] == data['behavioral.pp.v']
assert data['functional.v'] == data['functional.pp.v']
assert parsed['behavioral.v'] == parsed['functional.v']
assert parsed['behavioral.v'] == parsed['behavioral.pp.v']
assert parsed['functional.v'] == parsed['functional.pp.v']
assert parsed['behavioral.pp.v'] == parsed['functional.pp.v']
moddef = RE_MODULE.search(data['behavioral.v'])
assert moddef, data['behavioral.v']
modname = moddef.group('name')
ports = [s.strip() for s in moddef.group('ports').split(',')]
base_info = parsed['behavioral.v']
reorder(ports, base_info['ports'])
extracted_ports = []
for pname, pdir, ptype in base_info['ports']:
pclass, pt = pin_class(pname)
if pt:
ptype = pt
extracted_ports.append(check_port(pclass, pname, pdir, ptype))
return base_info['module'], base_info['parameters'], extracted_ports
vlog_ex = vlog.VerilogExtractor()
def process_module(final_dir, temp_dir, new_lib, version, metadata, active_header, otype, oname, odata):
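    """Convert one Verilog cell/module into output artifacts.

    Writes a definition.json, the rewritten functional/behavioral simulation
    models and, if present, a separate specify (timing) file.
    """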
flags = get_flags(odata)
if 'TETRAMAX' in flags:
print("Removing 'TETRAMAX'")
print('*'*10)
print('\n'.join(str(l) for l in odata))
print('*'*10)
odata = output_ifdef(odata, {'TETRAMAX': False})
flags = get_flags(odata)
ostr = '\n'.join(str(l) for l in odata)
for f in flags:
assert f in ['functional', 'SC_USE_PG_PIN'], (flags, otype, ostr)
assert not active_header, active_header
parsed, data = write_versions_and_parse(temp_dir, oname, odata)
if 'SC_USE_PG_PIN' in flags:
extracted_modulename, parameters, extracted_ports = extract_power_pins_via_flag(parsed, data)
else:
extracted_modulename, parameters, extracted_ports = extract_power_pins_via_name(parsed, data)
cell_fullname = convert_cell_fullname(extracted_modulename)
cell_mostname = common.strip_strength(cell_fullname)
assert cell_fullname.startswith(new_lib), (cell_fullname, extracted_modulename, new_lib)
cell_basename = cell_mostname.split('__')[-1]
cell_descname = cell_basename
if 'lpflow' in cell_basename:
cell_descname = cell_basename.replace('lpflow_', '')
if 'udb' in cell_basename:
cell_descname = cell_basename.replace('udb_', '')
allport_rewrites = {}
port_rewrites = {}
def_ports = []
for pclass, pname, pdir, ptype in extracted_ports:
assert pclass in ('power', 'signal'), extracted_ports
before_name = pname
after_name = convert_pinname(pname)
assert before_name not in allport_rewrites
allport_rewrites[before_name] = after_name
def_ports.append((pclass, after_name, pdir, ptype))
if before_name != after_name:
port_rewrites[before_name] = after_name
re_ports = re.compile(r'\b('+'|'.join('('+p+')' for p in port_rewrites)+')\\b', flags=re.I)
def r(m):
s = m.start(1)-m.start(0)
e = m.end(1)-m.start(0)
k = None
for k in port_rewrites:
if k.lower() == m.group(1).lower():
break
assert k in port_rewrites, (k, m.groups(), port_rewrites)
r = port_rewrites[k]
return m.group(0)[:s] + r + m.group(0)[e:]
definition = {
'type': 'cell',
'name': cell_basename,
'verilog_name': cell_mostname,
'file_prefix': cell_mostname,
'library': new_lib,
'parameters': parameters,
'ports': def_ports,
}
if 'description' in metadata:
definition['description'] = metadata['description']
if 'description' in definition:
if cell_descname in common.DESCRIPTIONS:
print('WARNING, Have description from file :', definition['description'])
print('WARNING, Have description from spreadsheet:', common.DESCRIPTIONS[cell_descname])
if 'description' not in definition:
if cell_descname not in common.DESCRIPTIONS:
print('ERROR:', new_lib, cell_descname)
else:
assert cell_descname in common.DESCRIPTIONS, cell_descname
definition['description'] = common.DESCRIPTIONS[cell_descname]
if cell_descname in common.EQUATIONS:
eq = common.EQUATIONS[cell_descname]
if '=' not in eq:
outputs = [n for c, n, d, t in def_ports if d == 'output']
assert len(outputs) == 1, (outputs, def_ports)
eq = outputs[0]+' = '+eq
definition['equation'] = re_ports.sub(r, eq)
print()
print(extracted_modulename, cell_fullname, '-'*45)
pprint.pprint(definition)
print('-'*45)
print(ostr)
print('-'*45)
if 'description' not in definition and "sky130_fd_sc_ls" in new_lib:
definition['description'] = ''
else:
assert 'description' in definition, (cell_descname, definition)
assert definition['description']
    if definition['description'] and definition['description'][-1] != '.':
        definition['description'] += '.'
# Write out the cell definition
djsonfile = os.path.join(temp_dir, extracted_modulename+'.definition.json')
assert not os.path.exists(djsonfile), djsonfile
with open(djsonfile, 'w') as f:
json.dump(definition, f, indent=' ', sort_keys=True)
copy_file_to_output(djsonfile, final_dir, new_lib, version, cell_mostname, filename='definition.json')
# Write out a functional simulation model
# FIXME: Work for non-standard cells
for f in ['functional', 'functional.pp', 'behavioral', 'behavioral.pp']:
if '_sc' in definition['library'] and 'bleeder' not in cell_fullname and 'bushold' not in cell_fullname:
output = rewrite_verilog_module(
definition,
f"{definition['file_prefix']}.{f}.v",
parsed[f+'.v'],
data[f+'.v'],
allport_rewrites,
new_lib)
else:
output = []
funfile = os.path.join(temp_dir, f'{cell_fullname}.{f}.v')
        # Use a separate name for the file handle so the loop variable `f` is not shadowed.
        with open(funfile, 'w') as out_f:
            out_f.write(common.copyright_header['/**/'])
            out_f.write('\n'.join(output))
copy_file_to_output(funfile, final_dir, new_lib, version, cell_mostname)
# Write out the specify verilog section
specify = get_specify(odata)
assert len(specify) <= 1, specify
if specify:
sfile = os.path.join(temp_dir, f'{cell_fullname}.specify.v')
specify_str = '\n'.join(specify[0])
specify_str = re_ports.sub(r, specify_str)
with open(sfile, 'w') as f:
f.write(common.copyright_header['/**/'])
f.write(specify_str)
try:
copy_file_to_output(sfile, final_dir, new_lib, version, cell_mostname)
        except AssertionError as e:
            separate_specify = False
            if cell_fullname.endswith('lp2'):
                separate_specify = True
            if cell_mostname in ('sky130_fd_sc_lp__dlrtn', 'sky130_fd_sc_lp__xnor3'):
                separate_specify = True
            if not separate_specify:
                raise
copy_file_to_output(sfile, final_dir, new_lib, version, cell_fullname)
def filemain(input_file, temp_dir, final_dir, args):
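    """Process a single source Verilog file.

    Splits the file into celldefine/primitive/module chunks, parses the
    leading comment header into metadata, then hands each chunk to
    process_module() or process_primitive().
    """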
if 'scs' not in input_file:
#print('WARNING: Skipping', input_file)
return
if 'stubs_' in input_file:
print('WARNING: Skipping', input_file)
return
with open(input_file, 'r') as in_f:
contents = in_f.read()
contents = contents.replace('`timescale 1ns / 1ps', '')
output = []
last_bit_endpos = 0
for bit in RE_BITS.finditer(contents):
between = contents[last_bit_endpos:bit.start(0)]
if between.strip():
output.append(('???', '', process_ifdef(between)))
last_bit_endpos = bit.end(0)
cell = bit.group('cell')
prim = bit.group('prim')
mod = bit.group('mod')
if cell:
assert not prim, (cell, prim, mod)
assert not mod, (cell, prim, mod)
output_type = 'cell'
output_data = cell.strip()
content = bit.group('cell_content').strip()
#content = re.sub(r'//.*?$', '', content, flags=re.MULTILINE)
assert content, cell
m = RE_BITS.search(content)
assert m, output_data
assert m.group('mod'), output_data
content_header = content[:m.start(0)].strip()
stripped_content_header = strip_stupid_headers(content_header)
assert not stripped_content_header, pprint.pformat([stripped_content_header, content_header])
content_footer = content[m.end(0):].strip()
stripped_content_footer = strip_stupid_headers(content_footer)
assert not stripped_content_footer, pprint.pformat([stripped_content_footer, content_footer])
output_name = m.group('mod_name').strip()
output_data = m.group('mod').strip()
elif prim:
assert not cell, (cell, prim, mod)
assert not mod, (cell, prim, mod)
output_type = 'prim'
output_name = bit.group('prim_name').strip()
output_data = prim.strip()
elif mod:
assert not cell, (cell, prim, mod)
assert not prim, (cell, prim, mod)
output_type = 'mod'
output_name = bit.group('mod_name').strip()
output_data = mod.strip()
else:
assert False, bit
output_data = process_ifdef(output_data)
output.append((output_type, output_name, output_data))
if not output:
print('WARNING: Nothing found in file!', input_file)
return
header = ''
if output[0][0] == '???':
header = '\n'.join(output.pop(0)[-1])
active_header = []
metadata = {None: []}
current_section = None
for h in header.splitlines():
if not h.strip():
continue
if not h.startswith('//'):
active_header.append(h)
continue
if h.startswith('// '):
h = h[3:].rstrip()
else:
h = h[2:].rstrip()
if not h.strip():
continue
if h[0] == ' ':
h = '\n'+h[1:].rstrip()
else:
h = h.strip()
if ':' in h:
a, b = h.split(':', 1)
current_section = a.strip().lower()
metadata[current_section] = []
b = b.strip()
if b:
metadata[current_section].append(b)
continue
metadata[current_section].append(h)
for m in list(metadata.keys()):
s = ' '.join(metadata[m]).strip()
metadata[m] = s
if m is None and s.startswith('Automatically'):
s = None
if m and m.startswith('iptguser'):
s = None
if m and m.startswith('$Id'):
s = None
if not s:
del metadata[m]
old_lib, new_lib, version = extract_version_and_lib_from_path(input_file)
for otype, oname, odata in output:
if otype in ('cell', 'mod'):
process_module(final_dir, temp_dir, new_lib, version, metadata, active_header, otype, oname, odata)
elif otype in ('prim',):
process_primitive(final_dir, temp_dir, new_lib, version, metadata, active_header, otype, oname, odata)
if __name__ == "__main__":
import doctest
fails, _ = doctest.testmod()
if fails != 0:
sys.exit("Some test failed")
parser = argparse.ArgumentParser()
parser.add_argument(
"input",
help="The path to the source directory/file",
type=Path)
parser.add_argument(
"output",
help="The path to the output directory",
type=Path)
parser.add_argument(
"temp",
help="The path to the temp directory",
type=Path)
args = parser.parse_args()
common.main('v', filemain, args)