Reverted the previous commit, which removed the installation of the
klayout .map file for sky130. The removal was caused by a mix-up of
repository source locations on my end; the file should not have been
removed. Also updated the runtime Python scripts for the CACE system,
although those changes are an intermediate stage of a work in
progress.
diff --git a/VERSION b/VERSION
index 3a6453e..2b52611 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.0.398
+1.0.399
diff --git a/runtime/cace.py b/runtime/cace.py
index 6106677..86f3eac 100755
--- a/runtime/cace.py
+++ b/runtime/cace.py
@@ -46,11 +46,12 @@
from settings import Settings
from simhints import SimHints
-import config
-
# User preferences file (if it exists)
prefsfile = '~/design/.profile/prefs.json'
+# Application path (path where this script is located)
+apps_path = os.path.realpath(os.path.dirname(__file__))
+
#------------------------------------------------------
# Simple dialog for confirming quit or upload
#------------------------------------------------------
@@ -176,7 +177,7 @@
self.help = HelpWindow(self, fontsize = fontsize)
with io.StringIO() as buf, contextlib.redirect_stdout(buf):
- self.help.add_pages_from_file(config.apps_path + '/characterize_help.txt')
+ self.help.add_pages_from_file(apps_path + '/characterize_help.txt')
message = buf.getvalue()
# Set the help display to the first page
@@ -220,21 +221,7 @@
if 'username' in self.prefs:
username = self.prefs['username']
else:
- userid = os.environ['USER']
- p = subprocess.run(['/ef/apps/bin/withnet',
- config.apps_path + '/og_uid_service.py', userid],
- stdout = subprocess.PIPE)
- if p.stdout:
- uid_string = p.stdout.splitlines()[0].decode('utf-8')
- userspec = re.findall(r'[^"\s]\S*|".+?"', uid_string)
- if len(userspec) > 0:
- username = userspec[0].strip('"')
- # Note userspec[1] = UID and userspec[2] = role, useful
- # for future applications.
- else:
- username = userid
- else:
- username = userid
+ username = os.environ['USER']
# Label with the user
self.toppane.title_frame = ttk.Frame(self.toppane)
@@ -540,8 +527,7 @@
datasheet = os.path.split(self.cur_datasheet)[1]
designname = os.path.splitext(datasheet)[0]
print('Cancel characterization of ' + designname + ' (' + dspath + ' )')
- subprocess.run(['/ef/apps/bin/withnet',
- config.apps_path + '/cace_design_upload.py', '-cancel',
+ subprocess.run([apps_path + '/cace_design_upload.py', '-cancel',
dspath])
self.removeprogress()
self.bbar.upload_button.configure(text='Submit', state = 'enabled',
@@ -704,7 +690,7 @@
return
print('Upload selected')
- # Save hints in file in spi/ directory.
+ # Save hints in file in spice/ directory.
hintlist = []
for eparam in dsheet['electrical-params']:
if not 'editable' in eparam:
@@ -723,9 +709,7 @@
if not self.settings.get_test():
self.progress_bar_setup(dspath)
self.update_idletasks()
- subprocess.run(['/ef/apps/bin/withnet',
- config.apps_path + '/cace_design_upload.py',
- dspath])
+ subprocess.run([apps_path + '/cace_design_upload.py', dspath])
# Remove the settings file
os.remove(dspath + '/settings.json')
@@ -1006,13 +990,17 @@
self.stat_label.configure(text='(in progress)', style='blue.TLabel')
# Update status now
self.update_idletasks()
+
+ if dspath == '':
+ dspath = '.'
+
print('Datasheet directory is = ' + dspath + '\n')
# Instead of using the original datasheet, use the one in memory so that
# it accumulates results. A "save" button will update the original.
if not os.path.isdir(dspath + '/ngspice'):
os.makedirs(dspath + '/ngspice')
- dsdir = dspath + '/ngspice/char'
+ dsdir = dspath + '/ngspice'
if not os.path.isdir(dsdir):
os.makedirs(dsdir)
with open(dsdir + '/datasheet.json', 'w') as file:
@@ -1025,7 +1013,7 @@
# Call cace_gensim with full set of options
# First argument is the root directory
# (Diagnostic)
- design_path = dspath + '/spi'
+ design_path = dspath + '/spice'
print('Calling cace_gensim.py ' + dspath +
' -local -method=' + method)
@@ -1043,7 +1031,7 @@
print(' -layoutdir=' + dspath + '/mag' + ' -testbenchdir=' + dspath + '/testbench')
print(' -datasheet=datasheet.json')
- self.caceproc = subprocess.Popen([config.apps_path + '/cace_gensim.py', dspath,
+ self.caceproc = subprocess.Popen([apps_path + '/cace_gensim.py', dspath,
*modetext,
'-method=' + method, # Call local mode w/method
'-simdir=' + dsdir,
@@ -1194,7 +1182,9 @@
# file if it predates the unannotated datasheet (that indicates
# simulator failure, and no results).
dspath = os.path.split(self.cur_datasheet)[0]
- dsdir = dspath + '/ngspice/char'
+ if dspath == '':
+ dspath = '.'
+ dsdir = dspath + '/ngspice'
anno = dsdir + '/datasheet_' + suffix + '.json'
unanno = dsdir + '/datasheet.json'
@@ -1221,7 +1211,7 @@
def save_results(self):
# Write datasheet_save with all the locally processed results.
dspath = os.path.split(self.cur_datasheet)[0]
- dsdir = dspath + '/ngspice/char'
+ dsdir = dspath + '/ngspice'
if self.origin.get() == 'Layout Extracted':
jsonfile = dsdir + '/datasheet_lsave.json'
@@ -1255,7 +1245,7 @@
# recent than 'datasheet_anno'. If so, return True, else False.
[dspath, dsname] = os.path.split(self.cur_datasheet)
- dsdir = dspath + '/ngspice/char'
+ dsdir = dspath + '/ngspice'
if self.origin.get() == 'Layout Extracted':
savefile = dsdir + '/datasheet_lsave.json'
@@ -1338,7 +1328,7 @@
def load_results(self, value={}):
# Check if datasheet_save exists and is more recent than the
# latest design netlist. If so, load it; otherwise, not.
- # NOTE: Name of .spi file comes from the project 'ip-name'
+ # NOTE: Name of .spice file comes from the project 'ip-name'
# in the datasheet.
[dspath, dsname] = os.path.split(self.cur_datasheet)
@@ -1347,6 +1337,9 @@
except KeyError:
return
+ if dspath == '':
+ dspath = '.'
+
dsroot = dsheet['ip-name']
# Remove any existing results from the datasheet records
@@ -1357,15 +1350,19 @@
# dsroot = os.path.splitext(dsname)[0]
- dsdir = dspath + '/spi'
+ dsdir = dspath + '/spice'
+
+ if not os.path.exists(dsdir):
+ print('Error: Cannot find directory spice/ in path ' + dspath)
+
if self.origin.get() == 'Layout Extracted':
- spifile = dsdir + '/pex/' + dsroot + '.spi'
+ spifile = dsdir + '/pex/' + dsroot + '.spice'
savesuffix = 'lsave'
else:
- spifile = dsdir + '/' + dsroot + '.spi'
+ spifile = dsdir + '/' + dsroot + '.spice'
savesuffix = 'save'
- dsdir = dspath + '/ngspice/char'
+ dsdir = dspath + '/ngspice'
savefile = dsdir + '/datasheet_' + savesuffix + '.json'
if os.path.exists(savefile):
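
The hunks above drop the config module and instead derive the application
path from the script's own location. A minimal standalone sketch of that
pattern follows; the help-file name comes from the hunk above, and the
existence check is added here only so the sketch runs anywhere.

#!/usr/bin/env python3
# Minimal sketch: derive the application path from this script's location,
# as cace.py now does, instead of importing it from a config module.
import os

apps_path = os.path.realpath(os.path.dirname(__file__))

# Mirror the help-page load in cace.py; guard it, since the file may not
# exist where this sketch is run.
helpfile = os.path.join(apps_path, 'characterize_help.txt')
if os.path.isfile(helpfile):
    with open(helpfile, 'r') as ifile:
        print(ifile.read())
else:
    print('No help file at ' + helpfile)
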
diff --git a/runtime/cace_datasheet_upload.py b/runtime/cace_datasheet_upload.py
index af5db63..f7fe9d2 100755
--- a/runtime/cace_datasheet_upload.py
+++ b/runtime/cace_datasheet_upload.py
@@ -17,8 +17,6 @@
import file_compressor
import file_request_hash
-import config
-
"""
standalone script.
Makes rest calls to marketplace REST server to save datasheet
@@ -27,7 +25,7 @@
has no other side effects.
"""
-mktp_server_url = config.mktp_server_url
+mktp_server_url = ""
# Make request to server sending json passed in.
def send_doc(doc):
diff --git a/runtime/cace_design_upload.py b/runtime/cace_design_upload.py
index 4b4c47f..272deca 100755
--- a/runtime/cace_design_upload.py
+++ b/runtime/cace_design_upload.py
@@ -19,8 +19,6 @@
import file_request_hash
import local_uid_services
-import config
-
"""
standalone script.
Makes rest calls to marketplace REST server to save datasheet
@@ -30,8 +28,8 @@
on the CACE server.
"""
-mktp_server_url = config.mktp_server_url
-cace_server_url = config.cace_server_url
+mktp_server_url = ""
+cace_server_url = ""
# Make request to server sending json passed in.
def send_doc(doc):
diff --git a/runtime/cace_gensim.py b/runtime/cace_gensim.py
index 603b28a..6b323bd 100755
--- a/runtime/cace_gensim.py
+++ b/runtime/cace_gensim.py
@@ -69,13 +69,10 @@
import faulthandler
from functools import reduce
from spiceunits import spice_unit_convert
-from fix_libdirs import fix_libdirs
-import config
+# Application path (path where this script is located)
+apps_path = os.path.realpath(os.path.dirname(__file__))
-# Values obtained from config:
-#
-apps_path = config.apps_path
launchproc = []
def construct_dut_from_path(pname, pathname, pinlist, foundry, node):
@@ -107,16 +104,9 @@
nlfoundry = lmatch.group(2)
if nlfoundry != foundry:
print('Error: Foundry is ' + foundry + ' in spec sheet, ' + nlfoundry + ' in netlist.')
- # Not yet fixed in Electric
- ## return ""
if nlnode != node:
- # Hack for legacy node name
- if nlnode == 'XH035A' and node == 'XH035':
- pass
- else:
- print('Error: Node is ' + node + ' in spec sheet, ' + nlnode + ' in netlist.')
- # Not yet fixed in Electric
- ## return ""
+ print('Error: Node is ' + node + ' in spec sheet, ' + nlnode + ' in netlist.')
+
lmatch = subrex.match(line)
if lmatch:
rest = lmatch.group(1)
@@ -131,11 +121,12 @@
except StopIteration:
# Maybe this is not the DUT?
found = 0
- # Try the next line
+ # Try the next line (to be done)
break
else:
outline = outline + pin + ' '
found += 1
+ break
if found == 0 and dutname == "":
print('File ' + pathname + ' does not contain any subcircuits!')
@@ -413,13 +404,13 @@
endrex = re.compile(r'[ \t]*\.end[ \t]*', re.IGNORECASE)
endsrex = re.compile(r'[ \t]*\.ends[ \t]*', re.IGNORECASE)
# IP names in the ridiculously complicated form
- # <user_path>/design/ip/<proj_name>/<version>/<spi-type>/<proj_name>/<proj_netlist>
+ # <user_path>/design/ip/<proj_name>/<version>/<spice-type>/<proj_name>/<proj_netlist>
ippathrex = re.compile(r'(.+)/design/ip/([^/]+)/([^/]+)/([^/]+)/([^/]+)/([^/ \t]+)')
locpathrex = re.compile(r'(.+)/design/([^/]+)/spi/([^/]+)/([^/ \t]+)')
# This form does not appear on servers but is used if an IP block is being prepared locally.
altpathrex = re.compile(r'(.+)/design/([^/]+)/([^/]+)/([^/]+)/([^/ \t]+)')
# Local IP names in the form
- # <user_path>/design/<project>/spi/<spi-type>/<proj_netlist>
+ # <user_path>/design/<project>/spi/<spice-type>/<proj_netlist>
# To be completed
with open(filename, 'r') as ifile:
@@ -462,7 +453,7 @@
spitype = ippath.group(4)
ipname3 = ippath.group(5)
ipnetlist = ippath.group(6)
- funcpath = userpath + '/design/ip/' + ipname2 + '/' + ipversion + '/spi-func/' + ipname + '.spi'
+ funcpath = userpath + '/design/ip/' + ipname2 + '/' + ipversion + '/spice-func/' + ipname + '.spice'
else:
locpath = locpathrex.match(incpath)
if locpath:
@@ -470,7 +461,7 @@
ipname2 = locpath.group(2)
spitype = locpath.group(3)
ipnetlist = locpath.group(4)
- funcpath = userpath + '/design/' + ipname2 + '/spi/func/' + ipname + '.spi'
+ funcpath = userpath + '/design/' + ipname2 + '/spi/func/' + ipname + '.spice'
else:
altpath = altpathrex.match(incpath)
if altpath:
@@ -479,7 +470,7 @@
spitype = altpath.group(3)
ipname3 = altpath.group(4)
ipnetlist = altpath.group(5)
- funcpath = userpath + '/design/' + ipname2 + '/spi/func/' + ipname + '.spi'
+ funcpath = userpath + '/design/' + ipname2 + '/spi/func/' + ipname + '.spice'
funcpath = os.path.expanduser(funcpath)
if funcpath and os.path.exists(funcpath):
@@ -553,6 +544,7 @@
colonsepex = re.compile(r'^([^:]+):([^:]+)$') # a:b (colon-separated values)
vectrex = re.compile(r'([^\[]+)\[([0-9]+)\]') # pin name is a vector signal
vect2rex = re.compile(r'([^<]+)<([0-9]+)>') # pin name is a vector signal (alternate style)
+ vect3rex = re.compile(r'([a-zA-Z][^0-9]*)([0-9]+)') # pin name is a vector signal (alternate style)
libdirrex = re.compile(r'.lib[ \t]+(.*)[ \t]+') # pick up library name from .lib
vinclrex = re.compile(r'[ \t]*`include[ \t]+"([^"]+)"') # verilog include statement
@@ -669,6 +661,7 @@
repl = []
no_repl_ok = False
+ vtype = -1
sweeprec = sweepex.match(vpattern)
if sweeprec:
sweeptype = sweeprec.group(2)
@@ -687,11 +680,19 @@
if lmatch:
pinidx = int(lmatch.group(2))
vcondition = lmatch.group(1)
+ vtype = 0
else:
lmatch = vect2rex.match(condition)
if lmatch:
pinidx = int(lmatch.group(2))
vcondition = lmatch.group(1)
+ vtype = 1
+ else:
+ lmatch = vect3rex.match(condition)
+ if lmatch:
+ pinidx = int(lmatch.group(2))
+ vcondition = lmatch.group(1)
+ vtype = 3
try:
entry = next((item for item in simval if item[0] == condition))
@@ -821,8 +822,18 @@
try:
entry = next((item for item in simval if item[0].split('[')[0].split('<')[0] == vcondition))
except:
- # if no match, subsline remains as-is.
- pass
+ if vtype == 3:
+ for entry in simval:
+ lmatch = vect3rex.match(entry[0])
+ if lmatch:
+ if lmatch.group(1) == vcondition:
+ vlen = len(entry[2])
+ uval = entry[2][(vlen - 1) - pinidx]
+ repl = str(uval)
+ break
+ else:
+ # if no match, subsline remains as-is.
+ pass
else:
# Handle as vector bit slice (see below)
vlen = len(entry[2])
@@ -978,6 +989,7 @@
pinlist = []
vectrex = re.compile(r"([^\[]+)\[([0-9]+):([0-9]+)\]")
vect2rex = re.compile(r"([^<]+)\<([0-9]+):([0-9]+)\>")
+ vect3rex = re.compile(r"([^0-9]+)([0-9]+):([0-9]+)")
for pinrec in dsheet['pins']:
vmatch = vectrex.match(pinrec['name'])
if vmatch:
@@ -1005,7 +1017,20 @@
pinlist.append(newpinrec)
newpinrec['name'] = pinname + '<' + str(i) + '>'
else:
- pinlist.append(pinrec)
+ vmatch = vect3rex.match(pinrec['name'])
+ if vmatch:
+ pinname = vmatch.group(1)
+ pinmin = vmatch.group(2)
+ pinmax = vmatch.group(3)
+ if int(pinmin) > int(pinmax):
+ pinmin = vmatch.group(3)
+ pinmax = vmatch.group(2)
+ for i in range(int(pinmin), int(pinmax) + 1):
+ newpinrec = pinrec.copy()
+ pinlist.append(newpinrec)
+ newpinrec['name'] = pinname + str(i)
+ else:
+ pinlist.append(pinrec)
# Make sure all local conditions define a pin. Those that are not
# associated with a pin will have a null string for the pin name.
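
The three vector regexes above (vectrex, vect2rex, vect3rex) cover the
bracketed, angle-bracketed, and plain numeric-suffix bus notations. Below is
a small standalone sketch of the same expansion idea; the pin records are
invented for illustration and the min/max swap is applied uniformly here,
which is a simplification of the code above.

# Standalone sketch of the pin-vector expansion added above.  The pin
# records are illustrative, not taken from a real datasheet.
import re

vectrex  = re.compile(r"([^\[]+)\[([0-9]+):([0-9]+)\]")   # name[3:0]
vect2rex = re.compile(r"([^<]+)<([0-9]+):([0-9]+)>")      # name<3:0>
vect3rex = re.compile(r"([^0-9]+)([0-9]+):([0-9]+)")      # name3:0

def expand_pin(pinrec):
    # Expand a bus pin record into one record per bit.
    for rex, fmt in ((vectrex, '{}[{}]'), (vect2rex, '{}<{}>'), (vect3rex, '{}{}')):
        vmatch = rex.match(pinrec['name'])
        if vmatch:
            pinname = vmatch.group(1)
            pinmin, pinmax = int(vmatch.group(2)), int(vmatch.group(3))
            if pinmin > pinmax:
                pinmin, pinmax = pinmax, pinmin
            return [dict(pinrec, name=fmt.format(pinname, i))
                    for i in range(pinmin, pinmax + 1)]
    return [pinrec]

if __name__ == '__main__':
    for rec in ({'name': 'data[3:0]'}, {'name': 'sel<1:0>'},
                {'name': 'addr7:0'}, {'name': 'clk'}):
        print(rec['name'], '->', [p['name'] for p in expand_pin(rec)])
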
@@ -1162,21 +1187,21 @@
# it and make substitutions
# NOTE: Schematic methods are bundled with the DUT schematic
- template = testbenchpath + '/' + testbench.lower() + '.spi'
+ template = testbenchpath + '/' + testbench.lower() + '.spice'
if testbench_orig and not os.path.isfile(template):
print('Warning: Alternate testbench ' + testbench + ' cannot be found.')
print('Reverting to original testbench ' + testbench_orig)
testbench = testbench_orig
filename = testbench + fsuffix
- template = testbenchpath + '/' + testbench.lower() + '.spi'
+ template = testbenchpath + '/' + testbench.lower() + '.spice'
if os.path.isfile(template):
param['testbenches'] = substitute(filename, fileinfo, template,
simvals, maxtime, schemline, localmode, param)
- # For cosimulations, if there is a '.tv' file corresponding to the '.spi' file,
- # then make substitutions as for the .spi file, and place in characterization
+ # For cosimulations, if there is a '.tv' file corresponding to the '.spice' file,
+ # then make substitutions as for the .spice file, and place in characterization
# directory.
vtemplate = testbenchpath + '/' + testbench.lower() + '.tv'
@@ -1218,16 +1243,16 @@
return prescore
-def check_layout_out_of_date(spipath, layoutpath):
- # Check if a netlist (spipath) is out-of-date relative to the layouts
+def check_layout_out_of_date(spicepath, layoutpath):
+ # Check if a netlist (spicepath) is out-of-date relative to the layouts
# (layoutpath). Need to read the netlist and check all of the subcells.
need_capture = False
- if not os.path.isfile(spipath):
+ if not os.path.isfile(spicepath):
need_capture = True
elif not os.path.isfile(layoutpath):
need_capture = True
else:
- spi_statbuf = os.stat(spipath)
+ spi_statbuf = os.stat(spicepath)
lay_statbuf = os.stat(layoutpath)
if spi_statbuf.st_mtime < lay_statbuf.st_mtime:
# netlist exists but is out-of-date
@@ -1238,7 +1263,7 @@
# and check those dates, too.
layoutdir = os.path.split(layoutpath)[0]
subrex = re.compile('^[^\*]*[ \t]*.subckt[ \t]+([^ \t]+).*$', re.IGNORECASE)
- with open(spipath, 'r') as ifile:
+ with open(spicepath, 'r') as ifile:
duttext = ifile.read()
dutlines = duttext.replace('\n+', ' ').splitlines()
for line in dutlines:
@@ -1256,19 +1281,19 @@
break
return need_capture
-def check_schematic_out_of_date(spipath, schempath):
- # Check if a netlist (spipath) is out-of-date relative to the schematics
+def check_schematic_out_of_date(spicepath, schempath):
+ # Check if a netlist (spicepath) is out-of-date relative to the schematics
# (schempath). Need to read the netlist and check all of the subcells.
need_capture = False
- if not os.path.isfile(spipath):
+ if not os.path.isfile(spicepath):
print('Schematic-captured netlist does not exist. Need to regenerate.')
need_capture = True
elif not os.path.isfile(schempath):
need_capture = True
else:
- spi_statbuf = os.stat(spipath)
+ spi_statbuf = os.stat(spicepath)
sch_statbuf = os.stat(schempath)
- print('DIAGNOSTIC: Comparing ' + spipath + ' to ' + schempath)
+ print('DIAGNOSTIC: Comparing ' + spicepath + ' to ' + schempath)
if spi_statbuf.st_mtime < sch_statbuf.st_mtime:
# netlist exists but is out-of-date
print('Netlist is older than top-level schematic')
@@ -1279,27 +1304,22 @@
# netlist. Now need to read the netlist, find all subcircuits,
# and check those dates, too.
schemdir = os.path.split(schempath)[0]
+ schrex = re.compile('\*\*[ \t]*sch_path:[ \t]*([^ \t\n]+)', re.IGNORECASE)
subrex = re.compile('^[^\*]*[ \t]*.subckt[ \t]+([^ \t]+).*$', re.IGNORECASE)
- with open(spipath, 'r') as ifile:
+ with open(spicepath, 'r') as ifile:
duttext = ifile.read()
dutlines = duttext.replace('\n+', ' ').splitlines()
for line in dutlines:
- lmatch = subrex.match(line)
+ # xschem helpfully adds a "sch_path" comment line for every subcircuit
+ # coming from a separate schematic file.
+
+ lmatch = schrex.match(line)
if lmatch:
- subname = lmatch.group(1)
- # NOTE: Electric uses library:cell internally to track libraries,
- # and maps the ":" to "__" in the netlist. Not entirely certain that
- # the double-underscore uniquely identifies the library:cell. . .
- librex = re.compile('(.*)__(.*)', re.IGNORECASE)
- lmatch = librex.match(subname)
- if lmatch:
- elecpath = os.path.split(os.path.split(schempath)[0])[0]
- libname = lmatch.group(1)
- subschem = elecpath + '/' + libname + '.delib/' + lmatch.group(2) + '.sch'
- else:
- libname = {}
- subschem = schemdir + '/' + subname + '.sch'
+ subschem = lmatch.group(1)
+ subfile = os.path.split(subschem)[1]
+ subname = os.path.splitext(subfile)[0]
+
# subcircuits that cannot be found in the current directory are
# assumed to be library components or read-only IP components and
# therefore never out-of-date.
@@ -1310,31 +1330,6 @@
print('Netlist is older than subcircuit schematic ' + subname)
need_capture = True
break
- # mapping of characters to what's allowed in SPICE makes finding
- # the associated schematic file a bit difficult. Requires wild-card
- # searching.
- elif libname:
- restr = lmatch.group(2) + '.sch'
- restr = restr.replace('.', '\.')
- restr = restr.replace('_', '.')
- schrex = re.compile(restr, re.IGNORECASE)
- try:
- liblist = os.listdir(elecpath + '/' + libname + '.delib')
- except FileNotFoundError:
- # Potentially could look through the paths in LIBDIR. . .
- pass
- else:
- for file in liblist:
- lmatch = schrex.match(file)
- if lmatch:
- subschem = elecpath + '/' + libname + '.delib/' + file
- sub_statbuf = os.stat(subschem)
- if spi_statbuf.st_mtime < sch_statbuf.st_mtime:
- # netlist exists but is out-of-date
- need_capture = True
- print('Netlist is older than subcircuit schematic ' + file)
- print('In library ' + libname)
- break
return need_capture
def printwarn(output):
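
The out-of-date check above now keys on the "** sch_path:" comment lines
that xschem writes into its netlists, instead of reconstructing schematic
paths from Electric-style library__cell names. A standalone sketch of the
same scan, using an invented netlist fragment:

# Sketch of the sch_path scan used by check_schematic_out_of_date() above.
# The sample netlist text is invented for illustration only.
import os
import re

schrex = re.compile(r'\*\*[ \t]*sch_path:[ \t]*([^ \t\n]+)', re.IGNORECASE)

def subschematics(netlist_text):
    # Return the schematic files referenced by xschem sch_path comments.
    paths = []
    for line in netlist_text.splitlines():
        lmatch = schrex.match(line)
        if lmatch:
            paths.append(lmatch.group(1))
    return paths

sample = """* Example netlist
** sch_path: /home/user/design/xschem/opamp.sch
X1 in out vdd vss opamp
** sch_path: /home/user/design/xschem/bias.sch
X2 vdd vss ibias bias
.end
"""

for subschem in subschematics(sample):
    subname = os.path.splitext(os.path.split(subschem)[1])[0]
    print(subname, '<-', subschem)
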
@@ -1472,17 +1467,17 @@
versionpath = ipfullpath + '/' + useversion
- # First to do: Check for /spi-stub entry (which is readable), and
+ # First to do: Check for /spice/lvs entry (which is readable), and
# check if pin order is correct. Flag a warning if it is not, and
# save the pin order in a record so that all X records can be pin
# sorted correctly.
- if os.path.exists(versionpath + '/spi-stub'):
- stubpath = versionpath + '/spi-stub/' + subname + '/' + subname + '__' + subname + '.spi'
+ if os.path.exists(versionpath + '/spice/lvs'):
+ lvspath = versionpath + '/spice/lvs/' + subname + '.spice'
# More spice file reading! This should be quick, as these files have
# only a single empty subcircuit in them.
found = False
- with open(stubpath, 'r') as sfile:
+ with open(lvspath, 'r') as sfile:
stubtext = sfile.read()
stublines = stubtext.replace('\n+', ' ').replace(',', '|').splitlines()
for line in stublines:
@@ -1492,7 +1487,7 @@
stubname = smatch.group(1)
stublist = smatch.group(2).split()
if stubname != subname + '__' + subname:
- print('Error: Looking for subcircuit ' + subname + '__' + subname + ' in file ' + stubpath + ' but found subcircuit ' + stubname + ' instead!')
+ print('Error: Looking for subcircuit ' + subname + '__' + subname + ' in file ' + lvspath + ' but found subcircuit ' + stubname + ' instead!')
print("This simulation probably isn't going to go well.")
else:
needsort = False
@@ -1508,26 +1503,26 @@
pinsorts[subname] = pinorder
break
if not found:
- print('Error: Cannot find subcircuit in IP spi-stub entry.')
+ print('Error: Cannot find subcircuit in IP spice-stub entry.')
else:
- print('Warning: IP has no spi-stub entry, cannot verify pin order.')
+ print('Warning: IP has no spice-stub entry, cannot verify pin order.')
- if os.path.exists(versionpath + '/spi-rcx'):
+ if os.path.exists(versionpath + '/spice/rcx'):
# This path is restricted and can only be seen by ngspice, which is privileged
- # to read it. So we can only assume that it matches the spi-stub entry.
+ # to read it. So we can only assume that it matches the spice/stub entry.
# NOTE (10/16/2018): Use unexpanded tilde expression in file.
- # rcxpath = versionpath + '/spi-rcx/' + subname + '/' + subname + '__' + subname + '.spi'
- rcxpath = ippath + '/' + useversion + '/spi-rcx/' + subname + '/' + subname + '__' + subname + '.spi'
+ # rcxpath = versionpath + '/spice/rcx/' + subname + '/' + subname + '__' + subname + '.spice'
+ rcxpath = ippath + '/' + useversion + '/spice/rcx/' + subname + '/' + subname + '__' + subname + '.spice'
newspilines.append('* Black-box entry replaced by path to RCX netlist')
newspilines.append('.include ' + rcxpath)
extended_names.append(subname.upper())
- elif os.path.exists(ipfullpath + '/' + useversion + '/spi'):
- # In a pinch, if there is no spi-rcx, try plain spi
+ elif os.path.exists(ipfullpath + '/' + useversion + '/spice'):
+ # In a pinch, if there is no spice/rcx, try plain spice
# NOTE (10/16/2018): Use unexpanded tilde expression in file.
- # spipath = versionpath + '/spi/' + subname + '.spi'
- spipath = ippath + '/' + useversion + '/spi/' + subname + '.spi'
+ # spicepath = versionpath + '/spice/' + subname + '.spice'
+ spicepath = ippath + '/' + useversion + '/spice/' + subname + '.spice'
newspilines.append('* Black-box entry replaced by path to schematic netlist')
- newspilines.append('.include ' + spipath)
+ newspilines.append('.include ' + spicepath)
else:
# Leave as is, and let it force an error
newspilines.append(line)
@@ -1550,12 +1545,12 @@
# files in spi/!
newspilines.append('* Need include to schematic netlist for ' + subname)
- # However, the CDL stub file can be used to check pin order
- stubpath = techdir + '/libs.ref/cdlStub/' + techsubdir + '/stub.cdl'
- if os.path.exists(stubpath):
+ # However, the CDL file can be used to check pin order
+ cdlpath = techdir + '/libs.ref/' + techsubdir + '/' + techsubdir + '.cdl'
+ if os.path.exists(cdlpath):
# More spice file reading! This should be quick, as these files have
             # only empty subcircuits in them.
- with open(stubpath, 'r') as sfile:
+ with open(cdlpath, 'r') as sfile:
stubtext = sfile.read()
stublines = stubtext.replace('\n+', ' ').replace(',', '|').splitlines()
for line in spilines:
@@ -1580,7 +1575,7 @@
break
else:
- print('No file ' + stubpath + ' found.')
+ print('No file ' + cdlpath + ' found.')
print('Failure to find stub netlist for checking pin order. Good luck.')
break
@@ -1605,28 +1600,25 @@
dname = dsheet['ip-name']
magpath = dspath + '/mag/'
- spipath = dspath + '/spi/' # Schematic netlist for sim
- stubpath = dspath + '/spi/stub/' # Schematic netlist for LVS
- pexpath = dspath + '/spi/pex/' # Layout netlist for sim
- lvspath = dspath + '/spi/lvs/' # Layout netlist for LVS
+ spicepath = dspath + '/spice/' # Schematic netlist for sim
+ pexpath = dspath + '/spice/pex/' # Layout netlist for sim (C-parasitics)
+ rcxpath = dspath + '/spice/rcx/' # Layout netlist for sim (R+C-parasitics)
+ lvspath = dspath + '/spice/lvs/' # Layout netlist for LVS
vlogpath = dspath + '/verilog/' # Verilog netlist for sim and LVS
- netlistname = dname + '.spi'
- schnetlist = spipath + netlistname
- stubnetlist = stubpath + netlistname
+ netlistname = dname + '.spice'
+ schnetlist = spicepath + netlistname
+ rcxnetlist = rcxpath + netlistname
pexnetlist = pexpath + netlistname
- laynetlist = lvspath + netlistname
+ lvsnetlist = lvspath + netlistname
layoutpath = magpath + dname + '.mag'
- elecpath = dspath + '/elec/' + dname + '.delib'
- schempath = elecpath + '/' + dname + '.sch'
+ schempath = dspath + '/xschem/' + dname + '.sch'
verilogpath = vlogpath + dname + '.v'
pathlast = os.path.split(dspath)[1]
- verilogaltpath = vlogpath + pathlast + '/' + dname + '.vgl'
need_sch_capture = True
- need_stub_capture = True
- need_lay_capture = True
- need_pex_capture = True
+ need_extract = True
+ need_pex = True
force_regenerate = False
# Check if datasheet has been marked for forced netlist regeneration
@@ -1634,20 +1626,8 @@
if dsheet['regenerate'] == 'force':
force_regenerate = True
- # If schempath does not exist, check if the .sch file is in a different
- # library.
if not os.path.exists(schempath):
print('No schematic in path ' + schempath)
- print('Checking for other library paths.')
- for libname in os.listdir(dspath + '/elec/'):
- if os.path.splitext(libname)[1] == '.delib':
- elecpath = dspath + '/elec/' + libname
- if os.path.exists(elecpath):
- for schfile in os.listdir(elecpath):
- if schfile == dname + '.sch':
- schempath = elecpath + '/' + schfile
- print('Schematic found in ' + schempath)
- break
# Guess the source based on the file or files in the design directory,
# with preference given to layout. This may be overridden in local mode.
@@ -1656,15 +1636,14 @@
print("Checking for out-of-date netlists.\n")
netlist_source = dsheet['netlist-source']
need_sch_capture = check_schematic_out_of_date(schnetlist, schempath)
- need_stub_capture = check_schematic_out_of_date(stubnetlist, schempath)
if netlist_source == 'layout':
netlist_path = pexnetlist
- need_pex_capture = check_layout_out_of_date(pexnetlist, layoutpath)
- need_lay_capture = check_layout_out_of_date(laynetlist, layoutpath)
+ need_pex_extract = check_layout_out_of_date(pexnetlist, layoutpath)
+ need_lvs_extract = check_layout_out_of_date(laynetlist, layoutpath)
else:
netlist_path = schnetlist
- need_lay_capture = False
- need_pex_capture = False
+ need_lvs_extract = False
+ need_pex_extract = False
else:
if not localmode:
print("Remote use, ", end='');
@@ -1675,8 +1654,8 @@
netlist_path = pexnetlist
else:
netlist_path = schnetlist
- need_lay_capture = False
- need_pex_capture = False
+ need_lvs_extract = False
+ need_pex_extract = False
else:
if os.path.exists(layoutpath):
netlist_path = pexnetlist
@@ -1684,24 +1663,22 @@
elif os.path.exists(schempath):
netlist_path = schnetlist
dsheet['netlist-source'] = 'schematic'
- need_lay_capture = False
- need_pex_capture = False
+ need_lvs_extract = False
+ need_pex_extract = False
elif os.path.exists(verilogpath):
netlist_path = verilogpath
dsheet['netlist-source'] = 'verilog'
- need_lay_capture = False
- need_pex_capture = False
+ need_lvs_extract = False
+ need_pex_extract = False
need_sch_capture = False
- need_stub_capture = False
elif os.path.exists(verilogaltpath):
netlist_path = verilogaltpath
dsheet['netlist-source'] = 'verilog'
- need_lay_capture = False
- need_pex_capture = False
+ need_lvs_extract = False
+ need_pex_extract = False
need_sch_capture = False
- need_stub_capture = False
- if need_lay_capture or need_pex_capture:
+ if need_lvs_extract or need_pex_extract:
# Layout LVS netlist needs regenerating. Check for magic layout.
if not os.path.isfile(layoutpath):
print('Error: No netlist or layout for project ' + dname + '.')
@@ -1717,7 +1694,7 @@
os.makedirs(pexpath)
print("Extracting LVS netlist from layout. . .")
- mproc = subprocess.Popen(['/ef/apps/bin/magic', '-dnull', '-noconsole',
+ mproc = subprocess.Popen(['magic', '-dnull', '-noconsole',
layoutpath], stdin = subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd = dspath + '/mag',
universal_newlines = True)
@@ -1733,11 +1710,11 @@
# Don't want black box entries, but create them so that we know which
# subcircuits are in the ip path, then replace them.
mproc.stdin.write("ext2spice blackbox on\n")
- if need_lay_capture:
+ if need_lvs_extract:
mproc.stdin.write("ext2spice cthresh infinite\n")
mproc.stdin.write("ext2spice rthresh infinite\n")
mproc.stdin.write("ext2spice -o " + laynetlist + "\n")
- if need_pex_capture:
+ if need_pex_extract:
mproc.stdin.write("ext2spice cthresh 0.005\n")
mproc.stdin.write("ext2spice rthresh 1\n")
mproc.stdin.write("ext2spice -o " + pexnetlist + "\n")
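
The hunk above calls magic from the normal search path and drives the
extraction by writing commands to its stdin. A minimal sketch of that
pattern follows; the layout name and output name are placeholders, and the
"extract all" and "quit -noprompt" commands are assumed from typical magic
batch usage rather than shown in the hunk.

# Sketch of driving magic in batch mode over stdin, as cace_gensim.py does.
# Assumes magic is on PATH; 'example.mag' and the output name are illustrative.
import subprocess

layout = 'example.mag'
pexnetlist = 'example.spice'

mproc = subprocess.Popen(['magic', '-dnull', '-noconsole', layout],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         universal_newlines=True)
# Extract and write a parasitic (R + C) netlist.
mproc.stdin.write("extract all\n")
mproc.stdin.write("ext2spice cthresh 0.005\n")
mproc.stdin.write("ext2spice rthresh 1\n")
mproc.stdin.write("ext2spice -o " + pexnetlist + "\n")
mproc.stdin.write("quit -noprompt\n")
magout = mproc.communicate()[0]
if mproc.returncode != 0:
    print('Magic process returned error code ' + str(mproc.returncode))
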
@@ -1747,20 +1724,20 @@
if mproc.returncode != 0:
print('Magic process returned error code ' + str(mproc.returncode) + '\n')
- if need_lay_capture and not os.path.isfile(laynetlist):
+ if need_lvs_extract and not os.path.isfile(laynetlist):
print('Error: No LVS netlist extracted from magic.')
- if need_pex_capture and not os.path.isfile(pexnetlist):
+ if need_pex_extract and not os.path.isfile(pexnetlist):
print('Error: No parasitic extracted netlist extracted from magic.')
- if (mproc.returncode != 0) or (need_lay_capture and not os.path.isfile(laynetlist)) or (need_pex_capture and not os.path.isfile(pexnetlist)):
+ if (mproc.returncode != 0) or (need_lvs_extract and not os.path.isfile(laynetlist)) or (need_pex_extract and not os.path.isfile(pexnetlist)):
return False
- if need_pex_capture and os.path.isfile(pexnetlist):
+ if need_pex_extract and os.path.isfile(pexnetlist):
print('Generating include statements for read-only IP blocks in layout, if needed')
layout_netlist_includes(pexnetlist, dspath)
- if need_sch_capture or need_stub_capture:
- # Netlist needs regenerating. Check for electric schematic
+ if need_sch_capture:
+ # Netlist needs regenerating. Check for xschem schematic
if not os.path.isfile(schempath):
if os.path.isfile(verilogpath):
print('No schematic for project.')
@@ -1776,80 +1753,35 @@
print('Error: No verilog netlist ' + verilogpath + ' or ' + verilogaltpath + ', either.')
return False
- # Check if there is a .java directory, if not (e.g., for remote CACE),
- # then copy it from the defaults.
- if not os.path.exists(dspath + '/elec/.java'):
- shutil.copytree('/ef/efabless/deskel/dotjava', dspath + '/elec/.java',
- symlinks = True)
-
- # Fix the LIBDIRS file if needed
- if not os.path.isfile(dspath + '/elec/LIBDIRS'):
- fix_libdirs(dspath, create = True)
- elif need_sch_capture or need_stub_capture:
- fix_libdirs(dspath)
-
if need_sch_capture:
print("Generating simulation netlist from schematic. . .")
# Generate the netlist
- print('Calling /ef/efabless/bin/elec2spi -o ')
- libpath = os.path.split(schempath)[0]
- libname = os.path.split(libpath)[1]
- print(schnetlist + ' -TS -NTI ' + libname + ' ' + dname + '.sch\n')
+ print('Calling xschem to generate netlist')
- # elec2spi requires that the /spi/ and /spi/stub directory exists
- if not os.path.exists(spipath):
- os.makedirs(spipath)
+ if not os.path.exists(spicepath):
+ os.makedirs(spicepath)
- eproc = subprocess.Popen(['/ef/efabless/bin/elec2spi',
- '-o', schnetlist, '-TS', '-NTI', libname, dname + '.sch'],
+ xproc = subprocess.Popen(['xschem', '-n', '-r', '-q',
+ '--tcl "set top_subckt 1',
+ '-o', schnetlist, dname + '.sch'],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- cwd = dspath + '/elec')
+ cwd = dspath + '/xschem')
- elecout = eproc.communicate()[0]
- if eproc.returncode != 0:
- for line in elecout.splitlines():
+ xout = xproc.communicate()[0]
+ if xproc.returncode != 0:
+ for line in xout.splitlines():
print(line.decode('utf-8'))
- print('Electric process returned error code ' + str(eproc.returncode) + '\n')
+ print('Xschem process returned error code ' + str(xproc.returncode) + '\n')
else:
- printwarn(elecout)
+ printwarn(xout)
if not os.path.isfile(schnetlist):
print('Error: No netlist found for the circuit!\n')
print('(schematic netlist for simulation ' + schnetlist + ' not found.)\n')
- if need_stub_capture:
- print("Generating LVS netlist from schematic. . .")
- # Generate the netlist
- print('Calling /ef/efabless/bin/elec2spi -o ')
- libpath = os.path.split(schempath)[0]
- libname = os.path.split(libpath)[1]
- print(stubnetlist + ' -LP -TS -NTI ' + libname + ' ' + dname + '.sch\n')
-
- # elec2spi requires that the /spi/stub directory exists
- if not os.path.exists(stubpath):
- os.makedirs(stubpath)
-
- eproc = subprocess.Popen(['/ef/efabless/bin/elec2spi',
- '-o', stubnetlist, '-LP', '-TS', '-NTI', libname, dname + '.sch'],
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- cwd = dspath + '/elec')
-
- elecout = eproc.communicate()[0]
- if eproc.returncode != 0:
- for line in elecout.splitlines():
- print(line.decode('utf-8'))
-
- print('Electric process returned error code ' + str(eproc.returncode) + '\n')
- else:
- printwarn(elecout)
-
- if not os.path.isfile(stubnetlist):
- print('Error: No netlist found for the circuit!\n')
- print('(schematic netlist for LVS ' + stubnetlist + ' not found.)\n')
-
- if need_sch_capture or need_stub_capture:
- if (not os.path.isfile(schnetlist)) or (not os.path.isfile(stubnetlist)):
+ if need_sch_capture:
+ if (not os.path.isfile(schnetlist)):
return False
return netlist_path
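
The schematic-capture branch above replaces the elec2spi flow with a batch
xschem run. A minimal standalone sketch of the same call is given below;
the design name and paths are placeholders, the flags mirror the hunk
above, and the --tcl value is passed as a separate list element here, which
is the usual way to hand it to subprocess.

# Sketch of generating a simulation netlist from an xschem schematic, as in
# the need_sch_capture branch above.  'dspath' and 'dname' are placeholders.
import os
import subprocess

dspath = os.path.expanduser('~/design/example_project')
dname = 'example'
schnetlist = os.path.join(dspath, 'spice', dname + '.spice')

os.makedirs(os.path.join(dspath, 'spice'), exist_ok=True)

xproc = subprocess.Popen(['xschem', '-n', '-r', '-q',
                          '--tcl', 'set top_subckt 1',
                          '-o', schnetlist, dname + '.sch'],
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                         cwd=os.path.join(dspath, 'xschem'))
xout = xproc.communicate()[0]
if xproc.returncode != 0:
    for line in xout.splitlines():
        print(line.decode('utf-8'))
    print('Xschem process returned error code ' + str(xproc.returncode))
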
@@ -2009,7 +1941,7 @@
datasheet_name = 'datasheet.json'
elif localmode and root_path:
# Use normal path to local simulation workspace
- simulation_path = root_path + '/ngspice/char'
+ simulation_path = root_path + '/ngspice'
# Check that datasheet path exists and that the datasheet is there
if not os.path.isdir(datasheet_path):
@@ -2032,8 +1964,8 @@
if 'request-hash' in datatop:
hashname = datatop['request-hash']
simulation_path = root_path + '/' + hashname
- elif os.path.isdir(root_path + '/ngspice/char'):
- simulation_path = root_path + '/ngspice/char'
+ elif os.path.isdir(root_path + '/ngspice'):
+ simulation_path = root_path + '/ngspice'
else:
simulation_path = root_path
elif not os.path.isabs(simulation_path):
diff --git a/runtime/cace_launch.py b/runtime/cace_launch.py
index ed85b47..5913e49 100755
--- a/runtime/cace_launch.py
+++ b/runtime/cace_launch.py
@@ -31,13 +31,10 @@
import file_compressor
import cace_makeplot
-import config
-
-# Values imported from config:
-#
-mktp_server_url = config.mktp_server_url
-# obs: og_server_url = config.og_server_url
-simulation_path = config.simulation_path
+# Fix this. . .
+simulation_path = ""
+og_server_url = ""
+mktp_server_url = ""
# Variables needing to be global until this file is properly made into a class
simfiles_path = []
@@ -91,7 +88,7 @@
else:
subprocess.run(['rm', '-r', root_path])
else:
- # Remove all .spi files, .data files, .raw files and copy of datasheet
+ # Remove all .spice files, .data files, .raw files and copy of datasheet
os.chdir(simfiles_path)
if os.path.exists('datasheet.json'):
os.remove('datasheet.json')
@@ -102,7 +99,7 @@
except:
pass
else:
- if fileext == '.spi' or fileext == '.data' or fileext == '.raw':
+ if fileext == '.spice' or fileext == '.data' or fileext == '.raw':
os.remove(filename)
elif fileext == '.tv' or fileext == '.tvo' or fileext == '.lxt' or fileext == '.vcd':
os.remove(filename)
@@ -491,11 +488,11 @@
# the original one for backwards compatibility.
if node == 'XH035':
node = 'EFXH035A'
- mag_path = netlist_path + '/lvs/' + ipname + '.spi'
- schem_path = netlist_path + '/stub/' + ipname + '.spi'
+ mag_path = netlist_path + '/lvs/' + ipname + '.spice'
+ schem_path = netlist_path + ipname + '.spice'
if not os.path.exists(schem_path):
- schem_path = netlist_path + '/' + ipname + '.spi'
+ schem_path = netlist_path + '/' + ipname + '.spice'
if not os.path.exists(schem_path):
if os.path.exists(root_path + '/verilog'):
schem_path = root_path + '/verilog/' + ipname + '.v'
@@ -527,16 +524,16 @@
pdkdir = os.path.realpath(root_path + '/.ef-config/techdir')
else:
foundry = dsheet['foundry']
- pdkdir = '/ef/tech/' + foundry + '/' + node
+ pdkdir = '/usr/share/pdk/' + node
lvs_setup = pdkdir + '/libs.tech/netgen/' + node + '_setup.tcl'
# Run LVS as a subprocess and wait for it to finish. Use the -json
# switch to get a file that is easy to parse.
- print('cace_launch.py: running /ef/apps/bin/netgen -batch lvs ')
+ print('cace_launch.py: running netgen -batch lvs ')
print(layout_arg + ' ' + schem_path + ' ' + ipname + ' ' + lvs_setup + ' comp.out -json -blackbox')
- lvsproc = subprocess.run(['/ef/apps/bin/netgen', '-batch', 'lvs',
+ lvsproc = subprocess.run(['netgen', '-batch', 'lvs',
layout_arg, schem_path + ' ' + ipname,
lvs_setup, 'comp.out', '-json', '-blackbox'], cwd=layout_path,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0)
@@ -1185,7 +1182,7 @@
if root_path:
simfiles_path = root_path + '/' + hashname
else:
- simfiles_path = config.simulation_path + '/' + hashname
+ simfiles_path = simulation_path + '/' + hashname
if not os.path.isdir(simfiles_path):
print('Error: Simulation folder ' + simfiles_path + ' does not exist.')
@@ -1197,10 +1194,10 @@
if not netlist_path:
if root_path:
- netlist_path = root_path + '/spi'
+ netlist_path = root_path + '/spice'
# Change location to the simulation directory
- os.chdir(simfiles_path)
+ # os.chdir(simfiles_path)
# pull out the relevant part of the JSON file, which is "data-sheet"
dsheet = datatop['data-sheet']
@@ -1322,13 +1319,13 @@
my_env = os.environ.copy()
if os.path.exists(verilog):
cosim = True
- simulator = '/ef/apps/bin/vvp'
+ simulator = 'vvp'
simargs = ['-M.', '-md_hdl_vpi']
filename = verilog + 'o'
# Copy the d_hdl object file into the simulation directory
- shutil.copy('/ef/efabless/lib/iverilog/d_hdl_vpi.vpi', simfiles_path)
+ shutil.copy('d_hdl_vpi.vpi', simfiles_path)
# Generate the output executable (.tvo) file for vvp.
- subprocess.call(['/ef/apps/bin/iverilog', '-o' + filename, verilog])
+ subprocess.call(['iverilog', '-o' + filename, verilog])
# Specific version of ngspice must be used for cosimulation
# (Deprecated; default version of ngspice now supports cosimulation)
# my_env['NGSPICE_VERSION'] = 'cosim1'
@@ -1338,7 +1335,7 @@
os.remove('simulator_pipe')
else:
cosim = False
- simulator = '/ef/apps/bin/ngspice'
+ simulator = 'ngspice'
simargs = ['-b']
# Do not generate LXT files, as CACE does not have any methods to handle
# the data in them anyway.
@@ -1855,7 +1852,7 @@
# May want to watch stderr for error messages and/or handle
# exit status.
- postproc = subprocess.Popen(['/ef/apps/bin/octave-cli', tb_path],
+ postproc = subprocess.Popen(['octave-cli', tb_path],
stdout = subprocess.PIPE)
rvalues = postproc.communicate()[0].decode('ascii').splitlines()
@@ -2036,9 +2033,9 @@
if layout_path and netlist_path:
# Run the device area (area estimation) script
- if os.path.exists(netlist_path + '/' + ipname + '.spi'):
- estproc = subprocess.Popen(['/ef/efabless/bin/layout_estimate.py',
- netlist_path + '/' + ipname + '.spi', node.lower()],
+ if os.path.exists(netlist_path + '/' + ipname + '.spice'):
+ estproc = subprocess.Popen(['layout_estimate.py',
+ netlist_path + '/' + ipname + '.spice', node.lower()],
stdout=subprocess.PIPE,
cwd = layout_path, universal_newlines = True)
outlines = estproc.communicate()[0]
@@ -2096,7 +2093,7 @@
# script. Result is either an actual area or an area estimate.
if os.path.exists(layout_path + '/' + ipname + '.mag'):
- areaproc = subprocess.Popen(['/ef/apps/bin/magic',
+ areaproc = subprocess.Popen(['magic',
'-dnull', '-noconsole', layout_path + '/' + ipname + '.mag'],
stdin = subprocess.PIPE, stdout = subprocess.PIPE,
cwd = layout_path, universal_newlines = True)
@@ -2167,7 +2164,7 @@
# Find the layout directory and check if there is a layout
# for the cell there.
- areaproc = subprocess.Popen(['/ef/apps/bin/magic',
+ areaproc = subprocess.Popen(['magic',
'-dnull', '-noconsole', layout_path + '/' + ipname + '.mag'],
stdin = subprocess.PIPE, stdout = subprocess.PIPE,
cwd = layout_path, universal_newlines = True)
@@ -2207,15 +2204,15 @@
if not os.path.exists(layout_path):
os.makedirs(layout_path)
if not os.path.exists(layout_path + '/.magicrc'):
- pdkdir = '/ef/tech/' + foundry + '/' + node + '/libs.tech/magic/current'
+ pdkdir = '/usr/share/pdk/' + node + '/libs.tech/magic'
if os.path.exists(pdkdir + '/' + node + '.magicrc'):
shutil.copy(pdkdir + '/' + node + '.magicrc', layout_path + '/.magicrc')
# Netlists should have been generated by cace_gensim.py
- has_layout_nl = os.path.exists(netlist_path + '/lvs/' + ipname + '.spi')
- has_schem_nl = os.path.exists(netlist_path + '/' + ipname + '.spi')
+ has_layout_nl = os.path.exists(netlist_path + '/lvs/' + ipname + '.spice')
+ has_schem_nl = os.path.exists(netlist_path + '/' + ipname + '.spice')
has_vlog_nl = os.path.exists(root_path + '/verilog/' + ipname + '.v')
- has_stub_nl = os.path.exists(netlist_path + '/stub/' + ipname + '.spi')
+ has_stub_nl = os.path.exists(netlist_path + '/stub/' + ipname + '.spice')
if has_layout_nl and has_stub_nl and not netlist_source == 'schematic':
failures = run_and_analyze_lvs(dsheet)
elif has_layout_nl and has_vlog_nl and not netlist_source == 'schematic':
@@ -2223,23 +2220,23 @@
elif netlist_path and has_schem_nl:
if not has_layout_nl or not has_stub_nl:
if not has_layout_nl:
- print("Did not find layout LVS netlist " + netlist_path + '/lvs/' + ipname + '.spi')
+ print("Did not find layout LVS netlist " + netlist_path + '/lvs/' + ipname + '.spice')
if not has_stub_nl:
- print("Did not find schematic LVS netlist " + netlist_path + '/' + ipname + '.spi')
+ print("Did not find schematic LVS netlist " + netlist_path + '/' + ipname + '.spice')
print("Running layout device pre-check.")
if localmode == True:
if keepmode == True:
precheck_opts = ['-log', '-debug']
else:
precheck_opts = ['-log']
- print('/ef/efabless/bin/layout_precheck.py ' + netlist_path + '/' + ipname + '.spi ' + node.lower() + ' ' + ' '.join(precheck_opts))
- chkproc = subprocess.Popen(['/ef/efabless/bin/layout_precheck.py',
- netlist_path + '/' + ipname + '.spi', node.lower(), *precheck_opts],
+ print('layout_precheck.py ' + netlist_path + '/' + ipname + '.spice ' + node.lower() + ' ' + ' '.join(precheck_opts))
+ chkproc = subprocess.Popen(['layout_precheck.py',
+ netlist_path + '/' + ipname + '.spice', node.lower(), *precheck_opts],
stdout=subprocess.PIPE,
cwd = layout_path, universal_newlines = True)
else:
- chkproc = subprocess.Popen(['/ef/efabless/bin/layout_precheck.py',
- netlist_path + '/' + ipname + '.spi', node.lower()],
+ chkproc = subprocess.Popen(['layout_precheck.py',
+ netlist_path + '/' + ipname + '.spice', node.lower()],
stdout=subprocess.PIPE,
cwd = layout_path, universal_newlines = True)
outlines = chkproc.communicate()[0]
@@ -2307,7 +2304,7 @@
print('Simulation results retained per -local option\n')
# If cace_gensim and cace_launch are run locally, keep the results
# since they won't be posted, but remove all other generated files.
- os.chdir(simfiles_path)
+ # os.chdir(simfiles_path)
if os.path.exists('datasheet.json'):
os.remove('datasheet.json')
for filename in filessimmed:
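
cace_launch.py likewise now calls netgen from the normal search path. A
brief sketch of the batch LVS invocation pattern it uses; the netlist
names, cell name, and setup-file path are placeholders, and the comp.json
name is an assumption about what the -json switch writes alongside comp.out.

# Sketch of a batch netgen LVS run, following run_and_analyze_lvs() above.
# All file names are placeholders; assumes netgen is on PATH.
import json
import subprocess

layout_arg = 'example_lay.spice example'    # layout netlist + cell name
schem_arg  = 'example_sch.spice example'    # schematic netlist + cell name
lvs_setup  = '/usr/share/pdk/sky130A/libs.tech/netgen/sky130A_setup.tcl'

lvsproc = subprocess.run(['netgen', '-batch', 'lvs',
                          layout_arg, schem_arg,
                          lvs_setup, 'comp.out', '-json', '-blackbox'],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)

# The -json switch produces a machine-readable report that is easier to
# parse than the text output in comp.out.
try:
    with open('comp.json', 'r') as ifile:
        lvsdata = json.load(ifile)
    print('LVS run completed; ' + str(len(lvsdata)) + ' top-level record(s).')
except FileNotFoundError:
    print('No comp.json produced; check the netgen output.')
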
diff --git a/runtime/cace_makeplot.py b/runtime/cace_makeplot.py
index 76eb649..20f35fb 100755
--- a/runtime/cace_makeplot.py
+++ b/runtime/cace_makeplot.py
@@ -8,6 +8,10 @@
import os
import matplotlib
from matplotlib.figure import Figure
+
+# Warning: PIL Tk required, may not be in default install of python3.
+# For Fedora, for example, need "yum install python-pillow-tk"
+
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.backends.backend_agg import FigureCanvasAgg
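
The warning added above notes that the Tk pieces may be missing from a
stock python3 install. A small sketch of an import guard that turns the
bare ImportError into a useful hint; the package names other than the
Fedora one quoted above are assumptions.

# Sketch: import the matplotlib Tk canvas and report an install hint if the
# Tk/PIL support is missing (e.g. "yum install python-pillow-tk" on Fedora).
import matplotlib
from matplotlib.figure import Figure

try:
    from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
except ImportError as err:
    print('Cannot load the matplotlib Tk backend: ' + str(err))
    print('On Fedora, try "yum install python-pillow-tk"; other distributions')
    print('package it as python3-tkinter / python3-pillow-tk or similar.')
    raise
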
diff --git a/runtime/file_compressor.py b/runtime/file_compressor.py
new file mode 100755
index 0000000..ef15545
--- /dev/null
+++ b/runtime/file_compressor.py
@@ -0,0 +1,118 @@
+#!/ef/efabless/opengalaxy/venv/bin/python3
+import os
+import re
+from io import BytesIO, BufferedReader, BufferedRandom
+import tarfile
+
+"""
+    This module tars and compresses a folder location and
+ all subdirectories.
+"""
+
+# tar and compress a directory in memory and return the result.
+def tar_directory(source_dir):
+ output = BytesIO()
+ with tarfile.open(fileobj=output, mode='w:gz') as archive:
+ archive.add(source_dir, arcname=os.path.basename(source_dir), recursive=True)
+
+ return output
+
+# tar and compress the files in a directory, not including the directory itself.
+# 'exclude' is a list of regular-expression patterns; paths that match are not
+# added to the archive, and matching directories are not descended into.
+def tar_directory_contents(source_dir, exclude=[]):
+ output = BytesIO()
+ curdir = os.getcwd()
+ os.chdir(source_dir)
+
+ rexclude = []
+ for pattern in exclude:
+ rexclude.append(re.compile(pattern))
+
+ with tarfile.open(fileobj=output, mode='w:gz') as archive:
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if root == '.':
+ filepath = filename
+ else:
+ rootnodot = os.path.normpath(root)
+ filepath = os.path.join(rootnodot, filename)
+ doexclude = False
+ for regexp in rexclude:
+ if re.match(regexp, filepath):
+ doexclude = True
+ break
+ if not doexclude:
+ try:
+ archive.add(filepath, recursive=False)
+ except PermissionError:
+ pass
+ for dirname in dirs[:]:
+ if root == '.':
+ dirpath = dirname
+ else:
+ rootnodot = os.path.normpath(root)
+ dirpath = os.path.join(rootnodot, dirname)
+ doexclude = False
+ for regexp in rexclude:
+ if re.match(regexp, dirpath):
+ doexclude = True
+ break
+ if doexclude:
+ dirs.remove(dirname)
+ else:
+ try:
+ archive.add(dirpath, recursive=False)
+ except PermissionError:
+ pass
+
+
+ os.chdir(curdir)
+ return output
+
+def tar_directory_contents_to_file(source_dir, tarballname, exclude=[]):
+ curdir = os.getcwd()
+ os.chdir(source_dir)
+
+ rexclude = []
+ for pattern in exclude:
+ rexclude.append(re.compile(pattern))
+
+ with tarfile.open(tarballname, mode='w:gz') as archive:
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if root == '.':
+ filepath = filename
+ else:
+ rootnodot = os.path.normpath(root)
+ filepath = os.path.join(rootnodot, filename)
+ doexclude = False
+ for regexp in rexclude:
+ if re.match(regexp, filepath):
+ doexclude = True
+ break
+ if not doexclude:
+ try:
+ archive.add(filepath, recursive=False)
+ except PermissionError:
+ pass
+ for dirname in dirs[:]:
+ if root == '.':
+ dirpath = dirname
+ else:
+ rootnodot = os.path.normpath(root)
+ dirpath = os.path.join(rootnodot, dirname)
+ doexclude = False
+ for regexp in rexclude:
+ if re.match(regexp, dirpath):
+ doexclude = True
+ break
+ if doexclude:
+ dirs.remove(dirname)
+ else:
+ try:
+ archive.add(dirpath, recursive=False)
+ except PermissionError:
+ pass
+
+ os.chdir(curdir)
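
A short usage sketch for the new module; the directory name and exclude
pattern are illustrative.

# Illustrative use of file_compressor: archive the contents of a simulation
# directory, skipping raw waveform dumps, and write the tarball to disk.
import file_compressor

# In-memory variant: returns a BytesIO holding the gzipped tar stream.
output = file_compressor.tar_directory_contents('ngspice', exclude=[r'.*\.raw$'])
with open('ngspice_results.tar.gz', 'wb') as ofile:
    ofile.write(output.getvalue())

# Direct-to-file variant.
file_compressor.tar_directory_contents_to_file('ngspice', 'ngspice_results.tgz',
                                               exclude=[r'.*\.raw$'])
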
diff --git a/runtime/project_manager.py b/runtime/project_manager.py
index 2be5f58..247f55c 100755
--- a/runtime/project_manager.py
+++ b/runtime/project_manager.py
@@ -81,7 +81,8 @@
except:
pdk_root = 'PREFIX/pdk'
-apps_path = pdk_root + '/scripts'
+# Application path (path where this script is located)
+apps_path = os.path.realpath(os.path.dirname(__file__))
#---------------------------------------------------------------
# Watch a directory for modified time change. Repeat every two
@@ -223,7 +224,7 @@
self.nentry.insert(0, seed or '') # may be None
self.pvar = tkinter.StringVar(master)
if not importnode:
- # Add PDKs as found by searching /ef/tech for 'libs.tech' directories
+ # Add PDKs as found by searching /usr/share/pdk for 'libs.tech' directories
ttk.Label(master, text="Select foundry/node:").grid(row = 2, column = 0)
else:
ttk.Label(master, text="Foundry/node:").grid(row = 2, column = 0)
@@ -239,9 +240,6 @@
node_def = "EFXH035B"
# use glob instead of os.walk. Don't need to recurse large PDK hier.
- # TODO: stop hardwired default EFXH035B: get from an overall flow /ef/tech/.ef-config/plist.json
- # (or get it from the currently selected project)
- #EFABLESS PLATFORM
for pdkdir_lr in glob.glob(pdk_root + '/*/libs.tech/'):
pdkdir = os.path.split( os.path.split( pdkdir_lr )[0])[0] # discard final .../libs.tech/
(foundry, foundry_name, node, desc, status) = ProjectManager.pdkdir2fnd( pdkdir )
@@ -254,8 +252,6 @@
if node == node_def and not pdk_def:
pdk_def = key
- # Quick hack: sorting puts EFXH035A before EFXH035LEGACY. However, some
- # ranking is needed.
pdklist = sorted( self.pdkmap.keys())
if not pdklist:
raise ValueError( "assertion failed, no available PDKs found")
@@ -656,7 +652,7 @@
self.error_label.configure(text = 'Cannot import a parent directory into itself.')
return False
#Find project pdk
- if os.path.exists(self.projectpath + '/.config/techdir') or os.path.exists(self.projectpath + '/.ef-config/techdir'):
+ if os.path.exists(self.projectpath + '/.config/techdir') or os.path.exists(self.projectpath + '/.config/techdir'):
self.project_pdkdir = os.path.realpath(self.projectpath + ProjectManager.config_path( self.projectpath) + '/techdir')
self.foundry, foundry_name, self.node, desc, status = ProjectManager.pdkdir2fnd( self.project_pdkdir )
else:
@@ -1181,12 +1177,10 @@
@classmethod
def config_path(cls, path):
- #returns the config directory that 'path' contains between .config and .ef-config
+ #returns the config directory that 'path' contains (.config)
if (os.path.exists(path + '/.config')):
return '/.config'
- elif (os.path.exists(path + '/.ef-config')):
- return '/.ef-config'
- raise Exception('Neither '+path+'/.config nor '+path+'/.ef-config exists.')
+ raise Exception(' '+path+'/.config does not exist.')
#------------------------------------------------------------------------
# Check if a name is blacklisted for being a project folder
@@ -1231,8 +1225,7 @@
#
#EFABLESS PLATFORM
- p = subprocess.run(['/ef/apps/bin/withnet' ,
- apps_path + '/og_uid_service.py', userid],
+ p = subprocess.run([apps_path + '/og_uid_service.py', userid],
stdout = subprocess.PIPE)
if p.stdout:
uid_string = p.stdout.splitlines()[0].decode('utf-8')
@@ -1362,60 +1355,6 @@
return projectlist
#------------------------------------------------------------------------
- # utility: [re]intialize a project's elec/ dir: the .java preferences and LIBDIRS.
- # So user can just delete .java, and restart electric (from projectManager), to reinit preferences.
- # So user can just delete LIBDIRS, and restart electric (from projectManager), to reinit LIBDIRS.
- # So project copies/imports can filter ngspice/run (and ../.allwaves), we'll recreate it here.
- #
- # The global /ef/efabless/deskel/* is used and the PDK name substituted.
- #
- # This SINGLE function is used to setup elec/ contents for new projects, in addition to being
- # called in-line prior to "Edit Schematics" (on-the-fly).
- #------------------------------------------------------------------------
- @classmethod
- def reinitElec(cls, design):
- pdkdir = os.path.join( design, ".ef-config/techdir")
- elec = os.path.join( design, "elec")
-
- # on the fly, ensure has elec/ dir, ensure has ngspice/run/allwaves dir
- try:
- os.makedirs(design + '/elec', exist_ok=True)
- except IOError as e:
- print('Error in os.makedirs(elec): ' + str(e))
- try:
- os.makedirs(design + '/ngspice/run/.allwaves', exist_ok=True)
- except IOError as e:
- print('Error in os.makedirs(.../.allwaves): ' + str(e))
- #EFABLESS PLATFORM
- deskel = '/ef/efabless/deskel'
-
- # on the fly:
- # .../elec/.java : reinstall if missing. From PDK-specific if any.
- if not os.path.exists( os.path.join( elec, '.java')):
- # Copy Electric preferences
- try:
- shutil.copytree(deskel + '/dotjava', design + '/elec/.java', symlinks = True)
- except IOError as e:
- print('Error copying files: ' + str(e))
-
- # .../elec/LIBDIRS : reinstall if missing, from PDK-specific LIBDIRS
- # in libs.tech/elec/LIBDIRS
-
- libdirsloc = pdkdir + '/libs.tech/elec/LIBDIRS'
-
- if not os.path.exists( os.path.join( elec, 'LIBDIRS')):
- if os.path.exists( libdirsloc ):
- # Copy Electric LIBDIRS
- try:
- shutil.copy(libdirsloc, design + '/elec/LIBDIRS')
- except IOError as e:
- print('Error copying files: ' + str(e))
- else:
- print('Info: PDK not configured for Electric: no libs.tech/elec/LIBDIRS')
-
- return None
-
- #------------------------------------------------------------------------
# utility: filter a list removing: empty strings, strings with any whitespace
#------------------------------------------------------------------------
whitespaceREX = re.compile('\s')
@@ -1444,7 +1383,7 @@
# is always ''. And an optional foundry extension is pruned/dropped.
# thus '.../XFAB.2/EFXP018A4' -> 'XFAB', 'EFXP018A4', ''
#
- # optionally store in each PDK: .ef-config/nodeinfo.json which can define keys:
+ # optionally store in each PDK: .config/nodeinfo.json which can define keys:
# 'foundry', 'node', 'description' to override the foundry (computed from the path)
# and (fixed, empty) description currently returned by this.
#
@@ -1488,7 +1427,7 @@
status = nodeinfo['status']
return foundry, foundry_name, node, description, status
- infofile = pdkdir + '/.ef-config/nodeinfo.json'
+ infofile = pdkdir + '/.config/nodeinfo.json'
if os.path.exists(infofile):
with open(infofile, 'r') as ifile:
nodeinfo = json.load(ifile)
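
The PDK identification above now looks for .config/nodeinfo.json rather
than .ef-config/nodeinfo.json. A minimal sketch of reading that file, with
the key names taken from the surrounding code; the example PDK path and the
fallback defaults are assumptions.

# Sketch: read a PDK's nodeinfo.json for foundry/node metadata, mirroring
# pdkdir2fnd() above.  The pdkdir value is an invented example.
import json
import os

pdkdir = '/usr/share/pdk/sky130A'
infofile = os.path.join(pdkdir, '.config', 'nodeinfo.json')

if os.path.exists(infofile):
    with open(infofile, 'r') as ifile:
        nodeinfo = json.load(ifile)
    # Keys used by the project manager; each may be absent, so fall back
    # to assumed defaults here.
    foundry = nodeinfo.get('foundry', '')
    node = nodeinfo.get('node', os.path.split(pdkdir)[1])
    description = nodeinfo.get('description', '')
    status = nodeinfo.get('status', 'active')
    print(foundry, node, description, status)
else:
    print('No nodeinfo.json; fall back to parsing the PDK path.')
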
@@ -1541,11 +1480,11 @@
# Get the PDK attached to a project for display as: '<foundry> : <node>'
# unless path=True: then return true PDK dir-path.
#
- # TODO: the ef-config prog output is not used below. Intent was use
- # ef-config to be the one official query for *any* project's PDK value, and
+ # TODO: the config prog output is not used below. Intent was use
+ # config to be the one official query for *any* project's PDK value, and
# therein-only hide a built-in default for legacy projects without techdir symlink.
- # In below ef-config will always give an EF_TECHDIR, so that code-branch always
- # says '(default)', the ef-config subproc is wasted, and '(no PDK)' is never
+ # In below config will always give an EF_TECHDIR, so that code-branch always
+ # says '(default)', the config subproc is wasted, and '(no PDK)' is never
# reached.
#------------------------------------------------------------------------
def get_pdk_dir(self, project, path=False):
@@ -1555,9 +1494,9 @@
foundry, foundry_name, node, desc, status = self.pdkdir2fnd( pdkdir )
return foundry + ' : ' + node
'''
- if os.path.isdir(project + '/.ef-config'):
- if os.path.exists(project + '/.ef-config/techdir'):
- pdkdir = os.path.realpath(project + '/.ef-config/techdir')
+ if os.path.isdir(project + '/.config'):
+ if os.path.exists(project + '/.config/techdir'):
+ pdkdir = os.path.realpath(project + '/.config/techdir')
elif os.path.isdir(project + '/.config'):
if os.path.exists(project + '/.config/techdir'):
@@ -1569,10 +1508,10 @@
'''
'''
if not pdkdir:
- # Run "ef-config" script for backward compatibility
+ # Run "config" script for backward compatibility
export = {'EF_DESIGNDIR': project}
#EFABLESS PLATFORM
- p = subprocess.run(['/ef/efabless/bin/ef-config', '-sh', '-t'],
+ p = subprocess.run(['config', '-sh', '-t'],
stdout = subprocess.PIPE, env = export)
config_out = p.stdout.splitlines()
for line in config_out:
@@ -1823,8 +1762,6 @@
if not os.path.exists(newproject + '/mag'):
os.makedirs(newproject + '/mag')
- self.reinitElec(newproject) # [re]install elec/.java, elec/LIBDIRS if needed, from pdk-specific if-any
-
return 1 # Success
#------------------------------------------------------------------------
@@ -1856,7 +1793,7 @@
os.makedirs(newproject + '/cdl/')
shutil.copy(importfile, newproject + '/cdl/' + newfile)
try:
- p = subprocess.run(['/ef/apps/bin/cdl2spi', importfile],
+ p = subprocess.run(['cdl2spi', importfile],
stdout = subprocess.PIPE, stderr = subprocess.PIPE,
check = True)
except subprocess.CalledProcessError as e:
@@ -1906,7 +1843,7 @@
# Run cdl2icon perl script
try:
- p = subprocess.run(['/ef/apps/bin/cdl2icon', '-file', importfile, '-cellname',
+ p = subprocess.run(['cdl2icon', '-file', importfile, '-cellname',
subname, '-libname', pname, '-projname', pname, '--prntgussddirs'],
stdout = subprocess.PIPE, stderr = subprocess.PIPE, check = True)
except subprocess.CalledProcessError as e:
@@ -1940,7 +1877,7 @@
# Call cdl2icon with the final pin directions
outname = newproject + '/elec/' + pname + '.delib/' + os.path.splitext(newfile)[0] + '.ic'
try:
- p = subprocess.run(['/ef/apps/bin/cdl2icon', '-file', importfile, '-cellname',
+ p = subprocess.run(['cdl2icon', '-file', importfile, '-cellname',
subname, '-libname', pname, '-projname', pname, '-output',
outname, '-pindircmbndstring', ','.join(pin_info_list)],
stdout = subprocess.PIPE, stderr = subprocess.PIPE, check = True)
@@ -2446,7 +2383,7 @@
+ parentdir + "/docs/.\n")
# Get the names of verilog libraries in this PDK.
- pdkdir = os.path.realpath(ppath + '/.ef-config/techdir')
+ pdkdir = os.path.realpath(ppath + '/.config/techdir')
pdkvlog = pdkdir + '/libs.ref/verilog'
pdkvlogfiles = glob.glob(pdkvlog + '/*/*.v')
@@ -2746,7 +2683,7 @@
newproject = self.projectdir + '/' + pname
try:
- p = subprocess.run(['/ef/apps/bin/vglImport', importfile, pname, elecLib],
+ p = subprocess.run(['vglImport', importfile, pname, elecLib],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
check=True, universal_newlines=True)
except subprocess.CalledProcessError as e:
@@ -2935,7 +2872,7 @@
if not confirm == 'okay':
print('Warning: Must quit and restart to get any fixes or updates.')
return
- os.execl('/ef/efabless/opengalaxy/project_manager.py', 'appsel_zenity.sh')
+ os.execl('project_manager.py', 'appsel_zenity.sh')
# Does not return; replaces existing process.
#----------------------------------------------------------------------
@@ -3146,33 +3083,16 @@
os.makedirs(newproject + '/testbench')
os.makedirs(newproject + '/verilog')
os.makedirs(newproject + '/verilog/source')
- os.makedirs(newproject + '/.ef-config')
+ os.makedirs(newproject + '/.config')
if 'xschem' in schemapps:
os.makedirs(newproject + '/xschem')
pdkname = os.path.split(newpdk)[1]
# Symbolic links
- os.symlink(newpdk, newproject + '/.ef-config/techdir')
-
- # Copy preferences
- # deskel = '/ef/efabless/deskel'
- #
- # Copy examples (disabled; this is too confusing to the end user. Also, they
- # should not be in user space at all, as they are not user editable.
- #
- # for item in os.listdir(deskel + '/exlibs'):
- # shutil.copytree(deskel + '/exlibs/' + item, newproject + '/elec/' + item)
- # for item in os.listdir(deskel + '/exmag'):
- # if os.path.splitext(item)[1] == '.mag':
- # shutil.copy(deskel + '/exmag/' + item, newproject + '/mag/' + item)
+ os.symlink(newpdk, newproject + '/.config/techdir')
# Put tool-specific startup files into the appropriate user directories.
- if 'electric' in layoutapps or 'electric' in schemapps:
- self.reinitElec(newproject) # [re]install elec/.java, elec/LIBDIRS if needed, from pdk-specific if-any
- # Set up electric
- self.create_electric_header_file(newproject, newname)
-
if 'magic' in layoutapps:
shutil.copy(newpdk + '/libs.tech/magic/' + pdkname + '.magicrc', newproject + '/mag/.magicrc')
@@ -3415,17 +3335,8 @@
os.makedirs(newproject + '/ngspice/run/.allwaves')
except FileExistsError:
pass
- '''
- if not elprefs:
- # Copy preferences
- deskel = '/ef/efabless/deskel'
- try:
- shutil.copytree(deskel + '/dotjava', newproject + '/elec/.java', symlinks = True)
- except IOError as e:
- print('Error copying files: ' + e)
- '''
-#----------------------------------------------------------------------
+ #----------------------------------------------------------------------
# Allow the user to choose the flow of the project
#----------------------------------------------------------------------
@@ -3594,7 +3505,7 @@
def make_techdirs(projectpath, project_pdkdir):
# Recursively create techdirs in project and subproject folders
- if not (os.path.exists(projectpath + '/.config') or os.path.exists(projectpath + '/.ef-config')):
+ if not os.path.exists(projectpath + '/.config'):
os.makedirs(projectpath + '/.config')
if not os.path.exists(projectpath + self.config_path(projectpath) + '/techdir'):
os.symlink(project_pdkdir, projectpath + self.config_path(projectpath) + '/techdir')
@@ -3936,7 +3847,7 @@
export = dict(os.environ)
export['EF_DESIGNDIR'] = ppath
- subprocess.Popen(['/ef/apps/bin/padframe-calc', elecLib, cellname], cwd = ppath, env = export)
+ subprocess.Popen(['padframe-calc', elecLib, cellname], cwd = ppath, env = export)
# not yet any useful return value or reporting of results here in projectManager...
return 1
@@ -3994,21 +3905,7 @@
libs = []
ellibrex = re.compile(r'^(tech_.*|ef_examples)\.[dj]elib$', re.IGNORECASE)
- self.reinitElec(design)
-
- # /elec and /.java are prerequisites for running electric
- if not os.path.exists(design + '/elec'):
- print("No path to electric design folder.")
- return
-
- if not os.path.exists(design + '/elec/.java'):
- print("No path to electric .java folder.")
- return
-
- # Fix the LIBDIRS file if needed
- #fix_libdirs(design, create = True)
-
- # Check for legacy directory (missing .ef-config and/or .ef-config/techdir);
+ # Check for legacy directory (missing .config and/or .config/techdir);
# Handle as necessary.
# don't sometimes yield pdkdir as some subdir of techdir
@@ -4017,7 +3914,7 @@
export = dict(os.environ)
export['EF_DESIGNDIR'] = design
'''
- p = subprocess.run(['/ef/efabless/bin/ef-config', '-sh', '-t'],
+ p = subprocess.run(['config', '-sh', '-t'],
stdout = subprocess.PIPE, env = export)
config_out = p.stdout.splitlines()
for line in config_out:
@@ -4084,7 +3981,7 @@
if indirectlibs:
export['EOPENARGS'] = ' '.join(indirectlibs)
arguments.append('-s')
- arguments.append('/ef/efabless/lib/elec/elecOpen.bsh')
+ arguments.append('elecOpen.bsh')
try:
arguments.append(libs[-1])
@@ -4213,9 +4110,9 @@
pdkdir = ''
pdkname = ''
- if os.path.exists(design + '/.ef-config/techdir/libs.tech'):
- pdkdir = design + '/.ef-config/techdir/libs.tech/magic/current'
- pdkname = os.path.split(os.path.realpath(design + '/.ef-config/techdir'))[1]
+ if os.path.exists(design + '/.config/techdir/libs.tech'):
+ pdkdir = design + '/.config/techdir/libs.tech/magic/current'
+ pdkname = os.path.split(os.path.realpath(design + '/.config/techdir'))[1]
elif os.path.exists(design + '/.config/techdir/libs.tech'):
pdkdir = design + '/.config/techdir/libs.tech/magic'
pdkname = os.path.split(os.path.realpath(design + '/.config/techdir'))[1]
@@ -4287,9 +4184,9 @@
# NOTE: netlist_to_layout script will attempt to generate a
# schematic netlist if one does not exist.
- print('Running /ef/efabless/bin/netlist_to_layout.py ../spi/' + designname + '.spi')
+ print('Running netlist_to_layout.py ../spi/' + designname + '.spi')
try:
- p = subprocess.run(['/ef/efabless/bin/netlist_to_layout.py',
+ p = subprocess.run(['netlist_to_layout.py',
'../spi/' + designname + '.spi'],
stdin = subprocess.PIPE, stdout = subprocess.PIPE,
stderr = subprocess.PIPE, cwd = design + '/mag')
@@ -4303,7 +4200,7 @@
else:
if os.path.exists(design + '/mag/create_script.tcl'):
with open(design + '/mag/create_script.tcl', 'r') as infile:
- magproc = subprocess.run(['/ef/apps/bin/magic',
+ magproc = subprocess.run(['magic',
'-dnull', '-noconsole', '-rcfile ',
pdkdir + '/' + pdkname + '.magicrc', designname],
stdin = infile, stdout = subprocess.PIPE,
@@ -4356,7 +4253,6 @@
#----------------------------------------------------------------------
def upload(self):
- '''
global apps_path
value = self.projectselect.selected()
if value:
@@ -4364,10 +4260,8 @@
# designname = value['text']
designname = self.project_name
print('Upload design ' + designname + ' (' + design + ' )')
- subprocess.run(['/ef/apps/bin/withnet',
- apps_path + '/cace_design_upload.py',
+ subprocess.run([apps_path + '/cace_design_upload.py',
design, '-test'])
- '''
#--------------------------------------------------------------------------
@@ -4398,8 +4292,8 @@
if os.path.exists(svalues[0] + '/.config'):
pdkdir = svalues[0] + '/.config/techdir'
- elif os.path.exists(svalues[0] + '/.ef-config'):
- pdkdir = svalues[0] + '/.ef-config/techdir'
+ elif os.path.exists(svalues[0] + '/.config'):
+ pdkdir = svalues[0] + '/.config/techdir'
ef_style=True
if pdkdir == '':
diff --git a/runtime/spiceunits.py b/runtime/spiceunits.py
new file mode 100755
index 0000000..6cd45ba
--- /dev/null
+++ b/runtime/spiceunits.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python3
+"""spiceunits.py: Converts a tuple of (unit, value) into a standard-unit numeric value."""
+
+import re
+
+# Set of metric prefixes and the multiplier needed to convert a value
+# to the "standard" unit for SPICE. Only standard units will be
+# written into the SPICE file, for reasons of universal
+# compatibility.
+
+prefixtypes = {
+ "T": 1E12, "tera" : 1E12,
+ "G": 1E9, "giga" : 1E9,
+ "M": 1E6, "mega" : 1E6, "MEG": 1E6, "meg": 1E6,
+ "K": 1E3, "kilo" : 1E3, "k":1E3,
+ "D": 1E1, "deca" : 1E1,
+ "d": 1E-1, "deci" : 1E-1,
+ "c": 1E-2, "centi": 1E-2, "%": 1E-2,
+ "m": 1E-3, "milli": 1E-3,
+ "u": 1E-6, "micro": 1E-6, "\u00b5": 1E-6, "ppm": 1E-6,
+ "n": 1E-9, "nano" : 1E-9, "ppb": 1E-9,
+ "p": 1E-12, "pico" : 1E-12, "ppt": 1E-12,
+ "f": 1E-15, "femto": 1E-15,
+ "a": 1E-18, "atto" : 1E-15,
+}
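+
+# Illustrative sketch (not part of the conversion code below): a prefix
+# lookup yields the multiplier to the standard unit, e.g.
+#     prefixtypes['n'] * 10.0    ->  1e-08      (10 nF expressed in farads)
+#     prefixtypes['MEG'] * 2.0   ->  2000000.0  (2 megohms expressed in ohms)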
+
+# set of known unit types, including some with suffixes, along with a
+# keyword that can be used to limit the search if an expected type for
+# the value is known. Keys are used in regular expressions, and so
+# may use any regular expression syntax.
+
+unittypes = {
+ "[Ff]": "capacitance",
+ "[Ff]arad[s]*": "capacitance",
+ "\u03a9": "resistance",
+ "[Oo]hm[s]*": "resistance",
+ "[Vv]": "voltage",
+ "[Vv]olt[s]*": "voltage",
+ "[Aa]": "current",
+ "[Aa]mp[s]*": "current",
+ "[Aa]mpere[s]*": "current",
+ "[Ss]": "time",
+ "[Ss]econd[s]*": "time",
+ "[Hh]": "inductance",
+ "[Hh]enry[s]*": "inductance",
+ "[Hh]enries": "inductance",
+ "[Hh]z": "frequency",
+ "[Hh]ertz": "frequency",
+ "[Mm]": "distance",
+ "[Mm]eter[s]*": "distance",
+ "[\u00b0]*[Cc]": "temperature",
+ "[\u00b0]*[Cc]elsius": "temperature",
+ "[\u00b0]*[Kk]": "temperature",
+ "[\u00b0]*[Kk]elvin": "temperature",
+ "[Ww]": "power",
+ "[Ww]att[s]*": "power",
+ "[Vv]-rms": "noise",
+ "[Vv]olt[s]*-rms": "noise",
+ "'[bohd]": "digital",
+ "": "none"
+}
+
+# Define how to convert SI units to spice values
+#
+# NOTE: spice_unit_unconvert can act on a tuple of (units, value) where
+# value is either a single value or a list of values. spice_unit_convert
+# only acts on a tuple with a single value. This is because large vectors
+# are only produced by ngspice, and those values need to be converted back
+# into the units specified by the datasheet. Values being converted to
+# ngspice units come from the datasheet and are computed only a few at a
+# time, so vector handling there would gain little efficiency.
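+#
+# Worked example (a sketch assuming the tables above; floating-point
+# results are approximate):
+#     spice_unit_convert(('pF', '10'))              -> '1e-11'      (10 pF in farads)
+#     spice_unit_unconvert(('pF', 1e-11))           -> ~10.0        (farads back to pF)
+#     spice_unit_unconvert(('mV', [0.001, 0.002]))  -> [~1.0, ~2.0] (vector form)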
+
+def spice_unit_convert(valuet, restrict=[]):
+ """Convert SI units into spice values"""
+ # valuet is a tuple of (unit, value), where "value" is numeric
+ # and "unit" is a string. "restrict" may be used to require that
+ # the value be of a specific class like "time" or "resistance".
+
+ # Recursive handling of '/' and multiplication dot in expressions
+ if '/' in valuet[0]:
+ parts = valuet[0].split('/', 1)
+ result = float(spice_unit_convert([parts[0], valuet[1]], restrict))
+ result /= float(spice_unit_convert([parts[1], "1.0"], restrict))
+ return str(result)
+
+ if '\u22c5' in valuet[0]: # multiplication dot
+ parts = valuet[0].split('\u22c5')
+ result = float(spice_unit_convert([parts[0], valuet[1]], restrict))
+ result *= float(spice_unit_convert([parts[1], "1.0"], restrict))
+ return str(result)
+
+ if '\u00b2' in valuet[0]: # squared
+ part = valuet[0].split('\u00b2')[0]
+ result = float(spice_unit_convert([part, valuet[1]], restrict))
+ result *= float(spice_unit_convert([part, "1.0"], restrict))
+ return str(result)
+
+ if valuet[0] == "": # null case, no units
+ return valuet[1]
+
+ for unitrec in unittypes: # case of no prefix
+ if re.match('^' + unitrec + '$', valuet[0]):
+ if restrict:
+ if unittypes[unitrec] == restrict.lower():
+ return valuet[1]
+ else:
+ return valuet[1]
+
+ for prerec in prefixtypes:
+ for unitrec in unittypes:
+ if re.match('^' + prerec + unitrec + '$', valuet[0]):
+ if restrict:
+ if unittypes[unitrec] == restrict.lower():
+ newvalue = float(valuet[1]) * prefixtypes[prerec]
+ return str(newvalue)
+ else:
+ newvalue = float(valuet[1]) * prefixtypes[prerec]
+ return str(newvalue)
+
+ # Check for "%", which can apply to anything.
+ if valuet[0][0] == '%':
+ newvalue = float(valuet[1]) * 0.01
+ return str(newvalue)
+
+ if restrict:
+ raise ValueError('units ' + valuet[0] + ' cannot be parsed as ' + restrict.lower())
+ else:
+ # raise ValueError('units ' + valuet[0] + ' cannot be parsed')
+ # (Assume value is not in SI units and will be passed back as-is)
+ return valuet[1]
+
+# Define how to convert spice values back into SI units
+
+def spice_unit_unconvert(valuet, restrict=[]):
+ """Convert spice values back into SI units"""
+ # valuet is a tuple of (unit, value), where "value" is numeric
+ # and "unit" is a string. "restrict" may be used to require that
+ # the value be of a specific class like "time" or "resistance".
+
+ # Recursive handling of '/' and multiplication dot in expressions
+ if '/' in valuet[0]:
+ parts = valuet[0].split('/', 1)
+ result = spice_unit_unconvert([parts[0], valuet[1]], restrict)
+ if isinstance(result, list):
+ result = list(item / spice_unit_unconvert([parts[1], 1.0],
+ restrict) for item in result)
+ else:
+ result /= spice_unit_unconvert([parts[1], 1.0], restrict)
+ return result
+
+ if '\u22c5' in valuet[0]: # multiplication dot
+ parts = valuet[0].split('\u22c5')
+ result = spice_unit_unconvert([parts[0], valuet[1]], restrict)
+ if isinstance(result, list):
+ result = list(item * spice_unit_unconvert([parts[1], 1.0],
+ restrict) for item in result)
+ else:
+ result *= spice_unit_unconvert([parts[1], 1.0], restrict)
+ return result
+
+ if '\u00b2' in valuet[0]: # squared
+ part = valuet[0].split('\u00b2')[0]
+ result = spice_unit_unconvert([part, valuet[1]], restrict)
+ if isinstance(result, list):
+ result = list(item * spice_unit_unconvert([part, 1.0],
+ restrict) for item in result)
+ else:
+ result *= spice_unit_unconvert([part, 1.0], restrict)
+ return result
+
+ if valuet[0] == "": # null case, no units
+ return valuet[1]
+
+ for unitrec in unittypes: # case of no prefix
+ if re.match('^' + unitrec + '$', valuet[0]):
+ if restrict:
+ if unittypes[unitrec] == restrict.lower():
+ return valuet[1]
+ else:
+ return valuet[1]
+
+ for prerec in prefixtypes:
+ for unitrec in unittypes:
+ if re.match('^' + prerec + unitrec + '$', valuet[0]):
+ if restrict:
+ if unittypes[unitrec] == restrict.lower():
+ if isinstance(valuet[1], list):
+ return list(item / prefixtypes[prerec] for item in valuet[1])
+ else:
+ return valuet[1] / prefixtypes[prerec]
+ else:
+ if isinstance(valuet[1], list):
+ return list(item / prefixtypes[prerec] for item in valuet[1])
+ else:
+ return valuet[1] / prefixtypes[prerec]
+
+ # Check for "%", which can apply to anything.
+ if valuet[0][0] == '%':
+ if isinstance(valuet[1], list):
+ return list(item * 100 for item in valuet[1])
+ else:
+ return valuet[1] * 100
+
+ if restrict:
+ raise ValueError('units ' + valuet[0] + ' cannot be parsed as ' + restrict.lower())
+ else:
+ # raise ValueError('units ' + valuet[0] + ' cannot be parsed')
+ # (Assume value is not in SI units and will be passed back as-is)
+ return valuet[1]
diff --git a/runtime/textreport.py b/runtime/textreport.py
new file mode 100644
index 0000000..f44e09c
--- /dev/null
+++ b/runtime/textreport.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+#
+#--------------------------------------------------------
+# Text Report Window for the Open Galaxy characterization
+# tool (simple text window with contents read from file)
+#
+#--------------------------------------------------------
+# Written by Tim Edwards
+# efabless, inc.
+# June 27, 2017
+# Version 0.1
+#--------------------------------------------------------
+
+import os
+import re
+import tkinter
+from tkinter import ttk
+
+class TextReport(tkinter.Toplevel):
+ """Open Galaxy text report window."""
+
+ def __init__(self, parent=None, fontsize = 11, *args, **kwargs):
+ '''See the __init__ for tkinter.Toplevel.'''
+ tkinter.Toplevel.__init__(self, parent, *args, **kwargs)
+
+ s = ttk.Style()
+ s.configure('normal.TButton', font=('Helvetica', fontsize), border = 3, relief = 'raised')
+ self.protocol("WM_DELETE_WINDOW", self.close)
+
+ self.withdraw()
+ self.title('Open Galaxy Text Report')
+
+ self.texttitle = ttk.Label(self, style='title.TLabel', text = '(no text)')
+ self.texttitle.grid(column = 0, row = 0, sticky = "news")
+ self.textbar = ttk.Separator(self, orient='horizontal')
+ self.textbar.grid(column = 0, row = 1, sticky = "news")
+
+ self.hframe = tkinter.Frame(self)
+ self.hframe.grid(column = 0, row = 2, sticky = "news")
+ self.hframe.textdisplay = ttk.Frame(self.hframe)
+ self.hframe.textdisplay.pack(side = 'left', fill = 'both', expand = 'true')
+ self.hframe.textdisplay.page = tkinter.Text(self.hframe.textdisplay, wrap = 'word')
+ self.hframe.textdisplay.page.pack(side = 'top', fill = 'both', expand = 'true')
+ # Add scrollbar to text window
+ self.hframe.scrollbar = ttk.Scrollbar(self.hframe)
+ self.hframe.scrollbar.pack(side='right', fill='y')
+ # attach text window to scrollbar
+ self.hframe.textdisplay.page.config(yscrollcommand = self.hframe.scrollbar.set)
+ self.hframe.scrollbar.config(command = self.hframe.textdisplay.page.yview)
+
+ self.bbar = ttk.Frame(self)
+ self.bbar.grid(column = 0, row = 3, sticky = "news")
+ self.bbar.close_button = ttk.Button(self.bbar, text='Close',
+ command=self.close, style = 'normal.TButton')
+ self.bbar.close_button.grid(column = 0, row = 0, padx = 5)
+
+ self.rowconfigure(0, weight=0)
+ self.rowconfigure(1, weight=0)
+ self.rowconfigure(2, weight=1)
+ self.rowconfigure(3, weight=0)
+ self.columnconfigure(0, weight=1)
+
+ # Initialize with empty page
+ self.text = []
+ self.title = '(No file to display)'
+ self.timestamp = 0
+
+ def grid_configure(self, padx, pady):
+ pass
+
+ def display(self, filename=''):
+ # Read from file if text is empty
+ if filename != '':
+ if filename == self.title:
+ statbuf = os.stat(filename)
+ if self.text == [] or self.timestamp < statbuf.st_mtime:
+ self.add_text_from_file(filename)
+ self.timestamp = statbuf.st_mtime
+ else:
+ self.add_text_from_file(filename)
+
+ # Remove and replace contents
+ self.hframe.textdisplay.page.delete('1.0', 'end')
+ self.hframe.textdisplay.page.insert('end', self.text)
+ self.texttitle.configure(text = self.title)
+ self.open()
+
+ # Fill the text report from a file.
+
+ def add_text_from_file(self, filename):
+ print('Loading text from file ' + filename)
+ with open(filename, 'r') as f:
+ self.text = f.read()
+ self.title = filename
+ self.display()
+
+ def close(self):
+ # pop down text window
+ self.withdraw()
+
+ def open(self):
+ # pop up text window
+ self.deiconify()
+ self.lift()
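+
+# Minimal usage sketch (illustrative only; assumes a running Tk application
+# and a hypothetical report file 'results.txt'):
+#
+#     import tkinter
+#     root = tkinter.Tk()
+#     report = TextReport(root, fontsize = 11)
+#     report.display('results.txt')   # loads the file and pops up the window
+#     root.mainloop()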
diff --git a/sky130/Makefile.in b/sky130/Makefile.in
index 8428d14..f0e35cc 100644
--- a/sky130/Makefile.in
+++ b/sky130/Makefile.in
@@ -1009,6 +1009,7 @@
cp -rp ${KLAYOUT_PATH}/sky130_tech/tech/sky130/pymacros/* ${KLAYOUT_STAGING_$*}/pymacros/ ; \
cp ${KLAYOUT_PATH}/sky130_tech/tech/sky130/${TECH}.lyp ${KLAYOUT_STAGING_$*}/tech/${SKY130$*}.lyp ; \
cp ${KLAYOUT_PATH}/sky130_tech/tech/sky130/${TECH}.lyt ${KLAYOUT_STAGING_$*}/tech/${SKY130$*}.lyt ; \
+ cp ${KLAYOUT_PATH}/sky130_tech/tech/sky130/${TECH}.map ${KLAYOUT_STAGING_$*}/tech/${SKY130$*}.map ; \
fi
# Copy original DRC deck from open_pdks (is this useful?)
cp klayout/sky130.lydrc ${KLAYOUT_STAGING_$*}/drc/${SKY130$*}.lydrc