Made a fix to the sky130.json file, which was missing a comma and
therefore illegal JSON syntax. Corrected the openlane configuration
for the "_OPT" tech LEF from "__max" to "__nom". Made some corrections
to the project manager script, which comes up and runs now, although
there is plenty of additional work to be done.
diff --git a/runtime/profile.py b/runtime/profile.py
index 2fb3e0b..d533b07 100755
--- a/runtime/profile.py
+++ b/runtime/profile.py
@@ -17,8 +17,6 @@
import subprocess
from tkinter import ttk
-import config
-
class Profile(tkinter.Toplevel):
"""Project manager profile settings management."""
@@ -74,27 +72,7 @@
if 'username' in prefs:
self.username.set(prefs['username'])
else:
- userid = os.environ['USER']
- '''
- p = subprocess.run(['/ef/apps/bin/withnet',
- config.apps_path + '/og_uid_service.py', userid],
- stdout = subprocess.PIPE)
-
- if p.stdout:
- uid_string = p.stdout.splitlines()[0].decode('utf-8')
- userspec = re.findall(r'[^"\s]\S*|".+?"', uid_string)
- if len(userspec) > 0:
- username = userspec[0].strip('"')
- # Note userspec[1] = UID and userspec[2] = role, useful
- # for future applications.
- else:
- username = userid
- else:
- username = userid
- '''
- username=userid
-
- self.username.set(username)
+ self.username.set(os.environ['USER'])
# Graphics format for magic
magicgraphics = ['X11', 'CAIRO', 'OPENGL']
diff --git a/runtime/project_manager.py b/runtime/project_manager.py
index 4d26945..2be5f58 100755
--- a/runtime/project_manager.py
+++ b/runtime/project_manager.py
@@ -660,8 +660,8 @@
self.project_pdkdir = os.path.realpath(self.projectpath + ProjectManager.config_path( self.projectpath) + '/techdir')
self.foundry, foundry_name, self.node, desc, status = ProjectManager.pdkdir2fnd( self.project_pdkdir )
else:
- if not os.path.exists(self.projectpath + '/info.yaml'):
- self.error_label.configure(text = self.projectpath + ' does not contain an info.yaml file.')
+ if not os.path.exists(self.projectpath + '/project.json'):
+ self.error_label.configure(text = self.projectpath + ' does not contain a project.json file.')
self.project_pdkdir = ""
self.foundry = ""
self.node = ""
@@ -1592,10 +1592,10 @@
@classmethod
def get_import_pdk(cls, projectpath):
print(projectpath)
- yamlname = projectpath + '/info.yaml'
+ jsonname = projectpath + '/project.json'
- with open(yamlname, 'r') as f:
- datatop = yaml.safe_load(f)
+ with open(jsonname, 'r') as f:
+ datatop = json.load(f)
project_data = datatop['project']
project_foundry = project_data['foundry']
project_process = project_data['process']
@@ -1634,13 +1634,6 @@
return root + ext
return None
- def yaml2targz(self, yamlPath):
- root = os.path.splitext(yamlPath)[0]
- for ext in ('.tgz', '.tar.gz'):
- if os.path.isfile(root + ext):
- return root + ext
- return None
-
#------------------------------------------------------------------------
# Remove a .json and associated tar.gz (or .tgz) if any.
# If not a .json, remove just that file (no test for a tar).
@@ -1706,7 +1699,7 @@
if os.path.isdir(ipath):
if os.path.islink(ipath) or not self.validProjectName(item) \
or self.importProjNameBadrex1.match(item) \
- or not os.path.isfile(ipath + '/info.yaml'):
+ or not os.path.isfile(ipath + '/project.json'):
importlist.remove(item)
continue
else:
@@ -1752,7 +1745,7 @@
# Import for json documents and related tarballs (.gz or .tgz):
#------------------------------------------------------------------------
- def importyaml(self, projname, importfile):
+ def importjson(self, projname, importfile):
# (1) Check if there is a tarball with the same root name as the JSON
importroot = os.path.splitext(importfile)[0]
badrex1 = re.compile("^\.")
@@ -1793,10 +1786,10 @@
# document, then change the name to match that of the project
# folder.
- yamlfile = newproject + '/info.yaml'
+ jsonfile = newproject + '/project.json'
try:
- shutil.copy(importfile, yamlfile)
+ shutil.copy(importfile, jsonfile)
except IOError as e:
print('Error copying files: ' + str(e))
return None
@@ -2026,14 +2019,6 @@
return self.tarVglImportable(tar)
- def yamlTarVglImportable(self, path):
- ext = os.path.splitext(path)[1]
- if ext != '.yaml': return None, None, None
-
- tar = self.yaml2targz(path)
- if not tar: return None, None, None
-
- return self.tarVglImportable(tar)
#------------------------------------------------------------------------
# Get a single named member (memPath) out of a JSON's tar file.
# This is thin wrapper around tarMember2tempfile. Find the JSON's associated
@@ -2049,15 +2034,6 @@
return self.tarMember2tempfile(tar, memPath)
- def yamlTarMember2tempfile(self, path, memPath):
- ext = os.path.splitext(path)[1]
- if ext != '.yaml': return None
-
- tar = self.yaml2targz(path)
- if not tar: return None
-
- return self.tarMember2tempfile(tar, memPath)
-
#------------------------------------------------------------------------
# Determine if tar-file can be imported as-if it were just a *.v.
# Require exactly one yosys-output .netlist.v, and exactly one .json.
@@ -2252,11 +2228,12 @@
return jData
-#------------------------------------------------------------------------
- # Create info.yaml file (automatically done in create_project.py in case it's executed from the command line)
+ #------------------------------------------------------------------------
+ # Create project.json file (automatically done in create_project.py in
+ # case it's executed from the command line)
#------------------------------------------------------------------------
- def create_yaml(self, ipname, pdk_dir, description="(Add project description here)"):
+ def create_json(self, ipname, pdk_dir, description="(Add project description here)"):
# ipname: Project Name
data = {}
project= {}
@@ -2270,6 +2247,7 @@
project['flow'] = 'none'
data['project']=project
return data
+
#------------------------------------------------------------------------
# For a single named member (memPath) out of an open tarfile (tarf),
# determine if it is a JSON file, and attempt to extract value of entry
@@ -2377,10 +2355,10 @@
os.chdir(curdir)
# Create a simple qflow_vars.sh file so that the project manager
- # qflow launcher will see it as a qflow sub-project. If the meta.yaml
- # file has a "stdcell" entry for the subproject, then add the line
- # "techname=" with the name of the standard cell library as pulled
- # from meta.yaml.
+ # qflow launcher will see it as a qflow sub-project. If the
+ # project.json file has a "stdcell" entry for the subproject, then
+ # add the line "techname=" with the name of the standard cell
+ # library as pulled from project.json.
stdcell = None
buildname = 'build/' + vname + '.netlist.v'
@@ -3241,8 +3219,8 @@
netlistfile = None
# Pull process and standard cell library from the YAML file created by
- # CloudV. NOTE: YAML file has multiple documents, so must use
- # yaml.load_all(), not yaml.load(). If there are refinements of this
+ # CloudV. NOTE: formerly a multi-document YAML file; the JSON replacement
+ # is a single document read with json.load(). If there are refinements of this
# process for individual build files, they will override (see further down).
# To do: Check entries for SoC builds. If there are multiple SoC builds,
@@ -3252,23 +3230,24 @@
# that there can be multiple SoC builds in the project, so for now retaining
# the existing parsing assuming default names.)
- if os.path.exists(ppath + '/.ef-config/meta.yaml'):
- print("Reading YAML file:")
- ydicts = []
- with open(ppath + '/.ef-config/meta.yaml', 'r') as ifile:
- yalldata = yaml.load_all(ifile, Loader=yaml.Loader)
- for ydict in yalldata:
- ydicts.append(ydict)
+ if os.path.exists(ppath + '/.config/nodeinfo.json'):
+ print("Reading nodeinfo.json file:")
+ jdicts = []
- for ydict in ydicts:
- for yentry in ydict.values():
- if 'process' in yentry:
- importnode = yentry['process']
+ with open(ppath + '/.config/nodeinfo.json', 'r') as ifile:
+ jsondata = [json.load(ifile)]
+ for jdict in jsondata:
+ jdicts.append(jdict)
+
+ for jdict in jdicts:
+ for jentry in jdict.values():
+ if 'process' in jentry:
+ importnode = jentry['process']
# If there is a file ().soc and a directory ().model, then pull the file
# ().model/().model.v, which is a chip top-level netlist.
- ydicts = []
+ jdicts = []
has_soc = False
save_vdir = None
vdirs = glob.glob(ppath + '/*')
@@ -3303,22 +3282,22 @@
# Pull process and standard cell library from the YAML file
# created by CloudV
- # Use yaml.load_all(), not yaml.load() (see above)
+ # Load the single JSON document with json.load() (see above)
- if os.path.exists(ppath + '/.ef-config/meta.yaml'):
+ if os.path.exists(ppath + '/.config/nodeinfo.json'):
print("Reading YAML file:")
- ydicts = []
- with open(ppath + '/.ef-config/meta.yaml', 'r') as ifile:
- yalldata = yaml.load_all(ifile, Loader=yaml.Loader)
- for ydict in yalldata:
- ydicts.append(ydict)
+ jdicts = []
+ with open(ppath + '/.config/nodeinfo.json', 'r') as ifile:
+ jsondata = [json.load(ifile)]
+ for jdict in jsondata:
+ jdicts.append(jdict)
- for ydict in ydicts:
- for yentry in ydict.values():
- if 'process' in yentry:
- importnode = yentry['process']
- if 'stdcell' in yentry:
- stdcell = yentry['stdcell']
+ for jdict in jdicts:
+ for jentry in jdict.values():
+ if 'process' in jentry:
+ importnode = jentry['process']
+ if 'stdcell' in jentry:
+ stdcell = jentry['stdcell']
break
if importnode:
@@ -3400,31 +3379,30 @@
# NOTE: Behavior is for project files to depend on "project_name". Using
# the project filename as a project name is a fallback behavior. If
- # there is a info.yaml file, and it defines a project_name entry, then
+ # there is a project.json file, and it defines a project_name entry, then
# there is no need to make changes within the project. If there is
- # no info.yaml file, then create one and set the project_name entry to
+ # no project.json file, then create one and set the project_name entry to
# the old project name, which avoids the need to make changes within
# the project.
else:
- # Check info.yaml
- yamlname = newproject + '/info.yaml'
+ # Check project.json
+ jsonname = newproject + '/project.json'
found = False
- if os.path.isfile(yamlname):
+ if os.path.isfile(jsonname):
# Pull the project_name into local store (may want to do this with the
# datasheet as well)
- with open(yamlname, 'r') as f:
- datatop = yaml.safe_load(f)
+ with open(jsonname, 'r') as f:
+ datatop = json.load(f)
if 'project_name' in datatop['project']:
found = True
if not found:
pdkdir = self.get_pdk_dir(newproject, path=True)
- yData = self.create_yaml(oldname, pdkdir)
- with open(newproject + '/info.yaml', 'w') as ofile:
- print('---',file=ofile)
- yaml.dump(yData, ofile)
+ yData = self.create_json(oldname, pdkdir)
+ with open(newproject + '/project.json', 'w') as ofile:
+ json.dump(yData, ofile)
# If ngspice and electric prefs were not copied from the source
# to the target, as recommended, then copy these from the
@@ -3464,16 +3442,15 @@
is_subproject = True
except:
pass
- if not os.path.exists(projectpath + '/info.yaml'):
+ if not os.path.exists(projectpath + '/project.json'):
project_pdkdir = self.get_pdk_dir(projectpath, path=True)
- data = self.create_yaml(os.path.split(projectpath)[1], project_pdkdir)
- with open(projectpath + '/info.yaml', 'w') as ofile:
- print('---',file=ofile)
- yaml.dump(data, ofile)
+ data = self.create_json(os.path.split(projectpath)[1], project_pdkdir)
+ with open(projectpath + '/project.json', 'w') as ofile:
+ json.dump(data, ofile)
- # Read yaml file for the selected flow
- with open(projectpath + '/info.yaml','r') as f:
- data = yaml.safe_load(f)
+ # Read json file for the selected flow
+ with open(projectpath + '/project.json','r') as f:
+ data = json.load(f)
project = data['project']
if 'flow' in project.keys() and project['flow']=='none' or 'flow' not in project.keys():
while True:
@@ -3487,9 +3464,8 @@
break
project['flow']=flow
data['project']=project
- with open(projectpath + '/info.yaml', 'w') as ofile:
- print('---',file=ofile)
- yaml.dump(data, ofile)
+ with open(projectpath + '/project.json', 'w') as ofile:
+ json.dump(data, ofile)
else:
flow = project['flow']
@@ -3515,16 +3491,16 @@
# read it and pull the ip-name record. If not, the fallback position
# is to assume that the project filename is the project name.
- # Check info.yaml
+ # Check project.json
projectpath = self.projectdir + '/' + projname
- yamlname = projectpath + '/info.yaml'
+ jsonname = projectpath + '/project.json'
oldname = projname
- if os.path.isfile(yamlname):
+ if os.path.isfile(jsonname):
# Pull the ipname into local store (may want to do this with the
# datasheet as well)
- with open(yamlname, 'r') as f:
- datatop = yaml.safe_load(f)
+ with open(jsonname, 'r') as f:
+ datatop = json.load(f)
project_data = datatop['project']
if 'project_name' in project_data:
oldname = project_data['project_name']
@@ -3636,11 +3612,10 @@
os.symlink(projectpath, self.projectdir + '/' + newname)
else:
shutil.copytree(projectpath, self.projectdir + '/' + newname, symlinks = True)
- if not os.path.exists(projectpath + '/info.yaml'):
- yData = self.create_yaml(newname, project_pdkdir)
- with open(projectpath + '/info.yaml', 'w') as ofile:
- print('---',file=ofile)
- yaml.dump(yData, ofile)
+ if not os.path.exists(projectpath + '/project.json'):
+ jData = self.create_json(newname, project_pdkdir)
+ with open(projectpath + '/project.json', 'w') as ofile:
+ json.dump(jData, ofile)
else:
#Create a subproject
if not os.path.exists(parent_path + '/subcells'):
@@ -3651,11 +3626,10 @@
self.clean(parent_path + '/subcells/' + newname)
else:
os.symlink(projectpath, parent_path + '/subcells/' + newname)
- if not os.path.exists(parent_path + '/subcells/' + newname + '/info.yaml'):
- yData = self.create_yaml(newname, project_pdkdir)
- with open(parent_path + '/subcells/' + newname + '/info.yaml', 'w') as ofile:
- print('---',file=ofile)
- yaml.dump(yData, ofile)
+ if not os.path.exists(parent_path + '/subcells/' + newname + '/project.json'):
+ yData = self.create_json(newname, project_pdkdir)
+ with open(parent_path + '/subcells/' + newname + '/project.json', 'w') as ofile:
+ json.dump(yData, ofile)
self.update_project_views()
#----------------------------------------------------------------------
# "Import As" a dir in import/ as a project. based on renameproject().
@@ -4446,16 +4420,17 @@
# it.
# NOTE: project.json is the preferred name for the datasheet
# file. However, the .spi file, .delib file, etc., all have the name of the
- # project from "project_name" in the info.yaml file, which is separate from the datasheet.
+ # project from "project_name" in the project.json file, which is separate from
+ # the datasheet.
found = False
ppath = selection['values'][0]
- yamlname = ppath + '/info.yaml'
+ jsonname = ppath + '/project.json'
- if os.path.isfile(yamlname):
+ if os.path.isfile(jsonname):
# Pull the project_name into local store
- with open(yamlname, 'r') as f:
- datatop = yaml.safe_load(f)
+ with open(jsonname, 'r') as f:
+ datatop = json.load(f)
project_data = datatop['project']
ipname = project_data['project_name']
self.project_name = ipname
@@ -4468,18 +4443,6 @@
datatop = json.load(f)
dsheet = datatop['data-sheet']
found = True
- # Do not specifically prohibit opening the characterization app if
- # there is no schematic or netlist. Otherwise the user is prevented
- # even from seeing the electrical parameters. Let the characterization
- # tool allow or prohibit simulation based on this.
- # if os.path.exists(ppath + '/spi'):
- # if os.path.isfile(ppath + '/spi/' + ipname + '.spi'):
- # found = True
- #
- # if found == False and os.path.exists(ppath + '/elec'):
- # if os.path.isdir(ppath + '/elec/' + ipname + '.delib'):
- # if os.path.isfile(ppath + '/elec/' + ipname + '.delib/' + ipname + '.sch'):
- # found = True
else:
# Use 'pname' as the default project name.
print('No characterization file ' + jsonname)
diff --git a/sky130/openlane/config.tcl b/sky130/openlane/config.tcl
index 35f63f4..126e8aa 100755
--- a/sky130/openlane/config.tcl
+++ b/sky130/openlane/config.tcl
@@ -32,7 +32,7 @@
"
# Optimization library
-set ::env(TECH_LEF_OPT) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/techLEF/$::env(STD_CELL_LIBRARY_OPT)/$::env(STD_CELL_LIBRARY_OPT)__max.tlef"
+set ::env(TECH_LEF_OPT) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/techLEF/$::env(STD_CELL_LIBRARY_OPT)/$::env(STD_CELL_LIBRARY_OPT)__nom.tlef"
set ::env(CELLS_LEF_OPT) [glob "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/lef/$::env(STD_CELL_LIBRARY_OPT)/*.lef"]
set ::env(GDS_FILES_OPT) [glob "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/gds/$::env(STD_CELL_LIBRARY_OPT)/*.gds"]
set ::env(STD_CELL_LIBRARY_OPT_CDL) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/cdl/$::env(STD_CELL_LIBRARY_OPT)/$::env(STD_CELL_LIBRARY_OPT).cdl"
@@ -56,7 +56,7 @@
"
# Optimization library
-set ::env(TECH_LEF_OPT) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/$::env(STD_CELL_LIBRARY_OPT)/techlef/$::env(STD_CELL_LIBRARY_OPT)__max.tlef"
+set ::env(TECH_LEF_OPT) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/$::env(STD_CELL_LIBRARY_OPT)/techlef/$::env(STD_CELL_LIBRARY_OPT)__nom.tlef"
set ::env(CELLS_LEF_OPT) [glob "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/$::env(STD_CELL_LIBRARY_OPT)/lef/*.lef"]
set ::env(GDS_FILES_OPT) [glob "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/$::env(STD_CELL_LIBRARY_OPT)/gds/*.gds"]
set ::env(STD_CELL_LIBRARY_OPT_CDL) "$::env(PDK_ROOT)/$::env(PDK)/libs.ref/$::env(STD_CELL_LIBRARY_OPT)/cdl/$::env(STD_CELL_LIBRARY_OPT).cdl"
diff --git a/sky130/sky130.json b/sky130/sky130.json
index a399110..97a74ad 100644
--- a/sky130/sky130.json
+++ b/sky130/sky130.json
@@ -83,7 +83,7 @@
"build": {
"open_pdks": "OPEN_PDKS_VERSION",
"magic": "MAGIC_VERSION"
- }
+ },
"commit": {
"open_pdks": "OPEN_PDKS_COMMIT",
"magic": "MAGIC_COMMIT"