From 95acae9ad9935a1bf9dd1fd5c01d24f6a22c6ca9 Mon Sep 17 00:00:00 2001
From: Erik Kooistra <kooistra@astron.nl>
Date: Thu, 22 May 2014 09:52:59 +0000
Subject: [PATCH] Moved or copied files to RadioHDL.

---
 tools/modelsim/commands.do              | 562 ++++++++++++++++++++++++
 tools/oneclick/base/common_dict_file.py | 255 +++++++++++
 tools/oneclick/base/hdl_config.py       | 162 +++++++
 tools/oneclick/base/modelsim_config.py  | 186 ++++++++
 4 files changed, 1165 insertions(+)
 create mode 100644 tools/modelsim/commands.do
 create mode 100644 tools/oneclick/base/common_dict_file.py
 create mode 100644 tools/oneclick/base/hdl_config.py
 create mode 100644 tools/oneclick/base/modelsim_config.py

diff --git a/tools/modelsim/commands.do b/tools/modelsim/commands.do
new file mode 100644
index 0000000000..0664b008bd
--- /dev/null
+++ b/tools/modelsim/commands.do
@@ -0,0 +1,562 @@
+###############################################################################
+#
+# Copyright (C) 2009
+# ASTRON (Netherlands Institute for Radio Astronomy) <http://www.astron.nl/>
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+# Purpose: Provide useful commands for simulating Modelsim projects
+# Description:
+# 
+# * The user commands that are typically used at the Modelsim prompt are:
+#   
+#   . lp <name> : load UniBoard <name>.mpf project
+#   . mk <name> : make one or range of UniBoard mpf projects
+#   . as #      : add signals for # levels of hierarchy to the wave window
+#   . ds        : delete all signals from the wave window
+#   
+# * The other procedures in this commands.do are internal command procedures
+#   that are typically not used at the Modelsim prompt. However, they can be
+#   used to create user commands for other projects (i.e. another arg_env than
+#   UNB with other arg_dir for the libraries).
+#
+# * The general lp and mk commands assume that the Modelsim project file is
+#   located at:
+#
+#     "$env($arg_env)/Firmware/$sdir/$arg_lib/build/sim/modelsim"
+#
+#   or for designs with an SOPC system at:
+#
+#     "$env($arg_env)/Firmware/$sdir/$arg_lib/build/synth/quartus/*_sim/"
+#
+# * The recommended project directory structure is:
+#
+#     $arg_lib/build/sim/modelsim    : Modelsim project file
+#                   /synth/quartus   : Quartus project file
+#             /src/vhdl              : VHDL source code that gets synthesized
+#             /tb/vhdl               : VHDL source code for test bench
+#
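+# * A typical session at the Modelsim prompt could look like this (assuming a
+#   library named "dp" with a Modelsim project file exists):
+#
+#     lp dp          ;# load the dp project
+#     mk compile dp  ;# compile all project files of the dp project
+#     as 3           ;# add signals for 3 levels of hierarchy to the wave window
+#     ds             ;# delete all signals from the wave window again
+#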
+
+
+#-------------------------------------------------------------------------------
+# UniBoard settings
+#-------------------------------------------------------------------------------
+
+puts "Loading general UniBoard commands..."
+
+# UniBoard environment variable
+proc unb_env {} {
+  return "UNB"
+}
+
+# UniBoard project directories
+proc unb_dir {} {
+  return {"designs" "modules" "modules/Altera" "modules/MegaWizard" "modules/Lofar" "dsp" "systems"}
+}
+
+
+#-------------------------------------------------------------------------------
+# LP = Load project
+#-------------------------------------------------------------------------------
+
+# Default load UniBoard project
+proc lp {{arg_lib ""}} {
+  lpu $arg_lib
+}
+
+# UniBoard load project
+proc lpu {{arg_lib ""}} {
+  set arg_env [unb_env]
+  set arg_dir [unb_dir]
+  lp_gen $arg_env $arg_dir $arg_lib
+}
+
+# General load project
+proc lp_gen {arg_env arg_dir arg_lib} {
+  set mpf [eval project env]
+  set cur_lib [string range $mpf [expr [string last / $mpf]+1] [expr [string last . $mpf]-1]]
+
+  if {[string equal $arg_lib ""] || [string equal $arg_lib $cur_lib]} {
+    # find out current module name
+    return $cur_lib
+  } elseif [file exists $arg_lib] {
+    set mpf $arg_lib
+  } else {
+    set sim [simdir $arg_env $arg_dir $arg_lib]
+    set mpf $sim/$arg_lib.mpf
+  }
+  if [file exists $mpf] then {
+#    if {[this_os]=="Windows"} {
+#      if [file attributes $mpf -readonly] then {
+#        file attributes $mpf -readonly 0
+#      }
+#    } else {
+#      set mpf_readonly [file attributes $mpf -permissions]            ;# 5 char string: 0,0,u(rwx),g(rwx),a(rwx)
+#      set mpf_readonly [expr !([string index $mpf_readonly 2] & 0x2)] ;# filter out user write status
+#      if {$mpf_readonly==1} then {
+#        file attributes $mpf -permissions u+w
+#      }
+#    }
+    if {! [string equal $cur_lib ""]} then {
+      project close
+    }
+    project open $mpf
+  } else {
+    error "Project file $arg_lib not found"
+  }
+  return $arg_lib
+}
+
+
+#-------------------------------------------------------------------------------
+# MK = Make project
+#-------------------------------------------------------------------------------
+
+# Default make UniBoard project
+# . The args is a special TCL argument because it allows more than one formal.
+#   However, when args is subsequently passed on to proc mku, all arguments
+#   in args will be treated as a single argument in mku. Therefore the content
+#   of proc mku would have to be duplicated in proc mk, because simply calling
+#   mku $args in mk does not work.
+# . However using an alias also works and is a nicer solution:
+#   One can also define a command as an alias to a sequence of one or more
+#   words, which will be substituted for it before execution. (The funny {}
+#   arguments are names of the source and target interpreter, which typically
+#   is the current one, named by the empty string {} or ""). 
+interp alias {} mk {} mku
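+# For example, entering "mk compile dp" at the prompt is substituted to
+# "mku compile dp" before execution (the library name is only an example).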
+
+
+# UniBoard make project
+proc mku args {
+  set arg_env [unb_env]
+  set arg_dir [unb_dir]
+  set arg_cmd [parse_for_cmds $args]
+  set arg_lib [parse_for_libs $args]
+  set arg_lib [extract_unb_libs $arg_lib]
+  
+  mk_gen $arg_env $arg_dir $arg_cmd $arg_lib
+}
+
+
+# Extract groups of UniBoard libs from arg_lib
+proc extract_unb_libs arg_lib {
+  # Check arg_lib for make groups of UniBoard modules and designs
+  # Remarks:
+  # . the order of the groups is important
+  # . designs that have an SOPC system need that system to be generated first
+  #   with SOPC Builder (to avoid pop up windows if these files are missing)
+  # . e.g. group of designs for which the node component is reused in other designs
+  # . e.g. group of reference designs that are still maintained
+  set m_unb_common {fmf easics tst common mm dp uth}
+  set m_unb_lofar {async_logic diag util i2c rcuh sens mdio eth ado pfs pft2 ss st}
+  set m_unb_dsp {bf rTwoSDF fft filter wpfb}
+  set m_unb_modules {diagnostics ppsh tse aduh tr_nonbonded ddr3 udp_packetizer remu epcs unb_common}
+  set m_unb_designs {unb_tr_nonbonded unb_ddr3 unb_tr_xaui bn_terminal_bg fn_terminal_db}
+  set m_old_designs {unb_sens unb_tse unb_heater fn_mdio unb_base bn_base fn_base}
+  set m_apertif_designs {bn_capture fn_bf bn_filterbank fn_beamformer}
+  
+  if       [ string equal $arg_lib "all_mod"     ] { set arg_lib "$m_unb_common $m_unb_lofar $m_unb_modules $m_unb_dsp"
+  } elseif [ string equal $arg_lib "unb_designs" ] { set arg_lib "$m_unb_designs"
+  } elseif [ string equal $arg_lib "all_unb"     ] { set arg_lib "$m_unb_common $m_unb_lofar $m_unb_modules $m_unb_dsp $m_unb_designs"
+  } elseif [ string equal $arg_lib "all_apertif" ] { set arg_lib "$m_unb_common $m_unb_lofar $m_unb_modules $m_unb_dsp $m_unb_designs $m_apertif_designs"
+  } elseif [ string equal $arg_lib "old_designs" ] { set arg_lib "$m_old_designs"
+  } elseif [ string equal $arg_lib "all"         ] { set arg_lib "$m_unb_common $m_unb_lofar $m_unb_modules $m_unb_dsp $m_unb_designs $m_apertif_designs $m_old_designs"
+  }
+  return $arg_lib
+}
+
+# Get commands from the mk args
+proc parse_for_cmds arg_list {
+  set cmds {}
+  if [ string equal $arg_list "help" ] then {
+    puts "mk \[commands\] \[projects\]"
+    puts "  possible commands are:"
+    puts "    check:   check for absolute paths in project sources"
+    puts "    clean:   removes the library files"
+    puts "    compile: runs project compileall"
+    puts "    delete:  delete Modelsim project file"
+    puts "    files:   list files in compile order"
+    puts "    help:    displays this help"
+    puts "    make:    runs makefile"
+    puts "    test:    runs test cases"
+    puts "    vmake:   creates makefile"
+    puts ""
+    puts "commands are executed for the projects indicated"
+    puts "- when no command is specified, 'make' is used as default"
+    puts "- when no projects are specified, the current project is used"
+    puts "- the keyword 'all_mod' is expanded to a subset of all uniboard reuseable modules"
+    puts "- the keyword 'all_unb' is expanded to a subset of all uniboard reuseable designs and reference designs"
+    puts ""
+    return
+  } else {
+    # search for commands in arg_list
+    foreach cmd {check clean compile delete files make test vmake} {
+      if {[lsearch $arg_list $cmd] >= 0} then {
+        lappend cmds $cmd
+      }
+    }
+    if {[llength $cmds] == 0} then {
+      # no commands found, use default commands
+      set cmds {make}
+    }
+  }
+  return $cmds
+}
+
+
+# Get libraries (modules, designs) from the mk args
+proc parse_for_libs arg_list {
+  # strip the commands from arg_list to keep only the modules
+  set libs $arg_list
+  foreach cmd {check clean compile delete files make test vmake} {
+    set i [lsearch $libs $cmd]
+    if {$i >= 0} then {
+      set libs [lreplace $libs $i $i]
+    }
+  }
+  return $libs
+}
+
+
+# General make project
+proc mk_gen {arg_env arg_dir arg_cmd arg_lib} {
+  if {[llength $arg_cmd] > 0} then {
+    set cur_lib [lp_gen $arg_env $arg_dir ""]
+  
+    # without arguments mk current lib
+    if { [llength $arg_lib] == 0 } {
+      set arg_lib $cur_lib
+    }
+    # perform the commands on the specified libs
+    foreach cmd $arg_cmd {
+      foreach lib $arg_lib {
+        if { [ catch { eval ["mk_$cmd" $arg_env $arg_dir $lib] } msg ] } {
+          puts stderr $msg
+        }
+      }
+    }
+    # back to original lib
+    lp_gen $arg_env $arg_dir $cur_lib
+  }
+}
+
+
+proc mk_check {arg_env arg_dir arg_lib} {
+  lp_gen $arg_env $arg_dir $arg_lib
+  puts "\[mk check $arg_lib\]"
+  foreach file [project compileorder] {
+    if {[string first "../" $file] != 0 &&
+        [string first "\$" $file] != 0} {
+      puts stderr "Warning: $file is an absolute path"
+    }
+  }
+}
+
+# Issue mk delete all to delete all Modelsim project files to ensure that SVN
+# update refreshes them all from the repository. This is necessary because
+# Modelsim edits the mpf files even when they have no significant modification
+# and then the edited mpf files will not get SVN updated.
+proc mk_delete {arg_env arg_dir arg_lib} {
+  puts "\[mk delete $arg_lib\]"
+  set sim [simdir $arg_env $arg_dir $arg_lib]
+  if {[file exists "$sim/$arg_lib.mpf"]} then {
+    file delete $sim/$arg_lib.mpf
+  }
+}
+
+proc mk_clean {arg_env arg_dir arg_lib} {
+  puts "\[mk clean $arg_lib\]"
+  set sim [simdir $arg_env $arg_dir $arg_lib]
+  if {[file exists "$sim/work"]} then {
+    vdel -lib $sim/work -all
+  }
+  if {[file exists "$sim/makefile"]} then {
+    file delete $sim/makefile
+  }
+  if {[file exists "$sim/vsim.wlf"]} then {
+    file delete $sim/vsim.wlf
+  }
+  if {[file exists "$sim/$arg_lib.cr.mti"]} then {
+    file delete $sim/$arg_lib.cr.mti
+  }
+}
+
+proc mk_compile {arg_env arg_dir arg_lib} {
+  if {[string compare [env] "<No Context>"] != 0} {
+    puts "A project cannot be closed while a simulation is in progress.\nUse the \"quit -sim\" command to unload the design first."
+    return
+  }
+  puts "\[mk compile $arg_lib\]"
+  lp_gen $arg_env $arg_dir $arg_lib
+  if {![file exists work"]} then {
+    vlib work;
+  }
+  project compileall
+}
+
+proc mk_files {arg_env arg_dir arg_lib} {
+  lp_gen $arg_env $arg_dir $arg_lib
+  foreach file [project compileorder] {
+    puts $file
+  }
+}
+
+proc mk_vmake {arg_env arg_dir arg_lib} {
+  set sim [simdir $arg_env $arg_dir $arg_lib]
+  if {![file exists "$sim/work/_info"]} then {
+    mk_compile $arg_env $arg_dir $arg_lib
+  }
+  puts "\[mk vmake $arg_lib\]"
+  if {![file exists "$sim/makefile"] ||
+    ([file mtime "$sim/makefile"] < [file mtime "$sim/work/_info"]) } then {
+    # Both the specific library name $(arg_lib)_lib and the work library map to the same local work library,
+    # so to be compatible for both names always use work to generate the makefile
+    puts [exec vmake -fullsrcpath work > $sim/makefile]
+    adapt_makefile "$sim/makefile"
+  }
+  if {[file exists "$sim/work"]} then {
+    vdel -lib $sim/work -all
+    vlib work
+  }
+}
+
+proc mk_make {arg_env arg_dir arg_lib} {
+  global env
+  set sim [simdir $arg_env $arg_dir $arg_lib]
+  if {! [file exists "$sim/makefile"] } then {
+    mk_vmake $arg_env $arg_dir $arg_lib
+  }
+  puts "\[mk make $arg_lib\]"
+  if {[this_os]=="Windows"} {
+    puts [exec $env(UNB)/Firmware/sim/bin/make.exe -C $sim -s -k -f makefile]
+  } else {
+    puts [exec /usr/bin/make -C $sim -s -k -f makefile]
+  }
+}
+
+proc mk_test {arg_env arg_dir arg_lib} {
+  # only for directory /modules, so arg_dir is not used but kept to match the other proc mk_*
+  puts "\[mk test $arg_lib\]"
+  radix -decimal
+  vsim -quiet tst_lib.tb_$arg_lib
+  set tb [tbdir $arg_env $arg_lib]
+
+  foreach tc [glob -directory $tb/data -type d -nocomplain tc*] {
+    puts "testcase $tc"
+    foreach fileName [glob -directory $tc -type f -nocomplain *.in *.out *.ref] {
+      file copy -force $fileName .
+    }
+    restart -force
+    run 1 ms
+    foreach fileName [glob -dir . -type f -nocomplain *.in *.out *.ref] {
+      file delete -force $fileName
+    }
+  }
+  quit -sim
+}
+
+
+#-------------------------------------------------------------------------------
+# Auxiliary procedures
+#-------------------------------------------------------------------------------
+
+# compute simulation directory
+proc simdir {arg_env arg_dir arg_lib {return_code -1}} {
+  global env
+  # The order of arg_dir is important when modules with the same name exist;
+  # the first directory in which the mpf is found will be used
+  foreach sdir $arg_dir {
+    # First look in the project default Modelsim project directory
+    if {[file exists "$env($arg_env)/Firmware/$sdir/$arg_lib/build/sim/modelsim"]} {
+      return $env($arg_env)/Firmware/$sdir/$arg_lib/build/sim/modelsim
+    } else {
+      # then also support the <sopc design name>_sim directory generated by SOPC Builder
+      # note that for this path to be found the modelsim/ directory in sim/ must be deleted
+      if {[catch {glob -directory $env($arg_env)/Firmware/$sdir/$arg_lib/build/synth/quartus *_sim/}] == 0} {
+        # If there exists more than one dir ending in '_sim' then return only the first
+        return [lindex [glob -directory $env($arg_env)/Firmware/$sdir/$arg_lib/build/synth/quartus *_sim/] 0]
+      }
+    }
+  }
+  if {$return_code==-1} {
+    # Default raise error to abort script
+    error "Project directory $arg_lib not found"
+  } else {
+    # Optionally return with return_code to continue script
+    return $return_code
+  }
+}
+
+# compute tb directory
+proc tbdir {arg_env arg_lib} {
+  global env
+  return $env($arg_env)/Firmware/modules/$arg_lib/tb
+}
+
+# find out which environment operating system we are on
+proc this_os {} {
+  if {$::tcl_platform(platform)=="windows"} {
+    return "Windows"
+  } else {
+    return "Not Windows"   ;# Linux, Unix, ...
+  }
+}
+
+# adapt makefile to allow (arch_name) in filenames by changing them into \\(arch_name\\)
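+# (e.g. a makefile line containing "(rtl)" becomes "\(rtl\)" so that make does
+#  not misinterpret the parentheses on Linux)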
+proc adapt_makefile arg {
+  if {[this_os]=="Windows"} {
+    # Nothing to do, works OK
+  } else {
+    set arch_names {"pkg" "rtl" "str" "wrap" "recursive" "beh" "empty" "stratix4"}
+    set fh [open $arg r]
+    set txt [read $fh]
+    close $fh
+    foreach an $arch_names {
+      set ai [string first "($an)" $txt]
+      while {$ai != -1} {
+        set txt [string replace $txt $ai [expr $ai + [string length $an] + 1] "\\($an\\)"]
+        incr ai 2
+        set ai [string first "($an)" $txt $ai]
+      }
+    }
+    set fh [open $arg w]
+    puts $fh $txt
+    close $fh
+  }
+}
+
+
+
+#-------------------------------------------------------------------------------
+# DS = Delete Signals : deletes all signals in the waveform window.
+#-------------------------------------------------------------------------------
+proc ds {} {
+  delete wave *
+}
+
+#-------------------------------------------------------------------------------
+# AS = Add signals : adds all signals up to hierarchy depth to the wave window
+#-------------------------------------------------------------------------------
+proc as {depth {inst ""}} {
+  #asf $depth
+  asg $depth $inst
+}
+
+#-------------------------------------------------------------------------------
+# ASF = add signals flat : adds all signals, without grouping, up to hierarchy
+# depth to the wave window. It will automatically add a divider, and it will
+# discard all nxt_ and i_ signals. Altera alt_ blocks can optionally be ignored.
+#-------------------------------------------------------------------------------
+proc asf depth {
+  global env
+  # Start with all signals in the model.
+  add wave -noupdate -divider {as}
+  add wave -noupdate -depth $depth -r "/*"
+  # Allow users to set environment variable if they don't want the signals to be deleted
+  if { ![info exists ::env(MODELSIM_WAVE_NO_DEL) ] } {
+    delete wave */nxt_*
+    delete wave */i_*
+  }
+  #delete wave */alt*
+  configure wave -signalnamewidth 0
+  echo "Done."
+}
+
+#-------------------------------------------------------------------------------
+# ASG = add signals in groups : recursively scans the hierarchy and adds signals
+#       groupwise to the wave window.
+#       Normal use: 
+#       . asg [depth] 
+#         => Adds all signals down to a depth of [depth].
+#       Advanced/debugging use:
+#       . asg [depth] [instance_name]
+#         => Adds all signals in [instance_name] down to a level of [depth]
+#         NOTE: instance_name = NOT the entity name!
+#-------------------------------------------------------------------------------
+proc asg {depth {inst ""}} {
+  add_wave_grouped_recursive "" "" $depth $inst 0
+  wave refresh
+  # The grouping already displays the hierarchy, so use short signal names.
+  config wave -signalnamewidth 1
+  # With our short signal names, the name column can be narrower than default.
+  config wave -namecolwidth 300
+}
+
+# called by ASG:
+proc add_wave_grouped_recursive {current_level prev_group_option depth target_inst target_inst_det} {
+  # Find all instances (=next hierarchy levels) in the current hierarchy level
+  set found_instances [find instances "$current_level/*"]
+
+  # Find all blocks (=GENERATE statement labels that are also hierarchy levels to be explored)
+  set found_blocks [find blocks "$current_level/*"]
+
+  # Concatenate the instance list with the block list, sort them alphabetically
+  set objects [lsort -dictionary [concat $found_instances $found_blocks]]
+
+  foreach object $objects {
+    # Separate "/object_path"  from "(entity_name)"
+    set object_path [lindex [split $object " "] 0]
+    # Get the word after last "/"
+    set gname     [lrange [split $object_path "/"] end end]
+
+    if {[path_depth $object_path]<$depth} {
+      if  { $gname == $target_inst || $target_inst_det==1}  {
+        # Found an instance that matches user input - or we're already inside that instance.
+        add_wave_grouped_recursive  "$object_path"  "$prev_group_option -group $gname" $depth $target_inst 1 
+      } else {
+        add_wave_grouped_recursive  "$object_path"  "$prev_group_option -group $gname" $depth $target_inst 0
+      }
+    }
+  }
+  
+  if { $current_level != "" } {
+    # First check if what we're about to add is an instance, not merely a GENERATE level
+    if {[context isInst $current_level]==1} {
+      set CMD "add wave -noupdate -radix unsigned $prev_group_option $current_level/*"
+
+      if {$target_inst!=""} {
+        # User passed a target inst. Check if we are inside of it.
+        if {$target_inst_det==0} {
+          # We're not in the target instance. Only add a group and move on.
+          set CMD "add wave -noupdate -radix unsigned $prev_group_option"
+        }
+      }
+      # Use catch so e.g. empty entities don't cause script to fail
+      catch {eval $CMD}
+    }
+    return
+  }
+}
+
+# Count the number of occurrences of a substring in a string:
+proc scount {subs string} {
+  regsub -all $subs $string $subs string
+}
+
+# Return the depth of a given path; e.g. /some/path/to/some/thing = 5.
+proc path_depth path {
+  scount "/" $path 
+}
+
+
+#-------------------------------------------------------------------------------
+# NOWARN by default disables the library warnings for subsequent simulation runs.
+# Use argument 0 to enable the warnings again.
+#-------------------------------------------------------------------------------
+proc nowarn {{off 1}} {
+  set ::StdArithNoWarnings   $off
+  set ::NumericStdNoWarnings $off
+}
diff --git a/tools/oneclick/base/common_dict_file.py b/tools/oneclick/base/common_dict_file.py
new file mode 100644
index 0000000000..322371c775
--- /dev/null
+++ b/tools/oneclick/base/common_dict_file.py
@@ -0,0 +1,255 @@
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy) <http://www.astron.nl/>
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+"""Common class for accessing a Python dictionary in one or more files
+
+   The contents of the dictionary file consist of a series of key - value
+   pairs. These key - value pairs are read from the file and kept in a
+   single dictionary of keys and values.
+
+   The format of the dictionary file is similar to that of an ini file but
+   without the sections. For ini files Python has the ConfigParser package.
+   Effectively the dictionary file only contains one section. It is not
+   obvious how to deal with sections if only one dictionary needs to be
+   stored flat. Defining a single section, is misleading because it suggests
+   that more sections are supported. Alternatively prepending a dummy
+   section when the file is read is also a bit awkward. In fact the parsing
+   by read_dict_file() is relatively simple and the other methods would also
+   be needed if the ConfigParser would be used. Therefore the Python
+   ConfigParser package for ini files is not used.
+
+   The key and value string in the dictionary file are separated by '='. Hence
+   the '=' character can not be used in keys. The '=' can be used in values,
+   because subsequent '=' on the same line are part of the value.
+   Every key must start on a new line. The value string can extend over one
+   or multiple lines. Comment lines are supported by preceding them with a '#'
+   and will get stripped.
+
+     # This is a comment section
+     # a key starts on a new line and extends until the '='
+     # a key and its values are separated by '='
+
+     key=string
+     key =string
+     key = string
+
+     # a key with multiple values in its string
+     key = value value value
+
+     # how the values are separated depends on the dictionary user
+     key = value, value, value
+     key = value value
+
+     # a key with many values can have its string extend on multiple lines,
+     # the newline is replaced by a ' ' and any indent is removed
+     key =
+     value
+     value
+     value
+
+     # empty lines and spaces are allowed
+     key = value
+
+   Whether the key is a valid key depends on the dict user that interprets
+   the dictionary.
+
+   It can be useful to be able to refer to the dicts based on a certain key
+   value, e.g. a key value that gives the name of the dict. The methods
+   get_key_values() and get_dicts() provide a means to easily link a dict
+   to its name and its name to the dict. The argument and the return can be
+   a list or a single value. By default the init list of self.dicts is used as
+   argument.
+
+   'fileName'     = the name of the dictionary file including extension
+   'filePath'     = the full directory name where the dictionary file is
+                    stored
+   'filePathName' = filePath/fileName
+
+"""
+
+import common as cm
+import sys
+import os
+import os.path
+import collections
+
+class CommonDictFile:
+
+    def __init__(self, rootDir, fileName='dict.txt'):
+        """Store the dictionaries from all fileName files in rootDir."""
+        self.CDF_COMMENT = '#'
+        self.CDF_SEPARATOR = '='
+        self.rootDir = rootDir
+        self.fileName = fileName                            # all dictionary files have the same fileName
+        self.filePaths = self.find_all_dict_file_paths()    # list of all directory paths of dictionary files that are available in the rootDir tree
+        if len(self.filePaths)==0:
+            sys.exit('Error : No %s file found in %s directory tree.' % (fileName, rootDir))
+        self.filePathNames = []                             # list of all directory paths + fileName of the available dictionary files
+        for path in self.filePaths:
+            self.filePathNames.append(os.path.join(path, self.fileName))
+        self.dicts = self.read_all_dict_files()             # list of dictionaries that are read from the available dictionary files
+        self.nof_dicts = len(self.dicts)                    # number of dictionaries = number of dictionary files
+
+    def find_all_dict_file_paths(self, rootDir=None):
+        """Recursively search the rootDir tree to find the paths to all fileName files."""
+        if rootDir==None: rootDir=self.rootDir
+        paths = []
+        for root, _, files in os.walk(rootDir):
+            if self.fileName in files:
+                paths.append(root)
+        return paths
+
+    def read_all_dict_files(self, filePathNames=None):
+        """Read the dictionary information from all files that were found in the rootDir tree."""
+        if filePathNames==None: filePathNames=self.filePathNames
+        read_dicts = []
+        for fp in filePathNames:
+            read_dicts.append(self.read_dict_file(fp))
+        return read_dicts
+
+    def read_dict_file(self, filePathName=None, valueSeparator=' '):
+        """Read the dictionary information the filePathName file."""
+        if filePathName==None: filePathName=self.filePathNames[0]
+        file_dict = collections.OrderedDict()
+        with open(filePathName, 'r') as fp:
+            lineNr=0
+            key = ''
+            value = ''
+            for line in fp:
+                if line.find(self.CDF_COMMENT)==-1:          # strip lines with comment
+                    key_end = line.find(self.CDF_SEPARATOR)  # find key
+                    if key_end>=0:
+                        key = line[0:key_end].strip()        # new key
+                        value = line[key_end+1:].strip()     # new value
+                    else:
+                        value += valueSeparator              # replace newline by valueSeparator
+                        value += line.strip()                # append value
+                    if key!='':
+                        file_dict[key] = value.strip()       # update key in dict
+                lineNr += 1
+        return file_dict
+
+    def write_dict_file(self, dicts, filePathNames, keySeparator=None):
+        if keySeparator==None: keySeparator=self.CDF_SEPARATOR
+        for fpn, the_dict in zip(cm.listify(filePathNames), cm.listify(dicts)):
+            with open(fpn, 'w') as fp:
+                for key in the_dict:
+                    fp.write('%s%s%s\n' % (key, keySeparator, the_dict[key]))
+
+    def append_key_to_dict_file(self, filePathName, key, values):
+        with open(filePathName, 'a') as fp:
+            if len(cm.listify(values))==1:
+                fp.write('%s = %s' % (key, values))
+            else:
+                fp.write('%s = \n' % key)
+                for v in cm.listify(values):
+                    fp.write('%s\n' % v)
+
+    def get_filePath(self, the_dict):
+        return self.filePaths[self.dicts.index(the_dict)]
+
+    def get_filePathName(self, the_dict):
+        return self.filePathNames[self.dicts.index(the_dict)]
+
+    def get_key_values(self, key, dicts=None):
+        """Get the value of a key in the dicts or None in case the key does not exist."""
+        if dicts==None: dicts=self.dicts
+        key_values = []
+        for fd in cm.listify(dicts):
+            if key in fd:
+                key_values.append(fd[key])
+            else:
+                key_values.append(None)
+        return cm.unlistify(key_values)
+
+    def get_dicts(self, key, values, dicts=None):
+        """Get all dictionaries in dicts that contain a key with a value specified in values."""
+        if dicts==None:
+            dicts=self.dicts
+        the_dicts = []
+        for fd in cm.listify(dicts):
+            if fd not in the_dicts:
+                if key in fd:
+                    if fd[key] in cm.listify(values):
+                        the_dicts.append(fd)
+        return cm.unlistify(the_dicts)
+
+
+if __name__ == '__main__':
+    tmpFileName = 'tmp_dict.txt'
+    tmpDirName = 'tmp_dict'
+
+    # Create some example file
+    with open(tmpFileName, 'w') as f:
+        f.write('# Example dictionary file 0\n')
+        f.write('src=\n')
+        f.write('z.vhd\n')
+        f.write('\n')
+        f.write('u.vhd\n')
+        f.write('tb = \'c.vhd\n')
+        f.write('x.vhd\'\n')
+        f.write('abc.vhd\n')
+        f.write('syn=\n')
+        f.write(' = \n')
+        f.write('              \n')
+        f.write('sim= d, u\n')
+        f.write('test= d u\n')
+        f.write('src_files = \n')
+        f.write('     x.vhd \n')
+        f.write('    y.vhd \n')
+        f.write('equal= with = in value\n')
+        f.write('\n')
+
+    # and create another example file in a sub directory
+    os.mkdir(tmpDirName)
+    tmpFilePathName = os.path.join(tmpDirName, tmpFileName)
+    with open(tmpFilePathName, 'w') as f:
+        f.write('# Example dictionary file 1\n')
+        f.write('   # Strip indent comment\n')
+        f.write('skip = will get skipped due to # this indent comment\n')
+        f.write('src = x.vhd y.vhd\n')
+
+    # Read the dictionary from the example files
+    cdf = CommonDictFile(rootDir='./', fileName=tmpFileName)
+
+    print '#'
+    print '# Test for CommonDictFile.py'
+    print '#'
+    print 'rootDir       = %s' % cdf.rootDir
+    print 'nof_dicts     = %d' % cdf.nof_dicts
+    print 'fileName      = %s' % cdf.fileName
+    print 'filePaths     = %s' % cdf.filePaths
+    print 'filePathNames = %s' % cdf.filePathNames
+    print ''
+    for i, p in enumerate(cdf.filePaths):
+        print os.path.join(p, cdf.fileName)
+        d = cdf.dicts[i]
+        for k,v in d.iteritems():
+            print k, '=', v
+        print ''
+
+    # Write dict file
+    cdf.write_dict_file(cdf.dicts[0], 'tmp_dict.out')
+
+    # Remove the example files
+    os.remove(tmpFileName)
+    os.remove(tmpFilePathName)
+    os.rmdir(tmpDirName)
\ No newline at end of file
diff --git a/tools/oneclick/base/hdl_config.py b/tools/oneclick/base/hdl_config.py
new file mode 100644
index 0000000000..f342ecf6e4
--- /dev/null
+++ b/tools/oneclick/base/hdl_config.py
@@ -0,0 +1,162 @@
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy) <http://www.astron.nl/>
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+"""HDL configuration for building simulation and synthesis targets.
+
+   There should be one hdltool.cfg file somewhere in the toolRootDir and at
+   least one hdllib.cfg file somewhere in the libRootDir. Every HDL library
+   that is in the libRootDir can be found if it has a hdllib.cfg file.
+   Together the hdltool.cfg and hdllib.cfg files contain all the keys and
+   values that are sufficient to be able to build the targets for the HDL
+   library. The possible targets are:
+
+   - compile to create the library binaries for simulation
+   - synthesize to create an image that can be loaded on the FPGA
+   - verify VHDL test benches in simulation
+   - verify Python test cases via the MM control interface in simulation
+   - validate Python test cases on hardware via the MM control interface
+
+   The contents of the cfg files consist of a series of key - value pairs
+   that are read into a dictionary as defined in common_dict_file.py. Whether
+   the key is a valid key depends on the application that interprets the
+   dictionary.
+
+   The methods can have the library dictionary or the library name as
+   argument. The default arguments are the self.libs.dicts and the
+   corresponding self.lib_names. The argument can be a list or a single value.
+   Similarly the return can be a list or a single value, because a list of one
+   element is unlistified.
+
+"""
+
+import common as cm
+import common_dict_file
+import sys
+import os
+import os.path
+
+class HdlConfig:
+
+    def __init__(self, libRootDir, toolRootDir='', libFileName='hdllib.cfg', toolFileName='hdltool.cfg'):
+        """Get tool dictionary info from toolRootDir and all HDL library dictionary info from libRootDir."""
+        # HDL tool config file
+        self.tool = common_dict_file.CommonDictFile(toolRootDir, toolFileName)      # tool dict file
+        if self.tool.nof_dicts==0: sys.exit('Error : No HDL tool config file found')
+        if self.tool.nof_dicts >1: sys.exit('Error : Multiple HDL tool config files found')
+        # HDL library config files
+        self.libs = common_dict_file.CommonDictFile(libRootDir, libFileName)        # library dict files
+        if self.libs.nof_dicts==0: sys.exit('Error : No HDL library config file found')
+        self.lib_names = self.libs.get_key_values('hdl_lib_name')
+        self.MODELSIM_MPF_NAME = 'hdllib.mpf'
+
+    def derive_all_use_libs(self, lib_name):
+        """Derive a list of all HDL libraries that the specified HDL lib_name library depends on."""
+        if lib_name in self.lib_names:
+            all_use_libs = [lib_name]
+            lib_dict = self.libs.dicts[self.lib_names.index(lib_name)]
+            if 'hdl_lib_uses' in lib_dict:
+                use_libs = lib_dict['hdl_lib_uses'].split()
+                for use_lib in use_libs:
+                    all_use_libs.append(use_lib)
+                    all_use_libs += self.derive_all_use_libs(use_lib)
+            return cm.unique(all_use_libs)
+        else:
+            sys.exit('Error : Unknown HDL library name')
+
+    def derive_lib_order(self, lib_names=None):
+        """Derive the dependency order for all HDL libraries in the specified list of lib_names."""
+        if lib_names==None: lib_names=self.lib_names
+        lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
+        lib_order = []
+        for lib_dict in lib_dicts:
+            lib_name = lib_dict['hdl_lib_name']
+            if not lib_name in lib_order:
+                lib_order.append(lib_name)                                        # append this lib
+            if 'hdl_lib_uses' in lib_dict:
+                use_libs = lib_dict['hdl_lib_uses'].split()
+                for use_lib in use_libs:
+                    if not use_lib in lib_order:
+                        lib_order.insert(lib_order.index(lib_name), use_lib)      # insert used lib just before this lib
+                    else:
+                        if lib_order.index(use_lib) > lib_order.index(lib_name):
+                            lib_order.remove(use_lib)
+                            lib_order.insert(lib_order.index(lib_name), use_lib)  # move used lib to just before this lib
+        return lib_order
+
+    def get_lib_build_sim_dirs(self, lib_dicts=None):
+        """Get the simulation build directory for all HDL libraries in the specified list of lib_dicts."""
+        if lib_dicts==None: lib_dicts=self.libs.dicts
+        sim_dirs = []
+        for lib_dict in cm.listify(lib_dicts):
+            lib_path = self.libs.get_filePath(lib_dict)
+            build_dir = self.libs.get_key_values('build_dir', lib_dict)
+            sim_tool = self.tool.get_key_values('sim_tool')
+            sim_dirs.append(os.path.join(lib_path, build_dir, sim_tool))
+        return cm.unlistify(sim_dirs)
+
+
+if __name__ == '__main__':
+    # Read the dictionary info from all HDL tool and library configuration files in the libRootDir and toolRootDir trees
+    libRootDir = 'RADIOHDL'
+    #libRootDir = 'UNB'
+    hdl = HdlConfig(libRootDir=os.environ[libRootDir], toolRootDir=os.environ['UNB'], libFileName='hdllib.cfg', toolFileName='hdltool.cfg')
+
+    print '#'
+    print '# HdlConfig:'
+    print '#'
+    for i, p in enumerate(hdl.libs.filePathNames):
+        print i, p
+        d = hdl.libs.dicts[i]
+        for k,v in d.iteritems():
+            print k, '=', v
+        print ''
+
+    print ''
+    print 'tool = ', hdl.tool.filePathNames[0]
+    
+    print ''
+    print 'lib paths'
+    for p in hdl.libs.filePaths:
+        print '    ', p
+        
+    print ''
+    print 'lib paths names'
+    for p in hdl.libs.filePathNames:
+        print '    ', p
+        
+    print ''
+    print 'lib_names        = ', hdl.lib_names
+    print 'derive_lib_order : ', hdl.derive_lib_order()
+
+    print ''
+    print 'get_lib_build_sim_dirs:'
+    for sim_dir in hdl.get_lib_build_sim_dirs():
+        print '    ', sim_dir
+
+    print ''    
+    if libRootDir=='RADIOHDL':
+        print 'derive_all_use_libs = ', hdl.derive_lib_order(hdl.derive_all_use_libs('technology_memory'))
+    if libRootDir=='UNB':
+        print 'derive_all_use_libs = ', hdl.derive_lib_order(hdl.derive_all_use_libs('dp'))
+        print 'derive_all_use_libs = ', hdl.derive_lib_order(hdl.derive_all_use_libs('uth'))
+        print 'derive_all_use_libs = ', hdl.derive_lib_order(hdl.derive_all_use_libs('unb_common'))
+        print 'derive_all_use_libs = ', hdl.derive_lib_order(hdl.derive_all_use_libs('unb_minimal'))
+
diff --git a/tools/oneclick/base/modelsim_config.py b/tools/oneclick/base/modelsim_config.py
new file mode 100644
index 0000000000..8b755ed113
--- /dev/null
+++ b/tools/oneclick/base/modelsim_config.py
@@ -0,0 +1,186 @@
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy) <http://www.astron.nl/>
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+"""HDL configuration for building Modelsim simulation targets.
+"""
+
+import common as cm
+import hdl_config
+import sys
+import os
+import os.path
+
+class ModelsimConfig(hdl_config.HdlConfig):
+
+    def __init__(self, libRootDir, toolRootDir='', libFileName='hdllib.cfg', toolFileName='hdltool.cfg'):
+        """Get Modelsim tool info from toolRootDir and all HDL library info from libRootDir."""
+        hdl_config.HdlConfig.__init__(self, libRootDir, toolRootDir, libFileName, toolFileName)
+        # Modelsim
+        self.MODELSIM_MPF_NAME = 'hdllib.mpf'
+
+    def read_compile_order_from_mpf(self, mpfPathName):
+        """Utility to read the compile order of the project files from an existing <mpfPathName>.mpf and save it into hdl_order.out."""
+        # read <mpfPathName>.mpf to find all project files
+        project_file_indices = []
+        project_file_names = []
+        with open(mpfPathName, 'r') as fp:
+            for line in fp:
+                words = line.split()
+                if len(words)>0:
+                    key = words[0]
+                    if key.find('Project_File_')>=0 and key.find('Project_File_P_')==-1:
+                        project_file_indices.append(key[len('Project_File_'):])
+                        project_file_names.append(words[2])
+        # read <mpfPathName>.mpf again to find compile order for the project files
+        compile_order = range(len(project_file_names))
+        with open(mpfPathName, 'r') as fp:
+            for line in fp:
+                words = line.split()
+                if len(words)>0:
+                    key = words[0]
+                    if key.find('Project_File_P_')>=0:
+                        project_file_index = project_file_indices.index(key[len('Project_File_P_'):])
+                        project_file_name = project_file_names[project_file_index]
+                        k = words.index('compile_order')
+                        k = int(words[k+1])
+                        compile_order[k]=project_file_name
+        return compile_order
+
+    def create_modelsim_project_file(self, lib_names=None):
+        """Create the Modelsim project file for all HDL libraries in the specified list of lib_names."""
+        if lib_names==None: lib_names=self.lib_names
+        lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
+        for lib_dict in cm.listify(lib_dicts):
+            # Open mpf
+            mpf_name = self.MODELSIM_MPF_NAME
+            mpf_path = self.get_lib_build_sim_dirs(lib_dict)
+            cm.mkdir(mpf_path)
+            mpfPathName = os.path.join(mpf_path, mpf_name)
+            with open(mpfPathName, 'w') as fp:
+                # Write [Library] section for all used libraries
+                fp.write('[Library]\n')
+                lib_name = lib_dict['hdl_lib_name']
+                use_lib_names = self.derive_all_use_libs(lib_name)
+                use_lib_dicts = self.libs.get_dicts('hdl_lib_name', use_lib_names)
+                use_lib_build_sim_dirs = self.get_lib_build_sim_dirs(use_lib_dicts)
+                use_lib_clause_names = self.libs.get_key_values('hdl_library_clause_name', use_lib_dicts)
+                for lib_clause, lib_dir in zip(cm.listify(use_lib_clause_names), cm.listify(use_lib_build_sim_dirs)):
+                    lib_work = os.path.join(lib_dir, 'work')
+                    fp.write('%s = %s\n' % (lib_clause, lib_work))
+                fp.write('work = work\n')
+                model_tech_dir = os.path.expandvars(self.tool.get_key_values('model_tech_dir'))
+                fp.write('others = %s\n' % os.path.join(model_tech_dir, 'modelsim.ini'))
+                # Write [Project] section for all used libraries
+                fp.write('[Project]\n')
+                fp.write('Project_Version = 6\n')  # must be >= 6 to fit all
+                fp.write('Project_DefaultLib = work\n')
+                fp.write('Project_SortMethod = unused\n')
+                # - hdl files
+                synth_files = lib_dict['synth_files'].split()
+                test_bench_files = lib_dict['test_bench_files'].split()
+                sim_files = synth_files + test_bench_files
+                fp.write('Project_Files_Count = %d\n' % len(sim_files))
+                lib_path = self.libs.get_filePath(lib_dict)
+                for i, fn in enumerate(sim_files):
+                    filePathName = os.path.join(lib_path, fn)
+                    fp.write('Project_File_%d = %s\n' % (i, filePathName))
+                project_file_p_defaults = 'vhdl_novitalcheck 0 file_type vhdl group_id 0 cover_nofec 0 vhdl_nodebug 0 vhdl_1164 1 vhdl_noload 0 vhdl_synth 0 vhdl_enable0In 0 vlog_1995compat 0 last_compile 0 vhdl_disableopt 0 cover_excludedefault 0 vhdl_vital 0 vhdl_warn1 1 vhdl_warn2 1 vhdl_explicit 0 vhdl_showsource 0 cover_covercells 0 vhdl_0InOptions {} vhdl_warn3 1 vlog_vopt {} cover_optlevel 3 voptflow 1 vhdl_options {} vhdl_warn4 1 toggle - ood 0 vhdl_warn5 1 cover_noshort 0 compile_to work cover_nosub 0 dont_compile 0 vhdl_use93 2002 cover_stmt 1'
+                project_folders = []
+                if len(synth_files)>0:
+                    project_folders.append('synth')
+                    for i, fn in enumerate(synth_files):
+                        filePathName = os.path.join(lib_path, fn)
+                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (i, project_folders[-1], i, project_file_p_defaults))
+                if len(test_bench_files)>0:
+                    project_folders.append('test_bench')
+                    for j, fn in enumerate(test_bench_files):
+                        filePathName = os.path.join(lib_path, fn)
+                        fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (i+j+1, project_folders[-1], i+j+1, project_file_p_defaults))
+                fp.write('Project_Folder_Count = %d\n' % len(project_folders))
+                for i, fd in enumerate(project_folders):
+                    fp.write('Project_Folder_%d = %s\n' % (i, fd))
+                    fp.write('Project_Folder_P_%d = folder {Top Level}\n' % i)
+                # - simulation configurations
+                fp.write('Project_Sim_Count = %d\n' % len(test_bench_files))
+                project_sim_p_defaults = 'Generics {} timing default -std_output {} -nopsl 0 +notimingchecks 0 -L {} selected_du {} -hazards 0 -sdf {} ok 1 -0in 0 -nosva 0 +pulse_r {} -absentisempty 0 OtherArgs {} -multisource_delay {} +pulse_e {} vopt_env 1 -coverage 0 -sdfnoerror 0 +plusarg {} -vital2.2b 0 -t default -memprof 0 is_vopt_flow 0 -noglitch 0 -nofileshare 0 -wlf {} -assertdebug 0 +no_pulse_msg 0 -0in_options {} -assertfile {} -sdfnowarn 0 -Lf {} -std_input {}'
+                project_sim_p_optimization = 'is_vopt_opt_used 2'  # = when 'Enable optimization' is not selected in GUI
+                project_sim_p_optimization = 'is_vopt_opt_used 1 voptargs {OtherVoptArgs {} timing default VoptOutFile {} -vopt_keep_delta 0 -0in 0 -fvopt {} VoptOptimize:method 1 -vopt_00 2 +vopt_notimingcheck 0 -Lfvopt {} VoptOptimize:list .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.listbox -Lvopt {} +vopt_acc {} VoptOptimize .vopt_opt.nb.canvas.notebook.cs.page1.cs -vopt_hazards 0 VoptOptimize:Buttons .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.bf 0InOptionsWgt .vopt_opt.nb.canvas.notebook.cs.page3.cs.zf.ze -0in_options {}}' # = when 'Enable optimization' is selected in GUI for full visibility
+                for i, fn in enumerate(test_bench_files):
+                    fName = os.path.basename(fn)
+                    tbName = os.path.splitext(fName)[0]
+                    fp.write('Project_Sim_%d = %s\n' % (i, tbName))
+                for i, fn in enumerate(test_bench_files):
+                    fName = os.path.basename(fn)
+                    tbName = os.path.splitext(fName)[0]
+                    fp.write('Project_Sim_P_%d = folder {Top Level} additional_dus work.%s %s %s\n' % (i, tbName, project_sim_p_defaults, project_sim_p_optimization))
+                # Write [vsim] section
+                fp.write('[vsim]\n')
+                fp.write('RunLength = 0 ps\n')
+                fp.write('resolution = 1ps\n')
+                fp.write('IterationLimit = 100\n')
+                fp.write('DefaultRadix = decimal\n')
+
+    def create_modelsim_project_files_file(self, fileNamePath='modelsim_project_files.txt', lib_names=None):
+        """Create file with list of the Modelsim project files for all HDL libraries in the specified list of lib_names."""
+        if lib_names==None: lib_names=self.lib_names
+        with open(fileNamePath, 'w') as fp:
+            lib_dicts = self.libs.get_dicts('hdl_lib_name', lib_names)
+            mpf_paths = self.get_lib_build_sim_dirs(lib_dicts)
+            for lib, path in zip(cm.listify(lib_names),cm.listify(mpf_paths)):
+                fp.write('%s = %s\n' % (lib, path))
+
+
+if __name__ == '__main__':
+    # Read the dictionary info from all HDL tool and library configuration files in the libRootDir and toolRootDir trees
+    libRootDir = 'RADIOHDL'
+    #libRootDir = 'UNB'
+    msim = ModelsimConfig(libRootDir=os.environ[libRootDir], toolRootDir=os.environ['UNB'], libFileName='hdllib.cfg', toolFileName='hdltool.cfg')
+
+    print '#'
+    print '# ModelsimConfig:'
+    print '#'
+    print 'derive_lib_order = ', msim.derive_lib_order()
+    print ''
+    print 'Create modelsim projects list file.'
+    msim.create_modelsim_project_files_file()
+
+    print ''
+    print 'Create modelsim project files for all HDL libraries in $%s.' % libRootDir
+    msim.create_modelsim_project_file()
+
+    # Use save_compile_order_in_mpf = True to avoid having to manually edit the compile order in the hdllib.cfg, because
+    # the synth_files need to be in hierarchical order. The test_bench_files are typically independent so these may be
+    # put in alphabetical order. The hdllib.cfg still does need some manual editing to set the proper key and paths.
+    lib_name = 'mm'
+    save_compile_order_in_mpf = False 
+    if libRootDir=='UNB' and save_compile_order_in_mpf:
+        # Read compile order from existing <lib_name>.mpf
+        mpfPathName = os.path.expandvars('$UNB/Firmware/designs/%s/build/synth/quartus/sopc_%s_sim/%s.mpf' % (lib_name, lib_name, lib_name))
+        mpfPathName = os.path.expandvars('$UNB/Firmware/modules/Lofar/%s/build/sim/modelsim/%s.mpf' % (lib_name, lib_name))
+        mpfPathName = os.path.expandvars('$UNB/Firmware/modules/%s/build/sim/modelsim/%s.mpf' % (lib_name, lib_name))
+        compile_order = msim.read_compile_order_from_mpf(mpfPathName)
+        # Append the compile_order list to the lib_name dictionary hdllib.cfg file
+        lib_dict = msim.libs.get_dicts('hdl_lib_name', lib_name)
+        lib_path = msim.libs.get_filePath(lib_dict)
+        filePathName = os.path.join(lib_path, 'hdllib.cfg')
+        print ''
+        print 'Save modelsim compile order for', lib_name, 'in HDL library config file', filePathName
+        msim.libs.append_key_to_dict_file(filePathName, 'files', compile_order)
-- 
GitLab