echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
echo set\\(HIDE_PRIVATE_SYMBOLS ON\\) >> config.cmake
+ echo set\\(USE_VTA_TSIM ON\\) >> config.cmake
+ echo set\\(USE_VTA_FSIM ON\\) >> config.cmake
"""
make(ci_cpu, 'build', '-j2')
pack_lib('cpu', tvm_lib)
echo set\\(USE_LLVM llvm-config-4.0\\) >> config.cmake
echo set\\(CMAKE_CXX_COMPILER g++\\) >> config.cmake
echo set\\(CMAKE_CXX_FLAGS -Werror\\) >> config.cmake
+ echo set\\(USE_VTA_TSIM ON\\) >> config.cmake
+ echo set\\(USE_VTA_FSIM ON\\) >> config.cmake
"""
make(ci_i386, 'build', '-j2')
pack_lib('i386', tvm_multilib)
DLPACK_PATH = $(ROOTDIR)/3rdparty/dlpack
endif
+ifndef VTA_HW_PATH
+ VTA_HW_PATH = $(ROOTDIR)/vta/vta-hw
+endif
+
INCLUDE_FLAGS = -Iinclude -I$(DLPACK_PATH)/include -I$(DMLC_CORE_PATH)/include
PKG_CFLAGS = -std=c++11 -Wall -O2 $(INCLUDE_FLAGS) -fPIC
PKG_LDFLAGS =
python3 3rdparty/dmlc-core/scripts/lint.py tvm4j-jni cpp jvm/native/src
scalalint:
- make -C vta/vta-hw/hardware/chisel lint
+ make -C $(VTA_HW_PATH)/hardware/chisel lint
lint: cpplint pylint jnilint scalalint
PROJROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" && pwd )"
# Derive target specified by vta_config.json
-VTA_CONFIG=${PROJROOT}/vta/vta-hw/config/vta_config.py
+VTA_CONFIG=${VTA_HW_PATH}/config/vta_config.py
TARGET=$(python ${VTA_CONFIG} --target)
export PYTHONPATH=${PYTHONPATH}:${PROJROOT}/python:${PROJROOT}/vta/python
set(USE_RELAY_DEBUG OFF)
# Whether to build fast VTA simulator driver
-set(USE_VTA_FSIM ON)
+set(USE_VTA_FSIM OFF)
# Whether to build cycle-accurate VTA simulator driver
-set(USE_VTA_TSIM ON)
+set(USE_VTA_TSIM OFF)
# Whether to build VTA FPGA driver (device side only)
set(USE_VTA_FPGA OFF)
# CMake Build rules for VTA
find_program(PYTHON NAMES python python3 python3.6)
-# VTA sources directory
-set(VTA_DIR ${CMAKE_CURRENT_SOURCE_DIR}/vta/vta-hw)
+# Default VTA_HW_PATH if not set in the environment
+if(NOT DEFINED ENV{VTA_HW_PATH})
+  set(ENV{VTA_HW_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/vta/vta-hw)
+endif()
if(MSVC)
message(STATUS "VTA build is skipped in Windows..")
elseif(PYTHON)
- set(VTA_CONFIG ${PYTHON} ${VTA_DIR}/config/vta_config.py)
+ set(VTA_CONFIG ${PYTHON} $ENV{VTA_HW_PATH}/config/vta_config.py)
if(EXISTS ${CMAKE_CURRENT_BINARY_DIR}/vta_config.json)
message(STATUS "Use VTA config " ${CMAKE_CURRENT_BINARY_DIR}/vta_config.json)
- set(VTA_CONFIG ${PYTHON} ${VTA_DIR}/config/vta_config.py
+ set(VTA_CONFIG ${PYTHON} $ENV{VTA_HW_PATH}/config/vta_config.py
--use-cfg=${CMAKE_CURRENT_BINARY_DIR}/vta_config.json)
endif()
# Fast simulator driver build
if(USE_VTA_FSIM)
# Add fsim driver sources
- file(GLOB FSIM_RUNTIME_SRCS ${VTA_DIR}/src/*.cc)
+ file(GLOB FSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/*.cc)
file(GLOB FSIM_RUNTIME_SRCS vta/runtime/*.cc)
- list(APPEND FSIM_RUNTIME_SRCS ${VTA_DIR}/src/sim/sim_driver.cc)
- list(APPEND FSIM_RUNTIME_SRCS ${VTA_DIR}/src/sim/sim_tlpp.cc)
- list(APPEND FSIM_RUNTIME_SRCS ${VTA_DIR}/src/vmem/virtual_memory.cc)
+ list(APPEND FSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/sim/sim_driver.cc)
+ list(APPEND FSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/sim/sim_tlpp.cc)
+ list(APPEND FSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/vmem/virtual_memory.cc)
# Target lib: vta_fsim
add_library(vta_fsim SHARED ${FSIM_RUNTIME_SRCS})
- target_include_directories(vta_fsim PUBLIC ${VTA_DIR}/include)
+ target_include_directories(vta_fsim PUBLIC $ENV{VTA_HW_PATH}/include)
foreach(__def ${VTA_DEFINITIONS})
string(SUBSTRING ${__def} 3 -1 __strip_def)
target_compile_definitions(vta_fsim PUBLIC ${__strip_def})
# Cycle accurate simulator driver build
if(USE_VTA_TSIM)
# Add tsim driver sources
- file(GLOB TSIM_RUNTIME_SRCS ${VTA_DIR}/src/*.cc)
+ file(GLOB TSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/*.cc)
file(GLOB TSIM_RUNTIME_SRCS vta/runtime/*.cc)
- list(APPEND TSIM_RUNTIME_SRCS ${VTA_DIR}/src/tsim/tsim_driver.cc)
- list(APPEND TSIM_RUNTIME_SRCS ${VTA_DIR}/src/dpi/module.cc)
- list(APPEND TSIM_RUNTIME_SRCS ${VTA_DIR}/src/vmem/virtual_memory.cc)
+ list(APPEND TSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/tsim/tsim_driver.cc)
+ list(APPEND TSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/dpi/module.cc)
+ list(APPEND TSIM_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/vmem/virtual_memory.cc)
# Target lib: vta_tsim
add_library(vta_tsim SHARED ${TSIM_RUNTIME_SRCS})
- target_include_directories(vta_tsim PUBLIC ${VTA_DIR}/include)
+ target_include_directories(vta_tsim PUBLIC $ENV{VTA_HW_PATH}/include)
foreach(__def ${VTA_DEFINITIONS})
string(SUBSTRING ${__def} 3 -1 __strip_def)
target_compile_definitions(vta_tsim PUBLIC ${__strip_def})
# VTA FPGA driver sources
if(USE_VTA_FPGA)
- file(GLOB FPGA_RUNTIME_SRCS ${VTA_HW_DIR}/src/*.cc)
+ file(GLOB FPGA_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/*.cc)
# Rules for Zynq-class FPGAs with pynq OS support (see pynq.io)
if(${VTA_TARGET} STREQUAL "pynq" OR
${VTA_TARGET} STREQUAL "ultra96")
- list(APPEND FPGA_RUNTIME_SRCS ${VTA_HW_DIR}/src/pynq/pynq_driver.cc)
+ list(APPEND FPGA_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/pynq/pynq_driver.cc)
# Rules for Pynq v2.4
find_library(__cma_lib NAMES cma PATH /usr/lib)
elseif(${VTA_TARGET} STREQUAL "de10nano") # DE10-Nano rules
- file(GLOB FPGA_RUNTIME_SRCS ${VTA_HW_DIR}/src/de10nano/*.cc ${VTA_HW_DIR}/src/*.cc)
+ file(GLOB FPGA_RUNTIME_SRCS $ENV{VTA_HW_PATH}/src/de10nano/*.cc $ENV{VTA_HW_PATH}/src/*.cc)
endif()
# Target lib: vta
add_library(vta SHARED ${FPGA_RUNTIME_SRCS})
target_link_libraries(vta ${__cma_lib})
elseif(${VTA_TARGET} STREQUAL "de10nano") # DE10-Nano rules
#target_compile_definitions(vta PUBLIC VTA_MAX_XFER=2097152) # (1<<21)
- target_include_directories(vta PUBLIC ${VTA_HW_DIR}/src/de10nano)
+ target_include_directories(vta PUBLIC $ENV{VTA_HW_PATH}/src/de10nano)
target_include_directories(vta PUBLIC 3rdparty)
target_include_directories(vta PUBLIC
"/usr/local/intelFPGA_lite/18.1/embedded/ds-5/sw/gcc/arm-linux-gnueabihf/include")
# spaces.
# Note: If this tag is empty the current directory is searched.
-INPUT = include/tvm topi/include/topi vta/vta-hw/include/vta
+INPUT = include/tvm topi/include/topi
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
You need [TVM installed](https://docs.tvm.ai/install/index.html) on your machine.
For a quick and easy start, use the pre-built [TVM Docker image](https://docs.tvm.ai/install/docker.html).
-The VTA simulator library is built by default with TVM.
-Add the VTA library to your python path to run the VTA examples.
+You'll need to set the following paths to use VTA:
+```bash
+export TVM_PATH=<path to TVM root>
+export VTA_HW_PATH=$TVM_PATH/vta/vta-hw
+```
+
+The VTA functional simulation library needs to be enabled when building TVM.
+```bash
+cd <tvm-root>
+mkdir build
+cp cmake/config.cmake build/.
+echo 'set(USE_VTA_FSIM ON)' >> build/config.cmake
+cd build && cmake .. && make -j4
+```
+
+Add the VTA python library to your python path to run the VTA examples.
```bash
export PYTHONPATH=/path/to/vta/python:${PYTHONPATH}
cd /home/xilinx/tvm
mkdir build
cp cmake/config.cmake build/.
-echo 'set(USE_VTA_FSIM OFF)' >> build/config.cmake
-echo 'set(USE_VTA_TSIM OFF)' >> build/config.cmake
echo 'set(USE_VTA_FPGA ON)' >> build/config.cmake
# Copy pynq specific configuration
cp vta/vta-hw/config/pynq_sample.json vta/vta-hw/config/vta_config.json
set -e
set -u
-export PYTHONPATH=python:vta/python:topi/python
+export TVM_PATH=.
+export PYTHONPATH=${TVM_PATH}/python:${TVM_PATH}/vta/python:${TVM_PATH}/topi/python
+export VTA_HW_PATH=vta/vta-hw
# cleanup pycache
find . -type f -path "*.pyc" | xargs rm -f
make cython3
# Reset default fsim simulation
-cp vta/vta-hw/config/fsim_sample.json vta/vta-hw/config/vta_config.json
+cp ${VTA_HW_PATH}/config/fsim_sample.json ${VTA_HW_PATH}/config/vta_config.json
# Run unit tests in functional/fast simulator
echo "Running unittest in fsim..."
-python3 -m pytest -v vta/tests/python/unittest
+python3 -m pytest -v ${TVM_PATH}/vta/tests/python/unittest
# Run unit tests in functional/fast simulator
echo "Running integration test in fsim..."
-python3 -m pytest -v vta/tests/python/integration
+python3 -m pytest -v ${TVM_PATH}/vta/tests/python/integration
set -e
set -u
-export PYTHONPATH=python:vta/python:topi/python
+export TVM_PATH=.
+export PYTHONPATH=${TVM_PATH}/python:${TVM_PATH}/vta/python:${TVM_PATH}/topi/python
+export VTA_HW_PATH=vta/vta-hw
# cleanup pycache
find . -type f -path "*.pyc" | xargs rm -f
make cython3
# Set default VTA config to use TSIM cycle accurate sim
-cp vta/vta-hw/config/tsim_sample.json vta/vta-hw/config/vta_config.json
+cp ${VTA_HW_PATH}/config/tsim_sample.json ${VTA_HW_PATH}/config/vta_config.json
# Build and run the TSIM apps (disable until refactor is complete)
# echo "Test the TSIM apps..."
-# make -C vta/vta-hw/apps/tsim_example/ run_verilog
-# make -C vta/vta-hw/apps/tsim_example/ run_chisel
-# make -C vta/vta-hw/apps/gemm/ default
+# make -C ${VTA_HW_PATH}/apps/tsim_example/ run_verilog
+# make -C ${VTA_HW_PATH}/apps/tsim_example/ run_chisel
+# make -C ${VTA_HW_PATH}/apps/gemm/ default
# Check style of scala code
echo "Check style of scala code..."
-make -C vta/vta-hw/hardware/chisel lint
+make -C ${VTA_HW_PATH}/hardware/chisel lint
# Build VTA chisel design and verilator simulator
echo "Building VTA chisel design..."
-make -C vta/vta-hw/hardware/chisel cleanall
-make -C vta/vta-hw/hardware/chisel USE_THREADS=0 lib
+make -C ${VTA_HW_PATH}/hardware/chisel cleanall
+make -C ${VTA_HW_PATH}/hardware/chisel USE_THREADS=0 lib
# Run unit tests in cycle accurate simulator
echo "Running unittest in tsim..."
-python3 -m pytest -v vta/tests/python/unittest
+python3 -m pytest -v ${TVM_PATH}/vta/tests/python/unittest
# Run unit tests in cycle accurate simulator
echo "Running integration test in tsim..."
-python3 -m pytest -v vta/tests/python/integration
+python3 -m pytest -v ${TVM_PATH}/vta/tests/python/integration
# Reset default fsim simulation
-cp vta/vta-hw/config/fsim_sample.json vta/vta-hw/config/vta_config.json
+cp ${VTA_HW_PATH}/config/fsim_sample.json ${VTA_HW_PATH}/config/vta_config.json
# specific language governing permissions and limitations
# under the License.
"""Configurable VTA Hareware Environment scope."""
-# pylint: disable=invalid-name
+# pylint: disable=invalid-name, exec-used
from __future__ import absolute_import as _abs
import os
import tvm
from tvm import te
from . import intrin
-from .pkg_config import PkgConfig
+def get_vta_hw_path():
+ """Get the VTA HW path."""
+ curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
+ vta_hw_default = os.path.abspath(os.path.join(curr_path, "../../vta-hw"))
+ VTA_HW_PATH = os.getenv('VTA_HW_PATH', vta_hw_default)
+ return VTA_HW_PATH
+
+def pkg_config(cfg):
+ """Returns PkgConfig pkg config object."""
+ pkg_config_py = os.path.join(get_vta_hw_path(), "config/pkg_config.py")
+ libpkg = {"__file__": pkg_config_py}
+ exec(compile(open(pkg_config_py, "rb").read(), pkg_config_py, "exec"), libpkg, libpkg)
+ PkgConfig = libpkg["PkgConfig"]
+ return PkgConfig(cfg)
class DevContext(object):
"""Internal development context
# initialization function
def __init__(self, cfg):
# Produce the derived parameters and update dict
- self.pkg = self.pkg_config(cfg)
+ self.pkg = pkg_config(cfg)
self.__dict__.update(self.pkg.cfg_dict)
# data type width
self.INP_WIDTH = 1 << self.LOG_INP_WIDTH
def __exit__(self, ptype, value, trace):
Environment.current = self._last_env
- def pkg_config(self, cfg):
- """PkgConfig instance"""
- curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
- proj_root = os.path.abspath(os.path.join(curr_path, "../../"))
- return PkgConfig(cfg, proj_root)
-
@property
def cfg_dict(self):
return self.pkg.cfg_dict
def _init_env():
"""Initialize the default global env"""
- curr_path = os.path.dirname(
- os.path.abspath(os.path.expanduser(__file__)))
- proj_root = os.path.abspath(os.path.join(curr_path, "../../../"))
- path_list = [
- os.path.join(proj_root, "vta/vta-hw/config/vta_config.json")
- ]
- path_list = [p for p in path_list if os.path.exists(p)]
- if not path_list:
- raise RuntimeError(
- "Error: vta_config.json not found.")
- cfg = json.load(open(path_list[0]))
+ config_path = os.path.join(get_vta_hw_path(), "config/vta_config.json")
+ if not os.path.exists(config_path):
+ raise RuntimeError("Cannot find config in %s" % str(config_path))
+ cfg = json.load(open(config_path))
return Environment(cfg)
Environment.current = _init_env()
from tvm.contrib import cc
from vta import program_bitstream
-from ..environment import get_env
-from ..pkg_config import PkgConfig
+from ..environment import get_env, pkg_config
from ..libinfo import find_libvta
raise RuntimeError("Can only reconfig in the beginning of session...")
cfg = json.loads(cfg_json)
cfg["TARGET"] = env.TARGET
- pkg = PkgConfig(cfg, proj_root)
+ pkg = pkg_config(cfg)
# check if the configuration is already the same
if os.path.isfile(cfg_path):
old_cfg = json.loads(open(cfg_path, "r").read())
import sys
import os
+from .environment import get_vta_hw_path
+
def _get_lib_name(lib_name):
"""Get lib name with extension
"""
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
lib_search = [os.path.join(curr_path, "..", "..", "..", "build",)]
- lib_search += [os.path.join(curr_path, "..", "..", "vta-hw", "build")]
+ lib_search += [os.path.join(get_vta_hw_path(), "build")]
lib_name = _get_lib_name(lib_vta)
lib_path = [os.path.join(x, lib_name) for x in lib_search]
lib_found = [x for x in lib_path if os.path.exists(x)]
if env.TARGET == "tsim":
lib_hw = find_libvta("libvta_hw", optional=True)
- assert lib_hw # make sure to build vta/vta-hw/hardware/chisel
+ assert lib_hw # make sure to make in ${VTA_HW_PATH}/hardware/chisel
try:
f = tvm.get_global_func("vta.tsim.init")
m = tvm.runtime.load_module(lib_hw[0], "vta-tsim")
cmake_minimum_required(VERSION 3.2)
project(tsim C CXX)
-set(TVM_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../)
-set(VTA_HW_DIR ${TVM_DIR}/3rdparty/vta-hw)
+if(NOT DEFINED ENV{TVM_PATH})
+  message(FATAL_ERROR "Make sure to set TVM_PATH in your environment")
+endif()
+
+if(NOT DEFINED ENV{VTA_HW_PATH})
+  message(FATAL_ERROR "Make sure to set VTA_HW_PATH in your environment")
+endif()
-include_directories("${TVM_DIR}/include")
-include_directories("${TVM_DIR}/3rdparty/dlpack/include")
-include_directories("${TVM_DIR}/3rdparty/dmlc-core/include")
-include_directories("${VTA_HW_DIR}/src/dpi")
+include_directories("$ENV{TVM_PATH}/include")
+include_directories("$ENV{TVM_PATH}/3rdparty/dlpack/include")
+include_directories("$ENV{TVM_PATH}/3rdparty/dmlc-core/include")
+include_directories("$ENV{VTA_HW_PATH}/src/dpi")
set(CMAKE_C_FLAGS "-O2 -Wall -fPIC -fvisibility=hidden")
set(CMAKE_CXX_FLAGS "-O2 -Wall -fPIC -fvisibility=hidden -std=c++11")
endif()
file(GLOB TSIM_SW_SRC src/driver.cc)
-list(APPEND TSIM_SW_SRC ${VTA_HW_DIR}/src/vmem/virtual_memory.cc)
-list(APPEND TSIM_SW_SRC ${VTA_HW_DIR}/src/dpi/module.cc)
+list(APPEND TSIM_SW_SRC $ENV{VTA_HW_PATH}/src/vmem/virtual_memory.cc)
+list(APPEND TSIM_SW_SRC $ENV{VTA_HW_PATH}/src/dpi/module.cc)
add_library(sw SHARED ${TSIM_SW_SRC})
-target_include_directories(sw PRIVATE ${VTA_HW_DIR}/include ${VTA_HW_DIR}/src)
+target_include_directories(sw PRIVATE $ENV{VTA_HW_PATH}/include $ENV{VTA_HW_PATH}/src)
if(APPLE)
set_target_properties(sw PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
# under the License.
export PYTHONPATH:=$(abspath .)/python:$(PYTHONPATH)
-export PYTHONPATH:=$(abspath .)/../../../../python:$(PYTHONPATH)
BUILD_NAME = build
build_dir = $(abspath .)/$(BUILD_NAME)
VTA TSIM Application
======================
-Prior to this application, please take a look at `<tvm-root>/vta/vta-hw/apps/tsim_example` for installation
+Prior to this application, please take a look at `<vta-hw-root>/apps/tsim_example` for installation
This is an application that performs Bit Serial Multiplication for GEMM utilizing TSIM.
**Bit Serial Multiplication for GEMM:**
We can sufficiently reduce the cycles required to perform a gemm given that the data bit width is small. This GEMM application uses TSIM for future accelerator prototypes.
* Test Chisel3 backend with bit serial GEMM
- * Go to `<tvm-root>/vta/vta-hw/apps/gemm`
+ * Go to `<vta-hw-root>/apps/gemm`
* Run `make`
* If you have already compiled chisel backend (i.e. ran `make`)
* Bit parallel test with another input set, run `make parallel`
* Some steps for creating your own custom TSIM application
- * Go to `<tvm-root>/vta/vta-hw/apps/gemm`
+ * Go to `<vta-hw-root>/apps/gemm`
* Create custom circuit within `./hardware/chisel/src/scala.main/accel/Compute.scala`
* Map the according Registers in `./hardware/chisel/src/scala.main/accel/RegFile.scala`
* Create your test script
* Map the registers in `./src/driver.cc` and link it with both `RegFile.scala` and the test script
- * Understanding of `<tvm-root>/vta/vta-hw/apps/tsim_example`, which performs add by one to a vector, is highly encouraged to create a more complex application
+ * Understanding of `<vta-hw-root>/apps/tsim_example`, which performs add by one to a vector, is highly encouraged to create a more complex application
* Some pointers
- * Chisel3 tests in `<tvm-root>/vta/vta-hw/apps/gemm/tests/python`
- * Chisel3 accelerator backend `<tvm-root>/vta/vta-hw/apps/gemm/hardware/chisel`
- * Software C++ driver (backend) that handles the accelerator `<tvm-root>/vta/vta-hw/apps/gemm/src/driver.cc`
- * Software Python driver (frontend) that handles the accelerator `<tvm-root>vtay/vta-hw/apps/gemm/python/accel`
+ * Chisel3 tests in `<vta-hw-root>/apps/gemm/tests/python`
+ * Chisel3 accelerator backend `<vta-hw-root>/apps/gemm/hardware/chisel`
+ * Software C++ driver (backend) that handles the accelerator `<vta-hw-root>/apps/gemm/src/driver.cc`
+ * Software Python driver (frontend) that handles the accelerator `<vta-hw-root>/apps/gemm/python/accel`
cmake_minimum_required(VERSION 3.2)
project(tsim C CXX)
-set(TVM_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../)
-set(VTA_HW_DIR ${TVM_DIR}/vta/vta-hw)
+if(NOT DEFINED ENV{TVM_PATH})
+  message(FATAL_ERROR "Make sure to set TVM_PATH in your environment")
+endif()
+
+if(NOT DEFINED ENV{VTA_HW_PATH})
+  message(FATAL_ERROR "Make sure to set VTA_HW_PATH in your environment")
+endif()
-include_directories("${TVM_DIR}/include")
-include_directories("${TVM_DIR}/3rdparty/dlpack/include")
-include_directories("${TVM_DIR}/3rdparty/dmlc-core/include")
-include_directories("${VTA_HW_DIR}/src/dpi")
+include_directories("$ENV{TVM_PATH}/include")
+include_directories("$ENV{TVM_PATH}/3rdparty/dlpack/include")
+include_directories("$ENV{TVM_PATH}/3rdparty/dmlc-core/include")
+include_directories("$ENV{VTA_HW_PATH}/src/dpi")
set(CMAKE_C_FLAGS "-O2 -Wall -fPIC -fvisibility=hidden")
set(CMAKE_CXX_FLAGS "-O2 -Wall -fPIC -fvisibility=hidden -std=c++11")
endif()
file(GLOB TSIM_SW_SRC src/driver.cc)
-list(APPEND TSIM_SW_SRC ${VTA_HW_DIR}/src/vmem/virtual_memory.cc)
-list(APPEND TSIM_SW_SRC ${VTA_HW_DIR}/src/dpi/module.cc)
+list(APPEND TSIM_SW_SRC $ENV{VTA_HW_PATH}/src/vmem/virtual_memory.cc)
+list(APPEND TSIM_SW_SRC $ENV{VTA_HW_PATH}/src/dpi/module.cc)
add_library(sw SHARED ${TSIM_SW_SRC})
-target_include_directories(sw PRIVATE ${VTA_HW_DIR}/include ${VTA_HW_DIR}/src)
+target_include_directories(sw PRIVATE $ENV{VTA_HW_PATH}/include $ENV{VTA_HW_PATH}/src)
if(APPLE)
set_target_properties(sw PROPERTIES LINK_FLAGS "-undefined dynamic_lookup")
how to run both of them:
* Test Verilog backend
- * Go to `<tvm-root>/vta/vta-hw/apps/tsim_example`
+ * Go to `<vta-hw-root>/apps/tsim_example`
* Run `make`
* Test Chisel3 backend
- * Go to `<tvm-root>/vta/vta-hw/apps/tsim_example`
+ * Go to `<vta-hw-root>/apps/tsim_example`
* Run `make run_chisel`
* Some pointers
- * Verilog and Chisel3 tests in `<tvm-root>/vta/vta-hw/apps/tsim_example/tests/python`
- * Verilog accelerator backend `<tvm-root>/vta/vta-hw/apps/tsim_example/hardware/verilog`
- * Chisel3 accelerator backend `<tvm-root>/vta/vta-hw/apps/tsim_example/hardware/chisel`
- * Software C++ driver (backend) that handles the accelerator `<tvm-root>/vta/vta-hw/apps/tsim_example/src/driver.cc`
- * Software Python driver (frontend) that handles the accelerator `<tvm-root>vtay/vta-hw/apps/tsim_example/python/accel`
+ * Verilog and Chisel3 tests in `<vta-hw-root>/apps/tsim_example/tests/python`
+ * Verilog accelerator backend `<vta-hw-root>/apps/tsim_example/hardware/verilog`
+ * Chisel3 accelerator backend `<vta-hw-root>/apps/tsim_example/hardware/chisel`
+ * Software C++ driver (backend) that handles the accelerator `<vta-hw-root>/apps/tsim_example/src/driver.cc`
+ * Software Python driver (frontend) that handles the accelerator `<vta-hw-root>/apps/tsim_example/python/accel`
import json
import glob
+import os
+
+
+def get_vta_hw_path():
+ """Get the VTA HW path."""
+ curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
+ vta_hw_default = os.path.abspath(os.path.join(curr_path, ".."))
+ VTA_HW_PATH = os.getenv('VTA_HW_PATH', vta_hw_default)
+ return VTA_HW_PATH
+
+def get_tvm_path():
+ """Get the TVM path."""
+ curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
+ tvm_default = os.path.abspath(os.path.join(curr_path, "../../.."))
+ TVM_PATH = os.getenv('TVM_PATH', tvm_default)
+ return TVM_PATH
class PkgConfig(object):
"""Simple package config tool for VTA.
----------
cfg : dict
The config dictionary
-
- proj_root : str
- Path to the project root
"""
cfg_keys = [
"TARGET",
"LOG_ACC_BUFF_SIZE",
]
- def __init__(self, cfg, proj_root):
+ def __init__(self, cfg):
# Derived parameters
cfg["LOG_BLOCK_IN"] = cfg["LOG_BLOCK"]
# Update cfg now that we've extended it
self.__dict__.update(cfg)
+ # VTA_HW path and TVM_PATH
+ vta_hw_path = get_vta_hw_path()
+ tvm_path = get_tvm_path()
+
# Include path
self.include_path = [
- "-I%s/include" % proj_root,
- "-I%s/vta/vta-hw/include" % proj_root,
- "-I%s/3rdparty/dlpack/include" % proj_root,
- "-I%s/3rdparty/dmlc-core/include" % proj_root
+ "-I%s/include" % tvm_path,
+ "-I%s/include" % vta_hw_path,
+ "-I%s/3rdparty/dlpack/include" % tvm_path,
+ "-I%s/3rdparty/dmlc-core/include" % tvm_path
]
# List of source files that can be used to build standalone library.
self.lib_source = []
- self.lib_source += glob.glob("%s/vta/vta-hw/src/*.cc" % proj_root)
+ self.lib_source += glob.glob("%s/src/*.cc" % vta_hw_path)
if self.TARGET in ["pynq", "ultra96"]:
# add pynq drivers for any board that uses pynq driver stack (see pynq.io)
- self.lib_source += glob.glob("%s/vta/vta-hw/src/pynq/*.cc" % (proj_root))
+ self.lib_source += glob.glob("%s/src/pynq/*.cc" % vta_hw_path)
elif self.TARGET in ["de10nano"]:
- self.lib_source += glob.glob("%s/vta/vta-hw/src/de10nano/*.cc" % (proj_root))
+ self.lib_source += glob.glob("%s/src/de10nano/*.cc" % vta_hw_path)
self.include_path += [
- "-I%s/vta/vta-hw/src/de10nano" % proj_root,
- "-I%s/3rdparty" % proj_root
+ "-I%s/src/de10nano" % vta_hw_path,
+ "-I%s/3rdparty" % tvm_path
]
# Linker flags
import json
import argparse
-def get_pkg_config(cfg):
- """Get the pkg config object."""
- curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
- proj_root = os.path.abspath(os.path.join(curr_path, "../../../"))
- pkg_config_py = os.path.join(proj_root, "vta/python/vta/pkg_config.py")
+
+def pkg_config(cfg):
+ """Returns PkgConfig pkg config object."""
+ pkg_config_py = os.path.join(
+ os.path.dirname(os.path.abspath(os.path.expanduser(__file__))),
+ "pkg_config.py"
+ )
libpkg = {"__file__": pkg_config_py}
exec(compile(open(pkg_config_py, "rb").read(), pkg_config_py, "exec"), libpkg, libpkg)
PkgConfig = libpkg["PkgConfig"]
- return PkgConfig(cfg, proj_root)
+ return PkgConfig(cfg)
def main():
"""Main funciton"""
parser.print_help()
return
- curr_path = os.path.dirname(
- os.path.abspath(os.path.expanduser(__file__)))
- proj_root = os.path.abspath(os.path.join(curr_path, "../../../"))
- path_list = [
- os.path.join(proj_root, "vta/vta-hw/config/vta_config.json")
- ]
- if args.use_cfg:
- path_list = [args.use_cfg]
- ok_path_list = [p for p in path_list if os.path.exists(p)]
- if not ok_path_list:
- raise RuntimeError("Cannot find config in %s" % str(path_list))
- cfg = json.load(open(ok_path_list[0]))
-
- pkg = get_pkg_config(cfg)
+    # Path to vta config; honor --use-cfg when given (cmake passes --use-cfg=<path>)
+    config_path = args.use_cfg if args.use_cfg else "vta_config.json"
+    if not os.path.exists(config_path):
+        raise RuntimeError("Cannot find config in %s" % str(config_path))
+    cfg = json.load(open(config_path))
+
+ pkg = pkg_config(cfg)
if args.target:
print(pkg.TARGET)