- Python packages: onnx >= 1.14, onnxsim, numpy, torch (optional, needed for model export only)
- C++ libraries: CUDA, cuBLAS, cuRAND
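All four Python packages are published on PyPI under the same names, so a straightforward (unpinned) way to install them is the one-liner below; skip torch if you only compile existing ONNX models:

```bash
pip install "onnx>=1.14" onnxsim numpy torch
```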
- Build the C++ dependency

```bash
mkdir build
g++ mc/operators/cuda/cublas.cpp -o build/cublas_util -lcublas -lcudart -lcublasLt -lcurand
```
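If g++ cannot find the CUDA headers or libraries on its default search paths, the same command may need explicit include and library directories. Assuming a standard CUDA installation under /usr/local/cuda (an assumption, adjust to your setup), that would look like:

```bash
mkdir -p build
g++ mc/operators/cuda/cublas.cpp -o build/cublas_util \
    -I/usr/local/cuda/include -L/usr/local/cuda/lib64 \
    -lcublas -lcudart -lcublasLt -lcurand
```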
- Add this repository to PYTHONPATH

```bash
cd MCCompiler
export PYTHONPATH=`pwd`:$PYTHONPATH
```
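A quick way to confirm the path is picked up (assuming the importable package is the mc/ directory referenced in the build step above) is:

```bash
python3 -c "import mc; print(mc.__file__)"
```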
- Export a model to ONNX format and save sample inputs/outputs to io_dir

```bash
python3 example/mm_trans.py --onnx tmp/test.onnx --io_dir tmp/test
```
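To export your own model instead of the bundled example, the script only needs to write an ONNX file plus matching sample tensors into --io_dir. The sketch below is a hypothetical stand-in for example/mm_trans.py, not its actual contents; the .npy file names and the exact io_dir layout MCCompiler expects are assumptions.

```python
# Hypothetical export script (NOT the real example/mm_trans.py).
# Exports a small matmul + transpose model and saves a sample
# input/output pair; the .npy names used for io_dir are assumptions.
import os

import numpy as np
import torch


class MMTrans(torch.nn.Module):
    def forward(self, x, w):
        return (x @ w).transpose(0, 1)


def export(onnx_path: str, io_dir: str) -> None:
    os.makedirs(os.path.dirname(onnx_path) or ".", exist_ok=True)
    os.makedirs(io_dir, exist_ok=True)

    model = MMTrans().eval()
    x = torch.randn(64, 128)
    w = torch.randn(128, 256)

    # Export the graph to ONNX.
    torch.onnx.export(model, (x, w), onnx_path,
                      input_names=["x", "w"], output_names=["y"])

    # Save a reference input/output pair so the compiled model can be checked.
    np.save(os.path.join(io_dir, "x.npy"), x.numpy())
    np.save(os.path.join(io_dir, "w.npy"), w.numpy())
    with torch.no_grad():
        np.save(os.path.join(io_dir, "y.npy"), model(x, w).numpy())


if __name__ == "__main__":
    export("tmp/test.onnx", "tmp/test")
```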
- Compile and run the model

```bash
python3 run.py --onnx tmp/test.onnx --io_dir tmp/test
```
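onnxsim appears in the dependency list, presumably because the compiler simplifies the graph before lowering it. If run.py rejects a model, one way to rule out a malformed export is to check and simplify the ONNX file manually with the packages already installed; the paths below match the example above, and writing to a separate tmp/test_sim.onnx is just a cautious choice:

```python
# Optional sanity check on the exported model before (re)compiling.
import onnx
from onnxsim import simplify

model = onnx.load("tmp/test.onnx")
onnx.checker.check_model(model)   # raises if the graph is malformed

simplified, ok = simplify(model)  # fold constants, remove dead nodes
assert ok, "onnx-simplifier could not validate the simplified model"
onnx.save(simplified, "tmp/test_sim.onnx")
```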