workspace(name = "Torch-TensorRT")

load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
    name = "rules_python",
    sha256 = "863ba0fa944319f7e3d695711427d9ad80ba92c6edd0b7c7443b84e904689539",
    strip_prefix = "rules_python-0.22.0",
    url = "https://github.com/bazelbuild/rules_python/releases/download/0.22.0/rules_python-0.22.0.tar.gz",
)

load("@rules_python//python:repositories.bzl", "py_repositories")

py_repositories()

http_archive(
    name = "rules_pkg",
    sha256 = "8f9ee2dc10c1ae514ee599a8b42ed99fa262b757058f65ad3c384289ff70c4b8",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.9.1/rules_pkg-0.9.1.tar.gz",
        "https://github.com/bazelbuild/rules_pkg/releases/download/0.9.1/rules_pkg-0.9.1.tar.gz",
    ],
)

load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")

rules_pkg_dependencies()

http_archive(
    name = "googletest",
    sha256 = "755f9a39bc7205f5a0c428e920ddad092c33c8a1b46997def3f1d4a82aded6e1",
    strip_prefix = "googletest-5ab508a01f9eb089207ee87fd547d290da39d015",
    urls = ["https://github.com/google/googletest/archive/5ab508a01f9eb089207ee87fd547d290da39d015.zip"],
)

# External dependency for torch_tensorrt if you already have precompiled binaries.
#local_repository(
#    name = "torch_tensorrt",
#    path = "/opt/conda/lib/python3.8/site-packages/torch_tensorrt",
#)

# CUDA should be installed on the system locally
new_local_repository(
    name = "cuda",
    build_file = "@//third_party/cuda:BUILD",
    path = "/usr/local/cuda/",
)

new_local_repository(
    name = "cublas",
    build_file = "@//third_party/cublas:BUILD",
    path = "/usr",
)

####################################################################################
# Locally installed dependencies (use in cases of custom dependencies or aarch64)
####################################################################################

# NOTE: If you are using only the pre-cxx11-abi path or only the cxx11 ABI path
# with your local libtorch, point both dependencies at the same path to satisfy bazel.

# NOTE: NVIDIA's aarch64 PyTorch (python) wheel file uses the CXX11 ABI, unlike PyTorch's
# standard x86_64 python distribution. If using NVIDIA's version, point both entries here
# to the root of the package and do not use --config=pre-cxx11-abi.

new_local_repository(
    name = "libtorch",
    #path = "/usr/local/lib/python3.8/dist-packages/torch",
    path = "/home/br/github/torch/pytorch/torch",
    build_file = "third_party/libtorch/BUILD",
)

# NOTE: Unused on aarch64-jetson with the NVIDIA-provided PyTorch distribution
new_local_repository(
    name = "libtorch_pre_cxx11_abi",
    #path = "/usr/local/lib/python3.8/dist-packages/torch",
    path = "/home/br/github/torch/pytorch/torch",
    build_file = "third_party/libtorch/BUILD",
)

new_local_repository(
    name = "cudnn",
    path = "/usr/",
    build_file = "@//third_party/cudnn/local:BUILD",
)

new_local_repository(
    name = "tensorrt",
    path = "/usr/",
    build_file = "@//third_party/tensorrt/local:BUILD",
)

#########################################################################
# Development Dependencies (optional - comment out on aarch64)
#########################################################################

#load("@rules_python//python:pip.bzl", "pip_parse")

#pip_parse(
#    name = "devtools_deps",
#    requirements = "//:requirements-dev.txt",
#)
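
# A minimal sketch of how the optional pip_parse setup above is typically completed with
# rules_python: pip_parse generates the @devtools_deps repository, whose requirements.bzl
# exposes install_deps(), which fetches each package listed in requirements-dev.txt.
# The "devtools_deps" name simply mirrors the commented pip_parse call above; uncomment
# these lines together with it (and comment them out again on aarch64).
#load("@devtools_deps//:requirements.bzl", "install_deps")
#install_deps()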