TensorFlow 2.16.1 - Ubuntu 24.04

##################
CUDA and cuDNN versions tested with each TensorFlow release:
https://www.tensorflow.org/install/source#gpu_support_2

TensorRT support for TensorFlow 2.16.1:
https://github.com/tensorflow/tensorflow/issues/61468
##################


##################
sudo apt update

sudo apt upgrade

sudo apt install build-essential

wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh

bash ./Miniconda3-latest-Linux-x86_64.sh
##################


##################
wget https://developer.download.nvidia.com/compute/cuda/12.1.1/local_installers/cuda_12.1.1_530.30.02_linux.run

sudo sh cuda_12.1.1_530.30.02_linux.run


nano ~/.bashrc

add the following lines at the end:

export PATH=/usr/local/cuda-12.1/bin${PATH:+:${PATH}}
export LD_LIBRARY_PATH=/usr/local/cuda-12.1/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}

source ~/.bashrc


sudo nano /etc/ld.so.conf

add the following line:

/usr/local/cuda-12.1/lib64

sudo ldconfig

echo $PATH

echo $LD_LIBRARY_PATH

sudo ldconfig -p | grep cuda

nvcc --version
##################


##################
Download cuDNN 8.9.7 for CUDA 12.x from the cuDNN archive:
https://developer.nvidia.com/rdp/cudnn-archive
##################


tar -xvf cudnn-linux-x86_64-8.9.7.29_cuda12-archive.tar.xz

cd cudnn-linux-x86_64-8.9.7.29_cuda12-archive


sudo cp include/cudnn*.h /usr/local/cuda-12.1/include

sudo cp lib/libcudnn* /usr/local/cuda-12.1/lib64

sudo chmod a+r /usr/local/cuda-12.1/include/cudnn*.h /usr/local/cuda-12.1/lib64/libcudnn*

cd ..
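
Optional sanity check: cuDNN 8.x keeps its version macros in cudnn_version.h, which the cp above should have placed in the CUDA include directory, so the installed version can be read straight from the header:

grep -A 2 "#define CUDNN_MAJOR" /usr/local/cuda-12.1/include/cudnn_version.h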
ls -l /usr/local/cuda-12.1/lib64/libcudnn*


Test cuDNN:

nano test_cudnn.c

// test_cudnn.c
#include <stdio.h>
#include <cudnn.h>

int main() {
    cudnnHandle_t handle;
    cudnnStatus_t status = cudnnCreate(&handle);
    if (status == CUDNN_STATUS_SUCCESS) {
        printf("cuDNN successfully initialized.\n");
        cudnnDestroy(handle);
    } else {
        printf("cuDNN initialization failed: %s\n", cudnnGetErrorString(status));
    }
    return 0;
}


gcc -o test_cudnn test_cudnn.c -I/usr/local/cuda-12.1/include -L/usr/local/cuda-12.1/lib64 -lcudnn

./test_cudnn


##################
Download TensorRT 8.6.1 (TAR package for Linux x86_64, CUDA 12.x):
https://developer.nvidia.com/tensorrt/download
##################


tar -xzvf TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-12.0.tar.gz

sudo mv TensorRT-8.6.1.6 /usr/local/TensorRT-8.6.1


nano ~/.bashrc

update the two export lines so they also include TensorRT:

export PATH=/usr/local/cuda-12.1/bin:/usr/local/TensorRT-8.6.1/bin:$PATH
export LD_LIBRARY_PATH=/usr/local/cuda-12.1/lib64:/usr/local/TensorRT-8.6.1/lib:$LD_LIBRARY_PATH

source ~/.bashrc

sudo ldconfig


The cp above dereferenced the cuDNN symlinks, leaving full duplicate copies of each *.so.8; remove them and recreate proper symlinks (substitute the installed version for 8.x.x and repeat the ln -s for each libcudnn library):

sudo rm /usr/local/cuda-12.1/targets/x86_64-linux/lib/libcudnn*.so.8

sudo ln -s /usr/local/cuda-12.1/targets/x86_64-linux/lib/libcudnn_adv_infer.so.8.x.x /usr/local/cuda-12.1/targets/x86_64-linux/lib/libcudnn_adv_infer.so.8


conda create --name tf python=3.9

conda activate tf


pip install tensorflow[and-cuda]

python -c "import tensorflow as tf; print(tf.config.list_physical_devices('GPU'))"


cd /usr/local/TensorRT-8.6.1/python

pip install tensorrt-8.6.1-cp39-none-linux_x86_64.whl

pip install tensorrt_dispatch-8.6.1-cp39-none-linux_x86_64.whl

pip install tensorrt_lean-8.6.1-cp39-none-linux_x86_64.whl


pip install jupyterlab

jupyter lab


nvidia-smi
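
Optional end-to-end check (assuming the tf environment is still active): print the CUDA/cuDNN versions the pip TensorFlow package was built against and confirm the TensorRT Python bindings import cleanly:

python -c "import tensorflow as tf; print(tf.sysconfig.get_build_info())"

python -c "import tensorrt as trt; print(trt.__version__)"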